// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package template

import (
	"bytes"
	"context"
	"fmt"
	"go/scanner"
	"go/token"
	"strings"

	"golang.org/x/tools/internal/lsp/protocol"
	"golang.org/x/tools/internal/lsp/source"
)

// completer holds the information needed to compute completions
type completer struct {
	p      *Parsed
	pos    protocol.Position
	offset int // offset just after the Token's Left delimiter, where the typed text starts
	ctx    protocol.CompletionContext
	syms   map[string]symbol
}

func Completion(ctx context.Context, snapshot source.Snapshot, fh source.VersionedFileHandle, pos protocol.Position, context protocol.CompletionContext) (*protocol.CompletionList, error) {
	all := New(snapshot.Templates())
	var start int // the beginning of the Token (completed or not)
	syms := make(map[string]symbol)
	var p *Parsed
	for fn, fc := range all.files {
		// collect symbols from all template files
		filterSyms(syms, fc.symbols)
		if fn.Filename() != fh.URI().Filename() {
			continue
		}
		if start = inTemplate(fc, pos); start == -1 {
			return nil, nil
		}
		p = fc
	}
	if p == nil {
		// this cannot happen unless the search missed a template file
		return nil, fmt.Errorf("%s not found", fh.FileIdentity().URI.Filename())
	}
	c := completer{
		p:      p,
		pos:    pos,
		offset: start + len(Left),
		ctx:    context,
		syms:   syms,
	}
	return c.complete()
}
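
// For example (hypothetical workspace): if the view contains a.tmpl and b.tmpl
// and the request position lies inside a {{...}} token in b.tmpl, symbols from
// both files are merged into syms, start is the offset of that token in b.tmpl,
// and c.offset points just past its Left delimiter, where the typed text begins.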

func filterSyms(syms map[string]symbol, ns []symbol) {
	for _, xsym := range ns {
		switch xsym.kind {
		case protocol.Method, protocol.Package, protocol.Boolean, protocol.Namespace,
			protocol.Function:
			syms[xsym.name] = xsym // we don't care which symbol we get
		case protocol.Variable:
			if xsym.name != "dot" {
				syms[xsym.name] = xsym
			}
		case protocol.Constant:
			if xsym.name == "nil" {
				syms[xsym.name] = xsym
			}
		}
	}
}
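
// For illustration (hypothetical symbols): given a Function "title", a Variable
// "$user", the implicit Variable "dot", and a Constant named something other
// than "nil", filterSyms keeps "title" and "$user" and drops the other two;
// only the Constant "nil" survives that case. Duplicate names across files
// collapse to a single entry, since any one of them will do.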

// inTemplate returns the start offset of the Token enclosing pos, or -1 if there is none
func inTemplate(fc *Parsed, pos protocol.Position) int {
	// pos counts characters: if the cursor is at the beginning of the file,
	// pos is 0, and only the characters before pos have been seen.
	// 1. pos may be inside a Token: return tk.Start
	// 2. pos may be after an elided Left but before the next Token: return the elided offset
	// 3. otherwise return -1
	offset := fc.FromPosition(pos)
	// this could be a binary search, as the tokens are ordered
	for _, tk := range fc.tokens {
		if tk.Start < offset && offset <= tk.End {
			return tk.Start
		}
	}
	for _, x := range fc.elided {
		if x > offset {
			// fc.elided is sorted
			break
		}
		// If the interval [x,offset] does not contain Left or Right
		// then provide completions. (do we need the test for Right?)
		if !bytes.Contains(fc.buf[x:offset], []byte(Left)) && !bytes.Contains(fc.buf[x:offset], []byte(Right)) {
			return x
		}
	}
	return -1
}
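
// For example, with hypothetical template text
//	<p>{{.Name}}</p>
// a cursor inside the {{.Name}} action (strictly after its first character)
// falls within that Token, so tk.Start, the offset of the {{, is returned;
// a cursor in the surrounding HTML matches neither a Token nor an elided
// position, so the result is -1.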

var (
	keywords = []string{"if", "with", "else", "block", "range", "template", "end}}", "end"}
	globals  = []string{"and", "call", "html", "index", "slice", "js", "len", "not", "or",
		"urlquery", "printf", "println", "print", "eq", "ne", "le", "lt", "ge", "gt"}
)

// complete computes the completions. c.offset is just past the Left delimiter of
// either the Token enclosing pos or the incomplete token the user is typing.
// The error result is always nil.
func (c *completer) complete() (*protocol.CompletionList, error) {
	ans := &protocol.CompletionList{IsIncomplete: true, Items: []protocol.CompletionItem{}}
	start := c.p.FromPosition(c.pos)
	sofar := c.p.buf[c.offset:start]
	if len(sofar) == 0 || sofar[len(sofar)-1] == ' ' || sofar[len(sofar)-1] == '\t' {
		return ans, nil
	}
	// sofar could be parsed by either c.analyze() or scan(). The latter is precise
	// and slower, but fast enough.
	words := scan(sofar)
	// 1. if the pattern starts with $, show variables
	// 2. if the pattern starts with ., show methods (and . by itself?)
	// 3. if len(words) == 1, show the keywords (but after a |, show functions and globals)
	// 4. ...? (parenthetical expressions, arguments, ...) (packages, namespaces, nil?)
	if len(words) == 0 {
		return nil, nil // if this happens, why were we called?
	}
	pattern := words[len(words)-1]
	if pattern[0] == '$' {
		// should we also return a raw "$"?
		for _, s := range c.syms {
			if s.kind == protocol.Variable && weakMatch(s.name, pattern) > 0 {
				ans.Items = append(ans.Items, protocol.CompletionItem{
					Label:  s.name,
					Kind:   protocol.VariableCompletion,
					Detail: "Variable",
				})
			}
		}
		return ans, nil
	}
	if pattern[0] == '.' {
		for _, s := range c.syms {
			if s.kind == protocol.Method && weakMatch("."+s.name, pattern) > 0 {
				ans.Items = append(ans.Items, protocol.CompletionItem{
					Label:  s.name,
					Kind:   protocol.MethodCompletion,
					Detail: "Method/member",
				})
			}
		}
		return ans, nil
	}
	// could we get completion attempts in strings or numbers, and if so, do we care?
	// globals
	for _, kw := range globals {
		if weakMatch(kw, pattern) != 0 {
			ans.Items = append(ans.Items, protocol.CompletionItem{
				Label:  kw,
				Kind:   protocol.KeywordCompletion,
				Detail: "Function",
			})
		}
	}
	// and functions
	for _, s := range c.syms {
		if s.kind == protocol.Function && weakMatch(s.name, pattern) != 0 {
			ans.Items = append(ans.Items, protocol.CompletionItem{
				Label:  s.name,
				Kind:   protocol.FunctionCompletion,
				Detail: "Function",
			})
		}
	}
	// keywords if we're at the beginning
	if len(words) <= 1 || len(words[len(words)-2]) == 1 && words[len(words)-2][0] == '|' {
		for _, kw := range keywords {
			if weakMatch(kw, pattern) != 0 {
				ans.Items = append(ans.Items, protocol.CompletionItem{
					Label:  kw,
					Kind:   protocol.KeywordCompletion,
					Detail: "keyword",
				})
			}
		}
	}
	return ans, nil
}
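
// For illustration (hypothetical input): after the user types "{{pri", words is
// ["pri"] and pattern is "pri", so the globals printf, println, and print are
// offered (labeled Function), and since there is only one word any keyword
// matching "pri" would be offered too (none is). After "{{.Na", the pattern
// starts with '.', so the known Method/member symbols weakly matching ".Na"
// are offered instead.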

// analyze splits buf on whitespace and before dots; it is the hand-rolled alternative to scan.
// Someday think about comments, strings, backslashes, etc.;
// this would repeat some of the template parsing, but because the user is typing
// there may be no parse tree here.
// (go/scanner reports 2 tokens for $a, as $ is not a legal Go identifier character,
// and is about 2.7 times more expensive.)
func (c *completer) analyze(buf []byte) [][]byte {
	// we want to split on whitespace and before dots
	var working []byte
	var ans [][]byte
	for _, ch := range buf {
		if ch == '.' && len(working) > 0 {
			ans = append(ans, working)
			working = []byte{'.'}
			continue
		}
		if ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r' {
			if len(working) > 0 {
				ans = append(ans, working)
				working = []byte{}
			}
			continue // never let whitespace become part of a word
		}
		working = append(working, ch)
	}
	if len(working) > 0 {
		ans = append(ans, working)
	}
	ch := buf[len(buf)-1]
	if ch == ' ' || ch == '\t' {
		// the user just typed whitespace: report it as a final word so
		// the caller knows not to offer completions here
		ans = append(ans, []byte{ch})
	}
	return ans
}
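
// For example, analyze([]byte("index .Members $var")) returns
// ["index", ".Members", "$var"] (as []byte slices): words break at whitespace
// and before dots, and a '$' simply stays attached to the identifier that
// follows it. If the input ends in a space or tab, that whitespace is appended
// as a final "word" instead.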

// scan is the precise version of c.analyze; it uses go/scanner.
func scan(buf []byte) []string {
	fset := token.NewFileSet()
	fp := fset.AddFile("", -1, len(buf))
	var sc scanner.Scanner
	sc.Init(fp, buf, func(pos token.Position, msg string) {}, scanner.ScanComments)
	ans := make([]string, 0, 10) // preallocating gives a measurable savings
	for {
		_, tok, lit := sc.Scan() // tok is a token.Token
		if tok == token.EOF {
			break // done
		} else if tok == token.SEMICOLON && lit == "\n" {
			continue // a semicolon inserted by the scanner at a newline or EOF; ignore it
		} else if tok == token.PERIOD {
			ans = append(ans, ".") // lit is empty
		} else if tok == token.IDENT && len(ans) > 0 && ans[len(ans)-1] == "." {
			ans[len(ans)-1] = "." + lit
		} else if tok == token.IDENT && len(ans) > 0 && ans[len(ans)-1] == "$" {
			ans[len(ans)-1] = "$" + lit
		} else if lit != "" {
			ans = append(ans, lit)
		}
	}
	return ans
}
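
// scanExample is a hypothetical, unused sketch showing what scan produces:
// "." and the (illegal for Go) "$" are merged with the identifier that follows
// them, and operator tokens, whose literal is empty, are dropped.
func scanExample() []string {
	return scan([]byte("index .Members $var"))
	// -> []string{"index", ".Members", "$var"}
}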

// pattern is what the user has typed
func weakMatch(choice, pattern string) float64 {
	lower := strings.ToLower(choice)
	// for now, use only lower-case everywhere
	pattern = strings.ToLower(pattern)
	// The first char has to match
	if pattern[0] != lower[0] {
		return 0
	}
	// If they start with ., then the second char has to match
	from := 1
	if pattern[0] == '.' {
		if len(pattern) < 2 {
			return 1 // pattern just a ., so it matches
		}
		if pattern[1] != lower[1] {
			return 0
		}
		from = 2
	}
	// check that all the characters of pattern occur, in order, as a subsequence of choice
	i, j := from, from
	for i < len(lower) && j < len(pattern) {
		if pattern[j] == lower[i] {
			j++ // this character of pattern has been found
		}
		i++
	}
	if j < len(pattern) {
		// some of pattern never occurred in choice
		return 0
	}
	return 1
}
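
// weakMatchExamples is a hypothetical, unused table illustrating the rule:
// the first character (and the second, after a '.') must match, and the rest
// of pattern must appear, in order, somewhere in choice (case-insensitively).
var weakMatchExamples = []struct {
	choice, pattern string
	want            float64
}{
	{"printf", "pf", 1}, // 'p' matches, then 'f' occurs later in "printf"
	{"printf", "q", 0},  // the first character must match
	{".Name", ".", 1},   // a bare "." matches any choice that starts with "."
	{"index", "ixz", 0}, // 'z' never appears after the matched 'x'
}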

// for debug printing
func strContext(c protocol.CompletionContext) string {
	switch c.TriggerKind {
	case protocol.Invoked:
		return "invoked"
	case protocol.TriggerCharacter:
		return fmt.Sprintf("triggered(%s)", c.TriggerCharacter)
	case protocol.TriggerForIncompleteCompletions:
		// gopls doesn't seem to handle these explicitly anywhere
		return "incomplete"
	}
	return fmt.Sprintf("?%v", c)
}