Mirror of https://github.com/golang/tools.git
gopls/internal/golang: semtok: use type information consistently
Previously there were two competing mechanisms for annotating identifiers: the syntactic traversal and type information. This meant that identifiers in different grammatical places were annotated differently. This change annotates identifiers using type information exclusively.

In addition to making things consistent, it also means that types are reported independently (using modifiers) of symbol kind, so, for example, a "var x func()" is a Variable with modifier Signature, indicating that its type is a function. Also, the rules for "defaultLibrary", "readonly", and so on are more simply and consistently enforced. The "deprecated" modifier is, however, lost as a consequence, as it relied on the syntax.

+ Tests for both issues.

Also, use an enum for all the Modifiers.

Also, document the complete current set of token types and modifiers that gopls returns.

Fixes golang/go#66809
Fixes golang/go#70251

Change-Id: I15e59d10f5a9269bc6be87f30e3502a9054d88e7
Reviewed-on: https://go-review.googlesource.com/c/tools/+/626279
Reviewed-by: Robert Findley <rfindley@google.com>
Reviewed-by: Peter Weinberger <pjw@google.com>
LUCI-TryBot-Result: Go LUCI <golang-scoped@luci-project-accounts.iam.gserviceaccount.com>
This commit is contained in:
Parent
fd8d0289c0
Commit
e26dff9fb9
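The practical effect shows up in the updated test expectations below. As a rough illustration, assembled from the command-line test in this change (what a client actually receives also depends on which token types and modifiers it requests):

package a

func f() {}  // now reported as function [definition signature]       (previously [definition])

var v int    // now reported as variable [definition number]          (previously [definition])

const c = 0  // now reported as variable [definition readonly number] (previously [definition readonly])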
@@ -211,9 +211,46 @@ a portion of it.
 The client may use this information to provide syntax highlighting
 that conveys semantic distinctions between, for example, functions and
 types, constants and variables, or library functions and built-ins.
-Gopls also reports a modifier for the top-level constructor of each symbol's type, one of:
-`interface`, `struct`, `signature`, `pointer`, `array`, `map`, `slice`, `chan`, `string`, `number`, `bool`, `invalid`.
-The client specifies the sets of types and modifiers it is interested in.
+The client must specify the sets of types and modifiers it is interested in.
+
+Gopls reports the following token types:
+
+- `"comment"`: a comment
+- `"function"`: a function
+- `"keyword"`: a keyword
+- `"label"`: a control label (not an LSP standard type)
+- `"macro"`: text/template tokens
+- `"method"`: a method
+- `"namespace"`: an imported package name
+- `"number"`: a numeric literal
+- `"operator"`: an operator
+- `"parameter"`: a parameter variable
+- `"string"`: a string literal
+- `"type"`: a type name (plus other uses)
+- `"typeParameter"`: a type parameter
+- `"variable"`: a var or const (see `readonly` modifier)
+
+Gopls also reports the following standard modifiers:
+
+- `"defaultLibrary"`: predeclared symbols
+- `"definition"`: the declaring identifier of a symbol
+- `"readonly"`: for constants
+
+plus these non-standard modifiers, each representing the top-level
+constructor of each symbol's type:
+
+- `"array"`
+- `"bool"`
+- `"chan"`
+- `"interface"`
+- `"map"`
+- `"number"`
+- `"pointer"`
+- `"signature"`
+- `"slice"`
+- `"string"`
+- `"struct"`
+
 Settings:
 - The [`semanticTokens`](../settings.md#semanticTokens) setting determines whether
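As a concrete illustration of the token types and modifiers documented above, here is a small annotated Go file using the //@token marker notation from the tests in this change (a sketch assembled from those tests, not an exhaustive listing; the map case follows the appendTypeModifiers rules added below):

package p

const C = 42         //@token("C", "variable", "definition readonly number")

var V func()         //@token("V", "variable", "definition signature")

var M map[string]int //@token("M", "variable", "definition map")

var _ any            //@token("any", "type", "defaultLibrary interface")

func F() {}          //@token("F", "function", "definition signature")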
@@ -818,9 +818,9 @@ const c = 0
	got := res.stdout
	want := `
/*⇒7,keyword,[]*/package /*⇒1,namespace,[]*/a
-/*⇒4,keyword,[]*/func /*⇒1,function,[definition]*/f()
-/*⇒3,keyword,[]*/var /*⇒1,variable,[definition]*/v /*⇒3,type,[defaultLibrary number]*/int
-/*⇒5,keyword,[]*/const /*⇒1,variable,[definition readonly]*/c = /*⇒1,number,[]*/0
+/*⇒4,keyword,[]*/func /*⇒1,function,[definition signature]*/f()
+/*⇒3,keyword,[]*/var /*⇒1,variable,[definition number]*/v /*⇒3,type,[defaultLibrary number]*/int
+/*⇒5,keyword,[]*/const /*⇒1,variable,[definition readonly number]*/c = /*⇒1,number,[]*/0
`[1:]
	if got != want {
		t.Errorf("semtok: got <<%s>>, want <<%s>>", got, want)
@@ -109,9 +109,9 @@ type tokenVisitor struct {
 func (tv *tokenVisitor) visit() {
 	f := tv.pgf.File
 	// may not be in range, but harmless
-	tv.token(f.Package, len("package"), semtok.TokKeyword, nil)
+	tv.token(f.Package, len("package"), semtok.TokKeyword)
 	if f.Name != nil {
-		tv.token(f.Name.NamePos, len(f.Name.Name), semtok.TokNamespace, nil)
+		tv.token(f.Name.NamePos, len(f.Name.Name), semtok.TokNamespace)
 	}
 	for _, decl := range f.Decls {
 		// Only look at the decls that overlap the range.
@@ -208,21 +208,6 @@ func (tv *tokenVisitor) comment(c *ast.Comment, importByName map[string]*types.P
 		}
 	}
 
-	tokenTypeByObject := func(obj types.Object) (semtok.TokenType, []string) {
-		switch obj.(type) {
-		case *types.PkgName:
-			return semtok.TokNamespace, nil
-		case *types.Func:
-			return semtok.TokFunction, nil
-		case *types.TypeName:
-			return semtok.TokType, appendTypeModifiers(nil, obj)
-		case *types.Const, *types.Var:
-			return semtok.TokVariable, nil
-		default:
-			return semtok.TokComment, nil
-		}
-	}
-
 	pos := c.Pos()
 	for _, line := range strings.Split(c.Text, "\n") {
 		last := 0
@@ -232,32 +217,32 @@ func (tv *tokenVisitor) comment(c *ast.Comment, importByName map[string]*types.P
 			name := line[idx[2]:idx[3]]
 			if objs := lookupObjects(name); len(objs) > 0 {
 				if last < idx[2] {
-					tv.token(pos+token.Pos(last), idx[2]-last, semtok.TokComment, nil)
+					tv.token(pos+token.Pos(last), idx[2]-last, semtok.TokComment)
 				}
 				offset := pos + token.Pos(idx[2])
 				for i, obj := range objs {
 					if i > 0 {
-						tv.token(offset, len("."), semtok.TokComment, nil)
+						tv.token(offset, len("."), semtok.TokComment)
 						offset += token.Pos(len("."))
 					}
 					id, rest, _ := strings.Cut(name, ".")
 					name = rest
-					tok, mods := tokenTypeByObject(obj)
-					tv.token(offset, len(id), tok, mods)
+					tok, mods := tv.appendObjectModifiers(nil, obj)
+					tv.token(offset, len(id), tok, mods...)
 					offset += token.Pos(len(id))
 				}
 				last = idx[3]
 			}
 		}
 		if last != len(c.Text) {
-			tv.token(pos+token.Pos(last), len(line)-last, semtok.TokComment, nil)
+			tv.token(pos+token.Pos(last), len(line)-last, semtok.TokComment)
 		}
 		pos += token.Pos(len(line) + 1)
 	}
 }
 
 // token emits a token of the specified extent and semantics.
-func (tv *tokenVisitor) token(start token.Pos, length int, typ semtok.TokenType, modifiers []string) {
+func (tv *tokenVisitor) token(start token.Pos, length int, typ semtok.TokenType, modifiers ...semtok.Modifier) {
 	if !start.IsValid() {
 		return
 	}
@@ -338,7 +323,7 @@ func (tv *tokenVisitor) inspect(n ast.Node) (descend bool) {
 	switch n := n.(type) {
 	case *ast.ArrayType:
 	case *ast.AssignStmt:
-		tv.token(n.TokPos, len(n.Tok.String()), semtok.TokOperator, nil)
+		tv.token(n.TokPos, len(n.Tok.String()), semtok.TokOperator)
 	case *ast.BasicLit:
 		if strings.Contains(n.Value, "\n") {
 			// has to be a string.
@@ -349,123 +334,119 @@ func (tv *tokenVisitor) inspect(n ast.Node) (descend bool) {
 		if n.Kind == token.STRING {
 			what = semtok.TokString
 		}
-		tv.token(n.Pos(), len(n.Value), what, nil)
+		tv.token(n.Pos(), len(n.Value), what)
 	case *ast.BinaryExpr:
-		tv.token(n.OpPos, len(n.Op.String()), semtok.TokOperator, nil)
+		tv.token(n.OpPos, len(n.Op.String()), semtok.TokOperator)
 	case *ast.BlockStmt:
 	case *ast.BranchStmt:
-		tv.token(n.TokPos, len(n.Tok.String()), semtok.TokKeyword, nil)
-		if n.Label != nil {
-			tv.token(n.Label.Pos(), len(n.Label.Name), semtok.TokLabel, nil)
-		}
+		tv.token(n.TokPos, len(n.Tok.String()), semtok.TokKeyword)
 	case *ast.CallExpr:
 		if n.Ellipsis.IsValid() {
-			tv.token(n.Ellipsis, len("..."), semtok.TokOperator, nil)
+			tv.token(n.Ellipsis, len("..."), semtok.TokOperator)
 		}
 	case *ast.CaseClause:
 		iam := "case"
 		if n.List == nil {
 			iam = "default"
 		}
-		tv.token(n.Case, len(iam), semtok.TokKeyword, nil)
+		tv.token(n.Case, len(iam), semtok.TokKeyword)
 	case *ast.ChanType:
 		// chan | chan <- | <- chan
 		switch {
 		case n.Arrow == token.NoPos:
-			tv.token(n.Begin, len("chan"), semtok.TokKeyword, nil)
+			tv.token(n.Begin, len("chan"), semtok.TokKeyword)
 		case n.Arrow == n.Begin:
-			tv.token(n.Arrow, 2, semtok.TokOperator, nil)
+			tv.token(n.Arrow, 2, semtok.TokOperator)
 			pos := tv.findKeyword("chan", n.Begin+2, n.Value.Pos())
-			tv.token(pos, len("chan"), semtok.TokKeyword, nil)
+			tv.token(pos, len("chan"), semtok.TokKeyword)
 		case n.Arrow != n.Begin:
-			tv.token(n.Begin, len("chan"), semtok.TokKeyword, nil)
-			tv.token(n.Arrow, 2, semtok.TokOperator, nil)
+			tv.token(n.Begin, len("chan"), semtok.TokKeyword)
+			tv.token(n.Arrow, 2, semtok.TokOperator)
 		}
 	case *ast.CommClause:
 		length := len("case")
 		if n.Comm == nil {
 			length = len("default")
 		}
-		tv.token(n.Case, length, semtok.TokKeyword, nil)
+		tv.token(n.Case, length, semtok.TokKeyword)
 	case *ast.CompositeLit:
 	case *ast.DeclStmt:
 	case *ast.DeferStmt:
-		tv.token(n.Defer, len("defer"), semtok.TokKeyword, nil)
+		tv.token(n.Defer, len("defer"), semtok.TokKeyword)
 	case *ast.Ellipsis:
-		tv.token(n.Ellipsis, len("..."), semtok.TokOperator, nil)
+		tv.token(n.Ellipsis, len("..."), semtok.TokOperator)
 	case *ast.EmptyStmt:
 	case *ast.ExprStmt:
 	case *ast.Field:
 	case *ast.FieldList:
 	case *ast.ForStmt:
-		tv.token(n.For, len("for"), semtok.TokKeyword, nil)
+		tv.token(n.For, len("for"), semtok.TokKeyword)
 	case *ast.FuncDecl:
 	case *ast.FuncLit:
 	case *ast.FuncType:
 		if n.Func != token.NoPos {
-			tv.token(n.Func, len("func"), semtok.TokKeyword, nil)
+			tv.token(n.Func, len("func"), semtok.TokKeyword)
 		}
 	case *ast.GenDecl:
-		tv.token(n.TokPos, len(n.Tok.String()), semtok.TokKeyword, nil)
+		tv.token(n.TokPos, len(n.Tok.String()), semtok.TokKeyword)
 	case *ast.GoStmt:
-		tv.token(n.Go, len("go"), semtok.TokKeyword, nil)
+		tv.token(n.Go, len("go"), semtok.TokKeyword)
 	case *ast.Ident:
 		tv.ident(n)
 	case *ast.IfStmt:
-		tv.token(n.If, len("if"), semtok.TokKeyword, nil)
+		tv.token(n.If, len("if"), semtok.TokKeyword)
 		if n.Else != nil {
 			// x.Body.End() or x.Body.End()+1, not that it matters
 			pos := tv.findKeyword("else", n.Body.End(), n.Else.Pos())
-			tv.token(pos, len("else"), semtok.TokKeyword, nil)
+			tv.token(pos, len("else"), semtok.TokKeyword)
 		}
 	case *ast.ImportSpec:
 		tv.importSpec(n)
 		return false
 	case *ast.IncDecStmt:
-		tv.token(n.TokPos, len(n.Tok.String()), semtok.TokOperator, nil)
+		tv.token(n.TokPos, len(n.Tok.String()), semtok.TokOperator)
 	case *ast.IndexExpr:
 	case *ast.IndexListExpr:
 	case *ast.InterfaceType:
-		tv.token(n.Interface, len("interface"), semtok.TokKeyword, nil)
+		tv.token(n.Interface, len("interface"), semtok.TokKeyword)
 	case *ast.KeyValueExpr:
 	case *ast.LabeledStmt:
-		tv.token(n.Label.Pos(), len(n.Label.Name), semtok.TokLabel, []string{"definition"})
 	case *ast.MapType:
-		tv.token(n.Map, len("map"), semtok.TokKeyword, nil)
+		tv.token(n.Map, len("map"), semtok.TokKeyword)
 	case *ast.ParenExpr:
 	case *ast.RangeStmt:
-		tv.token(n.For, len("for"), semtok.TokKeyword, nil)
+		tv.token(n.For, len("for"), semtok.TokKeyword)
 		// x.TokPos == token.NoPos is legal (for range foo {})
 		offset := n.TokPos
 		if offset == token.NoPos {
 			offset = n.For
 		}
 		pos := tv.findKeyword("range", offset, n.X.Pos())
-		tv.token(pos, len("range"), semtok.TokKeyword, nil)
+		tv.token(pos, len("range"), semtok.TokKeyword)
 	case *ast.ReturnStmt:
-		tv.token(n.Return, len("return"), semtok.TokKeyword, nil)
+		tv.token(n.Return, len("return"), semtok.TokKeyword)
 	case *ast.SelectStmt:
-		tv.token(n.Select, len("select"), semtok.TokKeyword, nil)
+		tv.token(n.Select, len("select"), semtok.TokKeyword)
 	case *ast.SelectorExpr:
 	case *ast.SendStmt:
-		tv.token(n.Arrow, len("<-"), semtok.TokOperator, nil)
+		tv.token(n.Arrow, len("<-"), semtok.TokOperator)
 	case *ast.SliceExpr:
 	case *ast.StarExpr:
-		tv.token(n.Star, len("*"), semtok.TokOperator, nil)
+		tv.token(n.Star, len("*"), semtok.TokOperator)
 	case *ast.StructType:
-		tv.token(n.Struct, len("struct"), semtok.TokKeyword, nil)
+		tv.token(n.Struct, len("struct"), semtok.TokKeyword)
 	case *ast.SwitchStmt:
-		tv.token(n.Switch, len("switch"), semtok.TokKeyword, nil)
+		tv.token(n.Switch, len("switch"), semtok.TokKeyword)
 	case *ast.TypeAssertExpr:
 		if n.Type == nil {
 			pos := tv.findKeyword("type", n.Lparen, n.Rparen)
-			tv.token(pos, len("type"), semtok.TokKeyword, nil)
+			tv.token(pos, len("type"), semtok.TokKeyword)
 		}
 	case *ast.TypeSpec:
 	case *ast.TypeSwitchStmt:
-		tv.token(n.Switch, len("switch"), semtok.TokKeyword, nil)
+		tv.token(n.Switch, len("switch"), semtok.TokKeyword)
 	case *ast.UnaryExpr:
-		tv.token(n.OpPos, len(n.Op.String()), semtok.TokOperator, nil)
+		tv.token(n.OpPos, len(n.Op.String()), semtok.TokOperator)
 	case *ast.ValueSpec:
 	// things only seen with parsing or type errors, so ignore them
 	case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
@@ -482,40 +463,94 @@ func (tv *tokenVisitor) inspect(n ast.Node) (descend bool) {
 	return true
 }
 
+func (tv *tokenVisitor) appendObjectModifiers(mods []semtok.Modifier, obj types.Object) (semtok.TokenType, []semtok.Modifier) {
+	if obj.Pkg() == nil {
+		mods = append(mods, semtok.ModDefaultLibrary)
+	}
+
+	// Note: PkgName, Builtin, Label have type Invalid, which adds no modifiers.
+	mods = appendTypeModifiers(mods, obj.Type())
+
+	switch obj := obj.(type) {
+	case *types.PkgName:
+		return semtok.TokNamespace, mods
+
+	case *types.Builtin:
+		return semtok.TokFunction, mods
+
+	case *types.Func:
+		if obj.Signature().Recv() != nil {
+			return semtok.TokMethod, mods
+		} else {
+			return semtok.TokFunction, mods
+		}
+
+	case *types.TypeName:
+		if is[*types.TypeParam](types.Unalias(obj.Type())) {
+			return semtok.TokTypeParam, mods
+		}
+		return semtok.TokType, mods
+
+	case *types.Const:
+		mods = append(mods, semtok.ModReadonly)
+		return semtok.TokVariable, mods
+
+	case *types.Var:
+		if tv.isParam(obj.Pos()) {
+			return semtok.TokParameter, mods
+		} else {
+			return semtok.TokVariable, mods
+		}
+
+	case *types.Label:
+		return semtok.TokLabel, mods
+
+	case *types.Nil:
+		mods = append(mods, semtok.ModReadonly)
+		return semtok.TokVariable, mods
+	}
+
+	panic(obj)
+}
+
 // appendTypeModifiers appends optional modifiers that describe the top-level
-// type constructor of obj.Type(): "pointer", "map", etc.
-func appendTypeModifiers(mods []string, obj types.Object) []string {
-	switch t := obj.Type().Underlying().(type) {
+// type constructor of t: "pointer", "map", etc.
+func appendTypeModifiers(mods []semtok.Modifier, t types.Type) []semtok.Modifier {
+	// For a type parameter, don't report "interface".
+	if is[*types.TypeParam](types.Unalias(t)) {
+		return mods
+	}
+
+	switch t := t.Underlying().(type) {
 	case *types.Interface:
-		mods = append(mods, "interface")
+		mods = append(mods, semtok.ModInterface)
 	case *types.Struct:
-		mods = append(mods, "struct")
+		mods = append(mods, semtok.ModStruct)
 	case *types.Signature:
-		mods = append(mods, "signature")
+		mods = append(mods, semtok.ModSignature)
 	case *types.Pointer:
-		mods = append(mods, "pointer")
+		mods = append(mods, semtok.ModPointer)
 	case *types.Array:
-		mods = append(mods, "array")
+		mods = append(mods, semtok.ModArray)
 	case *types.Map:
-		mods = append(mods, "map")
+		mods = append(mods, semtok.ModMap)
 	case *types.Slice:
-		mods = append(mods, "slice")
+		mods = append(mods, semtok.ModSlice)
 	case *types.Chan:
-		mods = append(mods, "chan")
+		mods = append(mods, semtok.ModChan)
 	case *types.Basic:
-		mods = append(mods, "defaultLibrary")
 		switch t.Kind() {
 		case types.Invalid:
-			mods = append(mods, "invalid")
+			// ignore (e.g. Builtin, PkgName, Label)
 		case types.String:
-			mods = append(mods, "string")
+			mods = append(mods, semtok.ModString)
 		case types.Bool:
-			mods = append(mods, "bool")
+			mods = append(mods, semtok.ModBool)
 		case types.UnsafePointer:
-			mods = append(mods, "pointer")
+			mods = append(mods, semtok.ModPointer)
 		default:
 			if t.Info()&types.IsNumeric != 0 {
-				mods = append(mods, "number")
+				mods = append(mods, semtok.ModNumber)
 			}
 		}
 	}
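To see what "top-level constructor of a symbol's type" means in practice, the following standalone sketch type-checks a tiny package with go/types and classifies each defined object the same way appendTypeModifiers does (a simplified, assumption-laden illustration, not gopls code; the helper name topLevelConstructor is invented here):

package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package p
var V func()
var M map[string]int
const C = 42
`

// topLevelConstructor mirrors the idea of appendTypeModifiers above:
// classify a type by the constructor of its underlying type.
func topLevelConstructor(t types.Type) string {
	switch t := t.Underlying().(type) {
	case *types.Signature:
		return "signature"
	case *types.Map:
		return "map"
	case *types.Basic:
		if t.Info()&types.IsNumeric != 0 {
			return "number"
		}
	}
	return ""
}

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	info := &types.Info{Defs: make(map[*ast.Ident]types.Object)}
	conf := types.Config{Importer: importer.Default()}
	if _, err := conf.Check("p", fset, []*ast.File{f}, info); err != nil {
		panic(err)
	}
	// Prints, in some order: V: signature, M: map, C: number.
	for id, obj := range info.Defs {
		if obj == nil {
			continue // e.g. the identifier in the package clause
		}
		fmt.Printf("%s: %s\n", id.Name, topLevelConstructor(obj.Type()))
	}
}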
@@ -523,76 +558,38 @@ func appendTypeModifiers(mods []string, obj types.Object) []string {
 }
 
 func (tv *tokenVisitor) ident(id *ast.Ident) {
-	var obj types.Object
-
-	// emit emits a token for the identifier's extent.
-	emit := func(tok semtok.TokenType, modifiers ...string) {
-		tv.token(id.Pos(), len(id.Name), tok, modifiers)
-		if semDebug {
-			q := "nil"
-			if obj != nil {
-				q = fmt.Sprintf("%T", obj.Type()) // e.g. "*types.Map"
-			}
-			log.Printf("  use %s/%T/%s got %s %v (%s)",
-				id.Name, obj, q, tok, modifiers, tv.strStack())
-		}
-	}
-
-	// definition?
-	obj = tv.info.Defs[id]
-	if obj != nil {
-		if tok, modifiers := tv.definitionFor(id, obj); tok != "" {
-			emit(tok, modifiers...)
-		} else if semDebug {
-			log.Printf(" for %s/%T/%T got '' %v (%s)",
-				id.Name, obj, obj.Type(), modifiers, tv.strStack())
-		}
-		return
-	}
-
-	// use?
-	obj = tv.info.Uses[id]
-	switch obj := obj.(type) {
-	case *types.Builtin:
-		emit(semtok.TokFunction, "defaultLibrary")
-	case *types.Const:
-		if is[*types.Basic](obj.Type()) &&
-			(id.Name == "iota" || id.Name == "true" || id.Name == "false") {
-			emit(semtok.TokVariable, "readonly", "defaultLibrary")
-		} else {
-			emit(semtok.TokVariable, "readonly")
-		}
-	case *types.Func:
-		emit(semtok.TokFunction)
-	case *types.Label:
-		// Labels are reliably covered by the syntax traversal.
-	case *types.Nil:
-		// nil is a predeclared identifier
-		emit(semtok.TokVariable, "readonly", "defaultLibrary")
-	case *types.PkgName:
-		emit(semtok.TokNamespace)
-	case *types.TypeName: // could be a TypeParam
-		if is[*types.TypeParam](types.Unalias(obj.Type())) {
-			emit(semtok.TokTypeParam)
-		} else {
-			emit(semtok.TokType, appendTypeModifiers(nil, obj)...)
-		}
-	case *types.Var:
-		if is[*types.Signature](types.Unalias(obj.Type())) {
-			emit(semtok.TokFunction)
-		} else if tv.isParam(obj.Pos()) {
-			// variable, unless use.pos is the pos of a Field in an ancestor FuncDecl
-			// or FuncLit and then it's a parameter
-			emit(semtok.TokParameter)
-		} else {
-			emit(semtok.TokVariable)
-		}
-	case nil:
-		if tok, modifiers := tv.unkIdent(id); tok != "" {
-			emit(tok, modifiers...)
-		}
-	default:
-		panic(obj)
+	var (
+		tok  semtok.TokenType
+		mods []semtok.Modifier
+		obj  types.Object
+		ok   bool
+	)
+	if obj, ok = tv.info.Defs[id]; obj != nil {
+		// definition
+		mods = append(mods, semtok.ModDefinition)
+		tok, mods = tv.appendObjectModifiers(mods, obj)
+
+	} else if obj, ok = tv.info.Uses[id]; ok {
+		// use
+		tok, mods = tv.appendObjectModifiers(mods, obj)
+
+	} else if tok, mods = tv.unkIdent(id); tok != "" {
+		// ok
+
+	} else {
+		return
+	}
+
+	// Emit a token for the identifier's extent.
+	tv.token(id.Pos(), len(id.Name), tok, mods...)
+
+	if semDebug {
+		q := "nil"
+		if obj != nil {
+			q = fmt.Sprintf("%T", obj.Type()) // e.g. "*types.Map"
+		}
+		log.Printf("  use %s/%T/%s got %s %v (%s)",
+			id.Name, obj, q, tok, mods, tv.strStack())
 	}
 }
 
@@ -626,8 +623,8 @@ func (tv *tokenVisitor) isParam(pos token.Pos) bool {
 // def), use the parse stack.
 // A lot of these only happen when the package doesn't compile,
 // but in that case it is all best-effort from the parse tree.
-func (tv *tokenVisitor) unkIdent(id *ast.Ident) (semtok.TokenType, []string) {
-	def := []string{"definition"}
+func (tv *tokenVisitor) unkIdent(id *ast.Ident) (semtok.TokenType, []semtok.Modifier) {
+	def := []semtok.Modifier{semtok.ModDefinition}
 	n := len(tv.stack) - 2 // parent of Ident; stack is [File ... Ident]
 	if n < 0 {
 		tv.errorf("no stack") // can't happen
@@ -748,115 +745,6 @@ func (tv *tokenVisitor) unkIdent(id *ast.Ident) (semtok.TokenType, []string) {
 	return "", nil
 }
 
-func isDeprecated(n *ast.CommentGroup) bool {
-	if n != nil {
-		for _, c := range n.List {
-			if strings.HasPrefix(c.Text, "// Deprecated") {
-				return true
-			}
-		}
-	}
-	return false
-}
-
-// definitionFor handles a defining identifier.
-func (tv *tokenVisitor) definitionFor(id *ast.Ident, obj types.Object) (semtok.TokenType, []string) {
-	// The definition of a types.Label cannot be found by
-	// ascending the syntax tree, and doing so will reach the
-	// FuncDecl, causing us to misinterpret the label as a
-	// parameter (#65494).
-	//
-	// However, labels are reliably covered by the syntax
-	// traversal, so we don't need to use type information.
-	if is[*types.Label](obj) {
-		return "", nil
-	}
-
-	// PJW: look into replacing these syntactic tests with types more generally
-	modifiers := []string{"definition"}
-	for i := len(tv.stack) - 1; i >= 0; i-- {
-		switch ancestor := tv.stack[i].(type) {
-		case *ast.AssignStmt, *ast.RangeStmt:
-			if id.Name == "_" {
-				return "", nil // not really a variable
-			}
-			return semtok.TokVariable, modifiers
-		case *ast.GenDecl:
-			if isDeprecated(ancestor.Doc) {
-				modifiers = append(modifiers, "deprecated")
-			}
-			if ancestor.Tok == token.CONST {
-				modifiers = append(modifiers, "readonly")
-			}
-			return semtok.TokVariable, modifiers
-		case *ast.FuncDecl:
-			// If x is immediately under a FuncDecl, it is a function or method
-			if i == len(tv.stack)-2 {
-				if isDeprecated(ancestor.Doc) {
-					modifiers = append(modifiers, "deprecated")
-				}
-				if ancestor.Recv != nil {
-					return semtok.TokMethod, modifiers
-				}
-				return semtok.TokFunction, modifiers
-			}
-			// if x < ... < FieldList < FuncDecl, this is the receiver, a variable
-			// PJW: maybe not. it might be a typeparameter in the type of the receiver
-			if is[*ast.FieldList](tv.stack[i+1]) {
-				if is[*types.TypeName](obj) {
-					return semtok.TokTypeParam, modifiers
-				}
-				return semtok.TokVariable, nil
-			}
-			// if x < ... < FieldList < FuncType < FuncDecl, this is a param
-			return semtok.TokParameter, modifiers
-		case *ast.FuncType:
-			if isTypeParam(id, ancestor) {
-				return semtok.TokTypeParam, modifiers
-			}
-			return semtok.TokParameter, modifiers
-		case *ast.InterfaceType:
-			return semtok.TokMethod, modifiers
-		case *ast.TypeSpec:
-			// GenDecl/Typespec/FuncType/FieldList/Field/Ident
-			// (type A func(b uint64)) (err error)
-			// b and err should not be semtok.TokType, but semtok.TokVariable
-			// and in GenDecl/TpeSpec/StructType/FieldList/Field/Ident
-			// (type A struct{b uint64}
-			// but on type B struct{C}), C is a type, but is not being defined.
-			// GenDecl/TypeSpec/FieldList/Field/Ident is a typeParam
-			if is[*ast.FieldList](tv.stack[i+1]) {
-				return semtok.TokTypeParam, modifiers
-			}
-			fldm := tv.stack[len(tv.stack)-2]
-			if fld, ok := fldm.(*ast.Field); ok {
-				// if len(fld.names) == 0 this is a semtok.TokType, being used
-				if len(fld.Names) == 0 {
-					return semtok.TokType, appendTypeModifiers(nil, obj)
-				}
-				return semtok.TokVariable, modifiers
-			}
-			return semtok.TokType, appendTypeModifiers(modifiers, obj)
-		}
-	}
-	// can't happen
-	tv.errorf("failed to find the decl for %s", safetoken.Position(tv.pgf.Tok, id.Pos()))
-	return "", nil
-}
-
-func isTypeParam(id *ast.Ident, t *ast.FuncType) bool {
-	if tp := t.TypeParams; tp != nil {
-		for _, p := range tp.List {
-			for _, n := range p.Names {
-				if id == n {
-					return true
-				}
-			}
-		}
-	}
-	return false
-}
-
 // multiline emits a multiline token (`string` or /*comment*/).
 func (tv *tokenVisitor) multiline(start, end token.Pos, tok semtok.TokenType) {
 	// TODO(adonovan): test with non-ASCII.
@@ -875,13 +763,13 @@ func (tv *tokenVisitor) multiline(start, end token.Pos, tok semtok.TokenType) {
 	sline := spos.Line
 	eline := epos.Line
 	// first line is from spos.Column to end
-	tv.token(start, length(sline)-spos.Column, tok, nil) // leng(sline)-1 - (spos.Column-1)
+	tv.token(start, length(sline)-spos.Column, tok) // leng(sline)-1 - (spos.Column-1)
 	for i := sline + 1; i < eline; i++ {
 		// intermediate lines are from 1 to end
-		tv.token(f.LineStart(i), length(i)-1, tok, nil) // avoid the newline
+		tv.token(f.LineStart(i), length(i)-1, tok) // avoid the newline
 	}
 	// last line is from 1 to epos.Column
-	tv.token(f.LineStart(eline), epos.Column-1, tok, nil) // columns are 1-based
+	tv.token(f.LineStart(eline), epos.Column-1, tok) // columns are 1-based
 }
 
 // findKeyword returns the position of a keyword by searching within
@@ -907,7 +795,7 @@ func (tv *tokenVisitor) importSpec(spec *ast.ImportSpec) {
 	if spec.Name != nil {
 		name := spec.Name.String()
 		if name != "_" && name != "." {
-			tv.token(spec.Name.Pos(), len(name), semtok.TokNamespace, nil)
+			tv.token(spec.Name.Pos(), len(name), semtok.TokNamespace)
 		}
 		return // don't mark anything for . or _
 	}
@@ -933,7 +821,7 @@ func (tv *tokenVisitor) importSpec(spec *ast.ImportSpec) {
 	}
 	// Report virtual declaration at the position of the substring.
 	start := spec.Path.Pos() + token.Pos(j)
-	tv.token(start, len(depMD.Name), semtok.TokNamespace, nil)
+	tv.token(start, len(depMD.Name), semtok.TokNamespace)
 }
 
 // errorf logs an error and reports a bug.
@@ -968,19 +856,19 @@ func (tv *tokenVisitor) godirective(c *ast.Comment) {
 	kind, _ := stringsCutPrefix(directive, "//go:")
 	if _, ok := godirectives[kind]; !ok {
 		// Unknown 'go:' directive.
-		tv.token(c.Pos(), len(c.Text), semtok.TokComment, nil)
+		tv.token(c.Pos(), len(c.Text), semtok.TokComment)
 		return
 	}
 
 	// Make the 'go:directive' part stand out, the rest is comments.
-	tv.token(c.Pos(), len("//"), semtok.TokComment, nil)
+	tv.token(c.Pos(), len("//"), semtok.TokComment)
 
 	directiveStart := c.Pos() + token.Pos(len("//"))
-	tv.token(directiveStart, len(directive[len("//"):]), semtok.TokNamespace, nil)
+	tv.token(directiveStart, len(directive[len("//"):]), semtok.TokNamespace)
 
 	if len(args) > 0 {
 		tailStart := c.Pos() + token.Pos(len(directive)+len(" "))
-		tv.token(tailStart, len(args), semtok.TokComment, nil)
+		tv.token(tailStart, len(args), semtok.TokComment)
 	}
 }
@@ -12,33 +12,79 @@ type Token struct {
 	Line, Start uint32
 	Len         uint32
 	Type        TokenType
-	Modifiers   []string
+	Modifiers   []Modifier
 }
 
 type TokenType string
 
 const (
-	// These are the tokens defined by LSP 3.17, but a client is
+	// These are the tokens defined by LSP 3.18, but a client is
 	// free to send its own set; any tokens that the server emits
 	// that are not in this set are simply not encoded in the bitfield.
-	TokNamespace TokenType = "namespace"
-	TokType      TokenType = "type"
-	TokInterface TokenType = "interface"
-	TokTypeParam TokenType = "typeParameter"
-	TokParameter TokenType = "parameter"
-	TokVariable  TokenType = "variable"
-	TokMethod    TokenType = "method"
-	TokFunction  TokenType = "function"
-	TokKeyword   TokenType = "keyword"
-	TokComment   TokenType = "comment"
-	TokString    TokenType = "string"
-	TokNumber    TokenType = "number"
-	TokOperator  TokenType = "operator"
-	TokMacro     TokenType = "macro" // for templates
-
-	// not part of LSP 3.17 (even though JS has labels)
-	// https://github.com/microsoft/vscode-languageserver-node/issues/1422
-	TokLabel TokenType = "label"
+	//
+	// If you add or uncomment a token type, document it in
+	// gopls/doc/features/passive.md#semantic-tokens.
+	TokComment   TokenType = "comment"       // for a comment
+	TokFunction  TokenType = "function"      // for a function
+	TokKeyword   TokenType = "keyword"       // for a keyword
+	TokLabel     TokenType = "label"         // for a control label (LSP 3.18)
+	TokMacro     TokenType = "macro"         // for text/template tokens
+	TokMethod    TokenType = "method"        // for a method
+	TokNamespace TokenType = "namespace"     // for an imported package name
+	TokNumber    TokenType = "number"        // for a numeric literal
+	TokOperator  TokenType = "operator"      // for an operator
+	TokParameter TokenType = "parameter"     // for a parameter variable
+	TokString    TokenType = "string"        // for a string literal
+	TokType      TokenType = "type"          // for a type name (plus other uses)
+	TokTypeParam TokenType = "typeParameter" // for a type parameter
+	TokVariable  TokenType = "variable"      // for a var or const
+	// TokClass      TokenType = "class"
+	// TokDecorator  TokenType = "decorator"
+	// TokEnum       TokenType = "enum"
+	// TokEnumMember TokenType = "enumMember"
+	// TokEvent      TokenType = "event"
+	// TokInterface  TokenType = "interface"
+	// TokModifier   TokenType = "modifier"
+	// TokProperty   TokenType = "property"
+	// TokRegexp     TokenType = "regexp"
+	// TokStruct     TokenType = "struct"
 )
 
+type Modifier string
+
+const (
+	// LSP 3.18 standard modifiers
+	// As with TokenTypes, clients get only the modifiers they request.
+	//
+	// If you add or uncomment a modifier, document it in
+	// gopls/doc/features/passive.md#semantic-tokens.
+	ModDefaultLibrary Modifier = "defaultLibrary" // for predeclared symbols
+	ModDefinition     Modifier = "definition"     // for the declaring identifier of a symbol
+	ModReadonly       Modifier = "readonly"       // for constants (TokVariable)
+	// ModAbstract      Modifier = "abstract"
+	// ModAsync         Modifier = "async"
+	// ModDeclaration   Modifier = "declaration"
+	// ModDeprecated    Modifier = "deprecated"
+	// ModDocumentation Modifier = "documentation"
+	// ModModification  Modifier = "modification"
+	// ModStatic        Modifier = "static"
+
+	// non-standard modifiers
+	//
+	// Since the type of a symbol is orthogonal to its kind,
+	// (e.g. a variable can have function type),
+	// we use modifiers for the top-level type constructor.
+	ModArray     Modifier = "array"
+	ModBool      Modifier = "bool"
+	ModChan      Modifier = "chan"
+	ModInterface Modifier = "interface"
+	ModMap       Modifier = "map"
+	ModNumber    Modifier = "number"
+	ModPointer   Modifier = "pointer"
+	ModSignature Modifier = "signature" // for function types
+	ModSlice     Modifier = "slice"
+	ModString    Modifier = "string"
+	ModStruct    Modifier = "struct"
+)
+
 // Encode returns the LSP encoding of a sequence of tokens.
@@ -62,9 +108,9 @@ func Encode(
 		typeMap[TokenType(t)] = i
 	}
 
-	modMap := make(map[string]int)
+	modMap := make(map[Modifier]int)
 	for i, m := range modifiers {
-		modMap[m] = 1 << uint(i) // go 1.12 compatibility
+		modMap[Modifier(m)] = 1 << i
 	}
 
 	// each semantic token needs five values
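For context on how these names reach the client: the LSP encoding sends, for each token, an index into the client's requested token-type list and a bit-set of its requested modifiers, which is what the modMap above builds. A minimal sketch of that bit-set step (hypothetical helper for illustration, not the real Encode):

package main

import "fmt"

// encodeModifiers ORs together one bit per requested modifier.
// Modifiers the client did not request map to 0 and are silently dropped,
// matching the "not encoded in the bitfield" comment in the semtok package.
func encodeModifiers(requested []string, mods []string) int {
	bit := make(map[string]int)
	for i, m := range requested {
		bit[m] = 1 << i
	}
	x := 0
	for _, m := range mods {
		x |= bit[m]
	}
	return x
}

func main() {
	requested := []string{"definition", "readonly", "number"}
	fmt.Println(encodeModifiers(requested, []string{"definition", "number"})) // 5 (bits 0 and 2)
}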
@@ -53,23 +53,23 @@ func TestSemantic_2527(t *testing.T) {
 	want := []fake.SemanticToken{
 		{Token: "package", TokenType: "keyword"},
 		{Token: "foo", TokenType: "namespace"},
-		{Token: "// Deprecated (for testing)", TokenType: "comment"},
+		{Token: "// comment", TokenType: "comment"},
 		{Token: "func", TokenType: "keyword"},
-		{Token: "Add", TokenType: "function", Mod: "definition deprecated"},
+		{Token: "Add", TokenType: "function", Mod: "definition signature"},
 		{Token: "T", TokenType: "typeParameter", Mod: "definition"},
 		{Token: "int", TokenType: "type", Mod: "defaultLibrary number"},
 		{Token: "target", TokenType: "parameter", Mod: "definition"},
 		{Token: "T", TokenType: "typeParameter"},
-		{Token: "l", TokenType: "parameter", Mod: "definition"},
+		{Token: "l", TokenType: "parameter", Mod: "definition slice"},
 		{Token: "T", TokenType: "typeParameter"},
 		{Token: "T", TokenType: "typeParameter"},
 		{Token: "return", TokenType: "keyword"},
 		{Token: "append", TokenType: "function", Mod: "defaultLibrary"},
-		{Token: "l", TokenType: "parameter"},
+		{Token: "l", TokenType: "parameter", Mod: "slice"},
 		{Token: "target", TokenType: "parameter"},
 		{Token: "for", TokenType: "keyword"},
 		{Token: "range", TokenType: "keyword"},
-		{Token: "l", TokenType: "parameter"},
+		{Token: "l", TokenType: "parameter", Mod: "slice"},
 		{Token: "// test coverage", TokenType: "comment"},
 		{Token: "return", TokenType: "keyword"},
 		{Token: "nil", TokenType: "variable", Mod: "readonly defaultLibrary"},
@@ -81,7 +81,7 @@ module example.com
 go 1.19
 -- main.go --
 package foo
-// Deprecated (for testing)
+// comment
 func Add[T int](target T, l []T) []T {
 	return append(l, target)
 	for range l {} // test coverage
@@ -167,18 +167,18 @@ func bar() {}
 		{Token: "go:linkname", TokenType: "namespace"},
 		{Token: "now time.Now", TokenType: "comment"},
 		{Token: "func", TokenType: "keyword"},
-		{Token: "now", TokenType: "function", Mod: "definition"},
+		{Token: "now", TokenType: "function", Mod: "definition signature"},
 
 		{Token: "//", TokenType: "comment"},
 		{Token: "go:noinline", TokenType: "namespace"},
 		{Token: "func", TokenType: "keyword"},
-		{Token: "foo", TokenType: "function", Mod: "definition"},
+		{Token: "foo", TokenType: "function", Mod: "definition signature"},
 
 		{Token: "// Mentioning go:noinline should not tokenize.", TokenType: "comment"},
 
 		{Token: "//go:notadirective", TokenType: "comment"},
 		{Token: "func", TokenType: "keyword"},
-		{Token: "bar", TokenType: "function", Mod: "definition"},
+		{Token: "bar", TokenType: "function", Mod: "definition signature"},
 	}
 
 	WithOptions(
@@ -17,5 +17,5 @@ func _() {
 }
 
 const (
-	c = iota //@ token("iota", "variable", "readonly defaultLibrary")
+	c = iota //@ token("iota", "variable", "readonly defaultLibrary number")
 )
@@ -21,21 +21,21 @@ var B = 2
 type Foo int
 
 
-// [F] accept a [Foo], and print it. //@token("F", "function", ""),token("Foo", "type", "defaultLibrary number")
+// [F] accept a [Foo], and print it. //@token("F", "function", "signature"),token("Foo", "type", "number")
 func F(v Foo) {
 	println(v)
 
 }
 
 /*
-[F1] print [A] and [B] //@token("F1", "function", ""),token("A", "variable", ""),token("B", "variable", "")
+[F1] print [A] and [B] //@token("F1", "function", "signature"),token("A", "variable", "readonly number"),token("B", "variable", "number")
 */
 func F1() {
-	// print [A] and [B]. //@token("A", "variable", ""),token("B", "variable", "")
+	// print [A] and [B]. //@token("A", "variable", "readonly number"),token("B", "variable", "number")
 	println(A, B)
 }
 
-// [F2] use [strconv.Atoi] convert s, then print it //@token("F2", "function", ""),token("strconv", "namespace", ""),token("Atoi", "function", "")
+// [F2] use [strconv.Atoi] convert s, then print it //@token("F2", "function", "signature"),token("strconv", "namespace", ""),token("Atoi", "function", "signature")
 func F2(s string) {
 	a, _ := strconv.Atoi("42")
 	b, _ := strconv.Atoi("42")

@@ -44,12 +44,12 @@ func F2(s string) {
 -- b.go --
 package p
 
-// [F3] accept [*Foo] //@token("F3", "function", ""),token("Foo", "type", "defaultLibrary number")
+// [F3] accept [*Foo] //@token("F3", "function", "signature"),token("Foo", "type", "number")
 func F3(v *Foo) {
 	println(*v)
 }
 
-// [F4] equal [strconv.Atoi] //@token("F4", "function", ""),token("strconv", "namespace", ""),token("Atoi", "function", "")
+// [F4] equal [strconv.Atoi] //@token("F4", "function", "signature"),token("strconv", "namespace", ""),token("Atoi", "function", "signature")
 func F4(s string) (int, error) {
 	return 0, nil
 }
@@ -0,0 +1,16 @@
+This is a regression test for #66809 (missing modifiers for
+declarations of function-type variables).
+
+-- settings.json --
+{
+	"semanticTokens": true
+}
+
+-- main.go --
+package main
+
+func main() {
+	foo := func(x string) string { return x } //@token("foo", "variable", "definition signature")
+	_ = foo //@token("foo", "variable", "signature")
+	foo("hello") //@token("foo", "variable", "signature")
+}
@@ -0,0 +1,13 @@
+This is a regression test for #70251 (missing modifiers for
+predeclared interfaces).
+
+-- settings.json --
+{
+	"semanticTokens": true
+}
+
+-- a/a.go --
+package a
+
+var _ any //@token("any", "type", "defaultLibrary interface")
+var _ error //@token("error", "type", "defaultLibrary interface")
@@ -10,12 +10,12 @@ TODO: add more assertions.
 -- a.go --
 package p //@token("package", "keyword", "")
 
-const C = 42 //@token("C", "variable", "definition readonly")
+const C = 42 //@token("C", "variable", "definition readonly number")
 
-func F() { //@token("F", "function", "definition")
-	x := 2 + 3//@token("x", "variable", "definition"),token("2", "number", ""),token("+", "operator", "")
-	_ = x //@token("x", "variable", "")
-	_ = F //@token("F", "function", "")
+func F() { //@token("F", "function", "definition signature")
+	x := 2 + 3//@token("x", "variable", "definition number"),token("2", "number", ""),token("+", "operator", "")
+	_ = x //@token("x", "variable", "number")
+	_ = F //@token("F", "function", "signature")
 }
 
 func _() {