internal/lsp: refactor source package to use an interface

This change separates a cache package out of the
golang.org/x/tools/internal/lsp/source package. The source package now
uses an interface instead of a File struct, which will allow it to be
reused more easily. The cache package now contains the View and File structs.

Change-Id: Ia2114e9dafc5214c8b21bceba3adae1c36b9799d
Reviewed-on: https://go-review.googlesource.com/c/152798
Reviewed-by: Ian Cottrell <iancottrell@google.com>
This commit is contained in:
Rebecca Stambler 2018-12-05 17:00:36 -05:00
Parent 62e1d13d53
Commit 3576414c54
14 changed files: 351 additions and 309 deletions
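
For context, the heart of this change is the new source.File interface shown further down: the source package now asks only for an AST, a FileSet, a token.File, and a packages.Package, and the cache package's View/File supply them. As a hedged illustration of why that makes source reusable, here is a minimal, caching-free implementation of the interface. The singleFile type and its re-parse-on-every-call strategy are invented for this sketch (they are not part of this CL), and internal/lsp/source can only be imported from within golang.org/x/tools, so treat this purely as an illustration.

package example

import (
    "fmt"
    "go/ast"
    "go/parser"
    "go/token"

    "golang.org/x/tools/go/packages"
    "golang.org/x/tools/internal/lsp/source"
)

// singleFile is a hypothetical, cache-free source.File: it re-parses and
// re-loads on every call, where a real client would memoize.
type singleFile struct {
    fset *token.FileSet
    path string
}

// Compile-time check that singleFile satisfies the new interface.
var _ source.File = (*singleFile)(nil)

func (f *singleFile) GetFileSet() (*token.FileSet, error) { return f.fset, nil }

func (f *singleFile) GetAST() (*ast.File, error) {
    return parser.ParseFile(f.fset, f.path, nil, parser.ParseComments)
}

func (f *singleFile) GetToken() (*token.File, error) {
    astFile, err := f.GetAST()
    if err != nil {
        return nil, err
    }
    return f.fset.File(astFile.Pos()), nil
}

func (f *singleFile) GetPackage() (*packages.Package, error) {
    cfg := &packages.Config{Mode: packages.LoadSyntax, Fset: f.fset}
    pkgs, err := packages.Load(cfg, "file="+f.path)
    if err != nil {
        return nil, err
    }
    if len(pkgs) == 0 {
        return nil, fmt.Errorf("no package found for %v", f.path)
    }
    return pkgs[0], nil
}

With such a type in hand, a caller could invoke source.Diagnostics(ctx, f) or source.Format(ctx, f, rng) without going through the cache package at all, which is exactly the decoupling the diffs below introduce.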

internal/lsp/cache/file.go (new file, 126 additions)

@@ -0,0 +1,126 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package cache
import (
    "fmt"
    "go/ast"
    "go/token"
    "io/ioutil"
    "golang.org/x/tools/go/packages"
    "golang.org/x/tools/internal/lsp/source"
)
// File holds all the information we know about a file.
type File struct {
    URI     source.URI
    view    *View
    active  bool
    content []byte
    ast     *ast.File
    token   *token.File
    pkg     *packages.Package
}
// SetContent sets the overlay contents for a file.
// Setting it to nil will revert it to the on disk contents, and remove it
// from the active set.
func (f *File) SetContent(content []byte) {
    f.view.mu.Lock()
    defer f.view.mu.Unlock()
    f.content = content
    // the ast and token fields are invalid
    f.ast = nil
    f.token = nil
    f.pkg = nil
    // and we might need to update the overlay
    switch {
    case f.active && content == nil:
        // we were active, and want to forget the content
        f.active = false
        if filename, err := f.URI.Filename(); err == nil {
            delete(f.view.Config.Overlay, filename)
        }
        f.content = nil
    case content != nil:
        // an active overlay, update the map
        f.active = true
        if filename, err := f.URI.Filename(); err == nil {
            f.view.Config.Overlay[filename] = f.content
        }
    }
}
// Read returns the contents of the file, reading it from file system if needed.
func (f *File) Read() ([]byte, error) {
    f.view.mu.Lock()
    defer f.view.mu.Unlock()
    return f.read()
}
func (f *File) GetFileSet() (*token.FileSet, error) {
    if f.view.Config == nil {
        return nil, fmt.Errorf("no config for file view")
    }
    if f.view.Config.Fset == nil {
        return nil, fmt.Errorf("no fileset for file view config")
    }
    return f.view.Config.Fset, nil
}
func (f *File) GetToken() (*token.File, error) {
    f.view.mu.Lock()
    defer f.view.mu.Unlock()
    if f.token == nil {
        if err := f.view.parse(f.URI); err != nil {
            return nil, err
        }
        if f.token == nil {
            return nil, fmt.Errorf("failed to find or parse %v", f.URI)
        }
    }
    return f.token, nil
}
func (f *File) GetAST() (*ast.File, error) {
    f.view.mu.Lock()
    defer f.view.mu.Unlock()
    if f.ast == nil {
        if err := f.view.parse(f.URI); err != nil {
            return nil, err
        }
    }
    return f.ast, nil
}
func (f *File) GetPackage() (*packages.Package, error) {
    f.view.mu.Lock()
    defer f.view.mu.Unlock()
    if f.pkg == nil {
        if err := f.view.parse(f.URI); err != nil {
            return nil, err
        }
    }
    return f.pkg, nil
}
// read is the internal part of Read that presumes the lock is already held
func (f *File) read() ([]byte, error) {
    if f.content != nil {
        return f.content, nil
    }
    // we don't know the content yet, so read it
    filename, err := f.URI.Filename()
    if err != nil {
        return nil, err
    }
    content, err := ioutil.ReadFile(filename)
    if err != nil {
        return nil, err
    }
    f.content = content
    return f.content, nil
}
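
A brief usage sketch of the cache API above (the path is hypothetical, and these internal packages are only importable from inside golang.org/x/tools): SetContent installs an overlay so unsaved editor buffers are type-checked, and SetContent(nil) reverts to the on-disk contents, mirroring how the server's DidOpen/DidChange handlers use it further down.

package example

import (
    "log"

    "golang.org/x/tools/internal/lsp/cache"
    "golang.org/x/tools/internal/lsp/source"
)

func demo() {
    v := cache.NewView()
    f := v.GetFile(source.ToURI("/path/to/pkg/main.go")) // hypothetical path

    // The editor buffer takes precedence over the file on disk.
    f.SetContent([]byte("package main\n\nfunc main() {}\n"))
    overlay, err := f.Read() // returns the overlay contents
    if err != nil {
        log.Fatal(err)
    }

    // Passing nil drops the overlay; Read then falls back to reading the disk file.
    f.SetContent(nil)
    disk, err := f.Read()
    if err != nil {
        log.Fatal(err) // in this sketch the file may not exist on disk
    }
    log.Printf("overlay %d bytes, disk %d bytes", len(overlay), len(disk))
}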

@@ -2,7 +2,7 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
-package source
+package cache
 import (
     "fmt"
@@ -10,6 +10,7 @@ import (
     "sync"
     "golang.org/x/tools/go/packages"
+    "golang.org/x/tools/internal/lsp/source"
 )
 type View struct {
@@ -17,7 +18,7 @@ type View struct {
     Config *packages.Config
-    files map[URI]*File
+    files map[source.URI]*File
 }
 func NewView() *View {
@@ -28,13 +29,13 @@ func NewView() *View {
             Tests:   true,
             Overlay: make(map[string][]byte),
         },
-        files: make(map[URI]*File),
+        files: make(map[source.URI]*File),
     }
 }
 // GetFile returns a File for the given uri.
 // It will always succeed, adding the file to the managed set if needed.
-func (v *View) GetFile(uri URI) *File {
+func (v *View) GetFile(uri source.URI) *File {
     v.mu.Lock()
     f := v.getFile(uri)
     v.mu.Unlock()
@@ -42,7 +43,7 @@ func (v *View) GetFile(uri URI) *File {
 }
 // getFile is the unlocked internal implementation of GetFile.
-func (v *View) getFile(uri URI) *File {
+func (v *View) getFile(uri source.URI) *File {
     f, found := v.files[uri]
     if !found {
         f = &File{
@@ -54,7 +55,7 @@ func (v *View) getFile(uri URI) *File {
     return f
 }
-func (v *View) parse(uri URI) error {
+func (v *View) parse(uri source.URI) error {
     path, err := uri.Filename()
     if err != nil {
         return err
@@ -71,7 +72,7 @@ func (v *View) parse(uri URI) error {
     for _, fAST := range pkg.Syntax {
         // if a file was in multiple packages, which token/ast/pkg do we store
         fToken := v.Config.Fset.File(fAST.Pos())
-        fURI := ToURI(fToken.Name())
+        fURI := source.ToURI(fToken.Name())
         f := v.getFile(fURI)
         f.token = fToken
         f.ast = fAST

@@ -5,40 +5,57 @@
 package lsp
 import (
+    "context"
     "sort"
+    "golang.org/x/tools/internal/lsp/cache"
     "golang.org/x/tools/internal/lsp/protocol"
     "golang.org/x/tools/internal/lsp/source"
 )
-func toProtocolDiagnostics(v *source.View, diagnostics []source.Diagnostic) []protocol.Diagnostic {
+func (s *server) CacheAndDiagnose(ctx context.Context, uri protocol.DocumentURI, text string) {
+    f := s.view.GetFile(source.URI(uri))
+    f.SetContent([]byte(text))
+    go func() {
+        reports, err := source.Diagnostics(ctx, f)
+        if err != nil {
+            return // handle error?
+        }
+        for filename, diagnostics := range reports {
+            s.client.PublishDiagnostics(ctx, &protocol.PublishDiagnosticsParams{
+                URI:         protocol.DocumentURI(source.ToURI(filename)),
+                Diagnostics: toProtocolDiagnostics(s.view, diagnostics),
+            })
+        }
+    }()
+}
+func toProtocolDiagnostics(v *cache.View, diagnostics []source.Diagnostic) []protocol.Diagnostic {
     reports := []protocol.Diagnostic{}
     for _, diag := range diagnostics {
-        tok := v.Config.Fset.File(diag.Range.Start)
+        f := v.GetFile(source.ToURI(diag.Filename))
+        tok, err := f.GetToken()
+        if err != nil {
+            continue // handle error?
+        }
+        pos := fromTokenPosition(tok, diag.Position)
+        if !pos.IsValid() {
+            continue // handle error?
+        }
         reports = append(reports, protocol.Diagnostic{
             Message:  diag.Message,
-            Range:    toProtocolRange(tok, diag.Range),
-            Severity: toProtocolSeverity(diag.Severity),
+            Range: toProtocolRange(tok, source.Range{
+                Start: pos,
+                End:   pos,
+            }),
+            Severity: protocol.SeverityError, // all diagnostics have error severity for now
             Source:   "LSP",
         })
     }
     return reports
 }
-func toProtocolSeverity(severity source.DiagnosticSeverity) protocol.DiagnosticSeverity {
-    switch severity {
-    case source.SeverityError:
-        return protocol.SeverityError
-    case source.SeverityWarning:
-        return protocol.SeverityWarning
-    case source.SeverityHint:
-        return protocol.SeverityHint
-    case source.SeverityInformation:
-        return protocol.SeverityInformation
-    }
-    return protocol.SeverityError // default
-}
 func sorted(d []protocol.Diagnostic) {
     sort.Slice(d, func(i int, j int) bool {
         if d[i].Range.Start.Line == d[j].Range.Start.Line {

internal/lsp/format.go (new file, 60 additions)

@@ -0,0 +1,60 @@
package lsp
import (
    "context"
    "go/token"
    "golang.org/x/tools/internal/lsp/cache"
    "golang.org/x/tools/internal/lsp/protocol"
    "golang.org/x/tools/internal/lsp/source"
)
// formatRange formats a document with a given range.
func formatRange(ctx context.Context, v *cache.View, uri protocol.DocumentURI, rng *protocol.Range) ([]protocol.TextEdit, error) {
    f := v.GetFile(source.URI(uri))
    tok, err := f.GetToken()
    if err != nil {
        return nil, err
    }
    var r source.Range
    if rng == nil {
        r.Start = tok.Pos(0)
        r.End = tok.Pos(tok.Size())
    } else {
        r = fromProtocolRange(tok, *rng)
    }
    content, err := f.Read()
    if err != nil {
        return nil, err
    }
    edits, err := source.Format(ctx, f, r)
    if err != nil {
        return nil, err
    }
    return toProtocolEdits(tok, content, edits), nil
}
func toProtocolEdits(tok *token.File, content []byte, edits []source.TextEdit) []protocol.TextEdit {
    if edits == nil {
        return nil
    }
    // When a file ends with an empty line, the newline character is counted
    // as part of the previous line. This causes the formatter to insert
    // another unnecessary newline on each formatting. We handle this case by
    // checking if the file already ends with a newline character.
    hasExtraNewline := content[len(content)-1] == '\n'
    result := make([]protocol.TextEdit, len(edits))
    for i, edit := range edits {
        rng := toProtocolRange(tok, edit.Range)
        // If the edit ends at the end of the file, add the extra line.
        if hasExtraNewline && tok.Offset(edit.Range.End) == len(content) {
            rng.End.Line++
            rng.End.Character = 0
        }
        result[i] = protocol.TextEdit{
            Range:   rng,
            NewText: edit.NewText,
        }
    }
    return result
}

@@ -17,6 +17,7 @@ import (
     "golang.org/x/tools/go/packages"
     "golang.org/x/tools/go/packages/packagestest"
+    "golang.org/x/tools/internal/lsp/cache"
     "golang.org/x/tools/internal/lsp/protocol"
     "golang.org/x/tools/internal/lsp/source"
 )
@@ -57,7 +58,7 @@ func testLSP(t *testing.T, exporter packagestest.Exporter) {
     defer exported.Cleanup()
     s := &server{
-        view: source.NewView(),
+        view: cache.NewView(),
     }
     // Merge the exported.Config with the view.Config.
     cfg := *exported.Config
@@ -150,11 +151,11 @@ type completions map[token.Position][]token.Pos
 type formats map[string]string
 type definitions map[protocol.Location]protocol.Location
-func (d diagnostics) test(t *testing.T, exported *packagestest.Exported, v *source.View) int {
+func (d diagnostics) test(t *testing.T, exported *packagestest.Exported, v *cache.View) int {
     count := 0
     for filename, want := range d {
         f := v.GetFile(source.ToURI(filename))
-        sourceDiagnostics, err := source.Diagnostics(context.Background(), v, f)
+        sourceDiagnostics, err := source.Diagnostics(context.Background(), f)
         if err != nil {
             t.Fatal(err)
         }

@@ -7,6 +7,7 @@ package lsp
 import (
     "go/token"
+    "golang.org/x/tools/internal/lsp/cache"
     "golang.org/x/tools/internal/lsp/protocol"
     "golang.org/x/tools/internal/lsp/source"
 )
@@ -14,7 +15,7 @@ import (
 // fromProtocolLocation converts from a protocol location to a source range.
 // It will return an error if the file of the location was not valid.
 // It uses fromProtocolRange to convert the start and end positions.
-func fromProtocolLocation(v *source.View, loc protocol.Location) (source.Range, error) {
+func fromProtocolLocation(v *cache.View, loc protocol.Location) (source.Range, error) {
     f := v.GetFile(source.URI(loc.URI))
     tok, err := f.GetToken()
     if err != nil {
@@ -83,6 +84,14 @@ func toProtocolPosition(f *token.File, pos token.Pos) protocol.Position {
     }
 }
+// fromTokenPosition converts a token.Position (1-based line and column
+// number) to a token.Pos (byte offset value).
+// It requires the token file the pos belongs to in order to do this.
+func fromTokenPosition(f *token.File, pos token.Position) token.Pos {
+    line := lineStart(f, pos.Line)
+    return line + token.Pos(pos.Column-1) // TODO: this is wrong, bytes not characters
+}
 // this functionality was borrowed from the analysisutil package
 func lineStart(f *token.File, line int) token.Pos {
     // Use binary search to find the start offset of this line.

@@ -6,11 +6,11 @@ package lsp
 import (
     "context"
-    "go/token"
     "os"
     "sync"
     "golang.org/x/tools/internal/jsonrpc2"
+    "golang.org/x/tools/internal/lsp/cache"
     "golang.org/x/tools/internal/lsp/protocol"
     "golang.org/x/tools/internal/lsp/source"
 )
@@ -33,7 +33,7 @@ type server struct {
     signatureHelpEnabled bool
     snippetsSupported    bool
-    view *source.View
+    view *cache.View
 }
 func (s *server) Initialize(ctx context.Context, params *protocol.InitializeParams) (*protocol.InitializeResult, error) {
@@ -42,7 +42,7 @@ func (s *server) Initialize(ctx context.Context, params *protocol.InitializePara
     if s.initialized {
         return nil, jsonrpc2.NewErrorf(jsonrpc2.CodeInvalidRequest, "server already initialized")
     }
-    s.view = source.NewView()
+    s.view = cache.NewView()
     s.initialized = true // mark server as initialized now
     // Check if the client supports snippets in completion items.
@@ -113,7 +113,7 @@ func (s *server) ExecuteCommand(context.Context, *protocol.ExecuteCommandParams)
 }
 func (s *server) DidOpen(ctx context.Context, params *protocol.DidOpenTextDocumentParams) error {
-    s.cacheAndDiagnoseFile(ctx, params.TextDocument.URI, params.TextDocument.Text)
+    s.CacheAndDiagnose(ctx, params.TextDocument.URI, params.TextDocument.Text)
     return nil
 }
@@ -123,29 +123,11 @@ func (s *server) DidChange(ctx context.Context, params *protocol.DidChangeTextDo
     }
     // We expect the full content of file, i.e. a single change with no range.
     if change := params.ContentChanges[0]; change.RangeLength == 0 {
-        s.cacheAndDiagnoseFile(ctx, params.TextDocument.URI, change.Text)
+        s.CacheAndDiagnose(ctx, params.TextDocument.URI, change.Text)
     }
     return nil
 }
-func (s *server) cacheAndDiagnoseFile(ctx context.Context, uri protocol.DocumentURI, text string) {
-    f := s.view.GetFile(source.URI(uri))
-    f.SetContent([]byte(text))
-    go func() {
-        f := s.view.GetFile(source.URI(uri))
-        reports, err := source.Diagnostics(ctx, s.view, f)
-        if err != nil {
-            return // handle error?
-        }
-        for filename, diagnostics := range reports {
-            s.client.PublishDiagnostics(ctx, &protocol.PublishDiagnosticsParams{
-                URI:         protocol.DocumentURI(source.ToURI(filename)),
-                Diagnostics: toProtocolDiagnostics(s.view, diagnostics),
-            })
-        }
-    }()
-}
 func (s *server) WillSave(context.Context, *protocol.WillSaveTextDocumentParams) error {
     return notImplemented("WillSave")
 }
@@ -299,56 +281,6 @@ func (s *server) RangeFormatting(ctx context.Context, params *protocol.DocumentR
     return formatRange(ctx, s.view, params.TextDocument.URI, &params.Range)
 }
-// formatRange formats a document with a given range.
-func formatRange(ctx context.Context, v *source.View, uri protocol.DocumentURI, rng *protocol.Range) ([]protocol.TextEdit, error) {
-    f := v.GetFile(source.URI(uri))
-    tok, err := f.GetToken()
-    if err != nil {
-        return nil, err
-    }
-    var r source.Range
-    if rng == nil {
-        r.Start = tok.Pos(0)
-        r.End = tok.Pos(tok.Size())
-    } else {
-        r = fromProtocolRange(tok, *rng)
-    }
-    content, err := f.Read()
-    if err != nil {
-        return nil, err
-    }
-    edits, err := source.Format(ctx, f, r)
-    if err != nil {
-        return nil, err
-    }
-    return toProtocolEdits(tok, content, edits), nil
-}
-func toProtocolEdits(tok *token.File, content []byte, edits []source.TextEdit) []protocol.TextEdit {
-    if edits == nil {
-        return nil
-    }
-    // When a file ends with an empty line, the newline character is counted
-    // as part of the previous line. This causes the formatter to insert
-    // another unnecessary newline on each formatting. We handle this case by
-    // checking if the file already ends with a newline character.
-    hasExtraNewline := content[len(content)-1] == '\n'
-    result := make([]protocol.TextEdit, len(edits))
-    for i, edit := range edits {
-        rng := toProtocolRange(tok, edit.Range)
-        // If the edit ends at the end of the file, add the extra line.
-        if hasExtraNewline && tok.Offset(edit.Range.End) == len(content) {
-            rng.End.Line++
-            rng.End.Character = 0
-        }
-        result[i] = protocol.TextEdit{
-            Range:   rng,
-            NewText: edit.NewText,
-        }
-    }
-    return result
-}
 func (s *server) OnTypeFormatting(context.Context, *protocol.DocumentOnTypeFormattingParams) ([]protocol.TextEdit, error) {
     return nil, notImplemented("OnTypeFormatting")
 }

@@ -34,7 +34,19 @@ const (
     PackageCompletionItem
 )
-func Completion(ctx context.Context, f *File, pos token.Pos) ([]CompletionItem, string, error) {
+// stdScore is the base score value set for all completion items.
+const stdScore float64 = 1.0
+// finder is a function used to record a completion candidate item in a list of
+// completion items.
+type finder func(types.Object, float64, []CompletionItem) []CompletionItem
+// Completion returns a list of possible candidates for completion, given a
+// a file and a position. The prefix is computed based on the preceding
+// identifier and can be used by the client to score the quality of the
+// completion. For instance, some clients may tolerate imperfect matches as
+// valid completion results, since users may make typos.
+func Completion(ctx context.Context, f File, pos token.Pos) (items []CompletionItem, prefix string, err error) {
     file, err := f.GetAST()
     if err != nil {
         return nil, "", err
@@ -43,19 +55,6 @@ func Completion(ctx context.Context, f *File, pos token.Pos) ([]CompletionItem,
     if err != nil {
         return nil, "", err
     }
-    return completions(file, pos, pkg.Fset, pkg.Types, pkg.TypesInfo)
-}
-const stdScore float64 = 1.0
-type finder func(types.Object, float64, []CompletionItem) []CompletionItem
-// completions returns the map of possible candidates for completion, given a
-// position, a file AST, and type information. The prefix is computed based on
-// the preceding identifier and can be used by the client to score the quality
-// of the completion. For instance, some clients may tolerate imperfect matches
-// as valid completion results, since users may make typos.
-func completions(file *ast.File, pos token.Pos, fset *token.FileSet, pkg *types.Package, info *types.Info) (items []CompletionItem, prefix string, err error) {
     path, _ := astutil.PathEnclosingInterval(file, pos, pos)
     if path == nil {
         return nil, "", fmt.Errorf("cannot find node enclosing position")
@@ -75,16 +74,16 @@ func completions(file *ast.File, pos token.Pos, fset *token.FileSet, pkg *types.
     // Save certain facts about the query position, including the expected type
     // of the completion result, the signature of the function enclosing the
     // position.
-    typ := expectedType(path, pos, info)
-    sig := enclosingFunction(path, pos, info)
-    pkgStringer := qualifier(file, pkg, info)
+    typ := expectedType(path, pos, pkg.TypesInfo)
+    sig := enclosingFunction(path, pos, pkg.TypesInfo)
+    pkgStringer := qualifier(file, pkg.Types, pkg.TypesInfo)
     seen := make(map[types.Object]bool)
     // found adds a candidate completion.
     // Only the first candidate of a given name is considered.
     found := func(obj types.Object, weight float64, items []CompletionItem) []CompletionItem {
-        if obj.Pkg() != nil && obj.Pkg() != pkg && !obj.Exported() {
+        if obj.Pkg() != nil && obj.Pkg() != pkg.Types && !obj.Exported() {
             return items // inaccessible
         }
         if !seen[obj] {
@@ -101,7 +100,7 @@ func completions(file *ast.File, pos token.Pos, fset *token.FileSet, pkg *types.
     }
     // The position is within a composite literal.
-    if items, prefix, ok := complit(path, pos, pkg, info, found); ok {
+    if items, prefix, ok := complit(path, pos, pkg.Types, pkg.TypesInfo, found); ok {
         return items, prefix, nil
     }
     switch n := path[0].(type) {
@@ -111,39 +110,39 @@ func completions(file *ast.File, pos token.Pos, fset *token.FileSet, pkg *types.
         // Is this the Sel part of a selector?
         if sel, ok := path[1].(*ast.SelectorExpr); ok && sel.Sel == n {
-            items, err = selector(sel, pos, info, found)
+            items, err = selector(sel, pos, pkg.TypesInfo, found)
             return items, prefix, err
         }
         // reject defining identifiers
-        if obj, ok := info.Defs[n]; ok {
+        if obj, ok := pkg.TypesInfo.Defs[n]; ok {
             if v, ok := obj.(*types.Var); ok && v.IsField() {
                 // An anonymous field is also a reference to a type.
             } else {
                 of := ""
                 if obj != nil {
-                    qual := types.RelativeTo(pkg)
+                    qual := types.RelativeTo(pkg.Types)
                     of += ", of " + types.ObjectString(obj, qual)
                 }
                 return nil, "", fmt.Errorf("this is a definition%s", of)
             }
         }
-        items = append(items, lexical(path, pos, pkg, info, found)...)
+        items = append(items, lexical(path, pos, pkg.Types, pkg.TypesInfo, found)...)
     // The function name hasn't been typed yet, but the parens are there:
     //   recv.‸(arg)
     case *ast.TypeAssertExpr:
         // Create a fake selector expression.
-        items, err = selector(&ast.SelectorExpr{X: n.X}, pos, info, found)
+        items, err = selector(&ast.SelectorExpr{X: n.X}, pos, pkg.TypesInfo, found)
         return items, prefix, err
     case *ast.SelectorExpr:
-        items, err = selector(n, pos, info, found)
+        items, err = selector(n, pos, pkg.TypesInfo, found)
         return items, prefix, err
     default:
         // fallback to lexical completions
-        return lexical(path, pos, pkg, info, found), "", nil
+        return lexical(path, pos, pkg.Types, pkg.TypesInfo, found), "", nil
     }
     return items, prefix, nil

@@ -16,7 +16,7 @@ import (
     "golang.org/x/tools/go/ast/astutil"
 )
-func Definition(ctx context.Context, f *File, pos token.Pos) (Range, error) {
+func Definition(ctx context.Context, f File, pos token.Pos) (Range, error) {
     fAST, err := f.GetAST()
     if err != nil {
         return Range{}, err
@@ -45,10 +45,14 @@ func Definition(ctx context.Context, f *File, pos token.Pos) (Range, error) {
             }
         }
     }
-    return objToRange(f.view.Config.Fset, obj), nil
+    fset, err := f.GetFileSet()
+    if err != nil {
+        return Range{}, err
+    }
+    return objToRange(fset, obj), nil
 }
-func TypeDefinition(ctx context.Context, f *File, pos token.Pos) (Range, error) {
+func TypeDefinition(ctx context.Context, f File, pos token.Pos) (Range, error) {
     fAST, err := f.GetAST()
     if err != nil {
         return Range{}, err
@@ -72,7 +76,11 @@ func TypeDefinition(ctx context.Context, f *File, pos token.Pos) (Range, error)
     if obj == nil {
         return Range{}, fmt.Errorf("no object for type %s", typ.String())
     }
-    return objToRange(f.view.Config.Fset, obj), nil
+    fset, err := f.GetFileSet()
+    if err != nil {
+        return Range{}, err
+    }
+    return objToRange(fset, obj), nil
 }
 func typeToObject(typ types.Type) (obj types.Object) {
@@ -156,3 +164,33 @@ func objToRange(fSet *token.FileSet, obj types.Object) Range {
         End:   p + token.Pos(len([]byte(obj.Name()))), // TODO: use real range of obj
     }
 }
+// this functionality was borrowed from the analysisutil package
+func lineStart(f *token.File, line int) token.Pos {
+    // Use binary search to find the start offset of this line.
+    //
+    // TODO(adonovan): eventually replace this function with the
+    // simpler and more efficient (*go/token.File).LineStart, added
+    // in go1.12.
+    min := 0        // inclusive
+    max := f.Size() // exclusive
+    for {
+        offset := (min + max) / 2
+        pos := f.Pos(offset)
+        posn := f.Position(pos)
+        if posn.Line == line {
+            return pos - (token.Pos(posn.Column) - 1)
+        }
+        if min+1 >= max {
+            return token.NoPos
+        }
+        if posn.Line < line {
+            min = offset
+        } else {
+            max = offset
+        }
+    }
+}

@@ -14,21 +14,11 @@ import (
 )
 type Diagnostic struct {
-    Range    Range
-    Severity DiagnosticSeverity
-    Message  string
+    token.Position
+    Message string
 }
-type DiagnosticSeverity int
-const (
-    SeverityError DiagnosticSeverity = iota
-    SeverityWarning
-    SeverityHint
-    SeverityInformation
-)
-func Diagnostics(ctx context.Context, v *View, f *File) (map[string][]Diagnostic, error) {
+func Diagnostics(ctx context.Context, f File) (map[string][]Diagnostic, error) {
     pkg, err := f.GetPackage()
     if err != nil {
         return nil, err
@@ -56,25 +46,27 @@ func Diagnostics(ctx context.Context, v *View, f *File) (map[string][]Diagnostic
         diags = parseErrors
     }
     for _, diag := range diags {
-        filename, start := v.errorPos(diag)
-        // TODO(rstambler): Add support for diagnostic ranges.
-        end := start
+        pos := errorPos(diag)
         diagnostic := Diagnostic{
-            Range: Range{
-                Start: start,
-                End:   end,
-            },
+            Position: pos,
             Message:  diag.Msg,
-            Severity: SeverityError,
         }
-        if _, ok := reports[filename]; ok {
-            reports[filename] = append(reports[filename], diagnostic)
+        if _, ok := reports[pos.Filename]; ok {
+            reports[pos.Filename] = append(reports[pos.Filename], diagnostic)
         }
     }
     return reports, nil
 }
-func (v *View) errorPos(pkgErr packages.Error) (string, token.Pos) {
+// FromTokenPosition converts a token.Position (1-based line and column
+// number) to a token.Pos (byte offset value).
+// It requires the token file the pos belongs to in order to do this.
+func FromTokenPosition(f *token.File, pos token.Position) token.Pos {
+    line := lineStart(f, pos.Line)
+    return line + token.Pos(pos.Column-1) // TODO: this is wrong, bytes not characters
+}
+func errorPos(pkgErr packages.Error) token.Position {
     remainder1, first, hasLine := chop(pkgErr.Pos)
     remainder2, second, hasColumn := chop(remainder1)
     var pos token.Position
@@ -86,15 +78,7 @@ func (v *View) errorPos(pkgErr packages.Error) (string, token.Pos) {
         pos.Filename = remainder1
         pos.Line = first
     }
-    f := v.GetFile(ToURI(pos.Filename))
-    if f == nil {
-        return "", token.NoPos
-    }
-    tok, err := f.GetToken()
-    if err != nil {
-        return "", token.NoPos
-    }
-    return pos.Filename, fromTokenPosition(tok, pos)
+    return pos
 }
 func chop(text string) (remainder string, value int, ok bool) {
@@ -108,41 +92,3 @@ func chop(text string) (remainder string, value int, ok bool) {
     }
     return text[:i], int(v), true
 }
-// fromTokenPosition converts a token.Position (1-based line and column
-// number) to a token.Pos (byte offset value).
-// It requires the token file the pos belongs to in order to do this.
-func fromTokenPosition(f *token.File, pos token.Position) token.Pos {
-    line := lineStart(f, pos.Line)
-    return line + token.Pos(pos.Column-1) // TODO: this is wrong, bytes not characters
-}
-// this functionality was borrowed from the analysisutil package
-func lineStart(f *token.File, line int) token.Pos {
-    // Use binary search to find the start offset of this line.
-    //
-    // TODO(adonovan): eventually replace this function with the
-    // simpler and more efficient (*go/token.File).LineStart, added
-    // in go1.12.
-    min := 0        // inclusive
-    max := f.Size() // exclusive
-    for {
-        offset := (min + max) / 2
-        pos := f.Pos(offset)
-        posn := f.Position(pos)
-        if posn.Line == line {
-            return pos - (token.Pos(posn.Column) - 1)
-        }
-        if min+1 >= max {
-            return token.NoPos
-        }
-        if posn.Line < line {
-            min = offset
-        } else {
-            max = offset
-        }
-    }
-}
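
For concreteness, here is a small sketch (an invented example, not code from this CL) of what the new FromTokenPosition helper computes: given a token.File whose line table is known, a 1-based line/column as reported by a packages.Error is turned back into a token.Pos. Note the TODO above: the column is currently treated as a byte count rather than a character count.

package example

import (
    "fmt"
    "go/token"

    "golang.org/x/tools/internal/lsp/source"
)

func demo() {
    src := []byte("package p\n\nfunc f() {}\n")
    fset := token.NewFileSet()
    tok := fset.AddFile("p.go", -1, len(src))
    tok.SetLinesForContent(src) // FromTokenPosition needs the file's line table

    // A diagnostic reported at p.go:3:6, the "f" in "func f".
    pos := source.FromTokenPosition(tok, token.Position{Filename: "p.go", Line: 3, Column: 6})
    fmt.Println(fset.Position(pos)) // prints p.go:3:6, with the column counted in bytes
}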

@@ -5,23 +5,21 @@
 package source
 import (
-    "fmt"
     "go/ast"
     "go/token"
-    "io/ioutil"
     "golang.org/x/tools/go/packages"
 )
-// File holds all the information we know about a file.
-type File struct {
-    URI     URI
-    view    *View
-    active  bool
-    content []byte
-    ast     *ast.File
-    token   *token.File
-    pkg     *packages.Package
+// File represents a Go source file that has been type-checked. It is the input
+// to most of the exported functions in this package, as it wraps up the
+// building blocks for most queries. Users of the source package can abstract
+// the loading of packages into their own caching systems.
+type File interface {
+    GetAST() (*ast.File, error)
+    GetFileSet() (*token.FileSet, error)
+    GetPackage() (*packages.Package, error)
+    GetToken() (*token.File, error)
 }
 // Range represents a start and end position.
@@ -39,93 +37,3 @@ type TextEdit struct {
     Range   Range
     NewText string
 }
-// SetContent sets the overlay contents for a file.
-// Setting it to nil will revert it to the on disk contents, and remove it
-// from the active set.
-func (f *File) SetContent(content []byte) {
-    f.view.mu.Lock()
-    defer f.view.mu.Unlock()
-    f.content = content
-    // the ast and token fields are invalid
-    f.ast = nil
-    f.token = nil
-    f.pkg = nil
-    // and we might need to update the overlay
-    switch {
-    case f.active && content == nil:
-        // we were active, and want to forget the content
-        f.active = false
-        if filename, err := f.URI.Filename(); err == nil {
-            delete(f.view.Config.Overlay, filename)
-        }
-        f.content = nil
-    case content != nil:
-        // an active overlay, update the map
-        f.active = true
-        if filename, err := f.URI.Filename(); err == nil {
-            f.view.Config.Overlay[filename] = f.content
-        }
-    }
-}
-// Read returns the contents of the file, reading it from file system if needed.
-func (f *File) Read() ([]byte, error) {
-    f.view.mu.Lock()
-    defer f.view.mu.Unlock()
-    return f.read()
-}
-func (f *File) GetToken() (*token.File, error) {
-    f.view.mu.Lock()
-    defer f.view.mu.Unlock()
-    if f.token == nil {
-        if err := f.view.parse(f.URI); err != nil {
-            return nil, err
-        }
-        if f.token == nil {
-            return nil, fmt.Errorf("failed to find or parse %v", f.URI)
-        }
-    }
-    return f.token, nil
-}
-func (f *File) GetAST() (*ast.File, error) {
-    f.view.mu.Lock()
-    defer f.view.mu.Unlock()
-    if f.ast == nil {
-        if err := f.view.parse(f.URI); err != nil {
-            return nil, err
-        }
-    }
-    return f.ast, nil
-}
-func (f *File) GetPackage() (*packages.Package, error) {
-    f.view.mu.Lock()
-    defer f.view.mu.Unlock()
-    if f.pkg == nil {
-        if err := f.view.parse(f.URI); err != nil {
-            return nil, err
-        }
-    }
-    return f.pkg, nil
-}
-// read is the internal part of Read that presumes the lock is already held
-func (f *File) read() ([]byte, error) {
-    if f.content != nil {
-        return f.content, nil
-    }
-    // we don't know the content yet, so read it
-    filename, err := f.URI.Filename()
-    if err != nil {
-        return nil, err
-    }
-    content, err := ioutil.ReadFile(filename)
-    if err != nil {
-        return nil, err
-    }
-    f.content = content
-    return f.content, nil
-}

@@ -2,6 +2,7 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
+// Package source provides core features for use by Go editors and tools.
 package source
 import (
@@ -15,7 +16,7 @@ import (
 )
 // Format formats a document with a given range.
-func Format(ctx context.Context, f *File, rng Range) ([]TextEdit, error) {
+func Format(ctx context.Context, f File, rng Range) ([]TextEdit, error) {
     fAST, err := f.GetAST()
     if err != nil {
         return nil, err
@@ -45,8 +46,12 @@ func Format(ctx context.Context, f *File, rng Range) ([]TextEdit, error) {
     // of Go used to build the LSP server will determine how it formats code.
     // This should be acceptable for all users, who likely be prompted to rebuild
     // the LSP server on each Go release.
+    fset, err := f.GetFileSet()
+    if err != nil {
+        return nil, err
+    }
     buf := &bytes.Buffer{}
-    if err := format.Node(buf, f.view.Config.Fset, node); err != nil {
+    if err := format.Node(buf, fset, node); err != nil {
         return nil, err
     }
     // TODO(rstambler): Compute text edits instead of replacing whole file.

@@ -11,7 +11,7 @@ import (
     "go/types"
 )
-func Hover(ctx context.Context, f *File, pos token.Pos) (string, Range, error) {
+func Hover(ctx context.Context, f File, pos token.Pos) (string, Range, error) {
     fAST, err := f.GetAST()
     if err != nil {
         return "", Range{}, err

@@ -24,7 +24,7 @@ type ParameterInformation struct {
     Label string
 }
-func SignatureHelp(ctx context.Context, f *File, pos token.Pos) (*SignatureInformation, error) {
+func SignatureHelp(ctx context.Context, f File, pos token.Pos) (*SignatureInformation, error) {
     fAST, err := f.GetAST()
     if err != nil {
         return nil, err