all: handle Go 1.18 code on Go 1.16 AppEngine using new internal/backport/go/*
The immediate need was go/build, because the Go 1.16 go/build rejects code that
has //go:build without // +build, as Go 1.18 code now does. But displaying code
using generics would also have failed, for inability to parse that code. Add the
full go/ast, go/doc, go/format, go/parser, go/printer, go/scanner, and go/token
to fix those failures, which would become more frequent as generics are used
more inside the standard library.

The code is all from the go1.18 tag of the Go repo, post-processed to rewrite
imports and apply gofmt -w -r 'any -> interface{}' to keep it building with the
other backports and with Go 1.16 syntax.

For golang/go#51686.

Change-Id: I1e14f4634d8bc09bdaa04c014eadb1be97ea5047
Reviewed-on: https://go-review.googlesource.com/c/website/+/393194
Trust: Russ Cox <rsc@golang.org>
Run-TryBot: Russ Cox <rsc@golang.org>
TryBot-Result: Gopher Robot <gobot@golang.org>
Reviewed-by: Dmitri Shuralyov <dmitshur@google.com>
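To illustrate what the backported packages enable (a minimal sketch, not part of this change: the program and its sample source string are hypothetical, but the import paths are the ones this commit adds), a binary built with Go 1.16 can use the backported parser to read Go 1.18 source that the host toolchain's go/parser cannot handle:

	package main

	import (
		"fmt"

		"golang.org/x/website/internal/backport/go/parser"
		"golang.org/x/website/internal/backport/go/token"
	)

	func main() {
		// A hypothetical Go 1.18 file: the generic function below is a syntax
		// error for the Go 1.16 go/parser, but the backported go1.18 parser
		// (rewritten to build under Go 1.16) accepts it.
		src := "package p\n\nfunc Min[T int | float64](a, b T) T {\n\tif a < b {\n\t\treturn a\n\t}\n\treturn b\n}\n"

		fset := token.NewFileSet()
		f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
		if err != nil {
			panic(err)
		}
		fmt.Println("parsed package:", f.Name.Name)
	}

The same program compiled against the standard Go 1.16 go/parser would fail at ParseFile with a syntax error on the type parameter list.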
This commit is contained in:
Parent: f22c23a0f3
Commit: 7a60f303d8
@@ -14,7 +14,6 @@ import (
	"errors"
	"flag"
	"fmt"
	"go/format"
	"io/fs"
	"io/ioutil"
	"log"

@@ -32,6 +31,7 @@ import (
	"cloud.google.com/go/datastore"
	"golang.org/x/build/repos"
	"golang.org/x/website"
	"golang.org/x/website/internal/backport/go/format"
	"golang.org/x/website/internal/backport/html/template"
	"golang.org/x/website/internal/blog"
	"golang.org/x/website/internal/codewalk"
@@ -5,10 +5,11 @@
package api

import (
	"go/build"
	"os"
	"runtime"
	"testing"

	"golang.org/x/website/internal/backport/go/build"
)

func TestParseVersionRow(t *testing.T) {
(File diff suppressed because it is too large)
@@ -0,0 +1,79 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ast

import (
	"testing"
)

var comments = []struct {
	list []string
	text string
}{
	{[]string{"//"}, ""},
	{[]string{"// "}, ""},
	{[]string{"//", "//", "// "}, ""},
	{[]string{"// foo "}, "foo\n"},
	{[]string{"//", "//", "// foo"}, "foo\n"},
	{[]string{"// foo bar "}, "foo bar\n"},
	{[]string{"// foo", "// bar"}, "foo\nbar\n"},
	{[]string{"// foo", "//", "//", "//", "// bar"}, "foo\n\nbar\n"},
	{[]string{"// foo", "/* bar */"}, "foo\n bar\n"},
	{[]string{"//", "//", "//", "// foo", "//", "//", "//"}, "foo\n"},

	{[]string{"/**/"}, ""},
	{[]string{"/* */"}, ""},
	{[]string{"/**/", "/**/", "/* */"}, ""},
	{[]string{"/* Foo */"}, " Foo\n"},
	{[]string{"/* Foo Bar */"}, " Foo Bar\n"},
	{[]string{"/* Foo*/", "/* Bar*/"}, " Foo\n Bar\n"},
	{[]string{"/* Foo*/", "/**/", "/**/", "/**/", "// Bar"}, " Foo\n\nBar\n"},
	{[]string{"/* Foo*/", "/*\n*/", "//", "/*\n*/", "// Bar"}, " Foo\n\nBar\n"},
	{[]string{"/* Foo*/", "// Bar"}, " Foo\nBar\n"},
	{[]string{"/* Foo\n Bar*/"}, " Foo\n Bar\n"},

	{[]string{"// foo", "//go:noinline", "// bar", "//:baz"}, "foo\nbar\n:baz\n"},
	{[]string{"// foo", "//lint123:ignore", "// bar"}, "foo\nbar\n"},
}

func TestCommentText(t *testing.T) {
	for i, c := range comments {
		list := make([]*Comment, len(c.list))
		for i, s := range c.list {
			list[i] = &Comment{Text: s}
		}

		text := (&CommentGroup{list}).Text()
		if text != c.text {
			t.Errorf("case %d: got %q; expected %q", i, text, c.text)
		}
	}
}

var isDirectiveTests = []struct {
	in string
	ok bool
}{
	{"abc", false},
	{"go:inline", true},
	{"Go:inline", false},
	{"go:Inline", false},
	{":inline", false},
	{"lint:ignore", true},
	{"lint:1234", true},
	{"1234:lint", true},
	{"go: inline", false},
	{"go:", false},
	{"go:*", false},
	{"go:x*", true},
}

func TestIsDirective(t *testing.T) {
	for _, tt := range isDirectiveTests {
		if ok := isDirective(tt.in); ok != tt.ok {
			t.Errorf("isDirective(%q) = %v, want %v", tt.in, ok, tt.ok)
		}
	}
}
|
@ -0,0 +1,329 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"golang.org/x/website/internal/backport/go/token"
|
||||
"sort"
|
||||
)
|
||||
|
||||
type byPos []*CommentGroup
|
||||
|
||||
func (a byPos) Len() int { return len(a) }
|
||||
func (a byPos) Less(i, j int) bool { return a[i].Pos() < a[j].Pos() }
|
||||
func (a byPos) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
|
||||
|
||||
// sortComments sorts the list of comment groups in source order.
|
||||
func sortComments(list []*CommentGroup) {
|
||||
// TODO(gri): Does it make sense to check for sorted-ness
|
||||
// first (because we know that sorted-ness is
|
||||
// very likely)?
|
||||
if orderedList := byPos(list); !sort.IsSorted(orderedList) {
|
||||
sort.Sort(orderedList)
|
||||
}
|
||||
}
|
||||
|
||||
// A CommentMap maps an AST node to a list of comment groups
|
||||
// associated with it. See NewCommentMap for a description of
|
||||
// the association.
|
||||
type CommentMap map[Node][]*CommentGroup
|
||||
|
||||
func (cmap CommentMap) addComment(n Node, c *CommentGroup) {
|
||||
list := cmap[n]
|
||||
if len(list) == 0 {
|
||||
list = []*CommentGroup{c}
|
||||
} else {
|
||||
list = append(list, c)
|
||||
}
|
||||
cmap[n] = list
|
||||
}
|
||||
|
||||
type byInterval []Node
|
||||
|
||||
func (a byInterval) Len() int { return len(a) }
|
||||
func (a byInterval) Less(i, j int) bool {
|
||||
pi, pj := a[i].Pos(), a[j].Pos()
|
||||
return pi < pj || pi == pj && a[i].End() > a[j].End()
|
||||
}
|
||||
func (a byInterval) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
|
||||
|
||||
// nodeList returns the list of nodes of the AST n in source order.
|
||||
func nodeList(n Node) []Node {
|
||||
var list []Node
|
||||
Inspect(n, func(n Node) bool {
|
||||
// don't collect comments
|
||||
switch n.(type) {
|
||||
case nil, *CommentGroup, *Comment:
|
||||
return false
|
||||
}
|
||||
list = append(list, n)
|
||||
return true
|
||||
})
|
||||
// Note: The current implementation assumes that Inspect traverses the
|
||||
// AST in depth-first and thus _source_ order. If AST traversal
|
||||
// does not follow source order, the sorting call below will be
|
||||
// required.
|
||||
// sort.Sort(byInterval(list))
|
||||
return list
|
||||
}
|
||||
|
||||
// A commentListReader helps iterating through a list of comment groups.
|
||||
type commentListReader struct {
|
||||
fset *token.FileSet
|
||||
list []*CommentGroup
|
||||
index int
|
||||
comment *CommentGroup // comment group at current index
|
||||
pos, end token.Position // source interval of comment group at current index
|
||||
}
|
||||
|
||||
func (r *commentListReader) eol() bool {
|
||||
return r.index >= len(r.list)
|
||||
}
|
||||
|
||||
func (r *commentListReader) next() {
|
||||
if !r.eol() {
|
||||
r.comment = r.list[r.index]
|
||||
r.pos = r.fset.Position(r.comment.Pos())
|
||||
r.end = r.fset.Position(r.comment.End())
|
||||
r.index++
|
||||
}
|
||||
}
|
||||
|
||||
// A nodeStack keeps track of nested nodes.
|
||||
// A node lower on the stack lexically contains the nodes higher on the stack.
|
||||
type nodeStack []Node
|
||||
|
||||
// push pops all nodes that appear lexically before n
|
||||
// and then pushes n on the stack.
|
||||
func (s *nodeStack) push(n Node) {
|
||||
s.pop(n.Pos())
|
||||
*s = append((*s), n)
|
||||
}
|
||||
|
||||
// pop pops all nodes that appear lexically before pos
|
||||
// (i.e., whose lexical extent has ended before or at pos).
|
||||
// It returns the last node popped.
|
||||
func (s *nodeStack) pop(pos token.Pos) (top Node) {
|
||||
i := len(*s)
|
||||
for i > 0 && (*s)[i-1].End() <= pos {
|
||||
top = (*s)[i-1]
|
||||
i--
|
||||
}
|
||||
*s = (*s)[0:i]
|
||||
return top
|
||||
}
|
||||
|
||||
// NewCommentMap creates a new comment map by associating comment groups
|
||||
// of the comments list with the nodes of the AST specified by node.
|
||||
//
|
||||
// A comment group g is associated with a node n if:
|
||||
//
|
||||
// - g starts on the same line as n ends
|
||||
// - g starts on the line immediately following n, and there is
|
||||
// at least one empty line after g and before the next node
|
||||
// - g starts before n and is not associated to the node before n
|
||||
// via the previous rules
|
||||
//
|
||||
// NewCommentMap tries to associate a comment group to the "largest"
|
||||
// node possible: For instance, if the comment is a line comment
|
||||
// trailing an assignment, the comment is associated with the entire
|
||||
// assignment rather than just the last operand in the assignment.
|
||||
func NewCommentMap(fset *token.FileSet, node Node, comments []*CommentGroup) CommentMap {
|
||||
if len(comments) == 0 {
|
||||
return nil // no comments to map
|
||||
}
|
||||
|
||||
cmap := make(CommentMap)
|
||||
|
||||
// set up comment reader r
|
||||
tmp := make([]*CommentGroup, len(comments))
|
||||
copy(tmp, comments) // don't change incoming comments
|
||||
sortComments(tmp)
|
||||
r := commentListReader{fset: fset, list: tmp} // !r.eol() because len(comments) > 0
|
||||
r.next()
|
||||
|
||||
// create node list in lexical order
|
||||
nodes := nodeList(node)
|
||||
nodes = append(nodes, nil) // append sentinel
|
||||
|
||||
// set up iteration variables
|
||||
var (
|
||||
p Node // previous node
|
||||
pend token.Position // end of p
|
||||
pg Node // previous node group (enclosing nodes of "importance")
|
||||
pgend token.Position // end of pg
|
||||
stack nodeStack // stack of node groups
|
||||
)
|
||||
|
||||
for _, q := range nodes {
|
||||
var qpos token.Position
|
||||
if q != nil {
|
||||
qpos = fset.Position(q.Pos()) // current node position
|
||||
} else {
|
||||
// set fake sentinel position to infinity so that
|
||||
// all comments get processed before the sentinel
|
||||
const infinity = 1 << 30
|
||||
qpos.Offset = infinity
|
||||
qpos.Line = infinity
|
||||
}
|
||||
|
||||
// process comments before current node
|
||||
for r.end.Offset <= qpos.Offset {
|
||||
// determine recent node group
|
||||
if top := stack.pop(r.comment.Pos()); top != nil {
|
||||
pg = top
|
||||
pgend = fset.Position(pg.End())
|
||||
}
|
||||
// Try to associate a comment first with a node group
|
||||
// (i.e., a node of "importance" such as a declaration);
|
||||
// if that fails, try to associate it with the most recent
|
||||
// node.
|
||||
// TODO(gri) try to simplify the logic below
|
||||
var assoc Node
|
||||
switch {
|
||||
case pg != nil &&
|
||||
(pgend.Line == r.pos.Line ||
|
||||
pgend.Line+1 == r.pos.Line && r.end.Line+1 < qpos.Line):
|
||||
// 1) comment starts on same line as previous node group ends, or
|
||||
// 2) comment starts on the line immediately after the
|
||||
// previous node group and there is an empty line before
|
||||
// the current node
|
||||
// => associate comment with previous node group
|
||||
assoc = pg
|
||||
case p != nil &&
|
||||
(pend.Line == r.pos.Line ||
|
||||
pend.Line+1 == r.pos.Line && r.end.Line+1 < qpos.Line ||
|
||||
q == nil):
|
||||
// same rules apply as above for p rather than pg,
|
||||
// but also associate with p if we are at the end (q == nil)
|
||||
assoc = p
|
||||
default:
|
||||
// otherwise, associate comment with current node
|
||||
if q == nil {
|
||||
// we can only reach here if there was no p
|
||||
// which would imply that there were no nodes
|
||||
panic("internal error: no comments should be associated with sentinel")
|
||||
}
|
||||
assoc = q
|
||||
}
|
||||
cmap.addComment(assoc, r.comment)
|
||||
if r.eol() {
|
||||
return cmap
|
||||
}
|
||||
r.next()
|
||||
}
|
||||
|
||||
// update previous node
|
||||
p = q
|
||||
pend = fset.Position(p.End())
|
||||
|
||||
// update previous node group if we see an "important" node
|
||||
switch q.(type) {
|
||||
case *File, *Field, Decl, Spec, Stmt:
|
||||
stack.push(q)
|
||||
}
|
||||
}
|
||||
|
||||
return cmap
|
||||
}
|
||||
|
||||
// Update replaces an old node in the comment map with the new node
|
||||
// and returns the new node. Comments that were associated with the
|
||||
// old node are associated with the new node.
|
||||
func (cmap CommentMap) Update(old, new Node) Node {
|
||||
if list := cmap[old]; len(list) > 0 {
|
||||
delete(cmap, old)
|
||||
cmap[new] = append(cmap[new], list...)
|
||||
}
|
||||
return new
|
||||
}
|
||||
|
||||
// Filter returns a new comment map consisting of only those
|
||||
// entries of cmap for which a corresponding node exists in
|
||||
// the AST specified by node.
|
||||
func (cmap CommentMap) Filter(node Node) CommentMap {
|
||||
umap := make(CommentMap)
|
||||
Inspect(node, func(n Node) bool {
|
||||
if g := cmap[n]; len(g) > 0 {
|
||||
umap[n] = g
|
||||
}
|
||||
return true
|
||||
})
|
||||
return umap
|
||||
}
|
||||
|
||||
// Comments returns the list of comment groups in the comment map.
|
||||
// The result is sorted in source order.
|
||||
func (cmap CommentMap) Comments() []*CommentGroup {
|
||||
list := make([]*CommentGroup, 0, len(cmap))
|
||||
for _, e := range cmap {
|
||||
list = append(list, e...)
|
||||
}
|
||||
sortComments(list)
|
||||
return list
|
||||
}
|
||||
|
||||
func summary(list []*CommentGroup) string {
|
||||
const maxLen = 40
|
||||
var buf bytes.Buffer
|
||||
|
||||
// collect comments text
|
||||
loop:
|
||||
for _, group := range list {
|
||||
// Note: CommentGroup.Text() does too much work for what we
|
||||
// need and would only replace this innermost loop.
|
||||
// Just do it explicitly.
|
||||
for _, comment := range group.List {
|
||||
if buf.Len() >= maxLen {
|
||||
break loop
|
||||
}
|
||||
buf.WriteString(comment.Text)
|
||||
}
|
||||
}
|
||||
|
||||
// truncate if too long
|
||||
if buf.Len() > maxLen {
|
||||
buf.Truncate(maxLen - 3)
|
||||
buf.WriteString("...")
|
||||
}
|
||||
|
||||
// replace any invisibles with blanks
|
||||
bytes := buf.Bytes()
|
||||
for i, b := range bytes {
|
||||
switch b {
|
||||
case '\t', '\n', '\r':
|
||||
bytes[i] = ' '
|
||||
}
|
||||
}
|
||||
|
||||
return string(bytes)
|
||||
}
|
||||
|
||||
func (cmap CommentMap) String() string {
|
||||
// print map entries in sorted order
|
||||
var nodes []Node
|
||||
for node := range cmap {
|
||||
nodes = append(nodes, node)
|
||||
}
|
||||
sort.Sort(byInterval(nodes))
|
||||
|
||||
var buf bytes.Buffer
|
||||
fmt.Fprintln(&buf, "CommentMap {")
|
||||
for _, node := range nodes {
|
||||
comment := cmap[node]
|
||||
// print name of identifiers; print node type for other nodes
|
||||
var s string
|
||||
if ident, ok := node.(*Ident); ok {
|
||||
s = ident.Name
|
||||
} else {
|
||||
s = fmt.Sprintf("%T", node)
|
||||
}
|
||||
fmt.Fprintf(&buf, "\t%p %20s: %s\n", node, s, summary(comment))
|
||||
}
|
||||
fmt.Fprintln(&buf, "}")
|
||||
return buf.String()
|
||||
}
|
|
@ -0,0 +1,170 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// To avoid a cyclic dependency with go/parser, this file is in a separate package.
|
||||
|
||||
package ast_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"sort"
|
||||
"testing"
|
||||
|
||||
. "golang.org/x/website/internal/backport/go/ast"
|
||||
"golang.org/x/website/internal/backport/go/parser"
|
||||
"golang.org/x/website/internal/backport/go/token"
|
||||
)
|
||||
|
||||
const src = `
|
||||
// the very first comment
|
||||
|
||||
// package p
|
||||
package p /* the name is p */
|
||||
|
||||
// imports
|
||||
import (
|
||||
"bytes" // bytes
|
||||
"fmt" // fmt
|
||||
"golang.org/x/website/internal/backport/go/ast"
|
||||
"golang.org/x/website/internal/backport/go/parser"
|
||||
)
|
||||
|
||||
// T
|
||||
type T struct {
|
||||
a, b, c int // associated with a, b, c
|
||||
// associated with x, y
|
||||
x, y float64 // float values
|
||||
z complex128 // complex value
|
||||
}
|
||||
// also associated with T
|
||||
|
||||
// x
|
||||
var x = 0 // x = 0
|
||||
// also associated with x
|
||||
|
||||
// f1
|
||||
func f1() {
|
||||
/* associated with s1 */
|
||||
s1()
|
||||
// also associated with s1
|
||||
|
||||
// associated with s2
|
||||
|
||||
// also associated with s2
|
||||
s2() // line comment for s2
|
||||
}
|
||||
// associated with f1
|
||||
// also associated with f1
|
||||
|
||||
// associated with f2
|
||||
|
||||
// f2
|
||||
func f2() {
|
||||
}
|
||||
|
||||
func f3() {
|
||||
i := 1 /* 1 */ + 2 // addition
|
||||
_ = i
|
||||
}
|
||||
|
||||
// the very last comment
|
||||
`
|
||||
|
||||
// res maps a key of the form "line number: node type"
|
||||
// to the associated comments' text.
|
||||
var res = map[string]string{
|
||||
" 5: *ast.File": "the very first comment\npackage p\n",
|
||||
" 5: *ast.Ident": " the name is p\n",
|
||||
" 8: *ast.GenDecl": "imports\n",
|
||||
" 9: *ast.ImportSpec": "bytes\n",
|
||||
"10: *ast.ImportSpec": "fmt\n",
|
||||
"16: *ast.GenDecl": "T\nalso associated with T\n",
|
||||
"17: *ast.Field": "associated with a, b, c\n",
|
||||
"19: *ast.Field": "associated with x, y\nfloat values\n",
|
||||
"20: *ast.Field": "complex value\n",
|
||||
"25: *ast.GenDecl": "x\nx = 0\nalso associated with x\n",
|
||||
"29: *ast.FuncDecl": "f1\nassociated with f1\nalso associated with f1\n",
|
||||
"31: *ast.ExprStmt": " associated with s1\nalso associated with s1\n",
|
||||
"37: *ast.ExprStmt": "associated with s2\nalso associated with s2\nline comment for s2\n",
|
||||
"45: *ast.FuncDecl": "associated with f2\nf2\n",
|
||||
"49: *ast.AssignStmt": "addition\n",
|
||||
"49: *ast.BasicLit": " 1\n",
|
||||
"50: *ast.Ident": "the very last comment\n",
|
||||
}
|
||||
|
||||
func ctext(list []*CommentGroup) string {
|
||||
var buf bytes.Buffer
|
||||
for _, g := range list {
|
||||
buf.WriteString(g.Text())
|
||||
}
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
func TestCommentMap(t *testing.T) {
|
||||
fset := token.NewFileSet()
|
||||
f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
cmap := NewCommentMap(fset, f, f.Comments)
|
||||
|
||||
// very correct association of comments
|
||||
for n, list := range cmap {
|
||||
key := fmt.Sprintf("%2d: %T", fset.Position(n.Pos()).Line, n)
|
||||
got := ctext(list)
|
||||
want := res[key]
|
||||
if got != want {
|
||||
t.Errorf("%s: got %q; want %q", key, got, want)
|
||||
}
|
||||
}
|
||||
|
||||
// verify that no comments got lost
|
||||
if n := len(cmap.Comments()); n != len(f.Comments) {
|
||||
t.Errorf("got %d comment groups in map; want %d", n, len(f.Comments))
|
||||
}
|
||||
|
||||
// support code to update test:
|
||||
// set genMap to true to generate res map
|
||||
const genMap = false
|
||||
if genMap {
|
||||
out := make([]string, 0, len(cmap))
|
||||
for n, list := range cmap {
|
||||
out = append(out, fmt.Sprintf("\t\"%2d: %T\":\t%q,", fset.Position(n.Pos()).Line, n, ctext(list)))
|
||||
}
|
||||
sort.Strings(out)
|
||||
for _, s := range out {
|
||||
fmt.Println(s)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestFilter(t *testing.T) {
|
||||
fset := token.NewFileSet()
|
||||
f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
cmap := NewCommentMap(fset, f, f.Comments)
|
||||
|
||||
// delete variable declaration
|
||||
for i, decl := range f.Decls {
|
||||
if gen, ok := decl.(*GenDecl); ok && gen.Tok == token.VAR {
|
||||
copy(f.Decls[i:], f.Decls[i+1:])
|
||||
f.Decls = f.Decls[:len(f.Decls)-1]
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// check if comments are filtered correctly
|
||||
cc := cmap.Filter(f)
|
||||
for n, list := range cc {
|
||||
key := fmt.Sprintf("%2d: %T", fset.Position(n.Pos()).Line, n)
|
||||
got := ctext(list)
|
||||
want := res[key]
|
||||
if key == "25: *ast.GenDecl" || got != want {
|
||||
t.Errorf("%s: got %q; want %q", key, got, want)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,206 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"golang.org/x/website/internal/backport/go/ast"
|
||||
"golang.org/x/website/internal/backport/go/format"
|
||||
"golang.org/x/website/internal/backport/go/parser"
|
||||
"golang.org/x/website/internal/backport/go/token"
|
||||
)
|
||||
|
||||
// This example demonstrates how to inspect the AST of a Go program.
|
||||
func ExampleInspect() {
|
||||
// src is the input for which we want to inspect the AST.
|
||||
src := `
|
||||
package p
|
||||
const c = 1.0
|
||||
var X = f(3.14)*2 + c
|
||||
`
|
||||
|
||||
// Create the AST by parsing src.
|
||||
fset := token.NewFileSet() // positions are relative to fset
|
||||
f, err := parser.ParseFile(fset, "src.go", src, 0)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// Inspect the AST and print all identifiers and literals.
|
||||
ast.Inspect(f, func(n ast.Node) bool {
|
||||
var s string
|
||||
switch x := n.(type) {
|
||||
case *ast.BasicLit:
|
||||
s = x.Value
|
||||
case *ast.Ident:
|
||||
s = x.Name
|
||||
}
|
||||
if s != "" {
|
||||
fmt.Printf("%s:\t%s\n", fset.Position(n.Pos()), s)
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
// Output:
|
||||
// src.go:2:9: p
|
||||
// src.go:3:7: c
|
||||
// src.go:3:11: 1.0
|
||||
// src.go:4:5: X
|
||||
// src.go:4:9: f
|
||||
// src.go:4:11: 3.14
|
||||
// src.go:4:17: 2
|
||||
// src.go:4:21: c
|
||||
}
|
||||
|
||||
// This example shows what an AST looks like when printed for debugging.
|
||||
func ExamplePrint() {
|
||||
// src is the input for which we want to print the AST.
|
||||
src := `
|
||||
package main
|
||||
func main() {
|
||||
println("Hello, World!")
|
||||
}
|
||||
`
|
||||
|
||||
// Create the AST by parsing src.
|
||||
fset := token.NewFileSet() // positions are relative to fset
|
||||
f, err := parser.ParseFile(fset, "", src, 0)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// Print the AST.
|
||||
ast.Print(fset, f)
|
||||
|
||||
// Output:
|
||||
// 0 *ast.File {
|
||||
// 1 . Package: 2:1
|
||||
// 2 . Name: *ast.Ident {
|
||||
// 3 . . NamePos: 2:9
|
||||
// 4 . . Name: "main"
|
||||
// 5 . }
|
||||
// 6 . Decls: []ast.Decl (len = 1) {
|
||||
// 7 . . 0: *ast.FuncDecl {
|
||||
// 8 . . . Name: *ast.Ident {
|
||||
// 9 . . . . NamePos: 3:6
|
||||
// 10 . . . . Name: "main"
|
||||
// 11 . . . . Obj: *ast.Object {
|
||||
// 12 . . . . . Kind: func
|
||||
// 13 . . . . . Name: "main"
|
||||
// 14 . . . . . Decl: *(obj @ 7)
|
||||
// 15 . . . . }
|
||||
// 16 . . . }
|
||||
// 17 . . . Type: *ast.FuncType {
|
||||
// 18 . . . . Func: 3:1
|
||||
// 19 . . . . Params: *ast.FieldList {
|
||||
// 20 . . . . . Opening: 3:10
|
||||
// 21 . . . . . Closing: 3:11
|
||||
// 22 . . . . }
|
||||
// 23 . . . }
|
||||
// 24 . . . Body: *ast.BlockStmt {
|
||||
// 25 . . . . Lbrace: 3:13
|
||||
// 26 . . . . List: []ast.Stmt (len = 1) {
|
||||
// 27 . . . . . 0: *ast.ExprStmt {
|
||||
// 28 . . . . . . X: *ast.CallExpr {
|
||||
// 29 . . . . . . . Fun: *ast.Ident {
|
||||
// 30 . . . . . . . . NamePos: 4:2
|
||||
// 31 . . . . . . . . Name: "println"
|
||||
// 32 . . . . . . . }
|
||||
// 33 . . . . . . . Lparen: 4:9
|
||||
// 34 . . . . . . . Args: []ast.Expr (len = 1) {
|
||||
// 35 . . . . . . . . 0: *ast.BasicLit {
|
||||
// 36 . . . . . . . . . ValuePos: 4:10
|
||||
// 37 . . . . . . . . . Kind: STRING
|
||||
// 38 . . . . . . . . . Value: "\"Hello, World!\""
|
||||
// 39 . . . . . . . . }
|
||||
// 40 . . . . . . . }
|
||||
// 41 . . . . . . . Ellipsis: -
|
||||
// 42 . . . . . . . Rparen: 4:25
|
||||
// 43 . . . . . . }
|
||||
// 44 . . . . . }
|
||||
// 45 . . . . }
|
||||
// 46 . . . . Rbrace: 5:1
|
||||
// 47 . . . }
|
||||
// 48 . . }
|
||||
// 49 . }
|
||||
// 50 . Scope: *ast.Scope {
|
||||
// 51 . . Objects: map[string]*ast.Object (len = 1) {
|
||||
// 52 . . . "main": *(obj @ 11)
|
||||
// 53 . . }
|
||||
// 54 . }
|
||||
// 55 . Unresolved: []*ast.Ident (len = 1) {
|
||||
// 56 . . 0: *(obj @ 29)
|
||||
// 57 . }
|
||||
// 58 }
|
||||
}
|
||||
|
||||
// This example illustrates how to remove a variable declaration
|
||||
// in a Go program while maintaining correct comment association
|
||||
// using an ast.CommentMap.
|
||||
func ExampleCommentMap() {
|
||||
// src is the input for which we create the AST that we
|
||||
// are going to manipulate.
|
||||
src := `
|
||||
// This is the package comment.
|
||||
package main
|
||||
|
||||
// This comment is associated with the hello constant.
|
||||
const hello = "Hello, World!" // line comment 1
|
||||
|
||||
// This comment is associated with the foo variable.
|
||||
var foo = hello // line comment 2
|
||||
|
||||
// This comment is associated with the main function.
|
||||
func main() {
|
||||
fmt.Println(hello) // line comment 3
|
||||
}
|
||||
`
|
||||
|
||||
// Create the AST by parsing src.
|
||||
fset := token.NewFileSet() // positions are relative to fset
|
||||
f, err := parser.ParseFile(fset, "src.go", src, parser.ParseComments)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// Create an ast.CommentMap from the ast.File's comments.
|
||||
// This helps keeping the association between comments
|
||||
// and AST nodes.
|
||||
cmap := ast.NewCommentMap(fset, f, f.Comments)
|
||||
|
||||
// Remove the first variable declaration from the list of declarations.
|
||||
for i, decl := range f.Decls {
|
||||
if gen, ok := decl.(*ast.GenDecl); ok && gen.Tok == token.VAR {
|
||||
copy(f.Decls[i:], f.Decls[i+1:])
|
||||
f.Decls = f.Decls[:len(f.Decls)-1]
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Use the comment map to filter comments that don't belong anymore
|
||||
// (the comments associated with the variable declaration), and create
|
||||
// the new comments list.
|
||||
f.Comments = cmap.Filter(f).Comments()
|
||||
|
||||
// Print the modified AST.
|
||||
var buf bytes.Buffer
|
||||
if err := format.Node(&buf, fset, f); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
fmt.Printf("%s", buf.Bytes())
|
||||
|
||||
// Output:
|
||||
// // This is the package comment.
|
||||
// package main
|
||||
//
|
||||
// // This comment is associated with the hello constant.
|
||||
// const hello = "Hello, World!" // line comment 1
|
||||
//
|
||||
// // This comment is associated with the main function.
|
||||
// func main() {
|
||||
// fmt.Println(hello) // line comment 3
|
||||
// }
|
||||
}
|
|
@ -0,0 +1,488 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
"golang.org/x/website/internal/backport/go/token"
|
||||
"sort"
|
||||
)
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Export filtering
|
||||
|
||||
// exportFilter is a special filter function to extract exported nodes.
|
||||
func exportFilter(name string) bool {
|
||||
return IsExported(name)
|
||||
}
|
||||
|
||||
// FileExports trims the AST for a Go source file in place such that
|
||||
// only exported nodes remain: all top-level identifiers which are not exported
|
||||
// and their associated information (such as type, initial value, or function
|
||||
// body) are removed. Non-exported fields and methods of exported types are
|
||||
// stripped. The File.Comments list is not changed.
|
||||
//
|
||||
// FileExports reports whether there are exported declarations.
|
||||
func FileExports(src *File) bool {
|
||||
return filterFile(src, exportFilter, true)
|
||||
}
|
||||
|
||||
// PackageExports trims the AST for a Go package in place such that
|
||||
// only exported nodes remain. The pkg.Files list is not changed, so that
|
||||
// file names and top-level package comments don't get lost.
|
||||
//
|
||||
// PackageExports reports whether there are exported declarations;
|
||||
// it returns false otherwise.
|
||||
func PackageExports(pkg *Package) bool {
|
||||
return filterPackage(pkg, exportFilter, true)
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// General filtering
|
||||
|
||||
type Filter func(string) bool
|
||||
|
||||
func filterIdentList(list []*Ident, f Filter) []*Ident {
|
||||
j := 0
|
||||
for _, x := range list {
|
||||
if f(x.Name) {
|
||||
list[j] = x
|
||||
j++
|
||||
}
|
||||
}
|
||||
return list[0:j]
|
||||
}
|
||||
|
||||
// fieldName assumes that x is the type of an anonymous field and
|
||||
// returns the corresponding field name. If x is not an acceptable
|
||||
// anonymous field, the result is nil.
|
||||
func fieldName(x Expr) *Ident {
|
||||
switch t := x.(type) {
|
||||
case *Ident:
|
||||
return t
|
||||
case *SelectorExpr:
|
||||
if _, ok := t.X.(*Ident); ok {
|
||||
return t.Sel
|
||||
}
|
||||
case *StarExpr:
|
||||
return fieldName(t.X)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func filterFieldList(fields *FieldList, filter Filter, export bool) (removedFields bool) {
|
||||
if fields == nil {
|
||||
return false
|
||||
}
|
||||
list := fields.List
|
||||
j := 0
|
||||
for _, f := range list {
|
||||
keepField := false
|
||||
if len(f.Names) == 0 {
|
||||
// anonymous field
|
||||
name := fieldName(f.Type)
|
||||
keepField = name != nil && filter(name.Name)
|
||||
} else {
|
||||
n := len(f.Names)
|
||||
f.Names = filterIdentList(f.Names, filter)
|
||||
if len(f.Names) < n {
|
||||
removedFields = true
|
||||
}
|
||||
keepField = len(f.Names) > 0
|
||||
}
|
||||
if keepField {
|
||||
if export {
|
||||
filterType(f.Type, filter, export)
|
||||
}
|
||||
list[j] = f
|
||||
j++
|
||||
}
|
||||
}
|
||||
if j < len(list) {
|
||||
removedFields = true
|
||||
}
|
||||
fields.List = list[0:j]
|
||||
return
|
||||
}
|
||||
|
||||
func filterCompositeLit(lit *CompositeLit, filter Filter, export bool) {
|
||||
n := len(lit.Elts)
|
||||
lit.Elts = filterExprList(lit.Elts, filter, export)
|
||||
if len(lit.Elts) < n {
|
||||
lit.Incomplete = true
|
||||
}
|
||||
}
|
||||
|
||||
func filterExprList(list []Expr, filter Filter, export bool) []Expr {
|
||||
j := 0
|
||||
for _, exp := range list {
|
||||
switch x := exp.(type) {
|
||||
case *CompositeLit:
|
||||
filterCompositeLit(x, filter, export)
|
||||
case *KeyValueExpr:
|
||||
if x, ok := x.Key.(*Ident); ok && !filter(x.Name) {
|
||||
continue
|
||||
}
|
||||
if x, ok := x.Value.(*CompositeLit); ok {
|
||||
filterCompositeLit(x, filter, export)
|
||||
}
|
||||
}
|
||||
list[j] = exp
|
||||
j++
|
||||
}
|
||||
return list[0:j]
|
||||
}
|
||||
|
||||
func filterParamList(fields *FieldList, filter Filter, export bool) bool {
|
||||
if fields == nil {
|
||||
return false
|
||||
}
|
||||
var b bool
|
||||
for _, f := range fields.List {
|
||||
if filterType(f.Type, filter, export) {
|
||||
b = true
|
||||
}
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
func filterType(typ Expr, f Filter, export bool) bool {
|
||||
switch t := typ.(type) {
|
||||
case *Ident:
|
||||
return f(t.Name)
|
||||
case *ParenExpr:
|
||||
return filterType(t.X, f, export)
|
||||
case *ArrayType:
|
||||
return filterType(t.Elt, f, export)
|
||||
case *StructType:
|
||||
if filterFieldList(t.Fields, f, export) {
|
||||
t.Incomplete = true
|
||||
}
|
||||
return len(t.Fields.List) > 0
|
||||
case *FuncType:
|
||||
b1 := filterParamList(t.Params, f, export)
|
||||
b2 := filterParamList(t.Results, f, export)
|
||||
return b1 || b2
|
||||
case *InterfaceType:
|
||||
if filterFieldList(t.Methods, f, export) {
|
||||
t.Incomplete = true
|
||||
}
|
||||
return len(t.Methods.List) > 0
|
||||
case *MapType:
|
||||
b1 := filterType(t.Key, f, export)
|
||||
b2 := filterType(t.Value, f, export)
|
||||
return b1 || b2
|
||||
case *ChanType:
|
||||
return filterType(t.Value, f, export)
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func filterSpec(spec Spec, f Filter, export bool) bool {
|
||||
switch s := spec.(type) {
|
||||
case *ValueSpec:
|
||||
s.Names = filterIdentList(s.Names, f)
|
||||
s.Values = filterExprList(s.Values, f, export)
|
||||
if len(s.Names) > 0 {
|
||||
if export {
|
||||
filterType(s.Type, f, export)
|
||||
}
|
||||
return true
|
||||
}
|
||||
case *TypeSpec:
|
||||
if f(s.Name.Name) {
|
||||
if export {
|
||||
filterType(s.Type, f, export)
|
||||
}
|
||||
return true
|
||||
}
|
||||
if !export {
|
||||
// For general filtering (not just exports),
|
||||
// filter type even if name is not filtered
|
||||
// out.
|
||||
// If the type contains filtered elements,
|
||||
// keep the declaration.
|
||||
return filterType(s.Type, f, export)
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func filterSpecList(list []Spec, f Filter, export bool) []Spec {
|
||||
j := 0
|
||||
for _, s := range list {
|
||||
if filterSpec(s, f, export) {
|
||||
list[j] = s
|
||||
j++
|
||||
}
|
||||
}
|
||||
return list[0:j]
|
||||
}
|
||||
|
||||
// FilterDecl trims the AST for a Go declaration in place by removing
|
||||
// all names (including struct field and interface method names, but
|
||||
// not from parameter lists) that don't pass through the filter f.
|
||||
//
|
||||
// FilterDecl reports whether there are any declared names left after
|
||||
// filtering.
|
||||
func FilterDecl(decl Decl, f Filter) bool {
|
||||
return filterDecl(decl, f, false)
|
||||
}
|
||||
|
||||
func filterDecl(decl Decl, f Filter, export bool) bool {
|
||||
switch d := decl.(type) {
|
||||
case *GenDecl:
|
||||
d.Specs = filterSpecList(d.Specs, f, export)
|
||||
return len(d.Specs) > 0
|
||||
case *FuncDecl:
|
||||
return f(d.Name.Name)
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// FilterFile trims the AST for a Go file in place by removing all
|
||||
// names from top-level declarations (including struct field and
|
||||
// interface method names, but not from parameter lists) that don't
|
||||
// pass through the filter f. If the declaration is empty afterwards,
|
||||
// the declaration is removed from the AST. Import declarations are
|
||||
// always removed. The File.Comments list is not changed.
|
||||
//
|
||||
// FilterFile reports whether there are any top-level declarations
|
||||
// left after filtering.
|
||||
func FilterFile(src *File, f Filter) bool {
|
||||
return filterFile(src, f, false)
|
||||
}
|
||||
|
||||
func filterFile(src *File, f Filter, export bool) bool {
|
||||
j := 0
|
||||
for _, d := range src.Decls {
|
||||
if filterDecl(d, f, export) {
|
||||
src.Decls[j] = d
|
||||
j++
|
||||
}
|
||||
}
|
||||
src.Decls = src.Decls[0:j]
|
||||
return j > 0
|
||||
}
|
||||
|
||||
// FilterPackage trims the AST for a Go package in place by removing
|
||||
// all names from top-level declarations (including struct field and
|
||||
// interface method names, but not from parameter lists) that don't
|
||||
// pass through the filter f. If the declaration is empty afterwards,
|
||||
// the declaration is removed from the AST. The pkg.Files list is not
|
||||
// changed, so that file names and top-level package comments don't get
|
||||
// lost.
|
||||
//
|
||||
// FilterPackage reports whether there are any top-level declarations
|
||||
// left after filtering.
|
||||
func FilterPackage(pkg *Package, f Filter) bool {
|
||||
return filterPackage(pkg, f, false)
|
||||
}
|
||||
|
||||
func filterPackage(pkg *Package, f Filter, export bool) bool {
|
||||
hasDecls := false
|
||||
for _, src := range pkg.Files {
|
||||
if filterFile(src, f, export) {
|
||||
hasDecls = true
|
||||
}
|
||||
}
|
||||
return hasDecls
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Merging of package files
|
||||
|
||||
// The MergeMode flags control the behavior of MergePackageFiles.
|
||||
type MergeMode uint
|
||||
|
||||
const (
|
||||
// If set, duplicate function declarations are excluded.
|
||||
FilterFuncDuplicates MergeMode = 1 << iota
|
||||
// If set, comments that are not associated with a specific
|
||||
// AST node (as Doc or Comment) are excluded.
|
||||
FilterUnassociatedComments
|
||||
// If set, duplicate import declarations are excluded.
|
||||
FilterImportDuplicates
|
||||
)
|
||||
|
||||
// nameOf returns the function (foo) or method name (foo.bar) for
|
||||
// the given function declaration. If the AST is incorrect for the
|
||||
// receiver, it assumes a function instead.
|
||||
func nameOf(f *FuncDecl) string {
|
||||
if r := f.Recv; r != nil && len(r.List) == 1 {
|
||||
// looks like a correct receiver declaration
|
||||
t := r.List[0].Type
|
||||
// dereference pointer receiver types
|
||||
if p, _ := t.(*StarExpr); p != nil {
|
||||
t = p.X
|
||||
}
|
||||
// the receiver type must be a type name
|
||||
if p, _ := t.(*Ident); p != nil {
|
||||
return p.Name + "." + f.Name.Name
|
||||
}
|
||||
// otherwise assume a function instead
|
||||
}
|
||||
return f.Name.Name
|
||||
}
|
||||
|
||||
// separator is an empty //-style comment that is interspersed between
|
||||
// different comment groups when they are concatenated into a single group
|
||||
var separator = &Comment{token.NoPos, "//"}
|
||||
|
||||
// MergePackageFiles creates a file AST by merging the ASTs of the
|
||||
// files belonging to a package. The mode flags control merging behavior.
|
||||
func MergePackageFiles(pkg *Package, mode MergeMode) *File {
|
||||
// Count the number of package docs, comments and declarations across
|
||||
// all package files. Also, compute sorted list of filenames, so that
|
||||
// subsequent iterations can always iterate in the same order.
|
||||
ndocs := 0
|
||||
ncomments := 0
|
||||
ndecls := 0
|
||||
filenames := make([]string, len(pkg.Files))
|
||||
i := 0
|
||||
for filename, f := range pkg.Files {
|
||||
filenames[i] = filename
|
||||
i++
|
||||
if f.Doc != nil {
|
||||
ndocs += len(f.Doc.List) + 1 // +1 for separator
|
||||
}
|
||||
ncomments += len(f.Comments)
|
||||
ndecls += len(f.Decls)
|
||||
}
|
||||
sort.Strings(filenames)
|
||||
|
||||
// Collect package comments from all package files into a single
|
||||
// CommentGroup - the collected package documentation. In general
|
||||
// there should be only one file with a package comment; but it's
|
||||
// better to collect extra comments than drop them on the floor.
|
||||
var doc *CommentGroup
|
||||
var pos token.Pos
|
||||
if ndocs > 0 {
|
||||
list := make([]*Comment, ndocs-1) // -1: no separator before first group
|
||||
i := 0
|
||||
for _, filename := range filenames {
|
||||
f := pkg.Files[filename]
|
||||
if f.Doc != nil {
|
||||
if i > 0 {
|
||||
// not the first group - add separator
|
||||
list[i] = separator
|
||||
i++
|
||||
}
|
||||
for _, c := range f.Doc.List {
|
||||
list[i] = c
|
||||
i++
|
||||
}
|
||||
if f.Package > pos {
|
||||
// Keep the maximum package clause position as
|
||||
// position for the package clause of the merged
|
||||
// files.
|
||||
pos = f.Package
|
||||
}
|
||||
}
|
||||
}
|
||||
doc = &CommentGroup{list}
|
||||
}
|
||||
|
||||
// Collect declarations from all package files.
|
||||
var decls []Decl
|
||||
if ndecls > 0 {
|
||||
decls = make([]Decl, ndecls)
|
||||
funcs := make(map[string]int) // map of func name -> decls index
|
||||
i := 0 // current index
|
||||
n := 0 // number of filtered entries
|
||||
for _, filename := range filenames {
|
||||
f := pkg.Files[filename]
|
||||
for _, d := range f.Decls {
|
||||
if mode&FilterFuncDuplicates != 0 {
|
||||
// A language entity may be declared multiple
|
||||
// times in different package files; only at
|
||||
// build time declarations must be unique.
|
||||
// For now, exclude multiple declarations of
|
||||
// functions - keep the one with documentation.
|
||||
//
|
||||
// TODO(gri): Expand this filtering to other
|
||||
// entities (const, type, vars) if
|
||||
// multiple declarations are common.
|
||||
if f, isFun := d.(*FuncDecl); isFun {
|
||||
name := nameOf(f)
|
||||
if j, exists := funcs[name]; exists {
|
||||
// function declared already
|
||||
if decls[j] != nil && decls[j].(*FuncDecl).Doc == nil {
|
||||
// existing declaration has no documentation;
|
||||
// ignore the existing declaration
|
||||
decls[j] = nil
|
||||
} else {
|
||||
// ignore the new declaration
|
||||
d = nil
|
||||
}
|
||||
n++ // filtered an entry
|
||||
} else {
|
||||
funcs[name] = i
|
||||
}
|
||||
}
|
||||
}
|
||||
decls[i] = d
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
// Eliminate nil entries from the decls list if entries were
|
||||
// filtered. We do this using a 2nd pass in order to not disturb
|
||||
// the original declaration order in the source (otherwise, this
|
||||
// would also invalidate the monotonically increasing position
|
||||
// info within a single file).
|
||||
if n > 0 {
|
||||
i = 0
|
||||
for _, d := range decls {
|
||||
if d != nil {
|
||||
decls[i] = d
|
||||
i++
|
||||
}
|
||||
}
|
||||
decls = decls[0:i]
|
||||
}
|
||||
}
|
||||
|
||||
// Collect import specs from all package files.
|
||||
var imports []*ImportSpec
|
||||
if mode&FilterImportDuplicates != 0 {
|
||||
seen := make(map[string]bool)
|
||||
for _, filename := range filenames {
|
||||
f := pkg.Files[filename]
|
||||
for _, imp := range f.Imports {
|
||||
if path := imp.Path.Value; !seen[path] {
|
||||
// TODO: consider handling cases where:
|
||||
// - 2 imports exist with the same import path but
|
||||
// have different local names (one should probably
|
||||
// keep both of them)
|
||||
// - 2 imports exist but only one has a comment
|
||||
// - 2 imports exist and they both have (possibly
|
||||
// different) comments
|
||||
imports = append(imports, imp)
|
||||
seen[path] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Iterate over filenames for deterministic order.
|
||||
for _, filename := range filenames {
|
||||
f := pkg.Files[filename]
|
||||
imports = append(imports, f.Imports...)
|
||||
}
|
||||
}
|
||||
|
||||
// Collect comments from all package files.
|
||||
var comments []*CommentGroup
|
||||
if mode&FilterUnassociatedComments == 0 {
|
||||
comments = make([]*CommentGroup, ncomments)
|
||||
i := 0
|
||||
for _, filename := range filenames {
|
||||
f := pkg.Files[filename]
|
||||
i += copy(comments[i:], f.Comments)
|
||||
}
|
||||
}
|
||||
|
||||
// TODO(gri) need to compute unresolved identifiers!
|
||||
return &File{doc, pos, NewIdent(pkg.Name), decls, pkg.Scope, imports, nil, comments}
|
||||
}
|
|
@@ -0,0 +1,85 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// To avoid a cyclic dependency with go/parser, this file is in a separate package.

package ast_test

import (
	"bytes"
	"golang.org/x/website/internal/backport/go/ast"
	"golang.org/x/website/internal/backport/go/format"
	"golang.org/x/website/internal/backport/go/parser"
	"golang.org/x/website/internal/backport/go/token"
	"testing"
)

const input = `package p

type t1 struct{}
type t2 struct{}

func f1() {}
func f1() {}
func f2() {}

func (*t1) f1() {}
func (t1) f1() {}
func (t1) f2() {}

func (t2) f1() {}
func (t2) f2() {}
func (x *t2) f2() {}
`

// Calling ast.MergePackageFiles with ast.FilterFuncDuplicates
// keeps a duplicate entry with attached documentation in favor
// of one without, and it favors duplicate entries appearing
// later in the source over ones appearing earlier. This is why
// (*t2).f2 is kept and t2.f2 is eliminated in this test case.
const golden = `package p

type t1 struct{}
type t2 struct{}

func f1() {}
func f2() {}

func (t1) f1() {}
func (t1) f2() {}

func (t2) f1() {}

func (x *t2) f2() {}
`

func TestFilterDuplicates(t *testing.T) {
	// parse input
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "", input, 0)
	if err != nil {
		t.Fatal(err)
	}

	// create package
	files := map[string]*ast.File{"": file}
	pkg, err := ast.NewPackage(fset, files, nil, nil)
	if err != nil {
		t.Fatal(err)
	}

	// filter
	merged := ast.MergePackageFiles(pkg, ast.FilterFuncDuplicates)

	// pretty-print
	var buf bytes.Buffer
	if err := format.Node(&buf, fset, merged); err != nil {
		t.Fatal(err)
	}
	output := buf.String()

	if output != golden {
		t.Errorf("incorrect output:\n%s", output)
	}
}
|
@ -0,0 +1,230 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
"golang.org/x/website/internal/backport/go/token"
|
||||
"sort"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// SortImports sorts runs of consecutive import lines in import blocks in f.
|
||||
// It also removes duplicate imports when it is possible to do so without data loss.
|
||||
func SortImports(fset *token.FileSet, f *File) {
|
||||
for _, d := range f.Decls {
|
||||
d, ok := d.(*GenDecl)
|
||||
if !ok || d.Tok != token.IMPORT {
|
||||
// Not an import declaration, so we're done.
|
||||
// Imports are always first.
|
||||
break
|
||||
}
|
||||
|
||||
if !d.Lparen.IsValid() {
|
||||
// Not a block: sorted by default.
|
||||
continue
|
||||
}
|
||||
|
||||
// Identify and sort runs of specs on successive lines.
|
||||
i := 0
|
||||
specs := d.Specs[:0]
|
||||
for j, s := range d.Specs {
|
||||
if j > i && lineAt(fset, s.Pos()) > 1+lineAt(fset, d.Specs[j-1].End()) {
|
||||
// j begins a new run. End this one.
|
||||
specs = append(specs, sortSpecs(fset, f, d.Specs[i:j])...)
|
||||
i = j
|
||||
}
|
||||
}
|
||||
specs = append(specs, sortSpecs(fset, f, d.Specs[i:])...)
|
||||
d.Specs = specs
|
||||
|
||||
// Deduping can leave a blank line before the rparen; clean that up.
|
||||
if len(d.Specs) > 0 {
|
||||
lastSpec := d.Specs[len(d.Specs)-1]
|
||||
lastLine := lineAt(fset, lastSpec.Pos())
|
||||
rParenLine := lineAt(fset, d.Rparen)
|
||||
for rParenLine > lastLine+1 {
|
||||
rParenLine--
|
||||
fset.File(d.Rparen).MergeLine(rParenLine)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func lineAt(fset *token.FileSet, pos token.Pos) int {
|
||||
return fset.PositionFor(pos, false).Line
|
||||
}
|
||||
|
||||
func importPath(s Spec) string {
|
||||
t, err := strconv.Unquote(s.(*ImportSpec).Path.Value)
|
||||
if err == nil {
|
||||
return t
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func importName(s Spec) string {
|
||||
n := s.(*ImportSpec).Name
|
||||
if n == nil {
|
||||
return ""
|
||||
}
|
||||
return n.Name
|
||||
}
|
||||
|
||||
func importComment(s Spec) string {
|
||||
c := s.(*ImportSpec).Comment
|
||||
if c == nil {
|
||||
return ""
|
||||
}
|
||||
return c.Text()
|
||||
}
|
||||
|
||||
// collapse indicates whether prev may be removed, leaving only next.
|
||||
func collapse(prev, next Spec) bool {
|
||||
if importPath(next) != importPath(prev) || importName(next) != importName(prev) {
|
||||
return false
|
||||
}
|
||||
return prev.(*ImportSpec).Comment == nil
|
||||
}
|
||||
|
||||
type posSpan struct {
|
||||
Start token.Pos
|
||||
End token.Pos
|
||||
}
|
||||
|
||||
type cgPos struct {
|
||||
left bool // true if comment is to the left of the spec, false otherwise.
|
||||
cg *CommentGroup
|
||||
}
|
||||
|
||||
func sortSpecs(fset *token.FileSet, f *File, specs []Spec) []Spec {
|
||||
// Can't short-circuit here even if specs are already sorted,
|
||||
// since they might yet need deduplication.
|
||||
// A lone import, however, may be safely ignored.
|
||||
if len(specs) <= 1 {
|
||||
return specs
|
||||
}
|
||||
|
||||
// Record positions for specs.
|
||||
pos := make([]posSpan, len(specs))
|
||||
for i, s := range specs {
|
||||
pos[i] = posSpan{s.Pos(), s.End()}
|
||||
}
|
||||
|
||||
// Identify comments in this range.
|
||||
begSpecs := pos[0].Start
|
||||
endSpecs := pos[len(pos)-1].End
|
||||
beg := fset.File(begSpecs).LineStart(lineAt(fset, begSpecs))
|
||||
endLine := lineAt(fset, endSpecs)
|
||||
endFile := fset.File(endSpecs)
|
||||
var end token.Pos
|
||||
if endLine == endFile.LineCount() {
|
||||
end = endSpecs
|
||||
} else {
|
||||
end = endFile.LineStart(endLine + 1) // beginning of next line
|
||||
}
|
||||
first := len(f.Comments)
|
||||
last := -1
|
||||
for i, g := range f.Comments {
|
||||
if g.End() >= end {
|
||||
break
|
||||
}
|
||||
// g.End() < end
|
||||
if beg <= g.Pos() {
|
||||
// comment is within the range [beg, end[ of import declarations
|
||||
if i < first {
|
||||
first = i
|
||||
}
|
||||
if i > last {
|
||||
last = i
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var comments []*CommentGroup
|
||||
if last >= 0 {
|
||||
comments = f.Comments[first : last+1]
|
||||
}
|
||||
|
||||
// Assign each comment to the import spec on the same line.
|
||||
importComments := map[*ImportSpec][]cgPos{}
|
||||
specIndex := 0
|
||||
for _, g := range comments {
|
||||
for specIndex+1 < len(specs) && pos[specIndex+1].Start <= g.Pos() {
|
||||
specIndex++
|
||||
}
|
||||
var left bool
|
||||
// A block comment can appear before the first import spec.
|
||||
if specIndex == 0 && pos[specIndex].Start > g.Pos() {
|
||||
left = true
|
||||
} else if specIndex+1 < len(specs) && // Or it can appear on the left of an import spec.
|
||||
lineAt(fset, pos[specIndex].Start)+1 == lineAt(fset, g.Pos()) {
|
||||
specIndex++
|
||||
left = true
|
||||
}
|
||||
s := specs[specIndex].(*ImportSpec)
|
||||
importComments[s] = append(importComments[s], cgPos{left: left, cg: g})
|
||||
}
|
||||
|
||||
// Sort the import specs by import path.
|
||||
// Remove duplicates, when possible without data loss.
|
||||
// Reassign the import paths to have the same position sequence.
|
||||
// Reassign each comment to the spec on the same line.
|
||||
// Sort the comments by new position.
|
||||
sort.Slice(specs, func(i, j int) bool {
|
||||
ipath := importPath(specs[i])
|
||||
jpath := importPath(specs[j])
|
||||
if ipath != jpath {
|
||||
return ipath < jpath
|
||||
}
|
||||
iname := importName(specs[i])
|
||||
jname := importName(specs[j])
|
||||
if iname != jname {
|
||||
return iname < jname
|
||||
}
|
||||
return importComment(specs[i]) < importComment(specs[j])
|
||||
})
|
||||
|
||||
// Dedup. Thanks to our sorting, we can just consider
|
||||
// adjacent pairs of imports.
|
||||
deduped := specs[:0]
|
||||
for i, s := range specs {
|
||||
if i == len(specs)-1 || !collapse(s, specs[i+1]) {
|
||||
deduped = append(deduped, s)
|
||||
} else {
|
||||
p := s.Pos()
|
||||
fset.File(p).MergeLine(lineAt(fset, p))
|
||||
}
|
||||
}
|
||||
specs = deduped
|
||||
|
||||
// Fix up comment positions
|
||||
for i, s := range specs {
|
||||
s := s.(*ImportSpec)
|
||||
if s.Name != nil {
|
||||
s.Name.NamePos = pos[i].Start
|
||||
}
|
||||
s.Path.ValuePos = pos[i].Start
|
||||
s.EndPos = pos[i].End
|
||||
for _, g := range importComments[s] {
|
||||
for _, c := range g.cg.List {
|
||||
if g.left {
|
||||
c.Slash = pos[i].Start - 1
|
||||
} else {
|
||||
// An import spec can have both block comment and a line comment
|
||||
// to its right. In that case, both of them will have the same pos.
|
||||
// But while formatting the AST, the line comment gets moved to
|
||||
// after the block comment.
|
||||
c.Slash = pos[i].End
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sort.Slice(comments, func(i, j int) bool {
|
||||
return comments[i].Pos() < comments[j].Pos()
|
||||
})
|
||||
|
||||
return specs
|
||||
}
|
|
@@ -0,0 +1,42 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ast_test

import (
	"golang.org/x/website/internal/backport/go/ast"
	"golang.org/x/website/internal/backport/go/parser"
	"golang.org/x/website/internal/backport/go/token"
	"testing"
)

func TestIssue33649(t *testing.T) {
	for _, src := range []string{
		`package p; func _()`,
		`package p; func _() {`,
		`package p; func _() { _ = 0`,
		`package p; func _() { _ = 0 }`,
	} {
		fset := token.NewFileSet()
		f, _ := parser.ParseFile(fset, "", src, parser.AllErrors)
		if f == nil {
			panic("invalid test setup: parser didn't return an AST")
		}

		// find corresponding token.File
		var tf *token.File
		fset.Iterate(func(f *token.File) bool {
			tf = f
			return true
		})
		tfEnd := tf.Base() + tf.Size()

		fd := f.Decls[len(f.Decls)-1].(*ast.FuncDecl)
		fdEnd := int(fd.End())

		if fdEnd != tfEnd {
			t.Errorf("%q: got fdEnd = %d; want %d (base = %d, size = %d)", src, fdEnd, tfEnd, tf.Base(), tf.Size())
		}
	}
}
|
@ -0,0 +1,254 @@
|
|||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains printing support for ASTs.
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"golang.org/x/website/internal/backport/go/token"
|
||||
"io"
|
||||
"os"
|
||||
"reflect"
|
||||
)
|
||||
|
||||
// A FieldFilter may be provided to Fprint to control the output.
|
||||
type FieldFilter func(name string, value reflect.Value) bool
|
||||
|
||||
// NotNilFilter returns true for field values that are not nil;
|
||||
// it returns false otherwise.
|
||||
func NotNilFilter(_ string, v reflect.Value) bool {
|
||||
switch v.Kind() {
|
||||
case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
|
||||
return !v.IsNil()
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Fprint prints the (sub-)tree starting at AST node x to w.
|
||||
// If fset != nil, position information is interpreted relative
|
||||
// to that file set. Otherwise positions are printed as integer
|
||||
// values (file set specific offsets).
|
||||
//
|
||||
// A non-nil FieldFilter f may be provided to control the output:
|
||||
// struct fields for which f(fieldname, fieldvalue) is true are
|
||||
// printed; all others are filtered from the output. Unexported
|
||||
// struct fields are never printed.
|
||||
func Fprint(w io.Writer, fset *token.FileSet, x interface{}, f FieldFilter) error {
|
||||
return fprint(w, fset, x, f)
|
||||
}
|
||||
|
||||
func fprint(w io.Writer, fset *token.FileSet, x interface{}, f FieldFilter) (err error) {
|
||||
// setup printer
|
||||
p := printer{
|
||||
output: w,
|
||||
fset: fset,
|
||||
filter: f,
|
||||
ptrmap: make(map[interface{}]int),
|
||||
last: '\n', // force printing of line number on first line
|
||||
}
|
||||
|
||||
// install error handler
|
||||
defer func() {
|
||||
if e := recover(); e != nil {
|
||||
err = e.(localError).err // re-panics if it's not a localError
|
||||
}
|
||||
}()
|
||||
|
||||
// print x
|
||||
if x == nil {
|
||||
p.printf("nil\n")
|
||||
return
|
||||
}
|
||||
p.print(reflect.ValueOf(x))
|
||||
p.printf("\n")
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// Print prints x to standard output, skipping nil fields.
|
||||
// Print(fset, x) is the same as Fprint(os.Stdout, fset, x, NotNilFilter).
|
||||
func Print(fset *token.FileSet, x interface{}) error {
|
||||
return Fprint(os.Stdout, fset, x, NotNilFilter)
|
||||
}
|
||||
|
||||
type printer struct {
|
||||
output io.Writer
|
||||
fset *token.FileSet
|
||||
filter FieldFilter
|
||||
ptrmap map[interface{}]int // *T -> line number
|
||||
indent int // current indentation level
|
||||
last byte // the last byte processed by Write
|
||||
line int // current line number
|
||||
}
|
||||
|
||||
var indent = []byte(". ")
|
||||
|
||||
func (p *printer) Write(data []byte) (n int, err error) {
|
||||
var m int
|
||||
for i, b := range data {
|
||||
// invariant: data[0:n] has been written
|
||||
if b == '\n' {
|
||||
m, err = p.output.Write(data[n : i+1])
|
||||
n += m
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
p.line++
|
||||
} else if p.last == '\n' {
|
||||
_, err = fmt.Fprintf(p.output, "%6d ", p.line)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
for j := p.indent; j > 0; j-- {
|
||||
_, err = p.output.Write(indent)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
p.last = b
|
||||
}
|
||||
if len(data) > n {
|
||||
m, err = p.output.Write(data[n:])
|
||||
n += m
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// localError wraps locally caught errors so we can distinguish
|
||||
// them from genuine panics which we don't want to return as errors.
|
||||
type localError struct {
|
||||
err error
|
||||
}
|
||||
|
||||
// printf is a convenience wrapper that takes care of print errors.
|
||||
func (p *printer) printf(format string, args ...interface{}) {
|
||||
if _, err := fmt.Fprintf(p, format, args...); err != nil {
|
||||
panic(localError{err})
|
||||
}
|
||||
}
|
||||
|
||||
// Implementation note: Print is written for AST nodes but could be
|
||||
// used to print arbitrary data structures; such a version should
|
||||
// probably be in a different package.
|
||||
//
|
||||
// Note: This code detects (some) cycles created via pointers but
|
||||
// not cycles that are created via slices or maps containing the
|
||||
// same slice or map. Code for general data structures probably
|
||||
// should catch those as well.
|
||||
|
||||
func (p *printer) print(x reflect.Value) {
|
||||
if !NotNilFilter("", x) {
|
||||
p.printf("nil")
|
||||
return
|
||||
}
|
||||
|
||||
switch x.Kind() {
|
||||
case reflect.Interface:
|
||||
p.print(x.Elem())
|
||||
|
||||
case reflect.Map:
|
||||
p.printf("%s (len = %d) {", x.Type(), x.Len())
|
||||
if x.Len() > 0 {
|
||||
p.indent++
|
||||
p.printf("\n")
|
||||
for _, key := range x.MapKeys() {
|
||||
p.print(key)
|
||||
p.printf(": ")
|
||||
p.print(x.MapIndex(key))
|
||||
p.printf("\n")
|
||||
}
|
||||
p.indent--
|
||||
}
|
||||
p.printf("}")
|
||||
|
||||
case reflect.Ptr:
|
||||
p.printf("*")
|
||||
// type-checked ASTs may contain cycles - use ptrmap
|
||||
// to keep track of objects that have been printed
|
||||
// already and print the respective line number instead
|
||||
ptr := x.Interface()
|
||||
if line, exists := p.ptrmap[ptr]; exists {
|
||||
p.printf("(obj @ %d)", line)
|
||||
} else {
|
||||
p.ptrmap[ptr] = p.line
|
||||
p.print(x.Elem())
|
||||
}
|
||||
|
||||
case reflect.Array:
|
||||
p.printf("%s {", x.Type())
|
||||
if x.Len() > 0 {
|
||||
p.indent++
|
||||
p.printf("\n")
|
||||
for i, n := 0, x.Len(); i < n; i++ {
|
||||
p.printf("%d: ", i)
|
||||
p.print(x.Index(i))
|
||||
p.printf("\n")
|
||||
}
|
||||
p.indent--
|
||||
}
|
||||
p.printf("}")
|
||||
|
||||
case reflect.Slice:
|
||||
if s, ok := x.Interface().([]byte); ok {
|
||||
p.printf("%#q", s)
|
||||
return
|
||||
}
|
||||
p.printf("%s (len = %d) {", x.Type(), x.Len())
|
||||
if x.Len() > 0 {
|
||||
p.indent++
|
||||
p.printf("\n")
|
||||
for i, n := 0, x.Len(); i < n; i++ {
|
||||
p.printf("%d: ", i)
|
||||
p.print(x.Index(i))
|
||||
p.printf("\n")
|
||||
}
|
||||
p.indent--
|
||||
}
|
||||
p.printf("}")
|
||||
|
||||
case reflect.Struct:
|
||||
t := x.Type()
|
||||
p.printf("%s {", t)
|
||||
p.indent++
|
||||
first := true
|
||||
for i, n := 0, t.NumField(); i < n; i++ {
|
||||
// exclude non-exported fields because their
|
||||
// values cannot be accessed via reflection
|
||||
if name := t.Field(i).Name; IsExported(name) {
|
||||
value := x.Field(i)
|
||||
if p.filter == nil || p.filter(name, value) {
|
||||
if first {
|
||||
p.printf("\n")
|
||||
first = false
|
||||
}
|
||||
p.printf("%s: ", name)
|
||||
p.print(value)
|
||||
p.printf("\n")
|
||||
}
|
||||
}
|
||||
}
|
||||
p.indent--
|
||||
p.printf("}")
|
||||
|
||||
default:
|
||||
v := x.Interface()
|
||||
switch v := v.(type) {
|
||||
case string:
|
||||
// print strings in quotes
|
||||
p.printf("%q", v)
|
||||
return
|
||||
case token.Pos:
|
||||
// position values can be printed nicely if we have a file set
|
||||
if p.fset != nil {
|
||||
p.printf("%s", p.fset.Position(v))
|
||||
return
|
||||
}
|
||||
}
|
||||
// default
|
||||
p.printf("%v", v)
|
||||
}
|
||||
}
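Not part of the diff: a minimal sketch of using the backported ast.Print to dump a parsed file, assuming the golang.org/x/website/internal/backport import paths used throughout this change. The file name "x.go" and the source literal are made up for illustration.

package main

import (
	"golang.org/x/website/internal/backport/go/ast"
	"golang.org/x/website/internal/backport/go/parser"
	"golang.org/x/website/internal/backport/go/token"
)

func main() {
	fset := token.NewFileSet()
	// Parse a tiny file; positions are recorded in fset.
	f, err := parser.ParseFile(fset, "x.go", "package p\n\nvar answer = 42\n", 0)
	if err != nil {
		panic(err)
	}
	// Print is shorthand for Fprint(os.Stdout, fset, f, NotNilFilter):
	// it writes the AST with line numbers, skipping nil fields.
	ast.Print(fset, f)
}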
|
|
@ -0,0 +1,97 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
var tests = []struct {
|
||||
x interface{} // x is printed as s
|
||||
s string
|
||||
}{
|
||||
// basic types
|
||||
{nil, "0 nil"},
|
||||
{true, "0 true"},
|
||||
{42, "0 42"},
|
||||
{3.14, "0 3.14"},
|
||||
{1 + 2.718i, "0 (1+2.718i)"},
|
||||
{"foobar", "0 \"foobar\""},
|
||||
|
||||
// maps
|
||||
{map[Expr]string{}, `0 map[ast.Expr]string (len = 0) {}`},
|
||||
{map[string]int{"a": 1},
|
||||
`0 map[string]int (len = 1) {
|
||||
1 . "a": 1
|
||||
2 }`},
|
||||
|
||||
// pointers
|
||||
{new(int), "0 *0"},
|
||||
|
||||
// arrays
|
||||
{[0]int{}, `0 [0]int {}`},
|
||||
{[3]int{1, 2, 3},
|
||||
`0 [3]int {
|
||||
1 . 0: 1
|
||||
2 . 1: 2
|
||||
3 . 2: 3
|
||||
4 }`},
|
||||
{[...]int{42},
|
||||
`0 [1]int {
|
||||
1 . 0: 42
|
||||
2 }`},
|
||||
|
||||
// slices
|
||||
{[]int{}, `0 []int (len = 0) {}`},
|
||||
{[]int{1, 2, 3},
|
||||
`0 []int (len = 3) {
|
||||
1 . 0: 1
|
||||
2 . 1: 2
|
||||
3 . 2: 3
|
||||
4 }`},
|
||||
|
||||
// structs
|
||||
{struct{}{}, `0 struct {} {}`},
|
||||
{struct{ x int }{007}, `0 struct { x int } {}`},
|
||||
{struct{ X, y int }{42, 991},
|
||||
`0 struct { X int; y int } {
|
||||
1 . X: 42
|
||||
2 }`},
|
||||
{struct{ X, Y int }{42, 991},
|
||||
`0 struct { X int; Y int } {
|
||||
1 . X: 42
|
||||
2 . Y: 991
|
||||
3 }`},
|
||||
}
|
||||
|
||||
// Split s into lines, trim whitespace from all lines, and return
|
||||
// the concatenated non-empty lines.
|
||||
func trim(s string) string {
|
||||
lines := strings.Split(s, "\n")
|
||||
i := 0
|
||||
for _, line := range lines {
|
||||
line = strings.TrimSpace(line)
|
||||
if line != "" {
|
||||
lines[i] = line
|
||||
i++
|
||||
}
|
||||
}
|
||||
return strings.Join(lines[0:i], "\n")
|
||||
}
|
||||
|
||||
func TestPrint(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
for _, test := range tests {
|
||||
buf.Reset()
|
||||
if err := Fprint(&buf, nil, test.x, nil); err != nil {
|
||||
t.Errorf("Fprint failed: %s", err)
|
||||
}
|
||||
if s, ts := trim(buf.String()), trim(test.s); s != ts {
|
||||
t.Errorf("got:\n%s\nexpected:\n%s\n", s, ts)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,173 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file implements NewPackage.
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"golang.org/x/website/internal/backport/go/scanner"
|
||||
"golang.org/x/website/internal/backport/go/token"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type pkgBuilder struct {
|
||||
fset *token.FileSet
|
||||
errors scanner.ErrorList
|
||||
}
|
||||
|
||||
func (p *pkgBuilder) error(pos token.Pos, msg string) {
|
||||
p.errors.Add(p.fset.Position(pos), msg)
|
||||
}
|
||||
|
||||
func (p *pkgBuilder) errorf(pos token.Pos, format string, args ...interface{}) {
|
||||
p.error(pos, fmt.Sprintf(format, args...))
|
||||
}
|
||||
|
||||
func (p *pkgBuilder) declare(scope, altScope *Scope, obj *Object) {
|
||||
alt := scope.Insert(obj)
|
||||
if alt == nil && altScope != nil {
|
||||
// see if there is a conflicting declaration in altScope
|
||||
alt = altScope.Lookup(obj.Name)
|
||||
}
|
||||
if alt != nil {
|
||||
prevDecl := ""
|
||||
if pos := alt.Pos(); pos.IsValid() {
|
||||
prevDecl = fmt.Sprintf("\n\tprevious declaration at %s", p.fset.Position(pos))
|
||||
}
|
||||
p.error(obj.Pos(), fmt.Sprintf("%s redeclared in this block%s", obj.Name, prevDecl))
|
||||
}
|
||||
}
|
||||
|
||||
func resolve(scope *Scope, ident *Ident) bool {
|
||||
for ; scope != nil; scope = scope.Outer {
|
||||
if obj := scope.Lookup(ident.Name); obj != nil {
|
||||
ident.Obj = obj
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// An Importer resolves import paths to package Objects.
|
||||
// The imports map records the packages already imported,
|
||||
// indexed by package id (canonical import path).
|
||||
// An Importer must determine the canonical import path and
|
||||
// check the map to see if it is already present in the imports map.
|
||||
// If so, the Importer can return the map entry. Otherwise, the
|
||||
// Importer should load the package data for the given path into
|
||||
// a new *Object (pkg), record pkg in the imports map, and then
|
||||
// return pkg.
|
||||
type Importer func(imports map[string]*Object, path string) (pkg *Object, err error)
|
||||
|
||||
// NewPackage creates a new Package node from a set of File nodes. It resolves
|
||||
// unresolved identifiers across files and updates each file's Unresolved list
|
||||
// accordingly. If a non-nil importer and universe scope are provided, they are
|
||||
// used to resolve identifiers not declared in any of the package files. Any
|
||||
// remaining unresolved identifiers are reported as undeclared. If the files
|
||||
// belong to different packages, one package name is selected and files with
|
||||
// different package names are reported and then ignored.
|
||||
// The result is a package node and a scanner.ErrorList if there were errors.
|
||||
func NewPackage(fset *token.FileSet, files map[string]*File, importer Importer, universe *Scope) (*Package, error) {
|
||||
var p pkgBuilder
|
||||
p.fset = fset
|
||||
|
||||
// complete package scope
|
||||
pkgName := ""
|
||||
pkgScope := NewScope(universe)
|
||||
for _, file := range files {
|
||||
// package names must match
|
||||
switch name := file.Name.Name; {
|
||||
case pkgName == "":
|
||||
pkgName = name
|
||||
case name != pkgName:
|
||||
p.errorf(file.Package, "package %s; expected %s", name, pkgName)
|
||||
continue // ignore this file
|
||||
}
|
||||
|
||||
// collect top-level file objects in package scope
|
||||
for _, obj := range file.Scope.Objects {
|
||||
p.declare(pkgScope, nil, obj)
|
||||
}
|
||||
}
|
||||
|
||||
// package global mapping of imported package ids to package objects
|
||||
imports := make(map[string]*Object)
|
||||
|
||||
// complete file scopes with imports and resolve identifiers
|
||||
for _, file := range files {
|
||||
// ignore file if it belongs to a different package
|
||||
// (error has already been reported)
|
||||
if file.Name.Name != pkgName {
|
||||
continue
|
||||
}
|
||||
|
||||
// build file scope by processing all imports
|
||||
importErrors := false
|
||||
fileScope := NewScope(pkgScope)
|
||||
for _, spec := range file.Imports {
|
||||
if importer == nil {
|
||||
importErrors = true
|
||||
continue
|
||||
}
|
||||
path, _ := strconv.Unquote(spec.Path.Value)
|
||||
pkg, err := importer(imports, path)
|
||||
if err != nil {
|
||||
p.errorf(spec.Path.Pos(), "could not import %s (%s)", path, err)
|
||||
importErrors = true
|
||||
continue
|
||||
}
|
||||
// TODO(gri) If a local package name != "." is provided,
|
||||
// global identifier resolution could proceed even if the
|
||||
// import failed. Consider adjusting the logic here a bit.
|
||||
|
||||
// local name overrides imported package name
|
||||
name := pkg.Name
|
||||
if spec.Name != nil {
|
||||
name = spec.Name.Name
|
||||
}
|
||||
|
||||
// add import to file scope
|
||||
if name == "." {
|
||||
// merge imported scope with file scope
|
||||
for _, obj := range pkg.Data.(*Scope).Objects {
|
||||
p.declare(fileScope, pkgScope, obj)
|
||||
}
|
||||
} else if name != "_" {
|
||||
// declare imported package object in file scope
|
||||
// (do not re-use pkg in the file scope but create
|
||||
// a new object instead; the Decl field is different
|
||||
// for different files)
|
||||
obj := NewObj(Pkg, name)
|
||||
obj.Decl = spec
|
||||
obj.Data = pkg.Data
|
||||
p.declare(fileScope, pkgScope, obj)
|
||||
}
|
||||
}
|
||||
|
||||
// resolve identifiers
|
||||
if importErrors {
|
||||
// don't use the universe scope without correct imports
|
||||
// (objects in the universe may be shadowed by imports;
|
||||
// with missing imports, identifiers might get resolved
|
||||
// incorrectly to universe objects)
|
||||
pkgScope.Outer = nil
|
||||
}
|
||||
i := 0
|
||||
for _, ident := range file.Unresolved {
|
||||
if !resolve(fileScope, ident) {
|
||||
p.errorf(ident.Pos(), "undeclared name: %s", ident.Name)
|
||||
file.Unresolved[i] = ident
|
||||
i++
|
||||
}
|
||||
|
||||
}
|
||||
file.Unresolved = file.Unresolved[0:i]
|
||||
pkgScope.Outer = universe // reset universe scope
|
||||
}
|
||||
|
||||
p.errors.Sort()
|
||||
return &Package{pkgName, pkgScope, imports, files}, p.errors.Err()
|
||||
}
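Not part of the diff: a rough sketch of NewPackage resolving identifiers across files, assuming the backport import paths; the file names and sources are hypothetical. With a nil Importer and nil universe scope, names that would come from imports are reported as undeclared, but these two files only reference each other, so no errors are expected.

package main

import (
	"fmt"

	"golang.org/x/website/internal/backport/go/ast"
	"golang.org/x/website/internal/backport/go/parser"
	"golang.org/x/website/internal/backport/go/token"
)

func main() {
	srcs := map[string]string{ // hypothetical two-file package
		"a.go": "package p\nvar A = B\n",
		"b.go": "package p\nvar B = 1\n",
	}
	fset := token.NewFileSet()
	files := make(map[string]*ast.File)
	for name, src := range srcs {
		f, err := parser.ParseFile(fset, name, src, 0)
		if err != nil {
			panic(err)
		}
		files[name] = f
	}
	// A's initializer in a.go resolves to the B declared in b.go.
	pkg, err := ast.NewPackage(fset, files, nil, nil)
	fmt.Println(pkg.Name, err)
}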
|
|
@ -0,0 +1,156 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file implements scopes and the objects they contain.
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"golang.org/x/website/internal/backport/go/token"
|
||||
)
|
||||
|
||||
// A Scope maintains the set of named language entities declared
|
||||
// in the scope and a link to the immediately surrounding (outer)
|
||||
// scope.
|
||||
type Scope struct {
|
||||
Outer *Scope
|
||||
Objects map[string]*Object
|
||||
}
|
||||
|
||||
// NewScope creates a new scope nested in the outer scope.
|
||||
func NewScope(outer *Scope) *Scope {
|
||||
const n = 4 // initial scope capacity
|
||||
return &Scope{outer, make(map[string]*Object, n)}
|
||||
}
|
||||
|
||||
// Lookup returns the object with the given name if it is
|
||||
// found in scope s, otherwise it returns nil. Outer scopes
|
||||
// are ignored.
|
||||
func (s *Scope) Lookup(name string) *Object {
|
||||
return s.Objects[name]
|
||||
}
|
||||
|
||||
// Insert attempts to insert a named object obj into the scope s.
|
||||
// If the scope already contains an object alt with the same name,
|
||||
// Insert leaves the scope unchanged and returns alt. Otherwise
|
||||
// it inserts obj and returns nil.
|
||||
func (s *Scope) Insert(obj *Object) (alt *Object) {
|
||||
if alt = s.Objects[obj.Name]; alt == nil {
|
||||
s.Objects[obj.Name] = obj
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Debugging support
|
||||
func (s *Scope) String() string {
|
||||
var buf bytes.Buffer
|
||||
fmt.Fprintf(&buf, "scope %p {", s)
|
||||
if s != nil && len(s.Objects) > 0 {
|
||||
fmt.Fprintln(&buf)
|
||||
for _, obj := range s.Objects {
|
||||
fmt.Fprintf(&buf, "\t%s %s\n", obj.Kind, obj.Name)
|
||||
}
|
||||
}
|
||||
fmt.Fprintf(&buf, "}\n")
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Objects
|
||||
|
||||
// An Object describes a named language entity such as a package,
|
||||
// constant, type, variable, function (incl. methods), or label.
|
||||
//
|
||||
// The Data fields contains object-specific data:
|
||||
//
|
||||
// Kind Data type Data value
|
||||
// Pkg *Scope package scope
|
||||
// Con int iota for the respective declaration
|
||||
type Object struct {
|
||||
Kind ObjKind
|
||||
Name string // declared name
|
||||
Decl interface{} // corresponding Field, XxxSpec, FuncDecl, LabeledStmt, AssignStmt, Scope; or nil
|
||||
Data interface{} // object-specific data; or nil
|
||||
Type interface{} // placeholder for type information; may be nil
|
||||
}
|
||||
|
||||
// NewObj creates a new object of a given kind and name.
|
||||
func NewObj(kind ObjKind, name string) *Object {
|
||||
return &Object{Kind: kind, Name: name}
|
||||
}
|
||||
|
||||
// Pos computes the source position of the declaration of an object name.
|
||||
// The result may be an invalid position if it cannot be computed
|
||||
// (obj.Decl may be nil or not correct).
|
||||
func (obj *Object) Pos() token.Pos {
|
||||
name := obj.Name
|
||||
switch d := obj.Decl.(type) {
|
||||
case *Field:
|
||||
for _, n := range d.Names {
|
||||
if n.Name == name {
|
||||
return n.Pos()
|
||||
}
|
||||
}
|
||||
case *ImportSpec:
|
||||
if d.Name != nil && d.Name.Name == name {
|
||||
return d.Name.Pos()
|
||||
}
|
||||
return d.Path.Pos()
|
||||
case *ValueSpec:
|
||||
for _, n := range d.Names {
|
||||
if n.Name == name {
|
||||
return n.Pos()
|
||||
}
|
||||
}
|
||||
case *TypeSpec:
|
||||
if d.Name.Name == name {
|
||||
return d.Name.Pos()
|
||||
}
|
||||
case *FuncDecl:
|
||||
if d.Name.Name == name {
|
||||
return d.Name.Pos()
|
||||
}
|
||||
case *LabeledStmt:
|
||||
if d.Label.Name == name {
|
||||
return d.Label.Pos()
|
||||
}
|
||||
case *AssignStmt:
|
||||
for _, x := range d.Lhs {
|
||||
if ident, isIdent := x.(*Ident); isIdent && ident.Name == name {
|
||||
return ident.Pos()
|
||||
}
|
||||
}
|
||||
case *Scope:
|
||||
// predeclared object - nothing to do for now
|
||||
}
|
||||
return token.NoPos
|
||||
}
|
||||
|
||||
// ObjKind describes what an object represents.
|
||||
type ObjKind int
|
||||
|
||||
// The list of possible Object kinds.
|
||||
const (
|
||||
Bad ObjKind = iota // for error handling
|
||||
Pkg // package
|
||||
Con // constant
|
||||
Typ // type
|
||||
Var // variable
|
||||
Fun // function or method
|
||||
Lbl // label
|
||||
)
|
||||
|
||||
var objKindStrings = [...]string{
|
||||
Bad: "bad",
|
||||
Pkg: "package",
|
||||
Con: "const",
|
||||
Typ: "type",
|
||||
Var: "var",
|
||||
Fun: "func",
|
||||
Lbl: "label",
|
||||
}
|
||||
|
||||
func (kind ObjKind) String() string { return objKindStrings[kind] }
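Not part of the diff: a tiny sketch of the Scope and Object API defined above; the names are illustrative.

package main

import (
	"fmt"

	"golang.org/x/website/internal/backport/go/ast"
)

func main() {
	universe := ast.NewScope(nil)      // outermost scope
	pkgScope := ast.NewScope(universe) // nested package scope

	obj := ast.NewObj(ast.Var, "x")
	if alt := pkgScope.Insert(obj); alt != nil {
		fmt.Println("redeclared:", alt.Name)
	}
	// Lookup searches only this scope, not its outer scopes.
	fmt.Println(pkgScope.Lookup("x").Kind) // var
}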
|
|
@ -0,0 +1,398 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast
|
||||
|
||||
import "fmt"
|
||||
|
||||
// A Visitor's Visit method is invoked for each node encountered by Walk.
|
||||
// If the result visitor w is not nil, Walk visits each of the children
|
||||
// of node with the visitor w, followed by a call of w.Visit(nil).
|
||||
type Visitor interface {
|
||||
Visit(node Node) (w Visitor)
|
||||
}
|
||||
|
||||
// Helper functions for common node lists. They may be empty.
|
||||
|
||||
func walkIdentList(v Visitor, list []*Ident) {
|
||||
for _, x := range list {
|
||||
Walk(v, x)
|
||||
}
|
||||
}
|
||||
|
||||
func walkExprList(v Visitor, list []Expr) {
|
||||
for _, x := range list {
|
||||
Walk(v, x)
|
||||
}
|
||||
}
|
||||
|
||||
func walkStmtList(v Visitor, list []Stmt) {
|
||||
for _, x := range list {
|
||||
Walk(v, x)
|
||||
}
|
||||
}
|
||||
|
||||
func walkDeclList(v Visitor, list []Decl) {
|
||||
for _, x := range list {
|
||||
Walk(v, x)
|
||||
}
|
||||
}
|
||||
|
||||
// TODO(gri): Investigate if providing a closure to Walk leads to
|
||||
// simpler use (and may help eliminate Inspect in turn).
|
||||
|
||||
// Walk traverses an AST in depth-first order: It starts by calling
|
||||
// v.Visit(node); node must not be nil. If the visitor w returned by
|
||||
// v.Visit(node) is not nil, Walk is invoked recursively with visitor
|
||||
// w for each of the non-nil children of node, followed by a call of
|
||||
// w.Visit(nil).
|
||||
func Walk(v Visitor, node Node) {
|
||||
if v = v.Visit(node); v == nil {
|
||||
return
|
||||
}
|
||||
|
||||
// walk children
|
||||
// (the order of the cases matches the order
|
||||
// of the corresponding node types in ast.go)
|
||||
switch n := node.(type) {
|
||||
// Comments and fields
|
||||
case *Comment:
|
||||
// nothing to do
|
||||
|
||||
case *CommentGroup:
|
||||
for _, c := range n.List {
|
||||
Walk(v, c)
|
||||
}
|
||||
|
||||
case *Field:
|
||||
if n.Doc != nil {
|
||||
Walk(v, n.Doc)
|
||||
}
|
||||
walkIdentList(v, n.Names)
|
||||
if n.Type != nil {
|
||||
Walk(v, n.Type)
|
||||
}
|
||||
if n.Tag != nil {
|
||||
Walk(v, n.Tag)
|
||||
}
|
||||
if n.Comment != nil {
|
||||
Walk(v, n.Comment)
|
||||
}
|
||||
|
||||
case *FieldList:
|
||||
for _, f := range n.List {
|
||||
Walk(v, f)
|
||||
}
|
||||
|
||||
// Expressions
|
||||
case *BadExpr, *Ident, *BasicLit:
|
||||
// nothing to do
|
||||
|
||||
case *Ellipsis:
|
||||
if n.Elt != nil {
|
||||
Walk(v, n.Elt)
|
||||
}
|
||||
|
||||
case *FuncLit:
|
||||
Walk(v, n.Type)
|
||||
Walk(v, n.Body)
|
||||
|
||||
case *CompositeLit:
|
||||
if n.Type != nil {
|
||||
Walk(v, n.Type)
|
||||
}
|
||||
walkExprList(v, n.Elts)
|
||||
|
||||
case *ParenExpr:
|
||||
Walk(v, n.X)
|
||||
|
||||
case *SelectorExpr:
|
||||
Walk(v, n.X)
|
||||
Walk(v, n.Sel)
|
||||
|
||||
case *IndexExpr:
|
||||
Walk(v, n.X)
|
||||
Walk(v, n.Index)
|
||||
|
||||
case *IndexListExpr:
|
||||
Walk(v, n.X)
|
||||
for _, index := range n.Indices {
|
||||
Walk(v, index)
|
||||
}
|
||||
|
||||
case *SliceExpr:
|
||||
Walk(v, n.X)
|
||||
if n.Low != nil {
|
||||
Walk(v, n.Low)
|
||||
}
|
||||
if n.High != nil {
|
||||
Walk(v, n.High)
|
||||
}
|
||||
if n.Max != nil {
|
||||
Walk(v, n.Max)
|
||||
}
|
||||
|
||||
case *TypeAssertExpr:
|
||||
Walk(v, n.X)
|
||||
if n.Type != nil {
|
||||
Walk(v, n.Type)
|
||||
}
|
||||
|
||||
case *CallExpr:
|
||||
Walk(v, n.Fun)
|
||||
walkExprList(v, n.Args)
|
||||
|
||||
case *StarExpr:
|
||||
Walk(v, n.X)
|
||||
|
||||
case *UnaryExpr:
|
||||
Walk(v, n.X)
|
||||
|
||||
case *BinaryExpr:
|
||||
Walk(v, n.X)
|
||||
Walk(v, n.Y)
|
||||
|
||||
case *KeyValueExpr:
|
||||
Walk(v, n.Key)
|
||||
Walk(v, n.Value)
|
||||
|
||||
// Types
|
||||
case *ArrayType:
|
||||
if n.Len != nil {
|
||||
Walk(v, n.Len)
|
||||
}
|
||||
Walk(v, n.Elt)
|
||||
|
||||
case *StructType:
|
||||
Walk(v, n.Fields)
|
||||
|
||||
case *FuncType:
|
||||
if n.TypeParams != nil {
|
||||
Walk(v, n.TypeParams)
|
||||
}
|
||||
if n.Params != nil {
|
||||
Walk(v, n.Params)
|
||||
}
|
||||
if n.Results != nil {
|
||||
Walk(v, n.Results)
|
||||
}
|
||||
|
||||
case *InterfaceType:
|
||||
Walk(v, n.Methods)
|
||||
|
||||
case *MapType:
|
||||
Walk(v, n.Key)
|
||||
Walk(v, n.Value)
|
||||
|
||||
case *ChanType:
|
||||
Walk(v, n.Value)
|
||||
|
||||
// Statements
|
||||
case *BadStmt:
|
||||
// nothing to do
|
||||
|
||||
case *DeclStmt:
|
||||
Walk(v, n.Decl)
|
||||
|
||||
case *EmptyStmt:
|
||||
// nothing to do
|
||||
|
||||
case *LabeledStmt:
|
||||
Walk(v, n.Label)
|
||||
Walk(v, n.Stmt)
|
||||
|
||||
case *ExprStmt:
|
||||
Walk(v, n.X)
|
||||
|
||||
case *SendStmt:
|
||||
Walk(v, n.Chan)
|
||||
Walk(v, n.Value)
|
||||
|
||||
case *IncDecStmt:
|
||||
Walk(v, n.X)
|
||||
|
||||
case *AssignStmt:
|
||||
walkExprList(v, n.Lhs)
|
||||
walkExprList(v, n.Rhs)
|
||||
|
||||
case *GoStmt:
|
||||
Walk(v, n.Call)
|
||||
|
||||
case *DeferStmt:
|
||||
Walk(v, n.Call)
|
||||
|
||||
case *ReturnStmt:
|
||||
walkExprList(v, n.Results)
|
||||
|
||||
case *BranchStmt:
|
||||
if n.Label != nil {
|
||||
Walk(v, n.Label)
|
||||
}
|
||||
|
||||
case *BlockStmt:
|
||||
walkStmtList(v, n.List)
|
||||
|
||||
case *IfStmt:
|
||||
if n.Init != nil {
|
||||
Walk(v, n.Init)
|
||||
}
|
||||
Walk(v, n.Cond)
|
||||
Walk(v, n.Body)
|
||||
if n.Else != nil {
|
||||
Walk(v, n.Else)
|
||||
}
|
||||
|
||||
case *CaseClause:
|
||||
walkExprList(v, n.List)
|
||||
walkStmtList(v, n.Body)
|
||||
|
||||
case *SwitchStmt:
|
||||
if n.Init != nil {
|
||||
Walk(v, n.Init)
|
||||
}
|
||||
if n.Tag != nil {
|
||||
Walk(v, n.Tag)
|
||||
}
|
||||
Walk(v, n.Body)
|
||||
|
||||
case *TypeSwitchStmt:
|
||||
if n.Init != nil {
|
||||
Walk(v, n.Init)
|
||||
}
|
||||
Walk(v, n.Assign)
|
||||
Walk(v, n.Body)
|
||||
|
||||
case *CommClause:
|
||||
if n.Comm != nil {
|
||||
Walk(v, n.Comm)
|
||||
}
|
||||
walkStmtList(v, n.Body)
|
||||
|
||||
case *SelectStmt:
|
||||
Walk(v, n.Body)
|
||||
|
||||
case *ForStmt:
|
||||
if n.Init != nil {
|
||||
Walk(v, n.Init)
|
||||
}
|
||||
if n.Cond != nil {
|
||||
Walk(v, n.Cond)
|
||||
}
|
||||
if n.Post != nil {
|
||||
Walk(v, n.Post)
|
||||
}
|
||||
Walk(v, n.Body)
|
||||
|
||||
case *RangeStmt:
|
||||
if n.Key != nil {
|
||||
Walk(v, n.Key)
|
||||
}
|
||||
if n.Value != nil {
|
||||
Walk(v, n.Value)
|
||||
}
|
||||
Walk(v, n.X)
|
||||
Walk(v, n.Body)
|
||||
|
||||
// Declarations
|
||||
case *ImportSpec:
|
||||
if n.Doc != nil {
|
||||
Walk(v, n.Doc)
|
||||
}
|
||||
if n.Name != nil {
|
||||
Walk(v, n.Name)
|
||||
}
|
||||
Walk(v, n.Path)
|
||||
if n.Comment != nil {
|
||||
Walk(v, n.Comment)
|
||||
}
|
||||
|
||||
case *ValueSpec:
|
||||
if n.Doc != nil {
|
||||
Walk(v, n.Doc)
|
||||
}
|
||||
walkIdentList(v, n.Names)
|
||||
if n.Type != nil {
|
||||
Walk(v, n.Type)
|
||||
}
|
||||
walkExprList(v, n.Values)
|
||||
if n.Comment != nil {
|
||||
Walk(v, n.Comment)
|
||||
}
|
||||
|
||||
case *TypeSpec:
|
||||
if n.Doc != nil {
|
||||
Walk(v, n.Doc)
|
||||
}
|
||||
Walk(v, n.Name)
|
||||
if n.TypeParams != nil {
|
||||
Walk(v, n.TypeParams)
|
||||
}
|
||||
Walk(v, n.Type)
|
||||
if n.Comment != nil {
|
||||
Walk(v, n.Comment)
|
||||
}
|
||||
|
||||
case *BadDecl:
|
||||
// nothing to do
|
||||
|
||||
case *GenDecl:
|
||||
if n.Doc != nil {
|
||||
Walk(v, n.Doc)
|
||||
}
|
||||
for _, s := range n.Specs {
|
||||
Walk(v, s)
|
||||
}
|
||||
|
||||
case *FuncDecl:
|
||||
if n.Doc != nil {
|
||||
Walk(v, n.Doc)
|
||||
}
|
||||
if n.Recv != nil {
|
||||
Walk(v, n.Recv)
|
||||
}
|
||||
Walk(v, n.Name)
|
||||
Walk(v, n.Type)
|
||||
if n.Body != nil {
|
||||
Walk(v, n.Body)
|
||||
}
|
||||
|
||||
// Files and packages
|
||||
case *File:
|
||||
if n.Doc != nil {
|
||||
Walk(v, n.Doc)
|
||||
}
|
||||
Walk(v, n.Name)
|
||||
walkDeclList(v, n.Decls)
|
||||
// don't walk n.Comments - they have been
|
||||
// visited already through the individual
|
||||
// nodes
|
||||
|
||||
case *Package:
|
||||
for _, f := range n.Files {
|
||||
Walk(v, f)
|
||||
}
|
||||
|
||||
default:
|
||||
panic(fmt.Sprintf("ast.Walk: unexpected node type %T", n))
|
||||
}
|
||||
|
||||
v.Visit(nil)
|
||||
}
|
||||
|
||||
type inspector func(Node) bool
|
||||
|
||||
func (f inspector) Visit(node Node) Visitor {
|
||||
if f(node) {
|
||||
return f
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Inspect traverses an AST in depth-first order: It starts by calling
|
||||
// f(node); node must not be nil. If f returns true, Inspect invokes f
|
||||
// recursively for each of the non-nil children of node, followed by a
|
||||
// call of f(nil).
|
||||
func Inspect(node Node, f func(Node) bool) {
|
||||
Walk(inspector(f), node)
|
||||
}
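Not part of the diff: a short sketch of Inspect from this backported package, again assuming the backport import paths; the source literal is invented.

package main

import (
	"fmt"

	"golang.org/x/website/internal/backport/go/ast"
	"golang.org/x/website/internal/backport/go/parser"
	"golang.org/x/website/internal/backport/go/token"
)

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "x.go", "package p\nfunc add(a, b int) int { return a + b }\n", 0)
	if err != nil {
		panic(err)
	}
	// Inspect wraps Walk: returning true from the callback descends
	// into the node's children, returning false prunes the subtree.
	ast.Inspect(f, func(n ast.Node) bool {
		if id, ok := n.(*ast.Ident); ok {
			fmt.Printf("%s\t%s\n", fset.Position(id.Pos()), id.Name)
		}
		return true
	})
}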
|
File diff not shown because it is too large
|
@ -0,0 +1,97 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package build gathers information about Go packages.
|
||||
//
|
||||
// # Go Path
|
||||
//
|
||||
// The Go path is a list of directory trees containing Go source code.
|
||||
// It is consulted to resolve imports that cannot be found in the standard
|
||||
// Go tree. The default path is the value of the GOPATH environment
|
||||
// variable, interpreted as a path list appropriate to the operating system
|
||||
// (on Unix, the variable is a colon-separated string;
|
||||
// on Windows, a semicolon-separated string;
|
||||
// on Plan 9, a list).
|
||||
//
|
||||
// Each directory listed in the Go path must have a prescribed structure:
|
||||
//
|
||||
// The src/ directory holds source code. The path below 'src' determines
|
||||
// the import path or executable name.
|
||||
//
|
||||
// The pkg/ directory holds installed package objects.
|
||||
// As in the Go tree, each target operating system and
|
||||
// architecture pair has its own subdirectory of pkg
|
||||
// (pkg/GOOS_GOARCH).
|
||||
//
|
||||
// If DIR is a directory listed in the Go path, a package with
|
||||
// source in DIR/src/foo/bar can be imported as "foo/bar" and
|
||||
// has its compiled form installed to "DIR/pkg/GOOS_GOARCH/foo/bar.a"
|
||||
// (or, for gccgo, "DIR/pkg/gccgo/foo/libbar.a").
|
||||
//
|
||||
// The bin/ directory holds compiled commands.
|
||||
// Each command is named for its source directory, but only
|
||||
// using the final element, not the entire path. That is, the
|
||||
// command with source in DIR/src/foo/quux is installed into
|
||||
// DIR/bin/quux, not DIR/bin/foo/quux. The foo/ is stripped
|
||||
// so that you can add DIR/bin to your PATH to get at the
|
||||
// installed commands.
|
||||
//
|
||||
// Here's an example directory layout:
|
||||
//
|
||||
// GOPATH=/home/user/gocode
|
||||
//
|
||||
// /home/user/gocode/
|
||||
// src/
|
||||
// foo/
|
||||
// bar/ (go code in package bar)
|
||||
// x.go
|
||||
// quux/ (go code in package main)
|
||||
// y.go
|
||||
// bin/
|
||||
// quux (installed command)
|
||||
// pkg/
|
||||
// linux_amd64/
|
||||
// foo/
|
||||
// bar.a (installed package object)
|
||||
//
|
||||
// # Build Constraints
|
||||
//
|
||||
// A build constraint, also known as a build tag, is a line comment that begins
|
||||
//
|
||||
// //go:build
|
||||
//
|
||||
// that lists the conditions under which a file should be included in the
|
||||
// package. Build constraints may also be part of a file's name
|
||||
// (for example, source_windows.go will only be included if the target
|
||||
// operating system is windows).
|
||||
//
|
||||
// See 'go help buildconstraint'
|
||||
// (https://golang.org/cmd/go/#hdr-Build_constraints) for details.
|
||||
//
|
||||
// # Binary-Only Packages
|
||||
//
|
||||
// In Go 1.12 and earlier, it was possible to distribute packages in binary
|
||||
// form without including the source code used for compiling the package.
|
||||
// The package was distributed with a source file not excluded by build
|
||||
// constraints and containing a "//go:binary-only-package" comment. Like a
|
||||
// build constraint, this comment appeared at the top of a file, preceded
|
||||
// only by blank lines and other line comments and with a blank line
|
||||
// following the comment, to separate it from the package documentation.
|
||||
// Unlike build constraints, this comment is only recognized in non-test
|
||||
// Go source files.
|
||||
//
|
||||
// The minimal source code for a binary-only package was therefore:
|
||||
//
|
||||
// //go:binary-only-package
|
||||
//
|
||||
// package mypkg
|
||||
//
|
||||
// The source code could include additional Go code. That code was never
|
||||
// compiled but would be processed by tools like godoc and might be useful
|
||||
// as end-user documentation.
|
||||
//
|
||||
// "go build" and other commands no longer support binary-only-packages.
|
||||
// Import and ImportDir will still set the BinaryOnly flag in packages
|
||||
// containing these comments for use in tools and error messages.
|
||||
package build
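Not part of the diff: a brief sketch of the package in use, assuming the backport keeps the standard go/build API surface. The "." directory argument is illustrative.

package main

import (
	"fmt"
	"log"

	"golang.org/x/website/internal/backport/go/build"
)

func main() {
	// ImportDir reads the Go files in a directory, applying the build
	// constraints and file-name rules described above.
	pkg, err := build.ImportDir(".", 0)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(pkg.Name, pkg.GoFiles)
}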
|
|
@@ -0,0 +1,18 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

//go:build gc
// +build gc

package build

import (
	"path/filepath"
	"runtime"
)

// getToolDir returns the default value of ToolDir.
func getToolDir() string {
	return filepath.Join(runtime.GOROOT(), "pkg/tool/"+runtime.GOOS+"_"+runtime.GOARCH)
}
@@ -0,0 +1,15 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

//go:build gccgo
// +build gccgo

package build

import "runtime"

// getToolDir returns the default value of ToolDir.
func getToolDir() string {
	return envOr("GCCGOTOOLDIR", runtime.GCCGOTOOLDIR)
}
@ -0,0 +1,578 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package build
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"golang.org/x/website/internal/backport/go/ast"
|
||||
"golang.org/x/website/internal/backport/go/parser"
|
||||
"golang.org/x/website/internal/backport/go/token"
|
||||
"io"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type importReader struct {
|
||||
b *bufio.Reader
|
||||
buf []byte
|
||||
peek byte
|
||||
err error
|
||||
eof bool
|
||||
nerr int
|
||||
pos token.Position
|
||||
}
|
||||
|
||||
var bom = []byte{0xef, 0xbb, 0xbf}
|
||||
|
||||
func newImportReader(name string, r io.Reader) *importReader {
|
||||
b := bufio.NewReader(r)
|
||||
// Remove leading UTF-8 BOM.
|
||||
// Per https://golang.org/ref/spec#Source_code_representation:
|
||||
// a compiler may ignore a UTF-8-encoded byte order mark (U+FEFF)
|
||||
// if it is the first Unicode code point in the source text.
|
||||
if leadingBytes, err := b.Peek(3); err == nil && bytes.Equal(leadingBytes, bom) {
|
||||
b.Discard(3)
|
||||
}
|
||||
return &importReader{
|
||||
b: b,
|
||||
pos: token.Position{
|
||||
Filename: name,
|
||||
Line: 1,
|
||||
Column: 1,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func isIdent(c byte) bool {
|
||||
return 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' || '0' <= c && c <= '9' || c == '_' || c >= utf8.RuneSelf
|
||||
}
|
||||
|
||||
var (
|
||||
errSyntax = errors.New("syntax error")
|
||||
errNUL = errors.New("unexpected NUL in input")
|
||||
)
|
||||
|
||||
// syntaxError records a syntax error, but only if an I/O error has not already been recorded.
|
||||
func (r *importReader) syntaxError() {
|
||||
if r.err == nil {
|
||||
r.err = errSyntax
|
||||
}
|
||||
}
|
||||
|
||||
// readByte reads the next byte from the input, saves it in buf, and returns it.
|
||||
// If an error occurs, readByte records the error in r.err and returns 0.
|
||||
func (r *importReader) readByte() byte {
|
||||
c, err := r.b.ReadByte()
|
||||
if err == nil {
|
||||
r.buf = append(r.buf, c)
|
||||
if c == 0 {
|
||||
err = errNUL
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
if err == io.EOF {
|
||||
r.eof = true
|
||||
} else if r.err == nil {
|
||||
r.err = err
|
||||
}
|
||||
c = 0
|
||||
}
|
||||
return c
|
||||
}
|
||||
|
||||
// readByteNoBuf is like readByte but doesn't buffer the byte.
|
||||
// It exhausts r.buf before reading from r.b.
|
||||
func (r *importReader) readByteNoBuf() byte {
|
||||
var c byte
|
||||
var err error
|
||||
if len(r.buf) > 0 {
|
||||
c = r.buf[0]
|
||||
r.buf = r.buf[1:]
|
||||
} else {
|
||||
c, err = r.b.ReadByte()
|
||||
if err == nil && c == 0 {
|
||||
err = errNUL
|
||||
}
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
if err == io.EOF {
|
||||
r.eof = true
|
||||
} else if r.err == nil {
|
||||
r.err = err
|
||||
}
|
||||
return 0
|
||||
}
|
||||
r.pos.Offset++
|
||||
if c == '\n' {
|
||||
r.pos.Line++
|
||||
r.pos.Column = 1
|
||||
} else {
|
||||
r.pos.Column++
|
||||
}
|
||||
return c
|
||||
}
|
||||
|
||||
// peekByte returns the next byte from the input reader but does not advance beyond it.
|
||||
// If skipSpace is set, peekByte skips leading spaces and comments.
|
||||
func (r *importReader) peekByte(skipSpace bool) byte {
|
||||
if r.err != nil {
|
||||
if r.nerr++; r.nerr > 10000 {
|
||||
panic("go/build: import reader looping")
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
// Use r.peek as first input byte.
|
||||
// Don't just return r.peek here: it might have been left by peekByte(false)
|
||||
// and this might be peekByte(true).
|
||||
c := r.peek
|
||||
if c == 0 {
|
||||
c = r.readByte()
|
||||
}
|
||||
for r.err == nil && !r.eof {
|
||||
if skipSpace {
|
||||
// For the purposes of this reader, semicolons are never necessary to
|
||||
// understand the input and are treated as spaces.
|
||||
switch c {
|
||||
case ' ', '\f', '\t', '\r', '\n', ';':
|
||||
c = r.readByte()
|
||||
continue
|
||||
|
||||
case '/':
|
||||
c = r.readByte()
|
||||
if c == '/' {
|
||||
for c != '\n' && r.err == nil && !r.eof {
|
||||
c = r.readByte()
|
||||
}
|
||||
} else if c == '*' {
|
||||
var c1 byte
|
||||
for (c != '*' || c1 != '/') && r.err == nil {
|
||||
if r.eof {
|
||||
r.syntaxError()
|
||||
}
|
||||
c, c1 = c1, r.readByte()
|
||||
}
|
||||
} else {
|
||||
r.syntaxError()
|
||||
}
|
||||
c = r.readByte()
|
||||
continue
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
r.peek = c
|
||||
return r.peek
|
||||
}
|
||||
|
||||
// nextByte is like peekByte but advances beyond the returned byte.
|
||||
func (r *importReader) nextByte(skipSpace bool) byte {
|
||||
c := r.peekByte(skipSpace)
|
||||
r.peek = 0
|
||||
return c
|
||||
}
|
||||
|
||||
var goEmbed = []byte("go:embed")
|
||||
|
||||
// findEmbed advances the input reader to the next //go:embed comment.
|
||||
// It reports whether it found a comment.
|
||||
// (Otherwise it found an error or EOF.)
|
||||
func (r *importReader) findEmbed(first bool) bool {
|
||||
// The import block scan stopped after a non-space character,
|
||||
// so the reader is not at the start of a line on the first call.
|
||||
// After that, each //go:embed extraction leaves the reader
|
||||
// at the end of a line.
|
||||
startLine := !first
|
||||
var c byte
|
||||
for r.err == nil && !r.eof {
|
||||
c = r.readByteNoBuf()
|
||||
Reswitch:
|
||||
switch c {
|
||||
default:
|
||||
startLine = false
|
||||
|
||||
case '\n':
|
||||
startLine = true
|
||||
|
||||
case ' ', '\t':
|
||||
// leave startLine alone
|
||||
|
||||
case '"':
|
||||
startLine = false
|
||||
for r.err == nil {
|
||||
if r.eof {
|
||||
r.syntaxError()
|
||||
}
|
||||
c = r.readByteNoBuf()
|
||||
if c == '\\' {
|
||||
r.readByteNoBuf()
|
||||
if r.err != nil {
|
||||
r.syntaxError()
|
||||
return false
|
||||
}
|
||||
continue
|
||||
}
|
||||
if c == '"' {
|
||||
c = r.readByteNoBuf()
|
||||
goto Reswitch
|
||||
}
|
||||
}
|
||||
goto Reswitch
|
||||
|
||||
case '`':
|
||||
startLine = false
|
||||
for r.err == nil {
|
||||
if r.eof {
|
||||
r.syntaxError()
|
||||
}
|
||||
c = r.readByteNoBuf()
|
||||
if c == '`' {
|
||||
c = r.readByteNoBuf()
|
||||
goto Reswitch
|
||||
}
|
||||
}
|
||||
|
||||
case '\'':
|
||||
startLine = false
|
||||
for r.err == nil {
|
||||
if r.eof {
|
||||
r.syntaxError()
|
||||
}
|
||||
c = r.readByteNoBuf()
|
||||
if c == '\\' {
|
||||
r.readByteNoBuf()
|
||||
if r.err != nil {
|
||||
r.syntaxError()
|
||||
return false
|
||||
}
|
||||
continue
|
||||
}
|
||||
if c == '\'' {
|
||||
c = r.readByteNoBuf()
|
||||
goto Reswitch
|
||||
}
|
||||
}
|
||||
|
||||
case '/':
|
||||
c = r.readByteNoBuf()
|
||||
switch c {
|
||||
default:
|
||||
startLine = false
|
||||
goto Reswitch
|
||||
|
||||
case '*':
|
||||
var c1 byte
|
||||
for (c != '*' || c1 != '/') && r.err == nil {
|
||||
if r.eof {
|
||||
r.syntaxError()
|
||||
}
|
||||
c, c1 = c1, r.readByteNoBuf()
|
||||
}
|
||||
startLine = false
|
||||
|
||||
case '/':
|
||||
if startLine {
|
||||
// Try to read this as a //go:embed comment.
|
||||
for i := range goEmbed {
|
||||
c = r.readByteNoBuf()
|
||||
if c != goEmbed[i] {
|
||||
goto SkipSlashSlash
|
||||
}
|
||||
}
|
||||
c = r.readByteNoBuf()
|
||||
if c == ' ' || c == '\t' {
|
||||
// Found one!
|
||||
return true
|
||||
}
|
||||
}
|
||||
SkipSlashSlash:
|
||||
for c != '\n' && r.err == nil && !r.eof {
|
||||
c = r.readByteNoBuf()
|
||||
}
|
||||
startLine = true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// readKeyword reads the given keyword from the input.
|
||||
// If the keyword is not present, readKeyword records a syntax error.
|
||||
func (r *importReader) readKeyword(kw string) {
|
||||
r.peekByte(true)
|
||||
for i := 0; i < len(kw); i++ {
|
||||
if r.nextByte(false) != kw[i] {
|
||||
r.syntaxError()
|
||||
return
|
||||
}
|
||||
}
|
||||
if isIdent(r.peekByte(false)) {
|
||||
r.syntaxError()
|
||||
}
|
||||
}
|
||||
|
||||
// readIdent reads an identifier from the input.
|
||||
// If an identifier is not present, readIdent records a syntax error.
|
||||
func (r *importReader) readIdent() {
|
||||
c := r.peekByte(true)
|
||||
if !isIdent(c) {
|
||||
r.syntaxError()
|
||||
return
|
||||
}
|
||||
for isIdent(r.peekByte(false)) {
|
||||
r.peek = 0
|
||||
}
|
||||
}
|
||||
|
||||
// readString reads a quoted string literal from the input.
|
||||
// If an identifier is not present, readString records a syntax error.
|
||||
func (r *importReader) readString() {
|
||||
switch r.nextByte(true) {
|
||||
case '`':
|
||||
for r.err == nil {
|
||||
if r.nextByte(false) == '`' {
|
||||
break
|
||||
}
|
||||
if r.eof {
|
||||
r.syntaxError()
|
||||
}
|
||||
}
|
||||
case '"':
|
||||
for r.err == nil {
|
||||
c := r.nextByte(false)
|
||||
if c == '"' {
|
||||
break
|
||||
}
|
||||
if r.eof || c == '\n' {
|
||||
r.syntaxError()
|
||||
}
|
||||
if c == '\\' {
|
||||
r.nextByte(false)
|
||||
}
|
||||
}
|
||||
default:
|
||||
r.syntaxError()
|
||||
}
|
||||
}
|
||||
|
||||
// readImport reads an import clause - optional identifier followed by quoted string -
|
||||
// from the input.
|
||||
func (r *importReader) readImport() {
|
||||
c := r.peekByte(true)
|
||||
if c == '.' {
|
||||
r.peek = 0
|
||||
} else if isIdent(c) {
|
||||
r.readIdent()
|
||||
}
|
||||
r.readString()
|
||||
}
|
||||
|
||||
// readComments is like io.ReadAll, except that it only reads the leading
|
||||
// block of comments in the file.
|
||||
func readComments(f io.Reader) ([]byte, error) {
|
||||
r := newImportReader("", f)
|
||||
r.peekByte(true)
|
||||
if r.err == nil && !r.eof {
|
||||
// Didn't reach EOF, so must have found a non-space byte. Remove it.
|
||||
r.buf = r.buf[:len(r.buf)-1]
|
||||
}
|
||||
return r.buf, r.err
|
||||
}
|
||||
|
||||
// readGoInfo expects a Go file as input and reads the file up to and including the import section.
|
||||
// It records what it learned in *info.
|
||||
// If info.fset is non-nil, readGoInfo parses the file and sets info.parsed, info.parseErr,
|
||||
// info.imports, info.embeds, and info.embedErr.
|
||||
//
|
||||
// It only returns an error if there are problems reading the file,
|
||||
// not for syntax errors in the file itself.
|
||||
func readGoInfo(f io.Reader, info *fileInfo) error {
|
||||
r := newImportReader(info.name, f)
|
||||
|
||||
r.readKeyword("package")
|
||||
r.readIdent()
|
||||
for r.peekByte(true) == 'i' {
|
||||
r.readKeyword("import")
|
||||
if r.peekByte(true) == '(' {
|
||||
r.nextByte(false)
|
||||
for r.peekByte(true) != ')' && r.err == nil {
|
||||
r.readImport()
|
||||
}
|
||||
r.nextByte(false)
|
||||
} else {
|
||||
r.readImport()
|
||||
}
|
||||
}
|
||||
|
||||
info.header = r.buf
|
||||
|
||||
// If we stopped successfully before EOF, we read a byte that told us we were done.
|
||||
// Return all but that last byte, which would cause a syntax error if we let it through.
|
||||
if r.err == nil && !r.eof {
|
||||
info.header = r.buf[:len(r.buf)-1]
|
||||
}
|
||||
|
||||
// If we stopped for a syntax error, consume the whole file so that
|
||||
// we are sure we don't change the errors that go/parser returns.
|
||||
if r.err == errSyntax {
|
||||
r.err = nil
|
||||
for r.err == nil && !r.eof {
|
||||
r.readByte()
|
||||
}
|
||||
info.header = r.buf
|
||||
}
|
||||
if r.err != nil {
|
||||
return r.err
|
||||
}
|
||||
|
||||
if info.fset == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Parse file header & record imports.
|
||||
info.parsed, info.parseErr = parser.ParseFile(info.fset, info.name, info.header, parser.ImportsOnly|parser.ParseComments)
|
||||
if info.parseErr != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
hasEmbed := false
|
||||
for _, decl := range info.parsed.Decls {
|
||||
d, ok := decl.(*ast.GenDecl)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
for _, dspec := range d.Specs {
|
||||
spec, ok := dspec.(*ast.ImportSpec)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
quoted := spec.Path.Value
|
||||
path, err := strconv.Unquote(quoted)
|
||||
if err != nil {
|
||||
return fmt.Errorf("parser returned invalid quoted string: <%s>", quoted)
|
||||
}
|
||||
if path == "embed" {
|
||||
hasEmbed = true
|
||||
}
|
||||
|
||||
doc := spec.Doc
|
||||
if doc == nil && len(d.Specs) == 1 {
|
||||
doc = d.Doc
|
||||
}
|
||||
info.imports = append(info.imports, fileImport{path, spec.Pos(), doc})
|
||||
}
|
||||
}
|
||||
|
||||
// If the file imports "embed",
|
||||
// we have to look for //go:embed comments
|
||||
// in the remainder of the file.
|
||||
// The compiler will enforce the mapping of comments to
|
||||
// declared variables. We just need to know the patterns.
|
||||
// If there were //go:embed comments earlier in the file
|
||||
// (near the package statement or imports), the compiler
|
||||
// will reject them. They can be (and have already been) ignored.
|
||||
if hasEmbed {
|
||||
var line []byte
|
||||
for first := true; r.findEmbed(first); first = false {
|
||||
line = line[:0]
|
||||
pos := r.pos
|
||||
for {
|
||||
c := r.readByteNoBuf()
|
||||
if c == '\n' || r.err != nil || r.eof {
|
||||
break
|
||||
}
|
||||
line = append(line, c)
|
||||
}
|
||||
// Add args if line is well-formed.
|
||||
// Ignore badly-formed lines - the compiler will report them when it finds them,
|
||||
// and we can pretend they are not there to help go list succeed with what it knows.
|
||||
embs, err := parseGoEmbed(string(line), pos)
|
||||
if err == nil {
|
||||
info.embeds = append(info.embeds, embs...)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// parseGoEmbed parses the text following "//go:embed" to extract the glob patterns.
|
||||
// It accepts unquoted space-separated patterns as well as double-quoted and back-quoted Go strings.
|
||||
// This is based on a similar function in cmd/compile/internal/gc/noder.go;
|
||||
// this version calculates position information as well.
|
||||
func parseGoEmbed(args string, pos token.Position) ([]fileEmbed, error) {
|
||||
trimBytes := func(n int) {
|
||||
pos.Offset += n
|
||||
pos.Column += utf8.RuneCountInString(args[:n])
|
||||
args = args[n:]
|
||||
}
|
||||
trimSpace := func() {
|
||||
trim := strings.TrimLeftFunc(args, unicode.IsSpace)
|
||||
trimBytes(len(args) - len(trim))
|
||||
}
|
||||
|
||||
var list []fileEmbed
|
||||
for trimSpace(); args != ""; trimSpace() {
|
||||
var path string
|
||||
pathPos := pos
|
||||
Switch:
|
||||
switch args[0] {
|
||||
default:
|
||||
i := len(args)
|
||||
for j, c := range args {
|
||||
if unicode.IsSpace(c) {
|
||||
i = j
|
||||
break
|
||||
}
|
||||
}
|
||||
path = args[:i]
|
||||
trimBytes(i)
|
||||
|
||||
case '`':
|
||||
var ok bool
|
||||
path, _, ok = stringsCut(args[1:], "`")
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
|
||||
}
|
||||
trimBytes(1 + len(path) + 1)
|
||||
|
||||
case '"':
|
||||
i := 1
|
||||
for ; i < len(args); i++ {
|
||||
if args[i] == '\\' {
|
||||
i++
|
||||
continue
|
||||
}
|
||||
if args[i] == '"' {
|
||||
q, err := strconv.Unquote(args[:i+1])
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args[:i+1])
|
||||
}
|
||||
path = q
|
||||
trimBytes(i + 1)
|
||||
break Switch
|
||||
}
|
||||
}
|
||||
if i >= len(args) {
|
||||
return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
|
||||
}
|
||||
}
|
||||
|
||||
if args != "" {
|
||||
r, _ := utf8.DecodeRuneInString(args)
|
||||
if !unicode.IsSpace(r) {
|
||||
return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
|
||||
}
|
||||
}
|
||||
list = append(list, fileEmbed{path, pathPos})
|
||||
}
|
||||
return list, nil
|
||||
}
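Not part of the diff: readGoInfo and parseGoEmbed are unexported, so rather than a call site, here is an illustrative source file of the kind they scan; the embed patterns are made up.

package p

import "embed"

// Each //go:embed line yields one entry per pattern, and the position of
// every pattern is recorded for error reporting.
//go:embed static/*.html "with space.txt" `raw.tmpl`
//go:embed extra
var content embed.FS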
|
|
@ -0,0 +1,360 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package build
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"golang.org/x/website/internal/backport/go/token"
|
||||
)
|
||||
|
||||
const quote = "`"
|
||||
|
||||
type readTest struct {
|
||||
// Test input contains ℙ where readGoInfo should stop.
|
||||
in string
|
||||
err string
|
||||
}
|
||||
|
||||
var readGoInfoTests = []readTest{
|
||||
{
|
||||
`package p`,
|
||||
"",
|
||||
},
|
||||
{
|
||||
`package p; import "x"`,
|
||||
"",
|
||||
},
|
||||
{
|
||||
`package p; import . "x"`,
|
||||
"",
|
||||
},
|
||||
{
|
||||
`package p; import "x";ℙvar x = 1`,
|
||||
"",
|
||||
},
|
||||
{
|
||||
`package p
|
||||
|
||||
// comment
|
||||
|
||||
import "x"
|
||||
import _ "x"
|
||||
import a "x"
|
||||
|
||||
/* comment */
|
||||
|
||||
import (
|
||||
"x" /* comment */
|
||||
_ "x"
|
||||
a "x" // comment
|
||||
` + quote + `x` + quote + `
|
||||
_ /*comment*/ ` + quote + `x` + quote + `
|
||||
a ` + quote + `x` + quote + `
|
||||
)
|
||||
import (
|
||||
)
|
||||
import ()
|
||||
import()import()import()
|
||||
import();import();import()
|
||||
|
||||
ℙvar x = 1
|
||||
`,
|
||||
"",
|
||||
},
|
||||
{
|
||||
"\ufeff𝔻" + `package p; import "x";ℙvar x = 1`,
|
||||
"",
|
||||
},
|
||||
}
|
||||
|
||||
var readCommentsTests = []readTest{
|
||||
{
|
||||
`ℙpackage p`,
|
||||
"",
|
||||
},
|
||||
{
|
||||
`ℙpackage p; import "x"`,
|
||||
"",
|
||||
},
|
||||
{
|
||||
`ℙpackage p; import . "x"`,
|
||||
"",
|
||||
},
|
||||
{
|
||||
"\ufeff𝔻" + `ℙpackage p; import . "x"`,
|
||||
"",
|
||||
},
|
||||
{
|
||||
`// foo
|
||||
|
||||
/* bar */
|
||||
|
||||
/* quux */ // baz
|
||||
|
||||
/*/ zot */
|
||||
|
||||
// asdf
|
||||
ℙHello, world`,
|
||||
"",
|
||||
},
|
||||
{
|
||||
"\ufeff𝔻" + `// foo
|
||||
|
||||
/* bar */
|
||||
|
||||
/* quux */ // baz
|
||||
|
||||
/*/ zot */
|
||||
|
||||
// asdf
|
||||
ℙHello, world`,
|
||||
"",
|
||||
},
|
||||
}
|
||||
|
||||
func testRead(t *testing.T, tests []readTest, read func(io.Reader) ([]byte, error)) {
|
||||
for i, tt := range tests {
|
||||
beforeP, afterP, _ := stringsCut1(tt.in, "ℙ")
|
||||
in := beforeP + afterP
|
||||
testOut := beforeP
|
||||
|
||||
if beforeD, afterD, ok := stringsCut1(beforeP, "𝔻"); ok {
|
||||
in = beforeD + afterD + afterP
|
||||
testOut = afterD
|
||||
}
|
||||
|
||||
r := strings.NewReader(in)
|
||||
buf, err := read(r)
|
||||
if err != nil {
|
||||
if tt.err == "" {
|
||||
t.Errorf("#%d: err=%q, expected success (%q)", i, err, string(buf))
|
||||
} else if !strings.Contains(err.Error(), tt.err) {
|
||||
t.Errorf("#%d: err=%q, expected %q", i, err, tt.err)
|
||||
}
|
||||
continue
|
||||
}
|
||||
if tt.err != "" {
|
||||
t.Errorf("#%d: success, expected %q", i, tt.err)
|
||||
continue
|
||||
}
|
||||
|
||||
out := string(buf)
|
||||
if out != testOut {
|
||||
t.Errorf("#%d: wrong output:\nhave %q\nwant %q\n", i, out, testOut)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestReadGoInfo(t *testing.T) {
|
||||
testRead(t, readGoInfoTests, func(r io.Reader) ([]byte, error) {
|
||||
var info fileInfo
|
||||
err := readGoInfo(r, &info)
|
||||
return info.header, err
|
||||
})
|
||||
}
|
||||
|
||||
func TestReadComments(t *testing.T) {
|
||||
testRead(t, readCommentsTests, readComments)
|
||||
}
|
||||
|
||||
var readFailuresTests = []readTest{
|
||||
{
|
||||
`package`,
|
||||
"syntax error",
|
||||
},
|
||||
{
|
||||
"package p\n\x00\nimport `math`\n",
|
||||
"unexpected NUL in input",
|
||||
},
|
||||
{
|
||||
`package p; import`,
|
||||
"syntax error",
|
||||
},
|
||||
{
|
||||
`package p; import "`,
|
||||
"syntax error",
|
||||
},
|
||||
{
|
||||
"package p; import ` \n\n",
|
||||
"syntax error",
|
||||
},
|
||||
{
|
||||
`package p; import "x`,
|
||||
"syntax error",
|
||||
},
|
||||
{
|
||||
`package p; import _`,
|
||||
"syntax error",
|
||||
},
|
||||
{
|
||||
`package p; import _ "`,
|
||||
"syntax error",
|
||||
},
|
||||
{
|
||||
`package p; import _ "x`,
|
||||
"syntax error",
|
||||
},
|
||||
{
|
||||
`package p; import .`,
|
||||
"syntax error",
|
||||
},
|
||||
{
|
||||
`package p; import . "`,
|
||||
"syntax error",
|
||||
},
|
||||
{
|
||||
`package p; import . "x`,
|
||||
"syntax error",
|
||||
},
|
||||
{
|
||||
`package p; import (`,
|
||||
"syntax error",
|
||||
},
|
||||
{
|
||||
`package p; import ("`,
|
||||
"syntax error",
|
||||
},
|
||||
{
|
||||
`package p; import ("x`,
|
||||
"syntax error",
|
||||
},
|
||||
{
|
||||
`package p; import ("x"`,
|
||||
"syntax error",
|
||||
},
|
||||
}
|
||||
|
||||
func TestReadFailuresIgnored(t *testing.T) {
|
||||
// Syntax errors should not be reported (false arg to readImports).
|
||||
// Instead, entire file should be the output and no error.
|
||||
// Convert tests not to return syntax errors.
|
||||
tests := make([]readTest, len(readFailuresTests))
|
||||
copy(tests, readFailuresTests)
|
||||
for i := range tests {
|
||||
tt := &tests[i]
|
||||
if !strings.Contains(tt.err, "NUL") {
|
||||
tt.err = ""
|
||||
}
|
||||
}
|
||||
testRead(t, tests, func(r io.Reader) ([]byte, error) {
|
||||
var info fileInfo
|
||||
err := readGoInfo(r, &info)
|
||||
return info.header, err
|
||||
})
|
||||
}
|
||||
|
||||
var readEmbedTests = []struct {
|
||||
in, out string
|
||||
}{
|
||||
{
|
||||
"package p\n",
|
||||
"",
|
||||
},
|
||||
{
|
||||
"package p\nimport \"embed\"\nvar i int\n//go:embed x y z\nvar files embed.FS",
|
||||
`test:4:12:x
|
||||
test:4:14:y
|
||||
test:4:16:z`,
|
||||
},
|
||||
{
|
||||
"package p\nimport \"embed\"\nvar i int\n//go:embed x \"\\x79\" `z`\nvar files embed.FS",
|
||||
`test:4:12:x
|
||||
test:4:14:y
|
||||
test:4:21:z`,
|
||||
},
|
||||
{
|
||||
"package p\nimport \"embed\"\nvar i int\n//go:embed x y\n//go:embed z\nvar files embed.FS",
|
||||
`test:4:12:x
|
||||
test:4:14:y
|
||||
test:5:12:z`,
|
||||
},
|
||||
{
|
||||
"package p\nimport \"embed\"\nvar i int\n\t //go:embed x y\n\t //go:embed z\n\t var files embed.FS",
|
||||
`test:4:14:x
|
||||
test:4:16:y
|
||||
test:5:14:z`,
|
||||
},
|
||||
{
|
||||
"package p\nimport \"embed\"\n//go:embed x y z\nvar files embed.FS",
|
||||
`test:3:12:x
|
||||
test:3:14:y
|
||||
test:3:16:z`,
|
||||
},
|
||||
{
|
||||
"\ufeffpackage p\nimport \"embed\"\n//go:embed x y z\nvar files embed.FS",
|
||||
`test:3:12:x
|
||||
test:3:14:y
|
||||
test:3:16:z`,
|
||||
},
|
||||
{
|
||||
"package p\nimport \"embed\"\nvar s = \"/*\"\n//go:embed x\nvar files embed.FS",
|
||||
`test:4:12:x`,
|
||||
},
|
||||
{
|
||||
`package p
|
||||
import "embed"
|
||||
var s = "\"\\\\"
|
||||
//go:embed x
|
||||
var files embed.FS`,
|
||||
`test:4:15:x`,
|
||||
},
|
||||
{
|
||||
"package p\nimport \"embed\"\nvar s = `/*`\n//go:embed x\nvar files embed.FS",
|
||||
`test:4:12:x`,
|
||||
},
|
||||
{
|
||||
"package p\nimport \"embed\"\nvar s = z/ *y\n//go:embed pointer\nvar pointer embed.FS",
|
||||
"test:4:12:pointer",
|
||||
},
|
||||
{
|
||||
"package p\n//go:embed x y z\n", // no import, no scan
|
||||
"",
|
||||
},
|
||||
{
|
||||
"package p\n//go:embed x y z\nvar files embed.FS", // no import, no scan
|
||||
"",
|
||||
},
|
||||
{
|
||||
"\ufeffpackage p\n//go:embed x y z\nvar files embed.FS", // no import, no scan
|
||||
"",
|
||||
},
|
||||
}
|
||||
|
||||
func TestReadEmbed(t *testing.T) {
|
||||
fset := token.NewFileSet()
|
||||
for i, tt := range readEmbedTests {
|
||||
info := fileInfo{
|
||||
name: "test",
|
||||
fset: fset,
|
||||
}
|
||||
err := readGoInfo(strings.NewReader(tt.in), &info)
|
||||
if err != nil {
|
||||
t.Errorf("#%d: %v", i, err)
|
||||
continue
|
||||
}
|
||||
b := &strings.Builder{}
|
||||
sep := ""
|
||||
for _, emb := range info.embeds {
|
||||
fmt.Fprintf(b, "%s%v:%s", sep, emb.pos, emb.pattern)
|
||||
sep = "\n"
|
||||
}
|
||||
got := b.String()
|
||||
want := strings.Join(strings.Fields(tt.out), "\n")
|
||||
if got != want {
|
||||
t.Errorf("#%d: embeds:\n%s\nwant:\n%s", i, got, want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func stringsCut1(s, sep string) (before, after string, found bool) {
|
||||
if i := strings.Index(s, sep); i >= 0 {
|
||||
return s[:i], s[i+len(sep):], true
|
||||
}
|
||||
return s, "", false
|
||||
}
|
|
@@ -0,0 +1,11 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package build

// List of past, present, and future known GOOS and GOARCH values.
// Do not remove from this list, as these are used for go/build filename matching.

const goosList = "aix android darwin dragonfly freebsd hurd illumos ios js linux nacl netbsd openbsd plan9 solaris windows zos "
const goarchList = "386 amd64 amd64p32 arm armbe arm64 arm64be loong64 mips mipsle mips64 mips64le mips64p32 mips64p32le ppc ppc64 ppc64le riscv riscv64 s390 s390x sparc sparc64 wasm "
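Each entry in these lists is followed by a trailing space, so the whole list can be split or substring-matched cheaply. A sketch, assuming nothing beyond the standard library, of turning such a list into a lookup set; the variable names are illustrative, not the package's own helpers:

package main

import (
	"fmt"
	"strings"
)

const goosList = "aix android darwin dragonfly freebsd hurd illumos ios js linux nacl netbsd openbsd plan9 solaris windows zos "

func main() {
	// Build a set of known GOOS names from the space-separated list.
	known := make(map[string]bool)
	for _, name := range strings.Fields(goosList) {
		known[name] = true
	}
	fmt.Println(known["linux"], known["notanos"]) // true false
}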
@@ -0,0 +1,62 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package build

import (
	"runtime"
	"testing"
)

var (
	thisOS    = runtime.GOOS
	thisArch  = runtime.GOARCH
	otherOS   = anotherOS()
	otherArch = anotherArch()
)

func anotherOS() string {
	if thisOS != "darwin" && thisOS != "ios" {
		return "darwin"
	}
	return "linux"
}

func anotherArch() string {
	if thisArch != "amd64" {
		return "amd64"
	}
	return "386"
}

type GoodFileTest struct {
	name   string
	result bool
}

var tests = []GoodFileTest{
	{"file.go", true},
	{"file.c", true},
	{"file_foo.go", true},
	{"file_" + thisArch + ".go", true},
	{"file_" + otherArch + ".go", false},
	{"file_" + thisOS + ".go", true},
	{"file_" + otherOS + ".go", false},
	{"file_" + thisOS + "_" + thisArch + ".go", true},
	{"file_" + otherOS + "_" + thisArch + ".go", false},
	{"file_" + thisOS + "_" + otherArch + ".go", false},
	{"file_" + otherOS + "_" + otherArch + ".go", false},
	{"file_foo_" + thisArch + ".go", true},
	{"file_foo_" + otherArch + ".go", false},
	{"file_" + thisOS + ".c", true},
	{"file_" + otherOS + ".c", false},
}

func TestGoodOSArch(t *testing.T) {
	for _, test := range tests {
		if Default.goodOSArchFile(test.name, make(map[string]bool)) != test.result {
			t.Fatalf("goodOSArchFile(%q) != %v", test.name, test.result)
		}
	}
}
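The test above exercises the unexported goodOSArchFile filename heuristic directly. A sketch, not part of this change, of asking the same question through the exported Context.MatchFile of the backported package (the file name here is illustrative; non-matching names are rejected without the file being opened):

package main

import (
	"fmt"

	"golang.org/x/website/internal/backport/go/build"
)

func main() {
	ctxt := build.Default
	ctxt.GOOS = "linux"
	ctxt.GOARCH = "amd64"
	// file_windows.go cannot belong to a linux build context.
	match, err := ctxt.MatchFile(".", "file_windows.go")
	fmt.Println(match, err) // false <nil>
}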
@@ -0,0 +1,7 @@
# Copyright 2009 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.

# Script to test heading detection heuristic
headscan: headscan.go
	go build headscan.go
@@ -0,0 +1,518 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Godoc comment extraction and comment -> HTML formatting.

package doc

import (
	"bytes"
	"io"
	"regexp"
	"strings"
	"text/template" // for HTMLEscape
	"unicode"
	"unicode/utf8"
)

const (
	ldquo = "&ldquo;"
	rdquo = "&rdquo;"
	ulquo = "“"
	urquo = "”"
)

var (
	htmlQuoteReplacer    = strings.NewReplacer(ulquo, ldquo, urquo, rdquo)
	unicodeQuoteReplacer = strings.NewReplacer("``", ulquo, "''", urquo)
)

// Escape comment text for HTML. If nice is set,
// also turn `` into &ldquo; and '' into &rdquo;.
func commentEscape(w io.Writer, text string, nice bool) {
	if nice {
		// In the first pass, we convert `` and '' into their unicode equivalents.
		// This prevents them from being escaped in HTMLEscape.
		text = convertQuotes(text)
		var buf bytes.Buffer
		template.HTMLEscape(&buf, []byte(text))
		// Now we convert the unicode quotes to their HTML escaped entities to maintain old behavior.
		// We need to use a temp buffer to read the string back and do the conversion,
		// otherwise HTMLEscape will escape & to &amp;
		htmlQuoteReplacer.WriteString(w, buf.String())
		return
	}
	template.HTMLEscape(w, []byte(text))
}

func convertQuotes(text string) string {
	return unicodeQuoteReplacer.Replace(text)
}

const (
	// Regexp for Go identifiers
	identRx = `[\pL_][\pL_0-9]*`

	// Regexp for URLs
	// Match parens, and check later for balance - see #5043, #22285
	// Match .,:;?! within path, but not at end - see #18139, #16565
	// This excludes some rare yet valid urls ending in common punctuation
	// in order to allow sentences ending in URLs.

	// protocol (required) e.g. http
	protoPart = `(https?|ftp|file|gopher|mailto|nntp)`
	// host (required) e.g. www.example.com or [::1]:8080
	hostPart = `([a-zA-Z0-9_@\-.\[\]:]+)`
	// path+query+fragment (optional) e.g. /path/index.html?q=foo#bar
	pathPart = `([.,:;?!]*[a-zA-Z0-9$'()*+&#=@~_/\-\[\]%])*`

	urlRx = protoPart + `://` + hostPart + pathPart
)

var matchRx = regexp.MustCompile(`(` + urlRx + `)|(` + identRx + `)`)

var (
	html_a      = []byte(`<a href="`)
	html_aq     = []byte(`">`)
	html_enda   = []byte("</a>")
	html_i      = []byte("<i>")
	html_endi   = []byte("</i>")
	html_p      = []byte("<p>\n")
	html_endp   = []byte("</p>\n")
	html_pre    = []byte("<pre>")
	html_endpre = []byte("</pre>\n")
	html_h      = []byte(`<h3 id="`)
	html_hq     = []byte(`">`)
	html_endh   = []byte("</h3>\n")
)

// Emphasize and escape a line of text for HTML. URLs are converted into links;
// if the URL also appears in the words map, the link is taken from the map (if
// the corresponding map value is the empty string, the URL is not converted
// into a link). Go identifiers that appear in the words map are italicized; if
// the corresponding map value is not the empty string, it is considered a URL
// and the word is converted into a link. If nice is set, the remaining text's
// appearance is improved where it makes sense (e.g., `` is turned into &ldquo;
// and '' into &rdquo;).
func emphasize(w io.Writer, line string, words map[string]string, nice bool) {
	for {
		m := matchRx.FindStringSubmatchIndex(line)
		if m == nil {
			break
		}
		// m >= 6 (two parenthesized sub-regexps in matchRx, 1st one is urlRx)

		// write text before match
		commentEscape(w, line[0:m[0]], nice)

		// adjust match for URLs
		match := line[m[0]:m[1]]
		if strings.Contains(match, "://") {
			m0, m1 := m[0], m[1]
			for _, s := range []string{"()", "{}", "[]"} {
				open, close := s[:1], s[1:] // E.g., "(" and ")"
				// require opening parentheses before closing parentheses (#22285)
				if i := strings.Index(match, close); i >= 0 && i < strings.Index(match, open) {
					m1 = m0 + i
					match = line[m0:m1]
				}
				// require balanced pairs of parentheses (#5043)
				for i := 0; strings.Count(match, open) != strings.Count(match, close) && i < 10; i++ {
					m1 = strings.LastIndexAny(line[:m1], s)
					match = line[m0:m1]
				}
			}
			if m1 != m[1] {
				// redo matching with shortened line for correct indices
				m = matchRx.FindStringSubmatchIndex(line[:m[0]+len(match)])
			}
		}

		// analyze match
		url := ""
		italics := false
		if words != nil {
			url, italics = words[match]
		}
		if m[2] >= 0 {
			// match against first parenthesized sub-regexp; must be match against urlRx
			if !italics {
				// no alternative URL in words list, use match instead
				url = match
			}
			italics = false // don't italicize URLs
		}

		// write match
		if len(url) > 0 {
			w.Write(html_a)
			template.HTMLEscape(w, []byte(url))
			w.Write(html_aq)
		}
		if italics {
			w.Write(html_i)
		}
		commentEscape(w, match, nice)
		if italics {
			w.Write(html_endi)
		}
		if len(url) > 0 {
			w.Write(html_enda)
		}

		// advance
		line = line[m[1]:]
	}
	commentEscape(w, line, nice)
}

func indentLen(s string) int {
	i := 0
	for i < len(s) && (s[i] == ' ' || s[i] == '\t') {
		i++
	}
	return i
}

func isBlank(s string) bool {
	return len(s) == 0 || (len(s) == 1 && s[0] == '\n')
}

func commonPrefix(a, b string) string {
	i := 0
	for i < len(a) && i < len(b) && a[i] == b[i] {
		i++
	}
	return a[0:i]
}

func unindent(block []string) {
	if len(block) == 0 {
		return
	}

	// compute maximum common white prefix
	prefix := block[0][0:indentLen(block[0])]
	for _, line := range block {
		if !isBlank(line) {
			prefix = commonPrefix(prefix, line[0:indentLen(line)])
		}
	}
	n := len(prefix)

	// remove
	for i, line := range block {
		if !isBlank(line) {
			block[i] = line[n:]
		}
	}
}

// heading returns the trimmed line if it passes as a section heading;
// otherwise it returns the empty string.
func heading(line string) string {
	line = strings.TrimSpace(line)
	if len(line) == 0 {
		return ""
	}

	// a heading must start with an uppercase letter
	r, _ := utf8.DecodeRuneInString(line)
	if !unicode.IsLetter(r) || !unicode.IsUpper(r) {
		return ""
	}

	// it must end in a letter or digit:
	r, _ = utf8.DecodeLastRuneInString(line)
	if !unicode.IsLetter(r) && !unicode.IsDigit(r) {
		return ""
	}

	// exclude lines with illegal characters. we allow "(),"
	if strings.ContainsAny(line, ";:!?+*/=[]{}_^°&§~%#@<\">\\") {
		return ""
	}

	// allow "'" for possessive "'s" only
	for b := line; ; {
		var ok bool
		if _, b, ok = stringsCut(b, "'"); !ok {
			break
		}
		if b != "s" && !strings.HasPrefix(b, "s ") {
			return "" // ' not followed by s and then end-of-word
		}
	}

	// allow "." when followed by non-space
	for b := line; ; {
		var ok bool
		if _, b, ok = stringsCut(b, "."); !ok {
			break
		}
		if b == "" || strings.HasPrefix(b, " ") {
			return "" // not followed by non-space
		}
	}

	return line
}

type op int

const (
	opPara op = iota
	opHead
	opPre
)

type block struct {
	op    op
	lines []string
}

var nonAlphaNumRx = regexp.MustCompile(`[^a-zA-Z0-9]`)

func anchorID(line string) string {
	// Add a "hdr-" prefix to avoid conflicting with IDs used for package symbols.
	return "hdr-" + nonAlphaNumRx.ReplaceAllString(line, "_")
}

// ToHTML converts comment text to formatted HTML.
// The comment was prepared by DocReader,
// so it is known not to have leading, trailing blank lines
// nor to have trailing spaces at the end of lines.
// The comment markers have already been removed.
//
// Each span of unindented non-blank lines is converted into
// a single paragraph. There is one exception to the rule: a span that
// consists of a single line, is followed by another paragraph span,
// begins with a capital letter, and contains no punctuation
// other than parentheses and commas is formatted as a heading.
//
// A span of indented lines is converted into a <pre> block,
// with the common indent prefix removed.
//
// URLs in the comment text are converted into links; if the URL also appears
// in the words map, the link is taken from the map (if the corresponding map
// value is the empty string, the URL is not converted into a link).
//
// A pair of (consecutive) backticks (`) is converted to a unicode left quote (“), and a pair of (consecutive)
// single quotes (') is converted to a unicode right quote (”).
//
// Go identifiers that appear in the words map are italicized; if the corresponding
// map value is not the empty string, it is considered a URL and the word is converted
// into a link.
func ToHTML(w io.Writer, text string, words map[string]string) {
	for _, b := range blocks(text) {
		switch b.op {
		case opPara:
			w.Write(html_p)
			for _, line := range b.lines {
				emphasize(w, line, words, true)
			}
			w.Write(html_endp)
		case opHead:
			w.Write(html_h)
			id := ""
			for _, line := range b.lines {
				if id == "" {
					id = anchorID(line)
					w.Write([]byte(id))
					w.Write(html_hq)
				}
				commentEscape(w, line, true)
			}
			if id == "" {
				w.Write(html_hq)
			}
			w.Write(html_endh)
		case opPre:
			w.Write(html_pre)
			for _, line := range b.lines {
				emphasize(w, line, nil, false)
			}
			w.Write(html_endpre)
		}
	}
}

func blocks(text string) []block {
	var (
		out  []block
		para []string

		lastWasBlank   = false
		lastWasHeading = false
	)

	close := func() {
		if para != nil {
			out = append(out, block{opPara, para})
			para = nil
		}
	}

	lines := strings.SplitAfter(text, "\n")
	unindent(lines)
	for i := 0; i < len(lines); {
		line := lines[i]
		if isBlank(line) {
			// close paragraph
			close()
			i++
			lastWasBlank = true
			continue
		}
		if indentLen(line) > 0 {
			// close paragraph
			close()

			// count indented or blank lines
			j := i + 1
			for j < len(lines) && (isBlank(lines[j]) || indentLen(lines[j]) > 0) {
				j++
			}
			// but not trailing blank lines
			for j > i && isBlank(lines[j-1]) {
				j--
			}
			pre := lines[i:j]
			i = j

			unindent(pre)

			// put those lines in a pre block
			out = append(out, block{opPre, pre})
			lastWasHeading = false
			continue
		}

		if lastWasBlank && !lastWasHeading && i+2 < len(lines) &&
			isBlank(lines[i+1]) && !isBlank(lines[i+2]) && indentLen(lines[i+2]) == 0 {
			// current line is non-blank, surrounded by blank lines
			// and the next non-blank line is not indented: this
			// might be a heading.
			if head := heading(line); head != "" {
				close()
				out = append(out, block{opHead, []string{head}})
				i += 2
				lastWasHeading = true
				continue
			}
		}

		// open paragraph
		lastWasBlank = false
		lastWasHeading = false
		para = append(para, lines[i])
		i++
	}
	close()

	return out
}

// ToText prepares comment text for presentation in textual output.
// It wraps paragraphs of text to width or fewer Unicode code points
// and then prefixes each line with the indent. In preformatted sections
// (such as program text), it prefixes each non-blank line with preIndent.
//
// A pair of (consecutive) backticks (`) is converted to a unicode left quote (“), and a pair of (consecutive)
// single quotes (') is converted to a unicode right quote (”).
func ToText(w io.Writer, text string, indent, preIndent string, width int) {
	l := lineWrapper{
		out:    w,
		width:  width,
		indent: indent,
	}
	for _, b := range blocks(text) {
		switch b.op {
		case opPara:
			// l.write will add leading newline if required
			for _, line := range b.lines {
				line = convertQuotes(line)
				l.write(line)
			}
			l.flush()
		case opHead:
			w.Write(nl)
			for _, line := range b.lines {
				line = convertQuotes(line)
				l.write(line + "\n")
			}
			l.flush()
		case opPre:
			w.Write(nl)
			for _, line := range b.lines {
				if isBlank(line) {
					w.Write([]byte("\n"))
				} else {
					w.Write([]byte(preIndent))
					w.Write([]byte(line))
				}
			}
		}
	}
}

type lineWrapper struct {
	out       io.Writer
	printed   bool
	width     int
	indent    string
	n         int
	pendSpace int
}

var nl = []byte("\n")
var space = []byte(" ")
var prefix = []byte("// ")

func (l *lineWrapper) write(text string) {
	if l.n == 0 && l.printed {
		l.out.Write(nl) // blank line before new paragraph
	}
	l.printed = true

	needsPrefix := false
	isComment := strings.HasPrefix(text, "//")
	for _, f := range strings.Fields(text) {
		w := utf8.RuneCountInString(f)
		// wrap if line is too long
		if l.n > 0 && l.n+l.pendSpace+w > l.width {
			l.out.Write(nl)
			l.n = 0
			l.pendSpace = 0
			needsPrefix = isComment && !strings.HasPrefix(f, "//")
		}
		if l.n == 0 {
			l.out.Write([]byte(l.indent))
		}
		if needsPrefix {
			l.out.Write(prefix)
			needsPrefix = false
		}
		l.out.Write(space[:l.pendSpace])
		l.out.Write([]byte(f))
		l.n += l.pendSpace + w
		l.pendSpace = 1
	}
}

func (l *lineWrapper) flush() {
	if l.n == 0 {
		return
	}
	l.out.Write(nl)
	l.pendSpace = 0
	l.n = 0
}

func stringsCut(s, sep string) (before, after string, found bool) {
	if i := strings.Index(s, sep); i >= 0 {
		return s[:i], s[i+len(sep):], true
	}
	return s, "", false
}
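A usage sketch, not part of this change, of the ToHTML rules described above: a heading line surrounded by blank lines, a URL turned into a link, and an indented span turned into a pre block.

package main

import (
	"os"

	"golang.org/x/website/internal/backport/go/doc"
)

func main() {
	text := "Intro paragraph.\n\nSection heading\n\nBody text with a link to https://go.dev/ and code:\n\n\tindented line\n"
	doc.ToHTML(os.Stdout, text, nil)
	// Produces, roughly: a <p> for the intro, an <h3 id="hdr-Section_heading">
	// for the heading, a <p> whose URL becomes an <a href=...>, and a <pre>
	// block for the indented line.
}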
@@ -0,0 +1,247 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package doc

import (
	"bytes"
	"reflect"
	"strings"
	"testing"
)

var headingTests = []struct {
	line string
	ok   bool
}{
	{"Section", true},
	{"A typical usage", true},
	{"ΔΛΞ is Greek", true},
	{"Foo 42", true},
	{"", false},
	{"section", false},
	{"A typical usage:", false},
	{"This code:", false},
	{"δ is Greek", false},
	{"Foo §", false},
	{"Fermat's Last Sentence", true},
	{"Fermat's", true},
	{"'sX", false},
	{"Ted 'Too' Bar", false},
	{"Use n+m", false},
	{"Scanning:", false},
	{"N:M", false},
}

func TestIsHeading(t *testing.T) {
	for _, tt := range headingTests {
		if h := heading(tt.line); (len(h) > 0) != tt.ok {
			t.Errorf("isHeading(%q) = %v, want %v", tt.line, h, tt.ok)
		}
	}
}

var blocksTests = []struct {
	in   string
	out  []block
	text string
}{
	{
		in: `Para 1.
Para 1 line 2.

Para 2.

Section

Para 3.

	pre
	pre1

Para 4.

	pre
	pre1

	pre2

Para 5.


	pre


	pre1
	pre2

Para 6.
	pre
	pre2
`,
		out: []block{
			{opPara, []string{"Para 1.\n", "Para 1 line 2.\n"}},
			{opPara, []string{"Para 2.\n"}},
			{opHead, []string{"Section"}},
			{opPara, []string{"Para 3.\n"}},
			{opPre, []string{"pre\n", "pre1\n"}},
			{opPara, []string{"Para 4.\n"}},
			{opPre, []string{"pre\n", "pre1\n", "\n", "pre2\n"}},
			{opPara, []string{"Para 5.\n"}},
			{opPre, []string{"pre\n", "\n", "\n", "pre1\n", "pre2\n"}},
			{opPara, []string{"Para 6.\n"}},
			{opPre, []string{"pre\n", "pre2\n"}},
		},
		text: `. Para 1. Para 1 line 2.

. Para 2.


. Section

. Para 3.

$	pre
$	pre1

. Para 4.

$	pre
$	pre1

$	pre2

. Para 5.

$	pre


$	pre1
$	pre2

. Para 6.

$	pre
$	pre2
`,
	},
	{
		in: "Para.\n\tshould not be ``escaped''",
		out: []block{
			{opPara, []string{"Para.\n"}},
			{opPre, []string{"should not be ``escaped''"}},
		},
		text: ". Para.\n\n$\tshould not be ``escaped''",
	},
	{
		in: "// A very long line of 46 char for line wrapping.",
		out: []block{
			{opPara, []string{"// A very long line of 46 char for line wrapping."}},
		},
		text: `. // A very long line of 46 char for line
. // wrapping.
`,
	},
	{
		in: `/* A very long line of 46 char for line wrapping.
A very long line of 46 char for line wrapping. */`,
		out: []block{
			{opPara, []string{"/* A very long line of 46 char for line wrapping.\n", "A very long line of 46 char for line wrapping. */"}},
		},
		text: `. /* A very long line of 46 char for line
. wrapping. A very long line of 46 char
. for line wrapping. */
`,
	},
	{
		in: `A line of 36 char for line wrapping.
//Another line starting with //`,
		out: []block{
			{opPara, []string{"A line of 36 char for line wrapping.\n",
				"//Another line starting with //"}},
		},
		text: `. A line of 36 char for line wrapping.
. //Another line starting with //
`,
	},
}

func TestBlocks(t *testing.T) {
	for i, tt := range blocksTests {
		b := blocks(tt.in)
		if !reflect.DeepEqual(b, tt.out) {
			t.Errorf("#%d: mismatch\nhave: %v\nwant: %v", i, b, tt.out)
		}
	}
}

func TestToText(t *testing.T) {
	var buf bytes.Buffer
	for i, tt := range blocksTests {
		ToText(&buf, tt.in, ". ", "$\t", 40)
		if have := buf.String(); have != tt.text {
			t.Errorf("#%d: mismatch\nhave: %s\nwant: %s\nhave vs want:\n%q\n%q", i, have, tt.text, have, tt.text)
		}
		buf.Reset()
	}
}

var emphasizeTests = []struct {
	in, out string
}{
	{"", ""},
	{"http://[::1]:8080/foo.txt", `<a href="http://[::1]:8080/foo.txt">http://[::1]:8080/foo.txt</a>`},
	{"before (https://www.google.com) after", `before (<a href="https://www.google.com">https://www.google.com</a>) after`},
	{"before https://www.google.com:30/x/y/z:b::c. After", `before <a href="https://www.google.com:30/x/y/z:b::c">https://www.google.com:30/x/y/z:b::c</a>. After`},
	{"http://www.google.com/path/:;!-/?query=%34b#093124", `<a href="http://www.google.com/path/:;!-/?query=%34b#093124">http://www.google.com/path/:;!-/?query=%34b#093124</a>`},
	{"http://www.google.com/path/:;!-/?query=%34bar#093124", `<a href="http://www.google.com/path/:;!-/?query=%34bar#093124">http://www.google.com/path/:;!-/?query=%34bar#093124</a>`},
	{"http://www.google.com/index.html! After", `<a href="http://www.google.com/index.html">http://www.google.com/index.html</a>! After`},
	{"http://www.google.com/", `<a href="http://www.google.com/">http://www.google.com/</a>`},
	{"https://www.google.com/", `<a href="https://www.google.com/">https://www.google.com/</a>`},
	{"http://www.google.com/path.", `<a href="http://www.google.com/path">http://www.google.com/path</a>.`},
	{"http://en.wikipedia.org/wiki/Camellia_(cipher)", `<a href="http://en.wikipedia.org/wiki/Camellia_(cipher)">http://en.wikipedia.org/wiki/Camellia_(cipher)</a>`},
	{"(http://www.google.com/)", `(<a href="http://www.google.com/">http://www.google.com/</a>)`},
	{"http://gmail.com)", `<a href="http://gmail.com">http://gmail.com</a>)`},
	{"((http://gmail.com))", `((<a href="http://gmail.com">http://gmail.com</a>))`},
	{"http://gmail.com ((http://gmail.com)) ()", `<a href="http://gmail.com">http://gmail.com</a> ((<a href="http://gmail.com">http://gmail.com</a>)) ()`},
	{"Foo bar http://example.com/ quux!", `Foo bar <a href="http://example.com/">http://example.com/</a> quux!`},
	{"Hello http://example.com/%2f/ /world.", `Hello <a href="http://example.com/%2f/">http://example.com/%2f/</a> /world.`},
	{"Lorem http: ipsum //host/path", "Lorem http: ipsum //host/path"},
	{"javascript://is/not/linked", "javascript://is/not/linked"},
	{"http://foo", `<a href="http://foo">http://foo</a>`},
	{"art by [[https://www.example.com/person/][Person Name]]", `art by [[<a href="https://www.example.com/person/">https://www.example.com/person/</a>][Person Name]]`},
	{"please visit (http://golang.org/)", `please visit (<a href="http://golang.org/">http://golang.org/</a>)`},
	{"please visit http://golang.org/hello())", `please visit <a href="http://golang.org/hello()">http://golang.org/hello()</a>)`},
	{"http://git.qemu.org/?p=qemu.git;a=blob;f=qapi-schema.json;hb=HEAD", `<a href="http://git.qemu.org/?p=qemu.git;a=blob;f=qapi-schema.json;hb=HEAD">http://git.qemu.org/?p=qemu.git;a=blob;f=qapi-schema.json;hb=HEAD</a>`},
	{"https://foo.bar/bal/x(])", `<a href="https://foo.bar/bal/x(">https://foo.bar/bal/x(</a>])`}, // inner ] causes (]) to be cut off from URL
	{"foo [ http://bar(])", `foo [ <a href="http://bar(">http://bar(</a>])`},                      // outer [ causes ]) to be cut off from URL
}

func TestEmphasize(t *testing.T) {
	for i, tt := range emphasizeTests {
		var buf bytes.Buffer
		emphasize(&buf, tt.in, nil, true)
		out := buf.String()
		if out != tt.out {
			t.Errorf("#%d: mismatch\nhave: %v\nwant: %v", i, out, tt.out)
		}
	}
}

func TestCommentEscape(t *testing.T) {
	commentTests := []struct {
		in, out string
	}{
		{"typically invoked as ``go tool asm'',", "typically invoked as " + ldquo + "go tool asm" + rdquo + ","},
		{"For more detail, run ``go help test'' and ``go help testflag''", "For more detail, run " + ldquo + "go help test" + rdquo + " and " + ldquo + "go help testflag" + rdquo},
	}
	for i, tt := range commentTests {
		var buf strings.Builder
		commentEscape(&buf, tt.in, true)
		out := buf.String()
		if out != tt.out {
			t.Errorf("#%d: mismatch\nhave: %q\nwant: %q", i, out, tt.out)
		}
	}
}
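A companion sketch, not part of this change, of ToText with the same indent (". "), preIndent ("$" plus a tab), and width (40) that TestToText uses above:

package main

import (
	"os"

	"golang.org/x/website/internal/backport/go/doc"
)

func main() {
	text := "Para 1.\nPara 1 line 2.\n\n\tpre\n"
	doc.ToText(os.Stdout, text, ". ", "$\t", 40)
	// Prints:
	// . Para 1. Para 1 line 2.
	//
	// $	pre
}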
@@ -0,0 +1,220 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package doc extracts source code documentation from a Go AST.
package doc

import (
	"fmt"
	"golang.org/x/website/internal/backport/go/ast"
	"golang.org/x/website/internal/backport/go/token"
	"strings"
)

// Package is the documentation for an entire package.
type Package struct {
	Doc        string
	Name       string
	ImportPath string
	Imports    []string
	Filenames  []string
	Notes      map[string][]*Note

	// Deprecated: For backward compatibility Bugs is still populated,
	// but all new code should use Notes instead.
	Bugs []string

	// declarations
	Consts []*Value
	Types  []*Type
	Vars   []*Value
	Funcs  []*Func

	// Examples is a sorted list of examples associated with
	// the package. Examples are extracted from _test.go files
	// provided to NewFromFiles.
	Examples []*Example
}

// Value is the documentation for a (possibly grouped) var or const declaration.
type Value struct {
	Doc   string
	Names []string // var or const names in declaration order
	Decl  *ast.GenDecl

	order int
}

// Type is the documentation for a type declaration.
type Type struct {
	Doc  string
	Name string
	Decl *ast.GenDecl

	// associated declarations
	Consts  []*Value // sorted list of constants of (mostly) this type
	Vars    []*Value // sorted list of variables of (mostly) this type
	Funcs   []*Func  // sorted list of functions returning this type
	Methods []*Func  // sorted list of methods (including embedded ones) of this type

	// Examples is a sorted list of examples associated with
	// this type. Examples are extracted from _test.go files
	// provided to NewFromFiles.
	Examples []*Example
}

// Func is the documentation for a func declaration.
type Func struct {
	Doc  string
	Name string
	Decl *ast.FuncDecl

	// methods
	// (for functions, these fields have the respective zero value)
	Recv  string // actual receiver "T" or "*T"
	Orig  string // original receiver "T" or "*T"
	Level int    // embedding level; 0 means not embedded

	// Examples is a sorted list of examples associated with this
	// function or method. Examples are extracted from _test.go files
	// provided to NewFromFiles.
	Examples []*Example
}

// A Note represents a marked comment starting with "MARKER(uid): note body".
// Any note with a marker of 2 or more upper case [A-Z] letters and a uid of
// at least one character is recognized. The ":" following the uid is optional.
// Notes are collected in the Package.Notes map indexed by the notes marker.
type Note struct {
	Pos, End token.Pos // position range of the comment containing the marker
	UID      string    // uid found with the marker
	Body     string    // note body text
}

// Mode values control the operation of New and NewFromFiles.
type Mode int

const (
	// AllDecls says to extract documentation for all package-level
	// declarations, not just exported ones.
	AllDecls Mode = 1 << iota

	// AllMethods says to show all embedded methods, not just the ones of
	// invisible (unexported) anonymous fields.
	AllMethods

	// PreserveAST says to leave the AST unmodified. Originally, pieces of
	// the AST such as function bodies were nil-ed out to save memory in
	// godoc, but not all programs want that behavior.
	PreserveAST
)

// New computes the package documentation for the given package AST.
// New takes ownership of the AST pkg and may edit or overwrite it.
// To have the Examples fields populated, use NewFromFiles and include
// the package's _test.go files.
func New(pkg *ast.Package, importPath string, mode Mode) *Package {
	var r reader
	r.readPackage(pkg, mode)
	r.computeMethodSets()
	r.cleanupTypes()
	return &Package{
		Doc:        r.doc,
		Name:       pkg.Name,
		ImportPath: importPath,
		Imports:    sortedKeys(r.imports),
		Filenames:  r.filenames,
		Notes:      r.notes,
		Bugs:       noteBodies(r.notes["BUG"]),
		Consts:     sortedValues(r.values, token.CONST),
		Types:      sortedTypes(r.types, mode&AllMethods != 0),
		Vars:       sortedValues(r.values, token.VAR),
		Funcs:      sortedFuncs(r.funcs, true),
	}
}

// NewFromFiles computes documentation for a package.
//
// The package is specified by a list of *ast.Files and corresponding
// file set, which must not be nil.
// NewFromFiles uses all provided files when computing documentation,
// so it is the caller's responsibility to provide only the files that
// match the desired build context. "go/build".Context.MatchFile can
// be used for determining whether a file matches a build context with
// the desired GOOS and GOARCH values, and other build constraints.
// The import path of the package is specified by importPath.
//
// Examples found in _test.go files are associated with the corresponding
// type, function, method, or the package, based on their name.
// If the example has a suffix in its name, it is set in the
// Example.Suffix field. Examples with malformed names are skipped.
//
// Optionally, a single extra argument of type Mode can be provided to
// control low-level aspects of the documentation extraction behavior.
//
// NewFromFiles takes ownership of the AST files and may edit them,
// unless the PreserveAST Mode bit is on.
func NewFromFiles(fset *token.FileSet, files []*ast.File, importPath string, opts ...interface{}) (*Package, error) {
	// Check for invalid API usage.
	if fset == nil {
		panic(fmt.Errorf("doc.NewFromFiles: no token.FileSet provided (fset == nil)"))
	}
	var mode Mode
	switch len(opts) { // There can only be 0 or 1 options, so a simple switch works for now.
	case 0:
		// Nothing to do.
	case 1:
		m, ok := opts[0].(Mode)
		if !ok {
			panic(fmt.Errorf("doc.NewFromFiles: option argument type must be doc.Mode"))
		}
		mode = m
	default:
		panic(fmt.Errorf("doc.NewFromFiles: there must not be more than 1 option argument"))
	}

	// Collect .go and _test.go files.
	var (
		goFiles     = make(map[string]*ast.File)
		testGoFiles []*ast.File
	)
	for i := range files {
		f := fset.File(files[i].Pos())
		if f == nil {
			return nil, fmt.Errorf("file files[%d] is not found in the provided file set", i)
		}
		switch name := f.Name(); {
		case strings.HasSuffix(name, ".go") && !strings.HasSuffix(name, "_test.go"):
			goFiles[name] = files[i]
		case strings.HasSuffix(name, "_test.go"):
			testGoFiles = append(testGoFiles, files[i])
		default:
			return nil, fmt.Errorf("file files[%d] filename %q does not have a .go extension", i, name)
		}
	}

	// TODO(dmitshur,gri): A relatively high level call to ast.NewPackage with a simpleImporter
	// ast.Importer implementation is made below. It might be possible to short-circuit and simplify.

	// Compute package documentation.
	pkg, _ := ast.NewPackage(fset, goFiles, simpleImporter, nil) // Ignore errors that can happen due to unresolved identifiers.
	p := New(pkg, importPath, mode)
	classifyExamples(p, Examples(testGoFiles...))
	return p, nil
}

// simpleImporter returns a (dummy) package object named by the last path
// component of the provided package path (as is the convention for packages).
// This is sufficient to resolve package identifiers without doing an actual
// import. It never returns an error.
func simpleImporter(imports map[string]*ast.Object, path string) (*ast.Object, error) {
	pkg := imports[path]
	if pkg == nil {
		// note that strings.LastIndex returns -1 if there is no "/"
		pkg = ast.NewObj(ast.Pkg, path[strings.LastIndex(path, "/")+1:])
		pkg.Data = ast.NewScope(nil) // required by ast.NewPackage for dot-import
		imports[path] = pkg
	}
	return pkg, nil
}
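A usage sketch, not part of this change, of NewFromFiles with the backported parser and token packages; the file name, source text, and import path below are illustrative only:

package main

import (
	"fmt"
	"log"

	"golang.org/x/website/internal/backport/go/ast"
	"golang.org/x/website/internal/backport/go/doc"
	"golang.org/x/website/internal/backport/go/parser"
	"golang.org/x/website/internal/backport/go/token"
)

func main() {
	const src = `// Package demo is a tiny example.
package demo

// Hello returns a greeting.
func Hello() string { return "hi" }
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, parser.ParseComments)
	if err != nil {
		log.Fatal(err)
	}
	p, err := doc.NewFromFiles(fset, []*ast.File{f}, "example.com/demo")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(p.Name+":", p.Doc)
	for _, fn := range p.Funcs {
		fmt.Println(fn.Name+":", fn.Doc)
	}
}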
@@ -0,0 +1,246 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package doc

import (
	"bytes"
	"flag"
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
	"regexp"
	"strings"
	"testing"
	"text/template"

	"golang.org/x/website/internal/backport/go/ast"
	"golang.org/x/website/internal/backport/go/parser"
	"golang.org/x/website/internal/backport/go/printer"
	"golang.org/x/website/internal/backport/go/token"
)

var update = flag.Bool("update", false, "update golden (.out) files")
var files = flag.String("files", "", "consider only Go test files matching this regular expression")

const dataDir = "testdata"

var templateTxt = readTemplate("template.txt")

func readTemplate(filename string) *template.Template {
	t := template.New(filename)
	t.Funcs(template.FuncMap{
		"node":     nodeFmt,
		"synopsis": synopsisFmt,
		"indent":   indentFmt,
	})
	return template.Must(t.ParseFiles(filepath.Join(dataDir, filename)))
}

func nodeFmt(node interface{}, fset *token.FileSet) string {
	var buf bytes.Buffer
	printer.Fprint(&buf, fset, node)
	return strings.ReplaceAll(strings.TrimSpace(buf.String()), "\n", "\n\t")
}

func synopsisFmt(s string) string {
	const n = 64
	if len(s) > n {
		// cut off excess text and go back to a word boundary
		s = s[0:n]
		if i := strings.LastIndexAny(s, "\t\n "); i >= 0 {
			s = s[0:i]
		}
		s = strings.TrimSpace(s) + " ..."
	}
	return "// " + strings.ReplaceAll(s, "\n", " ")
}

func indentFmt(indent, s string) string {
	end := ""
	if strings.HasSuffix(s, "\n") {
		end = "\n"
		s = s[:len(s)-1]
	}
	return indent + strings.ReplaceAll(s, "\n", "\n"+indent) + end
}

func isGoFile(fi fs.FileInfo) bool {
	name := fi.Name()
	return !fi.IsDir() &&
		len(name) > 0 && name[0] != '.' && // ignore .files
		filepath.Ext(name) == ".go"
}

type bundle struct {
	*Package
	FSet *token.FileSet
}

func test(t *testing.T, mode Mode) {
	// determine file filter
	filter := isGoFile
	if *files != "" {
		rx, err := regexp.Compile(*files)
		if err != nil {
			t.Fatal(err)
		}
		filter = func(fi fs.FileInfo) bool {
			return isGoFile(fi) && rx.MatchString(fi.Name())
		}
	}

	// get packages
	fset := token.NewFileSet()
	pkgs, err := parser.ParseDir(fset, dataDir, filter, parser.ParseComments)
	if err != nil {
		t.Fatal(err)
	}

	// test packages
	for _, pkg := range pkgs {
		t.Run(pkg.Name, func(t *testing.T) {
			importPath := dataDir + "/" + pkg.Name
			var files []*ast.File
			for _, f := range pkg.Files {
				files = append(files, f)
			}
			doc, err := NewFromFiles(fset, files, importPath, mode)
			if err != nil {
				t.Fatal(err)
			}

			// golden files always use / in filenames - canonicalize them
			for i, filename := range doc.Filenames {
				doc.Filenames[i] = filepath.ToSlash(filename)
			}

			// print documentation
			var buf bytes.Buffer
			if err := templateTxt.Execute(&buf, bundle{doc, fset}); err != nil {
				t.Fatal(err)
			}
			got := buf.Bytes()

			// update golden file if necessary
			golden := filepath.Join(dataDir, fmt.Sprintf("%s.%d.golden", pkg.Name, mode))
			if *update {
				err := os.WriteFile(golden, got, 0644)
				if err != nil {
					t.Fatal(err)
				}
			}

			// get golden file
			want, err := os.ReadFile(golden)
			if err != nil {
				t.Fatal(err)
			}

			// compare
			if !bytes.Equal(got, want) {
				t.Errorf("package %s\n\tgot:\n%s\n\twant:\n%s", pkg.Name, got, want)
			}
		})
	}
}

func Test(t *testing.T) {
	t.Run("default", func(t *testing.T) { test(t, 0) })
	t.Run("AllDecls", func(t *testing.T) { test(t, AllDecls) })
	t.Run("AllMethods", func(t *testing.T) { test(t, AllMethods) })
}

func TestAnchorID(t *testing.T) {
	const in = "Important Things 2 Know & Stuff"
	const want = "hdr-Important_Things_2_Know___Stuff"
	got := anchorID(in)
	if got != want {
		t.Errorf("anchorID(%q) = %q; want %q", in, got, want)
	}
}

const funcsTestFile = `
package funcs

func F() {}

type S1 struct {
	S2 // embedded, exported
	s3 // embedded, unexported
}

func NewS1() S1 {return S1{} }
func NewS1p() *S1 { return &S1{} }

func (S1) M1() {}
func (r S1) M2() {}
func(S1) m3() {} // unexported not shown
func (*S1) P1() {} // pointer receiver

type S2 int
func (S2) M3() {} // shown on S2

type s3 int
func (s3) M4() {} // shown on S1

type G1[T any] struct {
	*s3
}

func NewG1[T any]() G1[T] { return G1[T]{} }

func (G1[T]) MG1() {}
func (*G1[U]) MG2() {}

type G2[T, U any] struct {}

func NewG2[T, U any]() G2[T, U] { return G2[T, U]{} }

func (G2[T, U]) MG3() {}
func (*G2[A, B]) MG4() {}


`

var funcsPackage = &Package{
	Funcs: []*Func{{Name: "F"}},
	Types: []*Type{
		{
			Name:  "G1",
			Funcs: []*Func{{Name: "NewG1"}},
			Methods: []*Func{
				{Name: "M4", Recv: "G1", // TODO: synthesize a param for G1?
					Orig: "s3", Level: 1},
				{Name: "MG1", Recv: "G1[T]", Orig: "G1[T]", Level: 0},
				{Name: "MG2", Recv: "*G1[U]", Orig: "*G1[U]", Level: 0},
			},
		},
		{
			Name:  "G2",
			Funcs: []*Func{{Name: "NewG2"}},
			Methods: []*Func{
				{Name: "MG3", Recv: "G2[T, U]", Orig: "G2[T, U]", Level: 0},
				{Name: "MG4", Recv: "*G2[A, B]", Orig: "*G2[A, B]", Level: 0},
			},
		},
		{
			Name:  "S1",
			Funcs: []*Func{{Name: "NewS1"}, {Name: "NewS1p"}},
			Methods: []*Func{
				{Name: "M1", Recv: "S1", Orig: "S1", Level: 0},
				{Name: "M2", Recv: "S1", Orig: "S1", Level: 0},
				{Name: "M4", Recv: "S1", Orig: "s3", Level: 1},
				{Name: "P1", Recv: "*S1", Orig: "*S1", Level: 0},
			},
		},
		{
			Name: "S2",
			Methods: []*Func{
				{Name: "M3", Recv: "S2", Orig: "S2", Level: 0},
			},
		},
	},
}
@ -0,0 +1,550 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Extract example functions from file ASTs.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"path"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
|
||||
"golang.org/x/website/internal/backport/go/ast"
|
||||
"golang.org/x/website/internal/backport/go/token"
|
||||
)
|
||||
|
||||
// An Example represents an example function found in a test source file.
|
||||
type Example struct {
|
||||
Name string // name of the item being exemplified (including optional suffix)
|
||||
Suffix string // example suffix, without leading '_' (only populated by NewFromFiles)
|
||||
Doc string // example function doc string
|
||||
Code ast.Node
|
||||
Play *ast.File // a whole program version of the example
|
||||
Comments []*ast.CommentGroup
|
||||
Output string // expected output
|
||||
Unordered bool
|
||||
EmptyOutput bool // expect empty output
|
||||
Order int // original source code order
|
||||
}
|
||||
|
||||
// Examples returns the examples found in testFiles, sorted by Name field.
|
||||
// The Order fields record the order in which the examples were encountered.
|
||||
// The Suffix field is not populated when Examples is called directly, it is
|
||||
// only populated by NewFromFiles for examples it finds in _test.go files.
|
||||
//
|
||||
// Playable Examples must be in a package whose name ends in "_test".
|
||||
// An Example is "playable" (the Play field is non-nil) in either of these
|
||||
// circumstances:
|
||||
// - The example function is self-contained: the function references only
|
||||
// identifiers from other packages (or predeclared identifiers, such as
|
||||
// "int") and the test file does not include a dot import.
|
||||
// - The entire test file is the example: the file contains exactly one
|
||||
// example function, zero test, fuzz test, or benchmark function, and at
|
||||
// least one top-level function, type, variable, or constant declaration
|
||||
// other than the example function.
|
||||
func Examples(testFiles ...*ast.File) []*Example {
|
||||
var list []*Example
|
||||
for _, file := range testFiles {
|
||||
hasTests := false // file contains tests, fuzz test, or benchmarks
|
||||
numDecl := 0 // number of non-import declarations in the file
|
||||
var flist []*Example
|
||||
for _, decl := range file.Decls {
|
||||
if g, ok := decl.(*ast.GenDecl); ok && g.Tok != token.IMPORT {
|
||||
numDecl++
|
||||
continue
|
||||
}
|
||||
f, ok := decl.(*ast.FuncDecl)
|
||||
if !ok || f.Recv != nil {
|
||||
continue
|
||||
}
|
||||
numDecl++
|
||||
name := f.Name.Name
|
||||
if isTest(name, "Test") || isTest(name, "Benchmark") || isTest(name, "Fuzz") {
|
||||
hasTests = true
|
||||
continue
|
||||
}
|
||||
if !isTest(name, "Example") {
|
||||
continue
|
||||
}
|
||||
if params := f.Type.Params; len(params.List) != 0 {
|
||||
continue // function has params; not a valid example
|
||||
}
|
||||
if f.Body == nil { // ast.File.Body nil dereference (see issue 28044)
|
||||
continue
|
||||
}
|
||||
var doc string
|
||||
if f.Doc != nil {
|
||||
doc = f.Doc.Text()
|
||||
}
|
||||
output, unordered, hasOutput := exampleOutput(f.Body, file.Comments)
|
||||
flist = append(flist, &Example{
|
||||
Name: name[len("Example"):],
|
||||
Doc: doc,
|
||||
Code: f.Body,
|
||||
Play: playExample(file, f),
|
||||
Comments: file.Comments,
|
||||
Output: output,
|
||||
Unordered: unordered,
|
||||
EmptyOutput: output == "" && hasOutput,
|
||||
Order: len(flist),
|
||||
})
|
||||
}
|
||||
if !hasTests && numDecl > 1 && len(flist) == 1 {
|
||||
// If this file only has one example function, some
|
||||
// other top-level declarations, and no tests or
|
||||
// benchmarks, use the whole file as the example.
|
||||
flist[0].Code = file
|
||||
flist[0].Play = playExampleFile(file)
|
||||
}
|
||||
list = append(list, flist...)
|
||||
}
|
||||
// sort by name
|
||||
sort.Slice(list, func(i, j int) bool {
|
||||
return list[i].Name < list[j].Name
|
||||
})
|
||||
return list
|
||||
}
|
||||
|
||||
var outputPrefix = regexp.MustCompile(`(?i)^[[:space:]]*(unordered )?output:`)
|
||||
|
||||
// Extracts the expected output and whether there was a valid output comment
|
||||
func exampleOutput(b *ast.BlockStmt, comments []*ast.CommentGroup) (output string, unordered, ok bool) {
|
||||
if _, last := lastComment(b, comments); last != nil {
|
||||
// test that it begins with the correct prefix
|
||||
text := last.Text()
|
||||
if loc := outputPrefix.FindStringSubmatchIndex(text); loc != nil {
|
||||
if loc[2] != -1 {
|
||||
unordered = true
|
||||
}
|
||||
text = text[loc[1]:]
|
||||
// Strip zero or more spaces followed by \n or a single space.
|
||||
text = strings.TrimLeft(text, " ")
|
||||
if len(text) > 0 && text[0] == '\n' {
|
||||
text = text[1:]
|
||||
}
|
||||
return text, unordered, true
|
||||
}
|
||||
}
|
||||
return "", false, false // no suitable comment found
|
||||
}
|
||||
|
||||
// isTest tells whether name looks like a test, example, fuzz test, or
|
||||
// benchmark. It is a Test (say) if there is a character after Test that is not
|
||||
// a lower-case letter. (We don't want Testiness.)
|
||||
func isTest(name, prefix string) bool {
|
||||
if !strings.HasPrefix(name, prefix) {
|
||||
return false
|
||||
}
|
||||
if len(name) == len(prefix) { // "Test" is ok
|
||||
return true
|
||||
}
|
||||
rune, _ := utf8.DecodeRuneInString(name[len(prefix):])
|
||||
return !unicode.IsLower(rune)
|
||||
}
|
||||
|
||||
// playExample synthesizes a new *ast.File based on the provided
|
||||
// file with the provided function body as the body of main.
|
||||
func playExample(file *ast.File, f *ast.FuncDecl) *ast.File {
|
||||
body := f.Body
|
||||
|
||||
if !strings.HasSuffix(file.Name.Name, "_test") {
|
||||
// We don't support examples that are part of the
|
||||
// greater package (yet).
|
||||
return nil
|
||||
}
|
||||
|
||||
// Collect top-level declarations in the file.
|
||||
topDecls := make(map[*ast.Object]ast.Decl)
|
||||
typMethods := make(map[string][]ast.Decl)
|
||||
|
||||
for _, decl := range file.Decls {
|
||||
switch d := decl.(type) {
|
||||
case *ast.FuncDecl:
|
||||
if d.Recv == nil {
|
||||
topDecls[d.Name.Obj] = d
|
||||
} else {
|
||||
if len(d.Recv.List) == 1 {
|
||||
t := d.Recv.List[0].Type
|
||||
tname, _ := baseTypeName(t)
|
||||
typMethods[tname] = append(typMethods[tname], d)
|
||||
}
|
||||
}
|
||||
case *ast.GenDecl:
|
||||
for _, spec := range d.Specs {
|
||||
switch s := spec.(type) {
|
||||
case *ast.TypeSpec:
|
||||
topDecls[s.Name.Obj] = d
|
||||
case *ast.ValueSpec:
|
||||
for _, name := range s.Names {
|
||||
topDecls[name.Obj] = d
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Find unresolved identifiers and uses of top-level declarations.
|
||||
unresolved := make(map[string]bool)
|
||||
var depDecls []ast.Decl
|
||||
hasDepDecls := make(map[ast.Decl]bool)
|
||||
|
||||
var inspectFunc func(ast.Node) bool
|
||||
inspectFunc = func(n ast.Node) bool {
|
||||
switch e := n.(type) {
|
||||
case *ast.Ident:
|
||||
if e.Obj == nil && e.Name != "_" {
|
||||
unresolved[e.Name] = true
|
||||
} else if d := topDecls[e.Obj]; d != nil {
|
||||
if !hasDepDecls[d] {
|
||||
hasDepDecls[d] = true
|
||||
depDecls = append(depDecls, d)
|
||||
}
|
||||
}
|
||||
return true
|
||||
case *ast.SelectorExpr:
|
||||
// For selector expressions, only inspect the left hand side.
|
||||
// (For an expression like fmt.Println, only add "fmt" to the
|
||||
// set of unresolved names, not "Println".)
|
||||
ast.Inspect(e.X, inspectFunc)
|
||||
return false
|
||||
case *ast.KeyValueExpr:
|
||||
// For key value expressions, only inspect the value
|
||||
// as the key should be resolved by the type of the
|
||||
// composite literal.
|
||||
ast.Inspect(e.Value, inspectFunc)
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
ast.Inspect(body, inspectFunc)
|
||||
for i := 0; i < len(depDecls); i++ {
|
||||
switch d := depDecls[i].(type) {
|
||||
case *ast.FuncDecl:
|
||||
// Inspect types of parameters and results. See #28492.
|
||||
if d.Type.Params != nil {
|
||||
for _, p := range d.Type.Params.List {
|
||||
ast.Inspect(p.Type, inspectFunc)
|
||||
}
|
||||
}
|
||||
if d.Type.Results != nil {
|
||||
for _, r := range d.Type.Results.List {
|
||||
ast.Inspect(r.Type, inspectFunc)
|
||||
}
|
||||
}
|
||||
|
||||
// Functions might not have a body. See #42706.
|
||||
if d.Body != nil {
|
||||
ast.Inspect(d.Body, inspectFunc)
|
||||
}
|
||||
case *ast.GenDecl:
|
||||
for _, spec := range d.Specs {
|
||||
switch s := spec.(type) {
|
||||
case *ast.TypeSpec:
|
||||
ast.Inspect(s.Type, inspectFunc)
|
||||
|
||||
depDecls = append(depDecls, typMethods[s.Name.Name]...)
|
||||
case *ast.ValueSpec:
|
||||
if s.Type != nil {
|
||||
ast.Inspect(s.Type, inspectFunc)
|
||||
}
|
||||
for _, val := range s.Values {
|
||||
ast.Inspect(val, inspectFunc)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Remove predeclared identifiers from unresolved list.
|
||||
for n := range unresolved {
|
||||
if predeclaredTypes[n] || predeclaredConstants[n] || predeclaredFuncs[n] {
|
||||
delete(unresolved, n)
|
||||
}
|
||||
}
|
||||
|
||||
// Use unresolved identifiers to determine the imports used by this
|
||||
// example. The heuristic assumes package names match base import
|
||||
// paths for imports w/o renames (should be good enough most of the time).
|
||||
namedImports := make(map[string]string) // [name]path
|
||||
var blankImports []ast.Spec // _ imports
|
||||
for _, s := range file.Imports {
|
||||
p, err := strconv.Unquote(s.Path.Value)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
if p == "syscall/js" {
|
||||
// We don't support examples that import syscall/js,
|
||||
// because the package syscall/js is not available in the playground.
|
||||
return nil
|
||||
}
|
||||
n := path.Base(p)
|
||||
if s.Name != nil {
|
||||
n = s.Name.Name
|
||||
switch n {
|
||||
case "_":
|
||||
blankImports = append(blankImports, s)
|
||||
continue
|
||||
case ".":
|
||||
// We can't resolve dot imports (yet).
|
||||
return nil
|
||||
}
|
||||
}
|
||||
if unresolved[n] {
|
||||
namedImports[n] = p
|
||||
delete(unresolved, n)
|
||||
}
|
||||
}
|
||||
|
||||
// If there are other unresolved identifiers, give up because this
|
||||
// synthesized file is not going to build.
|
||||
if len(unresolved) > 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Include documentation belonging to blank imports.
|
||||
var comments []*ast.CommentGroup
|
||||
for _, s := range blankImports {
|
||||
if c := s.(*ast.ImportSpec).Doc; c != nil {
|
||||
comments = append(comments, c)
|
||||
}
|
||||
}
|
||||
|
||||
// Include comments that are inside the function body.
|
||||
for _, c := range file.Comments {
|
||||
if body.Pos() <= c.Pos() && c.End() <= body.End() {
|
||||
comments = append(comments, c)
|
||||
}
|
||||
}
|
||||
|
||||
// Strip the "Output:" or "Unordered output:" comment and adjust body
|
||||
// end position.
|
||||
body, comments = stripOutputComment(body, comments)
|
||||
|
||||
// Include documentation belonging to dependent declarations.
|
||||
for _, d := range depDecls {
|
||||
switch d := d.(type) {
|
||||
case *ast.GenDecl:
|
||||
if d.Doc != nil {
|
||||
comments = append(comments, d.Doc)
|
||||
}
|
||||
case *ast.FuncDecl:
|
||||
if d.Doc != nil {
|
||||
comments = append(comments, d.Doc)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Synthesize import declaration.
|
||||
importDecl := &ast.GenDecl{
|
||||
Tok: token.IMPORT,
|
||||
Lparen: 1, // Need non-zero Lparen and Rparen so that printer
|
||||
Rparen: 1, // treats this as a factored import.
|
||||
}
|
||||
for n, p := range namedImports {
|
||||
s := &ast.ImportSpec{Path: &ast.BasicLit{Value: strconv.Quote(p)}}
|
||||
if path.Base(p) != n {
|
||||
s.Name = ast.NewIdent(n)
|
||||
}
|
||||
importDecl.Specs = append(importDecl.Specs, s)
|
||||
}
|
||||
importDecl.Specs = append(importDecl.Specs, blankImports...)
|
||||
|
||||
// Synthesize main function.
|
||||
funcDecl := &ast.FuncDecl{
|
||||
Name: ast.NewIdent("main"),
|
||||
Type: f.Type,
|
||||
Body: body,
|
||||
}
|
||||
|
||||
decls := make([]ast.Decl, 0, 2+len(depDecls))
|
||||
decls = append(decls, importDecl)
|
||||
decls = append(decls, depDecls...)
|
||||
decls = append(decls, funcDecl)
|
||||
|
||||
sort.Slice(decls, func(i, j int) bool {
|
||||
return decls[i].Pos() < decls[j].Pos()
|
||||
})
|
||||
|
||||
sort.Slice(comments, func(i, j int) bool {
|
||||
return comments[i].Pos() < comments[j].Pos()
|
||||
})
|
||||
|
||||
// Synthesize file.
|
||||
return &ast.File{
|
||||
Name: ast.NewIdent("main"),
|
||||
Decls: decls,
|
||||
Comments: comments,
|
||||
}
|
||||
}
|
||||
|
||||
// playExampleFile takes a whole file example and synthesizes a new *ast.File
|
||||
// such that the example is function main in package main.
|
||||
func playExampleFile(file *ast.File) *ast.File {
|
||||
// Strip copyright comment if present.
|
||||
comments := file.Comments
|
||||
if len(comments) > 0 && strings.HasPrefix(comments[0].Text(), "Copyright") {
|
||||
comments = comments[1:]
|
||||
}
|
||||
|
||||
// Copy declaration slice, rewriting the ExampleX function to main.
|
||||
var decls []ast.Decl
|
||||
for _, d := range file.Decls {
|
||||
if f, ok := d.(*ast.FuncDecl); ok && isTest(f.Name.Name, "Example") {
|
||||
// Copy the FuncDecl, as it may be used elsewhere.
|
||||
newF := *f
|
||||
newF.Name = ast.NewIdent("main")
|
||||
newF.Body, comments = stripOutputComment(f.Body, comments)
|
||||
d = &newF
|
||||
}
|
||||
decls = append(decls, d)
|
||||
}
|
||||
|
||||
// Copy the File, as it may be used elsewhere.
|
||||
f := *file
|
||||
f.Name = ast.NewIdent("main")
|
||||
f.Decls = decls
|
||||
f.Comments = comments
|
||||
return &f
|
||||
}
|
||||
|
||||
// stripOutputComment finds and removes the "Output:" or "Unordered output:"
|
||||
// comment from body and comments, and adjusts the body block's end position.
|
||||
func stripOutputComment(body *ast.BlockStmt, comments []*ast.CommentGroup) (*ast.BlockStmt, []*ast.CommentGroup) {
|
||||
// Do nothing if there is no "Output:" or "Unordered output:" comment.
|
||||
i, last := lastComment(body, comments)
|
||||
if last == nil || !outputPrefix.MatchString(last.Text()) {
|
||||
return body, comments
|
||||
}
|
||||
|
||||
// Copy body and comments, as the originals may be used elsewhere.
|
||||
newBody := &ast.BlockStmt{
|
||||
Lbrace: body.Lbrace,
|
||||
List: body.List,
|
||||
Rbrace: last.Pos(),
|
||||
}
|
||||
newComments := make([]*ast.CommentGroup, len(comments)-1)
|
||||
copy(newComments, comments[:i])
|
||||
copy(newComments[i:], comments[i+1:])
|
||||
return newBody, newComments
|
||||
}
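Illustrative sketch (not part of this commit): the observable effect of stripOutputComment is that doc.Examples reports the expected output in Example.Output while the synthesized play file no longer carries the comment. The sketch uses the upstream go/* import paths for brevity; the backported copies are built from the same Go 1.18 sources. The file name and sample source below are made up.

package main

import (
	"fmt"
	"go/doc"
	"go/parser"
	"go/token"
)

// Illustrative sketch only; not part of the backported sources.
const src = `package greet_test

import "fmt"

func ExampleHello() {
	fmt.Println("hi")
	// Output: hi
}
`

func main() {
	fset := token.NewFileSet()
	// Parse with comments so the example extractor can see the Output comment.
	f, err := parser.ParseFile(fset, "greet_test.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	ex := doc.Examples(f)[0]
	fmt.Printf("name=%q output=%q play=%v\n", ex.Name, ex.Output, ex.Play != nil)
	// Prints: name="Hello" output="hi\n" play=true
	// The "// Output: hi" comment has been stripped from the ex.Play file.
}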
|
||||
|
||||
// lastComment returns the last comment inside the provided block.
|
||||
func lastComment(b *ast.BlockStmt, c []*ast.CommentGroup) (i int, last *ast.CommentGroup) {
|
||||
if b == nil {
|
||||
return
|
||||
}
|
||||
pos, end := b.Pos(), b.End()
|
||||
for j, cg := range c {
|
||||
if cg.Pos() < pos {
|
||||
continue
|
||||
}
|
||||
if cg.End() > end {
|
||||
break
|
||||
}
|
||||
i, last = j, cg
|
||||
}
|
||||
return
|
||||
}

// classifyExamples classifies examples and assigns them to the Examples field
// of the relevant Func, Type, or Package that the example is associated with.
//
// The classification process is ambiguous in some cases:
//
//   - ExampleFoo_Bar matches a type named Foo_Bar
//     or a method named Foo.Bar.
//   - ExampleFoo_bar matches a type named Foo_bar
//     or Foo (with a "bar" suffix).
//
// Examples with malformed names are not associated with anything.
func classifyExamples(p *Package, examples []*Example) {
|
||||
if len(examples) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// Mapping of names for funcs, types, and methods to the example listing.
|
||||
ids := make(map[string]*[]*Example)
|
||||
ids[""] = &p.Examples // package-level examples have an empty name
|
||||
for _, f := range p.Funcs {
|
||||
if !token.IsExported(f.Name) {
|
||||
continue
|
||||
}
|
||||
ids[f.Name] = &f.Examples
|
||||
}
|
||||
for _, t := range p.Types {
|
||||
if !token.IsExported(t.Name) {
|
||||
continue
|
||||
}
|
||||
ids[t.Name] = &t.Examples
|
||||
for _, f := range t.Funcs {
|
||||
if !token.IsExported(f.Name) {
|
||||
continue
|
||||
}
|
||||
ids[f.Name] = &f.Examples
|
||||
}
|
||||
for _, m := range t.Methods {
|
||||
if !token.IsExported(m.Name) {
|
||||
continue
|
||||
}
|
||||
ids[strings.TrimPrefix(m.Recv, "*")+"_"+m.Name] = &m.Examples
|
||||
}
|
||||
}
|
||||
|
||||
// Group each example with the associated func, type, or method.
|
||||
for _, ex := range examples {
|
||||
// Consider all possible split points for the suffix
|
||||
// by starting at the end of string (no suffix case),
|
||||
// then trying all positions that contain a '_' character.
|
||||
//
|
||||
// An association is made on the first successful match.
|
||||
// Examples with malformed names that match nothing are skipped.
|
||||
for i := len(ex.Name); i >= 0; i = strings.LastIndexByte(ex.Name[:i], '_') {
|
||||
prefix, suffix, ok := splitExampleName(ex.Name, i)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
exs, ok := ids[prefix]
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
ex.Suffix = suffix
|
||||
*exs = append(*exs, ex)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Sort list of example according to the user-specified suffix name.
|
||||
for _, exs := range ids {
|
||||
sort.Slice((*exs), func(i, j int) bool {
|
||||
return (*exs)[i].Suffix < (*exs)[j].Suffix
|
||||
})
|
||||
}
|
||||
}

// splitExampleName attempts to split example name s at index i,
// and reports if that produces a valid split. The suffix may be
// absent. Otherwise, it must start with a lower-case letter and
// be preceded by '_'.
//
// One of i == len(s) or s[i] == '_' must be true.
func splitExampleName(s string, i int) (prefix, suffix string, ok bool) {
	if i == len(s) {
		return s, "", true
	}
	if i == len(s)-1 {
		return "", "", false
	}
	prefix, suffix = s[:i], s[i+1:]
	return prefix, suffix, isExampleSuffix(suffix)
}

func isExampleSuffix(s string) bool {
	r, size := utf8.DecodeRuneInString(s)
	return size > 0 && unicode.IsLower(r)
}
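A minimal standalone sketch (not part of the backport) of the splitting rule above: a trailing '_'-separated segment is treated as an example suffix only when it begins with a lower-case letter. splitExampleName and isExampleSuffix are unexported, so the check is re-implemented here for illustration, and for brevity only the last underscore is inspected, whereas classifyExamples tries every split point from right to left.

package main

import (
	"fmt"
	"strings"
	"unicode"
	"unicode/utf8"
)

// validSuffix mirrors isExampleSuffix: a suffix must start with a lower-case letter.
func validSuffix(s string) bool {
	r, size := utf8.DecodeRuneInString(s)
	return size > 0 && unicode.IsLower(r)
}

func main() {
	for _, name := range []string{"Foo", "Foo_bar", "Foo_Bar", "Foo_"} {
		i := strings.LastIndexByte(name, '_')
		switch {
		case i < 0:
			fmt.Printf("%-8s -> prefix %q, no suffix\n", name, name)
		case i < len(name)-1 && validSuffix(name[i+1:]):
			fmt.Printf("%-8s -> prefix %q, suffix %q\n", name, name[:i], name[i+1:])
		default:
			fmt.Printf("%-8s -> no valid split at the last underscore\n", name)
		}
	}
}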
@ -0,0 +1,749 @@
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package doc_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"golang.org/x/website/internal/backport/go/ast"
|
||||
"golang.org/x/website/internal/backport/go/doc"
|
||||
"golang.org/x/website/internal/backport/go/format"
|
||||
"golang.org/x/website/internal/backport/go/parser"
|
||||
"golang.org/x/website/internal/backport/go/token"
|
||||
"reflect"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
const exampleTestFile = `
|
||||
package foo_test
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"sort"
|
||||
"os/exec"
|
||||
)
|
||||
|
||||
func ExampleHello() {
|
||||
fmt.Println("Hello, world!")
|
||||
// Output: Hello, world!
|
||||
}
|
||||
|
||||
func ExampleImport() {
|
||||
out, err := exec.Command("date").Output()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
fmt.Printf("The date is %s\n", out)
|
||||
}
|
||||
|
||||
func ExampleKeyValue() {
|
||||
v := struct {
|
||||
a string
|
||||
b int
|
||||
}{
|
||||
a: "A",
|
||||
b: 1,
|
||||
}
|
||||
fmt.Print(v)
|
||||
// Output: a: "A", b: 1
|
||||
}
|
||||
|
||||
func ExampleKeyValueImport() {
|
||||
f := flag.Flag{
|
||||
Name: "play",
|
||||
}
|
||||
fmt.Print(f)
|
||||
// Output: Name: "play"
|
||||
}
|
||||
|
||||
var keyValueTopDecl = struct {
|
||||
a string
|
||||
b int
|
||||
}{
|
||||
a: "B",
|
||||
b: 2,
|
||||
}
|
||||
|
||||
func ExampleKeyValueTopDecl() {
|
||||
fmt.Print(keyValueTopDecl)
|
||||
// Output: a: "B", b: 2
|
||||
}
|
||||
|
||||
// Person represents a person by name and age.
|
||||
type Person struct {
|
||||
Name string
|
||||
Age int
|
||||
}
|
||||
|
||||
// String returns a string representation of the Person.
|
||||
func (p Person) String() string {
|
||||
return fmt.Sprintf("%s: %d", p.Name, p.Age)
|
||||
}
|
||||
|
||||
// ByAge implements sort.Interface for []Person based on
|
||||
// the Age field.
|
||||
type ByAge []Person
|
||||
|
||||
// Len returns the number of elements in ByAge.
|
||||
func (a (ByAge)) Len() int { return len(a) }
|
||||
|
||||
// Swap swaps the elements in ByAge.
|
||||
func (a ByAge) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
|
||||
func (a ByAge) Less(i, j int) bool { return a[i].Age < a[j].Age }
|
||||
|
||||
// people is the array of Person
|
||||
var people = []Person{
|
||||
{"Bob", 31},
|
||||
{"John", 42},
|
||||
{"Michael", 17},
|
||||
{"Jenny", 26},
|
||||
}
|
||||
|
||||
func ExampleSort() {
|
||||
fmt.Println(people)
|
||||
sort.Sort(ByAge(people))
|
||||
fmt.Println(people)
|
||||
// Output:
|
||||
// [Bob: 31 John: 42 Michael: 17 Jenny: 26]
|
||||
// [Michael: 17 Jenny: 26 Bob: 31 John: 42]
|
||||
}
|
||||
`
|
||||
|
||||
var exampleTestCases = []struct {
|
||||
Name, Play, Output string
|
||||
}{
|
||||
{
|
||||
Name: "Hello",
|
||||
Play: exampleHelloPlay,
|
||||
Output: "Hello, world!\n",
|
||||
},
|
||||
{
|
||||
Name: "Import",
|
||||
Play: exampleImportPlay,
|
||||
},
|
||||
{
|
||||
Name: "KeyValue",
|
||||
Play: exampleKeyValuePlay,
|
||||
Output: "a: \"A\", b: 1\n",
|
||||
},
|
||||
{
|
||||
Name: "KeyValueImport",
|
||||
Play: exampleKeyValueImportPlay,
|
||||
Output: "Name: \"play\"\n",
|
||||
},
|
||||
{
|
||||
Name: "KeyValueTopDecl",
|
||||
Play: exampleKeyValueTopDeclPlay,
|
||||
Output: "a: \"B\", b: 2\n",
|
||||
},
|
||||
{
|
||||
Name: "Sort",
|
||||
Play: exampleSortPlay,
|
||||
Output: "[Bob: 31 John: 42 Michael: 17 Jenny: 26]\n[Michael: 17 Jenny: 26 Bob: 31 John: 42]\n",
|
||||
},
|
||||
}
|
||||
|
||||
const exampleHelloPlay = `package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
func main() {
|
||||
fmt.Println("Hello, world!")
|
||||
}
|
||||
`
|
||||
const exampleImportPlay = `package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os/exec"
|
||||
)
|
||||
|
||||
func main() {
|
||||
out, err := exec.Command("date").Output()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
fmt.Printf("The date is %s\n", out)
|
||||
}
|
||||
`
|
||||
|
||||
const exampleKeyValuePlay = `package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
func main() {
|
||||
v := struct {
|
||||
a string
|
||||
b int
|
||||
}{
|
||||
a: "A",
|
||||
b: 1,
|
||||
}
|
||||
fmt.Print(v)
|
||||
}
|
||||
`
|
||||
|
||||
const exampleKeyValueImportPlay = `package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
func main() {
|
||||
f := flag.Flag{
|
||||
Name: "play",
|
||||
}
|
||||
fmt.Print(f)
|
||||
}
|
||||
`
|
||||
|
||||
const exampleKeyValueTopDeclPlay = `package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
var keyValueTopDecl = struct {
|
||||
a string
|
||||
b int
|
||||
}{
|
||||
a: "B",
|
||||
b: 2,
|
||||
}
|
||||
|
||||
func main() {
|
||||
fmt.Print(keyValueTopDecl)
|
||||
}
|
||||
`
|
||||
|
||||
const exampleSortPlay = `package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sort"
|
||||
)
|
||||
|
||||
// Person represents a person by name and age.
|
||||
type Person struct {
|
||||
Name string
|
||||
Age int
|
||||
}
|
||||
|
||||
// String returns a string representation of the Person.
|
||||
func (p Person) String() string {
|
||||
return fmt.Sprintf("%s: %d", p.Name, p.Age)
|
||||
}
|
||||
|
||||
// ByAge implements sort.Interface for []Person based on
|
||||
// the Age field.
|
||||
type ByAge []Person
|
||||
|
||||
// Len returns the number of elements in ByAge.
|
||||
func (a ByAge) Len() int { return len(a) }
|
||||
|
||||
// Swap swaps the elements in ByAge.
|
||||
func (a ByAge) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
|
||||
func (a ByAge) Less(i, j int) bool { return a[i].Age < a[j].Age }
|
||||
|
||||
// people is the array of Person
|
||||
var people = []Person{
|
||||
{"Bob", 31},
|
||||
{"John", 42},
|
||||
{"Michael", 17},
|
||||
{"Jenny", 26},
|
||||
}
|
||||
|
||||
func main() {
|
||||
fmt.Println(people)
|
||||
sort.Sort(ByAge(people))
|
||||
fmt.Println(people)
|
||||
}
|
||||
`
|
||||
|
||||
func TestExamples(t *testing.T) {
|
||||
fset := token.NewFileSet()
|
||||
file, err := parser.ParseFile(fset, "test.go", strings.NewReader(exampleTestFile), parser.ParseComments)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
for i, e := range doc.Examples(file) {
|
||||
c := exampleTestCases[i]
|
||||
if e.Name != c.Name {
|
||||
t.Errorf("got Name == %q, want %q", e.Name, c.Name)
|
||||
}
|
||||
if w := c.Play; w != "" {
|
||||
g := formatFile(t, fset, e.Play)
|
||||
if g != w {
|
||||
t.Errorf("%s: got Play == %q, want %q", c.Name, g, w)
|
||||
}
|
||||
}
|
||||
if g, w := e.Output, c.Output; g != w {
|
||||
t.Errorf("%s: got Output == %q, want %q", c.Name, g, w)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const exampleWholeFile = `package foo_test
|
||||
|
||||
type X int
|
||||
|
||||
func (X) Foo() {
|
||||
}
|
||||
|
||||
func (X) TestBlah() {
|
||||
}
|
||||
|
||||
func (X) BenchmarkFoo() {
|
||||
}
|
||||
|
||||
func (X) FuzzFoo() {
|
||||
}
|
||||
|
||||
func Example() {
|
||||
fmt.Println("Hello, world!")
|
||||
// Output: Hello, world!
|
||||
}
|
||||
`
|
||||
|
||||
const exampleWholeFileOutput = `package main
|
||||
|
||||
type X int
|
||||
|
||||
func (X) Foo() {
|
||||
}
|
||||
|
||||
func (X) TestBlah() {
|
||||
}
|
||||
|
||||
func (X) BenchmarkFoo() {
|
||||
}
|
||||
|
||||
func (X) FuzzFoo() {
|
||||
}
|
||||
|
||||
func main() {
|
||||
fmt.Println("Hello, world!")
|
||||
}
|
||||
`
|
||||
|
||||
const exampleWholeFileFunction = `package foo_test
|
||||
|
||||
func Foo(x int) {
|
||||
}
|
||||
|
||||
func Example() {
|
||||
fmt.Println("Hello, world!")
|
||||
// Output: Hello, world!
|
||||
}
|
||||
`
|
||||
|
||||
const exampleWholeFileFunctionOutput = `package main
|
||||
|
||||
func Foo(x int) {
|
||||
}
|
||||
|
||||
func main() {
|
||||
fmt.Println("Hello, world!")
|
||||
}
|
||||
`
|
||||
|
||||
const exampleWholeFileExternalFunction = `package foo_test
|
||||
|
||||
func foo(int)
|
||||
|
||||
func Example() {
|
||||
foo(42)
|
||||
// Output:
|
||||
}
|
||||
`
|
||||
|
||||
const exampleWholeFileExternalFunctionOutput = `package main
|
||||
|
||||
func foo(int)
|
||||
|
||||
func main() {
|
||||
foo(42)
|
||||
}
|
||||
`
|
||||
|
||||
var exampleWholeFileTestCases = []struct {
|
||||
Title, Source, Play, Output string
|
||||
}{
|
||||
{
|
||||
"Methods",
|
||||
exampleWholeFile,
|
||||
exampleWholeFileOutput,
|
||||
"Hello, world!\n",
|
||||
},
|
||||
{
|
||||
"Function",
|
||||
exampleWholeFileFunction,
|
||||
exampleWholeFileFunctionOutput,
|
||||
"Hello, world!\n",
|
||||
},
|
||||
{
|
||||
"ExternalFunction",
|
||||
exampleWholeFileExternalFunction,
|
||||
exampleWholeFileExternalFunctionOutput,
|
||||
"",
|
||||
},
|
||||
}
|
||||
|
||||
func TestExamplesWholeFile(t *testing.T) {
|
||||
for _, c := range exampleWholeFileTestCases {
|
||||
fset := token.NewFileSet()
|
||||
file, err := parser.ParseFile(fset, "test.go", strings.NewReader(c.Source), parser.ParseComments)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
es := doc.Examples(file)
|
||||
if len(es) != 1 {
|
||||
t.Fatalf("%s: wrong number of examples; got %d want 1", c.Title, len(es))
|
||||
}
|
||||
e := es[0]
|
||||
if e.Name != "" {
|
||||
t.Errorf("%s: got Name == %q, want %q", c.Title, e.Name, "")
|
||||
}
|
||||
if g, w := formatFile(t, fset, e.Play), c.Play; g != w {
|
||||
t.Errorf("%s: got Play == %q, want %q", c.Title, g, w)
|
||||
}
|
||||
if g, w := e.Output, c.Output; g != w {
|
||||
t.Errorf("%s: got Output == %q, want %q", c.Title, g, w)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const exampleInspectSignature = `package foo_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
)
|
||||
|
||||
func getReader() io.Reader { return nil }
|
||||
|
||||
func do(b bytes.Reader) {}
|
||||
|
||||
func Example() {
|
||||
getReader()
|
||||
do()
|
||||
// Output:
|
||||
}
|
||||
|
||||
func ExampleIgnored() {
|
||||
}
|
||||
`
|
||||
|
||||
const exampleInspectSignatureOutput = `package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
)
|
||||
|
||||
func getReader() io.Reader { return nil }
|
||||
|
||||
func do(b bytes.Reader) {}
|
||||
|
||||
func main() {
|
||||
getReader()
|
||||
do()
|
||||
}
|
||||
`
|
||||
|
||||
func TestExampleInspectSignature(t *testing.T) {
|
||||
// Verify that "bytes" and "io" are imported. See issue #28492.
|
||||
fset := token.NewFileSet()
|
||||
file, err := parser.ParseFile(fset, "test.go", strings.NewReader(exampleInspectSignature), parser.ParseComments)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
es := doc.Examples(file)
|
||||
if len(es) != 2 {
|
||||
t.Fatalf("wrong number of examples; got %d want 2", len(es))
|
||||
}
|
||||
// We are interested in the first example only.
|
||||
e := es[0]
|
||||
if e.Name != "" {
|
||||
t.Errorf("got Name == %q, want %q", e.Name, "")
|
||||
}
|
||||
if g, w := formatFile(t, fset, e.Play), exampleInspectSignatureOutput; g != w {
|
||||
t.Errorf("got Play == %q, want %q", g, w)
|
||||
}
|
||||
if g, w := e.Output, ""; g != w {
|
||||
t.Errorf("got Output == %q, want %q", g, w)
|
||||
}
|
||||
}
|
||||
|
||||
const exampleEmpty = `
|
||||
package p
|
||||
func Example() {}
|
||||
func Example_a()
|
||||
`
|
||||
|
||||
const exampleEmptyOutput = `package main
|
||||
|
||||
func main() {}
|
||||
func main()
|
||||
`
|
||||
|
||||
func TestExampleEmpty(t *testing.T) {
|
||||
fset := token.NewFileSet()
|
||||
file, err := parser.ParseFile(fset, "test.go", strings.NewReader(exampleEmpty), parser.ParseComments)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
es := doc.Examples(file)
|
||||
if len(es) != 1 {
|
||||
t.Fatalf("wrong number of examples; got %d want 1", len(es))
|
||||
}
|
||||
e := es[0]
|
||||
if e.Name != "" {
|
||||
t.Errorf("got Name == %q, want %q", e.Name, "")
|
||||
}
|
||||
if g, w := formatFile(t, fset, e.Play), exampleEmptyOutput; g != w {
|
||||
t.Errorf("got Play == %q, want %q", g, w)
|
||||
}
|
||||
if g, w := e.Output, ""; g != w {
|
||||
t.Errorf("got Output == %q, want %q", g, w)
|
||||
}
|
||||
}
|
||||
|
||||
func formatFile(t *testing.T, fset *token.FileSet, n *ast.File) string {
|
||||
if n == nil {
|
||||
return "<nil>"
|
||||
}
|
||||
var buf bytes.Buffer
|
||||
if err := format.Node(&buf, fset, n); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
// This example illustrates how to use NewFromFiles
|
||||
// to compute package documentation with examples.
|
||||
func ExampleNewFromFiles() {
|
||||
// src and test are two source files that make up
|
||||
// a package whose documentation will be computed.
|
||||
const src = `
|
||||
// This is the package comment.
|
||||
package p
|
||||
|
||||
import "fmt"
|
||||
|
||||
// This comment is associated with the Greet function.
|
||||
func Greet(who string) {
|
||||
fmt.Printf("Hello, %s!\n", who)
|
||||
}
|
||||
`
|
||||
const test = `
|
||||
package p_test
|
||||
|
||||
// This comment is associated with the ExampleGreet_world example.
|
||||
func ExampleGreet_world() {
|
||||
Greet("world")
|
||||
}
|
||||
`
|
||||
|
||||
// Create the AST by parsing src and test.
|
||||
fset := token.NewFileSet()
|
||||
files := []*ast.File{
|
||||
mustParse(fset, "src.go", src),
|
||||
mustParse(fset, "src_test.go", test),
|
||||
}
|
||||
|
||||
// Compute package documentation with examples.
|
||||
p, err := doc.NewFromFiles(fset, files, "example.com/p")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
fmt.Printf("package %s - %s", p.Name, p.Doc)
|
||||
fmt.Printf("func %s - %s", p.Funcs[0].Name, p.Funcs[0].Doc)
|
||||
fmt.Printf(" ⤷ example with suffix %q - %s", p.Funcs[0].Examples[0].Suffix, p.Funcs[0].Examples[0].Doc)
|
||||
|
||||
// Output:
|
||||
// package p - This is the package comment.
|
||||
// func Greet - This comment is associated with the Greet function.
|
||||
// ⤷ example with suffix "world" - This comment is associated with the ExampleGreet_world example.
|
||||
}
|
||||
|
||||
func TestClassifyExamples(t *testing.T) {
|
||||
const src = `
|
||||
package p
|
||||
|
||||
const Const1 = 0
|
||||
var Var1 = 0
|
||||
|
||||
type (
|
||||
Type1 int
|
||||
Type1_Foo int
|
||||
Type1_foo int
|
||||
type2 int
|
||||
|
||||
Embed struct { Type1 }
|
||||
Uembed struct { type2 }
|
||||
)
|
||||
|
||||
func Func1() {}
|
||||
func Func1_Foo() {}
|
||||
func Func1_foo() {}
|
||||
func func2() {}
|
||||
|
||||
func (Type1) Func1() {}
|
||||
func (Type1) Func1_Foo() {}
|
||||
func (Type1) Func1_foo() {}
|
||||
func (Type1) func2() {}
|
||||
|
||||
func (type2) Func1() {}
|
||||
|
||||
type (
|
||||
Conflict int
|
||||
Conflict_Conflict int
|
||||
Conflict_conflict int
|
||||
)
|
||||
|
||||
func (Conflict) Conflict() {}
|
||||
`
|
||||
const test = `
|
||||
package p_test
|
||||
|
||||
func ExampleConst1() {} // invalid - no support for consts and vars
|
||||
func ExampleVar1() {} // invalid - no support for consts and vars
|
||||
|
||||
func Example() {}
|
||||
func Example_() {} // invalid - suffix must start with a lower-case letter
|
||||
func Example_suffix() {}
|
||||
func Example_suffix_xX_X_x() {}
|
||||
func Example_世界() {} // invalid - suffix must start with a lower-case letter
|
||||
func Example_123() {} // invalid - suffix must start with a lower-case letter
|
||||
func Example_BadSuffix() {} // invalid - suffix must start with a lower-case letter
|
||||
|
||||
func ExampleType1() {}
|
||||
func ExampleType1_() {} // invalid - suffix must start with a lower-case letter
|
||||
func ExampleType1_suffix() {}
|
||||
func ExampleType1_BadSuffix() {} // invalid - suffix must start with a lower-case letter
|
||||
func ExampleType1_Foo() {}
|
||||
func ExampleType1_Foo_suffix() {}
|
||||
func ExampleType1_Foo_BadSuffix() {} // invalid - suffix must start with a lower-case letter
|
||||
func ExampleType1_foo() {}
|
||||
func ExampleType1_foo_suffix() {}
|
||||
func ExampleType1_foo_Suffix() {} // matches Type1, instead of Type1_foo
|
||||
func Exampletype2() {} // invalid - cannot match unexported
|
||||
|
||||
func ExampleFunc1() {}
|
||||
func ExampleFunc1_() {} // invalid - suffix must start with a lower-case letter
|
||||
func ExampleFunc1_suffix() {}
|
||||
func ExampleFunc1_BadSuffix() {} // invalid - suffix must start with a lower-case letter
|
||||
func ExampleFunc1_Foo() {}
|
||||
func ExampleFunc1_Foo_suffix() {}
|
||||
func ExampleFunc1_Foo_BadSuffix() {} // invalid - suffix must start with a lower-case letter
|
||||
func ExampleFunc1_foo() {}
|
||||
func ExampleFunc1_foo_suffix() {}
|
||||
func ExampleFunc1_foo_Suffix() {} // matches Func1, instead of Func1_foo
|
||||
func Examplefunc1() {} // invalid - cannot match unexported
|
||||
|
||||
func ExampleType1_Func1() {}
|
||||
func ExampleType1_Func1_() {} // invalid - suffix must start with a lower-case letter
|
||||
func ExampleType1_Func1_suffix() {}
|
||||
func ExampleType1_Func1_BadSuffix() {} // invalid - suffix must start with a lower-case letter
|
||||
func ExampleType1_Func1_Foo() {}
|
||||
func ExampleType1_Func1_Foo_suffix() {}
|
||||
func ExampleType1_Func1_Foo_BadSuffix() {} // invalid - suffix must start with a lower-case letter
|
||||
func ExampleType1_Func1_foo() {}
|
||||
func ExampleType1_Func1_foo_suffix() {}
|
||||
func ExampleType1_Func1_foo_Suffix() {} // matches Type1.Func1, instead of Type1.Func1_foo
|
||||
func ExampleType1_func2() {} // matches Type1, instead of Type1.func2
|
||||
|
||||
func ExampleEmbed_Func1() {} // invalid - no support for forwarded methods from embedding exported type
|
||||
func ExampleUembed_Func1() {} // methods from embedding unexported types are OK
|
||||
func ExampleUembed_Func1_suffix() {}
|
||||
|
||||
func ExampleConflict_Conflict() {} // ambiguous with either Conflict or Conflict_Conflict type
|
||||
func ExampleConflict_conflict() {} // ambiguous with either Conflict or Conflict_conflict type
|
||||
func ExampleConflict_Conflict_suffix() {} // ambiguous with either Conflict or Conflict_Conflict type
|
||||
func ExampleConflict_conflict_suffix() {} // ambiguous with either Conflict or Conflict_conflict type
|
||||
`
|
||||
|
||||
// Parse literal source code as a *doc.Package.
|
||||
fset := token.NewFileSet()
|
||||
files := []*ast.File{
|
||||
mustParse(fset, "src.go", src),
|
||||
mustParse(fset, "src_test.go", test),
|
||||
}
|
||||
p, err := doc.NewFromFiles(fset, files, "example.com/p")
|
||||
if err != nil {
|
||||
t.Fatalf("doc.NewFromFiles: %v", err)
|
||||
}
|
||||
|
||||
// Collect the association of examples to top-level identifiers.
|
||||
got := map[string][]string{}
|
||||
got[""] = exampleNames(p.Examples)
|
||||
for _, f := range p.Funcs {
|
||||
got[f.Name] = exampleNames(f.Examples)
|
||||
}
|
||||
for _, t := range p.Types {
|
||||
got[t.Name] = exampleNames(t.Examples)
|
||||
for _, f := range t.Funcs {
|
||||
got[f.Name] = exampleNames(f.Examples)
|
||||
}
|
||||
for _, m := range t.Methods {
|
||||
got[t.Name+"."+m.Name] = exampleNames(m.Examples)
|
||||
}
|
||||
}
|
||||
|
||||
want := map[string][]string{
|
||||
"": {"", "suffix", "suffix_xX_X_x"}, // Package-level examples.
|
||||
|
||||
"Type1": {"", "foo_Suffix", "func2", "suffix"},
|
||||
"Type1_Foo": {"", "suffix"},
|
||||
"Type1_foo": {"", "suffix"},
|
||||
|
||||
"Func1": {"", "foo_Suffix", "suffix"},
|
||||
"Func1_Foo": {"", "suffix"},
|
||||
"Func1_foo": {"", "suffix"},
|
||||
|
||||
"Type1.Func1": {"", "foo_Suffix", "suffix"},
|
||||
"Type1.Func1_Foo": {"", "suffix"},
|
||||
"Type1.Func1_foo": {"", "suffix"},
|
||||
|
||||
"Uembed.Func1": {"", "suffix"},
|
||||
|
||||
// These are implementation dependent due to the ambiguous parsing.
|
||||
"Conflict_Conflict": {"", "suffix"},
|
||||
"Conflict_conflict": {"", "suffix"},
|
||||
}
|
||||
|
||||
for id := range got {
|
||||
if !reflect.DeepEqual(got[id], want[id]) {
|
||||
t.Errorf("classification mismatch for %q:\ngot %q\nwant %q", id, got[id], want[id])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func exampleNames(exs []*doc.Example) (out []string) {
|
||||
for _, ex := range exs {
|
||||
out = append(out, ex.Suffix)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func mustParse(fset *token.FileSet, filename, src string) *ast.File {
|
||||
f, err := parser.ParseFile(fset, filename, src, parser.ParseComments)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return f
|
||||
}
@ -0,0 +1,324 @@
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file implements export filtering of an AST.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"golang.org/x/website/internal/backport/go/ast"
|
||||
"golang.org/x/website/internal/backport/go/token"
|
||||
)
|
||||
|
||||
// filterIdentList removes unexported names from list in place
|
||||
// and returns the resulting list.
|
||||
func filterIdentList(list []*ast.Ident) []*ast.Ident {
|
||||
j := 0
|
||||
for _, x := range list {
|
||||
if token.IsExported(x.Name) {
|
||||
list[j] = x
|
||||
j++
|
||||
}
|
||||
}
|
||||
return list[0:j]
|
||||
}
|
||||
|
||||
var underscore = ast.NewIdent("_")
|
||||
|
||||
func filterCompositeLit(lit *ast.CompositeLit, filter Filter, export bool) {
|
||||
n := len(lit.Elts)
|
||||
lit.Elts = filterExprList(lit.Elts, filter, export)
|
||||
if len(lit.Elts) < n {
|
||||
lit.Incomplete = true
|
||||
}
|
||||
}
|
||||
|
||||
func filterExprList(list []ast.Expr, filter Filter, export bool) []ast.Expr {
|
||||
j := 0
|
||||
for _, exp := range list {
|
||||
switch x := exp.(type) {
|
||||
case *ast.CompositeLit:
|
||||
filterCompositeLit(x, filter, export)
|
||||
case *ast.KeyValueExpr:
|
||||
if x, ok := x.Key.(*ast.Ident); ok && !filter(x.Name) {
|
||||
continue
|
||||
}
|
||||
if x, ok := x.Value.(*ast.CompositeLit); ok {
|
||||
filterCompositeLit(x, filter, export)
|
||||
}
|
||||
}
|
||||
list[j] = exp
|
||||
j++
|
||||
}
|
||||
return list[0:j]
|
||||
}
|
||||
|
||||
// updateIdentList replaces all unexported identifiers with underscore
|
||||
// and reports whether at least one exported name exists.
|
||||
func updateIdentList(list []*ast.Ident) (hasExported bool) {
|
||||
for i, x := range list {
|
||||
if token.IsExported(x.Name) {
|
||||
hasExported = true
|
||||
} else {
|
||||
list[i] = underscore
|
||||
}
|
||||
}
|
||||
return hasExported
|
||||
}
|
||||
|
||||
// hasExportedName reports whether list contains any exported names.
|
||||
func hasExportedName(list []*ast.Ident) bool {
|
||||
for _, x := range list {
|
||||
if x.IsExported() {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// removeAnonymousField removes anonymous fields named name from an interface.
|
||||
func removeAnonymousField(name string, ityp *ast.InterfaceType) {
|
||||
list := ityp.Methods.List // we know that ityp.Methods != nil
|
||||
j := 0
|
||||
for _, field := range list {
|
||||
keepField := true
|
||||
if n := len(field.Names); n == 0 {
|
||||
// anonymous field
|
||||
if fname, _ := baseTypeName(field.Type); fname == name {
|
||||
keepField = false
|
||||
}
|
||||
}
|
||||
if keepField {
|
||||
list[j] = field
|
||||
j++
|
||||
}
|
||||
}
|
||||
if j < len(list) {
|
||||
ityp.Incomplete = true
|
||||
}
|
||||
ityp.Methods.List = list[0:j]
|
||||
}
|
||||
|
||||
// filterFieldList removes unexported fields (field names) from the field list
|
||||
// in place and reports whether fields were removed. Anonymous fields are
|
||||
// recorded with the parent type. filterType is called with the types of
|
||||
// all remaining fields.
|
||||
func (r *reader) filterFieldList(parent *namedType, fields *ast.FieldList, ityp *ast.InterfaceType) (removedFields bool) {
|
||||
if fields == nil {
|
||||
return
|
||||
}
|
||||
list := fields.List
|
||||
j := 0
|
||||
for _, field := range list {
|
||||
keepField := false
|
||||
if n := len(field.Names); n == 0 {
|
||||
// anonymous field or embedded type or union element
|
||||
fname := r.recordAnonymousField(parent, field.Type)
|
||||
if fname != "" {
|
||||
if token.IsExported(fname) {
|
||||
keepField = true
|
||||
} else if ityp != nil && predeclaredTypes[fname] {
|
||||
// possibly an embedded predeclared type; keep it for now but
|
||||
// remember this interface so that it can be fixed if name is also
|
||||
// defined locally
|
||||
keepField = true
|
||||
r.remember(fname, ityp)
|
||||
}
|
||||
} else {
|
||||
// If we're operating on an interface, assume that this is an embedded
|
||||
// type or union element.
|
||||
//
|
||||
// TODO(rfindley): consider traversing into approximation/unions
|
||||
// elements to see if they are entirely unexported.
|
||||
keepField = ityp != nil
|
||||
}
|
||||
} else {
|
||||
field.Names = filterIdentList(field.Names)
|
||||
if len(field.Names) < n {
|
||||
removedFields = true
|
||||
}
|
||||
if len(field.Names) > 0 {
|
||||
keepField = true
|
||||
}
|
||||
}
|
||||
if keepField {
|
||||
r.filterType(nil, field.Type)
|
||||
list[j] = field
|
||||
j++
|
||||
}
|
||||
}
|
||||
if j < len(list) {
|
||||
removedFields = true
|
||||
}
|
||||
fields.List = list[0:j]
|
||||
return
|
||||
}
|
||||
|
||||
// filterParamList applies filterType to each parameter type in fields.
|
||||
func (r *reader) filterParamList(fields *ast.FieldList) {
|
||||
if fields != nil {
|
||||
for _, f := range fields.List {
|
||||
r.filterType(nil, f.Type)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// filterType strips any unexported struct fields or method types from typ
|
||||
// in place. If fields (or methods) have been removed, the corresponding
|
||||
// struct or interface type has the Incomplete field set to true.
|
||||
func (r *reader) filterType(parent *namedType, typ ast.Expr) {
|
||||
switch t := typ.(type) {
|
||||
case *ast.Ident:
|
||||
// nothing to do
|
||||
case *ast.ParenExpr:
|
||||
r.filterType(nil, t.X)
|
||||
case *ast.StarExpr: // possibly an embedded type literal
|
||||
r.filterType(nil, t.X)
|
||||
case *ast.UnaryExpr:
|
||||
if t.Op == token.TILDE { // approximation element
|
||||
r.filterType(nil, t.X)
|
||||
}
|
||||
case *ast.BinaryExpr:
|
||||
if t.Op == token.OR { // union
|
||||
r.filterType(nil, t.X)
|
||||
r.filterType(nil, t.Y)
|
||||
}
|
||||
case *ast.ArrayType:
|
||||
r.filterType(nil, t.Elt)
|
||||
case *ast.StructType:
|
||||
if r.filterFieldList(parent, t.Fields, nil) {
|
||||
t.Incomplete = true
|
||||
}
|
||||
case *ast.FuncType:
|
||||
r.filterParamList(t.TypeParams)
|
||||
r.filterParamList(t.Params)
|
||||
r.filterParamList(t.Results)
|
||||
case *ast.InterfaceType:
|
||||
if r.filterFieldList(parent, t.Methods, t) {
|
||||
t.Incomplete = true
|
||||
}
|
||||
case *ast.MapType:
|
||||
r.filterType(nil, t.Key)
|
||||
r.filterType(nil, t.Value)
|
||||
case *ast.ChanType:
|
||||
r.filterType(nil, t.Value)
|
||||
}
|
||||
}
|
||||
|
||||
func (r *reader) filterSpec(spec ast.Spec) bool {
|
||||
switch s := spec.(type) {
|
||||
case *ast.ImportSpec:
|
||||
// always keep imports so we can collect them
|
||||
return true
|
||||
case *ast.ValueSpec:
|
||||
s.Values = filterExprList(s.Values, token.IsExported, true)
|
||||
if len(s.Values) > 0 || s.Type == nil && len(s.Values) == 0 {
|
||||
// If there are values declared on RHS, just replace the unexported
|
||||
// identifiers on the LHS with underscore, so that it matches
|
||||
// the sequence of expression on the RHS.
|
||||
//
|
||||
// Similarly, if there are no type and values, then this expression
|
||||
// must be following an iota expression, where order matters.
|
||||
if updateIdentList(s.Names) {
|
||||
r.filterType(nil, s.Type)
|
||||
return true
|
||||
}
|
||||
} else {
|
||||
s.Names = filterIdentList(s.Names)
|
||||
if len(s.Names) > 0 {
|
||||
r.filterType(nil, s.Type)
|
||||
return true
|
||||
}
|
||||
}
|
||||
case *ast.TypeSpec:
|
||||
// Don't filter type parameters here, by analogy with function parameters
|
||||
// which are not filtered for top-level function declarations.
|
||||
if name := s.Name.Name; token.IsExported(name) {
|
||||
r.filterType(r.lookupType(s.Name.Name), s.Type)
|
||||
return true
|
||||
} else if IsPredeclared(name) {
|
||||
if r.shadowedPredecl == nil {
|
||||
r.shadowedPredecl = make(map[string]bool)
|
||||
}
|
||||
r.shadowedPredecl[name] = true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// copyConstType returns a copy of typ with position pos.
|
||||
// typ must be a valid constant type.
|
||||
// In practice, only (possibly qualified) identifiers are possible.
|
||||
func copyConstType(typ ast.Expr, pos token.Pos) ast.Expr {
|
||||
switch typ := typ.(type) {
|
||||
case *ast.Ident:
|
||||
return &ast.Ident{Name: typ.Name, NamePos: pos}
|
||||
case *ast.SelectorExpr:
|
||||
if id, ok := typ.X.(*ast.Ident); ok {
|
||||
// presumably a qualified identifier
|
||||
return &ast.SelectorExpr{
|
||||
Sel: ast.NewIdent(typ.Sel.Name),
|
||||
X: &ast.Ident{Name: id.Name, NamePos: pos},
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil // shouldn't happen, but be conservative and don't panic
|
||||
}
|
||||
|
||||
func (r *reader) filterSpecList(list []ast.Spec, tok token.Token) []ast.Spec {
|
||||
if tok == token.CONST {
|
||||
// Propagate any type information that would get lost otherwise
|
||||
// when unexported constants are filtered.
|
||||
var prevType ast.Expr
|
||||
for _, spec := range list {
|
||||
spec := spec.(*ast.ValueSpec)
|
||||
if spec.Type == nil && len(spec.Values) == 0 && prevType != nil {
|
||||
// provide current spec with an explicit type
|
||||
spec.Type = copyConstType(prevType, spec.Pos())
|
||||
}
|
||||
if hasExportedName(spec.Names) {
|
||||
// exported names are preserved so there's no need to propagate the type
|
||||
prevType = nil
|
||||
} else {
|
||||
prevType = spec.Type
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
j := 0
|
||||
for _, s := range list {
|
||||
if r.filterSpec(s) {
|
||||
list[j] = s
|
||||
j++
|
||||
}
|
||||
}
|
||||
return list[0:j]
|
||||
}
|
||||
|
||||
func (r *reader) filterDecl(decl ast.Decl) bool {
|
||||
switch d := decl.(type) {
|
||||
case *ast.GenDecl:
|
||||
d.Specs = r.filterSpecList(d.Specs, d.Tok)
|
||||
return len(d.Specs) > 0
|
||||
case *ast.FuncDecl:
|
||||
// ok to filter these methods early because any
|
||||
// conflicting method will be filtered here, too -
|
||||
// thus, removing these methods early will not lead
|
||||
// to the false removal of possible conflicts
|
||||
return token.IsExported(d.Name.Name)
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// fileExports removes unexported declarations from src in place.
|
||||
func (r *reader) fileExports(src *ast.File) {
|
||||
j := 0
|
||||
for _, d := range src.Decls {
|
||||
if r.filterDecl(d) {
|
||||
src.Decls[j] = d
|
||||
j++
|
||||
}
|
||||
}
|
||||
src.Decls = src.Decls[0:j]
|
||||
}
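Illustrative sketch (not part of this commit): fileExports is what removes unexported declarations from computed documentation unless AllDecls is requested. The upstream go/* import paths and the sample source are used only for demonstration; the backported packages behave the same way. A fresh AST is parsed for each call because computing docs without PreserveAST prunes the AST in place.

package main

import (
	"fmt"
	"go/ast"
	"go/doc"
	"go/parser"
	"go/token"
)

// Illustrative sketch only; not part of the backported sources.
const src = `package p

// Exported survives the default export filter.
func Exported() {}

// unexported is dropped unless doc.AllDecls is set.
func unexported() {}
`

func parse(fset *token.FileSet) *ast.File {
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	return f
}

func names(fns []*doc.Func) []string {
	var out []string
	for _, f := range fns {
		out = append(out, f.Name)
	}
	return out
}

func main() {
	fset := token.NewFileSet()

	def, err := doc.NewFromFiles(fset, []*ast.File{parse(fset)}, "example.com/p")
	if err != nil {
		panic(err)
	}
	all, err := doc.NewFromFiles(fset, []*ast.File{parse(fset)}, "example.com/p", doc.AllDecls)
	if err != nil {
		panic(err)
	}

	fmt.Println(names(def.Funcs)) // [Exported]
	fmt.Println(names(all.Funcs)) // [Exported unexported]
}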
@ -0,0 +1,106 @@
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package doc
|
||||
|
||||
import "golang.org/x/website/internal/backport/go/ast"
|
||||
|
||||
type Filter func(string) bool
|
||||
|
||||
func matchFields(fields *ast.FieldList, f Filter) bool {
|
||||
if fields != nil {
|
||||
for _, field := range fields.List {
|
||||
for _, name := range field.Names {
|
||||
if f(name.Name) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func matchDecl(d *ast.GenDecl, f Filter) bool {
|
||||
for _, d := range d.Specs {
|
||||
switch v := d.(type) {
|
||||
case *ast.ValueSpec:
|
||||
for _, name := range v.Names {
|
||||
if f(name.Name) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
case *ast.TypeSpec:
|
||||
if f(v.Name.Name) {
|
||||
return true
|
||||
}
|
||||
// We don't match ordinary parameters in filterFuncs, so by analogy don't
|
||||
// match type parameters here.
|
||||
switch t := v.Type.(type) {
|
||||
case *ast.StructType:
|
||||
if matchFields(t.Fields, f) {
|
||||
return true
|
||||
}
|
||||
case *ast.InterfaceType:
|
||||
if matchFields(t.Methods, f) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func filterValues(a []*Value, f Filter) []*Value {
|
||||
w := 0
|
||||
for _, vd := range a {
|
||||
if matchDecl(vd.Decl, f) {
|
||||
a[w] = vd
|
||||
w++
|
||||
}
|
||||
}
|
||||
return a[0:w]
|
||||
}
|
||||
|
||||
func filterFuncs(a []*Func, f Filter) []*Func {
|
||||
w := 0
|
||||
for _, fd := range a {
|
||||
if f(fd.Name) {
|
||||
a[w] = fd
|
||||
w++
|
||||
}
|
||||
}
|
||||
return a[0:w]
|
||||
}
|
||||
|
||||
func filterTypes(a []*Type, f Filter) []*Type {
|
||||
w := 0
|
||||
for _, td := range a {
|
||||
n := 0 // number of matches
|
||||
if matchDecl(td.Decl, f) {
|
||||
n = 1
|
||||
} else {
|
||||
// type name doesn't match, but we may have matching consts, vars, factories or methods
|
||||
td.Consts = filterValues(td.Consts, f)
|
||||
td.Vars = filterValues(td.Vars, f)
|
||||
td.Funcs = filterFuncs(td.Funcs, f)
|
||||
td.Methods = filterFuncs(td.Methods, f)
|
||||
n += len(td.Consts) + len(td.Vars) + len(td.Funcs) + len(td.Methods)
|
||||
}
|
||||
if n > 0 {
|
||||
a[w] = td
|
||||
w++
|
||||
}
|
||||
}
|
||||
return a[0:w]
|
||||
}

// Filter eliminates documentation for names that don't pass through the filter f.
// TODO(gri): Recognize "Type.Method" as a name.
func (p *Package) Filter(f Filter) {
	p.Consts = filterValues(p.Consts, f)
	p.Vars = filterValues(p.Vars, f)
	p.Types = filterTypes(p.Types, f)
	p.Funcs = filterFuncs(p.Funcs, f)
	p.Doc = "" // don't show top-level package doc
}
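A short usage sketch for the Filter method above (not part of this commit): only documentation for names accepted by the callback survives, and the package comment is cleared. The sample package and the "Read" prefix are made up.

package main

import (
	"fmt"
	"go/ast"
	"go/doc"
	"go/parser"
	"go/token"
	"strings"
)

// Illustrative sketch only; not part of the backported sources.
const src = `// Package p is a demo.
package p

// ReadAll reads everything.
func ReadAll() {}

// WriteAll writes everything.
func WriteAll() {}
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	p, err := doc.NewFromFiles(fset, []*ast.File{f}, "example.com/p")
	if err != nil {
		panic(err)
	}

	// Keep only names beginning with "Read".
	p.Filter(func(name string) bool { return strings.HasPrefix(name, "Read") })

	for _, fn := range p.Funcs {
		fmt.Println(fn.Name) // ReadAll
	}
	fmt.Printf("package doc after Filter: %q\n", p.Doc) // ""
}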
@ -0,0 +1,953 @@
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/website/internal/backport/go/ast"
|
||||
"golang.org/x/website/internal/backport/go/token"
|
||||
)
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// function/method sets
|
||||
//
|
||||
// Internally, we treat functions like methods and collect them in method sets.
|
||||
|
||||
// A methodSet describes a set of methods. Entries where Decl == nil are conflict
|
||||
// entries (more than one method with the same name at the same embedding level).
|
||||
type methodSet map[string]*Func
|
||||
|
||||
// recvString returns a string representation of recv of the form "T", "*T",
|
||||
// "T[A, ...]", "*T[A, ...]" or "BADRECV" (if not a proper receiver type).
|
||||
func recvString(recv ast.Expr) string {
|
||||
switch t := recv.(type) {
|
||||
case *ast.Ident:
|
||||
return t.Name
|
||||
case *ast.StarExpr:
|
||||
return "*" + recvString(t.X)
|
||||
case *ast.IndexExpr:
|
||||
// Generic type with one parameter.
|
||||
return fmt.Sprintf("%s[%s]", recvString(t.X), recvParam(t.Index))
|
||||
case *ast.IndexListExpr:
|
||||
// Generic type with multiple parameters.
|
||||
if len(t.Indices) > 0 {
|
||||
var b strings.Builder
|
||||
b.WriteString(recvString(t.X))
|
||||
b.WriteByte('[')
|
||||
b.WriteString(recvParam(t.Indices[0]))
|
||||
for _, e := range t.Indices[1:] {
|
||||
b.WriteString(", ")
|
||||
b.WriteString(recvParam(e))
|
||||
}
|
||||
b.WriteByte(']')
|
||||
return b.String()
|
||||
}
|
||||
}
|
||||
return "BADRECV"
|
||||
}
|
||||
|
||||
func recvParam(p ast.Expr) string {
|
||||
if id, ok := p.(*ast.Ident); ok {
|
||||
return id.Name
|
||||
}
|
||||
return "BADPARAM"
|
||||
}
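Illustrative sketch (not part of this commit): the receiver strings built by recvString and recvParam surface through the public API as Func.Recv, including the type-parameter form for generic methods. Parsing the sample source needs a Go 1.18+ toolchain; the upstream import paths are used for brevity.

package main

import (
	"fmt"
	"go/ast"
	"go/doc"
	"go/parser"
	"go/token"
)

// Illustrative sketch only; not part of the backported sources.
const src = `package p

// List is a generic container.
type List[T any] struct{ items []T }

// Len reports the number of stored elements.
func (l *List[T]) Len() int { return len(l.items) }
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	p, err := doc.NewFromFiles(fset, []*ast.File{f}, "example.com/p")
	if err != nil {
		panic(err)
	}
	m := p.Types[0].Methods[0]
	fmt.Println(m.Name, m.Recv) // Len *List[T]
}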
|
||||
|
||||
// set creates the corresponding Func for f and adds it to mset.
|
||||
// If there are multiple f's with the same name, set keeps the first
|
||||
// one with documentation; conflicts are ignored. The boolean
|
||||
// specifies whether to leave the AST untouched.
|
||||
func (mset methodSet) set(f *ast.FuncDecl, preserveAST bool) {
|
||||
name := f.Name.Name
|
||||
if g := mset[name]; g != nil && g.Doc != "" {
|
||||
// A function with the same name has already been registered;
|
||||
// since it has documentation, assume f is simply another
|
||||
// implementation and ignore it. This does not happen if the
|
||||
// caller is using go/build.ScanDir to determine the list of
|
||||
// files implementing a package.
|
||||
return
|
||||
}
|
||||
// function doesn't exist or has no documentation; use f
|
||||
recv := ""
|
||||
if f.Recv != nil {
|
||||
var typ ast.Expr
|
||||
// be careful in case of incorrect ASTs
|
||||
if list := f.Recv.List; len(list) == 1 {
|
||||
typ = list[0].Type
|
||||
}
|
||||
recv = recvString(typ)
|
||||
}
|
||||
mset[name] = &Func{
|
||||
Doc: f.Doc.Text(),
|
||||
Name: name,
|
||||
Decl: f,
|
||||
Recv: recv,
|
||||
Orig: recv,
|
||||
}
|
||||
if !preserveAST {
|
||||
f.Doc = nil // doc consumed - remove from AST
|
||||
}
|
||||
}
|
||||
|
||||
// add adds method m to the method set; m is ignored if the method set
|
||||
// already contains a method with the same name at the same or a higher
|
||||
// level than m.
|
||||
func (mset methodSet) add(m *Func) {
|
||||
old := mset[m.Name]
|
||||
if old == nil || m.Level < old.Level {
|
||||
mset[m.Name] = m
|
||||
return
|
||||
}
|
||||
if m.Level == old.Level {
|
||||
// conflict - mark it using a method with nil Decl
|
||||
mset[m.Name] = &Func{
|
||||
Name: m.Name,
|
||||
Level: m.Level,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Named types
|
||||
|
||||
// baseTypeName returns the name of the base type of x (or "")
|
||||
// and whether the type is imported or not.
|
||||
func baseTypeName(x ast.Expr) (name string, imported bool) {
|
||||
switch t := x.(type) {
|
||||
case *ast.Ident:
|
||||
return t.Name, false
|
||||
case *ast.IndexExpr:
|
||||
return baseTypeName(t.X)
|
||||
case *ast.IndexListExpr:
|
||||
return baseTypeName(t.X)
|
||||
case *ast.SelectorExpr:
|
||||
if _, ok := t.X.(*ast.Ident); ok {
|
||||
// only possible for qualified type names;
|
||||
// assume type is imported
|
||||
return t.Sel.Name, true
|
||||
}
|
||||
case *ast.ParenExpr:
|
||||
return baseTypeName(t.X)
|
||||
case *ast.StarExpr:
|
||||
return baseTypeName(t.X)
|
||||
}
|
||||
return "", false
|
||||
}
|
||||
|
||||
// An embeddedSet describes a set of embedded types.
|
||||
type embeddedSet map[*namedType]bool
|
||||
|
||||
// A namedType represents a named unqualified (package local, or possibly
|
||||
// predeclared) type. The namedType for a type name is always found via
|
||||
// reader.lookupType.
|
||||
type namedType struct {
|
||||
doc string // doc comment for type
|
||||
name string // type name
|
||||
decl *ast.GenDecl // nil if declaration hasn't been seen yet
|
||||
|
||||
isEmbedded bool // true if this type is embedded
|
||||
isStruct bool // true if this type is a struct
|
||||
embedded embeddedSet // true if the embedded type is a pointer
|
||||
|
||||
// associated declarations
|
||||
values []*Value // consts and vars
|
||||
funcs methodSet
|
||||
methods methodSet
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// AST reader
|
||||
|
||||
// reader accumulates documentation for a single package.
|
||||
// It modifies the AST: Comments (declaration documentation)
|
||||
// that have been collected by the reader are set to nil
|
||||
// in the respective AST nodes so that they are not printed
|
||||
// twice (once when printing the documentation and once when
|
||||
// printing the corresponding AST node).
|
||||
type reader struct {
|
||||
mode Mode
|
||||
|
||||
// package properties
|
||||
doc string // package documentation, if any
|
||||
filenames []string
|
||||
notes map[string][]*Note
|
||||
|
||||
// declarations
|
||||
imports map[string]int
|
||||
hasDotImp bool // if set, package contains a dot import
|
||||
values []*Value // consts and vars
|
||||
order int // sort order of const and var declarations (when we can't use a name)
|
||||
types map[string]*namedType
|
||||
funcs methodSet
|
||||
|
||||
// support for package-local shadowing of predeclared types
|
||||
shadowedPredecl map[string]bool
|
||||
fixmap map[string][]*ast.InterfaceType
|
||||
}
|
||||
|
||||
func (r *reader) isVisible(name string) bool {
|
||||
return r.mode&AllDecls != 0 || token.IsExported(name)
|
||||
}
|
||||
|
||||
// lookupType returns the base type with the given name.
|
||||
// If the base type has not been encountered yet, a new
|
||||
// type with the given name but no associated declaration
|
||||
// is added to the type map.
|
||||
func (r *reader) lookupType(name string) *namedType {
|
||||
if name == "" || name == "_" {
|
||||
return nil // no type docs for anonymous types
|
||||
}
|
||||
if typ, found := r.types[name]; found {
|
||||
return typ
|
||||
}
|
||||
// type not found - add one without declaration
|
||||
typ := &namedType{
|
||||
name: name,
|
||||
embedded: make(embeddedSet),
|
||||
funcs: make(methodSet),
|
||||
methods: make(methodSet),
|
||||
}
|
||||
r.types[name] = typ
|
||||
return typ
|
||||
}
|
||||
|
||||
// recordAnonymousField registers fieldType as the type of an
|
||||
// anonymous field in the parent type. If the field is imported
|
||||
// (qualified name) or the parent is nil, the field is ignored.
|
||||
// The function returns the field name.
|
||||
func (r *reader) recordAnonymousField(parent *namedType, fieldType ast.Expr) (fname string) {
|
||||
fname, imp := baseTypeName(fieldType)
|
||||
if parent == nil || imp {
|
||||
return
|
||||
}
|
||||
if ftype := r.lookupType(fname); ftype != nil {
|
||||
ftype.isEmbedded = true
|
||||
_, ptr := fieldType.(*ast.StarExpr)
|
||||
parent.embedded[ftype] = ptr
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (r *reader) readDoc(comment *ast.CommentGroup) {
|
||||
// By convention there should be only one package comment
|
||||
// but collect all of them if there are more than one.
|
||||
text := comment.Text()
|
||||
if r.doc == "" {
|
||||
r.doc = text
|
||||
return
|
||||
}
|
||||
r.doc += "\n" + text
|
||||
}
|
||||
|
||||
func (r *reader) remember(predecl string, typ *ast.InterfaceType) {
|
||||
if r.fixmap == nil {
|
||||
r.fixmap = make(map[string][]*ast.InterfaceType)
|
||||
}
|
||||
r.fixmap[predecl] = append(r.fixmap[predecl], typ)
|
||||
}
|
||||
|
||||
func specNames(specs []ast.Spec) []string {
|
||||
names := make([]string, 0, len(specs)) // reasonable estimate
|
||||
for _, s := range specs {
|
||||
// s guaranteed to be an *ast.ValueSpec by readValue
|
||||
for _, ident := range s.(*ast.ValueSpec).Names {
|
||||
names = append(names, ident.Name)
|
||||
}
|
||||
}
|
||||
return names
|
||||
}
|
||||
|
||||
// readValue processes a const or var declaration.
|
||||
func (r *reader) readValue(decl *ast.GenDecl) {
|
||||
// determine if decl should be associated with a type
|
||||
// Heuristic: For each typed entry, determine the type name, if any.
|
||||
// If there is exactly one type name that is sufficiently
|
||||
// frequent, associate the decl with the respective type.
|
||||
domName := ""
|
||||
domFreq := 0
|
||||
prev := ""
|
||||
n := 0
|
||||
for _, spec := range decl.Specs {
|
||||
s, ok := spec.(*ast.ValueSpec)
|
||||
if !ok {
|
||||
continue // should not happen, but be conservative
|
||||
}
|
||||
name := ""
|
||||
switch {
|
||||
case s.Type != nil:
|
||||
// a type is present; determine its name
|
||||
if n, imp := baseTypeName(s.Type); !imp {
|
||||
name = n
|
||||
}
|
||||
case decl.Tok == token.CONST && len(s.Values) == 0:
|
||||
// no type or value is present but we have a constant declaration;
|
||||
// use the previous type name (possibly the empty string)
|
||||
name = prev
|
||||
}
|
||||
if name != "" {
|
||||
// entry has a named type
|
||||
if domName != "" && domName != name {
|
||||
// more than one type name - do not associate
|
||||
// with any type
|
||||
domName = ""
|
||||
break
|
||||
}
|
||||
domName = name
|
||||
domFreq++
|
||||
}
|
||||
prev = name
|
||||
n++
|
||||
}
|
||||
|
||||
// nothing to do w/o a legal declaration
|
||||
if n == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// determine values list with which to associate the Value for this decl
|
||||
values := &r.values
|
||||
const threshold = 0.75
|
||||
if domName != "" && r.isVisible(domName) && domFreq >= int(float64(len(decl.Specs))*threshold) {
|
||||
// typed entries are sufficiently frequent
|
||||
if typ := r.lookupType(domName); typ != nil {
|
||||
values = &typ.values // associate with that type
|
||||
}
|
||||
}
|
||||
|
||||
*values = append(*values, &Value{
|
||||
Doc: decl.Doc.Text(),
|
||||
Names: specNames(decl.Specs),
|
||||
Decl: decl,
|
||||
order: r.order,
|
||||
})
|
||||
if r.mode&PreserveAST == 0 {
|
||||
decl.Doc = nil // doc consumed - remove from AST
|
||||
}
|
||||
// Note: It's important that the order used here is global because the cleanupTypes
|
||||
// methods may move values associated with types back into the global list. If the
|
||||
// order is list-specific, sorting is not deterministic because the same order value
|
||||
// may appear multiple times (was bug, found when fixing #16153).
|
||||
r.order++
|
||||
}
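Illustrative sketch (not part of this commit) of the association heuristic above: a const group whose entries share a single package-local type is attached to that type rather than listed under the package-level Consts. The sample type and names are made up.

package main

import (
	"fmt"
	"go/ast"
	"go/doc"
	"go/parser"
	"go/token"
)

// Illustrative sketch only; not part of the backported sources.
const src = `package p

// Color is a demo type.
type Color int

// The supported colors.
const (
	Red Color = iota
	Green
	Blue
)
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	p, err := doc.NewFromFiles(fset, []*ast.File{f}, "example.com/p")
	if err != nil {
		panic(err)
	}
	fmt.Println(len(p.Consts))              // 0: the group moved under Color
	fmt.Println(p.Types[0].Consts[0].Names) // [Red Green Blue]
}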
|
||||
|
||||
// fields returns a struct's fields or an interface's methods.
|
||||
func fields(typ ast.Expr) (list []*ast.Field, isStruct bool) {
|
||||
var fields *ast.FieldList
|
||||
switch t := typ.(type) {
|
||||
case *ast.StructType:
|
||||
fields = t.Fields
|
||||
isStruct = true
|
||||
case *ast.InterfaceType:
|
||||
fields = t.Methods
|
||||
}
|
||||
if fields != nil {
|
||||
list = fields.List
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// readType processes a type declaration.
|
||||
func (r *reader) readType(decl *ast.GenDecl, spec *ast.TypeSpec) {
|
||||
typ := r.lookupType(spec.Name.Name)
|
||||
if typ == nil {
|
||||
return // no name or blank name - ignore the type
|
||||
}
|
||||
|
||||
// A type should be added at most once, so typ.decl
|
||||
// should be nil - if it is not, simply overwrite it.
|
||||
typ.decl = decl
|
||||
|
||||
// compute documentation
|
||||
doc := spec.Doc
|
||||
if doc == nil {
|
||||
// no doc associated with the spec, use the declaration doc, if any
|
||||
doc = decl.Doc
|
||||
}
|
||||
if r.mode&PreserveAST == 0 {
|
||||
spec.Doc = nil // doc consumed - remove from AST
|
||||
decl.Doc = nil // doc consumed - remove from AST
|
||||
}
|
||||
typ.doc = doc.Text()
|
||||
|
||||
// record anonymous fields (they may contribute methods)
|
||||
// (some fields may have been recorded already when filtering
|
||||
// exports, but that's ok)
|
||||
var list []*ast.Field
|
||||
list, typ.isStruct = fields(spec.Type)
|
||||
for _, field := range list {
|
||||
if len(field.Names) == 0 {
|
||||
r.recordAnonymousField(typ, field.Type)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// isPredeclared reports whether n denotes a predeclared type.
|
||||
func (r *reader) isPredeclared(n string) bool {
|
||||
return predeclaredTypes[n] && r.types[n] == nil
|
||||
}
|
||||
|
||||
// readFunc processes a func or method declaration.
|
||||
func (r *reader) readFunc(fun *ast.FuncDecl) {
|
||||
// strip function body if requested.
|
||||
if r.mode&PreserveAST == 0 {
|
||||
fun.Body = nil
|
||||
}
|
||||
|
||||
// associate methods with the receiver type, if any
|
||||
if fun.Recv != nil {
|
||||
// method
|
||||
if len(fun.Recv.List) == 0 {
|
||||
// should not happen (incorrect AST); (See issue 17788)
|
||||
// don't show this method
|
||||
return
|
||||
}
|
||||
recvTypeName, imp := baseTypeName(fun.Recv.List[0].Type)
|
||||
if imp {
|
||||
// should not happen (incorrect AST);
|
||||
// don't show this method
|
||||
return
|
||||
}
|
||||
if typ := r.lookupType(recvTypeName); typ != nil {
|
||||
typ.methods.set(fun, r.mode&PreserveAST != 0)
|
||||
}
|
||||
// otherwise ignore the method
|
||||
// TODO(gri): There may be exported methods of non-exported types
|
||||
// that can be called because of exported values (consts, vars, or
|
||||
// function results) of that type. Could determine if that is the
|
||||
// case and then show those methods in an appropriate section.
|
||||
return
|
||||
}
|
||||
|
||||
// Associate factory functions with the first visible result type, as long as
|
||||
// others are predeclared types.
|
||||
if fun.Type.Results.NumFields() >= 1 {
|
||||
var typ *namedType // type to associate the function with
|
||||
numResultTypes := 0
|
||||
for _, res := range fun.Type.Results.List {
|
||||
factoryType := res.Type
|
||||
if t, ok := factoryType.(*ast.ArrayType); ok {
|
||||
// We consider functions that return slices or arrays of type
|
||||
// T (or pointers to T) as factory functions of T.
|
||||
factoryType = t.Elt
|
||||
}
|
||||
if n, imp := baseTypeName(factoryType); !imp && r.isVisible(n) && !r.isPredeclared(n) {
|
||||
if lookupTypeParam(n, fun.Type.TypeParams) != nil {
|
||||
// Issue #49477: don't associate fun with its type parameter result.
|
||||
// A type parameter is not a defined type.
|
||||
continue
|
||||
}
|
||||
if t := r.lookupType(n); t != nil {
|
||||
typ = t
|
||||
numResultTypes++
|
||||
if numResultTypes > 1 {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// If there is exactly one result type,
|
||||
// associate the function with that type.
|
||||
if numResultTypes == 1 {
|
||||
typ.funcs.set(fun, r.mode&PreserveAST != 0)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// just an ordinary function
|
||||
r.funcs.set(fun, r.mode&PreserveAST != 0)
|
||||
}
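Illustrative sketch (not part of this commit): the factory rule above places a constructor returning a package-local type under that type's documentation instead of the package-level Funcs list. The Buffer type and NewBuffer name are made up.

package main

import (
	"fmt"
	"go/ast"
	"go/doc"
	"go/parser"
	"go/token"
)

// Illustrative sketch only; not part of the backported sources.
const src = `package p

// Buffer is a demo type.
type Buffer struct{ data []byte }

// NewBuffer returns an empty Buffer.
func NewBuffer() *Buffer { return &Buffer{} }
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	p, err := doc.NewFromFiles(fset, []*ast.File{f}, "example.com/p")
	if err != nil {
		panic(err)
	}
	fmt.Println(len(p.Funcs))             // 0
	fmt.Println(p.Types[0].Funcs[0].Name) // NewBuffer
}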

// lookupTypeParam searches for type parameters named name within the tparams
// field list, returning the relevant identifier if found, or nil if not.
func lookupTypeParam(name string, tparams *ast.FieldList) *ast.Ident {
	if tparams == nil {
		return nil
	}
	for _, field := range tparams.List {
		for _, id := range field.Names {
			if id.Name == name {
				return id
			}
		}
	}
	return nil
}
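Illustrative sketch (not part of this commit) of the type-parameter guard referenced above (issue #49477): a generic function whose result type is one of its own type parameters stays in the package-level Funcs list and no spurious type entry is created. Parsing needs Go 1.18+; names are made up.

package main

import (
	"fmt"
	"go/ast"
	"go/doc"
	"go/parser"
	"go/token"
)

// Illustrative sketch only; not part of the backported sources.
const src = `package p

// Identity returns its argument unchanged.
func Identity[T any](v T) T { return v }
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	p, err := doc.NewFromFiles(fset, []*ast.File{f}, "example.com/p")
	if err != nil {
		panic(err)
	}
	fmt.Println(p.Funcs[0].Name) // Identity
	fmt.Println(len(p.Types))    // 0: no type entry named "T"
}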

var (
	noteMarker    = `([A-Z][A-Z]+)\(([^)]+)\):?`                    // MARKER(uid), MARKER at least 2 chars, uid at least 1 char
	noteMarkerRx  = regexp.MustCompile(`^[ \t]*` + noteMarker)      // MARKER(uid) at text start
	noteCommentRx = regexp.MustCompile(`^/[/*][ \t]*` + noteMarker) // MARKER(uid) at comment start
)
|
||||
|
||||
// readNote collects a single note from a sequence of comments.
|
||||
func (r *reader) readNote(list []*ast.Comment) {
|
||||
text := (&ast.CommentGroup{List: list}).Text()
|
||||
if m := noteMarkerRx.FindStringSubmatchIndex(text); m != nil {
|
||||
// The note body starts after the marker.
|
||||
// We remove any formatting so that we don't
|
||||
// get spurious line breaks/indentation when
|
||||
// showing the TODO body.
|
||||
body := clean(text[m[1]:], keepNL)
|
||||
if body != "" {
|
||||
marker := text[m[2]:m[3]]
|
||||
r.notes[marker] = append(r.notes[marker], &Note{
|
||||
Pos: list[0].Pos(),
|
||||
End: list[len(list)-1].End(),
|
||||
UID: text[m[4]:m[5]],
|
||||
Body: body,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// readNotes extracts notes from comments.
|
||||
// A note must start at the beginning of a comment with "MARKER(uid):"
|
||||
// and is followed by the note body (e.g., "// BUG(gri): fix this").
|
||||
// The note ends at the end of the comment group or at the start of
|
||||
// another note in the same comment group, whichever comes first.
|
||||
func (r *reader) readNotes(comments []*ast.CommentGroup) {
|
||||
for _, group := range comments {
|
||||
i := -1 // comment index of most recent note start, valid if >= 0
|
||||
list := group.List
|
||||
for j, c := range list {
|
||||
if noteCommentRx.MatchString(c.Text) {
|
||||
if i >= 0 {
|
||||
r.readNote(list[i:j])
|
||||
}
|
||||
i = j
|
||||
}
|
||||
}
|
||||
if i >= 0 {
|
||||
r.readNote(list[i:])
|
||||
}
|
||||
}
|
||||
}
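Illustrative sketch (not part of this commit): MARKER(uid) comments collected by readNote and readNotes end up in Package.Notes, keyed by the marker. The uid and note text below are made up.

package main

import (
	"fmt"
	"go/ast"
	"go/doc"
	"go/parser"
	"go/token"
)

// Illustrative sketch only; not part of the backported sources.
const src = `package p

// BUG(rsc): This demo note is collected under the "BUG" marker.

// F does nothing.
func F() {}
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	p, err := doc.NewFromFiles(fset, []*ast.File{f}, "example.com/p")
	if err != nil {
		panic(err)
	}
	for _, n := range p.Notes["BUG"] {
		fmt.Printf("uid=%q body=%q\n", n.UID, n.Body) // uid="rsc", body holds the text after the marker
	}
}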
|
||||
|
||||
// readFile adds the AST for a source file to the reader.
|
||||
func (r *reader) readFile(src *ast.File) {
|
||||
// add package documentation
|
||||
if src.Doc != nil {
|
||||
r.readDoc(src.Doc)
|
||||
if r.mode&PreserveAST == 0 {
|
||||
src.Doc = nil // doc consumed - remove from AST
|
||||
}
|
||||
}
|
||||
|
||||
// add all declarations but for functions which are processed in a separate pass
|
||||
for _, decl := range src.Decls {
|
||||
switch d := decl.(type) {
|
||||
case *ast.GenDecl:
|
||||
switch d.Tok {
|
||||
case token.IMPORT:
|
||||
// imports are handled individually
|
||||
for _, spec := range d.Specs {
|
||||
if s, ok := spec.(*ast.ImportSpec); ok {
|
||||
if import_, err := strconv.Unquote(s.Path.Value); err == nil {
|
||||
r.imports[import_] = 1
|
||||
if s.Name != nil && s.Name.Name == "." {
|
||||
r.hasDotImp = true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
case token.CONST, token.VAR:
|
||||
// constants and variables are always handled as a group
|
||||
r.readValue(d)
|
||||
case token.TYPE:
|
||||
// types are handled individually
|
||||
if len(d.Specs) == 1 && !d.Lparen.IsValid() {
|
||||
// common case: single declaration w/o parentheses
|
||||
// (if a single declaration is parenthesized,
|
||||
// create a new fake declaration below, so that
|
||||
// go/doc type declarations always appear w/o
|
||||
// parentheses)
|
||||
if s, ok := d.Specs[0].(*ast.TypeSpec); ok {
|
||||
r.readType(d, s)
|
||||
}
|
||||
break
|
||||
}
|
||||
for _, spec := range d.Specs {
|
||||
if s, ok := spec.(*ast.TypeSpec); ok {
|
||||
// use an individual (possibly fake) declaration
|
||||
// for each type; this also ensures that each type
|
||||
// gets to (re-)use the declaration documentation
|
||||
// if there's none associated with the spec itself
|
||||
fake := &ast.GenDecl{
|
||||
Doc: d.Doc,
|
||||
// don't use the existing TokPos because it
|
||||
// will lead to the wrong selection range for
|
||||
// the fake declaration if there are more
|
||||
// than one type in the group (this affects
|
||||
// src/cmd/godoc/godoc.go's posLink_urlFunc)
|
||||
TokPos: s.Pos(),
|
||||
Tok: token.TYPE,
|
||||
Specs: []ast.Spec{s},
|
||||
}
|
||||
r.readType(fake, s)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// collect MARKER(...): annotations
|
||||
r.readNotes(src.Comments)
|
||||
if r.mode&PreserveAST == 0 {
|
||||
src.Comments = nil // consumed unassociated comments - remove from AST
|
||||
}
|
||||
}
|
||||
|
||||
func (r *reader) readPackage(pkg *ast.Package, mode Mode) {
|
||||
// initialize reader
|
||||
r.filenames = make([]string, len(pkg.Files))
|
||||
r.imports = make(map[string]int)
|
||||
r.mode = mode
|
||||
r.types = make(map[string]*namedType)
|
||||
r.funcs = make(methodSet)
|
||||
r.notes = make(map[string][]*Note)
|
||||
|
||||
// sort package files before reading them so that the
|
||||
// result does not depend on map iteration order
|
||||
i := 0
|
||||
for filename := range pkg.Files {
|
||||
r.filenames[i] = filename
|
||||
i++
|
||||
}
|
||||
sort.Strings(r.filenames)
|
||||
|
||||
// process files in sorted order
|
||||
for _, filename := range r.filenames {
|
||||
f := pkg.Files[filename]
|
||||
if mode&AllDecls == 0 {
|
||||
r.fileExports(f)
|
||||
}
|
||||
r.readFile(f)
|
||||
}
|
||||
|
||||
// process functions now that we have better type information
|
||||
for _, f := range pkg.Files {
|
||||
for _, decl := range f.Decls {
|
||||
if d, ok := decl.(*ast.FuncDecl); ok {
|
||||
r.readFunc(d)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Types
|
||||
|
||||
func customizeRecv(f *Func, recvTypeName string, embeddedIsPtr bool, level int) *Func {
|
||||
if f == nil || f.Decl == nil || f.Decl.Recv == nil || len(f.Decl.Recv.List) != 1 {
|
||||
return f // shouldn't happen, but be safe
|
||||
}
|
||||
|
||||
// copy existing receiver field and set new type
|
||||
newField := *f.Decl.Recv.List[0]
|
||||
origPos := newField.Type.Pos()
|
||||
_, origRecvIsPtr := newField.Type.(*ast.StarExpr)
|
||||
newIdent := &ast.Ident{NamePos: origPos, Name: recvTypeName}
|
||||
var typ ast.Expr = newIdent
|
||||
if !embeddedIsPtr && origRecvIsPtr {
|
||||
newIdent.NamePos++ // '*' is one character
|
||||
typ = &ast.StarExpr{Star: origPos, X: newIdent}
|
||||
}
|
||||
newField.Type = typ
|
||||
|
||||
// copy existing receiver field list and set new receiver field
|
||||
newFieldList := *f.Decl.Recv
|
||||
newFieldList.List = []*ast.Field{&newField}
|
||||
|
||||
// copy existing function declaration and set new receiver field list
|
||||
newFuncDecl := *f.Decl
|
||||
newFuncDecl.Recv = &newFieldList
|
||||
|
||||
// copy existing function documentation and set new declaration
|
||||
newF := *f
|
||||
newF.Decl = &newFuncDecl
|
||||
newF.Recv = recvString(typ)
|
||||
// the Orig field never changes
|
||||
newF.Level = level
|
||||
|
||||
return &newF
|
||||
}
|
||||
|
||||
// collectEmbeddedMethods collects the embedded methods of typ in mset.
|
||||
func (r *reader) collectEmbeddedMethods(mset methodSet, typ *namedType, recvTypeName string, embeddedIsPtr bool, level int, visited embeddedSet) {
|
||||
visited[typ] = true
|
||||
for embedded, isPtr := range typ.embedded {
|
||||
// Once an embedded type is embedded as a pointer type
|
||||
// all embedded types in those types are treated like
|
||||
// pointer types for the purpose of the receiver type
|
||||
// computation; i.e., embeddedIsPtr is sticky for this
|
||||
// embedding hierarchy.
|
||||
thisEmbeddedIsPtr := embeddedIsPtr || isPtr
|
||||
for _, m := range embedded.methods {
|
||||
// only top-level methods are embedded
|
||||
if m.Level == 0 {
|
||||
mset.add(customizeRecv(m, recvTypeName, thisEmbeddedIsPtr, level))
|
||||
}
|
||||
}
|
||||
if !visited[embedded] {
|
||||
r.collectEmbeddedMethods(mset, embedded, recvTypeName, thisEmbeddedIsPtr, level+1, visited)
|
||||
}
|
||||
}
|
||||
delete(visited, typ)
|
||||
}
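
A minimal sketch of the receiver rewriting done by customizeRecv and collectEmbeddedMethods (again using the upstream go/doc rather than this backport path; example.com/p is a made-up import path): with AllMethods set, a promoted method is reported on the embedding type with an adjusted receiver and a non-zero embedding level.

package main

import (
	"fmt"
	"go/ast"
	"go/doc"
	"go/parser"
	"go/token"
)

const src = `package p

type T4 struct{}

func (*T4) M() {}

// T5 embeds T4 by value, so the promoted M gets receiver *T5 at level 1.
type T5 struct{ T4 }
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	p, err := doc.NewFromFiles(fset, []*ast.File{file}, "example.com/p", doc.AllMethods)
	if err != nil {
		panic(err)
	}
	for _, t := range p.Types {
		for _, m := range t.Methods {
			fmt.Printf("%s.%s  recv=%s  level=%d\n", t.Name, m.Name, m.Recv, m.Level)
		}
	}
	// Expected lines: T4.M recv=*T4 level=0 and T5.M recv=*T5 level=1.
}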
|
||||
|
||||
// computeMethodSets determines the actual method sets for each type encountered.
|
||||
func (r *reader) computeMethodSets() {
|
||||
for _, t := range r.types {
|
||||
// collect embedded methods for t
|
||||
if t.isStruct {
|
||||
// struct
|
||||
r.collectEmbeddedMethods(t.methods, t, t.name, false, 1, make(embeddedSet))
|
||||
} else {
|
||||
// interface
|
||||
// TODO(gri) fix this
|
||||
}
|
||||
}
|
||||
|
||||
// For any predeclared names that are declared locally, don't treat them as
|
||||
// exported fields anymore.
|
||||
for predecl := range r.shadowedPredecl {
|
||||
for _, ityp := range r.fixmap[predecl] {
|
||||
removeAnonymousField(predecl, ityp)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// cleanupTypes removes the association of functions and methods with
|
||||
// types that have no declaration. Instead, these functions and methods
|
||||
// are shown at the package level. It also removes types with missing
|
||||
// declarations or which are not visible.
|
||||
func (r *reader) cleanupTypes() {
|
||||
for _, t := range r.types {
|
||||
visible := r.isVisible(t.name)
|
||||
predeclared := predeclaredTypes[t.name]
|
||||
|
||||
if t.decl == nil && (predeclared || visible && (t.isEmbedded || r.hasDotImp)) {
|
||||
// t.name is a predeclared type (and was not redeclared in this package),
|
||||
// or it was embedded somewhere but its declaration is missing (because
|
||||
// the AST is incomplete), or we have a dot-import (and all bets are off):
|
||||
// move any associated values, funcs, and methods back to the top-level so
|
||||
// that they are not lost.
|
||||
// 1) move values
|
||||
r.values = append(r.values, t.values...)
|
||||
// 2) move factory functions
|
||||
for name, f := range t.funcs {
|
||||
// in a correct AST, package-level function names
|
||||
// are all different - no need to check for conflicts
|
||||
r.funcs[name] = f
|
||||
}
|
||||
// 3) move methods
|
||||
if !predeclared {
|
||||
for name, m := range t.methods {
|
||||
// don't overwrite functions with the same name - drop them
|
||||
if _, found := r.funcs[name]; !found {
|
||||
r.funcs[name] = m
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// remove types w/o declaration or which are not visible
|
||||
if t.decl == nil || !visible {
|
||||
delete(r.types, t.name)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Sorting
|
||||
|
||||
type data struct {
|
||||
n int
|
||||
swap func(i, j int)
|
||||
less func(i, j int) bool
|
||||
}
|
||||
|
||||
func (d *data) Len() int { return d.n }
|
||||
func (d *data) Swap(i, j int) { d.swap(i, j) }
|
||||
func (d *data) Less(i, j int) bool { return d.less(i, j) }
|
||||
|
||||
// sortBy is a helper function for sorting
|
||||
func sortBy(less func(i, j int) bool, swap func(i, j int), n int) {
|
||||
sort.Sort(&data{n, swap, less})
|
||||
}
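
The data/sortBy helper above appears to predate sort.Slice; a hypothetical modern equivalent of the same closure-based sorting, shown only for comparison, would be:

package main

import (
	"fmt"
	"sort"
)

func main() {
	list := []string{"Vungrouped", "MaxInt", "Pi"}
	// Equivalent to sortBy(less, swap, len(list)) with the obvious less/swap closures.
	sort.Slice(list, func(i, j int) bool { return list[i] < list[j] })
	fmt.Println(list) // [MaxInt Pi Vungrouped]
}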
|
||||
|
||||
func sortedKeys(m map[string]int) []string {
|
||||
list := make([]string, len(m))
|
||||
i := 0
|
||||
for key := range m {
|
||||
list[i] = key
|
||||
i++
|
||||
}
|
||||
sort.Strings(list)
|
||||
return list
|
||||
}
|
||||
|
||||
// sortingName returns the name to use when sorting d into place.
|
||||
func sortingName(d *ast.GenDecl) string {
|
||||
if len(d.Specs) == 1 {
|
||||
if s, ok := d.Specs[0].(*ast.ValueSpec); ok {
|
||||
return s.Names[0].Name
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func sortedValues(m []*Value, tok token.Token) []*Value {
|
||||
list := make([]*Value, len(m)) // big enough in any case
|
||||
i := 0
|
||||
for _, val := range m {
|
||||
if val.Decl.Tok == tok {
|
||||
list[i] = val
|
||||
i++
|
||||
}
|
||||
}
|
||||
list = list[0:i]
|
||||
|
||||
sortBy(
|
||||
func(i, j int) bool {
|
||||
if ni, nj := sortingName(list[i].Decl), sortingName(list[j].Decl); ni != nj {
|
||||
return ni < nj
|
||||
}
|
||||
return list[i].order < list[j].order
|
||||
},
|
||||
func(i, j int) { list[i], list[j] = list[j], list[i] },
|
||||
len(list),
|
||||
)
|
||||
|
||||
return list
|
||||
}
|
||||
|
||||
func sortedTypes(m map[string]*namedType, allMethods bool) []*Type {
|
||||
list := make([]*Type, len(m))
|
||||
i := 0
|
||||
for _, t := range m {
|
||||
list[i] = &Type{
|
||||
Doc: t.doc,
|
||||
Name: t.name,
|
||||
Decl: t.decl,
|
||||
Consts: sortedValues(t.values, token.CONST),
|
||||
Vars: sortedValues(t.values, token.VAR),
|
||||
Funcs: sortedFuncs(t.funcs, true),
|
||||
Methods: sortedFuncs(t.methods, allMethods),
|
||||
}
|
||||
i++
|
||||
}
|
||||
|
||||
sortBy(
|
||||
func(i, j int) bool { return list[i].Name < list[j].Name },
|
||||
func(i, j int) { list[i], list[j] = list[j], list[i] },
|
||||
len(list),
|
||||
)
|
||||
|
||||
return list
|
||||
}
|
||||
|
||||
func removeStar(s string) string {
|
||||
if len(s) > 0 && s[0] == '*' {
|
||||
return s[1:]
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func sortedFuncs(m methodSet, allMethods bool) []*Func {
|
||||
list := make([]*Func, len(m))
|
||||
i := 0
|
||||
for _, m := range m {
|
||||
// determine which methods to include
|
||||
switch {
|
||||
case m.Decl == nil:
|
||||
// exclude conflict entry
|
||||
case allMethods, m.Level == 0, !token.IsExported(removeStar(m.Orig)):
|
||||
// forced inclusion, method not embedded, or method
|
||||
// embedded but original receiver type not exported
|
||||
list[i] = m
|
||||
i++
|
||||
}
|
||||
}
|
||||
list = list[0:i]
|
||||
sortBy(
|
||||
func(i, j int) bool { return list[i].Name < list[j].Name },
|
||||
func(i, j int) { list[i], list[j] = list[j], list[i] },
|
||||
len(list),
|
||||
)
|
||||
return list
|
||||
}
|
||||
|
||||
// noteBodies returns a list of note body strings given a list of notes.
|
||||
// This is only used to populate the deprecated Package.Bugs field.
|
||||
func noteBodies(notes []*Note) []string {
|
||||
var list []string
|
||||
for _, n := range notes {
|
||||
list = append(list, n.Body)
|
||||
}
|
||||
return list
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Predeclared identifiers
|
||||
|
||||
// IsPredeclared reports whether s is a predeclared identifier.
|
||||
func IsPredeclared(s string) bool {
|
||||
return predeclaredTypes[s] || predeclaredFuncs[s] || predeclaredConstants[s]
|
||||
}
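
A quick hedged check of the predeclared sets above, using the standard library's go/doc.IsPredeclared (same implementation upstream; requires a Go 1.18+ toolchain for "any" and "comparable" to be in the set):

package main

import (
	"fmt"
	"go/doc"
)

func main() {
	fmt.Println(doc.IsPredeclared("any"))        // true
	fmt.Println(doc.IsPredeclared("comparable")) // true
	fmt.Println(doc.IsPredeclared("Buffer"))     // false: not a predeclared identifier
}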
|
||||
|
||||
var predeclaredTypes = map[string]bool{
|
||||
"any": true,
|
||||
"bool": true,
|
||||
"byte": true,
|
||||
"comparable": true,
|
||||
"complex64": true,
|
||||
"complex128": true,
|
||||
"error": true,
|
||||
"float32": true,
|
||||
"float64": true,
|
||||
"int": true,
|
||||
"int8": true,
|
||||
"int16": true,
|
||||
"int32": true,
|
||||
"int64": true,
|
||||
"rune": true,
|
||||
"string": true,
|
||||
"uint": true,
|
||||
"uint8": true,
|
||||
"uint16": true,
|
||||
"uint32": true,
|
||||
"uint64": true,
|
||||
"uintptr": true,
|
||||
}
|
||||
|
||||
var predeclaredFuncs = map[string]bool{
|
||||
"append": true,
|
||||
"cap": true,
|
||||
"close": true,
|
||||
"complex": true,
|
||||
"copy": true,
|
||||
"delete": true,
|
||||
"imag": true,
|
||||
"len": true,
|
||||
"make": true,
|
||||
"new": true,
|
||||
"panic": true,
|
||||
"print": true,
|
||||
"println": true,
|
||||
"real": true,
|
||||
"recover": true,
|
||||
}
|
||||
|
||||
var predeclaredConstants = map[string]bool{
|
||||
"false": true,
|
||||
"iota": true,
|
||||
"nil": true,
|
||||
"true": true,
|
||||
}
|
|
@ -0,0 +1,81 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
// firstSentenceLen returns the length of the first sentence in s.
|
||||
// The sentence ends after the first period followed by space and
|
||||
// not preceded by exactly one uppercase letter.
|
||||
func firstSentenceLen(s string) int {
|
||||
var ppp, pp, p rune
|
||||
for i, q := range s {
|
||||
if q == '\n' || q == '\r' || q == '\t' {
|
||||
q = ' '
|
||||
}
|
||||
if q == ' ' && p == '.' && (!unicode.IsUpper(pp) || unicode.IsUpper(ppp)) {
|
||||
return i
|
||||
}
|
||||
if p == '。' || p == '．' {
|
||||
return i
|
||||
}
|
||||
ppp, pp, p = pp, p, q
|
||||
}
|
||||
return len(s)
|
||||
}
|
||||
|
||||
const (
|
||||
keepNL = 1 << iota
|
||||
)
|
||||
|
||||
// clean replaces each sequence of space, \n, \r, or \t characters
|
||||
// with a single space and removes any trailing and leading spaces.
|
||||
// If the keepNL flag is set, newline characters are passed through
|
||||
// instead of being changed to spaces.
|
||||
func clean(s string, flags int) string {
|
||||
var b []byte
|
||||
p := byte(' ')
|
||||
for i := 0; i < len(s); i++ {
|
||||
q := s[i]
|
||||
if (flags&keepNL) == 0 && q == '\n' || q == '\r' || q == '\t' {
|
||||
q = ' '
|
||||
}
|
||||
if q != ' ' || p != ' ' {
|
||||
b = append(b, q)
|
||||
p = q
|
||||
}
|
||||
}
|
||||
// remove trailing blank, if any
|
||||
if n := len(b); n > 0 && p == ' ' {
|
||||
b = b[0 : n-1]
|
||||
}
|
||||
return string(b)
|
||||
}
|
||||
|
||||
// Synopsis returns a cleaned version of the first sentence in s.
|
||||
// That sentence ends after the first period followed by space and
|
||||
// not preceded by exactly one uppercase letter. The result string
|
||||
// has no \n, \r, or \t characters and uses only single spaces between
|
||||
// words. If s starts with any of the IllegalPrefixes, the result
|
||||
// is the empty string.
|
||||
func Synopsis(s string) string {
|
||||
s = clean(s[0:firstSentenceLen(s)], 0)
|
||||
for _, prefix := range IllegalPrefixes {
|
||||
if strings.HasPrefix(strings.ToLower(s), prefix) {
|
||||
return ""
|
||||
}
|
||||
}
|
||||
s = convertQuotes(s)
|
||||
return s
|
||||
}
|
||||
|
||||
var IllegalPrefixes = []string{
|
||||
"copyright",
|
||||
"all rights",
|
||||
"author",
|
||||
}
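
This file is copied from the go1.18 sources, so the exported entry point should behave like the standard library's go/doc.Synopsis; a small standalone illustration (not part of the change) of the sentence rule and the IllegalPrefixes filter:

package main

import (
	"fmt"
	"go/doc"
)

func main() {
	// First sentence is kept; "T. S." does not end it because a single
	// uppercase letter before the period is treated as an initial.
	fmt.Printf("%q\n", doc.Synopsis("Package poems by T. S. Eliot. To rhyme..."))
	// "Package poems by T. S. Eliot."

	// A comment starting with one of the IllegalPrefixes yields an empty synopsis.
	fmt.Printf("%q\n", doc.Synopsis("Copyright 2012 Google, Inc. Package foo does bar."))
	// ""
}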
|
|
@ -0,0 +1,52 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package doc
|
||||
|
||||
import "testing"
|
||||
|
||||
var tests = []struct {
|
||||
txt string
|
||||
fsl int
|
||||
syn string
|
||||
}{
|
||||
{"", 0, ""},
|
||||
{"foo", 3, "foo"},
|
||||
{"foo.", 4, "foo."},
|
||||
{"foo.bar", 7, "foo.bar"},
|
||||
{" foo. ", 6, "foo."},
|
||||
{" foo\t bar.\n", 12, "foo bar."},
|
||||
{" foo\t bar.\n", 12, "foo bar."},
|
||||
{"a b\n\nc\r\rd\t\t", 12, "a b c d"},
|
||||
{"a b\n\nc\r\rd\t\t . BLA", 15, "a b c d ."},
|
||||
{"Package poems by T.S.Eliot. To rhyme...", 27, "Package poems by T.S.Eliot."},
|
||||
{"Package poems by T. S. Eliot. To rhyme...", 29, "Package poems by T. S. Eliot."},
|
||||
{"foo implements the foo ABI. The foo ABI is...", 27, "foo implements the foo ABI."},
|
||||
{"Package\nfoo. ..", 12, "Package foo."},
|
||||
{"P . Q.", 3, "P ."},
|
||||
{"P. Q. ", 8, "P. Q."},
|
||||
{"Package Καλημέρα κόσμε.", 36, "Package Καλημέρα κόσμε."},
|
||||
{"Package こんにちは 世界\n", 31, "Package こんにちは 世界"},
|
||||
{"Package こんにちは。世界", 26, "Package こんにちは。"},
|
||||
{"Package 안녕.世界", 17, "Package 안녕."},
|
||||
{"Package foo does bar.", 21, "Package foo does bar."},
|
||||
{"Copyright 2012 Google, Inc. Package foo does bar.", 27, ""},
|
||||
{"All Rights reserved. Package foo does bar.", 20, ""},
|
||||
{"All rights reserved. Package foo does bar.", 20, ""},
|
||||
{"Authors: foo@bar.com. Package foo does bar.", 21, ""},
|
||||
{"typically invoked as ``go tool asm'',", 37, "typically invoked as " + ulquo + "go tool asm" + urquo + ","},
|
||||
}
|
||||
|
||||
func TestSynopsis(t *testing.T) {
|
||||
for _, e := range tests {
|
||||
fsl := firstSentenceLen(e.txt)
|
||||
if fsl != e.fsl {
|
||||
t.Errorf("got fsl = %d; want %d for %q\n", fsl, e.fsl, e.txt)
|
||||
}
|
||||
syn := Synopsis(e.txt)
|
||||
if syn != e.syn {
|
||||
t.Errorf("got syn = %q; want %q for %q\n", syn, e.syn, e.txt)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,52 @@
|
|||
// comment 0 comment 1
|
||||
PACKAGE a
|
||||
|
||||
IMPORTPATH
|
||||
testdata/a
|
||||
|
||||
FILENAMES
|
||||
testdata/a0.go
|
||||
testdata/a1.go
|
||||
|
||||
BUGS .Bugs is now deprecated, please use .Notes instead
|
||||
bug0
|
||||
|
||||
bug1
|
||||
|
||||
|
||||
BUGS
|
||||
BUG(uid) bug0
|
||||
|
||||
BUG(uid) bug1
|
||||
|
||||
|
||||
NOTES
|
||||
NOTE(uid)
|
||||
|
||||
NOTE(foo) 1 of 4 - this is the first line of note 1
|
||||
- note 1 continues on this 2nd line
|
||||
- note 1 continues on this 3rd line
|
||||
|
||||
NOTE(foo) 2 of 4
|
||||
|
||||
NOTE(bar) 3 of 4
|
||||
|
||||
NOTE(bar) 4 of 4
|
||||
- this is the last line of note 4
|
||||
|
||||
NOTE(bam) This note which contains a (parenthesized) subphrase
|
||||
must appear in its entirety.
|
||||
|
||||
NOTE(xxx) The ':' after the marker and uid is optional.
|
||||
|
||||
|
||||
SECBUGS
|
||||
SECBUG(uid) sec hole 0
|
||||
need to fix asap
|
||||
|
||||
|
||||
TODOS
|
||||
TODO(uid) todo0
|
||||
|
||||
TODO(uid) todo1
|
||||
|
|
@ -0,0 +1,52 @@
|
|||
// comment 0 comment 1
|
||||
PACKAGE a
|
||||
|
||||
IMPORTPATH
|
||||
testdata/a
|
||||
|
||||
FILENAMES
|
||||
testdata/a0.go
|
||||
testdata/a1.go
|
||||
|
||||
BUGS .Bugs is now deprecated, please use .Notes instead
|
||||
bug0
|
||||
|
||||
bug1
|
||||
|
||||
|
||||
BUGS
|
||||
BUG(uid) bug0
|
||||
|
||||
BUG(uid) bug1
|
||||
|
||||
|
||||
NOTES
|
||||
NOTE(uid)
|
||||
|
||||
NOTE(foo) 1 of 4 - this is the first line of note 1
|
||||
- note 1 continues on this 2nd line
|
||||
- note 1 continues on this 3rd line
|
||||
|
||||
NOTE(foo) 2 of 4
|
||||
|
||||
NOTE(bar) 3 of 4
|
||||
|
||||
NOTE(bar) 4 of 4
|
||||
- this is the last line of note 4
|
||||
|
||||
NOTE(bam) This note which contains a (parenthesized) subphrase
|
||||
must appear in its entirety.
|
||||
|
||||
NOTE(xxx) The ':' after the marker and uid is optional.
|
||||
|
||||
|
||||
SECBUGS
|
||||
SECBUG(uid) sec hole 0
|
||||
need to fix asap
|
||||
|
||||
|
||||
TODOS
|
||||
TODO(uid) todo0
|
||||
|
||||
TODO(uid) todo1
|
||||
|
|
@ -0,0 +1,52 @@
|
|||
// comment 0 comment 1
|
||||
PACKAGE a
|
||||
|
||||
IMPORTPATH
|
||||
testdata/a
|
||||
|
||||
FILENAMES
|
||||
testdata/a0.go
|
||||
testdata/a1.go
|
||||
|
||||
BUGS .Bugs is now deprecated, please use .Notes instead
|
||||
bug0
|
||||
|
||||
bug1
|
||||
|
||||
|
||||
BUGS
|
||||
BUG(uid) bug0
|
||||
|
||||
BUG(uid) bug1
|
||||
|
||||
|
||||
NOTES
|
||||
NOTE(uid)
|
||||
|
||||
NOTE(foo) 1 of 4 - this is the first line of note 1
|
||||
- note 1 continues on this 2nd line
|
||||
- note 1 continues on this 3rd line
|
||||
|
||||
NOTE(foo) 2 of 4
|
||||
|
||||
NOTE(bar) 3 of 4
|
||||
|
||||
NOTE(bar) 4 of 4
|
||||
- this is the last line of note 4
|
||||
|
||||
NOTE(bam) This note which contains a (parenthesized) subphrase
|
||||
must appear in its entirety.
|
||||
|
||||
NOTE(xxx) The ':' after the marker and uid is optional.
|
||||
|
||||
|
||||
SECBUGS
|
||||
SECBUG(uid) sec hole 0
|
||||
need to fix asap
|
||||
|
||||
|
||||
TODOS
|
||||
TODO(uid) todo0
|
||||
|
||||
TODO(uid) todo1
|
||||
|
|
@ -0,0 +1,40 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// comment 0
|
||||
package a
|
||||
|
||||
//BUG(uid): bug0
|
||||
|
||||
//TODO(uid): todo0
|
||||
|
||||
// A note with some spaces after it, should be ignored (watch out for
|
||||
// emacs modes that remove trailing whitespace).
|
||||
//NOTE(uid):
|
||||
|
||||
// SECBUG(uid): sec hole 0
|
||||
// need to fix asap
|
||||
|
||||
// Multiple notes may be in the same comment group and should be
|
||||
// recognized individually. Notes may start in the middle of a
|
||||
// comment group as long as they start at the beginning of an
|
||||
// individual comment.
|
||||
//
|
||||
// NOTE(foo): 1 of 4 - this is the first line of note 1
|
||||
// - note 1 continues on this 2nd line
|
||||
// - note 1 continues on this 3rd line
|
||||
// NOTE(foo): 2 of 4
|
||||
// NOTE(bar): 3 of 4
|
||||
/* NOTE(bar): 4 of 4 */
|
||||
// - this is the last line of note 4
|
||||
//
|
||||
//
|
||||
|
||||
// NOTE(bam): This note which contains a (parenthesized) subphrase
|
||||
// must appear in its entirety.
|
||||
|
||||
// NOTE(xxx) The ':' after the marker and uid is optional.
|
||||
|
||||
// NOTE(): NO uid - should not show up.
|
||||
// NOTE() NO uid - should not show up.
|
|
@ -0,0 +1,12 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// comment 1
|
||||
package a
|
||||
|
||||
//BUG(uid): bug1
|
||||
|
||||
//TODO(uid): todo1
|
||||
|
||||
//TODO(): ignored
|
|
@ -0,0 +1,74 @@
|
|||
//
|
||||
PACKAGE b
|
||||
|
||||
IMPORTPATH
|
||||
testdata/b
|
||||
|
||||
IMPORTS
|
||||
a
|
||||
|
||||
FILENAMES
|
||||
testdata/b.go
|
||||
|
||||
CONSTANTS
|
||||
//
|
||||
const (
|
||||
C1 notExported = iota
|
||||
C2
|
||||
|
||||
C4
|
||||
C5
|
||||
)
|
||||
|
||||
//
|
||||
const C notExported = 0
|
||||
|
||||
//
|
||||
const Pi = 3.14 // Pi
|
||||
|
||||
|
||||
VARIABLES
|
||||
//
|
||||
var (
|
||||
U1, U2, U4, U5 notExported
|
||||
|
||||
U7 notExported = 7
|
||||
)
|
||||
|
||||
//
|
||||
var MaxInt int // MaxInt
|
||||
|
||||
//
|
||||
var V notExported
|
||||
|
||||
//
|
||||
var V1, V2, V4, V5 notExported
|
||||
|
||||
|
||||
FUNCTIONS
|
||||
// Associated with comparable type if AllDecls is set.
|
||||
func ComparableFactory() comparable
|
||||
|
||||
//
|
||||
func F(x int) int
|
||||
|
||||
//
|
||||
func F1() notExported
|
||||
|
||||
// Always under the package functions list.
|
||||
func NotAFactory() int
|
||||
|
||||
// Associated with uint type if AllDecls is set.
|
||||
func UintFactory() uint
|
||||
|
||||
|
||||
TYPES
|
||||
//
|
||||
type T struct{} // T
|
||||
|
||||
//
|
||||
var V T // v
|
||||
|
||||
//
|
||||
func (x *T) M()
|
||||
|
|
@ -0,0 +1,89 @@
|
|||
//
|
||||
PACKAGE b
|
||||
|
||||
IMPORTPATH
|
||||
testdata/b
|
||||
|
||||
IMPORTS
|
||||
a
|
||||
|
||||
FILENAMES
|
||||
testdata/b.go
|
||||
|
||||
CONSTANTS
|
||||
//
|
||||
const Pi = 3.14 // Pi
|
||||
|
||||
|
||||
VARIABLES
|
||||
//
|
||||
var MaxInt int // MaxInt
|
||||
|
||||
|
||||
FUNCTIONS
|
||||
//
|
||||
func F(x int) int
|
||||
|
||||
// Always under the package functions list.
|
||||
func NotAFactory() int
|
||||
|
||||
|
||||
TYPES
|
||||
//
|
||||
type T struct{} // T
|
||||
|
||||
//
|
||||
var V T // v
|
||||
|
||||
//
|
||||
func (x *T) M()
|
||||
|
||||
// Should only appear if AllDecls is set.
|
||||
type comparable struct{} // overrides a predeclared type comparable
|
||||
|
||||
// Associated with comparable type if AllDecls is set.
|
||||
func ComparableFactory() comparable
|
||||
|
||||
//
|
||||
type notExported int
|
||||
|
||||
//
|
||||
const (
|
||||
C1 notExported = iota
|
||||
C2
|
||||
c3
|
||||
C4
|
||||
C5
|
||||
)
|
||||
|
||||
//
|
||||
const C notExported = 0
|
||||
|
||||
//
|
||||
var (
|
||||
U1, U2, u3, U4, U5 notExported
|
||||
u6 notExported
|
||||
U7 notExported = 7
|
||||
)
|
||||
|
||||
//
|
||||
var V notExported
|
||||
|
||||
//
|
||||
var V1, V2, v3, V4, V5 notExported
|
||||
|
||||
//
|
||||
func F1() notExported
|
||||
|
||||
//
|
||||
func f2() notExported
|
||||
|
||||
// Should only appear if AllDecls is set.
|
||||
type uint struct{} // overrides a predeclared type uint
|
||||
|
||||
// Associated with uint type if AllDecls is set.
|
||||
func UintFactory() uint
|
||||
|
||||
// Associated with uint type if AllDecls is set.
|
||||
func uintFactory() uint
|
||||
|
|
@ -0,0 +1,74 @@
|
|||
//
|
||||
PACKAGE b
|
||||
|
||||
IMPORTPATH
|
||||
testdata/b
|
||||
|
||||
IMPORTS
|
||||
a
|
||||
|
||||
FILENAMES
|
||||
testdata/b.go
|
||||
|
||||
CONSTANTS
|
||||
//
|
||||
const (
|
||||
C1 notExported = iota
|
||||
C2
|
||||
|
||||
C4
|
||||
C5
|
||||
)
|
||||
|
||||
//
|
||||
const C notExported = 0
|
||||
|
||||
//
|
||||
const Pi = 3.14 // Pi
|
||||
|
||||
|
||||
VARIABLES
|
||||
//
|
||||
var (
|
||||
U1, U2, U4, U5 notExported
|
||||
|
||||
U7 notExported = 7
|
||||
)
|
||||
|
||||
//
|
||||
var MaxInt int // MaxInt
|
||||
|
||||
//
|
||||
var V notExported
|
||||
|
||||
//
|
||||
var V1, V2, V4, V5 notExported
|
||||
|
||||
|
||||
FUNCTIONS
|
||||
// Associated with comparable type if AllDecls is set.
|
||||
func ComparableFactory() comparable
|
||||
|
||||
//
|
||||
func F(x int) int
|
||||
|
||||
//
|
||||
func F1() notExported
|
||||
|
||||
// Always under the package functions list.
|
||||
func NotAFactory() int
|
||||
|
||||
// Associated with uint type if AllDecls is set.
|
||||
func UintFactory() uint
|
||||
|
||||
|
||||
TYPES
|
||||
//
|
||||
type T struct{} // T
|
||||
|
||||
//
|
||||
var V T // v
|
||||
|
||||
//
|
||||
func (x *T) M()
|
||||
|
|
@ -0,0 +1,64 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package b
|
||||
|
||||
import "a"
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Basic declarations
|
||||
|
||||
const Pi = 3.14 // Pi
|
||||
var MaxInt int // MaxInt
|
||||
type T struct{} // T
|
||||
var V T // v
|
||||
func F(x int) int {} // F
|
||||
func (x *T) M() {} // M
|
||||
|
||||
// Corner cases: association with (presumed) predeclared types
|
||||
|
||||
// Always under the package functions list.
|
||||
func NotAFactory() int {}
|
||||
|
||||
// Associated with uint type if AllDecls is set.
|
||||
func UintFactory() uint {}
|
||||
|
||||
// Associated with uint type if AllDecls is set.
|
||||
func uintFactory() uint {}
|
||||
|
||||
// Associated with comparable type if AllDecls is set.
|
||||
func ComparableFactory() comparable {}
|
||||
|
||||
// Should only appear if AllDecls is set.
|
||||
type uint struct{} // overrides a predeclared type uint
|
||||
|
||||
// Should only appear if AllDecls is set.
|
||||
type comparable struct{} // overrides a predeclared type comparable
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Exported declarations associated with non-exported types must always be shown.
|
||||
|
||||
type notExported int
|
||||
|
||||
const C notExported = 0
|
||||
|
||||
const (
|
||||
C1 notExported = iota
|
||||
C2
|
||||
c3
|
||||
C4
|
||||
C5
|
||||
)
|
||||
|
||||
var V notExported
|
||||
var V1, V2, v3, V4, V5 notExported
|
||||
|
||||
var (
|
||||
U1, U2, u3, U4, U5 notExported
|
||||
u6 notExported
|
||||
U7 notExported = 7
|
||||
)
|
||||
|
||||
func F1() notExported {}
|
||||
func f2() notExported {}
|
|
@ -0,0 +1,293 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package testing
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"os"
|
||||
"runtime"
|
||||
"time"
|
||||
)
|
||||
|
||||
var matchBenchmarks = flag.String("test.bench", "", "regular expression to select benchmarks to run")
|
||||
var benchTime = flag.Duration("test.benchtime", 1*time.Second, "approximate run time for each benchmark")
|
||||
|
||||
// An internal type but exported because it is cross-package; part of the implementation
|
||||
// of go test.
|
||||
type InternalBenchmark struct {
|
||||
Name string
|
||||
F func(b *B)
|
||||
}
|
||||
|
||||
// B is a type passed to Benchmark functions to manage benchmark
|
||||
// timing and to specify the number of iterations to run.
|
||||
type B struct {
|
||||
common
|
||||
N int
|
||||
benchmark InternalBenchmark
|
||||
bytes int64
|
||||
timerOn bool
|
||||
result BenchmarkResult
|
||||
}
|
||||
|
||||
// StartTimer starts timing a test. This function is called automatically
|
||||
// before a benchmark starts, but it can also be used to resume timing after
|
||||
// a call to StopTimer.
|
||||
func (b *B) StartTimer() {
|
||||
if !b.timerOn {
|
||||
b.start = time.Now()
|
||||
b.timerOn = true
|
||||
}
|
||||
}
|
||||
|
||||
// StopTimer stops timing a test. This can be used to pause the timer
|
||||
// while performing complex initialization that you don't
|
||||
// want to measure.
|
||||
func (b *B) StopTimer() {
|
||||
if b.timerOn {
|
||||
b.duration += time.Now().Sub(b.start)
|
||||
b.timerOn = false
|
||||
}
|
||||
}
|
||||
|
||||
// ResetTimer sets the elapsed benchmark time to zero.
|
||||
// It does not affect whether the timer is running.
|
||||
func (b *B) ResetTimer() {
|
||||
if b.timerOn {
|
||||
b.start = time.Now()
|
||||
}
|
||||
b.duration = 0
|
||||
}
|
||||
|
||||
// SetBytes records the number of bytes processed in a single operation.
|
||||
// If this is called, the benchmark will report ns/op and MB/s.
|
||||
func (b *B) SetBytes(n int64) { b.bytes = n }
|
||||
|
||||
func (b *B) nsPerOp() int64 {
|
||||
if b.N <= 0 {
|
||||
return 0
|
||||
}
|
||||
return b.duration.Nanoseconds() / int64(b.N)
|
||||
}
|
||||
|
||||
// runN runs a single benchmark for the specified number of iterations.
|
||||
func (b *B) runN(n int) {
|
||||
// Try to get a comparable environment for each run
|
||||
// by clearing garbage from previous runs.
|
||||
runtime.GC()
|
||||
b.N = n
|
||||
b.ResetTimer()
|
||||
b.StartTimer()
|
||||
b.benchmark.F(b)
|
||||
b.StopTimer()
|
||||
}
|
||||
|
||||
func min(x, y int) int {
|
||||
if x > y {
|
||||
return y
|
||||
}
|
||||
return x
|
||||
}
|
||||
|
||||
func max(x, y int) int {
|
||||
if x < y {
|
||||
return y
|
||||
}
|
||||
return x
|
||||
}
|
||||
|
||||
// roundDown10 rounds a number down to the nearest power of 10.
|
||||
func roundDown10(n int) int {
|
||||
var tens = 0
|
||||
// tens = floor(log_10(n))
|
||||
for n > 10 {
|
||||
n = n / 10
|
||||
tens++
|
||||
}
|
||||
// result = 10^tens
|
||||
result := 1
|
||||
for i := 0; i < tens; i++ {
|
||||
result *= 10
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// roundUp rounds n up to a number of the form [1eX, 2eX, 5eX].
|
||||
func roundUp(n int) int {
|
||||
base := roundDown10(n)
|
||||
if n < (2 * base) {
|
||||
return 2 * base
|
||||
}
|
||||
if n < (5 * base) {
|
||||
return 5 * base
|
||||
}
|
||||
return 10 * base
|
||||
}
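
A quick worked check of the two rounding helpers (a hypothetical standalone copy, duplicating them so the sketch runs on its own): roundDown10(1234) is 1000, so roundUp(1234) is 2000, and roundUp(271828) rounds up to 500000.

package main

import "fmt"

// roundDown10 and roundUp mirror the snapshot above so this sketch is self-contained.
func roundDown10(n int) int {
	tens := 0
	for n > 10 {
		n /= 10
		tens++
	}
	result := 1
	for i := 0; i < tens; i++ {
		result *= 10
	}
	return result
}

func roundUp(n int) int {
	base := roundDown10(n)
	switch {
	case n < 2*base:
		return 2 * base
	case n < 5*base:
		return 5 * base
	default:
		return 10 * base
	}
}

func main() {
	fmt.Println(roundUp(1234), roundUp(271828)) // 2000 500000
}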
|
||||
|
||||
// run times the benchmark function in a separate goroutine.
|
||||
func (b *B) run() BenchmarkResult {
|
||||
go b.launch()
|
||||
<-b.signal
|
||||
return b.result
|
||||
}
|
||||
|
||||
// launch launches the benchmark function. It gradually increases the number
|
||||
// of benchmark iterations until the benchmark runs for a second in order
|
||||
// to get a reasonable measurement. It prints timing information in this form
|
||||
// testing.BenchmarkHello 100000 19 ns/op
|
||||
// launch is run by the run function as a separate goroutine.
|
||||
func (b *B) launch() {
|
||||
// Run the benchmark for a single iteration in case it's expensive.
|
||||
n := 1
|
||||
|
||||
// Signal that we're done whether we return normally
|
||||
// or by FailNow's runtime.Goexit.
|
||||
defer func() {
|
||||
b.signal <- b
|
||||
}()
|
||||
|
||||
b.runN(n)
|
||||
// Run the benchmark for at least the specified amount of time.
|
||||
d := *benchTime
|
||||
for !b.failed && b.duration < d && n < 1e9 {
|
||||
last := n
|
||||
// Predict iterations/sec.
|
||||
if b.nsPerOp() == 0 {
|
||||
n = 1e9
|
||||
} else {
|
||||
n = int(d.Nanoseconds() / b.nsPerOp())
|
||||
}
|
||||
// Run more iterations than we think we'll need for a second (1.5x).
|
||||
// Don't grow too fast in case we had timing errors previously.
|
||||
// Be sure to run at least one more than last time.
|
||||
n = max(min(n+n/2, 100*last), last+1)
|
||||
// Round up to something easy to read.
|
||||
n = roundUp(n)
|
||||
b.runN(n)
|
||||
}
|
||||
b.result = BenchmarkResult{b.N, b.duration, b.bytes}
|
||||
}
|
||||
|
||||
// The results of a benchmark run.
|
||||
type BenchmarkResult struct {
|
||||
N int // The number of iterations.
|
||||
T time.Duration // The total time taken.
|
||||
Bytes int64 // Bytes processed in one iteration.
|
||||
}
|
||||
|
||||
func (r BenchmarkResult) NsPerOp() int64 {
|
||||
if r.N <= 0 {
|
||||
return 0
|
||||
}
|
||||
return r.T.Nanoseconds() / int64(r.N)
|
||||
}
|
||||
|
||||
func (r BenchmarkResult) mbPerSec() float64 {
|
||||
if r.Bytes <= 0 || r.T <= 0 || r.N <= 0 {
|
||||
return 0
|
||||
}
|
||||
return (float64(r.Bytes) * float64(r.N) / 1e6) / r.T.Seconds()
|
||||
}
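
Plugging numbers into the formula above (a hypothetical check, not part of the diff): 1<<20 bytes per op over 1000 ops in one second gives (1048576*1000/1e6)/1.0 ≈ 1048.58 MB/s and 1e6 ns/op.

package main

import (
	"fmt"
	"time"
)

func main() {
	var (
		n     = 1000            // iterations
		total = time.Second     // total time taken
		bytes = int64(1 << 20)  // bytes processed per iteration
	)
	nsPerOp := total.Nanoseconds() / int64(n)
	mbPerSec := (float64(bytes) * float64(n) / 1e6) / total.Seconds()
	fmt.Printf("%d ns/op, %.2f MB/s\n", nsPerOp, mbPerSec) // 1000000 ns/op, 1048.58 MB/s
}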
|
||||
|
||||
func (r BenchmarkResult) String() string {
|
||||
mbs := r.mbPerSec()
|
||||
mb := ""
|
||||
if mbs != 0 {
|
||||
mb = fmt.Sprintf("\t%7.2f MB/s", mbs)
|
||||
}
|
||||
nsop := r.NsPerOp()
|
||||
ns := fmt.Sprintf("%10d ns/op", nsop)
|
||||
if r.N > 0 && nsop < 100 {
|
||||
// The format specifiers here make sure that
|
||||
// the ones digits line up for all three possible formats.
|
||||
if nsop < 10 {
|
||||
ns = fmt.Sprintf("%13.2f ns/op", float64(r.T.Nanoseconds())/float64(r.N))
|
||||
} else {
|
||||
ns = fmt.Sprintf("%12.1f ns/op", float64(r.T.Nanoseconds())/float64(r.N))
|
||||
}
|
||||
}
|
||||
return fmt.Sprintf("%8d\t%s%s", r.N, ns, mb)
|
||||
}
|
||||
|
||||
// An internal function but exported because it is cross-package; part of the implementation
|
||||
// of go test.
|
||||
func RunBenchmarks(matchString func(pat, str string) (bool, error), benchmarks []InternalBenchmark) {
|
||||
// If no flag was specified, don't run benchmarks.
|
||||
if len(*matchBenchmarks) == 0 {
|
||||
return
|
||||
}
|
||||
for _, Benchmark := range benchmarks {
|
||||
matched, err := matchString(*matchBenchmarks, Benchmark.Name)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "testing: invalid regexp for -test.bench: %s\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
if !matched {
|
||||
continue
|
||||
}
|
||||
for _, procs := range cpuList {
|
||||
runtime.GOMAXPROCS(procs)
|
||||
b := &B{
|
||||
common: common{
|
||||
signal: make(chan any),
|
||||
},
|
||||
benchmark: Benchmark,
|
||||
}
|
||||
benchName := Benchmark.Name
|
||||
if procs != 1 {
|
||||
benchName = fmt.Sprintf("%s-%d", Benchmark.Name, procs)
|
||||
}
|
||||
fmt.Printf("%s\t", benchName)
|
||||
r := b.run()
|
||||
if b.failed {
|
||||
// The output could be very long here, but probably isn't.
|
||||
// We print it all, regardless, because we don't want to trim the reason
|
||||
// the benchmark failed.
|
||||
fmt.Printf("--- FAIL: %s\n%s", benchName, b.output)
|
||||
continue
|
||||
}
|
||||
fmt.Printf("%v\n", r)
|
||||
// Unlike with tests, we ignore the -chatty flag and always print output for
|
||||
// benchmarks since the output generation time will skew the results.
|
||||
if len(b.output) > 0 {
|
||||
b.trimOutput()
|
||||
fmt.Printf("--- BENCH: %s\n%s", benchName, b.output)
|
||||
}
|
||||
if p := runtime.GOMAXPROCS(-1); p != procs {
|
||||
fmt.Fprintf(os.Stderr, "testing: %s left GOMAXPROCS set to %d\n", benchName, p)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// trimOutput shortens the output from a benchmark, which can be very long.
|
||||
func (b *B) trimOutput() {
|
||||
// The output is likely to appear multiple times because the benchmark
|
||||
// is run multiple times, but at least it will be seen. This is not a big deal
|
||||
// because benchmarks rarely print, but just in case, we trim it if it's too long.
|
||||
const maxNewlines = 10
|
||||
for nlCount, j := 0, 0; j < len(b.output); j++ {
|
||||
if b.output[j] == '\n' {
|
||||
nlCount++
|
||||
if nlCount >= maxNewlines {
|
||||
b.output = append(b.output[:j], "\n\t... [output truncated]\n"...)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Benchmark benchmarks a single function. Useful for creating
|
||||
// custom benchmarks that do not use go test.
|
||||
func Benchmark(f func(b *B)) BenchmarkResult {
|
||||
b := &B{
|
||||
common: common{
|
||||
signal: make(chan any),
|
||||
},
|
||||
benchmark: InternalBenchmark{"", f},
|
||||
}
|
||||
return b.run()
|
||||
}
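
This file is a historical snapshot used only as go/doc test input, but the timer API it documents still exists in the real testing package; a small hedged example with today's standard library testing.Benchmark (an assumption outside this diff) showing StopTimer/StartTimer excluding setup from the measurement:

package main

import (
	"fmt"
	"strconv"
	"testing"
	"time"
)

func main() {
	r := testing.Benchmark(func(b *testing.B) {
		b.StopTimer()
		time.Sleep(10 * time.Millisecond) // expensive setup, excluded from timing
		b.StartTimer()
		for i := 0; i < b.N; i++ {
			_ = strconv.Itoa(i)
		}
	})
	fmt.Println(r, "-", r.NsPerOp(), "ns/op")
}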
|
|
@ -0,0 +1,62 @@
|
|||
// Package blank is a go/doc test for the handling of _. See issue ...
|
||||
PACKAGE blank
|
||||
|
||||
IMPORTPATH
|
||||
testdata/blank
|
||||
|
||||
IMPORTS
|
||||
os
|
||||
|
||||
FILENAMES
|
||||
testdata/blank.go
|
||||
|
||||
CONSTANTS
|
||||
// T constants counting from unexported constants.
|
||||
const (
|
||||
C1 T
|
||||
C2
|
||||
|
||||
C3
|
||||
|
||||
C4 int
|
||||
)
|
||||
|
||||
// Constants with a single type that is not propagated.
|
||||
const (
|
||||
Default = 0644
|
||||
Useless = 0312
|
||||
WideOpen = 0777
|
||||
)
|
||||
|
||||
// Constants with an imported type that is propagated.
|
||||
const (
|
||||
M1 os.FileMode
|
||||
M2
|
||||
M3
|
||||
)
|
||||
|
||||
// Package constants.
|
||||
const (
|
||||
I1 int
|
||||
I2
|
||||
)
|
||||
|
||||
|
||||
TYPES
|
||||
// S has a padding field.
|
||||
type S struct {
|
||||
H uint32
|
||||
|
||||
A uint8
|
||||
// contains filtered or unexported fields
|
||||
}
|
||||
|
||||
//
|
||||
type T int
|
||||
|
||||
// T constants counting from a blank constant.
|
||||
const (
|
||||
T1 T
|
||||
T2
|
||||
)
|
||||
|
|
@ -0,0 +1,83 @@
|
|||
// Package blank is a go/doc test for the handling of _. See issue ...
|
||||
PACKAGE blank
|
||||
|
||||
IMPORTPATH
|
||||
testdata/blank
|
||||
|
||||
IMPORTS
|
||||
os
|
||||
|
||||
FILENAMES
|
||||
testdata/blank.go
|
||||
|
||||
CONSTANTS
|
||||
// T constants counting from unexported constants.
|
||||
const (
|
||||
tweedledee T = iota
|
||||
tweedledum
|
||||
C1
|
||||
C2
|
||||
alice
|
||||
C3
|
||||
redQueen int = iota
|
||||
C4
|
||||
)
|
||||
|
||||
// Constants with a single type that is not propagated.
|
||||
const (
|
||||
zero os.FileMode = 0
|
||||
Default = 0644
|
||||
Useless = 0312
|
||||
WideOpen = 0777
|
||||
)
|
||||
|
||||
// Constants with an imported type that is propagated.
|
||||
const (
|
||||
zero os.FileMode = 0
|
||||
M1
|
||||
M2
|
||||
M3
|
||||
)
|
||||
|
||||
// Package constants.
|
||||
const (
|
||||
_ int = iota
|
||||
I1
|
||||
I2
|
||||
)
|
||||
|
||||
// Unexported constants counting from blank iota. See issue 9615.
|
||||
const (
|
||||
_ = iota
|
||||
one = iota + 1
|
||||
)
|
||||
|
||||
|
||||
VARIABLES
|
||||
//
|
||||
var _ = T(55)
|
||||
|
||||
|
||||
FUNCTIONS
|
||||
//
|
||||
func _()
|
||||
|
||||
|
||||
TYPES
|
||||
// S has a padding field.
|
||||
type S struct {
|
||||
H uint32
|
||||
_ uint8
|
||||
A uint8
|
||||
}
|
||||
|
||||
//
|
||||
type T int
|
||||
|
||||
// T constants counting from a blank constant.
|
||||
const (
|
||||
_ T = iota
|
||||
T1
|
||||
T2
|
||||
)
|
||||
|
|
@ -0,0 +1,62 @@
|
|||
// Package blank is a go/doc test for the handling of _. See issue ...
|
||||
PACKAGE blank
|
||||
|
||||
IMPORTPATH
|
||||
testdata/blank
|
||||
|
||||
IMPORTS
|
||||
os
|
||||
|
||||
FILENAMES
|
||||
testdata/blank.go
|
||||
|
||||
CONSTANTS
|
||||
// T constants counting from unexported constants.
|
||||
const (
|
||||
C1 T
|
||||
C2
|
||||
|
||||
C3
|
||||
|
||||
C4 int
|
||||
)
|
||||
|
||||
// Constants with a single type that is not propagated.
|
||||
const (
|
||||
Default = 0644
|
||||
Useless = 0312
|
||||
WideOpen = 0777
|
||||
)
|
||||
|
||||
// Constants with an imported type that is propagated.
|
||||
const (
|
||||
M1 os.FileMode
|
||||
M2
|
||||
M3
|
||||
)
|
||||
|
||||
// Package constants.
|
||||
const (
|
||||
I1 int
|
||||
I2
|
||||
)
|
||||
|
||||
|
||||
TYPES
|
||||
// S has a padding field.
|
||||
type S struct {
|
||||
H uint32
|
||||
|
||||
A uint8
|
||||
// contains filtered or unexported fields
|
||||
}
|
||||
|
||||
//
|
||||
type T int
|
||||
|
||||
// T constants counting from a blank constant.
|
||||
const (
|
||||
T1 T
|
||||
T2
|
||||
)
|
||||
|
|
@ -0,0 +1,75 @@
|
|||
// Copyright 2014 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package blank is a go/doc test for the handling of _.
|
||||
// See issue 5397.
|
||||
package blank
|
||||
|
||||
import "os"
|
||||
|
||||
type T int
|
||||
|
||||
// T constants counting from a blank constant.
|
||||
const (
|
||||
_ T = iota
|
||||
T1
|
||||
T2
|
||||
)
|
||||
|
||||
// T constants counting from unexported constants.
|
||||
const (
|
||||
tweedledee T = iota
|
||||
tweedledum
|
||||
C1
|
||||
C2
|
||||
alice
|
||||
C3
|
||||
redQueen int = iota
|
||||
C4
|
||||
)
|
||||
|
||||
// Constants with a single type that is not propagated.
|
||||
const (
|
||||
zero os.FileMode = 0
|
||||
Default = 0644
|
||||
Useless = 0312
|
||||
WideOpen = 0777
|
||||
)
|
||||
|
||||
// Constants with an imported type that is propagated.
|
||||
const (
|
||||
zero os.FileMode = 0
|
||||
M1
|
||||
M2
|
||||
M3
|
||||
)
|
||||
|
||||
// Package constants.
|
||||
const (
|
||||
_ int = iota
|
||||
I1
|
||||
I2
|
||||
)
|
||||
|
||||
// Unexported constants counting from blank iota.
|
||||
// See issue 9615.
|
||||
const (
|
||||
_ = iota
|
||||
one = iota + 1
|
||||
)
|
||||
|
||||
// Blanks not in doc output:
|
||||
|
||||
// S has a padding field.
|
||||
type S struct {
|
||||
H uint32
|
||||
_ uint8
|
||||
A uint8
|
||||
}
|
||||
|
||||
func _() {}
|
||||
|
||||
type _ T
|
||||
|
||||
var _ = T(55)
|
|
@ -0,0 +1,20 @@
|
|||
//
|
||||
PACKAGE bugpara
|
||||
|
||||
IMPORTPATH
|
||||
testdata/bugpara
|
||||
|
||||
FILENAMES
|
||||
testdata/bugpara.go
|
||||
|
||||
BUGS .Bugs is now deprecated, please use .Notes instead
|
||||
Sometimes bugs have multiple paragraphs.
|
||||
|
||||
Like this one.
|
||||
|
||||
|
||||
BUGS
|
||||
BUG(rsc) Sometimes bugs have multiple paragraphs.
|
||||
|
||||
Like this one.
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
//
|
||||
PACKAGE bugpara
|
||||
|
||||
IMPORTPATH
|
||||
testdata/bugpara
|
||||
|
||||
FILENAMES
|
||||
testdata/bugpara.go
|
||||
|
||||
BUGS .Bugs is now deprecated, please use .Notes instead
|
||||
Sometimes bugs have multiple paragraphs.
|
||||
|
||||
Like this one.
|
||||
|
||||
|
||||
BUGS
|
||||
BUG(rsc) Sometimes bugs have multiple paragraphs.
|
||||
|
||||
Like this one.
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
//
|
||||
PACKAGE bugpara
|
||||
|
||||
IMPORTPATH
|
||||
testdata/bugpara
|
||||
|
||||
FILENAMES
|
||||
testdata/bugpara.go
|
||||
|
||||
BUGS .Bugs is now deprecated, please use .Notes instead
|
||||
Sometimes bugs have multiple paragraphs.
|
||||
|
||||
Like this one.
|
||||
|
||||
|
||||
BUGS
|
||||
BUG(rsc) Sometimes bugs have multiple paragraphs.
|
||||
|
||||
Like this one.
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package bugpara
|
||||
|
||||
// BUG(rsc): Sometimes bugs have multiple paragraphs.
|
||||
//
|
||||
// Like this one.
|
|
@ -0,0 +1,48 @@
|
|||
//
|
||||
PACKAGE c
|
||||
|
||||
IMPORTPATH
|
||||
testdata/c
|
||||
|
||||
IMPORTS
|
||||
a
|
||||
|
||||
FILENAMES
|
||||
testdata/c.go
|
||||
|
||||
TYPES
|
||||
// A (should see this)
|
||||
type A struct{}
|
||||
|
||||
// B (should see this)
|
||||
type B struct{}
|
||||
|
||||
// C (should see this)
|
||||
type C struct{}
|
||||
|
||||
// D (should see this)
|
||||
type D struct{}
|
||||
|
||||
// E1 (should see this)
|
||||
type E1 struct{}
|
||||
|
||||
// E (should see this for E2 and E3)
|
||||
type E2 struct{}
|
||||
|
||||
// E (should see this for E2 and E3)
|
||||
type E3 struct{}
|
||||
|
||||
// E4 (should see this)
|
||||
type E4 struct{}
|
||||
|
||||
//
|
||||
type T1 struct{}
|
||||
|
||||
//
|
||||
func (t1 *T1) M()
|
||||
|
||||
// T2 must not show methods of local T1
|
||||
type T2 struct {
|
||||
a.T1 // not the same as locally declared T1
|
||||
}
|
||||
|
|
@ -0,0 +1,48 @@
|
|||
//
|
||||
PACKAGE c
|
||||
|
||||
IMPORTPATH
|
||||
testdata/c
|
||||
|
||||
IMPORTS
|
||||
a
|
||||
|
||||
FILENAMES
|
||||
testdata/c.go
|
||||
|
||||
TYPES
|
||||
// A (should see this)
|
||||
type A struct{}
|
||||
|
||||
// B (should see this)
|
||||
type B struct{}
|
||||
|
||||
// C (should see this)
|
||||
type C struct{}
|
||||
|
||||
// D (should see this)
|
||||
type D struct{}
|
||||
|
||||
// E1 (should see this)
|
||||
type E1 struct{}
|
||||
|
||||
// E (should see this for E2 and E3)
|
||||
type E2 struct{}
|
||||
|
||||
// E (should see this for E2 and E3)
|
||||
type E3 struct{}
|
||||
|
||||
// E4 (should see this)
|
||||
type E4 struct{}
|
||||
|
||||
//
|
||||
type T1 struct{}
|
||||
|
||||
//
|
||||
func (t1 *T1) M()
|
||||
|
||||
// T2 must not show methods of local T1
|
||||
type T2 struct {
|
||||
a.T1 // not the same as locally declared T1
|
||||
}
|
||||
|
|
@ -0,0 +1,48 @@
|
|||
//
|
||||
PACKAGE c
|
||||
|
||||
IMPORTPATH
|
||||
testdata/c
|
||||
|
||||
IMPORTS
|
||||
a
|
||||
|
||||
FILENAMES
|
||||
testdata/c.go
|
||||
|
||||
TYPES
|
||||
// A (should see this)
|
||||
type A struct{}
|
||||
|
||||
// B (should see this)
|
||||
type B struct{}
|
||||
|
||||
// C (should see this)
|
||||
type C struct{}
|
||||
|
||||
// D (should see this)
|
||||
type D struct{}
|
||||
|
||||
// E1 (should see this)
|
||||
type E1 struct{}
|
||||
|
||||
// E (should see this for E2 and E3)
|
||||
type E2 struct{}
|
||||
|
||||
// E (should see this for E2 and E3)
|
||||
type E3 struct{}
|
||||
|
||||
// E4 (should see this)
|
||||
type E4 struct{}
|
||||
|
||||
//
|
||||
type T1 struct{}
|
||||
|
||||
//
|
||||
func (t1 *T1) M()
|
||||
|
||||
// T2 must not show methods of local T1
|
||||
type T2 struct {
|
||||
a.T1 // not the same as locally declared T1
|
||||
}
|
||||
|
|
@ -0,0 +1,62 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package c
|
||||
|
||||
import "a"
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Test that empty declarations don't cause problems
|
||||
|
||||
const ()
|
||||
|
||||
type ()
|
||||
|
||||
var ()
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Test that types with documentation on both, the Decl and the Spec node
|
||||
// are handled correctly.
|
||||
|
||||
// A (should see this)
|
||||
type A struct{}
|
||||
|
||||
// B (should see this)
|
||||
type (
|
||||
B struct{}
|
||||
)
|
||||
|
||||
type (
|
||||
// C (should see this)
|
||||
C struct{}
|
||||
)
|
||||
|
||||
// D (should not see this)
|
||||
type (
|
||||
// D (should see this)
|
||||
D struct{}
|
||||
)
|
||||
|
||||
// E (should see this for E2 and E3)
|
||||
type (
|
||||
// E1 (should see this)
|
||||
E1 struct{}
|
||||
E2 struct{}
|
||||
E3 struct{}
|
||||
// E4 (should see this)
|
||||
E4 struct{}
|
||||
)
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Test that local and imported types are different when
|
||||
// handling anonymous fields.
|
||||
|
||||
type T1 struct{}
|
||||
|
||||
func (t1 *T1) M() {}
|
||||
|
||||
// T2 must not show methods of local T1
|
||||
type T2 struct {
|
||||
a.T1 // not the same as locally declared T1
|
||||
}
|
|
@ -0,0 +1,104 @@
|
|||
//
|
||||
PACKAGE d
|
||||
|
||||
IMPORTPATH
|
||||
testdata/d
|
||||
|
||||
FILENAMES
|
||||
testdata/d1.go
|
||||
testdata/d2.go
|
||||
|
||||
CONSTANTS
|
||||
// CBx constants should appear before CAx constants.
|
||||
const (
|
||||
CB2 = iota // before CB1
|
||||
CB1 // before CB0
|
||||
CB0 // at end
|
||||
)
|
||||
|
||||
// CAx constants should appear after CBx constants.
|
||||
const (
|
||||
CA2 = iota // before CA1
|
||||
CA1 // before CA0
|
||||
CA0 // at end
|
||||
)
|
||||
|
||||
// C0 should be first.
|
||||
const C0 = 0
|
||||
|
||||
// C1 should be second.
|
||||
const C1 = 1
|
||||
|
||||
// C2 should be third.
|
||||
const C2 = 2
|
||||
|
||||
//
|
||||
const (
|
||||
// Single const declarations inside ()'s are considered ungrouped
|
||||
// and show up in sorted order.
|
||||
Cungrouped = 0
|
||||
)
|
||||
|
||||
|
||||
VARIABLES
|
||||
// VBx variables should appear before VAx variables.
|
||||
var (
|
||||
VB2 int // before VB1
|
||||
VB1 int // before VB0
|
||||
VB0 int // at end
|
||||
)
|
||||
|
||||
// VAx variables should appear after VBx variables.
|
||||
var (
|
||||
VA2 int // before VA1
|
||||
VA1 int // before VA0
|
||||
VA0 int // at end
|
||||
)
|
||||
|
||||
// V0 should be first.
|
||||
var V0 uintptr
|
||||
|
||||
// V1 should be second.
|
||||
var V1 uint
|
||||
|
||||
// V2 should be third.
|
||||
var V2 int
|
||||
|
||||
//
|
||||
var (
|
||||
// Single var declarations inside ()'s are considered ungrouped
|
||||
// and show up in sorted order.
|
||||
Vungrouped = 0
|
||||
)
|
||||
|
||||
|
||||
FUNCTIONS
|
||||
// F0 should be first.
|
||||
func F0()
|
||||
|
||||
// F1 should be second.
|
||||
func F1()
|
||||
|
||||
// F2 should be third.
|
||||
func F2()
|
||||
|
||||
|
||||
TYPES
|
||||
// T0 should be first.
|
||||
type T0 struct{}
|
||||
|
||||
// T1 should be second.
|
||||
type T1 struct{}
|
||||
|
||||
// T2 should be third.
|
||||
type T2 struct{}
|
||||
|
||||
// TG0 should be first.
|
||||
type TG0 struct{}
|
||||
|
||||
// TG1 should be second.
|
||||
type TG1 struct{}
|
||||
|
||||
// TG2 should be third.
|
||||
type TG2 struct{}
|
||||
|
|
@ -0,0 +1,104 @@
|
|||
//
|
||||
PACKAGE d
|
||||
|
||||
IMPORTPATH
|
||||
testdata/d
|
||||
|
||||
FILENAMES
|
||||
testdata/d1.go
|
||||
testdata/d2.go
|
||||
|
||||
CONSTANTS
|
||||
// CBx constants should appear before CAx constants.
|
||||
const (
|
||||
CB2 = iota // before CB1
|
||||
CB1 // before CB0
|
||||
CB0 // at end
|
||||
)
|
||||
|
||||
// CAx constants should appear after CBx constants.
|
||||
const (
|
||||
CA2 = iota // before CA1
|
||||
CA1 // before CA0
|
||||
CA0 // at end
|
||||
)
|
||||
|
||||
// C0 should be first.
|
||||
const C0 = 0
|
||||
|
||||
// C1 should be second.
|
||||
const C1 = 1
|
||||
|
||||
// C2 should be third.
|
||||
const C2 = 2
|
||||
|
||||
//
|
||||
const (
|
||||
// Single const declarations inside ()'s are considered ungrouped
|
||||
// and show up in sorted order.
|
||||
Cungrouped = 0
|
||||
)
|
||||
|
||||
|
||||
VARIABLES
|
||||
// VBx variables should appear before VAx variables.
|
||||
var (
|
||||
VB2 int // before VB1
|
||||
VB1 int // before VB0
|
||||
VB0 int // at end
|
||||
)
|
||||
|
||||
// VAx variables should appear after VBx variables.
|
||||
var (
|
||||
VA2 int // before VA1
|
||||
VA1 int // before VA0
|
||||
VA0 int // at end
|
||||
)
|
||||
|
||||
// V0 should be first.
|
||||
var V0 uintptr
|
||||
|
||||
// V1 should be second.
|
||||
var V1 uint
|
||||
|
||||
// V2 should be third.
|
||||
var V2 int
|
||||
|
||||
//
|
||||
var (
|
||||
// Single var declarations inside ()'s are considered ungrouped
|
||||
// and show up in sorted order.
|
||||
Vungrouped = 0
|
||||
)
|
||||
|
||||
|
||||
FUNCTIONS
|
||||
// F0 should be first.
|
||||
func F0()
|
||||
|
||||
// F1 should be second.
|
||||
func F1()
|
||||
|
||||
// F2 should be third.
|
||||
func F2()
|
||||
|
||||
|
||||
TYPES
|
||||
// T0 should be first.
|
||||
type T0 struct{}
|
||||
|
||||
// T1 should be second.
|
||||
type T1 struct{}
|
||||
|
||||
// T2 should be third.
|
||||
type T2 struct{}
|
||||
|
||||
// TG0 should be first.
|
||||
type TG0 struct{}
|
||||
|
||||
// TG1 should be second.
|
||||
type TG1 struct{}
|
||||
|
||||
// TG2 should be third.
|
||||
type TG2 struct{}
|
||||
|
|
@ -0,0 +1,104 @@
|
|||
//
|
||||
PACKAGE d
|
||||
|
||||
IMPORTPATH
|
||||
testdata/d
|
||||
|
||||
FILENAMES
|
||||
testdata/d1.go
|
||||
testdata/d2.go
|
||||
|
||||
CONSTANTS
|
||||
// CBx constants should appear before CAx constants.
|
||||
const (
|
||||
CB2 = iota // before CB1
|
||||
CB1 // before CB0
|
||||
CB0 // at end
|
||||
)
|
||||
|
||||
// CAx constants should appear after CBx constants.
|
||||
const (
|
||||
CA2 = iota // before CA1
|
||||
CA1 // before CA0
|
||||
CA0 // at end
|
||||
)
|
||||
|
||||
// C0 should be first.
|
||||
const C0 = 0
|
||||
|
||||
// C1 should be second.
|
||||
const C1 = 1
|
||||
|
||||
// C2 should be third.
|
||||
const C2 = 2
|
||||
|
||||
//
|
||||
const (
|
||||
// Single const declarations inside ()'s are considered ungrouped
|
||||
// and show up in sorted order.
|
||||
Cungrouped = 0
|
||||
)
|
||||
|
||||
|
||||
VARIABLES
|
||||
// VBx variables should appear before VAx variables.
|
||||
var (
|
||||
VB2 int // before VB1
|
||||
VB1 int // before VB0
|
||||
VB0 int // at end
|
||||
)
|
||||
|
||||
// VAx variables should appear after VBx variables.
|
||||
var (
|
||||
VA2 int // before VA1
|
||||
VA1 int // before VA0
|
||||
VA0 int // at end
|
||||
)
|
||||
|
||||
// V0 should be first.
|
||||
var V0 uintptr
|
||||
|
||||
// V1 should be second.
|
||||
var V1 uint
|
||||
|
||||
// V2 should be third.
|
||||
var V2 int
|
||||
|
||||
//
|
||||
var (
|
||||
// Single var declarations inside ()'s are considered ungrouped
|
||||
// and show up in sorted order.
|
||||
Vungrouped = 0
|
||||
)
|
||||
|
||||
|
||||
FUNCTIONS
|
||||
// F0 should be first.
|
||||
func F0()
|
||||
|
||||
// F1 should be second.
|
||||
func F1()
|
||||
|
||||
// F2 should be third.
|
||||
func F2()
|
||||
|
||||
|
||||
TYPES
|
||||
// T0 should be first.
|
||||
type T0 struct{}
|
||||
|
||||
// T1 should be second.
|
||||
type T1 struct{}
|
||||
|
||||
// T2 should be third.
|
||||
type T2 struct{}
|
||||
|
||||
// TG0 should be first.
|
||||
type TG0 struct{}
|
||||
|
||||
// TG1 should be second.
|
||||
type TG1 struct{}
|
||||
|
||||
// TG2 should be third.
|
||||
type TG2 struct{}
|
||||
|
|
@ -0,0 +1,57 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Test cases for sort order of declarations.
|
||||
|
||||
package d
|
||||
|
||||
// C2 should be third.
|
||||
const C2 = 2
|
||||
|
||||
// V2 should be third.
|
||||
var V2 int
|
||||
|
||||
// CBx constants should appear before CAx constants.
|
||||
const (
|
||||
CB2 = iota // before CB1
|
||||
CB1 // before CB0
|
||||
CB0 // at end
|
||||
)
|
||||
|
||||
// VBx variables should appear before VAx variables.
|
||||
var (
|
||||
VB2 int // before VB1
|
||||
VB1 int // before VB0
|
||||
VB0 int // at end
|
||||
)
|
||||
|
||||
const (
|
||||
// Single const declarations inside ()'s are considered ungrouped
|
||||
// and show up in sorted order.
|
||||
Cungrouped = 0
|
||||
)
|
||||
|
||||
var (
|
||||
// Single var declarations inside ()'s are considered ungrouped
|
||||
// and show up in sorted order.
|
||||
Vungrouped = 0
|
||||
)
|
||||
|
||||
// T2 should be third.
|
||||
type T2 struct{}
|
||||
|
||||
// Grouped types are sorted nevertheless.
|
||||
type (
|
||||
// TG2 should be third.
|
||||
TG2 struct{}
|
||||
|
||||
// TG1 should be second.
|
||||
TG1 struct{}
|
||||
|
||||
// TG0 should be first.
|
||||
TG0 struct{}
|
||||
)
|
||||
|
||||
// F2 should be third.
|
||||
func F2() {}
|
|
@@ -0,0 +1,45 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Test cases for sort order of declarations.
|
||||
|
||||
package d
|
||||
|
||||
// C1 should be second.
|
||||
const C1 = 1
|
||||
|
||||
// C0 should be first.
|
||||
const C0 = 0
|
||||
|
||||
// V1 should be second.
|
||||
var V1 uint
|
||||
|
||||
// V0 should be first.
|
||||
var V0 uintptr
|
||||
|
||||
// CAx constants should appear after CBx constants.
|
||||
const (
|
||||
CA2 = iota // before CA1
|
||||
CA1 // before CA0
|
||||
CA0 // at end
|
||||
)
|
||||
|
||||
// VAx variables should appear after VBx variables.
|
||||
var (
|
||||
VA2 int // before VA1
|
||||
VA1 int // before VA0
|
||||
VA0 int // at end
|
||||
)
|
||||
|
||||
// T1 should be second.
|
||||
type T1 struct{}
|
||||
|
||||
// T0 should be first.
|
||||
type T0 struct{}
|
||||
|
||||
// F1 should be second.
|
||||
func F1() {}
|
||||
|
||||
// F0 should be first.
|
||||
func F0() {}
|
|
@@ -0,0 +1,109 @@
|
|||
// The package e is a go/doc test for embedded methods.
|
||||
PACKAGE e
|
||||
|
||||
IMPORTPATH
|
||||
testdata/e
|
||||
|
||||
FILENAMES
|
||||
testdata/e.go
|
||||
|
||||
TYPES
|
||||
// T1 has no embedded (level 1) M method due to conflict.
|
||||
type T1 struct {
|
||||
// contains filtered or unexported fields
|
||||
}
|
||||
|
||||
// T2 has only M as top-level method.
|
||||
type T2 struct {
|
||||
// contains filtered or unexported fields
|
||||
}
|
||||
|
||||
// T2.M should appear as method of T2.
|
||||
func (T2) M()
|
||||
|
||||
// T3 has only M as top-level method.
|
||||
type T3 struct {
|
||||
// contains filtered or unexported fields
|
||||
}
|
||||
|
||||
// T3.M should appear as method of T3.
|
||||
func (T3) M()
|
||||
|
||||
//
|
||||
type T4 struct{}
|
||||
|
||||
// T4.M should appear as method of T5 only if AllMethods is set.
|
||||
func (*T4) M()
|
||||
|
||||
//
|
||||
type T5 struct {
|
||||
T4
|
||||
}
|
||||
|
||||
//
|
||||
type U1 struct {
|
||||
*U1
|
||||
}
|
||||
|
||||
// U1.M should appear as method of U1.
|
||||
func (*U1) M()
|
||||
|
||||
//
|
||||
type U2 struct {
|
||||
*U3
|
||||
}
|
||||
|
||||
// U2.M should appear as method of U2 and as method of U3 only if ...
|
||||
func (*U2) M()
|
||||
|
||||
//
|
||||
type U3 struct {
|
||||
*U2
|
||||
}
|
||||
|
||||
// U3.N should appear as method of U3 and as method of U2 only if ...
|
||||
func (*U3) N()
|
||||
|
||||
//
|
||||
type U4 struct {
|
||||
// contains filtered or unexported fields
|
||||
}
|
||||
|
||||
// U4.M should appear as method of U4.
|
||||
func (*U4) M()
|
||||
|
||||
//
|
||||
type V1 struct {
|
||||
*V2
|
||||
*V5
|
||||
}
|
||||
|
||||
//
|
||||
type V2 struct {
|
||||
*V3
|
||||
}
|
||||
|
||||
//
|
||||
type V3 struct {
|
||||
*V4
|
||||
}
|
||||
|
||||
//
|
||||
type V4 struct {
|
||||
*V5
|
||||
}
|
||||
|
||||
// V4.M should appear as method of V2 and V3 if AllMethods is set.
|
||||
func (*V4) M()
|
||||
|
||||
//
|
||||
type V5 struct {
|
||||
*V6
|
||||
}
|
||||
|
||||
//
|
||||
type V6 struct{}
|
||||
|
||||
// V6.M should appear as method of V1 and V5 if AllMethods is set.
|
||||
func (*V6) M()
|
||||
|
|
@@ -0,0 +1,144 @@
|
|||
// The package e is a go/doc test for embedded methods.
|
||||
PACKAGE e
|
||||
|
||||
IMPORTPATH
|
||||
testdata/e
|
||||
|
||||
FILENAMES
|
||||
testdata/e.go
|
||||
|
||||
TYPES
|
||||
// T1 has no embedded (level 1) M method due to conflict.
|
||||
type T1 struct {
|
||||
t1
|
||||
t2
|
||||
}
|
||||
|
||||
// T2 has only M as top-level method.
|
||||
type T2 struct {
|
||||
t1
|
||||
}
|
||||
|
||||
// T2.M should appear as method of T2.
|
||||
func (T2) M()
|
||||
|
||||
// T3 has only M as top-level method.
|
||||
type T3 struct {
|
||||
t1e
|
||||
t2e
|
||||
}
|
||||
|
||||
// T3.M should appear as method of T3.
|
||||
func (T3) M()
|
||||
|
||||
//
|
||||
type T4 struct{}
|
||||
|
||||
// T4.M should appear as method of T5 only if AllMethods is set.
|
||||
func (*T4) M()
|
||||
|
||||
//
|
||||
type T5 struct {
|
||||
T4
|
||||
}
|
||||
|
||||
//
|
||||
type U1 struct {
|
||||
*U1
|
||||
}
|
||||
|
||||
// U1.M should appear as method of U1.
|
||||
func (*U1) M()
|
||||
|
||||
//
|
||||
type U2 struct {
|
||||
*U3
|
||||
}
|
||||
|
||||
// U2.M should appear as method of U2 and as method of U3 only if ...
|
||||
func (*U2) M()
|
||||
|
||||
//
|
||||
type U3 struct {
|
||||
*U2
|
||||
}
|
||||
|
||||
// U3.N should appear as method of U3 and as method of U2 only if ...
|
||||
func (*U3) N()
|
||||
|
||||
//
|
||||
type U4 struct {
|
||||
*u5
|
||||
}
|
||||
|
||||
// U4.M should appear as method of U4.
|
||||
func (*U4) M()
|
||||
|
||||
//
|
||||
type V1 struct {
|
||||
*V2
|
||||
*V5
|
||||
}
|
||||
|
||||
//
|
||||
type V2 struct {
|
||||
*V3
|
||||
}
|
||||
|
||||
//
|
||||
type V3 struct {
|
||||
*V4
|
||||
}
|
||||
|
||||
//
|
||||
type V4 struct {
|
||||
*V5
|
||||
}
|
||||
|
||||
// V4.M should appear as method of V2 and V3 if AllMethods is set.
|
||||
func (*V4) M()
|
||||
|
||||
//
|
||||
type V5 struct {
|
||||
*V6
|
||||
}
|
||||
|
||||
//
|
||||
type V6 struct{}
|
||||
|
||||
// V6.M should appear as method of V1 and V5 if AllMethods is set.
|
||||
func (*V6) M()
|
||||
|
||||
//
|
||||
type t1 struct{}
|
||||
|
||||
// t1.M should not appear as method in a Tx type.
|
||||
func (t1) M()
|
||||
|
||||
//
|
||||
type t1e struct {
|
||||
t1
|
||||
}
|
||||
|
||||
// t1.M should not appear as method in a Tx type.
|
||||
func (t1e) M()
|
||||
|
||||
//
|
||||
type t2 struct{}
|
||||
|
||||
// t2.M should not appear as method in a Tx type.
|
||||
func (t2) M()
|
||||
|
||||
//
|
||||
type t2e struct {
|
||||
t2
|
||||
}
|
||||
|
||||
// t2.M should not appear as method in a Tx type.
|
||||
func (t2e) M()
|
||||
|
||||
//
|
||||
type u5 struct {
|
||||
*U4
|
||||
}
|
||||
|
|
@@ -0,0 +1,130 @@
|
|||
// The package e is a go/doc test for embedded methods.
|
||||
PACKAGE e
|
||||
|
||||
IMPORTPATH
|
||||
testdata/e
|
||||
|
||||
FILENAMES
|
||||
testdata/e.go
|
||||
|
||||
TYPES
|
||||
// T1 has no embedded (level 1) M method due to conflict.
|
||||
type T1 struct {
|
||||
// contains filtered or unexported fields
|
||||
}
|
||||
|
||||
// T2 has only M as top-level method.
|
||||
type T2 struct {
|
||||
// contains filtered or unexported fields
|
||||
}
|
||||
|
||||
// T2.M should appear as method of T2.
|
||||
func (T2) M()
|
||||
|
||||
// T3 has only M as top-level method.
|
||||
type T3 struct {
|
||||
// contains filtered or unexported fields
|
||||
}
|
||||
|
||||
// T3.M should appear as method of T3.
|
||||
func (T3) M()
|
||||
|
||||
//
|
||||
type T4 struct{}
|
||||
|
||||
// T4.M should appear as method of T5 only if AllMethods is set.
|
||||
func (*T4) M()
|
||||
|
||||
//
|
||||
type T5 struct {
|
||||
T4
|
||||
}
|
||||
|
||||
// T4.M should appear as method of T5 only if AllMethods is set.
|
||||
func (*T5) M()
|
||||
|
||||
//
|
||||
type U1 struct {
|
||||
*U1
|
||||
}
|
||||
|
||||
// U1.M should appear as method of U1.
|
||||
func (*U1) M()
|
||||
|
||||
//
|
||||
type U2 struct {
|
||||
*U3
|
||||
}
|
||||
|
||||
// U2.M should appear as method of U2 and as method of U3 only if ...
|
||||
func (*U2) M()
|
||||
|
||||
// U3.N should appear as method of U3 and as method of U2 only if ...
|
||||
func (U2) N()
|
||||
|
||||
//
|
||||
type U3 struct {
|
||||
*U2
|
||||
}
|
||||
|
||||
// U2.M should appear as method of U2 and as method of U3 only if ...
|
||||
func (U3) M()
|
||||
|
||||
// U3.N should appear as method of U3 and as method of U2 only if ...
|
||||
func (*U3) N()
|
||||
|
||||
//
|
||||
type U4 struct {
|
||||
// contains filtered or unexported fields
|
||||
}
|
||||
|
||||
// U4.M should appear as method of U4.
|
||||
func (*U4) M()
|
||||
|
||||
//
|
||||
type V1 struct {
|
||||
*V2
|
||||
*V5
|
||||
}
|
||||
|
||||
// V6.M should appear as method of V1 and V5 if AllMethods is set.
|
||||
func (V1) M()
|
||||
|
||||
//
|
||||
type V2 struct {
|
||||
*V3
|
||||
}
|
||||
|
||||
// V4.M should appear as method of V2 and V3 if AllMethods is set.
|
||||
func (V2) M()
|
||||
|
||||
//
|
||||
type V3 struct {
|
||||
*V4
|
||||
}
|
||||
|
||||
// V4.M should appear as method of V2 and V3 if AllMethods is set.
|
||||
func (V3) M()
|
||||
|
||||
//
|
||||
type V4 struct {
|
||||
*V5
|
||||
}
|
||||
|
||||
// V4.M should appear as method of V2 and V3 if AllMethods is set.
|
||||
func (*V4) M()
|
||||
|
||||
//
|
||||
type V5 struct {
|
||||
*V6
|
||||
}
|
||||
|
||||
// V6.M should appear as method of V1 and V5 if AllMethods is set.
|
||||
func (V5) M()
|
||||
|
||||
//
|
||||
type V6 struct{}
|
||||
|
||||
// V6.M should appear as method of V1 and V5 if AllMethods is set.
|
||||
func (*V6) M()
|
||||
|
|
@@ -0,0 +1,147 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// The package e is a go/doc test for embedded methods.
|
||||
package e
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Conflicting methods M must not show up.
|
||||
|
||||
type t1 struct{}
|
||||
|
||||
// t1.M should not appear as method in a Tx type.
|
||||
func (t1) M() {}
|
||||
|
||||
type t2 struct{}
|
||||
|
||||
// t2.M should not appear as method in a Tx type.
|
||||
func (t2) M() {}
|
||||
|
||||
// T1 has no embedded (level 1) M method due to conflict.
|
||||
type T1 struct {
|
||||
t1
|
||||
t2
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Higher-level method M wins over lower-level method M.
|
||||
|
||||
// T2 has only M as top-level method.
|
||||
type T2 struct {
|
||||
t1
|
||||
}
|
||||
|
||||
// T2.M should appear as method of T2.
|
||||
func (T2) M() {}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Higher-level method M wins over lower-level conflicting methods M.
|
||||
|
||||
type t1e struct {
|
||||
t1
|
||||
}
|
||||
|
||||
type t2e struct {
|
||||
t2
|
||||
}
|
||||
|
||||
// T3 has only M as top-level method.
|
||||
type T3 struct {
|
||||
t1e
|
||||
t2e
|
||||
}
|
||||
|
||||
// T3.M should appear as method of T3.
|
||||
func (T3) M() {}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Don't show conflicting methods M embedded via an exported and non-exported
|
||||
// type.
|
||||
|
||||
// T1 has no embedded (level 1) M method due to conflict.
|
||||
type T4 struct {
|
||||
t2
|
||||
T2
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Don't show embedded methods of exported anonymous fields unless AllMethods
|
||||
// is set.
|
||||
|
||||
type T4 struct{}
|
||||
|
||||
// T4.M should appear as method of T5 only if AllMethods is set.
|
||||
func (*T4) M() {}
|
||||
|
||||
type T5 struct {
|
||||
T4
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Recursive type declarations must not lead to endless recursion.
|
||||
|
||||
type U1 struct {
|
||||
*U1
|
||||
}
|
||||
|
||||
// U1.M should appear as method of U1.
|
||||
func (*U1) M() {}
|
||||
|
||||
type U2 struct {
|
||||
*U3
|
||||
}
|
||||
|
||||
// U2.M should appear as method of U2 and as method of U3 only if AllMethods is set.
|
||||
func (*U2) M() {}
|
||||
|
||||
type U3 struct {
|
||||
*U2
|
||||
}
|
||||
|
||||
// U3.N should appear as method of U3 and as method of U2 only if AllMethods is set.
|
||||
func (*U3) N() {}
|
||||
|
||||
type U4 struct {
|
||||
*u5
|
||||
}
|
||||
|
||||
// U4.M should appear as method of U4.
|
||||
func (*U4) M() {}
|
||||
|
||||
type u5 struct {
|
||||
*U4
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// A higher-level embedded type (and its methods) wins over the same type (and
|
||||
// its methods) embedded at a lower level.
|
||||
|
||||
type V1 struct {
|
||||
*V2
|
||||
*V5
|
||||
}
|
||||
|
||||
type V2 struct {
|
||||
*V3
|
||||
}
|
||||
|
||||
type V3 struct {
|
||||
*V4
|
||||
}
|
||||
|
||||
type V4 struct {
|
||||
*V5
|
||||
}
|
||||
|
||||
type V5 struct {
|
||||
*V6
|
||||
}
|
||||
|
||||
type V6 struct{}
|
||||
|
||||
// V4.M should appear as method of V2 and V3 if AllMethods is set.
|
||||
func (*V4) M() {}
|
||||
|
||||
// V6.M should appear as method of V1 and V5 if AllMethods is set.
|
||||
func (*V6) M() {}
|
|
@@ -0,0 +1,30 @@
//
PACKAGE error1

IMPORTPATH
testdata/error1

FILENAMES
testdata/error1.go

TYPES
//
type I0 interface {
// When embedded, the predeclared error interface
// must remain visible in interface types.
error
}

//
type S0 struct {
// contains filtered or unexported fields
}

//
type T0 struct {
ExportedField interface {
// error should be visible
error
}
}

@@ -0,0 +1,32 @@
//
PACKAGE error1

IMPORTPATH
testdata/error1

FILENAMES
testdata/error1.go

TYPES
//
type I0 interface {
// When embedded, the predeclared error interface
// must remain visible in interface types.
error
}

//
type S0 struct {
// In struct types, an embedded error must only be visible
// if AllDecls is set.
error
}

//
type T0 struct {
ExportedField interface {
// error should be visible
error
}
}

@@ -0,0 +1,30 @@
//
PACKAGE error1

IMPORTPATH
testdata/error1

FILENAMES
testdata/error1.go

TYPES
//
type I0 interface {
// When embedded, the predeclared error interface
// must remain visible in interface types.
error
}

//
type S0 struct {
// contains filtered or unexported fields
}

//
type T0 struct {
ExportedField interface {
// error should be visible
error
}
}

@@ -0,0 +1,24 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package error1

type I0 interface {
// When embedded, the predeclared error interface
// must remain visible in interface types.
error
}

type T0 struct {
ExportedField interface {
// error should be visible
error
}
}

type S0 struct {
// In struct types, an embedded error must only be visible
// if AllDecls is set.
error
}
@@ -0,0 +1,27 @@
//
PACKAGE error2

IMPORTPATH
testdata/error2

FILENAMES
testdata/error2.go

TYPES
//
type I0 interface {
// contains filtered or unexported methods
}

//
type S0 struct {
// contains filtered or unexported fields
}

//
type T0 struct {
ExportedField interface {
// contains filtered or unexported methods
}
}

@@ -0,0 +1,37 @@
//
PACKAGE error2

IMPORTPATH
testdata/error2

FILENAMES
testdata/error2.go

TYPES
//
type I0 interface {
// When embedded, the locally-declared error interface
// is only visible if all declarations are shown.
error
}

//
type S0 struct {
// In struct types, an embedded error must only be visible
// if AllDecls is set.
error
}

//
type T0 struct {
ExportedField interface {
// error should not be visible
error
}
}

// This error declaration shadows the predeclared error type.
type error interface {
Error() string
}

@@ -0,0 +1,27 @@
//
PACKAGE error2

IMPORTPATH
testdata/error2

FILENAMES
testdata/error2.go

TYPES
//
type I0 interface {
// contains filtered or unexported methods
}

//
type S0 struct {
// contains filtered or unexported fields
}

//
type T0 struct {
ExportedField interface {
// contains filtered or unexported methods
}
}

@@ -0,0 +1,29 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package error2

type I0 interface {
// When embedded, the locally-declared error interface
// is only visible if all declarations are shown.
error
}

type T0 struct {
ExportedField interface {
// error should not be visible
error
}
}

type S0 struct {
// In struct types, an embedded error must only be visible
// if AllDecls is set.
error
}

// This error declaration shadows the predeclared error type.
type error interface {
Error() string
}
@@ -0,0 +1,81 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package testing

import (
"bytes"
"fmt"
"io"
"os"
"strings"
"time"
)

type InternalExample struct {
Name string
F func()
Output string
}

func RunExamples(examples []InternalExample) (ok bool) {
ok = true

var eg InternalExample

stdout, stderr := os.Stdout, os.Stderr
defer func() {
os.Stdout, os.Stderr = stdout, stderr
if e := recover(); e != nil {
fmt.Printf("--- FAIL: %s\npanic: %v\n", eg.Name, e)
os.Exit(1)
}
}()

for _, eg = range examples {
if *chatty {
fmt.Printf("=== RUN: %s\n", eg.Name)
}

// capture stdout and stderr
r, w, err := os.Pipe()
if err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
os.Stdout, os.Stderr = w, w
outC := make(chan string)
go func() {
buf := new(bytes.Buffer)
_, err := io.Copy(buf, r)
if err != nil {
fmt.Fprintf(stderr, "testing: copying pipe: %v\n", err)
os.Exit(1)
}
outC <- buf.String()
}()

// run example
t0 := time.Now()
eg.F()
dt := time.Now().Sub(t0)

// close pipe, restore stdout/stderr, get output
w.Close()
os.Stdout, os.Stderr = stdout, stderr
out := <-outC

// report any errors
tstr := fmt.Sprintf("(%.2f seconds)", dt.Seconds())
if g, e := strings.TrimSpace(out), strings.TrimSpace(eg.Output); g != e {
fmt.Printf("--- FAIL: %s %s\ngot:\n%s\nwant:\n%s\n",
eg.Name, tstr, g, e)
ok = false
} else if *chatty {
fmt.Printf("--- PASS: %s %s\n", eg.Name, tstr)
}
}

return
}
@@ -0,0 +1,13 @@
// The package f is a go/doc test for functions and factory ...
PACKAGE f

IMPORTPATH
testdata/f

FILENAMES
testdata/f.go

FUNCTIONS
// Exported must always be visible. Was issue 2824.
func Exported() private

@@ -0,0 +1,16 @@
// The package f is a go/doc test for functions and factory ...
PACKAGE f

IMPORTPATH
testdata/f

FILENAMES
testdata/f.go

TYPES
//
type private struct{}

// Exported must always be visible. Was issue 2824.
func Exported() private

@@ -0,0 +1,13 @@
// The package f is a go/doc test for functions and factory ...
PACKAGE f

IMPORTPATH
testdata/f

FILENAMES
testdata/f.go

FUNCTIONS
// Exported must always be visible. Was issue 2824.
func Exported() private

@@ -0,0 +1,14 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// The package f is a go/doc test for functions and factory methods.
package f

// ----------------------------------------------------------------------------
// Factory functions for non-exported types must not get lost.

type private struct{}

// Exported must always be visible. Was issue 2824.
func Exported() private {}
@@ -0,0 +1,32 @@
// The package g is a go/doc test for mixed exported/unexported ...
PACKAGE g

IMPORTPATH
testdata/g

FILENAMES
testdata/g.go

CONSTANTS
//
const (
A, _ = iota, iota
_, D
E, _
G, H
)


VARIABLES
//
var (
_, C2, _ = 1, 2, 3
C4, _, C6 = 4, 5, 6
_, C8, _ = 7, 8, 9
)

//
var (
_, X = f()
)

@@ -0,0 +1,34 @@
// The package g is a go/doc test for mixed exported/unexported ...
PACKAGE g

IMPORTPATH
testdata/g

FILENAMES
testdata/g.go

CONSTANTS
//
const (
A, b = iota, iota
c, D
E, f
G, H
)


VARIABLES
//
var (
c1, C2, c3 = 1, 2, 3
C4, c5, C6 = 4, 5, 6
c7, C8, c9 = 7, 8, 9
xx, yy, zz = 0, 0, 0 // all unexported and hidden
)

//
var (
x, X = f()
y, z = f()
)

@@ -0,0 +1,32 @@
// The package g is a go/doc test for mixed exported/unexported ...
PACKAGE g

IMPORTPATH
testdata/g

FILENAMES
testdata/g.go

CONSTANTS
//
const (
A, _ = iota, iota
_, D
E, _
G, H
)


VARIABLES
//
var (
_, C2, _ = 1, 2, 3
C4, _, C6 = 4, 5, 6
_, C8, _ = 7, 8, 9
)

//
var (
_, X = f()
)

@@ -0,0 +1,25 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// The package g is a go/doc test for mixed exported/unexported values.
package g

const (
A, b = iota, iota
c, D
E, f
G, H
)

var (
c1, C2, c3 = 1, 2, 3
C4, c5, C6 = 4, 5, 6
c7, C8, c9 = 7, 8, 9
xx, yy, zz = 0, 0, 0 // all unexported and hidden
)

var (
x, X = f()
y, z = f()
)
@@ -0,0 +1,76 @@
|
|||
// Package generics contains the new syntax supporting generic ...
|
||||
PACKAGE generics
|
||||
|
||||
IMPORTPATH
|
||||
testdata/generics
|
||||
|
||||
FILENAMES
|
||||
testdata/generics.go
|
||||
|
||||
FUNCTIONS
|
||||
// AnotherFunc has an implicit constraint interface. Neither type ...
|
||||
func AnotherFunc[T ~struct{ f int }](_ struct{ f int })
|
||||
|
||||
// Func has an instantiated constraint.
|
||||
func Func[T Constraint[string, Type[int]]]()
|
||||
|
||||
// Single is not a factory function.
|
||||
func Single[T any]() *T
|
||||
|
||||
// Slice is not a factory function.
|
||||
func Slice[T any]() []T
|
||||
|
||||
|
||||
TYPES
|
||||
// AFuncType demonstrates filtering of parameters and type ...
|
||||
type AFuncType[T ~struct{ f int }] func(_ struct {
|
||||
// contains filtered or unexported fields
|
||||
})
|
||||
|
||||
// Constraint is a constraint interface with two type parameters.
|
||||
type Constraint[P, Q interface{ string | ~int | Type[int] }] interface {
|
||||
~int | ~byte | Type[string]
|
||||
M() P
|
||||
}
|
||||
|
||||
// NewEmbeddings demonstrates how we filter the new embedded ...
|
||||
type NewEmbeddings interface {
|
||||
string // should not be filtered
|
||||
|
||||
struct {
|
||||
// contains filtered or unexported fields
|
||||
}
|
||||
~struct {
|
||||
// contains filtered or unexported fields
|
||||
}
|
||||
*struct {
|
||||
// contains filtered or unexported fields
|
||||
}
|
||||
struct {
|
||||
// contains filtered or unexported fields
|
||||
} | ~struct {
|
||||
// contains filtered or unexported fields
|
||||
}
|
||||
// contains filtered or unexported methods
|
||||
}
|
||||
|
||||
// Parameterized types should be shown.
|
||||
type Type[P any] struct {
|
||||
Field P
|
||||
}
|
||||
|
||||
// Variables with an instantiated type should be shown.
|
||||
var X Type[int]
|
||||
|
||||
// Constructors for parameterized types should be shown.
|
||||
func Constructor[lowerCase any]() Type[lowerCase]
|
||||
|
||||
// MethodA uses a different name for its receiver type parameter.
|
||||
func (t Type[A]) MethodA(p A)
|
||||
|
||||
// MethodB has a blank receiver type parameter.
|
||||
func (t Type[_]) MethodB()
|
||||
|
||||
// MethodC has a lower-case receiver type parameter.
|
||||
func (t Type[c]) MethodC()
|
||||
|
|
@@ -0,0 +1,66 @@
|
|||
// Package generics contains the new syntax supporting generic ...
|
||||
PACKAGE generics
|
||||
|
||||
IMPORTPATH
|
||||
testdata/generics
|
||||
|
||||
FILENAMES
|
||||
testdata/generics.go
|
||||
|
||||
FUNCTIONS
|
||||
// AnotherFunc has an implicit constraint interface. Neither type ...
|
||||
func AnotherFunc[T ~struct{ f int }](_ struct{ f int })
|
||||
|
||||
// Func has an instantiated constraint.
|
||||
func Func[T Constraint[string, Type[int]]]()
|
||||
|
||||
// Single is not a factory function.
|
||||
func Single[T any]() *T
|
||||
|
||||
// Slice is not a factory function.
|
||||
func Slice[T any]() []T
|
||||
|
||||
|
||||
TYPES
|
||||
// AFuncType demonstrates filtering of parameters and type ...
|
||||
type AFuncType[T ~struct{ f int }] func(_ struct{ f int })
|
||||
|
||||
// Constraint is a constraint interface with two type parameters.
|
||||
type Constraint[P, Q interface{ string | ~int | Type[int] }] interface {
|
||||
~int | ~byte | Type[string]
|
||||
M() P
|
||||
}
|
||||
|
||||
// NewEmbeddings demonstrates how we filter the new embedded ...
|
||||
type NewEmbeddings interface {
|
||||
string // should not be filtered
|
||||
int16
|
||||
struct{ f int }
|
||||
~struct{ f int }
|
||||
*struct{ f int }
|
||||
struct{ f int } | ~struct{ f int }
|
||||
}
|
||||
|
||||
// Parameterized types should be shown.
|
||||
type Type[P any] struct {
|
||||
Field P
|
||||
}
|
||||
|
||||
// Variables with an instantiated type should be shown.
|
||||
var X Type[int]
|
||||
|
||||
// Constructors for parameterized types should be shown.
|
||||
func Constructor[lowerCase any]() Type[lowerCase]
|
||||
|
||||
// MethodA uses a different name for its receiver type parameter.
|
||||
func (t Type[A]) MethodA(p A)
|
||||
|
||||
// MethodB has a blank receiver type parameter.
|
||||
func (t Type[_]) MethodB()
|
||||
|
||||
// MethodC has a lower-case receiver type parameter.
|
||||
func (t Type[c]) MethodC()
|
||||
|
||||
// int16 shadows the predeclared type int16.
|
||||
type int16 int
|
||||
|
|
@@ -0,0 +1,76 @@
|
|||
// Package generics contains the new syntax supporting generic ...
|
||||
PACKAGE generics
|
||||
|
||||
IMPORTPATH
|
||||
testdata/generics
|
||||
|
||||
FILENAMES
|
||||
testdata/generics.go
|
||||
|
||||
FUNCTIONS
|
||||
// AnotherFunc has an implicit constraint interface. Neither type ...
|
||||
func AnotherFunc[T ~struct{ f int }](_ struct{ f int })
|
||||
|
||||
// Func has an instantiated constraint.
|
||||
func Func[T Constraint[string, Type[int]]]()
|
||||
|
||||
// Single is not a factory function.
|
||||
func Single[T any]() *T
|
||||
|
||||
// Slice is not a factory function.
|
||||
func Slice[T any]() []T
|
||||
|
||||
|
||||
TYPES
|
||||
// AFuncType demonstrates filtering of parameters and type ...
|
||||
type AFuncType[T ~struct{ f int }] func(_ struct {
|
||||
// contains filtered or unexported fields
|
||||
})
|
||||
|
||||
// Constraint is a constraint interface with two type parameters.
|
||||
type Constraint[P, Q interface{ string | ~int | Type[int] }] interface {
|
||||
~int | ~byte | Type[string]
|
||||
M() P
|
||||
}
|
||||
|
||||
// NewEmbeddings demonstrates how we filter the new embedded ...
|
||||
type NewEmbeddings interface {
|
||||
string // should not be filtered
|
||||
|
||||
struct {
|
||||
// contains filtered or unexported fields
|
||||
}
|
||||
~struct {
|
||||
// contains filtered or unexported fields
|
||||
}
|
||||
*struct {
|
||||
// contains filtered or unexported fields
|
||||
}
|
||||
struct {
|
||||
// contains filtered or unexported fields
|
||||
} | ~struct {
|
||||
// contains filtered or unexported fields
|
||||
}
|
||||
// contains filtered or unexported methods
|
||||
}
|
||||
|
||||
// Parameterized types should be shown.
|
||||
type Type[P any] struct {
|
||||
Field P
|
||||
}
|
||||
|
||||
// Variables with an instantiated type should be shown.
|
||||
var X Type[int]
|
||||
|
||||
// Constructors for parameterized types should be shown.
|
||||
func Constructor[lowerCase any]() Type[lowerCase]
|
||||
|
||||
// MethodA uses a different name for its receiver type parameter.
|
||||
func (t Type[A]) MethodA(p A)
|
||||
|
||||
// MethodB has a blank receiver type parameter.
|
||||
func (t Type[_]) MethodB()
|
||||
|
||||
// MethodC has a lower-case receiver type parameter.
|
||||
func (t Type[c]) MethodC()
|
||||
|
|
@@ -0,0 +1,74 @@
|
|||
// Copyright 2021 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package generics contains the new syntax supporting generic programming in
|
||||
// Go.
|
||||
package generics
|
||||
|
||||
// Variables with an instantiated type should be shown.
|
||||
var X Type[int]
|
||||
|
||||
// Parameterized types should be shown.
|
||||
type Type[P any] struct {
|
||||
Field P
|
||||
}
|
||||
|
||||
// Constructors for parameterized types should be shown.
|
||||
func Constructor[lowerCase any]() Type[lowerCase] {
|
||||
return Type[lowerCase]{}
|
||||
}
|
||||
|
||||
// MethodA uses a different name for its receiver type parameter.
|
||||
func (t Type[A]) MethodA(p A) {}
|
||||
|
||||
// MethodB has a blank receiver type parameter.
|
||||
func (t Type[_]) MethodB() {}
|
||||
|
||||
// MethodC has a lower-case receiver type parameter.
|
||||
func (t Type[c]) MethodC() {}
|
||||
|
||||
// Constraint is a constraint interface with two type parameters.
|
||||
type Constraint[P, Q interface{ string | ~int | Type[int] }] interface {
|
||||
~int | ~byte | Type[string]
|
||||
M() P
|
||||
}
|
||||
|
||||
// int16 shadows the predeclared type int16.
|
||||
type int16 int
|
||||
|
||||
// NewEmbeddings demonstrates how we filter the new embedded elements.
|
||||
type NewEmbeddings interface {
|
||||
string // should not be filtered
|
||||
int16
|
||||
struct{ f int }
|
||||
~struct{ f int }
|
||||
*struct{ f int }
|
||||
struct{ f int } | ~struct{ f int }
|
||||
}
|
||||
|
||||
// Func has an instantiated constraint.
|
||||
func Func[T Constraint[string, Type[int]]]() {}
|
||||
|
||||
// AnotherFunc has an implicit constraint interface.
|
||||
//
|
||||
// Neither type parameters nor regular parameters should be filtered.
|
||||
func AnotherFunc[T ~struct{ f int }](_ struct{ f int }) {}
|
||||
|
||||
// AFuncType demonstrates filtering of parameters and type parameters. Here we
|
||||
// don't filter type parameters (to be consistent with function declarations),
|
||||
// but DO filter the RHS.
|
||||
type AFuncType[T ~struct{ f int }] func(_ struct{ f int })
|
||||
|
||||
// See issue #49477: type parameters should not be interpreted as named types
|
||||
// for the purpose of determining whether a function is a factory function.
|
||||
|
||||
// Slice is not a factory function.
|
||||
func Slice[T any]() []T {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Single is not a factory function.
|
||||
func Single[T any]() *T {
|
||||
return nil
|
||||
}
|
|
@@ -0,0 +1,51 @@
|
|||
// Package issue12839 is a go/doc test to test association of a ...
|
||||
PACKAGE issue12839
|
||||
|
||||
IMPORTPATH
|
||||
testdata/issue12839
|
||||
|
||||
IMPORTS
|
||||
p
|
||||
|
||||
FILENAMES
|
||||
testdata/issue12839.go
|
||||
|
||||
FUNCTIONS
|
||||
// F1 should not be associated with T1
|
||||
func F1() (*T1, *T2)
|
||||
|
||||
// F10 should not be associated with T1.
|
||||
func F10() (T1, T2, error)
|
||||
|
||||
// F4 should not be associated with a type (same as F1)
|
||||
func F4() (a T1, b T2)
|
||||
|
||||
// F9 should not be associated with T1.
|
||||
func F9() (int, T1, T2)
|
||||
|
||||
|
||||
TYPES
|
||||
//
|
||||
type T1 struct{}
|
||||
|
||||
// F2 should be associated with T1
|
||||
func F2() (a, b, c T1)
|
||||
|
||||
// F3 should be associated with T1 because b.T3 is from a ...
|
||||
func F3() (a T1, b p.T3)
|
||||
|
||||
// F5 should be associated with T1.
|
||||
func F5() (T1, error)
|
||||
|
||||
// F6 should be associated with T1.
|
||||
func F6() (*T1, error)
|
||||
|
||||
// F7 should be associated with T1.
|
||||
func F7() (T1, string)
|
||||
|
||||
// F8 should be associated with T1.
|
||||
func F8() (int, T1, string)
|
||||
|
||||
//
|
||||
type T2 struct{}
|
||||
|
|
@@ -0,0 +1,54 @@
|
|||
// Package issue12839 is a go/doc test to test association of a ...
|
||||
PACKAGE issue12839
|
||||
|
||||
IMPORTPATH
|
||||
testdata/issue12839
|
||||
|
||||
IMPORTS
|
||||
p
|
||||
|
||||
FILENAMES
|
||||
testdata/issue12839.go
|
||||
|
||||
FUNCTIONS
|
||||
// F1 should not be associated with T1
|
||||
func F1() (*T1, *T2)
|
||||
|
||||
// F10 should not be associated with T1.
|
||||
func F10() (T1, T2, error)
|
||||
|
||||
// F4 should not be associated with a type (same as F1)
|
||||
func F4() (a T1, b T2)
|
||||
|
||||
// F9 should not be associated with T1.
|
||||
func F9() (int, T1, T2)
|
||||
|
||||
|
||||
TYPES
|
||||
//
|
||||
type T1 struct{}
|
||||
|
||||
// F2 should be associated with T1
|
||||
func F2() (a, b, c T1)
|
||||
|
||||
// F3 should be associated with T1 because b.T3 is from a ...
|
||||
func F3() (a T1, b p.T3)
|
||||
|
||||
// F5 should be associated with T1.
|
||||
func F5() (T1, error)
|
||||
|
||||
// F6 should be associated with T1.
|
||||
func F6() (*T1, error)
|
||||
|
||||
// F7 should be associated with T1.
|
||||
func F7() (T1, string)
|
||||
|
||||
// F8 should be associated with T1.
|
||||
func F8() (int, T1, string)
|
||||
|
||||
//
|
||||
func (t T1) hello() string
|
||||
|
||||
//
|
||||
type T2 struct{}
|
||||
|
|
@@ -0,0 +1,51 @@
|
|||
// Package issue12839 is a go/doc test to test association of a ...
|
||||
PACKAGE issue12839
|
||||
|
||||
IMPORTPATH
|
||||
testdata/issue12839
|
||||
|
||||
IMPORTS
|
||||
p
|
||||
|
||||
FILENAMES
|
||||
testdata/issue12839.go
|
||||
|
||||
FUNCTIONS
|
||||
// F1 should not be associated with T1
|
||||
func F1() (*T1, *T2)
|
||||
|
||||
// F10 should not be associated with T1.
|
||||
func F10() (T1, T2, error)
|
||||
|
||||
// F4 should not be associated with a type (same as F1)
|
||||
func F4() (a T1, b T2)
|
||||
|
||||
// F9 should not be associated with T1.
|
||||
func F9() (int, T1, T2)
|
||||
|
||||
|
||||
TYPES
|
||||
//
|
||||
type T1 struct{}
|
||||
|
||||
// F2 should be associated with T1
|
||||
func F2() (a, b, c T1)
|
||||
|
||||
// F3 should be associated with T1 because b.T3 is from a ...
|
||||
func F3() (a T1, b p.T3)
|
||||
|
||||
// F5 should be associated with T1.
|
||||
func F5() (T1, error)
|
||||
|
||||
// F6 should be associated with T1.
|
||||
func F6() (*T1, error)
|
||||
|
||||
// F7 should be associated with T1.
|
||||
func F7() (T1, string)
|
||||
|
||||
// F8 should be associated with T1.
|
||||
func F8() (int, T1, string)
|
||||
|
||||
//
|
||||
type T2 struct{}
|
||||
|
|
@@ -0,0 +1,69 @@
|
|||
// Copyright 2018 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package issue12839 is a go/doc test to test association of a function
|
||||
// that returns multiple types.
|
||||
// See golang.org/issue/12839.
|
||||
// (See also golang.org/issue/27928.)
|
||||
package issue12839
|
||||
|
||||
import "p"
|
||||
|
||||
type T1 struct{}
|
||||
|
||||
type T2 struct{}
|
||||
|
||||
func (t T1) hello() string {
|
||||
return "hello"
|
||||
}
|
||||
|
||||
// F1 should not be associated with T1
|
||||
func F1() (*T1, *T2) {
|
||||
return &T1{}, &T2{}
|
||||
}
|
||||
|
||||
// F2 should be associated with T1
|
||||
func F2() (a, b, c T1) {
|
||||
return T1{}, T1{}, T1{}
|
||||
}
|
||||
|
||||
// F3 should be associated with T1 because b.T3 is from a different package
|
||||
func F3() (a T1, b p.T3) {
|
||||
return T1{}, p.T3{}
|
||||
}
|
||||
|
||||
// F4 should not be associated with a type (same as F1)
|
||||
func F4() (a T1, b T2) {
|
||||
return T1{}, T2{}
|
||||
}
|
||||
|
||||
// F5 should be associated with T1.
|
||||
func F5() (T1, error) {
|
||||
return T1{}, nil
|
||||
}
|
||||
|
||||
// F6 should be associated with T1.
|
||||
func F6() (*T1, error) {
|
||||
return &T1{}, nil
|
||||
}
|
||||
|
||||
// F7 should be associated with T1.
|
||||
func F7() (T1, string) {
|
||||
return T1{}, nil
|
||||
}
|
||||
|
||||
// F8 should be associated with T1.
|
||||
func F8() (int, T1, string) {
|
||||
return 0, T1{}, nil
|
||||
}
|
||||
|
||||
// F9 should not be associated with T1.
|
||||
func F9() (int, T1, T2) {
|
||||
return 0, T1{}, T2{}
|
||||
}
|
||||
|
||||
// F10 should not be associated with T1.
|
||||
func F10() (T1, T2, error) {
|
||||
return T1{}, T2{}, nil
|
||||
}
|
|
@@ -0,0 +1,25 @@
//
PACKAGE issue13742

IMPORTPATH
testdata/issue13742

IMPORTS
go/ast

FILENAMES
testdata/issue13742.go

FUNCTIONS
// Both F0 and G0 should appear as functions.
func F0(Node)

// Both F1 and G1 should appear as functions.
func F1(ast.Node)

//
func G0() Node

//
func G1() ast.Node

@@ -0,0 +1,25 @@
//
PACKAGE issue13742

IMPORTPATH
testdata/issue13742

IMPORTS
go/ast

FILENAMES
testdata/issue13742.go

FUNCTIONS
// Both F0 and G0 should appear as functions.
func F0(Node)

// Both F1 and G1 should appear as functions.
func F1(ast.Node)

//
func G0() Node

//
func G1() ast.Node

@@ -0,0 +1,25 @@
//
PACKAGE issue13742

IMPORTPATH
testdata/issue13742

IMPORTS
go/ast

FILENAMES
testdata/issue13742.go

FUNCTIONS
// Both F0 and G0 should appear as functions.
func F0(Node)

// Both F1 and G1 should appear as functions.
func F1(ast.Node)

//
func G0() Node

//
func G1() ast.Node

@@ -0,0 +1,18 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package issue13742

import (
"go/ast"
. "go/ast"
)

// Both F0 and G0 should appear as functions.
func F0(Node) {}
func G0() Node { return nil }

// Both F1 and G1 should appear as functions.
func F1(ast.Node) {}
func G1() ast.Node { return nil }
@@ -0,0 +1,32 @@
//
PACKAGE issue16153

IMPORTPATH
testdata/issue16153

FILENAMES
testdata/issue16153.go

CONSTANTS
//
const (
X3 int64 = iota
Y3 = 1
)

//
const (
X4 int64 = iota
Y4
)

// original test case
const (
Y1 = 256
)

// variations
const (
Y2 uint8
)

@@ -0,0 +1,34 @@
//
PACKAGE issue16153

IMPORTPATH
testdata/issue16153

FILENAMES
testdata/issue16153.go

CONSTANTS
// original test case
const (
x1 uint8 = 255
Y1 = 256
)

// variations
const (
x2 uint8 = 255
Y2
)

//
const (
X3 int64 = iota
Y3 = 1
)

//
const (
X4 int64 = iota
Y4
)

@@ -0,0 +1,32 @@
//
PACKAGE issue16153

IMPORTPATH
testdata/issue16153

FILENAMES
testdata/issue16153.go

CONSTANTS
//
const (
X3 int64 = iota
Y3 = 1
)

//
const (
X4 int64 = iota
Y4
)

// original test case
const (
Y1 = 256
)

// variations
const (
Y2 uint8
)

@@ -0,0 +1,27 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package issue16153

// original test case
const (
x1 uint8 = 255
Y1 = 256
)

// variations
const (
x2 uint8 = 255
Y2
)

const (
X3 int64 = iota
Y3 = 1
)

const (
X4 int64 = iota
Y4
)