Mirror of https://github.com/golang/gddo.git

use github.com/garyburd/gosrc, eliminate secrets.json.

This commit is contained in:
Parent: c24cdf0ece
Commit: 7718e36599

@@ -26,23 +26,18 @@ Development Environment Setup
- Install and run [Redis 2.6.x](http://redis.io/download). The redis.conf file included in the Redis distribution is suitable for development.
- Install Go from source and update to tip.
- Create a `secrets.json` file with this template:

        {
            "UserAgent": "",
            "GitHubId": "",
            "GitHubSecret": "",
            "GAAccount": ""
        }

- Install and run the server:

        go get github.com/garyburd/gddo/gddo-server
        gddo-server
        $ go get github.com/garyburd/gddo/gddo-server
        $ gddo-server

- Go to http://localhost:8080/ in your browser
- Enter an import path to have the server retrieve & display a package's documentation

Optional:

- Create the file gddo-server/config.go using the template in gddo-server/config.go.example.

License
-------
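As a setup aid (not part of this commit), here is a minimal sketch of what a gddo-server/config.go might contain. It only mirrors the fields that previously lived in secrets.json; the variable names below are hypothetical and are not taken from config.go.example, which remains the authoritative template.

    package main

    // Hypothetical sketch: these names simply mirror the removed secrets.json
    // fields and may not match gddo-server/config.go.example.
    func init() {
        userAgent = "gddo-server (your-contact-url-or-email)"
        gitHubCredentials = "client_id=YOUR_ID&client_secret=YOUR_SECRET"
        gaAccount = "UA-XXXXXXXX-X"
    }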
@ -53,6 +53,7 @@ import (
|
|||
|
||||
"code.google.com/p/snappy-go/snappy"
|
||||
"github.com/garyburd/gddo/doc"
|
||||
"github.com/garyburd/gosrc"
|
||||
"github.com/garyburd/redigo/redis"
|
||||
)
|
||||
|
||||
|
@@ -252,17 +253,17 @@ func (db *Database) Put(pdoc *doc.Package, nextCrawl time.Time) error {

    paths := make(map[string]bool)
    for _, p := range pdoc.Imports {
        if doc.IsValidRemotePath(p) {
        if gosrc.IsValidRemotePath(p) {
            paths[p] = true
        }
    }
    for _, p := range pdoc.TestImports {
        if doc.IsValidRemotePath(p) {
        if gosrc.IsValidRemotePath(p) {
            paths[p] = true
        }
    }
    for _, p := range pdoc.XTestImports {
        if doc.IsValidRemotePath(p) {
        if gosrc.IsValidRemotePath(p) {
            paths[p] = true
        }
    }
@@ -318,7 +319,7 @@ var bumpCrawlScript = redis.NewScript(0, `

    t = tonumber(redis.call('ZSCORE', 'nextCrawl', pkgs[i]) or 0)
    if t == 0 or nextCrawl < t then
        redis.call('ZADD', 'nextCrawl', nextCrawl, pkgs[i])
        nextCrawl = nextCrawl + 30
        nextCrawl = nextCrawl + 120
    end
end
`)
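For context (not part of this commit), the sketch below shows how a redigo server-side script like bumpCrawlScript above is typically created and invoked; the key names and argument values are illustrative only.

    package main

    import (
        "fmt"

        "github.com/garyburd/redigo/redis"
    )

    // Illustrative only: register a Lua script with redigo and run it.
    var bumpScript = redis.NewScript(0, `
        local nextCrawl = tonumber(ARGV[1])
        redis.call('ZADD', 'nextCrawl', nextCrawl, ARGV[2])
        return nextCrawl
    `)

    func main() {
        c, err := redis.Dial("tcp", ":6379")
        if err != nil {
            fmt.Println(err)
            return
        }
        defer c.Close()

        // Script.Do sends EVALSHA and falls back to EVAL when the script
        // is not yet cached on the server.
        reply, err := bumpScript.Do(c, 1372636800, "github.com/garyburd/gosrc")
        fmt.Println(reply, err)
    }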
@@ -21,6 +21,7 @@ import (

    "unicode"

    "github.com/garyburd/gddo/doc"
    "github.com/garyburd/gosrc"
)

func isStandardPackage(path string) bool {
@@ -56,7 +57,7 @@ func documentTerms(pdoc *doc.Package, score float64) []string {

    // Imports

    for _, path := range pdoc.Imports {
        if doc.IsValidPath(path) {
        if gosrc.IsValidPath(path) {
            terms["import:"+path] = true
        }
    }
doc/bitbucket.go (108 lines changed)

@@ -1,108 +0,0 @@
// Copyright 2011 Gary Burd
//
// Licensed under the Apache License, Version 2.0 (the "License"): you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.

package doc

import (
    "net/http"
    "path"
    "regexp"
)

var (
    bitbucketPattern = regexp.MustCompile(`^bitbucket\.org/(?P<owner>[a-z0-9A-Z_.\-]+)/(?P<repo>[a-z0-9A-Z_.\-]+)(?P<dir>/[a-z0-9A-Z_.\-/]*)?$`)
    bitbucketEtagRe  = regexp.MustCompile(`^(hg|git)-`)
)

func getBitbucketDoc(client *http.Client, match map[string]string, savedEtag string) (*Package, error) {

    if m := bitbucketEtagRe.FindStringSubmatch(savedEtag); m != nil {
        match["vcs"] = m[1]
    } else {
        var repo struct {
            Scm string
        }
        if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}", match), nil, &repo); err != nil {
            return nil, err
        }
        match["vcs"] = repo.Scm
    }

    tags := make(map[string]string)
    for _, nodeType := range []string{"branches", "tags"} {
        var nodes map[string]struct {
            Node string
        }
        if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/{0}", match, nodeType), nil, &nodes); err != nil {
            return nil, err
        }
        for t, n := range nodes {
            tags[t] = n.Node
        }
    }

    var err error
    match["tag"], match["commit"], err = bestTag(tags, defaultTags[match["vcs"]])
    if err != nil {
        return nil, err
    }

    etag := expand("{vcs}-{commit}", match)
    if etag == savedEtag {
        return nil, ErrNotModified
    }

    var contents struct {
        Directories []string
        Files       []struct {
            Path string
        }
    }

    if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/src/{tag}{dir}/", match), nil, &contents); err != nil {
        return nil, err
    }

    var files []*source
    for _, f := range contents.Files {
        _, name := path.Split(f.Path)
        if isDocFile(name) {
            files = append(files, &source{
                name:      name,
                browseURL: expand("https://bitbucket.org/{owner}/{repo}/src/{tag}/{0}", match, f.Path),
                rawURL:    expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/raw/{tag}/{0}", match, f.Path),
            })
        }
    }

    if err := fetchFiles(client, files, nil); err != nil {
        return nil, err
    }

    b := builder{
        pdoc: &Package{
            LineFmt:        "%s#cl-%d",
            ImportPath:     match["originalImportPath"],
            ProjectRoot:    expand("bitbucket.org/{owner}/{repo}", match),
            ProjectName:    match["repo"],
            ProjectURL:     expand("https://bitbucket.org/{owner}/{repo}/", match),
            BrowseURL:      expand("https://bitbucket.org/{owner}/{repo}/src/{tag}{dir}", match),
            Etag:           etag,
            VCS:            match["vcs"],
            Subdirectories: contents.Directories,
        },
    }

    return b.build(files)
}
doc/builder.go (148 lines changed)
@@ -23,16 +23,14 @@ import (

    "go/format"
    "go/parser"
    "go/token"
    "io"
    "io/ioutil"
    "os"
    "path"
    "regexp"
    "sort"
    "strings"
    "time"
    "unicode"
    "unicode/utf8"

    "github.com/garyburd/gosrc"
)

func startsWithUppercase(s string) bool {
@@ -132,7 +130,7 @@ func addReferences(references map[string]bool, s []byte) {

    for _, pat := range referencesPats {
        for _, m := range pat.FindAllSubmatch(s, -1) {
            p := string(m[1])
            if IsValidRemotePath(p) {
            if gosrc.IsValidRemotePath(p) {
                references[p] = true
            }
        }
@@ -155,8 +153,6 @@ func removeAssociations(dpkg *doc.Package) {

// builder holds the state used when building the documentation.
type builder struct {
    pdoc *Package

    srcs     map[string]*source
    fset     *token.FileSet
    examples []*doc.Example
@@ -374,36 +370,10 @@ type Pos struct {

type source struct {
    name      string
    browseURL string
    rawURL    string
    data      []byte
    index     int
}

func (s *source) Name() string       { return s.name }
func (s *source) Size() int64        { return int64(len(s.data)) }
func (s *source) Mode() os.FileMode  { return 0 }
func (s *source) ModTime() time.Time { return time.Time{} }
func (s *source) IsDir() bool        { return false }
func (s *source) Sys() interface{}   { return nil }

func (b *builder) readDir(dir string) ([]os.FileInfo, error) {
    if dir != "/" {
        panic("unexpected")
    }
    fis := make([]os.FileInfo, 0, len(b.srcs))
    for _, src := range b.srcs {
        fis = append(fis, src)
    }
    return fis, nil
}

func (b *builder) openFile(path string) (io.ReadCloser, error) {
    if src, ok := b.srcs[path[1:]]; ok {
        return ioutil.NopCloser(bytes.NewReader(src.data)), nil
    }
    panic("unexpected")
}

// PackageVersion is modified when previously stored packages are invalid.
const PackageVersion = "6"
@@ -490,27 +460,39 @@ var goEnvs = []struct{ GOOS, GOARCH string }{

    {"windows", "amd64"},
}

func (b *builder) build(srcs []*source) (*Package, error) {
func newPackage(dir *gosrc.Directory) (*Package, error) {

    b.pdoc.Updated = time.Now().UTC()
    pkg := &Package{
        Updated:        time.Now().UTC(),
        LineFmt:        dir.LineFmt,
        ImportPath:     dir.ImportPath,
        ProjectRoot:    dir.ProjectRoot,
        ProjectName:    dir.ProjectName,
        ProjectURL:     dir.ProjectURL,
        BrowseURL:      dir.BrowseURL,
        Etag:           PackageVersion + "-" + dir.Etag,
        VCS:            dir.VCS,
        Subdirectories: dir.Subdirectories,
    }

    references := make(map[string]bool)
    var b builder
    b.srcs = make(map[string]*source)
    for _, src := range srcs {
        if strings.HasSuffix(src.name, ".go") {
            b.srcs[src.name] = src
            OverwriteLineComments(src.data)
    references := make(map[string]bool)
    for _, file := range dir.Files {
        if strings.HasSuffix(file.Name, ".go") {
            gosrc.OverwriteLineComments(file.Data)
            b.srcs[file.Name] = &source{name: file.Name, browseURL: file.BrowseURL, data: file.Data}
        } else {
            addReferences(references, src.data)
            addReferences(references, file.Data)
        }
    }

    for r := range references {
        b.pdoc.References = append(b.pdoc.References, r)
        pkg.References = append(pkg.References, r)
    }

    if len(b.srcs) == 0 {
        return b.pdoc, nil
        return pkg, nil
    }

    b.fset = token.NewFileSet()
@@ -518,18 +500,12 @@ func (b *builder) build(srcs []*source) (*Package, error) {

    // Find the package and associated files.

    ctxt := build.Context{
        GOOS:          "linux",
        GOARCH:        "amd64",
        CgoEnabled:    true,
        ReleaseTags:   build.Default.ReleaseTags,
        JoinPath:      path.Join,
        IsAbsPath:     path.IsAbs,
        SplitPathList: func(list string) []string { return strings.Split(list, ":") },
        IsDir:         func(path string) bool { panic("unexpected") },
        HasSubdir:     func(root, dir string) (rel string, ok bool) { panic("unexpected") },
        ReadDir:       func(dir string) (fi []os.FileInfo, err error) { return b.readDir(dir) },
        OpenFile:      func(path string) (r io.ReadCloser, err error) { return b.openFile(path) },
        Compiler:      "gc",
        GOOS:        "linux",
        GOARCH:      "amd64",
        CgoEnabled:  true,
        ReleaseTags: build.Default.ReleaseTags,
        BuildTags:   build.Default.BuildTags,
        Compiler:    "gc",
    }

    var err error
@@ -538,16 +514,16 @@ func (b *builder) build(srcs []*source) (*Package, error) {

    for _, env := range goEnvs {
        ctxt.GOOS = env.GOOS
        ctxt.GOARCH = env.GOARCH
        bpkg, err = ctxt.ImportDir("/", 0)
        bpkg, err = dir.Import(&ctxt, 0)
        if _, ok := err.(*build.NoGoError); !ok {
            break
        }
    }
    if err != nil {
        if _, ok := err.(*build.NoGoError); !ok {
            b.pdoc.Errors = append(b.pdoc.Errors, err.Error())
            pkg.Errors = append(pkg.Errors, err.Error())
        }
        return b.pdoc, nil
        return pkg, nil
    }

    // Parse the Go files
@@ -555,18 +531,18 @@ func (b *builder) build(srcs []*source) (*Package, error) {

    files := make(map[string]*ast.File)
    names := append(bpkg.GoFiles, bpkg.CgoFiles...)
    sort.Strings(names)
    b.pdoc.Files = make([]*File, len(names))
    pkg.Files = make([]*File, len(names))
    for i, name := range names {
        file, err := parser.ParseFile(b.fset, name, b.srcs[name].data, parser.ParseComments)
        if err != nil {
            b.pdoc.Errors = append(b.pdoc.Errors, err.Error())
            pkg.Errors = append(pkg.Errors, err.Error())
        } else {
            files[name] = file
        }
        src := b.srcs[name]
        src.index = i
        b.pdoc.Files[i] = &File{Name: name, URL: src.browseURL}
        b.pdoc.SourceSize += len(src.data)
        pkg.Files[i] = &File{Name: name, URL: src.browseURL}
        pkg.SourceSize += len(src.data)
    }

    apkg, _ := ast.NewPackage(b.fset, files, simpleImporter, nil)
@@ -575,49 +551,49 @@ func (b *builder) build(srcs []*source) (*Package, error) {

    names = append(bpkg.TestGoFiles, bpkg.XTestGoFiles...)
    sort.Strings(names)
    b.pdoc.TestFiles = make([]*File, len(names))
    pkg.TestFiles = make([]*File, len(names))
    for i, name := range names {
        file, err := parser.ParseFile(b.fset, name, b.srcs[name].data, parser.ParseComments)
        if err != nil {
            b.pdoc.Errors = append(b.pdoc.Errors, err.Error())
            pkg.Errors = append(pkg.Errors, err.Error())
        } else {
            b.examples = append(b.examples, doc.Examples(file)...)
        }
        b.pdoc.TestFiles[i] = &File{Name: name, URL: b.srcs[name].browseURL}
        b.pdoc.TestSourceSize += len(b.srcs[name].data)
        pkg.TestFiles[i] = &File{Name: name, URL: b.srcs[name].browseURL}
        pkg.TestSourceSize += len(b.srcs[name].data)
    }

    b.vetPackage(apkg)
    b.vetPackage(pkg, apkg)

    mode := doc.Mode(0)
    if b.pdoc.ImportPath == "builtin" {
    if pkg.ImportPath == "builtin" {
        mode |= doc.AllDecls
    }

    dpkg := doc.New(apkg, b.pdoc.ImportPath, mode)
    dpkg := doc.New(apkg, pkg.ImportPath, mode)

    if b.pdoc.ImportPath == "builtin" {
    if pkg.ImportPath == "builtin" {
        removeAssociations(dpkg)
    }

    b.pdoc.Name = dpkg.Name
    b.pdoc.Doc = strings.TrimRight(dpkg.Doc, " \t\n\r")
    b.pdoc.Synopsis = synopsis(b.pdoc.Doc)
    pkg.Name = dpkg.Name
    pkg.Doc = strings.TrimRight(dpkg.Doc, " \t\n\r")
    pkg.Synopsis = synopsis(pkg.Doc)

    b.pdoc.Examples = b.getExamples("")
    b.pdoc.IsCmd = bpkg.IsCommand()
    b.pdoc.GOOS = ctxt.GOOS
    b.pdoc.GOARCH = ctxt.GOARCH
    pkg.Examples = b.getExamples("")
    pkg.IsCmd = bpkg.IsCommand()
    pkg.GOOS = ctxt.GOOS
    pkg.GOARCH = ctxt.GOARCH

    b.pdoc.Consts = b.values(dpkg.Consts)
    b.pdoc.Funcs = b.funcs(dpkg.Funcs)
    b.pdoc.Types = b.types(dpkg.Types)
    b.pdoc.Vars = b.values(dpkg.Vars)
    b.pdoc.Notes = b.notes(dpkg.Notes)
    pkg.Consts = b.values(dpkg.Consts)
    pkg.Funcs = b.funcs(dpkg.Funcs)
    pkg.Types = b.types(dpkg.Types)
    pkg.Vars = b.values(dpkg.Vars)
    pkg.Notes = b.notes(dpkg.Notes)

    b.pdoc.Imports = bpkg.Imports
    b.pdoc.TestImports = bpkg.TestImports
    b.pdoc.XTestImports = bpkg.XTestImports
    pkg.Imports = bpkg.Imports
    pkg.TestImports = bpkg.TestImports
    pkg.XTestImports = bpkg.XTestImports

    return b.pdoc, nil
    return pkg, nil
}
doc/get.go (268 lines changed)
@@ -16,219 +16,11 @@

package doc

import (
    "encoding/xml"
    "errors"
    "fmt"
    "io"
    "github.com/garyburd/gosrc"
    "net/http"
    "path"
    "regexp"
    "strings"
)

type NotFoundError struct {
    Message string
}

func (e NotFoundError) Error() string {
    return e.Message
}

func IsNotFound(err error) bool {
    _, ok := err.(NotFoundError)
    return ok
}

type RemoteError struct {
    Host string
    err  error
}

func (e *RemoteError) Error() string {
    return e.err.Error()
}

var (
    ErrNotModified = errors.New("package not modified")
    errNoMatch     = errors.New("no match")
)

// service represents a source code control service.
type service struct {
    pattern         *regexp.Regexp
    prefix          string
    get             func(*http.Client, map[string]string, string) (*Package, error)
    getPresentation func(*http.Client, map[string]string) (*Presentation, error)
}

// services is the list of source code control services handled by gopkgdoc.
var services = []*service{
    {gitHubPattern, "github.com/", getGitHubDoc, getGitHubPresentation},
    {googlePattern, "code.google.com/", getGoogleDoc, getGooglePresentation},
    {bitbucketPattern, "bitbucket.org/", getBitbucketDoc, nil},
    {launchpadPattern, "launchpad.net/", getLaunchpadDoc, nil},
    {vcsPattern, "", getVCSDoc, nil},
}

func attrValue(attrs []xml.Attr, name string) string {
    for _, a := range attrs {
        if strings.EqualFold(a.Name.Local, name) {
            return a.Value
        }
    }
    return ""
}

func fetchMeta(client *http.Client, importPath string) (map[string]string, error) {
    uri := importPath
    if !strings.Contains(uri, "/") {
        // Add slash for root of domain.
        uri = uri + "/"
    }
    uri = uri + "?go-get=1"

    scheme := "https"
    resp, err := client.Get(scheme + "://" + uri)
    if err != nil || resp.StatusCode != 200 {
        if err == nil {
            resp.Body.Close()
        }
        scheme = "http"
        resp, err = client.Get(scheme + "://" + uri)
        if err != nil {
            return nil, &RemoteError{strings.SplitN(importPath, "/", 2)[0], err}
        }
    }
    defer resp.Body.Close()
    return parseMeta(scheme, importPath, resp.Body)
}

func parseMeta(scheme, importPath string, r io.Reader) (map[string]string, error) {
    var match map[string]string

    d := xml.NewDecoder(r)
    d.Strict = false
metaScan:
    for {
        t, tokenErr := d.Token()
        if tokenErr != nil {
            break metaScan
        }
        switch t := t.(type) {
        case xml.EndElement:
            if strings.EqualFold(t.Name.Local, "head") {
                break metaScan
            }
        case xml.StartElement:
            if strings.EqualFold(t.Name.Local, "body") {
                break metaScan
            }
            if !strings.EqualFold(t.Name.Local, "meta") ||
                attrValue(t.Attr, "name") != "go-import" {
                continue metaScan
            }
            f := strings.Fields(attrValue(t.Attr, "content"))
            if len(f) != 3 ||
                !strings.HasPrefix(importPath, f[0]) ||
                !(len(importPath) == len(f[0]) || importPath[len(f[0])] == '/') {
                continue metaScan
            }
            if match != nil {
                return nil, NotFoundError{"More than one <meta> found at " + scheme + "://" + importPath}
            }

            projectRoot, vcs, repo := f[0], f[1], f[2]

            repo = strings.TrimSuffix(repo, "."+vcs)
            i := strings.Index(repo, "://")
            if i < 0 {
                return nil, NotFoundError{"Bad repo URL in <meta>."}
            }
            proto := repo[:i]
            repo = repo[i+len("://"):]

            match = map[string]string{
                // Used in getVCSDoc, same as vcsPattern matches.
                "importPath": importPath,
                "repo":       repo,
                "vcs":        vcs,
                "dir":        importPath[len(projectRoot):],

                // Used in getVCSDoc
                "scheme": proto,

                // Used in getDynamic.
                "projectRoot": projectRoot,
                "projectName": path.Base(projectRoot),
                "projectURL":  scheme + "://" + projectRoot,
            }
        }
    }
    if match == nil {
        return nil, NotFoundError{"<meta> not found."}
    }
    return match, nil
}
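For context, the <meta> tag that parseMeta scans for (and that gosrc now handles instead) follows the standard go-get discovery convention. A representative tag for a hypothetical import path looks like:

    <meta name="go-import" content="example.org/myrepo git https://example.org/git/myrepo">

The three fields are the import path prefix, the VCS, and the repository root; as the code above shows, pages whose content does not have exactly three fields, or that carry more than one matching tag, are rejected.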
// getDynamic gets a document from a service that is not statically known.
func getDynamic(client *http.Client, importPath, etag string) (*Package, error) {
    match, err := fetchMeta(client, importPath)
    if err != nil {
        return nil, err
    }

    if match["projectRoot"] != importPath {
        rootMatch, err := fetchMeta(client, match["projectRoot"])
        if err != nil {
            return nil, err
        }
        if rootMatch["projectRoot"] != match["projectRoot"] {
            return nil, NotFoundError{"Project root mismatch."}
        }
    }

    pdoc, err := getStatic(client, expand("{repo}{dir}", match), importPath, etag)
    if err == errNoMatch {
        pdoc, err = getVCSDoc(client, match, etag)
    }
    if err != nil {
        return nil, err
    }

    if pdoc != nil {
        pdoc.ProjectRoot = match["projectRoot"]
        pdoc.ProjectName = match["projectName"]
        pdoc.ProjectURL = match["projectURL"]
    }

    return pdoc, err
}

// getStatic gets a document from a statically known service. getStatic
// returns errNoMatch if the import path is not recognized.
func getStatic(client *http.Client, importPath, originalImportPath, etag string) (*Package, error) {
    for _, s := range services {
        if s.get == nil || !strings.HasPrefix(importPath, s.prefix) {
            continue
        }
        m := s.pattern.FindStringSubmatch(importPath)
        if m == nil {
            if s.prefix != "" {
                return nil, NotFoundError{"Import path prefix matches known service, but regexp does not."}
            }
            continue
        }
        match := map[string]string{"importPath": importPath, "originalImportPath": originalImportPath}
        for i, n := range s.pattern.SubexpNames() {
            if n != "" {
                match[n] = m[i]
            }
        }
        return s.get(client, match, etag)
    }
    return nil, errNoMatch
}

func Get(client *http.Client, importPath string, etag string) (pdoc *Package, err error) {

    const versionPrefix = PackageVersion + "-"
@@ -239,59 +31,9 @@ func Get(client *http.Client, importPath string, etag string) (pdoc *Package, err error) {

        etag = ""
    }

    switch {
    case IsGoRepoPath(importPath):
        pdoc, err = getStandardDoc(client, importPath, etag)
    case IsValidRemotePath(importPath):
        pdoc, err = getStatic(client, importPath, importPath, etag)
        if err == errNoMatch {
            pdoc, err = getDynamic(client, importPath, etag)
        }
    default:
        err = errNoMatch
    dir, err := gosrc.Get(client, importPath, etag)
    if err != nil {
        return nil, err
    }

    if err == errNoMatch {
        err = NotFoundError{"Import path not valid:"}
    }

    if pdoc != nil {
        pdoc.Etag = versionPrefix + pdoc.Etag
        if pdoc.ImportPath != importPath {
            return nil, fmt.Errorf("get: pdoc.ImportPath = %q, want %q", pdoc.ImportPath, importPath)
        }
    }

    return pdoc, err
}

// GetPresentation gets a presentation from the given path.
func GetPresentation(client *http.Client, importPath string) (*Presentation, error) {
    ext := path.Ext(importPath)
    if ext != ".slide" && ext != ".article" {
        return nil, NotFoundError{"unknown file extension."}
    }

    importPath, file := path.Split(importPath)
    importPath = strings.TrimSuffix(importPath, "/")
    for _, s := range services {
        if s.getPresentation == nil || !strings.HasPrefix(importPath, s.prefix) {
            continue
        }
        m := s.pattern.FindStringSubmatch(importPath)
        if m == nil {
            if s.prefix != "" {
                return nil, NotFoundError{"path prefix matches known service, but regexp does not."}
            }
            continue
        }
        match := map[string]string{"importPath": importPath, "file": file}
        for i, n := range s.pattern.SubexpNames() {
            if n != "" {
                match[n] = m[i]
            }
        }
        return s.getPresentation(client, match)
    }
    return nil, errNoMatch
    return newPackage(dir)
}
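A rough usage sketch of the slimmed-down Get after this change (the import path and the empty etag below are illustrative only, not part of the commit):

    package main

    import (
        "fmt"
        "net/http"

        "github.com/garyburd/gddo/doc"
    )

    func main() {
        // An empty etag forces a full fetch; passing a previously returned
        // pdoc.Etag lets the underlying gosrc fetch report "not modified".
        pdoc, err := doc.Get(http.DefaultClient, "github.com/garyburd/redigo/redis", "")
        if err != nil {
            fmt.Println("get:", err)
            return
        }
        fmt.Println(pdoc.Name, pdoc.Synopsis)
    }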
doc/github.go (220 lines changed)
@@ -1,220 +0,0 @@
|
|||
// Copyright 2011 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
var (
|
||||
gitHubRawHeader = http.Header{"Accept": {"application/vnd.github-blob.raw"}}
|
||||
gitHubPreviewHeader = http.Header{"Accept": {"application/vnd.github.preview"}}
|
||||
gitHubPattern = regexp.MustCompile(`^github\.com/(?P<owner>[a-z0-9A-Z_.\-]+)/(?P<repo>[a-z0-9A-Z_.\-]+)(?P<dir>/[a-z0-9A-Z_.\-/]*)?$`)
|
||||
gitHubCred string
|
||||
)
|
||||
|
||||
func SetGitHubCredentials(id, secret string) {
|
||||
gitHubCred = "client_id=" + id + "&client_secret=" + secret
|
||||
}
|
||||
|
||||
func getGitHubDoc(client *http.Client, match map[string]string, savedEtag string) (*Package, error) {
|
||||
|
||||
match["cred"] = gitHubCred
|
||||
|
||||
var refs []*struct {
|
||||
Object struct {
|
||||
Type string
|
||||
Sha string
|
||||
URL string
|
||||
}
|
||||
Ref string
|
||||
URL string
|
||||
}
|
||||
|
||||
err := httpGetJSON(client, expand("https://api.github.com/repos/{owner}/{repo}/git/refs?{cred}", match), nil, &refs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
tags := make(map[string]string)
|
||||
for _, ref := range refs {
|
||||
switch {
|
||||
case strings.HasPrefix(ref.Ref, "refs/heads/"):
|
||||
tags[ref.Ref[len("refs/heads/"):]] = ref.Object.Sha
|
||||
case strings.HasPrefix(ref.Ref, "refs/tags/"):
|
||||
tags[ref.Ref[len("refs/tags/"):]] = ref.Object.Sha
|
||||
}
|
||||
}
|
||||
|
||||
var commit string
|
||||
match["tag"], commit, err = bestTag(tags, "master")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if commit == savedEtag {
|
||||
return nil, ErrNotModified
|
||||
}
|
||||
|
||||
var contents []*struct {
|
||||
Type string
|
||||
Name string
|
||||
GitURL string `json:"git_url"`
|
||||
HTMLURL string `json:"html_url"`
|
||||
}
|
||||
|
||||
err = httpGetJSON(client, expand("https://api.github.com/repos/{owner}/{repo}/contents{dir}?ref={tag}&{cred}", match), nil, &contents)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(contents) == 0 {
|
||||
return nil, NotFoundError{"No files in directory."}
|
||||
}
|
||||
|
||||
// Because Github API URLs are case-insensitive, we check that the owner
|
||||
// and repo returned from Github match the ones that we are requesting.
|
||||
if !strings.HasPrefix(contents[0].GitURL, expand("https://api.github.com/repos/{owner}/{repo}/", match)) {
|
||||
return nil, NotFoundError{"Github import path has incorrect case."}
|
||||
}
|
||||
|
||||
var files []*source
|
||||
var subdirs []string
|
||||
|
||||
for _, item := range contents {
|
||||
switch {
|
||||
case item.Type == "dir":
|
||||
if isValidPathElement(item.Name) {
|
||||
subdirs = append(subdirs, item.Name)
|
||||
}
|
||||
case isDocFile(item.Name):
|
||||
files = append(files, &source{
|
||||
name: item.Name,
|
||||
browseURL: item.HTMLURL,
|
||||
rawURL: item.GitURL + "?" + gitHubCred,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if err := fetchFiles(client, files, gitHubRawHeader); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
browseURL := expand("https://github.com/{owner}/{repo}", match)
|
||||
if match["dir"] != "" {
|
||||
browseURL = expand("https://github.com/{owner}/{repo}/tree/{tag}{dir}", match)
|
||||
}
|
||||
|
||||
b := &builder{
|
||||
pdoc: &Package{
|
||||
LineFmt: "%s#L%d",
|
||||
ImportPath: match["originalImportPath"],
|
||||
ProjectRoot: expand("github.com/{owner}/{repo}", match),
|
||||
ProjectName: match["repo"],
|
||||
ProjectURL: expand("https://github.com/{owner}/{repo}", match),
|
||||
BrowseURL: browseURL,
|
||||
Etag: commit,
|
||||
VCS: "git",
|
||||
Subdirectories: subdirs,
|
||||
},
|
||||
}
|
||||
|
||||
return b.build(files)
|
||||
}
|
||||
|
||||
func getGitHubPresentation(client *http.Client, match map[string]string) (*Presentation, error) {
|
||||
|
||||
match["cred"] = gitHubCred
|
||||
|
||||
p, err := httpGetBytes(client, expand("https://api.github.com/repos/{owner}/{repo}/contents{dir}/{file}?{cred}", match), gitHubRawHeader)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
apiBase, err := url.Parse(expand("https://api.github.com/repos/{owner}/{repo}/contents{dir}/?{cred}", match))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
rawBase, err := url.Parse(expand("https://raw.github.com/{owner}/{repo}/master{dir}/", match))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
b := &presBuilder{
|
||||
data: p,
|
||||
filename: match["file"],
|
||||
fetch: func(files []*source) error {
|
||||
for _, f := range files {
|
||||
u, err := apiBase.Parse(f.name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
u.RawQuery = apiBase.RawQuery
|
||||
f.rawURL = u.String()
|
||||
}
|
||||
return fetchFiles(client, files, gitHubRawHeader)
|
||||
},
|
||||
resolveURL: func(fname string) string {
|
||||
u, err := rawBase.Parse(fname)
|
||||
if err != nil {
|
||||
return "/notfound"
|
||||
}
|
||||
if strings.HasSuffix(fname, ".svg") {
|
||||
u.Host = "rawgithub.com"
|
||||
}
|
||||
return u.String()
|
||||
},
|
||||
}
|
||||
|
||||
return b.build()
|
||||
}
|
||||
|
||||
type GitHubUpdate struct {
|
||||
OwnerRepo string
|
||||
Fork bool
|
||||
}
|
||||
|
||||
func GetGitHubUpdates(client *http.Client, last string) (string, []GitHubUpdate, error) {
|
||||
if last == "" {
|
||||
last = time.Now().Add(-24 * time.Hour).UTC().Format("2006-01-02T15:04:05Z")
|
||||
}
|
||||
u := "https://api.github.com/search/repositories?order=asc&sort=updated&q=language:Go+pushed:>" + last
|
||||
if gitHubCred != "" {
|
||||
u += "&" + gitHubCred
|
||||
}
|
||||
var updates struct {
|
||||
Items []struct {
|
||||
FullName string `json:"full_name"`
|
||||
Fork bool
|
||||
PushedAt string `json:"pushed_at"`
|
||||
}
|
||||
}
|
||||
err := httpGetJSON(client, u, gitHubPreviewHeader, &updates)
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
var result []GitHubUpdate
|
||||
for _, item := range updates.Items {
|
||||
result = append(result, GitHubUpdate{OwnerRepo: item.FullName, Fork: item.Fork})
|
||||
if item.PushedAt > last {
|
||||
last = item.PushedAt
|
||||
}
|
||||
}
|
||||
return last, result, nil
|
||||
}
|
doc/google.go (215 lines changed)
@@ -1,215 +0,0 @@
|
|||
// Copyright 2011 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var (
|
||||
googleRepoRe = regexp.MustCompile(`id="checkoutcmd">(hg|git|svn)`)
|
||||
googleRevisionRe = regexp.MustCompile(`<h2>(?:[^ ]+ - )?Revision *([^:]+):`)
|
||||
googleEtagRe = regexp.MustCompile(`^(hg|git|svn)-`)
|
||||
googleFileRe = regexp.MustCompile(`<li><a href="([^"]+)"`)
|
||||
googlePattern = regexp.MustCompile(`^code\.google\.com/(?P<pr>[pr])/(?P<repo>[a-z0-9\-]+)(:?\.(?P<subrepo>[a-z0-9\-]+))?(?P<dir>/[a-z0-9A-Z_.\-/]+)?$`)
|
||||
)
|
||||
|
||||
func getGoogleDoc(client *http.Client, match map[string]string, savedEtag string) (*Package, error) {
|
||||
setupGoogleMatch(match)
|
||||
if m := googleEtagRe.FindStringSubmatch(savedEtag); m != nil {
|
||||
match["vcs"] = m[1]
|
||||
} else if err := getGoogleVCS(client, match); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Scrape the repo browser to find the project revision and individual Go files.
|
||||
p, err := httpGetBytes(client, expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}{dir}/", match), nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var etag string
|
||||
m := googleRevisionRe.FindSubmatch(p)
|
||||
if m == nil {
|
||||
return nil, errors.New("Could not find revision for " + match["importPath"])
|
||||
}
|
||||
etag = expand("{vcs}-{0}", match, string(m[1]))
|
||||
if etag == savedEtag {
|
||||
return nil, ErrNotModified
|
||||
}
|
||||
|
||||
var subdirs []string
|
||||
var files []*source
|
||||
for _, m := range googleFileRe.FindAllSubmatch(p, -1) {
|
||||
fname := string(m[1])
|
||||
switch {
|
||||
case strings.HasSuffix(fname, "/"):
|
||||
fname = fname[:len(fname)-1]
|
||||
if isValidPathElement(fname) {
|
||||
subdirs = append(subdirs, fname)
|
||||
}
|
||||
case isDocFile(fname):
|
||||
files = append(files, &source{
|
||||
name: fname,
|
||||
browseURL: expand("http://code.google.com/{pr}/{repo}/source/browse{dir}/{0}{query}", match, fname),
|
||||
rawURL: expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}{dir}/{0}", match, fname),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if err := fetchFiles(client, files, nil); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var projectURL string
|
||||
if match["subrepo"] == "" {
|
||||
projectURL = expand("https://code.google.com/{pr}/{repo}/", match)
|
||||
} else {
|
||||
projectURL = expand("https://code.google.com/{pr}/{repo}/source/browse?repo={subrepo}", match)
|
||||
}
|
||||
|
||||
b := &builder{
|
||||
pdoc: &Package{
|
||||
LineFmt: "%s#%d",
|
||||
ImportPath: match["originalImportPath"],
|
||||
ProjectRoot: expand("code.google.com/{pr}/{repo}{dot}{subrepo}", match),
|
||||
ProjectName: expand("{repo}{dot}{subrepo}", match),
|
||||
ProjectURL: projectURL,
|
||||
BrowseURL: expand("http://code.google.com/{pr}/{repo}/source/browse{dir}/{query}", match),
|
||||
Etag: etag,
|
||||
VCS: match["vcs"],
|
||||
},
|
||||
}
|
||||
|
||||
return b.build(files)
|
||||
}
|
||||
|
||||
func setupGoogleMatch(match map[string]string) {
|
||||
if s := match["subrepo"]; s != "" {
|
||||
match["dot"] = "."
|
||||
match["query"] = "?repo=" + s
|
||||
} else {
|
||||
match["dot"] = ""
|
||||
match["query"] = ""
|
||||
}
|
||||
}
|
||||
|
||||
func getGoogleVCS(client *http.Client, match map[string]string) error {
|
||||
// Scrape the HTML project page to find the VCS.
|
||||
p, err := httpGetBytes(client, expand("http://code.google.com/{pr}/{repo}/source/checkout", match), nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
m := googleRepoRe.FindSubmatch(p)
|
||||
if m == nil {
|
||||
return NotFoundError{"Could not VCS on Google Code project page."}
|
||||
}
|
||||
match["vcs"] = string(m[1])
|
||||
return nil
|
||||
}
|
||||
|
||||
func getStandardDoc(client *http.Client, importPath string, savedEtag string) (*Package, error) {
|
||||
|
||||
p, err := httpGetBytes(client, "http://go.googlecode.com/hg-history/release/src/pkg/"+importPath+"/", nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var etag string
|
||||
m := googleRevisionRe.FindSubmatch(p)
|
||||
if m == nil {
|
||||
return nil, errors.New("Could not find revision for " + importPath)
|
||||
}
|
||||
etag = string(m[1])
|
||||
if etag == savedEtag {
|
||||
return nil, ErrNotModified
|
||||
}
|
||||
|
||||
var files []*source
|
||||
for _, m := range googleFileRe.FindAllSubmatch(p, -1) {
|
||||
fname := strings.Split(string(m[1]), "?")[0]
|
||||
if isDocFile(fname) {
|
||||
files = append(files, &source{
|
||||
name: fname,
|
||||
browseURL: "http://code.google.com/p/go/source/browse/src/pkg/" + importPath + "/" + fname + "?name=release",
|
||||
rawURL: "http://go.googlecode.com/hg-history/release/src/pkg/" + importPath + "/" + fname,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if err := fetchFiles(client, files, nil); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
b := &builder{
|
||||
pdoc: &Package{
|
||||
LineFmt: "%s#%d",
|
||||
ImportPath: importPath,
|
||||
ProjectRoot: "",
|
||||
ProjectName: "Go",
|
||||
ProjectURL: "https://code.google.com/p/go/",
|
||||
BrowseURL: "http://code.google.com/p/go/source/browse/src/pkg/" + importPath + "?name=release",
|
||||
Etag: etag,
|
||||
VCS: "hg",
|
||||
},
|
||||
}
|
||||
|
||||
return b.build(files)
|
||||
}
|
||||
|
||||
func getGooglePresentation(client *http.Client, match map[string]string) (*Presentation, error) {
|
||||
setupGoogleMatch(match)
|
||||
if err := getGoogleVCS(client, match); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
rawBase, err := url.Parse(expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}{dir}/", match))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
p, err := httpGetBytes(client, expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}{dir}/{file}", match), nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
b := &presBuilder{
|
||||
data: p,
|
||||
filename: match["file"],
|
||||
fetch: func(files []*source) error {
|
||||
for _, f := range files {
|
||||
u, err := rawBase.Parse(f.name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
f.rawURL = u.String()
|
||||
}
|
||||
return fetchFiles(client, files, nil)
|
||||
},
|
||||
resolveURL: func(fname string) string {
|
||||
u, err := rawBase.Parse(fname)
|
||||
if err != nil {
|
||||
return "/notfound"
|
||||
}
|
||||
return u.String()
|
||||
},
|
||||
}
|
||||
|
||||
return b.build()
|
||||
}
|
doc/launchpad.go (139 lines changed)
@@ -1,139 +0,0 @@
|
|||
// Copyright 2011 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"bytes"
|
||||
"compress/gzip"
|
||||
"crypto/md5"
|
||||
"encoding/hex"
|
||||
"io"
|
||||
"net/http"
|
||||
"path"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type byHash []byte
|
||||
|
||||
func (p byHash) Len() int { return len(p) / md5.Size }
|
||||
func (p byHash) Less(i, j int) bool {
|
||||
return -1 == bytes.Compare(p[i*md5.Size:(i+1)*md5.Size], p[j*md5.Size:(j+1)*md5.Size])
|
||||
}
|
||||
func (p byHash) Swap(i, j int) {
|
||||
var temp [md5.Size]byte
|
||||
copy(temp[:], p[i*md5.Size:])
|
||||
copy(p[i*md5.Size:(i+1)*md5.Size], p[j*md5.Size:])
|
||||
copy(p[j*md5.Size:], temp[:])
|
||||
}
|
||||
|
||||
var launchpadPattern = regexp.MustCompile(`^launchpad\.net/(?P<repo>(?P<project>[a-z0-9A-Z_.\-]+)(?P<series>/[a-z0-9A-Z_.\-]+)?|~[a-z0-9A-Z_.\-]+/(\+junk|[a-z0-9A-Z_.\-]+)/[a-z0-9A-Z_.\-]+)(?P<dir>/[a-z0-9A-Z_.\-/]+)*$`)
|
||||
|
||||
func getLaunchpadDoc(client *http.Client, match map[string]string, savedEtag string) (*Package, error) {
|
||||
|
||||
if match["project"] != "" && match["series"] != "" {
|
||||
rc, err := httpGet(client, expand("https://code.launchpad.net/{project}{series}/.bzr/branch-format", match), nil)
|
||||
switch {
|
||||
case err == nil:
|
||||
rc.Close()
|
||||
// The structure of the import path is launchpad.net/{root}/{dir}.
|
||||
case IsNotFound(err):
|
||||
// The structure of the import path is launchpad.net/{project}/{dir}.
|
||||
match["repo"] = match["project"]
|
||||
match["dir"] = expand("{series}{dir}", match)
|
||||
default:
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
p, err := httpGetBytes(client, expand("https://bazaar.launchpad.net/+branch/{repo}/tarball", match), nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
gzr, err := gzip.NewReader(bytes.NewReader(p))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer gzr.Close()
|
||||
|
||||
tr := tar.NewReader(gzr)
|
||||
|
||||
var hash []byte
|
||||
inTree := false
|
||||
dirPrefix := expand("+branch/{repo}{dir}/", match)
|
||||
var files []*source
|
||||
for {
|
||||
h, err := tr.Next()
|
||||
if err == io.EOF {
|
||||
break
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
d, f := path.Split(h.Name)
|
||||
if !isDocFile(f) {
|
||||
continue
|
||||
}
|
||||
b := make([]byte, h.Size)
|
||||
if _, err := io.ReadFull(tr, b); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
m := md5.New()
|
||||
m.Write(b)
|
||||
hash = m.Sum(hash)
|
||||
|
||||
if !strings.HasPrefix(h.Name, dirPrefix) {
|
||||
continue
|
||||
}
|
||||
inTree = true
|
||||
if d == dirPrefix {
|
||||
files = append(files, &source{
|
||||
name: f,
|
||||
browseURL: expand("http://bazaar.launchpad.net/+branch/{repo}/view/head:{dir}/{0}", match, f),
|
||||
data: b})
|
||||
}
|
||||
}
|
||||
|
||||
if !inTree {
|
||||
return nil, NotFoundError{"Directory tree does not contain Go files."}
|
||||
}
|
||||
|
||||
sort.Sort(byHash(hash))
|
||||
m := md5.New()
|
||||
m.Write(hash)
|
||||
hash = m.Sum(hash[:0])
|
||||
etag := hex.EncodeToString(hash)
|
||||
if etag == savedEtag {
|
||||
return nil, ErrNotModified
|
||||
}
|
||||
|
||||
b := &builder{
|
||||
pdoc: &Package{
|
||||
LineFmt: "%s#L%d",
|
||||
ImportPath: match["originalImportPath"],
|
||||
ProjectRoot: expand("launchpad.net/{repo}", match),
|
||||
ProjectName: match["repo"],
|
||||
ProjectURL: expand("https://launchpad.net/{repo}/", match),
|
||||
BrowseURL: expand("http://bazaar.launchpad.net/+branch/{repo}/view/head:{dir}/", match),
|
||||
Etag: etag,
|
||||
VCS: "bzr",
|
||||
},
|
||||
}
|
||||
return b.build(files)
|
||||
}
|
doc/local.go (71 lines changed)
@@ -1,71 +0,0 @@
|
|||
// Copyright 2013 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/build"
|
||||
"io/ioutil"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
// GetLocal gets the documentation for a locally installed package.
|
||||
func GetLocal(importPath string, goroot, gopath string, browseURLFmt, lineFmt string) (*Package, error) {
|
||||
ctx := build.Default
|
||||
if goroot != "" {
|
||||
ctx.GOROOT = goroot
|
||||
}
|
||||
if gopath != "" {
|
||||
ctx.GOPATH = gopath
|
||||
}
|
||||
bpkg, err := ctx.Import(importPath, ".", build.FindOnly)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
dir := filepath.Join(bpkg.SrcRoot, filepath.FromSlash(importPath))
|
||||
fis, err := ioutil.ReadDir(dir)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var modTime time.Time
|
||||
var files []*source
|
||||
for _, fi := range fis {
|
||||
if fi.IsDir() || !isDocFile(fi.Name()) {
|
||||
continue
|
||||
}
|
||||
if fi.ModTime().After(modTime) {
|
||||
modTime = fi.ModTime()
|
||||
}
|
||||
b, err := ioutil.ReadFile(filepath.Join(dir, fi.Name()))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
files = append(files, &source{
|
||||
name: fi.Name(),
|
||||
browseURL: fmt.Sprintf(browseURLFmt, fi.Name()),
|
||||
data: b,
|
||||
})
|
||||
}
|
||||
b := &builder{
|
||||
pdoc: &Package{
|
||||
LineFmt: lineFmt,
|
||||
ImportPath: importPath,
|
||||
Etag: strconv.FormatInt(modTime.Unix(), 16),
|
||||
},
|
||||
}
|
||||
return b.build(files)
|
||||
}
|
doc/path.go (542 lines changed)
@@ -1,542 +0,0 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"path"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var standardPath = map[string]bool{
|
||||
"builtin": true,
|
||||
|
||||
// go list -f '"{{.ImportPath}}": true,' std | grep -v 'cmd/|exp/'
|
||||
"cmd/api": true,
|
||||
"cmd/cgo": true,
|
||||
"cmd/fix": true,
|
||||
"cmd/go": true,
|
||||
"cmd/godoc": true,
|
||||
"cmd/gofmt": true,
|
||||
"cmd/vet": true,
|
||||
"cmd/yacc": true,
|
||||
"archive/tar": true,
|
||||
"archive/zip": true,
|
||||
"bufio": true,
|
||||
"bytes": true,
|
||||
"compress/bzip2": true,
|
||||
"compress/flate": true,
|
||||
"compress/gzip": true,
|
||||
"compress/lzw": true,
|
||||
"compress/zlib": true,
|
||||
"container/heap": true,
|
||||
"container/list": true,
|
||||
"container/ring": true,
|
||||
"crypto": true,
|
||||
"crypto/aes": true,
|
||||
"crypto/cipher": true,
|
||||
"crypto/des": true,
|
||||
"crypto/dsa": true,
|
||||
"crypto/ecdsa": true,
|
||||
"crypto/elliptic": true,
|
||||
"crypto/hmac": true,
|
||||
"crypto/md5": true,
|
||||
"crypto/rand": true,
|
||||
"crypto/rc4": true,
|
||||
"crypto/rsa": true,
|
||||
"crypto/sha1": true,
|
||||
"crypto/sha256": true,
|
||||
"crypto/sha512": true,
|
||||
"crypto/subtle": true,
|
||||
"crypto/tls": true,
|
||||
"crypto/x509": true,
|
||||
"crypto/x509/pkix": true,
|
||||
"database/sql": true,
|
||||
"database/sql/driver": true,
|
||||
"debug/dwarf": true,
|
||||
"debug/elf": true,
|
||||
"debug/gosym": true,
|
||||
"debug/macho": true,
|
||||
"debug/pe": true,
|
||||
"encoding/ascii85": true,
|
||||
"encoding/asn1": true,
|
||||
"encoding/base32": true,
|
||||
"encoding/base64": true,
|
||||
"encoding/binary": true,
|
||||
"encoding/csv": true,
|
||||
"encoding/gob": true,
|
||||
"encoding/hex": true,
|
||||
"encoding/json": true,
|
||||
"encoding/pem": true,
|
||||
"encoding/xml": true,
|
||||
"errors": true,
|
||||
"expvar": true,
|
||||
"flag": true,
|
||||
"fmt": true,
|
||||
"go/ast": true,
|
||||
"go/build": true,
|
||||
"go/doc": true,
|
||||
"go/format": true,
|
||||
"go/parser": true,
|
||||
"go/printer": true,
|
||||
"go/scanner": true,
|
||||
"go/token": true,
|
||||
"hash": true,
|
||||
"hash/adler32": true,
|
||||
"hash/crc32": true,
|
||||
"hash/crc64": true,
|
||||
"hash/fnv": true,
|
||||
"html": true,
|
||||
"html/template": true,
|
||||
"image": true,
|
||||
"image/color": true,
|
||||
"image/draw": true,
|
||||
"image/gif": true,
|
||||
"image/jpeg": true,
|
||||
"image/png": true,
|
||||
"index/suffixarray": true,
|
||||
"io": true,
|
||||
"io/ioutil": true,
|
||||
"log": true,
|
||||
"log/syslog": true,
|
||||
"math": true,
|
||||
"math/big": true,
|
||||
"math/cmplx": true,
|
||||
"math/rand": true,
|
||||
"mime": true,
|
||||
"mime/multipart": true,
|
||||
"net": true,
|
||||
"net/http": true,
|
||||
"net/http/cgi": true,
|
||||
"net/http/cookiejar": true,
|
||||
"net/http/fcgi": true,
|
||||
"net/http/httptest": true,
|
||||
"net/http/httputil": true,
|
||||
"net/http/pprof": true,
|
||||
"net/mail": true,
|
||||
"net/rpc": true,
|
||||
"net/rpc/jsonrpc": true,
|
||||
"net/smtp": true,
|
||||
"net/textproto": true,
|
||||
"net/url": true,
|
||||
"os": true,
|
||||
"os/exec": true,
|
||||
"os/signal": true,
|
||||
"os/user": true,
|
||||
"path": true,
|
||||
"path/filepath": true,
|
||||
"reflect": true,
|
||||
"regexp": true,
|
||||
"regexp/syntax": true,
|
||||
"runtime": true,
|
||||
"runtime/cgo": true,
|
||||
"runtime/debug": true,
|
||||
"runtime/pprof": true,
|
||||
"sort": true,
|
||||
"strconv": true,
|
||||
"strings": true,
|
||||
"sync": true,
|
||||
"sync/atomic": true,
|
||||
"syscall": true,
|
||||
"testing": true,
|
||||
"testing/iotest": true,
|
||||
"testing/quick": true,
|
||||
"text/scanner": true,
|
||||
"text/tabwriter": true,
|
||||
"text/template": true,
|
||||
"text/template/parse": true,
|
||||
"time": true,
|
||||
"unicode": true,
|
||||
"unicode/utf16": true,
|
||||
"unicode/utf8": true,
|
||||
"unsafe": true,
|
||||
}
|
||||
|
||||
var validTLD = map[string]bool{
|
||||
// curl http://data.iana.org/TLD/tlds-alpha-by-domain.txt | sed -e '/#/ d' -e 's/.*/"&": true,/' | tr [:upper:] [:lower:]
|
||||
".ac": true,
|
||||
".ad": true,
|
||||
".ae": true,
|
||||
".aero": true,
|
||||
".af": true,
|
||||
".ag": true,
|
||||
".ai": true,
|
||||
".al": true,
|
||||
".am": true,
|
||||
".an": true,
|
||||
".ao": true,
|
||||
".aq": true,
|
||||
".ar": true,
|
||||
".arpa": true,
|
||||
".as": true,
|
||||
".asia": true,
|
||||
".at": true,
|
||||
".au": true,
|
||||
".aw": true,
|
||||
".ax": true,
|
||||
".az": true,
|
||||
".ba": true,
|
||||
".bb": true,
|
||||
".bd": true,
|
||||
".be": true,
|
||||
".bf": true,
|
||||
".bg": true,
|
||||
".bh": true,
|
||||
".bi": true,
|
||||
".biz": true,
|
||||
".bj": true,
|
||||
".bm": true,
|
||||
".bn": true,
|
||||
".bo": true,
|
||||
".br": true,
|
||||
".bs": true,
|
||||
".bt": true,
|
||||
".bv": true,
|
||||
".bw": true,
|
||||
".by": true,
|
||||
".bz": true,
|
||||
".ca": true,
|
||||
".cat": true,
|
||||
".cc": true,
|
||||
".cd": true,
|
||||
".cf": true,
|
||||
".cg": true,
|
||||
".ch": true,
|
||||
".ci": true,
|
||||
".ck": true,
|
||||
".cl": true,
|
||||
".cm": true,
|
||||
".cn": true,
|
||||
".co": true,
|
||||
".com": true,
|
||||
".coop": true,
|
||||
".cr": true,
|
||||
".cu": true,
|
||||
".cv": true,
|
||||
".cw": true,
|
||||
".cx": true,
|
||||
".cy": true,
|
||||
".cz": true,
|
||||
".de": true,
|
||||
".dj": true,
|
||||
".dk": true,
|
||||
".dm": true,
|
||||
".do": true,
|
||||
".dz": true,
|
||||
".ec": true,
|
||||
".edu": true,
|
||||
".ee": true,
|
||||
".eg": true,
|
||||
".er": true,
|
||||
".es": true,
|
||||
".et": true,
|
||||
".eu": true,
|
||||
".fi": true,
|
||||
".fj": true,
|
||||
".fk": true,
|
||||
".fm": true,
|
||||
".fo": true,
|
||||
".fr": true,
|
||||
".ga": true,
|
||||
".gb": true,
|
||||
".gd": true,
|
||||
".ge": true,
|
||||
".gf": true,
|
||||
".gg": true,
|
||||
".gh": true,
|
||||
".gi": true,
|
||||
".gl": true,
|
||||
".gm": true,
|
||||
".gn": true,
|
||||
".gov": true,
|
||||
".gp": true,
|
||||
".gq": true,
|
||||
".gr": true,
|
||||
".gs": true,
|
||||
".gt": true,
|
||||
".gu": true,
|
||||
".gw": true,
|
||||
".gy": true,
|
||||
".hk": true,
|
||||
".hm": true,
|
||||
".hn": true,
|
||||
".hr": true,
|
||||
".ht": true,
|
||||
".hu": true,
|
||||
".id": true,
|
||||
".ie": true,
|
||||
".il": true,
|
||||
".im": true,
|
||||
".in": true,
|
||||
".info": true,
|
||||
".int": true,
|
||||
".io": true,
|
||||
".iq": true,
|
||||
".ir": true,
|
||||
".is": true,
|
||||
".it": true,
|
||||
".je": true,
|
||||
".jm": true,
|
||||
".jo": true,
|
||||
".jobs": true,
|
||||
".jp": true,
|
||||
".ke": true,
|
||||
".kg": true,
|
||||
".kh": true,
|
||||
".ki": true,
|
||||
".km": true,
|
||||
".kn": true,
|
||||
".kp": true,
|
||||
".kr": true,
|
||||
".kw": true,
|
||||
".ky": true,
|
||||
".kz": true,
|
||||
".la": true,
|
||||
".lb": true,
|
||||
".lc": true,
|
||||
".li": true,
|
||||
".lk": true,
|
||||
".lr": true,
|
||||
".ls": true,
|
||||
".lt": true,
|
||||
".lu": true,
|
||||
".lv": true,
|
||||
".ly": true,
|
||||
".ma": true,
|
||||
".mc": true,
|
||||
".md": true,
|
||||
".me": true,
|
||||
".mg": true,
|
||||
".mh": true,
|
||||
".mil": true,
|
||||
".mk": true,
|
||||
".ml": true,
|
||||
".mm": true,
|
||||
".mn": true,
|
||||
".mo": true,
|
||||
".mobi": true,
|
||||
".mp": true,
|
||||
".mq": true,
|
||||
".mr": true,
|
||||
".ms": true,
|
||||
".mt": true,
|
||||
".mu": true,
|
||||
".museum": true,
|
||||
".mv": true,
|
||||
".mw": true,
|
||||
".mx": true,
|
||||
".my": true,
|
||||
".mz": true,
|
||||
".na": true,
|
||||
".name": true,
|
||||
".nc": true,
|
||||
".ne": true,
|
||||
".net": true,
|
||||
".nf": true,
|
||||
".ng": true,
|
||||
".ni": true,
|
||||
".nl": true,
|
||||
".no": true,
|
||||
".np": true,
|
||||
".nr": true,
|
||||
".nu": true,
|
||||
".nz": true,
|
||||
".om": true,
|
||||
".org": true,
|
||||
".pa": true,
|
||||
".pe": true,
|
||||
".pf": true,
|
||||
".pg": true,
|
||||
".ph": true,
|
||||
".pk": true,
|
||||
".pl": true,
|
||||
".pm": true,
|
||||
".pn": true,
|
||||
".post": true,
|
||||
".pr": true,
|
||||
".pro": true,
|
||||
".ps": true,
|
||||
".pt": true,
|
||||
".pw": true,
|
||||
".py": true,
|
||||
".qa": true,
|
||||
".re": true,
|
||||
".ro": true,
|
||||
".rs": true,
|
||||
".ru": true,
|
||||
".rw": true,
|
||||
".sa": true,
|
||||
".sb": true,
|
||||
".sc": true,
|
||||
".sd": true,
|
||||
".se": true,
|
||||
".sg": true,
|
||||
".sh": true,
|
||||
".si": true,
|
||||
".sj": true,
|
||||
".sk": true,
|
||||
".sl": true,
|
||||
".sm": true,
|
||||
".sn": true,
|
||||
".so": true,
|
||||
".sr": true,
|
||||
".st": true,
|
||||
".su": true,
|
||||
".sv": true,
|
||||
".sx": true,
|
||||
".sy": true,
|
||||
".sz": true,
|
||||
".tc": true,
|
||||
".td": true,
|
||||
".tel": true,
|
||||
".tf": true,
|
||||
".tg": true,
|
||||
".th": true,
|
||||
".tj": true,
|
||||
".tk": true,
|
||||
".tl": true,
|
||||
".tm": true,
|
||||
".tn": true,
|
||||
".to": true,
|
||||
".tp": true,
|
||||
".tr": true,
|
||||
".travel": true,
|
||||
".tt": true,
|
||||
".tv": true,
|
||||
".tw": true,
|
||||
".tz": true,
|
||||
".ua": true,
|
||||
".ug": true,
|
||||
".uk": true,
|
||||
".us": true,
|
||||
".uy": true,
|
||||
".uz": true,
|
||||
".va": true,
|
||||
".vc": true,
|
||||
".ve": true,
|
||||
".vg": true,
|
||||
".vi": true,
|
||||
".vn": true,
|
||||
".vu": true,
|
||||
".wf": true,
|
||||
".ws": true,
|
||||
".xn--0zwm56d": true,
|
||||
".xn--11b5bs3a9aj6g": true,
|
||||
".xn--3e0b707e": true,
|
||||
".xn--45brj9c": true,
|
||||
".xn--80akhbyknj4f": true,
|
||||
".xn--80ao21a": true,
|
||||
".xn--90a3ac": true,
|
||||
".xn--9t4b11yi5a": true,
|
||||
".xn--clchc0ea0b2g2a9gcd": true,
|
||||
".xn--deba0ad": true,
|
||||
".xn--fiqs8s": true,
|
||||
".xn--fiqz9s": true,
|
||||
".xn--fpcrj9c3d": true,
|
||||
".xn--fzc2c9e2c": true,
|
||||
".xn--g6w251d": true,
|
||||
".xn--gecrj9c": true,
|
||||
".xn--h2brj9c": true,
|
||||
".xn--hgbk6aj7f53bba": true,
|
||||
".xn--hlcj6aya9esc7a": true,
|
||||
".xn--j6w193g": true,
|
||||
".xn--jxalpdlp": true,
|
||||
".xn--kgbechtv": true,
|
||||
".xn--kprw13d": true,
|
||||
".xn--kpry57d": true,
|
||||
".xn--lgbbat1ad8j": true,
|
||||
".xn--mgb9awbf": true,
|
||||
".xn--mgbaam7a8h": true,
|
||||
".xn--mgbayh7gpa": true,
|
||||
".xn--mgbbh1a71e": true,
|
||||
".xn--mgbc0a9azcg": true,
|
||||
".xn--mgberp4a5d4ar": true,
|
||||
".xn--mgbx4cd0ab": true,
|
||||
".xn--o3cw4h": true,
|
||||
".xn--ogbpf8fl": true,
|
||||
".xn--p1ai": true,
|
||||
".xn--pgbs0dh": true,
|
||||
".xn--s9brj9c": true,
|
||||
".xn--wgbh1c": true,
|
||||
".xn--wgbl6a": true,
|
||||
".xn--xkc2al3hye2a": true,
|
||||
".xn--xkc2dl3a5ee0h": true,
|
||||
".xn--yfro4i67o": true,
|
||||
".xn--ygbi2ammx": true,
|
||||
".xn--zckzah": true,
|
||||
".xxx": true,
|
||||
".ye": true,
|
||||
".yt": true,
|
||||
".za": true,
|
||||
".zm": true,
|
||||
".zw": true,
|
||||
}

var validHost = regexp.MustCompile(`^[-a-z0-9]+(?:\.[-a-z0-9]+)+$`)
var validPathElement = regexp.MustCompile(`^[-A-Za-z0-9~+][-A-Za-z0-9_.]*$`)

func isValidPathElement(s string) bool {
return validPathElement.MatchString(s) && s != "testdata"
}

// IsValidRemotePath returns true if importPath is structurally valid for "go get".
func IsValidRemotePath(importPath string) bool {

parts := strings.Split(importPath, "/")

if len(parts) <= 1 {
// Import path must contain at least one "/".
return false
}

if !validTLD[path.Ext(parts[0])] {
return false
}

if !validHost.MatchString(parts[0]) {
return false
}

for _, part := range parts[1:] {
if !isValidPathElement(part) {
return false
}
}

return true
}

var goRepoPath = map[string]bool{}

func init() {
for p := range standardPath {
for {
goRepoPath[p] = true
i := strings.LastIndex(p, "/")
if i < 0 {
break
}
p = p[:i]
}
}
}

func IsGoRepoPath(importPath string) bool {
return goRepoPath[importPath]
}

func IsValidPath(importPath string) bool {
return importPath == "C" || standardPath[importPath] || IsValidRemotePath(importPath)
}
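
The checks above are easiest to see on concrete paths. A minimal sketch, assuming the exported gosrc API that the rest of this commit switches callers to; the sample paths come from the test data shown next.

    package main

    import (
        "fmt"

        "github.com/garyburd/gosrc"
    )

    func main() {
        // IsValidRemotePath checks the host's TLD against validTLD and each
        // path element against validPathElement; IsValidPath also accepts
        // "C" and standard library paths.
        for _, p := range []string{
            "github.com/user/repo",            // valid remote path
            "launchpad.net/~user/foo/trunk",   // valid: "~" may start an element
            "favicon.ico",                     // invalid: no "/" after the host
            "github.com/user/repo/testdata/x", // invalid: "testdata" is rejected
        } {
            fmt.Printf("%-35s remote=%v valid=%v\n", p, gosrc.IsValidRemotePath(p), gosrc.IsValidPath(p))
        }
    }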
@@ -1,54 +0,0 @@
// Copyright 2012 Gary Burd
//
// Licensed under the Apache License, Version 2.0 (the "License"): you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.

package doc

import (
"testing"
)

var goodImportPaths = []string{
"github.com/user/repo",
"github.com/user/repo/src/pkg/compress/somethingelse",
"github.com/user/repo/src/compress/gzip",
"github.com/user/repo/src/pkg",
"camlistore.org/r/p/camlistore",
"example.com/foo.git",
"launchpad.net/~user/foo/trunk",
"launchpad.net/~user/+junk/version",
}

var badImportPaths = []string{
"foobar",
"foo.",
".bar",
"favicon.ico",
"exmpple.com",
"github.com/user/repo/testdata/x",
"github.com/user/repo/_ignore/x",
"github.com/user/repo/.ignore/x",
}

func TestIsValidRemotePath(t *testing.T) {
for _, importPath := range goodImportPaths {
if !IsValidRemotePath(importPath) {
t.Errorf("isBadImportPath(%q) -> true, want false", importPath)
}
}
for _, importPath := range badImportPaths {
if IsValidRemotePath(importPath) {
t.Errorf("isBadImportPath(%q) -> false, want true", importPath)
}
}
}
@@ -1,80 +0,0 @@
// Copyright 2013 Gary Burd
//
// Licensed under the Apache License, Version 2.0 (the "License"): you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.

package doc

import (
"regexp"
"time"
)

type Presentation struct {
Filename string
Files map[string][]byte
Updated time.Time
}

type presBuilder struct {
filename string
data []byte
resolveURL func(fname string) string
fetch func(srcs []*source) error
}

var assetPat = regexp.MustCompile(`(?m)^\.(play|code|image|iframe|html)\s+(\S+)`)

func (b *presBuilder) build() (*Presentation, error) {
var data []byte
var files []*source
i := 0
for _, m := range assetPat.FindAllSubmatchIndex(b.data, -1) {
name := string(b.data[m[4]:m[5]])
switch string(b.data[m[2]:m[3]]) {
case "iframe", "image":
data = append(data, b.data[i:m[4]]...)
data = append(data, b.resolveURL(name)...)
case "html":
// TODO: sanitize and fix relative URLs in HTML.
data = append(data, "\ntalks.godoc.org does not support .html\n"...)
case "play", "code":
data = append(data, b.data[i:m[5]]...)
found := false
for _, f := range files {
if f.name == name {
found = true
break
}
}
if !found {
files = append(files, &source{name: name})
}
default:
panic("unreachable")
}
i = m[5]
}
data = append(data, b.data[i:]...)
if err := b.fetch(files); err != nil {
return nil, err
}
pres := &Presentation{
Updated: time.Now().UTC(),
Filename: b.filename,
Files: map[string][]byte{b.filename: data},
}
for _, f := range files {
pres.Files[f.name] = f.data
}
return pres, nil
}
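For orientation, the directive syntax that assetPat recognizes looks like the sketch below; the input is an invented fragment of a .slide/.article file, not taken from a real talk.

    package main

    import (
        "fmt"
        "regexp"
    )

    var assetPat = regexp.MustCompile(`(?m)^\.(play|code|image|iframe|html)\s+(\S+)`)

    func main() {
        // The builder walks these matches, rewrites image/iframe URLs and
        // collects play/code files to fetch alongside the presentation.
        src := []byte(".image gopher.png\n.play hello.go\nRegular prose is left alone.\n")
        for _, m := range assetPat.FindAllSubmatch(src, -1) {
            fmt.Printf("directive=%s asset=%s\n", m[1], m[2])
        }
    }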
35  doc/print.go
@@ -21,18 +21,18 @@ package main

import (
"flag"
"fmt"
"log"
"net/http"
"os"

"github.com/davecgh/go-spew/spew"
"github.com/garyburd/gddo/doc"
"github.com/garyburd/gosrc"
)

var (
etag = flag.String("etag", "", "Etag")
local = flag.Bool("local", false, "Get package from local directory.")
present = flag.Bool("present", false, "Get presentation.")
etag = flag.String("etag", "", "Etag")
local = flag.Bool("local", false, "Get package from local directory.")
)

func main() {

@@ -40,38 +40,19 @@ func main() {
if len(flag.Args()) != 1 {
log.Fatal("Usage: go run print.go importPath")
}
if *present {
printPresentation(flag.Args()[0])
} else {
printPackage(flag.Args()[0])
}
}
path := flag.Args()[0]

func printPackage(path string) {
var (
pdoc *doc.Package
err error
)
if *local {
pdoc, err = doc.GetLocal(path, "", "", "%s", "#L%d")
} else {
pdoc, err = doc.Get(http.DefaultClient, path, *etag)
gosrc.SetLocalDevMode(os.Getenv("GOPATH"))
}
pdoc, err = doc.Get(http.DefaultClient, path, *etag)
//}
if err != nil {
log.Fatal(err)
}
spew.Dump(pdoc)
}

func printPresentation(path string) {
pres, err := doc.GetPresentation(httpClient, path)
if err != nil {
log.Fatal(err)
}
fmt.Printf("%s\n", pres.Files[pres.Filename])
for name, data := range pres.Files {
if name != pres.Filename {
fmt.Printf("---------- %s ----------\n%s\n", name, data)
}
}
}
229  doc/util.go
@ -14,238 +14,9 @@
|
|||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/md5"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var defaultTags = map[string]string{"git": "master", "hg": "default"}
|
||||
|
||||
func bestTag(tags map[string]string, defaultTag string) (string, string, error) {
|
||||
if commit, ok := tags["go1"]; ok {
|
||||
return "go1", commit, nil
|
||||
}
|
||||
if commit, ok := tags[defaultTag]; ok {
|
||||
return defaultTag, commit, nil
|
||||
}
|
||||
return "", "", NotFoundError{"Tag or branch not found."}
|
||||
}
|
||||
|
||||
// expand replaces {k} in template with match[k] or subs[atoi(k)] if k is not in match.
|
||||
func expand(template string, match map[string]string, subs ...string) string {
|
||||
var p []byte
|
||||
var i int
|
||||
for {
|
||||
i = strings.Index(template, "{")
|
||||
if i < 0 {
|
||||
break
|
||||
}
|
||||
p = append(p, template[:i]...)
|
||||
template = template[i+1:]
|
||||
i = strings.Index(template, "}")
|
||||
if s, ok := match[template[:i]]; ok {
|
||||
p = append(p, s...)
|
||||
} else {
|
||||
j, _ := strconv.Atoi(template[:i])
|
||||
p = append(p, subs[j]...)
|
||||
}
|
||||
template = template[i+1:]
|
||||
}
|
||||
p = append(p, template...)
|
||||
return string(p)
|
||||
}
|
||||
|
||||
var readmePat = regexp.MustCompile(`^[Rr][Ee][Aa][Dd][Mm][Ee](?:$|\.)`)
|
||||
|
||||
// isDocFile returns true if a file with name n should be included in the
|
||||
// documentation.
|
||||
func isDocFile(n string) bool {
|
||||
if strings.HasSuffix(n, ".go") && n[0] != '_' && n[0] != '.' {
|
||||
return true
|
||||
}
|
||||
return readmePat.MatchString(n)
|
||||
}
|
||||
|
||||
var userAgent = "go application"
|
||||
|
||||
func SetUserAgent(ua string) {
|
||||
userAgent = ua
|
||||
}
|
||||
|
||||
// fetchFiles fetches the source files specified by the rawURL field in parallel.
|
||||
func fetchFiles(client *http.Client, files []*source, header http.Header) error {
|
||||
ch := make(chan error, len(files))
|
||||
for i := range files {
|
||||
go func(i int) {
|
||||
req, err := http.NewRequest("GET", files[i].rawURL, nil)
|
||||
if err != nil {
|
||||
ch <- err
|
||||
return
|
||||
}
|
||||
req.Header.Set("User-Agent", userAgent)
|
||||
for k, vs := range header {
|
||||
req.Header[k] = vs
|
||||
}
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
ch <- &RemoteError{req.URL.Host, err}
|
||||
return
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
if resp.StatusCode != 200 {
|
||||
ch <- &RemoteError{req.URL.Host, fmt.Errorf("get %s -> %d", req.URL, resp.StatusCode)}
|
||||
return
|
||||
}
|
||||
files[i].data, err = ioutil.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
ch <- &RemoteError{req.URL.Host, err}
|
||||
return
|
||||
}
|
||||
ch <- nil
|
||||
}(i)
|
||||
}
|
||||
for _ = range files {
|
||||
if err := <-ch; err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// httpGet gets the specified resource. ErrNotFound is returned if the
|
||||
// server responds with status 404.
|
||||
func httpGet(client *http.Client, url string, header http.Header) (io.ReadCloser, error) {
|
||||
req, err := http.NewRequest("GET", url, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.Header.Set("User-Agent", userAgent)
|
||||
for k, vs := range header {
|
||||
req.Header[k] = vs
|
||||
}
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return nil, &RemoteError{req.URL.Host, err}
|
||||
}
|
||||
if resp.StatusCode == 200 {
|
||||
return resp.Body, nil
|
||||
}
|
||||
resp.Body.Close()
|
||||
if resp.StatusCode == 404 { // 403 can be rate limit error. || resp.StatusCode == 403 {
|
||||
err = NotFoundError{"Resource not found: " + url}
|
||||
} else {
|
||||
err = &RemoteError{req.URL.Host, fmt.Errorf("get %s -> %d", url, resp.StatusCode)}
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
|
||||
func httpGetJSON(client *http.Client, url string, header http.Header, v interface{}) error {
|
||||
rc, err := httpGet(client, url, header)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer rc.Close()
|
||||
err = json.NewDecoder(rc).Decode(v)
|
||||
if _, ok := err.(*json.SyntaxError); ok {
|
||||
err = NotFoundError{"JSON syntax error at " + url}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// httpGet gets the specified resource. ErrNotFound is returned if the server
|
||||
// responds with status 404.
|
||||
func httpGetBytes(client *http.Client, url string, header http.Header) ([]byte, error) {
|
||||
rc, err := httpGet(client, url, header)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
p, err := ioutil.ReadAll(rc)
|
||||
rc.Close()
|
||||
return p, err
|
||||
}
|
||||
|
||||
// httpGetBytesNoneMatch conditionally gets the specified resource. If a 304 status
|
||||
// is returned, then the function returns ErrNotModified. If a 404
|
||||
// status is returned, then the function returns ErrNotFound.
|
||||
func httpGetBytesNoneMatch(client *http.Client, url string, etag string) ([]byte, string, error) {
|
||||
req, err := http.NewRequest("GET", url, nil)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
req.Header.Set("User-Agent", userAgent)
|
||||
req.Header.Set("If-None-Match", `"`+etag+`"`)
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return nil, "", &RemoteError{req.URL.Host, err}
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
etag = resp.Header.Get("Etag")
|
||||
if len(etag) >= 2 && etag[0] == '"' && etag[len(etag)-1] == '"' {
|
||||
etag = etag[1 : len(etag)-1]
|
||||
} else {
|
||||
etag = ""
|
||||
}
|
||||
|
||||
switch resp.StatusCode {
|
||||
case 200:
|
||||
p, err := ioutil.ReadAll(resp.Body)
|
||||
return p, etag, err
|
||||
case 404:
|
||||
return nil, "", NotFoundError{"Resource not found: " + url}
|
||||
case 304:
|
||||
return nil, "", ErrNotModified
|
||||
default:
|
||||
return nil, "", &RemoteError{req.URL.Host, fmt.Errorf("get %s -> %d", url, resp.StatusCode)}
|
||||
}
|
||||
}
|
||||
|
||||
// httpGet gets the specified resource. ErrNotFound is returned if the
|
||||
// server responds with status 404. ErrNotModified is returned if the
|
||||
// hash of the resource equals savedEtag.
|
||||
func httpGetBytesCompare(client *http.Client, url string, savedEtag string) ([]byte, string, error) {
|
||||
p, err := httpGetBytes(client, url, nil)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
h := md5.New()
|
||||
h.Write(p)
|
||||
etag := hex.EncodeToString(h.Sum(nil))
|
||||
if savedEtag == etag {
|
||||
err = ErrNotModified
|
||||
}
|
||||
return p, etag, err
|
||||
}
|
||||
|
||||
type sliceWriter struct{ p *[]byte }
|
||||
|
||||
func (w sliceWriter) Write(p []byte) (int, error) {
|
||||
*w.p = append(*w.p, p...)
|
||||
return len(p), nil
|
||||
}
|
||||
|
||||
var lineComment = []byte("\n//line ")
|
||||
|
||||
func OverwriteLineComments(p []byte) {
|
||||
if bytes.HasPrefix(p, lineComment[1:]) {
|
||||
p[2] = 'L'
|
||||
p = p[len(lineComment)-1:]
|
||||
}
|
||||
for {
|
||||
i := bytes.Index(p, lineComment)
|
||||
if i < 0 {
|
||||
break
|
||||
}
|
||||
p[i+3] = 'L'
|
||||
p = p[i+len(lineComment):]
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -1,39 +0,0 @@
// Copyright 2013 Gary Burd
//
// Licensed under the Apache License, Version 2.0 (the "License"): you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.

package doc

import (
"testing"
)

var lineCommentTests = []struct {
in, out string
}{
{"", ""},
{"//line ", "//Line "},
{"//line x\n//line y", "//Line x\n//Line y"},
{"x\n//line ", "x\n//Line "},
}

func TestOverwriteLineComments(t *testing.T) {
for _, tt := range lineCommentTests {
p := []byte(tt.in)
OverwriteLineComments(p)
s := string(p)
if s != tt.out {
t.Errorf("actual=%q, expect=%q", s, tt.out)
}
}
}
256  doc/vcs.go
@ -1,256 +0,0 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var (
|
||||
// Store temporary data in this directory.
|
||||
TempDir = filepath.Join(os.TempDir(), "gddo")
|
||||
)
|
||||
|
||||
type urlTemplates struct {
|
||||
re *regexp.Regexp
|
||||
fileBrowse string
|
||||
project string
|
||||
line string
|
||||
}
|
||||
|
||||
var vcsServices = []*urlTemplates{
|
||||
{
|
||||
regexp.MustCompile(`^git\.gitorious\.org/(?P<repo>[^/]+/[^/]+)$`),
|
||||
"https://gitorious.org/{repo}/blobs/{tag}/{dir}{0}",
|
||||
"https://gitorious.org/{repo}",
|
||||
"%s#line%d",
|
||||
},
|
||||
{
|
||||
regexp.MustCompile(`^camlistore\.org/r/p/(?P<repo>[^/]+)$`),
|
||||
"http://camlistore.org/code/?p={repo}.git;hb={tag};f={dir}{0}",
|
||||
"http://camlistore.org/",
|
||||
"%s#l%d",
|
||||
},
|
||||
{
|
||||
regexp.MustCompile(`git\.oschina\.net/(?P<repo>[^/]+/[^/]+)$`),
|
||||
"http://git.oschina.net/{repo}/blob/{tag}/{dir}{0}",
|
||||
"http://git.oschina.net/{repo}",
|
||||
"%s#L%d",
|
||||
},
|
||||
}
|
||||
|
||||
// lookupURLTemplate finds an expand() template, match map and line number
|
||||
// format for well known repositories.
|
||||
func lookupURLTemplate(repo, dir, tag string) (*urlTemplates, map[string]string) {
|
||||
if strings.HasPrefix(dir, "/") {
|
||||
dir = dir[1:] + "/"
|
||||
}
|
||||
for _, t := range vcsServices {
|
||||
if m := t.re.FindStringSubmatch(repo); m != nil {
|
||||
match := map[string]string{
|
||||
"dir": dir,
|
||||
"tag": tag,
|
||||
}
|
||||
for i, name := range t.re.SubexpNames() {
|
||||
if name != "" {
|
||||
match[name] = m[i]
|
||||
}
|
||||
}
|
||||
return t, match
|
||||
}
|
||||
}
|
||||
return &urlTemplates{}, nil
|
||||
}
|
||||
|
||||
type vcsCmd struct {
|
||||
schemes []string
|
||||
download func([]string, string, string) (string, string, error)
|
||||
}
|
||||
|
||||
var vcsCmds = map[string]*vcsCmd{
|
||||
"git": {
|
||||
schemes: []string{"http", "https", "git"},
|
||||
download: downloadGit,
|
||||
},
|
||||
}
|
||||
|
||||
var lsremoteRe = regexp.MustCompile(`(?m)^([0-9a-f]{40})\s+refs/(?:tags|heads)/(.+)$`)
|
||||
|
||||
func downloadGit(schemes []string, repo, savedEtag string) (string, string, error) {
|
||||
var p []byte
|
||||
var scheme string
|
||||
for i := range schemes {
|
||||
cmd := exec.Command("git", "ls-remote", "--heads", "--tags", schemes[i]+"://"+repo+".git")
|
||||
log.Println(strings.Join(cmd.Args, " "))
|
||||
var err error
|
||||
p, err = cmd.Output()
|
||||
if err == nil {
|
||||
scheme = schemes[i]
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if scheme == "" {
|
||||
return "", "", NotFoundError{"VCS not found"}
|
||||
}
|
||||
|
||||
tags := make(map[string]string)
|
||||
for _, m := range lsremoteRe.FindAllSubmatch(p, -1) {
|
||||
tags[string(m[2])] = string(m[1])
|
||||
}
|
||||
|
||||
tag, commit, err := bestTag(tags, "master")
|
||||
if err != nil {
|
||||
return "", "", err
|
||||
}
|
||||
|
||||
etag := scheme + "-" + commit
|
||||
|
||||
if etag == savedEtag {
|
||||
return "", "", ErrNotModified
|
||||
}
|
||||
|
||||
dir := path.Join(TempDir, repo+".git")
|
||||
p, err = ioutil.ReadFile(path.Join(dir, ".git/HEAD"))
|
||||
switch {
|
||||
case err != nil:
|
||||
if err := os.MkdirAll(dir, 0777); err != nil {
|
||||
return "", "", err
|
||||
}
|
||||
cmd := exec.Command("git", "clone", scheme+"://"+repo+".git", dir)
|
||||
log.Println(strings.Join(cmd.Args, " "))
|
||||
if err := cmd.Run(); err != nil {
|
||||
return "", "", err
|
||||
}
|
||||
case string(bytes.TrimRight(p, "\n")) == commit:
|
||||
return tag, etag, nil
|
||||
default:
|
||||
cmd := exec.Command("git", "fetch")
|
||||
log.Println(strings.Join(cmd.Args, " "))
|
||||
cmd.Dir = dir
|
||||
if err := cmd.Run(); err != nil {
|
||||
return "", "", err
|
||||
}
|
||||
}
|
||||
|
||||
cmd := exec.Command("git", "checkout", "--detach", "--force", commit)
|
||||
cmd.Dir = dir
|
||||
if err := cmd.Run(); err != nil {
|
||||
return "", "", err
|
||||
}
|
||||
|
||||
return tag, etag, nil
|
||||
}
|
||||
|
||||
var vcsPattern = regexp.MustCompile(`^(?P<repo>(?:[a-z0-9.\-]+\.)+[a-z0-9.\-]+(?::[0-9]+)?/[A-Za-z0-9_.\-/]*?)\.(?P<vcs>bzr|git|hg|svn)(?P<dir>/[A-Za-z0-9_.\-/]*)?$`)
|
||||
|
||||
func getVCSDoc(client *http.Client, match map[string]string, etagSaved string) (*Package, error) {
|
||||
cmd := vcsCmds[match["vcs"]]
|
||||
if cmd == nil {
|
||||
return nil, NotFoundError{expand("VCS not supported: {vcs}", match)}
|
||||
}
|
||||
|
||||
scheme := match["scheme"]
|
||||
if scheme == "" {
|
||||
i := strings.Index(etagSaved, "-")
|
||||
if i > 0 {
|
||||
scheme = etagSaved[:i]
|
||||
}
|
||||
}
|
||||
|
||||
schemes := cmd.schemes
|
||||
if scheme != "" {
|
||||
for i := range cmd.schemes {
|
||||
if cmd.schemes[i] == scheme {
|
||||
schemes = cmd.schemes[i : i+1]
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Download and checkout.
|
||||
|
||||
tag, etag, err := cmd.download(schemes, match["repo"], etagSaved)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Find source location.
|
||||
|
||||
template, urlMatch := lookupURLTemplate(match["repo"], match["dir"], tag)
|
||||
|
||||
// Slurp source files.
|
||||
|
||||
d := path.Join(TempDir, expand("{repo}.{vcs}", match), match["dir"])
|
||||
f, err := os.Open(d)
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
err = NotFoundError{err.Error()}
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
fis, err := f.Readdir(-1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var files []*source
|
||||
var subdirs []string
|
||||
for _, fi := range fis {
|
||||
switch {
|
||||
case fi.IsDir():
|
||||
if isValidPathElement(fi.Name()) {
|
||||
subdirs = append(subdirs, fi.Name())
|
||||
}
|
||||
case isDocFile(fi.Name()):
|
||||
b, err := ioutil.ReadFile(path.Join(d, fi.Name()))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
files = append(files, &source{
|
||||
name: fi.Name(),
|
||||
browseURL: expand(template.fileBrowse, urlMatch, fi.Name()),
|
||||
data: b,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Create the documentation.
|
||||
|
||||
b := &builder{
|
||||
pdoc: &Package{
|
||||
LineFmt: template.line,
|
||||
ImportPath: match["importPath"],
|
||||
ProjectRoot: expand("{repo}.{vcs}", match),
|
||||
ProjectName: path.Base(match["repo"]),
|
||||
ProjectURL: expand(template.project, urlMatch),
|
||||
BrowseURL: "",
|
||||
Etag: etag,
|
||||
VCS: match["vcs"],
|
||||
Subdirectories: subdirs,
|
||||
},
|
||||
}
|
||||
|
||||
return b.build(files)
|
||||
}
|
10  doc/vet.go
@@ -20,6 +20,8 @@ import (
"go/token"
"strconv"
"strings"

"github.com/garyburd/gosrc"
)

// This list of deprecated exports is used to find code that has not been

@@ -66,12 +68,12 @@ func (v *vetVisitor) Visit(n ast.Node) ast.Visitor {
return v
}

func (b *builder) vetPackage(pkg *ast.Package) {
func (b *builder) vetPackage(pkg *Package, apkg *ast.Package) {
errors := make(map[string]token.Pos)
for _, file := range pkg.Files {
for _, file := range apkg.Files {
for _, is := range file.Imports {
importPath, _ := strconv.Unquote(is.Path.Value)
if !IsValidPath(importPath) &&
if !gosrc.IsValidPath(importPath) &&
!strings.HasPrefix(importPath, "exp/") &&
!strings.HasPrefix(importPath, "appengine") {
errors[fmt.Sprintf("Unrecognized import path %q", importPath)] = is.Pos()

@@ -81,7 +83,7 @@ func (b *builder) vetPackage(pkg *ast.Package) {
ast.Walk(&v, file)
}
for message, pos := range errors {
b.pdoc.Errors = append(b.pdoc.Errors,
pkg.Errors = append(pkg.Errors,
fmt.Sprintf("%s (%s)", message, b.fset.Position(pos)))
}
}
@@ -20,7 +20,7 @@ import (
"os"

"github.com/garyburd/gddo/database"
"github.com/garyburd/gddo/doc"
"github.com/garyburd/gosrc"
)

var dangleCommand = &command{

@@ -42,17 +42,17 @@ func dangle(c *command) {
err = db.Do(func(pi *database.PackageInfo) error {
m[pi.PDoc.ImportPath] |= 1
for _, p := range pi.PDoc.Imports {
if doc.IsValidPath(p) {
if gosrc.IsValidPath(p) {
m[p] |= 2
}
}
for _, p := range pi.PDoc.TestImports {
if doc.IsValidPath(p) {
if gosrc.IsValidPath(p) {
m[p] |= 2
}
}
for _, p := range pi.PDoc.XTestImports {
if doc.IsValidPath(p) {
if gosrc.IsValidPath(p) {
m[p] |= 2
}
}
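The bitmask in the dangle command is compact but easy to misread. A hypothetical, stripped-down illustration of the idea (the reporting step is inferred from the command's purpose and is not part of the hunk above): bit 0 marks packages present in the database, bit 1 marks packages that something imports, so a value of exactly 2 is a dangling reference.

    package main

    import "fmt"

    func main() {
        m := map[string]int{}
        m["github.com/a/b"] |= 1 // indexed in the database
        m["github.com/a/b"] |= 2 // also imported by another package
        m["github.com/c/d"] |= 2 // imported, but never indexed
        for p, v := range m {
            if v == 2 {
                fmt.Println("dangling:", p)
            }
        }
    }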
@@ -16,7 +16,7 @@ package main

import (
"flag"
"github.com/garyburd/gddo/doc"
"github.com/garyburd/gosrc"
"log"
"time"
)

@@ -103,14 +103,14 @@ func readGitHubUpdates() error {
if err := db.GetGob(key, &last); err != nil {
return err
}
last, updates, err := doc.GetGitHubUpdates(httpClient, last)
last, names, err := gosrc.GetGitHubUpdates(httpClient, last)
if err != nil {
return err
}

for _, update := range updates {
log.Printf("bump crawl %s", update.OwnerRepo)
if err := db.BumpCrawl("github.com/" + update.OwnerRepo); err != nil {
for _, name := range names {
log.Printf("bump crawl github.com/%s", name)
if err := db.BumpCrawl("github.com/" + name); err != nil {
log.Println("ERROR force crawl:", err)
}
}
@@ -0,0 +1,11 @@
package main

import (
"github.com/garyburd/gosrc"
)

func init() {
// Register an application at https://github.com/settings/applications/new
// and enter the client ID and client secret here.
gosrc.SetGitHubCredentials("id", "secret")
}
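The checked-in example hard-codes placeholder credentials. A variant that reads them from the environment is sketched below; the GDDO_GITHUB_ID and GDDO_GITHUB_SECRET variable names are illustrative and not part of the project.

    package main

    import (
        "log"
        "os"

        "github.com/garyburd/gosrc"
    )

    func init() {
        // Hypothetical: keep the GitHub application credentials out of the
        // source tree by reading them from the environment instead.
        id, secret := os.Getenv("GDDO_GITHUB_ID"), os.Getenv("GDDO_GITHUB_SECRET")
        if id == "" || secret == "" {
            log.Println("GitHub credentials not set; unauthenticated rate limits apply")
            return
        }
        gosrc.SetGitHubCredentials(id, secret)
    }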
@@ -21,6 +21,7 @@ import (
"time"

"github.com/garyburd/gddo/doc"
"github.com/garyburd/gosrc"
)

var nestedProjectPat = regexp.MustCompile(`/(?:github\.com|launchpad\.net|code\.google\.com/p|bitbucket\.org|labix\.org)/`)

@@ -56,28 +57,28 @@ func crawlDoc(source string, importPath string, pdoc *doc.Package, hasSubdirs bo

start := time.Now()
var err error
if i := strings.Index(importPath, "/src/pkg/"); i > 0 && doc.IsGoRepoPath(importPath[i+len("/src/pkg/"):]) {
if i := strings.Index(importPath, "/src/pkg/"); i > 0 && gosrc.IsGoRepoPath(importPath[i+len("/src/pkg/"):]) {
// Go source tree mirror.
pdoc = nil
err = doc.NotFoundError{Message: "Go source tree mirror."}
} else if i := strings.Index(importPath, "/libgo/go/"); i > 0 && doc.IsGoRepoPath(importPath[i+len("/libgo/go/"):]) {
err = gosrc.NotFoundError{Message: "Go source tree mirror."}
} else if i := strings.Index(importPath, "/libgo/go/"); i > 0 && gosrc.IsGoRepoPath(importPath[i+len("/libgo/go/"):]) {
// Go Frontend source tree mirror.
pdoc = nil
err = doc.NotFoundError{Message: "Go Frontend source tree mirror."}
err = gosrc.NotFoundError{Message: "Go Frontend source tree mirror."}
} else if m := nestedProjectPat.FindStringIndex(importPath); m != nil && exists(importPath[m[0]+1:]) {
pdoc = nil
err = doc.NotFoundError{Message: "Copy of other project."}
err = gosrc.NotFoundError{Message: "Copy of other project."}
} else if blocked, e := db.IsBlocked(importPath); blocked && e == nil {
pdoc = nil
err = doc.NotFoundError{Message: "Blocked."}
err = gosrc.NotFoundError{Message: "Blocked."}
} else {
var pdocNew *doc.Package
pdocNew, err = doc.Get(httpClient, importPath, etag)
message = append(message, "fetch:", int64(time.Since(start)/time.Millisecond))
if err == nil && pdocNew.Name == "" && !hasSubdirs {
pdoc = nil
err = doc.NotFoundError{Message: "No Go files or subdirs"}
} else if err != doc.ErrNotModified {
err = gosrc.NotFoundError{Message: "No Go files or subdirs"}
} else if err != gosrc.ErrNotModified {
pdoc = pdocNew
}
}

@@ -97,12 +98,12 @@ func crawlDoc(source string, importPath string, pdoc *doc.Package, hasSubdirs bo
if err := db.Put(pdoc, nextCrawl); err != nil {
log.Printf("ERROR db.Put(%q): %v", importPath, err)
}
case err == doc.ErrNotModified:
case err == gosrc.ErrNotModified:
message = append(message, "touch")
if err := db.SetNextCrawlEtag(pdoc.ProjectRoot, pdoc.Etag, nextCrawl); err != nil {
log.Printf("ERROR db.SetNextCrawl(%q): %v", importPath, err)
}
case doc.IsNotFound(err):
case gosrc.IsNotFound(err):
message = append(message, "notfound:", err)
if err := db.Delete(importPath); err != nil {
log.Printf("ERROR db.Delete(%q): %v", importPath, err)
@@ -45,6 +45,7 @@ import (
"code.google.com/p/go.talks/pkg/present"
"github.com/garyburd/gddo/database"
"github.com/garyburd/gddo/doc"
"github.com/garyburd/gosrc"
"github.com/garyburd/indigo/server"
"github.com/garyburd/indigo/web"
)

@@ -506,7 +507,7 @@ func serveHome(resp web.Response, req *web.Request) error {
q = path
}

if doc.IsValidRemotePath(q) {
if gosrc.IsValidRemotePath(q) {
pdoc, pkgs, err := getDoc(q, queryRequest)
if err == nil && (pdoc != nil || len(pkgs) > 0) {
return web.Redirect(resp, req, "/"+q, 302, nil)

@@ -594,7 +595,7 @@ func servePresentHome(resp web.Response, req *web.Request) error {

var (
presMu sync.Mutex
presentations = map[string]*doc.Presentation{}
presentations = map[string]*gosrc.Presentation{}
)

func servePresentation(resp web.Response, req *web.Request) error {

@@ -615,7 +616,7 @@ func servePresentation(resp web.Response, req *web.Request) error {
if pres == nil {
var err error
log.Println("Fetch presentation ", p)
pres, err = doc.GetPresentation(httpClient, p)
pres, err = gosrc.GetPresentation(httpClient, p)
if err != nil {
return err
}

@@ -702,7 +703,7 @@ func handleError(resp web.Response, req *web.Request, status int, err error, r i
s := web.StatusText(status)
if err == errUpdateTimeout {
s = "Timeout getting package files from the version control system."
} else if e, ok := err.(*doc.RemoteError); ok {
} else if e, ok := err.(*gosrc.RemoteError); ok {
s = "Error getting package files from " + e.Host + "."
}
w := resp.Start(web.StatusInternalServerError, web.Header{web.HeaderContentType: {"text/plan; charset=utf-8"}})

@@ -717,12 +718,12 @@ func handlePresentError(resp web.Response, req *web.Request, status int, err err
// nothing to do
default:
s := web.StatusText(status)
if doc.IsNotFound(err) {
if gosrc.IsNotFound(err) {
s = web.StatusText(web.StatusNotFound)
status = web.StatusNotFound
} else if err == errUpdateTimeout {
s = "Timeout getting package files from the version control system."
} else if e, ok := err.(*doc.RemoteError); ok {
} else if e, ok := err.(*gosrc.RemoteError); ok {
s = "Error getting package files from " + e.Host + "."
}
w := resp.Start(status, web.Header{web.HeaderContentType: {"text/plan; charset=utf-8"}})

@@ -765,44 +766,11 @@ var (
firstGetTimeout = flag.Duration("first_get_timeout", 5*time.Second, "Time to wait for first fetch of package from the VCS.")
maxAge = flag.Duration("max_age", 24*time.Hour, "Update package documents older than this age.")
httpAddr = flag.String("http", ":8080", "Listen for HTTP connections on this address")
secretsPath = flag.String("secrets", "secrets.json", "Path to file containing application ids and credentials for other services.")
redirGoTalks = flag.Bool("redirGoTalks", true, "Redirect paths with prefix 'code.google.com/p/go.talks/' to talks.golang.org")
srcZip = flag.String("srcZip", "", "")

secrets struct {
// HTTP user agent for outbound requests
UserAgent string

// GitHub API Credentials
GitHubId string
GitHubSecret string

// Google Analytics account for tracking codes.
GAAccount string
}

srcFiles = make(map[string]*zip.File)
srcFiles = make(map[string]*zip.File)
)

func readSecrets() error {
b, err := ioutil.ReadFile(*secretsPath)
if err != nil {
return err
}
if err = json.Unmarshal(b, &secrets); err != nil {
return err
}
if secrets.UserAgent != "" {
doc.SetUserAgent(secrets.UserAgent)
}
if secrets.GitHubId != "" {
doc.SetGitHubCredentials(secrets.GitHubId, secrets.GitHubSecret)
} else {
log.Printf("GitHub credentials not set in %q.", *secretsPath)
}
return nil
}

var cacheBusters = map[string]string{}

func dataHandler(cacheBusterKey, contentType, dir string, names ...string) web.Handler {

@@ -825,9 +793,6 @@ func dataHandler(cacheBusterKey, contentType, dir string, names ...string) web.H
func main() {
flag.Parse()
log.Printf("Starting server, os.Args=%s", strings.Join(os.Args, " "))
if err := readSecrets(); err != nil {
log.Fatal(err)
}

if *srcZip != "" {
r, err := zip.OpenReader(*srcZip)
@@ -38,6 +38,7 @@ import (
"code.google.com/p/go.talks/pkg/present"

"github.com/garyburd/gddo/doc"
"github.com/garyburd/gosrc"
"github.com/garyburd/indigo/web"
)

@@ -69,7 +70,7 @@ func (pdoc *tdoc) SourceLink(pos doc.Pos, text, anchor string) htemp.HTML {
u = fmt.Sprintf(pdoc.LineFmt, pdoc.Files[pos.File].URL, pos.Line)
}
u = htemp.HTMLEscapeString(u)
return htemp.HTML(fmt.Sprintf(`<a href="%s">%s</a>`, u, text))
return htemp.HTML(fmt.Sprintf(`<a title="View Source" href="%s">%s</a>`, u, text))
}

func (pdoc *tdoc) PageName() string {

@@ -320,7 +321,7 @@ func commentFn(v string) htemp.HTML {
})
p = replaceAll(p, packagePat, func(out, src []byte, m []int) []byte {
path := bytes.TrimRight(src[m[2]:m[3]], ".!?:")
if !doc.IsValidPath(string(path)) {
if !gosrc.IsValidPath(string(path)) {
return append(out, src[m[0]:m[1]]...)
}
out = append(out, src[m[0]:m[2]]...)

@@ -396,8 +397,10 @@ func codeFn(c doc.Code, typ *doc.Type) htemp.HTML {
return htemp.HTML(buf.String())
}

var gaAccount string

func gaAccountFn() string {
return secrets.GAAccount
return gaAccount
}

func noteTitleFn(s string) string {

@@ -456,7 +459,7 @@ func parseHTMLTemplates(sets [][]string) error {
"host": hostFn,
"htmlComment": htmlCommentFn,
"importPath": importPathFn,
"isValidImportPath": doc.IsValidPath,
"isValidImportPath": gosrc.IsValidPath,
"map": mapFn,
"noteTitle": noteTitleFn,
"relativePath": relativePathFn,