зеркало из https://github.com/golang/gddo.git
Initial commit
This commit is contained in:
Коммит
68142c388c
|
@ -0,0 +1,37 @@
|
|||
This project is the source for http://godoc.org/
|
||||
|
||||
The code in this project is designed to be used by godoc.org. Send mail to
|
||||
info@godoc.org if you want to discuss other uses of the code.
|
||||
|
||||
Feedback
|
||||
--------
|
||||
|
||||
Send ideas and questions to info@godoc.org. Request features and report bugs
|
||||
using the [GitHub Issue
|
||||
Tracker](https://github.com/garyburd/gopkgdoc/issues/new).
|
||||
|
||||
|
||||
Contributing
|
||||
------------
|
||||
|
||||
Contributions are welcome.
|
||||
|
||||
Before writing code, send mail to info@godoc.org to discuss what you plan to
|
||||
do. This gives the project manager a chance to validate the design, avoid
|
||||
duplication of effort and ensure that the changes fit the goals of the project.
|
||||
Do not start the discussion with a pull request.
|
||||
|
||||
Development Environment Setup
|
||||
-----------------------------
|
||||
|
||||
- Install and run [Redis 2.6.x](http://redis.io/download). The redis.conf file included in the Redis distribution is suitable for development.
|
||||
- Install Go from source and update to tip.
|
||||
- Install and run the server:
|
||||
|
||||
go get github.com/garyburd/gopkgdoc/gddo-server
|
||||
gddo-server
|
||||
|
||||
License
|
||||
-------
|
||||
|
||||
[Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0.html).
|
|
@ -0,0 +1,899 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
// Redis keys and types:
|
||||
//
|
||||
// maxPackageId string: next id to assign
|
||||
// id:<path> string: id for given import path
|
||||
// pkg:<id> hash
|
||||
// terms: space separated search terms
|
||||
// path: import path
|
||||
// synopsis: synopsis
|
||||
// gob: snappy compressed gob encoded doc.Package
|
||||
// score: document search score
|
||||
// etag:
|
||||
// kind: p=package, c=command, d=directory with no go files
|
||||
// index:<term> set: package ids for given search term
|
||||
// index:import:<path> set: packages with import path
|
||||
// index:project:<root> set: packages in project with root
|
||||
// nextCrawl zset: package id, Unix time for next crawl
|
||||
// block set: packages to block
|
||||
// popular zset: package id, score
|
||||
// popular:0 string: scaled base time for popular scores
|
||||
// newCrawl set: new paths to crawl
|
||||
// badCrawl set: paths that returned error when crawling.
|
||||
|
||||
// Package database manages storage for GoPkgDoc.
|
||||
package database
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/gob"
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"math"
|
||||
"net/url"
|
||||
"os"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"code.google.com/p/snappy-go/snappy"
|
||||
"github.com/garyburd/gddo/doc"
|
||||
"github.com/garyburd/redigo/redis"
|
||||
)
|
||||
|
||||
// Database stores package documentation and search indexes in Redis.
// Pool is an interface (rather than a concrete *redis.Pool) so tests
// can supply their own connection source.
type Database struct {
	Pool interface {
		Get() redis.Conn
	}
}
|
||||
|
||||
// Package is a lightweight summary of a package: the import path and,
// when available, a one-line synopsis.
type Package struct {
	Path     string `json:"path"`
	Synopsis string `json:"synopsis,omitempty"`
}
|
||||
|
||||
type byPath []Package
|
||||
|
||||
func (p byPath) Len() int { return len(p) }
|
||||
func (p byPath) Less(i, j int) bool { return p[i].Path < p[j].Path }
|
||||
func (p byPath) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
|
||||
|
||||
// Command-line flags configuring the Redis connection.
var (
	redisServer      = flag.String("db-server", "redis://127.0.0.1:6379", "URI of Redis server.")
	redisIdleTimeout = flag.Duration("db-idle-timeout", 250*time.Second, "Close Redis connections after remaining idle for this duration.")
	redisLog         = flag.Bool("db-log", false, "Log database commands")
)
|
||||
|
||||
// dialDb connects to the Redis server named by the db-server flag,
// optionally wrapping the connection for command logging and
// authenticating with the password embedded in the server URI.
// The named results let the deferred cleanup observe failures from
// any step after the dial.
func dialDb() (c redis.Conn, err error) {
	u, err := url.Parse(*redisServer)
	if err != nil {
		return nil, err
	}

	// Close the partially configured connection if any later step fails.
	defer func() {
		if err != nil && c != nil {
			c.Close()
		}
	}()

	c, err = redis.Dial("tcp", u.Host)
	if err != nil {
		return
	}

	if *redisLog {
		l := log.New(os.Stderr, "", log.LstdFlags)
		c = redis.NewLoggingConn(c, l, "")
	}

	// Authenticate when the URI carries credentials, e.g. redis://:pw@host.
	if u.User != nil {
		if pw, ok := u.User.Password(); ok {
			if _, err = c.Do("AUTH", pw); err != nil {
				return
			}
		}
	}
	return
}
|
||||
|
||||
// New creates a database configured from command line flags.
|
||||
func New() (*Database, error) {
|
||||
pool := &redis.Pool{
|
||||
Dial: dialDb,
|
||||
MaxIdle: 10,
|
||||
IdleTimeout: *redisIdleTimeout,
|
||||
}
|
||||
|
||||
if c := pool.Get(); c.Err() != nil {
|
||||
return nil, c.Err()
|
||||
} else {
|
||||
c.Close()
|
||||
}
|
||||
|
||||
return &Database{Pool: pool}, nil
|
||||
}
|
||||
|
||||
// Exists returns true if package with import path exists in the database.
|
||||
func (db *Database) Exists(path string) (bool, error) {
|
||||
c := db.Pool.Get()
|
||||
defer c.Close()
|
||||
return redis.Bool(c.Do("EXISTS", "id:"+path))
|
||||
}
|
||||
|
||||
// putScript stores one package atomically. ARGV: path, synopsis, score,
// gob, terms, etag, kind, nextCrawl. It allocates an id for new paths,
// diffs the old and new search terms to update the index:<term> sets,
// queues newly referenced imports for crawling, and writes the pkg:<id>
// hash. nextCrawl of '0' means "do not schedule".
var putScript = redis.NewScript(0, `
	local path = ARGV[1]
	local synopsis = ARGV[2]
	local score = ARGV[3]
	local gob = ARGV[4]
	local terms = ARGV[5]
	local etag = ARGV[6]
	local kind = ARGV[7]
	local nextCrawl = ARGV[8]

	local id = redis.call('GET', 'id:' .. path)
	if not id then
		id = redis.call('INCR', 'maxPackageId')
		redis.call('SET', 'id:' .. path, id)
	end

	local update = {}
	for term in string.gmatch(redis.call('HGET', 'pkg:' .. id, 'terms') or '', '([^ ]+)') do
		update[term] = 1
	end

	for term in string.gmatch(terms, '([^ ]+)') do
		update[term] = (update[term] or 0) + 2
	end

	for term, x in pairs(update) do
		if x == 1 then
			redis.call('SREM', 'index:' .. term, id)
		elseif x == 2 then
			redis.call('SADD', 'index:' .. term, id)
			if string.sub(term, 1, 7) == 'import:' then
				local import = string.sub(term, 8)
				if redis.call('EXISTS', 'id:' .. import) == 0 and redis.call('SISMEMBER', 'badCrawl', import) == 0 then
					redis.call('SADD', 'newCrawl', import)
				end
			end
		end
	end

	redis.call('SREM', 'badCrawl', path)
	redis.call('SREM', 'newCrawl', path)

	if nextCrawl ~= '0' then
		redis.call('ZADD', 'nextCrawl', nextCrawl, id)
	end

	return redis.call('HMSET', 'pkg:' .. id, 'path', path, 'synopsis', synopsis, 'score', score, 'gob', gob, 'terms', terms, 'etag', etag, 'kind', kind)
`)
|
||||
|
||||
// Put adds the package documentation to the database. The document is
// gob-encoded and snappy-compressed; oversized documents are truncated
// before storage. A zero nextCrawl leaves the crawl schedule unchanged.
func (db *Database) Put(pdoc *doc.Package, nextCrawl time.Time) error {
	c := db.Pool.Get()
	defer c.Close()

	score := documentScore(pdoc)
	terms := documentTerms(pdoc, score)

	var gobBuf bytes.Buffer
	if err := gob.NewEncoder(&gobBuf).Encode(pdoc); err != nil {
		return err
	}

	// Truncate large documents.
	if gobBuf.Len() > 700000 {
		// Copy before mutating so the caller's document is untouched.
		pdocNew := *pdoc
		pdoc = &pdocNew
		pdoc.Truncated = true
		pdoc.Vars = nil
		pdoc.Funcs = nil
		pdoc.Types = nil
		pdoc.Consts = nil
		pdoc.Examples = nil
		gobBuf.Reset()
		if err := gob.NewEncoder(&gobBuf).Encode(pdoc); err != nil {
			return err
		}
	}

	gobBytes, err := snappy.Encode(nil, gobBuf.Bytes())
	if err != nil {
		return err
	}

	// kind: p=package, c=command, d=directory with no Go files.
	kind := "p"
	switch {
	case pdoc.Name == "":
		kind = "d"
	case pdoc.IsCmd:
		kind = "c"
	}

	// t == 0 tells putScript not to touch the nextCrawl schedule.
	t := int64(0)
	if !nextCrawl.IsZero() {
		t = nextCrawl.Unix()
	}
	_, err = putScript.Do(c, pdoc.ImportPath, pdoc.Synopsis, score, gobBytes, strings.Join(terms, " "), pdoc.Etag, kind, t)
	return err
}
|
||||
|
||||
// setNextCrawlEtagScript reschedules every package in a project whose
// stored etag matches ARGV[2]. ARGV: root, etag, nextCrawl.
var setNextCrawlEtagScript = redis.NewScript(0, `
	local root = ARGV[1]
	local etag = ARGV[2]
	local nextCrawl = ARGV[3]

	local pkgs = redis.call('SORT', 'index:project:' .. root, 'GET', '#', 'GET', 'pkg:*->etag')

	for i=1,#pkgs,2 do
		if pkgs[i+1] == etag then
			redis.call('ZADD', 'nextCrawl', nextCrawl, pkgs[i])
		end
	end
`)
|
||||
|
||||
// SetNextCrawlEtag sets the next crawl time for all packages in the project with the given etag.
|
||||
func (db *Database) SetNextCrawlEtag(projectRoot string, etag string, t time.Time) error {
|
||||
c := db.Pool.Get()
|
||||
defer c.Close()
|
||||
_, err := setNextCrawlEtagScript.Do(c, normalizeProjectRoot(projectRoot), etag, t.Unix())
|
||||
return err
|
||||
}
|
||||
|
||||
// setNextCrawlScript moves the crawl time of every package in a project
// earlier, never later. ARGV: root, nextCrawl.
// NOTE(review): tonumber(ZSCORE ...) errors if a project member is
// missing from the nextCrawl zset — confirm members are always present.
var setNextCrawlScript = redis.NewScript(0, `
	local root = ARGV[1]
	local nextCrawl = tonumber(ARGV[2])

	local pkgs = redis.call('SORT', 'index:project:' .. root, 'GET', '#')

	for i=1,#pkgs do
		if nextCrawl < tonumber(redis.call('ZSCORE', 'nextCrawl', pkgs[i])) then
			redis.call('ZADD', 'nextCrawl', nextCrawl, pkgs[i])
		end
	end
`)
|
||||
|
||||
// SetNextCrawl sets the maximum next crawl time for all packages in the project.
|
||||
func (db *Database) SetNextCrawl(projectRoot string, t time.Time) error {
|
||||
c := db.Pool.Get()
|
||||
defer c.Close()
|
||||
_, err := setNextCrawlScript.Do(c, normalizeProjectRoot(projectRoot), t.Unix())
|
||||
return err
|
||||
}
|
||||
|
||||
// getDocScript gets the package documentation and update time for the
// specified path. If path is "-", then the oldest document is returned.
// Reply: false when not found, otherwise {gob, nextCrawl} where
// nextCrawl is 0 for packages absent from the crawl schedule.
var getDocScript = redis.NewScript(0, `
	local path = ARGV[1]

	local id
	if path == '-' then
		local r = redis.call('ZRANGE', 'nextCrawl', 0, 0)
		if not r or #r == 0 then
			return false
		end
		id = r[1]
	else
		id = redis.call('GET', 'id:' .. path)
		if not id then
			return false
		end
	end

	local gob = redis.call('HGET', 'pkg:' .. id, 'gob')
	if not gob then
		return false
	end

	local nextCrawl = redis.call('ZSCORE', 'nextCrawl', id)
	if not nextCrawl then
		nextCrawl = 0
	end

	return {gob, nextCrawl}
`)
|
||||
|
||||
// getDoc fetches and decodes the stored documentation for path along
// with its next crawl time. A missing package is not an error: the
// result is (nil, zero time, nil). Path "-" selects the package due to
// be crawled soonest (see getDocScript).
func (db *Database) getDoc(c redis.Conn, path string) (*doc.Package, time.Time, error) {
	r, err := redis.Values(getDocScript.Do(c, path))
	if err == redis.ErrNil {
		return nil, time.Time{}, nil
	} else if err != nil {
		return nil, time.Time{}, err
	}

	var p []byte
	var t int64

	if _, err := redis.Scan(r, &p, &t); err != nil {
		return nil, time.Time{}, err
	}

	// Stored documents are snappy-compressed gobs (see Put).
	p, err = snappy.Decode(nil, p)
	if err != nil {
		return nil, time.Time{}, err
	}

	var pdoc doc.Package
	if err := gob.NewDecoder(bytes.NewReader(p)).Decode(&pdoc); err != nil {
		return nil, time.Time{}, err
	}

	// Fall back to the document's update time when the package has no
	// entry in the nextCrawl zset (t == 0).
	nextCrawl := pdoc.Updated
	if t != 0 {
		nextCrawl = time.Unix(t, 0).UTC()
	}

	return &pdoc, nextCrawl, err
}
|
||||
|
||||
// getSubdirsScript returns (path, synopsis, kind) triples for the first
// project root in ARGV that has a non-empty index, sorted by path.
var getSubdirsScript = redis.NewScript(0, `
	local reply
	for i = 1,#ARGV do
		reply = redis.call('SORT', 'index:project:' .. ARGV[i], 'ALPHA', 'BY', 'pkg:*->path', 'GET', 'pkg:*->path', 'GET', 'pkg:*->synopsis', 'GET', 'pkg:*->kind')
		if #reply > 0 then
			break
		end
	end
	return reply
`)
|
||||
|
||||
// getSubdirs returns the packages and commands stored under path,
// found by scanning the project index of the most plausible project root.
func (db *Database) getSubdirs(c redis.Conn, path string, pdoc *doc.Package) ([]Package, error) {
	var reply interface{}
	var err error

	switch {
	case isStandardPackage(path):
		// All standard packages live under the "go" project root.
		reply, err = getSubdirsScript.Do(c, "go")
	case pdoc != nil:
		reply, err = getSubdirsScript.Do(c, pdoc.ProjectRoot)
	default:
		// Project root unknown: try path and up to four of its
		// ancestors; the script uses the first with a non-empty index.
		var roots []interface{}
		projectRoot := path
		for i := 0; i < 5; i++ {
			roots = append(roots, projectRoot)
			if j := strings.LastIndex(projectRoot, "/"); j < 0 {
				break
			} else {
				projectRoot = projectRoot[:j]
			}
		}
		reply, err = getSubdirsScript.Do(c, roots...)
	}

	values, err := redis.Values(reply, err)
	if err != nil {
		return nil, err
	}

	var subdirs []Package
	prefix := path + "/"

	for len(values) > 0 {
		var pkg Package
		var kind string
		values, err = redis.Scan(values, &pkg.Path, &pkg.Synopsis, &kind)
		if err != nil {
			return nil, err
		}
		// Keep only real packages/commands strictly below path.
		if (kind == "p" || kind == "c") && strings.HasPrefix(pkg.Path, prefix) {
			subdirs = append(subdirs, pkg)
		}
	}

	return subdirs, err
}
|
||||
|
||||
// Get gets the package documentation and sub-directories for the given
// import path.
func (db *Database) Get(path string) (*doc.Package, []Package, time.Time, error) {
	c := db.Pool.Get()
	defer c.Close()

	pdoc, nextCrawl, err := db.getDoc(c, path)
	if err != nil {
		return nil, nil, time.Time{}, err
	}

	if pdoc != nil {
		// fixup for special "-" path: use the resolved import path.
		path = pdoc.ImportPath
	}

	subdirs, err := db.getSubdirs(c, path, pdoc)
	if err != nil {
		return nil, nil, time.Time{}, err
	}
	return pdoc, subdirs, nextCrawl, nil
}
|
||||
|
||||
func (db *Database) GetDoc(path string) (*doc.Package, time.Time, error) {
|
||||
c := db.Pool.Get()
|
||||
defer c.Close()
|
||||
return db.getDoc(c, path)
|
||||
}
|
||||
|
||||
// deleteScript removes every trace of one package: its search index
// entries, crawl-schedule entries, popularity score, hash, and id
// mapping. Returns false when the path is unknown.
var deleteScript = redis.NewScript(0, `
	local path = ARGV[1]

	local id = redis.call('GET', 'id:' .. path)
	if not id then
		return false
	end

	for term in string.gmatch(redis.call('HGET', 'pkg:' .. id, 'terms') or '', '([^ ]+)') do
		redis.call('SREM', 'index:' .. term, id)
	end

	redis.call('ZREM', 'nextCrawl', id)
	redis.call('SREM', 'badCrawl', path)
	redis.call('SREM', 'newCrawl', path)
	redis.call('ZREM', 'popular', id)
	redis.call('DEL', 'pkg:' .. id)
	return redis.call('DEL', 'id:' .. path)
`)
|
||||
|
||||
// Delete deletes the documenation for the given import path.
|
||||
func (db *Database) Delete(path string) error {
|
||||
c := db.Pool.Get()
|
||||
defer c.Close()
|
||||
_, err := deleteScript.Do(c, path)
|
||||
return err
|
||||
}
|
||||
|
||||
func packages(reply interface{}, all bool) ([]Package, error) {
|
||||
values, err := redis.Values(reply, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result := make([]Package, 0, len(values)/3)
|
||||
for len(values) > 0 {
|
||||
var pkg Package
|
||||
var kind string
|
||||
values, err = redis.Scan(values, &pkg.Path, &pkg.Synopsis, &kind)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if !all && kind == "d" {
|
||||
continue
|
||||
}
|
||||
if pkg.Path == "C" {
|
||||
pkg.Synopsis = "Package C is a \"pseudo-package\" used to access the C namespace from a cgo source file."
|
||||
}
|
||||
result = append(result, pkg)
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (db *Database) getPackages(key string, all bool) ([]Package, error) {
|
||||
c := db.Pool.Get()
|
||||
defer c.Close()
|
||||
reply, err := c.Do("SORT", key, "ALPHA", "BY", "pkg:*->path", "GET", "pkg:*->path", "GET", "pkg:*->synopsis", "GET", "pkg:*->kind")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return packages(reply, all)
|
||||
}
|
||||
|
||||
// GoIndex returns the standard library packages (project root "go"),
// excluding bare directories.
func (db *Database) GoIndex() ([]Package, error) {
	return db.getPackages("index:project:go", false)
}
|
||||
|
||||
// Index returns the full package index.
// NOTE(review): no code visible in this file writes the "index:all:"
// key — verify it is maintained elsewhere (the trailing colon looks
// suspicious).
func (db *Database) Index() ([]Package, error) {
	return db.getPackages("index:all:", false)
}
|
||||
|
||||
// Project returns all entries in the project, including directories
// with no Go files (all=true).
func (db *Database) Project(projectRoot string) ([]Package, error) {
	return db.getPackages("index:project:"+normalizeProjectRoot(projectRoot), true)
}
|
||||
|
||||
func (db *Database) AllPackages() ([]Package, error) {
|
||||
c := db.Pool.Get()
|
||||
defer c.Close()
|
||||
values, err := redis.Values(c.Do("SORT", "nextCrawl", "DESC", "BY", "pkg:*->score", "GET", "pkg:*->path", "GET", "pkg:*->kind"))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result := make([]Package, 0, len(values)/2)
|
||||
for len(values) > 0 {
|
||||
var pkg Package
|
||||
var kind string
|
||||
values, err = redis.Scan(values, &pkg.Path, &kind)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if kind == "d" {
|
||||
continue
|
||||
}
|
||||
result = append(result, pkg)
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// packagesScript returns a (path, synopsis, kind) triple for each path
// in ARGV. Unknown paths are reported with empty synopsis and kind 'u'.
var packagesScript = redis.NewScript(0, `
	local result = {}
	for i = 1,#ARGV do
		local path = ARGV[i]
		local synopsis = ''
		local kind = 'u'
		local id = redis.call('GET', 'id:' .. path)
		if id then
			synopsis = redis.call('HGET', 'pkg:' .. id, 'synopsis')
			kind = redis.call('HGET', 'pkg:' .. id, 'kind')
		end
		result[#result+1] = path
		result[#result+1] = synopsis
		result[#result+1] = kind
	end
	return result
`)
|
||||
|
||||
func (db *Database) Packages(paths []string) ([]Package, error) {
|
||||
var args []interface{}
|
||||
for _, p := range paths {
|
||||
args = append(args, p)
|
||||
}
|
||||
c := db.Pool.Get()
|
||||
defer c.Close()
|
||||
reply, err := packagesScript.Do(c, args...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
pkgs, err := packages(reply, false)
|
||||
sort.Sort(byPath(pkgs))
|
||||
return pkgs, err
|
||||
}
|
||||
|
||||
func (db *Database) ImporterCount(path string) (int, error) {
|
||||
c := db.Pool.Get()
|
||||
defer c.Close()
|
||||
return redis.Int(c.Do("SCARD", "index:import:"+path))
|
||||
}
|
||||
|
||||
// Importers lists the packages that import path, excluding directories.
func (db *Database) Importers(path string) ([]Package, error) {
	return db.getPackages("index:import:"+path, false)
}
|
||||
|
||||
// Block adds root to the block set and deletes the stored documentation
// for root and every package beneath it.
func (db *Database) Block(root string) error {
	c := db.Pool.Get()
	defer c.Close()
	if _, err := c.Do("SADD", "block", root); err != nil {
		return err
	}
	// NOTE(review): KEYS scans the whole keyspace; acceptable for an
	// administrative operation, not for a hot path.
	keys, err := redis.Values(c.Do("KEYS", "id:"+root+"*"))
	if err != nil {
		return err
	}
	for _, key := range keys {
		path := string(key.([]byte)[len("id:"):])
		// Delete exact matches and children ("root/..."), but not
		// unrelated siblings that merely share the prefix (e.g. "rootx").
		if path == root || strings.HasPrefix(path, root) && path[len(root)] == '/' {
			if _, err := deleteScript.Do(c, path); err != nil {
				return err
			}
		}
	}
	return nil
}
|
||||
|
||||
// isBlockedScript returns 1 if any path prefix of ARGV[1] (split on
// '/') is a member of the block set, else 0.
var isBlockedScript = redis.NewScript(0, `
	local path = ''
	for s in string.gmatch(ARGV[1], '[^/]+') do
		path = path .. s
		if redis.call('SISMEMBER', 'block', path) == 1 then
			return 1
		end
		path = path .. '/'
	end
	return 0
`)
|
||||
|
||||
func (db *Database) IsBlocked(path string) (bool, error) {
|
||||
c := db.Pool.Get()
|
||||
defer c.Close()
|
||||
return redis.Bool(isBlockedScript.Do(c, path))
|
||||
}
|
||||
|
||||
// Query returns the packages matching search query q, ordered by
// descending score, with an exact standard-package match promoted to
// the front of the list.
func (db *Database) Query(q string) ([]Package, error) {
	terms := parseQuery(q)
	if len(terms) == 0 {
		return nil, nil
	}
	c := db.Pool.Get()
	defer c.Close()
	n, err := redis.Int(c.Do("INCR", "maxQueryId"))
	if err != nil {
		return nil, err
	}
	// Unique temporary key for the term-set intersection.
	id := "tmp:query-" + strconv.Itoa(n)

	args := []interface{}{id}
	for _, term := range terms {
		args = append(args, "index:"+term)
	}
	// Pipeline: intersect the per-term index sets, sort by score, then
	// delete the temporary key; c.Do("") flushes and collects all
	// replies, of which the SORT reply is values[1].
	c.Send("SINTERSTORE", args...)
	c.Send("SORT", id, "DESC", "BY", "pkg:*->score", "GET", "pkg:*->path", "GET", "pkg:*->synopsis", "GET", "pkg:*->kind")
	c.Send("DEL", id)
	values, err := redis.Values(c.Do(""))
	if err != nil {
		return nil, err
	}
	pkgs, err := packages(values[1], false)

	// Move exact match on standard package to the top of the list.
	for i, pkg := range pkgs {
		if !isStandardPackage(pkg.Path) {
			break
		}
		if strings.HasSuffix(pkg.Path, q) {
			pkgs[0], pkgs[i] = pkgs[i], pkgs[0]
			break
		}
	}
	return pkgs, err
}
|
||||
|
||||
// PackageInfo bundles everything stored about one package for bulk
// processing with Do.
type PackageInfo struct {
	PDoc  *doc.Package // decoded package documentation
	Pkgs  []Package    // sub-directories
	Score float64      // document search score
	Kind  string       // p=package, c=command, d=directory
}
|
||||
|
||||
// Do executes function f for each document in the database. Iteration
// stops at the first error from Redis, decoding, or f.
func (db *Database) Do(f func(*PackageInfo) error) error {
	c := db.Pool.Get()
	defer c.Close()
	// NOTE(review): KEYS is O(keyspace); fine for offline batch jobs.
	keys, err := redis.Values(c.Do("KEYS", "pkg:*"))
	if err != nil {
		return err
	}
	for _, key := range keys {
		values, err := redis.Values(c.Do("HMGET", key, "gob", "score", "kind", "path"))
		if err != nil {
			return err
		}

		var (
			pi   PackageInfo
			p    []byte
			path string
		)

		if _, err := redis.Scan(values, &p, &pi.Score, &pi.Kind, &path); err != nil {
			return err
		}

		// Skip hash entries that have no stored document.
		if p == nil {
			continue
		}

		p, err = snappy.Decode(nil, p)
		if err != nil {
			return fmt.Errorf("snappy decoding %s: %v", path, err)
		}

		if err := gob.NewDecoder(bytes.NewReader(p)).Decode(&pi.PDoc); err != nil {
			return fmt.Errorf("gob decoding %s: %v", path, err)
		}
		pi.Pkgs, err = db.getSubdirs(c, pi.PDoc.ImportPath, pi.PDoc)
		if err != nil {
			return fmt.Errorf("get subdirs %s: %v", path, err)
		}
		if err := f(&pi); err != nil {
			return fmt.Errorf("func %s: %v", path, err)
		}
	}
	return nil
}
|
||||
|
||||
// importGraphScript returns {synopsis, terms} for one import path, or
// false when the path is unknown.
var importGraphScript = redis.NewScript(0, `
	local path = ARGV[1]

	local id = redis.call('GET', 'id:' .. path)
	if not id then
		return false
	end

	return redis.call('HMGET', 'pkg:' .. id, 'synopsis', 'terms')
`)
|
||||
|
||||
// ImportGraph returns the transitive import graph rooted at pdoc as a
// node list and a list of (from, to) index pairs. When hideStdDeps is
// true, edges out of standard-library nodes are omitted.
func (db *Database) ImportGraph(pdoc *doc.Package, hideStdDeps bool) ([]Package, [][2]int, error) {

	// This breadth-first traversal of the package's dependencies uses the
	// Redis pipeline as queue. Links to packages with invalid import paths are
	// only included for the root package.

	c := db.Pool.Get()
	defer c.Close()
	// Preload the script so Send/Receive can use EVALSHA on this conn.
	if err := importGraphScript.Load(c); err != nil {
		return nil, nil, err
	}

	nodes := []Package{{Path: pdoc.ImportPath, Synopsis: pdoc.Synopsis}}
	edges := [][2]int{}
	index := map[string]int{pdoc.ImportPath: 0}

	// Seed the queue with the root's direct imports.
	for _, path := range pdoc.Imports {
		j := len(nodes)
		index[path] = j
		edges = append(edges, [2]int{0, j})
		nodes = append(nodes, Package{Path: path})
		importGraphScript.Send(c, path)
	}

	// Each loop iteration consumes one pipelined reply and may enqueue
	// more requests, growing nodes until the frontier is exhausted.
	for i := 1; i < len(nodes); i++ {
		c.Flush()
		r, err := redis.Values(c.Receive())
		if err == redis.ErrNil {
			// Unknown package: keep the node, but it contributes no edges.
			continue
		} else if err != nil {
			return nil, nil, err
		}
		var synopsis, terms string
		if _, err := redis.Scan(r, &synopsis, &terms); err != nil {
			return nil, nil, err
		}
		nodes[i].Synopsis = synopsis
		if hideStdDeps && isStandardPackage(nodes[i].Path) {
			continue
		}
		// The stored search terms encode imports as "import:<path>".
		for _, term := range strings.Fields(terms) {
			if strings.HasPrefix(term, "import:") {
				path := term[len("import:"):]
				j, ok := index[path]
				if !ok {
					j = len(nodes)
					index[path] = j
					nodes = append(nodes, Package{Path: path})
					importGraphScript.Send(c, path)
				}
				edges = append(edges, [2]int{i, j})
			}
		}
	}
	return nodes, edges, nil
}
|
||||
|
||||
func (db *Database) PutGob(key string, value interface{}) error {
|
||||
var buf bytes.Buffer
|
||||
if err := gob.NewEncoder(&buf).Encode(value); err != nil {
|
||||
return err
|
||||
}
|
||||
c := db.Pool.Get()
|
||||
defer c.Close()
|
||||
_, err := c.Do("SET", "gob:"+key, buf.Bytes())
|
||||
return err
|
||||
}
|
||||
|
||||
func (db *Database) GetGob(key string, value interface{}) error {
|
||||
c := db.Pool.Get()
|
||||
defer c.Close()
|
||||
p, err := redis.Bytes(c.Do("GET", "gob:"+key))
|
||||
if err == redis.ErrNil {
|
||||
return nil
|
||||
} else if err != nil {
|
||||
return err
|
||||
}
|
||||
return gob.NewDecoder(bytes.NewReader(p)).Decode(value)
|
||||
}
|
||||
|
||||
// incrementPopularScore adds n (scaled by exp of the elapsed scaled
// time since base popular:0) to a package's popularity. When the scale
// factor grows large it rebases all scores and prunes tiny entries so
// the zset values stay in a bounded numeric range.
// ARGV: path, n, t (scaled time; see scaledTime).
var incrementPopularScore = redis.NewScript(0, `
	local path = ARGV[1]
	local n = ARGV[2]
	local t = ARGV[3]

	local id = redis.call('GET', 'id:' .. path)
	if not id then
		return
	end

	local t0 = redis.call('GET', 'popular:0') or '0'
	local f = math.exp(tonumber(t) - tonumber(t0))
	redis.call('ZINCRBY', 'popular', tonumber(n) * f, id)
	if f > 10 then
		redis.call('SET', 'popular:0', t)
		redis.call('ZUNIONSTORE', 'popular', 1, 'popular', 'WEIGHTS', 1.0 / f)
		redis.call('ZREMRANGEBYSCORE', 'popular', '-inf', 0.05)
	end
`)
|
||||
|
||||
const popularHalfLife = time.Hour * 24 * 7
|
||||
|
||||
func scaledTime(t time.Time) float64 {
|
||||
const lambda = math.Ln2 / float64(popularHalfLife)
|
||||
return lambda * float64(t.Sub(time.Unix(1257894000, 0)))
|
||||
}
|
||||
|
||||
func (db *Database) IncrementPopularScore(path string) error {
|
||||
// nt = n0 * math.Exp(-lambda * t)
|
||||
// lambda = math.Ln2 / thalf
|
||||
c := db.Pool.Get()
|
||||
defer c.Close()
|
||||
_, err := incrementPopularScore.Do(c, path, 1, scaledTime(time.Now()))
|
||||
return err
|
||||
}
|
||||
|
||||
// popularScript returns (path, synopsis, kind) triples for the most
// popular packages, highest score first, up to inclusive index ARGV[1].
var popularScript = redis.NewScript(0, `
	local stop = ARGV[1]
	local ids = redis.call('ZREVRANGE', 'popular', '0', stop)
	local result = {}
	for i=1,#ids do
		local values = redis.call('HMGET', 'pkg:' .. ids[i], 'path', 'synopsis', 'kind')
		result[#result+1] = values[1]
		result[#result+1] = values[2]
		result[#result+1] = values[3]
	end
	return result
`)
|
||||
|
||||
func (db *Database) Popular(count int) ([]Package, error) {
|
||||
c := db.Pool.Get()
|
||||
defer c.Close()
|
||||
reply, err := popularScript.Do(c, count-1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
pkgs, err := packages(reply, false)
|
||||
return pkgs, err
|
||||
}
|
||||
|
||||
// popularWithScoreScript returns (path, score, 'p') triples for every
// entry in the popular zset, highest score first. The constant 'p'
// kind keeps the reply compatible with the packages() decoder.
var popularWithScoreScript = redis.NewScript(0, `
	local ids = redis.call('ZREVRANGE', 'popular', '0', -1, 'WITHSCORES')
	local result = {}
	for i=1,#ids,2 do
		result[#result+1] = redis.call('HGET', 'pkg:' .. ids[i], 'path')
		result[#result+1] = ids[i+1]
		result[#result+1] = 'p'
	end
	return result
`)
|
||||
|
||||
func (db *Database) PopularWithScores() ([]Package, error) {
|
||||
c := db.Pool.Get()
|
||||
defer c.Close()
|
||||
reply, err := popularWithScoreScript.Do(c)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
pkgs, err := packages(reply, false)
|
||||
return pkgs, err
|
||||
}
|
||||
|
||||
func (db *Database) GetNewCrawl() (string, error) {
|
||||
c := db.Pool.Get()
|
||||
defer c.Close()
|
||||
v, err := redis.String(c.Do("SRANDMEMBER", "newCrawl"))
|
||||
if err == redis.ErrNil {
|
||||
err = nil
|
||||
}
|
||||
return v, err
|
||||
}
|
||||
|
||||
// setBadCrawlScript atomically moves a path from the newCrawl set to
// the badCrawl set; a path not pending a crawl is left untouched.
var setBadCrawlScript = redis.NewScript(0, `
	local path = ARGV[1]
	if redis.call('SREM', 'newCrawl', path) == 1 then
		redis.call('SADD', 'badCrawl', path)
	end
`)
|
||||
|
||||
func (db *Database) SetBadCrawl(path string) error {
|
||||
c := db.Pool.Get()
|
||||
defer c.Close()
|
||||
_, err := setBadCrawlScript.Do(c, path)
|
||||
return err
|
||||
}
|
|
@ -0,0 +1,243 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package database
|
||||
|
||||
import (
|
||||
"math"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/garyburd/gddo/doc"
|
||||
"github.com/garyburd/redigo/redis"
|
||||
)
|
||||
|
||||
// newDB returns a Database backed by Redis database 9 on localhost.
// It fails the test immediately if that database is not empty, to
// avoid clobbering real data with closeDB's FLUSHDB.
func newDB(t *testing.T) *Database {
	p := redis.NewPool(func() (redis.Conn, error) {
		c, err := redis.DialTimeout("tcp", ":6379", 0, 1*time.Second, 1*time.Second)
		if err != nil {
			return nil, err
		}
		// Tests use logical database 9 to stay clear of database 0.
		_, err = c.Do("SELECT", "9")
		if err != nil {
			c.Close()
			return nil, err
		}
		return c, nil
	}, 1)

	c := p.Get()
	defer c.Close()
	n, err := redis.Int(c.Do("DBSIZE"))
	if n != 0 || err != nil {
		t.Fatalf("DBSIZE returned %d, %v", n, err)
	}
	return &Database{Pool: p}
}
|
||||
|
||||
// closeDB flushes the test database so the next test starts clean.
func closeDB(db *Database) {
	c := db.Pool.Get()
	c.Do("FLUSHDB")
	c.Close()
}
|
||||
|
||||
func TestPutGet(t *testing.T) {
|
||||
var updated = time.Unix(1221681866, 0).UTC()
|
||||
var nextCrawl = time.Unix(1231681866, 0).UTC()
|
||||
|
||||
db := newDB(t)
|
||||
defer closeDB(db)
|
||||
pdoc := &doc.Package{
|
||||
ImportPath: "github.com/user/repo/foo/bar",
|
||||
Name: "bar",
|
||||
Synopsis: "hello",
|
||||
ProjectRoot: "github.com/user/repo",
|
||||
ProjectName: "foo",
|
||||
Updated: updated,
|
||||
Imports: []string{"C", "errors", "github.com/user/repo/foo/bar"}, // self import for testing convenience.
|
||||
}
|
||||
if err := db.Put(pdoc, nextCrawl); err != nil {
|
||||
t.Errorf("db.Put() returned error %v", err)
|
||||
}
|
||||
if err := db.Put(pdoc, time.Time{}); err != nil {
|
||||
t.Errorf("second db.Put() returned error %v", err)
|
||||
}
|
||||
|
||||
actualPdoc, actualSubdirs, actualCrawl, err := db.Get("github.com/user/repo/foo/bar")
|
||||
if err != nil {
|
||||
t.Fatalf("db.Get(.../foo/bar) returned %v", err)
|
||||
}
|
||||
if len(actualSubdirs) != 0 {
|
||||
t.Errorf("db.Get(.../foo/bar) returned subdirs %v, want none", actualSubdirs)
|
||||
}
|
||||
if !reflect.DeepEqual(actualPdoc, pdoc) {
|
||||
t.Errorf("db.Get(.../foo/bar) returned doc %v, want %v", actualPdoc, pdoc)
|
||||
}
|
||||
if !nextCrawl.Equal(actualCrawl) {
|
||||
t.Errorf("db.Get(.../foo/bar) returned crawl %v, want %v", actualCrawl, updated)
|
||||
}
|
||||
|
||||
// Popular
|
||||
|
||||
if err := db.IncrementPopularScore(pdoc.ImportPath); err != nil {
|
||||
t.Errorf("db.IncrementPopularScore() returned %v", err)
|
||||
}
|
||||
|
||||
// Next crawl
|
||||
|
||||
if err := db.SetNextCrawl(pdoc.ProjectRoot, nextCrawl.Add(time.Hour)); err != nil {
|
||||
t.Errorf("db.SetNextCrawl(...) returned %v", err)
|
||||
}
|
||||
_, _, actualCrawl, err = db.Get("github.com/user/repo/foo/bar")
|
||||
if err != nil {
|
||||
t.Fatalf("db.Get(.../foo/bar) returned %v", err)
|
||||
}
|
||||
if !nextCrawl.Equal(actualCrawl) {
|
||||
t.Errorf("db.Get(.../foo/bar) returned crawl %v, want %v", actualCrawl, updated)
|
||||
}
|
||||
nextCrawl = nextCrawl.Add(-time.Hour)
|
||||
if err := db.SetNextCrawl(pdoc.ProjectRoot, nextCrawl); err != nil {
|
||||
t.Errorf("db.SetNextCrawl(...) returned %v", err)
|
||||
}
|
||||
_, _, actualCrawl, err = db.Get("github.com/user/repo/foo/bar")
|
||||
if err != nil {
|
||||
t.Fatalf("db.Get(.../foo/bar) returned %v", err)
|
||||
}
|
||||
if !nextCrawl.Equal(actualCrawl) {
|
||||
t.Errorf("db.Get(.../foo/bar) returned crawl %v, want %v", actualCrawl, updated)
|
||||
}
|
||||
|
||||
// Get "-"
|
||||
|
||||
actualPdoc, _, _, err = db.Get("-")
|
||||
if err != nil {
|
||||
t.Fatalf("db.Get(-) returned %v", err)
|
||||
}
|
||||
if !reflect.DeepEqual(actualPdoc, pdoc) {
|
||||
t.Errorf("db.Get(-) returned doc %v, want %v", actualPdoc, pdoc)
|
||||
}
|
||||
|
||||
actualPdoc, actualSubdirs, _, err = db.Get("github.com/user/repo/foo")
|
||||
if err != nil {
|
||||
t.Fatalf("db.Get(.../foo) returned %v", err)
|
||||
}
|
||||
if actualPdoc != nil {
|
||||
t.Errorf("db.Get(.../foo) returned doc %v, want %v", actualPdoc, nil)
|
||||
}
|
||||
expectedSubdirs := []Package{{Path: "github.com/user/repo/foo/bar", Synopsis: "hello"}}
|
||||
if !reflect.DeepEqual(actualSubdirs, expectedSubdirs) {
|
||||
t.Errorf("db.Get(.../foo) returned subdirs %v, want %v", actualSubdirs, expectedSubdirs)
|
||||
}
|
||||
actualImporters, err := db.Importers("github.com/user/repo/foo/bar")
|
||||
if err != nil {
|
||||
t.Fatalf("db.Importers() retunred error %v", err)
|
||||
}
|
||||
expectedImporters := []Package{{"github.com/user/repo/foo/bar", "hello"}}
|
||||
if !reflect.DeepEqual(actualImporters, expectedImporters) {
|
||||
t.Errorf("db.Importers() = %v, want %v", actualImporters, expectedImporters)
|
||||
}
|
||||
actualImports, err := db.Packages(pdoc.Imports)
|
||||
if err != nil {
|
||||
t.Fatalf("db.Imports() retunred error %v", err)
|
||||
}
|
||||
for i := range actualImports {
|
||||
if actualImports[i].Path == "C" {
|
||||
actualImports[i].Synopsis = ""
|
||||
}
|
||||
}
|
||||
expectedImports := []Package{{"C", ""}, {"errors", ""}, {"github.com/user/repo/foo/bar", "hello"}}
|
||||
if !reflect.DeepEqual(actualImports, expectedImports) {
|
||||
t.Errorf("db.Imports() = %v, want %v", actualImports, expectedImports)
|
||||
}
|
||||
importerCount, _ := db.ImporterCount("github.com/user/repo/foo/bar")
|
||||
if importerCount != 1 {
|
||||
t.Errorf("db.ImporterCount() = %d, want %d", importerCount, 1)
|
||||
}
|
||||
if err := db.Delete("github.com/user/repo/foo/bar"); err != nil {
|
||||
t.Errorf("db.Delete() returned error %v", err)
|
||||
}
|
||||
|
||||
db.Query("bar")
|
||||
|
||||
if err := db.Put(pdoc, time.Time{}); err != nil {
|
||||
t.Errorf("db.Put() returned error %v", err)
|
||||
}
|
||||
|
||||
if err := db.Block("github.com/user/repo"); err != nil {
|
||||
t.Errorf("db.Block() returned error %v", err)
|
||||
}
|
||||
|
||||
blocked, err := db.IsBlocked("github.com/user/repo/foo/bar")
|
||||
if !blocked || err != nil {
|
||||
t.Errorf("db.IsBlocked(github.com/user/repo/foo/bar) returned %v, %v, want true, nil", blocked, err)
|
||||
}
|
||||
|
||||
blocked, err = db.IsBlocked("github.com/foo/bar")
|
||||
if blocked || err != nil {
|
||||
t.Errorf("db.IsBlocked(github.com/foo/bar) returned %v, %v, want false, nil", blocked, err)
|
||||
}
|
||||
|
||||
c := db.Pool.Get()
|
||||
defer c.Close()
|
||||
c.Send("DEL", "maxQueryId")
|
||||
c.Send("DEL", "maxPackageId")
|
||||
c.Send("DEL", "block")
|
||||
c.Send("DEL", "popular:0")
|
||||
c.Send("DEL", "newCrawl")
|
||||
if n, err := c.Do("DBSIZE"); n != int64(0) || err != nil {
|
||||
t.Errorf("c.Do(DBSIZE) = %d, %v, want 0, nil", n, err)
|
||||
}
|
||||
}
|
||||
|
||||
// TestPopular verifies the exponentially-decayed popularity ranking:
// a score S recorded at time T should rank equally with score S/2
// recorded one half-life later.
func TestPopular(t *testing.T) {
	db := newDB(t)
	defer closeDB(db)
	c := db.Pool.Get()
	defer c.Close()

	// Add scores for packages. On each iteration, add half-life to time and
	// divide the score by two. All packages should have the same score.

	now := time.Now()
	score := float64(4048)
	for id := 12; id >= 0; id-- {
		path := "github.com/user/repo/p" + strconv.Itoa(id)
		c.Do("SET", "id:"+path, id)
		_, err := incrementPopularScore.Do(c, path, score, scaledTime(now))
		if err != nil {
			t.Fatal(err)
		}
		now = now.Add(popularHalfLife)
		score /= 2
	}

	// 13 members, each followed by its score (WITHSCORES interleaves them).
	values, _ := redis.Values(c.Do("ZRANGE", "popular", "0", "100000", "WITHSCORES"))
	if len(values) != 26 {
		t.Errorf("Expected 26 values, got %d", len(values))
	}

	// Check for equal scores.
	score, err := redis.Float64(values[1], nil)
	if err != nil {
		t.Fatal(err)
	}
	// Scores occupy the odd indexes; allow a small relative error from
	// the floating-point decay arithmetic.
	for i := 3; i < len(values); i += 2 {
		s, _ := redis.Float64(values[i], nil)
		if math.Abs(score-s)/score > 0.0001 {
			t.Errorf("Bad score, score[1]=%g, score[%d]=%g", score, i, s)
		}
	}
}
|
|
@ -0,0 +1,152 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package database
|
||||
|
||||
import (
|
||||
"path"
|
||||
"regexp"
|
||||
"strings"
|
||||
"unicode"
|
||||
|
||||
"github.com/garyburd/gddo/doc"
|
||||
)
|
||||
|
||||
// isStandardPackage reports whether path refers to a Go standard
// library package. Standard import paths contain no dot; remote paths
// always begin with a dotted host name.
func isStandardPackage(path string) bool {
	// strings.Contains is the idiomatic form of Index(...) < 0.
	return !strings.Contains(path, ".")
}
|
||||
|
||||
// isTermSep reports whether r separates search terms: any whitespace,
// punctuation, or symbol rune.
func isTermSep(r rune) bool {
	switch {
	case unicode.IsSpace(r):
		return true
	case unicode.IsPunct(r):
		return true
	default:
		return unicode.IsSymbol(r)
	}
}
|
||||
|
||||
// normalizeProjectRoot maps the empty project root (used for standard
// library packages) to the canonical key "go"; any other root is
// returned unchanged.
func normalizeProjectRoot(projectRoot string) string {
	if projectRoot != "" {
		return projectRoot
	}
	return "go"
}
|
||||
|
||||
var httpPat = regexp.MustCompile(`https?://\S+`)
|
||||
|
||||
// documentTerms returns the set of index terms for pdoc. Every package
// gets "project:" and "import:" terms; packages with a positive score
// additionally get free-text terms from the package name, project name
// and synopsis so they appear in full-text search results.
func documentTerms(pdoc *doc.Package, score float64) []string {

	terms := make(map[string]bool)

	// Project root

	projectRoot := normalizeProjectRoot(pdoc.ProjectRoot)
	terms["project:"+projectRoot] = true

	// Imports

	for _, path := range pdoc.Imports {
		if doc.IsValidPath(path) {
			terms["import:"+path] = true
		}
	}

	if score > 0 {

		if isStandardPackage(pdoc.ImportPath) {
			// Standard packages are searchable by their import path
			// tokens only.
			for _, term := range parseQuery(pdoc.ImportPath) {
				terms[term] = true
			}
		} else {
			// "all:" marks the document as a member of the full-text
			// corpus.
			terms["all:"] = true
			for _, term := range parseQuery(pdoc.ProjectName) {
				terms[term] = true
			}
			for _, term := range parseQuery(pdoc.Name) {
				terms[term] = true
			}
		}

		// Synopsis

		// Strip URLs before tokenizing, then lowercase and stem each
		// word. The word "package" is skipped only near the start of
		// the sentence (i > 3 lets it through later on).
		synopsis := httpPat.ReplaceAllLiteralString(pdoc.Synopsis, "")
		for i, s := range strings.FieldsFunc(synopsis, isTermSep) {
			s = strings.ToLower(s)
			if !stopWord[s] && (i > 3 || s != "package") {
				terms[stem(s)] = true
			}
		}
	}

	result := make([]string, 0, len(terms))
	for term := range terms {
		result = append(result, term)
	}
	return result
}
|
||||
|
||||
// documentScore computes a search-ranking weight for pdoc. A score of
// zero excludes the package from full-text search entirely.
func documentScore(pdoc *doc.Package) float64 {
	// Unnamed packages, commands, packages with build errors, and
	// import paths that end in ".go" are not indexed.
	if pdoc.Name == "" || pdoc.IsCmd || len(pdoc.Errors) > 0 || strings.HasSuffix(pdoc.ImportPath, ".go") {
		return 0
	}

	// An import ending in ".go" indicates broken metadata; exclude.
	for _, p := range pdoc.Imports {
		if strings.HasSuffix(p, ".go") {
			return 0
		}
	}

	// Exclude packages that export nothing at all (unless truncated,
	// in which case the real contents are unknown).
	if !pdoc.Truncated &&
		len(pdoc.Consts) == 0 &&
		len(pdoc.Vars) == 0 &&
		len(pdoc.Funcs) == 0 &&
		len(pdoc.Types) == 0 &&
		len(pdoc.Examples) == 0 {
		return 0
	}

	// Base score: standard library ranks highest, then the official
	// go.* repositories, then packages with a conventional
	// "Package name ..." synopsis, then any synopsis at all.
	var r float64
	switch {
	case isStandardPackage(pdoc.ImportPath):
		r = 1000
	case strings.HasPrefix(pdoc.ImportPath, "code.google.com/p/go."):
		r = 500
	case strings.HasPrefix(pdoc.Synopsis, "Package "+pdoc.Name+" "):
		r = 100
	case len(pdoc.Synopsis) > 0:
		r = 10
	default:
		r = 1
	}

	// Penalize nesting depth below the project root by 1% per level.
	for i := 0; i < strings.Count(pdoc.ImportPath[len(pdoc.ProjectRoot):], "/"); i++ {
		r *= 0.99
	}

	// Penalize paths with an interior "/src/" element (vendored or
	// mirrored source layouts).
	if strings.Index(pdoc.ImportPath[len(pdoc.ProjectRoot):], "/src/") > 0 {
		r *= 0.85
	}

	// Penalize packages whose name differs from the last path element.
	if path.Base(pdoc.ImportPath) != pdoc.Name {
		r *= 0.9
	}

	return r
}
|
||||
|
||||
func parseQuery(q string) []string {
|
||||
var terms []string
|
||||
q = strings.ToLower(q)
|
||||
for _, s := range strings.FieldsFunc(q, isTermSep) {
|
||||
if !stopWord[s] {
|
||||
terms = append(terms, stem(s))
|
||||
}
|
||||
}
|
||||
return terms
|
||||
}
|
|
@ -0,0 +1,108 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package database
|
||||
|
||||
import (
|
||||
"github.com/garyburd/gddo/doc"
|
||||
"reflect"
|
||||
"sort"
|
||||
"testing"
|
||||
)
|
||||
|
||||
var indexTests = []struct {
|
||||
pdoc *doc.Package
|
||||
terms []string
|
||||
}{
|
||||
{&doc.Package{
|
||||
ImportPath: "strconv",
|
||||
ProjectRoot: "",
|
||||
ProjectName: "Go",
|
||||
Name: "strconv",
|
||||
Synopsis: "Package strconv implements conversions to and from string representations of basic data types.",
|
||||
Doc: "Package strconv implements conversions to and from string representations\nof basic data types.",
|
||||
Imports: []string{"errors", "math", "unicode/utf8"},
|
||||
Funcs: []*doc.Func{{}},
|
||||
},
|
||||
[]string{
|
||||
"bas",
|
||||
"convert",
|
||||
"dat",
|
||||
"import:errors",
|
||||
"import:math",
|
||||
"import:unicode/utf8",
|
||||
"project:go",
|
||||
"repres",
|
||||
"strconv",
|
||||
"string",
|
||||
"typ"},
|
||||
},
|
||||
{&doc.Package{
|
||||
ImportPath: "github.com/user/repo/dir",
|
||||
ProjectRoot: "github.com/user/repo",
|
||||
ProjectName: "go-oauth",
|
||||
ProjectURL: "https://github.com/user/repo/",
|
||||
Name: "dir",
|
||||
Synopsis: "Package dir implements a subset of the OAuth client interface as defined in RFC 5849.",
|
||||
Doc: "Package oauth implements a subset of the OAuth client interface as defined in RFC 5849.\n\n" +
|
||||
"This package assumes that the application writes request URL paths to the\nnetwork using " +
|
||||
"the encoding implemented by the net/url URL RequestURI method.\n" +
|
||||
"The HTTP client in the standard net/http package uses this encoding.",
|
||||
IsCmd: false,
|
||||
Imports: []string{
|
||||
"bytes",
|
||||
"crypto/hmac",
|
||||
"crypto/sha1",
|
||||
"encoding/base64",
|
||||
"encoding/binary",
|
||||
"errors",
|
||||
"fmt",
|
||||
"io",
|
||||
"io/ioutil",
|
||||
"net/http",
|
||||
"net/url",
|
||||
"regexp",
|
||||
"sort",
|
||||
"strconv",
|
||||
"strings",
|
||||
"sync",
|
||||
"time",
|
||||
},
|
||||
TestImports: []string{"bytes", "net/url", "testing"},
|
||||
Funcs: []*doc.Func{{}},
|
||||
},
|
||||
[]string{
|
||||
"all:",
|
||||
"5849", "cly", "defin", "dir", "go",
|
||||
"import:bytes", "import:crypto/hmac", "import:crypto/sha1",
|
||||
"import:encoding/base64", "import:encoding/binary", "import:errors",
|
||||
"import:fmt", "import:io", "import:io/ioutil", "import:net/http",
|
||||
"import:net/url", "import:regexp", "import:sort", "import:strconv",
|
||||
"import:strings", "import:sync", "import:time", "interfac",
|
||||
"oau", "project:github.com/user/repo", "rfc", "subset",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
func TestDocTerms(t *testing.T) {
|
||||
for _, tt := range indexTests {
|
||||
score := documentScore(tt.pdoc)
|
||||
terms := documentTerms(tt.pdoc, score)
|
||||
sort.Strings(terms)
|
||||
sort.Strings(tt.terms)
|
||||
if !reflect.DeepEqual(terms, tt.terms) {
|
||||
t.Errorf("documentTerms(%s)=%#v, want %#v", tt.pdoc.ImportPath, terms, tt.terms)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,131 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
// This file implements the Paice/Husk stemming algorithm.
|
||||
// http://www.comp.lancs.ac.uk/computing/research/stemming/Links/paice.htm
|
||||
|
||||
package database
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"regexp"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
const stemRuleText = `
|
||||
ai*2. a*1.
|
||||
bb1.
|
||||
city3s. ci2> cn1t>
|
||||
dd1. dei3y> deec2ss. dee1. de2> dooh4>
|
||||
e1>
|
||||
feil1v. fi2>
|
||||
gni3> gai3y. ga2> gg1.
|
||||
ht*2. hsiug5ct. hsi3>
|
||||
i*1. i1y>
|
||||
ji1d. juf1s. ju1d. jo1d. jeh1r. jrev1t. jsim2t. jn1d. j1s.
|
||||
lbaifi6. lbai4y. lba3> lbi3. lib2l> lc1. lufi4y. luf3> lu2. lai3> lau3> la2> ll1.
|
||||
mui3. mu*2. msi3> mm1.
|
||||
nois4j> noix4ct. noi3> nai3> na2> nee0. ne2> nn1.
|
||||
pihs4> pp1.
|
||||
re2> rae0. ra2. ro2> ru2> rr1. rt1> rei3y>
|
||||
sei3y> sis2. si2> ssen4> ss0. suo3> su*2. s*1> s0.
|
||||
tacilp4y. ta2> tnem4> tne3> tna3> tpir2b. tpro2b. tcud1. tpmus2. tpec2iv. tulo2v. tsis0. tsi3> tt1.
|
||||
uqi3. ugo1.
|
||||
vis3j> vie0. vi2>
|
||||
ylb1> yli3y> ylp0. yl2> ygo1. yhp1. ymo1. ypo1. yti3> yte3> ytl2. yrtsi5. yra3> yro3> yfi3. ycn2t> yca3>
|
||||
zi2> zy1s.
|
||||
`
|
||||
|
||||
type stemRule struct {
|
||||
text string
|
||||
suffix []byte
|
||||
intact bool
|
||||
remove int
|
||||
append []byte
|
||||
more bool
|
||||
}
|
||||
|
||||
// parseStemRules parses stemRuleText into rule lists keyed by the final
// letter of each suffix. Rules are written with the suffix reversed
// (Paice/Husk convention), so the parser restores reading order before
// storing them.
func parseStemRules() map[byte][]*stemRule {

	rules := make(map[byte][]*stemRule)
	// Each match captures: reversed suffix, optional intact marker '*',
	// number of characters to remove, replacement text, and a
	// terminator ('.' stops stemming, '>' continues).
	// NOTE(review): the class [a-zA-z] also matches the six characters
	// between 'Z' and 'a' ([ \ ] ^ _ `); [a-zA-Z] was probably
	// intended — confirm before changing, as it alters parsing.
	for _, m := range regexp.MustCompile(`(?m)(?:^| )([a-zA-Z]*)(\*?)([0-9])([a-zA-z]*)([.>])`).FindAllStringSubmatch(stemRuleText, -1) {

		// Reverse the suffix back into normal reading order.
		suffix := []byte(m[1])
		for i := 0; i < len(suffix)/2; i++ {
			j := len(suffix) - 1 - i
			suffix[i], suffix[j] = suffix[j], suffix[i]
		}

		remove, _ := strconv.Atoi(m[3])
		r := &stemRule{
			text:   m[0],
			suffix: suffix,
			intact: m[2] == "*",
			remove: remove,
			append: []byte(m[4]),
			more:   m[5] == ">",
		}
		// Index rules by the last letter of the restored suffix so the
		// stemmer only scans rules that can possibly match.
		c := suffix[len(suffix)-1]
		rules[c] = append(rules[c], r)
	}
	return rules
}
|
||||
|
||||
var stemRules = parseStemRules()
|
||||
|
||||
func firstVowel(offset int, p []byte) int {
|
||||
for i, b := range p {
|
||||
switch b {
|
||||
case 'a', 'e', 'i', 'o', 'u':
|
||||
return offset + i
|
||||
case 'y':
|
||||
if offset+i > 0 {
|
||||
return offset + i
|
||||
}
|
||||
}
|
||||
}
|
||||
return -1
|
||||
}
|
||||
|
||||
func acceptableStem(a, b []byte) bool {
|
||||
i := firstVowel(0, a)
|
||||
if i < 0 {
|
||||
i = firstVowel(len(a), b)
|
||||
}
|
||||
l := len(a) + len(b)
|
||||
if i == 0 {
|
||||
return l > 1
|
||||
}
|
||||
return i >= 0 && l > 2
|
||||
}
|
||||
|
||||
// stem reduces s to its Paice/Husk stem. Rules indexed by the word's
// final letter are applied repeatedly until no rule matches or a
// terminating ('.') rule fires.
func stem(s string) string {
	stem := bytes.ToLower([]byte(s))
	// intact stays true until the word is first modified; some rules
	// apply only to unmodified words.
	intact := true
	run := acceptableStem(stem, []byte{})
	for run {
		run = false
		for _, rule := range stemRules[stem[len(stem)-1]] {
			// A rule applies when its suffix matches, its intact
			// requirement is satisfied, and the result would still be
			// an acceptable stem.
			if bytes.HasSuffix(stem, rule.suffix) &&
				(intact || !rule.intact) &&
				acceptableStem(stem[:len(stem)-rule.remove], rule.append) {
				// Strip rule.remove bytes and add the replacement.
				stem = append(stem[:len(stem)-rule.remove], rule.append...)
				intact = false
				// '>' rules allow another pass; '.' rules terminate.
				run = rule.more
				break
			}
		}
	}
	return string(stem)
}
|
|
@ -0,0 +1,39 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package database
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
var stemTests = []struct {
|
||||
s, expected string
|
||||
}{
|
||||
{"html", "html"},
|
||||
{"strings", "string"},
|
||||
{"ballroom", "ballroom"},
|
||||
{"mechanicalization", "mech"},
|
||||
{"pragmaticality", "pragm"},
|
||||
{"rationalistically", "rat"},
|
||||
}
|
||||
|
||||
func TestStem(t *testing.T) {
|
||||
for _, tt := range stemTests {
|
||||
actual := stem(tt.s)
|
||||
if actual != tt.expected {
|
||||
t.Errorf("stem(%q) = %q, want %q", tt.s, actual, tt.expected)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,151 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package database
|
||||
|
||||
import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
var stopWord = createStopWordMap()
|
||||
|
||||
func createStopWordMap() map[string]bool {
|
||||
m := make(map[string]bool)
|
||||
for _, s := range strings.Fields(stopText) {
|
||||
m[s] = true
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
const stopText = `
|
||||
a
|
||||
about
|
||||
after
|
||||
all
|
||||
also
|
||||
am
|
||||
an
|
||||
and
|
||||
another
|
||||
any
|
||||
are
|
||||
as
|
||||
at
|
||||
b
|
||||
be
|
||||
because
|
||||
been
|
||||
before
|
||||
being
|
||||
between
|
||||
both
|
||||
but
|
||||
by
|
||||
c
|
||||
came
|
||||
can
|
||||
come
|
||||
could
|
||||
d
|
||||
did
|
||||
do
|
||||
e
|
||||
each
|
||||
f
|
||||
for
|
||||
from
|
||||
g
|
||||
get
|
||||
got
|
||||
h
|
||||
had
|
||||
has
|
||||
have
|
||||
he
|
||||
her
|
||||
here
|
||||
him
|
||||
himself
|
||||
his
|
||||
how
|
||||
i
|
||||
if
|
||||
implement
|
||||
implements
|
||||
in
|
||||
into
|
||||
is
|
||||
it
|
||||
j
|
||||
k
|
||||
l
|
||||
like
|
||||
m
|
||||
make
|
||||
many
|
||||
me
|
||||
might
|
||||
more
|
||||
most
|
||||
much
|
||||
must
|
||||
my
|
||||
n
|
||||
never
|
||||
now
|
||||
o
|
||||
of
|
||||
on
|
||||
only
|
||||
or
|
||||
other
|
||||
our
|
||||
out
|
||||
over
|
||||
p
|
||||
q
|
||||
r
|
||||
s
|
||||
said
|
||||
same
|
||||
see
|
||||
should
|
||||
since
|
||||
some
|
||||
still
|
||||
such
|
||||
t
|
||||
take
|
||||
than
|
||||
that
|
||||
the
|
||||
their
|
||||
them
|
||||
then
|
||||
there
|
||||
these
|
||||
they
|
||||
this
|
||||
those
|
||||
through
|
||||
to
|
||||
too
|
||||
u
|
||||
under
|
||||
v
|
||||
w
|
||||
x
|
||||
y
|
||||
z
|
||||
`
|
|
@ -0,0 +1,104 @@
|
|||
// Copyright 2011 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"path"
|
||||
"regexp"
|
||||
)
|
||||
|
||||
var bitbucketPattern = regexp.MustCompile(`^bitbucket\.org/(?P<owner>[a-z0-9A-Z_.\-]+)/(?P<repo>[a-z0-9A-Z_.\-]+)(?P<dir>/[a-z0-9A-Z_.\-/]*)?$`)
|
||||
var bitbucketEtagRe = regexp.MustCompile(`^(hg|git)-`)
|
||||
|
||||
// getBitbucketDoc fetches package documentation for an import path
// hosted on bitbucket.org via the Bitbucket 1.0 REST API. savedEtag,
// if non-empty, is the "{vcs}-{commit}" etag from a previous fetch;
// when the repository is unchanged, ErrNotModified is returned.
func getBitbucketDoc(client *http.Client, match map[string]string, savedEtag string) (*Package, error) {

	// Recover the VCS type from the previous etag when possible;
	// otherwise ask the API which SCM the repository uses.
	if m := bitbucketEtagRe.FindStringSubmatch(savedEtag); m != nil {
		match["vcs"] = m[1]
	} else {
		var repo struct {
			Scm string
		}
		if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}", match), &repo); err != nil {
			return nil, err
		}
		match["vcs"] = repo.Scm
	}

	// Collect all branch and tag heads so bestTag can pick the
	// revision to document.
	tags := make(map[string]string)
	for _, nodeType := range []string{"branches", "tags"} {
		var nodes map[string]struct {
			Node string
		}
		if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/{0}", match, nodeType), &nodes); err != nil {
			return nil, err
		}
		for t, n := range nodes {
			tags[t] = n.Node
		}
	}

	var err error
	match["tag"], match["commit"], err = bestTag(tags, defaultTags[match["vcs"]])
	if err != nil {
		return nil, err
	}

	// Short-circuit when the commit is unchanged since the last crawl.
	etag := expand("{vcs}-{commit}", match)
	if etag == savedEtag {
		return nil, ErrNotModified
	}

	// List the package directory, then download the documentable files.
	var directory struct {
		Files []struct {
			Path string
		}
	}

	if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/src/{tag}{dir}/", match), &directory); err != nil {
		return nil, err
	}

	var files []*source
	for _, f := range directory.Files {
		_, name := path.Split(f.Path)
		if isDocFile(name) {
			files = append(files, &source{
				name:      name,
				browseURL: expand("https://bitbucket.org/{owner}/{repo}/src/{tag}/{0}", match, f.Path),
				rawURL:    expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/raw/{tag}/{0}", match, f.Path),
			})
		}
	}

	if err := fetchFiles(client, files, nil); err != nil {
		return nil, err
	}

	// Build the documentation from the fetched sources.
	b := builder{
		pdoc: &Package{
			LineFmt:     "%s#cl-%d",
			ImportPath:  match["originalImportPath"],
			ProjectRoot: expand("bitbucket.org/{owner}/{repo}", match),
			ProjectName: match["repo"],
			ProjectURL:  expand("https://bitbucket.org/{owner}/{repo}/", match),
			BrowseURL:   expand("https://bitbucket.org/{owner}/{repo}/src/{tag}{dir}", match),
			Etag:        etag,
			VCS:         match["vcs"],
		},
	}

	return b.build(files)
}
|
|
@ -0,0 +1,591 @@
|
|||
// Copyright 2011 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"go/ast"
|
||||
"go/build"
|
||||
"go/doc"
|
||||
"go/format"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// startsWithUppercase reports whether the first rune of s is an
// upper-case letter. It returns false for the empty string.
func startsWithUppercase(s string) bool {
	first, _ := utf8.DecodeRuneInString(s)
	return unicode.IsUpper(first)
}
|
||||
|
||||
var badSynopsisPrefixes = []string{
|
||||
"Autogenerated by Thrift Compiler",
|
||||
"Automatically generated ",
|
||||
"Auto-generated by ",
|
||||
"Copyright ",
|
||||
"COPYRIGHT ",
|
||||
`THE SOFTWARE IS PROVIDED "AS IS"`,
|
||||
"TODO: ",
|
||||
"vim:",
|
||||
}
|
||||
|
||||
// synopsis extracts the first sentence from s. All runs of whitespace
// are replaced by a single space. The result is truncated to fit an
// App Engine datastore text property, and cleared entirely when it
// starts with punctuation/symbols or a known boilerplate prefix.
func synopsis(s string) string {

	// Only the first paragraph can contain the synopsis.
	parts := strings.SplitN(s, "\n\n", 2)
	s = parts[0]

	var buf []byte
	// State machine over the previous byte: a period followed by
	// whitespace ends the sentence.
	const (
		other = iota
		period
		space
	)
	last := space
Loop:
	for i := 0; i < len(s); i++ {
		b := s[i]
		switch b {
		case ' ', '\t', '\r', '\n':
			switch last {
			case period:
				break Loop
			case other:
				// Collapse each run of whitespace to one space.
				buf = append(buf, ' ')
				last = space
			}
		case '.':
			last = period
			buf = append(buf, b)
		default:
			last = other
			buf = append(buf, b)
		}
	}

	// Ensure that synopsis fits an App Engine datastore text property.
	const m = 400
	if len(buf) > m {
		buf = buf[:m]
		// Cut back to a word boundary before appending the ellipsis.
		if i := bytes.LastIndex(buf, []byte{' '}); i >= 0 {
			buf = buf[:i]
		}
		buf = append(buf, " ..."...)
	}

	s = string(buf)

	r, n := utf8.DecodeRuneInString(s)
	// NOTE(review): DecodeRuneInString never returns n < 0 (it returns
	// n == 0 for the empty string), so the n < 0 test is dead code; the
	// empty string still ends up as "" because utf8.RuneError is a
	// symbol rune.
	if n < 0 || unicode.IsPunct(r) || unicode.IsSymbol(r) {
		// ignore Markdown headings, editor settings, Go build constraints, and * in poorly formatted block comments.
		s = ""
	} else {
		for _, prefix := range badSynopsisPrefixes {
			if strings.HasPrefix(s, prefix) {
				s = ""
				break
			}
		}
	}

	return s
}
|
||||
|
||||
var referencePat = regexp.MustCompile(`\b(?:go\s+get\s+|goinstall\s+|http://gopkgdoc\.appspot\.com/pkg/|http://go\.pkgdoc\.org/|http://godoc\.org/)([-a-zA-Z0-9~+_./]+)`)
|
||||
var quotedReferencePat = regexp.MustCompile(`"([-a-zA-Z0-9~+_./]+)"`)
|
||||
|
||||
// addReferences adds packages referenced in plain text s.
|
||||
func addReferences(references map[string]bool, s []byte) {
|
||||
for _, m := range referencePat.FindAllSubmatch(s, -1) {
|
||||
p := string(m[1])
|
||||
if IsValidRemotePath(p) {
|
||||
references[p] = true
|
||||
}
|
||||
}
|
||||
for _, m := range quotedReferencePat.FindAllSubmatch(s, -1) {
|
||||
p := string(m[1])
|
||||
if IsValidRemotePath(p) {
|
||||
references[p] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// builder holds the state used when building the documentation.
|
||||
type builder struct {
|
||||
pdoc *Package
|
||||
|
||||
srcs map[string]*source
|
||||
fset *token.FileSet
|
||||
examples []*doc.Example
|
||||
buf []byte // scratch space for printNode method.
|
||||
}
|
||||
|
||||
type Value struct {
|
||||
Decl Code
|
||||
Pos Pos
|
||||
Doc string
|
||||
}
|
||||
|
||||
func (b *builder) values(vdocs []*doc.Value) []*Value {
|
||||
var result []*Value
|
||||
for _, d := range vdocs {
|
||||
result = append(result, &Value{
|
||||
Decl: b.printDecl(d.Decl),
|
||||
Pos: b.position(d.Decl),
|
||||
Doc: d.Doc,
|
||||
})
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
type Note struct {
|
||||
Pos Pos
|
||||
UID string
|
||||
Body string
|
||||
}
|
||||
|
||||
type posNode token.Pos
|
||||
|
||||
func (p posNode) Pos() token.Pos { return token.Pos(p) }
|
||||
func (p posNode) End() token.Pos { return token.Pos(p) }
|
||||
|
||||
func (b *builder) notes(gnotes map[string][]*doc.Note) map[string][]*Note {
|
||||
if len(gnotes) == 0 {
|
||||
return nil
|
||||
}
|
||||
notes := make(map[string][]*Note)
|
||||
for tag, gvalues := range gnotes {
|
||||
values := make([]*Note, len(gvalues))
|
||||
for i := range gvalues {
|
||||
values[i] = &Note{
|
||||
Pos: b.position(posNode(gvalues[i].Pos)),
|
||||
UID: gvalues[i].UID,
|
||||
Body: strings.TrimSpace(gvalues[i].Body),
|
||||
}
|
||||
}
|
||||
notes[tag] = values
|
||||
}
|
||||
return notes
|
||||
}
|
||||
|
||||
type Example struct {
|
||||
Name string
|
||||
Doc string
|
||||
Code Code
|
||||
Play string
|
||||
Output string
|
||||
}
|
||||
|
||||
var exampleOutputRx = regexp.MustCompile(`(?i)//[[:space:]]*output:`)
|
||||
|
||||
// getExamples returns the examples that apply to the identifier name:
// the exact example ("ExampleFoo" for "Foo") plus suffixed variants
// ("ExampleFoo_suffix"), following the go/doc naming convention that a
// variant suffix must start with a lowercase letter.
func (b *builder) getExamples(name string) []*Example {
	var docs []*Example
	for _, e := range b.examples {
		if !strings.HasPrefix(e.Name, name) {
			continue
		}
		n := e.Name[len(name):]
		if n != "" {
			// Only a "_suffix" immediately after the name counts; any
			// other remainder belongs to a different identifier.
			if i := strings.LastIndex(n, "_"); i != 0 {
				continue
			}
			n = n[1:]
			// An uppercase suffix denotes a method example on another
			// identifier, not a variant of this one.
			if startsWithUppercase(n) {
				continue
			}
			n = strings.Title(n)
		}

		code, output := b.printExample(e)

		// Render whole-file play code when present; if formatting
		// fails, the error text is shown in place of the code.
		play := ""
		if e.Play != nil {
			b.buf = b.buf[:0]
			if err := format.Node(sliceWriter{&b.buf}, b.fset, e.Play); err != nil {
				play = err.Error()
			} else {
				play = string(b.buf)
			}
		}

		docs = append(docs, &Example{
			Name:   n,
			Doc:    e.Doc,
			Code:   code,
			Output: output,
			Play:   play})
	}
	return docs
}
|
||||
|
||||
type Func struct {
|
||||
Decl Code
|
||||
Pos Pos
|
||||
Doc string
|
||||
Name string
|
||||
Recv string
|
||||
Examples []*Example
|
||||
}
|
||||
|
||||
func (b *builder) funcs(fdocs []*doc.Func) []*Func {
|
||||
var result []*Func
|
||||
for _, d := range fdocs {
|
||||
var exampleName string
|
||||
switch {
|
||||
case d.Recv == "":
|
||||
exampleName = d.Name
|
||||
case d.Recv[0] == '*':
|
||||
exampleName = d.Recv[1:] + "_" + d.Name
|
||||
default:
|
||||
exampleName = d.Recv + "_" + d.Name
|
||||
}
|
||||
result = append(result, &Func{
|
||||
Decl: b.printDecl(d.Decl),
|
||||
Pos: b.position(d.Decl),
|
||||
Doc: d.Doc,
|
||||
Name: d.Name,
|
||||
Recv: d.Recv,
|
||||
Examples: b.getExamples(exampleName),
|
||||
})
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
type Type struct {
|
||||
Doc string
|
||||
Name string
|
||||
Decl Code
|
||||
Pos Pos
|
||||
Consts []*Value
|
||||
Vars []*Value
|
||||
Funcs []*Func
|
||||
Methods []*Func
|
||||
Examples []*Example
|
||||
}
|
||||
|
||||
func (b *builder) types(tdocs []*doc.Type) []*Type {
|
||||
var result []*Type
|
||||
for _, d := range tdocs {
|
||||
result = append(result, &Type{
|
||||
Doc: d.Doc,
|
||||
Name: d.Name,
|
||||
Decl: b.printDecl(d.Decl),
|
||||
Pos: b.position(d.Decl),
|
||||
Consts: b.values(d.Consts),
|
||||
Vars: b.values(d.Vars),
|
||||
Funcs: b.funcs(d.Funcs),
|
||||
Methods: b.funcs(d.Methods),
|
||||
Examples: b.getExamples(d.Name),
|
||||
})
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// packageNamePats are tried in order against an import path to guess the
// package name without downloading the package; the first submatch is the
// guessed name.
var packageNamePats = []*regexp.Regexp{
	// Strip suffix and prefix separated by illegal id runes "." and "-".
	regexp.MustCompile(`/([^-./]+)[-.](?:go|git)$`),
	regexp.MustCompile(`/(?:go)[-.]([^-./]+)$`),

	// Well-known hosting layouts with an extra path component.
	regexp.MustCompile(`^code\.google\.com/p/google-api-go-client/([^/]+)/v[^/]+$`),
	regexp.MustCompile(`^code\.google\.com/p/biogo\.([^/]+)$`),

	// It's also common for the last element of the path to contain an
	// extra "go" prefix, but not always. TODO: examine unresolved ids to
	// detect when trimming the "go" prefix is appropriate.

	// Last component of path.
	regexp.MustCompile(`([^/]+)$`),
}
|
||||
|
||||
func simpleImporter(imports map[string]*ast.Object, path string) (*ast.Object, error) {
|
||||
pkg := imports[path]
|
||||
if pkg == nil {
|
||||
// Guess the package name without importing it.
|
||||
for _, pat := range packageNamePats {
|
||||
m := pat.FindStringSubmatch(path)
|
||||
if m != nil {
|
||||
pkg = ast.NewObj(ast.Pkg, m[1])
|
||||
pkg.Data = ast.NewScope(nil)
|
||||
imports[path] = pkg
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
return pkg, nil
|
||||
}
|
||||
|
||||
// File describes a source file belonging to the package.
type File struct {
	Name string // File name with no directory component.
	URL  string // Browse URL for the file.
}

// Pos is a compact encoding of a declaration's position in the sources.
type Pos struct {
	Line int32  // 0 if not valid.
	N    uint16 // number of lines - 1
	File int16  // index in Package.Files
}

// source is an in-memory source file fetched from a version control service.
type source struct {
	name      string // base name of the file
	browseURL string // URL for viewing the file in the service's web UI
	rawURL    string // URL for fetching the raw file contents
	data      []byte // file contents
	index     int    // index of this file in Package.Files
}
|
||||
|
||||
// The methods below implement os.FileInfo so that a *source can be
// returned from readDir and consumed by go/build. Mode, ModTime, IsDir
// and Sys return zero values; only Name and Size carry real data.
func (s *source) Name() string       { return s.name }
func (s *source) Size() int64        { return int64(len(s.data)) }
func (s *source) Mode() os.FileMode  { return 0 }
func (s *source) ModTime() time.Time { return time.Time{} }
func (s *source) IsDir() bool        { return false }
func (s *source) Sys() interface{}   { return nil }
|
||||
|
||||
func (b *builder) readDir(dir string) ([]os.FileInfo, error) {
|
||||
if dir != "/" {
|
||||
panic("unexpected")
|
||||
}
|
||||
fis := make([]os.FileInfo, 0, len(b.srcs))
|
||||
for _, src := range b.srcs {
|
||||
fis = append(fis, src)
|
||||
}
|
||||
return fis, nil
|
||||
}
|
||||
|
||||
func (b *builder) openFile(path string) (io.ReadCloser, error) {
|
||||
if src, ok := b.srcs[path[1:]]; ok {
|
||||
return ioutil.NopCloser(bytes.NewReader(src.data)), nil
|
||||
}
|
||||
panic("unexpected")
|
||||
}
|
||||
|
||||
// PackageVersion is modified when previously stored packages are invalid.
// It is prepended to Etag values (see Get) so entries cached by older
// versions of this code are refetched.
const PackageVersion = "6"
|
||||
|
||||
// Package is the documentation and metadata stored for an import path.
type Package struct {
	// The import path for this package.
	ImportPath string

	// Import path prefix for all packages in the project.
	ProjectRoot string

	// Name of the project.
	ProjectName string

	// Project home page.
	ProjectURL string

	// Errors found when fetching or parsing this package.
	Errors []string

	// Packages referenced in README files.
	References []string

	// Version control system: git, hg, bzr, ...
	VCS string

	// The time this object was created.
	Updated time.Time

	// Cache validation tag. This tag is not necessarily an HTTP entity tag.
	// The tag is "" if there is no meaningful cache validation for the VCS.
	Etag string

	// Package name or "" if no package for this import path. The preceding
	// fields are set even if a package is not found for the import path.
	Name string

	// Synopsis and full documentation for package.
	Synopsis string
	Doc      string

	// Format this package as a command.
	IsCmd bool

	// True if package documentation is incomplete.
	Truncated bool

	// Environment used when building the documentation.
	GOOS, GOARCH string

	// Top-level declarations.
	Consts []*Value
	Funcs  []*Func
	Types  []*Type
	Vars   []*Value

	// Package examples
	Examples []*Example

	// Notes from the source, keyed by marker name (see go/doc Notes).
	Notes map[string][]*Note
	Bugs  []string

	// Source.
	LineFmt   string
	BrowseURL string
	Files     []*File
	TestFiles []*File

	// Source size in bytes.
	SourceSize     int
	TestSourceSize int

	// Imports
	Imports      []string
	TestImports  []string
	XTestImports []string
}
|
||||
|
||||
// goEnvs lists the GOOS/GOARCH combinations tried when importing the
// package; build stops at the first combination that yields Go files.
var goEnvs = []struct{ GOOS, GOARCH string }{
	{"linux", "amd64"},
	{"darwin", "amd64"},
	{"windows", "amd64"},
}
|
||||
|
||||
// build parses srcs and assembles the package documentation in b.pdoc.
// Non-Go files are scanned for references to other packages. Fetch and
// parse problems are recorded in b.pdoc.Errors rather than returned;
// the method always returns b.pdoc with a nil error.
func (b *builder) build(srcs []*source) (*Package, error) {

	b.pdoc.Updated = time.Now().UTC()

	references := make(map[string]bool)
	b.srcs = make(map[string]*source)
	for _, src := range srcs {
		if strings.HasSuffix(src.name, ".go") {
			b.srcs[src.name] = src
		} else {
			// README and similar files may mention other packages.
			addReferences(references, src.data)
		}
	}

	for r := range references {
		b.pdoc.References = append(b.pdoc.References, r)
	}

	if len(b.srcs) == 0 {
		// No Go files: return the metadata collected so far.
		return b.pdoc, nil
	}

	b.fset = token.NewFileSet()

	// Find the package and associated files. The context reads entirely
	// from the in-memory sources via readDir/openFile.

	ctxt := build.Context{
		GOOS:          "linux",
		GOARCH:        "amd64",
		CgoEnabled:    true,
		ReleaseTags:   build.Default.ReleaseTags,
		JoinPath:      path.Join,
		IsAbsPath:     path.IsAbs,
		SplitPathList: func(list string) []string { return strings.Split(list, ":") },
		IsDir:         func(path string) bool { panic("unexpected") },
		HasSubdir:     func(root, dir string) (rel string, ok bool) { panic("unexpected") },
		ReadDir:       func(dir string) (fi []os.FileInfo, err error) { return b.readDir(dir) },
		OpenFile:      func(path string) (r io.ReadCloser, err error) { return b.openFile(path) },
		Compiler:      "gc",
	}

	var err error
	var bpkg *build.Package

	// Try each environment until one yields Go files (i.e. does not
	// return *build.NoGoError).
	for _, env := range goEnvs {
		ctxt.GOOS = env.GOOS
		ctxt.GOARCH = env.GOARCH
		bpkg, err = ctxt.ImportDir("/", 0)
		if _, ok := err.(*build.NoGoError); !ok {
			break
		}
	}
	if err != nil {
		// NoGoError is not reported; any other import error is recorded.
		if _, ok := err.(*build.NoGoError); !ok {
			b.pdoc.Errors = append(b.pdoc.Errors, err.Error())
		}
		return b.pdoc, nil
	}

	// Parse the Go files

	files := make(map[string]*ast.File)
	names := append(bpkg.GoFiles, bpkg.CgoFiles...)
	sort.Strings(names)
	b.pdoc.Files = make([]*File, len(names))
	for i, name := range names {
		file, err := parser.ParseFile(b.fset, name, b.srcs[name].data, parser.ParseComments)
		if err != nil {
			b.pdoc.Errors = append(b.pdoc.Errors, err.Error())
			continue
		}
		src := b.srcs[name]
		src.index = i
		b.pdoc.Files[i] = &File{Name: name, URL: src.browseURL}
		b.pdoc.SourceSize += len(src.data)
		files[name] = file
	}

	apkg, _ := ast.NewPackage(b.fset, files, simpleImporter, nil)

	// Find examples in the test files.

	names = append(bpkg.TestGoFiles, bpkg.XTestGoFiles...)
	sort.Strings(names)
	b.pdoc.TestFiles = make([]*File, len(names))
	for i, name := range names {
		file, err := parser.ParseFile(b.fset, name, b.srcs[name].data, parser.ParseComments)
		if err != nil {
			b.pdoc.Errors = append(b.pdoc.Errors, err.Error())
			continue
		}
		b.pdoc.TestFiles[i] = &File{Name: name, URL: b.srcs[name].browseURL}
		b.pdoc.TestSourceSize += len(b.srcs[name].data)
		b.examples = append(b.examples, doc.Examples(file)...)
	}

	b.vetPackage(apkg)

	mode := doc.Mode(0)
	if b.pdoc.ImportPath == "builtin" {
		// The builtin package documents its unexported declarations too.
		mode |= doc.AllDecls
	}

	dpkg := doc.New(apkg, b.pdoc.ImportPath, mode)

	b.pdoc.Name = dpkg.Name
	b.pdoc.Doc = strings.TrimRight(dpkg.Doc, " \t\n\r")
	b.pdoc.Synopsis = synopsis(b.pdoc.Doc)

	b.pdoc.Examples = b.getExamples("")
	b.pdoc.IsCmd = bpkg.IsCommand()
	b.pdoc.GOOS = ctxt.GOOS
	b.pdoc.GOARCH = ctxt.GOARCH

	b.pdoc.Consts = b.values(dpkg.Consts)
	b.pdoc.Funcs = b.funcs(dpkg.Funcs)
	b.pdoc.Types = b.types(dpkg.Types)
	b.pdoc.Vars = b.values(dpkg.Vars)
	b.pdoc.Notes = b.notes(dpkg.Notes)

	b.pdoc.Imports = bpkg.Imports
	b.pdoc.TestImports = bpkg.TestImports
	b.pdoc.XTestImports = bpkg.XTestImports

	return b.pdoc, nil
}
|
|
@ -0,0 +1,97 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// badSynopsis lists first lines that look like build tags, copyright
// notices, markup or editor directives — none of which should be used
// as a package synopsis.
var badSynopsis = []string{
	"+build !release",
	"COPYRIGHT Jimmy Bob",
	"### Markdown heading",
	"-*- indent-tabs-mode: nil -*-",
	"vim:set ts=2 sw=2 et ai ft=go:",
}
|
||||
|
||||
func TestBadSynopsis(t *testing.T) {
|
||||
for _, s := range badSynopsis {
|
||||
if synopsis(s) != "" {
|
||||
t.Errorf(`synopsis(%q) did not return ""`, s)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// readme is sample README text exercising the URL and command forms
// that addReferences recognizes as package references.
const readme = `
$ go get github.com/user/repo/pkg1
[foo](http://gopkgdoc.appspot.com/pkg/github.com/user/repo/pkg2)
[foo](http://go.pkgdoc.org/github.com/user/repo/pkg3)
[foo](http://godoc.org/github.com/user/repo/pkg4)
<http://go.pkgdoc.org/github.com/user/repo/pkg5>
[foo](http://godoc.org/github.com/user/repo/pkg6#Export)
'go get example.org/package1' will install package1.
(http://go.pkgdoc.org/example.org/package2 "Package2's documentation on GoPkgDoc").
import "example.org/package3"
`
|
||||
|
||||
// expectedReferences are the import paths addReferences must extract
// from the readme fixture above — one per line of the fixture.
var expectedReferences = []string{
	"github.com/user/repo/pkg1",
	"github.com/user/repo/pkg2",
	"github.com/user/repo/pkg3",
	"github.com/user/repo/pkg4",
	"github.com/user/repo/pkg5",
	"github.com/user/repo/pkg6",
	"example.org/package1",
	"example.org/package2",
	"example.org/package3",
}
|
||||
|
||||
func TestReferences(t *testing.T) {
|
||||
references := make(map[string]bool)
|
||||
addReferences(references, []byte(readme))
|
||||
for _, r := range expectedReferences {
|
||||
if !references[r] {
|
||||
t.Errorf("missing %s", r)
|
||||
}
|
||||
delete(references, r)
|
||||
}
|
||||
for r := range references {
|
||||
t.Errorf("extra %s", r)
|
||||
}
|
||||
}
|
||||
|
||||
// simpleImporterTests are import paths for which simpleImporter must
// guess the package name "foobar", covering each packageNamePats rule.
var simpleImporterTests = []string{
	"code.google.com/p/biogo.foobar",
	"code.google.com/p/google-api-go-client/foobar/v3",
	"git.gitorious.org/go-pkg/foobar.git",
	"github.com/quux/go-foobar",
	"github.com/quux/go.foobar",
	"github.com/quux/foobar.go",
	"github.com/quux/foobar-go",
	"github.com/quux/foobar",
	"foobar",
	"quux/foobar",
}
|
||||
|
||||
func TestSimpleImporter(t *testing.T) {
|
||||
for _, path := range simpleImporterTests {
|
||||
m := make(map[string]*ast.Object)
|
||||
obj, _ := simpleImporter(m, path)
|
||||
if obj.Name != "foobar" {
|
||||
t.Errorf("simpleImporter(%q) = %q, want %q", path, obj.Name, "foobar")
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,335 @@
|
|||
// Copyright 2011 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"go/ast"
|
||||
"go/doc"
|
||||
"go/printer"
|
||||
"go/scanner"
|
||||
"go/token"
|
||||
"math"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// Classification of predeclared identifiers, used when annotating an
// identifier that resolved to no object.
const (
	notPredeclared = iota
	predeclaredType
	predeclaredConstant
	predeclaredFunction
)

// predeclared represents the set of all predeclared identifiers.
var predeclared = map[string]int{
	"bool":       predeclaredType,
	"byte":       predeclaredType,
	"complex128": predeclaredType,
	"complex64":  predeclaredType,
	"error":      predeclaredType,
	"float32":    predeclaredType,
	"float64":    predeclaredType,
	"int16":      predeclaredType,
	"int32":      predeclaredType,
	"int64":      predeclaredType,
	"int8":       predeclaredType,
	"int":        predeclaredType,
	"rune":       predeclaredType,
	"string":     predeclaredType,
	"uint16":     predeclaredType,
	"uint32":     predeclaredType,
	"uint64":     predeclaredType,
	"uint8":      predeclaredType,
	"uint":       predeclaredType,
	"uintptr":    predeclaredType,

	"true":  predeclaredConstant,
	"false": predeclaredConstant,
	"iota":  predeclaredConstant,
	"nil":   predeclaredConstant,

	"append":  predeclaredFunction,
	"cap":     predeclaredFunction,
	"close":   predeclaredFunction,
	"complex": predeclaredFunction,
	"copy":    predeclaredFunction,
	"delete":  predeclaredFunction,
	"imag":    predeclaredFunction,
	"len":     predeclaredFunction,
	"make":    predeclaredFunction,
	"new":     predeclaredFunction,
	"panic":   predeclaredFunction,
	"print":   predeclaredFunction,
	"println": predeclaredFunction,
	"real":    predeclaredFunction,
	"recover": predeclaredFunction,
}
|
||||
|
||||
// AnnotationKind describes how an annotated span of code text should be
// presented.
type AnnotationKind int32

const (
	// Link to export in package specified by Paths[PathIndex] with fragment
	// Text[strings.LastIndex(Text[Pos:End], ".")+1:End].
	ExportLinkAnnotation AnnotationKind = iota

	// Anchor with name specified by Text[Pos:End] or typeName + "." +
	// Text[Pos:End] for type declarations.
	AnchorAnnotation

	// Comment.
	CommentAnnotation

	// Link to package specified by Paths[PathIndex].
	PackageLinkAnnotation

	// Link to builtin entity with name Text[Pos:End].
	BuiltinAnnotation
)
|
||||
|
||||
// Annotation marks a range of Code.Text with presentation semantics.
type Annotation struct {
	Pos, End  int32 // Byte offsets of the annotated span in Code.Text.
	Kind      AnnotationKind
	PathIndex int32 // Index in Code.Paths, or -1 when not applicable.
}

// Code is formatted source text together with its annotations.
type Code struct {
	Text        string
	Annotations []Annotation
	Paths       []string // Import paths referenced by the annotations.
}
|
||||
|
||||
// annotationVisitor collects annotations for the identifiers of a
// declaration in source order; printDecl later pairs them positionally
// with the scanner's identifier tokens.
type annotationVisitor struct {
	annotations []Annotation   // queued annotations, one per identifier
	paths       []string       // interned import paths
	pathIndex   map[string]int // import path -> index in paths
}
|
||||
|
||||
func (v *annotationVisitor) add(kind AnnotationKind, importPath string) {
|
||||
pathIndex := -1
|
||||
if importPath != "" {
|
||||
var ok bool
|
||||
pathIndex, ok = v.pathIndex[importPath]
|
||||
if !ok {
|
||||
pathIndex = len(v.paths)
|
||||
v.paths = append(v.paths, importPath)
|
||||
v.pathIndex[importPath] = pathIndex
|
||||
}
|
||||
}
|
||||
v.annotations = append(v.annotations, Annotation{Kind: kind, PathIndex: int32(pathIndex)})
|
||||
}
|
||||
|
||||
// ignoreName queues a placeholder annotation (Kind -1) for an identifier
// that should be skipped rather than linked or anchored.
func (v *annotationVisitor) ignoreName() {
	v.add(-1, "")
}
|
||||
|
||||
// Visit implements ast.Visitor. It walks a declaration in the same order
// that the scanner in printDecl encounters identifiers, queuing exactly
// one annotation per identifier so the two streams can be zipped together.
func (v *annotationVisitor) Visit(n ast.Node) ast.Visitor {
	switch n := n.(type) {
	case *ast.TypeSpec:
		v.ignoreName()
		var list *ast.FieldList
		switch n := n.Type.(type) {
		case *ast.InterfaceType:
			list = n.Methods
		case *ast.StructType:
			list = n.Fields
		}
		if list == nil {
			ast.Walk(v, n.Type)
		} else {
			// Struct fields and interface methods get anchors so they
			// can be linked to directly.
			for _, f := range list.List {
				for _ = range f.Names {
					v.add(AnchorAnnotation, "")
				}
				ast.Walk(v, f.Type)
			}
		}
	case *ast.FuncDecl:
		if n.Recv != nil {
			ast.Walk(v, n.Recv)
		}
		v.ignoreName()
		ast.Walk(v, n.Type)
	case *ast.Field:
		for _ = range n.Names {
			v.ignoreName()
		}
		ast.Walk(v, n.Type)
	case *ast.ValueSpec:
		// Const and var names get anchors.
		for _ = range n.Names {
			v.add(AnchorAnnotation, "")
		}
		if n.Type != nil {
			ast.Walk(v, n.Type)
		}
		for _, x := range n.Values {
			ast.Walk(v, x)
		}
	case *ast.Ident:
		switch {
		case n.Obj == nil && predeclared[n.Name] != notPredeclared:
			v.add(BuiltinAnnotation, "")
		case n.Obj != nil && ast.IsExported(n.Name):
			v.add(ExportLinkAnnotation, "")
		default:
			v.ignoreName()
		}
	case *ast.SelectorExpr:
		// pkg.Name selectors become a package link plus an export link
		// into that package; selectors on "C" are not linkable.
		if x, _ := n.X.(*ast.Ident); x != nil {
			if obj := x.Obj; obj != nil && obj.Kind == ast.Pkg {
				if spec, _ := obj.Decl.(*ast.ImportSpec); spec != nil {
					if path, err := strconv.Unquote(spec.Path.Value); err == nil {
						v.add(PackageLinkAnnotation, path)
						if path == "C" {
							v.ignoreName()
						} else {
							v.add(ExportLinkAnnotation, path)
						}
						return nil
					}
				}
			}
		}
		ast.Walk(v, n.X)
		v.ignoreName()
	default:
		return v
	}
	return nil
}
|
||||
|
||||
// printDecl renders decl as annotated source code. The declaration is
// first walked with annotationVisitor to queue one annotation per
// identifier, then the printed text is rescanned and identifiers are
// matched to queued annotations positionally. Comments become
// CommentAnnotations; adjacent package/export links are merged.
func (b *builder) printDecl(decl ast.Decl) (d Code) {
	v := &annotationVisitor{pathIndex: make(map[string]int)}
	ast.Walk(v, decl)
	b.buf = b.buf[:0]
	err := (&printer.Config{Mode: printer.UseSpaces, Tabwidth: 4}).Fprint(sliceWriter{&b.buf}, b.fset, decl)
	if err != nil {
		return Code{Text: err.Error()}
	}

	var annotations []Annotation
	var s scanner.Scanner
	fset := token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(b.buf))
	s.Init(file, b.buf, nil, scanner.ScanComments)
loop:
	for {
		pos, tok, lit := s.Scan()
		switch tok {
		case token.EOF:
			break loop
		case token.COMMENT:
			p := file.Offset(pos)
			e := p + len(lit)
			annotations = append(annotations, Annotation{Kind: CommentAnnotation, Pos: int32(p), End: int32(e)})
		case token.IDENT:
			if len(v.annotations) == 0 {
				// Scanner and visitor disagree: stop annotating rather
				// than mislabel the remaining identifiers.
				break loop
			}
			annotation := v.annotations[0]
			v.annotations = v.annotations[1:]
			if annotation.Kind == -1 {
				// Identifier deliberately not annotated (ignoreName).
				continue
			}
			p := file.Offset(pos)
			e := p + len(lit)
			annotation.Pos = int32(p)
			annotation.End = int32(e)
			if len(annotations) > 0 && annotation.Kind == ExportLinkAnnotation {
				prev := annotations[len(annotations)-1]
				if prev.Kind == PackageLinkAnnotation &&
					prev.PathIndex == annotation.PathIndex &&
					prev.End+1 == annotation.Pos {
					// merge with previous
					annotation.Pos = prev.Pos
					annotations[len(annotations)-1] = annotation
					continue loop
				}
			}
			annotations = append(annotations, annotation)
		}
	}
	return Code{Text: string(b.buf), Annotations: annotations, Paths: v.paths}
}
|
||||
|
||||
// position converts n's token position to the package's compact Pos
// encoding. The zero Pos is returned when the node's file is not one of
// the package's sources.
func (b *builder) position(n ast.Node) Pos {
	var position Pos
	pos := b.fset.Position(n.Pos())
	src := b.srcs[pos.Filename]
	if src != nil {
		position.File = int16(src.index)
		position.Line = int32(pos.Line)
		// Record the span's line count only when the node ends in the
		// same file and the count fits in 16 bits.
		end := b.fset.Position(n.End())
		if src == b.srcs[end.Filename] {
			n := end.Line - pos.Line
			if n >= 0 && n <= math.MaxUint16 {
				position.N = uint16(n)
			}
		}
	}
	return position
}
|
||||
|
||||
// printExample renders the example's code and expected output. Whole
// function bodies are unwrapped: the surrounding braces and one level of
// indentation are removed and the output comment is stripped (returned
// separately as output). Otherwise the output comment stays in the code
// and output is cleared.
func (b *builder) printExample(e *doc.Example) (code Code, output string) {
	output = e.Output

	b.buf = b.buf[:0]
	err := (&printer.Config{Mode: printer.UseSpaces, Tabwidth: 4}).Fprint(
		sliceWriter{&b.buf},
		b.fset,
		&printer.CommentedNode{
			Node:     e.Code,
			Comments: e.Comments,
		})
	if err != nil {
		return Code{Text: err.Error()}, output
	}

	// additional formatting if this is a function body
	if i := len(b.buf); i >= 2 && b.buf[0] == '{' && b.buf[i-1] == '}' {
		// remove surrounding braces
		b.buf = b.buf[1 : i-1]
		// unindent
		b.buf = bytes.Replace(b.buf, []byte("\n "), []byte("\n"), -1)
		// remove output comment
		if j := exampleOutputRx.FindIndex(b.buf); j != nil {
			b.buf = bytes.TrimSpace(b.buf[:j[0]])
		}
	} else {
		// drop output, as the output comment will appear in the code
		output = ""
	}

	// Only comments are annotated in example code.
	var annotations []Annotation
	var s scanner.Scanner
	fset := token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(b.buf))
	s.Init(file, b.buf, nil, scanner.ScanComments)
scanLoop:
	for {
		pos, tok, lit := s.Scan()
		switch tok {
		case token.EOF:
			break scanLoop
		case token.COMMENT:
			p := file.Offset(pos)
			e := p + len(lit)
			annotations = append(annotations, Annotation{Kind: CommentAnnotation, Pos: int32(p), End: int32(e)})
		}
	}

	return Code{Text: string(b.buf), Annotations: annotations}, output
}
|
|
@ -0,0 +1,297 @@
|
|||
// Copyright 2011 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
// Package doc fetches Go package documentation from version control services.
|
||||
package doc
|
||||
|
||||
import (
|
||||
"encoding/xml"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"path"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// NotFoundError indicates that the requested package or presentation
// does not exist.
type NotFoundError struct {
	Message string // Human-readable explanation.
}

// Error implements the error interface.
func (e NotFoundError) Error() string {
	return e.Message
}

// IsNotFound reports whether err is a NotFoundError.
func IsNotFound(err error) bool {
	_, ok := err.(NotFoundError)
	return ok
}
|
||||
|
||||
// RemoteError records an error from a remote host together with the
// host's name.
type RemoteError struct {
	Host string // Host that produced the error.
	err  error
}

// Error implements the error interface by delegating to the wrapped error.
func (e *RemoteError) Error() string {
	return e.err.Error()
}
|
||||
|
||||
var (
	// ErrNotModified is returned when the cache validation tag still
	// matches and the stored package can be reused.
	ErrNotModified = errors.New("package not modified")

	// errNoMatch is an internal sentinel: the import path matched no
	// known service.
	errNoMatch = errors.New("no match")
)
|
||||
|
||||
// service represents a source code control service.
type service struct {
	pattern         *regexp.Regexp // matches import paths served by this service
	prefix          string         // import path prefix; "" for the generic VCS fallback
	get             func(*http.Client, map[string]string, string) (*Package, error)
	getPresentation func(*http.Client, map[string]string) (*Presentation, error)
}
|
||||
|
||||
// services is the list of source code control services handled by
// gopkgdoc. They are tried in order; the vcsPattern entry has no prefix
// and acts as the generic fallback.
var services = []*service{
	{githubPattern, "github.com/", getGithubDoc, getGithubPresentation},
	{googlePattern, "code.google.com/", getGoogleDoc, getGooglePresentation},
	{bitbucketPattern, "bitbucket.org/", getBitbucketDoc, nil},
	{launchpadPattern, "launchpad.net/", getLaunchpadDoc, nil},
	{vcsPattern, "", getVCSDoc, nil},
}
|
||||
|
||||
func attrValue(attrs []xml.Attr, name string) string {
|
||||
for _, a := range attrs {
|
||||
if strings.EqualFold(a.Name.Local, name) {
|
||||
return a.Value
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// fetchMeta downloads the page for importPath with the ?go-get=1 query
// and parses its go-import <meta> tag. https is tried first; on error or
// a non-200 status the request is retried over plain http.
func fetchMeta(client *http.Client, importPath string) (map[string]string, error) {
	uri := importPath
	if !strings.Contains(uri, "/") {
		// Add slash for root of domain.
		uri = uri + "/"
	}
	uri = uri + "?go-get=1"

	scheme := "https"
	resp, err := client.Get(scheme + "://" + uri)
	if err != nil || resp.StatusCode != 200 {
		if err == nil {
			resp.Body.Close()
		}
		// Fall back to http.
		scheme = "http"
		resp, err = client.Get(scheme + "://" + uri)
		if err != nil {
			return nil, &RemoteError{strings.SplitN(importPath, "/", 2)[0], err}
		}
	}
	defer resp.Body.Close()
	return parseMeta(scheme, importPath, resp.Body)
}
|
||||
|
||||
// parseMeta scans an HTML document for a <meta name="go-import"> tag
// whose prefix matches importPath and returns the matched fields as a
// map keyed for the getVCSDoc/getDynamic consumers. It is an error for
// more than one matching tag to appear, or for none to appear.
func parseMeta(scheme, importPath string, r io.Reader) (map[string]string, error) {
	var match map[string]string

	d := xml.NewDecoder(r)
	d.Strict = false
metaScan:
	for {
		t, tokenErr := d.Token()
		if tokenErr != nil {
			break metaScan
		}
		switch t := t.(type) {
		case xml.EndElement:
			// Stop at the end of <head>; <meta> tags in the body are
			// not considered.
			if strings.EqualFold(t.Name.Local, "head") {
				break metaScan
			}
		case xml.StartElement:
			if strings.EqualFold(t.Name.Local, "body") {
				break metaScan
			}
			if !strings.EqualFold(t.Name.Local, "meta") ||
				attrValue(t.Attr, "name") != "go-import" {
				continue metaScan
			}
			// content is "prefix vcs repo". The prefix must match
			// importPath on a path-segment boundary.
			f := strings.Fields(attrValue(t.Attr, "content"))
			if len(f) != 3 ||
				!strings.HasPrefix(importPath, f[0]) ||
				!(len(importPath) == len(f[0]) || importPath[len(f[0])] == '/') {
				continue metaScan
			}
			if match != nil {
				return nil, NotFoundError{"More than one <meta> found at " + scheme + "://" + importPath}
			}

			projectRoot, vcs, repo := f[0], f[1], f[2]

			// Split the repo URL into scheme and host/path parts.
			repo = strings.TrimSuffix(repo, "."+vcs)
			i := strings.Index(repo, "://")
			if i < 0 {
				return nil, NotFoundError{"Bad repo URL in <meta>."}
			}
			proto := repo[:i]
			repo = repo[i+len("://"):]

			match = map[string]string{
				// Used in getVCSDoc, same as vcsPattern matches.
				"importPath": importPath,
				"repo":       repo,
				"vcs":        vcs,
				"dir":        importPath[len(projectRoot):],

				// Used in getVCSDoc
				"scheme": proto,

				// Used in getDynamic.
				"projectRoot": projectRoot,
				"projectName": path.Base(projectRoot),
				"projectURL":  scheme + "://" + projectRoot,
			}
		}
	}
	if match == nil {
		return nil, NotFoundError{"<meta> not found."}
	}
	return match, nil
}
|
||||
|
||||
// getDynamic gets a document from a service that is not statically known.
// The go-import <meta> tag on the package page determines the repository;
// when the package is not the project root, the root's page must agree,
// which prevents spoofed <meta> tags on sub-paths.
func getDynamic(client *http.Client, importPath, etag string) (*Package, error) {
	match, err := fetchMeta(client, importPath)
	if err != nil {
		return nil, err
	}

	if match["projectRoot"] != importPath {
		rootMatch, err := fetchMeta(client, match["projectRoot"])
		if err != nil {
			return nil, err
		}
		if rootMatch["projectRoot"] != match["projectRoot"] {
			return nil, NotFoundError{"Project root mismatch."}
		}
	}

	// Prefer a statically known service for the resolved repo location;
	// fall back to a generic VCS checkout otherwise.
	pdoc, err := getStatic(client, expand("{repo}{dir}", match), importPath, etag)
	if err == errNoMatch {
		pdoc, err = getVCSDoc(client, match, etag)
	}
	if err != nil {
		return nil, err
	}

	if pdoc != nil {
		// Project metadata comes from the <meta> tag, not the service.
		pdoc.ProjectRoot = match["projectRoot"]
		pdoc.ProjectName = match["projectName"]
		pdoc.ProjectURL = match["projectURL"]
	}

	return pdoc, err
}
|
||||
|
||||
// getStatic gets a document from a statically known service. getStatic
|
||||
// returns errNoMatch if the import path is not recognized.
|
||||
func getStatic(client *http.Client, importPath, originalImportPath, etag string) (*Package, error) {
|
||||
for _, s := range services {
|
||||
if s.get == nil || !strings.HasPrefix(importPath, s.prefix) {
|
||||
continue
|
||||
}
|
||||
m := s.pattern.FindStringSubmatch(importPath)
|
||||
if m == nil {
|
||||
if s.prefix != "" {
|
||||
return nil, NotFoundError{"Import path prefix matches known service, but regexp does not."}
|
||||
}
|
||||
continue
|
||||
}
|
||||
match := map[string]string{"importPath": importPath, "originalImportPath": originalImportPath}
|
||||
for i, n := range s.pattern.SubexpNames() {
|
||||
if n != "" {
|
||||
match[n] = m[i]
|
||||
}
|
||||
}
|
||||
return s.get(client, match, etag)
|
||||
}
|
||||
return nil, errNoMatch
|
||||
}
|
||||
|
||||
func Get(client *http.Client, importPath string, etag string) (pdoc *Package, err error) {
|
||||
|
||||
const versionPrefix = PackageVersion + "-"
|
||||
|
||||
if strings.HasPrefix(etag, versionPrefix) {
|
||||
etag = etag[len(versionPrefix):]
|
||||
} else {
|
||||
etag = ""
|
||||
}
|
||||
|
||||
switch {
|
||||
case IsGoRepoPath(importPath):
|
||||
pdoc, err = getStandardDoc(client, importPath, etag)
|
||||
case IsValidRemotePath(importPath):
|
||||
pdoc, err = getStatic(client, importPath, importPath, etag)
|
||||
if err == errNoMatch {
|
||||
pdoc, err = getDynamic(client, importPath, etag)
|
||||
}
|
||||
default:
|
||||
err = errNoMatch
|
||||
}
|
||||
|
||||
if err == errNoMatch {
|
||||
err = NotFoundError{"Import path not valid:"}
|
||||
}
|
||||
|
||||
if pdoc != nil {
|
||||
pdoc.Etag = versionPrefix + pdoc.Etag
|
||||
if pdoc.ImportPath != importPath {
|
||||
return nil, fmt.Errorf("Get: pdoc.ImportPath = %q, want %q", pdoc.ImportPath, importPath)
|
||||
}
|
||||
}
|
||||
|
||||
return pdoc, err
|
||||
}
|
||||
|
||||
// GetPresentation gets a presentation from the given path. Only .slide
// and .article files hosted on a service with presentation support are
// accepted.
func GetPresentation(client *http.Client, importPath string) (*Presentation, error) {
	ext := path.Ext(importPath)
	if ext != ".slide" && ext != ".article" {
		return nil, NotFoundError{"unknown file extension."}
	}

	// Split into the directory (matched against services) and the file name.
	importPath, file := path.Split(importPath)
	importPath = strings.TrimSuffix(importPath, "/")
	for _, s := range services {
		if s.getPresentation == nil || !strings.HasPrefix(importPath, s.prefix) {
			continue
		}
		m := s.pattern.FindStringSubmatch(importPath)
		if m == nil {
			if s.prefix != "" {
				return nil, NotFoundError{"path prefix matches known service, but regexp does not."}
			}
			continue
		}
		// Expose the pattern's named subexpressions to the getter.
		match := map[string]string{"importPath": importPath, "file": file}
		for i, n := range s.pattern.SubexpNames() {
			if n != "" {
				match[n] = m[i]
			}
		}
		return s.getPresentation(client, match)
	}
	return nil, errNoMatch
}
|
|
@ -0,0 +1,186 @@
|
|||
// Copyright 2011 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/url"
|
||||
"path"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// githubRawHeader asks the GitHub API to return raw blob content.
var githubRawHeader = http.Header{"Accept": {"application/vnd.github-blob.raw"}}

// githubPattern matches import paths hosted on github.com. Named groups:
// owner, repo, and dir (dir carries its leading slash when present).
var githubPattern = regexp.MustCompile(`^github\.com/(?P<owner>[a-z0-9A-Z_.\-]+)/(?P<repo>[a-z0-9A-Z_.\-]+)(?P<dir>/[a-z0-9A-Z_.\-/]*)?$`)

// githubCred holds the "client_id=...&client_secret=..." query fragment
// appended to GitHub API requests; empty until SetGithubCredentials is called.
var githubCred string

// SetGithubCredentials sets the OAuth application credentials used for
// GitHub API requests. The values are concatenated without escaping, so
// callers must supply URL-safe strings.
func SetGithubCredentials(id, secret string) {
	githubCred = "client_id=" + id + "&client_secret=" + secret
}
|
||||
|
||||
// getGithubDoc fetches package documentation for a GitHub-hosted import
// path. match must contain the "owner", "repo" and "dir" values captured by
// githubPattern, plus "originalImportPath" supplied by the caller. If the
// selected branch/tag commit still equals savedEtag, ErrNotModified is
// returned without refetching files.
func getGithubDoc(client *http.Client, match map[string]string, savedEtag string) (*Package, error) {

	match["cred"] = githubCred

	// Shape of the GitHub "git/refs" API response.
	var refs []*struct {
		Object struct {
			Type string
			Sha  string
			Url  string
		}
		Ref string
		Url string
	}

	err := httpGetJSON(client, expand("https://api.github.com/repos/{owner}/{repo}/git/refs?{cred}", match), &refs)
	if err != nil {
		return nil, err
	}

	// Collect branch and tag names with their commit SHAs.
	tags := make(map[string]string)
	for _, ref := range refs {
		switch {
		case strings.HasPrefix(ref.Ref, "refs/heads/"):
			tags[ref.Ref[len("refs/heads/"):]] = ref.Object.Sha
		case strings.HasPrefix(ref.Ref, "refs/tags/"):
			tags[ref.Ref[len("refs/tags/"):]] = ref.Object.Sha
		}
	}

	// Pick the best tag (preferring "master"); its commit SHA is the etag.
	var commit string
	match["tag"], commit, err = bestTag(tags, "master")
	if err != nil {
		return nil, err
	}

	if commit == savedEtag {
		return nil, ErrNotModified
	}

	// Shape of the GitHub "git/trees" API response.
	var tree struct {
		Tree []struct {
			Url  string
			Path string
			Type string
		}
		Url string
	}

	err = httpGetJSON(client, expand("https://api.github.com/repos/{owner}/{repo}/git/trees/{tag}?recursive=1&{cred}", match), &tree)
	if err != nil {
		return nil, err
	}

	// Because Github API URLs are case-insensitive, we need to check that the
	// userRepo returned from Github matches the one that we are requesting.
	if !strings.HasPrefix(tree.Url, expand("https://api.github.com/repos/{owner}/{repo}/", match)) {
		return nil, NotFoundError{"Github import path has incorrect case."}
	}

	// Select doc files that live directly in the requested directory; any
	// blob under the directory at all marks the directory as existing.
	inTree := false
	dirPrefix := match["dir"]
	if dirPrefix != "" {
		dirPrefix = dirPrefix[1:] + "/" // strip leading slash, add trailing one
	}
	var files []*source
	for _, node := range tree.Tree {
		if node.Type != "blob" || !strings.HasPrefix(node.Path, dirPrefix) {
			continue
		}
		inTree = true
		if d, f := path.Split(node.Path); d == dirPrefix && isDocFile(f) {
			files = append(files, &source{
				name:      f,
				browseURL: expand("https://github.com/{owner}/{repo}/blob/{tag}/{0}", match, node.Path),
				rawURL:    node.Url + "?" + githubCred,
			})
		}
	}

	if !inTree {
		return nil, NotFoundError{"Directory tree does not contain Go files."}
	}

	if err := fetchFiles(client, files, githubRawHeader); err != nil {
		return nil, err
	}

	browseURL := expand("https://github.com/{owner}/{repo}", match)
	if match["dir"] != "" {
		browseURL = expand("https://github.com/{owner}/{repo}/tree/{tag}{dir}", match)
	}

	b := &builder{
		pdoc: &Package{
			LineFmt:     "%s#L%d",
			ImportPath:  match["originalImportPath"],
			ProjectRoot: expand("github.com/{owner}/{repo}", match),
			ProjectName: match["repo"],
			ProjectURL:  expand("https://github.com/{owner}/{repo}", match),
			BrowseURL:   browseURL,
			Etag:        commit,
			VCS:         "git",
		},
	}

	return b.build(files)
}
|
||||
|
||||
// getGithubPresentation fetches a presentation (.slide/.article) file and
// its referenced resources from GitHub. match must contain "owner", "repo",
// "dir" and "file".
func getGithubPresentation(client *http.Client, match map[string]string) (*Presentation, error) {

	match["cred"] = githubCred

	// Fetch the presentation source itself as a raw blob.
	p, err := httpGetBytes(client, expand("https://api.github.com/repos/{owner}/{repo}/contents{dir}/{file}?{cred}", match), githubRawHeader)
	if err != nil {
		return nil, err
	}

	// apiBase resolves referenced files for fetching via the contents API;
	// rawBase resolves them to user-viewable raw URLs.
	apiBase, err := url.Parse(expand("https://api.github.com/repos/{owner}/{repo}/contents{dir}/?{cred}", match))
	if err != nil {
		return nil, err
	}
	rawBase, err := url.Parse(expand("https://raw.github.com/{owner}/{repo}/master{dir}/", match))
	if err != nil {
		return nil, err
	}

	b := &presBuilder{
		data:     p,
		filename: match["file"],
		// fetch downloads auxiliary files referenced by the presentation,
		// preserving the credential query string from apiBase.
		fetch: func(files []*source) error {
			for _, f := range files {
				u, err := apiBase.Parse(f.name)
				if err != nil {
					return err
				}
				u.RawQuery = apiBase.RawQuery
				f.rawURL = u.String()
			}
			return fetchFiles(client, files, githubRawHeader)
		},
		// resolveURL maps a referenced file name to a browser-usable URL.
		resolveURL: func(fname string) string {
			u, err := rawBase.Parse(fname)
			if err != nil {
				return "/notfound"
			}
			if strings.HasSuffix(fname, ".svg") {
				// NOTE(review): SVGs are redirected to rawgithub.com,
				// presumably so they are served with a renderable content
				// type — confirm against current GitHub behavior.
				u.Host = "rawgithub.com"
			}
			return u.String()
		},
	}

	return b.build()
}
|
|
@ -0,0 +1,208 @@
|
|||
// Copyright 2011 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var (
	// googleRepoRe extracts the VCS type from the project checkout page.
	googleRepoRe = regexp.MustCompile(`id="checkoutcmd">(hg|git|svn)`)
	// googleRevisionRe extracts the revision from a repo browser page.
	googleRevisionRe = regexp.MustCompile(`<h2>(?:[^ ]+ - )?Revision *([^:]+):`)
	// googleEtagRe recognizes etags produced by getGoogleDoc and recovers
	// the VCS type embedded in them ("hg-", "git-" or "svn-" prefix).
	googleEtagRe = regexp.MustCompile(`^(hg|git|svn)-`)
	// googleFileRe extracts file names from a directory listing page.
	googleFileRe = regexp.MustCompile(`<li><a href="([^"/]+)"`)
	// googlePattern matches code.google.com import paths with an optional
	// dotted subrepository and directory. The subrepository group was
	// previously written `(:?` — an optional literal colon instead of the
	// intended non-capturing group `(?:` — which accepted malformed paths
	// such as "code.google.com/p/foo:.bar". Fixed here.
	googlePattern = regexp.MustCompile(`^code\.google\.com/p/(?P<repo>[a-z0-9\-]+)(?:\.(?P<subrepo>[a-z0-9\-]+))?(?P<dir>/[a-z0-9A-Z_.\-/]+)?$`)
)
|
||||
|
||||
// getGoogleDoc fetches package documentation for a code.google.com import
// path by scraping the project's repository browser. match must contain the
// values captured by googlePattern plus "importPath" and
// "originalImportPath". Returns ErrNotModified when the scraped revision
// still matches savedEtag.
func getGoogleDoc(client *http.Client, match map[string]string, savedEtag string) (*Package, error) {
	setupGoogleMatch(match)
	// A saved etag embeds the VCS type ("hg-"/"git-"/"svn-"); reuse it to
	// skip scraping the checkout page.
	if m := googleEtagRe.FindStringSubmatch(savedEtag); m != nil {
		match["vcs"] = m[1]
	} else if err := getGoogleVCS(client, match); err != nil {
		return nil, err
	}

	// Scrape the repo browser to find the project revision and individual Go files.
	p, err := httpGetBytes(client, expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}{dir}/", match), nil)
	if err != nil {
		return nil, err
	}

	// The etag is "{vcs}-{revision}" so the VCS can be recovered next time.
	var etag string
	if m := googleRevisionRe.FindSubmatch(p); m == nil {
		return nil, errors.New("Could not find revision for " + match["importPath"])
	} else {
		etag = expand("{vcs}-{0}", match, string(m[1]))
		if etag == savedEtag {
			return nil, ErrNotModified
		}
	}

	// Collect documentation files from the scraped directory listing.
	var files []*source
	for _, m := range googleFileRe.FindAllSubmatch(p, -1) {
		fname := string(m[1])
		if isDocFile(fname) {
			files = append(files, &source{
				name:      fname,
				browseURL: expand("http://code.google.com/p/{repo}/source/browse{dir}/{0}{query}", match, fname),
				rawURL:    expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}{dir}/{0}", match, fname),
			})
		}
	}

	if err := fetchFiles(client, files, nil); err != nil {
		return nil, err
	}

	// Subrepositories have a distinct browse URL on the project site.
	var projectURL string
	if match["subrepo"] == "" {
		projectURL = expand("https://code.google.com/p/{repo}/", match)
	} else {
		projectURL = expand("https://code.google.com/p/{repo}/source/browse?repo={subrepo}", match)
	}

	b := &builder{
		pdoc: &Package{
			LineFmt:     "%s#%d",
			ImportPath:  match["originalImportPath"],
			ProjectRoot: expand("code.google.com/p/{repo}{dot}{subrepo}", match),
			ProjectName: expand("{repo}{dot}{subrepo}", match),
			ProjectURL:  projectURL,
			BrowseURL:   expand("http://code.google.com/p/{repo}/source/browse{dir}/{query}", match),
			Etag:        etag,
			VCS:         match["vcs"],
		},
	}

	return b.build(files)
}
|
||||
|
||||
// setupGoogleMatch derives the "dot" and "query" entries of match from the
// presence of a "subrepo" value, for use in URL templates.
func setupGoogleMatch(match map[string]string) {
	match["dot"], match["query"] = "", ""
	if sub := match["subrepo"]; sub != "" {
		match["dot"] = "."
		match["query"] = "?repo=" + sub
	}
}
|
||||
|
||||
func getGoogleVCS(client *http.Client, match map[string]string) error {
|
||||
// Scrape the HTML project page to find the VCS.
|
||||
p, err := httpGetBytes(client, expand("http://code.google.com/p/{repo}/source/checkout", match), nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
m := googleRepoRe.FindSubmatch(p)
|
||||
if m == nil {
|
||||
return NotFoundError{"Could not VCS on Google Code project page."}
|
||||
}
|
||||
match["vcs"] = string(m[1])
|
||||
return nil
|
||||
}
|
||||
|
||||
// getStandardDoc fetches documentation for a Go standard library package by
// scraping the release branch of the Go Mercurial repository. Returns
// ErrNotModified when the scraped revision equals savedEtag.
func getStandardDoc(client *http.Client, importPath string, savedEtag string) (*Package, error) {

	p, err := httpGetBytes(client, "http://go.googlecode.com/hg-history/release/src/pkg/"+importPath+"/", nil)
	if err != nil {
		return nil, err
	}

	// The repository revision serves as the etag.
	var etag string
	if m := googleRevisionRe.FindSubmatch(p); m == nil {
		return nil, errors.New("Could not find revision for " + importPath)
	} else {
		etag = string(m[1])
		if etag == savedEtag {
			return nil, ErrNotModified
		}
	}

	// Collect documentation files from the directory listing, stripping any
	// query string from the scraped hrefs.
	var files []*source
	for _, m := range googleFileRe.FindAllSubmatch(p, -1) {
		fname := strings.Split(string(m[1]), "?")[0]
		if isDocFile(fname) {
			files = append(files, &source{
				name:      fname,
				browseURL: "http://code.google.com/p/go/source/browse/src/pkg/" + importPath + "/" + fname + "?name=release",
				rawURL:    "http://go.googlecode.com/hg-history/release/src/pkg/" + importPath + "/" + fname,
			})
		}
	}

	if err := fetchFiles(client, files, nil); err != nil {
		return nil, err
	}

	b := &builder{
		pdoc: &Package{
			LineFmt:     "%s#%d",
			ImportPath:  importPath,
			ProjectRoot: "",
			ProjectName: "Go",
			ProjectURL:  "https://code.google.com/p/go/",
			BrowseURL:   "http://code.google.com/p/go/source/browse/src/pkg/" + importPath + "?name=release",
			Etag:        etag,
			VCS:         "hg",
		},
	}

	return b.build(files)
}
|
||||
|
||||
// getGooglePresentation fetches a presentation file from a Google Code
// repository. match must contain the values captured by googlePattern plus
// "file".
func getGooglePresentation(client *http.Client, match map[string]string) (*Presentation, error) {
	setupGoogleMatch(match)
	if err := getGoogleVCS(client, match); err != nil {
		return nil, err
	}

	// rawBase resolves files referenced relative to the presentation's directory.
	rawBase, err := url.Parse(expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}{dir}/", match))
	if err != nil {
		return nil, err
	}

	// Fetch the presentation source itself.
	p, err := httpGetBytes(client, expand("http://{subrepo}{dot}{repo}.googlecode.com/{vcs}{dir}/{file}", match), nil)
	if err != nil {
		return nil, err
	}

	b := &presBuilder{
		data:     p,
		filename: match["file"],
		// fetch downloads auxiliary files referenced by the presentation.
		fetch: func(files []*source) error {
			for _, f := range files {
				u, err := rawBase.Parse(f.name)
				if err != nil {
					return err
				}
				f.rawURL = u.String()
			}
			return fetchFiles(client, files, nil)
		},
		// resolveURL maps a referenced file name to its raw URL.
		resolveURL: func(fname string) string {
			u, err := rawBase.Parse(fname)
			if err != nil {
				return "/notfound"
			}
			return u.String()
		},
	}

	return b.build()
}
|
|
@ -0,0 +1,77 @@
|
|||
// Copyright 2013 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
// +build ignore
|
||||
|
||||
// Command astprint prints the AST for a file.
|
||||
//
|
||||
// Usage: go run asprint.go fname
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"go/ast"
|
||||
"go/build"
|
||||
"go/doc"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/davecgh/go-spew/spew"
|
||||
)
|
||||
|
||||
func importer(imports map[string]*ast.Object, path string) (*ast.Object, error) {
|
||||
pkg := imports[path]
|
||||
if pkg == nil {
|
||||
name := path[strings.LastIndex(path, "/")+1:]
|
||||
pkg = ast.NewObj(ast.Pkg, name)
|
||||
pkg.Data = ast.NewScope(nil) // required by ast.NewPackage for dot-import
|
||||
imports[path] = pkg
|
||||
}
|
||||
return pkg, nil
|
||||
}
|
||||
|
||||
// main loads the package named by the single command line argument, parses
// its Go files, and dumps both the AST package and its go/doc form with
// spew (methods disabled so raw structure is shown).
func main() {
	flag.Parse()
	if len(flag.Args()) != 1 {
		log.Fatal("Usage: go run goprint.go path")
	}
	// Locate the package directory without compiling it.
	bpkg, err := build.Default.Import(flag.Args()[0], ".", 0)
	if err != nil {
		log.Fatal(err)
	}
	fset := token.NewFileSet()
	files := make(map[string]*ast.File)
	for _, fname := range bpkg.GoFiles {
		p, err := ioutil.ReadFile(filepath.Join(bpkg.SrcRoot, bpkg.ImportPath, fname))
		if err != nil {
			log.Fatal(err)
		}
		file, err := parser.ParseFile(fset, fname, p, parser.ParseComments)
		if err != nil {
			log.Fatal(err)
		}
		files[fname] = file
	}
	c := spew.NewDefaultConfig()
	c.DisableMethods = true
	// Resolution errors are ignored; the fabricated importer above satisfies
	// cross-package references well enough for printing.
	apkg, _ := ast.NewPackage(fset, files, importer, nil)
	c.Dump(apkg)
	ast.Print(fset, apkg)
	dpkg := doc.New(apkg, bpkg.ImportPath, 0)
	c.Dump(dpkg)
}
|
|
@ -0,0 +1,139 @@
|
|||
// Copyright 2011 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"bytes"
|
||||
"compress/gzip"
|
||||
"crypto/md5"
|
||||
"encoding/hex"
|
||||
"io"
|
||||
"net/http"
|
||||
"path"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// byHash sorts a flat byte slice composed of consecutive md5.Size-byte
// digests, treating each digest as one element.
type byHash []byte

func (p byHash) Len() int { return len(p) / md5.Size }

func (p byHash) Less(i, j int) bool {
	return bytes.Compare(p[i*md5.Size:(i+1)*md5.Size], p[j*md5.Size:(j+1)*md5.Size]) < 0
}

func (p byHash) Swap(i, j int) {
	var tmp [md5.Size]byte
	copy(tmp[:], p[i*md5.Size:])
	copy(p[i*md5.Size:(i+1)*md5.Size], p[j*md5.Size:])
	copy(p[j*md5.Size:], tmp[:])
}
|
||||
|
||||
// launchpadPattern matches launchpad.net import paths. Named groups: repo
// (the whole branch spec), project and series (project-style paths only),
// and dir.
var launchpadPattern = regexp.MustCompile(`^launchpad\.net/(?P<repo>(?P<project>[a-z0-9A-Z_.\-]+)(?P<series>/[a-z0-9A-Z_.\-]+)?|~[a-z0-9A-Z_.\-]+/(\+junk|[a-z0-9A-Z_.\-]+)/[a-z0-9A-Z_.\-]+)(?P<dir>/[a-z0-9A-Z_.\-/]+)*$`)
|
||||
|
||||
// getLaunchpadDoc fetches package documentation for a launchpad.net import
// path by downloading the branch tarball. match must contain the values
// captured by launchpadPattern plus "originalImportPath". The etag is an
// MD5 digest over the sorted per-file digests of every doc file in the
// tarball, so a change anywhere in the branch's Go files invalidates it.
func getLaunchpadDoc(client *http.Client, match map[string]string, savedEtag string) (*Package, error) {

	// Disambiguate "project/series" paths from "project/dir" paths by
	// probing for a branch at {project}{series}.
	if match["project"] != "" && match["series"] != "" {
		rc, err := httpGet(client, expand("https://code.launchpad.net/{project}{series}/.bzr/branch-format", match), nil)
		switch {
		case err == nil:
			rc.Close()
			// The structure of the import path is launchpad.net/{root}/{dir}.
		case IsNotFound(err):
			// The structure of the import path is launchpad.net/{project}/{dir}.
			match["repo"] = match["project"]
			match["dir"] = expand("{series}{dir}", match)
		default:
			return nil, err
		}
	}

	p, err := httpGetBytes(client, expand("https://bazaar.launchpad.net/+branch/{repo}/tarball", match), nil)
	if err != nil {
		return nil, err
	}

	gzr, err := gzip.NewReader(bytes.NewReader(p))
	if err != nil {
		return nil, err
	}
	defer gzr.Close()

	tr := tar.NewReader(gzr)

	var hash []byte
	inTree := false
	dirPrefix := expand("+branch/{repo}{dir}/", match)
	var files []*source
	for {
		h, err := tr.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, err
		}
		d, f := path.Split(h.Name)
		if !isDocFile(f) {
			continue
		}
		b := make([]byte, h.Size)
		if _, err := io.ReadFull(tr, b); err != nil {
			return nil, err
		}

		// Hash every doc file in the tarball — not just the requested
		// directory — so the etag tracks the whole branch.
		m := md5.New()
		m.Write(b)
		hash = m.Sum(hash)

		if !strings.HasPrefix(h.Name, dirPrefix) {
			continue
		}
		inTree = true
		// Only files directly in the requested directory are documented.
		if d == dirPrefix {
			files = append(files, &source{
				name:      f,
				browseURL: expand("http://bazaar.launchpad.net/+branch/{repo}/view/head:{dir}/{0}", match, f),
				data:      b})
		}
	}

	if !inTree {
		return nil, NotFoundError{"Directory tree does not contain Go files."}
	}

	// Sort the concatenated per-file digests so the final digest is
	// independent of tarball ordering, then collapse them into the etag.
	sort.Sort(byHash(hash))
	m := md5.New()
	m.Write(hash)
	hash = m.Sum(hash[:0])
	etag := hex.EncodeToString(hash)
	if etag == savedEtag {
		return nil, ErrNotModified
	}

	b := &builder{
		pdoc: &Package{
			LineFmt:     "%s#L%d",
			ImportPath:  match["originalImportPath"],
			ProjectRoot: expand("launchpad.net/{repo}", match),
			ProjectName: match["repo"],
			ProjectURL:  expand("https://launchpad.net/{repo}/", match),
			BrowseURL:   expand("http://bazaar.launchpad.net/+branch/{repo}/view/head:{dir}/", match),
			Etag:        etag,
			VCS:         "bzr",
		},
	}
	return b.build(files)
}
|
|
@ -0,0 +1,71 @@
|
|||
// Copyright 2013 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/build"
|
||||
"io/ioutil"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
// GetLocal gets the documentation for a locally installed package. goroot
// and gopath override the corresponding build.Default values when
// non-empty. browseURLFmt must contain one %s verb, replaced with each file
// name; lineFmt is stored on the Package for formatting source line links.
// The etag is the newest doc-file modification time, in hexadecimal.
func GetLocal(importPath string, goroot, gopath string, browseURLFmt, lineFmt string) (*Package, error) {
	ctx := build.Default
	if goroot != "" {
		ctx.GOROOT = goroot
	}
	if gopath != "" {
		ctx.GOPATH = gopath
	}
	// FindOnly: we only need the directory; files are read directly below.
	bpkg, err := ctx.Import(importPath, ".", build.FindOnly)
	if err != nil {
		return nil, err
	}
	dir := filepath.Join(bpkg.SrcRoot, filepath.FromSlash(importPath))
	fis, err := ioutil.ReadDir(dir)
	if err != nil {
		return nil, err
	}
	var modTime time.Time
	var files []*source
	for _, fi := range fis {
		if fi.IsDir() || !isDocFile(fi.Name()) {
			continue
		}
		// Track the newest modification time for the etag.
		if fi.ModTime().After(modTime) {
			modTime = fi.ModTime()
		}
		b, err := ioutil.ReadFile(filepath.Join(dir, fi.Name()))
		if err != nil {
			return nil, err
		}
		files = append(files, &source{
			name:      fi.Name(),
			browseURL: fmt.Sprintf(browseURLFmt, fi.Name()),
			data:      b,
		})
	}
	b := &builder{
		pdoc: &Package{
			LineFmt:    lineFmt,
			ImportPath: importPath,
			Etag:       strconv.FormatInt(modTime.Unix(), 16),
		},
	}
	return b.build(files)
}
|
|
@ -0,0 +1,538 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"path"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var standardPath = map[string]bool{
|
||||
"builtin": true,
|
||||
|
||||
// go list -f '"{{.ImportPath}}": true,' std | grep -v 'cmd/|exp/'
|
||||
"cmd/api": true,
|
||||
"cmd/cgo": true,
|
||||
"cmd/fix": true,
|
||||
"cmd/go": true,
|
||||
"cmd/godoc": true,
|
||||
"cmd/gofmt": true,
|
||||
"cmd/vet": true,
|
||||
"cmd/yacc": true,
|
||||
"archive/tar": true,
|
||||
"archive/zip": true,
|
||||
"bufio": true,
|
||||
"bytes": true,
|
||||
"compress/bzip2": true,
|
||||
"compress/flate": true,
|
||||
"compress/gzip": true,
|
||||
"compress/lzw": true,
|
||||
"compress/zlib": true,
|
||||
"container/heap": true,
|
||||
"container/list": true,
|
||||
"container/ring": true,
|
||||
"crypto": true,
|
||||
"crypto/aes": true,
|
||||
"crypto/cipher": true,
|
||||
"crypto/des": true,
|
||||
"crypto/dsa": true,
|
||||
"crypto/ecdsa": true,
|
||||
"crypto/elliptic": true,
|
||||
"crypto/hmac": true,
|
||||
"crypto/md5": true,
|
||||
"crypto/rand": true,
|
||||
"crypto/rc4": true,
|
||||
"crypto/rsa": true,
|
||||
"crypto/sha1": true,
|
||||
"crypto/sha256": true,
|
||||
"crypto/sha512": true,
|
||||
"crypto/subtle": true,
|
||||
"crypto/tls": true,
|
||||
"crypto/x509": true,
|
||||
"crypto/x509/pkix": true,
|
||||
"database/sql": true,
|
||||
"database/sql/driver": true,
|
||||
"debug/dwarf": true,
|
||||
"debug/elf": true,
|
||||
"debug/gosym": true,
|
||||
"debug/macho": true,
|
||||
"debug/pe": true,
|
||||
"encoding/ascii85": true,
|
||||
"encoding/asn1": true,
|
||||
"encoding/base32": true,
|
||||
"encoding/base64": true,
|
||||
"encoding/binary": true,
|
||||
"encoding/csv": true,
|
||||
"encoding/gob": true,
|
||||
"encoding/hex": true,
|
||||
"encoding/json": true,
|
||||
"encoding/pem": true,
|
||||
"encoding/xml": true,
|
||||
"errors": true,
|
||||
"expvar": true,
|
||||
"flag": true,
|
||||
"fmt": true,
|
||||
"go/ast": true,
|
||||
"go/build": true,
|
||||
"go/doc": true,
|
||||
"go/format": true,
|
||||
"go/parser": true,
|
||||
"go/printer": true,
|
||||
"go/scanner": true,
|
||||
"go/token": true,
|
||||
"hash": true,
|
||||
"hash/adler32": true,
|
||||
"hash/crc32": true,
|
||||
"hash/crc64": true,
|
||||
"hash/fnv": true,
|
||||
"html": true,
|
||||
"html/template": true,
|
||||
"image": true,
|
||||
"image/color": true,
|
||||
"image/draw": true,
|
||||
"image/gif": true,
|
||||
"image/jpeg": true,
|
||||
"image/png": true,
|
||||
"index/suffixarray": true,
|
||||
"io": true,
|
||||
"io/ioutil": true,
|
||||
"log": true,
|
||||
"log/syslog": true,
|
||||
"math": true,
|
||||
"math/big": true,
|
||||
"math/cmplx": true,
|
||||
"math/rand": true,
|
||||
"mime": true,
|
||||
"mime/multipart": true,
|
||||
"net": true,
|
||||
"net/http": true,
|
||||
"net/http/cgi": true,
|
||||
"net/http/cookiejar": true,
|
||||
"net/http/fcgi": true,
|
||||
"net/http/httptest": true,
|
||||
"net/http/httputil": true,
|
||||
"net/http/pprof": true,
|
||||
"net/mail": true,
|
||||
"net/rpc": true,
|
||||
"net/rpc/jsonrpc": true,
|
||||
"net/smtp": true,
|
||||
"net/textproto": true,
|
||||
"net/url": true,
|
||||
"os": true,
|
||||
"os/exec": true,
|
||||
"os/signal": true,
|
||||
"os/user": true,
|
||||
"path": true,
|
||||
"path/filepath": true,
|
||||
"reflect": true,
|
||||
"regexp": true,
|
||||
"regexp/syntax": true,
|
||||
"runtime": true,
|
||||
"runtime/cgo": true,
|
||||
"runtime/debug": true,
|
||||
"runtime/pprof": true,
|
||||
"sort": true,
|
||||
"strconv": true,
|
||||
"strings": true,
|
||||
"sync": true,
|
||||
"sync/atomic": true,
|
||||
"syscall": true,
|
||||
"testing": true,
|
||||
"testing/iotest": true,
|
||||
"testing/quick": true,
|
||||
"text/scanner": true,
|
||||
"text/tabwriter": true,
|
||||
"text/template": true,
|
||||
"text/template/parse": true,
|
||||
"time": true,
|
||||
"unicode": true,
|
||||
"unicode/utf16": true,
|
||||
"unicode/utf8": true,
|
||||
"unsafe": true,
|
||||
}
|
||||
|
||||
var validTLD = map[string]bool{
|
||||
// curl http://data.iana.org/TLD/tlds-alpha-by-domain.txt | sed -e '/#/ d' -e 's/.*/"&": true,/' | tr [:upper:] [:lower:]
|
||||
".ac": true,
|
||||
".ad": true,
|
||||
".ae": true,
|
||||
".aero": true,
|
||||
".af": true,
|
||||
".ag": true,
|
||||
".ai": true,
|
||||
".al": true,
|
||||
".am": true,
|
||||
".an": true,
|
||||
".ao": true,
|
||||
".aq": true,
|
||||
".ar": true,
|
||||
".arpa": true,
|
||||
".as": true,
|
||||
".asia": true,
|
||||
".at": true,
|
||||
".au": true,
|
||||
".aw": true,
|
||||
".ax": true,
|
||||
".az": true,
|
||||
".ba": true,
|
||||
".bb": true,
|
||||
".bd": true,
|
||||
".be": true,
|
||||
".bf": true,
|
||||
".bg": true,
|
||||
".bh": true,
|
||||
".bi": true,
|
||||
".biz": true,
|
||||
".bj": true,
|
||||
".bm": true,
|
||||
".bn": true,
|
||||
".bo": true,
|
||||
".br": true,
|
||||
".bs": true,
|
||||
".bt": true,
|
||||
".bv": true,
|
||||
".bw": true,
|
||||
".by": true,
|
||||
".bz": true,
|
||||
".ca": true,
|
||||
".cat": true,
|
||||
".cc": true,
|
||||
".cd": true,
|
||||
".cf": true,
|
||||
".cg": true,
|
||||
".ch": true,
|
||||
".ci": true,
|
||||
".ck": true,
|
||||
".cl": true,
|
||||
".cm": true,
|
||||
".cn": true,
|
||||
".co": true,
|
||||
".com": true,
|
||||
".coop": true,
|
||||
".cr": true,
|
||||
".cu": true,
|
||||
".cv": true,
|
||||
".cw": true,
|
||||
".cx": true,
|
||||
".cy": true,
|
||||
".cz": true,
|
||||
".de": true,
|
||||
".dj": true,
|
||||
".dk": true,
|
||||
".dm": true,
|
||||
".do": true,
|
||||
".dz": true,
|
||||
".ec": true,
|
||||
".edu": true,
|
||||
".ee": true,
|
||||
".eg": true,
|
||||
".er": true,
|
||||
".es": true,
|
||||
".et": true,
|
||||
".eu": true,
|
||||
".fi": true,
|
||||
".fj": true,
|
||||
".fk": true,
|
||||
".fm": true,
|
||||
".fo": true,
|
||||
".fr": true,
|
||||
".ga": true,
|
||||
".gb": true,
|
||||
".gd": true,
|
||||
".ge": true,
|
||||
".gf": true,
|
||||
".gg": true,
|
||||
".gh": true,
|
||||
".gi": true,
|
||||
".gl": true,
|
||||
".gm": true,
|
||||
".gn": true,
|
||||
".gov": true,
|
||||
".gp": true,
|
||||
".gq": true,
|
||||
".gr": true,
|
||||
".gs": true,
|
||||
".gt": true,
|
||||
".gu": true,
|
||||
".gw": true,
|
||||
".gy": true,
|
||||
".hk": true,
|
||||
".hm": true,
|
||||
".hn": true,
|
||||
".hr": true,
|
||||
".ht": true,
|
||||
".hu": true,
|
||||
".id": true,
|
||||
".ie": true,
|
||||
".il": true,
|
||||
".im": true,
|
||||
".in": true,
|
||||
".info": true,
|
||||
".int": true,
|
||||
".io": true,
|
||||
".iq": true,
|
||||
".ir": true,
|
||||
".is": true,
|
||||
".it": true,
|
||||
".je": true,
|
||||
".jm": true,
|
||||
".jo": true,
|
||||
".jobs": true,
|
||||
".jp": true,
|
||||
".ke": true,
|
||||
".kg": true,
|
||||
".kh": true,
|
||||
".ki": true,
|
||||
".km": true,
|
||||
".kn": true,
|
||||
".kp": true,
|
||||
".kr": true,
|
||||
".kw": true,
|
||||
".ky": true,
|
||||
".kz": true,
|
||||
".la": true,
|
||||
".lb": true,
|
||||
".lc": true,
|
||||
".li": true,
|
||||
".lk": true,
|
||||
".lr": true,
|
||||
".ls": true,
|
||||
".lt": true,
|
||||
".lu": true,
|
||||
".lv": true,
|
||||
".ly": true,
|
||||
".ma": true,
|
||||
".mc": true,
|
||||
".md": true,
|
||||
".me": true,
|
||||
".mg": true,
|
||||
".mh": true,
|
||||
".mil": true,
|
||||
".mk": true,
|
||||
".ml": true,
|
||||
".mm": true,
|
||||
".mn": true,
|
||||
".mo": true,
|
||||
".mobi": true,
|
||||
".mp": true,
|
||||
".mq": true,
|
||||
".mr": true,
|
||||
".ms": true,
|
||||
".mt": true,
|
||||
".mu": true,
|
||||
".museum": true,
|
||||
".mv": true,
|
||||
".mw": true,
|
||||
".mx": true,
|
||||
".my": true,
|
||||
".mz": true,
|
||||
".na": true,
|
||||
".name": true,
|
||||
".nc": true,
|
||||
".ne": true,
|
||||
".net": true,
|
||||
".nf": true,
|
||||
".ng": true,
|
||||
".ni": true,
|
||||
".nl": true,
|
||||
".no": true,
|
||||
".np": true,
|
||||
".nr": true,
|
||||
".nu": true,
|
||||
".nz": true,
|
||||
".om": true,
|
||||
".org": true,
|
||||
".pa": true,
|
||||
".pe": true,
|
||||
".pf": true,
|
||||
".pg": true,
|
||||
".ph": true,
|
||||
".pk": true,
|
||||
".pl": true,
|
||||
".pm": true,
|
||||
".pn": true,
|
||||
".post": true,
|
||||
".pr": true,
|
||||
".pro": true,
|
||||
".ps": true,
|
||||
".pt": true,
|
||||
".pw": true,
|
||||
".py": true,
|
||||
".qa": true,
|
||||
".re": true,
|
||||
".ro": true,
|
||||
".rs": true,
|
||||
".ru": true,
|
||||
".rw": true,
|
||||
".sa": true,
|
||||
".sb": true,
|
||||
".sc": true,
|
||||
".sd": true,
|
||||
".se": true,
|
||||
".sg": true,
|
||||
".sh": true,
|
||||
".si": true,
|
||||
".sj": true,
|
||||
".sk": true,
|
||||
".sl": true,
|
||||
".sm": true,
|
||||
".sn": true,
|
||||
".so": true,
|
||||
".sr": true,
|
||||
".st": true,
|
||||
".su": true,
|
||||
".sv": true,
|
||||
".sx": true,
|
||||
".sy": true,
|
||||
".sz": true,
|
||||
".tc": true,
|
||||
".td": true,
|
||||
".tel": true,
|
||||
".tf": true,
|
||||
".tg": true,
|
||||
".th": true,
|
||||
".tj": true,
|
||||
".tk": true,
|
||||
".tl": true,
|
||||
".tm": true,
|
||||
".tn": true,
|
||||
".to": true,
|
||||
".tp": true,
|
||||
".tr": true,
|
||||
".travel": true,
|
||||
".tt": true,
|
||||
".tv": true,
|
||||
".tw": true,
|
||||
".tz": true,
|
||||
".ua": true,
|
||||
".ug": true,
|
||||
".uk": true,
|
||||
".us": true,
|
||||
".uy": true,
|
||||
".uz": true,
|
||||
".va": true,
|
||||
".vc": true,
|
||||
".ve": true,
|
||||
".vg": true,
|
||||
".vi": true,
|
||||
".vn": true,
|
||||
".vu": true,
|
||||
".wf": true,
|
||||
".ws": true,
|
||||
".xn--0zwm56d": true,
|
||||
".xn--11b5bs3a9aj6g": true,
|
||||
".xn--3e0b707e": true,
|
||||
".xn--45brj9c": true,
|
||||
".xn--80akhbyknj4f": true,
|
||||
".xn--80ao21a": true,
|
||||
".xn--90a3ac": true,
|
||||
".xn--9t4b11yi5a": true,
|
||||
".xn--clchc0ea0b2g2a9gcd": true,
|
||||
".xn--deba0ad": true,
|
||||
".xn--fiqs8s": true,
|
||||
".xn--fiqz9s": true,
|
||||
".xn--fpcrj9c3d": true,
|
||||
".xn--fzc2c9e2c": true,
|
||||
".xn--g6w251d": true,
|
||||
".xn--gecrj9c": true,
|
||||
".xn--h2brj9c": true,
|
||||
".xn--hgbk6aj7f53bba": true,
|
||||
".xn--hlcj6aya9esc7a": true,
|
||||
".xn--j6w193g": true,
|
||||
".xn--jxalpdlp": true,
|
||||
".xn--kgbechtv": true,
|
||||
".xn--kprw13d": true,
|
||||
".xn--kpry57d": true,
|
||||
".xn--lgbbat1ad8j": true,
|
||||
".xn--mgb9awbf": true,
|
||||
".xn--mgbaam7a8h": true,
|
||||
".xn--mgbayh7gpa": true,
|
||||
".xn--mgbbh1a71e": true,
|
||||
".xn--mgbc0a9azcg": true,
|
||||
".xn--mgberp4a5d4ar": true,
|
||||
".xn--mgbx4cd0ab": true,
|
||||
".xn--o3cw4h": true,
|
||||
".xn--ogbpf8fl": true,
|
||||
".xn--p1ai": true,
|
||||
".xn--pgbs0dh": true,
|
||||
".xn--s9brj9c": true,
|
||||
".xn--wgbh1c": true,
|
||||
".xn--wgbl6a": true,
|
||||
".xn--xkc2al3hye2a": true,
|
||||
".xn--xkc2dl3a5ee0h": true,
|
||||
".xn--yfro4i67o": true,
|
||||
".xn--ygbi2ammx": true,
|
||||
".xn--zckzah": true,
|
||||
".xxx": true,
|
||||
".ye": true,
|
||||
".yt": true,
|
||||
".za": true,
|
||||
".zm": true,
|
||||
".zw": true,
|
||||
}
|
||||
|
||||
// validHost matches a registered host name: two or more dot-separated labels
// of lowercase letters, digits and hyphens.
var validHost = regexp.MustCompile(`^[-a-z0-9]+(?:\.[-a-z0-9]+)+$`)

// validPathElement matches one import path element. The first character must
// not be "_" or "." because the go tool ignores those directories.
var validPathElement = regexp.MustCompile(`^[-A-Za-z0-9~+][-A-Za-z0-9_.]*$`)
|
||||
|
||||
// IsValidRemotePath returns true if importPath is structurally valid for "go get".
|
||||
func IsValidRemotePath(importPath string) bool {
|
||||
|
||||
parts := strings.Split(importPath, "/")
|
||||
|
||||
if len(parts) <= 1 {
|
||||
// Import path must contain at least one "/".
|
||||
return false
|
||||
}
|
||||
|
||||
if !validTLD[path.Ext(parts[0])] {
|
||||
return false
|
||||
}
|
||||
|
||||
if !validHost.MatchString(parts[0]) {
|
||||
return false
|
||||
}
|
||||
|
||||
for _, part := range parts[1:] {
|
||||
if !validPathElement.MatchString(part) || part == "testdata" {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
var goRepoPath = map[string]bool{}
|
||||
|
||||
func init() {
|
||||
for p := range standardPath {
|
||||
for {
|
||||
goRepoPath[p] = true
|
||||
i := strings.LastIndex(p, "/")
|
||||
if i < 0 {
|
||||
break
|
||||
}
|
||||
p = p[:i]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// IsGoRepoPath returns true if importPath is a package or a directory prefix
// of a package in the standard Go repository.
func IsGoRepoPath(importPath string) bool {
	return goRepoPath[importPath]
}
|
||||
|
||||
// IsValidPath returns true if importPath is a valid import path: the pseudo
// package "C", a standard package, or a structurally valid remote path.
func IsValidPath(importPath string) bool {
	return importPath == "C" || standardPath[importPath] || IsValidRemotePath(importPath)
}
|
|
@ -0,0 +1,54 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
// goodImportPaths are structurally valid "go get" import paths.
var goodImportPaths = []string{
	"github.com/user/repo",
	"github.com/user/repo/src/pkg/compress/somethingelse",
	"github.com/user/repo/src/compress/gzip",
	"github.com/user/repo/src/pkg",
	"camlistore.org/r/p/camlistore",
	"example.com/foo.git",
	"launchpad.net/~user/foo/trunk",
	"launchpad.net/~user/+junk/version",
}

// badImportPaths must be rejected: no path element, bad host names, a
// "testdata" element, and elements starting with "_" or ".".
var badImportPaths = []string{
	"foobar",
	"foo.",
	".bar",
	"favicon.ico",
	"exmpple.com",
	"github.com/user/repo/testdata/x",
	"github.com/user/repo/_ignore/x",
	"github.com/user/repo/.ignore/x",
}
|
||||
|
||||
func TestIsValidRemotePath(t *testing.T) {
|
||||
for _, importPath := range goodImportPaths {
|
||||
if !IsValidRemotePath(importPath) {
|
||||
t.Errorf("isBadImportPath(%q) -> true, want false", importPath)
|
||||
}
|
||||
}
|
||||
for _, importPath := range badImportPaths {
|
||||
if IsValidRemotePath(importPath) {
|
||||
t.Errorf("isBadImportPath(%q) -> false, want true", importPath)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,80 @@
|
|||
// Copyright 2013 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Presentation is a fetched present-tool document together with the source
// files it references.
type Presentation struct {
	Filename string            // name of the main presentation file
	Files    map[string][]byte // file name -> contents; includes Filename
	Updated  time.Time         // time the presentation was built
}

// presBuilder assembles a Presentation from raw present-tool markup.
type presBuilder struct {
	filename   string                     // name of the main file
	data       []byte                     // raw markup to scan for directives
	resolveURL func(fname string) string  // rewrites .image/.iframe asset names to URLs
	fetch      func(srcs []*source) error // fills in data for .play/.code sources
}
|
||||
|
||||
// assetPat matches a present-tool directive (.play, .code, .image, .iframe,
// .html) at the start of a line; the second group captures the asset name.
var assetPat = regexp.MustCompile(`(?m)^\.(play|code|image|iframe|html)\s+(\S+)`)
|
||||
|
||||
// build scans b.data for asset directives, rewrites .image/.iframe asset
// names through resolveURL, collects the .play/.code source files, fetches
// them, and returns the assembled Presentation.
func (b *presBuilder) build() (*Presentation, error) {
	var data []byte
	var files []*source
	// i is the position in b.data up to which output has been emitted.
	i := 0
	for _, m := range assetPat.FindAllSubmatchIndex(b.data, -1) {
		// m[2]:m[3] is the directive keyword, m[4]:m[5] the asset name.
		name := string(b.data[m[4]:m[5]])
		switch string(b.data[m[2]:m[3]]) {
		case "iframe", "image":
			// Copy up to the asset name, then substitute the resolved URL.
			data = append(data, b.data[i:m[4]]...)
			data = append(data, b.resolveURL(name)...)
		case "html":
			// TODO: sanitize and fix relative URLs in HTML.
			data = append(data, "\ntalks.godoc.org does not support .html\n"...)
		case "play", "code":
			// Keep the directive text verbatim and remember the file to
			// fetch, skipping duplicates.
			data = append(data, b.data[i:m[5]]...)
			found := false
			for _, f := range files {
				if f.name == name {
					found = true
					break
				}
			}
			if !found {
				files = append(files, &source{name: name})
			}
		default:
			panic("unreachable")
		}
		i = m[5]
	}
	// Copy the tail after the last directive.
	data = append(data, b.data[i:]...)
	if err := b.fetch(files); err != nil {
		return nil, err
	}
	pres := &Presentation{
		Updated:  time.Now().UTC(),
		Filename: b.filename,
		Files:    map[string][]byte{b.filename: data},
	}
	for _, f := range files {
		pres.Files[f.name] = f.data
	}
	return pres, nil
}
|
|
@ -0,0 +1,77 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
// +build ignore
|
||||
|
||||
// Command print fetches and prints package documentation.
|
||||
//
|
||||
// Usage: go run print.go importPath
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
|
||||
"github.com/davecgh/go-spew/spew"
|
||||
"github.com/garyburd/gddo/doc"
|
||||
)
|
||||
|
||||
// Command line flags.
var (
	etag    = flag.String("etag", "", "Etag")
	local   = flag.Bool("local", false, "Get package from local directory.")
	present = flag.Bool("present", false, "Get presentation.")
)
|
||||
|
||||
func main() {
|
||||
flag.Parse()
|
||||
if len(flag.Args()) != 1 {
|
||||
log.Fatal("Usage: go run print.go importPath")
|
||||
}
|
||||
if *present {
|
||||
printPresentation(flag.Args()[0])
|
||||
} else {
|
||||
printPackage(flag.Args()[0])
|
||||
}
|
||||
}
|
||||
|
||||
// printPackage fetches documentation for the package at path — from the
// local directory when -local is set, otherwise over HTTP — and dumps the
// result with spew.
func printPackage(path string) {
	var (
		pdoc *doc.Package
		err  error
	)
	if *local {
		pdoc, err = doc.GetLocal(path, "", "", "%s", "#L%d")
	} else {
		pdoc, err = doc.Get(http.DefaultClient, path, *etag)
	}
	if err != nil {
		log.Fatal(err)
	}
	spew.Dump(pdoc)
}
|
||||
|
||||
func printPresentation(path string) {
|
||||
pres, err := doc.GetPresentation(http.DefaultClient, path)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
fmt.Printf("%s\n", pres.Files[pres.Filename])
|
||||
for name, data := range pres.Files {
|
||||
if name != pres.Filename {
|
||||
fmt.Printf("---------- %s ----------\n%s\n", name, data)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,233 @@
|
|||
// Copyright 2011 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"crypto/md5"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// defaultTags maps a VCS name to the branch checked out by default.
var defaultTags = map[string]string{"git": "master", "hg": "default"}
|
||||
|
||||
func bestTag(tags map[string]string, defaultTag string) (string, string, error) {
|
||||
if commit, ok := tags["go1"]; ok {
|
||||
return "go1", commit, nil
|
||||
}
|
||||
if commit, ok := tags[defaultTag]; ok {
|
||||
return defaultTag, commit, nil
|
||||
}
|
||||
return "", "", NotFoundError{"Tag or branch not found."}
|
||||
}
|
||||
|
||||
// expand replaces each {k} in template with match[k], or with subs[atoi(k)]
// when k is not a key of match.
//
// Previously an unterminated "{" caused a slice-bounds panic; it is now
// copied through literally.
func expand(template string, match map[string]string, subs ...string) string {
	var p []byte
	for {
		i := strings.Index(template, "{")
		if i < 0 {
			break
		}
		p = append(p, template[:i]...)
		template = template[i+1:]
		j := strings.Index(template, "}")
		if j < 0 {
			// No closing brace: emit the "{" verbatim and stop scanning.
			p = append(p, '{')
			break
		}
		key := template[:j]
		if s, ok := match[key]; ok {
			p = append(p, s...)
		} else {
			// Keys not present in match are numeric indexes into subs.
			n, _ := strconv.Atoi(key)
			p = append(p, subs[n]...)
		}
		template = template[j+1:]
	}
	p = append(p, template...)
	return string(p)
}
|
||||
|
||||
// readmePat matches file names beginning with "readme" in any case, either
// exactly or followed by an extension.
var readmePat = regexp.MustCompile(`^[Rr][Ee][Aa][Dd][Mm][Ee](?:$|\.)`)

// isDocFile returns true if a file with name n should be included in the
// documentation: Go source files not starting with "." or "_", and README
// files.
func isDocFile(n string) bool {
	switch {
	case !strings.HasSuffix(n, ".go"):
		return readmePat.MatchString(n)
	case n[0] == '_' || n[0] == '.':
		return false
	default:
		return true
	}
}
|
||||
|
||||
// userAgent is sent in the User-Agent header of all outgoing HTTP requests.
var userAgent = "go application"

// SetUserAgent sets the User-Agent header value used for outgoing HTTP
// requests.
func SetUserAgent(ua string) {
	userAgent = ua
}
|
||||
|
||||
// fetchFiles fetches the source files specified by the rawURL field in parallel.
// The fetched contents are stored in each file's data field. The first error
// received is returned; the channel is buffered to len(files) so the
// remaining goroutines can complete without blocking.
func fetchFiles(client *http.Client, files []*source, header http.Header) error {
	ch := make(chan error, len(files))
	for i := range files {
		go func(i int) {
			req, err := http.NewRequest("GET", files[i].rawURL, nil)
			if err != nil {
				ch <- err
				return
			}
			req.Header.Set("User-Agent", userAgent)
			// Copy caller-supplied headers (e.g. authentication).
			for k, vs := range header {
				req.Header[k] = vs
			}
			resp, err := client.Do(req)
			if err != nil {
				ch <- &RemoteError{req.URL.Host, err}
				return
			}
			defer resp.Body.Close()
			if resp.StatusCode != 200 {
				ch <- &RemoteError{req.URL.Host, fmt.Errorf("get %s -> %d", req.URL, resp.StatusCode)}
				return
			}
			files[i].data, err = ioutil.ReadAll(resp.Body)
			if err != nil {
				ch <- &RemoteError{req.URL.Host, err}
				return
			}
			ch <- nil
		}(i)
	}
	// Collect one result per file.
	for _ = range files {
		if err := <-ch; err != nil {
			return err
		}
	}
	return nil
}
|
||||
|
||||
// httpGet gets the specified resource. A NotFoundError is returned if the
// server responds with status 404. On success the caller is responsible for
// closing the returned body.
func httpGet(client *http.Client, url string, header http.Header) (io.ReadCloser, error) {
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("User-Agent", userAgent)
	// Copy caller-supplied headers (e.g. authentication).
	for k, vs := range header {
		req.Header[k] = vs
	}
	resp, err := client.Do(req)
	if err != nil {
		return nil, &RemoteError{req.URL.Host, err}
	}
	if resp.StatusCode == 200 {
		return resp.Body, nil
	}
	resp.Body.Close()
	if resp.StatusCode == 404 { // 403 can be rate limit error. || resp.StatusCode == 403 {
		err = NotFoundError{"Resource not found: " + url}
	} else {
		err = &RemoteError{req.URL.Host, fmt.Errorf("get %s -> %d", url, resp.StatusCode)}
	}
	return nil, err
}
|
||||
|
||||
func httpGetJSON(client *http.Client, url string, v interface{}) error {
|
||||
rc, err := httpGet(client, url, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer rc.Close()
|
||||
err = json.NewDecoder(rc).Decode(v)
|
||||
if _, ok := err.(*json.SyntaxError); ok {
|
||||
err = NotFoundError{"JSON syntax error at " + url}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// httpGetBytes gets the specified resource and returns the response body as
// a byte slice. A NotFoundError is returned if the server responds with
// status 404. (The previous comment named the wrong function, "httpGet".)
func httpGetBytes(client *http.Client, url string, header http.Header) ([]byte, error) {
	rc, err := httpGet(client, url, header)
	if err != nil {
		return nil, err
	}
	p, err := ioutil.ReadAll(rc)
	rc.Close()
	return p, err
}
|
||||
|
||||
// httpGetBytesNoneMatch conditionally gets the specified resource using an
// If-None-Match request header. If a 304 status is returned, then the
// function returns ErrNotModified. If a 404 status is returned, then the
// function returns a NotFoundError. On success the body and the unquoted
// Etag response header value are returned.
func httpGetBytesNoneMatch(client *http.Client, url string, etag string) ([]byte, string, error) {
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return nil, "", err
	}
	req.Header.Set("User-Agent", userAgent)
	req.Header.Set("If-None-Match", `"`+etag+`"`)
	resp, err := client.Do(req)
	if err != nil {
		return nil, "", &RemoteError{req.URL.Host, err}
	}
	defer resp.Body.Close()

	// Strip the surrounding quotes from the returned Etag; discard
	// malformed values.
	etag = resp.Header.Get("Etag")
	if len(etag) >= 2 && etag[0] == '"' && etag[len(etag)-1] == '"' {
		etag = etag[1 : len(etag)-1]
	} else {
		etag = ""
	}

	switch resp.StatusCode {
	case 200:
		p, err := ioutil.ReadAll(resp.Body)
		return p, etag, err
	case 404:
		return nil, "", NotFoundError{"Resource not found: " + url}
	case 304:
		return nil, "", ErrNotModified
	default:
		return nil, "", &RemoteError{req.URL.Host, fmt.Errorf("get %s -> %d", url, resp.StatusCode)}
	}
}
|
||||
|
||||
// httpGetBytesCompare gets the specified resource and compares an MD5 hash
// of the body (used here as a change detector, not for security) against
// savedEtag. ErrNotModified is returned together with the body and new etag
// when they match. A NotFoundError is returned if the server responds with
// status 404. (The previous comment named the wrong function, "httpGet".)
func httpGetBytesCompare(client *http.Client, url string, savedEtag string) ([]byte, string, error) {
	p, err := httpGetBytes(client, url, nil)
	if err != nil {
		return nil, "", err
	}
	h := md5.New()
	h.Write(p)
	etag := hex.EncodeToString(h.Sum(nil))
	if savedEtag == etag {
		err = ErrNotModified
	}
	return p, etag, err
}
|
||||
|
||||
// sliceWriter is an io.Writer that appends everything written to the
// pointed-to byte slice.
type sliceWriter struct{ p *[]byte }

// Write appends data to the underlying slice; it never fails.
func (w sliceWriter) Write(data []byte) (int, error) {
	buf := append(*w.p, data...)
	*w.p = buf
	return len(data), nil
}
|
|
@ -0,0 +1,236 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// TODO: specify with command line flag
|
||||
const repoRoot = "/tmp/gddo"
|
||||
|
||||
var urlTemplates = []struct {
|
||||
re *regexp.Regexp
|
||||
template string
|
||||
lineFmt string
|
||||
}{
|
||||
{
|
||||
regexp.MustCompile(`^git\.gitorious\.org/(?P<repo>[^/]+/[^/]+)$`),
|
||||
"https://gitorious.org/{repo}/blobs/{tag}/{dir}{0}",
|
||||
"%s#line%d",
|
||||
},
|
||||
{
|
||||
regexp.MustCompile(`^camlistore\.org/r/p/(?P<repo>[^/]+)$`),
|
||||
"http://camlistore.org/code/?p={repo}.git;hb={tag};f={dir}{0}",
|
||||
"%s#l%d",
|
||||
},
|
||||
}
|
||||
|
||||
// lookupURLTemplate finds an expand() template, match map and line number
|
||||
// format for well known repositories.
|
||||
func lookupURLTemplate(repo, dir, tag string) (string, map[string]string, string) {
|
||||
if strings.HasPrefix(dir, "/") {
|
||||
dir = dir[1:] + "/"
|
||||
}
|
||||
for _, t := range urlTemplates {
|
||||
if m := t.re.FindStringSubmatch(repo); m != nil {
|
||||
match := map[string]string{
|
||||
"dir": dir,
|
||||
"tag": tag,
|
||||
}
|
||||
for i, name := range t.re.SubexpNames() {
|
||||
if name != "" {
|
||||
match[name] = m[i]
|
||||
}
|
||||
}
|
||||
return t.template, match, t.lineFmt
|
||||
}
|
||||
}
|
||||
return "", nil, ""
|
||||
}
|
||||
|
||||
// vcsCmd describes how to download a repository for one version control
// system.
type vcsCmd struct {
	schemes  []string // URL schemes to try, in order
	download func([]string, string, string) (string, string, error)
}

// vcsCmds maps a VCS name to its commands. Only Git is currently supported.
var vcsCmds = map[string]*vcsCmd{
	"git": &vcsCmd{
		schemes:  []string{"http", "https", "git"},
		download: downloadGit,
	},
}
|
||||
|
||||
// lsremoteRe parses one line of `git ls-remote` output: a 40-hex commit
// hash followed by a tag or branch ref name.
var lsremoteRe = regexp.MustCompile(`(?m)^([0-9a-f]{40})\s+refs/(?:tags|heads)/(.+)$`)
|
||||
|
||||
// downloadGit clones or updates a local mirror of the Git repository repo
// under repoRoot, trying each URL scheme in turn, and checks out the best
// tag ("go1" or "master"). It returns the tag name and an etag of the form
// "scheme-commit". ErrNotModified is returned when the computed etag equals
// savedEtag; a NotFoundError when no scheme can reach the repository.
func downloadGit(schemes []string, repo, savedEtag string) (string, string, error) {
	var p []byte
	var scheme string
	// Probe schemes by listing the remote refs; the first that succeeds wins.
	for i := range schemes {
		cmd := exec.Command("git", "ls-remote", "--heads", "--tags", schemes[i]+"://"+repo+".git")
		log.Println(strings.Join(cmd.Args, " "))
		var err error
		p, err = cmd.Output()
		if err == nil {
			scheme = schemes[i]
			break
		}
	}

	if scheme == "" {
		return "", "", NotFoundError{"VCS not found"}
	}

	// Build ref name -> commit hash from the ls-remote output.
	tags := make(map[string]string)
	for _, m := range lsremoteRe.FindAllSubmatch(p, -1) {
		tags[string(m[2])] = string(m[1])
	}

	tag, commit, err := bestTag(tags, "master")
	if err != nil {
		return "", "", err
	}

	etag := scheme + "-" + commit

	if etag == savedEtag {
		return "", "", ErrNotModified
	}

	dir := path.Join(repoRoot, repo+".git")
	p, err = ioutil.ReadFile(path.Join(dir, ".git/HEAD"))
	switch {
	case err != nil:
		// No local checkout yet: clone the repository.
		if err := os.MkdirAll(dir, 0777); err != nil {
			return "", "", err
		}
		cmd := exec.Command("git", "clone", scheme+"://"+repo, dir)
		log.Println(strings.Join(cmd.Args, " "))
		if err := cmd.Run(); err != nil {
			return "", "", err
		}
	case string(bytes.TrimRight(p, "\n")) == commit:
		// Checkout is already at the wanted commit.
		return tag, etag, nil
	default:
		// Checkout exists but is stale: fetch new objects.
		cmd := exec.Command("git", "fetch")
		log.Println(strings.Join(cmd.Args, " "))
		cmd.Dir = dir
		if err := cmd.Run(); err != nil {
			return "", "", err
		}
	}

	cmd := exec.Command("git", "checkout", "--detach", "--force", commit)
	cmd.Dir = dir
	if err := cmd.Run(); err != nil {
		return "", "", err
	}

	return tag, etag, nil
}
|
||||
|
||||
// vcsPattern matches import paths with an explicit VCS qualifier, e.g.
// "example.com/repo.git/dir". Named groups: repo, vcs, dir.
var vcsPattern = regexp.MustCompile(`^(?P<repo>(?:[a-z0-9.\-]+\.)+[a-z0-9.\-]+(?::[0-9]+)?/[A-Za-z0-9_.\-/]*?)\.(?P<vcs>bzr|git|hg|svn)(?P<dir>/[A-Za-z0-9_.\-/]*)?$`)
|
||||
|
||||
// getVCSDoc builds documentation for a package whose import path carries an
// explicit VCS qualifier (match comes from vcsPattern). When etagSaved has
// the form "scheme-commit", the scheme component pins which URL scheme to
// try. ErrNotModified and NotFoundError propagate from the download step.
func getVCSDoc(client *http.Client, match map[string]string, etagSaved string) (*Package, error) {
	cmd := vcsCmds[match["vcs"]]
	if cmd == nil {
		return nil, NotFoundError{expand("VCS not supported: {vcs}", match)}
	}

	// Recover the scheme recorded in the saved etag, if any.
	scheme := match["scheme"]
	if scheme == "" {
		i := strings.Index(etagSaved, "-")
		if i > 0 {
			scheme = etagSaved[:i]
		}
	}

	// Restrict the candidate schemes to the recovered one when known.
	schemes := cmd.schemes
	if scheme != "" {
		for i := range cmd.schemes {
			if cmd.schemes[i] == scheme {
				schemes = cmd.schemes[i : i+1]
				break
			}
		}
	}

	// Download and checkout.

	tag, etag, err := cmd.download(schemes, match["repo"], etagSaved)
	if err != nil {
		return nil, err
	}

	// Find source location.

	urlTemplate, urlMatch, lineFmt := lookupURLTemplate(match["repo"], match["dir"], tag)

	// Slurp source files.

	d := path.Join(repoRoot, expand("{repo}.{vcs}", match), match["dir"])
	f, err := os.Open(d)
	if err != nil {
		if os.IsNotExist(err) {
			err = NotFoundError{err.Error()}
		}
		return nil, err
	}
	fis, err := f.Readdir(-1)
	if err != nil {
		return nil, err
	}

	var files []*source
	for _, fi := range fis {
		if fi.IsDir() || !isDocFile(fi.Name()) {
			continue
		}
		b, err := ioutil.ReadFile(path.Join(d, fi.Name()))
		if err != nil {
			return nil, err
		}
		files = append(files, &source{
			name:      fi.Name(),
			browseURL: expand(urlTemplate, urlMatch, fi.Name()),
			data:      b,
		})
	}

	// Create the documentation.

	b := &builder{
		pdoc: &Package{
			LineFmt:     lineFmt,
			ImportPath:  match["importPath"],
			ProjectRoot: expand("{repo}.{vcs}", match),
			ProjectName: path.Base(match["repo"]),
			ProjectURL:  "",
			BrowseURL:   "",
			Etag:        etag,
			VCS:         match["vcs"],
		},
	}

	return b.build(files)
}
|
|
@ -0,0 +1,87 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// This list of deprecated exports is used to find code that has not been
// updated for Go 1. Keys are import paths quoted exactly as they appear in
// source; values are names that were removed from those packages.
var deprecatedExports = map[string][]string{
	`"bytes"`:          {"Add"},
	`"crypto/aes"`:     {"Cipher"},
	`"crypto/hmac"`:    {"NewSHA1", "NewSHA256"},
	`"crypto/rand"`:    {"Seed"},
	`"encoding/json"`:  {"MarshalForHTML"},
	`"encoding/xml"`:   {"Marshaler", "NewParser", "Parser"},
	`"html"`:           {"NewTokenizer", "Parse"},
	`"image"`:          {"Color", "NRGBAColor", "RGBAColor"},
	`"io"`:             {"Copyn"},
	`"log"`:            {"Exitf"},
	`"math"`:           {"Fabs", "Fmax", "Fmod"},
	`"os"`:             {"Envs", "Error", "Getenverror", "NewError", "Time", "UnixSignal", "Wait"},
	`"reflect"`:        {"MapValue", "Typeof"},
	`"runtime"`:        {"UpdateMemStats"},
	`"strconv"`:        {"Atob", "Atof32", "Atof64", "AtofN", "Atoi64", "Atoui", "Atoui64", "Btoui64", "Ftoa64", "Itoa64", "Uitoa", "Uitoa64"},
	`"time"`:           {"LocalTime", "Nanoseconds", "NanosecondsToLocalTime", "Seconds", "SecondsToLocalTime", "SecondsToUTC"},
	`"unicode/utf8"`:   {"NewString"},
}
|
||||
|
||||
type vetVisitor struct {
|
||||
errors map[string]token.Pos
|
||||
}
|
||||
|
||||
func (v *vetVisitor) Visit(n ast.Node) ast.Visitor {
|
||||
if sel, ok := n.(*ast.SelectorExpr); ok {
|
||||
if x, _ := sel.X.(*ast.Ident); x != nil {
|
||||
if obj := x.Obj; obj != nil && obj.Kind == ast.Pkg {
|
||||
if spec, _ := obj.Decl.(*ast.ImportSpec); spec != nil {
|
||||
for _, name := range deprecatedExports[spec.Path.Value] {
|
||||
if name == sel.Sel.Name {
|
||||
v.errors[fmt.Sprintf("%s.%s not found", spec.Path.Value, sel.Sel.Name)] = n.Pos()
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
// vetPackage checks pkg for unrecognized import paths and uses of exports
// removed in Go 1, appending a message with position for each problem to
// b.pdoc.Errors.
func (b *builder) vetPackage(pkg *ast.Package) {
	errors := make(map[string]token.Pos)
	for _, file := range pkg.Files {
		for _, is := range file.Imports {
			importPath, _ := strconv.Unquote(is.Path.Value)
			// "exp/" and "appengine" packages are not indexed but are
			// legitimate imports.
			if !IsValidPath(importPath) &&
				!strings.HasPrefix(importPath, "exp/") &&
				!strings.HasPrefix(importPath, "appengine") {
				errors[fmt.Sprintf("Unrecognized import path %q", importPath)] = is.Pos()
			}
		}
		v := vetVisitor{errors: errors}
		ast.Walk(&v, file)
	}
	for message, pos := range errors {
		b.pdoc.Errors = append(b.pdoc.Errors,
			fmt.Sprintf("%s (%s)", message, b.fset.Position(pos)))
	}
}
|
|
@ -0,0 +1,41 @@
|
|||
// Copyright 2013 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"github.com/garyburd/gddo/database"
|
||||
"log"
|
||||
"os"
|
||||
)
|
||||
|
||||
// blockCommand registers the "block" admin subcommand.
var blockCommand = &command{
	name:  "block",
	run:   block,
	usage: "block path",
}
|
||||
|
||||
func block(c *command) {
|
||||
if len(c.flag.Args()) != 1 {
|
||||
c.printUsage()
|
||||
os.Exit(1)
|
||||
}
|
||||
db, err := database.New()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
if err := db.Block(c.flag.Args()[0]); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
|
@ -0,0 +1,61 @@
|
|||
// Copyright 2013 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/garyburd/gddo/database"
|
||||
"github.com/garyburd/redigo/redis"
|
||||
)
|
||||
|
||||
// crawlCommand registers the "crawl" admin subcommand.
var crawlCommand = &command{
	name:  "crawl",
	run:   crawl,
	usage: "crawl",
}
|
||||
|
||||
func crawl(c *command) {
|
||||
if len(c.flag.Args()) != 0 {
|
||||
c.printUsage()
|
||||
os.Exit(1)
|
||||
}
|
||||
db, err := database.New()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
conn := db.Pool.Get()
|
||||
defer conn.Close()
|
||||
|
||||
paths, err := redis.Strings(conn.Do("SMEMBERS", "newCrawl"))
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
fmt.Println("NEW")
|
||||
for _, path := range paths {
|
||||
fmt.Println(path)
|
||||
}
|
||||
|
||||
paths, err = redis.Strings(conn.Do("SMEMBERS", "badCrawl"))
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
fmt.Println("BAD")
|
||||
for _, path := range paths {
|
||||
fmt.Println(path)
|
||||
}
|
||||
}
|
|
@ -0,0 +1,67 @@
|
|||
// Copyright 2013 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/garyburd/gddo/database"
|
||||
"github.com/garyburd/gddo/doc"
|
||||
)
|
||||
|
||||
// dangleCommand registers the "dangle" admin subcommand.
var dangleCommand = &command{
	name:  "dangle",
	run:   dangle,
	usage: "dangle",
}
|
||||
|
||||
func dangle(c *command) {
|
||||
if len(c.flag.Args()) != 0 {
|
||||
c.printUsage()
|
||||
os.Exit(1)
|
||||
}
|
||||
db, err := database.New()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
m := make(map[string]int)
|
||||
err = db.Do(func(pi *database.PackageInfo) error {
|
||||
m[pi.PDoc.ImportPath] |= 1
|
||||
for _, p := range pi.PDoc.Imports {
|
||||
if doc.IsValidPath(p) {
|
||||
m[p] |= 2
|
||||
}
|
||||
}
|
||||
for _, p := range pi.PDoc.TestImports {
|
||||
if doc.IsValidPath(p) {
|
||||
m[p] |= 2
|
||||
}
|
||||
}
|
||||
for _, p := range pi.PDoc.XTestImports {
|
||||
if doc.IsValidPath(p) {
|
||||
m[p] |= 2
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
for p, v := range m {
|
||||
if v == 2 {
|
||||
fmt.Println(p)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,42 @@
|
|||
// Copyright 2013 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/garyburd/gddo/database"
|
||||
)
|
||||
|
||||
// deleteCommand registers the "delete" admin subcommand.
var deleteCommand = &command{
	name:  "delete",
	run:   del,
	usage: "delete path",
}
|
||||
|
||||
func del(c *command) {
|
||||
if len(c.flag.Args()) != 1 {
|
||||
c.printUsage()
|
||||
os.Exit(1)
|
||||
}
|
||||
db, err := database.New()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
if err := db.Delete(c.flag.Args()[0]); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
|
@ -0,0 +1,78 @@
|
|||
// Copyright 2013 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
// Command gddo-admin is the GoDoc.org command line administration tool.
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// command describes one sub-command of the gddo-admin tool.
type command struct {
	name  string           // sub-command name, matched against the first command-line argument
	run   func(c *command) // entry point, invoked after the sub-command's flags are parsed
	flag  flag.FlagSet     // flags specific to this sub-command
	usage string           // one-line usage text written to stderr by printUsage
}
|
||||
|
||||
func (c *command) printUsage() {
|
||||
fmt.Fprintf(os.Stderr, "%s %s\n", os.Args[0], c.usage)
|
||||
c.flag.PrintDefaults()
|
||||
}
|
||||
|
||||
// commands lists every sub-command known to the tool; main dispatches to the
// entry whose name matches the first command-line argument.
var commands = []*command{
	blockCommand,
	reindexCommand,
	pruneCommand,
	deleteCommand,
	popularCommand,
	dangleCommand,
	crawlCommand,
}
|
||||
|
||||
func printUsage() {
|
||||
var n []string
|
||||
for _, c := range commands {
|
||||
n = append(n, c.name)
|
||||
}
|
||||
fmt.Fprintf(os.Stderr, "%s %s\n", os.Args[0], strings.Join(n, "|"))
|
||||
flag.PrintDefaults()
|
||||
for _, c := range commands {
|
||||
c.printUsage()
|
||||
}
|
||||
}
|
||||
|
||||
func main() {
|
||||
flag.Usage = printUsage
|
||||
flag.Parse()
|
||||
args := flag.Args()
|
||||
if len(args) >= 1 {
|
||||
for _, c := range commands {
|
||||
if args[0] == c.name {
|
||||
c.flag.Usage = func() {
|
||||
c.printUsage()
|
||||
os.Exit(2)
|
||||
}
|
||||
c.flag.Parse(args[1:])
|
||||
c.run(c)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
printUsage()
|
||||
os.Exit(2)
|
||||
}
|
|
@ -0,0 +1,52 @@
|
|||
// Copyright 2013 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/garyburd/gddo/database"
|
||||
)
|
||||
|
||||
var (
	// popularCommand prints the site's popular-packages list.
	popularCommand = &command{
		name:  "popular",
		usage: "popular",
	}
)

// run is assigned in init rather than in the composite literal above,
// mirroring the pattern used by the other sub-commands — presumably to
// avoid an initialization cycle; TODO confirm it is needed here.
func init() {
	popularCommand.run = popular
}
|
||||
|
||||
func popular(c *command) {
|
||||
if len(c.flag.Args()) != 0 {
|
||||
c.printUsage()
|
||||
os.Exit(1)
|
||||
}
|
||||
db, err := database.New()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
pkgs, err := db.PopularWithScores()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
for _, pkg := range pkgs {
|
||||
fmt.Println(pkg.Path, pkg.Synopsis)
|
||||
}
|
||||
}
|
|
@ -0,0 +1,170 @@
|
|||
// Copyright 2013 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"github.com/garyburd/gddo/database"
|
||||
"github.com/garyburd/gddo/doc"
|
||||
"log"
|
||||
"os"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func jaccardIndex(a, b *doc.Package) float64 {
|
||||
set := make(map[string]int)
|
||||
for i, pdoc := range []*doc.Package{a, b} {
|
||||
mask := 1 << uint(i)
|
||||
for _, f := range pdoc.Files {
|
||||
set[f.Name] |= mask
|
||||
}
|
||||
for _, p := range pdoc.Imports {
|
||||
set[p] |= mask
|
||||
}
|
||||
for _, p := range pdoc.TestImports {
|
||||
set[p] |= mask
|
||||
}
|
||||
for _, f := range pdoc.Funcs {
|
||||
set[f.Name] |= mask
|
||||
}
|
||||
for _, t := range pdoc.Types {
|
||||
set[t.Name] |= mask
|
||||
for _, f := range t.Funcs {
|
||||
set[f.Name] |= mask
|
||||
}
|
||||
for _, f := range t.Methods {
|
||||
set[f.Recv+"."+f.Name] |= mask
|
||||
}
|
||||
}
|
||||
}
|
||||
n := 0
|
||||
for _, bits := range set {
|
||||
if bits == 3 {
|
||||
n += 1
|
||||
}
|
||||
}
|
||||
return float64(n) / float64(len(set))
|
||||
}
|
||||
|
||||
var (
	// pruneCommand removes forks and zero-ranked packages from the database.
	pruneCommand = &command{
		name:  "prune",
		usage: "prune",
	}
	// pruneDryRun reports what would be deleted without deleting anything.
	pruneDryRun = pruneCommand.flag.Bool("n", false, "Dry run.")
)

// run is assigned here rather than in the composite literal above because
// prune refers to pruneDryRun, which in turn refers to pruneCommand; a
// literal assignment would create an initialization cycle.
func init() {
	pruneCommand.run = prune
}
|
||||
|
||||
// prune implements the "prune" sub-command. It walks every package in the
// database and deletes two kinds of entries: packages that look like forks
// of another stored package (same final path element, same package name,
// Jaccard similarity > 0.75) and packages whose entire import-path subtree
// has rank zero. The -n flag makes it a dry run that only logs what would
// be deleted. It takes no positional arguments.
func prune(c *command) {
	if len(c.flag.Args()) != 0 {
		c.printUsage()
		os.Exit(1)
	}
	db, err := database.New()
	if err != nil {
		log.Fatal(err)
	}

	// paths records, for each import path and its ancestors up to the
	// project root, whether anything in that subtree should be kept.
	paths := make(map[string]bool)

	err = db.Do(func(pi *database.PackageInfo) error {
		pdoc := pi.PDoc
		if pdoc.ProjectRoot == "" {
			// Standard-library packages have no project root; never prune.
			return nil
		}

		// suffix is the final path element (including the leading "/"),
		// used to find candidate forks sharing the same package directory name.
		i := strings.LastIndex(pdoc.ImportPath, "/")
		if i < 0 {
			return nil
		}
		suffix := pdoc.ImportPath[i:]

		// A package that imports a candidate is treated as depending on it,
		// not as being a fork of it.
		imports := make(map[string]bool)
		for _, p := range pdoc.Imports {
			imports[p] = true
		}

		// Candidate fork origins are drawn from the package's test imports
		// and references, plus the project root's references when available.
		pathLists := [][]string{pdoc.TestImports, pdoc.XTestImports, pdoc.References}
		if pdoc.ProjectRoot != pdoc.ImportPath {
			if pdocRoot, _, _ := db.GetDoc(pdoc.ProjectRoot); pdocRoot != nil {
				pathLists = append(pathLists, pdocRoot.References)
			}
		}

		fork := ""

	forkCheck:
		for _, list := range pathLists {
			for _, p := range list {
				if p != pdoc.ImportPath && strings.HasSuffix(p, suffix) && !imports[p] {
					pdocTest, _, _ := db.GetDoc(p)
					// Same directory name, same package name and high
					// feature overlap => this package is likely a fork of p.
					if pdocTest != nil && pdocTest.Name == pdoc.Name && jaccardIndex(pdocTest, pdoc) > 0.75 {
						fork = pdocTest.ImportPath
						break forkCheck
					}
				}
			}
		}

		if fork != "" {
			log.Printf("%s is fork of %s", pdoc.ImportPath, fork)
			if !*pruneDryRun {
				// Delete the fork's subpackages first, then the fork itself.
				for _, pkg := range pi.Pkgs {
					if err := db.Delete(pkg.Path); err != nil {
						log.Printf("Error deleting %s, %v", pkg.Path, err)
					}
				}
				if err := db.Delete(pdoc.ImportPath); err != nil {
					log.Printf("Error deleting %s, %v", pdoc.ImportPath, err)
				}
			}
		} else {
			keep := pi.Score > 0
			if pdoc.IsCmd && pdoc.Synopsis != "" && len(pdoc.Doc) > len(pdoc.Synopsis) {
				// Keep a command if there's actually some documentation.
				keep = true
			}
			// Propagate the keep decision up the path hierarchy so a parent
			// directory survives if any descendant is worth keeping.
			p := pdoc.ImportPath
			for {
				paths[p] = paths[p] || keep
				if len(p) <= len(pdoc.ProjectRoot) {
					break
				} else if i := strings.LastIndex(p, "/"); i < 0 {
					break
				} else {
					p = p[:i]
				}
			}
		}
		return nil
	})

	// Delete every path whose whole subtree was marked not-keep.
	for p, keep := range paths {
		if !keep {
			log.Printf("%s has rank 0", p)
			if !*pruneDryRun {
				if err := db.Delete(p); err != nil {
					log.Printf("Error deleting %s, %v", p, err)
				}
			}
		}
	}

	// NOTE(review): the db.Do error is only checked here, after the deletion
	// loop above has already run on possibly partial results — presumably a
	// deliberate best-effort choice; confirm before relying on it.
	if err != nil {
		log.Fatal(err)
	}
}
|
|
@ -0,0 +1,76 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/garyburd/gddo/database"
|
||||
"github.com/garyburd/gddo/doc"
|
||||
)
|
||||
|
||||
// reindexCommand rewrites every stored package document, applying any
// pending data migrations (see fix). Registered in the commands table of
// the gddo-admin main package.
var reindexCommand = &command{
	name:  "reindex",
	run:   reindex,
	usage: "reindex",
}
|
||||
|
||||
// fix is a hook for applying in-place migrations to a package document
// while reindexing. It is currently a no-op: the commented-out skeleton
// below enumerates the values a future migration would need to walk.
func fix(pdoc *doc.Package) {
	/*
		for _, v := range pdoc.Consts {
		}
		for _, v := range pdoc.Vars {
		}
		for _, v := range pdoc.Funcs {
		}
		for _, t := range pdoc.Types {
			for _, v := range t.Consts {
			}
			for _, v := range t.Vars {
			}
			for _, v := range t.Funcs {
			}
			for _, v := range t.Methods {
			}
		}
		for _, notes := range pdoc.Notes {
			for _, v := range notes {
			}
		}
	*/
}
|
||||
|
||||
func reindex(c *command) {
|
||||
if len(c.flag.Args()) != 0 {
|
||||
c.printUsage()
|
||||
os.Exit(1)
|
||||
}
|
||||
db, err := database.New()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
var n int
|
||||
err = db.Do(func(pi *database.PackageInfo) error {
|
||||
n += 1
|
||||
fix(pi.PDoc)
|
||||
return db.Put(pi.PDoc, time.Time{})
|
||||
})
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
log.Printf("Updated %d documents", n)
|
||||
}
|
|
@ -0,0 +1,13 @@
|
|||
# Paths to local checkouts of the third-party front-end sources.
BOOTSTRAP=$(HOME)/src/bootstrap
TIMEAGO=$(HOME)/src/jquery-timeago

# Build the combined site script and the compiled Bootstrap stylesheet.
default: assets/static/site.js assets/static/css/bootstrap.css

# Compile the site's Less sources (with Bootstrap's Less directory on the
# include path) into a single stylesheet.
assets/static/css/bootstrap.css: less/after.less less/before.less less/bootstrap.less less/variables.less
	lessc -I$(BOOTSTRAP)/less -Iless less/bootstrap.less > assets/static/css/bootstrap.css

# Concatenate the third-party scripts and the site script into one file.
assets/static/site.js: \
	$(BOOTSTRAP)/bootstrap/js/bootstrap.min.js \
	$(TIMEAGO)/jquery.timeago.js \
	js/site.js
	cat $^ > $@
|
|
@ -0,0 +1,4 @@
|
|||
<?xml version="1.0"?>
|
||||
<users>
|
||||
<user>6F3E495D5591D0B1308072CA245E8849</user>
|
||||
</users>
|
Двоичный файл не отображается.
После Ширина: | Высота: | Размер: 198 B |
|
@ -0,0 +1 @@
|
|||
google-site-verification: google3d2f3cd4cc2bb44b.html
|
|
@ -0,0 +1,39 @@
|
|||
talks.godoc.org
|
||||
|
||||
@GoDocDotOrg
|
||||
|
||||
* Introduction
|
||||
|
||||
This site plays slide presentations and articles stored on Github and Google Project Hosting in the
|
||||
[[http://godoc.org/code.google.com/p/go.talks/pkg/present][present format]].
|
||||
|
||||
The syntax for URLs is:
|
||||
|
||||
GitHub
|
||||
|
||||
http://talks.godoc.org/github.com/owner/project/file.ext
|
||||
http://talks.godoc.org/github.com/owner/project/sub/directory/file.ext
|
||||
|
||||
Google Code Project Hosting
|
||||
|
||||
http://talks.godoc.org/code.google.com/p/project/file.ext
|
||||
http://talks.godoc.org/code.google.com/p/project/sub/directory/file.ext
|
||||
http://talks.godoc.org/code.google.com/p/project.subrepository/file.ext
|
||||
http://talks.godoc.org/code.google.com/p/project.subrepository/sub/directory/file.ext
|
||||
|
||||
The supported file extensions (.ext) are .slide and .article.
|
||||
|
||||
The .html command is not supported.
|
||||
|
||||
This page is [[http://talks.godoc.org/github.com/garyburd/gddo/gddo-server/assets/presentHome.article][an article]].
|
||||
|
||||
* Talks
|
||||
|
||||
See the [[https://code.google.com/p/go-wiki/wiki/GoTalks][Go Wiki Talks page]]
|
||||
for a list of talks about Go. If you want to share your talk about Go with the
|
||||
community, then please add your talk to the wiki page.
|
||||
|
||||
* Feedback
|
||||
|
||||
Send your ideas, feature requests and questions to info@godoc.org. Report bugs using the
|
||||
[[https://github.com/garyburd/gddo/issues/new][Github Issue Tracker]].
|
|
@ -0,0 +1,2 @@
|
|||
User-agent: *
|
||||
Disallow: *
|
|
@ -0,0 +1,8 @@
|
|||
User-agent: *
|
||||
Disallow: /*?imports
|
||||
Disallow: /*?importers
|
||||
Disallow: /*?import-graph*
|
||||
Disallow: /*?gosrc*
|
||||
|
||||
user-agent: AhrefsBot
|
||||
disallow: /
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
Различия файлов скрыты, потому что одна или несколько строк слишком длинны
|
@ -0,0 +1,85 @@
|
|||
{{define "Head"}}<title>About - GoDoc</title>{{end}}
|
||||
|
||||
{{define "Body"}}
|
||||
<h1>About</h1>
|
||||
|
||||
<p>GoDoc displays documentation for <a href="http://golang.org/">Go</a>
|
||||
packages on <a href="https://bitbucket.org/">Bitbucket</a>, <a
|
||||
href="https://github.com/">Github</a>, <a
|
||||
href="https://launchpad.net/">Launchpad</a> and <a
|
||||
href="http://code.google.com/hosting/">Google Project Hosting</a>.
|
||||
|
||||
<p>The source code for GoDoc is available <a
|
||||
href="https://github.com/garyburd/gddo">on Github</a>.
|
||||
|
||||
<p>GoDoc displays documentation for GOOS=linux unless otherwise noted at the
|
||||
bottom of the documentation page.
|
||||
|
||||
<h4 id="howto">Add a package to GoDoc</h4>
|
||||
|
||||
<p>GoDoc generates documentation from Go source code. The <a
|
||||
href="http://golang.org/doc/articles/godoc_documenting_go_code.html">guidelines</a>
|
||||
for writing documentation for the <a
|
||||
href="http://golang.org/cmd/godoc/">godoc</a> tool apply to GoDoc.
|
||||
|
||||
<p>It's important to write a good summary of the package in the first sentence
|
||||
of the package comment. GoDoc indexes the first sentence and displays the
|
||||
sentence in package lists.
|
||||
|
||||
<p>To add a package to GoDoc, <a href="/">search</a> for the package by import
|
||||
path. If GoDoc does not already have the documentation for the package, then
|
||||
GoDoc will fetch the source from the version control system on the fly and add
|
||||
the documentation.
|
||||
|
||||
<p>GoDoc scrapes the <a href="https://github.com/languages/Go/updated">Github
|
||||
recent updates page</a> to find recently updated packages on Github. For
|
||||
other services, GoDoc checks for updates once per day. You can force GoDoc to
|
||||
refresh the documentation immediately by clicking the refresh link at the
|
||||
bottom of the package documentation page.
|
||||
|
||||
<p>GoDoc crawls package imports to automatically find new packages.
|
||||
|
||||
<h4 id="remove">Remove a package from GoDoc</h4>
|
||||
|
||||
GoDoc automatically removes packages deleted from the version control system
|
||||
when GoDoc checks for updates to the package. You can force GoDoc to remove a
|
||||
deleted package immediately by clicking the refresh link at the bottom of the
|
||||
package documentation page.
|
||||
|
||||
If you do not want GoDoc to display documentation for your package, send mail
|
||||
to info@godoc.org with the import path of the package that you want
|
||||
to remove.
|
||||
|
||||
<h4 id="feedback">Feedback</h4>
|
||||
|
||||
<p>Send your ideas, feature requests and questions to
|
||||
info@godoc.org. Report bugs using the <a
|
||||
href="https://github.com/garyburd/gddo/issues/new">Github Issue
|
||||
Tracker</a>. Join the discussion at the <a
|
||||
href="https://groups.google.com/d/forum/godocorg">GoDoc group</a>. You
|
||||
should follow <a href="https://twitter.com/godocdotorg">GoDoc on Twitter</a>.
|
||||
|
||||
<h4 id="plain-text">Plain Text</h4>
|
||||
|
||||
GoDoc provides plain text output for integration with shell scripts. Use the
|
||||
HTTP Accept header to request a plain text response.
|
||||
|
||||
<p>Search for packages with the term 'sql':
|
||||
<pre>$ curl -H 'Accept: text/plain' http://godoc.org/?q=sql</pre>
|
||||
|
||||
<p>Get the documentation for the standard math package:
|
||||
<pre>$ curl -H 'Accept: text/plain' http://godoc.org/math</pre>
|
||||
|
||||
<h4 id="shortcuts">Keyboard Shortcuts</h4>
|
||||
|
||||
<p>GoDoc has keyboard shortcuts for navigating package documentation
|
||||
pages. Type '?' on a package page for help.
|
||||
|
||||
<h4 id="bookmarklet">Bookmarklet</h4>
|
||||
|
||||
<p>The GoDoc bookmarklet navigates from pages on Bitbucket, Github, Launchpad
|
||||
and Google Project Hosting to the package documentation. To install the
|
||||
bookmarklet, click and drag the following link to your bookmark bar: <a
|
||||
href="javascript:window.location='http://{{.Host|html}}/?q='+encodeURIComponent(window.location)">GoDoc</a>
|
||||
|
||||
{{end}}
|
|
@ -0,0 +1,9 @@
|
|||
{{define "Head"}}<title>Bot - GoDoc</title>{{end}}
|
||||
|
||||
{{define "Body"}}
|
||||
|
||||
<p>GoDocBot is godoc.org's robot for fetching Go documentation from version control systems.
|
||||
|
||||
<p>Contact: info@godoc.org
|
||||
|
||||
{{end}}
|
|
@ -0,0 +1,9 @@
|
|||
{{define "Head"}}{{template "PkgCmdHeader" $}}{{end}}
|
||||
|
||||
{{define "Body"}}{{with .pdoc}}
|
||||
{{template "ProjectNav" $}}
|
||||
<h2>Command {{.|pageName}}</h2>
|
||||
{{template "Errors" $}}
|
||||
{{.Doc|comment}}
|
||||
{{template "PkgCmdFooter" $}}
|
||||
{{end}}{{end}}
|
|
@ -0,0 +1,5 @@
|
|||
{{define "ROOT"}}{{with .pdoc}}
|
||||
COMMAND DOCUMENTATION
|
||||
|
||||
{{.Doc|comment}}
|
||||
{{template "Subdirs" $}}{{end}}{{end}}
|
|
@ -0,0 +1,82 @@
|
|||
|
||||
{{define "Analytics"}}{{with gaAccount}}<script type="text/javascript">
|
||||
var _gaq = _gaq || [];
|
||||
_gaq.push(['_setAccount', '{{.}}']);
|
||||
_gaq.push(['_trackPageview']);
|
||||
(function() {
|
||||
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
|
||||
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
|
||||
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
|
||||
})();
|
||||
</script>{{end}}{{end}}
|
||||
|
||||
{{define "SearchBox"}}
|
||||
<form>
|
||||
<div class="input-append">
|
||||
<input class="span6" name="q" autofocus="autofocus" value="{{.}}" placeholder="Import path or keywords" type="text">
|
||||
<button class="btn" type="submit">Go!</button>
|
||||
</div>
|
||||
</form>
|
||||
{{end}}
|
||||
|
||||
{{define "ProjectNav"}}<div class="flat-well well-small">
|
||||
{{if .pdoc.ProjectRoot}}<a href="{{.pdoc.ProjectURL}}"><strong>{{.pdoc.ProjectName}}:</strong></a>{{else}}<a href="/-/go">Go:</a>{{end}}
|
||||
{{breadcrumbs .pdoc (templateName)}}
|
||||
{{if and .pdoc.Name (equal templateName "pkg.html")}}
|
||||
<span class="pull-right">
|
||||
<a href="#_index">Index</a>
|
||||
{{if hasExamples .pdoc}}<span class="muted">|</span> <a href="#_examples">Examples</a>{{end}}
|
||||
<span class="muted">|</span> <a href="#_files">Files</a>
|
||||
{{if .pkgs}}<span class="muted">|</span> <a href="#_subdirs">Directories</a>{{end}}
|
||||
</span>
|
||||
{{end}}
|
||||
</div>{{end}}
|
||||
|
||||
{{define "Errors"}}{{with .pdoc.Errors}}<div class="well">
|
||||
<p>The <a href="http://golang.org/cmd/go/#Download_and_install_packages_and_dependencies">go get</a>
|
||||
command cannot install this package because of the following issues:
|
||||
<ul>
|
||||
{{range .}}<li>{{.}}{{end}}
|
||||
</ul>
|
||||
</div>{{end}}{{end}}
|
||||
|
||||
{{define "Pkgs"}}
|
||||
<table class="table table-condensed">
|
||||
<thead><tr><th>Path</th><th>Synopsis</th></tr></thead>
|
||||
<tbody>{{range .}}<tr><td>{{if .Path|isValidImportPath}}<a href="/{{.Path}}">{{.Path|importPath}}</a>{{else}}{{.Path|importPath}}{{end}}</td><td>{{.Synopsis|importPath}}</td></tr>
|
||||
{{end}}</tbody>
|
||||
</table>
|
||||
{{end}}
|
||||
|
||||
{{define "PkgCmdHeader"}}{{with .pdoc}}
|
||||
<title>{{.|pageName}} - GoDoc</title>
|
||||
{{if .Synopsis}}
|
||||
<meta name="twitter:title" content="{{if .IsCmd}}Command{{else}}package{{end}} {{.Name}}">
|
||||
<meta property="og:title" content="{{if .IsCmd}}Command{{else}}package{{end}} {{.Name}}">
|
||||
<meta name="description" content="{{.Synopsis}}">
|
||||
<meta name="twitter:description" content="{{.Synopsis}}">
|
||||
<meta property="og:description" content="{{.Synopsis}}">
|
||||
<meta name="twitter:card" content="summary">
|
||||
<meta name="twitter:site" content="@godocdotorg">
|
||||
{{end}}
|
||||
{{if .Errors}}<meta name="robots" content="NOINDEX">{{end}}
|
||||
{{end}}{{end}}
|
||||
|
||||
{{define "PkgCmdFooter"}}
|
||||
{{if $.pkgs}}{{if $.pdoc.Name}}<h3 id="_subdirs">Directories</h3>{{else}}<h3>Directory</h3>{{end}}
|
||||
<table class="table table-condensed">
|
||||
<thead><tr><th>Path</th><th>Synopsis</th></tr></thead>
|
||||
<tbody>{{range $.pkgs}}<tr><td><a href="/{{.Path}}">{{relativePath .Path $.pdoc.ImportPath}}</a><td>{{.Synopsis}}</td></tr>{{end}}</tbody>
|
||||
</table>
|
||||
{{end}}
|
||||
{{with $.pdoc}}
|
||||
<form name="refresh" method="POST" action="/-/refresh" class="form-inline">
|
||||
{{if or .Imports $.importerCount}}Package {{.Name}} {{if .Imports}}imports <a href="?imports">{{.Imports|len}} packages</a> (<a href="?import-graph">graph</a>){{end}}{{if and .Imports $.importerCount}} and {{end}}{{if $.importerCount}}is imported by <a href="?importers">{{$.importerCount}} packages</a>{{end}}.{{end}}
|
||||
{{if not .Updated.IsZero}}Updated <span class="timeago" title="{{.Updated.Format "2006-01-02T15:04:05Z"}}">{{.Updated.Format "2006-01-02"}}</span>{{if or (equal .GOOS "windows") (equal .GOOS "darwin")}} with GOOS={{.GOOS}}{{end}}.
|
||||
<a href="javascript:document.refresh.submit();" title="Refresh this page from the source">Refresh</a>.
|
||||
<input type="hidden" name="path" value="{{.ImportPath}}">
|
||||
{{end}}
|
||||
</form>
|
||||
{{end}}{{end}}
|
||||
|
||||
{{define "jQuery"}}<script src="//ajax.googleapis.com/ajax/libs/jquery/1.8.1/jquery.min.js"></script>{{end}}
|
|
@ -0,0 +1,3 @@
|
|||
{{define "Subdirs"}}{{with $.pkgs}}SUBDIRECTORIES
|
||||
{{range .}}
|
||||
{{.Path}}{{end}}{{end}}{{end}}
|
|
@ -0,0 +1,18 @@
|
|||
{{define "ROOT"}}<!DOCTYPE html><html lang="en">
|
||||
<head>
|
||||
<title>{{.pdoc|pageName}} graph - GoDoc</title>
|
||||
<meta name="robots" content="NOINDEX, NOFOLLOW">
|
||||
<link href="{{staticFile "css/bootstrap.css"}}" rel="stylesheet">
|
||||
</head>
|
||||
<body>
|
||||
<div class="well-small">
|
||||
Package <a href="/{{.pdoc.ImportPath}}">{{.pdoc.Name}}</a>
|
||||
{{if .pdoc.ProjectRoot}}<span class="muted">|</span>
|
||||
{{if .hide}}<a href="?view=import-graph">Show</a>{{else}}<a href="?view=import-graph&hide=1">Hide</a>{{end}}
|
||||
standard package dependencies.
|
||||
{{end}}
|
||||
</div>
|
||||
{{.svg}}
|
||||
</body>
|
||||
{{template "Analytics"}}
|
||||
</html>{{end}}
|
|
@ -0,0 +1,37 @@
|
|||
{{define "Head"}}<title>GoDoc</title>
|
||||
<link type="application/opensearchdescription+xml" rel="search" href="/-/opensearch.xml?v={{fileHash "templates/opensearch.xml"}}"/>{{end}}
|
||||
|
||||
{{define "Body"}}
|
||||
|
||||
<div class="flat-well well-large">
|
||||
<h2 style="margin-top: 0">Search for Go packages.</h2>
|
||||
{{template "SearchBox" ""}}
|
||||
</div>
|
||||
|
||||
<h4>What is this?</h4>
|
||||
|
||||
<p>GoDoc generates <a href="http://golang.org/">Go</a> package documentation
|
||||
on the fly from packages on Bitbucket, Github, Google Project Hosting and
|
||||
Launchpad. Read the <a href="/-/about">About Page</a> for information about
|
||||
adding packages to GoDoc and more.
|
||||
|
||||
<div class="row">
|
||||
<div class="span6">
|
||||
{{with .Popular}}
|
||||
<h4>Popular Packages</h4>
|
||||
<ul class="unstyled">
|
||||
{{range .}}<li><a href="/{{.Path}}">{{.Path}}</a>{{end}}
|
||||
</ul>
|
||||
{{end}}
|
||||
</div>
|
||||
<div class="span6">
|
||||
<h4>More Packages</h4>
|
||||
<ul class="unstyled">
|
||||
<li><a href="/-/index">Index</a>
|
||||
<li><a href="/-/go">Standard Packages</a>
|
||||
<li><a href="https://code.google.com/p/go-wiki/wiki/Projects">Projects @ go-wiki</a>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{{end}}
|
|
@ -0,0 +1,2 @@
|
|||
{{define "ROOT"}}
|
||||
{{end}}
|
|
@ -0,0 +1,7 @@
|
|||
{{define "Head"}}<title>{{.pdoc|pageName}} importers - GoDoc</title><meta name="robots" content="NOINDEX, NOFOLLOW">{{end}}
|
||||
|
||||
{{define "Body"}}
|
||||
{{template "ProjectNav" $}}
|
||||
<h3>Packages that import {{.pdoc.Name|html}}</h3>
|
||||
{{template "Pkgs" $.pkgs}}
|
||||
{{end}}
|
|
@ -0,0 +1,7 @@
|
|||
{{define "Head"}}<title>{{.pdoc|pageName}} imports - GoDoc</title><meta name="robots" content="NOINDEX, NOFOLLOW">{{end}}
|
||||
|
||||
{{define "Body"}}
|
||||
{{template "ProjectNav" $}}
|
||||
<h3>Packages imported by {{.pdoc.Name|html}}</h3>
|
||||
{{template "Pkgs" $.pkgs}}
|
||||
{{end}}
|
|
@ -0,0 +1,14 @@
|
|||
{{define "Head"}}<title>Index - GoDoc</title>{{end}}
|
||||
|
||||
{{define "Body"}}
|
||||
<h1>Index</h1>
|
||||
|
||||
<p>The following is a list of '<a
|
||||
href="http://golang.org/cmd/go/#hdr-Download_and_install_packages_and_dependencies">go
|
||||
get</a>'able packages viewed previously on godoc.org. A <a href="/-/go">list of Go standard packages</a> is also available.
|
||||
|
||||
{{htmlComment "\nPlease use http://api.godoc.org/packages instead of scraping this page.\n"}}
|
||||
{{template "Pkgs" .pkgs}}
|
||||
|
||||
<p>Number of packages: {{len .pkgs}}.
|
||||
{{end}}
|
|
@ -0,0 +1,10 @@
|
|||
{{define "Head"}}<title>{{.pdoc.Name}}.{{.name}} - GoDoc</title><meta name="robots" content="NOINDEX, NOFOLLOW">{{end}}
|
||||
|
||||
{{define "Body"}}
|
||||
{{template "ProjectNav" $}}
|
||||
<h3>Type {{.name}}</h3>
|
||||
<br>is interface {{.mset.IsInterface}}
|
||||
{{range .mset.Errors}}<br>error: {{.}}{{end}}
|
||||
{{range .mset.EmbeddedFields}}<br>field: {{.Path}} {{.Name}} {{.IsPtr}}{{end}}
|
||||
{{range .mset.Methods}}<br>method: {{.Name}} {{.Fingerprint}} {{.IsPtr}}{{end}}
|
||||
{{end}}
|
|
@ -0,0 +1,52 @@
|
|||
{{define "ROOT"}}<!DOCTYPE html><html lang="en">
|
||||
<head profile="http://a9.com/-/spec/opensearch/1.1/">
|
||||
<meta charset="utf-8">
|
||||
<link href="{{staticFile "css/bootstrap.css"}}" rel="stylesheet">
|
||||
{{template "Head" $}}
|
||||
</head>
|
||||
<body>
|
||||
<div class="container">
|
||||
<div class="navbar navbar-inverse">
|
||||
<div class="navbar-inner">
|
||||
<a class="brand" href="/">GoDoc</a>
|
||||
<ul class="nav">
|
||||
<li{{if equal "home.html" templateName}} class="active"{{end}}><a href="/">Home</a></li>
|
||||
<li{{if equal "index.html" templateName}} class="active"{{end}}><a href="/-/index">Index</a></li>
|
||||
<li{{if equal "about.html" templateName}} class="active"{{end}}><a href="/-/about">About</a></li>
|
||||
</ul>
|
||||
<form class="navbar-search pull-right" action="/"><input id="_search" type="text" class="search-query" name="q" placeholder="Search"></form>
|
||||
</div>
|
||||
</div>
|
||||
{{template "Body" $}}
|
||||
<div class="container">
|
||||
<div class="flat-well well-small"><a href="http://twitter.com/GoDocDotOrg">@GoDocDotOrg</a>
|
||||
<span class="muted">|</span> <a href="mailto:info@godoc.org">Feedback</a>
|
||||
<span class="muted">|</span> <a href="https://github.com/garyburd/gddo/issues">Website Issues</a>
|
||||
<span class="pull-right"><a href="#">Back to top</a></span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div id="_shortcuts" tabindex="-1" class="modal hide">
|
||||
<div class="modal-header">
|
||||
<h4>Keyboard Shortcuts</h4>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<table>{{$mutePkg := not (equal "pkg.html" templateName)}}
|
||||
<tr><td align="right"><b>?</b></td><td> : This menu</td></tr>
|
||||
<tr><td align="right"><b>/</b></td><td> : Search site</td></tr>
|
||||
<tr{{if $mutePkg}} class="muted"{{end}}><td align="right"><b>.</b></td><td> : Go to export</td></tr>
|
||||
<tr><td align="right"><b>g</b> then <b>g</b></td><td> : Go to top of page</td></tr>
|
||||
<tr><td align="right"><b>g</b> then <b>b</b></td><td> : Go to end of page</td></tr>
|
||||
<tr{{if $mutePkg}} class="muted"{{end}}><td align="right"><b>g</b> then <b>i</b></td><td> : Go to index</td></tr>
|
||||
<tr{{if $mutePkg}} class="muted"{{end}}><td align="right"><b>g</b> then <b>e</b></td><td> : Go to examples</td></tr>
|
||||
</table>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<button class="btn" data-dismiss="modal" aria-hidden="true">Close</button>
|
||||
</div>
|
||||
</div>
|
||||
{{template "jQuery"}}<script src="{{staticFile "site.js"}}"></script>{{template "Analytics"}}
|
||||
</body>
|
||||
</html>
|
||||
{{end}}
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
{{define "Head"}}<title>Not Found - GoDoc</title>{{end}}
|
||||
|
||||
{{define "Body"}}
|
||||
<h2>Not Found</h2>
|
||||
<p>Oh snap! Our team of gophers could not find the web page you are looking for. Try one of these pages:
|
||||
<ul>
|
||||
<li><a href="/">Home</a>
|
||||
<li><a href="/-/index">Package Index</a>
|
||||
</ul>
|
||||
{{end}}
|
|
@ -0,0 +1,2 @@
|
|||
{{define "ROOT"}}NOT FOUND
|
||||
{{end}}
|
|
@ -0,0 +1,9 @@
|
|||
{{define "ROOT"}}<?xml version="1.0"?>
|
||||
<OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/">
|
||||
<InputEncoding>UTF-8</InputEncoding>
|
||||
<ShortName>GoDoc</ShortName>
|
||||
<Description>GoDoc: Go Documentation Service</Description>
|
||||
<Url type="text/html" method="get" template="http://{{.}}/?q={searchTerms}"/>
|
||||
<Url type="application/x-suggestions+json" template="http://{{.}}/-/suggest?q={searchTerms}"/>
|
||||
</OpenSearchDescription>
|
||||
{{end}}
|
|
@ -0,0 +1,101 @@
|
|||
{{define "Head"}}{{template "PkgCmdHeader" $}}{{end}}
|
||||
|
||||
{{define "Body"}}{{with .pdoc}}
|
||||
{{template "ProjectNav" $}}
|
||||
{{if .Name}}<h2>package {{.Name}}</h2>{{end}}
|
||||
{{template "Errors" $}}
|
||||
{{if .Name}}
|
||||
<p><code>import "{{.ImportPath}}"</code>
|
||||
{{.Doc|comment}}
|
||||
{{template "Examples" map "object" . "name" "package"}}
|
||||
|
||||
<h3 id="_index">Index</h3>
|
||||
{{if .Truncated}}<div class="alert">The documentation displayed here is incomplete. Use the godoc command to read the complete documentation.</div>{{end}}
|
||||
|
||||
<ul class="unstyled">
|
||||
{{if .Consts}}<li><a href="#_constants">Constants</a>{{end}}
|
||||
{{if .Vars}}<li><a href="#_variables">Variables</a>{{end}}
|
||||
{{range .Funcs}}<li><a href="#{{.Name}}">{{.Decl.Text}}</a>{{end}}
|
||||
{{range $t := .Types}}
|
||||
<li><a href="#{{.Name}}">type {{.Name}}</a>
|
||||
{{if or .Funcs .Methods}}<ul>{{end}}
|
||||
{{range .Funcs}}<li><a href="#{{.Name}}">{{.Decl.Text}}</a>{{end}}
|
||||
{{range .Methods}}<li><a href="#{{$t.Name}}.{{.Name}}">{{.Decl.Text}}</a>{{end}}
|
||||
{{if or .Funcs .Methods}}</ul>{{end}}
|
||||
{{end}}
|
||||
</ul>
|
||||
|
||||
{{if hasExamples .}}<h3 id="_examples">Examples</h3><ul class="unstyled">
|
||||
{{if .Examples}}{{template "ExampleLink" map "href" "package" "text" "package"}}{{end}}
|
||||
{{range .Funcs}}{{if .Examples}}{{template "ExampleLink" map "href" .Name "text" (printf "func %s" .Name)}}{{end}}{{end}}
|
||||
{{range $t := .Types}}
|
||||
{{if .Examples}}{{template "ExampleLink" map "href" .Name "text" (printf "type %s" .Name)}}{{end}}
|
||||
{{range .Funcs}}{{if .Examples}}{{template "ExampleLink" map "href" .Name "text" (printf "func %s" .Name)}}{{end}}{{end}}
|
||||
{{range .Methods}}{{if .Examples}}{{template "ExampleLink" map "href" (printf "%s-%s" $t.Name .Name) "text" (printf "func (%s) %s" .Recv .Name)}}{{end}}{{end}}
|
||||
{{end}}
|
||||
</ul>{{else}}<span id="_examples"></span>{{end}}
|
||||
|
||||
{{if .Consts}}<h3 id="_constants">Constants</h3>{{range .Consts}}<pre class="pre-x-scrollable">{{code .Decl nil}}</pre>{{.Doc|comment}}{{end}}{{end}}
|
||||
{{if .Vars}}<h3 id="_variables">Variables</h3>{{range .Vars}}<pre class="pre-x-scrollable">{{code .Decl nil}}</pre>{{.Doc|comment}}{{end}}{{end}}
|
||||
|
||||
{{range .Funcs}}<h3 id="{{.Name}}">func {{sourceLink $.pdoc .Pos .Name}}</h3>
|
||||
<pre>{{code .Decl nil}}</pre>{{.Doc|comment}}
|
||||
{{template "Examples" map "object" . "name" .Name}}
|
||||
{{end}}
|
||||
|
||||
{{range $t := .Types}}<h3 id="{{.Name}}">type {{sourceLink $.pdoc .Pos .Name}}</h3>
|
||||
<pre class="pre-x-scrollable">{{code .Decl $t}}</pre>{{.Doc|comment}}
|
||||
{{range .Consts}}<pre class="pre-x-scrollable">{{code .Decl nil}}</pre>{{.Doc|comment}}{{end}}
|
||||
{{range .Vars}}<pre class="pre-x-scrollable">{{code .Decl nil}}</pre>{{.Doc|comment}}{{end}}
|
||||
{{template "Examples" map "object" . "name" .Name}}
|
||||
|
||||
{{range .Funcs}}<h4 id="{{.Name}}">func {{sourceLink $.pdoc .Pos .Name}}</h4>
|
||||
<pre>{{code .Decl nil}}</pre>{{.Doc|comment}}
|
||||
{{template "Examples" map "object" . "name" .Name}}
|
||||
{{end}}
|
||||
|
||||
{{range .Methods}}<h4 id="{{$t.Name}}.{{.Name}}">func ({{.Recv}}) {{sourceLink $.pdoc .Pos .Name}}</h4>
|
||||
<pre>{{code .Decl nil}}</pre>{{.Doc|comment}}
|
||||
{{template "Examples" map "object" . "name" (printf "%s-%s" $t.Name .Name)}}
|
||||
{{end}}
|
||||
|
||||
{{end}}{{/* range .Types */}}
|
||||
{{end}}{{/* if .Name */}}
|
||||
|
||||
{{with .Notes}}{{with .BUG}}<h3 id="_bugs">Bugs</h3>{{range .}}<p>{{sourceLink $.pdoc .Pos "☞"}} {{.Body}}{{end}}{{end}}{{end}}
|
||||
|
||||
{{if .Name}}<h3 id="_files">{{with .BrowseURL}}<a href="{{.}}">Files</a>{{else}}Package Files{{end}}</h3>
|
||||
<p>{{range .Files}}{{if .URL}}<a href="{{.URL}}">{{.Name}}</a>{{else}}{{.Name}}{{end}} {{end}}</p>
|
||||
{{end}}
|
||||
{{template "PkgCmdFooter" $}}
|
||||
<div id="_jump" tabindex="-1" class="modal hide">
|
||||
<form id="_jump_form" class="modal-form">
|
||||
<div class="modal-header">
|
||||
<h4>Go to export</h4>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<input id="_jump_text" class="span5" autocomplete="off" type="text">
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<button type="button" class="btn" data-dismiss="modal">Close</button>
|
||||
<button type="submit" class="btn btn-primary">Go</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
{{end}}{{end}}
|
||||
|
||||
{{define "Examples"}}{{with .object.Examples}}<div class="accordian" id="_example_{{$.name}}">{{range .}}
|
||||
<div class="accordion-group">
|
||||
<div class="accordion-heading"><a class="accordion-toggle" data-toggle="collapse" href="#_ex_{{$.name}}{{with .Name}}-{{.}}{{end}}">Example{{with .Name}} ({{.}}){{end}}</a></div>
|
||||
<div id="_ex_{{$.name}}{{with .Name}}-{{.}}{{end}}" class="accordion-body collapse"><div class="accordion-inner">
|
||||
{{with .Doc}}<p>{{.|comment}}{{end}}
|
||||
<p>Code:{{if .Play}}<span class="pull-right"><a href="?play={{$.name}}{{with .Name}}&name={{.}}{{end}}">play</a> </span>{{end}}
|
||||
<pre class="pre-x-scrollable">{{code .Code nil}}</pre>
|
||||
{{with .Output}}<p>Output:<pre class="pre-x-scrollable">{{.}}</pre>{{end}}
|
||||
</div></div>
|
||||
</div>
|
||||
{{end}}
|
||||
</div>
|
||||
{{end}}{{end}}
|
||||
|
||||
{{define "ExampleLink"}}<li><a href="#_example_{{.href}}" onclick="$('[id|=_ex_{{.href}}]').addClass('in').height('auto')">{{.text}}</a>{{end}}
|
|
@ -0,0 +1,38 @@
|
|||
{{define "ROOT"}}{{with .pdoc}}PACKAGE{{if .Name}}
|
||||
|
||||
package {{.Name}}
|
||||
import "{{.ImportPath}}"
|
||||
|
||||
{{.Doc|comment}}
|
||||
{{if .Consts}}
|
||||
CONSTANTS
|
||||
|
||||
{{range .Consts}}{{.Decl.Text}}
|
||||
{{.Doc|comment}}{{end}}
|
||||
{{end}}{{if .Vars}}
|
||||
VARIABLES
|
||||
|
||||
{{range .Vars}}{{.Decl.Text}}
|
||||
{{.Doc|comment}}{{end}}
|
||||
{{end}}{{if .Funcs}}
|
||||
FUNCTIONS
|
||||
|
||||
{{range .Funcs}}{{.Decl.Text}}
|
||||
{{.Doc|comment}}
|
||||
{{end}}{{end}}{{if .Types}}
|
||||
TYPES
|
||||
|
||||
{{range .Types}}{{.Decl.Text}}
|
||||
{{.Doc|comment}}
|
||||
{{range .Consts}}{{.Decl.Text}}
|
||||
{{.Doc|comment}}
|
||||
{{end}}{{range .Vars}}{{.Decl.Text}}
|
||||
{{.Doc|comment}}
|
||||
{{end}}{{range .Funcs}}{{.Decl.Text}}
|
||||
{{.Doc|comment}}
|
||||
{{end}}{{range .Methods}}{{.Decl.Text}}
|
||||
{{.Doc|comment}}
|
||||
{{end}}{{end}}
|
||||
{{end}}
|
||||
{{template "Subdirs" $}}
|
||||
{{end}}{{end}}{{end}}
|
|
@ -0,0 +1,10 @@
|
|||
{{define "Head"}}<title>{{.q}} - GoDoc</title>{{end}}
|
||||
|
||||
{{define "Body"}}
|
||||
{{template "SearchBox" .q}}
|
||||
{{if .pkgs}}
|
||||
{{template "Pkgs" .pkgs}}
|
||||
{{else}}
|
||||
<p>No packages found.
|
||||
{{end}}
|
||||
{{end}}
|
|
@ -0,0 +1,2 @@
|
|||
{{define "ROOT"}}{{range .pkgs}}{{.Path}} {{.Synopsis}}
|
||||
{{end}}{{end}}
|
|
@ -0,0 +1,7 @@
|
|||
{{define "Head"}}<title>Standard Packages - GoDoc</title>{{end}}
|
||||
|
||||
{{define "Body"}}
|
||||
<h1>Go Standard Packages</h1>
|
||||
{{template "Pkgs" .pkgs}}
|
||||
<p>View the official documentation at <a href="http://golang.org/pkg/">golang.org</a>.
|
||||
{{end}}
|
|
@ -0,0 +1,100 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
"path"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// importPathFromGoogleBrowse converts submatches of the Google Project
// Hosting source-browser URL pattern to a Go import path. m[1] is the
// project name, m[2] the browsed directory, m[3] an optional query string
// (which may name a sub-repository via "repo="), and m[4] an optional
// fragment that may name an hg path ("#hg%2F...").
func importPathFromGoogleBrowse(m []string) string {
	project, dir := m[1], m[2]
	if dir == "" {
		dir = "/"
	} else {
		// Drop a single trailing slash, if present.
		dir = strings.TrimSuffix(dir, "/")
	}
	subrepo := ""
	if q := m[3]; q != "" {
		// q starts with '?'; parse the rest as a query string.
		values, _ := url.ParseQuery(q[1:])
		if r := values.Get("repo"); r != "" {
			subrepo = "." + r
		}
	}
	if strings.HasPrefix(m[4], "#hg%2F") {
		unescaped, _ := url.QueryUnescape(m[4][len("#hg%2f"):])
		// Keep only the portion before any remaining escape.
		if i := strings.IndexRune(unescaped, '%'); i >= 0 {
			unescaped = unescaped[:i]
		}
		dir += "/" + unescaped
	}
	return "code.google.com/p/" + project + subrepo + dir
}
|
||||
|
||||
// browsePatterns maps URLs of well-known VCS source browsers to Go import
// paths. Each entry pairs a URL regexp with a function that converts the
// regexp's submatches to an import path. Patterns are tried in order, so
// the catch-all https? entry must stay last.
var browsePatterns = []struct {
	pat *regexp.Regexp
	fn  func([]string) string
}{
	{
		// Github tree browser.
		regexp.MustCompile(`^https?://(github\.com/[^/]+/[^/]+)(?:/tree/[^/]+(/.*))?$`),
		func(m []string) string { return m[1] + m[2] },
	},
	{
		// Github file browser.
		regexp.MustCompile(`^https?://(github\.com/[^/]+/[^/]+)/blob/[^/]+/(.*)$`),
		func(m []string) string {
			// Drop the file name; the import path is the directory.
			d := path.Dir(m[2])
			if d == "." {
				return m[1]
			}
			return m[1] + "/" + d
		},
	},
	{
		// Bitbucket source browser.
		regexp.MustCompile(`^https?://(bitbucket\.org/[^/]+/[^/]+)(?:/src/[^/]+(/[^?]+)?)?`),
		func(m []string) string { return m[1] + m[2] },
	},
	{
		// Google Project Hosting source browser.
		regexp.MustCompile(`^http:/+code\.google\.com/p/([^/]+)/source/browse(/[^?#]*)?(\?[^#]*)?(#.*)?$`),
		importPathFromGoogleBrowse,
	},
	{
		// Launchpad source browser.
		regexp.MustCompile(`^https?:/+bazaar\.(launchpad\.net/.*)/files$`),
		func(m []string) string { return m[1] },
	},
	{
		// Fallback: any http(s) URL; strip the scheme and surrounding slashes.
		regexp.MustCompile(`^https?://(.+)$`),
		func(m []string) string { return strings.Trim(m[1], "/") },
	},
}
|
||||
|
||||
// isBrowseURL returns importPath and true if the URL s looks like a URL for
// a VCS source browser (GitHub, Bitbucket, Google Code, Launchpad, or any
// other http(s) URL via the fallback pattern). The first matching entry in
// browsePatterns wins.
func isBrowseURL(s string) (importPath string, ok bool) {
	for _, c := range browsePatterns {
		if m := c.pat.FindStringSubmatch(s); m != nil {
			return c.fn(m), true
		}
	}
	return "", false
}
|
|
@ -0,0 +1,47 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
// isBrowseURLTests lists source-browser URLs together with the import path
// that isBrowseURL is expected to derive from each; ok reports whether the
// URL should be recognized at all.
var isBrowseURLTests = []struct {
	s          string
	importPath string
	ok         bool
}{
	{"https://github.com/garyburd/gddo/blob/master/doc/code.go", "github.com/garyburd/gddo/doc", true},
	{"https://github.com/garyburd/go-oauth/blob/master/.gitignore", "github.com/garyburd/go-oauth", true},
	{"https://bitbucket.org/user/repo/src/bd0b661a263e/p1/p2?at=default", "bitbucket.org/user/repo/p1/p2", true},
	{"https://bitbucket.org/user/repo/src", "bitbucket.org/user/repo", true},
	{"https://bitbucket.org/user/repo", "bitbucket.org/user/repo", true},
	{"https://github.com/user/repo", "github.com/user/repo", true},
	{"https://github.com/user/repo/tree/master/p1", "github.com/user/repo/p1", true},
	{"http://code.google.com/p/project", "code.google.com/p/project", true},
}
|
||||
|
||||
func TestIsBrowseURL(t *testing.T) {
|
||||
for _, tt := range isBrowseURLTests {
|
||||
importPath, ok := isBrowseURL(tt.s)
|
||||
if tt.ok {
|
||||
if importPath != tt.importPath || ok != true {
|
||||
t.Errorf("IsBrowseURL(%q) = %q, %v; want %q %v", tt.s, importPath, ok, tt.importPath, true)
|
||||
}
|
||||
} else if ok {
|
||||
t.Errorf("IsBrowseURL(%q) = %q, %v; want _, false", tt.s, importPath, ok)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,52 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
// This file implements an http.Client with request timeouts set by command
|
||||
// line flags. The logic is not perfect, but the code is short.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"log"
|
||||
"net"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
var (
|
||||
dialTimeout = flag.Duration("dial_timeout", 5*time.Second, "Timeout for dialing an HTTP connection.")
|
||||
requestTimeout = flag.Duration("request_timeout", 20*time.Second, "Time out for roundtripping an HTTP request.")
|
||||
)
|
||||
|
||||
// timeoutDial dials a network connection using the timeout given by the
// -dial_timeout flag. It is installed as the Dial function of httpTransport.
func timeoutDial(network, addr string) (net.Conn, error) {
	return net.DialTimeout(network, addr, *dialTimeout)
}
|
||||
|
||||
// transport wraps an http.Transport and cancels any request that has not
// completed within the duration given by the -request_timeout flag.
type transport struct {
	t http.Transport
}

// RoundTrip implements http.RoundTripper. It arms a timer that cancels the
// request after *requestTimeout; the timer is stopped when the round trip
// returns. NOTE: because the timer is stopped at return, reading the
// response body afterwards is not covered by this timeout.
func (t *transport) RoundTrip(req *http.Request) (*http.Response, error) {
	timer := time.AfterFunc(*requestTimeout, func() {
		t.t.CancelRequest(req)
		log.Printf("Canceled request for %s", req.URL)
	})
	defer timer.Stop()
	resp, err := t.t.RoundTrip(req)
	return resp, err
}
|
||||
|
||||
// httpTransport and httpClient are shared by all outbound HTTP requests.
// NOTE(review): *requestTimeout is dereferenced here at package init time,
// before flag.Parse runs, so ResponseHeaderTimeout is always half of the
// flag's default value regardless of the command line — confirm whether
// this is intended.
var httpTransport = &transport{t: http.Transport{Dial: timeoutDial, ResponseHeaderTimeout: *requestTimeout / 2}}
var httpClient = &http.Client{Transport: httpTransport}
|
|
@ -0,0 +1,148 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/garyburd/gddo/doc"
|
||||
)
|
||||
|
||||
var nestedProjectPat = regexp.MustCompile(`/(?:github\.com|launchpad\.net|code\.google\.com/p|bitbucket\.org|labix\.org)/`)
|
||||
|
||||
func exists(path string) bool {
|
||||
b, err := db.Exists(path)
|
||||
if err != nil {
|
||||
b = false
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
// crawlDoc fetches the package documentation from the VCS and updates the
// database. source labels the caller ("new" or "crawl") for logging; path is
// the import path; pdoc is the previously stored package (nil if none);
// nextCrawl is the previously scheduled crawl time (zero if none). It
// returns the up-to-date package, or nil with an error when the fetch
// failed. hasSubdirs is currently unused in this body.
func crawlDoc(source string, path string, pdoc *doc.Package, hasSubdirs bool, nextCrawl time.Time) (*doc.Package, error) {
	// message accumulates log fields; the deferred call emits one log line
	// per invocation, whatever the outcome.
	message := []interface{}{source}
	defer func() {
		message = append(message, path)
		log.Println(message...)
	}()

	// Record how many hours late this crawl is, if it was scheduled.
	if !nextCrawl.IsZero() {
		d := time.Since(nextCrawl) / time.Hour
		if d > 0 {
			message = append(message, "late:", int64(d))
		}
	}

	// Use the stored Etag for a conditional fetch when we have a prior copy.
	etag := ""
	if pdoc != nil {
		etag = pdoc.Etag
		message = append(message, "etag:", etag)
	}

	start := time.Now()
	var err error
	if i := strings.Index(path, "/src/pkg/"); i > 0 && doc.IsGoRepoPath(path[i+len("/src/pkg/"):]) {
		// Go source tree mirror.
		pdoc = nil
		err = doc.NotFoundError{Message: "Go source tree mirror."}
	} else if i := strings.Index(path, "/libgo/go/"); i > 0 && doc.IsGoRepoPath(path[i+len("/libgo/go/"):]) {
		// Go Frontend source tree mirror.
		pdoc = nil
		err = doc.NotFoundError{Message: "Go Frontend source tree mirror."}
	} else if m := nestedProjectPat.FindStringIndex(path); m != nil && exists(path[m[0]+1:]) {
		// Path embeds another known project's path (e.g. a vendored copy).
		pdoc = nil
		err = doc.NotFoundError{Message: "Copy of other project."}
	} else if blocked, e := db.IsBlocked(path); blocked && e == nil {
		pdoc = nil
		err = doc.NotFoundError{Message: "Blocked."}
	} else {
		// Normal case: fetch from the VCS, conditional on etag.
		var pdocNew *doc.Package
		pdocNew, err = doc.Get(httpClient, path, etag)
		message = append(message, "fetch:", int64(time.Since(start)/time.Millisecond))
		// On ErrNotModified keep the previously stored package.
		if err != doc.ErrNotModified {
			pdoc = pdocNew
		}
	}

	// Schedule the next crawl; back off 7x for github.com paths and for
	// packages that had build errors.
	nextCrawl = start.Add(*maxAge)
	if strings.HasPrefix(path, "github.com/") || (pdoc != nil && len(pdoc.Errors) > 0) {
		nextCrawl = start.Add(*maxAge * 7)
	}

	switch {
	case err == nil:
		// Fresh documentation: store it.
		message = append(message, "put:", pdoc.Etag)
		if err := db.Put(pdoc, nextCrawl); err != nil {
			log.Printf("ERROR db.Put(%q): %v", path, err)
		}
	case err == doc.ErrNotModified:
		// Unchanged: just bump the crawl schedule.
		message = append(message, "touch")
		if err := db.SetNextCrawlEtag(pdoc.ProjectRoot, pdoc.Etag, nextCrawl); err != nil {
			log.Printf("ERROR db.SetNextCrawl(%q): %v", path, err)
		}
	case doc.IsNotFound(err):
		// Gone (or filtered above): remove from the database.
		message = append(message, "notfound:", err)
		if err := db.Delete(path); err != nil {
			log.Printf("ERROR db.Delete(%q): %v", path, err)
		}
	default:
		message = append(message, "ERROR:", err)
		return nil, err
	}

	return pdoc, nil
}
|
||||
|
||||
// crawl is a background loop (never returns) that alternates between two
// jobs each tick: crawling a newly submitted import path, or re-crawling
// the existing package whose scheduled crawl time is due.
func crawl(interval time.Duration) {
	for {
		time.Sleep(interval)

		// Look for new package to crawl.

		importPath, err := db.GetNewCrawl()
		if err != nil {
			log.Printf("db.GetNewCrawl() returned error %v", err)
			continue
		}
		if importPath != "" {
			// Mark paths that fail or yield no package as bad so they are
			// not retried as "new" again.
			if pdoc, err := crawlDoc("new", importPath, nil, false, time.Time{}); err != nil || pdoc == nil {
				if err := db.SetBadCrawl(importPath); err != nil {
					log.Printf("ERROR db.SetBadCrawl(%q): %v", importPath, err)
				}
			}
			continue
		}

		// Crawl existing doc.

		pdoc, pkgs, nextCrawl, err := db.Get("-")
		if err != nil {
			log.Printf("db.Get(\"-\") returned error %v", err)
			continue
		}
		// Nothing stored, or the next crawl is not due yet.
		if pdoc == nil || nextCrawl.After(time.Now()) {
			continue
		}
		if _, err = crawlDoc("crawl", pdoc.ImportPath, pdoc, len(pkgs) > 0, nextCrawl); err != nil {
			// Touch package so that crawl advances to next package.
			if err := db.SetNextCrawlEtag(pdoc.ProjectRoot, pdoc.Etag, time.Now().Add(*maxAge/3)); err != nil {
				log.Printf("ERROR db.TouchLastCrawl(%q): %v", pdoc.ImportPath, err)
			}
		}
	}
}
|
|
@ -0,0 +1,111 @@
|
|||
// Copyright 2013 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
var githubProjectPat = regexp.MustCompile(`href="/([^/]+/[^/]+)/stargazers"`)
|
||||
var githubUpdatedPat = regexp.MustCompile(`datetime="([^"]+)"`)
|
||||
|
||||
func readGithubUpdates() (map[string]string, error) {
|
||||
updates := make(map[string]string)
|
||||
for i := 0; i < 2; i++ {
|
||||
resp, err := http.Get("https://github.com/languages/Go/updated?page=" + strconv.Itoa(i+1))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
p, err := ioutil.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for {
|
||||
m := githubProjectPat.FindSubmatchIndex(p)
|
||||
if m == nil {
|
||||
break
|
||||
}
|
||||
ownerRepo := string(p[m[2]:m[3]])
|
||||
p = p[m[1]:]
|
||||
|
||||
m = githubUpdatedPat.FindSubmatchIndex(p)
|
||||
if m == nil {
|
||||
return nil, fmt.Errorf("updated not found for %s", ownerRepo)
|
||||
}
|
||||
updated := string(p[m[2]:m[3]])
|
||||
p = p[m[1]:]
|
||||
|
||||
if _, found := updates[ownerRepo]; !found {
|
||||
updates[ownerRepo] = updated
|
||||
}
|
||||
}
|
||||
}
|
||||
if len(updates) == 0 {
|
||||
return nil, errors.New("no updates found")
|
||||
}
|
||||
return updates, nil
|
||||
}
|
||||
|
||||
// crawlGithubUpdates is a background loop that periodically scrapes
// GitHub's recently-updated Go repository listing and schedules a crawl
// for every repository whose update time changed since the last scrape.
// The previous snapshot is persisted as a gob in the database under the
// "ghupdates" key so restarts do not re-trigger every repository. The
// function loops forever; the deferred log line only fires if it somehow
// exits.
func crawlGithubUpdates(interval time.Duration) {
	defer log.Println("ERROR, exiting github update scraper")

	const key = "ghupdates"
	// Skip the sleep on the very first iteration so the initial scrape
	// happens immediately.
	sleep := false
	for {
		if sleep {
			time.Sleep(interval)
		}
		sleep = true

		updates, err := readGithubUpdates()
		if err != nil {
			log.Println("ERROR github crawl:", err)
			continue
		}
		var prev map[string]string
		if err := db.GetGob(key, &prev); err != nil {
			log.Println("ERROR get prev updates:", err)
			continue
		}
		if prev == nil {
			prev = make(map[string]string)
		}
		for ownerRepo, t := range updates {
			// Schedule a crawl only when the timestamp changed.
			if prev[ownerRepo] != t {
				d := time.Duration(0)
				if prev[ownerRepo] != "" {
					// Delay crawl if repo was updated recently.
					d = time.Hour
				}
				log.Printf("Set next crawl for %s to %v from now", ownerRepo, d)
				if err := db.SetNextCrawl("github.com/"+ownerRepo, time.Now().Add(d)); err != nil {
					log.Println("ERROR set next crawl:", err)
				}
			}
		}
		// Persist this scrape as the new baseline.
		if err := db.PutGob(key, updates); err != nil {
			log.Println("ERROR put updates:", err)
			continue
		}
	}
}
|
|
@ -0,0 +1,56 @@
|
|||
// Copyright 2013 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os/exec"
|
||||
"strings"
|
||||
|
||||
"github.com/garyburd/gddo/database"
|
||||
"github.com/garyburd/gddo/doc"
|
||||
)
|
||||
|
||||
func renderGraph(pdoc *doc.Package, pkgs []database.Package, edges [][2]int) ([]byte, error) {
|
||||
var in, out bytes.Buffer
|
||||
|
||||
fmt.Fprintf(&in, "digraph %s { \n", pdoc.Name)
|
||||
for i, pkg := range pkgs {
|
||||
fmt.Fprintf(&in, " n%d [label=\"%s\", URL=\"/%s\", tooltip=\"%s\"];\n",
|
||||
i, pkg.Path, pkg.Path,
|
||||
strings.Replace(pkg.Synopsis, `"`, `\"`, -1))
|
||||
}
|
||||
for _, edge := range edges {
|
||||
fmt.Fprintf(&in, " n%d -> n%d;\n", edge[0], edge[1])
|
||||
}
|
||||
in.WriteString("}")
|
||||
|
||||
cmd := exec.Command("dot", "-Tsvg")
|
||||
cmd.Stdin = &in
|
||||
cmd.Stdout = &out
|
||||
if err := cmd.Run(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
p := out.Bytes()
|
||||
if i := bytes.Index(p, []byte("<svg")); i < 0 {
|
||||
return nil, errors.New("<svg not found")
|
||||
} else {
|
||||
p = p[i:]
|
||||
}
|
||||
return p, nil
|
||||
}
|
|
@ -0,0 +1,134 @@
|
|||
// Client-side behavior for godoc.org pages: keyboard shortcuts, the
// "go to export" jump dialog, and search typeahead helpers.
$(function() {
    // prevCh/prevTime remember the previous keypress so two-key combos
    // ("gg", "gb", ...) can be detected; modal is true while any Bootstrap
    // modal is open.
    var prevCh = null, prevTime = 0, modal = false;

    // An element id counts as an exported identifier when it does not start
    // with "_" and contains no "-".
    var exportPat = /^[^_][^-]*$/

    // exports collects all element ids on the page that look like exported
    // identifiers; they feed the jump dialog's typeahead.
    function exports() {
        var result = []
        $('*[id]').each(function() {
            var id = $(this).attr('id');
            if (exportPat.test(id)) {
                result.push(id);
            }
        });
        return result;
    }

    // Manage focus of the jump dialog's text input around show/hide.
    $('#_jump').on({
        hide: function() { $('#_jump_text').blur(); },
        shown: function() { $('#_jump_text').val('').focus(); },
    });

    // Submitting the jump form navigates to the chosen identifier's anchor.
    $('#_jump_form').on({
        submit: function(e) {
            $('#_jump').modal('hide');
            window.location.href = '#' + $('#_jump_text').val();
            return false;
        }
    });

    // Track whether any modal is open so shortcuts can be suppressed.
    $('.modal').on({
        show: function() { modal = true; },
        hidden: function() { modal = false; }
    });

    // Global keyboard shortcuts.
    $(document).on('keypress', function(e) {
        // A combo counts only when the previous key was within one second.
        var combo = e.timeStamp - prevTime <= 1000;
        prevTime = 0;

        if (modal) {
            return true;
        }

        // Ignore keys typed into form controls or editable content.
        var t = e.target.tagName
        if (t == 'INPUT' ||
            t == 'SELECT' ||
            t == 'TEXTAREA' ) {
            return true;
        }

        if (e.target.contentEditable && e.target.contentEditable == 'true') {
            return true;
        }

        var ch = String.fromCharCode(e.which);

        if (combo) {
            switch (prevCh + ch) {
            case "gg":
                // Go to top of page.
                $('html,body').animate({scrollTop: 0},'fast');
                return false;
            case "gb":
                // Go to end of page.
                $('html,body').animate({scrollTop: $(document).height()},'fast');
                return false;
            case "gi":
                // Go to index.
                if ($('#_index').length > 0) {
                    $('html,body').animate({scrollTop: $("#_index").offset().top},'fast');
                    return false;
                }
                // NOTE(review): falls through to "ge" when there is no
                // #_index element — confirm this is intentional.
            case "ge":
                // Go to examples.
                if ($('#_examples').length > 0) {
                    $('html,body').animate({scrollTop: $("#_examples").offset().top},'fast');
                    return false;
                }
            }
        }

        switch (ch) {
        case "/":
            // Focus the site search box.
            $('#_search').focus();
            return false;
        case "?":
            // Show the shortcuts help modal.
            $('#_shortcuts').modal();
            return false;
        case ".":
            // Open the "go to export" dialog with a typeahead over the
            // page's exported identifiers (only on pages that have it).
            if ($('#_jump').length > 0) {
                window.setTimeout(function() {
                    $('#_jump_text').typeahead({source: exports()});
                    $('#_jump').modal();
                }, 0);
                return false;
            }
        }

        prevCh = ch
        prevTime = e.timeStamp
        return true;
    });

    // thData caches the /-/typeahead response after the first fetch.
    var thData

    // searchSource supplies typeahead items for the search box, fetching
    // them once and reusing the cached response thereafter.
    function searchSource(query, process) {
        if (thData) {
            return thData.items;
        }
        return $.get('/-/typeahead', { q: query }, function (data) {
            thData = data
            return process(data.items);
        });
    }

    // searchSorter ranks typeahead items: matches at the start of the last
    // path segment first, then elsewhere in the last segment, then anywhere
    // in the item.
    function searchSorter(items) {
        var p1 = [], p2 = [], p3 = [];
        var q = this.query.toLowerCase();
        var item;
        while (item = items.shift()) {
            var i = item.toLowerCase();
            var n = i.substring(i.lastIndexOf("/")).indexOf(q);
            if (n == 0) {
                p1.push(item);
            } else if (n > 0) {
                p2.push(item);
            } else if (i.indexOf(q) >= 0) {
                p3.push(item);
            }
        }
        return p1.concat(p2, p3);
    }

    //$('#_searchBox').typeahead({source: searchSource, sorter: searchSorter});
    $('span.timeago').timeago();
    // Expand the example named in the URL fragment, if any.
    if (window.location.hash.substring(0, 10) == '#_example_') {
        $('[id|=_ex_' + window.location.hash.substring(10) + ']').addClass('in').height('auto');
    }
});
|
|
@ -0,0 +1,25 @@
|
|||
// More space around headers on package pages.
|
||||
.navbar, h2, h3, h4 { margin-top: @baseLineHeight }
|
||||
|
||||
code {
|
||||
background-color: inherit;
|
||||
border: none;
|
||||
color: inherit;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
// comments
|
||||
pre .com {
|
||||
color: rgb(147, 161, 161);
|
||||
}
|
||||
|
||||
.pre-x-scrollable {
|
||||
overflow: auto;
|
||||
word-wrap: normal;
|
||||
white-space: pre;
|
||||
}
|
||||
|
||||
// make typeahead dropdown visible.
|
||||
#_jump .modal-body {
|
||||
overflow: visible;
|
||||
}
|
|
@ -0,0 +1,14 @@
|
|||
// More space around headers on package pages.
|
||||
.navbar, .spacey h2, .spacey h3, .spacey h4 { margin-top: @baseLineHeight }
|
||||
|
||||
.flat-well {
|
||||
min-height: 20px;
|
||||
padding: 19px;
|
||||
margin-bottom: 20px;
|
||||
background-color: @wellBackground;
|
||||
.border-radius(@baseBorderRadius);
|
||||
blockquote {
|
||||
border-color: #ddd;
|
||||
border-color: rgba(0,0,0,.15);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,73 @@
|
|||
/*!
|
||||
* Bootstrap v2.2.2
|
||||
*
|
||||
* Copyright 2012 Twitter, Inc
|
||||
* Licensed under the Apache License v2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Designed and built with all the love in the world @twitter by @mdo and @fat.
|
||||
*
|
||||
 * Modifications to the Bootstrap source:
|
||||
*
|
||||
* Copyright 2012 Gary Burd
|
||||
* Licensed under the Apache License v2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*/
|
||||
|
||||
// CSS Reset
|
||||
@import "reset.less";
|
||||
|
||||
// Core variables and mixins
|
||||
@import "variables.less"; // Modify this for custom colors, font-sizes, etc
|
||||
@import "mixins.less";
|
||||
|
||||
@import "before.less";
|
||||
|
||||
// Grid system and page structure
|
||||
@import "scaffolding.less";
|
||||
@import "grid.less";
|
||||
@import "layouts.less";
|
||||
|
||||
// Base CSS
|
||||
@import "type.less";
|
||||
@import "code.less";
|
||||
@import "forms.less";
|
||||
@import "tables.less";
|
||||
|
||||
// Components: common
|
||||
@import "sprites.less";
|
||||
@import "dropdowns.less";
|
||||
@import "wells.less";
|
||||
@import "component-animations.less";
|
||||
@import "close.less";
|
||||
|
||||
// Components: Buttons & Alerts
|
||||
@import "buttons.less";
|
||||
@import "button-groups.less";
|
||||
@import "alerts.less"; // Note: alerts share common CSS with buttons and thus have styles in buttons.less
|
||||
|
||||
// Components: Nav
|
||||
@import "navs.less";
|
||||
@import "navbar.less";
|
||||
@import "breadcrumbs.less";
|
||||
@import "pagination.less";
|
||||
@import "pager.less";
|
||||
|
||||
// Components: Popovers
|
||||
@import "modals.less";
|
||||
@import "tooltip.less";
|
||||
@import "popovers.less";
|
||||
|
||||
// Components: Misc
|
||||
@import "thumbnails.less";
|
||||
@import "media.less";
|
||||
@import "labels-badges.less";
|
||||
@import "progress-bars.less";
|
||||
@import "accordion.less";
|
||||
@import "carousel.less";
|
||||
@import "hero-unit.less";
|
||||
|
||||
@import "after.less";
|
||||
|
||||
// Utility classes
|
||||
@import "utilities.less"; // Has to be last to override when necessary
|
|
@ -0,0 +1,303 @@
|
|||
@goColor: #375EAB; // godoc button background color.

//
// Variables
// --------------------------------------------------


// Global values
// --------------------------------------------------


// Grays
// -------------------------
@black:                 #000;
@grayDarker:            #222;
@grayDark:              #333;
@gray:                  #555;
@grayLight:             #999;
@grayLighter:           #eee;
@white:                 #fff;


// Accent colors
// -------------------------
@blue:                  #049cdb;
@blueDark:              #0064cd;
@green:                 #46a546;
@red:                   #9d261d;
@yellow:                #ffc40d;
@orange:                #f89406;
@pink:                  #c3325f;
@purple:                #7a43b6;


// Scaffolding
// -------------------------
@bodyBackground:        @white;
@textColor:             @grayDark;


// Links
// -------------------------
@linkColor:             @goColor; // #08c;
@linkColorHover:        darken(@linkColor, 15%);


// Typography
// -------------------------
@sansFontFamily:        "Helvetica Neue", Helvetica, Arial, sans-serif;
@serifFontFamily:       Georgia, "Times New Roman", Times, serif;
@monoFontFamily:        Monaco, Menlo, Consolas, "Courier New", monospace;

@baseFontSize:          14px;
@baseFontFamily:        @sansFontFamily;
@baseLineHeight:        20px;
@altFontFamily:         @serifFontFamily;

@headingsFontFamily:    inherit; // empty to use BS default, @baseFontFamily
@headingsFontWeight:    bold;    // instead of browser default, bold
@headingsColor:         inherit; // empty to use BS default, @textColor


// Component sizing
// -------------------------
// Based on 14px font-size and 20px line-height

@fontSizeLarge:         @baseFontSize * 1.25; // ~18px
@fontSizeSmall:         @baseFontSize * 0.85; // ~12px
@fontSizeMini:          @baseFontSize * 0.75; // ~11px

@paddingLarge:          11px 19px; // 44px
@paddingSmall:          2px 10px;  // 26px
@paddingMini:           0 6px;     // 22px

@baseBorderRadius:      4px;
@borderRadiusLarge:     6px;
@borderRadiusSmall:     3px;


// Tables
// -------------------------
@tableBackground:               transparent; // overall background-color
@tableBackgroundAccent:         #f9f9f9;     // for striping
@tableBackgroundHover:          #f5f5f5;     // for hover
@tableBorder:                   #ddd;        // table and cell border

// Buttons
// -------------------------
@btnBackground:                     @white;
@btnBackgroundHighlight:            darken(@white, 10%);
@btnBorder:                         #bbb;

@btnPrimaryBackground:              @linkColor;
@btnPrimaryBackgroundHighlight:     spin(@btnPrimaryBackground, 20%);

@btnInfoBackground:                 #5bc0de;
@btnInfoBackgroundHighlight:        #2f96b4;

@btnSuccessBackground:              #62c462;
@btnSuccessBackgroundHighlight:     #51a351;

@btnWarningBackground:              lighten(@orange, 15%);
@btnWarningBackgroundHighlight:     @orange;

@btnDangerBackground:               #ee5f5b;
@btnDangerBackgroundHighlight:      #bd362f;

@btnInverseBackground:              #444;
@btnInverseBackgroundHighlight:     @grayDarker;


// Forms
// -------------------------
@inputBackground:               @white;
@inputBorder:                   #ccc;
@inputBorderRadius:             @baseBorderRadius;
@inputDisabledBackground:       @grayLighter;
@formActionsBackground:         #f5f5f5;
@inputHeight:                   @baseLineHeight + 10px; // base line-height + 8px vertical padding + 2px top/bottom border


// Dropdowns
// -------------------------
@dropdownBackground:            @white;
@dropdownBorder:                rgba(0,0,0,.2);
@dropdownDividerTop:            #e5e5e5;
@dropdownDividerBottom:         @white;

@dropdownLinkColor:             @grayDark;
@dropdownLinkColorHover:        @white;
@dropdownLinkColorActive:       @white;

@dropdownLinkBackgroundActive:  @linkColor;
@dropdownLinkBackgroundHover:   @dropdownLinkBackgroundActive;



// COMPONENT VARIABLES
// --------------------------------------------------


// Z-index master list
// -------------------------
// Used for a bird's eye view of components dependent on the z-axis
// Try to avoid customizing these :)
@zindexDropdown:          1000;
@zindexPopover:           1010;
@zindexTooltip:           1030;
@zindexFixedNavbar:       1030;
@zindexModalBackdrop:     1040;
@zindexModal:             1050;


// Sprite icons path
// -------------------------
@iconSpritePath:          "../img/glyphicons-halflings.png";
@iconWhiteSpritePath:     "../img/glyphicons-halflings-white.png";


// Input placeholder text color
// -------------------------
@placeholderText:         @grayLight;


// Hr border color
// -------------------------
@hrBorder:                @grayLighter;


// Horizontal forms & lists
// -------------------------
@horizontalComponentOffset:       180px;


// Wells
// -------------------------
@wellBackground:                  #f5f5f5;


// Navbar
// -------------------------
@navbarCollapseWidth:             979px;
@navbarCollapseDesktopWidth:      @navbarCollapseWidth + 1;

@navbarHeight:                    40px;
@navbarBackgroundHighlight:       #ffffff;
@navbarBackground:                darken(@navbarBackgroundHighlight, 5%);
@navbarBorder:                    darken(@navbarBackground, 12%);

@navbarText:                      #777;
@navbarLinkColor:                 #777;
@navbarLinkColorHover:            @grayDark;
@navbarLinkColorActive:           @gray;
@navbarLinkBackgroundHover:       transparent;
@navbarLinkBackgroundActive:      darken(@navbarBackground, 5%);

@navbarBrandColor:                @navbarLinkColor;

// Inverted navbar
@navbarInverseBackground:                @goColor; // #111111;
@navbarInverseBackgroundHighlight:       @goColor; // #222222;
@navbarInverseBorder:                    darken(@goColor, 5%);

@navbarInverseText:                      #ddd; // @grayLight;
@navbarInverseLinkColor:                 #ddd; // @grayLight;
@navbarInverseLinkColorHover:            @white;
@navbarInverseLinkColorActive:           @white; // @navbarInverseLinkColorHover;
@navbarInverseLinkBackgroundHover:       lighten(@goColor, 10%);
@navbarInverseLinkBackgroundActive:      darken(@navbarInverseBackground, 4%);

@navbarInverseSearchBackground:          lighten(@navbarInverseBackground, 10%);
@navbarInverseSearchBackgroundFocus:     @white;
@navbarInverseSearchBorder:              @navbarInverseBackground;
@navbarInverseSearchPlaceholderColor:    #ccc;

@navbarInverseBrandColor:                @navbarInverseLinkColor;


// Pagination
// -------------------------
@paginationBackground:                #fff;
@paginationBorder:                    #ddd;
@paginationActiveBackground:          #f5f5f5;


// Hero unit
// -------------------------
@heroUnitBackground:              @grayLighter;
@heroUnitHeadingColor:            inherit;
@heroUnitLeadColor:               inherit;


// Form states and alerts
// -------------------------
@warningText:             #c09853;
@warningBackground:       #fcf8e3;
@warningBorder:           darken(spin(@warningBackground, -10), 3%);

@errorText:               #b94a48;
@errorBackground:         #f2dede;
@errorBorder:             darken(spin(@errorBackground, -10), 3%);

@successText:             #468847;
@successBackground:       #dff0d8;
@successBorder:           darken(spin(@successBackground, -10), 5%);

@infoText:                #3a87ad;
@infoBackground:          #d9edf7;
@infoBorder:              darken(spin(@infoBackground, -10), 7%);


// Tooltips and popovers
// -------------------------
@tooltipColor:            #fff;
@tooltipBackground:       #000;
@tooltipArrowWidth:       5px;
@tooltipArrowColor:       @tooltipBackground;

@popoverBackground:       #fff;
@popoverArrowWidth:       10px;
@popoverArrowColor:       #fff;
@popoverTitleBackground:  darken(@popoverBackground, 3%);

// Special enhancement for popovers
@popoverArrowOuterWidth:  @popoverArrowWidth + 1;
@popoverArrowOuterColor:  rgba(0,0,0,.25);



// GRID
// --------------------------------------------------


// Default 940px grid
// -------------------------
@gridColumns:             12;
@gridColumnWidth:         50px; // 60px;
@gridGutterWidth:         20px;
@gridRowWidth:            (@gridColumns * @gridColumnWidth) + (@gridGutterWidth * (@gridColumns - 1));

// 1200px min
@gridColumnWidth1200:     70px;
@gridGutterWidth1200:     30px;
@gridRowWidth1200:        (@gridColumns * @gridColumnWidth1200) + (@gridGutterWidth1200 * (@gridColumns - 1));

// 768px-979px
@gridColumnWidth768:      42px;
@gridGutterWidth768:      20px;
@gridRowWidth768:         (@gridColumns * @gridColumnWidth768) + (@gridGutterWidth768 * (@gridColumns - 1));


// Fluid grid
// -------------------------
@fluidGridColumnWidth:    percentage(@gridColumnWidth/@gridRowWidth);
@fluidGridGutterWidth:    percentage(@gridGutterWidth/@gridRowWidth);

// 1200px min
@fluidGridColumnWidth1200:     percentage(@gridColumnWidth1200/@gridRowWidth1200);
@fluidGridGutterWidth1200:     percentage(@gridGutterWidth1200/@gridRowWidth1200);

// 768px-979px
@fluidGridColumnWidth768:      percentage(@gridColumnWidth768/@gridRowWidth768);
@fluidGridGutterWidth768:      percentage(@gridGutterWidth768/@gridRowWidth768);
|
|
@ -0,0 +1,820 @@
|
|||
// Copyright 2012 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
// Command gddo-server is the GoPkgDoc server.
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"flag"
|
||||
"fmt"
|
||||
"go/build"
|
||||
"html/template"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"runtime/debug"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"code.google.com/p/go.talks/pkg/present"
|
||||
"github.com/garyburd/gddo/database"
|
||||
"github.com/garyburd/gddo/doc"
|
||||
"github.com/garyburd/indigo/server"
|
||||
"github.com/garyburd/indigo/web"
|
||||
)
|
||||
|
||||
var errUpdateTimeout = errors.New("refresh timeout")
|
||||
|
||||
const (
|
||||
humanRequest = iota
|
||||
robotRequest
|
||||
queryRequest
|
||||
refreshRequest
|
||||
)
|
||||
|
||||
type crawlResult struct {
|
||||
pdoc *doc.Package
|
||||
err error
|
||||
}
|
||||
|
||||
// getDoc gets the package documentation from the database or from the version
|
||||
// control system as needed.
|
||||
func getDoc(path string, requestType int) (*doc.Package, []database.Package, error) {
|
||||
if path == "-" {
|
||||
// A hack in the database package uses the path "-" to represent the
|
||||
// next document to crawl. Block "-" here so that requests to /- always
|
||||
// return not found.
|
||||
return nil, nil, nil
|
||||
}
|
||||
|
||||
pdoc, pkgs, nextCrawl, err := db.Get(path)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
needsCrawl := false
|
||||
switch requestType {
|
||||
case queryRequest:
|
||||
needsCrawl = nextCrawl.IsZero() && len(pkgs) == 0
|
||||
case humanRequest:
|
||||
needsCrawl = nextCrawl.Before(time.Now())
|
||||
case robotRequest:
|
||||
needsCrawl = nextCrawl.IsZero() && len(pkgs) > 0
|
||||
}
|
||||
|
||||
if needsCrawl {
|
||||
c := make(chan crawlResult, 1)
|
||||
go func() {
|
||||
pdoc, err := crawlDoc("web ", path, pdoc, len(pkgs) > 0, nextCrawl)
|
||||
c <- crawlResult{pdoc, err}
|
||||
}()
|
||||
var err error
|
||||
timeout := *getTimeout
|
||||
if pdoc == nil {
|
||||
timeout = *firstGetTimeout
|
||||
}
|
||||
select {
|
||||
case rr := <-c:
|
||||
if rr.err == nil {
|
||||
pdoc = rr.pdoc
|
||||
}
|
||||
err = rr.err
|
||||
case <-time.After(timeout):
|
||||
err = errUpdateTimeout
|
||||
}
|
||||
if err != nil {
|
||||
if pdoc != nil {
|
||||
log.Printf("Serving %q from database after error: %v", path, err)
|
||||
err = nil
|
||||
} else if err == errUpdateTimeout {
|
||||
// Handle timeout on packages never seeen before as not found.
|
||||
log.Printf("Serving %q as not found after timeout", path)
|
||||
err = &web.Error{Status: web.StatusNotFound}
|
||||
}
|
||||
}
|
||||
}
|
||||
return pdoc, pkgs, err
|
||||
}
|
||||
|
||||
func templateExt(req *web.Request) string {
|
||||
if web.NegotiateContentType(req, []string{"text/html", "text/plain"}, "text/html") == "text/plain" {
|
||||
return ".txt"
|
||||
}
|
||||
return ".html"
|
||||
}
|
||||
|
||||
var (
|
||||
robotPat = regexp.MustCompile(`(:?\+https?://)|(?:\Wbot\W)`)
|
||||
)
|
||||
|
||||
func isRobot(req *web.Request) bool {
|
||||
return *robot || robotPat.MatchString(req.Header.Get(web.HeaderUserAgent))
|
||||
}
|
||||
|
||||
func popularLinkReferral(req *web.Request) bool {
|
||||
u := url.URL{Scheme: req.URL.Scheme, Host: req.URL.Host, Path: "/"}
|
||||
return req.Header.Get("Referer") == u.String()
|
||||
}
|
||||
|
||||
func hasFormValue(req *web.Request, key string) bool {
|
||||
_, ok := req.Form[key]
|
||||
return ok
|
||||
}
|
||||
|
||||
func servePackage(resp web.Response, req *web.Request) error {
|
||||
p := path.Clean(req.URL.Path)
|
||||
if strings.HasPrefix(p, "/pkg/") {
|
||||
p = p[len("/pkg"):]
|
||||
}
|
||||
if p != req.URL.Path {
|
||||
return web.Redirect(resp, req, p, 301, nil)
|
||||
}
|
||||
|
||||
requestType := humanRequest
|
||||
if isRobot(req) {
|
||||
requestType = robotRequest
|
||||
}
|
||||
|
||||
path := req.RouteVars["path"]
|
||||
pdoc, pkgs, err := getDoc(path, requestType)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if pdoc == nil {
|
||||
if len(pkgs) == 0 {
|
||||
return &web.Error{Status: web.StatusNotFound}
|
||||
}
|
||||
pdocChild, _, _, err := db.Get(pkgs[0].Path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
pdoc = &doc.Package{
|
||||
ProjectName: pdocChild.ProjectName,
|
||||
ProjectRoot: pdocChild.ProjectRoot,
|
||||
ProjectURL: pdocChild.ProjectURL,
|
||||
ImportPath: path,
|
||||
}
|
||||
}
|
||||
|
||||
switch {
|
||||
case len(req.Form) == 0:
|
||||
if requestType == humanRequest &&
|
||||
pdoc.Name != "" && // not a directory
|
||||
pdoc.ProjectRoot != "" && // not a standard package
|
||||
!pdoc.IsCmd &&
|
||||
len(pdoc.Errors) == 0 &&
|
||||
!popularLinkReferral(req) {
|
||||
if err := db.IncrementPopularScore(pdoc.ImportPath); err != nil {
|
||||
log.Print("ERROR db.IncrementPopularScore(%s): %v", pdoc.ImportPath, err)
|
||||
}
|
||||
}
|
||||
|
||||
importerCount, err := db.ImporterCount(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
template := "pkg"
|
||||
if pdoc.IsCmd {
|
||||
template = "cmd"
|
||||
}
|
||||
template += templateExt(req)
|
||||
|
||||
return executeTemplate(resp, template, web.StatusOK, nil, map[string]interface{}{
|
||||
"pkgs": pkgs,
|
||||
"pdoc": pdoc,
|
||||
"importerCount": importerCount,
|
||||
})
|
||||
case hasFormValue(req, "imports"):
|
||||
if pdoc.Name == "" {
|
||||
break
|
||||
}
|
||||
pkgs, err = db.Packages(pdoc.Imports)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return executeTemplate(resp, "imports.html", web.StatusOK, nil, map[string]interface{}{
|
||||
"pkgs": pkgs,
|
||||
"pdoc": pdoc,
|
||||
})
|
||||
case hasFormValue(req, "importers"):
|
||||
if pdoc.Name == "" {
|
||||
break
|
||||
}
|
||||
pkgs, err = db.Importers(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return executeTemplate(resp, "importers.html", web.StatusOK, nil, map[string]interface{}{
|
||||
"pkgs": pkgs,
|
||||
"pdoc": pdoc,
|
||||
})
|
||||
case hasFormValue(req, "import-graph"):
|
||||
if pdoc.Name == "" {
|
||||
break
|
||||
}
|
||||
hide := req.Form.Get("hide") == "1"
|
||||
pkgs, edges, err := db.ImportGraph(pdoc, hide)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
b, err := renderGraph(pdoc, pkgs, edges)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return executeTemplate(resp, "graph.html", web.StatusOK, nil, map[string]interface{}{
|
||||
"svg": template.HTML(b),
|
||||
"pdoc": pdoc,
|
||||
"hide": hide,
|
||||
})
|
||||
case req.Form.Get("play") != "":
|
||||
u, err := playURL(pdoc, req.Form.Get("play"), req.Form.Get("name"))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return web.Redirect(resp, req, u, 301, nil)
|
||||
case req.Form.Get("view") != "":
|
||||
// Redirect deprecated view= queries.
|
||||
var q string
|
||||
switch view := req.Form.Get("view"); view {
|
||||
case "imports", "importers":
|
||||
q = view
|
||||
case "import-graph":
|
||||
if req.Form.Get("hide") == "1" {
|
||||
q = "import-graph&hide=1"
|
||||
} else {
|
||||
q = "import-graph"
|
||||
}
|
||||
}
|
||||
if q != "" {
|
||||
u := *req.URL
|
||||
u.RawQuery = q
|
||||
return web.Redirect(resp, req, u.String(), 301, nil)
|
||||
}
|
||||
}
|
||||
return &web.Error{Status: web.StatusNotFound}
|
||||
}
|
||||
|
||||
func serveRefresh(resp web.Response, req *web.Request) error {
|
||||
path := req.Form.Get("path")
|
||||
_, pkgs, _, err := db.Get(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
c := make(chan error, 1)
|
||||
go func() {
|
||||
_, err := crawlDoc("rfrsh", path, nil, len(pkgs) > 0, time.Time{})
|
||||
c <- err
|
||||
}()
|
||||
select {
|
||||
case err = <-c:
|
||||
case <-time.After(*getTimeout):
|
||||
err = errUpdateTimeout
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return web.Redirect(resp, req, "/"+path, 302, nil)
|
||||
}
|
||||
|
||||
func serveGoIndex(resp web.Response, req *web.Request) error {
|
||||
pkgs, err := db.GoIndex()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return executeTemplate(resp, "std.html", web.StatusOK, nil, map[string]interface{}{
|
||||
"pkgs": pkgs,
|
||||
})
|
||||
}
|
||||
|
||||
func serveIndex(resp web.Response, req *web.Request) error {
|
||||
pkgs, err := db.Index()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return executeTemplate(resp, "index.html", web.StatusOK, nil, map[string]interface{}{
|
||||
"pkgs": pkgs,
|
||||
})
|
||||
}
|
||||
|
||||
type byPath struct {
|
||||
pkgs []database.Package
|
||||
rank []int
|
||||
}
|
||||
|
||||
func (bp *byPath) Len() int { return len(bp.pkgs) }
|
||||
func (bp *byPath) Less(i, j int) bool { return bp.pkgs[i].Path < bp.pkgs[j].Path }
|
||||
func (bp *byPath) Swap(i, j int) {
|
||||
bp.pkgs[i], bp.pkgs[j] = bp.pkgs[j], bp.pkgs[i]
|
||||
bp.rank[i], bp.rank[j] = bp.rank[j], bp.rank[i]
|
||||
}
|
||||
|
||||
type byRank struct {
|
||||
pkgs []database.Package
|
||||
rank []int
|
||||
}
|
||||
|
||||
func (br *byRank) Len() int { return len(br.pkgs) }
|
||||
func (br *byRank) Less(i, j int) bool { return br.rank[i] < br.rank[j] }
|
||||
func (br *byRank) Swap(i, j int) {
|
||||
br.pkgs[i], br.pkgs[j] = br.pkgs[j], br.pkgs[i]
|
||||
br.rank[i], br.rank[j] = br.rank[j], br.rank[i]
|
||||
}
|
||||
|
||||
func popular() ([]database.Package, error) {
|
||||
const n = 25
|
||||
|
||||
pkgs, err := db.Popular(2 * n)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
rank := make([]int, len(pkgs))
|
||||
for i := range pkgs {
|
||||
rank[i] = i
|
||||
}
|
||||
|
||||
sort.Sort(&byPath{pkgs, rank})
|
||||
|
||||
j := 0
|
||||
prev := "."
|
||||
for i, pkg := range pkgs {
|
||||
if strings.HasPrefix(pkg.Path, prev) {
|
||||
if rank[j-1] < rank[i] {
|
||||
rank[j-1] = rank[i]
|
||||
}
|
||||
continue
|
||||
}
|
||||
prev = pkg.Path + "/"
|
||||
pkgs[j] = pkg
|
||||
rank[j] = rank[i]
|
||||
j += 1
|
||||
}
|
||||
pkgs = pkgs[:j]
|
||||
|
||||
sort.Sort(&byRank{pkgs, rank})
|
||||
|
||||
if len(pkgs) > n {
|
||||
pkgs = pkgs[:n]
|
||||
}
|
||||
|
||||
sort.Sort(&byPath{pkgs, rank})
|
||||
|
||||
return pkgs, nil
|
||||
}
|
||||
|
||||
func serveHome(resp web.Response, req *web.Request) error {
|
||||
|
||||
q := strings.TrimSpace(req.Form.Get("q"))
|
||||
if q == "" {
|
||||
pkgs, err := popular()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return executeTemplate(resp, "home"+templateExt(req), web.StatusOK, nil,
|
||||
map[string]interface{}{"Popular": pkgs})
|
||||
}
|
||||
|
||||
if path, ok := isBrowseURL(q); ok {
|
||||
q = path
|
||||
}
|
||||
|
||||
if doc.IsValidRemotePath(q) {
|
||||
pdoc, pkgs, err := getDoc(q, queryRequest)
|
||||
if err == nil && (pdoc != nil || len(pkgs) > 0) {
|
||||
return web.Redirect(resp, req, "/"+q, 302, nil)
|
||||
}
|
||||
}
|
||||
|
||||
pkgs, err := db.Query(q)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return executeTemplate(resp, "results"+templateExt(req), web.StatusOK, nil,
|
||||
map[string]interface{}{"q": q, "pkgs": pkgs})
|
||||
}
|
||||
|
||||
func serveAbout(resp web.Response, req *web.Request) error {
|
||||
return executeTemplate(resp, "about.html", web.StatusOK, nil,
|
||||
map[string]interface{}{"Host": req.URL.Host})
|
||||
}
|
||||
|
||||
func serveBot(resp web.Response, req *web.Request) error {
|
||||
return executeTemplate(resp, "bot.html", web.StatusOK, nil, nil)
|
||||
}
|
||||
|
||||
func serveOpenSearchDescription(resp web.Response, req *web.Request) error {
|
||||
return executeTemplate(resp, "opensearch.xml", web.StatusOK, nil, req.URL.Host)
|
||||
}
|
||||
|
||||
func serveTypeahead(resp web.Response, req *web.Request) error {
|
||||
pkgs, err := db.Popular(1000)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
items := make([]string, len(pkgs))
|
||||
for i, pkg := range pkgs {
|
||||
items[i] = pkg.Path
|
||||
}
|
||||
data := map[string]interface{}{"items": items}
|
||||
w := resp.Start(web.StatusOK, web.Header{web.HeaderContentType: {"application/json; charset=uft-8"}})
|
||||
return json.NewEncoder(w).Encode(data)
|
||||
}
|
||||
|
||||
func logError(req *web.Request, err error, r interface{}) {
|
||||
if err != nil {
|
||||
var buf bytes.Buffer
|
||||
fmt.Fprintf(&buf, "Error serving %s: %v\n", req.URL, err)
|
||||
if r != nil {
|
||||
fmt.Fprintln(&buf, r)
|
||||
buf.Write(debug.Stack())
|
||||
}
|
||||
log.Print(buf.String())
|
||||
}
|
||||
}
|
||||
|
||||
func renderPresentation(resp web.Response, fname string, doc *present.Doc) error {
|
||||
t := presentTemplates[path.Ext(fname)]
|
||||
data := struct {
|
||||
*present.Doc
|
||||
Template *template.Template
|
||||
PlayEnabled bool
|
||||
}{
|
||||
doc,
|
||||
t,
|
||||
true,
|
||||
}
|
||||
|
||||
return t.Execute(
|
||||
resp.Start(web.StatusOK, web.Header{web.HeaderContentType: {"text/html; charset=utf8"}}),
|
||||
&data)
|
||||
}
|
||||
|
||||
func servePresentHome(resp web.Response, req *web.Request) error {
|
||||
fname := filepath.Join(*assetsDir, "presentHome.article")
|
||||
f, err := os.Open(fname)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
doc, err := present.Parse(f, fname, 0)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return renderPresentation(resp, fname, doc)
|
||||
}
|
||||
|
||||
var (
|
||||
presMu sync.Mutex
|
||||
presentations = map[string]*doc.Presentation{}
|
||||
)
|
||||
|
||||
func servePresentation(resp web.Response, req *web.Request) error {
|
||||
if p := path.Clean(req.URL.Path); p != req.URL.Path {
|
||||
return web.Redirect(resp, req, p, 301, nil)
|
||||
}
|
||||
|
||||
presMu.Lock()
|
||||
for p, pres := range presentations {
|
||||
if time.Since(pres.Updated) > 15*time.Minute {
|
||||
delete(presentations, p)
|
||||
}
|
||||
}
|
||||
p := req.RouteVars["path"]
|
||||
pres := presentations[p]
|
||||
presMu.Unlock()
|
||||
|
||||
if pres == nil {
|
||||
var err error
|
||||
log.Println("Fetch presentation ", p)
|
||||
pres, err = doc.GetPresentation(httpClient, p)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
presMu.Lock()
|
||||
presentations[p] = pres
|
||||
presMu.Unlock()
|
||||
}
|
||||
ctx := &present.Context{
|
||||
ReadFile: func(name string) ([]byte, error) {
|
||||
if p, ok := pres.Files[name]; ok {
|
||||
return p, nil
|
||||
}
|
||||
return nil, fmt.Errorf("pres file not found %s", name)
|
||||
},
|
||||
}
|
||||
doc, err := ctx.Parse(bytes.NewReader(pres.Files[pres.Filename]), pres.Filename, 0)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return renderPresentation(resp, p, doc)
|
||||
}
|
||||
|
||||
func serveCompile(resp web.Response, req *web.Request) error {
|
||||
r, err := http.PostForm("http://golang.org/compile", req.Form)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer r.Body.Close()
|
||||
_, err = io.Copy(
|
||||
resp.Start(web.StatusOK, web.Header{web.HeaderContentType: r.Header[web.HeaderContentType]}),
|
||||
r.Body)
|
||||
return err
|
||||
}
|
||||
|
||||
func serveAPISearch(resp web.Response, req *web.Request) error {
|
||||
q := strings.TrimSpace(req.Form.Get("q"))
|
||||
pkgs, err := db.Query(q)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var data struct {
|
||||
Results []database.Package `json:"results"`
|
||||
}
|
||||
data.Results = pkgs
|
||||
w := resp.Start(web.StatusOK, web.Header{web.HeaderContentType: {"application/json; charset=uft-8"}})
|
||||
return json.NewEncoder(w).Encode(&data)
|
||||
}
|
||||
|
||||
func serveAPIPackages(resp web.Response, req *web.Request) error {
|
||||
pkgs, err := db.AllPackages()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var data struct {
|
||||
Results []database.Package `json:"results"`
|
||||
}
|
||||
data.Results = pkgs
|
||||
w := resp.Start(web.StatusOK, web.Header{web.HeaderContentType: {"application/json; charset=uft-8"}})
|
||||
return json.NewEncoder(w).Encode(&data)
|
||||
}
|
||||
|
||||
func handleError(resp web.Response, req *web.Request, status int, err error, r interface{}) {
|
||||
logError(req, err, r)
|
||||
switch status {
|
||||
case 0:
|
||||
// nothing to do
|
||||
case web.StatusNotFound:
|
||||
executeTemplate(resp, "notfound"+templateExt(req), status, nil, nil)
|
||||
default:
|
||||
s := web.StatusText(status)
|
||||
if err == errUpdateTimeout {
|
||||
s = "Timeout getting package files from the version control system."
|
||||
} else if e, ok := err.(*doc.RemoteError); ok {
|
||||
s = "Error getting package files from " + e.Host + "."
|
||||
}
|
||||
w := resp.Start(web.StatusInternalServerError, web.Header{web.HeaderContentType: {"text/plan; charset=uft-8"}})
|
||||
io.WriteString(w, s)
|
||||
}
|
||||
}
|
||||
|
||||
func handlePresentError(resp web.Response, req *web.Request, status int, err error, r interface{}) {
|
||||
logError(req, err, r)
|
||||
switch status {
|
||||
case 0:
|
||||
// nothing to do
|
||||
default:
|
||||
s := web.StatusText(status)
|
||||
if doc.IsNotFound(err) {
|
||||
s = web.StatusText(web.StatusNotFound)
|
||||
status = web.StatusNotFound
|
||||
} else if err == errUpdateTimeout {
|
||||
s = "Timeout getting package files from the version control system."
|
||||
} else if e, ok := err.(*doc.RemoteError); ok {
|
||||
s = "Error getting package files from " + e.Host + "."
|
||||
}
|
||||
w := resp.Start(status, web.Header{web.HeaderContentType: {"text/plan; charset=uft-8"}})
|
||||
io.WriteString(w, s)
|
||||
}
|
||||
}
|
||||
|
||||
func handleAPIError(resp web.Response, req *web.Request, status int, err error, r interface{}) {
|
||||
logError(req, err, r)
|
||||
switch status {
|
||||
case 0:
|
||||
// nothing to do
|
||||
default:
|
||||
var data struct {
|
||||
Error struct {
|
||||
Message string `json:"message"`
|
||||
} `json:"error"`
|
||||
}
|
||||
data.Error.Message = web.StatusText(status)
|
||||
w := resp.Start(status, web.Header{web.HeaderContentType: {"application/json; charset=uft-8"}})
|
||||
json.NewEncoder(w).Encode(&data)
|
||||
}
|
||||
}
|
||||
|
||||
// defaultBase resolves an import path to its directory in the local Go
// workspace, falling back to "." when the package cannot be located.
func defaultBase(path string) string {
	if p, err := build.Default.Import(path, "", build.FindOnly); err == nil {
		return p.Dir
	}
	return "."
}
|
||||
|
||||
var (
|
||||
db *database.Database
|
||||
robot = flag.Bool("robot", false, "Robot mode")
|
||||
assetsDir = flag.String("assets", filepath.Join(defaultBase("github.com/garyburd/gddo/gddo-server"), "assets"), "Base directory for templates and static files.")
|
||||
gzAssetsDir = flag.String("gzassets", "", "Base directory for compressed static files.")
|
||||
presentDir = flag.String("present", defaultBase("code.google.com/p/go.talks/present"), "Base directory for templates and static files.")
|
||||
getTimeout = flag.Duration("get_timeout", 8*time.Second, "Time to wait for package update from the VCS.")
|
||||
firstGetTimeout = flag.Duration("first_get_timeout", 5*time.Second, "Time to wait for first fetch of package from the VCS.")
|
||||
maxAge = flag.Duration("max_age", 24*time.Hour, "Update package documents older than this age.")
|
||||
httpAddr = flag.String("http", ":8080", "Listen for HTTP connections on this address")
|
||||
crawlInterval = flag.Duration("crawl_interval", 0, "Package updater sleeps for this duration between package updates. Zero disables updates.")
|
||||
githubInterval = flag.Duration("github_interval", 0, "Github updates crawler sleeps for this duration between fetches. Zero disables the crawler.")
|
||||
secretsPath = flag.String("secrets", "secrets.json", "Path to file containing application ids and credentials for other services.")
|
||||
secrets struct {
|
||||
// HTTP user agent for outbound requests
|
||||
UserAgent string
|
||||
|
||||
// Github API Credentials
|
||||
GithubId string
|
||||
GithubSecret string
|
||||
|
||||
// Google Analytics account for tracking codes.
|
||||
GAAccount string
|
||||
}
|
||||
)
|
||||
|
||||
func readSecrets() error {
|
||||
b, err := ioutil.ReadFile(*secretsPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err = json.Unmarshal(b, &secrets); err != nil {
|
||||
return err
|
||||
}
|
||||
if secrets.UserAgent != "" {
|
||||
doc.SetUserAgent(secrets.UserAgent)
|
||||
}
|
||||
if secrets.GithubId != "" {
|
||||
doc.SetGithubCredentials(secrets.GithubId, secrets.GithubSecret)
|
||||
} else {
|
||||
log.Printf("Github credentials not set in %q.", *secretsPath)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func main() {
|
||||
flag.Parse()
|
||||
log.Printf("Starting server, os.Args=%s", strings.Join(os.Args, " "))
|
||||
if err := readSecrets(); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
if err := parseHTMLTemplates([][]string{
|
||||
{"about.html", "common.html", "layout.html"},
|
||||
{"bot.html", "common.html", "layout.html"},
|
||||
{"cmd.html", "common.html", "layout.html"},
|
||||
{"home.html", "common.html", "layout.html"},
|
||||
{"importers.html", "common.html", "layout.html"},
|
||||
{"imports.html", "common.html", "layout.html"},
|
||||
{"interface.html", "common.html", "layout.html"},
|
||||
{"index.html", "common.html", "layout.html"},
|
||||
{"notfound.html", "common.html", "layout.html"},
|
||||
{"pkg.html", "common.html", "layout.html"},
|
||||
{"results.html", "common.html", "layout.html"},
|
||||
{"std.html", "common.html", "layout.html"},
|
||||
{"graph.html", "common.html"},
|
||||
}); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
if err := parseTextTemplates([][]string{
|
||||
{"cmd.txt", "common.txt"},
|
||||
{"home.txt", "common.txt"},
|
||||
{"notfound.txt", "common.txt"},
|
||||
{"pkg.txt", "common.txt"},
|
||||
{"results.txt", "common.txt"},
|
||||
{"opensearch.xml"},
|
||||
}); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
if err := parsePresentTemplates([][]string{
|
||||
{".article", "article.tmpl", "action.tmpl"},
|
||||
{".slide", "slides.tmpl", "action.tmpl"},
|
||||
}); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
present.PlayEnabled = true
|
||||
|
||||
var err error
|
||||
db, err = database.New()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
if *crawlInterval > 0 {
|
||||
go crawl(*crawlInterval)
|
||||
}
|
||||
|
||||
if *githubInterval > 0 {
|
||||
go crawlGithubUpdates(*githubInterval)
|
||||
}
|
||||
|
||||
playScript, err := readPlayScript(*presentDir)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
staticConfig := &web.StaticConfig{
|
||||
Header: web.Header{web.HeaderCacheControl: {"public, max-age=3600"}},
|
||||
Directory: *assetsDir,
|
||||
GzDirectory: *gzAssetsDir,
|
||||
}
|
||||
presentStaticConfig := &web.StaticConfig{
|
||||
Header: web.Header{web.HeaderCacheControl: {"public, max-age=3600"}},
|
||||
Directory: *presentDir,
|
||||
}
|
||||
|
||||
h := web.NewHostRouter()
|
||||
|
||||
r := web.NewRouter()
|
||||
r.Add("/").GetFunc(servePresentHome)
|
||||
r.Add("/compile").PostFunc(serveCompile)
|
||||
r.Add("/favicon.ico").Get(staticConfig.FileHandler("favicon.ico"))
|
||||
r.Add("/google3d2f3cd4cc2bb44b.html").Get(staticConfig.FileHandler("google3d2f3cd4cc2bb44b.html"))
|
||||
r.Add("/humans.txt").Get(staticConfig.FileHandler("humans.txt"))
|
||||
r.Add("/play.js").Get(web.DataHandler(playScript, web.Header{web.HeaderContentType: {"text/javascript"}}))
|
||||
r.Add("/robots.txt").Get(staticConfig.FileHandler("presentRobots.txt"))
|
||||
r.Add("/static/<path:.*>").Get(presentStaticConfig.DirectoryHandler("static"))
|
||||
r.Add("/<path:.+>").GetFunc(servePresentation)
|
||||
|
||||
h.Add("talks.<:.*>", web.ErrorHandler(handlePresentError, web.FormAndCookieHandler(6000, false, r)))
|
||||
|
||||
r = web.NewRouter()
|
||||
r.Add("/favicon.ico").Get(staticConfig.FileHandler("favicon.ico"))
|
||||
r.Add("/google3d2f3cd4cc2bb44b.html").Get(staticConfig.FileHandler("google3d2f3cd4cc2bb44b.html"))
|
||||
r.Add("/humans.txt").Get(staticConfig.FileHandler("humans.txt"))
|
||||
r.Add("/robots.txt").Get(staticConfig.FileHandler("presentRobots.txt"))
|
||||
r.Add("/search").GetFunc(serveAPISearch)
|
||||
r.Add("/packages").GetFunc(serveAPIPackages)
|
||||
|
||||
h.Add("api.<:.*>", web.ErrorHandler(handleAPIError, web.FormAndCookieHandler(6000, false, r)))
|
||||
|
||||
r = web.NewRouter()
|
||||
r.Add("/").GetFunc(serveHome)
|
||||
r.Add("/-/about").GetFunc(serveAbout)
|
||||
r.Add("/-/bot").GetFunc(serveBot)
|
||||
r.Add("/-/opensearch.xml").GetFunc(serveOpenSearchDescription)
|
||||
r.Add("/-/typeahead").GetFunc(serveTypeahead)
|
||||
r.Add("/-/go").GetFunc(serveGoIndex)
|
||||
r.Add("/-/index").GetFunc(serveIndex)
|
||||
r.Add("/-/refresh").PostFunc(serveRefresh)
|
||||
r.Add("/-/static/<path:.*>").Get(staticConfig.DirectoryHandler("static"))
|
||||
r.Add("/a/index").Get(web.RedirectHandler("/-/index", 301))
|
||||
r.Add("/about").Get(web.RedirectHandler("/-/about", 301))
|
||||
r.Add("/favicon.ico").Get(staticConfig.FileHandler("favicon.ico"))
|
||||
r.Add("/google3d2f3cd4cc2bb44b.html").Get(staticConfig.FileHandler("google3d2f3cd4cc2bb44b.html"))
|
||||
r.Add("/humans.txt").Get(staticConfig.FileHandler("humans.txt"))
|
||||
r.Add("/robots.txt").Get(staticConfig.FileHandler("robots.txt"))
|
||||
r.Add("/BingSiteAuth.xml").Get(staticConfig.FileHandler("BingSiteAuth.xml"))
|
||||
r.Add("/C").Get(web.RedirectHandler("http://golang.org/doc/articles/c_go_cgo.html", 301))
|
||||
r.Add("/<path:.+>").GetFunc(servePackage)
|
||||
|
||||
h.Add("<:.*>", web.ErrorHandler(handleError, web.FormAndCookieHandler(1000, false, r)))
|
||||
|
||||
listener, err := net.Listen("tcp", *httpAddr)
|
||||
if err != nil {
|
||||
log.Fatal("Listen", err)
|
||||
return
|
||||
}
|
||||
defer listener.Close()
|
||||
s := &server.Server{Listener: listener, Handler: h} // add logger
|
||||
err = s.Serve()
|
||||
if err != nil {
|
||||
log.Fatal("Server", err)
|
||||
}
|
||||
}
|
|
@ -0,0 +1,96 @@
|
|||
// Copyright 2013 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/garyburd/gddo/doc"
|
||||
"github.com/garyburd/indigo/web"
|
||||
)
|
||||
|
||||
func findExamples(pdoc *doc.Package, export, method string) []*doc.Example {
|
||||
if "package" == export {
|
||||
return pdoc.Examples
|
||||
}
|
||||
for _, f := range pdoc.Funcs {
|
||||
if f.Name == export {
|
||||
return f.Examples
|
||||
}
|
||||
}
|
||||
for _, t := range pdoc.Types {
|
||||
for _, f := range t.Funcs {
|
||||
if f.Name == export {
|
||||
return f.Examples
|
||||
}
|
||||
}
|
||||
if t.Name == export {
|
||||
if method == "" {
|
||||
return t.Examples
|
||||
}
|
||||
for _, m := range t.Methods {
|
||||
if method == m.Name {
|
||||
return m.Examples
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func findExample(pdoc *doc.Package, export, method, name string) *doc.Example {
|
||||
for _, e := range findExamples(pdoc, export, method) {
|
||||
if name == e.Name {
|
||||
return e
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func playURL(pdoc *doc.Package, export string, name string) (string, error) {
|
||||
var method string
|
||||
if i := strings.Index(export, "-"); i > 0 {
|
||||
method = export[i+1:]
|
||||
export = export[:i]
|
||||
}
|
||||
if e := findExample(pdoc, export, method, name); e != nil && e.Play != "" {
|
||||
resp, err := httpClient.Post("http://play.golang.org/share", "text/plain", strings.NewReader(e.Play))
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
p, err := ioutil.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return fmt.Sprintf("http://play.golang.org/p/%s", p), nil
|
||||
}
|
||||
return "", &web.Error{Status: web.StatusNotFound}
|
||||
}
|
||||
|
||||
func readPlayScript(dir string) (script []byte, err error) {
|
||||
for _, name := range []string{"jquery.js", "playground.js", "play.js"} {
|
||||
p, err := ioutil.ReadFile(filepath.Join(dir, "js", name))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
script = append(script, p...)
|
||||
}
|
||||
return
|
||||
}
|
|
@ -0,0 +1,481 @@
|
|||
// Copyright 2011 Gary Burd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/md5"
|
||||
"encoding/hex"
|
||||
"errors"
|
||||
"fmt"
|
||||
godoc "go/doc"
|
||||
htemp "html/template"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"net/url"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"strings"
|
||||
"sync"
|
||||
ttemp "text/template"
|
||||
"time"
|
||||
|
||||
"code.google.com/p/go.talks/pkg/present"
|
||||
|
||||
"github.com/garyburd/gddo/doc"
|
||||
"github.com/garyburd/indigo/web"
|
||||
)
|
||||
|
||||
// escapePath percent-encodes s for use as the path component of a URL.
func escapePath(s string) string {
	escaped := url.URL{Path: s}
	return escaped.String()
}
|
||||
|
||||
func sourceLinkFn(pdoc *doc.Package, pos doc.Pos, text string) htemp.HTML {
|
||||
text = htemp.HTMLEscapeString(text)
|
||||
if pos.Line == 0 {
|
||||
return htemp.HTML(text)
|
||||
}
|
||||
u := fmt.Sprintf(pdoc.LineFmt, pdoc.Files[pos.File].URL, pos.Line)
|
||||
u = htemp.HTMLEscapeString(u)
|
||||
return htemp.HTML(fmt.Sprintf(`<a href="%s">%s</a>`, u, text))
|
||||
}
|
||||
|
||||
var (
	// staticMutex guards staticHash, which caches the hex MD5 digest of
	// each static asset for cache-busting version query parameters.
	staticMutex sync.RWMutex
	staticHash  = make(map[string]string)
)
|
||||
|
||||
func fileHashFn(p string) (string, error) {
|
||||
staticMutex.RLock()
|
||||
h, ok := staticHash[p]
|
||||
staticMutex.RUnlock()
|
||||
|
||||
if !ok {
|
||||
b, err := ioutil.ReadFile(filepath.Join(*assetsDir, filepath.FromSlash(p)))
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
m := md5.New()
|
||||
m.Write(b)
|
||||
h = hex.EncodeToString(m.Sum(nil))
|
||||
|
||||
staticMutex.Lock()
|
||||
staticHash[p] = h
|
||||
staticMutex.Unlock()
|
||||
}
|
||||
return h, nil
|
||||
}
|
||||
|
||||
func staticFileFn(p string) htemp.URL {
|
||||
h, err := fileHashFn("static/" + p)
|
||||
if err != nil {
|
||||
log.Printf("WARNING could not read static file %s, %v", p, err)
|
||||
return htemp.URL("/-/static/" + p)
|
||||
}
|
||||
return htemp.URL("/-/static/" + p + "?v=" + h)
|
||||
}
|
||||
|
||||
// mapFn builds a map from alternating key/value arguments for use inside
// templates: {{map "k1" v1 "k2" v2}}. The even-indexed arguments are the
// keys and must be strings.
func mapFn(kvs ...interface{}) (map[string]interface{}, error) {
	if len(kvs)%2 != 0 {
		// Error strings follow Go convention: lowercase, no trailing period
		// (the originals ended with "." and the second was misleading).
		return nil, errors.New("map requires an even number of arguments")
	}
	m := make(map[string]interface{}, len(kvs)/2)
	for i := 0; i < len(kvs); i += 2 {
		k, ok := kvs[i].(string)
		if !ok {
			return nil, errors.New("map keys must be strings")
		}
		m[k] = kvs[i+1]
	}
	return m, nil
}
|
||||
|
||||
// relativePathFn returns path relative to parentPath when path is nested
// beneath it, and path unchanged otherwise. parentPath is interface{}
// because templates may pass a non-string (e.g. nil) value.
func relativePathFn(path string, parentPath interface{}) string {
	p, ok := parentPath.(string)
	if !ok || p == "" {
		return path
	}
	// BUG FIX: the original sliced path[len(p)+1:] whenever p was a plain
	// string prefix, which panicked when path == p (slice bound past the
	// end) and mangled paths such as "a/bc/d" under parent "a/b". Require
	// an explicit "/" boundary instead.
	if strings.HasPrefix(path, p+"/") {
		return path[len(p)+1:]
	}
	return path
}
|
||||
|
||||
// importPathFn formats an import path as HTML, inserting zero width space
// characters after "/" in long paths so browsers can line-break them.
func importPathFn(path string) htemp.HTML {
	const zeroWidthSpace = "\u200b"
	escaped := htemp.HTMLEscapeString(path)
	if len(escaped) > 45 {
		// Allow long import paths to break following "/".
		escaped = strings.Replace(escaped, "/", "/"+zeroWidthSpace, -1)
	}
	return htemp.HTML(escaped)
}
|
||||
|
||||
// relativeTime formats the time t in nanoseconds as a human readable relative
|
||||
// time.
|
||||
func relativeTime(t time.Time) string {
|
||||
const day = 24 * time.Hour
|
||||
d := time.Now().Sub(t)
|
||||
switch {
|
||||
case d < time.Second:
|
||||
return "just now"
|
||||
case d < 2*time.Second:
|
||||
return "one second ago"
|
||||
case d < time.Minute:
|
||||
return fmt.Sprintf("%d seconds ago", d/time.Second)
|
||||
case d < 2*time.Minute:
|
||||
return "one minute ago"
|
||||
case d < time.Hour:
|
||||
return fmt.Sprintf("%d minutes ago", d/time.Minute)
|
||||
case d < 2*time.Hour:
|
||||
return "one hour ago"
|
||||
case d < day:
|
||||
return fmt.Sprintf("%d hours ago", d/time.Hour)
|
||||
case d < 2*day:
|
||||
return "one day ago"
|
||||
}
|
||||
return fmt.Sprintf("%d days ago", d/day)
|
||||
}
|
||||
|
||||
var (
	// h3Pat matches the <h3 and </h3 tags emitted by go/doc so commentFn
	// can demote them to h4 within the page's heading hierarchy.
	h3Pat = regexp.MustCompile(`</?h3`)
	// rfcPat matches "RFC nnnn" references; the number is captured so it
	// can be linked to tools.ietf.org.
	rfcPat = regexp.MustCompile(`RFC\s+(\d{3,4})`)
	// packagePat matches "package <path>" phrases; the path is captured so
	// it can be linked to the package's documentation page.
	packagePat = regexp.MustCompile(`\s+package\s+([-a-z0-9]\S+)`)
)
|
||||
|
||||
func replaceAll(src []byte, re *regexp.Regexp, replace func(out, src []byte, m []int) []byte) []byte {
|
||||
var out []byte
|
||||
for len(src) > 0 {
|
||||
m := re.FindSubmatchIndex(src)
|
||||
if m == nil {
|
||||
break
|
||||
}
|
||||
out = append(out, src[:m[0]]...)
|
||||
out = replace(out, src, m)
|
||||
src = src[m[1]:]
|
||||
}
|
||||
if out == nil {
|
||||
return src
|
||||
}
|
||||
return append(out, src...)
|
||||
}
|
||||
|
||||
// commentFn formats a source code comment as HTML. After go/doc renders
// the text, three fix-ups are applied: <h3> headings are demoted to <h4>
// to fit the page's heading hierarchy, "RFC nnnn" references become links
// to tools.ietf.org, and "package <path>" phrases become links to the
// package page when the path looks valid.
func commentFn(v string) htemp.HTML {
	var buf bytes.Buffer
	godoc.ToHTML(&buf, v, nil)
	p := buf.Bytes()
	// Replace the trailing '3' of each matched "<h3"/"</h3" with '4'.
	p = replaceAll(p, h3Pat, func(out, src []byte, m []int) []byte {
		out = append(out, src[m[0]:m[1]-1]...)
		out = append(out, '4')
		return out
	})
	// Wrap the whole "RFC nnnn" text in a link built from the captured
	// number (submatch indices m[2]:m[3]).
	p = replaceAll(p, rfcPat, func(out, src []byte, m []int) []byte {
		out = append(out, `<a href="http://tools.ietf.org/html/rfc`...)
		out = append(out, src[m[2]:m[3]]...)
		out = append(out, `">`...)
		out = append(out, src[m[0]:m[1]]...)
		out = append(out, `</a>`...)
		return out
	})
	// Link "package <path>" phrases. Trailing sentence punctuation is
	// trimmed from the captured path before validation so text such as
	// "package foo." still links; the trimmed characters are re-emitted
	// after the anchor.
	p = replaceAll(p, packagePat, func(out, src []byte, m []int) []byte {
		path := bytes.TrimRight(src[m[2]:m[3]], ".!?:")
		if !doc.IsValidPath(string(path)) {
			return append(out, src[m[0]:m[1]]...)
		}
		out = append(out, src[m[0]:m[2]]...)
		out = append(out, `<a href="/`...)
		out = append(out, path...)
		out = append(out, `">`...)
		out = append(out, path...)
		out = append(out, `</a>`...)
		return out
	})
	return htemp.HTML(p)
}
|
||||
|
||||
// commentTextFn formats a source code comment as text.
|
||||
func commentTextFn(v string) string {
|
||||
const indent = " "
|
||||
var buf bytes.Buffer
|
||||
godoc.ToText(&buf, v, indent, "\t", 80-2*len(indent))
|
||||
p := buf.Bytes()
|
||||
return string(p)
|
||||
}
|
||||
|
||||
// period is used by codeFn to find the last dot in a qualified identifier.
var period = []byte{'.'}
|
||||
|
||||
// codeFn renders the annotated source code c as HTML. Text between
// annotations is HTML-escaped verbatim; each annotation span is wrapped
// according to its kind: package/export references become anchors,
// comments get a "com" span, and declarations get an id'd span (prefixed
// with the type name when the declaration belongs to typ).
func codeFn(c doc.Code, typ *doc.Type) htemp.HTML {
	var buf bytes.Buffer
	last := 0
	src := []byte(c.Text)
	for _, a := range c.Annotations {
		// Emit the unannotated text preceding this annotation.
		htemp.HTMLEscape(&buf, src[last:a.Pos])
		switch a.Kind {
		case doc.PackageLinkAnnotation:
			p := "/" + c.Paths[a.PathIndex]
			buf.WriteString(`<a href="`)
			buf.WriteString(escapePath(p))
			buf.WriteString(`">`)
			htemp.HTMLEscape(&buf, src[a.Pos:a.End])
			buf.WriteString(`</a>`)
		case doc.ExportLinkAnnotation, doc.BuiltinAnnotation:
			var p string
			if a.Kind == doc.BuiltinAnnotation {
				p = "/builtin"
			} else if a.PathIndex >= 0 {
				p = "/" + c.Paths[a.PathIndex]
			}
			// The fragment is the identifier after the last "." in the
			// span (e.g. "Buffer" in "bytes.Buffer").
			n := src[a.Pos:a.End]
			n = n[bytes.LastIndex(n, period)+1:]
			buf.WriteString(`<a href="`)
			buf.WriteString(escapePath(p))
			buf.WriteByte('#')
			buf.WriteString(escapePath(string(n)))
			buf.WriteString(`">`)
			htemp.HTMLEscape(&buf, src[a.Pos:a.End])
			buf.WriteString(`</a>`)
		case doc.CommentAnnotation:
			buf.WriteString(`<span class="com">`)
			htemp.HTMLEscape(&buf, src[a.Pos:a.End])
			buf.WriteString(`</span>`)
		case doc.AnchorAnnotation:
			buf.WriteString(`<span id="`)
			if typ != nil {
				// Method/field anchors are qualified by the type name.
				htemp.HTMLEscape(&buf, []byte(typ.Name))
				buf.WriteByte('.')
			}
			htemp.HTMLEscape(&buf, src[a.Pos:a.End])
			buf.WriteString(`">`)
			htemp.HTMLEscape(&buf, src[a.Pos:a.End])
			buf.WriteString(`</span>`)
		default:
			htemp.HTMLEscape(&buf, src[a.Pos:a.End])
		}
		last = int(a.End)
	}
	htemp.HTMLEscape(&buf, src[last:])
	return htemp.HTML(buf.String())
}
|
||||
|
||||
func pageNameFn(pdoc *doc.Package) string {
|
||||
if pdoc.Name != "" && !pdoc.IsCmd {
|
||||
return pdoc.Name
|
||||
}
|
||||
_, name := path.Split(pdoc.ImportPath)
|
||||
return name
|
||||
}
|
||||
|
||||
func hasExamplesFn(pdoc *doc.Package) bool {
|
||||
if len(pdoc.Examples) > 0 {
|
||||
return true
|
||||
}
|
||||
for _, f := range pdoc.Funcs {
|
||||
if len(f.Examples) > 0 {
|
||||
return true
|
||||
}
|
||||
}
|
||||
for _, t := range pdoc.Types {
|
||||
if len(t.Examples) > 0 {
|
||||
return true
|
||||
}
|
||||
for _, f := range t.Funcs {
|
||||
if len(f.Examples) > 0 {
|
||||
return true
|
||||
}
|
||||
}
|
||||
for _, m := range t.Methods {
|
||||
if len(m.Examples) > 0 {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// crumb is a single element of a breadcrumb trail.
// NOTE(review): crumb is not referenced by breadcrumbsFn in this file
// (which builds HTML directly); presumably used by a template — confirm,
// or consider removing.
type crumb struct {
	Name string // displayed path segment
	URL  string // link target for the segment
	Sep  bool   // whether a separator precedes this segment — TODO confirm
}
|
||||
|
||||
// breadcrumbsFn renders the package import path as a breadcrumb trail: one
// segment per path element, each linked to its package page. The final
// segment is normally unlinked (it is the current page), except on the
// imports/importers/graph/interface pages where it links back to the
// package itself. An empty string is returned when the import path does
// not start with the project root.
func breadcrumbsFn(pdoc *doc.Package, templateName string) htemp.HTML {
	if !strings.HasPrefix(pdoc.ImportPath, pdoc.ProjectRoot) {
		return ""
	}
	var buf bytes.Buffer
	// i:j spans the current segment of ImportPath. The first segment covers
	// the entire project root, or the first path element when the root is
	// empty (e.g. the standard library).
	i := 0
	j := len(pdoc.ProjectRoot)
	if j == 0 {
		j = strings.IndexRune(pdoc.ImportPath, '/')
		if j < 0 {
			j = len(pdoc.ImportPath)
		}
	}
	for {
		if i != 0 {
			buf.WriteString(`<span class="muted">/</span>`)
		}
		// Link every segment except the last; the last is linked only on
		// the listed auxiliary pages.
		link := j < len(pdoc.ImportPath) ||
			templateName == "imports.html" ||
			templateName == "importers.html" ||
			templateName == "graph.html" ||
			templateName == "interface.html"
		if link {
			buf.WriteString(`<a href="/`)
			buf.WriteString(escapePath(pdoc.ImportPath[:j]))
			buf.WriteString(`">`)
		} else {
			buf.WriteString(`<span class="muted">`)
		}
		buf.WriteString(htemp.HTMLEscapeString(pdoc.ImportPath[i:j]))
		if link {
			buf.WriteString("</a>")
		} else {
			buf.WriteString("</span>")
		}
		// Advance past the '/' that terminated this segment.
		i = j + 1
		if i >= len(pdoc.ImportPath) {
			break
		}
		j = strings.IndexRune(pdoc.ImportPath[i:], '/')
		if j < 0 {
			j = len(pdoc.ImportPath)
		} else {
			j += i
		}
	}
	return htemp.HTML(buf.String())
}
|
||||
|
||||
// gaAccountFn returns the Google Analytics account ID from the loaded
// secrets for use in page templates.
func gaAccountFn() string {
	return secrets.GAAccount
}
|
||||
|
||||
// noteTitleFn converts a note marker such as "TODO" or "BUG" to title case
// for display.
func noteTitleFn(s string) string {
	lowered := strings.ToLower(s)
	return strings.Title(lowered)
}
|
||||
|
||||
// htmlCommentFn wraps s in an HTML comment. The input is not escaped, so
// it must come from a trusted source.
func htmlCommentFn(s string) htemp.HTML {
	comment := strings.Join([]string{"<!--", s, "-->"}, " ")
	return htemp.HTML(comment)
}
|
||||
|
||||
// contentTypes maps a template file extension to the Content-Type header
// sent with the rendered response; unknown extensions fall back to plain
// text in executeTemplate.
var contentTypes = map[string]string{
	".html": "text/html; charset=utf-8",
	".txt":  "text/plain; charset=utf-8",
}
|
||||
|
||||
// executeTemplate renders the registered template name with data, writing
// status and header — plus a Content-Type derived from the template's file
// extension — to resp. An error is returned when the template is unknown.
func executeTemplate(resp web.Response, name string, status int, header web.Header, data interface{}) error {
	contentType, ok := contentTypes[path.Ext(name)]
	if !ok {
		// Unknown extensions fall back to plain text.
		contentType = "text/plain; charset=utf-8"
	}
	t := templates[name]
	if t == nil {
		return fmt.Errorf("Template %s not found", name)
	}
	if header == nil {
		header = make(web.Header)
	}
	header.Set(web.HeaderContentType, contentType)
	w := resp.Start(status, header)
	return t.Execute(w, data)
}
|
||||
|
||||
// templates maps a template name (the first file name in its set) to the
// parsed template; HTML and text templates both satisfy this minimal
// Execute interface.
var templates = map[string]interface {
	Execute(io.Writer, interface{}) error
}{}
|
||||
|
||||
func joinTemplateDir(base string, files []string) []string {
|
||||
result := make([]string, len(files))
|
||||
for i := range files {
|
||||
result[i] = filepath.Join(base, "templates", files[i])
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// parseHTMLTemplates parses each set of HTML template files found under
// the assets directory's templates subdirectory. The first file name in a
// set is the key under which the set's "ROOT" template is registered in
// the package-level templates map.
func parseHTMLTemplates(sets [][]string) error {
	for _, set := range sets {
		// templateName is captured per-iteration by the "templateName"
		// template function below, so each set reports its own name.
		templateName := set[0]
		t := htemp.New("")
		t.Funcs(htemp.FuncMap{
			"sourceLink":        sourceLinkFn,
			"htmlComment":       htmlCommentFn,
			"breadcrumbs":       breadcrumbsFn,
			"comment":           commentFn,
			"code":              codeFn,
			"equal":             reflect.DeepEqual,
			"hasExamples":       hasExamplesFn,
			"gaAccount":         gaAccountFn,
			"importPath":        importPathFn,
			"isValidImportPath": doc.IsValidPath,
			"map":               mapFn,
			"noteTitle":         noteTitleFn,
			"pageName":          pageNameFn,
			"relativePath":      relativePathFn,
			"staticFile":        staticFileFn,
			"fileHash":          fileHashFn,
			"templateName":      func() string { return templateName },
		})
		if _, err := t.ParseFiles(joinTemplateDir(*assetsDir, set)...); err != nil {
			return err
		}
		// Every page set must define a "ROOT" template as its entry point.
		t = t.Lookup("ROOT")
		if t == nil {
			return fmt.Errorf("ROOT template not found in %v", set)
		}
		templates[set[0]] = t
	}
	return nil
}
|
||||
|
||||
// parseTextTemplates parses each set of plain-text template files under
// the assets directory, registering each set's "ROOT" template in the
// package-level templates map keyed by the set's first file name.
func parseTextTemplates(sets [][]string) error {
	for _, set := range sets {
		t := ttemp.New("")
		t.Funcs(ttemp.FuncMap{
			"comment": commentTextFn,
		})
		if _, err := t.ParseFiles(joinTemplateDir(*assetsDir, set)...); err != nil {
			return err
		}
		// Every set must define a "ROOT" template as its entry point.
		t = t.Lookup("ROOT")
		if t == nil {
			return fmt.Errorf("ROOT template not found in %v", set)
		}
		templates[set[0]] = t
	}
	return nil
}
|
||||
|
||||
// presentTemplates maps a presentation file extension (".article",
// ".slide") to the parsed present template used to render it.
var presentTemplates = make(map[string]*htemp.Template)
|
||||
|
||||
// parsePresentTemplates parses the present (talks) templates. For each
// set, the remaining file names (set[1:]) are parsed from the present
// directory and the resulting "root" template is registered under the
// extension given by set[0].
func parsePresentTemplates(sets [][]string) error {
	for _, set := range sets {
		t := present.Template()
		if _, err := t.ParseFiles(joinTemplateDir(*presentDir, set[1:])...); err != nil {
			return err
		}
		// present templates use a lowercase "root" entry point.
		t = t.Lookup("root")
		if t == nil {
			return fmt.Errorf("root template not found in %v", set)
		}
		presentTemplates[set[0]] = t
	}
	return nil
}
|
Загрузка…
Ссылка в новой задаче