diff --git a/database/database.go b/database/database.go index 0d3f114..117b5df 100644 --- a/database/database.go +++ b/database/database.go @@ -170,12 +170,6 @@ var putScript = redis.NewScript(0, ` redis.call('SREM', 'index:' .. term, id) elseif x == 2 then redis.call('SADD', 'index:' .. term, id) - if string.sub(term, 1, 7) == 'import:' then - local import = string.sub(term, 8) - if redis.call('HEXISTS', 'ids', import) == 0 and redis.call('SISMEMBER', 'badCrawl', import) == 0 then - redis.call('SADD', 'newCrawl', import) - end - end end end @@ -190,6 +184,15 @@ var putScript = redis.NewScript(0, ` return redis.call('HMSET', 'pkg:' .. id, 'path', path, 'synopsis', synopsis, 'score', score, 'gob', gob, 'terms', terms, 'etag', etag, 'kind', kind) `) +var addCrawlScript = redis.NewScript(0, ` + for i=1,#ARGV do + local pkg = ARGV[i] + if redis.call('HEXISTS', 'ids', pkg) == 0 and redis.call('SISMEMBER', 'badCrawl', pkg) == 0 then + redis.call('SADD', 'newCrawl', pkg) + end + end +`) + // Put adds the package documentation to the database. func (db *Database) Put(pdoc *doc.Package, nextCrawl time.Time) error { c := db.Pool.Get() @@ -204,7 +207,7 @@ func (db *Database) Put(pdoc *doc.Package, nextCrawl time.Time) error { } // Truncate large documents. - if gobBuf.Len() > 700000 { + if gobBuf.Len() > 200000 { pdocNew := *pdoc pdoc = &pdocNew pdoc.Truncated = true @@ -236,7 +239,45 @@ func (db *Database) Put(pdoc *doc.Package, nextCrawl time.Time) error { if !nextCrawl.IsZero() { t = nextCrawl.Unix() } + _, err = putScript.Do(c, pdoc.ImportPath, pdoc.Synopsis, score, gobBytes, strings.Join(terms, " "), pdoc.Etag, kind, t) + if err != nil { + return err + } + + if nextCrawl.IsZero() { + // Skip crawling related packages if this is not a full save. 
+ return nil + } + + paths := make(map[string]bool) + for _, p := range pdoc.Imports { + if doc.IsValidRemotePath(p) { + paths[p] = true + } + } + for _, p := range pdoc.TestImports { + if doc.IsValidRemotePath(p) { + paths[p] = true + } + } + for _, p := range pdoc.XTestImports { + if doc.IsValidRemotePath(p) { + paths[p] = true + } + } + if pdoc.ImportPath != pdoc.ProjectRoot && pdoc.ProjectRoot != "" { + paths[pdoc.ProjectRoot] = true + } + for _, p := range pdoc.Subdirectories { + paths[pdoc.ImportPath+"/"+p] = true + } + + args := make([]interface{}, 0, len(paths)) + for p := range paths { + args = append(args, p) + } + _, err = addCrawlScript.Do(c, args...) return err } @@ -456,7 +497,6 @@ var deleteScript = redis.NewScript(0, ` end redis.call('ZREM', 'nextCrawl', id) - redis.call('SREM', 'badCrawl', path) redis.call('SREM', 'newCrawl', path) redis.call('ZREM', 'popular', id) redis.call('DEL', 'pkg:' .. id) @@ -670,6 +710,7 @@ type PackageInfo struct { Pkgs []Package Score float64 Kind string + Size int } // Do executes function f for each document in the database. 
@@ -681,18 +722,20 @@ func (db *Database) Do(f func(*PackageInfo) error) error { return err } for _, key := range keys { - values, err := redis.Values(c.Do("HMGET", key, "gob", "score", "kind", "path")) + values, err := redis.Values(c.Do("HMGET", key, "gob", "score", "kind", "path", "terms", "synopsis")) if err != nil { return err } var ( - pi PackageInfo - p []byte - path string + pi PackageInfo + p []byte + path string + terms string + synopsis string ) - if _, err := redis.Scan(values, &p, &pi.Score, &pi.Kind, &path); err != nil { + if _, err := redis.Scan(values, &p, &pi.Score, &pi.Kind, &path, &terms, &synopsis); err != nil { return err } @@ -700,6 +743,8 @@ func (db *Database) Do(f func(*PackageInfo) error) error { continue } + pi.Size = len(path) + len(p) + len(terms) + len(synopsis) + p, err = snappy.Decode(nil, p) if err != nil { return fmt.Errorf("snappy decoding %s: %v", path, err) } @@ -810,7 +855,7 @@ func (db *Database) GetGob(key string, value interface{}) error { return gob.NewDecoder(bytes.NewReader(p)).Decode(value) } -var incrementPopularScore = redis.NewScript(0, ` +var incrementPopularScoreScript = redis.NewScript(0, ` local path = ARGV[1] local n = ARGV[2] local t = ARGV[3] @@ -832,20 +877,21 @@ var incrementPopularScore = redis.NewScript(0, ` const popularHalfLife = time.Hour * 24 * 7 -func scaledTime(t time.Time) float64 { - const lambda = math.Ln2 / float64(popularHalfLife) - return lambda * float64(t.Sub(time.Unix(1257894000, 0))) -} - -func (db *Database) IncrementPopularScore(path string) error { +func (db *Database) incrementPopularScoreInternal(path string, delta float64, t time.Time) error { // nt = n0 * math.Exp(-lambda * t) // lambda = math.Ln2 / thalf c := db.Pool.Get() defer c.Close() - _, err := incrementPopularScore.Do(c, path, 1, scaledTime(time.Now())) + const lambda = math.Ln2 / float64(popularHalfLife) + scaledTime := lambda * float64(t.Sub(time.Unix(1257894000, 0))) + _, err := incrementPopularScoreScript.Do(c, path, delta, 
scaledTime) return err } +func (db *Database) IncrementPopularScore(path string) error { + return db.incrementPopularScoreInternal(path, 1, time.Now()) +} + var popularScript = redis.NewScript(0, ` local stop = ARGV[1] local ids = redis.call('ZREVRANGE', 'popular', '0', stop) @@ -892,26 +938,59 @@ func (db *Database) PopularWithScores() ([]Package, error) { return pkgs, err } -func (db *Database) GetNewCrawl() (string, error) { +func (db *Database) PopNewCrawl() (string, bool, error) { c := db.Pool.Get() defer c.Close() - v, err := redis.String(c.Do("SRANDMEMBER", "newCrawl")) - if err == redis.ErrNil { + + var subdirs []Package + + path, err := redis.String(c.Do("SPOP", "newCrawl")) + switch { + case err == redis.ErrNil: err = nil + path = "" + case err == nil: + subdirs, err = db.getSubdirs(c, path, nil) } - return v, err + return path, len(subdirs) > 0, err } -var setBadCrawlScript = redis.NewScript(0, ` - local path = ARGV[1] - if redis.call('SREM', 'newCrawl', path) == 1 then - redis.call('SADD', 'badCrawl', path) - end -`) - -func (db *Database) SetBadCrawl(path string) error { +func (db *Database) AddBadCrawl(path string) error { c := db.Pool.Get() defer c.Close() - _, err := setBadCrawlScript.Do(c, path) + _, err := c.Do("SADD", "badCrawl", path) return err } + +var incrementCounterScript = redis.NewScript(0, ` + local key = 'counter:' .. 
ARGV[1] + local n = tonumber(ARGV[2]) + local t = tonumber(ARGV[3]) + local exp = tonumber(ARGV[4]) + + local counter = redis.call('GET', key) + if counter then + counter = cjson.decode(counter) + n = n + counter.n * math.exp(counter.t - t) + end + + redis.call('SET', key, cjson.encode({n = n; t = t})) + redis.call('EXPIRE', key, exp) + return tostring(n) +`) + +const counterHalflife = time.Hour + +func (db *Database) incrementCounterInternal(key string, delta float64, t time.Time) (float64, error) { + // nt = n0 * math.Exp(-lambda * t) + // lambda = math.Ln2 / thalf + c := db.Pool.Get() + defer c.Close() + const lambda = math.Ln2 / float64(counterHalflife) + scaledTime := lambda * float64(t.Sub(time.Unix(1257894000, 0))) + return redis.Float64(incrementCounterScript.Do(c, key, delta, scaledTime, (4*counterHalflife)/time.Second)) +} + +func (db *Database) IncrementCounter(key string, delta float64) (float64, error) { + return db.incrementCounterInternal(key, delta, time.Now()) +} diff --git a/database/database_test.go b/database/database_test.go index 669f456..5dcb46a 100644 --- a/database/database_test.go +++ b/database/database_test.go @@ -193,6 +193,8 @@ func TestPutGet(t *testing.T) { } } +const epsilon = 0.000001 + func TestPopular(t *testing.T) { db := newDB(t) defer closeDB(db) @@ -207,7 +209,7 @@ func TestPopular(t *testing.T) { for id := 12; id >= 0; id-- { path := "github.com/user/repo/p" + strconv.Itoa(id) c.Do("HSET", "ids", path, id) - _, err := incrementPopularScore.Do(c, path, score, scaledTime(now)) + err := db.incrementPopularScoreInternal(path, score, now) if err != nil { t.Fatal(err) } @@ -227,8 +229,39 @@ func TestPopular(t *testing.T) { } for i := 3; i < len(values); i += 2 { s, _ := redis.Float64(values[i], nil) - if math.Abs(score-s)/score > 0.0001 { + if math.Abs(score-s)/score > epsilon { t.Errorf("Bad score, score[1]=%g, score[%d]=%g", score, i, s) } } } + +func TestCounter(t *testing.T) { + db := newDB(t) + defer closeDB(db) + + const key 
= "127.0.0.1" + + now := time.Now() + n, err := db.incrementCounterInternal(key, 1, now) + if err != nil { + t.Fatal(err) + } + if math.Abs(n-1.0) > epsilon { + t.Errorf("1: got n=%g, want 1", n) + } + n, err = db.incrementCounterInternal(key, 1, now) + if err != nil { + t.Fatal(err) + } + if math.Abs(n-2.0)/2.0 > epsilon { + t.Errorf("2: got n=%g, want 2", n) + } + now = now.Add(counterHalflife) + n, err = db.incrementCounterInternal(key, 1, now) + if err != nil { + t.Fatal(err) + } + if math.Abs(n-2.0)/2.0 > epsilon { + t.Errorf("3: got n=%g, want 2", n) + } +} diff --git a/doc/bitbucket.go b/doc/bitbucket.go index 640f70d..0d84352 100644 --- a/doc/bitbucket.go +++ b/doc/bitbucket.go @@ -63,18 +63,19 @@ func getBitbucketDoc(client *http.Client, match map[string]string, savedEtag str return nil, ErrNotModified } - var directory struct { - Files []struct { + var contents struct { + Directories []string + Files []struct { Path string } } - if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/src/{tag}{dir}/", match), nil, &directory); err != nil { + if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/src/{tag}{dir}/", match), nil, &contents); err != nil { return nil, err } var files []*source - for _, f := range directory.Files { + for _, f := range contents.Files { _, name := path.Split(f.Path) if isDocFile(name) { files = append(files, &source{ @@ -91,14 +92,15 @@ func getBitbucketDoc(client *http.Client, match map[string]string, savedEtag str b := builder{ pdoc: &Package{ - LineFmt: "%s#cl-%d", - ImportPath: match["originalImportPath"], - ProjectRoot: expand("bitbucket.org/{owner}/{repo}", match), - ProjectName: match["repo"], - ProjectURL: expand("https://bitbucket.org/{owner}/{repo}/", match), - BrowseURL: expand("https://bitbucket.org/{owner}/{repo}/src/{tag}{dir}", match), - Etag: etag, - VCS: match["vcs"], + LineFmt: "%s#cl-%d", + ImportPath: match["originalImportPath"], + 
ProjectRoot: expand("bitbucket.org/{owner}/{repo}", match), + ProjectName: match["repo"], + ProjectURL: expand("https://bitbucket.org/{owner}/{repo}/", match), + BrowseURL: expand("https://bitbucket.org/{owner}/{repo}/src/{tag}{dir}", match), + Etag: etag, + VCS: match["vcs"], + Subdirectories: contents.Directories, }, } diff --git a/doc/builder.go b/doc/builder.go index e63ec04..c114ceb 100644 --- a/doc/builder.go +++ b/doc/builder.go @@ -16,6 +16,7 @@ package doc import ( "bytes" + "errors" "go/ast" "go/build" "go/doc" @@ -327,19 +328,22 @@ var packageNamePats = []*regexp.Regexp{ func simpleImporter(imports map[string]*ast.Object, path string) (*ast.Object, error) { pkg := imports[path] - if pkg == nil { - // Guess the package name without importing it. - for _, pat := range packageNamePats { - m := pat.FindStringSubmatch(path) - if m != nil { - pkg = ast.NewObj(ast.Pkg, m[1]) - pkg.Data = ast.NewScope(nil) - imports[path] = pkg - break - } + if pkg != nil { + return pkg, nil + } + + // Guess the package name without importing it. 
+ for _, pat := range packageNamePats { + m := pat.FindStringSubmatch(path) + if m != nil { + pkg = ast.NewObj(ast.Pkg, m[1]) + pkg.Data = ast.NewScope(nil) + imports[path] = pkg + return pkg, nil } } - return pkg, nil + + return nil, errors.New("package not found") } type File struct { diff --git a/doc/github.go b/doc/github.go index 8f12a40..e689365 100644 --- a/doc/github.go +++ b/doc/github.go @@ -17,7 +17,6 @@ package doc import ( "net/http" "net/url" - "path" "regexp" "strings" "time" @@ -73,49 +72,91 @@ func getGitHubDoc(client *http.Client, match map[string]string, savedEtag string return nil, ErrNotModified } - var tree struct { - Tree []struct { - Url string - Path string - Type string - } - Url string + var contents []*struct { + Type string + Name string + Git_URL string + HTML_URL string } - err = httpGetJSON(client, expand("https://api.github.com/repos/{owner}/{repo}/git/trees/{tag}?recursive=1&{cred}", match), nil, &tree) + err = httpGetJSON(client, expand("https://api.github.com/repos/{owner}/{repo}/contents{dir}?ref={tag}&{cred}", match), nil, &contents) if err != nil { return nil, err } - // Because Github API URLs are case-insensitive, we need to check that the - // userRepo returned from Github matches the one that we are requesting. - if !strings.HasPrefix(tree.Url, expand("https://api.github.com/repos/{owner}/{repo}/", match)) { + if len(contents) == 0 { + return nil, NotFoundError{"No files in directory."} + } + + // Because Github API URLs are case-insensitive, we check that the owner + // and repo returned from Github matches the one that we are requesting. 
+ if !strings.HasPrefix(contents[0].Git_URL, expand("https://api.github.com/repos/{owner}/{repo}/", match)) { return nil, NotFoundError{"Github import path has incorrect case."} } - inTree := false - dirPrefix := match["dir"] - if dirPrefix != "" { - dirPrefix = dirPrefix[1:] + "/" - } var files []*source - for _, node := range tree.Tree { - if node.Type != "blob" || !strings.HasPrefix(node.Path, dirPrefix) { - continue - } - inTree = true - if d, f := path.Split(node.Path); d == dirPrefix && isDocFile(f) { + var subdirs []string + + for _, item := range contents { + switch { + case item.Type == "dir": + if isValidPathElement(item.Name) { + subdirs = append(subdirs, item.Name) + } + case isDocFile(item.Name): files = append(files, &source{ - name: f, - browseURL: expand("https://github.com/{owner}/{repo}/blob/{tag}/{0}", match, node.Path), - rawURL: node.Url + "?" + gitHubCred, + name: item.Name, + browseURL: item.HTML_URL, + rawURL: item.Git_URL + "?" + gitHubCred, }) } } - if !inTree { - return nil, NotFoundError{"Directory tree does not contain Go files."} - } + /* + var tree struct { + Tree []struct { + Url string + Path string + Type string + } + Url string + } + + err = httpGetJSON(client, expand("https://api.github.com/repos/{owner}/{repo}/git/trees/{tag}?recursive=1&{cred}", match), nil, &tree) + if err != nil { + return nil, err + } + + // Because Github API URLs are case-insensitive, we need to check that the + // userRepo returned from Github matches the one that we are requesting. 
+ if !strings.HasPrefix(tree.Url, expand("https://api.github.com/repos/{owner}/{repo}/", match)) { + return nil, NotFoundError{"Github import path has incorrect case."} + } + + inTree := false + dirPrefix := match["dir"] + if dirPrefix != "" { + dirPrefix = dirPrefix[1:] + "/" + } + var files []*source + for _, node := range tree.Tree { + if node.Type != "blob" || !strings.HasPrefix(node.Path, dirPrefix) { + continue + } + inTree = true + if d, f := path.Split(node.Path); d == dirPrefix && isDocFile(f) { + files = append(files, &source{ + name: f, + browseURL: expand("https://github.com/{owner}/{repo}/blob/{tag}/{0}", match, node.Path), + rawURL: node.Url + "?" + gitHubCred, + }) + } + } + + if !inTree { + return nil, NotFoundError{"Directory tree does not contain Go files."} + } + */ if err := fetchFiles(client, files, gitHubRawHeader); err != nil { return nil, err @@ -128,14 +169,15 @@ func getGitHubDoc(client *http.Client, match map[string]string, savedEtag string b := &builder{ pdoc: &Package{ - LineFmt: "%s#L%d", - ImportPath: match["originalImportPath"], - ProjectRoot: expand("github.com/{owner}/{repo}", match), - ProjectName: match["repo"], - ProjectURL: expand("https://github.com/{owner}/{repo}", match), - BrowseURL: browseURL, - Etag: commit, - VCS: "git", + LineFmt: "%s#L%d", + ImportPath: match["originalImportPath"], + ProjectRoot: expand("github.com/{owner}/{repo}", match), + ProjectName: match["repo"], + ProjectURL: expand("https://github.com/{owner}/{repo}", match), + BrowseURL: browseURL, + Etag: commit, + VCS: "git", + Subdirectories: subdirs, }, } diff --git a/doc/google.go b/doc/google.go index 5e70227..f6ccde0 100644 --- a/doc/google.go +++ b/doc/google.go @@ -26,7 +26,7 @@ var ( googleRepoRe = regexp.MustCompile(`id="checkoutcmd">(hg|git|svn)`) googleRevisionRe = regexp.MustCompile(`

(?:[^ ]+ - )?Revision *([^:]+):`) googleEtagRe = regexp.MustCompile(`^(hg|git|svn)-`) - googleFileRe = regexp.MustCompile(`
  • [a-z0-9\-]+)(:?\.(?P[a-z0-9\-]+))?(?P/[a-z0-9A-Z_.\-/]+)?$`) ) @@ -54,10 +54,17 @@ func getGoogleDoc(client *http.Client, match map[string]string, savedEtag string } } + var subdirs []string var files []*source for _, m := range googleFileRe.FindAllSubmatch(p, -1) { fname := string(m[1]) - if isDocFile(fname) { + switch { + case strings.HasSuffix(fname, "/"): + fname = fname[:len(fname)-1] + if isValidPathElement(fname) { + subdirs = append(subdirs, fname) + } + case isDocFile(fname): files = append(files, &source{ name: fname, browseURL: expand("http://code.google.com/p/{repo}/source/browse{dir}/{0}{query}", match, fname), diff --git a/doc/path.go b/doc/path.go index 40c8f3d..6ad87be 100644 --- a/doc/path.go +++ b/doc/path.go @@ -487,6 +487,10 @@ var validTLD = map[string]bool{ var validHost = regexp.MustCompile(`^[-a-z0-9]+(?:\.[-a-z0-9]+)+$`) var validPathElement = regexp.MustCompile(`^[-A-Za-z0-9~+][-A-Za-z0-9_.]*$`) +func isValidPathElement(s string) bool { + return validPathElement.MatchString(s) && s != "testdata" +} + // IsValidRemotePath returns true if importPath is structurally valid for "go get". 
func IsValidRemotePath(importPath string) bool { @@ -506,7 +510,7 @@ func IsValidRemotePath(importPath string) bool { } for _, part := range parts[1:] { - if !validPathElement.MatchString(part) || part == "testdata" { + if !isValidPathElement(part) { return false } } diff --git a/doc/vcs.go b/doc/vcs.go index 0796afa..515632a 100644 --- a/doc/vcs.go +++ b/doc/vcs.go @@ -89,7 +89,7 @@ type vcsCmd struct { } var vcsCmds = map[string]*vcsCmd{ - "git": &vcsCmd{ + "git": { schemes: []string{"http", "https", "git"}, download: downloadGit, }, @@ -216,33 +216,39 @@ func getVCSDoc(client *http.Client, match map[string]string, etagSaved string) ( } var files []*source + var subdirs []string for _, fi := range fis { - if fi.IsDir() || !isDocFile(fi.Name()) { - continue + switch { + case fi.IsDir(): + if isValidPathElement(fi.Name()) { + subdirs = append(subdirs, fi.Name()) + } + case isDocFile(fi.Name()): + b, err := ioutil.ReadFile(path.Join(d, fi.Name())) + if err != nil { + return nil, err + } + files = append(files, &source{ + name: fi.Name(), + browseURL: expand(template.fileBrowse, urlMatch, fi.Name()), + data: b, + }) } - b, err := ioutil.ReadFile(path.Join(d, fi.Name())) - if err != nil { - return nil, err - } - files = append(files, &source{ - name: fi.Name(), - browseURL: expand(template.fileBrowse, urlMatch, fi.Name()), - data: b, - }) } // Create the documentation. 
b := &builder{ pdoc: &Package{ - LineFmt: template.line, - ImportPath: match["importPath"], - ProjectRoot: expand("{repo}.{vcs}", match), - ProjectName: path.Base(match["repo"]), - ProjectURL: expand(template.project, urlMatch), - BrowseURL: "", - Etag: etag, - VCS: match["vcs"], + LineFmt: template.line, + ImportPath: match["importPath"], + ProjectRoot: expand("{repo}.{vcs}", match), + ProjectName: path.Base(match["repo"]), + ProjectURL: expand(template.project, urlMatch), + BrowseURL: "", + Etag: etag, + VCS: match["vcs"], + Subdirectories: subdirs, }, } diff --git a/gddo-admin/stats.go b/gddo-admin/stats.go index 9e3b251..9afe460 100644 --- a/gddo-admin/stats.go +++ b/gddo-admin/stats.go @@ -15,8 +15,10 @@ package main import ( + "fmt" "log" "os" + "sort" "github.com/garyburd/gddo/database" ) @@ -27,14 +29,47 @@ var statsCommand = &command{ usage: "stats", } +type itemSize struct { + path string + size int +} + +type bySizeDesc []itemSize + +func (p bySizeDesc) Len() int { return len(p) } +func (p bySizeDesc) Less(i, j int) bool { return p[i].size > p[j].size } +func (p bySizeDesc) Swap(i, j int) { p[i], p[j] = p[j], p[i] } + func stats(c *command) { if len(c.flag.Args()) != 0 { c.printUsage() os.Exit(1) } - _, err := database.New() + db, err := database.New() if err != nil { log.Fatal(err) } - log.Println("DONE") + + var packageSizes []itemSize + projectSizes := make(map[string]int) + err = db.Do(func(pi *database.PackageInfo) error { + packageSizes = append(packageSizes, itemSize{pi.PDoc.ImportPath, pi.Size}) + projectSizes[pi.PDoc.ProjectRoot] += pi.Size + return nil + }) + + var sizes []itemSize + for path, size := range projectSizes { + sizes = append(sizes, itemSize{path, size}) + } + sort.Sort(bySizeDesc(sizes)) + for _, size := range sizes { + fmt.Printf("%6d %s\n", size.size, size.path) + } + + sort.Sort(bySizeDesc(packageSizes)) + for _, size := range packageSizes { + fmt.Printf("%6d %s\n", size.size, size.path) + } + } diff --git 
a/gddo-server/assets/favicon.ico b/gddo-server/assets/favicon.ico old mode 100644 new mode 100755 index bfb709e..ee16b8e Binary files a/gddo-server/assets/favicon.ico and b/gddo-server/assets/favicon.ico differ diff --git a/gddo-server/assets/site.css b/gddo-server/assets/site.css index e87cb17..452130b 100644 --- a/gddo-server/assets/site.css +++ b/gddo-server/assets/site.css @@ -2,7 +2,7 @@ html { background-color: whitesmoke; } body { background-color: white; } h4 { margin-top: 20px; } .container { max-width: 728px; } - + #x-projnav { min-height: 20px; margin-bottom: 20px; @@ -12,10 +12,13 @@ h4 { margin-top: 20px; } } #x-footer { - padding-top: 15px; + padding-top: 14px; padding-bottom: 15px; margin-top: 5px; background-color: #eee; + border-top-style: solid; + border-top-width: 1px; + } #x-pkginfo { @@ -25,12 +28,6 @@ h4 { margin-top: 20px; } margin-bottom: 15px; } -@media screen and (min-width: 768px) { - #x-search { - float: right; - } -} - code { background-color: inherit; border: none; @@ -55,22 +52,29 @@ pre .com { color: rgb(147, 161, 161); } -a, .navbar-brand { +a, .navbar-default .navbar-brand { color: #375eab; } -.btn-default { - background-color: #375eab; +.navbar-default, #x-footer { + background-color: hsl(209, 51%, 92%); + border-color: hsl(209, 51%, 88%); } -.navbar, #x-footer { - background-color: #E0EBF5; +.navbar-default .navbar-nav > .active > a, +.navbar-default .navbar-nav > .active > a:hover, +.navbar-default .navbar-nav > .active > a:focus { + background-color: hsl(209, 51%, 88%); } -.navbar-nav > .active > a, -.navbar-nav > .active > a:hover, -.navbar-nav > .active > a:focus { - background-color: #e7e7e7; +.navbar-default .navbar-nav > li > a:hover, +.navbar-default .navbar-nav > li > a:focus { + color: #000; +} + +.panel-default > .panel-heading { + color: #333; + background-color: transparent; } #x-file .highlight { diff --git a/gddo-server/assets/site.js b/gddo-server/assets/site.js index 6cfec45..bd0ec3b 100644 --- 
a/gddo-server/assets/site.js +++ b/gddo-server/assets/site.js @@ -72,8 +72,10 @@ $(function() { $('span.timeago').timeago(); if (window.location.hash.substring(0, 9) == '#example-') { - console.log(window.location.hash.substring(1, 9)); - $('#ex-' + window.location.hash.substring(9)).addClass('in').height('auto'); + var id = '#ex-' + window.location.hash.substring(9); + console.log(id); + console.log($(id)); + $(id).addClass('in').removeClass('collapse').height('auto'); } var highlighted; diff --git a/gddo-server/assets/templates/home.html b/gddo-server/assets/templates/home.html index 5a8ed33..bf18a10 100644 --- a/gddo-server/assets/templates/home.html +++ b/gddo-server/assets/templates/home.html @@ -15,7 +15,7 @@ and more. diff --git a/gddo-server/assets/templates/layout.html b/gddo-server/assets/templates/layout.html index bb0aecf..0b08375 100644 --- a/gddo-server/assets/templates/layout.html +++ b/gddo-server/assets/templates/layout.html @@ -6,34 +6,38 @@ {{template "Head" $}} - -
    + + +
    {{template "Body" $}} -
    +