mirror of https://github.com/perkeep/perkeep.git
website: /pkg/ and /cmd/ work.
- added some doc.go for each command to make /cmd/* work nicely.
- hacked a bit of style, mostly borrowed from golang too.

Change-Id: I011d70bca2c0151a29dfd53c78a896821730d5fb
parent a96ec4e614
commit 698e8605e6
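
The /cmd/* pages come straight from each command's package comment: the commit adds a doc.go per command so the website's godoc handler has something to render. A minimal sketch of that convention, using a hypothetical command name:

// The camfoo tool is a hypothetical example of the doc.go convention:
// the package comment below is what the godoc handler renders at /cmd/camfoo.
//
// Usage:
//
//   camfoo <blobref>
package main
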
@@ -1,24 +1,3 @@
// The camget tool fetches blobs, files, and directories.
//
// Examples
//
// Writes to stdout by default:
//
// camget <blobref> // dump raw blob
// camget -contents <file-blobref> // dump file contents
//
// Like curl, lets you set output file/directory with -o:
//
// camget -o <dir> <blobref>
// (if <dir> exists and is directory, <blobref> must be a directory;
// use -f to overwrite any files)
//
// camget -o <filename> <file-blobref>
//
// TODO(bradfitz): camget isn't very fleshed out. In general, using 'cammount' to just
// mount a tree is an easier way to get files back.
package main

/*
Copyright 2011 Google Inc.
@@ -35,6 +14,8 @@ See the License for the specific language governing permissions and
limitations under the License.
*/

package main

import (
	"bytes"
	"errors"
@@ -0,0 +1,40 @@
/*
Copyright 2013 The Camlistore Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

/*
$ go get camlistore.org/cmd/camget

The camget tool fetches blobs, files, and directories.

Examples

Writes to stdout by default:

camget <blobref> // dump raw blob
camget -contents <file-blobref> // dump file contents

Like curl, lets you set output file/directory with -o:

camget -o <dir> <blobref>
(if <dir> exists and is directory, <blobref> must be a directory;
use -f to overwrite any files)

camget -o <filename> <file-blobref>

Camget isn't very fleshed out. In general, using 'cammount' to just
mount a tree is an easier way to get files back.
*/
package main
@@ -0,0 +1,29 @@
/*
Copyright 2013 The Camlistore Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

/*
$ go get camlistore.org/cmd/cammount

The cammount tool mounts a root directory blob onto the given mountpoint. The blobref can be given directly or through a share blob URL. If no root blobref is given, an automatic root is created instead.

usage: cammount [opts] <mountpoint> [<root-blobref>|<share URL>]
-debug=false: print debugging messages.
-server="": Camlistore server prefix.
If blank, the default from the "server" field of ~/.camlistore/config is used.
Acceptable forms: https://you.example.com, example.com:1345 (https assumed), or
http://you.example.com/alt-root
*/
package main
@@ -0,0 +1,73 @@
/*
Copyright 2013 The Camlistore Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

/*
$ go get camlistore.org/cmd/camput

The camput tool mainly pushes blobs, files, and directories. It can also perform various tasks related to that, such as setting tags, creating permanodes, and creating share blobs.

Usage: camput [globalopts] <mode> [commandopts] [commandargs]

Modes:

init: Initialize the camput configuration file.
share: Grant access to a resource by making a "share" blob.
rawobj: Upload a custom JSON schema blob.
attr: Add, set, or delete a permanode's attribute.
file: Upload file(s).
blob: Upload raw blob(s).
permanode: Create and upload a permanode.

Examples:

camput file [opts] <file(s)/director(ies)>
camput file --permanode --name='Homedir backup' --tag=backup,homedir $HOME
camput file --filenodes /mnt/camera/DCIM

camput blob <files> (raw, without any metadata)
camput blob - (read from stdin)

camput permanode (create a new permanode)
camput permanode -name="Some Name" -tag=foo,bar (with attributes added)

camput init
camput init --gpgkey=XXXXX

camput share [opts] <blobref to share via haveref>

camput rawobj (debug command)

camput attr <permanode> <name> <value> Set attribute
camput attr --add <permanode> <name> <value> Adds attribute (e.g. "tag")
camput attr --del <permanode> <name> [<value>] Deletes named attribute [value]

For mode-specific help:

camput <mode> -help

Global options:
-help=false: print usage
-secret-keyring="~/.gnupg/secring.gpg": GnuPG secret keyring file to use.
-server="": Camlistore server prefix. If blank, the default from the "server" field of
~/.camlistore/config is used.
Acceptable forms: https://you.example.com, example.com:1345 (https assumed),
or http://you.example.com/alt-root
-verbose=false: extra debug logging
-verbose_http=false: show HTTP request summaries
-version=false: show version
*/
package main
@@ -0,0 +1,48 @@
/*
Copyright 2013 The Camlistore Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

/*
$ go get camlistore.org/cmd/camtool

Camtool is a collection of commands to help with the use of a camlistore server. Notably, it can initialize a database for the indexer, and it can sync blobs between blobservers.

Usage: camtool [globalopts] <mode> [commandopts] [commandargs]

Modes:

sync: Synchronize blobs from a source to a destination.
dbinit: Set up the database for the indexer.
gsinit: Init Google Storage.
debug: Show misc meta-info from the given file.

Examples:

camtool dbinit -user root -password root -host localhost -dbname camliprod -wipe

camtool sync --all
camtool sync --src http://localhost:3179/bs/ --dest http://localhost:3179/index-mem/

For mode-specific help:

camtool <mode> -help

Global options:
-help=false: print usage
-verbose=false: extra debug logging
-version=false: show version
*/
package main
@@ -285,8 +285,8 @@ func main() {
	mux.Handle("/robots.txt", http.FileServer(http.Dir(filepath.Join(*root, "static"))))
	mux.Handle("/static/", http.StripPrefix("/static/", http.FileServer(http.Dir(filepath.Join(*root, "static")))))
	mux.Handle("/talks/", http.StripPrefix("/talks/", http.FileServer(http.Dir(filepath.Join(*root, "talks")))))
	mux.Handle("/pkg/", godocHandler{})
	mux.Handle("/cmd/", godocHandler{})
	mux.Handle(pkgPattern, godocHandler{})
	mux.Handle(cmdPattern, godocHandler{})

	gerritUrl, _ := url.Parse(fmt.Sprintf("http://%s:8000/", *gerritHost))
	var gerritHandler http.Handler = httputil.NewSingleHostReverseProxy(gerritUrl)
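
For context, godocHandler{} above is a stateless struct implementing http.Handler, now registered under the pkgPattern and cmdPattern constants instead of string literals. A self-contained sketch of that wiring (placeholder handler body and port, not the site's real code):

package main

import (
	"fmt"
	"log"
	"net/http"
)

// docHandler stands in for the site's godocHandler.
type docHandler struct{}

func (docHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	// Placeholder: the real handler parses the path and renders godoc templates.
	fmt.Fprintf(w, "docs for %s\n", r.URL.Path)
}

func main() {
	const (
		pkgPattern = "/pkg/"
		cmdPattern = "/cmd/"
	)
	mux := http.NewServeMux()
	// Trailing-slash patterns make the mux route every path under
	// /pkg/ and /cmd/ to the same handler.
	mux.Handle(pkgPattern, docHandler{})
	mux.Handle(cmdPattern, docHandler{})
	log.Fatal(http.ListenAndServe(":8080", mux))
}
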
@@ -0,0 +1,334 @@
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// This file contains the code dealing with package directory trees.

package main

import (
	"bytes"
	// "fmt"
	"go/doc"
	"go/parser"
	"go/token"
	"io/ioutil"
	"log"
	"os"
	pathpkg "path"
	"strings"
)

// Conventional name for directories containing test data.
// Excluded from directory trees.
//
const testdataDirName = "testdata"

type Directory struct {
	Depth int
	Path string // directory path; includes Name
	Name string // directory name
	HasPkg bool // true if the directory contains at least one package
	Synopsis string // package documentation, if any
	Dirs []*Directory // subdirectories
}

func isGoFile(fi os.FileInfo) bool {
	name := fi.Name()
	return !fi.IsDir() &&
		len(name) > 0 && name[0] != '.' && // ignore .files
		pathpkg.Ext(name) == ".go"
}

func isPkgFile(fi os.FileInfo) bool {
	return isGoFile(fi) &&
		!strings.HasSuffix(fi.Name(), "_test.go") // ignore test files
}

func isPkgDir(fi os.FileInfo) bool {
	name := fi.Name()
	return fi.IsDir() && len(name) > 0 &&
		name[0] != '_' && name[0] != '.' // ignore _files and .files
}

type treeBuilder struct {
	maxDepth int
}

func (b *treeBuilder) newDirTree(fset *token.FileSet, path, name string, depth int) *Directory {
	if name == testdataDirName {
		return nil
	}

	if depth >= b.maxDepth {
		// return a dummy directory so that the parent directory
		// doesn't get discarded just because we reached the max
		// directory depth
		return &Directory{
			Depth: depth,
			Path: path,
			Name: name,
		}
	}

	list, err := ioutil.ReadDir(path)
	if err != nil {
		log.Printf("Could not read %v\n", path)
		return nil
	}

	// determine number of subdirectories and if there are package files
	ndirs := 0
	hasPkgFiles := false
	var synopses [4]string // prioritized package documentation (0 == highest priority)
	for _, d := range list {
		switch {
		case isPkgDir(d):
			ndirs++
		case isPkgFile(d):
			// looks like a package file, but may just be a file ending in ".go";
			// don't just count it yet (otherwise we may end up with hasPkgFiles even
			// though the directory doesn't contain any real package files - was bug)
			if synopses[0] == "" {
				// no "optimal" package synopsis yet; continue to collect synopses
				src, err := ioutil.ReadFile(pathpkg.Join(path, d.Name()))
				if err != nil {
					log.Printf("Could not read %v\n", pathpkg.Join(path, d.Name()))
					continue
				}
				file, err := parser.ParseFile(fset, pathpkg.Join(path, d.Name()),
					src, parser.ParseComments|parser.PackageClauseOnly)
				if err == nil {
					hasPkgFiles = true
					if file.Doc != nil {
						// prioritize documentation
						i := -1
						switch file.Name.Name {
						case name:
							i = 0 // normal case: directory name matches package name
						case "main":
							i = 2 // directory contains a main package
						default:
							i = 3 // none of the above
						}
						if 0 <= i && i < len(synopses) && synopses[i] == "" {
							synopses[i] = doc.Synopsis(file.Doc.Text())
						}
					}
				}
			}
		}
	}

	// create subdirectory tree
	var dirs []*Directory
	if ndirs > 0 {
		dirs = make([]*Directory, ndirs)
		i := 0
		for _, d := range list {
			if isPkgDir(d) {
				name := d.Name()
				dd := b.newDirTree(fset, pathpkg.Join(path, name), name, depth+1)
				if dd != nil {
					dirs[i] = dd
					i++
				}
			}
		}
		dirs = dirs[0:i]
	}

	// if there are no package files and no subdirectories
	// containing package files, ignore the directory
	if !hasPkgFiles && len(dirs) == 0 {
		return nil
	}

	// select the highest-priority synopsis for the directory entry, if any
	synopsis := ""
	for _, synopsis = range synopses {
		if synopsis != "" {
			break
		}
	}

	return &Directory{
		Depth: depth,
		Path: path,
		Name: name,
		HasPkg: hasPkgFiles,
		Synopsis: synopsis,
		Dirs: dirs,
	}
}

// newDirectory creates a new package directory tree with at most maxDepth
// levels, anchored at root. The result tree is pruned such that it only
// contains directories that contain package files or that contain
// subdirectories containing package files (transitively). If a non-nil
// pathFilter is provided, directory paths additionally must be accepted
// by the filter (i.e., pathFilter(path) must be true). If a value >= 0 is
// provided for maxDepth, nodes at larger depths are pruned as well; they
// are assumed to contain package files even if their contents are not known
// (i.e., in this case the tree may contain directories w/o any package files).
//
func newDirectory(root string, maxDepth int) *Directory {
	// The root could be a symbolic link so use Stat not Lstat.
	d, err := os.Stat(root)
	// If we fail here, report detailed error messages; otherwise
	// it is hard to see why a directory tree was not built.
	switch {
	case err != nil:
		log.Printf("newDirectory(%s): %s", root, err)
		return nil
	case !isPkgDir(d):
		log.Printf("newDirectory(%s): not a package directory", root)
		return nil
	}
	if maxDepth < 0 {
		maxDepth = 1e6 // "infinity"
	}
	b := treeBuilder{maxDepth}
	// the file set provided is only for local parsing, no position
	// information escapes and thus we don't need to save the set
	return b.newDirTree(token.NewFileSet(), root, d.Name(), 0)
}

func (dir *Directory) writeLeafs(buf *bytes.Buffer) {
	if dir != nil {
		if len(dir.Dirs) == 0 {
			buf.WriteString(dir.Path)
			buf.WriteByte('\n')
			return
		}

		for _, d := range dir.Dirs {
			d.writeLeafs(buf)
		}
	}
}

func (dir *Directory) walk(c chan<- *Directory, skipRoot bool) {
	if dir != nil {
		if !skipRoot {
			c <- dir
		}
		for _, d := range dir.Dirs {
			d.walk(c, false)
		}
	}
}

func (dir *Directory) iter(skipRoot bool) <-chan *Directory {
	c := make(chan *Directory)
	go func() {
		dir.walk(c, skipRoot)
		close(c)
	}()
	return c
}

func (dir *Directory) lookupLocal(name string) *Directory {
	for _, d := range dir.Dirs {
		if d.Name == name {
			return d
		}
	}
	return nil
}

func splitPath(p string) []string {
	if strings.HasPrefix(p, "/") {
		p = p[1:]
	}
	if p == "" {
		return nil
	}
	return strings.Split(p, "/")
}

// lookup looks for the *Directory for a given path, relative to dir.
func (dir *Directory) lookup(path string) *Directory {
	d := splitPath(dir.Path)
	p := splitPath(path)
	i := 0
	for i < len(d) {
		if i >= len(p) || d[i] != p[i] {
			return nil
		}
		i++
	}
	for dir != nil && i < len(p) {
		dir = dir.lookupLocal(p[i])
		i++
	}
	return dir
}

// DirEntry describes a directory entry. The Depth and Height values
// are useful for presenting an entry in an indented fashion.
//
type DirEntry struct {
	Depth int // >= 0
	Height int // = DirList.MaxHeight - Depth, > 0
	Path string // directory path; includes Name, absolute, with the camli dir as root
	Name string // directory name
	HasPkg bool // true if the directory contains at least one package
	Synopsis string // package documentation, if any
}

type DirList struct {
	MaxHeight int // directory tree height, > 0
	List []DirEntry
}

// listing creates a (linear) directory listing from a directory tree.
// If skipRoot is set, the root directory itself is excluded from the list.
//
func (root *Directory) listing(skipRoot bool) *DirList {
	if root == nil {
		return nil
	}

	// determine number of entries n and maximum height
	n := 0
	minDepth := 1 << 30 // infinity
	maxDepth := 0
	for d := range root.iter(skipRoot) {
		n++
		if minDepth > d.Depth {
			minDepth = d.Depth
		}
		if maxDepth < d.Depth {
			maxDepth = d.Depth
		}
	}
	maxHeight := maxDepth - minDepth + 1

	if n == 0 {
		return nil
	}

	// create list
	list := make([]DirEntry, n)
	i := 0
	for d := range root.iter(skipRoot) {
		p := &list[i]
		p.Depth = d.Depth - minDepth
		p.Height = maxHeight - p.Depth
		// the suffix is absolute, with the camlistore dir as the root
		idx := strings.LastIndex(d.Path, domainName)
		if idx == -1 {
			log.Fatalf("No \"%s\" in path to file %s", domainName, d.Path)
		}
		suffix := pathpkg.Clean(d.Path[idx+len(domainName):])

		p.Path = suffix
		p.Name = d.Name
		p.HasPkg = d.HasPkg
		p.Synopsis = d.Synopsis
		i++
	}

	return &DirList{maxHeight, list}
}
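
A rough sketch of how the tree builder above is driven (the real caller is populateDirs in godoc.go, further down in this change). debugDumpDirs is a hypothetical helper in the same package; it assumes "fmt" is imported, and that the scanned path contains domainName ("camlistore.org"), since listing calls log.Fatalf otherwise:

// debugDumpDirs builds a pruned tree rooted at dir, flattens it, and prints
// one indented line per directory, with its synopsis when one was found.
func debugDumpDirs(dir string) {
	tree := newDirectory(dir, 2) // at most two levels deep
	if tree == nil {
		return // not a package directory, or nothing with Go files underneath
	}
	dl := tree.listing(true) // skip the root itself, as populateDirs does
	if dl == nil {
		return
	}
	for _, e := range dl.List {
		fmt.Printf("%s%s  %s\n", strings.Repeat("  ", e.Depth), e.Path, e.Synopsis)
	}
}
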
@@ -42,46 +42,23 @@ import (
	"time"
)

const domainName = "camlistore.org"
const (
	domainName = "camlistore.org"
	pkgPattern = "/pkg/"
	cmdPattern = "/cmd/"
)

var docRx = regexp.MustCompile(`^/((?:pkg|cmd)/([\w/]+?)(\.go)??)/?$`)

var tabwidth = 4

type docServer struct {
	pattern string // url pattern; e.g. "/pkg/"
	fsRoot string // file system root to which the pattern is mapped
}

var (
	cmdHandler = docServer{"/cmd/", "/cmd"}
	pkgHandler = docServer{"/pkg/", "/pkg"}
)

// DirEntry describes a directory entry. The Depth and Height values
// are useful for presenting an entry in an indented fashion.
//
type DirEntry struct {
	Depth int // >= 0
	Height int // = DirList.MaxHeight - Depth, > 0
	Path string // directory path; includes Name, relative to DirList root
	Name string // directory name
	HasPkg bool // true if the directory contains at least one package
	Synopsis string // package documentation, if any
}

type DirList struct {
	MaxHeight int // directory tree height, > 0
	List []DirEntry
}

type PageInfo struct {
	Dirname string // directory containing the package
	Err error // error or nil

	// package info
	FSet *token.FileSet // nil if no package documentation
	PDoc *doc.Package // nil if no package documentation
	PDoc *doc.Package // package documentation, but never nil anyway.
	Examples []*doc.Example // nil if no example code
	PAst *ast.File // nil if no AST with package exports
	IsPkg bool // true for pkg, false for cmd
@@ -118,7 +95,6 @@ var godocFmap = template.FuncMap{
	//"comment_text": comment_textFunc,

	// support for URL attributes
	"pkgLink": pkgLinkFunc,
	"srcLink": srcLinkFunc,
	"posLink_url": posLink_urlFunc,
@@ -199,14 +175,6 @@ func comment_htmlFunc(comment string) string {
	return buf.String()
}

func pkgLinkFunc(path string) string {
	relpath := path[1:]
	// because of the irregular mapping under goroot
	// we need to correct certain relative paths
	relpath = strings.TrimLeft(relpath, "pkg/")
	return pkgHandler.pattern[1:] + relpath // remove trailing '/' for relative URL
}

func posLink_urlFunc(node ast.Node, fset *token.FileSet) string {
	var relpath string
	var line int
@@ -274,7 +242,24 @@ func readGodocTemplate(name string) *template.Template {
	return t
}

func (pi *PageInfo) populateDirs(diskPath string, depth int) {
	var dir *Directory
	dir = newDirectory(diskPath, depth)
	pi.Dirs = dir.listing(true)
	pi.DirTime = time.Now()
}

func getPageInfo(pkgName, diskPath string) (pi PageInfo, err error) {
	if pkgName == pathpkg.Join(domainName, pkgPattern) ||
		pkgName == pathpkg.Join(domainName, cmdPattern) {
		pi.Dirname = diskPath
		pi.populateDirs(diskPath, 2)
		// hack; setting PDoc so that we can keep using directly
		// $GOROOT/lib/godoc/package.html, while avoiding the
		// missing gopher png and the "ad" for the go dashboard.
		pi.PDoc = &doc.Package{}
		return
	}
	bpkg, err := build.ImportDir(diskPath, 0)
	if err != nil {
		return
@@ -306,7 +291,10 @@ func getPageInfo(pkgName, diskPath string) (pi PageInfo, err error) {

	pi.Dirname = diskPath
	pi.PDoc = doc.New(aPkg, pkgName, 0)
	pi.IsPkg = !strings.Contains(pkgName, domainName+"/cmd/")
	pi.IsPkg = strings.Contains(pkgName, domainName+pkgPattern)

	// get directory information
	pi.populateDirs(diskPath, 1)
	return
}
@@ -410,11 +398,16 @@ type godocHandler struct{}

func (godocHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	m := docRx.FindStringSubmatch(r.URL.Path)
	suffix := ""
	if m == nil {
		http.NotFound(w, r)
		return
		if r.URL.Path != pkgPattern && r.URL.Path != cmdPattern {
			http.NotFound(w, r)
			return
		}
		suffix = r.URL.Path
	} else {
		suffix = m[1]
	}
	suffix := m[1]
	diskPath := filepath.Join(*root, "..", suffix)

	switch pathpkg.Ext(suffix) {
@@ -423,7 +416,7 @@ func (godocHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
		return
	}

	pkgName := domainName + "/" + suffix
	pkgName := pathpkg.Join(domainName, suffix)
	pi, err := getPageInfo(pkgName, diskPath)
	if err != nil {
		log.Print(err)
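
The routing above hinges on docRx. A standalone probe (illustrative paths only) shows which request paths the handler treats as doc pages and which suffix it extracts; bare /pkg/ and /cmd/ deliberately do not match and fall through to the explicit equality check in ServeHTTP:

package main

import (
	"fmt"
	"regexp"
)

// Same pattern as docRx in the godoc handler above.
var docRx = regexp.MustCompile(`^/((?:pkg|cmd)/([\w/]+?)(\.go)??)/?$`)

func main() {
	paths := []string{"/pkg/", "/pkg/blobref", "/cmd/camget/", "/pkg/blobref/blobref.go", "/talks/"}
	for _, p := range paths {
		if m := docRx.FindStringSubmatch(p); m != nil {
			fmt.Printf("%-26s -> suffix %q\n", p, m[1])
		} else {
			fmt.Printf("%-26s -> no match\n", p)
		}
	}
}
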
@@ -1,6 +1,123 @@
body {
	margin-top: 0;
	font-family: arial, serif;
	margin: 0;
	font-family: Helvetica, Arial, sans-serif;
	font-size: 16px;
}
pre,
code {
	font-family: Menlo, monospace;
	font-size: 14px;
}
pre {
	line-height: 18px;
}
pre .comment {
	color: #375EAB;
}
pre .highlight,
pre .highlight-comment,
pre .selection-highlight,
pre .selection-highlight-comment {
	background: #FFFF00;
}
pre .selection,
pre .selection-comment {
	background: #FF9632;
}
pre .ln {
	color: #999;
}
body {
	color: #222;
}
a,
.exampleHeading .text {
	color: #375EAB;
	text-decoration: none;
}
a:hover,
.exampleHeading .text:hover {
	text-decoration: underline;
}
p,
pre,
ul,
ol {
	margin: 20px;
}
pre {
	background: #e9e9e9;
	padding: 10px;

	-webkit-border-radius: 5px;
	-moz-border-radius: 5px;
	border-radius: 5px;
}

h1,
h2,
h3,
h4,
.rootHeading {
	margin: 20px 0;
	padding: 0;
	color: #375EAB;
	font-weight: bold;
}
h1 {
	font-size: 24px;
}
h2,
h2>a {
	color: #fff;
	background: url(camli-bar-background.png);
	padding: 2px 5px;
}
h2 {
	font-size: 20px;
}
h2>a {
	font-size: 18px;
	text-decoration: underline;
}
h2>a:hover {
	color: #be1d2d;
}
h3 {
	font-size: 20px;
}
h3,
h4 {
	margin: 20px 5px;
}
h4 {
	font-size: 16px;
}
.rootHeading {
	font-size: 20px;
	margin: 0;
}

dl {
	margin: 20px;
}
dd {
	margin: 2px 20px;
}
dl,
dd {
	font-size: 14px;
}

table.dir th {
	text-align: left;
}
table.dir td {
	word-wrap: break-word;
	vertical-align: top;
}
div#page.wide table.dir td.name {
	white-space: nowrap;
}

#header {
@@ -102,4 +219,10 @@ span.termhashlink {

span.termhashlink a {
	color: #e84c00;
}
}

.toggleButton { cursor: pointer; }
.toggle .collapsed { display: block; }
.toggle .expanded { display: none; }
.toggleVisible .collapsed { display: none; }
.toggleVisible .expanded { display: block; }
@@ -0,0 +1,213 @@
// Except as noted, this content is licensed under Creative Commons
// Attribution 3.0

/* A little code to ease navigation of these documents.
 *
 * On window load we:
 * + Generate a table of contents (godocs_generateTOC)
 * + Add links up to the top of the doc from each section (godocs_addTopLinks)
 */

/* We want to do some stuff on page load (after the HTML is rendered).
   So listen for that:
 */
function bindEvent(el, e, fn) {
  if (el.addEventListener){
    el.addEventListener(e, fn, false);
  } else if (el.attachEvent){
    el.attachEvent('on'+e, fn);
  }
}

function godocs_bindSearchEvents() {
  var search = document.getElementById('search');
  if (!search) {
    // no search box (index disabled)
    return;
  }
  function clearInactive() {
    if (search.className == "inactive") {
      search.value = "";
      search.className = "";
    }
  }
  function restoreInactive() {
    if (search.value !== "") {
      return;
    }
    if (search.type != "search") {
      search.value = search.getAttribute("placeholder");
    }
    search.className = "inactive";
  }
  restoreInactive();
  bindEvent(search, 'focus', clearInactive);
  bindEvent(search, 'blur', restoreInactive);
}

/* Returns the "This sweet header" from <h2>This <i>sweet</i> header</h2>.
 * Takes a node, returns a string.
 */
function godocs_nodeToText(node) {
  var TEXT_NODE = 3; // Defined in Mozilla but not MSIE :(

  var text = '';
  for (var j = 0; j != node.childNodes.length; j++) {
    var child = node.childNodes[j];
    if (child.nodeType == TEXT_NODE) {
      if (child.nodeValue != '[Top]') { //ok, that's a hack, but it works.
        text = text + child.nodeValue;
      }
    } else {
      text = text + godocs_nodeToText(child);
    }
  }
  return text;
}

/* Generates a table of contents: looks for h2 and h3 elements and generates
 * links. "Decorates" the element with id=="nav" with this table of contents.
 */
function godocs_generateTOC() {
  if (document.getElementById('manual-nav')) { return; }
  var navbar = document.getElementById('nav');
  if (!navbar) { return; }

  var toc_items = [];

  var i;
  var seenNav = false;
  for (i = 0; i < navbar.parentNode.childNodes.length; i++) {
    var node = navbar.parentNode.childNodes[i];
    if (!seenNav) {
      if (node.id == 'nav') {
        seenNav = true;
      }
      continue;
    }
    if ((node.tagName != 'h2') && (node.tagName != 'H2') &&
        (node.tagName != 'h3') && (node.tagName != 'H3')) {
      continue;
    }
    if (!node.id) {
      node.id = 'tmp_' + i;
    }
    var text = godocs_nodeToText(node);
    if (!text) { continue; }

    var textNode = document.createTextNode(text);

    var link = document.createElement('a');
    link.href = '#' + node.id;
    link.appendChild(textNode);

    // Then create the item itself
    var item;
    if ((node.tagName == 'h2') || (node.tagName == 'H2')) {
      item = document.createElement('dt');
    } else { // h3
      item = document.createElement('dd');
    }

    item.appendChild(link);
    toc_items.push(item);
  }

  if (toc_items.length <= 1) { return; }

  var dl1 = document.createElement('dl');
  var dl2 = document.createElement('dl');

  var split_index = (toc_items.length / 2) + 1;
  if (split_index < 8) {
    split_index = toc_items.length;
  }

  for (i = 0; i < split_index; i++) {
    dl1.appendChild(toc_items[i]);
  }
  for (/* keep using i */; i < toc_items.length; i++) {
    dl2.appendChild(toc_items[i]);
  }

  var tocTable = document.createElement('table');
  navbar.appendChild(tocTable);
  tocTable.className = 'unruled';
  var tocBody = document.createElement('tbody');
  tocTable.appendChild(tocBody);

  var tocRow = document.createElement('tr');
  tocBody.appendChild(tocRow);

  // 1st column
  var tocCell = document.createElement('td');
  tocCell.className = 'first';
  tocRow.appendChild(tocCell);
  tocCell.appendChild(dl1);

  // 2nd column
  tocCell = document.createElement('td');
  tocRow.appendChild(tocCell);
  tocCell.appendChild(dl2);
}

function getElementsByClassName(base, clazz) {
  if (base.getElementsByClassName) {
    return base.getElementsByClassName(clazz);
  }
  var elements = base.getElementsByTagName('*'), foundElements = [];
  for (var n in elements) {
    if (clazz == elements[n].className) {
      foundElements.push(elements[n]);
    }
  }
  return foundElements;
}

function godocs_bindToggle(el) {
  var button = getElementsByClassName(el, "toggleButton");
  var callback = function() {
    if (el.className == "toggle") {
      el.className = "toggleVisible";
    } else {
      el.className = "toggle";
    }
  };
  for (var i = 0; i < button.length; i++) {
    bindEvent(button[i], "click", callback);
  }
}
function godocs_bindToggles(className) {
  var els = getElementsByClassName(document, className);
  for (var i = 0; i < els.length; i++) {
    godocs_bindToggle(els[i]);
  }
}
function godocs_bindToggleLink(l, prefix) {
  bindEvent(l, "click", function() {
    var i = l.href.indexOf("#"+prefix);
    if (i < 0) {
      return;
    }
    var id = prefix + l.href.slice(i+1+prefix.length);
    var eg = document.getElementById(id);
    eg.className = "toggleVisible";
  });
}
function godocs_bindToggleLinks(className, prefix) {
  var links = getElementsByClassName(document, className);
  for (i = 0; i < links.length; i++) {
    godocs_bindToggleLink(links[i], prefix);
  }
}

function godocs_onload() {
  godocs_bindSearchEvents();
  godocs_generateTOC();
  godocs_bindToggles("toggle");
  godocs_bindToggles("toggleVisible");
  godocs_bindToggleLinks("exampleLink", "example_");
  godocs_bindToggleLinks("overviewLink", "");
}

bindEvent(window, 'load', godocs_onload);
@@ -6,8 +6,10 @@
{{else}}
<title>Camlistore</title>
{{end}}
<!-- TODO(mpl): figure out why Index does not toggle (like Overview) -->
<link rel="stylesheet" href="/static/all.css" type="text/css" media="all" charset="utf-8">
<script type="text/javascript" src="/static/all-async.js" async="true"></script>
<script type="text/javascript" src="/static/godocs.js"></script>
</head>
<body>