// Copyright 2011 Gary Burd
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.

package doc

import (
	"bytes"
	"fmt"
	"go/ast"
	"go/build"
	"go/doc"
	"go/parser"
	"go/printer"
	"go/token"
	"io"
	"io/ioutil"
	"os"
	"path"
	"regexp"
	"sort"
	"strconv"
	"strings"
	"time"
	"unicode"
	"unicode/utf8"
)

func startsWithUppercase(s string) bool {
	r, _ := utf8.DecodeRuneInString(s)
	return unicode.IsUpper(r)
}

var badSynopsisPrefixes = []string{
	"Autogenerated by Thrift Compiler",
	"Copyright 20",
	`THE SOFTWARE IS PROVIDED "AS IS"`,
	"Copyright (c) 20",
	"Copyright (C) 20",
	"Copyright © 20",
	"* ",
}

// synopsis extracts the first sentence from s. All runs of whitespace are
// replaced by a single space.
func synopsis(s string) string {
	parts := strings.SplitN(s, "\n\n", 2)
	s = parts[0]

	var buf []byte
	const (
		other = iota
		period
		space
	)
	last := space
Loop:
	for i := 0; i < len(s); i++ {
		b := s[i]
		switch b {
		case ' ', '\t', '\r', '\n':
			switch last {
			case period:
				break Loop
			case other:
				buf = append(buf, ' ')
				last = space
			}
		case '.':
			last = period
			buf = append(buf, b)
		default:
			last = other
			buf = append(buf, b)
		}
	}

	// Ensure that synopsis fits an App Engine datastore text property.
	const n = 400
	if len(buf) > n {
		buf = buf[:n]
		if i := bytes.LastIndex(buf, []byte{' '}); i >= 0 {
			buf = buf[:i]
		}
		buf = append(buf, " ..."...)
	}

	s = string(buf)

	// Kill output from code generators.
	for _, prefix := range badSynopsisPrefixes {
		if strings.HasPrefix(s, prefix) {
			s = ""
			break
		}
	}
	return s
}
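
// Illustration (a minimal sketch; the input string below is invented for this
// example). Runs of whitespace collapse to a single space, and scanning stops
// at the first period that is followed by whitespace:
//
//	synopsis("Extracts docs  from\tGo source. More detail follows.\n\nBody.")
//	// returns "Extracts docs from Go source."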

// builder holds the state used when building the documentation.
type builder struct {
	fset        *token.FileSet
	lineFmt     string
	examples    []*doc.Example
	buf         bytes.Buffer // scratch space for printNode method.
	importPaths map[string]map[string]string
	ast         *ast.Package
	srcs        map[string]*source
	pkg         *Package
}

// fileImportPaths returns a package name to import path map for the file with
// the given filename.
func (b *builder) fileImportPaths(filename string) map[string]string {
	importPaths := b.importPaths[filename]
	if importPaths != nil {
		return importPaths
	}

	importPaths = make(map[string]string)
	scores := make(map[string]int)
	b.importPaths[filename] = importPaths

	file := b.ast.Files[filename]
	if file == nil {
		// The code can reference files outside the known set of files
		// when line comments are used (//line <file>:<line>).
		return importPaths
	}

	for _, i := range file.Imports {
		importPath, _ := strconv.Unquote(i.Path.Value)
		if importPath == "C" {
			continue
		}
		if i.Name != nil {
			importPaths[i.Name.Name] = importPath
			scores[i.Name.Name] = 4
		} else {
			// Use heuristics to guess the package name from the last
			// segment of the import path.
			_, name := path.Split(importPath)
			if scores[name] <= 1 {
				if strings.HasPrefix(name, "go") {
					n := name[len("go"):]
					importPaths[n] = importPath
					scores[n] = 1
				}
			}
			if scores[name] <= 2 {
				switch {
				case strings.HasPrefix(name, "go-") || strings.HasPrefix(name, "go."):
					n := name[len("go-"):]
					importPaths[n] = importPath
					scores[n] = 2
				case strings.HasSuffix(name, ".go") || strings.HasSuffix(name, "-go"):
					n := name[:len(name)-len(".go")]
					importPaths[n] = importPath
					scores[n] = 2
				}
			}
			if scores[name] <= 3 {
				importPaths[name] = importPath
				scores[name] = 3
			}
		}
	}
	return importPaths
}
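
// Illustration (invented import path): for an unnamed import of
// "github.com/example/go-redis", the last path segment is "go-redis", so the
// heuristics record, among others, "redis" (score 2) and the segment itself,
// "go-redis" (score 3), both mapped to that import path. An import with an
// explicit name records that name directly with score 4.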

// TypeAnnotation records the position of a type reference within a
// declaration's text, together with the import path of the package that
// declares the type ("" for the package being documented).
type TypeAnnotation struct {
	Pos, End   int
	ImportPath string
	Name       string
}

// Decl is a printed declaration together with the type annotations found in
// its text.
type Decl struct {
	Text        string
	Annotations []TypeAnnotation
}

type sortByPos []TypeAnnotation

func (p sortByPos) Len() int           { return len(p) }
func (p sortByPos) Less(i, j int) bool { return p[i].Pos < p[j].Pos }
func (p sortByPos) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }

// annotationVisitor collects type annotations.
type annotationVisitor struct {
	annotations []TypeAnnotation
	fset        *token.FileSet
	b           *builder
	importPaths map[string]string
}

func (v *annotationVisitor) Visit(n ast.Node) ast.Visitor {
	switch n := n.(type) {
	case *ast.TypeSpec:
		if n.Type != nil {
			ast.Walk(v, n.Type)
		}
		return nil
	case *ast.FuncDecl:
		if n.Recv != nil {
			ast.Walk(v, n.Recv)
		}
		if n.Type != nil {
			ast.Walk(v, n.Type)
		}
		return nil
	case *ast.Field:
		if n.Type != nil {
			ast.Walk(v, n.Type)
		}
		return nil
	case *ast.ValueSpec:
		if n.Type != nil {
			ast.Walk(v, n.Type)
		}
		return nil
	case *ast.FuncLit:
		if n.Type != nil {
			ast.Walk(v, n.Type)
		}
		return nil
	case *ast.CompositeLit:
		if n.Type != nil {
			ast.Walk(v, n.Type)
		}
		return nil
	case *ast.Ident:
		if !ast.IsExported(n.Name) {
			return nil
		}
		v.addAnnotation(n, "", n.Name)
		return nil
	case *ast.SelectorExpr:
		if !ast.IsExported(n.Sel.Name) {
			return nil
		}
		if i, ok := n.X.(*ast.Ident); ok {
			v.addAnnotation(n, i.Name, n.Sel.Name)
			return nil
		}
	}
	return v
}

const packageWrapper = "package p\n"

func (v *annotationVisitor) addAnnotation(n ast.Node, packageName string, name string) {
	pos := v.fset.Position(n.Pos())
	end := v.fset.Position(n.End())
	importPath := ""
	if packageName != "" {
		importPath = v.importPaths[packageName]
		if importPath == "" {
			return
		}
	}
	v.annotations = append(v.annotations, TypeAnnotation{
		pos.Offset - len(packageWrapper),
		end.Offset - len(packageWrapper),
		importPath,
		name})
}

// printDecl prints the declaration decl and returns its text together with
// annotations for the exported type names that appear in it.
func (b *builder) printDecl(decl ast.Node) Decl {
	b.buf.Reset()
	b.buf.WriteString(packageWrapper)
	err := (&printer.Config{Mode: printer.UseSpaces, Tabwidth: 4}).Fprint(&b.buf, b.fset, decl)
	if err != nil {
		return Decl{Text: err.Error()}
	}
	text := string(b.buf.Bytes()[len(packageWrapper):])

	position := b.fset.Position(decl.Pos())
	v := &annotationVisitor{
		b:           b,
		fset:        token.NewFileSet(),
		importPaths: b.fileImportPaths(position.Filename),
	}
	f, err := parser.ParseFile(v.fset, "", b.buf.Bytes(), 0)
	if err != nil {
		return Decl{Text: text}
	}
	ast.Walk(v, f)
	sort.Sort(sortByPos(v.annotations))
	return Decl{Text: text, Annotations: v.annotations}
}
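
// Illustration (an invented declaration, assuming the source file imports
// "io"): for a Decl whose Text is
//
//	func NewReader(r io.Reader) *Reader
//
// the visitor produces two annotations, one covering "io.Reader" with
// ImportPath "io" and one covering the trailing "Reader" with ImportPath ""
// (a type in the package being documented). Offsets are relative to Decl.Text
// because the length of the temporary "package p\n" wrapper is subtracted.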

// printNode formats the given AST node using spaces for indentation.
func (b *builder) printNode(node interface{}) string {
	b.buf.Reset()
	err := (&printer.Config{Mode: printer.UseSpaces, Tabwidth: 4}).Fprint(&b.buf, b.fset, node)
	if err != nil {
		b.buf.Reset()
		b.buf.WriteString(err.Error())
	}
	return b.buf.String()
}

// printPos returns the browse URL for the source position pos, or "" if the
// position cannot be resolved to a known source file.
func (b *builder) printPos(pos token.Pos) string {
	position := b.fset.Position(pos)
	src := b.srcs[position.Filename]
	if src == nil || src.browseURL == "" {
		// src can be nil when line comments are used (//line <file>:<line>).
		return ""
	}
	return src.browseURL + fmt.Sprintf(b.lineFmt, position.Line)
}
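
// Illustration (invented values): with lineFmt "#L%d" and a source whose
// browseURL is "https://example.com/repo/blob/master/conn.go", a declaration
// on line 42 yields "https://example.com/repo/blob/master/conn.go#L42".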

type Value struct {
	Decl Decl
	URL  string
	Doc  string
}

func (b *builder) values(vdocs []*doc.Value) []*Value {
	var result []*Value
	for _, d := range vdocs {
		result = append(result, &Value{
			Decl: b.printDecl(d.Decl),
			URL:  b.printPos(d.Decl.Pos()),
			Doc:  d.Doc,
		})
	}
	return result
}

type Example struct {
	Name   string
	Doc    string
	Code   string
	Output string
}

var exampleOutputRx = regexp.MustCompile(`(?i)//[[:space:]]*output:`)

// getExamples returns the examples that refer to the given declaration; name
// is "" for package examples, a type or function name, or "Recv_Method" for
// methods.
func (b *builder) getExamples(name string) []*Example {
	var docs []*Example
	for _, e := range b.examples {
		if !strings.HasPrefix(e.Name, name) {
			continue
		}
		n := e.Name[len(name):]
		if n != "" {
			if i := strings.LastIndex(n, "_"); i != 0 {
				continue
			}
			n = n[1:]
			if startsWithUppercase(n) {
				continue
			}
			n = strings.Title(n)
		}

		output := e.Output
		code := b.printNode(&printer.CommentedNode{
			Node:     e.Code,
			Comments: e.Comments,
		})

		// additional formatting if this is a function body
		if i := len(code); i >= 2 && code[0] == '{' && code[i-1] == '}' {
			// remove surrounding braces
			code = code[1 : i-1]
			// unindent (the printer above indents with four spaces)
			code = strings.Replace(code, "\n    ", "\n", -1)
			// remove output comment
			if j := exampleOutputRx.FindStringIndex(code); j != nil {
				code = strings.TrimSpace(code[:j[0]])
			}
		} else {
			// drop output, as the output comment will appear in the code
			output = ""
		}
		docs = append(docs, &Example{Name: n, Doc: e.Doc, Code: code, Output: output})
	}
	return docs
}
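
// Illustration (invented example names): if the builder holds examples named
// "", "_errors", "Conn" and "Conn_dial", then getExamples("") returns the
// first two (the second displayed as "Errors"), while getExamples("Conn")
// returns the last two (the last displayed as "Dial"). A suffix that starts
// with an upper case letter denotes a method example and is skipped here; it
// is picked up by the method's own getExamples call.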

type Func struct {
	Decl     Decl
	URL      string
	Doc      string
	Name     string
	Recv     string
	Examples []*Example
}

func (b *builder) funcs(fdocs []*doc.Func) []*Func {
	var result []*Func
	for _, d := range fdocs {
		var exampleName string
		switch {
		case d.Recv == "":
			exampleName = d.Name
		case d.Recv[0] == '*':
			exampleName = d.Recv[1:] + "_" + d.Name
		default:
			exampleName = d.Recv + "_" + d.Name
		}
		result = append(result, &Func{
			Decl:     b.printDecl(d.Decl),
			URL:      b.printPos(d.Decl.Pos()),
			Doc:      d.Doc,
			Name:     d.Name,
			Recv:     d.Recv,
			Examples: b.getExamples(exampleName),
		})
	}
	return result
}
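
// Illustration (invented names): a method Send with receiver "*Conn" looks up
// its examples under "Conn_Send", matching the convention of naming the test
// function ExampleConn_Send, while a top-level function Dial looks them up
// under "Dial".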

type Type struct {
	Doc      string
	Name     string
	Decl     Decl
	URL      string
	Consts   []*Value
	Vars     []*Value
	Funcs    []*Func
	Methods  []*Func
	Examples []*Example
}

func (b *builder) types(tdocs []*doc.Type) []*Type {
	var result []*Type
	for _, d := range tdocs {
		result = append(result, &Type{
			Doc:      d.Doc,
			Name:     d.Name,
			Decl:     b.printDecl(d.Decl),
			URL:      b.printPos(d.Decl.Pos()),
			Consts:   b.values(d.Consts),
			Vars:     b.values(d.Vars),
			Funcs:    b.funcs(d.Funcs),
			Methods:  b.funcs(d.Methods),
			Examples: b.getExamples(d.Name),
		})
	}
	return result
}

type File struct {
	Name string
	URL  string
}

// source is a source file held in memory. It implements os.FileInfo so that
// it can be returned from the build context's ReadDir function.
type source struct {
	name      string
	browseURL string
	rawURL    string
	data      []byte
}

func (s *source) Name() string       { return s.name }
func (s *source) Size() int64        { return int64(len(s.data)) }
func (s *source) Mode() os.FileMode  { return 0 }
func (s *source) ModTime() time.Time { return time.Time{} }
func (s *source) IsDir() bool        { return false }
func (s *source) Sys() interface{}   { return nil }

// readDir implements the build context's ReadDir function using the in-memory
// sources.
func (b *builder) readDir(dir string) ([]os.FileInfo, error) {
	if dir != b.pkg.ImportPath {
		panic("unexpected")
	}
	infos := make([]os.FileInfo, 0, len(b.srcs))
	for _, src := range b.srcs {
		infos = append(infos, src)
	}
	return infos, nil
}

// openFile implements the build context's OpenFile function using the
// in-memory sources.
func (b *builder) openFile(path string) (io.ReadCloser, error) {
	if strings.HasPrefix(path, b.pkg.ImportPath+"/") {
		if src, ok := b.srcs[path[len(b.pkg.ImportPath)+1:]]; ok {
			return ioutil.NopCloser(bytes.NewReader(src.data)), nil
		}
	}
	panic("unexpected")
}
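
// The build.Context constructed in buildDoc below routes its ReadDir and
// OpenFile callbacks to these two methods, so go/build resolves the package
// entirely from the fetched sources without touching the local file system.
// Illustration (invented paths): for a package with import path
// "github.com/example/widget", openFile("github.com/example/widget/conn.go")
// returns the stored bytes of the "conn.go" source.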

// PackageVersion is modified when previously stored packages are invalid.
const PackageVersion = "4"

type Package struct {
	// The import path for this package.
	ImportPath string

	// Import path prefix for all packages in the project.
	ProjectRoot string

	// Name of the project.
	ProjectName string

	// Project home page.
	ProjectURL string

	// Errors found when fetching or parsing this package.
	Errors []string

	// The time this object was created.
	Updated time.Time

	// Cache validation tag. This tag is not necessarily an HTTP entity tag.
	// The tag is "" if there is no meaningful cache validation for the VCS.
	Etag string

	// Package name or "" if no package was found for this import path. The
	// preceding fields are set even if a package is not found for the import
	// path.
	Name string

	// Synopsis and full documentation for the package.
	Synopsis string
	Doc      string

	// Format this package as a command.
	IsCmd bool

	// Top-level declarations.
	Consts []*Value
	Funcs  []*Func
	Types  []*Type
	Vars   []*Value

	// Package examples.
	Examples []*Example

	// Files.
	Files     []*File
	TestFiles []*File

	// Source size in bytes.
	SourceSize     int
	TestSourceSize int

	// Imports.
	Imports     []string
	TestImports []string
}

// buildDoc builds the documentation for the package with the given import
// path from the given source files.
func buildDoc(importPath, projectRoot, projectName, projectURL, etag string, lineFmt string, srcs []*source) (*Package, error) {
	b := &builder{
		lineFmt:     lineFmt,
		fset:        token.NewFileSet(),
		importPaths: make(map[string]map[string]string),
		srcs:        make(map[string]*source),
		pkg: &Package{
			ImportPath:  importPath,
			ProjectName: projectName,
			ProjectRoot: projectRoot,
			ProjectURL:  projectURL,
			Etag:        etag,
			Updated:     time.Now(),
		},
	}

	if len(srcs) == 0 {
		return b.pkg, nil
	}

	for _, src := range srcs {
		b.srcs[src.name] = src
	}

	// Find the package and associated files.
	ctxt := build.Context{
		GOOS:          "linux",
		GOARCH:        "amd64",
		CgoEnabled:    true,
		JoinPath:      path.Join,
		IsAbsPath:     path.IsAbs,
		SplitPathList: func(list string) []string { return strings.Split(list, ":") },
		IsDir:         func(path string) bool { panic("unexpected") },
		HasSubdir:     func(root, dir string) (rel string, ok bool) { panic("unexpected") },
		ReadDir:       func(dir string) (fi []os.FileInfo, err error) { return b.readDir(dir) },
		OpenFile:      func(path string) (r io.ReadCloser, err error) { return b.openFile(path) },
		Compiler:      "gc",
	}

	pkg, err := ctxt.ImportDir(b.pkg.ImportPath, 0)
	if err != nil {
		b.pkg.Errors = append(b.pkg.Errors, err.Error())
		return b.pkg, nil
	}

	// Parse the Go files.
	b.ast = &ast.Package{Name: pkg.Name, Files: make(map[string]*ast.File)}
	if pkg.IsCommand() && b.srcs["doc.go"] != nil {
		file, err := parser.ParseFile(b.fset, "doc.go", b.srcs["doc.go"].data, parser.ParseComments)
		if err == nil && file.Name.Name == "documentation" {
			b.ast.Files["doc.go"] = file
		}
	}
	if len(b.ast.Files) == 0 {
		for _, name := range append(pkg.GoFiles, pkg.CgoFiles...) {
			file, err := parser.ParseFile(b.fset, name, b.srcs[name].data, parser.ParseComments)
			if err != nil {
				b.pkg.Errors = append(b.pkg.Errors, err.Error())
				continue
			}
			b.pkg.Files = append(b.pkg.Files, &File{Name: name, URL: b.srcs[name].browseURL})
			b.pkg.SourceSize += len(b.srcs[name].data)
			b.ast.Files[name] = file
		}
	}

	// Find examples in the test files.
	for _, name := range append(pkg.TestGoFiles, pkg.XTestGoFiles...) {
		file, err := parser.ParseFile(b.fset, name, b.srcs[name].data, parser.ParseComments)
		if err != nil {
			b.pkg.Errors = append(b.pkg.Errors, err.Error())
			continue
		}
		b.pkg.TestFiles = append(b.pkg.TestFiles, &File{Name: name, URL: b.srcs[name].browseURL})
		b.pkg.TestSourceSize += len(b.srcs[name].data)
		b.examples = append(b.examples, doc.Examples(file)...)
	}

	b.vetPackage()

	pdoc := doc.New(b.ast, b.pkg.ImportPath, 0)

	b.pkg.Name = pdoc.Name
	b.pkg.Doc = strings.TrimRight(pdoc.Doc, " \t\n\r")
	b.pkg.Synopsis = synopsis(b.pkg.Doc)

	b.pkg.Examples = b.getExamples("")
	b.pkg.IsCmd = pkg.IsCommand()

	b.pkg.Consts = b.values(pdoc.Consts)
	b.pkg.Funcs = b.funcs(pdoc.Funcs)
	b.pkg.Types = b.types(pdoc.Types)
	b.pkg.Vars = b.values(pdoc.Vars)

	b.pkg.Imports = pkg.Imports
	b.pkg.TestImports = pkg.TestImports

	return b.pkg, nil
}
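
// Illustration (a minimal sketch with invented values): a caller that has
// already fetched a package's files from its VCS would invoke buildDoc
// roughly as
//
//	pkg, err := buildDoc(
//		"github.com/example/widget",          // import path
//		"github.com/example/widget",          // project root
//		"widget",                             // project name
//		"https://github.com/example/widget",  // project URL
//		etag, "#L%d",
//		[]*source{{name: "widget.go", browseURL: browseURL, data: data}},
//	)
//
// where etag, browseURL and data come from the fetch step.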