mirror of
https://github.com/golang/go.git
synced 2025-05-05 15:43:04 +00:00
x/tools: clone some files in preparation for alias changes
This CL only copies files and updates build tags. Substantive changes will come in follow-ups. This is a workaround for git's lack of rename/copy tracking. Tested with go1.6, go1.7, and tip (go1.8). Change-Id: Id88a05273fb963586b228d5e5dfacab32133a960 Reviewed-on: https://go-review.googlesource.com/32630 Reviewed-by: Robert Griesemer <gri@golang.org>
This commit is contained in:
parent
1529f889eb
commit
4549178751
@ -2,7 +2,7 @@
|
|||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
// +build go1.5
|
// +build !go1.8
|
||||||
|
|
||||||
// Bundle creates a single-source-file version of a source package
|
// Bundle creates a single-source-file version of a source package
|
||||||
// suitable for inclusion in a particular target package.
|
// suitable for inclusion in a particular target package.
|
||||||
|
388
cmd/bundle/main18.go
Normal file
388
cmd/bundle/main18.go
Normal file
@ -0,0 +1,388 @@
|
|||||||
|
// Copyright 2015 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.8
|
||||||
|
|
||||||
|
// Bundle creates a single-source-file version of a source package
|
||||||
|
// suitable for inclusion in a particular target package.
|
||||||
|
//
|
||||||
|
// Usage:
|
||||||
|
//
|
||||||
|
// bundle [-o file] [-dst path] [-pkg name] [-prefix p] [-import old=new] <src>
|
||||||
|
//
|
||||||
|
// The src argument specifies the import path of the package to bundle.
|
||||||
|
// The bundling of a directory of source files into a single source file
|
||||||
|
// necessarily imposes a number of constraints.
|
||||||
|
// The package being bundled must not use cgo; must not use conditional
|
||||||
|
// file compilation, whether with build tags or system-specific file names
|
||||||
|
// like code_amd64.go; must not depend on any special comments, which
|
||||||
|
// may not be preserved; must not use any assembly sources;
|
||||||
|
// must not use renaming imports; and must not use reflection-based APIs
|
||||||
|
// that depend on the specific names of types or struct fields.
|
||||||
|
//
|
||||||
|
// By default, bundle writes the bundled code to standard output.
|
||||||
|
// If the -o argument is given, bundle writes to the named file
|
||||||
|
// and also includes a ``//go:generate'' comment giving the exact
|
||||||
|
// command line used, for regenerating the file with ``go generate.''
|
||||||
|
//
|
||||||
|
// Bundle customizes its output for inclusion in a particular package, the destination package.
|
||||||
|
// By default bundle assumes the destination is the package in the current directory,
|
||||||
|
// but the destination package can be specified explicitly using the -dst option,
|
||||||
|
// which takes an import path as its argument.
|
||||||
|
// If the source package imports the destination package, bundle will remove
|
||||||
|
// those imports and rewrite any references to use direct references to the
|
||||||
|
// corresponding symbols.
|
||||||
|
// Bundle also must write a package declaration in the output and must
|
||||||
|
// choose a name to use in that declaration.
|
||||||
|
// If the -package option is given, bundle uses that name.
|
||||||
|
// Otherwise, if the -dst option is given, bundle uses the last
|
||||||
|
// element of the destination import path.
|
||||||
|
// Otherwise, by default bundle uses the package name found in the
|
||||||
|
// package sources in the current directory.
|
||||||
|
//
|
||||||
|
// To avoid collisions, bundle inserts a prefix at the beginning of
|
||||||
|
// every package-level const, func, type, and var identifier in src's code,
|
||||||
|
// updating references accordingly. The default prefix is the package name
|
||||||
|
// of the source package followed by an underscore. The -prefix option
|
||||||
|
// specifies an alternate prefix.
|
||||||
|
//
|
||||||
|
// Occasionally it is necessary to rewrite imports during the bundling
|
||||||
|
// process. The -import option, which may be repeated, specifies that
|
||||||
|
// an import of "old" should be rewritten to import "new" instead.
|
||||||
|
//
|
||||||
|
// Example
|
||||||
|
//
|
||||||
|
// Bundle archive/zip for inclusion in cmd/dist:
|
||||||
|
//
|
||||||
|
// cd $GOROOT/src/cmd/dist
|
||||||
|
// bundle -o zip.go archive/zip
|
||||||
|
//
|
||||||
|
// Bundle golang.org/x/net/http2 for inclusion in net/http,
|
||||||
|
// prefixing all identifiers by "http2" instead of "http2_",
|
||||||
|
// and rewriting the import "golang.org/x/net/http2/hpack"
|
||||||
|
// to "internal/golang.org/x/net/http2/hpack":
|
||||||
|
//
|
||||||
|
// cd $GOROOT/src/net/http
|
||||||
|
// bundle -o h2_bundle.go \
|
||||||
|
// -prefix http2 \
|
||||||
|
// -import golang.org/x/net/http2/hpack=internal/golang.org/x/net/http2/hpack \
|
||||||
|
// golang.org/x/net/http2
|
||||||
|
//
|
||||||
|
// Two ways to update the http2 bundle:
|
||||||
|
//
|
||||||
|
// go generate net/http
|
||||||
|
//
|
||||||
|
// cd $GOROOT/src/net/http
|
||||||
|
// go generate
|
||||||
|
//
|
||||||
|
// Update both bundles, restricting ``go generate'' to running bundle commands:
|
||||||
|
//
|
||||||
|
// go generate -run bundle cmd/dist net/http
|
||||||
|
//
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/build"
|
||||||
|
"go/format"
|
||||||
|
"go/parser"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"io/ioutil"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"golang.org/x/tools/go/loader"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
outputFile = flag.String("o", "", "write output to `file` (default standard output)")
|
||||||
|
dstPath = flag.String("dst", "", "set destination import `path` (default taken from current directory)")
|
||||||
|
pkgName = flag.String("pkg", "", "set destination package `name` (default taken from current directory)")
|
||||||
|
prefix = flag.String("prefix", "", "set bundled identifier prefix to `p` (default source package name + \"_\")")
|
||||||
|
underscore = flag.Bool("underscore", false, "rewrite golang.org to golang_org in imports; temporary workaround for golang.org/issue/16333")
|
||||||
|
|
||||||
|
importMap = map[string]string{}
|
||||||
|
)
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
flag.Var(flagFunc(addImportMap), "import", "rewrite import using `map`, of form old=new (can be repeated)")
|
||||||
|
}
|
||||||
|
|
||||||
|
func addImportMap(s string) {
|
||||||
|
if strings.Count(s, "=") != 1 {
|
||||||
|
log.Fatal("-import argument must be of the form old=new")
|
||||||
|
}
|
||||||
|
i := strings.Index(s, "=")
|
||||||
|
old, new := s[:i], s[i+1:]
|
||||||
|
if old == "" || new == "" {
|
||||||
|
log.Fatal("-import argument must be of the form old=new; old and new must be non-empty")
|
||||||
|
}
|
||||||
|
importMap[old] = new
|
||||||
|
}
|
||||||
|
|
||||||
|
func usage() {
|
||||||
|
fmt.Fprintf(os.Stderr, "Usage: bundle [options] <src>\n")
|
||||||
|
flag.PrintDefaults()
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
log.SetPrefix("bundle: ")
|
||||||
|
log.SetFlags(0)
|
||||||
|
|
||||||
|
flag.Usage = usage
|
||||||
|
flag.Parse()
|
||||||
|
args := flag.Args()
|
||||||
|
if len(args) != 1 {
|
||||||
|
usage()
|
||||||
|
os.Exit(2)
|
||||||
|
}
|
||||||
|
|
||||||
|
if *dstPath != "" {
|
||||||
|
if *pkgName == "" {
|
||||||
|
*pkgName = path.Base(*dstPath)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
wd, _ := os.Getwd()
|
||||||
|
pkg, err := build.ImportDir(wd, 0)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("cannot find package in current directory: %v", err)
|
||||||
|
}
|
||||||
|
*dstPath = pkg.ImportPath
|
||||||
|
if *pkgName == "" {
|
||||||
|
*pkgName = pkg.Name
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
code, err := bundle(args[0], *dstPath, *pkgName, *prefix)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
if *outputFile != "" {
|
||||||
|
err := ioutil.WriteFile(*outputFile, code, 0666)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
_, err := os.Stdout.Write(code)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// isStandardImportPath is copied from cmd/go in the standard library.
|
||||||
|
func isStandardImportPath(path string) bool {
|
||||||
|
i := strings.Index(path, "/")
|
||||||
|
if i < 0 {
|
||||||
|
i = len(path)
|
||||||
|
}
|
||||||
|
elem := path[:i]
|
||||||
|
return !strings.Contains(elem, ".")
|
||||||
|
}
|
||||||
|
|
||||||
|
var ctxt = &build.Default
|
||||||
|
|
||||||
|
func bundle(src, dst, dstpkg, prefix string) ([]byte, error) {
|
||||||
|
// Load the initial package.
|
||||||
|
conf := loader.Config{ParserMode: parser.ParseComments, Build: ctxt}
|
||||||
|
conf.TypeCheckFuncBodies = func(p string) bool { return p == src }
|
||||||
|
conf.Import(src)
|
||||||
|
|
||||||
|
lprog, err := conf.Load()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
info := lprog.Package(src)
|
||||||
|
if prefix == "" {
|
||||||
|
pkgName := info.Files[0].Name.Name
|
||||||
|
prefix = pkgName + "_"
|
||||||
|
}
|
||||||
|
|
||||||
|
objsToUpdate := make(map[types.Object]bool)
|
||||||
|
var rename func(from types.Object)
|
||||||
|
rename = func(from types.Object) {
|
||||||
|
if !objsToUpdate[from] {
|
||||||
|
objsToUpdate[from] = true
|
||||||
|
|
||||||
|
// Renaming a type that is used as an embedded field
|
||||||
|
// requires renaming the field too. e.g.
|
||||||
|
// type T int // if we rename this to U..
|
||||||
|
// var s struct {T}
|
||||||
|
// print(s.T) // ...this must change too
|
||||||
|
if _, ok := from.(*types.TypeName); ok {
|
||||||
|
for id, obj := range info.Uses {
|
||||||
|
if obj == from {
|
||||||
|
if field := info.Defs[id]; field != nil {
|
||||||
|
rename(field)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rename each package-level object.
|
||||||
|
scope := info.Pkg.Scope()
|
||||||
|
for _, name := range scope.Names() {
|
||||||
|
rename(scope.Lookup(name))
|
||||||
|
}
|
||||||
|
|
||||||
|
var out bytes.Buffer
|
||||||
|
|
||||||
|
fmt.Fprintf(&out, "// Code generated by golang.org/x/tools/cmd/bundle.\n")
|
||||||
|
if *outputFile != "" {
|
||||||
|
fmt.Fprintf(&out, "//go:generate bundle %s\n", strings.Join(os.Args[1:], " "))
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(&out, "// $ bundle %s\n", strings.Join(os.Args[1:], " "))
|
||||||
|
}
|
||||||
|
fmt.Fprintf(&out, "\n")
|
||||||
|
|
||||||
|
// Concatenate package comments from all files...
|
||||||
|
for _, f := range info.Files {
|
||||||
|
if doc := f.Doc.Text(); strings.TrimSpace(doc) != "" {
|
||||||
|
for _, line := range strings.Split(doc, "\n") {
|
||||||
|
fmt.Fprintf(&out, "// %s\n", line)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// ...but don't let them become the actual package comment.
|
||||||
|
fmt.Fprintln(&out)
|
||||||
|
|
||||||
|
fmt.Fprintf(&out, "package %s\n\n", dstpkg)
|
||||||
|
|
||||||
|
// BUG(adonovan,shurcooL): bundle may generate incorrect code
|
||||||
|
// due to shadowing between identifiers and imported package names.
|
||||||
|
//
|
||||||
|
// The generated code will either fail to compile or
|
||||||
|
// (unlikely) compile successfully but have different behavior
|
||||||
|
// than the original package. The risk of this happening is higher
|
||||||
|
// when the original package has renamed imports (they're typically
|
||||||
|
// renamed in order to resolve a shadow inside that particular .go file).
|
||||||
|
|
||||||
|
// TODO(adonovan,shurcooL):
|
||||||
|
// - detect shadowing issues, and either return error or resolve them
|
||||||
|
// - preserve comments from the original import declarations.
|
||||||
|
|
||||||
|
// pkgStd and pkgExt are sets of printed import specs. This is done
|
||||||
|
// to deduplicate instances of the same import name and path.
|
||||||
|
var pkgStd = make(map[string]bool)
|
||||||
|
var pkgExt = make(map[string]bool)
|
||||||
|
for _, f := range info.Files {
|
||||||
|
for _, imp := range f.Imports {
|
||||||
|
path, err := strconv.Unquote(imp.Path.Value)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("invalid import path string: %v", err) // Shouldn't happen here since conf.Load succeeded.
|
||||||
|
}
|
||||||
|
if path == dst {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if newPath, ok := importMap[path]; ok {
|
||||||
|
path = newPath
|
||||||
|
}
|
||||||
|
|
||||||
|
var name string
|
||||||
|
if imp.Name != nil {
|
||||||
|
name = imp.Name.Name
|
||||||
|
}
|
||||||
|
spec := fmt.Sprintf("%s %q", name, path)
|
||||||
|
if isStandardImportPath(path) {
|
||||||
|
pkgStd[spec] = true
|
||||||
|
} else {
|
||||||
|
if *underscore {
|
||||||
|
spec = strings.Replace(spec, "golang.org/", "golang_org/", 1)
|
||||||
|
}
|
||||||
|
pkgExt[spec] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Print a single declaration that imports all necessary packages.
|
||||||
|
fmt.Fprintln(&out, "import (")
|
||||||
|
for p := range pkgStd {
|
||||||
|
fmt.Fprintf(&out, "\t%s\n", p)
|
||||||
|
}
|
||||||
|
if len(pkgExt) > 0 {
|
||||||
|
fmt.Fprintln(&out)
|
||||||
|
}
|
||||||
|
for p := range pkgExt {
|
||||||
|
fmt.Fprintf(&out, "\t%s\n", p)
|
||||||
|
}
|
||||||
|
fmt.Fprint(&out, ")\n\n")
|
||||||
|
|
||||||
|
// Modify and print each file.
|
||||||
|
for _, f := range info.Files {
|
||||||
|
// Update renamed identifiers.
|
||||||
|
for id, obj := range info.Defs {
|
||||||
|
if objsToUpdate[obj] {
|
||||||
|
id.Name = prefix + obj.Name()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for id, obj := range info.Uses {
|
||||||
|
if objsToUpdate[obj] {
|
||||||
|
id.Name = prefix + obj.Name()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// For each qualified identifier that refers to the
|
||||||
|
// destination package, remove the qualifier.
|
||||||
|
// The "@@@." strings are removed in postprocessing.
|
||||||
|
ast.Inspect(f, func(n ast.Node) bool {
|
||||||
|
if sel, ok := n.(*ast.SelectorExpr); ok {
|
||||||
|
if id, ok := sel.X.(*ast.Ident); ok {
|
||||||
|
if obj, ok := info.Uses[id].(*types.PkgName); ok {
|
||||||
|
if obj.Imported().Path() == dst {
|
||||||
|
id.Name = "@@@"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
|
||||||
|
// Pretty-print package-level declarations.
|
||||||
|
// but no package or import declarations.
|
||||||
|
//
|
||||||
|
// TODO(adonovan): this may cause loss of comments
|
||||||
|
// preceding or associated with the package or import
|
||||||
|
// declarations or not associated with any declaration.
|
||||||
|
// Check.
|
||||||
|
var buf bytes.Buffer
|
||||||
|
for _, decl := range f.Decls {
|
||||||
|
if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.IMPORT {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
buf.Reset()
|
||||||
|
format.Node(&buf, lprog.Fset, decl)
|
||||||
|
// Remove each "@@@." in the output.
|
||||||
|
// TODO(adonovan): not hygienic.
|
||||||
|
out.Write(bytes.Replace(buf.Bytes(), []byte("@@@."), nil, -1))
|
||||||
|
out.WriteString("\n\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Now format the entire thing.
|
||||||
|
result, err := format.Source(out.Bytes())
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("formatting failed: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type flagFunc func(string)
|
||||||
|
|
||||||
|
func (f flagFunc) Set(s string) error {
|
||||||
|
f(s)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f flagFunc) String() string { return "" }
|
@ -2,6 +2,8 @@
|
|||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build !go1.8
|
||||||
|
|
||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
259
cmd/guru/callees18.go
Normal file
259
cmd/guru/callees18.go
Normal file
@ -0,0 +1,259 @@
|
|||||||
|
// Copyright 2013 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.8
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"sort"
|
||||||
|
|
||||||
|
"golang.org/x/tools/cmd/guru/serial"
|
||||||
|
"golang.org/x/tools/go/loader"
|
||||||
|
"golang.org/x/tools/go/pointer"
|
||||||
|
"golang.org/x/tools/go/ssa"
|
||||||
|
"golang.org/x/tools/go/ssa/ssautil"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Callees reports the possible callees of the function call site
|
||||||
|
// identified by the specified source location.
|
||||||
|
func callees(q *Query) error {
|
||||||
|
lconf := loader.Config{Build: q.Build}
|
||||||
|
|
||||||
|
if err := setPTAScope(&lconf, q.Scope); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load/parse/type-check the program.
|
||||||
|
lprog, err := lconf.Load()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
qpos, err := parseQueryPos(lprog, q.Pos, true) // needs exact pos
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine the enclosing call for the specified position.
|
||||||
|
var e *ast.CallExpr
|
||||||
|
for _, n := range qpos.path {
|
||||||
|
if e, _ = n.(*ast.CallExpr); e != nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if e == nil {
|
||||||
|
return fmt.Errorf("there is no function call here")
|
||||||
|
}
|
||||||
|
// TODO(adonovan): issue an error if the call is "too far
|
||||||
|
// away" from the current selection, as this most likely is
|
||||||
|
// not what the user intended.
|
||||||
|
|
||||||
|
// Reject type conversions.
|
||||||
|
if qpos.info.Types[e.Fun].IsType() {
|
||||||
|
return fmt.Errorf("this is a type conversion, not a function call")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Deal with obviously static calls before constructing SSA form.
|
||||||
|
// Some static calls may yet require SSA construction,
|
||||||
|
// e.g. f := func(){}; f().
|
||||||
|
switch funexpr := unparen(e.Fun).(type) {
|
||||||
|
case *ast.Ident:
|
||||||
|
switch obj := qpos.info.Uses[funexpr].(type) {
|
||||||
|
case *types.Builtin:
|
||||||
|
// Reject calls to built-ins.
|
||||||
|
return fmt.Errorf("this is a call to the built-in '%s' operator", obj.Name())
|
||||||
|
case *types.Func:
|
||||||
|
// This is a static function call
|
||||||
|
q.Output(lprog.Fset, &calleesTypesResult{
|
||||||
|
site: e,
|
||||||
|
callee: obj,
|
||||||
|
})
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
case *ast.SelectorExpr:
|
||||||
|
sel := qpos.info.Selections[funexpr]
|
||||||
|
if sel == nil {
|
||||||
|
// qualified identifier.
|
||||||
|
// May refer to top level function variable
|
||||||
|
// or to top level function.
|
||||||
|
callee := qpos.info.Uses[funexpr.Sel]
|
||||||
|
if obj, ok := callee.(*types.Func); ok {
|
||||||
|
q.Output(lprog.Fset, &calleesTypesResult{
|
||||||
|
site: e,
|
||||||
|
callee: obj,
|
||||||
|
})
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
} else if sel.Kind() == types.MethodVal {
|
||||||
|
// Inspect the receiver type of the selected method.
|
||||||
|
// If it is concrete, the call is statically dispatched.
|
||||||
|
// (Due to implicit field selections, it is not enough to look
|
||||||
|
// at sel.Recv(), the type of the actual receiver expression.)
|
||||||
|
method := sel.Obj().(*types.Func)
|
||||||
|
recvtype := method.Type().(*types.Signature).Recv().Type()
|
||||||
|
if !types.IsInterface(recvtype) {
|
||||||
|
// static method call
|
||||||
|
q.Output(lprog.Fset, &calleesTypesResult{
|
||||||
|
site: e,
|
||||||
|
callee: method,
|
||||||
|
})
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
prog := ssautil.CreateProgram(lprog, ssa.GlobalDebug)
|
||||||
|
|
||||||
|
ptaConfig, err := setupPTA(prog, lprog, q.PTALog, q.Reflection)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
pkg := prog.Package(qpos.info.Pkg)
|
||||||
|
if pkg == nil {
|
||||||
|
return fmt.Errorf("no SSA package")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Defer SSA construction till after errors are reported.
|
||||||
|
prog.Build()
|
||||||
|
|
||||||
|
// Ascertain calling function and call site.
|
||||||
|
callerFn := ssa.EnclosingFunction(pkg, qpos.path)
|
||||||
|
if callerFn == nil {
|
||||||
|
return fmt.Errorf("no SSA function built for this location (dead code?)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the call site.
|
||||||
|
site, err := findCallSite(callerFn, e)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
funcs, err := findCallees(ptaConfig, site)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
q.Output(lprog.Fset, &calleesSSAResult{
|
||||||
|
site: site,
|
||||||
|
funcs: funcs,
|
||||||
|
})
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func findCallSite(fn *ssa.Function, call *ast.CallExpr) (ssa.CallInstruction, error) {
|
||||||
|
instr, _ := fn.ValueForExpr(call)
|
||||||
|
callInstr, _ := instr.(ssa.CallInstruction)
|
||||||
|
if instr == nil {
|
||||||
|
return nil, fmt.Errorf("this call site is unreachable in this analysis")
|
||||||
|
}
|
||||||
|
return callInstr, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func findCallees(conf *pointer.Config, site ssa.CallInstruction) ([]*ssa.Function, error) {
|
||||||
|
// Avoid running the pointer analysis for static calls.
|
||||||
|
if callee := site.Common().StaticCallee(); callee != nil {
|
||||||
|
switch callee.String() {
|
||||||
|
case "runtime.SetFinalizer", "(reflect.Value).Call":
|
||||||
|
// The PTA treats calls to these intrinsics as dynamic.
|
||||||
|
// TODO(adonovan): avoid reliance on PTA internals.
|
||||||
|
|
||||||
|
default:
|
||||||
|
return []*ssa.Function{callee}, nil // singleton
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Dynamic call: use pointer analysis.
|
||||||
|
conf.BuildCallGraph = true
|
||||||
|
cg := ptrAnalysis(conf).CallGraph
|
||||||
|
cg.DeleteSyntheticNodes()
|
||||||
|
|
||||||
|
// Find all call edges from the site.
|
||||||
|
n := cg.Nodes[site.Parent()]
|
||||||
|
if n == nil {
|
||||||
|
return nil, fmt.Errorf("this call site is unreachable in this analysis")
|
||||||
|
}
|
||||||
|
calleesMap := make(map[*ssa.Function]bool)
|
||||||
|
for _, edge := range n.Out {
|
||||||
|
if edge.Site == site {
|
||||||
|
calleesMap[edge.Callee.Func] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// De-duplicate and sort.
|
||||||
|
funcs := make([]*ssa.Function, 0, len(calleesMap))
|
||||||
|
for f := range calleesMap {
|
||||||
|
funcs = append(funcs, f)
|
||||||
|
}
|
||||||
|
sort.Sort(byFuncPos(funcs))
|
||||||
|
return funcs, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type calleesSSAResult struct {
|
||||||
|
site ssa.CallInstruction
|
||||||
|
funcs []*ssa.Function
|
||||||
|
}
|
||||||
|
|
||||||
|
type calleesTypesResult struct {
|
||||||
|
site *ast.CallExpr
|
||||||
|
callee *types.Func
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *calleesSSAResult) PrintPlain(printf printfFunc) {
|
||||||
|
if len(r.funcs) == 0 {
|
||||||
|
// dynamic call on a provably nil func/interface
|
||||||
|
printf(r.site, "%s on nil value", r.site.Common().Description())
|
||||||
|
} else {
|
||||||
|
printf(r.site, "this %s dispatches to:", r.site.Common().Description())
|
||||||
|
for _, callee := range r.funcs {
|
||||||
|
printf(callee, "\t%s", callee)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *calleesSSAResult) JSON(fset *token.FileSet) []byte {
|
||||||
|
j := &serial.Callees{
|
||||||
|
Pos: fset.Position(r.site.Pos()).String(),
|
||||||
|
Desc: r.site.Common().Description(),
|
||||||
|
}
|
||||||
|
for _, callee := range r.funcs {
|
||||||
|
j.Callees = append(j.Callees, &serial.Callee{
|
||||||
|
Name: callee.String(),
|
||||||
|
Pos: fset.Position(callee.Pos()).String(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return toJSON(j)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *calleesTypesResult) PrintPlain(printf printfFunc) {
|
||||||
|
printf(r.site, "this static function call dispatches to:")
|
||||||
|
printf(r.callee, "\t%s", r.callee.FullName())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *calleesTypesResult) JSON(fset *token.FileSet) []byte {
|
||||||
|
j := &serial.Callees{
|
||||||
|
Pos: fset.Position(r.site.Pos()).String(),
|
||||||
|
Desc: "static function call",
|
||||||
|
}
|
||||||
|
j.Callees = []*serial.Callee{
|
||||||
|
&serial.Callee{
|
||||||
|
Name: r.callee.FullName(),
|
||||||
|
Pos: fset.Position(r.callee.Pos()).String(),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
return toJSON(j)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NB: byFuncPos is not deterministic across packages since it depends on load order.
|
||||||
|
// Use lessPos if the tests need it.
|
||||||
|
type byFuncPos []*ssa.Function
|
||||||
|
|
||||||
|
func (a byFuncPos) Len() int { return len(a) }
|
||||||
|
func (a byFuncPos) Less(i, j int) bool { return a[i].Pos() < a[j].Pos() }
|
||||||
|
func (a byFuncPos) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
|
@ -2,6 +2,8 @@
|
|||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build !go1.8
|
||||||
|
|
||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
207
cmd/guru/definition18.go
Normal file
207
cmd/guru/definition18.go
Normal file
@ -0,0 +1,207 @@
|
|||||||
|
// Copyright 2013 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.8
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/build"
|
||||||
|
"go/parser"
|
||||||
|
"go/token"
|
||||||
|
pathpkg "path"
|
||||||
|
"path/filepath"
|
||||||
|
"strconv"
|
||||||
|
|
||||||
|
"golang.org/x/tools/cmd/guru/serial"
|
||||||
|
"golang.org/x/tools/go/buildutil"
|
||||||
|
"golang.org/x/tools/go/loader"
|
||||||
|
)
|
||||||
|
|
||||||
|
// definition reports the location of the definition of an identifier.
|
||||||
|
func definition(q *Query) error {
|
||||||
|
// First try the simple resolution done by parser.
|
||||||
|
// It only works for intra-file references but it is very fast.
|
||||||
|
// (Extending this approach to all the files of the package,
|
||||||
|
// resolved using ast.NewPackage, was not worth the effort.)
|
||||||
|
{
|
||||||
|
qpos, err := fastQueryPos(q.Build, q.Pos)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
id, _ := qpos.path[0].(*ast.Ident)
|
||||||
|
if id == nil {
|
||||||
|
return fmt.Errorf("no identifier here")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Did the parser resolve it to a local object?
|
||||||
|
if obj := id.Obj; obj != nil && obj.Pos().IsValid() {
|
||||||
|
q.Output(qpos.fset, &definitionResult{
|
||||||
|
pos: obj.Pos(),
|
||||||
|
descr: fmt.Sprintf("%s %s", obj.Kind, obj.Name),
|
||||||
|
})
|
||||||
|
return nil // success
|
||||||
|
}
|
||||||
|
|
||||||
|
// Qualified identifier?
|
||||||
|
if pkg := packageForQualIdent(qpos.path, id); pkg != "" {
|
||||||
|
srcdir := filepath.Dir(qpos.fset.File(qpos.start).Name())
|
||||||
|
tok, pos, err := findPackageMember(q.Build, qpos.fset, srcdir, pkg, id.Name)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
q.Output(qpos.fset, &definitionResult{
|
||||||
|
pos: pos,
|
||||||
|
descr: fmt.Sprintf("%s %s.%s", tok, pkg, id.Name),
|
||||||
|
})
|
||||||
|
return nil // success
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fall back on the type checker.
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run the type checker.
|
||||||
|
lconf := loader.Config{Build: q.Build}
|
||||||
|
allowErrors(&lconf)
|
||||||
|
|
||||||
|
if _, err := importQueryPackage(q.Pos, &lconf); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load/parse/type-check the program.
|
||||||
|
lprog, err := lconf.Load()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
qpos, err := parseQueryPos(lprog, q.Pos, false)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
id, _ := qpos.path[0].(*ast.Ident)
|
||||||
|
if id == nil {
|
||||||
|
return fmt.Errorf("no identifier here")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Look up the declaration of this identifier.
|
||||||
|
// If id is an anonymous field declaration,
|
||||||
|
// it is both a use of a type and a def of a field;
|
||||||
|
// prefer the use in that case.
|
||||||
|
obj := qpos.info.Uses[id]
|
||||||
|
if obj == nil {
|
||||||
|
obj = qpos.info.Defs[id]
|
||||||
|
if obj == nil {
|
||||||
|
// Happens for y in "switch y := x.(type)",
|
||||||
|
// and the package declaration,
|
||||||
|
// but I think that's all.
|
||||||
|
return fmt.Errorf("no object for identifier")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !obj.Pos().IsValid() {
|
||||||
|
return fmt.Errorf("%s is built in", obj.Name())
|
||||||
|
}
|
||||||
|
|
||||||
|
q.Output(lprog.Fset, &definitionResult{
|
||||||
|
pos: obj.Pos(),
|
||||||
|
descr: qpos.objectString(obj),
|
||||||
|
})
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// packageForQualIdent returns the package p if id is X in a qualified
|
||||||
|
// identifier p.X; it returns "" otherwise.
|
||||||
|
//
|
||||||
|
// Precondition: id is path[0], and the parser did not resolve id to a
|
||||||
|
// local object. For speed, packageForQualIdent assumes that p is a
|
||||||
|
// package iff it is the basename of an import path (and not, say, a
|
||||||
|
// package-level decl in another file or a predeclared identifier).
|
||||||
|
func packageForQualIdent(path []ast.Node, id *ast.Ident) string {
|
||||||
|
if sel, ok := path[1].(*ast.SelectorExpr); ok && sel.Sel == id && ast.IsExported(id.Name) {
|
||||||
|
if pkgid, ok := sel.X.(*ast.Ident); ok && pkgid.Obj == nil {
|
||||||
|
f := path[len(path)-1].(*ast.File)
|
||||||
|
for _, imp := range f.Imports {
|
||||||
|
path, _ := strconv.Unquote(imp.Path.Value)
|
||||||
|
if imp.Name != nil {
|
||||||
|
if imp.Name.Name == pkgid.Name {
|
||||||
|
return path // renaming import
|
||||||
|
}
|
||||||
|
} else if pathpkg.Base(path) == pkgid.Name {
|
||||||
|
return path // ordinary import
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// findPackageMember returns the type and position of the declaration of
|
||||||
|
// pkg.member by loading and parsing the files of that package.
|
||||||
|
// srcdir is the directory in which the import appears.
|
||||||
|
func findPackageMember(ctxt *build.Context, fset *token.FileSet, srcdir, pkg, member string) (token.Token, token.Pos, error) {
|
||||||
|
bp, err := ctxt.Import(pkg, srcdir, 0)
|
||||||
|
if err != nil {
|
||||||
|
return 0, token.NoPos, err // no files for package
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO(adonovan): opt: parallelize.
|
||||||
|
for _, fname := range bp.GoFiles {
|
||||||
|
filename := filepath.Join(bp.Dir, fname)
|
||||||
|
|
||||||
|
// Parse the file, opening it the file via the build.Context
|
||||||
|
// so that we observe the effects of the -modified flag.
|
||||||
|
f, _ := buildutil.ParseFile(fset, ctxt, nil, ".", filename, parser.Mode(0))
|
||||||
|
if f == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find a package-level decl called 'member'.
|
||||||
|
for _, decl := range f.Decls {
|
||||||
|
switch decl := decl.(type) {
|
||||||
|
case *ast.GenDecl:
|
||||||
|
for _, spec := range decl.Specs {
|
||||||
|
switch spec := spec.(type) {
|
||||||
|
case *ast.ValueSpec:
|
||||||
|
// const or var
|
||||||
|
for _, id := range spec.Names {
|
||||||
|
if id.Name == member {
|
||||||
|
return decl.Tok, id.Pos(), nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case *ast.TypeSpec:
|
||||||
|
if spec.Name.Name == member {
|
||||||
|
return token.TYPE, spec.Name.Pos(), nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case *ast.FuncDecl:
|
||||||
|
if decl.Recv == nil && decl.Name.Name == member {
|
||||||
|
return token.FUNC, decl.Name.Pos(), nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return 0, token.NoPos, fmt.Errorf("couldn't find declaration of %s in %q", member, pkg)
|
||||||
|
}
|
||||||
|
|
||||||
|
type definitionResult struct {
|
||||||
|
pos token.Pos // (nonzero) location of definition
|
||||||
|
descr string // description of object it denotes
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *definitionResult) PrintPlain(printf printfFunc) {
|
||||||
|
printf(r.pos, "defined here as %s", r.descr)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *definitionResult) JSON(fset *token.FileSet) []byte {
|
||||||
|
return toJSON(&serial.Definition{
|
||||||
|
Desc: r.descr,
|
||||||
|
ObjPos: fset.Position(r.pos).String(),
|
||||||
|
})
|
||||||
|
}
|
@ -2,6 +2,8 @@
|
|||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build !go1.8
|
||||||
|
|
||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
900
cmd/guru/describe18.go
Normal file
900
cmd/guru/describe18.go
Normal file
@ -0,0 +1,900 @@
|
|||||||
|
// Copyright 2013 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.8
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
exact "go/constant"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
"unicode/utf8"
|
||||||
|
|
||||||
|
"golang.org/x/tools/cmd/guru/serial"
|
||||||
|
"golang.org/x/tools/go/ast/astutil"
|
||||||
|
"golang.org/x/tools/go/loader"
|
||||||
|
"golang.org/x/tools/go/types/typeutil"
|
||||||
|
)
|
||||||
|
|
||||||
|
// describe describes the syntax node denoted by the query position,
|
||||||
|
// including:
|
||||||
|
// - its syntactic category
|
||||||
|
// - the definition of its referent (for identifiers) [now redundant]
|
||||||
|
// - its type, fields, and methods (for an expression or type expression)
|
||||||
|
//
|
||||||
|
func describe(q *Query) error {
|
||||||
|
lconf := loader.Config{Build: q.Build}
|
||||||
|
allowErrors(&lconf)
|
||||||
|
|
||||||
|
if _, err := importQueryPackage(q.Pos, &lconf); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load/parse/type-check the program.
|
||||||
|
lprog, err := lconf.Load()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
qpos, err := parseQueryPos(lprog, q.Pos, true) // (need exact pos)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if false { // debugging
|
||||||
|
fprintf(os.Stderr, lprog.Fset, qpos.path[0], "you selected: %s %s",
|
||||||
|
astutil.NodeDescription(qpos.path[0]), pathToString(qpos.path))
|
||||||
|
}
|
||||||
|
|
||||||
|
var qr QueryResult
|
||||||
|
path, action := findInterestingNode(qpos.info, qpos.path)
|
||||||
|
switch action {
|
||||||
|
case actionExpr:
|
||||||
|
qr, err = describeValue(qpos, path)
|
||||||
|
|
||||||
|
case actionType:
|
||||||
|
qr, err = describeType(qpos, path)
|
||||||
|
|
||||||
|
case actionPackage:
|
||||||
|
qr, err = describePackage(qpos, path)
|
||||||
|
|
||||||
|
case actionStmt:
|
||||||
|
qr, err = describeStmt(qpos, path)
|
||||||
|
|
||||||
|
case actionUnknown:
|
||||||
|
qr = &describeUnknownResult{path[0]}
|
||||||
|
|
||||||
|
default:
|
||||||
|
panic(action) // unreachable
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
q.Output(lprog.Fset, qr)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type describeUnknownResult struct {
|
||||||
|
node ast.Node
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeUnknownResult) PrintPlain(printf printfFunc) {
|
||||||
|
// Nothing much to say about misc syntax.
|
||||||
|
printf(r.node, "%s", astutil.NodeDescription(r.node))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeUnknownResult) JSON(fset *token.FileSet) []byte {
|
||||||
|
return toJSON(&serial.Describe{
|
||||||
|
Desc: astutil.NodeDescription(r.node),
|
||||||
|
Pos: fset.Position(r.node.Pos()).String(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
type action int
|
||||||
|
|
||||||
|
const (
|
||||||
|
actionUnknown action = iota // None of the below
|
||||||
|
actionExpr // FuncDecl, true Expr or Ident(types.{Const,Var})
|
||||||
|
actionType // type Expr or Ident(types.TypeName).
|
||||||
|
actionStmt // Stmt or Ident(types.Label)
|
||||||
|
actionPackage // Ident(types.Package) or ImportSpec
|
||||||
|
)
|
||||||
|
|
||||||
|
// findInterestingNode classifies the syntax node denoted by path as one of:
|
||||||
|
// - an expression, part of an expression or a reference to a constant
|
||||||
|
// or variable;
|
||||||
|
// - a type, part of a type, or a reference to a named type;
|
||||||
|
// - a statement, part of a statement, or a label referring to a statement;
|
||||||
|
// - part of a package declaration or import spec.
|
||||||
|
// - none of the above.
|
||||||
|
// and returns the most "interesting" associated node, which may be
|
||||||
|
// the same node, an ancestor or a descendent.
|
||||||
|
//
|
||||||
|
func findInterestingNode(pkginfo *loader.PackageInfo, path []ast.Node) ([]ast.Node, action) {
|
||||||
|
// TODO(adonovan): integrate with go/types/stdlib_test.go and
|
||||||
|
// apply this to every AST node we can find to make sure it
|
||||||
|
// doesn't crash.
|
||||||
|
|
||||||
|
// TODO(adonovan): audit for ParenExpr safety, esp. since we
|
||||||
|
// traverse up and down.
|
||||||
|
|
||||||
|
// TODO(adonovan): if the users selects the "." in
|
||||||
|
// "fmt.Fprintf()", they'll get an ambiguous selection error;
|
||||||
|
// we won't even reach here. Can we do better?
|
||||||
|
|
||||||
|
// TODO(adonovan): describing a field within 'type T struct {...}'
|
||||||
|
// describes the (anonymous) struct type and concludes "no methods".
|
||||||
|
// We should ascend to the enclosing type decl, if any.
|
||||||
|
|
||||||
|
for len(path) > 0 {
|
||||||
|
switch n := path[0].(type) {
|
||||||
|
case *ast.GenDecl:
|
||||||
|
if len(n.Specs) == 1 {
|
||||||
|
// Descend to sole {Import,Type,Value}Spec child.
|
||||||
|
path = append([]ast.Node{n.Specs[0]}, path...)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return path, actionUnknown // uninteresting
|
||||||
|
|
||||||
|
case *ast.FuncDecl:
|
||||||
|
// Descend to function name.
|
||||||
|
path = append([]ast.Node{n.Name}, path...)
|
||||||
|
continue
|
||||||
|
|
||||||
|
case *ast.ImportSpec:
|
||||||
|
return path, actionPackage
|
||||||
|
|
||||||
|
case *ast.ValueSpec:
|
||||||
|
if len(n.Names) == 1 {
|
||||||
|
// Descend to sole Ident child.
|
||||||
|
path = append([]ast.Node{n.Names[0]}, path...)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return path, actionUnknown // uninteresting
|
||||||
|
|
||||||
|
case *ast.TypeSpec:
|
||||||
|
// Descend to type name.
|
||||||
|
path = append([]ast.Node{n.Name}, path...)
|
||||||
|
continue
|
||||||
|
|
||||||
|
case ast.Stmt:
|
||||||
|
return path, actionStmt
|
||||||
|
|
||||||
|
case *ast.ArrayType,
|
||||||
|
*ast.StructType,
|
||||||
|
*ast.FuncType,
|
||||||
|
*ast.InterfaceType,
|
||||||
|
*ast.MapType,
|
||||||
|
*ast.ChanType:
|
||||||
|
return path, actionType
|
||||||
|
|
||||||
|
case *ast.Comment, *ast.CommentGroup, *ast.File, *ast.KeyValueExpr, *ast.CommClause:
|
||||||
|
return path, actionUnknown // uninteresting
|
||||||
|
|
||||||
|
case *ast.Ellipsis:
|
||||||
|
// Continue to enclosing node.
|
||||||
|
// e.g. [...]T in ArrayType
|
||||||
|
// f(x...) in CallExpr
|
||||||
|
// f(x...T) in FuncType
|
||||||
|
|
||||||
|
case *ast.Field:
|
||||||
|
// TODO(adonovan): this needs more thought,
|
||||||
|
// since fields can be so many things.
|
||||||
|
if len(n.Names) == 1 {
|
||||||
|
// Descend to sole Ident child.
|
||||||
|
path = append([]ast.Node{n.Names[0]}, path...)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// Zero names (e.g. anon field in struct)
|
||||||
|
// or multiple field or param names:
|
||||||
|
// continue to enclosing field list.
|
||||||
|
|
||||||
|
case *ast.FieldList:
|
||||||
|
// Continue to enclosing node:
|
||||||
|
// {Struct,Func,Interface}Type or FuncDecl.
|
||||||
|
|
||||||
|
case *ast.BasicLit:
|
||||||
|
if _, ok := path[1].(*ast.ImportSpec); ok {
|
||||||
|
return path[1:], actionPackage
|
||||||
|
}
|
||||||
|
return path, actionExpr
|
||||||
|
|
||||||
|
case *ast.SelectorExpr:
|
||||||
|
// TODO(adonovan): use Selections info directly.
|
||||||
|
if pkginfo.Uses[n.Sel] == nil {
|
||||||
|
// TODO(adonovan): is this reachable?
|
||||||
|
return path, actionUnknown
|
||||||
|
}
|
||||||
|
// Descend to .Sel child.
|
||||||
|
path = append([]ast.Node{n.Sel}, path...)
|
||||||
|
continue
|
||||||
|
|
||||||
|
case *ast.Ident:
|
||||||
|
switch pkginfo.ObjectOf(n).(type) {
|
||||||
|
case *types.PkgName:
|
||||||
|
return path, actionPackage
|
||||||
|
|
||||||
|
case *types.Const:
|
||||||
|
return path, actionExpr
|
||||||
|
|
||||||
|
case *types.Label:
|
||||||
|
return path, actionStmt
|
||||||
|
|
||||||
|
case *types.TypeName:
|
||||||
|
return path, actionType
|
||||||
|
|
||||||
|
case *types.Var:
|
||||||
|
// For x in 'struct {x T}', return struct type, for now.
|
||||||
|
if _, ok := path[1].(*ast.Field); ok {
|
||||||
|
_ = path[2].(*ast.FieldList) // assertion
|
||||||
|
if _, ok := path[3].(*ast.StructType); ok {
|
||||||
|
return path[3:], actionType
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return path, actionExpr
|
||||||
|
|
||||||
|
case *types.Func:
|
||||||
|
return path, actionExpr
|
||||||
|
|
||||||
|
case *types.Builtin:
|
||||||
|
// For reference to built-in function, return enclosing call.
|
||||||
|
path = path[1:] // ascend to enclosing function call
|
||||||
|
continue
|
||||||
|
|
||||||
|
case *types.Nil:
|
||||||
|
return path, actionExpr
|
||||||
|
}
|
||||||
|
|
||||||
|
// No object.
|
||||||
|
switch path[1].(type) {
|
||||||
|
case *ast.SelectorExpr:
|
||||||
|
// Return enclosing selector expression.
|
||||||
|
return path[1:], actionExpr
|
||||||
|
|
||||||
|
case *ast.Field:
|
||||||
|
// TODO(adonovan): test this.
|
||||||
|
// e.g. all f in:
|
||||||
|
// struct { f, g int }
|
||||||
|
// interface { f() }
|
||||||
|
// func (f T) method(f, g int) (f, g bool)
|
||||||
|
//
|
||||||
|
// switch path[3].(type) {
|
||||||
|
// case *ast.FuncDecl:
|
||||||
|
// case *ast.StructType:
|
||||||
|
// case *ast.InterfaceType:
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// return path[1:], actionExpr
|
||||||
|
//
|
||||||
|
// Unclear what to do with these.
|
||||||
|
// Struct.Fields -- field
|
||||||
|
// Interface.Methods -- field
|
||||||
|
// FuncType.{Params.Results} -- actionExpr
|
||||||
|
// FuncDecl.Recv -- actionExpr
|
||||||
|
|
||||||
|
case *ast.File:
|
||||||
|
// 'package foo'
|
||||||
|
return path, actionPackage
|
||||||
|
|
||||||
|
case *ast.ImportSpec:
|
||||||
|
return path[1:], actionPackage
|
||||||
|
|
||||||
|
default:
|
||||||
|
// e.g. blank identifier
|
||||||
|
// or y in "switch y := x.(type)"
|
||||||
|
// or code in a _test.go file that's not part of the package.
|
||||||
|
return path, actionUnknown
|
||||||
|
}
|
||||||
|
|
||||||
|
case *ast.StarExpr:
|
||||||
|
if pkginfo.Types[n].IsType() {
|
||||||
|
return path, actionType
|
||||||
|
}
|
||||||
|
return path, actionExpr
|
||||||
|
|
||||||
|
case ast.Expr:
|
||||||
|
// All Expr but {BasicLit,Ident,StarExpr} are
|
||||||
|
// "true" expressions that evaluate to a value.
|
||||||
|
return path, actionExpr
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ascend to parent.
|
||||||
|
path = path[1:]
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, actionUnknown // unreachable
|
||||||
|
}
|
||||||
|
|
||||||
|
func describeValue(qpos *queryPos, path []ast.Node) (*describeValueResult, error) {
|
||||||
|
var expr ast.Expr
|
||||||
|
var obj types.Object
|
||||||
|
switch n := path[0].(type) {
|
||||||
|
case *ast.ValueSpec:
|
||||||
|
// ambiguous ValueSpec containing multiple names
|
||||||
|
return nil, fmt.Errorf("multiple value specification")
|
||||||
|
case *ast.Ident:
|
||||||
|
obj = qpos.info.ObjectOf(n)
|
||||||
|
expr = n
|
||||||
|
case ast.Expr:
|
||||||
|
expr = n
|
||||||
|
default:
|
||||||
|
// TODO(adonovan): is this reachable?
|
||||||
|
return nil, fmt.Errorf("unexpected AST for expr: %T", n)
|
||||||
|
}
|
||||||
|
|
||||||
|
t := qpos.info.TypeOf(expr)
|
||||||
|
if t == nil {
|
||||||
|
t = types.Typ[types.Invalid]
|
||||||
|
}
|
||||||
|
constVal := qpos.info.Types[expr].Value
|
||||||
|
|
||||||
|
return &describeValueResult{
|
||||||
|
qpos: qpos,
|
||||||
|
expr: expr,
|
||||||
|
typ: t,
|
||||||
|
constVal: constVal,
|
||||||
|
obj: obj,
|
||||||
|
methods: accessibleMethods(t, qpos.info.Pkg),
|
||||||
|
fields: accessibleFields(t, qpos.info.Pkg),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type describeValueResult struct {
|
||||||
|
qpos *queryPos
|
||||||
|
expr ast.Expr // query node
|
||||||
|
typ types.Type // type of expression
|
||||||
|
constVal exact.Value // value of expression, if constant
|
||||||
|
obj types.Object // var/func/const object, if expr was Ident
|
||||||
|
methods []*types.Selection
|
||||||
|
fields []describeField
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeValueResult) PrintPlain(printf printfFunc) {
|
||||||
|
var prefix, suffix string
|
||||||
|
if r.constVal != nil {
|
||||||
|
suffix = fmt.Sprintf(" of constant value %s", constValString(r.constVal))
|
||||||
|
}
|
||||||
|
switch obj := r.obj.(type) {
|
||||||
|
case *types.Func:
|
||||||
|
if recv := obj.Type().(*types.Signature).Recv(); recv != nil {
|
||||||
|
if _, ok := recv.Type().Underlying().(*types.Interface); ok {
|
||||||
|
prefix = "interface method "
|
||||||
|
} else {
|
||||||
|
prefix = "method "
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Describe the expression.
|
||||||
|
if r.obj != nil {
|
||||||
|
if r.obj.Pos() == r.expr.Pos() {
|
||||||
|
// defining ident
|
||||||
|
printf(r.expr, "definition of %s%s%s", prefix, r.qpos.objectString(r.obj), suffix)
|
||||||
|
} else {
|
||||||
|
// referring ident
|
||||||
|
printf(r.expr, "reference to %s%s%s", prefix, r.qpos.objectString(r.obj), suffix)
|
||||||
|
if def := r.obj.Pos(); def != token.NoPos {
|
||||||
|
printf(def, "defined here")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
desc := astutil.NodeDescription(r.expr)
|
||||||
|
if suffix != "" {
|
||||||
|
// constant expression
|
||||||
|
printf(r.expr, "%s%s", desc, suffix)
|
||||||
|
} else {
|
||||||
|
// non-constant expression
|
||||||
|
printf(r.expr, "%s of type %s", desc, r.qpos.typeString(r.typ))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
printMethods(printf, r.expr, r.methods)
|
||||||
|
printFields(printf, r.expr, r.fields)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeValueResult) JSON(fset *token.FileSet) []byte {
|
||||||
|
var value, objpos string
|
||||||
|
if r.constVal != nil {
|
||||||
|
value = r.constVal.String()
|
||||||
|
}
|
||||||
|
if r.obj != nil {
|
||||||
|
objpos = fset.Position(r.obj.Pos()).String()
|
||||||
|
}
|
||||||
|
|
||||||
|
return toJSON(&serial.Describe{
|
||||||
|
Desc: astutil.NodeDescription(r.expr),
|
||||||
|
Pos: fset.Position(r.expr.Pos()).String(),
|
||||||
|
Detail: "value",
|
||||||
|
Value: &serial.DescribeValue{
|
||||||
|
Type: r.qpos.typeString(r.typ),
|
||||||
|
Value: value,
|
||||||
|
ObjPos: objpos,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- TYPE ------------------------------------------------------------
|
||||||
|
|
||||||
|
func describeType(qpos *queryPos, path []ast.Node) (*describeTypeResult, error) {
|
||||||
|
var description string
|
||||||
|
var t types.Type
|
||||||
|
switch n := path[0].(type) {
|
||||||
|
case *ast.Ident:
|
||||||
|
t = qpos.info.TypeOf(n)
|
||||||
|
switch t := t.(type) {
|
||||||
|
case *types.Basic:
|
||||||
|
description = "reference to built-in "
|
||||||
|
|
||||||
|
case *types.Named:
|
||||||
|
isDef := t.Obj().Pos() == n.Pos() // see caveats at isDef above
|
||||||
|
if isDef {
|
||||||
|
description = "definition of "
|
||||||
|
} else {
|
||||||
|
description = "reference to "
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
case ast.Expr:
|
||||||
|
t = qpos.info.TypeOf(n)
|
||||||
|
|
||||||
|
default:
|
||||||
|
// Unreachable?
|
||||||
|
return nil, fmt.Errorf("unexpected AST for type: %T", n)
|
||||||
|
}
|
||||||
|
|
||||||
|
description = description + "type " + qpos.typeString(t)
|
||||||
|
|
||||||
|
// Show sizes for structs and named types (it's fairly obvious for others).
|
||||||
|
switch t.(type) {
|
||||||
|
case *types.Named, *types.Struct:
|
||||||
|
szs := types.StdSizes{WordSize: 8, MaxAlign: 8} // assume amd64
|
||||||
|
description = fmt.Sprintf("%s (size %d, align %d)", description,
|
||||||
|
szs.Sizeof(t), szs.Alignof(t))
|
||||||
|
}
|
||||||
|
|
||||||
|
return &describeTypeResult{
|
||||||
|
qpos: qpos,
|
||||||
|
node: path[0],
|
||||||
|
description: description,
|
||||||
|
typ: t,
|
||||||
|
methods: accessibleMethods(t, qpos.info.Pkg),
|
||||||
|
fields: accessibleFields(t, qpos.info.Pkg),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type describeTypeResult struct {
|
||||||
|
qpos *queryPos
|
||||||
|
node ast.Node
|
||||||
|
description string
|
||||||
|
typ types.Type
|
||||||
|
methods []*types.Selection
|
||||||
|
fields []describeField
|
||||||
|
}
|
||||||
|
|
||||||
|
type describeField struct {
|
||||||
|
implicits []*types.Named
|
||||||
|
field *types.Var
|
||||||
|
}
|
||||||
|
|
||||||
|
func printMethods(printf printfFunc, node ast.Node, methods []*types.Selection) {
|
||||||
|
if len(methods) > 0 {
|
||||||
|
printf(node, "Methods:")
|
||||||
|
}
|
||||||
|
for _, meth := range methods {
|
||||||
|
// Print the method type relative to the package
|
||||||
|
// in which it was defined, not the query package,
|
||||||
|
printf(meth.Obj(), "\t%s",
|
||||||
|
types.SelectionString(meth, types.RelativeTo(meth.Obj().Pkg())))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func printFields(printf printfFunc, node ast.Node, fields []describeField) {
|
||||||
|
if len(fields) > 0 {
|
||||||
|
printf(node, "Fields:")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Align the names and the types (requires two passes).
|
||||||
|
var width int
|
||||||
|
var names []string
|
||||||
|
for _, f := range fields {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
for _, fld := range f.implicits {
|
||||||
|
buf.WriteString(fld.Obj().Name())
|
||||||
|
buf.WriteByte('.')
|
||||||
|
}
|
||||||
|
buf.WriteString(f.field.Name())
|
||||||
|
name := buf.String()
|
||||||
|
if n := utf8.RuneCountInString(name); n > width {
|
||||||
|
width = n
|
||||||
|
}
|
||||||
|
names = append(names, name)
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, f := range fields {
|
||||||
|
// Print the field type relative to the package
|
||||||
|
// in which it was defined, not the query package,
|
||||||
|
printf(f.field, "\t%*s %s", -width, names[i],
|
||||||
|
types.TypeString(f.field.Type(), types.RelativeTo(f.field.Pkg())))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeTypeResult) PrintPlain(printf printfFunc) {
|
||||||
|
printf(r.node, "%s", r.description)
|
||||||
|
|
||||||
|
// Show the underlying type for a reference to a named type.
|
||||||
|
if nt, ok := r.typ.(*types.Named); ok && r.node.Pos() != nt.Obj().Pos() {
|
||||||
|
// TODO(adonovan): improve display of complex struct/interface types.
|
||||||
|
printf(nt.Obj(), "defined as %s", r.qpos.typeString(nt.Underlying()))
|
||||||
|
}
|
||||||
|
|
||||||
|
printMethods(printf, r.node, r.methods)
|
||||||
|
if len(r.methods) == 0 {
|
||||||
|
// Only report null result for type kinds
|
||||||
|
// capable of bearing methods.
|
||||||
|
switch r.typ.(type) {
|
||||||
|
case *types.Interface, *types.Struct, *types.Named:
|
||||||
|
printf(r.node, "No methods.")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
printFields(printf, r.node, r.fields)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeTypeResult) JSON(fset *token.FileSet) []byte {
|
||||||
|
var namePos, nameDef string
|
||||||
|
if nt, ok := r.typ.(*types.Named); ok {
|
||||||
|
namePos = fset.Position(nt.Obj().Pos()).String()
|
||||||
|
nameDef = nt.Underlying().String()
|
||||||
|
}
|
||||||
|
return toJSON(&serial.Describe{
|
||||||
|
Desc: r.description,
|
||||||
|
Pos: fset.Position(r.node.Pos()).String(),
|
||||||
|
Detail: "type",
|
||||||
|
Type: &serial.DescribeType{
|
||||||
|
Type: r.qpos.typeString(r.typ),
|
||||||
|
NamePos: namePos,
|
||||||
|
NameDef: nameDef,
|
||||||
|
Methods: methodsToSerial(r.qpos.info.Pkg, r.methods, fset),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- PACKAGE ------------------------------------------------------------
|
||||||
|
|
||||||
|
func describePackage(qpos *queryPos, path []ast.Node) (*describePackageResult, error) {
|
||||||
|
var description string
|
||||||
|
var pkg *types.Package
|
||||||
|
switch n := path[0].(type) {
|
||||||
|
case *ast.ImportSpec:
|
||||||
|
var obj types.Object
|
||||||
|
if n.Name != nil {
|
||||||
|
obj = qpos.info.Defs[n.Name]
|
||||||
|
} else {
|
||||||
|
obj = qpos.info.Implicits[n]
|
||||||
|
}
|
||||||
|
pkgname, _ := obj.(*types.PkgName)
|
||||||
|
if pkgname == nil {
|
||||||
|
return nil, fmt.Errorf("can't import package %s", n.Path.Value)
|
||||||
|
}
|
||||||
|
pkg = pkgname.Imported()
|
||||||
|
description = fmt.Sprintf("import of package %q", pkg.Path())
|
||||||
|
|
||||||
|
case *ast.Ident:
|
||||||
|
if _, isDef := path[1].(*ast.File); isDef {
|
||||||
|
// e.g. package id
|
||||||
|
pkg = qpos.info.Pkg
|
||||||
|
description = fmt.Sprintf("definition of package %q", pkg.Path())
|
||||||
|
} else {
|
||||||
|
// e.g. import id "..."
|
||||||
|
// or id.F()
|
||||||
|
pkg = qpos.info.ObjectOf(n).(*types.PkgName).Imported()
|
||||||
|
description = fmt.Sprintf("reference to package %q", pkg.Path())
|
||||||
|
}
|
||||||
|
|
||||||
|
default:
|
||||||
|
// Unreachable?
|
||||||
|
return nil, fmt.Errorf("unexpected AST for package: %T", n)
|
||||||
|
}
|
||||||
|
|
||||||
|
var members []*describeMember
|
||||||
|
// NB: "unsafe" has no types.Package
|
||||||
|
if pkg != nil {
|
||||||
|
// Enumerate the accessible package members
|
||||||
|
// in lexicographic order.
|
||||||
|
for _, name := range pkg.Scope().Names() {
|
||||||
|
if pkg == qpos.info.Pkg || ast.IsExported(name) {
|
||||||
|
mem := pkg.Scope().Lookup(name)
|
||||||
|
var methods []*types.Selection
|
||||||
|
if mem, ok := mem.(*types.TypeName); ok {
|
||||||
|
methods = accessibleMethods(mem.Type(), qpos.info.Pkg)
|
||||||
|
}
|
||||||
|
members = append(members, &describeMember{
|
||||||
|
mem,
|
||||||
|
methods,
|
||||||
|
})
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return &describePackageResult{qpos.fset, path[0], description, pkg, members}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type describePackageResult struct {
|
||||||
|
fset *token.FileSet
|
||||||
|
node ast.Node
|
||||||
|
description string
|
||||||
|
pkg *types.Package
|
||||||
|
members []*describeMember // in lexicographic name order
|
||||||
|
}
|
||||||
|
|
||||||
|
type describeMember struct {
|
||||||
|
obj types.Object
|
||||||
|
methods []*types.Selection // in types.MethodSet order
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describePackageResult) PrintPlain(printf printfFunc) {
|
||||||
|
printf(r.node, "%s", r.description)
|
||||||
|
|
||||||
|
// Compute max width of name "column".
|
||||||
|
maxname := 0
|
||||||
|
for _, mem := range r.members {
|
||||||
|
if l := len(mem.obj.Name()); l > maxname {
|
||||||
|
maxname = l
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, mem := range r.members {
|
||||||
|
printf(mem.obj, "\t%s", formatMember(mem.obj, maxname))
|
||||||
|
for _, meth := range mem.methods {
|
||||||
|
printf(meth.Obj(), "\t\t%s", types.SelectionString(meth, types.RelativeTo(r.pkg)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper function to adjust go1.5 numeric go/constant formatting.
|
||||||
|
// Can be removed once we give up compatibility with go1.5.
|
||||||
|
func constValString(v exact.Value) string {
|
||||||
|
if v.Kind() == exact.Float {
|
||||||
|
// In go1.5, go/constant floating-point values are printed
|
||||||
|
// as fractions. Make them appear as floating-point numbers.
|
||||||
|
f, _ := exact.Float64Val(v)
|
||||||
|
return fmt.Sprintf("%g", f)
|
||||||
|
}
|
||||||
|
return v.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
func formatMember(obj types.Object, maxname int) string {
|
||||||
|
qualifier := types.RelativeTo(obj.Pkg())
|
||||||
|
var buf bytes.Buffer
|
||||||
|
fmt.Fprintf(&buf, "%-5s %-*s", tokenOf(obj), maxname, obj.Name())
|
||||||
|
switch obj := obj.(type) {
|
||||||
|
case *types.Const:
|
||||||
|
fmt.Fprintf(&buf, " %s = %s", types.TypeString(obj.Type(), qualifier), constValString(obj.Val()))
|
||||||
|
|
||||||
|
case *types.Func:
|
||||||
|
fmt.Fprintf(&buf, " %s", types.TypeString(obj.Type(), qualifier))
|
||||||
|
|
||||||
|
case *types.TypeName:
|
||||||
|
// Abbreviate long aggregate type names.
|
||||||
|
var abbrev string
|
||||||
|
switch t := obj.Type().Underlying().(type) {
|
||||||
|
case *types.Interface:
|
||||||
|
if t.NumMethods() > 1 {
|
||||||
|
abbrev = "interface{...}"
|
||||||
|
}
|
||||||
|
case *types.Struct:
|
||||||
|
if t.NumFields() > 1 {
|
||||||
|
abbrev = "struct{...}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if abbrev == "" {
|
||||||
|
fmt.Fprintf(&buf, " %s", types.TypeString(obj.Type().Underlying(), qualifier))
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(&buf, " %s", abbrev)
|
||||||
|
}
|
||||||
|
|
||||||
|
case *types.Var:
|
||||||
|
fmt.Fprintf(&buf, " %s", types.TypeString(obj.Type(), qualifier))
|
||||||
|
}
|
||||||
|
return buf.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describePackageResult) JSON(fset *token.FileSet) []byte {
|
||||||
|
var members []*serial.DescribeMember
|
||||||
|
for _, mem := range r.members {
|
||||||
|
typ := mem.obj.Type()
|
||||||
|
var val string
|
||||||
|
switch mem := mem.obj.(type) {
|
||||||
|
case *types.Const:
|
||||||
|
val = constValString(mem.Val())
|
||||||
|
case *types.TypeName:
|
||||||
|
typ = typ.Underlying()
|
||||||
|
}
|
||||||
|
members = append(members, &serial.DescribeMember{
|
||||||
|
Name: mem.obj.Name(),
|
||||||
|
Type: typ.String(),
|
||||||
|
Value: val,
|
||||||
|
Pos: fset.Position(mem.obj.Pos()).String(),
|
||||||
|
Kind: tokenOf(mem.obj),
|
||||||
|
Methods: methodsToSerial(r.pkg, mem.methods, fset),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return toJSON(&serial.Describe{
|
||||||
|
Desc: r.description,
|
||||||
|
Pos: fset.Position(r.node.Pos()).String(),
|
||||||
|
Detail: "package",
|
||||||
|
Package: &serial.DescribePackage{
|
||||||
|
Path: r.pkg.Path(),
|
||||||
|
Members: members,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func tokenOf(o types.Object) string {
|
||||||
|
switch o.(type) {
|
||||||
|
case *types.Func:
|
||||||
|
return "func"
|
||||||
|
case *types.Var:
|
||||||
|
return "var"
|
||||||
|
case *types.TypeName:
|
||||||
|
return "type"
|
||||||
|
case *types.Const:
|
||||||
|
return "const"
|
||||||
|
case *types.PkgName:
|
||||||
|
return "package"
|
||||||
|
case *types.Builtin:
|
||||||
|
return "builtin" // e.g. when describing package "unsafe"
|
||||||
|
case *types.Nil:
|
||||||
|
return "nil"
|
||||||
|
case *types.Label:
|
||||||
|
return "label"
|
||||||
|
}
|
||||||
|
panic(o)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- STATEMENT ------------------------------------------------------------
|
||||||
|
|
||||||
|
func describeStmt(qpos *queryPos, path []ast.Node) (*describeStmtResult, error) {
|
||||||
|
var description string
|
||||||
|
switch n := path[0].(type) {
|
||||||
|
case *ast.Ident:
|
||||||
|
if qpos.info.Defs[n] != nil {
|
||||||
|
description = "labelled statement"
|
||||||
|
} else {
|
||||||
|
description = "reference to labelled statement"
|
||||||
|
}
|
||||||
|
|
||||||
|
default:
|
||||||
|
// Nothing much to say about statements.
|
||||||
|
description = astutil.NodeDescription(n)
|
||||||
|
}
|
||||||
|
return &describeStmtResult{qpos.fset, path[0], description}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type describeStmtResult struct {
|
||||||
|
fset *token.FileSet
|
||||||
|
node ast.Node
|
||||||
|
description string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeStmtResult) PrintPlain(printf printfFunc) {
|
||||||
|
printf(r.node, "%s", r.description)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *describeStmtResult) JSON(fset *token.FileSet) []byte {
|
||||||
|
return toJSON(&serial.Describe{
|
||||||
|
Desc: r.description,
|
||||||
|
Pos: fset.Position(r.node.Pos()).String(),
|
||||||
|
Detail: "unknown",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ------------------- Utilities -------------------
|
||||||
|
|
||||||
|
// pathToString returns a string containing the concrete types of the
|
||||||
|
// nodes in path.
|
||||||
|
func pathToString(path []ast.Node) string {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
fmt.Fprint(&buf, "[")
|
||||||
|
for i, n := range path {
|
||||||
|
if i > 0 {
|
||||||
|
fmt.Fprint(&buf, " ")
|
||||||
|
}
|
||||||
|
fmt.Fprint(&buf, strings.TrimPrefix(fmt.Sprintf("%T", n), "*ast."))
|
||||||
|
}
|
||||||
|
fmt.Fprint(&buf, "]")
|
||||||
|
return buf.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
func accessibleMethods(t types.Type, from *types.Package) []*types.Selection {
|
||||||
|
var methods []*types.Selection
|
||||||
|
for _, meth := range typeutil.IntuitiveMethodSet(t, nil) {
|
||||||
|
if isAccessibleFrom(meth.Obj(), from) {
|
||||||
|
methods = append(methods, meth)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return methods
|
||||||
|
}
|
||||||
|
|
||||||
|
// accessibleFields returns the set of accessible
|
||||||
|
// field selections on a value of type recv.
|
||||||
|
func accessibleFields(recv types.Type, from *types.Package) []describeField {
|
||||||
|
wantField := func(f *types.Var) bool {
|
||||||
|
if !isAccessibleFrom(f, from) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
// Check that the field is not shadowed.
|
||||||
|
obj, _, _ := types.LookupFieldOrMethod(recv, true, f.Pkg(), f.Name())
|
||||||
|
return obj == f
|
||||||
|
}
|
||||||
|
|
||||||
|
var fields []describeField
|
||||||
|
var visit func(t types.Type, stack []*types.Named)
|
||||||
|
visit = func(t types.Type, stack []*types.Named) {
|
||||||
|
tStruct, ok := deref(t).Underlying().(*types.Struct)
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
fieldloop:
|
||||||
|
for i := 0; i < tStruct.NumFields(); i++ {
|
||||||
|
f := tStruct.Field(i)
|
||||||
|
|
||||||
|
// Handle recursion through anonymous fields.
|
||||||
|
if f.Anonymous() {
|
||||||
|
tf := f.Type()
|
||||||
|
if ptr, ok := tf.(*types.Pointer); ok {
|
||||||
|
tf = ptr.Elem()
|
||||||
|
}
|
||||||
|
if named, ok := tf.(*types.Named); ok { // (be defensive)
|
||||||
|
// If we've already visited this named type
|
||||||
|
// on this path, break the cycle.
|
||||||
|
for _, x := range stack {
|
||||||
|
if x == named {
|
||||||
|
continue fieldloop
|
||||||
|
}
|
||||||
|
}
|
||||||
|
visit(f.Type(), append(stack, named))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save accessible fields.
|
||||||
|
if wantField(f) {
|
||||||
|
fields = append(fields, describeField{
|
||||||
|
implicits: append([]*types.Named(nil), stack...),
|
||||||
|
field: f,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
visit(recv, nil)
|
||||||
|
|
||||||
|
return fields
|
||||||
|
}
|
||||||
|
|
||||||
|
func isAccessibleFrom(obj types.Object, pkg *types.Package) bool {
|
||||||
|
return ast.IsExported(obj.Name()) || obj.Pkg() == pkg
|
||||||
|
}
|
||||||
|
|
||||||
|
func methodsToSerial(this *types.Package, methods []*types.Selection, fset *token.FileSet) []serial.DescribeMethod {
|
||||||
|
qualifier := types.RelativeTo(this)
|
||||||
|
var jmethods []serial.DescribeMethod
|
||||||
|
for _, meth := range methods {
|
||||||
|
var ser serial.DescribeMethod
|
||||||
|
if meth != nil { // may contain nils when called by implements (on a method)
|
||||||
|
ser = serial.DescribeMethod{
|
||||||
|
Name: types.SelectionString(meth, qualifier),
|
||||||
|
Pos: fset.Position(meth.Obj().Pos()).String(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
jmethods = append(jmethods, ser)
|
||||||
|
}
|
||||||
|
return jmethods
|
||||||
|
}
|
@ -2,6 +2,8 @@
|
|||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build !go1.8
|
||||||
|
|
||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
524
cmd/guru/referrers18.go
Normal file
524
cmd/guru/referrers18.go
Normal file
@ -0,0 +1,524 @@
|
|||||||
|
// Copyright 2013 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.8
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/build"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"io"
|
||||||
|
"log"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
|
||||||
|
"golang.org/x/tools/cmd/guru/serial"
|
||||||
|
"golang.org/x/tools/go/buildutil"
|
||||||
|
"golang.org/x/tools/go/loader"
|
||||||
|
"golang.org/x/tools/refactor/importgraph"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Referrers reports all identifiers that resolve to the same object
|
||||||
|
// as the queried identifier, within any package in the workspace.
|
||||||
|
func referrers(q *Query) error {
|
||||||
|
fset := token.NewFileSet()
|
||||||
|
lconf := loader.Config{Fset: fset, Build: q.Build}
|
||||||
|
allowErrors(&lconf)
|
||||||
|
|
||||||
|
if _, err := importQueryPackage(q.Pos, &lconf); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load/parse/type-check the query package.
|
||||||
|
lprog, err := lconf.Load()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
qpos, err := parseQueryPos(lprog, q.Pos, false)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
id, _ := qpos.path[0].(*ast.Ident)
|
||||||
|
if id == nil {
|
||||||
|
return fmt.Errorf("no identifier here")
|
||||||
|
}
|
||||||
|
|
||||||
|
obj := qpos.info.ObjectOf(id)
|
||||||
|
if obj == nil {
|
||||||
|
// Happens for y in "switch y := x.(type)",
|
||||||
|
// the package declaration,
|
||||||
|
// and unresolved identifiers.
|
||||||
|
if _, ok := qpos.path[1].(*ast.File); ok { // package decl?
|
||||||
|
return packageReferrers(q, qpos.info.Pkg.Path())
|
||||||
|
}
|
||||||
|
return fmt.Errorf("no object for identifier: %T", qpos.path[1])
|
||||||
|
}
|
||||||
|
|
||||||
|
// Imported package name?
|
||||||
|
if pkgname, ok := obj.(*types.PkgName); ok {
|
||||||
|
return packageReferrers(q, pkgname.Imported().Path())
|
||||||
|
}
|
||||||
|
|
||||||
|
if obj.Pkg() == nil {
|
||||||
|
return fmt.Errorf("references to predeclared %q are everywhere!", obj.Name())
|
||||||
|
}
|
||||||
|
|
||||||
|
// For a globally accessible object defined in package P, we
|
||||||
|
// must load packages that depend on P. Specifically, for a
|
||||||
|
// package-level object, we need load only direct importers
|
||||||
|
// of P, but for a field or interface method, we must load
|
||||||
|
// any package that transitively imports P.
|
||||||
|
if global, pkglevel := classify(obj); global {
|
||||||
|
// We'll use the the object's position to identify it in the larger program.
|
||||||
|
objposn := fset.Position(obj.Pos())
|
||||||
|
defpkg := obj.Pkg().Path() // defining package
|
||||||
|
return globalReferrers(q, qpos.info.Pkg.Path(), defpkg, objposn, pkglevel)
|
||||||
|
}
|
||||||
|
|
||||||
|
q.Output(fset, &referrersInitialResult{
|
||||||
|
qinfo: qpos.info,
|
||||||
|
obj: obj,
|
||||||
|
})
|
||||||
|
|
||||||
|
outputUses(q, fset, usesOf(obj, qpos.info), obj.Pkg())
|
||||||
|
|
||||||
|
return nil // success
|
||||||
|
}
|
||||||
|
|
||||||
|
// classify classifies objects by how far
|
||||||
|
// we have to look to find references to them.
|
||||||
|
func classify(obj types.Object) (global, pkglevel bool) {
|
||||||
|
if obj.Exported() {
|
||||||
|
if obj.Parent() == nil {
|
||||||
|
// selectable object (field or method)
|
||||||
|
return true, false
|
||||||
|
}
|
||||||
|
if obj.Parent() == obj.Pkg().Scope() {
|
||||||
|
// lexical object (package-level var/const/func/type)
|
||||||
|
return true, true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// object with unexported named or defined in local scope
|
||||||
|
return false, false
|
||||||
|
}
|
||||||
|
|
||||||
|
// packageReferrers reports all references to the specified package
|
||||||
|
// throughout the workspace.
|
||||||
|
func packageReferrers(q *Query, path string) error {
|
||||||
|
// Scan the workspace and build the import graph.
|
||||||
|
// Ignore broken packages.
|
||||||
|
_, rev, _ := importgraph.Build(q.Build)
|
||||||
|
|
||||||
|
// Find the set of packages that directly import the query package.
|
||||||
|
// Only those packages need typechecking of function bodies.
|
||||||
|
users := rev[path]
|
||||||
|
|
||||||
|
// Load the larger program.
|
||||||
|
fset := token.NewFileSet()
|
||||||
|
lconf := loader.Config{
|
||||||
|
Fset: fset,
|
||||||
|
Build: q.Build,
|
||||||
|
TypeCheckFuncBodies: func(p string) bool {
|
||||||
|
return users[strings.TrimSuffix(p, "_test")]
|
||||||
|
},
|
||||||
|
}
|
||||||
|
allowErrors(&lconf)
|
||||||
|
|
||||||
|
// The importgraph doesn't treat external test packages
|
||||||
|
// as separate nodes, so we must use ImportWithTests.
|
||||||
|
for path := range users {
|
||||||
|
lconf.ImportWithTests(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Subtle! AfterTypeCheck needs no mutex for qpkg because the
|
||||||
|
// topological import order gives us the necessary happens-before edges.
|
||||||
|
// TODO(adonovan): what about import cycles?
|
||||||
|
var qpkg *types.Package
|
||||||
|
|
||||||
|
// For efficiency, we scan each package for references
|
||||||
|
// just after it has been type-checked. The loader calls
|
||||||
|
// AfterTypeCheck (concurrently), providing us with a stream of
|
||||||
|
// packages.
|
||||||
|
lconf.AfterTypeCheck = func(info *loader.PackageInfo, files []*ast.File) {
|
||||||
|
// AfterTypeCheck may be called twice for the same package due to augmentation.
|
||||||
|
|
||||||
|
if info.Pkg.Path() == path && qpkg == nil {
|
||||||
|
// Found the package of interest.
|
||||||
|
qpkg = info.Pkg
|
||||||
|
fakepkgname := types.NewPkgName(token.NoPos, qpkg, qpkg.Name(), qpkg)
|
||||||
|
q.Output(fset, &referrersInitialResult{
|
||||||
|
qinfo: info,
|
||||||
|
obj: fakepkgname, // bogus
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Only inspect packages that directly import the
|
||||||
|
// declaring package (and thus were type-checked).
|
||||||
|
if lconf.TypeCheckFuncBodies(info.Pkg.Path()) {
|
||||||
|
// Find PkgNames that refer to qpkg.
|
||||||
|
// TODO(adonovan): perhaps more useful would be to show imports
|
||||||
|
// of the package instead of qualified identifiers.
|
||||||
|
var refs []*ast.Ident
|
||||||
|
for id, obj := range info.Uses {
|
||||||
|
if obj, ok := obj.(*types.PkgName); ok && obj.Imported() == qpkg {
|
||||||
|
refs = append(refs, id)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
outputUses(q, fset, refs, info.Pkg)
|
||||||
|
}
|
||||||
|
|
||||||
|
clearInfoFields(info) // save memory
|
||||||
|
}
|
||||||
|
|
||||||
|
lconf.Load() // ignore error
|
||||||
|
|
||||||
|
if qpkg == nil {
|
||||||
|
log.Fatalf("query package %q not found during reloading", path)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func usesOf(queryObj types.Object, info *loader.PackageInfo) []*ast.Ident {
|
||||||
|
var refs []*ast.Ident
|
||||||
|
for id, obj := range info.Uses {
|
||||||
|
if sameObj(queryObj, obj) {
|
||||||
|
refs = append(refs, id)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return refs
|
||||||
|
}
|
||||||
|
|
||||||
|
// outputUses outputs a result describing refs, which appear in the package denoted by info.
|
||||||
|
func outputUses(q *Query, fset *token.FileSet, refs []*ast.Ident, pkg *types.Package) {
|
||||||
|
if len(refs) > 0 {
|
||||||
|
sort.Sort(byNamePos{fset, refs})
|
||||||
|
q.Output(fset, &referrersPackageResult{
|
||||||
|
pkg: pkg,
|
||||||
|
build: q.Build,
|
||||||
|
fset: fset,
|
||||||
|
refs: refs,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// globalReferrers reports references throughout the entire workspace to the
|
||||||
|
// object at the specified source position. Its defining package is defpkg,
|
||||||
|
// and the query package is qpkg. isPkgLevel indicates whether the object
|
||||||
|
// is defined at package-level.
|
||||||
|
func globalReferrers(q *Query, qpkg, defpkg string, objposn token.Position, isPkgLevel bool) error {
|
||||||
|
// Scan the workspace and build the import graph.
|
||||||
|
// Ignore broken packages.
|
||||||
|
_, rev, _ := importgraph.Build(q.Build)
|
||||||
|
|
||||||
|
// Find the set of packages that depend on defpkg.
|
||||||
|
// Only function bodies in those packages need type-checking.
|
||||||
|
var users map[string]bool
|
||||||
|
if isPkgLevel {
|
||||||
|
users = rev[defpkg] // direct importers
|
||||||
|
if users == nil {
|
||||||
|
users = make(map[string]bool)
|
||||||
|
}
|
||||||
|
users[defpkg] = true // plus the defining package itself
|
||||||
|
} else {
|
||||||
|
users = rev.Search(defpkg) // transitive importers
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prepare to load the larger program.
|
||||||
|
fset := token.NewFileSet()
|
||||||
|
lconf := loader.Config{
|
||||||
|
Fset: fset,
|
||||||
|
Build: q.Build,
|
||||||
|
TypeCheckFuncBodies: func(p string) bool {
|
||||||
|
return users[strings.TrimSuffix(p, "_test")]
|
||||||
|
},
|
||||||
|
}
|
||||||
|
allowErrors(&lconf)
|
||||||
|
|
||||||
|
// The importgraph doesn't treat external test packages
|
||||||
|
// as separate nodes, so we must use ImportWithTests.
|
||||||
|
for path := range users {
|
||||||
|
lconf.ImportWithTests(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// The remainder of this function is somewhat tricky because it
|
||||||
|
// operates on the concurrent stream of packages observed by the
|
||||||
|
// loader's AfterTypeCheck hook. Most of guru's helper
|
||||||
|
// functions assume the entire program has already been loaded,
|
||||||
|
// so we can't use them here.
|
||||||
|
// TODO(adonovan): smooth things out once the other changes have landed.
|
||||||
|
|
||||||
|
// Results are reported concurrently from within the
|
||||||
|
// AfterTypeCheck hook. The program may provide a useful stream
|
||||||
|
// of information even if the user doesn't let the program run
|
||||||
|
// to completion.
|
||||||
|
|
||||||
|
var (
|
||||||
|
mu sync.Mutex
|
||||||
|
qobj types.Object
|
||||||
|
qinfo *loader.PackageInfo // info for qpkg
|
||||||
|
)
|
||||||
|
|
||||||
|
// For efficiency, we scan each package for references
|
||||||
|
// just after it has been type-checked. The loader calls
|
||||||
|
// AfterTypeCheck (concurrently), providing us with a stream of
|
||||||
|
// packages.
|
||||||
|
lconf.AfterTypeCheck = func(info *loader.PackageInfo, files []*ast.File) {
|
||||||
|
// AfterTypeCheck may be called twice for the same package due to augmentation.
|
||||||
|
|
||||||
|
// Only inspect packages that depend on the declaring package
|
||||||
|
// (and thus were type-checked).
|
||||||
|
if lconf.TypeCheckFuncBodies(info.Pkg.Path()) {
|
||||||
|
// Record the query object and its package when we see it.
|
||||||
|
mu.Lock()
|
||||||
|
if qobj == nil && info.Pkg.Path() == defpkg {
|
||||||
|
// Find the object by its position (slightly ugly).
|
||||||
|
qobj = findObject(fset, &info.Info, objposn)
|
||||||
|
if qobj == nil {
|
||||||
|
// It really ought to be there;
|
||||||
|
// we found it once already.
|
||||||
|
log.Fatalf("object at %s not found in package %s",
|
||||||
|
objposn, defpkg)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Object found.
|
||||||
|
qinfo = info
|
||||||
|
q.Output(fset, &referrersInitialResult{
|
||||||
|
qinfo: qinfo,
|
||||||
|
obj: qobj,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
obj := qobj
|
||||||
|
mu.Unlock()
|
||||||
|
|
||||||
|
// Look for references to the query object.
|
||||||
|
if obj != nil {
|
||||||
|
outputUses(q, fset, usesOf(obj, info), info.Pkg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
clearInfoFields(info) // save memory
|
||||||
|
}
|
||||||
|
|
||||||
|
lconf.Load() // ignore error
|
||||||
|
|
||||||
|
if qobj == nil {
|
||||||
|
log.Fatal("query object not found during reloading")
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil // success
|
||||||
|
}
|
||||||
|
|
||||||
|
// findObject returns the object defined at the specified position.
|
||||||
|
func findObject(fset *token.FileSet, info *types.Info, objposn token.Position) types.Object {
|
||||||
|
good := func(obj types.Object) bool {
|
||||||
|
if obj == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
posn := fset.Position(obj.Pos())
|
||||||
|
return posn.Filename == objposn.Filename && posn.Offset == objposn.Offset
|
||||||
|
}
|
||||||
|
for _, obj := range info.Defs {
|
||||||
|
if good(obj) {
|
||||||
|
return obj
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for _, obj := range info.Implicits {
|
||||||
|
if good(obj) {
|
||||||
|
return obj
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// same reports whether x and y are identical, or both are PkgNames
|
||||||
|
// that import the same Package.
|
||||||
|
//
|
||||||
|
func sameObj(x, y types.Object) bool {
|
||||||
|
if x == y {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if x, ok := x.(*types.PkgName); ok {
|
||||||
|
if y, ok := y.(*types.PkgName); ok {
|
||||||
|
return x.Imported() == y.Imported()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func clearInfoFields(info *loader.PackageInfo) {
|
||||||
|
// TODO(adonovan): opt: save memory by eliminating unneeded scopes/objects.
|
||||||
|
// (Requires go/types change for Go 1.7.)
|
||||||
|
// info.Pkg.Scope().ClearChildren()
|
||||||
|
|
||||||
|
// Discard the file ASTs and their accumulated type
|
||||||
|
// information to save memory.
|
||||||
|
info.Files = nil
|
||||||
|
info.Defs = make(map[*ast.Ident]types.Object)
|
||||||
|
info.Uses = make(map[*ast.Ident]types.Object)
|
||||||
|
info.Implicits = make(map[ast.Node]types.Object)
|
||||||
|
|
||||||
|
// Also, disable future collection of wholly unneeded
|
||||||
|
// type information for the package in case there is
|
||||||
|
// more type-checking to do (augmentation).
|
||||||
|
info.Types = nil
|
||||||
|
info.Scopes = nil
|
||||||
|
info.Selections = nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// -------- utils --------
|
||||||
|
|
||||||
|
// An deterministic ordering for token.Pos that doesn't
|
||||||
|
// depend on the order in which packages were loaded.
|
||||||
|
func lessPos(fset *token.FileSet, x, y token.Pos) bool {
|
||||||
|
fx := fset.File(x)
|
||||||
|
fy := fset.File(y)
|
||||||
|
if fx != fy {
|
||||||
|
return fx.Name() < fy.Name()
|
||||||
|
}
|
||||||
|
return x < y
|
||||||
|
}
|
||||||
|
|
||||||
|
type byNamePos struct {
|
||||||
|
fset *token.FileSet
|
||||||
|
ids []*ast.Ident
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p byNamePos) Len() int { return len(p.ids) }
|
||||||
|
func (p byNamePos) Swap(i, j int) { p.ids[i], p.ids[j] = p.ids[j], p.ids[i] }
|
||||||
|
func (p byNamePos) Less(i, j int) bool {
|
||||||
|
return lessPos(p.fset, p.ids[i].NamePos, p.ids[j].NamePos)
|
||||||
|
}
|
||||||
|
|
||||||
|
// referrersInitialResult is the initial result of a "referrers" query.
|
||||||
|
type referrersInitialResult struct {
|
||||||
|
qinfo *loader.PackageInfo
|
||||||
|
obj types.Object // object it denotes
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *referrersInitialResult) PrintPlain(printf printfFunc) {
|
||||||
|
printf(r.obj, "references to %s",
|
||||||
|
types.ObjectString(r.obj, types.RelativeTo(r.qinfo.Pkg)))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *referrersInitialResult) JSON(fset *token.FileSet) []byte {
|
||||||
|
var objpos string
|
||||||
|
if pos := r.obj.Pos(); pos.IsValid() {
|
||||||
|
objpos = fset.Position(pos).String()
|
||||||
|
}
|
||||||
|
return toJSON(&serial.ReferrersInitial{
|
||||||
|
Desc: r.obj.String(),
|
||||||
|
ObjPos: objpos,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// referrersPackageResult is the streaming result for one package of a "referrers" query.
|
||||||
|
type referrersPackageResult struct {
|
||||||
|
pkg *types.Package
|
||||||
|
build *build.Context
|
||||||
|
fset *token.FileSet
|
||||||
|
refs []*ast.Ident // set of all other references to it
|
||||||
|
}
|
||||||
|
|
||||||
|
// forEachRef calls f(id, text) for id in r.refs, in order.
|
||||||
|
// Text is the text of the line on which id appears.
|
||||||
|
func (r *referrersPackageResult) foreachRef(f func(id *ast.Ident, text string)) {
|
||||||
|
// Show referring lines, like grep.
|
||||||
|
type fileinfo struct {
|
||||||
|
refs []*ast.Ident
|
||||||
|
linenums []int // line number of refs[i]
|
||||||
|
data chan interface{} // file contents or error
|
||||||
|
}
|
||||||
|
var fileinfos []*fileinfo
|
||||||
|
fileinfosByName := make(map[string]*fileinfo)
|
||||||
|
|
||||||
|
// First pass: start the file reads concurrently.
|
||||||
|
sema := make(chan struct{}, 20) // counting semaphore to limit I/O concurrency
|
||||||
|
for _, ref := range r.refs {
|
||||||
|
posn := r.fset.Position(ref.Pos())
|
||||||
|
fi := fileinfosByName[posn.Filename]
|
||||||
|
if fi == nil {
|
||||||
|
fi = &fileinfo{data: make(chan interface{})}
|
||||||
|
fileinfosByName[posn.Filename] = fi
|
||||||
|
fileinfos = append(fileinfos, fi)
|
||||||
|
|
||||||
|
// First request for this file:
|
||||||
|
// start asynchronous read.
|
||||||
|
go func() {
|
||||||
|
sema <- struct{}{} // acquire token
|
||||||
|
content, err := readFile(r.build, posn.Filename)
|
||||||
|
<-sema // release token
|
||||||
|
if err != nil {
|
||||||
|
fi.data <- err
|
||||||
|
} else {
|
||||||
|
fi.data <- content
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
}
|
||||||
|
fi.refs = append(fi.refs, ref)
|
||||||
|
fi.linenums = append(fi.linenums, posn.Line)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Second pass: print refs in original order.
|
||||||
|
// One line may have several refs at different columns.
|
||||||
|
for _, fi := range fileinfos {
|
||||||
|
v := <-fi.data // wait for I/O completion
|
||||||
|
|
||||||
|
// Print one item for all refs in a file that could not
|
||||||
|
// be loaded (perhaps due to //line directives).
|
||||||
|
if err, ok := v.(error); ok {
|
||||||
|
var suffix string
|
||||||
|
if more := len(fi.refs) - 1; more > 0 {
|
||||||
|
suffix = fmt.Sprintf(" (+ %d more refs in this file)", more)
|
||||||
|
}
|
||||||
|
f(fi.refs[0], err.Error()+suffix)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
lines := bytes.Split(v.([]byte), []byte("\n"))
|
||||||
|
for i, ref := range fi.refs {
|
||||||
|
f(ref, string(lines[fi.linenums[i]-1]))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// readFile is like ioutil.ReadFile, but
|
||||||
|
// it goes through the virtualized build.Context.
|
||||||
|
func readFile(ctxt *build.Context, filename string) ([]byte, error) {
|
||||||
|
rc, err := buildutil.OpenFile(ctxt, filename)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rc.Close()
|
||||||
|
var buf bytes.Buffer
|
||||||
|
if _, err := io.Copy(&buf, rc); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return buf.Bytes(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *referrersPackageResult) PrintPlain(printf printfFunc) {
|
||||||
|
r.foreachRef(func(id *ast.Ident, text string) {
|
||||||
|
printf(id, "%s", text)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *referrersPackageResult) JSON(fset *token.FileSet) []byte {
|
||||||
|
refs := serial.ReferrersPackage{Package: r.pkg.Path()}
|
||||||
|
r.foreachRef(func(id *ast.Ident, text string) {
|
||||||
|
refs.Refs = append(refs.Refs, serial.Ref{
|
||||||
|
Pos: fset.Position(id.NamePos).String(),
|
||||||
|
Text: text,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
return toJSON(refs)
|
||||||
|
}
|
@ -2,7 +2,7 @@
|
|||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
// +build go1.5
|
// +build !go1.8
|
||||||
|
|
||||||
// Stringer is a tool to automate the creation of methods that satisfy the fmt.Stringer
|
// Stringer is a tool to automate the creation of methods that satisfy the fmt.Stringer
|
||||||
// interface. Given the name of a (signed or unsigned) integer type T that has constants
|
// interface. Given the name of a (signed or unsigned) integer type T that has constants
|
||||||
|
638
cmd/stringer/stringer18.go
Normal file
638
cmd/stringer/stringer18.go
Normal file
@ -0,0 +1,638 @@
|
|||||||
|
// Copyright 2014 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.8
|
||||||
|
|
||||||
|
// Stringer is a tool to automate the creation of methods that satisfy the fmt.Stringer
|
||||||
|
// interface. Given the name of a (signed or unsigned) integer type T that has constants
|
||||||
|
// defined, stringer will create a new self-contained Go source file implementing
|
||||||
|
// func (t T) String() string
|
||||||
|
// The file is created in the same package and directory as the package that defines T.
|
||||||
|
// It has helpful defaults designed for use with go generate.
|
||||||
|
//
|
||||||
|
// Stringer works best with constants that are consecutive values such as created using iota,
|
||||||
|
// but creates good code regardless. In the future it might also provide custom support for
|
||||||
|
// constant sets that are bit patterns.
|
||||||
|
//
|
||||||
|
// For example, given this snippet,
|
||||||
|
//
|
||||||
|
// package painkiller
|
||||||
|
//
|
||||||
|
// type Pill int
|
||||||
|
//
|
||||||
|
// const (
|
||||||
|
// Placebo Pill = iota
|
||||||
|
// Aspirin
|
||||||
|
// Ibuprofen
|
||||||
|
// Paracetamol
|
||||||
|
// Acetaminophen = Paracetamol
|
||||||
|
// )
|
||||||
|
//
|
||||||
|
// running this command
|
||||||
|
//
|
||||||
|
// stringer -type=Pill
|
||||||
|
//
|
||||||
|
// in the same directory will create the file pill_string.go, in package painkiller,
|
||||||
|
// containing a definition of
|
||||||
|
//
|
||||||
|
// func (Pill) String() string
|
||||||
|
//
|
||||||
|
// That method will translate the value of a Pill constant to the string representation
|
||||||
|
// of the respective constant name, so that the call fmt.Print(painkiller.Aspirin) will
|
||||||
|
// print the string "Aspirin".
|
||||||
|
//
|
||||||
|
// Typically this process would be run using go generate, like this:
|
||||||
|
//
|
||||||
|
// //go:generate stringer -type=Pill
|
||||||
|
//
|
||||||
|
// If multiple constants have the same value, the lexically first matching name will
|
||||||
|
// be used (in the example, Acetaminophen will print as "Paracetamol").
|
||||||
|
//
|
||||||
|
// With no arguments, it processes the package in the current directory.
|
||||||
|
// Otherwise, the arguments must name a single directory holding a Go package
|
||||||
|
// or a set of Go source files that represent a single Go package.
|
||||||
|
//
|
||||||
|
// The -type flag accepts a comma-separated list of types so a single run can
|
||||||
|
// generate methods for multiple types. The default output file is t_string.go,
|
||||||
|
// where t is the lower-cased name of the first type listed. It can be overridden
|
||||||
|
// with the -output flag.
|
||||||
|
//
|
||||||
|
package main // import "golang.org/x/tools/cmd/stringer"
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/build"
|
||||||
|
exact "go/constant"
|
||||||
|
"go/format"
|
||||||
|
"go/importer"
|
||||||
|
"go/parser"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"io/ioutil"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Command-line flags.
var (
	typeNames = flag.String("type", "", "comma-separated list of type names; must be set")
	output    = flag.String("output", "", "output file name; default srcdir/<type>_string.go")
)
|
||||||
|
|
||||||
|
// Usage is a replacement usage function for the flags package.
// It writes the command synopsis and flag defaults to stderr.
func Usage() {
	w := os.Stderr
	fmt.Fprintf(w, "Usage of %s:\n", os.Args[0])
	fmt.Fprintf(w, "\tstringer [flags] -type T [directory]\n")
	fmt.Fprintf(w, "\tstringer [flags] -type T files... # Must be a single package\n")
	fmt.Fprintf(w, "For more information, see:\n")
	fmt.Fprintf(w, "\thttp://godoc.org/golang.org/x/tools/cmd/stringer\n")
	fmt.Fprintf(w, "Flags:\n")
	flag.PrintDefaults()
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
log.SetFlags(0)
|
||||||
|
log.SetPrefix("stringer: ")
|
||||||
|
flag.Usage = Usage
|
||||||
|
flag.Parse()
|
||||||
|
if len(*typeNames) == 0 {
|
||||||
|
flag.Usage()
|
||||||
|
os.Exit(2)
|
||||||
|
}
|
||||||
|
types := strings.Split(*typeNames, ",")
|
||||||
|
|
||||||
|
// We accept either one directory or a list of files. Which do we have?
|
||||||
|
args := flag.Args()
|
||||||
|
if len(args) == 0 {
|
||||||
|
// Default: process whole package in current directory.
|
||||||
|
args = []string{"."}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse the package once.
|
||||||
|
var (
|
||||||
|
dir string
|
||||||
|
g Generator
|
||||||
|
)
|
||||||
|
if len(args) == 1 && isDirectory(args[0]) {
|
||||||
|
dir = args[0]
|
||||||
|
g.parsePackageDir(args[0])
|
||||||
|
} else {
|
||||||
|
dir = filepath.Dir(args[0])
|
||||||
|
g.parsePackageFiles(args)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Print the header and package clause.
|
||||||
|
g.Printf("// Code generated by \"stringer %s\"; DO NOT EDIT\n", strings.Join(os.Args[1:], " "))
|
||||||
|
g.Printf("\n")
|
||||||
|
g.Printf("package %s", g.pkg.name)
|
||||||
|
g.Printf("\n")
|
||||||
|
g.Printf("import \"fmt\"\n") // Used by all methods.
|
||||||
|
|
||||||
|
// Run generate for each type.
|
||||||
|
for _, typeName := range types {
|
||||||
|
g.generate(typeName)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Format the output.
|
||||||
|
src := g.format()
|
||||||
|
|
||||||
|
// Write to file.
|
||||||
|
outputName := *output
|
||||||
|
if outputName == "" {
|
||||||
|
baseName := fmt.Sprintf("%s_string.go", types[0])
|
||||||
|
outputName = filepath.Join(dir, strings.ToLower(baseName))
|
||||||
|
}
|
||||||
|
err := ioutil.WriteFile(outputName, src, 0644)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("writing output: %s", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// isDirectory reports whether the named file is a directory.
// It exits the program if the file cannot be stat'ed.
func isDirectory(name string) bool {
	info, err := os.Stat(name)
	if err != nil {
		log.Fatal(err)
	}
	return info.IsDir()
}
|
||||||
|
|
||||||
|
// Generator holds the state of the analysis. Primarily used to buffer
|
||||||
|
// the output for format.Source.
|
||||||
|
type Generator struct {
|
||||||
|
buf bytes.Buffer // Accumulated output.
|
||||||
|
pkg *Package // Package we are scanning.
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g *Generator) Printf(format string, args ...interface{}) {
|
||||||
|
fmt.Fprintf(&g.buf, format, args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// File holds a single parsed file and associated data.
|
||||||
|
type File struct {
|
||||||
|
pkg *Package // Package to which this file belongs.
|
||||||
|
file *ast.File // Parsed AST.
|
||||||
|
// These fields are reset for each type being generated.
|
||||||
|
typeName string // Name of the constant type.
|
||||||
|
values []Value // Accumulator for constant values of that type.
|
||||||
|
}
|
||||||
|
|
||||||
|
type Package struct {
|
||||||
|
dir string
|
||||||
|
name string
|
||||||
|
defs map[*ast.Ident]types.Object
|
||||||
|
files []*File
|
||||||
|
typesPkg *types.Package
|
||||||
|
}
|
||||||
|
|
||||||
|
// parsePackageDir parses the package residing in the directory.
|
||||||
|
func (g *Generator) parsePackageDir(directory string) {
|
||||||
|
pkg, err := build.Default.ImportDir(directory, 0)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("cannot process directory %s: %s", directory, err)
|
||||||
|
}
|
||||||
|
var names []string
|
||||||
|
names = append(names, pkg.GoFiles...)
|
||||||
|
names = append(names, pkg.CgoFiles...)
|
||||||
|
// TODO: Need to think about constants in test files. Maybe write type_string_test.go
|
||||||
|
// in a separate pass? For later.
|
||||||
|
// names = append(names, pkg.TestGoFiles...) // These are also in the "foo" package.
|
||||||
|
names = append(names, pkg.SFiles...)
|
||||||
|
names = prefixDirectory(directory, names)
|
||||||
|
g.parsePackage(directory, names, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
// parsePackageFiles parses the package occupying the named files.
|
||||||
|
func (g *Generator) parsePackageFiles(names []string) {
|
||||||
|
g.parsePackage(".", names, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
// prefixDirectory places the directory name on the beginning of each name in the list.
|
||||||
|
func prefixDirectory(directory string, names []string) []string {
|
||||||
|
if directory == "." {
|
||||||
|
return names
|
||||||
|
}
|
||||||
|
ret := make([]string, len(names))
|
||||||
|
for i, name := range names {
|
||||||
|
ret[i] = filepath.Join(directory, name)
|
||||||
|
}
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
|
||||||
|
// parsePackage analyzes the single package constructed from the named files.
|
||||||
|
// If text is non-nil, it is a string to be used instead of the content of the file,
|
||||||
|
// to be used for testing. parsePackage exits if there is an error.
|
||||||
|
func (g *Generator) parsePackage(directory string, names []string, text interface{}) {
|
||||||
|
var files []*File
|
||||||
|
var astFiles []*ast.File
|
||||||
|
g.pkg = new(Package)
|
||||||
|
fs := token.NewFileSet()
|
||||||
|
for _, name := range names {
|
||||||
|
if !strings.HasSuffix(name, ".go") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
parsedFile, err := parser.ParseFile(fs, name, text, 0)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("parsing package: %s: %s", name, err)
|
||||||
|
}
|
||||||
|
astFiles = append(astFiles, parsedFile)
|
||||||
|
files = append(files, &File{
|
||||||
|
file: parsedFile,
|
||||||
|
pkg: g.pkg,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if len(astFiles) == 0 {
|
||||||
|
log.Fatalf("%s: no buildable Go files", directory)
|
||||||
|
}
|
||||||
|
g.pkg.name = astFiles[0].Name.Name
|
||||||
|
g.pkg.files = files
|
||||||
|
g.pkg.dir = directory
|
||||||
|
// Type check the package.
|
||||||
|
g.pkg.check(fs, astFiles)
|
||||||
|
}
|
||||||
|
|
||||||
|
// check type-checks the package. The package must be OK to proceed.
|
||||||
|
func (pkg *Package) check(fs *token.FileSet, astFiles []*ast.File) {
|
||||||
|
pkg.defs = make(map[*ast.Ident]types.Object)
|
||||||
|
config := types.Config{Importer: importer.Default(), FakeImportC: true}
|
||||||
|
info := &types.Info{
|
||||||
|
Defs: pkg.defs,
|
||||||
|
}
|
||||||
|
typesPkg, err := config.Check(pkg.dir, fs, astFiles, info)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("checking package: %s", err)
|
||||||
|
}
|
||||||
|
pkg.typesPkg = typesPkg
|
||||||
|
}
|
||||||
|
|
||||||
|
// generate produces the String method for the named type.
|
||||||
|
func (g *Generator) generate(typeName string) {
|
||||||
|
values := make([]Value, 0, 100)
|
||||||
|
for _, file := range g.pkg.files {
|
||||||
|
// Set the state for this run of the walker.
|
||||||
|
file.typeName = typeName
|
||||||
|
file.values = nil
|
||||||
|
if file.file != nil {
|
||||||
|
ast.Inspect(file.file, file.genDecl)
|
||||||
|
values = append(values, file.values...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(values) == 0 {
|
||||||
|
log.Fatalf("no values defined for type %s", typeName)
|
||||||
|
}
|
||||||
|
runs := splitIntoRuns(values)
|
||||||
|
// The decision of which pattern to use depends on the number of
|
||||||
|
// runs in the numbers. If there's only one, it's easy. For more than
|
||||||
|
// one, there's a tradeoff between complexity and size of the data
|
||||||
|
// and code vs. the simplicity of a map. A map takes more space,
|
||||||
|
// but so does the code. The decision here (crossover at 10) is
|
||||||
|
// arbitrary, but considers that for large numbers of runs the cost
|
||||||
|
// of the linear scan in the switch might become important, and
|
||||||
|
// rather than use yet another algorithm such as binary search,
|
||||||
|
// we punt and use a map. In any case, the likelihood of a map
|
||||||
|
// being necessary for any realistic example other than bitmasks
|
||||||
|
// is very low. And bitmasks probably deserve their own analysis,
|
||||||
|
// to be done some other day.
|
||||||
|
switch {
|
||||||
|
case len(runs) == 1:
|
||||||
|
g.buildOneRun(runs, typeName)
|
||||||
|
case len(runs) <= 10:
|
||||||
|
g.buildMultipleRuns(runs, typeName)
|
||||||
|
default:
|
||||||
|
g.buildMap(runs, typeName)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// splitIntoRuns breaks the values into runs of contiguous sequences.
|
||||||
|
// For example, given 1,2,3,5,6,7 it returns {1,2,3},{5,6,7}.
|
||||||
|
// The input slice is known to be non-empty.
|
||||||
|
func splitIntoRuns(values []Value) [][]Value {
|
||||||
|
// We use stable sort so the lexically first name is chosen for equal elements.
|
||||||
|
sort.Stable(byValue(values))
|
||||||
|
// Remove duplicates. Stable sort has put the one we want to print first,
|
||||||
|
// so use that one. The String method won't care about which named constant
|
||||||
|
// was the argument, so the first name for the given value is the only one to keep.
|
||||||
|
// We need to do this because identical values would cause the switch or map
|
||||||
|
// to fail to compile.
|
||||||
|
j := 1
|
||||||
|
for i := 1; i < len(values); i++ {
|
||||||
|
if values[i].value != values[i-1].value {
|
||||||
|
values[j] = values[i]
|
||||||
|
j++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
values = values[:j]
|
||||||
|
runs := make([][]Value, 0, 10)
|
||||||
|
for len(values) > 0 {
|
||||||
|
// One contiguous sequence per outer loop.
|
||||||
|
i := 1
|
||||||
|
for i < len(values) && values[i].value == values[i-1].value+1 {
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
runs = append(runs, values[:i])
|
||||||
|
values = values[i:]
|
||||||
|
}
|
||||||
|
return runs
|
||||||
|
}
|
||||||
|
|
||||||
|
// format returns the gofmt-ed contents of the Generator's buffer.
|
||||||
|
func (g *Generator) format() []byte {
|
||||||
|
src, err := format.Source(g.buf.Bytes())
|
||||||
|
if err != nil {
|
||||||
|
// Should never happen, but can arise when developing this code.
|
||||||
|
// The user can compile the output to see the error.
|
||||||
|
log.Printf("warning: internal error: invalid Go generated: %s", err)
|
||||||
|
log.Printf("warning: compile the package to analyze the error")
|
||||||
|
return g.buf.Bytes()
|
||||||
|
}
|
||||||
|
return src
|
||||||
|
}
|
||||||
|
|
||||||
|
// Value represents a declared constant.
|
||||||
|
type Value struct {
|
||||||
|
name string // The name of the constant.
|
||||||
|
// The value is stored as a bit pattern alone. The boolean tells us
|
||||||
|
// whether to interpret it as an int64 or a uint64; the only place
|
||||||
|
// this matters is when sorting.
|
||||||
|
// Much of the time the str field is all we need; it is printed
|
||||||
|
// by Value.String.
|
||||||
|
value uint64 // Will be converted to int64 when needed.
|
||||||
|
signed bool // Whether the constant is a signed type.
|
||||||
|
str string // The string representation given by the "go/exact" package.
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *Value) String() string {
|
||||||
|
return v.str
|
||||||
|
}
|
||||||
|
|
||||||
|
// byValue lets us sort the constants into increasing order.
|
||||||
|
// We take care in the Less method to sort in signed or unsigned order,
|
||||||
|
// as appropriate.
|
||||||
|
type byValue []Value
|
||||||
|
|
||||||
|
func (b byValue) Len() int { return len(b) }
|
||||||
|
func (b byValue) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
|
||||||
|
func (b byValue) Less(i, j int) bool {
|
||||||
|
if b[i].signed {
|
||||||
|
return int64(b[i].value) < int64(b[j].value)
|
||||||
|
}
|
||||||
|
return b[i].value < b[j].value
|
||||||
|
}
|
||||||
|
|
||||||
|
// genDecl processes one declaration clause.
|
||||||
|
func (f *File) genDecl(node ast.Node) bool {
|
||||||
|
decl, ok := node.(*ast.GenDecl)
|
||||||
|
if !ok || decl.Tok != token.CONST {
|
||||||
|
// We only care about const declarations.
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
// The name of the type of the constants we are declaring.
|
||||||
|
// Can change if this is a multi-element declaration.
|
||||||
|
typ := ""
|
||||||
|
// Loop over the elements of the declaration. Each element is a ValueSpec:
|
||||||
|
// a list of names possibly followed by a type, possibly followed by values.
|
||||||
|
// If the type and value are both missing, we carry down the type (and value,
|
||||||
|
// but the "go/types" package takes care of that).
|
||||||
|
for _, spec := range decl.Specs {
|
||||||
|
vspec := spec.(*ast.ValueSpec) // Guaranteed to succeed as this is CONST.
|
||||||
|
if vspec.Type == nil && len(vspec.Values) > 0 {
|
||||||
|
// "X = 1". With no type but a value, the constant is untyped.
|
||||||
|
// Skip this vspec and reset the remembered type.
|
||||||
|
typ = ""
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if vspec.Type != nil {
|
||||||
|
// "X T". We have a type. Remember it.
|
||||||
|
ident, ok := vspec.Type.(*ast.Ident)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
typ = ident.Name
|
||||||
|
}
|
||||||
|
if typ != f.typeName {
|
||||||
|
// This is not the type we're looking for.
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// We now have a list of names (from one line of source code) all being
|
||||||
|
// declared with the desired type.
|
||||||
|
// Grab their names and actual values and store them in f.values.
|
||||||
|
for _, name := range vspec.Names {
|
||||||
|
if name.Name == "_" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// This dance lets the type checker find the values for us. It's a
|
||||||
|
// bit tricky: look up the object declared by the name, find its
|
||||||
|
// types.Const, and extract its value.
|
||||||
|
obj, ok := f.pkg.defs[name]
|
||||||
|
if !ok {
|
||||||
|
log.Fatalf("no value for constant %s", name)
|
||||||
|
}
|
||||||
|
info := obj.Type().Underlying().(*types.Basic).Info()
|
||||||
|
if info&types.IsInteger == 0 {
|
||||||
|
log.Fatalf("can't handle non-integer constant type %s", typ)
|
||||||
|
}
|
||||||
|
value := obj.(*types.Const).Val() // Guaranteed to succeed as this is CONST.
|
||||||
|
if value.Kind() != exact.Int {
|
||||||
|
log.Fatalf("can't happen: constant is not an integer %s", name)
|
||||||
|
}
|
||||||
|
i64, isInt := exact.Int64Val(value)
|
||||||
|
u64, isUint := exact.Uint64Val(value)
|
||||||
|
if !isInt && !isUint {
|
||||||
|
log.Fatalf("internal error: value of %s is not an integer: %s", name, value.String())
|
||||||
|
}
|
||||||
|
if !isInt {
|
||||||
|
u64 = uint64(i64)
|
||||||
|
}
|
||||||
|
v := Value{
|
||||||
|
name: name.Name,
|
||||||
|
value: u64,
|
||||||
|
signed: info&types.IsUnsigned == 0,
|
||||||
|
str: value.String(),
|
||||||
|
}
|
||||||
|
f.values = append(f.values, v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helpers
|
||||||
|
|
||||||
|
// usize returns the number of bits of the smallest unsigned integer
|
||||||
|
// type that will hold n. Used to create the smallest possible slice of
|
||||||
|
// integers to use as indexes into the concatenated strings.
|
||||||
|
func usize(n int) int {
|
||||||
|
switch {
|
||||||
|
case n < 1<<8:
|
||||||
|
return 8
|
||||||
|
case n < 1<<16:
|
||||||
|
return 16
|
||||||
|
default:
|
||||||
|
// 2^32 is enough constants for anyone.
|
||||||
|
return 32
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// declareIndexAndNameVars declares the index slices and concatenated names
|
||||||
|
// strings representing the runs of values.
|
||||||
|
func (g *Generator) declareIndexAndNameVars(runs [][]Value, typeName string) {
|
||||||
|
var indexes, names []string
|
||||||
|
for i, run := range runs {
|
||||||
|
index, name := g.createIndexAndNameDecl(run, typeName, fmt.Sprintf("_%d", i))
|
||||||
|
indexes = append(indexes, index)
|
||||||
|
names = append(names, name)
|
||||||
|
}
|
||||||
|
g.Printf("const (\n")
|
||||||
|
for _, name := range names {
|
||||||
|
g.Printf("\t%s\n", name)
|
||||||
|
}
|
||||||
|
g.Printf(")\n\n")
|
||||||
|
g.Printf("var (")
|
||||||
|
for _, index := range indexes {
|
||||||
|
g.Printf("\t%s\n", index)
|
||||||
|
}
|
||||||
|
g.Printf(")\n\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// declareIndexAndNameVar is the single-run version of declareIndexAndNameVars
|
||||||
|
func (g *Generator) declareIndexAndNameVar(run []Value, typeName string) {
|
||||||
|
index, name := g.createIndexAndNameDecl(run, typeName, "")
|
||||||
|
g.Printf("const %s\n", name)
|
||||||
|
g.Printf("var %s\n", index)
|
||||||
|
}
|
||||||
|
|
||||||
|
// createIndexAndNameDecl returns the pair of declarations for the run. The caller will add "const" and "var".
|
||||||
|
func (g *Generator) createIndexAndNameDecl(run []Value, typeName string, suffix string) (string, string) {
|
||||||
|
b := new(bytes.Buffer)
|
||||||
|
indexes := make([]int, len(run))
|
||||||
|
for i := range run {
|
||||||
|
b.WriteString(run[i].name)
|
||||||
|
indexes[i] = b.Len()
|
||||||
|
}
|
||||||
|
nameConst := fmt.Sprintf("_%s_name%s = %q", typeName, suffix, b.String())
|
||||||
|
nameLen := b.Len()
|
||||||
|
b.Reset()
|
||||||
|
fmt.Fprintf(b, "_%s_index%s = [...]uint%d{0, ", typeName, suffix, usize(nameLen))
|
||||||
|
for i, v := range indexes {
|
||||||
|
if i > 0 {
|
||||||
|
fmt.Fprintf(b, ", ")
|
||||||
|
}
|
||||||
|
fmt.Fprintf(b, "%d", v)
|
||||||
|
}
|
||||||
|
fmt.Fprintf(b, "}")
|
||||||
|
return b.String(), nameConst
|
||||||
|
}
|
||||||
|
|
||||||
|
// declareNameVars declares the concatenated names string representing all the values in the runs.
|
||||||
|
func (g *Generator) declareNameVars(runs [][]Value, typeName string, suffix string) {
|
||||||
|
g.Printf("const _%s_name%s = \"", typeName, suffix)
|
||||||
|
for _, run := range runs {
|
||||||
|
for i := range run {
|
||||||
|
g.Printf("%s", run[i].name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
g.Printf("\"\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildOneRun generates the variables and String method for a single run of contiguous values.
|
||||||
|
func (g *Generator) buildOneRun(runs [][]Value, typeName string) {
|
||||||
|
values := runs[0]
|
||||||
|
g.Printf("\n")
|
||||||
|
g.declareIndexAndNameVar(values, typeName)
|
||||||
|
// The generated code is simple enough to write as a Printf format.
|
||||||
|
lessThanZero := ""
|
||||||
|
if values[0].signed {
|
||||||
|
lessThanZero = "i < 0 || "
|
||||||
|
}
|
||||||
|
if values[0].value == 0 { // Signed or unsigned, 0 is still 0.
|
||||||
|
g.Printf(stringOneRun, typeName, usize(len(values)), lessThanZero)
|
||||||
|
} else {
|
||||||
|
g.Printf(stringOneRunWithOffset, typeName, values[0].String(), usize(len(values)), lessThanZero)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Arguments to format are:
|
||||||
|
// [1]: type name
|
||||||
|
// [2]: size of index element (8 for uint8 etc.)
|
||||||
|
// [3]: less than zero check (for signed types)
|
||||||
|
const stringOneRun = `func (i %[1]s) String() string {
|
||||||
|
if %[3]si >= %[1]s(len(_%[1]s_index)-1) {
|
||||||
|
return fmt.Sprintf("%[1]s(%%d)", i)
|
||||||
|
}
|
||||||
|
return _%[1]s_name[_%[1]s_index[i]:_%[1]s_index[i+1]]
|
||||||
|
}
|
||||||
|
`
|
||||||
|
|
||||||
|
// Arguments to format are:
|
||||||
|
// [1]: type name
|
||||||
|
// [2]: lowest defined value for type, as a string
|
||||||
|
// [3]: size of index element (8 for uint8 etc.)
|
||||||
|
// [4]: less than zero check (for signed types)
|
||||||
|
/*
|
||||||
|
*/
|
||||||
|
const stringOneRunWithOffset = `func (i %[1]s) String() string {
|
||||||
|
i -= %[2]s
|
||||||
|
if %[4]si >= %[1]s(len(_%[1]s_index)-1) {
|
||||||
|
return fmt.Sprintf("%[1]s(%%d)", i + %[2]s)
|
||||||
|
}
|
||||||
|
return _%[1]s_name[_%[1]s_index[i] : _%[1]s_index[i+1]]
|
||||||
|
}
|
||||||
|
`
|
||||||
|
|
||||||
|
// buildMultipleRuns generates the variables and String method for multiple runs of contiguous values.
|
||||||
|
// For this pattern, a single Printf format won't do.
|
||||||
|
func (g *Generator) buildMultipleRuns(runs [][]Value, typeName string) {
|
||||||
|
g.Printf("\n")
|
||||||
|
g.declareIndexAndNameVars(runs, typeName)
|
||||||
|
g.Printf("func (i %s) String() string {\n", typeName)
|
||||||
|
g.Printf("\tswitch {\n")
|
||||||
|
for i, values := range runs {
|
||||||
|
if len(values) == 1 {
|
||||||
|
g.Printf("\tcase i == %s:\n", &values[0])
|
||||||
|
g.Printf("\t\treturn _%s_name_%d\n", typeName, i)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
g.Printf("\tcase %s <= i && i <= %s:\n", &values[0], &values[len(values)-1])
|
||||||
|
if values[0].value != 0 {
|
||||||
|
g.Printf("\t\ti -= %s\n", &values[0])
|
||||||
|
}
|
||||||
|
g.Printf("\t\treturn _%s_name_%d[_%s_index_%d[i]:_%s_index_%d[i+1]]\n",
|
||||||
|
typeName, i, typeName, i, typeName, i)
|
||||||
|
}
|
||||||
|
g.Printf("\tdefault:\n")
|
||||||
|
g.Printf("\t\treturn fmt.Sprintf(\"%s(%%d)\", i)\n", typeName)
|
||||||
|
g.Printf("\t}\n")
|
||||||
|
g.Printf("}\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildMap handles the case where the space is so sparse a map is a reasonable fallback.
|
||||||
|
// It's a rare situation but has simple code.
|
||||||
|
func (g *Generator) buildMap(runs [][]Value, typeName string) {
|
||||||
|
g.Printf("\n")
|
||||||
|
g.declareNameVars(runs, typeName, "")
|
||||||
|
g.Printf("\nvar _%s_map = map[%s]string{\n", typeName, typeName)
|
||||||
|
n := 0
|
||||||
|
for _, values := range runs {
|
||||||
|
for _, value := range values {
|
||||||
|
g.Printf("\t%s: _%s_name[%d:%d],\n", &value, typeName, n, n+len(value.name))
|
||||||
|
n += len(value.name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
g.Printf("}\n\n")
|
||||||
|
g.Printf(stringMap, typeName)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Argument to format is the type name.
|
||||||
|
const stringMap = `func (i %[1]s) String() string {
|
||||||
|
if str, ok := _%[1]s_map[i]; ok {
|
||||||
|
return str
|
||||||
|
}
|
||||||
|
return fmt.Sprintf("%[1]s(%%d)", i)
|
||||||
|
}
|
||||||
|
`
|
@ -2,6 +2,8 @@
|
|||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build !go1.8
|
||||||
|
|
||||||
package astutil
|
package astutil
|
||||||
|
|
||||||
// This file defines utilities for working with source positions.
|
// This file defines utilities for working with source positions.
|
||||||
|
629
go/ast/astutil/enclosing18.go
Normal file
629
go/ast/astutil/enclosing18.go
Normal file
@ -0,0 +1,629 @@
|
|||||||
|
// Copyright 2013 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.8
|
||||||
|
|
||||||
|
package astutil
|
||||||
|
|
||||||
|
// This file defines utilities for working with source positions.
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/token"
|
||||||
|
"sort"
|
||||||
|
)
|
||||||
|
|
||||||
|
// PathEnclosingInterval returns the node that encloses the source
|
||||||
|
// interval [start, end), and all its ancestors up to the AST root.
|
||||||
|
//
|
||||||
|
// The definition of "enclosing" used by this function considers
|
||||||
|
// additional whitespace abutting a node to be enclosed by it.
|
||||||
|
// In this example:
|
||||||
|
//
|
||||||
|
// z := x + y // add them
|
||||||
|
// <-A->
|
||||||
|
// <----B----->
|
||||||
|
//
|
||||||
|
// the ast.BinaryExpr(+) node is considered to enclose interval B
|
||||||
|
// even though its [Pos()..End()) is actually only interval A.
|
||||||
|
// This behaviour makes user interfaces more tolerant of imperfect
|
||||||
|
// input.
|
||||||
|
//
|
||||||
|
// This function treats tokens as nodes, though they are not included
|
||||||
|
// in the result. e.g. PathEnclosingInterval("+") returns the
|
||||||
|
// enclosing ast.BinaryExpr("x + y").
|
||||||
|
//
|
||||||
|
// If start==end, the 1-char interval following start is used instead.
|
||||||
|
//
|
||||||
|
// The 'exact' result is true if the interval contains only path[0]
|
||||||
|
// and perhaps some adjacent whitespace. It is false if the interval
|
||||||
|
// overlaps multiple children of path[0], or if it contains only
|
||||||
|
// interior whitespace of path[0].
|
||||||
|
// In this example:
|
||||||
|
//
|
||||||
|
// z := x + y // add them
|
||||||
|
// <--C--> <---E-->
|
||||||
|
// ^
|
||||||
|
// D
|
||||||
|
//
|
||||||
|
// intervals C, D and E are inexact. C is contained by the
|
||||||
|
// z-assignment statement, because it spans three of its children (:=,
|
||||||
|
// x, +). So too is the 1-char interval D, because it contains only
|
||||||
|
// interior whitespace of the assignment. E is considered interior
|
||||||
|
// whitespace of the BlockStmt containing the assignment.
|
||||||
|
//
|
||||||
|
// Precondition: [start, end) both lie within the same file as root.
|
||||||
|
// TODO(adonovan): return (nil, false) in this case and remove precond.
|
||||||
|
// Requires FileSet; see loader.tokenFileContainsPos.
|
||||||
|
//
|
||||||
|
// Postcondition: path is never nil; it always contains at least 'root'.
|
||||||
|
//
|
||||||
|
func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
|
||||||
|
// fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging
|
||||||
|
|
||||||
|
// Precondition: node.[Pos..End) and adjoining whitespace contain [start, end).
|
||||||
|
var visit func(node ast.Node) bool
|
||||||
|
visit = func(node ast.Node) bool {
|
||||||
|
path = append(path, node)
|
||||||
|
|
||||||
|
nodePos := node.Pos()
|
||||||
|
nodeEnd := node.End()
|
||||||
|
|
||||||
|
// fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging
|
||||||
|
|
||||||
|
// Intersect [start, end) with interval of node.
|
||||||
|
if start < nodePos {
|
||||||
|
start = nodePos
|
||||||
|
}
|
||||||
|
if end > nodeEnd {
|
||||||
|
end = nodeEnd
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find sole child that contains [start, end).
|
||||||
|
children := childrenOf(node)
|
||||||
|
l := len(children)
|
||||||
|
for i, child := range children {
|
||||||
|
// [childPos, childEnd) is unaugmented interval of child.
|
||||||
|
childPos := child.Pos()
|
||||||
|
childEnd := child.End()
|
||||||
|
|
||||||
|
// [augPos, augEnd) is whitespace-augmented interval of child.
|
||||||
|
augPos := childPos
|
||||||
|
augEnd := childEnd
|
||||||
|
if i > 0 {
|
||||||
|
augPos = children[i-1].End() // start of preceding whitespace
|
||||||
|
}
|
||||||
|
if i < l-1 {
|
||||||
|
nextChildPos := children[i+1].Pos()
|
||||||
|
// Does [start, end) lie between child and next child?
|
||||||
|
if start >= augEnd && end <= nextChildPos {
|
||||||
|
return false // inexact match
|
||||||
|
}
|
||||||
|
augEnd = nextChildPos // end of following whitespace
|
||||||
|
}
|
||||||
|
|
||||||
|
// fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n",
|
||||||
|
// i, augPos, augEnd, start, end) // debugging
|
||||||
|
|
||||||
|
// Does augmented child strictly contain [start, end)?
|
||||||
|
if augPos <= start && end <= augEnd {
|
||||||
|
_, isToken := child.(tokenNode)
|
||||||
|
return isToken || visit(child)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Does [start, end) overlap multiple children?
|
||||||
|
// i.e. left-augmented child contains start
|
||||||
|
// but LR-augmented child does not contain end.
|
||||||
|
if start < childEnd && end > augEnd {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// No single child contained [start, end),
|
||||||
|
// so node is the result. Is it exact?
|
||||||
|
|
||||||
|
// (It's tempting to put this condition before the
|
||||||
|
// child loop, but it gives the wrong result in the
|
||||||
|
// case where a node (e.g. ExprStmt) and its sole
|
||||||
|
// child have equal intervals.)
|
||||||
|
if start == nodePos && end == nodeEnd {
|
||||||
|
return true // exact match
|
||||||
|
}
|
||||||
|
|
||||||
|
return false // inexact: overlaps multiple children
|
||||||
|
}
|
||||||
|
|
||||||
|
if start > end {
|
||||||
|
start, end = end, start
|
||||||
|
}
|
||||||
|
|
||||||
|
if start < root.End() && end > root.Pos() {
|
||||||
|
if start == end {
|
||||||
|
end = start + 1 // empty interval => interval of size 1
|
||||||
|
}
|
||||||
|
exact = visit(root)
|
||||||
|
|
||||||
|
// Reverse the path:
|
||||||
|
for i, l := 0, len(path); i < l/2; i++ {
|
||||||
|
path[i], path[l-1-i] = path[l-1-i], path[i]
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Selection lies within whitespace preceding the
|
||||||
|
// first (or following the last) declaration in the file.
|
||||||
|
// The result nonetheless always includes the ast.File.
|
||||||
|
path = append(path, root)
|
||||||
|
}
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// tokenNode is a dummy implementation of ast.Node for a single token.
|
||||||
|
// They are used transiently by PathEnclosingInterval but never escape
|
||||||
|
// this package.
|
||||||
|
//
|
||||||
|
type tokenNode struct {
|
||||||
|
pos token.Pos
|
||||||
|
end token.Pos
|
||||||
|
}
|
||||||
|
|
||||||
|
func (n tokenNode) Pos() token.Pos {
|
||||||
|
return n.pos
|
||||||
|
}
|
||||||
|
|
||||||
|
func (n tokenNode) End() token.Pos {
|
||||||
|
return n.end
|
||||||
|
}
|
||||||
|
|
||||||
|
func tok(pos token.Pos, len int) ast.Node {
|
||||||
|
return tokenNode{pos, pos + token.Pos(len)}
|
||||||
|
}
|
||||||
|
|
||||||
|
// childrenOf returns the direct non-nil children of ast.Node n.
|
||||||
|
// It may include fake ast.Node implementations for bare tokens.
|
||||||
|
// it is not safe to call (e.g.) ast.Walk on such nodes.
|
||||||
|
//
|
||||||
|
func childrenOf(n ast.Node) []ast.Node {
|
||||||
|
var children []ast.Node
|
||||||
|
|
||||||
|
// First add nodes for all true subtrees.
|
||||||
|
ast.Inspect(n, func(node ast.Node) bool {
|
||||||
|
if node == n { // push n
|
||||||
|
return true // recur
|
||||||
|
}
|
||||||
|
if node != nil { // push child
|
||||||
|
children = append(children, node)
|
||||||
|
}
|
||||||
|
return false // no recursion
|
||||||
|
})
|
||||||
|
|
||||||
|
// Then add fake Nodes for bare tokens.
|
||||||
|
switch n := n.(type) {
|
||||||
|
case *ast.ArrayType:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Lbrack, len("[")),
|
||||||
|
tok(n.Elt.End(), len("]")))
|
||||||
|
|
||||||
|
case *ast.AssignStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.TokPos, len(n.Tok.String())))
|
||||||
|
|
||||||
|
case *ast.BasicLit:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.ValuePos, len(n.Value)))
|
||||||
|
|
||||||
|
case *ast.BinaryExpr:
|
||||||
|
children = append(children, tok(n.OpPos, len(n.Op.String())))
|
||||||
|
|
||||||
|
case *ast.BlockStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Lbrace, len("{")),
|
||||||
|
tok(n.Rbrace, len("}")))
|
||||||
|
|
||||||
|
case *ast.BranchStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.TokPos, len(n.Tok.String())))
|
||||||
|
|
||||||
|
case *ast.CallExpr:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Lparen, len("(")),
|
||||||
|
tok(n.Rparen, len(")")))
|
||||||
|
if n.Ellipsis != 0 {
|
||||||
|
children = append(children, tok(n.Ellipsis, len("...")))
|
||||||
|
}
|
||||||
|
|
||||||
|
case *ast.CaseClause:
|
||||||
|
if n.List == nil {
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Case, len("default")))
|
||||||
|
} else {
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Case, len("case")))
|
||||||
|
}
|
||||||
|
children = append(children, tok(n.Colon, len(":")))
|
||||||
|
|
||||||
|
case *ast.ChanType:
|
||||||
|
switch n.Dir {
|
||||||
|
case ast.RECV:
|
||||||
|
children = append(children, tok(n.Begin, len("<-chan")))
|
||||||
|
case ast.SEND:
|
||||||
|
children = append(children, tok(n.Begin, len("chan<-")))
|
||||||
|
case ast.RECV | ast.SEND:
|
||||||
|
children = append(children, tok(n.Begin, len("chan")))
|
||||||
|
}
|
||||||
|
|
||||||
|
case *ast.CommClause:
|
||||||
|
if n.Comm == nil {
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Case, len("default")))
|
||||||
|
} else {
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Case, len("case")))
|
||||||
|
}
|
||||||
|
children = append(children, tok(n.Colon, len(":")))
|
||||||
|
|
||||||
|
case *ast.Comment:
|
||||||
|
// nop
|
||||||
|
|
||||||
|
case *ast.CommentGroup:
|
||||||
|
// nop
|
||||||
|
|
||||||
|
case *ast.CompositeLit:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Lbrace, len("{")),
|
||||||
|
tok(n.Rbrace, len("{")))
|
||||||
|
|
||||||
|
case *ast.DeclStmt:
|
||||||
|
// nop
|
||||||
|
|
||||||
|
case *ast.DeferStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Defer, len("defer")))
|
||||||
|
|
||||||
|
case *ast.Ellipsis:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Ellipsis, len("...")))
|
||||||
|
|
||||||
|
case *ast.EmptyStmt:
|
||||||
|
// nop
|
||||||
|
|
||||||
|
case *ast.ExprStmt:
|
||||||
|
// nop
|
||||||
|
|
||||||
|
case *ast.Field:
|
||||||
|
// TODO(adonovan): Field.{Doc,Comment,Tag}?
|
||||||
|
|
||||||
|
case *ast.FieldList:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Opening, len("(")),
|
||||||
|
tok(n.Closing, len(")")))
|
||||||
|
|
||||||
|
case *ast.File:
|
||||||
|
// TODO test: Doc
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Package, len("package")))
|
||||||
|
|
||||||
|
case *ast.ForStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.For, len("for")))
|
||||||
|
|
||||||
|
case *ast.FuncDecl:
|
||||||
|
// TODO(adonovan): FuncDecl.Comment?
|
||||||
|
|
||||||
|
// Uniquely, FuncDecl breaks the invariant that
|
||||||
|
// preorder traversal yields tokens in lexical order:
|
||||||
|
// in fact, FuncDecl.Recv precedes FuncDecl.Type.Func.
|
||||||
|
//
|
||||||
|
// As a workaround, we inline the case for FuncType
|
||||||
|
// here and order things correctly.
|
||||||
|
//
|
||||||
|
children = nil // discard ast.Walk(FuncDecl) info subtrees
|
||||||
|
children = append(children, tok(n.Type.Func, len("func")))
|
||||||
|
if n.Recv != nil {
|
||||||
|
children = append(children, n.Recv)
|
||||||
|
}
|
||||||
|
children = append(children, n.Name)
|
||||||
|
if n.Type.Params != nil {
|
||||||
|
children = append(children, n.Type.Params)
|
||||||
|
}
|
||||||
|
if n.Type.Results != nil {
|
||||||
|
children = append(children, n.Type.Results)
|
||||||
|
}
|
||||||
|
if n.Body != nil {
|
||||||
|
children = append(children, n.Body)
|
||||||
|
}
|
||||||
|
|
||||||
|
case *ast.FuncLit:
|
||||||
|
// nop
|
||||||
|
|
||||||
|
case *ast.FuncType:
|
||||||
|
if n.Func != 0 {
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Func, len("func")))
|
||||||
|
}
|
||||||
|
|
||||||
|
case *ast.GenDecl:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.TokPos, len(n.Tok.String())))
|
||||||
|
if n.Lparen != 0 {
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Lparen, len("(")),
|
||||||
|
tok(n.Rparen, len(")")))
|
||||||
|
}
|
||||||
|
|
||||||
|
case *ast.GoStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Go, len("go")))
|
||||||
|
|
||||||
|
case *ast.Ident:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.NamePos, len(n.Name)))
|
||||||
|
|
||||||
|
case *ast.IfStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.If, len("if")))
|
||||||
|
|
||||||
|
case *ast.ImportSpec:
|
||||||
|
// TODO(adonovan): ImportSpec.{Doc,EndPos}?
|
||||||
|
|
||||||
|
case *ast.IncDecStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.TokPos, len(n.Tok.String())))
|
||||||
|
|
||||||
|
case *ast.IndexExpr:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Lbrack, len("{")),
|
||||||
|
tok(n.Rbrack, len("}")))
|
||||||
|
|
||||||
|
case *ast.InterfaceType:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Interface, len("interface")))
|
||||||
|
|
||||||
|
case *ast.KeyValueExpr:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Colon, len(":")))
|
||||||
|
|
||||||
|
case *ast.LabeledStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Colon, len(":")))
|
||||||
|
|
||||||
|
case *ast.MapType:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Map, len("map")))
|
||||||
|
|
||||||
|
case *ast.ParenExpr:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Lparen, len("(")),
|
||||||
|
tok(n.Rparen, len(")")))
|
||||||
|
|
||||||
|
case *ast.RangeStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.For, len("for")),
|
||||||
|
tok(n.TokPos, len(n.Tok.String())))
|
||||||
|
|
||||||
|
case *ast.ReturnStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Return, len("return")))
|
||||||
|
|
||||||
|
case *ast.SelectStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Select, len("select")))
|
||||||
|
|
||||||
|
case *ast.SelectorExpr:
|
||||||
|
// nop
|
||||||
|
|
||||||
|
case *ast.SendStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Arrow, len("<-")))
|
||||||
|
|
||||||
|
case *ast.SliceExpr:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Lbrack, len("[")),
|
||||||
|
tok(n.Rbrack, len("]")))
|
||||||
|
|
||||||
|
case *ast.StarExpr:
|
||||||
|
children = append(children, tok(n.Star, len("*")))
|
||||||
|
|
||||||
|
case *ast.StructType:
|
||||||
|
children = append(children, tok(n.Struct, len("struct")))
|
||||||
|
|
||||||
|
case *ast.SwitchStmt:
|
||||||
|
children = append(children, tok(n.Switch, len("switch")))
|
||||||
|
|
||||||
|
case *ast.TypeAssertExpr:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Lparen-1, len(".")),
|
||||||
|
tok(n.Lparen, len("(")),
|
||||||
|
tok(n.Rparen, len(")")))
|
||||||
|
|
||||||
|
case *ast.TypeSpec:
|
||||||
|
// TODO(adonovan): TypeSpec.{Doc,Comment}?
|
||||||
|
|
||||||
|
case *ast.TypeSwitchStmt:
|
||||||
|
children = append(children, tok(n.Switch, len("switch")))
|
||||||
|
|
||||||
|
case *ast.UnaryExpr:
|
||||||
|
children = append(children, tok(n.OpPos, len(n.Op.String())))
|
||||||
|
|
||||||
|
case *ast.ValueSpec:
|
||||||
|
// TODO(adonovan): ValueSpec.{Doc,Comment}?
|
||||||
|
|
||||||
|
case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
|
||||||
|
// nop
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO(adonovan): opt: merge the logic of ast.Inspect() into
|
||||||
|
// the switch above so we can make interleaved callbacks for
|
||||||
|
// both Nodes and Tokens in the right order and avoid the need
|
||||||
|
// to sort.
|
||||||
|
sort.Sort(byPos(children))
|
||||||
|
|
||||||
|
return children
|
||||||
|
}
|
||||||
|
|
||||||
|
type byPos []ast.Node
|
||||||
|
|
||||||
|
func (sl byPos) Len() int {
|
||||||
|
return len(sl)
|
||||||
|
}
|
||||||
|
func (sl byPos) Less(i, j int) bool {
|
||||||
|
return sl[i].Pos() < sl[j].Pos()
|
||||||
|
}
|
||||||
|
func (sl byPos) Swap(i, j int) {
|
||||||
|
sl[i], sl[j] = sl[j], sl[i]
|
||||||
|
}
|
||||||
|
|
||||||
|
// NodeDescription returns a description of the concrete type of n suitable
|
||||||
|
// for a user interface.
|
||||||
|
//
|
||||||
|
// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
|
||||||
|
// StarExpr) we could be much more specific given the path to the AST
|
||||||
|
// root. Perhaps we should do that.
|
||||||
|
//
|
||||||
|
func NodeDescription(n ast.Node) string {
|
||||||
|
switch n := n.(type) {
|
||||||
|
case *ast.ArrayType:
|
||||||
|
return "array type"
|
||||||
|
case *ast.AssignStmt:
|
||||||
|
return "assignment"
|
||||||
|
case *ast.BadDecl:
|
||||||
|
return "bad declaration"
|
||||||
|
case *ast.BadExpr:
|
||||||
|
return "bad expression"
|
||||||
|
case *ast.BadStmt:
|
||||||
|
return "bad statement"
|
||||||
|
case *ast.BasicLit:
|
||||||
|
return "basic literal"
|
||||||
|
case *ast.BinaryExpr:
|
||||||
|
return fmt.Sprintf("binary %s operation", n.Op)
|
||||||
|
case *ast.BlockStmt:
|
||||||
|
return "block"
|
||||||
|
case *ast.BranchStmt:
|
||||||
|
switch n.Tok {
|
||||||
|
case token.BREAK:
|
||||||
|
return "break statement"
|
||||||
|
case token.CONTINUE:
|
||||||
|
return "continue statement"
|
||||||
|
case token.GOTO:
|
||||||
|
return "goto statement"
|
||||||
|
case token.FALLTHROUGH:
|
||||||
|
return "fall-through statement"
|
||||||
|
}
|
||||||
|
case *ast.CallExpr:
|
||||||
|
if len(n.Args) == 1 && !n.Ellipsis.IsValid() {
|
||||||
|
return "function call (or conversion)"
|
||||||
|
}
|
||||||
|
return "function call"
|
||||||
|
case *ast.CaseClause:
|
||||||
|
return "case clause"
|
||||||
|
case *ast.ChanType:
|
||||||
|
return "channel type"
|
||||||
|
case *ast.CommClause:
|
||||||
|
return "communication clause"
|
||||||
|
case *ast.Comment:
|
||||||
|
return "comment"
|
||||||
|
case *ast.CommentGroup:
|
||||||
|
return "comment group"
|
||||||
|
case *ast.CompositeLit:
|
||||||
|
return "composite literal"
|
||||||
|
case *ast.DeclStmt:
|
||||||
|
return NodeDescription(n.Decl) + " statement"
|
||||||
|
case *ast.DeferStmt:
|
||||||
|
return "defer statement"
|
||||||
|
case *ast.Ellipsis:
|
||||||
|
return "ellipsis"
|
||||||
|
case *ast.EmptyStmt:
|
||||||
|
return "empty statement"
|
||||||
|
case *ast.ExprStmt:
|
||||||
|
return "expression statement"
|
||||||
|
case *ast.Field:
|
||||||
|
// Can be any of these:
|
||||||
|
// struct {x, y int} -- struct field(s)
|
||||||
|
// struct {T} -- anon struct field
|
||||||
|
// interface {I} -- interface embedding
|
||||||
|
// interface {f()} -- interface method
|
||||||
|
// func (A) func(B) C -- receiver, param(s), result(s)
|
||||||
|
return "field/method/parameter"
|
||||||
|
case *ast.FieldList:
|
||||||
|
return "field/method/parameter list"
|
||||||
|
case *ast.File:
|
||||||
|
return "source file"
|
||||||
|
case *ast.ForStmt:
|
||||||
|
return "for loop"
|
||||||
|
case *ast.FuncDecl:
|
||||||
|
return "function declaration"
|
||||||
|
case *ast.FuncLit:
|
||||||
|
return "function literal"
|
||||||
|
case *ast.FuncType:
|
||||||
|
return "function type"
|
||||||
|
case *ast.GenDecl:
|
||||||
|
switch n.Tok {
|
||||||
|
case token.IMPORT:
|
||||||
|
return "import declaration"
|
||||||
|
case token.CONST:
|
||||||
|
return "constant declaration"
|
||||||
|
case token.TYPE:
|
||||||
|
return "type declaration"
|
||||||
|
case token.VAR:
|
||||||
|
return "variable declaration"
|
||||||
|
}
|
||||||
|
case *ast.GoStmt:
|
||||||
|
return "go statement"
|
||||||
|
case *ast.Ident:
|
||||||
|
return "identifier"
|
||||||
|
case *ast.IfStmt:
|
||||||
|
return "if statement"
|
||||||
|
case *ast.ImportSpec:
|
||||||
|
return "import specification"
|
||||||
|
case *ast.IncDecStmt:
|
||||||
|
if n.Tok == token.INC {
|
||||||
|
return "increment statement"
|
||||||
|
}
|
||||||
|
return "decrement statement"
|
||||||
|
case *ast.IndexExpr:
|
||||||
|
return "index expression"
|
||||||
|
case *ast.InterfaceType:
|
||||||
|
return "interface type"
|
||||||
|
case *ast.KeyValueExpr:
|
||||||
|
return "key/value association"
|
||||||
|
case *ast.LabeledStmt:
|
||||||
|
return "statement label"
|
||||||
|
case *ast.MapType:
|
||||||
|
return "map type"
|
||||||
|
case *ast.Package:
|
||||||
|
return "package"
|
||||||
|
case *ast.ParenExpr:
|
||||||
|
return "parenthesized " + NodeDescription(n.X)
|
||||||
|
case *ast.RangeStmt:
|
||||||
|
return "range loop"
|
||||||
|
case *ast.ReturnStmt:
|
||||||
|
return "return statement"
|
||||||
|
case *ast.SelectStmt:
|
||||||
|
return "select statement"
|
||||||
|
case *ast.SelectorExpr:
|
||||||
|
return "selector"
|
||||||
|
case *ast.SendStmt:
|
||||||
|
return "channel send"
|
||||||
|
case *ast.SliceExpr:
|
||||||
|
return "slice expression"
|
||||||
|
case *ast.StarExpr:
|
||||||
|
return "*-operation" // load/store expr or pointer type
|
||||||
|
case *ast.StructType:
|
||||||
|
return "struct type"
|
||||||
|
case *ast.SwitchStmt:
|
||||||
|
return "switch statement"
|
||||||
|
case *ast.TypeAssertExpr:
|
||||||
|
return "type assertion"
|
||||||
|
case *ast.TypeSpec:
|
||||||
|
return "type specification"
|
||||||
|
case *ast.TypeSwitchStmt:
|
||||||
|
return "type switch"
|
||||||
|
case *ast.UnaryExpr:
|
||||||
|
return fmt.Sprintf("unary %s operation", n.Op)
|
||||||
|
case *ast.ValueSpec:
|
||||||
|
return "value specification"
|
||||||
|
|
||||||
|
}
|
||||||
|
panic(fmt.Sprintf("unexpected node type: %T", n))
|
||||||
|
}
|
@ -2,7 +2,7 @@
|
|||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
// +build go1.5
|
// +build !go1.8
|
||||||
|
|
||||||
package ssa
|
package ssa
|
||||||
|
|
||||||
|
2385
go/ssa/builder18.go
Normal file
2385
go/ssa/builder18.go
Normal file
File diff suppressed because it is too large
Load Diff
@ -2,7 +2,7 @@
|
|||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
// +build go1.5
|
// +build !go1.8
|
||||||
|
|
||||||
package ssa
|
package ssa
|
||||||
|
|
||||||
|
259
go/ssa/create18.go
Normal file
259
go/ssa/create18.go
Normal file
@ -0,0 +1,259 @@
|
|||||||
|
// Copyright 2013 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.8
|
||||||
|
|
||||||
|
package ssa
|
||||||
|
|
||||||
|
// This file implements the CREATE phase of SSA construction.
|
||||||
|
// See builder.go for explanation.
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"os"
|
||||||
|
"sync"
|
||||||
|
|
||||||
|
"golang.org/x/tools/go/types/typeutil"
|
||||||
|
)
|
||||||
|
|
||||||
|
// NewProgram returns a new SSA Program.
|
||||||
|
//
|
||||||
|
// mode controls diagnostics and checking during SSA construction.
|
||||||
|
//
|
||||||
|
func NewProgram(fset *token.FileSet, mode BuilderMode) *Program {
|
||||||
|
prog := &Program{
|
||||||
|
Fset: fset,
|
||||||
|
imported: make(map[string]*Package),
|
||||||
|
packages: make(map[*types.Package]*Package),
|
||||||
|
thunks: make(map[selectionKey]*Function),
|
||||||
|
bounds: make(map[*types.Func]*Function),
|
||||||
|
mode: mode,
|
||||||
|
}
|
||||||
|
|
||||||
|
h := typeutil.MakeHasher() // protected by methodsMu, in effect
|
||||||
|
prog.methodSets.SetHasher(h)
|
||||||
|
prog.canon.SetHasher(h)
|
||||||
|
|
||||||
|
return prog
|
||||||
|
}
|
||||||
|
|
||||||
|
// memberFromObject populates package pkg with a member for the
|
||||||
|
// typechecker object obj.
|
||||||
|
//
|
||||||
|
// For objects from Go source code, syntax is the associated syntax
|
||||||
|
// tree (for funcs and vars only); it will be used during the build
|
||||||
|
// phase.
|
||||||
|
//
|
||||||
|
func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) {
|
||||||
|
name := obj.Name()
|
||||||
|
switch obj := obj.(type) {
|
||||||
|
case *types.TypeName:
|
||||||
|
pkg.Members[name] = &Type{
|
||||||
|
object: obj,
|
||||||
|
pkg: pkg,
|
||||||
|
}
|
||||||
|
|
||||||
|
case *types.Const:
|
||||||
|
c := &NamedConst{
|
||||||
|
object: obj,
|
||||||
|
Value: NewConst(obj.Val(), obj.Type()),
|
||||||
|
pkg: pkg,
|
||||||
|
}
|
||||||
|
pkg.values[obj] = c.Value
|
||||||
|
pkg.Members[name] = c
|
||||||
|
|
||||||
|
case *types.Var:
|
||||||
|
g := &Global{
|
||||||
|
Pkg: pkg,
|
||||||
|
name: name,
|
||||||
|
object: obj,
|
||||||
|
typ: types.NewPointer(obj.Type()), // address
|
||||||
|
pos: obj.Pos(),
|
||||||
|
}
|
||||||
|
pkg.values[obj] = g
|
||||||
|
pkg.Members[name] = g
|
||||||
|
|
||||||
|
case *types.Func:
|
||||||
|
sig := obj.Type().(*types.Signature)
|
||||||
|
if sig.Recv() == nil && name == "init" {
|
||||||
|
pkg.ninit++
|
||||||
|
name = fmt.Sprintf("init#%d", pkg.ninit)
|
||||||
|
}
|
||||||
|
fn := &Function{
|
||||||
|
name: name,
|
||||||
|
object: obj,
|
||||||
|
Signature: sig,
|
||||||
|
syntax: syntax,
|
||||||
|
pos: obj.Pos(),
|
||||||
|
Pkg: pkg,
|
||||||
|
Prog: pkg.Prog,
|
||||||
|
}
|
||||||
|
if syntax == nil {
|
||||||
|
fn.Synthetic = "loaded from gc object file"
|
||||||
|
}
|
||||||
|
|
||||||
|
pkg.values[obj] = fn
|
||||||
|
if sig.Recv() == nil {
|
||||||
|
pkg.Members[name] = fn // package-level function
|
||||||
|
}
|
||||||
|
|
||||||
|
default: // (incl. *types.Package)
|
||||||
|
panic("unexpected Object type: " + obj.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// membersFromDecl populates package pkg with members for each
|
||||||
|
// typechecker object (var, func, const or type) associated with the
|
||||||
|
// specified decl.
|
||||||
|
//
|
||||||
|
func membersFromDecl(pkg *Package, decl ast.Decl) {
|
||||||
|
switch decl := decl.(type) {
|
||||||
|
case *ast.GenDecl: // import, const, type or var
|
||||||
|
switch decl.Tok {
|
||||||
|
case token.CONST:
|
||||||
|
for _, spec := range decl.Specs {
|
||||||
|
for _, id := range spec.(*ast.ValueSpec).Names {
|
||||||
|
if !isBlankIdent(id) {
|
||||||
|
memberFromObject(pkg, pkg.info.Defs[id], nil)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
case token.VAR:
|
||||||
|
for _, spec := range decl.Specs {
|
||||||
|
for _, id := range spec.(*ast.ValueSpec).Names {
|
||||||
|
if !isBlankIdent(id) {
|
||||||
|
memberFromObject(pkg, pkg.info.Defs[id], spec)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
case token.TYPE:
|
||||||
|
for _, spec := range decl.Specs {
|
||||||
|
id := spec.(*ast.TypeSpec).Name
|
||||||
|
if !isBlankIdent(id) {
|
||||||
|
memberFromObject(pkg, pkg.info.Defs[id], nil)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
case *ast.FuncDecl:
|
||||||
|
id := decl.Name
|
||||||
|
if !isBlankIdent(id) {
|
||||||
|
memberFromObject(pkg, pkg.info.Defs[id], decl)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatePackage constructs and returns an SSA Package from the
|
||||||
|
// specified type-checked, error-free file ASTs, and populates its
|
||||||
|
// Members mapping.
|
||||||
|
//
|
||||||
|
// importable determines whether this package should be returned by a
|
||||||
|
// subsequent call to ImportedPackage(pkg.Path()).
|
||||||
|
//
|
||||||
|
// The real work of building SSA form for each function is not done
|
||||||
|
// until a subsequent call to Package.Build().
|
||||||
|
//
|
||||||
|
func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *types.Info, importable bool) *Package {
|
||||||
|
p := &Package{
|
||||||
|
Prog: prog,
|
||||||
|
Members: make(map[string]Member),
|
||||||
|
values: make(map[types.Object]Value),
|
||||||
|
Pkg: pkg,
|
||||||
|
info: info, // transient (CREATE and BUILD phases)
|
||||||
|
files: files, // transient (CREATE and BUILD phases)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add init() function.
|
||||||
|
p.init = &Function{
|
||||||
|
name: "init",
|
||||||
|
Signature: new(types.Signature),
|
||||||
|
Synthetic: "package initializer",
|
||||||
|
Pkg: p,
|
||||||
|
Prog: prog,
|
||||||
|
}
|
||||||
|
p.Members[p.init.name] = p.init
|
||||||
|
|
||||||
|
// CREATE phase.
|
||||||
|
// Allocate all package members: vars, funcs, consts and types.
|
||||||
|
if len(files) > 0 {
|
||||||
|
// Go source package.
|
||||||
|
for _, file := range files {
|
||||||
|
for _, decl := range file.Decls {
|
||||||
|
membersFromDecl(p, decl)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// GC-compiled binary package.
|
||||||
|
// No code.
|
||||||
|
// No position information.
|
||||||
|
scope := p.Pkg.Scope()
|
||||||
|
for _, name := range scope.Names() {
|
||||||
|
obj := scope.Lookup(name)
|
||||||
|
memberFromObject(p, obj, nil)
|
||||||
|
if obj, ok := obj.(*types.TypeName); ok {
|
||||||
|
named := obj.Type().(*types.Named)
|
||||||
|
for i, n := 0, named.NumMethods(); i < n; i++ {
|
||||||
|
memberFromObject(p, named.Method(i), nil)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if prog.mode&BareInits == 0 {
|
||||||
|
// Add initializer guard variable.
|
||||||
|
initguard := &Global{
|
||||||
|
Pkg: p,
|
||||||
|
name: "init$guard",
|
||||||
|
typ: types.NewPointer(tBool),
|
||||||
|
}
|
||||||
|
p.Members[initguard.Name()] = initguard
|
||||||
|
}
|
||||||
|
|
||||||
|
if prog.mode&GlobalDebug != 0 {
|
||||||
|
p.SetDebugMode(true)
|
||||||
|
}
|
||||||
|
|
||||||
|
if prog.mode&PrintPackages != 0 {
|
||||||
|
printMu.Lock()
|
||||||
|
p.WriteTo(os.Stdout)
|
||||||
|
printMu.Unlock()
|
||||||
|
}
|
||||||
|
|
||||||
|
if importable {
|
||||||
|
prog.imported[p.Pkg.Path()] = p
|
||||||
|
}
|
||||||
|
prog.packages[p.Pkg] = p
|
||||||
|
|
||||||
|
return p
|
||||||
|
}
|
||||||
|
|
||||||
|
// printMu serializes printing of Packages/Functions to stdout.
|
||||||
|
var printMu sync.Mutex
|
||||||
|
|
||||||
|
// AllPackages returns a new slice containing all packages in the
|
||||||
|
// program prog in unspecified order.
|
||||||
|
//
|
||||||
|
func (prog *Program) AllPackages() []*Package {
|
||||||
|
pkgs := make([]*Package, 0, len(prog.packages))
|
||||||
|
for _, pkg := range prog.packages {
|
||||||
|
pkgs = append(pkgs, pkg)
|
||||||
|
}
|
||||||
|
return pkgs
|
||||||
|
}
|
||||||
|
|
||||||
|
// ImportedPackage returns the importable SSA Package whose import
|
||||||
|
// path is path, or nil if no such SSA package has been created.
|
||||||
|
//
|
||||||
|
// Not all packages are importable. For example, no import
|
||||||
|
// declaration can resolve to the x_test package created by 'go test'
|
||||||
|
// or the ad-hoc main package created 'go build foo.go'.
|
||||||
|
//
|
||||||
|
func (prog *Program) ImportedPackage(path string) *Package {
|
||||||
|
return prog.imported[path]
|
||||||
|
}
|
@ -2,6 +2,8 @@
|
|||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build !go1.8
|
||||||
|
|
||||||
// This file contains the infrastructure to create an
|
// This file contains the infrastructure to create an
|
||||||
// identifier and full-text index for a set of Go files.
|
// identifier and full-text index for a set of Go files.
|
||||||
//
|
//
|
||||||
|
1592
godoc/index18.go
Normal file
1592
godoc/index18.go
Normal file
File diff suppressed because it is too large
Load Diff
@ -2,6 +2,8 @@
|
|||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build !go1.8
|
||||||
|
|
||||||
// This file implements LinkifyText which introduces
|
// This file implements LinkifyText which introduces
|
||||||
// links for identifiers pointing to their declarations.
|
// links for identifiers pointing to their declarations.
|
||||||
// The approach does not cover all cases because godoc
|
// The approach does not cover all cases because godoc
|
||||||
|
236
godoc/linkify18.go
Normal file
236
godoc/linkify18.go
Normal file
@ -0,0 +1,236 @@
|
|||||||
|
// Copyright 2013 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.8
|
||||||
|
|
||||||
|
// This file implements LinkifyText which introduces
|
||||||
|
// links for identifiers pointing to their declarations.
|
||||||
|
// The approach does not cover all cases because godoc
|
||||||
|
// doesn't have complete type information, but it's
|
||||||
|
// reasonably good for browsing.
|
||||||
|
|
||||||
|
package godoc
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/token"
|
||||||
|
"io"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
// LinkifyText HTML-escapes source text and writes it to w.
|
||||||
|
// Identifiers that are in a "use" position (i.e., that are
|
||||||
|
// not being declared), are wrapped with HTML links pointing
|
||||||
|
// to the respective declaration, if possible. Comments are
|
||||||
|
// formatted the same way as with FormatText.
|
||||||
|
//
|
||||||
|
func LinkifyText(w io.Writer, text []byte, n ast.Node) {
|
||||||
|
links := linksFor(n)
|
||||||
|
|
||||||
|
i := 0 // links index
|
||||||
|
prev := "" // prev HTML tag
|
||||||
|
linkWriter := func(w io.Writer, _ int, start bool) {
|
||||||
|
// end tag
|
||||||
|
if !start {
|
||||||
|
if prev != "" {
|
||||||
|
fmt.Fprintf(w, `</%s>`, prev)
|
||||||
|
prev = ""
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// start tag
|
||||||
|
prev = ""
|
||||||
|
if i < len(links) {
|
||||||
|
switch info := links[i]; {
|
||||||
|
case info.path != "" && info.name == "":
|
||||||
|
// package path
|
||||||
|
fmt.Fprintf(w, `<a href="/pkg/%s/">`, info.path)
|
||||||
|
prev = "a"
|
||||||
|
case info.path != "" && info.name != "":
|
||||||
|
// qualified identifier
|
||||||
|
fmt.Fprintf(w, `<a href="/pkg/%s/#%s">`, info.path, info.name)
|
||||||
|
prev = "a"
|
||||||
|
case info.path == "" && info.name != "":
|
||||||
|
// local identifier
|
||||||
|
if info.mode == identVal {
|
||||||
|
fmt.Fprintf(w, `<span id="%s">`, info.name)
|
||||||
|
prev = "span"
|
||||||
|
} else if ast.IsExported(info.name) {
|
||||||
|
fmt.Fprintf(w, `<a href="#%s">`, info.name)
|
||||||
|
prev = "a"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
idents := tokenSelection(text, token.IDENT)
|
||||||
|
comments := tokenSelection(text, token.COMMENT)
|
||||||
|
FormatSelections(w, text, linkWriter, idents, selectionTag, comments)
|
||||||
|
}
|
||||||
|
|
||||||
|
// A link describes the (HTML) link information for an identifier.
|
||||||
|
// The zero value of a link represents "no link".
|
||||||
|
//
|
||||||
|
type link struct {
|
||||||
|
mode identMode
|
||||||
|
path, name string // package path, identifier name
|
||||||
|
}
|
||||||
|
|
||||||
|
// linksFor returns the list of links for the identifiers used
|
||||||
|
// by node in the same order as they appear in the source.
|
||||||
|
//
|
||||||
|
func linksFor(node ast.Node) (list []link) {
|
||||||
|
modes := identModesFor(node)
|
||||||
|
|
||||||
|
// NOTE: We are expecting ast.Inspect to call the
|
||||||
|
// callback function in source text order.
|
||||||
|
ast.Inspect(node, func(node ast.Node) bool {
|
||||||
|
switch n := node.(type) {
|
||||||
|
case *ast.Ident:
|
||||||
|
m := modes[n]
|
||||||
|
info := link{mode: m}
|
||||||
|
switch m {
|
||||||
|
case identUse:
|
||||||
|
if n.Obj == nil && predeclared[n.Name] {
|
||||||
|
info.path = builtinPkgPath
|
||||||
|
}
|
||||||
|
info.name = n.Name
|
||||||
|
case identDef:
|
||||||
|
// any declaration expect const or var - empty link
|
||||||
|
case identVal:
|
||||||
|
// const or var declaration
|
||||||
|
info.name = n.Name
|
||||||
|
}
|
||||||
|
list = append(list, info)
|
||||||
|
return false
|
||||||
|
case *ast.SelectorExpr:
|
||||||
|
// Detect qualified identifiers of the form pkg.ident.
|
||||||
|
// If anything fails we return true and collect individual
|
||||||
|
// identifiers instead.
|
||||||
|
if x, _ := n.X.(*ast.Ident); x != nil {
|
||||||
|
// x must be a package for a qualified identifier
|
||||||
|
if obj := x.Obj; obj != nil && obj.Kind == ast.Pkg {
|
||||||
|
if spec, _ := obj.Decl.(*ast.ImportSpec); spec != nil {
|
||||||
|
// spec.Path.Value is the import path
|
||||||
|
if path, err := strconv.Unquote(spec.Path.Value); err == nil {
|
||||||
|
// Register two links, one for the package
|
||||||
|
// and one for the qualified identifier.
|
||||||
|
info := link{path: path}
|
||||||
|
list = append(list, info)
|
||||||
|
info.name = n.Sel.Name
|
||||||
|
list = append(list, info)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// The identMode describes how an identifier is "used" at its source location.
|
||||||
|
type identMode int
|
||||||
|
|
||||||
|
const (
|
||||||
|
identUse identMode = iota // identifier is used (must be zero value for identMode)
|
||||||
|
identDef // identifier is defined
|
||||||
|
identVal // identifier is defined in a const or var declaration
|
||||||
|
)
|
||||||
|
|
||||||
|
// identModesFor returns a map providing the identMode for each identifier used by node.
|
||||||
|
func identModesFor(node ast.Node) map[*ast.Ident]identMode {
|
||||||
|
m := make(map[*ast.Ident]identMode)
|
||||||
|
|
||||||
|
ast.Inspect(node, func(node ast.Node) bool {
|
||||||
|
switch n := node.(type) {
|
||||||
|
case *ast.Field:
|
||||||
|
for _, n := range n.Names {
|
||||||
|
m[n] = identDef
|
||||||
|
}
|
||||||
|
case *ast.ImportSpec:
|
||||||
|
if name := n.Name; name != nil {
|
||||||
|
m[name] = identDef
|
||||||
|
}
|
||||||
|
case *ast.ValueSpec:
|
||||||
|
for _, n := range n.Names {
|
||||||
|
m[n] = identVal
|
||||||
|
}
|
||||||
|
case *ast.TypeSpec:
|
||||||
|
m[n.Name] = identDef
|
||||||
|
case *ast.FuncDecl:
|
||||||
|
m[n.Name] = identDef
|
||||||
|
case *ast.AssignStmt:
|
||||||
|
// Short variable declarations only show up if we apply
|
||||||
|
// this code to all source code (as opposed to exported
|
||||||
|
// declarations only).
|
||||||
|
if n.Tok == token.DEFINE {
|
||||||
|
// Some of the lhs variables may be re-declared,
|
||||||
|
// so technically they are not defs. We don't
|
||||||
|
// care for now.
|
||||||
|
for _, x := range n.Lhs {
|
||||||
|
// Each lhs expression should be an
|
||||||
|
// ident, but we are conservative and check.
|
||||||
|
if n, _ := x.(*ast.Ident); n != nil {
|
||||||
|
m[n] = identVal
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
|
||||||
|
return m
|
||||||
|
}
|
||||||
|
|
||||||
|
// The predeclared map represents the set of all predeclared identifiers.
|
||||||
|
// TODO(gri) This information is also encoded in similar maps in go/doc,
|
||||||
|
// but not exported. Consider exporting an accessor and using
|
||||||
|
// it instead.
|
||||||
|
var predeclared = map[string]bool{
|
||||||
|
"bool": true,
|
||||||
|
"byte": true,
|
||||||
|
"complex64": true,
|
||||||
|
"complex128": true,
|
||||||
|
"error": true,
|
||||||
|
"float32": true,
|
||||||
|
"float64": true,
|
||||||
|
"int": true,
|
||||||
|
"int8": true,
|
||||||
|
"int16": true,
|
||||||
|
"int32": true,
|
||||||
|
"int64": true,
|
||||||
|
"rune": true,
|
||||||
|
"string": true,
|
||||||
|
"uint": true,
|
||||||
|
"uint8": true,
|
||||||
|
"uint16": true,
|
||||||
|
"uint32": true,
|
||||||
|
"uint64": true,
|
||||||
|
"uintptr": true,
|
||||||
|
"true": true,
|
||||||
|
"false": true,
|
||||||
|
"iota": true,
|
||||||
|
"nil": true,
|
||||||
|
"append": true,
|
||||||
|
"cap": true,
|
||||||
|
"close": true,
|
||||||
|
"complex": true,
|
||||||
|
"copy": true,
|
||||||
|
"delete": true,
|
||||||
|
"imag": true,
|
||||||
|
"len": true,
|
||||||
|
"make": true,
|
||||||
|
"new": true,
|
||||||
|
"panic": true,
|
||||||
|
"print": true,
|
||||||
|
"println": true,
|
||||||
|
"real": true,
|
||||||
|
"recover": true,
|
||||||
|
}
|
@ -2,6 +2,8 @@
|
|||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build !go1.8
|
||||||
|
|
||||||
package godoc
|
package godoc
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
766
godoc/server18.go
Normal file
766
godoc/server18.go
Normal file
@ -0,0 +1,766 @@
|
|||||||
|
// Copyright 2013 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.8
|
||||||
|
|
||||||
|
package godoc
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/build"
|
||||||
|
"go/doc"
|
||||||
|
"go/token"
|
||||||
|
htmlpkg "html"
|
||||||
|
htmltemplate "html/template"
|
||||||
|
"io"
|
||||||
|
"io/ioutil"
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
pathpkg "path"
|
||||||
|
"path/filepath"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
"text/template"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"golang.org/x/tools/godoc/analysis"
|
||||||
|
"golang.org/x/tools/godoc/util"
|
||||||
|
"golang.org/x/tools/godoc/vfs"
|
||||||
|
)
|
||||||
|
|
||||||
|
// handlerServer is a migration from an old godoc http Handler type.
|
||||||
|
// This should probably merge into something else.
|
||||||
|
type handlerServer struct {
|
||||||
|
p *Presentation
|
||||||
|
c *Corpus // copy of p.Corpus
|
||||||
|
pattern string // url pattern; e.g. "/pkg/"
|
||||||
|
stripPrefix string // prefix to strip from import path; e.g. "pkg/"
|
||||||
|
fsRoot string // file system root to which the pattern is mapped; e.g. "/src"
|
||||||
|
exclude []string // file system paths to exclude; e.g. "/src/cmd"
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *handlerServer) registerWithMux(mux *http.ServeMux) {
|
||||||
|
mux.Handle(s.pattern, s)
|
||||||
|
}
|
||||||
|
|
||||||
|
// getPageInfo returns the PageInfo for a package directory abspath. If the
|
||||||
|
// parameter genAST is set, an AST containing only the package exports is
|
||||||
|
// computed (PageInfo.PAst), otherwise package documentation (PageInfo.Doc)
|
||||||
|
// is extracted from the AST. If there is no corresponding package in the
|
||||||
|
// directory, PageInfo.PAst and PageInfo.PDoc are nil. If there are no sub-
|
||||||
|
// directories, PageInfo.Dirs is nil. If an error occurred, PageInfo.Err is
|
||||||
|
// set to the respective error but the error is not logged.
|
||||||
|
//
|
||||||
|
func (h *handlerServer) GetPageInfo(abspath, relpath string, mode PageInfoMode, goos, goarch string) *PageInfo {
|
||||||
|
info := &PageInfo{Dirname: abspath}
|
||||||
|
|
||||||
|
// Restrict to the package files that would be used when building
|
||||||
|
// the package on this system. This makes sure that if there are
|
||||||
|
// separate implementations for, say, Windows vs Unix, we don't
|
||||||
|
// jumble them all together.
|
||||||
|
// Note: If goos/goarch aren't set, the current binary's GOOS/GOARCH
|
||||||
|
// are used.
|
||||||
|
ctxt := build.Default
|
||||||
|
ctxt.IsAbsPath = pathpkg.IsAbs
|
||||||
|
ctxt.ReadDir = func(dir string) ([]os.FileInfo, error) {
|
||||||
|
f, err := h.c.fs.ReadDir(filepath.ToSlash(dir))
|
||||||
|
filtered := make([]os.FileInfo, 0, len(f))
|
||||||
|
for _, i := range f {
|
||||||
|
if mode&NoFiltering != 0 || i.Name() != "internal" {
|
||||||
|
filtered = append(filtered, i)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return filtered, err
|
||||||
|
}
|
||||||
|
ctxt.OpenFile = func(name string) (r io.ReadCloser, err error) {
|
||||||
|
data, err := vfs.ReadFile(h.c.fs, filepath.ToSlash(name))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return ioutil.NopCloser(bytes.NewReader(data)), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if goos != "" {
|
||||||
|
ctxt.GOOS = goos
|
||||||
|
}
|
||||||
|
if goarch != "" {
|
||||||
|
ctxt.GOARCH = goarch
|
||||||
|
}
|
||||||
|
|
||||||
|
pkginfo, err := ctxt.ImportDir(abspath, 0)
|
||||||
|
// continue if there are no Go source files; we still want the directory info
|
||||||
|
if _, nogo := err.(*build.NoGoError); err != nil && !nogo {
|
||||||
|
info.Err = err
|
||||||
|
return info
|
||||||
|
}
|
||||||
|
|
||||||
|
// collect package files
|
||||||
|
pkgname := pkginfo.Name
|
||||||
|
pkgfiles := append(pkginfo.GoFiles, pkginfo.CgoFiles...)
|
||||||
|
if len(pkgfiles) == 0 {
|
||||||
|
// Commands written in C have no .go files in the build.
|
||||||
|
// Instead, documentation may be found in an ignored file.
|
||||||
|
// The file may be ignored via an explicit +build ignore
|
||||||
|
// constraint (recommended), or by defining the package
|
||||||
|
// documentation (historic).
|
||||||
|
pkgname = "main" // assume package main since pkginfo.Name == ""
|
||||||
|
pkgfiles = pkginfo.IgnoredGoFiles
|
||||||
|
}
|
||||||
|
|
||||||
|
// get package information, if any
|
||||||
|
if len(pkgfiles) > 0 {
|
||||||
|
// build package AST
|
||||||
|
fset := token.NewFileSet()
|
||||||
|
files, err := h.c.parseFiles(fset, relpath, abspath, pkgfiles)
|
||||||
|
if err != nil {
|
||||||
|
info.Err = err
|
||||||
|
return info
|
||||||
|
}
|
||||||
|
|
||||||
|
// ignore any errors - they are due to unresolved identifiers
|
||||||
|
pkg, _ := ast.NewPackage(fset, files, poorMansImporter, nil)
|
||||||
|
|
||||||
|
// extract package documentation
|
||||||
|
info.FSet = fset
|
||||||
|
if mode&ShowSource == 0 {
|
||||||
|
// show extracted documentation
|
||||||
|
var m doc.Mode
|
||||||
|
if mode&NoFiltering != 0 {
|
||||||
|
m |= doc.AllDecls
|
||||||
|
}
|
||||||
|
if mode&AllMethods != 0 {
|
||||||
|
m |= doc.AllMethods
|
||||||
|
}
|
||||||
|
info.PDoc = doc.New(pkg, pathpkg.Clean(relpath), m) // no trailing '/' in importpath
|
||||||
|
if mode&NoTypeAssoc != 0 {
|
||||||
|
for _, t := range info.PDoc.Types {
|
||||||
|
info.PDoc.Consts = append(info.PDoc.Consts, t.Consts...)
|
||||||
|
info.PDoc.Vars = append(info.PDoc.Vars, t.Vars...)
|
||||||
|
info.PDoc.Funcs = append(info.PDoc.Funcs, t.Funcs...)
|
||||||
|
t.Consts = nil
|
||||||
|
t.Vars = nil
|
||||||
|
t.Funcs = nil
|
||||||
|
}
|
||||||
|
// for now we cannot easily sort consts and vars since
|
||||||
|
// go/doc.Value doesn't export the order information
|
||||||
|
sort.Sort(funcsByName(info.PDoc.Funcs))
|
||||||
|
}
|
||||||
|
|
||||||
|
// collect examples
|
||||||
|
testfiles := append(pkginfo.TestGoFiles, pkginfo.XTestGoFiles...)
|
||||||
|
files, err = h.c.parseFiles(fset, relpath, abspath, testfiles)
|
||||||
|
if err != nil {
|
||||||
|
log.Println("parsing examples:", err)
|
||||||
|
}
|
||||||
|
info.Examples = collectExamples(h.c, pkg, files)
|
||||||
|
|
||||||
|
// collect any notes that we want to show
|
||||||
|
if info.PDoc.Notes != nil {
|
||||||
|
// could regexp.Compile only once per godoc, but probably not worth it
|
||||||
|
if rx := h.p.NotesRx; rx != nil {
|
||||||
|
for m, n := range info.PDoc.Notes {
|
||||||
|
if rx.MatchString(m) {
|
||||||
|
if info.Notes == nil {
|
||||||
|
info.Notes = make(map[string][]*doc.Note)
|
||||||
|
}
|
||||||
|
info.Notes[m] = n
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
} else {
|
||||||
|
// show source code
|
||||||
|
// TODO(gri) Consider eliminating export filtering in this mode,
|
||||||
|
// or perhaps eliminating the mode altogether.
|
||||||
|
if mode&NoFiltering == 0 {
|
||||||
|
packageExports(fset, pkg)
|
||||||
|
}
|
||||||
|
info.PAst = files
|
||||||
|
}
|
||||||
|
info.IsMain = pkgname == "main"
|
||||||
|
}
|
||||||
|
|
||||||
|
// get directory information, if any
|
||||||
|
var dir *Directory
|
||||||
|
var timestamp time.Time
|
||||||
|
if tree, ts := h.c.fsTree.Get(); tree != nil && tree.(*Directory) != nil {
|
||||||
|
// directory tree is present; lookup respective directory
|
||||||
|
// (may still fail if the file system was updated and the
|
||||||
|
// new directory tree has not yet been computed)
|
||||||
|
dir = tree.(*Directory).lookup(abspath)
|
||||||
|
timestamp = ts
|
||||||
|
}
|
||||||
|
if dir == nil {
|
||||||
|
// no directory tree present (too early after startup or
|
||||||
|
// command-line mode); compute one level for this page
|
||||||
|
// note: cannot use path filter here because in general
|
||||||
|
// it doesn't contain the FSTree path
|
||||||
|
dir = h.c.newDirectory(abspath, 1)
|
||||||
|
timestamp = time.Now()
|
||||||
|
}
|
||||||
|
info.Dirs = dir.listing(true, func(path string) bool { return h.includePath(path, mode) })
|
||||||
|
info.DirTime = timestamp
|
||||||
|
info.DirFlat = mode&FlatDir != 0
|
||||||
|
|
||||||
|
return info
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *handlerServer) includePath(path string, mode PageInfoMode) (r bool) {
|
||||||
|
// if the path is under one of the exclusion paths, don't list.
|
||||||
|
for _, e := range h.exclude {
|
||||||
|
if strings.HasPrefix(path, e) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// if the path includes 'internal', don't list unless we are in the NoFiltering mode.
|
||||||
|
if mode&NoFiltering != 0 {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if strings.Contains(path, "internal") || strings.Contains(path, "vendor") {
|
||||||
|
for _, c := range strings.Split(filepath.Clean(path), string(os.PathSeparator)) {
|
||||||
|
if c == "internal" || c == "vendor" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
type funcsByName []*doc.Func
|
||||||
|
|
||||||
|
func (s funcsByName) Len() int { return len(s) }
|
||||||
|
func (s funcsByName) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
|
||||||
|
func (s funcsByName) Less(i, j int) bool { return s[i].Name < s[j].Name }
|
||||||
|
|
||||||
|
func (h *handlerServer) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if redirect(w, r) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
relpath := pathpkg.Clean(r.URL.Path[len(h.stripPrefix)+1:])
|
||||||
|
abspath := pathpkg.Join(h.fsRoot, relpath)
|
||||||
|
mode := h.p.GetPageInfoMode(r)
|
||||||
|
if relpath == builtinPkgPath {
|
||||||
|
mode = NoFiltering | NoTypeAssoc
|
||||||
|
}
|
||||||
|
info := h.GetPageInfo(abspath, relpath, mode, r.FormValue("GOOS"), r.FormValue("GOARCH"))
|
||||||
|
if info.Err != nil {
|
||||||
|
log.Print(info.Err)
|
||||||
|
h.p.ServeError(w, r, relpath, info.Err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if mode&NoHTML != 0 {
|
||||||
|
h.p.ServeText(w, applyTemplate(h.p.PackageText, "packageText", info))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var tabtitle, title, subtitle string
|
||||||
|
switch {
|
||||||
|
case info.PAst != nil:
|
||||||
|
for _, ast := range info.PAst {
|
||||||
|
tabtitle = ast.Name.Name
|
||||||
|
break
|
||||||
|
}
|
||||||
|
case info.PDoc != nil:
|
||||||
|
tabtitle = info.PDoc.Name
|
||||||
|
default:
|
||||||
|
tabtitle = info.Dirname
|
||||||
|
title = "Directory "
|
||||||
|
if h.p.ShowTimestamps {
|
||||||
|
subtitle = "Last update: " + info.DirTime.String()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if title == "" {
|
||||||
|
if info.IsMain {
|
||||||
|
// assume that the directory name is the command name
|
||||||
|
_, tabtitle = pathpkg.Split(relpath)
|
||||||
|
title = "Command "
|
||||||
|
} else {
|
||||||
|
title = "Package "
|
||||||
|
}
|
||||||
|
}
|
||||||
|
title += tabtitle
|
||||||
|
|
||||||
|
// special cases for top-level package/command directories
|
||||||
|
switch tabtitle {
|
||||||
|
case "/src":
|
||||||
|
title = "Packages"
|
||||||
|
tabtitle = "Packages"
|
||||||
|
case "/src/cmd":
|
||||||
|
title = "Commands"
|
||||||
|
tabtitle = "Commands"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Emit JSON array for type information.
|
||||||
|
pi := h.c.Analysis.PackageInfo(relpath)
|
||||||
|
info.CallGraphIndex = pi.CallGraphIndex
|
||||||
|
info.CallGraph = htmltemplate.JS(marshalJSON(pi.CallGraph))
|
||||||
|
info.AnalysisData = htmltemplate.JS(marshalJSON(pi.Types))
|
||||||
|
info.TypeInfoIndex = make(map[string]int)
|
||||||
|
for i, ti := range pi.Types {
|
||||||
|
info.TypeInfoIndex[ti.Name] = i
|
||||||
|
}
|
||||||
|
|
||||||
|
info.Share = allowShare(r)
|
||||||
|
h.p.ServePage(w, Page{
|
||||||
|
Title: title,
|
||||||
|
Tabtitle: tabtitle,
|
||||||
|
Subtitle: subtitle,
|
||||||
|
Body: applyTemplate(h.p.PackageHTML, "packageHTML", info),
|
||||||
|
Share: info.Share,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
type PageInfoMode uint
|
||||||
|
|
||||||
|
const (
|
||||||
|
NoFiltering PageInfoMode = 1 << iota // do not filter exports
|
||||||
|
AllMethods // show all embedded methods
|
||||||
|
ShowSource // show source code, do not extract documentation
|
||||||
|
NoHTML // show result in textual form, do not generate HTML
|
||||||
|
FlatDir // show directory in a flat (non-indented) manner
|
||||||
|
NoTypeAssoc // don't associate consts, vars, and factory functions with types
|
||||||
|
)
|
||||||
|
|
||||||
|
// modeNames defines names for each PageInfoMode flag.
|
||||||
|
var modeNames = map[string]PageInfoMode{
|
||||||
|
"all": NoFiltering,
|
||||||
|
"methods": AllMethods,
|
||||||
|
"src": ShowSource,
|
||||||
|
"text": NoHTML,
|
||||||
|
"flat": FlatDir,
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPageInfoMode computes the PageInfoMode flags by analyzing the request
|
||||||
|
// URL form value "m". It is value is a comma-separated list of mode names
|
||||||
|
// as defined by modeNames (e.g.: m=src,text).
|
||||||
|
func (p *Presentation) GetPageInfoMode(r *http.Request) PageInfoMode {
|
||||||
|
var mode PageInfoMode
|
||||||
|
for _, k := range strings.Split(r.FormValue("m"), ",") {
|
||||||
|
if m, found := modeNames[strings.TrimSpace(k)]; found {
|
||||||
|
mode |= m
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if p.AdjustPageInfoMode != nil {
|
||||||
|
mode = p.AdjustPageInfoMode(r, mode)
|
||||||
|
}
|
||||||
|
return mode
|
||||||
|
}
|
||||||
|
|
||||||
|
// poorMansImporter returns a (dummy) package object named
|
||||||
|
// by the last path component of the provided package path
|
||||||
|
// (as is the convention for packages). This is sufficient
|
||||||
|
// to resolve package identifiers without doing an actual
|
||||||
|
// import. It never returns an error.
|
||||||
|
//
|
||||||
|
func poorMansImporter(imports map[string]*ast.Object, path string) (*ast.Object, error) {
|
||||||
|
pkg := imports[path]
|
||||||
|
if pkg == nil {
|
||||||
|
// note that strings.LastIndex returns -1 if there is no "/"
|
||||||
|
pkg = ast.NewObj(ast.Pkg, path[strings.LastIndex(path, "/")+1:])
|
||||||
|
pkg.Data = ast.NewScope(nil) // required by ast.NewPackage for dot-import
|
||||||
|
imports[path] = pkg
|
||||||
|
}
|
||||||
|
return pkg, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// globalNames returns a set of the names declared by all package-level
|
||||||
|
// declarations. Method names are returned in the form Receiver_Method.
|
||||||
|
func globalNames(pkg *ast.Package) map[string]bool {
|
||||||
|
names := make(map[string]bool)
|
||||||
|
for _, file := range pkg.Files {
|
||||||
|
for _, decl := range file.Decls {
|
||||||
|
addNames(names, decl)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return names
|
||||||
|
}
|
||||||
|
|
||||||
|
// collectExamples collects examples for pkg from testfiles.
|
||||||
|
func collectExamples(c *Corpus, pkg *ast.Package, testfiles map[string]*ast.File) []*doc.Example {
|
||||||
|
var files []*ast.File
|
||||||
|
for _, f := range testfiles {
|
||||||
|
files = append(files, f)
|
||||||
|
}
|
||||||
|
|
||||||
|
var examples []*doc.Example
|
||||||
|
globals := globalNames(pkg)
|
||||||
|
for _, e := range doc.Examples(files...) {
|
||||||
|
name := stripExampleSuffix(e.Name)
|
||||||
|
if name == "" || globals[name] {
|
||||||
|
examples = append(examples, e)
|
||||||
|
} else if c.Verbose {
|
||||||
|
log.Printf("skipping example 'Example%s' because '%s' is not a known function or type", e.Name, e.Name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return examples
|
||||||
|
}
|
||||||
|
|
||||||
|
// addNames adds the names declared by decl to the names set.
|
||||||
|
// Method names are added in the form ReceiverTypeName_Method.
|
||||||
|
func addNames(names map[string]bool, decl ast.Decl) {
|
||||||
|
switch d := decl.(type) {
|
||||||
|
case *ast.FuncDecl:
|
||||||
|
name := d.Name.Name
|
||||||
|
if d.Recv != nil {
|
||||||
|
var typeName string
|
||||||
|
switch r := d.Recv.List[0].Type.(type) {
|
||||||
|
case *ast.StarExpr:
|
||||||
|
typeName = r.X.(*ast.Ident).Name
|
||||||
|
case *ast.Ident:
|
||||||
|
typeName = r.Name
|
||||||
|
}
|
||||||
|
name = typeName + "_" + name
|
||||||
|
}
|
||||||
|
names[name] = true
|
||||||
|
case *ast.GenDecl:
|
||||||
|
for _, spec := range d.Specs {
|
||||||
|
switch s := spec.(type) {
|
||||||
|
case *ast.TypeSpec:
|
||||||
|
names[s.Name.Name] = true
|
||||||
|
case *ast.ValueSpec:
|
||||||
|
for _, id := range s.Names {
|
||||||
|
names[id.Name] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// packageExports is a local implementation of ast.PackageExports
|
||||||
|
// which correctly updates each package file's comment list.
|
||||||
|
// (The ast.PackageExports signature is frozen, hence the local
|
||||||
|
// implementation).
|
||||||
|
//
|
||||||
|
func packageExports(fset *token.FileSet, pkg *ast.Package) {
|
||||||
|
for _, src := range pkg.Files {
|
||||||
|
cmap := ast.NewCommentMap(fset, src, src.Comments)
|
||||||
|
ast.FileExports(src)
|
||||||
|
src.Comments = cmap.Filter(src).Comments()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func applyTemplate(t *template.Template, name string, data interface{}) []byte {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
if err := t.Execute(&buf, data); err != nil {
|
||||||
|
log.Printf("%s.Execute: %s", name, err)
|
||||||
|
}
|
||||||
|
return buf.Bytes()
|
||||||
|
}
|
||||||
|
|
||||||
|
type writerCapturesErr struct {
|
||||||
|
w io.Writer
|
||||||
|
err error
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *writerCapturesErr) Write(p []byte) (int, error) {
|
||||||
|
n, err := w.w.Write(p)
|
||||||
|
if err != nil {
|
||||||
|
w.err = err
|
||||||
|
}
|
||||||
|
return n, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// applyTemplateToResponseWriter uses an http.ResponseWriter as the io.Writer
|
||||||
|
// for the call to template.Execute. It uses an io.Writer wrapper to capture
|
||||||
|
// errors from the underlying http.ResponseWriter. Errors are logged only when
|
||||||
|
// they come from the template processing and not the Writer; this avoid
|
||||||
|
// polluting log files with error messages due to networking issues, such as
|
||||||
|
// client disconnects and http HEAD protocol violations.
|
||||||
|
func applyTemplateToResponseWriter(rw http.ResponseWriter, t *template.Template, data interface{}) {
|
||||||
|
w := &writerCapturesErr{w: rw}
|
||||||
|
err := t.Execute(w, data)
|
||||||
|
// There are some cases where template.Execute does not return an error when
|
||||||
|
// rw returns an error, and some where it does. So check w.err first.
|
||||||
|
if w.err == nil && err != nil {
|
||||||
|
// Log template errors.
|
||||||
|
log.Printf("%s.Execute: %s", t.Name(), err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func redirect(w http.ResponseWriter, r *http.Request) (redirected bool) {
|
||||||
|
canonical := pathpkg.Clean(r.URL.Path)
|
||||||
|
if !strings.HasSuffix(canonical, "/") {
|
||||||
|
canonical += "/"
|
||||||
|
}
|
||||||
|
if r.URL.Path != canonical {
|
||||||
|
url := *r.URL
|
||||||
|
url.Path = canonical
|
||||||
|
http.Redirect(w, r, url.String(), http.StatusMovedPermanently)
|
||||||
|
redirected = true
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
func redirectFile(w http.ResponseWriter, r *http.Request) (redirected bool) {
|
||||||
|
c := pathpkg.Clean(r.URL.Path)
|
||||||
|
c = strings.TrimRight(c, "/")
|
||||||
|
if r.URL.Path != c {
|
||||||
|
url := *r.URL
|
||||||
|
url.Path = c
|
||||||
|
http.Redirect(w, r, url.String(), http.StatusMovedPermanently)
|
||||||
|
redirected = true
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Presentation) serveTextFile(w http.ResponseWriter, r *http.Request, abspath, relpath, title string) {
|
||||||
|
src, err := vfs.ReadFile(p.Corpus.fs, abspath)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("ReadFile: %s", err)
|
||||||
|
p.ServeError(w, r, relpath, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if r.FormValue("m") == "text" {
|
||||||
|
p.ServeText(w, src)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
h := r.FormValue("h")
|
||||||
|
s := RangeSelection(r.FormValue("s"))
|
||||||
|
|
||||||
|
var buf bytes.Buffer
|
||||||
|
if pathpkg.Ext(abspath) == ".go" {
|
||||||
|
// Find markup links for this file (e.g. "/src/fmt/print.go").
|
||||||
|
fi := p.Corpus.Analysis.FileInfo(abspath)
|
||||||
|
buf.WriteString("<script type='text/javascript'>document.ANALYSIS_DATA = ")
|
||||||
|
buf.Write(marshalJSON(fi.Data))
|
||||||
|
buf.WriteString(";</script>\n")
|
||||||
|
|
||||||
|
if status := p.Corpus.Analysis.Status(); status != "" {
|
||||||
|
buf.WriteString("<a href='/lib/godoc/analysis/help.html'>Static analysis features</a> ")
|
||||||
|
// TODO(adonovan): show analysis status at per-file granularity.
|
||||||
|
fmt.Fprintf(&buf, "<span style='color: grey'>[%s]</span><br/>", htmlpkg.EscapeString(status))
|
||||||
|
}
|
||||||
|
|
||||||
|
buf.WriteString("<pre>")
|
||||||
|
formatGoSource(&buf, src, fi.Links, h, s)
|
||||||
|
buf.WriteString("</pre>")
|
||||||
|
} else {
|
||||||
|
buf.WriteString("<pre>")
|
||||||
|
FormatText(&buf, src, 1, false, h, s)
|
||||||
|
buf.WriteString("</pre>")
|
||||||
|
}
|
||||||
|
fmt.Fprintf(&buf, `<p><a href="/%s?m=text">View as plain text</a></p>`, htmlpkg.EscapeString(relpath))
|
||||||
|
|
||||||
|
p.ServePage(w, Page{
|
||||||
|
Title: title + " " + relpath,
|
||||||
|
Tabtitle: relpath,
|
||||||
|
Body: buf.Bytes(),
|
||||||
|
Share: allowShare(r),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// formatGoSource HTML-escapes Go source text and writes it to w,
|
||||||
|
// decorating it with the specified analysis links.
|
||||||
|
//
|
||||||
|
func formatGoSource(buf *bytes.Buffer, text []byte, links []analysis.Link, pattern string, selection Selection) {
|
||||||
|
// Emit to a temp buffer so that we can add line anchors at the end.
|
||||||
|
saved, buf := buf, new(bytes.Buffer)
|
||||||
|
|
||||||
|
var i int
|
||||||
|
var link analysis.Link // shared state of the two funcs below
|
||||||
|
segmentIter := func() (seg Segment) {
|
||||||
|
if i < len(links) {
|
||||||
|
link = links[i]
|
||||||
|
i++
|
||||||
|
seg = Segment{link.Start(), link.End()}
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
linkWriter := func(w io.Writer, offs int, start bool) {
|
||||||
|
link.Write(w, offs, start)
|
||||||
|
}
|
||||||
|
|
||||||
|
comments := tokenSelection(text, token.COMMENT)
|
||||||
|
var highlights Selection
|
||||||
|
if pattern != "" {
|
||||||
|
highlights = regexpSelection(text, pattern)
|
||||||
|
}
|
||||||
|
|
||||||
|
FormatSelections(buf, text, linkWriter, segmentIter, selectionTag, comments, highlights, selection)
|
||||||
|
|
||||||
|
// Now copy buf to saved, adding line anchors.
|
||||||
|
|
||||||
|
// The lineSelection mechanism can't be composed with our
|
||||||
|
// linkWriter, so we have to add line spans as another pass.
|
||||||
|
n := 1
|
||||||
|
for _, line := range bytes.Split(buf.Bytes(), []byte("\n")) {
|
||||||
|
fmt.Fprintf(saved, "<span id=\"L%d\" class=\"ln\">%6d</span>\t", n, n)
|
||||||
|
n++
|
||||||
|
saved.Write(line)
|
||||||
|
saved.WriteByte('\n')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Presentation) serveDirectory(w http.ResponseWriter, r *http.Request, abspath, relpath string) {
|
||||||
|
if redirect(w, r) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
list, err := p.Corpus.fs.ReadDir(abspath)
|
||||||
|
if err != nil {
|
||||||
|
p.ServeError(w, r, relpath, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
p.ServePage(w, Page{
|
||||||
|
Title: "Directory " + relpath,
|
||||||
|
Tabtitle: relpath,
|
||||||
|
Body: applyTemplate(p.DirlistHTML, "dirlistHTML", list),
|
||||||
|
Share: allowShare(r),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Presentation) ServeHTMLDoc(w http.ResponseWriter, r *http.Request, abspath, relpath string) {
|
||||||
|
// get HTML body contents
|
||||||
|
src, err := vfs.ReadFile(p.Corpus.fs, abspath)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("ReadFile: %s", err)
|
||||||
|
p.ServeError(w, r, relpath, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// if it begins with "<!DOCTYPE " assume it is standalone
|
||||||
|
// html that doesn't need the template wrapping.
|
||||||
|
if bytes.HasPrefix(src, doctype) {
|
||||||
|
w.Write(src)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// if it begins with a JSON blob, read in the metadata.
|
||||||
|
meta, src, err := extractMetadata(src)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("decoding metadata %s: %v", relpath, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
page := Page{
|
||||||
|
Title: meta.Title,
|
||||||
|
Subtitle: meta.Subtitle,
|
||||||
|
Share: allowShare(r),
|
||||||
|
}
|
||||||
|
|
||||||
|
// evaluate as template if indicated
|
||||||
|
if meta.Template {
|
||||||
|
tmpl, err := template.New("main").Funcs(p.TemplateFuncs()).Parse(string(src))
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("parsing template %s: %v", relpath, err)
|
||||||
|
p.ServeError(w, r, relpath, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
var buf bytes.Buffer
|
||||||
|
if err := tmpl.Execute(&buf, page); err != nil {
|
||||||
|
log.Printf("executing template %s: %v", relpath, err)
|
||||||
|
p.ServeError(w, r, relpath, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
src = buf.Bytes()
|
||||||
|
}
|
||||||
|
|
||||||
|
// if it's the language spec, add tags to EBNF productions
|
||||||
|
if strings.HasSuffix(abspath, "go_spec.html") {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
Linkify(&buf, src)
|
||||||
|
src = buf.Bytes()
|
||||||
|
}
|
||||||
|
|
||||||
|
page.Body = src
|
||||||
|
p.ServePage(w, page)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Presentation) ServeFile(w http.ResponseWriter, r *http.Request) {
|
||||||
|
p.serveFile(w, r)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Presentation) serveFile(w http.ResponseWriter, r *http.Request) {
|
||||||
|
relpath := r.URL.Path
|
||||||
|
|
||||||
|
// Check to see if we need to redirect or serve another file.
|
||||||
|
if m := p.Corpus.MetadataFor(relpath); m != nil {
|
||||||
|
if m.Path != relpath {
|
||||||
|
// Redirect to canonical path.
|
||||||
|
http.Redirect(w, r, m.Path, http.StatusMovedPermanently)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// Serve from the actual filesystem path.
|
||||||
|
relpath = m.filePath
|
||||||
|
}
|
||||||
|
|
||||||
|
abspath := relpath
|
||||||
|
relpath = relpath[1:] // strip leading slash
|
||||||
|
|
||||||
|
switch pathpkg.Ext(relpath) {
|
||||||
|
case ".html":
|
||||||
|
if strings.HasSuffix(relpath, "/index.html") {
|
||||||
|
// We'll show index.html for the directory.
|
||||||
|
// Use the dir/ version as canonical instead of dir/index.html.
|
||||||
|
http.Redirect(w, r, r.URL.Path[0:len(r.URL.Path)-len("index.html")], http.StatusMovedPermanently)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
p.ServeHTMLDoc(w, r, abspath, relpath)
|
||||||
|
return
|
||||||
|
|
||||||
|
case ".go":
|
||||||
|
p.serveTextFile(w, r, abspath, relpath, "Source file")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
dir, err := p.Corpus.fs.Lstat(abspath)
|
||||||
|
if err != nil {
|
||||||
|
log.Print(err)
|
||||||
|
p.ServeError(w, r, relpath, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if dir != nil && dir.IsDir() {
|
||||||
|
if redirect(w, r) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if index := pathpkg.Join(abspath, "index.html"); util.IsTextFile(p.Corpus.fs, index) {
|
||||||
|
p.ServeHTMLDoc(w, r, index, index)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
p.serveDirectory(w, r, abspath, relpath)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if util.IsTextFile(p.Corpus.fs, abspath) {
|
||||||
|
if redirectFile(w, r) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
p.serveTextFile(w, r, abspath, relpath, "Text file")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
p.fileServer.ServeHTTP(w, r)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Presentation) ServeText(w http.ResponseWriter, text []byte) {
|
||||||
|
w.Header().Set("Content-Type", "text/plain; charset=utf-8")
|
||||||
|
w.Write(text)
|
||||||
|
}
|
||||||
|
|
||||||
|
func marshalJSON(x interface{}) []byte {
|
||||||
|
var data []byte
|
||||||
|
var err error
|
||||||
|
const indentJSON = false // for easier debugging
|
||||||
|
if indentJSON {
|
||||||
|
data, err = json.MarshalIndent(x, "", " ")
|
||||||
|
} else {
|
||||||
|
data, err = json.Marshal(x)
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
panic(fmt.Sprintf("json.Marshal failed: %s", err))
|
||||||
|
}
|
||||||
|
return data
|
||||||
|
}
|
@ -2,6 +2,8 @@
|
|||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build !go1.8
|
||||||
|
|
||||||
// This file contains the infrastructure to create a code
|
// This file contains the infrastructure to create a code
|
||||||
// snippet for search results.
|
// snippet for search results.
|
||||||
//
|
//
|
||||||
|
125
godoc/snippet18.go
Normal file
125
godoc/snippet18.go
Normal file
@ -0,0 +1,125 @@
|
|||||||
|
// Copyright 2009 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.8
|
||||||
|
|
||||||
|
// This file contains the infrastructure to create a code
|
||||||
|
// snippet for search results.
|
||||||
|
//
|
||||||
|
// Note: At the moment, this only creates HTML snippets.
|
||||||
|
|
||||||
|
package godoc
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/token"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Snippet struct {
|
||||||
|
Line int
|
||||||
|
Text string // HTML-escaped
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Presentation) newSnippet(fset *token.FileSet, decl ast.Decl, id *ast.Ident) *Snippet {
|
||||||
|
// TODO instead of pretty-printing the node, should use the original source instead
|
||||||
|
var buf1 bytes.Buffer
|
||||||
|
p.writeNode(&buf1, fset, decl)
|
||||||
|
// wrap text with <pre> tag
|
||||||
|
var buf2 bytes.Buffer
|
||||||
|
buf2.WriteString("<pre>")
|
||||||
|
FormatText(&buf2, buf1.Bytes(), -1, true, id.Name, nil)
|
||||||
|
buf2.WriteString("</pre>")
|
||||||
|
return &Snippet{fset.Position(id.Pos()).Line, buf2.String()}
|
||||||
|
}
|
||||||
|
|
||||||
|
func findSpec(list []ast.Spec, id *ast.Ident) ast.Spec {
|
||||||
|
for _, spec := range list {
|
||||||
|
switch s := spec.(type) {
|
||||||
|
case *ast.ImportSpec:
|
||||||
|
if s.Name == id {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
case *ast.ValueSpec:
|
||||||
|
for _, n := range s.Names {
|
||||||
|
if n == id {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case *ast.TypeSpec:
|
||||||
|
if s.Name == id {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Presentation) genSnippet(fset *token.FileSet, d *ast.GenDecl, id *ast.Ident) *Snippet {
|
||||||
|
s := findSpec(d.Specs, id)
|
||||||
|
if s == nil {
|
||||||
|
return nil // declaration doesn't contain id - exit gracefully
|
||||||
|
}
|
||||||
|
|
||||||
|
// only use the spec containing the id for the snippet
|
||||||
|
dd := &ast.GenDecl{
|
||||||
|
Doc: d.Doc,
|
||||||
|
TokPos: d.Pos(),
|
||||||
|
Tok: d.Tok,
|
||||||
|
Lparen: d.Lparen,
|
||||||
|
Specs: []ast.Spec{s},
|
||||||
|
Rparen: d.Rparen,
|
||||||
|
}
|
||||||
|
|
||||||
|
return p.newSnippet(fset, dd, id)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Presentation) funcSnippet(fset *token.FileSet, d *ast.FuncDecl, id *ast.Ident) *Snippet {
|
||||||
|
if d.Name != id {
|
||||||
|
return nil // declaration doesn't contain id - exit gracefully
|
||||||
|
}
|
||||||
|
|
||||||
|
// only use the function signature for the snippet
|
||||||
|
dd := &ast.FuncDecl{
|
||||||
|
Doc: d.Doc,
|
||||||
|
Recv: d.Recv,
|
||||||
|
Name: d.Name,
|
||||||
|
Type: d.Type,
|
||||||
|
}
|
||||||
|
|
||||||
|
return p.newSnippet(fset, dd, id)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewSnippet creates a text snippet from a declaration decl containing an
|
||||||
|
// identifier id. Parts of the declaration not containing the identifier
|
||||||
|
// may be removed for a more compact snippet.
|
||||||
|
func NewSnippet(fset *token.FileSet, decl ast.Decl, id *ast.Ident) *Snippet {
|
||||||
|
// TODO(bradfitz, adg): remove this function. But it's used by indexer, which
|
||||||
|
// doesn't have a *Presentation, and NewSnippet needs a TabWidth.
|
||||||
|
var p Presentation
|
||||||
|
p.TabWidth = 4
|
||||||
|
return p.NewSnippet(fset, decl, id)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewSnippet creates a text snippet from a declaration decl containing an
|
||||||
|
// identifier id. Parts of the declaration not containing the identifier
|
||||||
|
// may be removed for a more compact snippet.
|
||||||
|
func (p *Presentation) NewSnippet(fset *token.FileSet, decl ast.Decl, id *ast.Ident) *Snippet {
|
||||||
|
var s *Snippet
|
||||||
|
switch d := decl.(type) {
|
||||||
|
case *ast.GenDecl:
|
||||||
|
s = p.genSnippet(fset, d, id)
|
||||||
|
case *ast.FuncDecl:
|
||||||
|
s = p.funcSnippet(fset, d, id)
|
||||||
|
}
|
||||||
|
|
||||||
|
// handle failure gracefully
|
||||||
|
if s == nil {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
fmt.Fprintf(&buf, `<span class="alert">could not generate a snippet for <span class="highlight">%s</span></span>`, id.Name)
|
||||||
|
s = &Snippet{fset.Position(id.Pos()).Line, buf.String()}
|
||||||
|
}
|
||||||
|
return s
|
||||||
|
}
|
@ -2,6 +2,8 @@
|
|||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build !go1.8
|
||||||
|
|
||||||
package imports
|
package imports
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
977
imports/fix18.go
Normal file
977
imports/fix18.go
Normal file
@ -0,0 +1,977 @@
|
|||||||
|
// Copyright 2013 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.8
|
||||||
|
|
||||||
|
package imports
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/build"
|
||||||
|
"go/parser"
|
||||||
|
"go/token"
|
||||||
|
"io/ioutil"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
"path/filepath"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
|
||||||
|
"golang.org/x/tools/go/ast/astutil"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Debug controls verbose logging.
|
||||||
|
var Debug = false
|
||||||
|
|
||||||
|
var (
|
||||||
|
inTests = false // set true by fix_test.go; if false, no need to use testMu
|
||||||
|
testMu sync.RWMutex // guards globals reset by tests; used only if inTests
|
||||||
|
)
|
||||||
|
|
||||||
|
// If set, LocalPrefix instructs Process to sort import paths with the given
|
||||||
|
// prefix into another group after 3rd-party packages.
|
||||||
|
var LocalPrefix string
|
||||||
|
|
||||||
|
// importToGroup is a list of functions which map from an import path to
|
||||||
|
// a group number.
|
||||||
|
var importToGroup = []func(importPath string) (num int, ok bool){
|
||||||
|
func(importPath string) (num int, ok bool) {
|
||||||
|
if LocalPrefix != "" && strings.HasPrefix(importPath, LocalPrefix) {
|
||||||
|
return 3, true
|
||||||
|
}
|
||||||
|
return
|
||||||
|
},
|
||||||
|
func(importPath string) (num int, ok bool) {
|
||||||
|
if strings.HasPrefix(importPath, "appengine") {
|
||||||
|
return 2, true
|
||||||
|
}
|
||||||
|
return
|
||||||
|
},
|
||||||
|
func(importPath string) (num int, ok bool) {
|
||||||
|
if strings.Contains(importPath, ".") {
|
||||||
|
return 1, true
|
||||||
|
}
|
||||||
|
return
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
func importGroup(importPath string) int {
|
||||||
|
for _, fn := range importToGroup {
|
||||||
|
if n, ok := fn(importPath); ok {
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// packageInfo is a summary of features found in a package.
|
||||||
|
type packageInfo struct {
|
||||||
|
Globals map[string]bool // symbol => true
|
||||||
|
}
|
||||||
|
|
||||||
|
// dirPackageInfo gets information from other files in the package.
// It parses every other .go file in srcDir (including _test.go files only
// when filename itself is a test file) and records the first name of each
// package-level value declaration. Parse errors in sibling files are
// skipped rather than reported.
func dirPackageInfo(srcDir, filename string) (*packageInfo, error) {
	considerTests := strings.HasSuffix(filename, "_test.go")

	// Handle file from stdin: if filename does not exist on disk there
	// are no siblings to inspect, so return an empty (non-nil) summary.
	if _, err := os.Stat(filename); err != nil {
		if os.IsNotExist(err) {
			return &packageInfo{}, nil
		}
		return nil, err
	}

	fileBase := filepath.Base(filename)
	packageFileInfos, err := ioutil.ReadDir(srcDir)
	if err != nil {
		return nil, err
	}

	info := &packageInfo{Globals: make(map[string]bool)}
	for _, fi := range packageFileInfos {
		// Skip the file being fixed itself and anything that is not Go.
		if fi.Name() == fileBase || !strings.HasSuffix(fi.Name(), ".go") {
			continue
		}
		if !considerTests && strings.HasSuffix(fi.Name(), "_test.go") {
			continue
		}

		fileSet := token.NewFileSet()
		root, err := parser.ParseFile(fileSet, filepath.Join(srcDir, fi.Name()), nil, 0)
		if err != nil {
			// Best effort: an unparsable sibling just contributes nothing.
			continue
		}

		for _, decl := range root.Decls {
			genDecl, ok := decl.(*ast.GenDecl)
			if !ok {
				continue
			}

			for _, spec := range genDecl.Specs {
				valueSpec, ok := spec.(*ast.ValueSpec)
				if !ok {
					continue
				}
				// NOTE(review): only the first name of each ValueSpec is
				// recorded (e.g. "a" in "var a, b = ..."); siblings named
				// later in the spec are not registered as globals.
				info.Globals[valueSpec.Names[0].Name] = true
			}
		}
	}
	return info, nil
}
|
||||||
|
|
||||||
|
// fixImports rewrites the import declarations of f so that every
// unresolved package selector (pkg.Sym) is satisfied by an import and
// every import no longer referenced is removed. It returns the import
// paths that were added. Searches for missing imports run concurrently,
// one goroutine per unresolved package name, via findImport.
func fixImports(fset *token.FileSet, f *ast.File, filename string) (added []string, err error) {
	// refs are a set of possible package references currently unsatisfied by imports.
	// first key: either base package (e.g. "fmt") or renamed package
	// second key: referenced package symbol (e.g. "Println")
	refs := make(map[string]map[string]bool)

	// decls are the current package imports. key is base package or renamed package.
	decls := make(map[string]*ast.ImportSpec)

	abs, err := filepath.Abs(filename)
	if err != nil {
		return nil, err
	}
	srcDir := filepath.Dir(abs)
	if Debug {
		log.Printf("fixImports(filename=%q), abs=%q, srcDir=%q ...", filename, abs, srcDir)
	}

	// Sibling-file info is loaded lazily, only once a selector is seen.
	var packageInfo *packageInfo
	var loadedPackageInfo bool

	// collect potential uses of packages.
	var visitor visitFn
	visitor = visitFn(func(node ast.Node) ast.Visitor {
		if node == nil {
			return visitor
		}
		switch v := node.(type) {
		case *ast.ImportSpec:
			if v.Name != nil {
				// Renamed import: keyed under its local name.
				decls[v.Name.Name] = v
				break
			}
			ipath := strings.Trim(v.Path.Value, `"`)
			if ipath == "C" {
				// cgo pseudo-import; never touched.
				break
			}
			local := importPathToName(ipath, srcDir)
			decls[local] = v
		case *ast.SelectorExpr:
			xident, ok := v.X.(*ast.Ident)
			if !ok {
				break
			}
			if xident.Obj != nil {
				// if the parser can resolve it, it's not a package ref
				break
			}
			pkgName := xident.Name
			if refs[pkgName] == nil {
				refs[pkgName] = make(map[string]bool)
			}
			if !loadedPackageInfo {
				loadedPackageInfo = true
				packageInfo, _ = dirPackageInfo(srcDir, filename)
			}
			// Record the symbol only if the name is neither already
			// imported nor a package-level value in a sibling file.
			if decls[pkgName] == nil && (packageInfo == nil || !packageInfo.Globals[pkgName]) {
				refs[pkgName][v.Sel.Name] = true
			}
		}
		return visitor
	})
	ast.Walk(visitor, f)

	// Nil out any unused ImportSpecs, to be removed in following passes
	unusedImport := map[string]string{}
	for pkg, is := range decls {
		if refs[pkg] == nil && pkg != "_" && pkg != "." {
			name := ""
			if is.Name != nil {
				name = is.Name.Name
			}
			unusedImport[strings.Trim(is.Path.Value, `"`)] = name
		}
	}
	for ipath, name := range unusedImport {
		if ipath == "C" {
			// Don't remove cgo stuff.
			continue
		}
		astutil.DeleteNamedImport(fset, f, name, ipath)
	}

	for pkgName, symbols := range refs {
		if len(symbols) == 0 {
			// skip over packages already imported
			delete(refs, pkgName)
		}
	}

	// Search for imports matching potential package references.
	searches := 0
	type result struct {
		ipath string // import path (if err == nil)
		name  string // optional name to rename import as
		err   error
	}
	results := make(chan result)
	for pkgName, symbols := range refs {
		go func(pkgName string, symbols map[string]bool) {
			ipath, rename, err := findImport(pkgName, symbols, filename)
			r := result{ipath: ipath, err: err}
			if rename {
				r.name = pkgName
			}
			results <- r
		}(pkgName, symbols)
		searches++
	}
	// Drain exactly one result per search; the first error wins.
	// NOTE(review): on early error return, the remaining search
	// goroutines block sending on the unbuffered channel — presumably
	// acceptable for a short-lived tool; verify before reuse in a server.
	for i := 0; i < searches; i++ {
		result := <-results
		if result.err != nil {
			return nil, result.err
		}
		if result.ipath != "" {
			if result.name != "" {
				astutil.AddNamedImport(fset, f, result.name, result.ipath)
			} else {
				astutil.AddImport(fset, f, result.ipath)
			}
			added = append(added, result.ipath)
		}
	}

	return added, nil
}
|
||||||
|
|
||||||
|
// importPathToName returns the package name for the given import path.
// It is a variable so tests and alternative toolchains can replace the
// default GOPATH-based implementation.
var importPathToName func(importPath, srcDir string) (packageName string) = importPathToNameGoPath
|
||||||
|
|
||||||
|
// importPathToNameBasic guesses the package name to be the last path
// component of importPath. srcDir is accepted for signature parity with
// the other resolvers but is unused.
func importPathToNameBasic(importPath, srcDir string) (packageName string) {
	packageName = path.Base(importPath)
	return
}
|
||||||
|
|
||||||
|
// importPathToNameGoPath finds out the actual package name, as declared
// in its .go files. If there's a problem, it falls back to using
// importPathToNameBasic (the base of the import path).
func importPathToNameGoPath(importPath, srcDir string) (packageName string) {
	// Fast path for standard library without going to disk.
	if pkg, ok := stdImportPackage[importPath]; ok {
		return pkg
	}

	pkgName, err := importPathToNameGoPathParse(importPath, srcDir)
	if Debug {
		log.Printf("importPathToNameGoPathParse(%q, srcDir=%q) = %q, %v", importPath, srcDir, pkgName, err)
	}
	if err == nil {
		return pkgName
	}
	return importPathToNameBasic(importPath, srcDir)
}
|
||||||
|
|
||||||
|
// importPathToNameGoPathParse is a faster version of build.Import if
// the only thing desired is the package name. It uses build.FindOnly
// to find the directory and then only parses one file in the package,
// trusting that the files in the directory are consistent.
func importPathToNameGoPathParse(importPath, srcDir string) (packageName string, err error) {
	buildPkg, err := build.Import(importPath, srcDir, build.FindOnly)
	if err != nil {
		return "", err
	}
	d, err := os.Open(buildPkg.Dir)
	if err != nil {
		return "", err
	}
	names, err := d.Readdirnames(-1)
	d.Close()
	if err != nil {
		return "", err
	}
	sort.Strings(names) // to have predictable behavior
	var lastErr error
	var nfile int
	for _, name := range names {
		// Only consider non-test Go files.
		if !strings.HasSuffix(name, ".go") {
			continue
		}
		if strings.HasSuffix(name, "_test.go") {
			continue
		}
		nfile++
		fullFile := filepath.Join(buildPkg.Dir, name)

		// PackageClauseOnly stops parsing after "package x", keeping
		// this much cheaper than a full parse.
		fset := token.NewFileSet()
		f, err := parser.ParseFile(fset, fullFile, nil, parser.PackageClauseOnly)
		if err != nil {
			lastErr = err
			continue
		}
		pkgName := f.Name.Name
		if pkgName == "documentation" {
			// Special case from go/build.ImportDir, not
			// handled by ctx.MatchFile.
			continue
		}
		if pkgName == "main" {
			// Also skip package main, assuming it's a +build ignore generator or example.
			// Since you can't import a package main anyway, there's no harm here.
			continue
		}
		return pkgName, nil
	}
	if lastErr != nil {
		return "", lastErr
	}
	return "", fmt.Errorf("no importable package found in %d Go files", nfile)
}
|
||||||
|
|
||||||
|
// stdImportPackage maps a standard-library import path to its package
// name, e.g. "net/http" => "http". Populated once by init from the
// generated stdlib table.
var stdImportPackage = map[string]string{} // "net/http" => "http"

func init() {
	// Nothing in the standard library has a package name not
	// matching its import base name.
	for _, pkg := range stdlib {
		if _, ok := stdImportPackage[pkg]; !ok {
			stdImportPackage[pkg] = path.Base(pkg)
		}
	}
}
|
||||||
|
|
||||||
|
// Directory-scanning state.
|
||||||
|
var (
|
||||||
|
// scanGoRootOnce guards calling scanGoRoot (for $GOROOT)
|
||||||
|
scanGoRootOnce sync.Once
|
||||||
|
// scanGoPathOnce guards calling scanGoPath (for $GOPATH)
|
||||||
|
scanGoPathOnce sync.Once
|
||||||
|
|
||||||
|
// populateIgnoreOnce guards calling populateIgnore
|
||||||
|
populateIgnoreOnce sync.Once
|
||||||
|
ignoredDirs []os.FileInfo
|
||||||
|
|
||||||
|
dirScanMu sync.RWMutex
|
||||||
|
dirScan map[string]*pkg // abs dir path => *pkg
|
||||||
|
)
|
||||||
|
|
||||||
|
type pkg struct {
|
||||||
|
dir string // absolute file path to pkg directory ("/usr/lib/go/src/net/http")
|
||||||
|
importPath string // full pkg import path ("net/http", "foo/bar/vendor/a/b")
|
||||||
|
importPathShort string // vendorless import path ("net/http", "a/b")
|
||||||
|
}
|
||||||
|
|
||||||
|
// byImportPathShortLength sorts by the short import path length, breaking ties on the
|
||||||
|
// import string itself.
|
||||||
|
type byImportPathShortLength []*pkg
|
||||||
|
|
||||||
|
func (s byImportPathShortLength) Len() int { return len(s) }
|
||||||
|
func (s byImportPathShortLength) Less(i, j int) bool {
|
||||||
|
vi, vj := s[i].importPathShort, s[j].importPathShort
|
||||||
|
return len(vi) < len(vj) || (len(vi) == len(vj) && vi < vj)
|
||||||
|
|
||||||
|
}
|
||||||
|
func (s byImportPathShortLength) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
|
||||||
|
|
||||||
|
// gate is a semaphore for limiting concurrency. Filling the buffered
// channel blocks further enters until a corresponding leave.
type gate chan struct{}

func (g gate) enter() { g <- struct{}{} }
func (g gate) leave() { <-g }

// visitedSymlinks records the real paths of symlinked directories the
// scanner has already followed, so symlink cycles terminate. Guarded by
// its embedded mutex; the map is allocated lazily by shouldTraverse.
var visitedSymlinks struct {
	sync.Mutex
	m map[string]struct{}
}
|
||||||
|
|
||||||
|
// populateIgnore loads .goimportsignore files from every $GOPATH src
// directory ($GOROOT is deliberately excluded), filling ignoredDirs.
// Guarded by populateIgnoreOnce.
func populateIgnore() {
	for _, srcDir := range build.Default.SrcDirs() {
		if srcDir == filepath.Join(build.Default.GOROOT, "src") {
			continue
		}
		populateIgnoredDirs(srcDir)
	}
}
|
||||||
|
|
||||||
|
// populateIgnoredDirs reads an optional config file at <path>/.goimportsignore
|
||||||
|
// of relative directories to ignore when scanning for go files.
|
||||||
|
// The provided path is one of the $GOPATH entries with "src" appended.
|
||||||
|
func populateIgnoredDirs(path string) {
|
||||||
|
file := filepath.Join(path, ".goimportsignore")
|
||||||
|
slurp, err := ioutil.ReadFile(file)
|
||||||
|
if Debug {
|
||||||
|
if err != nil {
|
||||||
|
log.Print(err)
|
||||||
|
} else {
|
||||||
|
log.Printf("Read %s", file)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
bs := bufio.NewScanner(bytes.NewReader(slurp))
|
||||||
|
for bs.Scan() {
|
||||||
|
line := strings.TrimSpace(bs.Text())
|
||||||
|
if line == "" || strings.HasPrefix(line, "#") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
full := filepath.Join(path, line)
|
||||||
|
if fi, err := os.Stat(full); err == nil {
|
||||||
|
ignoredDirs = append(ignoredDirs, fi)
|
||||||
|
if Debug {
|
||||||
|
log.Printf("Directory added to ignore list: %s", full)
|
||||||
|
}
|
||||||
|
} else if Debug {
|
||||||
|
log.Printf("Error statting entry in .goimportsignore: %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func skipDir(fi os.FileInfo) bool {
|
||||||
|
for _, ignoredDir := range ignoredDirs {
|
||||||
|
if os.SameFile(fi, ignoredDir) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// shouldTraverse reports whether the symlink fi, found in dir, should
// be followed. It makes sure symlinks were never visited before to
// avoid symlink loops: only symlinks that resolve to a directory and
// whose resolved path has not been seen before are followed.
func shouldTraverse(dir string, fi os.FileInfo) bool {
	path := filepath.Join(dir, fi.Name())
	target, err := filepath.EvalSymlinks(path)
	if err != nil {
		// A dangling symlink is silently skipped; other resolution
		// errors are reported but still not followed.
		if !os.IsNotExist(err) {
			fmt.Fprintln(os.Stderr, err)
		}
		return false
	}
	ts, err := os.Stat(target)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return false
	}
	if !ts.IsDir() {
		// Only directory symlinks are worth descending into.
		return false
	}

	// Canonicalize the parent so the same physical directory reached
	// through different symlinked paths dedupes to one key.
	realParent, err := filepath.EvalSymlinks(dir)
	if err != nil {
		fmt.Fprint(os.Stderr, err)
		return false
	}
	realPath := filepath.Join(realParent, fi.Name())
	visitedSymlinks.Lock()
	defer visitedSymlinks.Unlock()
	if visitedSymlinks.m == nil {
		visitedSymlinks.m = make(map[string]struct{})
	}
	if _, ok := visitedSymlinks.m[realPath]; ok {
		return false
	}
	visitedSymlinks.m[realPath] = struct{}{}
	return true
}
|
||||||
|
|
||||||
|
var testHookScanDir = func(dir string) {}
|
||||||
|
|
||||||
|
var scanGoRootDone = make(chan struct{}) // closed when scanGoRoot is done
|
||||||
|
|
||||||
|
// scanGoRoot starts the $GOROOT directory scan in the background and
// closes scanGoRootDone when it finishes. Guarded by scanGoRootOnce.
func scanGoRoot() {
	go func() {
		scanGoDirs(true)
		close(scanGoRootDone)
	}()
}

// scanGoPath scans the $GOPATH entries synchronously. Guarded by
// scanGoPathOnce.
func scanGoPath() { scanGoDirs(false) }
|
||||||
|
|
||||||
|
// scanGoDirs walks either the $GOROOT src tree (goRoot=true) or every
// $GOPATH src tree (goRoot=false) with fastWalk, recording one *pkg per
// directory that contains at least one .go file into the shared dirScan
// map. Hidden/underscore/testdata/node_modules directories and entries
// matching the user's ignore list are pruned; directory symlinks are
// followed via shouldTraverse.
func scanGoDirs(goRoot bool) {
	if Debug {
		which := "$GOROOT"
		if !goRoot {
			which = "$GOPATH"
		}
		log.Printf("scanning " + which)
		defer log.Printf("scanned " + which)
	}
	// Lazily allocate the shared result map under its lock.
	dirScanMu.Lock()
	if dirScan == nil {
		dirScan = make(map[string]*pkg)
	}
	dirScanMu.Unlock()

	for _, srcDir := range build.Default.SrcDirs() {
		isGoroot := srcDir == filepath.Join(build.Default.GOROOT, "src")
		if isGoroot != goRoot {
			continue
		}
		testHookScanDir(srcDir)
		// walkFn may run concurrently (fastWalk); dirScan access is
		// therefore guarded by dirScanMu.
		walkFn := func(path string, typ os.FileMode) error {
			dir := filepath.Dir(path)
			if typ.IsRegular() {
				if dir == srcDir {
					// Doesn't make sense to have regular files
					// directly in your $GOPATH/src or $GOROOT/src.
					return nil
				}
				if !strings.HasSuffix(path, ".go") {
					return nil
				}
				dirScanMu.Lock()
				if _, dup := dirScan[dir]; !dup {
					importpath := filepath.ToSlash(dir[len(srcDir)+len("/"):])
					dirScan[dir] = &pkg{
						importPath:      importpath,
						importPathShort: vendorlessImportPath(importpath),
						dir:             dir,
					}
				}
				dirScanMu.Unlock()
				return nil
			}
			if typ == os.ModeDir {
				base := filepath.Base(path)
				if base == "" || base[0] == '.' || base[0] == '_' ||
					base == "testdata" || base == "node_modules" {
					return filepath.SkipDir
				}
				fi, err := os.Lstat(path)
				if err == nil && skipDir(fi) {
					if Debug {
						log.Printf("skipping directory %q under %s", fi.Name(), dir)
					}
					return filepath.SkipDir
				}
				return nil
			}
			if typ == os.ModeSymlink {
				base := filepath.Base(path)
				if strings.HasPrefix(base, ".#") {
					// Emacs noise.
					return nil
				}
				fi, err := os.Lstat(path)
				if err != nil {
					// Just ignore it.
					return nil
				}
				if shouldTraverse(dir, fi) {
					return traverseLink
				}
			}
			return nil
		}
		if err := fastWalk(srcDir, walkFn); err != nil {
			log.Printf("goimports: scanning directory %v: %v", srcDir, err)
		}
	}
}
|
||||||
|
|
||||||
|
// vendorlessImportPath returns the devendorized version of the provided
// import path: everything after the last "/vendor/" segment, or the
// remainder after a leading "vendor/". Paths without a vendor segment
// are returned unchanged. e.g. "foo/bar/vendor/a/b" => "a/b".
func vendorlessImportPath(ipath string) string {
	const sep = "/vendor/"
	if i := strings.LastIndex(ipath, sep); i >= 0 {
		return ipath[i+len(sep):]
	}
	// TrimPrefix is a no-op when the prefix is absent, which covers
	// both the leading-"vendor/" case and the plain-path case.
	return strings.TrimPrefix(ipath, "vendor/")
}
|
||||||
|
|
||||||
|
// loadExports returns the set of exported symbols in the package at dir.
// It returns nil on error or if the package name in dir does not match
// expectPackage. It is a variable so alternative implementations can be
// installed.
var loadExports func(expectPackage, dir string) map[string]bool = loadExportsGoPath

// loadExportsGoPath is the GOPATH-based implementation of loadExports:
// it parses every buildable non-test .go file in dir and collects the
// exported top-level names.
func loadExportsGoPath(expectPackage, dir string) map[string]bool {
	if Debug {
		log.Printf("loading exports in dir %s (seeking package %s)", dir, expectPackage)
	}
	exports := make(map[string]bool)

	ctx := build.Default

	// ReadDir is like ioutil.ReadDir, but only returns *.go files
	// and filters out _test.go files since they're not relevant
	// and only slow things down.
	ctx.ReadDir = func(dir string) (notTests []os.FileInfo, err error) {
		all, err := ioutil.ReadDir(dir)
		if err != nil {
			return nil, err
		}
		// Filter in place, reusing the backing array.
		notTests = all[:0]
		for _, fi := range all {
			name := fi.Name()
			if strings.HasSuffix(name, ".go") && !strings.HasSuffix(name, "_test.go") {
				notTests = append(notTests, fi)
			}
		}
		return notTests, nil
	}

	files, err := ctx.ReadDir(dir)
	if err != nil {
		log.Print(err)
		return nil
	}

	fset := token.NewFileSet()

	for _, fi := range files {
		// MatchFile applies build constraints (GOOS/GOARCH/tags).
		match, err := ctx.MatchFile(dir, fi.Name())
		if err != nil || !match {
			continue
		}
		fullFile := filepath.Join(dir, fi.Name())
		f, err := parser.ParseFile(fset, fullFile, nil, 0)
		if err != nil {
			if Debug {
				log.Printf("Parsing %s: %v", fullFile, err)
			}
			// A parse error invalidates the whole package scan.
			return nil
		}
		pkgName := f.Name.Name
		if pkgName == "documentation" {
			// Special case from go/build.ImportDir, not
			// handled by ctx.MatchFile.
			continue
		}
		if pkgName != expectPackage {
			if Debug {
				log.Printf("scan of dir %v is not expected package %v (actually %v)", dir, expectPackage, pkgName)
			}
			return nil
		}
		for name := range f.Scope.Objects {
			if ast.IsExported(name) {
				exports[name] = true
			}
		}
	}

	if Debug {
		exportList := make([]string, 0, len(exports))
		for k := range exports {
			exportList = append(exportList, k)
		}
		sort.Strings(exportList)
		log.Printf("loaded exports in dir %v (package %v): %v", dir, expectPackage, strings.Join(exportList, ", "))
	}
	return exports
}
|
||||||
|
|
||||||
|
// findImport searches for a package with the given symbols.
// If no package is found, findImport returns ("", false, nil)
//
// This is declared as a variable rather than a function so goimports
// can be easily extended by adding a file with an init function.
//
// The rename value tells goimports whether to use the package name as
// a local qualifier in an import. For example, if findImports("pkg",
// "X") returns ("foo/bar", rename=true), then goimports adds the
// import line:
//	import pkg "foo/bar"
// to satisfy uses of pkg.X in the file.
var findImport func(pkgName string, symbols map[string]bool, filename string) (foundPkg string, rename bool, err error) = findImportGoPath

// findImportGoPath is the normal implementation of findImport.
// (Some companies have their own internally.)
func findImportGoPath(pkgName string, symbols map[string]bool, filename string) (foundPkg string, rename bool, err error) {
	if inTests {
		testMu.RLock()
		defer testMu.RUnlock()
	}

	// Fast path for the standard library.
	// In the common case we hopefully never have to scan the GOPATH, which can
	// be slow with moving disks.
	if pkg, rename, ok := findImportStdlib(pkgName, symbols); ok {
		return pkg, rename, nil
	}
	if pkgName == "rand" && symbols["Read"] {
		// Special-case rand.Read.
		//
		// If findImportStdlib didn't find it above, don't go
		// searching for it, lest it find and pick math/rand
		// in GOROOT (new as of Go 1.6)
		//
		// crypto/rand is the safer choice.
		return "", false, nil
	}

	// TODO(sameer): look at the import lines for other Go files in the
	// local directory, since the user is likely to import the same packages
	// in the current Go file. Return rename=true when the other Go files
	// use a renamed package that's also used in the current file.

	// Read all the $GOPATH/src/.goimportsignore files before scanning directories.
	populateIgnoreOnce.Do(populateIgnore)

	// Start scanning the $GOROOT asynchronously, then run the
	// GOPATH scan synchronously if needed, and then wait for the
	// $GOROOT to finish.
	//
	// TODO(bradfitz): run each $GOPATH entry async. But nobody
	// really has more than one anyway, so low priority.
	scanGoRootOnce.Do(scanGoRoot) // async
	if !fileInDir(filename, build.Default.GOROOT) {
		scanGoPathOnce.Do(scanGoPath) // blocking
	}
	<-scanGoRootDone

	// Find candidate packages, looking only at their directory names first.
	var candidates []*pkg
	for _, pkg := range dirScan {
		if pkgIsCandidate(filename, pkgName, pkg) {
			candidates = append(candidates, pkg)
		}
	}

	// Sort the candidates by their import package length,
	// assuming that shorter package names are better than long
	// ones.  Note that this sorts by the de-vendored name, so
	// there's no "penalty" for vendoring.
	sort.Sort(byImportPathShortLength(candidates))
	if Debug {
		for i, pkg := range candidates {
			log.Printf("%s candidate %d/%d: %v", pkgName, i+1, len(candidates), pkg.importPathShort)
		}
	}

	// Collect exports for packages with matching names.
	// Loads are bounded by loadExportsSem; answers arrive on one channel
	// per candidate so results can be consumed in priority (sorted) order.

	done := make(chan struct{}) // closed when we find the answer
	defer close(done)

	rescv := make([]chan *pkg, len(candidates))
	for i := range candidates {
		rescv[i] = make(chan *pkg)
	}
	const maxConcurrentPackageImport = 4
	loadExportsSem := make(chan struct{}, maxConcurrentPackageImport)

	go func() {
		for i, pkg := range candidates {
			select {
			case loadExportsSem <- struct{}{}:
				select {
				case <-done:
				default:
				}
			case <-done:
				return
			}
			// Shadow the loop variable so each goroutine captures its own.
			pkg := pkg
			resc := rescv[i]
			go func() {
				if inTests {
					testMu.RLock()
					defer testMu.RUnlock()
				}
				defer func() { <-loadExportsSem }()
				exports := loadExports(pkgName, pkg.dir)

				// If it doesn't have the right
				// symbols, send nil to mean no match.
				for symbol := range symbols {
					if !exports[symbol] {
						pkg = nil
						break
					}
				}
				select {
				case resc <- pkg:
				case <-done:
				}
			}()
		}
	}()
	// Take the first (shortest-path) candidate that exports all symbols.
	for _, resc := range rescv {
		pkg := <-resc
		if pkg == nil {
			continue
		}
		// If the package name in the source doesn't match the import path's base,
		// return true so the rewriter adds a name (import foo "github.com/bar/go-foo")
		needsRename := path.Base(pkg.importPath) != pkgName
		return pkg.importPathShort, needsRename, nil
	}
	return "", false, nil
}
|
||||||
|
|
||||||
|
// pkgIsCandidate reports whether pkg is a candidate for satisfying the
// finding which package pkgIdent in the file named by filename is trying
// to refer to.
//
// This check is purely lexical and is meant to be as fast as possible
// because it's run over all $GOPATH directories to filter out poor
// candidates in order to limit the CPU and I/O later parsing the
// exports in candidate packages.
//
// filename is the file being formatted.
// pkgIdent is the package being searched for, like "client" (if
// searching for "client.New")
func pkgIsCandidate(filename, pkgIdent string, pkg *pkg) bool {
	// Check "internal" and "vendor" visibility:
	if !canUse(filename, pkg.dir) {
		return false
	}

	// Speed optimization to minimize disk I/O:
	// the last two components on disk must contain the
	// package name somewhere.
	//
	// This permits mismatch naming like directory
	// "go-foo" being package "foo", or "pkg.v3" being "pkg",
	// or directory "google.golang.org/api/cloudbilling/v1"
	// being package "cloudbilling", but doesn't
	// permit a directory "foo" to be package
	// "bar", which is strongly discouraged
	// anyway. There's no reason goimports needs
	// to be slow just to accommodate that.
	lastTwo := lastTwoComponents(pkg.importPathShort)
	if strings.Contains(lastTwo, pkgIdent) {
		return true
	}
	// Retry with hyphens stripped and ASCII lower-cased, so that e.g.
	// directory "Go-Foo" can still match identifier "gofoo".
	if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(pkgIdent) {
		lastTwo = lowerASCIIAndRemoveHyphen(lastTwo)
		if strings.Contains(lastTwo, pkgIdent) {
			return true
		}
	}

	return false
}
|
||||||
|
|
||||||
|
// hasHyphenOrUpperASCII reports whether s contains a hyphen or an
// upper-case ASCII letter. Non-ASCII bytes are ignored.
func hasHyphenOrUpperASCII(s string) bool {
	for _, c := range []byte(s) {
		switch {
		case c == '-':
			return true
		case 'A' <= c && c <= 'Z':
			return true
		}
	}
	return false
}
|
||||||
|
|
||||||
|
// lowerASCIIAndRemoveHyphen returns s with every hyphen dropped and
// every upper-case ASCII letter lowered; all other bytes (including
// non-ASCII) pass through unchanged.
func lowerASCIIAndRemoveHyphen(s string) (ret string) {
	out := make([]byte, 0, len(s))
	for i := 0; i < len(s); i++ {
		c := s[i]
		if c == '-' {
			continue
		}
		if 'A' <= c && c <= 'Z' {
			c += 'a' - 'A'
		}
		out = append(out, c)
	}
	return string(out)
}
|
||||||
|
|
||||||
|
// canUse reports whether the package in dir is usable from filename,
// respecting the Go "internal" and "vendor" visibility rules: a vendor
// or internal directory is only visible from files below its parent.
func canUse(filename, dir string) bool {
	// visible reports whether a slash-separated path contains no
	// access-restricting vendor or internal component.
	visible := func(p string) bool {
		return !strings.Contains(p, "/vendor/") &&
			!strings.Contains(p, "/internal/") &&
			!strings.HasSuffix(p, "/internal")
	}

	// Fast path, before any allocations: if dir mentions neither word
	// at all, there is nothing to restrict. (This can false-negative on
	// names like "notinternal", which the precise check below handles.)
	if !strings.Contains(dir, "vendor") && !strings.Contains(dir, "internal") {
		return true
	}
	if visible(filepath.ToSlash(dir)) {
		return true
	}

	// Vendor or internal directory only visible from children of parent.
	// That means the path from the current directory to the target directory
	// can contain ../vendor or ../internal but not ../foo/vendor or
	// ../foo/internal or bar/vendor or bar/internal. After stripping all
	// the leading ../, the only okay place to see vendor or internal is
	// at the very beginning of the path.
	absfile, err := filepath.Abs(filename)
	if err != nil {
		return false
	}
	absdir, err := filepath.Abs(dir)
	if err != nil {
		return false
	}
	rel, err := filepath.Rel(absfile, absdir)
	if err != nil {
		return false
	}
	relSlash := filepath.ToSlash(rel)
	if i := strings.LastIndex(relSlash, "../"); i >= 0 {
		relSlash = relSlash[i+len("../"):]
	}
	return visible(relSlash)
}
|
||||||
|
|
||||||
|
// lastTwoComponents returns at most the last two path components of v,
// treating both '/' and '\' as separators; the leading separator of the
// second-to-last component is kept. Paths with fewer than two
// separators are returned whole.
func lastTwoComponents(v string) string {
	seen := 0
	for i := len(v) - 1; i >= 0; i-- {
		c := v[i]
		if c != '/' && c != '\\' {
			continue
		}
		seen++
		if seen == 2 {
			return v[i:]
		}
	}
	return v
}
|
||||||
|
|
||||||
|
// visitFn adapts a plain function to the ast.Visitor interface so an
// AST walk can be expressed as a closure.
type visitFn func(node ast.Node) ast.Visitor

// Visit implements ast.Visitor by delegating to the function itself.
func (fn visitFn) Visit(node ast.Node) ast.Visitor {
	return fn(node)
}
|
||||||
|
|
||||||
|
func findImportStdlib(shortPkg string, symbols map[string]bool) (importPath string, rename, ok bool) {
|
||||||
|
for symbol := range symbols {
|
||||||
|
key := shortPkg + "." + symbol
|
||||||
|
path := stdlib[key]
|
||||||
|
if path == "" {
|
||||||
|
if key == "rand.Read" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return "", false, false
|
||||||
|
}
|
||||||
|
if importPath != "" && importPath != path {
|
||||||
|
// Ambiguous. Symbols pointed to different things.
|
||||||
|
return "", false, false
|
||||||
|
}
|
||||||
|
importPath = path
|
||||||
|
}
|
||||||
|
if importPath == "" && shortPkg == "rand" && symbols["Read"] {
|
||||||
|
return "crypto/rand", false, true
|
||||||
|
}
|
||||||
|
return importPath, false, importPath != ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// fileInDir reports whether the provided file path looks like
|
||||||
|
// it's in dir. (without hitting the filesystem)
|
||||||
|
func fileInDir(file, dir string) bool {
|
||||||
|
rest := strings.TrimPrefix(file, dir)
|
||||||
|
if len(rest) == len(file) {
|
||||||
|
// dir is not a prefix of file.
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
// Check for boundary: either nothing (file == dir), or a slash.
|
||||||
|
return len(rest) == 0 || rest[0] == '/' || rest[0] == '\\'
|
||||||
|
}
|
@ -2,7 +2,7 @@
|
|||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
// +build go1.5
|
// +build !go1.8
|
||||||
|
|
||||||
// Package eg implements the example-based refactoring tool whose
|
// Package eg implements the example-based refactoring tool whose
|
||||||
// command-line is defined in golang.org/x/tools/cmd/eg.
|
// command-line is defined in golang.org/x/tools/cmd/eg.
|
||||||
|
346
refactor/eg/eg18.go
Normal file
346
refactor/eg/eg18.go
Normal file
@ -0,0 +1,346 @@
|
|||||||
|
// Copyright 2014 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.8
|
||||||
|
|
||||||
|
// Package eg implements the example-based refactoring tool whose
|
||||||
|
// command-line is defined in golang.org/x/tools/cmd/eg.
|
||||||
|
package eg // import "golang.org/x/tools/refactor/eg"
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/format"
|
||||||
|
"go/printer"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
|
||||||
|
const Help = `
|
||||||
|
This tool implements example-based refactoring of expressions.
|
||||||
|
|
||||||
|
The transformation is specified as a Go file defining two functions,
|
||||||
|
'before' and 'after', of identical types. Each function body consists
|
||||||
|
of a single statement: either a return statement with a single
|
||||||
|
(possibly multi-valued) expression, or an expression statement. The
|
||||||
|
'before' expression specifies a pattern and the 'after' expression its
|
||||||
|
replacement.
|
||||||
|
|
||||||
|
package P
|
||||||
|
import ( "errors"; "fmt" )
|
||||||
|
func before(s string) error { return fmt.Errorf("%s", s) }
|
||||||
|
func after(s string) error { return errors.New(s) }
|
||||||
|
|
||||||
|
The expression statement form is useful when the expression has no
|
||||||
|
result, for example:
|
||||||
|
|
||||||
|
func before(msg string) { log.Fatalf("%s", msg) }
|
||||||
|
func after(msg string) { log.Fatal(msg) }
|
||||||
|
|
||||||
|
The parameters of both functions are wildcards that may match any
|
||||||
|
expression assignable to that type. If the pattern contains multiple
|
||||||
|
occurrences of the same parameter, each must match the same expression
|
||||||
|
in the input for the pattern to match. If the replacement contains
|
||||||
|
multiple occurrences of the same parameter, the expression will be
|
||||||
|
duplicated, possibly changing the side-effects.
|
||||||
|
|
||||||
|
The tool analyses all Go code in the packages specified by the
|
||||||
|
arguments, replacing all occurrences of the pattern with the
|
||||||
|
substitution.
|
||||||
|
|
||||||
|
So, the transform above would change this input:
|
||||||
|
err := fmt.Errorf("%s", "error: " + msg)
|
||||||
|
to this output:
|
||||||
|
err := errors.New("error: " + msg)
|
||||||
|
|
||||||
|
Identifiers, including qualified identifiers (p.X) are considered to
|
||||||
|
match only if they denote the same object. This allows correct
|
||||||
|
matching even in the presence of dot imports, named imports and
|
||||||
|
locally shadowed package names in the input program.
|
||||||
|
|
||||||
|
Matching of type syntax is semantic, not syntactic: type syntax in the
|
||||||
|
pattern matches type syntax in the input if the types are identical.
|
||||||
|
Thus, func(x int) matches func(y int).
|
||||||
|
|
||||||
|
This tool was inspired by other example-based refactoring tools,
|
||||||
|
'gofmt -r' for Go and Refaster for Java.
|
||||||
|
|
||||||
|
|
||||||
|
LIMITATIONS
|
||||||
|
===========
|
||||||
|
|
||||||
|
EXPRESSIVENESS
|
||||||
|
|
||||||
|
Only refactorings that replace one expression with another, regardless
|
||||||
|
of the expression's context, may be expressed. Refactoring arbitrary
|
||||||
|
statements (or sequences of statements) is a less well-defined problem
|
||||||
|
and is less amenable to this approach.
|
||||||
|
|
||||||
|
A pattern that contains a function literal (and hence statements)
|
||||||
|
never matches.
|
||||||
|
|
||||||
|
There is no way to generalize over related types, e.g. to express that
|
||||||
|
a wildcard may have any integer type, for example.
|
||||||
|
|
||||||
|
It is not possible to replace an expression by one of a different
|
||||||
|
type, even in contexts where this is legal, such as x in fmt.Print(x).
|
||||||
|
|
||||||
|
The struct literals T{x} and T{K: x} cannot both be matched by a single
|
||||||
|
template.
|
||||||
|
|
||||||
|
|
||||||
|
SAFETY
|
||||||
|
|
||||||
|
Verifying that a transformation does not introduce type errors is very
|
||||||
|
complex in the general case. An innocuous-looking replacement of one
|
||||||
|
constant by another (e.g. 1 to 2) may cause type errors relating to
|
||||||
|
array types and indices, for example. The tool performs only very
|
||||||
|
superficial checks of type preservation.
|
||||||
|
|
||||||
|
|
||||||
|
IMPORTS
|
||||||
|
|
||||||
|
Although the matching algorithm is fully aware of scoping rules, the
|
||||||
|
replacement algorithm is not, so the replacement code may contain
|
||||||
|
incorrect identifier syntax for imported objects if there are dot
|
||||||
|
imports, named imports or locally shadowed package names in the input
|
||||||
|
program.
|
||||||
|
|
||||||
|
Imports are added as needed, but they are not removed as needed.
|
||||||
|
Run 'goimports' on the modified file for now.
|
||||||
|
|
||||||
|
Dot imports are forbidden in the template.
|
||||||
|
|
||||||
|
|
||||||
|
TIPS
|
||||||
|
====
|
||||||
|
|
||||||
|
Sometimes a little creativity is required to implement the desired
|
||||||
|
migration. This section lists a few tips and tricks.
|
||||||
|
|
||||||
|
To remove the final parameter from a function, temporarily change the
|
||||||
|
function signature so that the final parameter is variadic, as this
|
||||||
|
allows legal calls both with and without the argument. Then use eg to
|
||||||
|
remove the final argument from all callers, and remove the variadic
|
||||||
|
parameter by hand. The reverse process can be used to add a final
|
||||||
|
parameter.
|
||||||
|
|
||||||
|
To add or remove parameters other than the final one, you must do it in
|
||||||
|
stages: (1) declare a variant function f' with a different name and the
|
||||||
|
desired parameters; (2) use eg to transform calls to f into calls to f',
|
||||||
|
changing the arguments as needed; (3) change the declaration of f to
|
||||||
|
match f'; (4) use eg to rename f' to f in all calls; (5) delete f'.
|
||||||
|
`
|
||||||
|
|
||||||
|
// TODO(adonovan): expand upon the above documentation as an HTML page.
|
||||||
|
|
||||||
|
// A Transformer represents a single example-based transformation.
|
||||||
|
type Transformer struct {
|
||||||
|
fset *token.FileSet
|
||||||
|
verbose bool
|
||||||
|
info *types.Info // combined type info for template/input/output ASTs
|
||||||
|
seenInfos map[*types.Info]bool
|
||||||
|
wildcards map[*types.Var]bool // set of parameters in func before()
|
||||||
|
env map[string]ast.Expr // maps parameter name to wildcard binding
|
||||||
|
importedObjs map[types.Object]*ast.SelectorExpr // objects imported by after().
|
||||||
|
before, after ast.Expr
|
||||||
|
allowWildcards bool
|
||||||
|
|
||||||
|
// Working state of Transform():
|
||||||
|
nsubsts int // number of substitutions made
|
||||||
|
currentPkg *types.Package // package of current call
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewTransformer returns a transformer based on the specified template,
|
||||||
|
// a single-file package containing "before" and "after" functions as
|
||||||
|
// described in the package documentation.
|
||||||
|
// tmplInfo is the type information for tmplFile.
|
||||||
|
//
|
||||||
|
func NewTransformer(fset *token.FileSet, tmplPkg *types.Package, tmplFile *ast.File, tmplInfo *types.Info, verbose bool) (*Transformer, error) {
|
||||||
|
// Check the template.
|
||||||
|
beforeSig := funcSig(tmplPkg, "before")
|
||||||
|
if beforeSig == nil {
|
||||||
|
return nil, fmt.Errorf("no 'before' func found in template")
|
||||||
|
}
|
||||||
|
afterSig := funcSig(tmplPkg, "after")
|
||||||
|
if afterSig == nil {
|
||||||
|
return nil, fmt.Errorf("no 'after' func found in template")
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO(adonovan): should we also check the names of the params match?
|
||||||
|
if !types.Identical(afterSig, beforeSig) {
|
||||||
|
return nil, fmt.Errorf("before %s and after %s functions have different signatures",
|
||||||
|
beforeSig, afterSig)
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, imp := range tmplFile.Imports {
|
||||||
|
if imp.Name != nil && imp.Name.Name == "." {
|
||||||
|
// Dot imports are currently forbidden. We
|
||||||
|
// make the simplifying assumption that all
|
||||||
|
// imports are regular, without local renames.
|
||||||
|
// TODO(adonovan): document
|
||||||
|
return nil, fmt.Errorf("dot-import (of %s) in template", imp.Path.Value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
var beforeDecl, afterDecl *ast.FuncDecl
|
||||||
|
for _, decl := range tmplFile.Decls {
|
||||||
|
if decl, ok := decl.(*ast.FuncDecl); ok {
|
||||||
|
switch decl.Name.Name {
|
||||||
|
case "before":
|
||||||
|
beforeDecl = decl
|
||||||
|
case "after":
|
||||||
|
afterDecl = decl
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
before, err := soleExpr(beforeDecl)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("before: %s", err)
|
||||||
|
}
|
||||||
|
after, err := soleExpr(afterDecl)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("after: %s", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
wildcards := make(map[*types.Var]bool)
|
||||||
|
for i := 0; i < beforeSig.Params().Len(); i++ {
|
||||||
|
wildcards[beforeSig.Params().At(i)] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// checkExprTypes returns an error if Tb (type of before()) is not
|
||||||
|
// safe to replace with Ta (type of after()).
|
||||||
|
//
|
||||||
|
// Only superficial checks are performed, and they may result in both
|
||||||
|
// false positives and negatives.
|
||||||
|
//
|
||||||
|
// Ideally, we would only require that the replacement be assignable
|
||||||
|
// to the context of a specific pattern occurrence, but the type
|
||||||
|
// checker doesn't record that information and it's complex to deduce.
|
||||||
|
// A Go type cannot capture all the constraints of a given expression
|
||||||
|
// context, which may include the size, constness, signedness,
|
||||||
|
// namedness or constructor of its type, and even the specific value
|
||||||
|
// of the replacement. (Consider the rule that array literal keys
|
||||||
|
// must be unique.) So we cannot hope to prove the safety of a
|
||||||
|
// transformation in general.
|
||||||
|
Tb := tmplInfo.TypeOf(before)
|
||||||
|
Ta := tmplInfo.TypeOf(after)
|
||||||
|
if types.AssignableTo(Tb, Ta) {
|
||||||
|
// safe: replacement is assignable to pattern.
|
||||||
|
} else if tuple, ok := Tb.(*types.Tuple); ok && tuple.Len() == 0 {
|
||||||
|
// safe: pattern has void type (must appear in an ExprStmt).
|
||||||
|
} else {
|
||||||
|
return nil, fmt.Errorf("%s is not a safe replacement for %s", Ta, Tb)
|
||||||
|
}
|
||||||
|
|
||||||
|
tr := &Transformer{
|
||||||
|
fset: fset,
|
||||||
|
verbose: verbose,
|
||||||
|
wildcards: wildcards,
|
||||||
|
allowWildcards: true,
|
||||||
|
seenInfos: make(map[*types.Info]bool),
|
||||||
|
importedObjs: make(map[types.Object]*ast.SelectorExpr),
|
||||||
|
before: before,
|
||||||
|
after: after,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Combine type info from the template and input packages, and
|
||||||
|
// type info for the synthesized ASTs too. This saves us
|
||||||
|
// having to book-keep where each ast.Node originated as we
|
||||||
|
// construct the resulting hybrid AST.
|
||||||
|
tr.info = &types.Info{
|
||||||
|
Types: make(map[ast.Expr]types.TypeAndValue),
|
||||||
|
Defs: make(map[*ast.Ident]types.Object),
|
||||||
|
Uses: make(map[*ast.Ident]types.Object),
|
||||||
|
Selections: make(map[*ast.SelectorExpr]*types.Selection),
|
||||||
|
}
|
||||||
|
mergeTypeInfo(tr.info, tmplInfo)
|
||||||
|
|
||||||
|
// Compute set of imported objects required by after().
|
||||||
|
// TODO(adonovan): reject dot-imports in pattern
|
||||||
|
ast.Inspect(after, func(n ast.Node) bool {
|
||||||
|
if n, ok := n.(*ast.SelectorExpr); ok {
|
||||||
|
if _, ok := tr.info.Selections[n]; !ok {
|
||||||
|
// qualified ident
|
||||||
|
obj := tr.info.Uses[n.Sel]
|
||||||
|
tr.importedObjs[obj] = n
|
||||||
|
return false // prune
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true // recur
|
||||||
|
})
|
||||||
|
|
||||||
|
return tr, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteAST is a convenience function that writes AST f to the specified file.
|
||||||
|
func WriteAST(fset *token.FileSet, filename string, f *ast.File) (err error) {
|
||||||
|
fh, err := os.Create(filename)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer func() {
|
||||||
|
if err2 := fh.Close(); err != nil {
|
||||||
|
err = err2 // prefer earlier error
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
return format.Node(fh, fset, f)
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- utilities --------------------------------------------------------
|
||||||
|
|
||||||
|
// funcSig returns the signature of the specified package-level function.
|
||||||
|
func funcSig(pkg *types.Package, name string) *types.Signature {
|
||||||
|
if f, ok := pkg.Scope().Lookup(name).(*types.Func); ok {
|
||||||
|
return f.Type().(*types.Signature)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// soleExpr returns the sole expression in the before/after template function.
|
||||||
|
func soleExpr(fn *ast.FuncDecl) (ast.Expr, error) {
|
||||||
|
if fn.Body == nil {
|
||||||
|
return nil, fmt.Errorf("no body")
|
||||||
|
}
|
||||||
|
if len(fn.Body.List) != 1 {
|
||||||
|
return nil, fmt.Errorf("must contain a single statement")
|
||||||
|
}
|
||||||
|
switch stmt := fn.Body.List[0].(type) {
|
||||||
|
case *ast.ReturnStmt:
|
||||||
|
if len(stmt.Results) != 1 {
|
||||||
|
return nil, fmt.Errorf("return statement must have a single operand")
|
||||||
|
}
|
||||||
|
return stmt.Results[0], nil
|
||||||
|
|
||||||
|
case *ast.ExprStmt:
|
||||||
|
return stmt.X, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, fmt.Errorf("must contain a single return or expression statement")
|
||||||
|
}
|
||||||
|
|
||||||
|
// mergeTypeInfo adds type info from src to dst.
|
||||||
|
func mergeTypeInfo(dst, src *types.Info) {
|
||||||
|
for k, v := range src.Types {
|
||||||
|
dst.Types[k] = v
|
||||||
|
}
|
||||||
|
for k, v := range src.Defs {
|
||||||
|
dst.Defs[k] = v
|
||||||
|
}
|
||||||
|
for k, v := range src.Uses {
|
||||||
|
dst.Uses[k] = v
|
||||||
|
}
|
||||||
|
for k, v := range src.Selections {
|
||||||
|
dst.Selections[k] = v
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// (debugging only)
|
||||||
|
func astString(fset *token.FileSet, n ast.Node) string {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
printer.Fprint(&buf, fset, n)
|
||||||
|
return buf.String()
|
||||||
|
}
|
@ -2,7 +2,7 @@
|
|||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
// +build go1.5
|
// +build !go1.8
|
||||||
|
|
||||||
package eg
|
package eg
|
||||||
|
|
||||||
|
251
refactor/eg/match18.go
Normal file
251
refactor/eg/match18.go
Normal file
@ -0,0 +1,251 @@
|
|||||||
|
// Copyright 2014 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.8
|
||||||
|
|
||||||
|
package eg
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
exact "go/constant"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"reflect"
|
||||||
|
|
||||||
|
"golang.org/x/tools/go/ast/astutil"
|
||||||
|
)
|
||||||
|
|
||||||
|
// matchExpr reports whether pattern x matches y.
|
||||||
|
//
|
||||||
|
// If tr.allowWildcards, Idents in x that refer to parameters are
|
||||||
|
// treated as wildcards, and match any y that is assignable to the
|
||||||
|
// parameter type; matchExpr records this correspondence in tr.env.
|
||||||
|
// Otherwise, matchExpr simply reports whether the two trees are
|
||||||
|
// equivalent.
|
||||||
|
//
|
||||||
|
// A wildcard appearing more than once in the pattern must
|
||||||
|
// consistently match the same tree.
|
||||||
|
//
|
||||||
|
func (tr *Transformer) matchExpr(x, y ast.Expr) bool {
|
||||||
|
if x == nil && y == nil {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if x == nil || y == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
x = unparen(x)
|
||||||
|
y = unparen(y)
|
||||||
|
|
||||||
|
// Is x a wildcard? (a reference to a 'before' parameter)
|
||||||
|
if xobj, ok := tr.wildcardObj(x); ok {
|
||||||
|
return tr.matchWildcard(xobj, y)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Object identifiers (including pkg-qualified ones)
|
||||||
|
// are handled semantically, not syntactically.
|
||||||
|
xobj := isRef(x, tr.info)
|
||||||
|
yobj := isRef(y, tr.info)
|
||||||
|
if xobj != nil {
|
||||||
|
return xobj == yobj
|
||||||
|
}
|
||||||
|
if yobj != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO(adonovan): audit: we cannot assume these ast.Exprs
|
||||||
|
// contain non-nil pointers. e.g. ImportSpec.Name may be a
|
||||||
|
// nil *ast.Ident.
|
||||||
|
|
||||||
|
if reflect.TypeOf(x) != reflect.TypeOf(y) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
switch x := x.(type) {
|
||||||
|
case *ast.Ident:
|
||||||
|
log.Fatalf("unexpected Ident: %s", astString(tr.fset, x))
|
||||||
|
|
||||||
|
case *ast.BasicLit:
|
||||||
|
y := y.(*ast.BasicLit)
|
||||||
|
xval := exact.MakeFromLiteral(x.Value, x.Kind, 0)
|
||||||
|
yval := exact.MakeFromLiteral(y.Value, y.Kind, 0)
|
||||||
|
return exact.Compare(xval, token.EQL, yval)
|
||||||
|
|
||||||
|
case *ast.FuncLit:
|
||||||
|
// func literals (and thus statement syntax) never match.
|
||||||
|
return false
|
||||||
|
|
||||||
|
case *ast.CompositeLit:
|
||||||
|
y := y.(*ast.CompositeLit)
|
||||||
|
return (x.Type == nil) == (y.Type == nil) &&
|
||||||
|
(x.Type == nil || tr.matchType(x.Type, y.Type)) &&
|
||||||
|
tr.matchExprs(x.Elts, y.Elts)
|
||||||
|
|
||||||
|
case *ast.SelectorExpr:
|
||||||
|
y := y.(*ast.SelectorExpr)
|
||||||
|
return tr.matchSelectorExpr(x, y) &&
|
||||||
|
tr.info.Selections[x].Obj() == tr.info.Selections[y].Obj()
|
||||||
|
|
||||||
|
case *ast.IndexExpr:
|
||||||
|
y := y.(*ast.IndexExpr)
|
||||||
|
return tr.matchExpr(x.X, y.X) &&
|
||||||
|
tr.matchExpr(x.Index, y.Index)
|
||||||
|
|
||||||
|
case *ast.SliceExpr:
|
||||||
|
y := y.(*ast.SliceExpr)
|
||||||
|
return tr.matchExpr(x.X, y.X) &&
|
||||||
|
tr.matchExpr(x.Low, y.Low) &&
|
||||||
|
tr.matchExpr(x.High, y.High) &&
|
||||||
|
tr.matchExpr(x.Max, y.Max) &&
|
||||||
|
x.Slice3 == y.Slice3
|
||||||
|
|
||||||
|
case *ast.TypeAssertExpr:
|
||||||
|
y := y.(*ast.TypeAssertExpr)
|
||||||
|
return tr.matchExpr(x.X, y.X) &&
|
||||||
|
tr.matchType(x.Type, y.Type)
|
||||||
|
|
||||||
|
case *ast.CallExpr:
|
||||||
|
y := y.(*ast.CallExpr)
|
||||||
|
match := tr.matchExpr // function call
|
||||||
|
if tr.info.Types[x.Fun].IsType() {
|
||||||
|
match = tr.matchType // type conversion
|
||||||
|
}
|
||||||
|
return x.Ellipsis.IsValid() == y.Ellipsis.IsValid() &&
|
||||||
|
match(x.Fun, y.Fun) &&
|
||||||
|
tr.matchExprs(x.Args, y.Args)
|
||||||
|
|
||||||
|
case *ast.StarExpr:
|
||||||
|
y := y.(*ast.StarExpr)
|
||||||
|
return tr.matchExpr(x.X, y.X)
|
||||||
|
|
||||||
|
case *ast.UnaryExpr:
|
||||||
|
y := y.(*ast.UnaryExpr)
|
||||||
|
return x.Op == y.Op &&
|
||||||
|
tr.matchExpr(x.X, y.X)
|
||||||
|
|
||||||
|
case *ast.BinaryExpr:
|
||||||
|
y := y.(*ast.BinaryExpr)
|
||||||
|
return x.Op == y.Op &&
|
||||||
|
tr.matchExpr(x.X, y.X) &&
|
||||||
|
tr.matchExpr(x.Y, y.Y)
|
||||||
|
|
||||||
|
case *ast.KeyValueExpr:
|
||||||
|
y := y.(*ast.KeyValueExpr)
|
||||||
|
return tr.matchExpr(x.Key, y.Key) &&
|
||||||
|
tr.matchExpr(x.Value, y.Value)
|
||||||
|
}
|
||||||
|
|
||||||
|
panic(fmt.Sprintf("unhandled AST node type: %T", x))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (tr *Transformer) matchExprs(xx, yy []ast.Expr) bool {
|
||||||
|
if len(xx) != len(yy) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
for i := range xx {
|
||||||
|
if !tr.matchExpr(xx[i], yy[i]) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// matchType reports whether the two type ASTs denote identical types.
|
||||||
|
func (tr *Transformer) matchType(x, y ast.Expr) bool {
|
||||||
|
tx := tr.info.Types[x].Type
|
||||||
|
ty := tr.info.Types[y].Type
|
||||||
|
return types.Identical(tx, ty)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (tr *Transformer) wildcardObj(x ast.Expr) (*types.Var, bool) {
|
||||||
|
if x, ok := x.(*ast.Ident); ok && x != nil && tr.allowWildcards {
|
||||||
|
if xobj, ok := tr.info.Uses[x].(*types.Var); ok && tr.wildcards[xobj] {
|
||||||
|
return xobj, true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil, false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (tr *Transformer) matchSelectorExpr(x, y *ast.SelectorExpr) bool {
|
||||||
|
if xobj, ok := tr.wildcardObj(x.X); ok {
|
||||||
|
field := x.Sel.Name
|
||||||
|
yt := tr.info.TypeOf(y.X)
|
||||||
|
o, _, _ := types.LookupFieldOrMethod(yt, true, tr.currentPkg, field)
|
||||||
|
if o != nil {
|
||||||
|
tr.env[xobj.Name()] = y.X // record binding
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return tr.matchExpr(x.X, y.X)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (tr *Transformer) matchWildcard(xobj *types.Var, y ast.Expr) bool {
|
||||||
|
name := xobj.Name()
|
||||||
|
|
||||||
|
if tr.verbose {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s: wildcard %s -> %s?: ",
|
||||||
|
tr.fset.Position(y.Pos()), name, astString(tr.fset, y))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check that y is assignable to the declared type of the param.
|
||||||
|
yt := tr.info.TypeOf(y)
|
||||||
|
if yt == nil {
|
||||||
|
// y has no type.
|
||||||
|
// Perhaps it is an *ast.Ellipsis in [...]T{}, or
|
||||||
|
// an *ast.KeyValueExpr in T{k: v}.
|
||||||
|
// Clearly these pseudo-expressions cannot match a
|
||||||
|
// wildcard, but it would nice if we had a way to ignore
|
||||||
|
// the difference between T{v} and T{k:v} for structs.
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if !types.AssignableTo(yt, xobj.Type()) {
|
||||||
|
if tr.verbose {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s not assignable to %s\n", yt, xobj.Type())
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// A wildcard matches any expression.
|
||||||
|
// If it appears multiple times in the pattern, it must match
|
||||||
|
// the same expression each time.
|
||||||
|
if old, ok := tr.env[name]; ok {
|
||||||
|
// found existing binding
|
||||||
|
tr.allowWildcards = false
|
||||||
|
r := tr.matchExpr(old, y)
|
||||||
|
if tr.verbose {
|
||||||
|
fmt.Fprintf(os.Stderr, "%t secondary match, primary was %s\n",
|
||||||
|
r, astString(tr.fset, old))
|
||||||
|
}
|
||||||
|
tr.allowWildcards = true
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
|
||||||
|
if tr.verbose {
|
||||||
|
fmt.Fprintf(os.Stderr, "primary match\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
tr.env[name] = y // record binding
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- utilities --------------------------------------------------------
|
||||||
|
|
||||||
|
func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) }
|
||||||
|
|
||||||
|
// isRef returns the object referred to by this (possibly qualified)
|
||||||
|
// identifier, or nil if the node is not a referring identifier.
|
||||||
|
func isRef(n ast.Node, info *types.Info) types.Object {
|
||||||
|
switch n := n.(type) {
|
||||||
|
case *ast.Ident:
|
||||||
|
return info.Uses[n]
|
||||||
|
|
||||||
|
case *ast.SelectorExpr:
|
||||||
|
if _, ok := info.Selections[n]; !ok {
|
||||||
|
// qualified ident
|
||||||
|
return info.Uses[n.Sel]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
@ -2,7 +2,7 @@
|
|||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
// +build go1.5
|
// +build !go1.8
|
||||||
|
|
||||||
package rename
|
package rename
|
||||||
|
|
||||||
|
860
refactor/rename/check18.go
Normal file
860
refactor/rename/check18.go
Normal file
@ -0,0 +1,860 @@
|
|||||||
|
// Copyright 2014 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.8
|
||||||
|
|
||||||
|
package rename
|
||||||
|
|
||||||
|
// This file defines the safety checks for each kind of renaming.
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
|
||||||
|
"golang.org/x/tools/go/loader"
|
||||||
|
"golang.org/x/tools/refactor/satisfy"
|
||||||
|
)
|
||||||
|
|
||||||
|
// errorf reports an error (e.g. conflict) and prevents file modification.
|
||||||
|
func (r *renamer) errorf(pos token.Pos, format string, args ...interface{}) {
|
||||||
|
r.hadConflicts = true
|
||||||
|
reportError(r.iprog.Fset.Position(pos), fmt.Sprintf(format, args...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// check performs safety checks of the renaming of the 'from' object to r.to.
|
||||||
|
func (r *renamer) check(from types.Object) {
|
||||||
|
if r.objsToUpdate[from] {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
r.objsToUpdate[from] = true
|
||||||
|
|
||||||
|
// NB: order of conditions is important.
|
||||||
|
if from_, ok := from.(*types.PkgName); ok {
|
||||||
|
r.checkInFileBlock(from_)
|
||||||
|
} else if from_, ok := from.(*types.Label); ok {
|
||||||
|
r.checkLabel(from_)
|
||||||
|
} else if isPackageLevel(from) {
|
||||||
|
r.checkInPackageBlock(from)
|
||||||
|
} else if v, ok := from.(*types.Var); ok && v.IsField() {
|
||||||
|
r.checkStructField(v)
|
||||||
|
} else if f, ok := from.(*types.Func); ok && recv(f) != nil {
|
||||||
|
r.checkMethod(f)
|
||||||
|
} else if isLocal(from) {
|
||||||
|
r.checkInLocalScope(from)
|
||||||
|
} else {
|
||||||
|
r.errorf(from.Pos(), "unexpected %s object %q (please report a bug)\n",
|
||||||
|
objectKind(from), from)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// checkInFileBlock performs safety checks for renames of objects in the file block,
|
||||||
|
// i.e. imported package names.
|
||||||
|
func (r *renamer) checkInFileBlock(from *types.PkgName) {
|
||||||
|
// Check import name is not "init".
|
||||||
|
if r.to == "init" {
|
||||||
|
r.errorf(from.Pos(), "%q is not a valid imported package name", r.to)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for conflicts between file and package block.
|
||||||
|
if prev := from.Pkg().Scope().Lookup(r.to); prev != nil {
|
||||||
|
r.errorf(from.Pos(), "renaming this %s %q to %q would conflict",
|
||||||
|
objectKind(from), from.Name(), r.to)
|
||||||
|
r.errorf(prev.Pos(), "\twith this package member %s",
|
||||||
|
objectKind(prev))
|
||||||
|
return // since checkInPackageBlock would report redundant errors
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for conflicts in lexical scope.
|
||||||
|
r.checkInLexicalScope(from, r.packages[from.Pkg()])
|
||||||
|
|
||||||
|
// Finally, modify ImportSpec syntax to add or remove the Name as needed.
|
||||||
|
info, path, _ := r.iprog.PathEnclosingInterval(from.Pos(), from.Pos())
|
||||||
|
if from.Imported().Name() == r.to {
|
||||||
|
// ImportSpec.Name not needed
|
||||||
|
path[1].(*ast.ImportSpec).Name = nil
|
||||||
|
} else {
|
||||||
|
// ImportSpec.Name needed
|
||||||
|
if spec := path[1].(*ast.ImportSpec); spec.Name == nil {
|
||||||
|
spec.Name = &ast.Ident{NamePos: spec.Path.Pos(), Name: r.to}
|
||||||
|
info.Defs[spec.Name] = from
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// checkInPackageBlock performs safety checks for renames of
|
||||||
|
// func/var/const/type objects in the package block.
|
||||||
|
func (r *renamer) checkInPackageBlock(from types.Object) {
|
||||||
|
// Check that there are no references to the name from another
|
||||||
|
// package if the renaming would make it unexported.
|
||||||
|
if ast.IsExported(from.Name()) && !ast.IsExported(r.to) {
|
||||||
|
for pkg, info := range r.packages {
|
||||||
|
if pkg == from.Pkg() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if id := someUse(info, from); id != nil &&
|
||||||
|
!r.checkExport(id, pkg, from) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
info := r.packages[from.Pkg()]
|
||||||
|
|
||||||
|
// Check that in the package block, "init" is a function, and never referenced.
|
||||||
|
if r.to == "init" {
|
||||||
|
kind := objectKind(from)
|
||||||
|
if kind == "func" {
|
||||||
|
// Reject if intra-package references to it exist.
|
||||||
|
for id, obj := range info.Uses {
|
||||||
|
if obj == from {
|
||||||
|
r.errorf(from.Pos(),
|
||||||
|
"renaming this func %q to %q would make it a package initializer",
|
||||||
|
from.Name(), r.to)
|
||||||
|
r.errorf(id.Pos(), "\tbut references to it exist")
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
r.errorf(from.Pos(), "you cannot have a %s at package level named %q",
|
||||||
|
kind, r.to)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for conflicts between package block and all file blocks.
|
||||||
|
for _, f := range info.Files {
|
||||||
|
fileScope := info.Info.Scopes[f]
|
||||||
|
b, prev := fileScope.LookupParent(r.to, token.NoPos)
|
||||||
|
if b == fileScope {
|
||||||
|
r.errorf(from.Pos(), "renaming this %s %q to %q would conflict",
|
||||||
|
objectKind(from), from.Name(), r.to)
|
||||||
|
r.errorf(prev.Pos(), "\twith this %s",
|
||||||
|
objectKind(prev))
|
||||||
|
return // since checkInPackageBlock would report redundant errors
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for conflicts in lexical scope.
|
||||||
|
if from.Exported() {
|
||||||
|
for _, info := range r.packages {
|
||||||
|
r.checkInLexicalScope(from, info)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
r.checkInLexicalScope(from, info)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *renamer) checkInLocalScope(from types.Object) {
|
||||||
|
info := r.packages[from.Pkg()]
|
||||||
|
|
||||||
|
// Is this object an implicit local var for a type switch?
|
||||||
|
// Each case has its own var, whose position is the decl of y,
|
||||||
|
// but Ident in that decl does not appear in the Uses map.
|
||||||
|
//
|
||||||
|
// switch y := x.(type) { // Defs[Ident(y)] is undefined
|
||||||
|
// case int: print(y) // Implicits[CaseClause(int)] = Var(y_int)
|
||||||
|
// case string: print(y) // Implicits[CaseClause(string)] = Var(y_string)
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
var isCaseVar bool
|
||||||
|
for syntax, obj := range info.Implicits {
|
||||||
|
if _, ok := syntax.(*ast.CaseClause); ok && obj.Pos() == from.Pos() {
|
||||||
|
isCaseVar = true
|
||||||
|
r.check(obj)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
r.checkInLexicalScope(from, info)
|
||||||
|
|
||||||
|
// Finally, if this was a type switch, change the variable y.
|
||||||
|
if isCaseVar {
|
||||||
|
_, path, _ := r.iprog.PathEnclosingInterval(from.Pos(), from.Pos())
|
||||||
|
path[0].(*ast.Ident).Name = r.to // path is [Ident AssignStmt TypeSwitchStmt...]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// checkInLexicalScope performs safety checks that a renaming does not
|
||||||
|
// change the lexical reference structure of the specified package.
|
||||||
|
//
|
||||||
|
// For objects in lexical scope, there are three kinds of conflicts:
|
||||||
|
// same-, sub-, and super-block conflicts. We will illustrate all three
|
||||||
|
// using this example:
|
||||||
|
//
|
||||||
|
// var x int
|
||||||
|
// var z int
|
||||||
|
//
|
||||||
|
// func f(y int) {
|
||||||
|
// print(x)
|
||||||
|
// print(y)
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Renaming x to z encounters a SAME-BLOCK CONFLICT, because an object
|
||||||
|
// with the new name already exists, defined in the same lexical block
|
||||||
|
// as the old object.
|
||||||
|
//
|
||||||
|
// Renaming x to y encounters a SUB-BLOCK CONFLICT, because there exists
|
||||||
|
// a reference to x from within (what would become) a hole in its scope.
|
||||||
|
// The definition of y in an (inner) sub-block would cast a shadow in
|
||||||
|
// the scope of the renamed variable.
|
||||||
|
//
|
||||||
|
// Renaming y to x encounters a SUPER-BLOCK CONFLICT. This is the
|
||||||
|
// converse situation: there is an existing definition of the new name
|
||||||
|
// (x) in an (enclosing) super-block, and the renaming would create a
|
||||||
|
// hole in its scope, within which there exist references to it. The
|
||||||
|
// new name casts a shadow in scope of the existing definition of x in
|
||||||
|
// the super-block.
|
||||||
|
//
|
||||||
|
// Removing the old name (and all references to it) is always safe, and
|
||||||
|
// requires no checks.
|
||||||
|
//
|
||||||
|
func (r *renamer) checkInLexicalScope(from types.Object, info *loader.PackageInfo) {
|
||||||
|
b := from.Parent() // the block defining the 'from' object
|
||||||
|
if b != nil {
|
||||||
|
toBlock, to := b.LookupParent(r.to, from.Parent().End())
|
||||||
|
if toBlock == b {
|
||||||
|
// same-block conflict
|
||||||
|
r.errorf(from.Pos(), "renaming this %s %q to %q",
|
||||||
|
objectKind(from), from.Name(), r.to)
|
||||||
|
r.errorf(to.Pos(), "\tconflicts with %s in same block",
|
||||||
|
objectKind(to))
|
||||||
|
return
|
||||||
|
} else if toBlock != nil {
|
||||||
|
// Check for super-block conflict.
|
||||||
|
// The name r.to is defined in a superblock.
|
||||||
|
// Is that name referenced from within this block?
|
||||||
|
forEachLexicalRef(info, to, func(id *ast.Ident, block *types.Scope) bool {
|
||||||
|
_, obj := lexicalLookup(block, from.Name(), id.Pos())
|
||||||
|
if obj == from {
|
||||||
|
// super-block conflict
|
||||||
|
r.errorf(from.Pos(), "renaming this %s %q to %q",
|
||||||
|
objectKind(from), from.Name(), r.to)
|
||||||
|
r.errorf(id.Pos(), "\twould shadow this reference")
|
||||||
|
r.errorf(to.Pos(), "\tto the %s declared here",
|
||||||
|
objectKind(to))
|
||||||
|
return false // stop
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for sub-block conflict.
|
||||||
|
// Is there an intervening definition of r.to between
|
||||||
|
// the block defining 'from' and some reference to it?
|
||||||
|
forEachLexicalRef(info, from, func(id *ast.Ident, block *types.Scope) bool {
|
||||||
|
// Find the block that defines the found reference.
|
||||||
|
// It may be an ancestor.
|
||||||
|
fromBlock, _ := lexicalLookup(block, from.Name(), id.Pos())
|
||||||
|
|
||||||
|
// See what r.to would resolve to in the same scope.
|
||||||
|
toBlock, to := lexicalLookup(block, r.to, id.Pos())
|
||||||
|
if to != nil {
|
||||||
|
// sub-block conflict
|
||||||
|
if deeper(toBlock, fromBlock) {
|
||||||
|
r.errorf(from.Pos(), "renaming this %s %q to %q",
|
||||||
|
objectKind(from), from.Name(), r.to)
|
||||||
|
r.errorf(id.Pos(), "\twould cause this reference to become shadowed")
|
||||||
|
r.errorf(to.Pos(), "\tby this intervening %s definition",
|
||||||
|
objectKind(to))
|
||||||
|
return false // stop
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
|
||||||
|
// Renaming a type that is used as an embedded field
|
||||||
|
// requires renaming the field too. e.g.
|
||||||
|
// type T int // if we rename this to U..
|
||||||
|
// var s struct {T}
|
||||||
|
// print(s.T) // ...this must change too
|
||||||
|
if _, ok := from.(*types.TypeName); ok {
|
||||||
|
for id, obj := range info.Uses {
|
||||||
|
if obj == from {
|
||||||
|
if field := info.Defs[id]; field != nil {
|
||||||
|
r.check(field)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// lexicalLookup is like (*types.Scope).LookupParent but respects the
|
||||||
|
// environment visible at pos. It assumes the relative position
|
||||||
|
// information is correct with each file.
|
||||||
|
func lexicalLookup(block *types.Scope, name string, pos token.Pos) (*types.Scope, types.Object) {
|
||||||
|
for b := block; b != nil; b = b.Parent() {
|
||||||
|
obj := b.Lookup(name)
|
||||||
|
// The scope of a package-level object is the entire package,
|
||||||
|
// so ignore pos in that case.
|
||||||
|
// No analogous clause is needed for file-level objects
|
||||||
|
// since no reference can appear before an import decl.
|
||||||
|
if obj != nil && (b == obj.Pkg().Scope() || obj.Pos() < pos) {
|
||||||
|
return b, obj
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// deeper reports whether block x is lexically deeper than y.
|
||||||
|
func deeper(x, y *types.Scope) bool {
|
||||||
|
if x == y || x == nil {
|
||||||
|
return false
|
||||||
|
} else if y == nil {
|
||||||
|
return true
|
||||||
|
} else {
|
||||||
|
return deeper(x.Parent(), y.Parent())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// forEachLexicalRef calls fn(id, block) for each identifier id in package
|
||||||
|
// info that is a reference to obj in lexical scope. block is the
|
||||||
|
// lexical block enclosing the reference. If fn returns false the
|
||||||
|
// iteration is terminated and findLexicalRefs returns false.
|
||||||
|
func forEachLexicalRef(info *loader.PackageInfo, obj types.Object, fn func(id *ast.Ident, block *types.Scope) bool) bool {
|
||||||
|
ok := true
|
||||||
|
var stack []ast.Node
|
||||||
|
|
||||||
|
var visit func(n ast.Node) bool
|
||||||
|
visit = func(n ast.Node) bool {
|
||||||
|
if n == nil {
|
||||||
|
stack = stack[:len(stack)-1] // pop
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if !ok {
|
||||||
|
return false // bail out
|
||||||
|
}
|
||||||
|
|
||||||
|
stack = append(stack, n) // push
|
||||||
|
switch n := n.(type) {
|
||||||
|
case *ast.Ident:
|
||||||
|
if info.Uses[n] == obj {
|
||||||
|
block := enclosingBlock(&info.Info, stack)
|
||||||
|
if !fn(n, block) {
|
||||||
|
ok = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return visit(nil) // pop stack
|
||||||
|
|
||||||
|
case *ast.SelectorExpr:
|
||||||
|
// don't visit n.Sel
|
||||||
|
ast.Inspect(n.X, visit)
|
||||||
|
return visit(nil) // pop stack, don't descend
|
||||||
|
|
||||||
|
case *ast.CompositeLit:
|
||||||
|
// Handle recursion ourselves for struct literals
|
||||||
|
// so we don't visit field identifiers.
|
||||||
|
tv := info.Types[n]
|
||||||
|
if _, ok := deref(tv.Type).Underlying().(*types.Struct); ok {
|
||||||
|
if n.Type != nil {
|
||||||
|
ast.Inspect(n.Type, visit)
|
||||||
|
}
|
||||||
|
for _, elt := range n.Elts {
|
||||||
|
if kv, ok := elt.(*ast.KeyValueExpr); ok {
|
||||||
|
ast.Inspect(kv.Value, visit)
|
||||||
|
} else {
|
||||||
|
ast.Inspect(elt, visit)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return visit(nil) // pop stack, don't descend
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, f := range info.Files {
|
||||||
|
ast.Inspect(f, visit)
|
||||||
|
if len(stack) != 0 {
|
||||||
|
panic(stack)
|
||||||
|
}
|
||||||
|
if !ok {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ok
|
||||||
|
}
|
||||||
|
|
||||||
|
// enclosingBlock returns the innermost block enclosing the specified
|
||||||
|
// AST node, specified in the form of a path from the root of the file,
|
||||||
|
// [file...n].
|
||||||
|
func enclosingBlock(info *types.Info, stack []ast.Node) *types.Scope {
|
||||||
|
for i := range stack {
|
||||||
|
n := stack[len(stack)-1-i]
|
||||||
|
// For some reason, go/types always associates a
|
||||||
|
// function's scope with its FuncType.
|
||||||
|
// TODO(adonovan): feature or a bug?
|
||||||
|
switch f := n.(type) {
|
||||||
|
case *ast.FuncDecl:
|
||||||
|
n = f.Type
|
||||||
|
case *ast.FuncLit:
|
||||||
|
n = f.Type
|
||||||
|
}
|
||||||
|
if b := info.Scopes[n]; b != nil {
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
}
|
||||||
|
panic("no Scope for *ast.File")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *renamer) checkLabel(label *types.Label) {
|
||||||
|
// Check there are no identical labels in the function's label block.
|
||||||
|
// (Label blocks don't nest, so this is easy.)
|
||||||
|
if prev := label.Parent().Lookup(r.to); prev != nil {
|
||||||
|
r.errorf(label.Pos(), "renaming this label %q to %q", label.Name(), prev.Name())
|
||||||
|
r.errorf(prev.Pos(), "\twould conflict with this one")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// checkStructField checks that the field renaming will not cause
|
||||||
|
// conflicts at its declaration, or ambiguity or changes to any selection.
|
||||||
|
func (r *renamer) checkStructField(from *types.Var) {
|
||||||
|
// Check that the struct declaration is free of field conflicts,
|
||||||
|
// and field/method conflicts.
|
||||||
|
|
||||||
|
// go/types offers no easy way to get from a field (or interface
|
||||||
|
// method) to its declaring struct (or interface), so we must
|
||||||
|
// ascend the AST.
|
||||||
|
info, path, _ := r.iprog.PathEnclosingInterval(from.Pos(), from.Pos())
|
||||||
|
// path matches this pattern:
|
||||||
|
// [Ident SelectorExpr? StarExpr? Field FieldList StructType ParenExpr* ... File]
|
||||||
|
|
||||||
|
// Ascend to FieldList.
|
||||||
|
var i int
|
||||||
|
for {
|
||||||
|
if _, ok := path[i].(*ast.FieldList); ok {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
i++
|
||||||
|
tStruct := path[i].(*ast.StructType)
|
||||||
|
i++
|
||||||
|
// Ascend past parens (unlikely).
|
||||||
|
for {
|
||||||
|
_, ok := path[i].(*ast.ParenExpr)
|
||||||
|
if !ok {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
if spec, ok := path[i].(*ast.TypeSpec); ok {
|
||||||
|
// This struct is also a named type.
|
||||||
|
// We must check for direct (non-promoted) field/field
|
||||||
|
// and method/field conflicts.
|
||||||
|
named := info.Defs[spec.Name].Type()
|
||||||
|
prev, indices, _ := types.LookupFieldOrMethod(named, true, info.Pkg, r.to)
|
||||||
|
if len(indices) == 1 {
|
||||||
|
r.errorf(from.Pos(), "renaming this field %q to %q",
|
||||||
|
from.Name(), r.to)
|
||||||
|
r.errorf(prev.Pos(), "\twould conflict with this %s",
|
||||||
|
objectKind(prev))
|
||||||
|
return // skip checkSelections to avoid redundant errors
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// This struct is not a named type.
|
||||||
|
// We need only check for direct (non-promoted) field/field conflicts.
|
||||||
|
T := info.Types[tStruct].Type.Underlying().(*types.Struct)
|
||||||
|
for i := 0; i < T.NumFields(); i++ {
|
||||||
|
if prev := T.Field(i); prev.Name() == r.to {
|
||||||
|
r.errorf(from.Pos(), "renaming this field %q to %q",
|
||||||
|
from.Name(), r.to)
|
||||||
|
r.errorf(prev.Pos(), "\twould conflict with this field")
|
||||||
|
return // skip checkSelections to avoid redundant errors
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Renaming an anonymous field requires renaming the type too. e.g.
|
||||||
|
// print(s.T) // if we rename T to U,
|
||||||
|
// type T int // this and
|
||||||
|
// var s struct {T} // this must change too.
|
||||||
|
if from.Anonymous() {
|
||||||
|
if named, ok := from.Type().(*types.Named); ok {
|
||||||
|
r.check(named.Obj())
|
||||||
|
} else if named, ok := deref(from.Type()).(*types.Named); ok {
|
||||||
|
r.check(named.Obj())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check integrity of existing (field and method) selections.
|
||||||
|
r.checkSelections(from)
|
||||||
|
}
|
||||||
|
|
||||||
|
// checkSelection checks that all uses and selections that resolve to
|
||||||
|
// the specified object would continue to do so after the renaming.
|
||||||
|
func (r *renamer) checkSelections(from types.Object) {
|
||||||
|
for pkg, info := range r.packages {
|
||||||
|
if id := someUse(info, from); id != nil {
|
||||||
|
if !r.checkExport(id, pkg, from) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for syntax, sel := range info.Selections {
|
||||||
|
// There may be extant selections of only the old
|
||||||
|
// name or only the new name, so we must check both.
|
||||||
|
// (If neither, the renaming is sound.)
|
||||||
|
//
|
||||||
|
// In both cases, we wish to compare the lengths
|
||||||
|
// of the implicit field path (Selection.Index)
|
||||||
|
// to see if the renaming would change it.
|
||||||
|
//
|
||||||
|
// If a selection that resolves to 'from', when renamed,
|
||||||
|
// would yield a path of the same or shorter length,
|
||||||
|
// this indicates ambiguity or a changed referent,
|
||||||
|
// analogous to same- or sub-block lexical conflict.
|
||||||
|
//
|
||||||
|
// If a selection using the name 'to' would
|
||||||
|
// yield a path of the same or shorter length,
|
||||||
|
// this indicates ambiguity or shadowing,
|
||||||
|
// analogous to same- or super-block lexical conflict.
|
||||||
|
|
||||||
|
// TODO(adonovan): fix: derive from Types[syntax.X].Mode
|
||||||
|
// TODO(adonovan): test with pointer, value, addressable value.
|
||||||
|
isAddressable := true
|
||||||
|
|
||||||
|
if sel.Obj() == from {
|
||||||
|
if obj, indices, _ := types.LookupFieldOrMethod(sel.Recv(), isAddressable, from.Pkg(), r.to); obj != nil {
|
||||||
|
// Renaming this existing selection of
|
||||||
|
// 'from' may block access to an existing
|
||||||
|
// type member named 'to'.
|
||||||
|
delta := len(indices) - len(sel.Index())
|
||||||
|
if delta > 0 {
|
||||||
|
continue // no ambiguity
|
||||||
|
}
|
||||||
|
r.selectionConflict(from, delta, syntax, obj)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
} else if sel.Obj().Name() == r.to {
|
||||||
|
if obj, indices, _ := types.LookupFieldOrMethod(sel.Recv(), isAddressable, from.Pkg(), from.Name()); obj == from {
|
||||||
|
// Renaming 'from' may cause this existing
|
||||||
|
// selection of the name 'to' to change
|
||||||
|
// its meaning.
|
||||||
|
delta := len(indices) - len(sel.Index())
|
||||||
|
if delta > 0 {
|
||||||
|
continue // no ambiguity
|
||||||
|
}
|
||||||
|
r.selectionConflict(from, -delta, syntax, sel.Obj())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *renamer) selectionConflict(from types.Object, delta int, syntax *ast.SelectorExpr, obj types.Object) {
|
||||||
|
r.errorf(from.Pos(), "renaming this %s %q to %q",
|
||||||
|
objectKind(from), from.Name(), r.to)
|
||||||
|
|
||||||
|
switch {
|
||||||
|
case delta < 0:
|
||||||
|
// analogous to sub-block conflict
|
||||||
|
r.errorf(syntax.Sel.Pos(),
|
||||||
|
"\twould change the referent of this selection")
|
||||||
|
r.errorf(obj.Pos(), "\tof this %s", objectKind(obj))
|
||||||
|
case delta == 0:
|
||||||
|
// analogous to same-block conflict
|
||||||
|
r.errorf(syntax.Sel.Pos(),
|
||||||
|
"\twould make this reference ambiguous")
|
||||||
|
r.errorf(obj.Pos(), "\twith this %s", objectKind(obj))
|
||||||
|
case delta > 0:
|
||||||
|
// analogous to super-block conflict
|
||||||
|
r.errorf(syntax.Sel.Pos(),
|
||||||
|
"\twould shadow this selection")
|
||||||
|
r.errorf(obj.Pos(), "\tof the %s declared here",
|
||||||
|
objectKind(obj))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// checkMethod performs safety checks for renaming a method.
|
||||||
|
// There are three hazards:
|
||||||
|
// - declaration conflicts
|
||||||
|
// - selection ambiguity/changes
|
||||||
|
// - entailed renamings of assignable concrete/interface types.
|
||||||
|
// We reject renamings initiated at concrete methods if it would
|
||||||
|
// change the assignability relation. For renamings of abstract
|
||||||
|
// methods, we rename all methods transitively coupled to it via
|
||||||
|
// assignability.
|
||||||
|
func (r *renamer) checkMethod(from *types.Func) {
|
||||||
|
// e.g. error.Error
|
||||||
|
if from.Pkg() == nil {
|
||||||
|
r.errorf(from.Pos(), "you cannot rename built-in method %s", from)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// ASSIGNABILITY: We reject renamings of concrete methods that
|
||||||
|
// would break a 'satisfy' constraint; but renamings of abstract
|
||||||
|
// methods are allowed to proceed, and we rename affected
|
||||||
|
// concrete and abstract methods as necessary. It is the
|
||||||
|
// initial method that determines the policy.
|
||||||
|
|
||||||
|
// Check for conflict at point of declaration.
|
||||||
|
// Check to ensure preservation of assignability requirements.
|
||||||
|
R := recv(from).Type()
|
||||||
|
if isInterface(R) {
|
||||||
|
// Abstract method
|
||||||
|
|
||||||
|
// declaration
|
||||||
|
prev, _, _ := types.LookupFieldOrMethod(R, false, from.Pkg(), r.to)
|
||||||
|
if prev != nil {
|
||||||
|
r.errorf(from.Pos(), "renaming this interface method %q to %q",
|
||||||
|
from.Name(), r.to)
|
||||||
|
r.errorf(prev.Pos(), "\twould conflict with this method")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check all interfaces that embed this one for
|
||||||
|
// declaration conflicts too.
|
||||||
|
for _, info := range r.packages {
|
||||||
|
// Start with named interface types (better errors)
|
||||||
|
for _, obj := range info.Defs {
|
||||||
|
if obj, ok := obj.(*types.TypeName); ok && isInterface(obj.Type()) {
|
||||||
|
f, _, _ := types.LookupFieldOrMethod(
|
||||||
|
obj.Type(), false, from.Pkg(), from.Name())
|
||||||
|
if f == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
t, _, _ := types.LookupFieldOrMethod(
|
||||||
|
obj.Type(), false, from.Pkg(), r.to)
|
||||||
|
if t == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
r.errorf(from.Pos(), "renaming this interface method %q to %q",
|
||||||
|
from.Name(), r.to)
|
||||||
|
r.errorf(t.Pos(), "\twould conflict with this method")
|
||||||
|
r.errorf(obj.Pos(), "\tin named interface type %q", obj.Name())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Now look at all literal interface types (includes named ones again).
|
||||||
|
for e, tv := range info.Types {
|
||||||
|
if e, ok := e.(*ast.InterfaceType); ok {
|
||||||
|
_ = e
|
||||||
|
_ = tv.Type.(*types.Interface)
|
||||||
|
// TODO(adonovan): implement same check as above.
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// assignability
|
||||||
|
//
|
||||||
|
// Find the set of concrete or abstract methods directly
|
||||||
|
// coupled to abstract method 'from' by some
|
||||||
|
// satisfy.Constraint, and rename them too.
|
||||||
|
for key := range r.satisfy() {
|
||||||
|
// key = (lhs, rhs) where lhs is always an interface.
|
||||||
|
|
||||||
|
lsel := r.msets.MethodSet(key.LHS).Lookup(from.Pkg(), from.Name())
|
||||||
|
if lsel == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
rmethods := r.msets.MethodSet(key.RHS)
|
||||||
|
rsel := rmethods.Lookup(from.Pkg(), from.Name())
|
||||||
|
if rsel == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// If both sides have a method of this name,
|
||||||
|
// and one of them is m, the other must be coupled.
|
||||||
|
var coupled *types.Func
|
||||||
|
switch from {
|
||||||
|
case lsel.Obj():
|
||||||
|
coupled = rsel.Obj().(*types.Func)
|
||||||
|
case rsel.Obj():
|
||||||
|
coupled = lsel.Obj().(*types.Func)
|
||||||
|
default:
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// We must treat concrete-to-interface
|
||||||
|
// constraints like an implicit selection C.f of
|
||||||
|
// each interface method I.f, and check that the
|
||||||
|
// renaming leaves the selection unchanged and
|
||||||
|
// unambiguous.
|
||||||
|
//
|
||||||
|
// Fun fact: the implicit selection of C.f
|
||||||
|
// type I interface{f()}
|
||||||
|
// type C struct{I}
|
||||||
|
// func (C) g()
|
||||||
|
// var _ I = C{} // here
|
||||||
|
// yields abstract method I.f. This can make error
|
||||||
|
// messages less than obvious.
|
||||||
|
//
|
||||||
|
if !isInterface(key.RHS) {
|
||||||
|
// The logic below was derived from checkSelections.
|
||||||
|
|
||||||
|
rtosel := rmethods.Lookup(from.Pkg(), r.to)
|
||||||
|
if rtosel != nil {
|
||||||
|
rto := rtosel.Obj().(*types.Func)
|
||||||
|
delta := len(rsel.Index()) - len(rtosel.Index())
|
||||||
|
if delta < 0 {
|
||||||
|
continue // no ambiguity
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO(adonovan): record the constraint's position.
|
||||||
|
keyPos := token.NoPos
|
||||||
|
|
||||||
|
r.errorf(from.Pos(), "renaming this method %q to %q",
|
||||||
|
from.Name(), r.to)
|
||||||
|
if delta == 0 {
|
||||||
|
// analogous to same-block conflict
|
||||||
|
r.errorf(keyPos, "\twould make the %s method of %s invoked via interface %s ambiguous",
|
||||||
|
r.to, key.RHS, key.LHS)
|
||||||
|
r.errorf(rto.Pos(), "\twith (%s).%s",
|
||||||
|
recv(rto).Type(), r.to)
|
||||||
|
} else {
|
||||||
|
// analogous to super-block conflict
|
||||||
|
r.errorf(keyPos, "\twould change the %s method of %s invoked via interface %s",
|
||||||
|
r.to, key.RHS, key.LHS)
|
||||||
|
r.errorf(coupled.Pos(), "\tfrom (%s).%s",
|
||||||
|
recv(coupled).Type(), r.to)
|
||||||
|
r.errorf(rto.Pos(), "\tto (%s).%s",
|
||||||
|
recv(rto).Type(), r.to)
|
||||||
|
}
|
||||||
|
return // one error is enough
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !r.changeMethods {
|
||||||
|
// This should be unreachable.
|
||||||
|
r.errorf(from.Pos(), "internal error: during renaming of abstract method %s", from)
|
||||||
|
r.errorf(coupled.Pos(), "\tchangedMethods=false, coupled method=%s", coupled)
|
||||||
|
r.errorf(from.Pos(), "\tPlease file a bug report")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rename the coupled method to preserve assignability.
|
||||||
|
r.check(coupled)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Concrete method
|
||||||
|
|
||||||
|
// declaration
|
||||||
|
prev, indices, _ := types.LookupFieldOrMethod(R, true, from.Pkg(), r.to)
|
||||||
|
if prev != nil && len(indices) == 1 {
|
||||||
|
r.errorf(from.Pos(), "renaming this method %q to %q",
|
||||||
|
from.Name(), r.to)
|
||||||
|
r.errorf(prev.Pos(), "\twould conflict with this %s",
|
||||||
|
objectKind(prev))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// assignability
|
||||||
|
//
|
||||||
|
// Find the set of abstract methods coupled to concrete
|
||||||
|
// method 'from' by some satisfy.Constraint, and rename
|
||||||
|
// them too.
|
||||||
|
//
|
||||||
|
// Coupling may be indirect, e.g. I.f <-> C.f via type D.
|
||||||
|
//
|
||||||
|
// type I interface {f()}
|
||||||
|
// type C int
|
||||||
|
// type (C) f()
|
||||||
|
// type D struct{C}
|
||||||
|
// var _ I = D{}
|
||||||
|
//
|
||||||
|
for key := range r.satisfy() {
|
||||||
|
// key = (lhs, rhs) where lhs is always an interface.
|
||||||
|
if isInterface(key.RHS) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
rsel := r.msets.MethodSet(key.RHS).Lookup(from.Pkg(), from.Name())
|
||||||
|
if rsel == nil || rsel.Obj() != from {
|
||||||
|
continue // rhs does not have the method
|
||||||
|
}
|
||||||
|
lsel := r.msets.MethodSet(key.LHS).Lookup(from.Pkg(), from.Name())
|
||||||
|
if lsel == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
imeth := lsel.Obj().(*types.Func)
|
||||||
|
|
||||||
|
// imeth is the abstract method (e.g. I.f)
|
||||||
|
// and key.RHS is the concrete coupling type (e.g. D).
|
||||||
|
if !r.changeMethods {
|
||||||
|
r.errorf(from.Pos(), "renaming this method %q to %q",
|
||||||
|
from.Name(), r.to)
|
||||||
|
var pos token.Pos
|
||||||
|
var iface string
|
||||||
|
|
||||||
|
I := recv(imeth).Type()
|
||||||
|
if named, ok := I.(*types.Named); ok {
|
||||||
|
pos = named.Obj().Pos()
|
||||||
|
iface = "interface " + named.Obj().Name()
|
||||||
|
} else {
|
||||||
|
pos = from.Pos()
|
||||||
|
iface = I.String()
|
||||||
|
}
|
||||||
|
r.errorf(pos, "\twould make %s no longer assignable to %s",
|
||||||
|
key.RHS, iface)
|
||||||
|
r.errorf(imeth.Pos(), "\t(rename %s.%s if you intend to change both types)",
|
||||||
|
I, from.Name())
|
||||||
|
return // one error is enough
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rename the coupled interface method to preserve assignability.
|
||||||
|
r.check(imeth)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check integrity of existing (field and method) selections.
|
||||||
|
// We skip this if there were errors above, to avoid redundant errors.
|
||||||
|
r.checkSelections(from)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *renamer) checkExport(id *ast.Ident, pkg *types.Package, from types.Object) bool {
|
||||||
|
// Reject cross-package references if r.to is unexported.
|
||||||
|
// (Such references may be qualified identifiers or field/method
|
||||||
|
// selections.)
|
||||||
|
if !ast.IsExported(r.to) && pkg != from.Pkg() {
|
||||||
|
r.errorf(from.Pos(),
|
||||||
|
"renaming this %s %q to %q would make it unexported",
|
||||||
|
objectKind(from), from.Name(), r.to)
|
||||||
|
r.errorf(id.Pos(), "\tbreaking references from packages such as %q",
|
||||||
|
pkg.Path())
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// satisfy returns the set of interface satisfaction constraints.
|
||||||
|
func (r *renamer) satisfy() map[satisfy.Constraint]bool {
|
||||||
|
if r.satisfyConstraints == nil {
|
||||||
|
// Compute on demand: it's expensive.
|
||||||
|
var f satisfy.Finder
|
||||||
|
for _, info := range r.packages {
|
||||||
|
f.Find(&info.Info, info.Files)
|
||||||
|
}
|
||||||
|
r.satisfyConstraints = f.Result
|
||||||
|
}
|
||||||
|
return r.satisfyConstraints
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- helpers ----------------------------------------------------------
|
||||||
|
|
||||||
|
// recv returns the method's receiver.
|
||||||
|
func recv(meth *types.Func) *types.Var {
|
||||||
|
return meth.Type().(*types.Signature).Recv()
|
||||||
|
}
|
||||||
|
|
||||||
|
// someUse returns an arbitrary use of obj within info.
|
||||||
|
func someUse(info *loader.PackageInfo, obj types.Object) *ast.Ident {
|
||||||
|
for id, o := range info.Uses {
|
||||||
|
if o == obj {
|
||||||
|
return id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- Plundered from golang.org/x/tools/go/ssa -----------------
|
||||||
|
|
||||||
|
func isInterface(T types.Type) bool { return types.IsInterface(T) }
|
||||||
|
|
||||||
|
func deref(typ types.Type) types.Type {
|
||||||
|
if p, _ := typ.(*types.Pointer); p != nil {
|
||||||
|
return p.Elem()
|
||||||
|
}
|
||||||
|
return typ
|
||||||
|
}
|
@ -2,7 +2,7 @@
|
|||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
// +build go1.5
|
// +build !go1.8
|
||||||
|
|
||||||
package rename
|
package rename
|
||||||
|
|
||||||
|
107
refactor/rename/util18.go
Normal file
107
refactor/rename/util18.go
Normal file
@ -0,0 +1,107 @@
|
|||||||
|
// Copyright 2014 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.8
|
||||||
|
|
||||||
|
package rename
|
||||||
|
|
||||||
|
import (
|
||||||
|
"go/ast"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"reflect"
|
||||||
|
"runtime"
|
||||||
|
"strings"
|
||||||
|
"unicode"
|
||||||
|
|
||||||
|
"golang.org/x/tools/go/ast/astutil"
|
||||||
|
)
|
||||||
|
|
||||||
|
func objectKind(obj types.Object) string {
|
||||||
|
switch obj := obj.(type) {
|
||||||
|
case *types.PkgName:
|
||||||
|
return "imported package name"
|
||||||
|
case *types.TypeName:
|
||||||
|
return "type"
|
||||||
|
case *types.Var:
|
||||||
|
if obj.IsField() {
|
||||||
|
return "field"
|
||||||
|
}
|
||||||
|
case *types.Func:
|
||||||
|
if obj.Type().(*types.Signature).Recv() != nil {
|
||||||
|
return "method"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// label, func, var, const
|
||||||
|
return strings.ToLower(strings.TrimPrefix(reflect.TypeOf(obj).String(), "*types."))
|
||||||
|
}
|
||||||
|
|
||||||
|
func typeKind(T types.Type) string {
|
||||||
|
return strings.ToLower(strings.TrimPrefix(reflect.TypeOf(T.Underlying()).String(), "*types."))
|
||||||
|
}
|
||||||
|
|
||||||
|
// NB: for renamings, blank is not considered valid.
|
||||||
|
func isValidIdentifier(id string) bool {
|
||||||
|
if id == "" || id == "_" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
for i, r := range id {
|
||||||
|
if !isLetter(r) && (i == 0 || !isDigit(r)) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return token.Lookup(id) == token.IDENT
|
||||||
|
}
|
||||||
|
|
||||||
|
// isLocal reports whether obj is local to some function.
|
||||||
|
// Precondition: not a struct field or interface method.
|
||||||
|
func isLocal(obj types.Object) bool {
|
||||||
|
// [... 5=stmt 4=func 3=file 2=pkg 1=universe]
|
||||||
|
var depth int
|
||||||
|
for scope := obj.Parent(); scope != nil; scope = scope.Parent() {
|
||||||
|
depth++
|
||||||
|
}
|
||||||
|
return depth >= 4
|
||||||
|
}
|
||||||
|
|
||||||
|
func isPackageLevel(obj types.Object) bool {
|
||||||
|
return obj.Pkg().Scope().Lookup(obj.Name()) == obj
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- Plundered from go/scanner: ---------------------------------------
|
||||||
|
|
||||||
|
// isLetter reports whether ch is a letter in the sense of the Go
// identifier grammar: an ASCII letter, an underscore, or a non-ASCII
// Unicode letter.
func isLetter(ch rune) bool {
	switch {
	case 'a' <= ch && ch <= 'z', 'A' <= ch && ch <= 'Z', ch == '_':
		return true
	default:
		return ch >= 0x80 && unicode.IsLetter(ch)
	}
}
|
||||||
|
|
||||||
|
// isDigit reports whether ch is a decimal digit per the Go identifier
// grammar: an ASCII digit or a non-ASCII Unicode decimal digit.
func isDigit(ch rune) bool {
	if '0' <= ch && ch <= '9' {
		return true
	}
	return ch >= 0x80 && unicode.IsDigit(ch)
}
|
||||||
|
|
||||||
|
// -- Plundered from golang.org/x/tools/cmd/guru -----------------
|
||||||
|
|
||||||
|
// sameFile returns true if x and y have the same basename and denote
// the same file: either the paths compare textually equal, or both
// stat successfully and os.SameFile says they are one file.
//
func sameFile(x, y string) bool {
	if runtime.GOOS == "windows" {
		x, y = filepath.ToSlash(x), filepath.ToSlash(y)
	}
	if x == y {
		return true
	}
	// Differing basenames can never denote the same file; checking
	// them first avoids needless Stat calls. (optimisation)
	if filepath.Base(x) != filepath.Base(y) {
		return false
	}
	xi, err := os.Stat(x)
	if err != nil {
		return false
	}
	yi, err := os.Stat(y)
	if err != nil {
		return false
	}
	return os.SameFile(xi, yi)
}
|
||||||
|
|
||||||
|
// unparen returns e with any enclosing parentheses stripped (delegates to astutil).
func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) }
|
@ -2,7 +2,7 @@
|
|||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
// +build go1.5
|
// +build !go1.8
|
||||||
|
|
||||||
// Package satisfy inspects the type-checked ASTs of Go packages and
|
// Package satisfy inspects the type-checked ASTs of Go packages and
|
||||||
// reports the set of discovered type constraints of the form (lhs, rhs
|
// reports the set of discovered type constraints of the form (lhs, rhs
|
||||||
|
707
refactor/satisfy/find18.go
Normal file
707
refactor/satisfy/find18.go
Normal file
@ -0,0 +1,707 @@
|
|||||||
|
// Copyright 2014 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.8
|
||||||
|
|
||||||
|
// Package satisfy inspects the type-checked ASTs of Go packages and
|
||||||
|
// reports the set of discovered type constraints of the form (lhs, rhs
|
||||||
|
// Type) where lhs is a non-trivial interface, rhs satisfies this
|
||||||
|
// interface, and this fact is necessary for the package to be
|
||||||
|
// well-typed.
|
||||||
|
//
|
||||||
|
// THIS PACKAGE IS EXPERIMENTAL AND MAY CHANGE AT ANY TIME.
|
||||||
|
//
|
||||||
|
// It is provided only for the gorename tool. Ideally this
|
||||||
|
// functionality will become part of the type-checker in due course,
|
||||||
|
// since it is computing it anyway, and it is robust for ill-typed
|
||||||
|
// inputs, which this package is not.
|
||||||
|
//
|
||||||
|
package satisfy // import "golang.org/x/tools/refactor/satisfy"
|
||||||
|
|
||||||
|
// NOTES:
|
||||||
|
//
|
||||||
|
// We don't care about numeric conversions, so we don't descend into
|
||||||
|
// types or constant expressions. This is unsound because
|
||||||
|
// constant expressions can contain arbitrary statements, e.g.
|
||||||
|
// const x = len([1]func(){func() {
|
||||||
|
// ...
|
||||||
|
// }})
|
||||||
|
//
|
||||||
|
// TODO(adonovan): make this robust against ill-typed input.
|
||||||
|
// Or move it into the type-checker.
|
||||||
|
//
|
||||||
|
// Assignability conversions are possible in the following places:
|
||||||
|
// - in assignments y = x, y := x, var y = x.
|
||||||
|
// - from call argument types to formal parameter types
|
||||||
|
// - in append and delete calls
|
||||||
|
// - from return operands to result parameter types
|
||||||
|
// - in composite literal T{k:v}, from k and v to T's field/element/key type
|
||||||
|
// - in map[key] from key to the map's key type
|
||||||
|
// - in comparisons x==y and switch x { case y: }.
|
||||||
|
// - in explicit conversions T(x)
|
||||||
|
// - in sends ch <- x, from x to the channel element type
|
||||||
|
// - in type assertions x.(T) and switch x.(type) { case T: }
|
||||||
|
//
|
||||||
|
// The results of this pass provide information equivalent to the
|
||||||
|
// ssa.MakeInterface and ssa.ChangeInterface instructions.
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
|
||||||
|
"golang.org/x/tools/go/ast/astutil"
|
||||||
|
"golang.org/x/tools/go/types/typeutil"
|
||||||
|
)
|
||||||
|
|
||||||
|
// A Constraint records the fact that the RHS type does and must
// satisfy the LHS type, which is an interface.
// The names are suggestive of an assignment statement LHS = RHS.
type Constraint struct {
	LHS, RHS types.Type // LHS is the interface; RHS is a type whose method set satisfies it
}
|
||||||
|
|
||||||
|
// A Finder inspects the type-checked ASTs of Go packages and
// accumulates the set of type constraints (x, y) such that x is
// assignable to y, y is an interface, and both x and y have methods.
//
// In other words, it returns the subset of the "implements" relation
// that is checked during compilation of a package. Refactoring tools
// will need to preserve at least this part of the relation to ensure
// continued compilation.
//
type Finder struct {
	Result    map[Constraint]bool     // accumulated constraints (non-canonical; may hold duplicates)
	msetcache typeutil.MethodSetCache // memoizes method-set computations across assign calls

	// per-Find state, valid only for the duration of a Find call
	info *types.Info      // type information for the package under inspection
	sig  *types.Signature // signature of the function literal/decl currently being visited
}
|
||||||
|
|
||||||
|
// Find inspects a single package, populating Result with its pairs of
// constrained types.
//
// The result is non-canonical and thus may contain duplicates (but this
// tends to preserves names of interface types better).
//
// The package must be free of type errors, and
// info.{Defs,Uses,Selections,Types} must have been populated by the
// type-checker.
//
func (f *Finder) Find(info *types.Info, files []*ast.File) {
	if f.Result == nil {
		f.Result = make(map[Constraint]bool)
	}

	f.info = info
	for _, file := range files {
		for _, d := range file.Decls {
			switch d := d.(type) {
			case *ast.GenDecl:
				if d.Tok == token.VAR { // ignore consts: no conversions of interest there
					for _, spec := range d.Specs {
						f.valueSpec(spec.(*ast.ValueSpec))
					}
				}

			case *ast.FuncDecl:
				if d.Body != nil {
					// Record the enclosing signature so return
					// statements can find the result parameters.
					f.sig = f.info.Defs[d.Name].Type().(*types.Signature)
					f.stmt(d.Body)
					f.sig = nil
				}
			}
		}
	}
	f.info = nil // release per-call state
}
|
||||||
|
|
||||||
|
// Frequently used sentinel types from the type-checker's universe.
var (
	tInvalid     = types.Typ[types.Invalid]     // returned for unresolvable expressions
	tUntypedBool = types.Typ[types.UntypedBool] // implicit tag of a tagless switch
	tUntypedNil  = types.Typ[types.UntypedNil]  // "case nil:" in a type switch
)
|
||||||
|
|
||||||
|
// exprN visits an expression in a multi-value context (e.g. the RHS of
// a 2-result assignment) and returns its tuple type.
func (f *Finder) exprN(e ast.Expr) types.Type {
	typ := f.info.Types[e].Type.(*types.Tuple)
	switch e := e.(type) {
	case *ast.ParenExpr:
		return f.exprN(e.X)

	case *ast.CallExpr:
		// x, err := f(args)
		sig := f.expr(e.Fun).Underlying().(*types.Signature)
		f.call(sig, e.Args)

	case *ast.IndexExpr:
		// y, ok := x[i]
		x := f.expr(e.X)
		// NOTE(review): argument order (index, key) mirrors the
		// IndexExpr case of expr, which calls assign(ux.Key(), i)
		// with the key type first — confirm this asymmetry is
		// intentional.
		f.assign(f.expr(e.Index), x.Underlying().(*types.Map).Key())

	case *ast.TypeAssertExpr:
		// y, ok := x.(T)
		f.typeAssert(f.expr(e.X), typ.At(0).Type())

	case *ast.UnaryExpr: // must be receive <-
		// y, ok := <-x
		f.expr(e.X)

	default:
		panic(e)
	}
	return typ
}
|
||||||
|
|
||||||
|
// call records the assignability constraints from the actual argument
// expressions args to the formal parameters of signature sig, handling
// ellipsis calls, f(g()) tuple-spread calls, and variadic parameters.
func (f *Finder) call(sig *types.Signature, args []ast.Expr) {
	if len(args) == 0 {
		return
	}

	// Ellipsis call? e.g. f(x, y, z...)
	if _, ok := args[len(args)-1].(*ast.Ellipsis); ok {
		for i, arg := range args {
			// The final arg is a slice, and so is the final param.
			f.assign(sig.Params().At(i).Type(), f.expr(arg))
		}
		return
	}

	var argtypes []types.Type

	// Gather the effective actual parameter types.
	if tuple, ok := f.info.Types[args[0]].Type.(*types.Tuple); ok {
		// f(g()) call where g has multiple results?
		f.expr(args[0])
		// unpack the tuple
		for i := 0; i < tuple.Len(); i++ {
			argtypes = append(argtypes, tuple.At(i).Type())
		}
	} else {
		for _, arg := range args {
			argtypes = append(argtypes, f.expr(arg))
		}
	}

	// Assign the actuals to the formals.
	if !sig.Variadic() {
		for i, argtype := range argtypes {
			f.assign(sig.Params().At(i).Type(), argtype)
		}
	} else {
		// The first n-1 parameters are assigned normally.
		nnormals := sig.Params().Len() - 1
		for i, argtype := range argtypes[:nnormals] {
			f.assign(sig.Params().At(i).Type(), argtype)
		}
		// Remaining args are assigned to elements of varargs slice.
		tElem := sig.Params().At(nnormals).Type().(*types.Slice).Elem()
		for i := nnormals; i < len(argtypes); i++ {
			f.assign(tElem, argtypes[i])
		}
	}
}
|
||||||
|
|
||||||
|
// builtin visits a call to the built-in function obj with signature
// sig and arguments args, recording any assignability constraints the
// call implies, and returns the call's result type T.
// Builtins with type operands (make, new) or special assignment rules
// (append, delete) need bespoke handling; all others behave like an
// ordinary call.
func (f *Finder) builtin(obj *types.Builtin, sig *types.Signature, args []ast.Expr, T types.Type) types.Type {
	switch obj.Name() {
	case "make", "new":
		// skip the type operand
		for _, arg := range args[1:] {
			f.expr(arg)
		}

	case "append":
		s := f.expr(args[0])
		if _, ok := args[len(args)-1].(*ast.Ellipsis); ok && len(args) == 2 {
			// append(x, y...) including append([]byte, "foo"...)
			f.expr(args[1])
		} else {
			// append(x, y, z): each added element is assigned to the
			// slice's element type.
			tElem := s.Underlying().(*types.Slice).Elem()
			for _, arg := range args[1:] {
				f.assign(tElem, f.expr(arg))
			}
		}

	case "delete":
		m := f.expr(args[0])
		k := f.expr(args[1])
		// The key argument is assigned to the map's key type.
		f.assign(m.Underlying().(*types.Map).Key(), k)

	default:
		// ordinary call
		f.call(sig, args)
	}

	return T
}
|
||||||
|
|
||||||
|
func (f *Finder) extract(tuple types.Type, i int) types.Type {
|
||||||
|
if tuple, ok := tuple.(*types.Tuple); ok && i < tuple.Len() {
|
||||||
|
return tuple.At(i).Type()
|
||||||
|
}
|
||||||
|
return tInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
// valueSpec visits a var declaration spec, recording the assignability
// constraints from its initializer expressions to its declared type
// (if any), in both the 1:1 and the n:1 (tuple) forms.
func (f *Finder) valueSpec(spec *ast.ValueSpec) {
	var T types.Type
	if spec.Type != nil {
		T = f.info.Types[spec.Type].Type
	}
	switch len(spec.Values) {
	case len(spec.Names): // e.g. var x, y = f(), g()
		for _, value := range spec.Values {
			v := f.expr(value)
			if T != nil {
				f.assign(T, v)
			}
		}

	case 1: // e.g. var x, y = f()
		tuple := f.exprN(spec.Values[0])
		for i := range spec.Names {
			if T != nil {
				f.assign(T, f.extract(tuple, i))
			}
		}
	}
	// (len(spec.Values) == 0, i.e. "var x T", implies no conversion.)
}
|
||||||
|
|
||||||
|
// assign records pairs of distinct types that are related by
|
||||||
|
// assignability, where the left-hand side is an interface and both
|
||||||
|
// sides have methods.
|
||||||
|
//
|
||||||
|
// It should be called for all assignability checks, type assertions,
|
||||||
|
// explicit conversions and comparisons between two types, unless the
|
||||||
|
// types are uninteresting (e.g. lhs is a concrete type, or the empty
|
||||||
|
// interface; rhs has no methods).
|
||||||
|
//
|
||||||
|
func (f *Finder) assign(lhs, rhs types.Type) {
|
||||||
|
if types.Identical(lhs, rhs) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if !isInterface(lhs) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if f.msetcache.MethodSet(lhs).Len() == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if f.msetcache.MethodSet(rhs).Len() == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// record the pair
|
||||||
|
f.Result[Constraint{lhs, rhs}] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// typeAssert must be called for each type assertion x.(T) where x has
|
||||||
|
// interface type I.
|
||||||
|
func (f *Finder) typeAssert(I, T types.Type) {
|
||||||
|
// Type assertions are slightly subtle, because they are allowed
|
||||||
|
// to be "impossible", e.g.
|
||||||
|
//
|
||||||
|
// var x interface{f()}
|
||||||
|
// _ = x.(interface{f()int}) // legal
|
||||||
|
//
|
||||||
|
// (In hindsight, the language spec should probably not have
|
||||||
|
// allowed this, but it's too late to fix now.)
|
||||||
|
//
|
||||||
|
// This means that a type assert from I to T isn't exactly a
|
||||||
|
// constraint that T is assignable to I, but for a refactoring
|
||||||
|
// tool it is a conditional constraint that, if T is assignable
|
||||||
|
// to I before a refactoring, it should remain so after.
|
||||||
|
|
||||||
|
if types.AssignableTo(T, I) {
|
||||||
|
f.assign(I, T)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// compare must be called for each comparison x==y.
|
||||||
|
func (f *Finder) compare(x, y types.Type) {
|
||||||
|
if types.AssignableTo(x, y) {
|
||||||
|
f.assign(y, x)
|
||||||
|
} else if types.AssignableTo(y, x) {
|
||||||
|
f.assign(x, y)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// expr visits a true expression (not a type or defining ident)
// and returns its type, recording any assignability constraints
// implied by subexpressions (composite literals, conversions, map
// indexing, comparisons, calls, type assertions).
func (f *Finder) expr(e ast.Expr) types.Type {
	tv := f.info.Types[e]
	if tv.Value != nil {
		return tv.Type // prune the descent for constants
	}

	// tv.Type may be nil for an ast.Ident.

	switch e := e.(type) {
	case *ast.BadExpr, *ast.BasicLit:
		// no-op

	case *ast.Ident:
		// (referring idents only)
		if obj, ok := f.info.Uses[e]; ok {
			return obj.Type()
		}
		if e.Name == "_" { // e.g. "for _ = range x"
			return tInvalid
		}
		panic("undefined ident: " + e.Name)

	case *ast.Ellipsis:
		if e.Elt != nil {
			f.expr(e.Elt)
		}

	case *ast.FuncLit:
		// Visit the body under the literal's own signature, then
		// restore the enclosing one.
		saved := f.sig
		f.sig = tv.Type.(*types.Signature)
		f.stmt(e.Body)
		f.sig = saved

	case *ast.CompositeLit:
		switch T := deref(tv.Type).Underlying().(type) {
		case *types.Struct:
			for i, elem := range e.Elts {
				if kv, ok := elem.(*ast.KeyValueExpr); ok {
					// T{field: value}: value is assigned to the field's type.
					f.assign(f.info.Uses[kv.Key.(*ast.Ident)].Type(), f.expr(kv.Value))
				} else {
					// Positional T{v1, v2, ...}.
					f.assign(T.Field(i).Type(), f.expr(elem))
				}
			}

		case *types.Map:
			for _, elem := range e.Elts {
				elem := elem.(*ast.KeyValueExpr)
				f.assign(T.Key(), f.expr(elem.Key))
				f.assign(T.Elem(), f.expr(elem.Value))
			}

		case *types.Array, *types.Slice:
			// Both kinds expose Elem(); use a small ad hoc interface
			// to extract it uniformly.
			tElem := T.(interface {
				Elem() types.Type
			}).Elem()
			for _, elem := range e.Elts {
				if kv, ok := elem.(*ast.KeyValueExpr); ok {
					// ignore the key (an integer index)
					f.assign(tElem, f.expr(kv.Value))
				} else {
					f.assign(tElem, f.expr(elem))
				}
			}

		default:
			panic("unexpected composite literal type: " + tv.Type.String())
		}

	case *ast.ParenExpr:
		f.expr(e.X)

	case *ast.SelectorExpr:
		if _, ok := f.info.Selections[e]; ok {
			f.expr(e.X) // selection
		} else {
			return f.info.Uses[e.Sel].Type() // qualified identifier
		}

	case *ast.IndexExpr:
		x := f.expr(e.X)
		i := f.expr(e.Index)
		// Only map indexing involves an assignability conversion
		// (index value to key type).
		if ux, ok := x.Underlying().(*types.Map); ok {
			f.assign(ux.Key(), i)
		}

	case *ast.SliceExpr:
		f.expr(e.X)
		if e.Low != nil {
			f.expr(e.Low)
		}
		if e.High != nil {
			f.expr(e.High)
		}
		if e.Max != nil {
			f.expr(e.Max)
		}

	case *ast.TypeAssertExpr:
		x := f.expr(e.X)
		f.typeAssert(x, f.info.Types[e.Type].Type)

	case *ast.CallExpr:
		if tvFun := f.info.Types[e.Fun]; tvFun.IsType() {
			// conversion T(x): x is assigned to T
			arg0 := f.expr(e.Args[0])
			f.assign(tvFun.Type, arg0)
		} else {
			// function call
			if id, ok := unparen(e.Fun).(*ast.Ident); ok {
				if obj, ok := f.info.Uses[id].(*types.Builtin); ok {
					// builtins need bespoke handling (make/new/append/delete)
					sig := f.info.Types[id].Type.(*types.Signature)
					return f.builtin(obj, sig, e.Args, tv.Type)
				}
			}
			// ordinary call
			f.call(f.expr(e.Fun).Underlying().(*types.Signature), e.Args)
		}

	case *ast.StarExpr:
		f.expr(e.X)

	case *ast.UnaryExpr:
		f.expr(e.X)

	case *ast.BinaryExpr:
		x := f.expr(e.X)
		y := f.expr(e.Y)
		if e.Op == token.EQL || e.Op == token.NEQ {
			// Equality comparison implies assignability in one direction.
			f.compare(x, y)
		}

	case *ast.KeyValueExpr:
		f.expr(e.Key)
		f.expr(e.Value)

	case *ast.ArrayType,
		*ast.StructType,
		*ast.FuncType,
		*ast.InterfaceType,
		*ast.MapType,
		*ast.ChanType:
		// Type expressions should never reach expr.
		panic(e)
	}

	if tv.Type == nil {
		panic(fmt.Sprintf("no type for %T", e))
	}

	return tv.Type
}
|
||||||
|
|
||||||
|
// stmt visits a statement, recording the assignability constraints
// implied by assignments, sends, returns, switch/type-switch cases,
// and range statements, and recursing into nested statements and
// expressions.
func (f *Finder) stmt(s ast.Stmt) {
	switch s := s.(type) {
	case *ast.BadStmt,
		*ast.EmptyStmt,
		*ast.BranchStmt:
		// no-op

	case *ast.DeclStmt:
		d := s.Decl.(*ast.GenDecl)
		if d.Tok == token.VAR { // ignore consts
			for _, spec := range d.Specs {
				f.valueSpec(spec.(*ast.ValueSpec))
			}
		}

	case *ast.LabeledStmt:
		f.stmt(s.Stmt)

	case *ast.ExprStmt:
		f.expr(s.X)

	case *ast.SendStmt:
		// ch <- x: x is assigned to the channel's element type.
		ch := f.expr(s.Chan)
		val := f.expr(s.Value)
		f.assign(ch.Underlying().(*types.Chan).Elem(), val)

	case *ast.IncDecStmt:
		f.expr(s.X)

	case *ast.AssignStmt:
		switch s.Tok {
		case token.ASSIGN, token.DEFINE:
			// y := x   or   y = x
			var rhsTuple types.Type
			if len(s.Lhs) != len(s.Rhs) {
				rhsTuple = f.exprN(s.Rhs[0]) // n:1 form: y, z := f()
			}
			for i := range s.Lhs {
				var lhs, rhs types.Type
				if rhsTuple == nil {
					rhs = f.expr(s.Rhs[i]) // 1:1 assignment
				} else {
					rhs = f.extract(rhsTuple, i) // n:1 assignment
				}

				if id, ok := s.Lhs[i].(*ast.Ident); ok {
					if id.Name != "_" {
						if obj, ok := f.info.Defs[id]; ok {
							lhs = obj.Type() // definition
						}
					}
				}
				if lhs == nil {
					lhs = f.expr(s.Lhs[i]) // assignment
				}
				f.assign(lhs, rhs)
			}

		default:
			// y op= x: no assignability conversion is involved.
			f.expr(s.Lhs[0])
			f.expr(s.Rhs[0])
		}

	case *ast.GoStmt:
		f.expr(s.Call)

	case *ast.DeferStmt:
		f.expr(s.Call)

	case *ast.ReturnStmt:
		formals := f.sig.Results()
		switch len(s.Results) {
		case formals.Len(): // 1:1
			for i, result := range s.Results {
				f.assign(formals.At(i).Type(), f.expr(result))
			}

		case 1: // n:1, e.g. "return f()"
			tuple := f.exprN(s.Results[0])
			for i := 0; i < formals.Len(); i++ {
				f.assign(formals.At(i).Type(), f.extract(tuple, i))
			}
		}

	case *ast.SelectStmt:
		f.stmt(s.Body)

	case *ast.BlockStmt:
		for _, s := range s.List {
			f.stmt(s)
		}

	case *ast.IfStmt:
		if s.Init != nil {
			f.stmt(s.Init)
		}
		f.expr(s.Cond)
		f.stmt(s.Body)
		if s.Else != nil {
			f.stmt(s.Else)
		}

	case *ast.SwitchStmt:
		if s.Init != nil {
			f.stmt(s.Init)
		}
		// A tagless switch compares each case against untyped true.
		var tag types.Type = tUntypedBool
		if s.Tag != nil {
			tag = f.expr(s.Tag)
		}
		for _, cc := range s.Body.List {
			cc := cc.(*ast.CaseClause)
			for _, cond := range cc.List {
				f.compare(tag, f.info.Types[cond].Type)
			}
			for _, s := range cc.Body {
				f.stmt(s)
			}
		}

	case *ast.TypeSwitchStmt:
		if s.Init != nil {
			f.stmt(s.Init)
		}
		// Recover the interface operand I from either form of the
		// type-switch guard.
		var I types.Type
		switch ass := s.Assign.(type) {
		case *ast.ExprStmt: // x.(type)
			I = f.expr(unparen(ass.X).(*ast.TypeAssertExpr).X)
		case *ast.AssignStmt: // y := x.(type)
			I = f.expr(unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X)
		}
		for _, cc := range s.Body.List {
			cc := cc.(*ast.CaseClause)
			for _, cond := range cc.List {
				tCase := f.info.Types[cond].Type
				if tCase != tUntypedNil { // "case nil:" implies no constraint
					f.typeAssert(I, tCase)
				}
			}
			for _, s := range cc.Body {
				f.stmt(s)
			}
		}

	case *ast.CommClause:
		if s.Comm != nil {
			f.stmt(s.Comm)
		}
		for _, s := range s.Body {
			f.stmt(s)
		}

	case *ast.ForStmt:
		if s.Init != nil {
			f.stmt(s.Init)
		}
		if s.Cond != nil {
			f.expr(s.Cond)
		}
		if s.Post != nil {
			f.stmt(s.Post)
		}
		f.stmt(s.Body)

	case *ast.RangeStmt:
		x := f.expr(s.X)
		// No conversions are involved when Tok==DEFINE.
		if s.Tok == token.ASSIGN {
			if s.Key != nil {
				k := f.expr(s.Key)
				var xelem types.Type
				// keys of array, *array, slice, string aren't interesting
				switch ux := x.Underlying().(type) {
				case *types.Chan:
					xelem = ux.Elem()
				case *types.Map:
					xelem = ux.Key()
				}
				if xelem != nil {
					f.assign(xelem, k)
				}
			}
			if s.Value != nil {
				val := f.expr(s.Value)
				var xelem types.Type
				// values of strings aren't interesting
				switch ux := x.Underlying().(type) {
				case *types.Array:
					xelem = ux.Elem()
				case *types.Chan:
					xelem = ux.Elem()
				case *types.Map:
					xelem = ux.Elem()
				case *types.Pointer: // *array
					xelem = deref(ux).(*types.Array).Elem()
				case *types.Slice:
					xelem = ux.Elem()
				}
				if xelem != nil {
					f.assign(xelem, val)
				}
			}
		}
		f.stmt(s.Body)

	default:
		panic(s)
	}
}
|
||||||
|
|
||||||
|
// -- Plundered from golang.org/x/tools/go/ssa -----------------
|
||||||
|
|
||||||
|
// deref returns a pointer's element type; otherwise it returns typ.
|
||||||
|
func deref(typ types.Type) types.Type {
|
||||||
|
if p, ok := typ.Underlying().(*types.Pointer); ok {
|
||||||
|
return p.Elem()
|
||||||
|
}
|
||||||
|
return typ
|
||||||
|
}
|
||||||
|
|
||||||
|
// unparen returns e with any enclosing parentheses stripped (delegates to astutil).
func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) }
|
||||||
|
|
||||||
|
func isInterface(T types.Type) bool { return types.IsInterface(T) }
|
Loading…
x
Reference in New Issue
Block a user