x/tools: delete x/tools/go/types and dependent packages/client files
Per https://groups.google.com/forum/#!topic/golang-announce/qu_rAphYdxY this change deletes the packages go/exact, go/gccgoimporter, go/gcimporter, go/importer, go/types, and cmd/vet from the x/tools repo, as well as any files that depend on those packages when building against Go 1.4.

x/tools packages that depend on any of these libraries must now use the respective versions from the standard library, or vendored copies when building against Go 1.4. The remaining packages may or may not still build against Go 1.4 and are no longer supported on 1.4.

Change-Id: I1c655fc30aee49b6c7326ebd4eb1bb0836ac97e0
Reviewed-on: https://go-review.googlesource.com/20810
Reviewed-by: Alan Donovan <adonovan@google.com>
This commit is contained in:
parent fe1488f8ab
commit adaaa07486
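With x/tools/go/types and x/tools/go/gcimporter gone, client code built against Go 1.5 or later uses the standard library instead. The following is a minimal sketch of the replacement setup, not part of this commit; the file name, source snippet, and package path are illustrative only.

package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"
)

func main() {
	// Illustrative source only; any parseable Go package works here.
	const src = `package hello
import "fmt"
func Hello() { fmt.Println("hello") }`

	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "hello.go", src, 0)
	if err != nil {
		panic(err)
	}

	// importer.Default() plays the role x/tools/go/gcimporter used to:
	// it reads export data for the compiler this binary was built with.
	conf := types.Config{Importer: importer.Default()}
	pkg, err := conf.Check("hello", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}
	fmt.Println(pkg.Name(), "type-checks OK")
}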
@@ -8,10 +8,8 @@
 
 package main
 
-import (
-	"golang.org/x/tools/go/gcimporter"
-)
+import "go/importer"
 
 func init() {
-	register("gc", gcimporter.Import)
+	register("gc", importer.For("gc", nil))
 }
@@ -1,17 +0,0 @@
-// Copyright 2014 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build !go1.5
-
-// This file implements access to gc-generated export data.
-
-package main
-
-import (
-	"golang.org/x/tools/go/gcimporter"
-)
-
-func init() {
-	register("gc", gcimporter.Import)
-}
@@ -9,34 +9,28 @@
 package main
 
 import (
-	"golang.org/x/tools/go/gccgoimporter"
-	"golang.org/x/tools/go/types"
-)
-
-var (
-	initmap = make(map[*types.Package]gccgoimporter.InitData)
+	"go/importer"
+	"go/types"
 )
 
 func init() {
-	incpaths := []string{"/"}
-
-	// importer for default gccgo
-	var inst gccgoimporter.GccgoInstallation
-	inst.InitFromDriver("gccgo")
-	register("gccgo", inst.GetImporter(incpaths, initmap))
+	register("gccgo", importer.For("gccgo", nil))
 }
 
 // Print the extra gccgo compiler data for this package, if it exists.
 func (p *printer) printGccgoExtra(pkg *types.Package) {
-	if initdata, ok := initmap[pkg]; ok {
-		p.printf("/*\npriority %d\n", initdata.Priority)
+	// Disabled for now.
+	// TODO(gri) address this at some point.
 
-		p.printDecl("init", len(initdata.Inits), func() {
-			for _, init := range initdata.Inits {
-				p.printf("%s %s %d\n", init.Name, init.InitFunc, init.Priority)
-			}
-		})
+	// if initdata, ok := initmap[pkg]; ok {
+	// 	p.printf("/*\npriority %d\n", initdata.Priority)
 
-		p.print("*/\n")
-	}
+	// 	p.printDecl("init", len(initdata.Inits), func() {
+	// 		for _, init := range initdata.Inits {
+	// 			p.printf("%s %s %d\n", init.Name, init.InitFunc, init.Priority)
+	// 		}
+	// 	})
+
+	// 	p.print("*/\n")
+	// }
 }
@@ -1,42 +0,0 @@
-// Copyright 2014 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build !go1.5
-
-// This file implements access to gccgo-generated export data.
-
-package main
-
-import (
-	"golang.org/x/tools/go/gccgoimporter"
-	"golang.org/x/tools/go/types"
-)
-
-var (
-	initmap = make(map[*types.Package]gccgoimporter.InitData)
-)
-
-func init() {
-	incpaths := []string{"/"}
-
-	// importer for default gccgo
-	var inst gccgoimporter.GccgoInstallation
-	inst.InitFromDriver("gccgo")
-	register("gccgo", inst.GetImporter(incpaths, initmap))
-}
-
-// Print the extra gccgo compiler data for this package, if it exists.
-func (p *printer) printGccgoExtra(pkg *types.Package) {
-	if initdata, ok := initmap[pkg]; ok {
-		p.printf("/*\npriority %d\n", initdata.Priority)
-
-		p.printDecl("init", len(initdata.Inits), func() {
-			for _, init := range initdata.Inits {
-				p.printf("%s %s %d\n", init.Name, init.InitFunc, init.Priority)
-			}
-		})
-
-		p.print("*/\n")
-	}
-}
@@ -11,12 +11,11 @@ import (
 	"flag"
 	"fmt"
 	"go/build"
+	"go/types"
 	"io/ioutil"
 	"os"
 	"path/filepath"
 	"strings"
-
-	"golang.org/x/tools/go/types"
 )
 
 var (
@@ -31,9 +30,6 @@
 	importFailed = errors.New("import failed")
 )
 
-// map of imported packages
-var packages = make(map[string]*types.Package)
-
 func usage() {
 	fmt.Fprintln(os.Stderr, "usage: godex [flags] {path|qualifiedIdent}")
 	flag.PrintDefaults()
@@ -53,7 +49,7 @@
 		report("no package name, path, or file provided")
 	}
 
-	imp := tryImports
+	var imp types.Importer = new(tryImporters)
 	if *source != "" {
 		imp = lookup(*source)
 		if imp == nil {
@@ -71,7 +67,7 @@
 		go genPrefixes(prefixes, !filepath.IsAbs(path) && !build.IsLocalImport(path))
 
 		// import package
-		pkg, err := tryPrefixes(packages, prefixes, path, imp)
+		pkg, err := tryPrefixes(prefixes, path, imp)
 		if err != nil {
 			logf("\t=> ignoring %q: %s\n", path, err)
 			continue
@@ -115,7 +111,7 @@ func splitPathIdent(arg string) (path, name string) {
 // tryPrefixes tries to import the package given by (the possibly partial) path using the given importer imp
 // by prepending all possible prefixes to path. It returns with the first package that it could import, or
 // with an error.
-func tryPrefixes(packages map[string]*types.Package, prefixes chan string, path string, imp types.Importer) (pkg *types.Package, err error) {
+func tryPrefixes(prefixes chan string, path string, imp types.Importer) (pkg *types.Package, err error) {
 	for prefix := range prefixes {
 		actual := path
 		if prefix == "" {
@@ -127,7 +123,7 @@ func tryPrefixes(packages map[string]*types.Package, prefixes chan string, path string, imp types.Importer) (pkg *types.Package, err error) {
 			actual = filepath.Join(prefix, path)
 			logf("\ttrying prefix %q\n", prefix)
 		}
-		pkg, err = imp(packages, actual)
+		pkg, err = imp.Import(actual)
 		if err == nil {
 			break
 		}
@@ -136,12 +132,14 @@
 	return
 }
 
-// tryImports is an importer that tries all registered importers
+// tryImporters is an importer that tries all registered importers
 // successively until one of them succeeds or all of them failed.
-func tryImports(packages map[string]*types.Package, path string) (pkg *types.Package, err error) {
+type tryImporters struct{}
+
+func (t *tryImporters) Import(path string) (pkg *types.Package, err error) {
 	for i, imp := range importers {
 		logf("\t\ttrying %s import\n", sources[i])
-		pkg, err = imp(packages, path)
+		pkg, err = imp.Import(path)
 		if err == nil {
 			break
 		}
@@ -150,17 +148,23 @@ func tryImports(packages map[string]*types.Package, path string) (pkg *types.Package, err error) {
 	return
 }
 
+type protector struct {
+	imp types.Importer
+}
+
+func (p *protector) Import(path string) (pkg *types.Package, err error) {
+	defer func() {
+		if recover() != nil {
+			pkg = nil
+			err = importFailed
+		}
+	}()
+	return p.imp.Import(path)
+}
+
 // protect protects an importer imp from panics and returns the protected importer.
 func protect(imp types.Importer) types.Importer {
-	return func(packages map[string]*types.Package, path string) (pkg *types.Package, err error) {
-		defer func() {
-			if recover() != nil {
-				pkg = nil
-				err = importFailed
-			}
-		}()
-		return imp(packages, path)
-	}
+	return &protector{imp}
 }
 
 // register registers an importer imp for a given source src.
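The godex changes above replace function-valued importers of the form func(map[string]*types.Package, string) (*types.Package, error) with small types that satisfy the standard library's types.Importer interface, whose single method is Import(path string) (*types.Package, error). Below is a minimal sketch of the same pattern, independent of godex; the wrapper name and the logging are my own illustration, not code from this commit.

package main

import (
	"fmt"
	"go/importer"
	"go/types"
)

// loggingImporter wraps another types.Importer and reports each request.
// It mirrors the style of tryImporters and protector above: state lives in
// the struct, and Import(path) does the work.
type loggingImporter struct {
	next types.Importer
}

func (l loggingImporter) Import(path string) (*types.Package, error) {
	fmt.Printf("importing %q\n", path)
	return l.next.Import(path)
}

func main() {
	var imp types.Importer = loggingImporter{next: importer.Default()}
	pkg, err := imp.Import("fmt")
	if err != nil {
		panic(err)
	}
	fmt.Println(pkg.Path(), pkg.Name())
}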
@ -1,209 +0,0 @@
|
||||
// Copyright 2014 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build !go1.5
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"flag"
|
||||
"fmt"
|
||||
"go/build"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
var (
|
||||
source = flag.String("s", "", "only consider packages from src, where src is one of the supported compilers")
|
||||
verbose = flag.Bool("v", false, "verbose mode")
|
||||
)
|
||||
|
||||
// lists of registered sources and corresponding importers
|
||||
var (
|
||||
sources []string
|
||||
importers []types.Importer
|
||||
importFailed = errors.New("import failed")
|
||||
)
|
||||
|
||||
// map of imported packages
|
||||
var packages = make(map[string]*types.Package)
|
||||
|
||||
func usage() {
|
||||
fmt.Fprintln(os.Stderr, "usage: godex [flags] {path|qualifiedIdent}")
|
||||
flag.PrintDefaults()
|
||||
os.Exit(2)
|
||||
}
|
||||
|
||||
func report(msg string) {
|
||||
fmt.Fprintln(os.Stderr, "error: "+msg)
|
||||
os.Exit(2)
|
||||
}
|
||||
|
||||
func main() {
|
||||
flag.Usage = usage
|
||||
flag.Parse()
|
||||
|
||||
if flag.NArg() == 0 {
|
||||
report("no package name, path, or file provided")
|
||||
}
|
||||
|
||||
imp := tryImports
|
||||
if *source != "" {
|
||||
imp = lookup(*source)
|
||||
if imp == nil {
|
||||
report("source (-s argument) must be one of: " + strings.Join(sources, ", "))
|
||||
}
|
||||
}
|
||||
|
||||
for _, arg := range flag.Args() {
|
||||
path, name := splitPathIdent(arg)
|
||||
logf("\tprocessing %q: path = %q, name = %s\n", arg, path, name)
|
||||
|
||||
// generate possible package path prefixes
|
||||
// (at the moment we do this for each argument - should probably cache the generated prefixes)
|
||||
prefixes := make(chan string)
|
||||
go genPrefixes(prefixes, !filepath.IsAbs(path) && !build.IsLocalImport(path))
|
||||
|
||||
// import package
|
||||
pkg, err := tryPrefixes(packages, prefixes, path, imp)
|
||||
if err != nil {
|
||||
logf("\t=> ignoring %q: %s\n", path, err)
|
||||
continue
|
||||
}
|
||||
|
||||
// filter objects if needed
|
||||
var filter func(types.Object) bool
|
||||
if name != "" {
|
||||
filter = func(obj types.Object) bool {
|
||||
// TODO(gri) perhaps use regular expression matching here?
|
||||
return obj.Name() == name
|
||||
}
|
||||
}
|
||||
|
||||
// print contents
|
||||
print(os.Stdout, pkg, filter)
|
||||
}
|
||||
}
|
||||
|
||||
func logf(format string, args ...interface{}) {
|
||||
if *verbose {
|
||||
fmt.Fprintf(os.Stderr, format, args...)
|
||||
}
|
||||
}
|
||||
|
||||
// splitPathIdent splits a path.name argument into its components.
|
||||
// All but the last path element may contain dots.
|
||||
func splitPathIdent(arg string) (path, name string) {
|
||||
if i := strings.LastIndex(arg, "."); i >= 0 {
|
||||
if j := strings.LastIndex(arg, "/"); j < i {
|
||||
// '.' is not part of path
|
||||
path = arg[:i]
|
||||
name = arg[i+1:]
|
||||
return
|
||||
}
|
||||
}
|
||||
path = arg
|
||||
return
|
||||
}
|
||||
|
||||
// tryPrefixes tries to import the package given by (the possibly partial) path using the given importer imp
|
||||
// by prepending all possible prefixes to path. It returns with the first package that it could import, or
|
||||
// with an error.
|
||||
func tryPrefixes(packages map[string]*types.Package, prefixes chan string, path string, imp types.Importer) (pkg *types.Package, err error) {
|
||||
for prefix := range prefixes {
|
||||
actual := path
|
||||
if prefix == "" {
|
||||
// don't use filepath.Join as it will sanitize the path and remove
|
||||
// a leading dot and then the path is not recognized as a relative
|
||||
// package path by the importers anymore
|
||||
logf("\ttrying no prefix\n")
|
||||
} else {
|
||||
actual = filepath.Join(prefix, path)
|
||||
logf("\ttrying prefix %q\n", prefix)
|
||||
}
|
||||
pkg, err = imp(packages, actual)
|
||||
if err == nil {
|
||||
break
|
||||
}
|
||||
logf("\t=> importing %q failed: %s\n", actual, err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// tryImports is an importer that tries all registered importers
|
||||
// successively until one of them succeeds or all of them failed.
|
||||
func tryImports(packages map[string]*types.Package, path string) (pkg *types.Package, err error) {
|
||||
for i, imp := range importers {
|
||||
logf("\t\ttrying %s import\n", sources[i])
|
||||
pkg, err = imp(packages, path)
|
||||
if err == nil {
|
||||
break
|
||||
}
|
||||
logf("\t\t=> %s import failed: %s\n", sources[i], err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// protect protects an importer imp from panics and returns the protected importer.
|
||||
func protect(imp types.Importer) types.Importer {
|
||||
return func(packages map[string]*types.Package, path string) (pkg *types.Package, err error) {
|
||||
defer func() {
|
||||
if recover() != nil {
|
||||
pkg = nil
|
||||
err = importFailed
|
||||
}
|
||||
}()
|
||||
return imp(packages, path)
|
||||
}
|
||||
}
|
||||
|
||||
// register registers an importer imp for a given source src.
|
||||
func register(src string, imp types.Importer) {
|
||||
if lookup(src) != nil {
|
||||
panic(src + " importer already registered")
|
||||
}
|
||||
sources = append(sources, src)
|
||||
importers = append(importers, protect(imp))
|
||||
}
|
||||
|
||||
// lookup returns the importer imp for a given source src.
|
||||
func lookup(src string) types.Importer {
|
||||
for i, s := range sources {
|
||||
if s == src {
|
||||
return importers[i]
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func genPrefixes(out chan string, all bool) {
|
||||
out <- ""
|
||||
if all {
|
||||
platform := build.Default.GOOS + "_" + build.Default.GOARCH
|
||||
dirnames := append([]string{build.Default.GOROOT}, filepath.SplitList(build.Default.GOPATH)...)
|
||||
for _, dirname := range dirnames {
|
||||
walkDir(filepath.Join(dirname, "pkg", platform), "", out)
|
||||
}
|
||||
}
|
||||
close(out)
|
||||
}
|
||||
|
||||
func walkDir(dirname, prefix string, out chan string) {
|
||||
fiList, err := ioutil.ReadDir(dirname)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
for _, fi := range fiList {
|
||||
if fi.IsDir() && !strings.HasPrefix(fi.Name(), ".") {
|
||||
prefix := filepath.Join(prefix, fi.Name())
|
||||
out <- prefix
|
||||
walkDir(filepath.Join(dirname, fi.Name()), prefix, out)
|
||||
}
|
||||
}
|
||||
}
|
@@ -9,12 +9,11 @@ package main
 import (
 	"bytes"
 	"fmt"
+	exact "go/constant"
 	"go/token"
+	"go/types"
 	"io"
 	"math/big"
-
-	"golang.org/x/tools/go/exact"
-	"golang.org/x/tools/go/types"
 )
 
 // TODO(gri) use tabwriter for alignment?
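The hunk above keeps the old identifier by aliasing the standard package: exact "go/constant". The helpers this file uses (Sign, Bytes, Num, Denom, Real, Imag, UnaryOp) exist under go/constant with the same names. A tiny sketch of the aliased package in use follows; the values are mine, not from the commit.

package main

import (
	"fmt"
	exact "go/constant" // same alias the diff introduces
	"go/token"
)

func main() {
	// 1/3 is not representable as an Int, so the result is a Float value.
	v := exact.BinaryOp(exact.MakeInt64(1), token.QUO, exact.MakeInt64(3))
	fmt.Println(v.Kind() == exact.Float)  // true
	fmt.Println(exact.Sign(v))            // 1
	fmt.Println(exact.Num(v), exact.Denom(v)) // 1 3
}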
@ -1,370 +0,0 @@
|
||||
// Copyright 2014 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build !go1.5
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/token"
|
||||
"io"
|
||||
"math/big"
|
||||
|
||||
"golang.org/x/tools/go/exact"
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
// TODO(gri) use tabwriter for alignment?
|
||||
|
||||
func print(w io.Writer, pkg *types.Package, filter func(types.Object) bool) {
|
||||
var p printer
|
||||
p.pkg = pkg
|
||||
p.printPackage(pkg, filter)
|
||||
p.printGccgoExtra(pkg)
|
||||
io.Copy(w, &p.buf)
|
||||
}
|
||||
|
||||
type printer struct {
|
||||
pkg *types.Package
|
||||
buf bytes.Buffer
|
||||
indent int // current indentation level
|
||||
last byte // last byte written
|
||||
}
|
||||
|
||||
func (p *printer) print(s string) {
|
||||
// Write the string one byte at a time. We care about the presence of
|
||||
// newlines for indentation which we will see even in the presence of
|
||||
// (non-corrupted) Unicode; no need to read one rune at a time.
|
||||
for i := 0; i < len(s); i++ {
|
||||
ch := s[i]
|
||||
if ch != '\n' && p.last == '\n' {
|
||||
// Note: This could lead to a range overflow for very large
|
||||
// indentations, but it's extremely unlikely to happen for
|
||||
// non-pathological code.
|
||||
p.buf.WriteString("\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t"[:p.indent])
|
||||
}
|
||||
p.buf.WriteByte(ch)
|
||||
p.last = ch
|
||||
}
|
||||
}
|
||||
|
||||
func (p *printer) printf(format string, args ...interface{}) {
|
||||
p.print(fmt.Sprintf(format, args...))
|
||||
}
|
||||
|
||||
// methodsFor returns the named type and corresponding methods if the type
|
||||
// denoted by obj is not an interface and has methods. Otherwise it returns
|
||||
// the zero value.
|
||||
func methodsFor(obj *types.TypeName) (*types.Named, []*types.Selection) {
|
||||
named, _ := obj.Type().(*types.Named)
|
||||
if named == nil {
|
||||
// A type name's type can also be the
|
||||
// exported basic type unsafe.Pointer.
|
||||
return nil, nil
|
||||
}
|
||||
if _, ok := named.Underlying().(*types.Interface); ok {
|
||||
// ignore interfaces
|
||||
return nil, nil
|
||||
}
|
||||
methods := combinedMethodSet(named)
|
||||
if len(methods) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
return named, methods
|
||||
}
|
||||
|
||||
func (p *printer) printPackage(pkg *types.Package, filter func(types.Object) bool) {
|
||||
// collect objects by kind
|
||||
var (
|
||||
consts []*types.Const
|
||||
typem []*types.Named // non-interface types with methods
|
||||
typez []*types.TypeName // interfaces or types without methods
|
||||
vars []*types.Var
|
||||
funcs []*types.Func
|
||||
builtins []*types.Builtin
|
||||
methods = make(map[*types.Named][]*types.Selection) // method sets for named types
|
||||
)
|
||||
scope := pkg.Scope()
|
||||
for _, name := range scope.Names() {
|
||||
obj := scope.Lookup(name)
|
||||
if obj.Exported() {
|
||||
// collect top-level exported and possibly filtered objects
|
||||
if filter == nil || filter(obj) {
|
||||
switch obj := obj.(type) {
|
||||
case *types.Const:
|
||||
consts = append(consts, obj)
|
||||
case *types.TypeName:
|
||||
// group into types with methods and types without
|
||||
if named, m := methodsFor(obj); named != nil {
|
||||
typem = append(typem, named)
|
||||
methods[named] = m
|
||||
} else {
|
||||
typez = append(typez, obj)
|
||||
}
|
||||
case *types.Var:
|
||||
vars = append(vars, obj)
|
||||
case *types.Func:
|
||||
funcs = append(funcs, obj)
|
||||
case *types.Builtin:
|
||||
// for unsafe.Sizeof, etc.
|
||||
builtins = append(builtins, obj)
|
||||
}
|
||||
}
|
||||
} else if filter == nil {
|
||||
// no filtering: collect top-level unexported types with methods
|
||||
if obj, _ := obj.(*types.TypeName); obj != nil {
|
||||
// see case *types.TypeName above
|
||||
if named, m := methodsFor(obj); named != nil {
|
||||
typem = append(typem, named)
|
||||
methods[named] = m
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
p.printf("package %s // %q\n", pkg.Name(), pkg.Path())
|
||||
|
||||
p.printDecl("const", len(consts), func() {
|
||||
for _, obj := range consts {
|
||||
p.printObj(obj)
|
||||
p.print("\n")
|
||||
}
|
||||
})
|
||||
|
||||
p.printDecl("var", len(vars), func() {
|
||||
for _, obj := range vars {
|
||||
p.printObj(obj)
|
||||
p.print("\n")
|
||||
}
|
||||
})
|
||||
|
||||
p.printDecl("type", len(typez), func() {
|
||||
for _, obj := range typez {
|
||||
p.printf("%s ", obj.Name())
|
||||
p.writeType(p.pkg, obj.Type().Underlying())
|
||||
p.print("\n")
|
||||
}
|
||||
})
|
||||
|
||||
// non-interface types with methods
|
||||
for _, named := range typem {
|
||||
first := true
|
||||
if obj := named.Obj(); obj.Exported() {
|
||||
if first {
|
||||
p.print("\n")
|
||||
first = false
|
||||
}
|
||||
p.printf("type %s ", obj.Name())
|
||||
p.writeType(p.pkg, named.Underlying())
|
||||
p.print("\n")
|
||||
}
|
||||
for _, m := range methods[named] {
|
||||
if obj := m.Obj(); obj.Exported() {
|
||||
if first {
|
||||
p.print("\n")
|
||||
first = false
|
||||
}
|
||||
p.printFunc(m.Recv(), obj.(*types.Func))
|
||||
p.print("\n")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(funcs) > 0 {
|
||||
p.print("\n")
|
||||
for _, obj := range funcs {
|
||||
p.printFunc(nil, obj)
|
||||
p.print("\n")
|
||||
}
|
||||
}
|
||||
|
||||
// TODO(gri) better handling of builtins (package unsafe only)
|
||||
if len(builtins) > 0 {
|
||||
p.print("\n")
|
||||
for _, obj := range builtins {
|
||||
p.printf("func %s() // builtin\n", obj.Name())
|
||||
}
|
||||
}
|
||||
|
||||
p.print("\n")
|
||||
}
|
||||
|
||||
func (p *printer) printDecl(keyword string, n int, printGroup func()) {
|
||||
switch n {
|
||||
case 0:
|
||||
// nothing to do
|
||||
case 1:
|
||||
p.printf("\n%s ", keyword)
|
||||
printGroup()
|
||||
default:
|
||||
p.printf("\n%s (\n", keyword)
|
||||
p.indent++
|
||||
printGroup()
|
||||
p.indent--
|
||||
p.print(")\n")
|
||||
}
|
||||
}
|
||||
|
||||
// absInt returns the absolute value of v as a *big.Int.
|
||||
// v must be a numeric value.
|
||||
func absInt(v exact.Value) *big.Int {
|
||||
// compute big-endian representation of v
|
||||
b := exact.Bytes(v) // little-endian
|
||||
for i, j := 0, len(b)-1; i < j; i, j = i+1, j-1 {
|
||||
b[i], b[j] = b[j], b[i]
|
||||
}
|
||||
return new(big.Int).SetBytes(b)
|
||||
}
|
||||
|
||||
var (
|
||||
one = big.NewRat(1, 1)
|
||||
ten = big.NewRat(10, 1)
|
||||
)
|
||||
|
||||
// floatString returns the string representation for a
|
||||
// numeric value v in normalized floating-point format.
|
||||
func floatString(v exact.Value) string {
|
||||
if exact.Sign(v) == 0 {
|
||||
return "0.0"
|
||||
}
|
||||
// x != 0
|
||||
|
||||
// convert |v| into a big.Rat x
|
||||
x := new(big.Rat).SetFrac(absInt(exact.Num(v)), absInt(exact.Denom(v)))
|
||||
|
||||
// normalize x and determine exponent e
|
||||
// (This is not very efficient, but also not speed-critical.)
|
||||
var e int
|
||||
for x.Cmp(ten) >= 0 {
|
||||
x.Quo(x, ten)
|
||||
e++
|
||||
}
|
||||
for x.Cmp(one) < 0 {
|
||||
x.Mul(x, ten)
|
||||
e--
|
||||
}
|
||||
|
||||
// TODO(gri) Values such as 1/2 are easier to read in form 0.5
|
||||
// rather than 5.0e-1. Similarly, 1.0e1 is easier to read as
|
||||
// 10.0. Fine-tune best exponent range for readability.
|
||||
|
||||
s := x.FloatString(100) // good-enough precision
|
||||
|
||||
// trim trailing 0's
|
||||
i := len(s)
|
||||
for i > 0 && s[i-1] == '0' {
|
||||
i--
|
||||
}
|
||||
s = s[:i]
|
||||
|
||||
// add a 0 if the number ends in decimal point
|
||||
if len(s) > 0 && s[len(s)-1] == '.' {
|
||||
s += "0"
|
||||
}
|
||||
|
||||
// add exponent and sign
|
||||
if e != 0 {
|
||||
s += fmt.Sprintf("e%+d", e)
|
||||
}
|
||||
if exact.Sign(v) < 0 {
|
||||
s = "-" + s
|
||||
}
|
||||
|
||||
// TODO(gri) If v is a "small" fraction (i.e., numerator and denominator
|
||||
// are just a small number of decimal digits), add the exact fraction as
|
||||
// a comment. For instance: 3.3333...e-1 /* = 1/3 */
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
// valString returns the string representation for the value v.
|
||||
// Setting floatFmt forces an integer value to be formatted in
|
||||
// normalized floating-point format.
|
||||
// TODO(gri) Move this code into package exact.
|
||||
func valString(v exact.Value, floatFmt bool) string {
|
||||
switch v.Kind() {
|
||||
case exact.Int:
|
||||
if floatFmt {
|
||||
return floatString(v)
|
||||
}
|
||||
case exact.Float:
|
||||
return floatString(v)
|
||||
case exact.Complex:
|
||||
re := exact.Real(v)
|
||||
im := exact.Imag(v)
|
||||
var s string
|
||||
if exact.Sign(re) != 0 {
|
||||
s = floatString(re)
|
||||
if exact.Sign(im) >= 0 {
|
||||
s += " + "
|
||||
} else {
|
||||
s += " - "
|
||||
im = exact.UnaryOp(token.SUB, im, 0) // negate im
|
||||
}
|
||||
}
|
||||
// im != 0, otherwise v would be exact.Int or exact.Float
|
||||
return s + floatString(im) + "i"
|
||||
}
|
||||
return v.String()
|
||||
}
|
||||
|
||||
func (p *printer) printObj(obj types.Object) {
|
||||
p.print(obj.Name())
|
||||
|
||||
typ, basic := obj.Type().Underlying().(*types.Basic)
|
||||
if basic && typ.Info()&types.IsUntyped != 0 {
|
||||
// don't write untyped types
|
||||
} else {
|
||||
p.print(" ")
|
||||
p.writeType(p.pkg, obj.Type())
|
||||
}
|
||||
|
||||
if obj, ok := obj.(*types.Const); ok {
|
||||
floatFmt := basic && typ.Info()&(types.IsFloat|types.IsComplex) != 0
|
||||
p.print(" = ")
|
||||
p.print(valString(obj.Val(), floatFmt))
|
||||
}
|
||||
}
|
||||
|
||||
func (p *printer) printFunc(recvType types.Type, obj *types.Func) {
|
||||
p.print("func ")
|
||||
sig := obj.Type().(*types.Signature)
|
||||
if recvType != nil {
|
||||
p.print("(")
|
||||
p.writeType(p.pkg, recvType)
|
||||
p.print(") ")
|
||||
}
|
||||
p.print(obj.Name())
|
||||
p.writeSignature(p.pkg, sig)
|
||||
}
|
||||
|
||||
// combinedMethodSet returns the method set for a named type T
|
||||
// merged with all the methods of *T that have different names than
|
||||
// the methods of T.
|
||||
//
|
||||
// combinedMethodSet is analogous to types/typeutil.IntuitiveMethodSet
|
||||
// but doesn't require a MethodSetCache.
|
||||
// TODO(gri) If this functionality doesn't change over time, consider
|
||||
// just calling IntuitiveMethodSet eventually.
|
||||
func combinedMethodSet(T *types.Named) []*types.Selection {
|
||||
// method set for T
|
||||
mset := types.NewMethodSet(T)
|
||||
var res []*types.Selection
|
||||
for i, n := 0, mset.Len(); i < n; i++ {
|
||||
res = append(res, mset.At(i))
|
||||
}
|
||||
|
||||
// add all *T methods with names different from T methods
|
||||
pmset := types.NewMethodSet(types.NewPointer(T))
|
||||
for i, n := 0, pmset.Len(); i < n; i++ {
|
||||
pm := pmset.At(i)
|
||||
if obj := pm.Obj(); mset.Lookup(obj.Pkg(), obj.Name()) == nil {
|
||||
res = append(res, pm)
|
||||
}
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
@@ -8,14 +8,14 @@
 
 package main
 
-import (
-	"golang.org/x/tools/go/types"
-)
+import "go/types"
 
 func init() {
-	register("source", sourceImporter)
+	register("source", sourceImporter{})
 }
 
-func sourceImporter(packages map[string]*types.Package, path string) (*types.Package, error) {
+type sourceImporter struct{}
+
+func (sourceImporter) Import(path string) (*types.Package, error) {
 	panic("unimplemented")
 }
@@ -1,21 +0,0 @@
-// Copyright 2014 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build !go1.5
-
-// This file implements access to export data from source.
-
-package main
-
-import (
-	"golang.org/x/tools/go/types"
-)
-
-func init() {
-	register("source", sourceImporter)
-}
-
-func sourceImporter(packages map[string]*types.Package, path string) (*types.Package, error) {
-	panic("unimplemented")
-}
@@ -14,7 +14,7 @@
 
 package main
 
-import "golang.org/x/tools/go/types"
+import "go/types"
 
 func (p *printer) writeType(this *types.Package, typ types.Type) {
 	p.writeTypeInternal(this, typ, make([]types.Type, 8))
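The visited slice seeded here (make([]types.Type, 8)) and threaded through writeTypeInternal exists to cut off cycles in self-referential types, which the deleted copy below renders as "○%T". Here is a small, self-contained sketch of that idiom using only the standard go/types API; the describe function and the example type T are my own illustration, not godex code.

package main

import (
	"fmt"
	"go/types"
)

// describe prints a type, remembering the types already being printed so a
// recursive type terminates instead of looping forever.
func describe(t types.Type, visited []types.Type) string {
	for _, v := range visited {
		if v == t {
			return "..." // cycle back to a type we are already printing
		}
	}
	visited = append(visited, t)

	switch t := t.(type) {
	case *types.Slice:
		return "[]" + describe(t.Elem(), visited)
	case *types.Pointer:
		return "*" + describe(t.Elem(), visited)
	case *types.Named:
		return t.Obj().Name() + " = " + describe(t.Underlying(), visited)
	default:
		return t.String()
	}
}

func main() {
	// Build the recursive type `type T []*T` programmatically.
	obj := types.NewTypeName(0, nil, "T", nil)
	named := types.NewNamed(obj, nil, nil)
	named.SetUnderlying(types.NewSlice(types.NewPointer(named)))

	fmt.Println(describe(named, nil)) // T = []*...
}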
@ -1,244 +0,0 @@
|
||||
// Copyright 2014 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build !go1.5
|
||||
|
||||
// This file implements writing of types. The functionality is lifted
|
||||
// directly from go/types, but now contains various modifications for
|
||||
// nicer output.
|
||||
//
|
||||
// TODO(gri) back-port once we have a fixed interface and once the
|
||||
// go/types API is not frozen anymore for the 1.3 release; and remove
|
||||
// this implementation if possible.
|
||||
|
||||
package main
|
||||
|
||||
import "golang.org/x/tools/go/types"
|
||||
|
||||
func (p *printer) writeType(this *types.Package, typ types.Type) {
|
||||
p.writeTypeInternal(this, typ, make([]types.Type, 8))
|
||||
}
|
||||
|
||||
// From go/types - leave for now to ease back-porting this code.
|
||||
const GcCompatibilityMode = false
|
||||
|
||||
func (p *printer) writeTypeInternal(this *types.Package, typ types.Type, visited []types.Type) {
|
||||
// Theoretically, this is a quadratic lookup algorithm, but in
|
||||
// practice deeply nested composite types with unnamed component
|
||||
// types are uncommon. This code is likely more efficient than
|
||||
// using a map.
|
||||
for _, t := range visited {
|
||||
if t == typ {
|
||||
p.printf("○%T", typ) // cycle to typ
|
||||
return
|
||||
}
|
||||
}
|
||||
visited = append(visited, typ)
|
||||
|
||||
switch t := typ.(type) {
|
||||
case nil:
|
||||
p.print("<nil>")
|
||||
|
||||
case *types.Basic:
|
||||
if t.Kind() == types.UnsafePointer {
|
||||
p.print("unsafe.")
|
||||
}
|
||||
if GcCompatibilityMode {
|
||||
// forget the alias names
|
||||
switch t.Kind() {
|
||||
case types.Byte:
|
||||
t = types.Typ[types.Uint8]
|
||||
case types.Rune:
|
||||
t = types.Typ[types.Int32]
|
||||
}
|
||||
}
|
||||
p.print(t.Name())
|
||||
|
||||
case *types.Array:
|
||||
p.printf("[%d]", t.Len())
|
||||
p.writeTypeInternal(this, t.Elem(), visited)
|
||||
|
||||
case *types.Slice:
|
||||
p.print("[]")
|
||||
p.writeTypeInternal(this, t.Elem(), visited)
|
||||
|
||||
case *types.Struct:
|
||||
n := t.NumFields()
|
||||
if n == 0 {
|
||||
p.print("struct{}")
|
||||
return
|
||||
}
|
||||
|
||||
p.print("struct {\n")
|
||||
p.indent++
|
||||
for i := 0; i < n; i++ {
|
||||
f := t.Field(i)
|
||||
if !f.Anonymous() {
|
||||
p.printf("%s ", f.Name())
|
||||
}
|
||||
p.writeTypeInternal(this, f.Type(), visited)
|
||||
if tag := t.Tag(i); tag != "" {
|
||||
p.printf(" %q", tag)
|
||||
}
|
||||
p.print("\n")
|
||||
}
|
||||
p.indent--
|
||||
p.print("}")
|
||||
|
||||
case *types.Pointer:
|
||||
p.print("*")
|
||||
p.writeTypeInternal(this, t.Elem(), visited)
|
||||
|
||||
case *types.Tuple:
|
||||
p.writeTuple(this, t, false, visited)
|
||||
|
||||
case *types.Signature:
|
||||
p.print("func")
|
||||
p.writeSignatureInternal(this, t, visited)
|
||||
|
||||
case *types.Interface:
|
||||
// We write the source-level methods and embedded types rather
|
||||
// than the actual method set since resolved method signatures
|
||||
// may have non-printable cycles if parameters have anonymous
|
||||
// interface types that (directly or indirectly) embed the
|
||||
// current interface. For instance, consider the result type
|
||||
// of m:
|
||||
//
|
||||
// type T interface{
|
||||
// m() interface{ T }
|
||||
// }
|
||||
//
|
||||
n := t.NumMethods()
|
||||
if n == 0 {
|
||||
p.print("interface{}")
|
||||
return
|
||||
}
|
||||
|
||||
p.print("interface {\n")
|
||||
p.indent++
|
||||
if GcCompatibilityMode {
|
||||
// print flattened interface
|
||||
// (useful to compare against gc-generated interfaces)
|
||||
for i := 0; i < n; i++ {
|
||||
m := t.Method(i)
|
||||
p.print(m.Name())
|
||||
p.writeSignatureInternal(this, m.Type().(*types.Signature), visited)
|
||||
p.print("\n")
|
||||
}
|
||||
} else {
|
||||
// print explicit interface methods and embedded types
|
||||
for i, n := 0, t.NumExplicitMethods(); i < n; i++ {
|
||||
m := t.ExplicitMethod(i)
|
||||
p.print(m.Name())
|
||||
p.writeSignatureInternal(this, m.Type().(*types.Signature), visited)
|
||||
p.print("\n")
|
||||
}
|
||||
for i, n := 0, t.NumEmbeddeds(); i < n; i++ {
|
||||
typ := t.Embedded(i)
|
||||
p.writeTypeInternal(this, typ, visited)
|
||||
p.print("\n")
|
||||
}
|
||||
}
|
||||
p.indent--
|
||||
p.print("}")
|
||||
|
||||
case *types.Map:
|
||||
p.print("map[")
|
||||
p.writeTypeInternal(this, t.Key(), visited)
|
||||
p.print("]")
|
||||
p.writeTypeInternal(this, t.Elem(), visited)
|
||||
|
||||
case *types.Chan:
|
||||
var s string
|
||||
var parens bool
|
||||
switch t.Dir() {
|
||||
case types.SendRecv:
|
||||
s = "chan "
|
||||
// chan (<-chan T) requires parentheses
|
||||
if c, _ := t.Elem().(*types.Chan); c != nil && c.Dir() == types.RecvOnly {
|
||||
parens = true
|
||||
}
|
||||
case types.SendOnly:
|
||||
s = "chan<- "
|
||||
case types.RecvOnly:
|
||||
s = "<-chan "
|
||||
default:
|
||||
panic("unreachable")
|
||||
}
|
||||
p.print(s)
|
||||
if parens {
|
||||
p.print("(")
|
||||
}
|
||||
p.writeTypeInternal(this, t.Elem(), visited)
|
||||
if parens {
|
||||
p.print(")")
|
||||
}
|
||||
|
||||
case *types.Named:
|
||||
s := "<Named w/o object>"
|
||||
if obj := t.Obj(); obj != nil {
|
||||
if pkg := obj.Pkg(); pkg != nil {
|
||||
if pkg != this {
|
||||
p.print(pkg.Path())
|
||||
p.print(".")
|
||||
}
|
||||
// TODO(gri): function-local named types should be displayed
|
||||
// differently from named types at package level to avoid
|
||||
// ambiguity.
|
||||
}
|
||||
s = obj.Name()
|
||||
}
|
||||
p.print(s)
|
||||
|
||||
default:
|
||||
// For externally defined implementations of Type.
|
||||
p.print(t.String())
|
||||
}
|
||||
}
|
||||
|
||||
func (p *printer) writeTuple(this *types.Package, tup *types.Tuple, variadic bool, visited []types.Type) {
|
||||
p.print("(")
|
||||
for i, n := 0, tup.Len(); i < n; i++ {
|
||||
if i > 0 {
|
||||
p.print(", ")
|
||||
}
|
||||
v := tup.At(i)
|
||||
if name := v.Name(); name != "" {
|
||||
p.print(name)
|
||||
p.print(" ")
|
||||
}
|
||||
typ := v.Type()
|
||||
if variadic && i == n-1 {
|
||||
p.print("...")
|
||||
typ = typ.(*types.Slice).Elem()
|
||||
}
|
||||
p.writeTypeInternal(this, typ, visited)
|
||||
}
|
||||
p.print(")")
|
||||
}
|
||||
|
||||
func (p *printer) writeSignature(this *types.Package, sig *types.Signature) {
|
||||
p.writeSignatureInternal(this, sig, make([]types.Type, 8))
|
||||
}
|
||||
|
||||
func (p *printer) writeSignatureInternal(this *types.Package, sig *types.Signature, visited []types.Type) {
|
||||
p.writeTuple(this, sig.Params(), sig.Variadic(), visited)
|
||||
|
||||
res := sig.Results()
|
||||
n := res.Len()
|
||||
if n == 0 {
|
||||
// no result
|
||||
return
|
||||
}
|
||||
|
||||
p.print(" ")
|
||||
if n == 1 && res.At(0).Name() == "" {
|
||||
// single unnamed result
|
||||
p.writeTypeInternal(this, res.At(0).Type(), visited)
|
||||
return
|
||||
}
|
||||
|
||||
// multiple or named result(s)
|
||||
p.writeTuple(this, res, false, visited)
|
||||
}
|
@ -1,268 +0,0 @@
|
||||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build !go1.5
|
||||
|
||||
// This is a 1:1 copy of gotype.go but for the changes required to build
|
||||
// against Go1.4 and before.
|
||||
// TODO(gri) Decide long-term fate of gotype (issue #12303).
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/build"
|
||||
"go/parser"
|
||||
"go/scanner"
|
||||
"go/token"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"time"
|
||||
|
||||
"golang.org/x/tools/go/gccgoimporter"
|
||||
_ "golang.org/x/tools/go/gcimporter"
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
var (
|
||||
// main operation modes
|
||||
allFiles = flag.Bool("a", false, "use all (incl. _test.go) files when processing a directory")
|
||||
allErrors = flag.Bool("e", false, "report all errors (not just the first 10)")
|
||||
verbose = flag.Bool("v", false, "verbose mode")
|
||||
gccgo = flag.Bool("gccgo", false, "use gccgoimporter instead of gcimporter")
|
||||
|
||||
// debugging support
|
||||
sequential = flag.Bool("seq", false, "parse sequentially, rather than in parallel")
|
||||
printAST = flag.Bool("ast", false, "print AST (forces -seq)")
|
||||
printTrace = flag.Bool("trace", false, "print parse trace (forces -seq)")
|
||||
parseComments = flag.Bool("comments", false, "parse comments (ignored unless -ast or -trace is provided)")
|
||||
)
|
||||
|
||||
var (
|
||||
fset = token.NewFileSet()
|
||||
errorCount = 0
|
||||
parserMode parser.Mode
|
||||
sizes types.Sizes
|
||||
)
|
||||
|
||||
func initParserMode() {
|
||||
if *allErrors {
|
||||
parserMode |= parser.AllErrors
|
||||
}
|
||||
if *printTrace {
|
||||
parserMode |= parser.Trace
|
||||
}
|
||||
if *parseComments && (*printAST || *printTrace) {
|
||||
parserMode |= parser.ParseComments
|
||||
}
|
||||
}
|
||||
|
||||
func initSizes() {
|
||||
wordSize := 8
|
||||
maxAlign := 8
|
||||
switch build.Default.GOARCH {
|
||||
case "386", "arm":
|
||||
wordSize = 4
|
||||
maxAlign = 4
|
||||
// add more cases as needed
|
||||
}
|
||||
sizes = &types.StdSizes{WordSize: int64(wordSize), MaxAlign: int64(maxAlign)}
|
||||
}
|
||||
|
||||
func usage() {
|
||||
fmt.Fprintln(os.Stderr, "usage: gotype [flags] [path ...]")
|
||||
flag.PrintDefaults()
|
||||
os.Exit(2)
|
||||
}
|
||||
|
||||
func report(err error) {
|
||||
scanner.PrintError(os.Stderr, err)
|
||||
if list, ok := err.(scanner.ErrorList); ok {
|
||||
errorCount += len(list)
|
||||
return
|
||||
}
|
||||
errorCount++
|
||||
}
|
||||
|
||||
// parse may be called concurrently
|
||||
func parse(filename string, src interface{}) (*ast.File, error) {
|
||||
if *verbose {
|
||||
fmt.Println(filename)
|
||||
}
|
||||
file, err := parser.ParseFile(fset, filename, src, parserMode) // ok to access fset concurrently
|
||||
if *printAST {
|
||||
ast.Print(fset, file)
|
||||
}
|
||||
return file, err
|
||||
}
|
||||
|
||||
func parseStdin() (*ast.File, error) {
|
||||
src, err := ioutil.ReadAll(os.Stdin)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return parse("<standard input>", src)
|
||||
}
|
||||
|
||||
func parseFiles(filenames []string) ([]*ast.File, error) {
|
||||
files := make([]*ast.File, len(filenames))
|
||||
|
||||
if *sequential {
|
||||
for i, filename := range filenames {
|
||||
var err error
|
||||
files[i], err = parse(filename, nil)
|
||||
if err != nil {
|
||||
return nil, err // leave unfinished goroutines hanging
|
||||
}
|
||||
}
|
||||
} else {
|
||||
type parseResult struct {
|
||||
file *ast.File
|
||||
err error
|
||||
}
|
||||
|
||||
out := make(chan parseResult)
|
||||
for _, filename := range filenames {
|
||||
go func(filename string) {
|
||||
file, err := parse(filename, nil)
|
||||
out <- parseResult{file, err}
|
||||
}(filename)
|
||||
}
|
||||
|
||||
for i := range filenames {
|
||||
res := <-out
|
||||
if res.err != nil {
|
||||
return nil, res.err // leave unfinished goroutines hanging
|
||||
}
|
||||
files[i] = res.file
|
||||
}
|
||||
}
|
||||
|
||||
return files, nil
|
||||
}
|
||||
|
||||
func parseDir(dirname string) ([]*ast.File, error) {
|
||||
ctxt := build.Default
|
||||
pkginfo, err := ctxt.ImportDir(dirname, 0)
|
||||
if _, nogo := err.(*build.NoGoError); err != nil && !nogo {
|
||||
return nil, err
|
||||
}
|
||||
filenames := append(pkginfo.GoFiles, pkginfo.CgoFiles...)
|
||||
if *allFiles {
|
||||
filenames = append(filenames, pkginfo.TestGoFiles...)
|
||||
}
|
||||
|
||||
// complete file names
|
||||
for i, filename := range filenames {
|
||||
filenames[i] = filepath.Join(dirname, filename)
|
||||
}
|
||||
|
||||
return parseFiles(filenames)
|
||||
}
|
||||
|
||||
func getPkgFiles(args []string) ([]*ast.File, error) {
|
||||
if len(args) == 0 {
|
||||
// stdin
|
||||
file, err := parseStdin()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return []*ast.File{file}, nil
|
||||
}
|
||||
|
||||
if len(args) == 1 {
|
||||
// possibly a directory
|
||||
path := args[0]
|
||||
info, err := os.Stat(path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if info.IsDir() {
|
||||
return parseDir(path)
|
||||
}
|
||||
}
|
||||
|
||||
// list of files
|
||||
return parseFiles(args)
|
||||
}
|
||||
|
||||
func checkPkgFiles(files []*ast.File) {
|
||||
type bailout struct{}
|
||||
conf := types.Config{
|
||||
FakeImportC: true,
|
||||
Error: func(err error) {
|
||||
if !*allErrors && errorCount >= 10 {
|
||||
panic(bailout{})
|
||||
}
|
||||
report(err)
|
||||
},
|
||||
Sizes: sizes,
|
||||
}
|
||||
if *gccgo {
|
||||
var inst gccgoimporter.GccgoInstallation
|
||||
inst.InitFromDriver("gccgo")
|
||||
conf.Import = inst.GetImporter(nil, nil)
|
||||
}
|
||||
|
||||
defer func() {
|
||||
switch p := recover().(type) {
|
||||
case nil, bailout:
|
||||
// normal return or early exit
|
||||
default:
|
||||
// re-panic
|
||||
panic(p)
|
||||
}
|
||||
}()
|
||||
|
||||
const path = "pkg" // any non-empty string will do for now
|
||||
conf.Check(path, fset, files, nil)
|
||||
}
|
||||
|
||||
func printStats(d time.Duration) {
|
||||
fileCount := 0
|
||||
lineCount := 0
|
||||
fset.Iterate(func(f *token.File) bool {
|
||||
fileCount++
|
||||
lineCount += f.LineCount()
|
||||
return true
|
||||
})
|
||||
|
||||
fmt.Printf(
|
||||
"%s (%d files, %d lines, %d lines/s)\n",
|
||||
d, fileCount, lineCount, int64(float64(lineCount)/d.Seconds()),
|
||||
)
|
||||
}
|
||||
|
||||
func main() {
|
||||
runtime.GOMAXPROCS(runtime.NumCPU()) // not needed for go1.5
|
||||
|
||||
flag.Usage = usage
|
||||
flag.Parse()
|
||||
if *printAST || *printTrace {
|
||||
*sequential = true
|
||||
}
|
||||
initParserMode()
|
||||
initSizes()
|
||||
|
||||
start := time.Now()
|
||||
|
||||
files, err := getPkgFiles(flag.Args())
|
||||
if err != nil {
|
||||
report(err)
|
||||
os.Exit(2)
|
||||
}
|
||||
|
||||
checkPkgFiles(files)
|
||||
if errorCount > 0 {
|
||||
os.Exit(2)
|
||||
}
|
||||
|
||||
if *verbose {
|
||||
printStats(time.Since(start))
|
||||
}
|
||||
}
|
@ -1,188 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build !go1.5
|
||||
|
||||
// ssadump: a tool for displaying and interpreting the SSA form of Go programs.
|
||||
package main // import "golang.org/x/tools/cmd/ssadump"
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"go/build"
|
||||
"os"
|
||||
"runtime"
|
||||
"runtime/pprof"
|
||||
|
||||
"golang.org/x/tools/go/buildutil"
|
||||
"golang.org/x/tools/go/loader"
|
||||
"golang.org/x/tools/go/ssa"
|
||||
"golang.org/x/tools/go/ssa/interp"
|
||||
"golang.org/x/tools/go/ssa/ssautil"
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
var (
|
||||
modeFlag = ssa.BuilderModeFlag(flag.CommandLine, "build", 0)
|
||||
|
||||
testFlag = flag.Bool("test", false, "Loads test code (*_test.go) for imported packages.")
|
||||
|
||||
runFlag = flag.Bool("run", false, "Invokes the SSA interpreter on the program.")
|
||||
|
||||
interpFlag = flag.String("interp", "", `Options controlling the SSA test interpreter.
|
||||
The value is a sequence of zero or more more of these letters:
|
||||
R disable [R]ecover() from panic; show interpreter crash instead.
|
||||
T [T]race execution of the program. Best for single-threaded programs!
|
||||
`)
|
||||
)
|
||||
|
||||
const usage = `SSA builder and interpreter.
|
||||
Usage: ssadump [<flag> ...] <args> ...
|
||||
Use -help flag to display options.
|
||||
|
||||
Examples:
|
||||
% ssadump -build=F hello.go # dump SSA form of a single package
|
||||
% ssadump -run -interp=T hello.go # interpret a program, with tracing
|
||||
% ssadump -run -test unicode -- -test.v # interpret the unicode package's tests, verbosely
|
||||
` + loader.FromArgsUsage +
|
||||
`
|
||||
When -run is specified, ssadump will run the program.
|
||||
The entry point depends on the -test flag:
|
||||
if clear, it runs the first package named main.
|
||||
if set, it runs the tests of each package.
|
||||
`
|
||||
|
||||
var cpuprofile = flag.String("cpuprofile", "", "write cpu profile to file")
|
||||
|
||||
func init() {
|
||||
flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc)
|
||||
|
||||
// If $GOMAXPROCS isn't set, use the full capacity of the machine.
|
||||
// For small machines, use at least 4 threads.
|
||||
if os.Getenv("GOMAXPROCS") == "" {
|
||||
n := runtime.NumCPU()
|
||||
if n < 4 {
|
||||
n = 4
|
||||
}
|
||||
runtime.GOMAXPROCS(n)
|
||||
}
|
||||
}
|
||||
|
||||
func main() {
|
||||
if err := doMain(); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "ssadump: %s\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
func doMain() error {
|
||||
flag.Parse()
|
||||
args := flag.Args()
|
||||
|
||||
conf := loader.Config{Build: &build.Default}
|
||||
|
||||
// Choose types.Sizes from conf.Build.
|
||||
var wordSize int64 = 8
|
||||
switch conf.Build.GOARCH {
|
||||
case "386", "arm":
|
||||
wordSize = 4
|
||||
}
|
||||
conf.TypeChecker.Sizes = &types.StdSizes{
|
||||
MaxAlign: 8,
|
||||
WordSize: wordSize,
|
||||
}
|
||||
|
||||
var interpMode interp.Mode
|
||||
for _, c := range *interpFlag {
|
||||
switch c {
|
||||
case 'T':
|
||||
interpMode |= interp.EnableTracing
|
||||
case 'R':
|
||||
interpMode |= interp.DisableRecover
|
||||
default:
|
||||
return fmt.Errorf("unknown -interp option: '%c'", c)
|
||||
}
|
||||
}
|
||||
|
||||
if len(args) == 0 {
|
||||
fmt.Fprint(os.Stderr, usage)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// Profiling support.
|
||||
if *cpuprofile != "" {
|
||||
f, err := os.Create(*cpuprofile)
|
||||
if err != nil {
|
||||
fmt.Fprintln(os.Stderr, err)
|
||||
os.Exit(1)
|
||||
}
|
||||
pprof.StartCPUProfile(f)
|
||||
defer pprof.StopCPUProfile()
|
||||
}
|
||||
|
||||
// Use the initial packages from the command line.
|
||||
args, err := conf.FromArgs(args, *testFlag)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// The interpreter needs the runtime package.
|
||||
if *runFlag {
|
||||
conf.Import("runtime")
|
||||
}
|
||||
|
||||
// Load, parse and type-check the whole program.
|
||||
iprog, err := conf.Load()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Create and build SSA-form program representation.
|
||||
prog := ssautil.CreateProgram(iprog, *modeFlag)
|
||||
|
||||
// Build and display only the initial packages
|
||||
// (and synthetic wrappers), unless -run is specified.
|
||||
for _, info := range iprog.InitialPackages() {
|
||||
prog.Package(info.Pkg).Build()
|
||||
}
|
||||
|
||||
// Run the interpreter.
|
||||
if *runFlag {
|
||||
prog.Build()
|
||||
|
||||
var main *ssa.Package
|
||||
pkgs := prog.AllPackages()
|
||||
if *testFlag {
|
||||
// If -test, run all packages' tests.
|
||||
if len(pkgs) > 0 {
|
||||
main = prog.CreateTestMainPackage(pkgs...)
|
||||
}
|
||||
if main == nil {
|
||||
return fmt.Errorf("no tests")
|
||||
}
|
||||
} else {
|
||||
// Otherwise, run main.main.
|
||||
for _, pkg := range pkgs {
|
||||
if pkg.Pkg.Name() == "main" {
|
||||
main = pkg
|
||||
if main.Func("main") == nil {
|
||||
return fmt.Errorf("no func main() in main package")
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
if main == nil {
|
||||
return fmt.Errorf("no main package")
|
||||
}
|
||||
}
|
||||
|
||||
if runtime.GOARCH != build.Default.GOARCH {
|
||||
return fmt.Errorf("cross-interpretation is not supported (target has GOARCH %s, interpreter has %s)",
|
||||
build.Default.GOARCH, runtime.GOARCH)
|
||||
}
|
||||
|
||||
interp.Interpret(main, interpMode, conf.TypeChecker.Sizes, main.Pkg.Path(), args)
|
||||
}
|
||||
return nil
|
||||
}
|
@ -1,640 +0,0 @@
|
||||
// Copyright 2014 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build !go1.5
|
||||
|
||||
// Stringer is a tool to automate the creation of methods that satisfy the fmt.Stringer
|
||||
// interface. Given the name of a (signed or unsigned) integer type T that has constants
|
||||
// defined, stringer will create a new self-contained Go source file implementing
|
||||
// func (t T) String() string
|
||||
// The file is created in the same package and directory as the package that defines T.
|
||||
// It has helpful defaults designed for use with go generate.
|
||||
//
|
||||
// Stringer works best with constants that are consecutive values such as created using iota,
|
||||
// but creates good code regardless. In the future it might also provide custom support for
|
||||
// constant sets that are bit patterns.
|
||||
//
|
||||
// For example, given this snippet,
|
||||
//
|
||||
// package painkiller
|
||||
//
|
||||
// type Pill int
|
||||
//
|
||||
// const (
|
||||
// Placebo Pill = iota
|
||||
// Aspirin
|
||||
// Ibuprofen
|
||||
// Paracetamol
|
||||
// Acetaminophen = Paracetamol
|
||||
// )
|
||||
//
|
||||
// running this command
|
||||
//
|
||||
// stringer -type=Pill
|
||||
//
|
||||
// in the same directory will create the file pill_string.go, in package painkiller,
|
||||
// containing a definition of
|
||||
//
|
||||
// func (Pill) String() string
|
||||
//
|
||||
// That method will translate the value of a Pill constant to the string representation
|
||||
// of the respective constant name, so that the call fmt.Print(painkiller.Aspirin) will
|
||||
// print the string "Aspirin".
|
||||
//
|
||||
// Typically this process would be run using go generate, like this:
|
||||
//
|
||||
// //go:generate stringer -type=Pill
|
||||
//
|
||||
// If multiple constants have the same value, the lexically first matching name will
|
||||
// be used (in the example, Acetaminophen will print as "Paracetamol").
|
||||
//
|
||||
// With no arguments, it processes the package in the current directory.
|
||||
// Otherwise, the arguments must name a single directory holding a Go package
|
||||
// or a set of Go source files that represent a single Go package.
|
||||
//
|
||||
// The -type flag accepts a comma-separated list of types so a single run can
|
||||
// generate methods for multiple types. The default output file is t_string.go,
|
||||
// where t is the lower-cased name of the first type listed. It can be overridden
|
||||
// with the -output flag.
|
||||
//
|
||||
package main // import "golang.org/x/tools/cmd/stringer"
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"flag"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/build"
|
||||
"go/format"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/go/exact"
|
||||
"golang.org/x/tools/go/types"
|
||||
|
||||
_ "golang.org/x/tools/go/gcimporter"
|
||||
)
|
||||
|
||||
var (
|
||||
typeNames = flag.String("type", "", "comma-separated list of type names; must be set")
|
||||
output = flag.String("output", "", "output file name; default srcdir/<type>_string.go")
|
||||
)
|
||||
|
||||
// Usage is a replacement usage function for the flags package.
|
||||
func Usage() {
|
||||
fmt.Fprintf(os.Stderr, "Usage of %s:\n", os.Args[0])
|
||||
fmt.Fprintf(os.Stderr, "\tstringer [flags] -type T [directory]\n")
|
||||
fmt.Fprintf(os.Stderr, "\tstringer [flags[ -type T files... # Must be a single package\n")
|
||||
fmt.Fprintf(os.Stderr, "For more information, see:\n")
|
||||
fmt.Fprintf(os.Stderr, "\thttp://godoc.org/golang.org/x/tools/cmd/stringer\n")
|
||||
fmt.Fprintf(os.Stderr, "Flags:\n")
|
||||
flag.PrintDefaults()
|
||||
}
|
||||
|
||||
func main() {
|
||||
log.SetFlags(0)
|
||||
log.SetPrefix("stringer: ")
|
||||
flag.Usage = Usage
|
||||
flag.Parse()
|
||||
if len(*typeNames) == 0 {
|
||||
flag.Usage()
|
||||
os.Exit(2)
|
||||
}
|
||||
types := strings.Split(*typeNames, ",")
|
||||
|
||||
// We accept either one directory or a list of files. Which do we have?
|
||||
args := flag.Args()
|
||||
if len(args) == 0 {
|
||||
// Default: process whole package in current directory.
|
||||
args = []string{"."}
|
||||
}
|
||||
|
||||
// Parse the package once.
|
||||
var (
|
||||
dir string
|
||||
g Generator
|
||||
)
|
||||
if len(args) == 1 && isDirectory(args[0]) {
|
||||
dir = args[0]
|
||||
g.parsePackageDir(args[0])
|
||||
} else {
|
||||
dir = filepath.Dir(args[0])
|
||||
g.parsePackageFiles(args)
|
||||
}
|
||||
|
||||
// Print the header and package clause.
|
||||
g.Printf("// Code generated by \"stringer %s\"; DO NOT EDIT\n", strings.Join(os.Args[1:], " "))
|
||||
g.Printf("\n")
|
||||
g.Printf("package %s", g.pkg.name)
|
||||
g.Printf("\n")
|
||||
g.Printf("import \"fmt\"\n") // Used by all methods.
|
||||
|
||||
// Run generate for each type.
|
||||
for _, typeName := range types {
|
||||
g.generate(typeName)
|
||||
}
|
||||
|
||||
// Format the output.
|
||||
src := g.format()
|
||||
|
||||
// Write to file.
|
||||
outputName := *output
|
||||
if outputName == "" {
|
||||
baseName := fmt.Sprintf("%s_string.go", types[0])
|
||||
outputName = filepath.Join(dir, strings.ToLower(baseName))
|
||||
}
|
||||
err := ioutil.WriteFile(outputName, src, 0644)
|
||||
if err != nil {
|
||||
log.Fatalf("writing output: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
// isDirectory reports whether the named file is a directory.
|
||||
func isDirectory(name string) bool {
|
||||
info, err := os.Stat(name)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
return info.IsDir()
|
||||
}
|
||||
|
||||
// Generator holds the state of the analysis. Primarily used to buffer
|
||||
// the output for format.Source.
|
||||
type Generator struct {
|
||||
buf bytes.Buffer // Accumulated output.
|
||||
pkg *Package // Package we are scanning.
|
||||
}
|
||||
|
||||
func (g *Generator) Printf(format string, args ...interface{}) {
|
||||
fmt.Fprintf(&g.buf, format, args...)
|
||||
}
|
||||
|
||||
// File holds a single parsed file and associated data.
|
||||
type File struct {
|
||||
pkg *Package // Package to which this file belongs.
|
||||
file *ast.File // Parsed AST.
|
||||
// These fields are reset for each type being generated.
|
||||
typeName string // Name of the constant type.
|
||||
values []Value // Accumulator for constant values of that type.
|
||||
}
|
||||
|
||||
type Package struct {
|
||||
dir string
|
||||
name string
|
||||
defs map[*ast.Ident]types.Object
|
||||
files []*File
|
||||
typesPkg *types.Package
|
||||
}
|
||||
|
||||
// parsePackageDir parses the package residing in the directory.
|
||||
func (g *Generator) parsePackageDir(directory string) {
|
||||
pkg, err := build.Default.ImportDir(directory, 0)
|
||||
if err != nil {
|
||||
log.Fatalf("cannot process directory %s: %s", directory, err)
|
||||
}
|
||||
var names []string
|
||||
names = append(names, pkg.GoFiles...)
|
||||
names = append(names, pkg.CgoFiles...)
|
||||
// TODO: Need to think about constants in test files. Maybe write type_string_test.go
|
||||
// in a separate pass? For later.
|
||||
// names = append(names, pkg.TestGoFiles...) // These are also in the "foo" package.
|
||||
names = append(names, pkg.SFiles...)
|
||||
names = prefixDirectory(directory, names)
|
||||
g.parsePackage(directory, names, nil)
|
||||
}
|
||||
|
||||
// parsePackageFiles parses the package occupying the named files.
|
||||
func (g *Generator) parsePackageFiles(names []string) {
|
||||
g.parsePackage(".", names, nil)
|
||||
}
|
||||
|
||||
// prefixDirectory places the directory name on the beginning of each name in the list.
|
||||
func prefixDirectory(directory string, names []string) []string {
|
||||
if directory == "." {
|
||||
return names
|
||||
}
|
||||
ret := make([]string, len(names))
|
||||
for i, name := range names {
|
||||
ret[i] = filepath.Join(directory, name)
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
// parsePackage analyzes the single package constructed from the named files.
|
||||
// If text is non-nil, it is a string to be used instead of the content of the file,
|
||||
// to be used for testing. parsePackage exits if there is an error.
|
||||
func (g *Generator) parsePackage(directory string, names []string, text interface{}) {
|
||||
var files []*File
|
||||
var astFiles []*ast.File
|
||||
g.pkg = new(Package)
|
||||
fs := token.NewFileSet()
|
||||
for _, name := range names {
|
||||
if !strings.HasSuffix(name, ".go") {
|
||||
continue
|
||||
}
|
||||
parsedFile, err := parser.ParseFile(fs, name, text, 0)
|
||||
if err != nil {
|
||||
log.Fatalf("parsing package: %s: %s", name, err)
|
||||
}
|
||||
astFiles = append(astFiles, parsedFile)
|
||||
files = append(files, &File{
|
||||
file: parsedFile,
|
||||
pkg: g.pkg,
|
||||
})
|
||||
}
|
||||
if len(astFiles) == 0 {
|
||||
log.Fatalf("%s: no buildable Go files", directory)
|
||||
}
|
||||
g.pkg.name = astFiles[0].Name.Name
|
||||
g.pkg.files = files
|
||||
g.pkg.dir = directory
|
||||
// Type check the package.
|
||||
g.pkg.check(fs, astFiles)
|
||||
}
|
||||
|
||||
// check type-checks the package. The package must be OK to proceed.
func (pkg *Package) check(fs *token.FileSet, astFiles []*ast.File) {
	pkg.defs = make(map[*ast.Ident]types.Object)
	config := types.Config{FakeImportC: true}
	info := &types.Info{
		Defs: pkg.defs,
	}
	typesPkg, err := config.Check(pkg.dir, fs, astFiles, info)
	if err != nil {
		log.Fatalf("checking package: %s", err)
	}
	pkg.typesPkg = typesPkg
}
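
A minimal standalone sketch (not part of the patch) of the same type-checking flow using the standard library go/types and go/importer that this change moves callers to; the source text and the Pill constants are made-up sample data.

package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"
	"log"
)

const src = `package p

type Pill int

const (
	Placebo Pill = iota
	Aspirin
)
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		log.Fatal(err)
	}
	info := &types.Info{Defs: make(map[*ast.Ident]types.Object)}
	conf := types.Config{Importer: importer.Default()}
	if _, err := conf.Check("p", fset, []*ast.File{f}, info); err != nil {
		log.Fatal(err)
	}
	// Print every constant the checker recorded in Defs.
	for ident, obj := range info.Defs {
		if c, ok := obj.(*types.Const); ok {
			fmt.Println(ident.Name, "=", c.Val())
		}
	}
}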
|
||||
|
||||
// generate produces the String method for the named type.
|
||||
func (g *Generator) generate(typeName string) {
|
||||
values := make([]Value, 0, 100)
|
||||
for _, file := range g.pkg.files {
|
||||
// Set the state for this run of the walker.
|
||||
file.typeName = typeName
|
||||
file.values = nil
|
||||
if file.file != nil {
|
||||
ast.Inspect(file.file, file.genDecl)
|
||||
values = append(values, file.values...)
|
||||
}
|
||||
}
|
||||
|
||||
if len(values) == 0 {
|
||||
log.Fatalf("no values defined for type %s", typeName)
|
||||
}
|
||||
runs := splitIntoRuns(values)
|
||||
// The decision of which pattern to use depends on the number of
|
||||
// runs in the numbers. If there's only one, it's easy. For more than
|
||||
// one, there's a tradeoff between complexity and size of the data
|
||||
// and code vs. the simplicity of a map. A map takes more space,
|
||||
// but so does the code. The decision here (crossover at 10) is
|
||||
// arbitrary, but considers that for large numbers of runs the cost
|
||||
// of the linear scan in the switch might become important, and
|
||||
// rather than use yet another algorithm such as binary search,
|
||||
// we punt and use a map. In any case, the likelihood of a map
|
||||
// being necessary for any realistic example other than bitmasks
|
||||
// is very low. And bitmasks probably deserve their own analysis,
|
||||
// to be done some other day.
|
||||
switch {
|
||||
case len(runs) == 1:
|
||||
g.buildOneRun(runs, typeName)
|
||||
case len(runs) <= 10:
|
||||
g.buildMultipleRuns(runs, typeName)
|
||||
default:
|
||||
g.buildMap(runs, typeName)
|
||||
}
|
||||
}
|
||||
|
||||
// splitIntoRuns breaks the values into runs of contiguous sequences.
// For example, given 1,2,3,5,6,7 it returns {1,2,3},{5,6,7}.
// The input slice is known to be non-empty.
func splitIntoRuns(values []Value) [][]Value {
	// We use stable sort so the lexically first name is chosen for equal elements.
	sort.Stable(byValue(values))
	// Remove duplicates. Stable sort has put the one we want to print first,
	// so use that one. The String method won't care about which named constant
	// was the argument, so the first name for the given value is the only one to keep.
	// We need to do this because identical values would cause the switch or map
	// to fail to compile.
	j := 1
	for i := 1; i < len(values); i++ {
		if values[i].value != values[i-1].value {
			values[j] = values[i]
			j++
		}
	}
	values = values[:j]
	runs := make([][]Value, 0, 10)
	for len(values) > 0 {
		// One contiguous sequence per outer loop.
		i := 1
		for i < len(values) && values[i].value == values[i-1].value+1 {
			i++
		}
		runs = append(runs, values[:i])
		values = values[i:]
	}
	return runs
}
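
Not part of the patch: the same run-splitting idea, reduced to a standalone sketch over plain uint64 values so the behaviour described in the comment above can be run directly.

package main

import "fmt"

// splitRuns mirrors splitIntoRuns for already-sorted, de-duplicated integers.
func splitRuns(vals []uint64) [][]uint64 {
	var runs [][]uint64
	for len(vals) > 0 {
		i := 1
		for i < len(vals) && vals[i] == vals[i-1]+1 {
			i++
		}
		runs = append(runs, vals[:i])
		vals = vals[i:]
	}
	return runs
}

func main() {
	fmt.Println(splitRuns([]uint64{1, 2, 3, 5, 6, 7})) // [[1 2 3] [5 6 7]]
}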
|
||||
|
||||
// format returns the gofmt-ed contents of the Generator's buffer.
|
||||
func (g *Generator) format() []byte {
|
||||
src, err := format.Source(g.buf.Bytes())
|
||||
if err != nil {
|
||||
// Should never happen, but can arise when developing this code.
|
||||
// The user can compile the output to see the error.
|
||||
log.Printf("warning: internal error: invalid Go generated: %s", err)
|
||||
log.Printf("warning: compile the package to analyze the error")
|
||||
return g.buf.Bytes()
|
||||
}
|
||||
return src
|
||||
}
|
||||
|
||||
// Value represents a declared constant.
|
||||
type Value struct {
|
||||
name string // The name of the constant.
|
||||
// The value is stored as a bit pattern alone. The boolean tells us
|
||||
// whether to interpret it as an int64 or a uint64; the only place
|
||||
// this matters is when sorting.
|
||||
// Much of the time the str field is all we need; it is printed
|
||||
// by Value.String.
|
||||
value uint64 // Will be converted to int64 when needed.
|
||||
signed bool // Whether the constant is a signed type.
|
||||
str string // The string representation given by the "go/exact" package.
|
||||
}
|
||||
|
||||
func (v *Value) String() string {
|
||||
return v.str
|
||||
}
|
||||
|
||||
// byValue lets us sort the constants into increasing order.
// We take care in the Less method to sort in signed or unsigned order,
// as appropriate.
type byValue []Value

func (b byValue) Len() int      { return len(b) }
func (b byValue) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
func (b byValue) Less(i, j int) bool {
	if b[i].signed {
		return int64(b[i].value) < int64(b[j].value)
	}
	return b[i].value < b[j].value
}
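
A small aside (not in the patch) on why Less consults the signed flag: the same 64-bit pattern orders differently as int64 and as uint64.

package main

import "fmt"

func main() {
	bits := uint64(0xFFFFFFFFFFFFFFFF) // the bit pattern of int64(-1)
	fmt.Println(int64(bits) < 0)       // true: as a signed value it sorts before zero
	fmt.Println(bits < 1)              // false: as an unsigned value it sorts last
}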
|
||||
|
||||
// genDecl processes one declaration clause.
|
||||
func (f *File) genDecl(node ast.Node) bool {
|
||||
decl, ok := node.(*ast.GenDecl)
|
||||
if !ok || decl.Tok != token.CONST {
|
||||
// We only care about const declarations.
|
||||
return true
|
||||
}
|
||||
// The name of the type of the constants we are declaring.
|
||||
// Can change if this is a multi-element declaration.
|
||||
typ := ""
|
||||
// Loop over the elements of the declaration. Each element is a ValueSpec:
|
||||
// a list of names possibly followed by a type, possibly followed by values.
|
||||
// If the type and value are both missing, we carry down the type (and value,
|
||||
// but the "go/types" package takes care of that).
|
||||
for _, spec := range decl.Specs {
|
||||
vspec := spec.(*ast.ValueSpec) // Guaranteed to succeed as this is CONST.
|
||||
if vspec.Type == nil && len(vspec.Values) > 0 {
|
||||
// "X = 1". With no type but a value, the constant is untyped.
|
||||
// Skip this vspec and reset the remembered type.
|
||||
typ = ""
|
||||
continue
|
||||
}
|
||||
if vspec.Type != nil {
|
||||
// "X T". We have a type. Remember it.
|
||||
ident, ok := vspec.Type.(*ast.Ident)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
typ = ident.Name
|
||||
}
|
||||
if typ != f.typeName {
|
||||
// This is not the type we're looking for.
|
||||
continue
|
||||
}
|
||||
// We now have a list of names (from one line of source code) all being
|
||||
// declared with the desired type.
|
||||
// Grab their names and actual values and store them in f.values.
|
||||
for _, name := range vspec.Names {
|
||||
if name.Name == "_" {
|
||||
continue
|
||||
}
|
||||
// This dance lets the type checker find the values for us. It's a
|
||||
// bit tricky: look up the object declared by the name, find its
|
||||
// types.Const, and extract its value.
|
||||
obj, ok := f.pkg.defs[name]
|
||||
if !ok {
|
||||
log.Fatalf("no value for constant %s", name)
|
||||
}
|
||||
info := obj.Type().Underlying().(*types.Basic).Info()
|
||||
if info&types.IsInteger == 0 {
|
||||
log.Fatalf("can't handle non-integer constant type %s", typ)
|
||||
}
|
||||
value := obj.(*types.Const).Val() // Guaranteed to succeed as this is CONST.
|
||||
if value.Kind() != exact.Int {
|
||||
log.Fatalf("can't happen: constant is not an integer %s", name)
|
||||
}
|
||||
i64, isInt := exact.Int64Val(value)
|
||||
u64, isUint := exact.Uint64Val(value)
|
||||
if !isInt && !isUint {
|
||||
log.Fatalf("internal error: value of %s is not an integer: %s", name, value.String())
|
||||
}
|
||||
if !isInt {
|
||||
u64 = uint64(i64)
|
||||
}
|
||||
v := Value{
|
||||
name: name.Name,
|
||||
value: u64,
|
||||
signed: info&types.IsUnsigned == 0,
|
||||
str: value.String(),
|
||||
}
|
||||
f.values = append(f.values, v)
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Helpers
|
||||
|
||||
// usize returns the number of bits of the smallest unsigned integer
|
||||
// type that will hold n. Used to create the smallest possible slice of
|
||||
// integers to use as indexes into the concatenated strings.
|
||||
func usize(n int) int {
|
||||
switch {
|
||||
case n < 1<<8:
|
||||
return 8
|
||||
case n < 1<<16:
|
||||
return 16
|
||||
default:
|
||||
// 2^32 is enough constants for anyone.
|
||||
return 32
|
||||
}
|
||||
}
|
||||
|
||||
// declareIndexAndNameVars declares the index slices and concatenated names
|
||||
// strings representing the runs of values.
|
||||
func (g *Generator) declareIndexAndNameVars(runs [][]Value, typeName string) {
|
||||
var indexes, names []string
|
||||
for i, run := range runs {
|
||||
index, name := g.createIndexAndNameDecl(run, typeName, fmt.Sprintf("_%d", i))
|
||||
indexes = append(indexes, index)
|
||||
names = append(names, name)
|
||||
}
|
||||
g.Printf("const (\n")
|
||||
for _, name := range names {
|
||||
g.Printf("\t%s\n", name)
|
||||
}
|
||||
g.Printf(")\n\n")
|
||||
g.Printf("var (")
|
||||
for _, index := range indexes {
|
||||
g.Printf("\t%s\n", index)
|
||||
}
|
||||
g.Printf(")\n\n")
|
||||
}
|
||||
|
||||
// declareIndexAndNameVar is the single-run version of declareIndexAndNameVars
|
||||
func (g *Generator) declareIndexAndNameVar(run []Value, typeName string) {
|
||||
index, name := g.createIndexAndNameDecl(run, typeName, "")
|
||||
g.Printf("const %s\n", name)
|
||||
g.Printf("var %s\n", index)
|
||||
}
|
||||
|
||||
// createIndexAndNameDecl returns the pair of declarations for the run. The caller will add "const" and "var".
func (g *Generator) createIndexAndNameDecl(run []Value, typeName string, suffix string) (string, string) {
	b := new(bytes.Buffer)
	indexes := make([]int, len(run))
	for i := range run {
		b.WriteString(run[i].name)
		indexes[i] = b.Len()
	}
	nameConst := fmt.Sprintf("_%s_name%s = %q", typeName, suffix, b.String())
	nameLen := b.Len()
	b.Reset()
	fmt.Fprintf(b, "_%s_index%s = [...]uint%d{0, ", typeName, suffix, usize(nameLen))
	for i, v := range indexes {
		if i > 0 {
			fmt.Fprintf(b, ", ")
		}
		fmt.Fprintf(b, "%d", v)
	}
	fmt.Fprintf(b, "}")
	return b.String(), nameConst
}
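
As a rough aside (not in the patch), the cumulative byte offsets written into the index array can be reproduced from a run of made-up constant names like so:

package main

import "fmt"

func main() {
	names := []string{"Placebo", "Aspirin", "Ibuprofen"} // sample constant names
	offsets := []int{0}
	total := 0
	for _, n := range names {
		total += len(n)
		offsets = append(offsets, total)
	}
	fmt.Println(offsets) // [0 7 14 23], the values that end up in the index declaration
}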
|
||||
|
||||
// declareNameVars declares the concatenated names string representing all the values in the runs.
|
||||
func (g *Generator) declareNameVars(runs [][]Value, typeName string, suffix string) {
|
||||
g.Printf("const _%s_name%s = \"", typeName, suffix)
|
||||
for _, run := range runs {
|
||||
for i := range run {
|
||||
g.Printf("%s", run[i].name)
|
||||
}
|
||||
}
|
||||
g.Printf("\"\n")
|
||||
}
|
||||
|
||||
// buildOneRun generates the variables and String method for a single run of contiguous values.
|
||||
func (g *Generator) buildOneRun(runs [][]Value, typeName string) {
|
||||
values := runs[0]
|
||||
g.Printf("\n")
|
||||
g.declareIndexAndNameVar(values, typeName)
|
||||
// The generated code is simple enough to write as a Printf format.
|
||||
lessThanZero := ""
|
||||
if values[0].signed {
|
||||
lessThanZero = "i < 0 || "
|
||||
}
|
||||
if values[0].value == 0 { // Signed or unsigned, 0 is still 0.
|
||||
g.Printf(stringOneRun, typeName, usize(len(values)), lessThanZero)
|
||||
} else {
|
||||
g.Printf(stringOneRunWithOffset, typeName, values[0].String(), usize(len(values)), lessThanZero)
|
||||
}
|
||||
}
|
||||
|
||||
// Arguments to format are:
|
||||
// [1]: type name
|
||||
// [2]: size of index element (8 for uint8 etc.)
|
||||
// [3]: less than zero check (for signed types)
|
||||
const stringOneRun = `func (i %[1]s) String() string {
|
||||
if %[3]si >= %[1]s(len(_%[1]s_index)-1) {
|
||||
return fmt.Sprintf("%[1]s(%%d)", i)
|
||||
}
|
||||
return _%[1]s_name[_%[1]s_index[i]:_%[1]s_index[i+1]]
|
||||
}
|
||||
`
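
Assuming a made-up signed type Pill whose first value is zero, the single-run output comes out roughly as follows (a sample expansion, not part of the patch):

package pill

import "fmt"

type Pill int

const (
	Placebo Pill = iota
	Aspirin
	Ibuprofen
)

const _Pill_name = "PlaceboAspirinIbuprofen"

var _Pill_index = [...]uint8{0, 7, 14, 23}

func (i Pill) String() string {
	if i < 0 || i >= Pill(len(_Pill_index)-1) {
		return fmt.Sprintf("Pill(%d)", i)
	}
	return _Pill_name[_Pill_index[i]:_Pill_index[i+1]]
}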
|
||||
|
||||
// Arguments to format are:
|
||||
// [1]: type name
|
||||
// [2]: lowest defined value for type, as a string
|
||||
// [3]: size of index element (8 for uint8 etc.)
|
||||
// [4]: less than zero check (for signed types)
|
||||
/*
|
||||
*/
|
||||
const stringOneRunWithOffset = `func (i %[1]s) String() string {
|
||||
i -= %[2]s
|
||||
if %[4]si >= %[1]s(len(_%[1]s_index)-1) {
|
||||
return fmt.Sprintf("%[1]s(%%d)", i + %[2]s)
|
||||
}
|
||||
return _%[1]s_name[_%[1]s_index[i] : _%[1]s_index[i+1]]
|
||||
}
|
||||
`
|
||||
|
||||
// buildMultipleRuns generates the variables and String method for multiple runs of contiguous values.
|
||||
// For this pattern, a single Printf format won't do.
|
||||
func (g *Generator) buildMultipleRuns(runs [][]Value, typeName string) {
|
||||
g.Printf("\n")
|
||||
g.declareIndexAndNameVars(runs, typeName)
|
||||
g.Printf("func (i %s) String() string {\n", typeName)
|
||||
g.Printf("\tswitch {\n")
|
||||
for i, values := range runs {
|
||||
if len(values) == 1 {
|
||||
g.Printf("\tcase i == %s:\n", &values[0])
|
||||
g.Printf("\t\treturn _%s_name_%d\n", typeName, i)
|
||||
continue
|
||||
}
|
||||
g.Printf("\tcase %s <= i && i <= %s:\n", &values[0], &values[len(values)-1])
|
||||
if values[0].value != 0 {
|
||||
g.Printf("\t\ti -= %s\n", &values[0])
|
||||
}
|
||||
g.Printf("\t\treturn _%s_name_%d[_%s_index_%d[i]:_%s_index_%d[i+1]]\n",
|
||||
typeName, i, typeName, i, typeName, i)
|
||||
}
|
||||
g.Printf("\tdefault:\n")
|
||||
g.Printf("\t\treturn fmt.Sprintf(\"%s(%%d)\", i)\n", typeName)
|
||||
g.Printf("\t}\n")
|
||||
g.Printf("}\n")
|
||||
}
|
||||
|
||||
// buildMap handles the case where the space is so sparse a map is a reasonable fallback.
|
||||
// It's a rare situation but has simple code.
|
||||
func (g *Generator) buildMap(runs [][]Value, typeName string) {
|
||||
g.Printf("\n")
|
||||
g.declareNameVars(runs, typeName, "")
|
||||
g.Printf("\nvar _%s_map = map[%s]string{\n", typeName, typeName)
|
||||
n := 0
|
||||
for _, values := range runs {
|
||||
for _, value := range values {
|
||||
g.Printf("\t%s: _%s_name[%d:%d],\n", &value, typeName, n, n+len(value.name))
|
||||
n += len(value.name)
|
||||
}
|
||||
}
|
||||
g.Printf("}\n\n")
|
||||
g.Printf(stringMap, typeName)
|
||||
}
|
||||
|
||||
// Argument to format is the type name.
|
||||
const stringMap = `func (i %[1]s) String() string {
|
||||
if str, ok := _%[1]s_map[i]; ok {
|
||||
return str
|
||||
}
|
||||
return fmt.Sprintf("%[1]s(%%d)", i)
|
||||
}
|
||||
`
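
For the sparse case, the map-based output (chosen when there are more than ten runs) has roughly this shape; only two sample constants of a made-up type are shown here to keep it short, and this is not part of the patch:

package pill

import "fmt"

type Pill int

const (
	Placebo  Pill = 0
	Morphine Pill = 100
)

const _Pill_name = "PlaceboMorphine"

var _Pill_map = map[Pill]string{
	0:   _Pill_name[0:7],
	100: _Pill_name[7:15],
}

func (i Pill) String() string {
	if str, ok := _Pill_map[i]; ok {
		return str
	}
	return fmt.Sprintf("Pill(%d)", i)
}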
|
@ -1,662 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Identify mismatches between assembly files and Go func declarations.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// 'kind' is a kind of assembly variable.
|
||||
// The kinds 1, 2, 4, 8 stand for values of that size.
|
||||
type asmKind int
|
||||
|
||||
// These special kinds are not valid sizes.
|
||||
const (
|
||||
asmString asmKind = 100 + iota
|
||||
asmSlice
|
||||
asmInterface
|
||||
asmEmptyInterface
|
||||
)
|
||||
|
||||
// An asmArch describes assembly parameters for an architecture
|
||||
type asmArch struct {
|
||||
name string
|
||||
ptrSize int
|
||||
intSize int
|
||||
maxAlign int
|
||||
bigEndian bool
|
||||
stack string
|
||||
lr bool
|
||||
}
|
||||
|
||||
// An asmFunc describes the expected variables for a function on a given architecture.
|
||||
type asmFunc struct {
|
||||
arch *asmArch
|
||||
size int // size of all arguments
|
||||
vars map[string]*asmVar
|
||||
varByOffset map[int]*asmVar
|
||||
}
|
||||
|
||||
// An asmVar describes a single assembly variable.
|
||||
type asmVar struct {
|
||||
name string
|
||||
kind asmKind
|
||||
typ string
|
||||
off int
|
||||
size int
|
||||
inner []*asmVar
|
||||
}
|
||||
|
||||
var (
|
||||
asmArch386 = asmArch{"386", 4, 4, 4, false, "SP", false}
|
||||
asmArchArm = asmArch{"arm", 4, 4, 4, false, "R13", true}
|
||||
asmArchArm64 = asmArch{"arm64", 8, 8, 8, false, "RSP", true}
|
||||
asmArchAmd64 = asmArch{"amd64", 8, 8, 8, false, "SP", false}
|
||||
asmArchAmd64p32 = asmArch{"amd64p32", 4, 4, 8, false, "SP", false}
|
||||
asmArchPower64 = asmArch{"power64", 8, 8, 8, true, "R1", true}
|
||||
asmArchPower64LE = asmArch{"power64le", 8, 8, 8, false, "R1", true}
|
||||
|
||||
arches = []*asmArch{
|
||||
&asmArch386,
|
||||
&asmArchArm,
|
||||
&asmArchArm64,
|
||||
&asmArchAmd64,
|
||||
&asmArchAmd64p32,
|
||||
&asmArchPower64,
|
||||
&asmArchPower64LE,
|
||||
}
|
||||
)
|
||||
|
||||
var (
|
||||
re = regexp.MustCompile
|
||||
asmPlusBuild = re(`//\s+\+build\s+([^\n]+)`)
|
||||
asmTEXT = re(`\bTEXT\b.*·([^\(]+)\(SB\)(?:\s*,\s*([0-9A-Z|+]+))?(?:\s*,\s*\$(-?[0-9]+)(?:-([0-9]+))?)?`)
|
||||
asmDATA = re(`\b(DATA|GLOBL)\b`)
|
||||
asmNamedFP = re(`([a-zA-Z0-9_\xFF-\x{10FFFF}]+)(?:\+([0-9]+))\(FP\)`)
|
||||
asmUnnamedFP = re(`[^+\-0-9](([0-9]+)\(FP\))`)
|
||||
asmSP = re(`[^+\-0-9](([0-9]+)\(([A-Z0-9]+)\))`)
|
||||
asmOpcode = re(`^\s*(?:[A-Z0-9a-z_]+:)?\s*([A-Z]+)\s*([^,]*)(?:,\s*(.*))?`)
|
||||
power64Suff = re(`([BHWD])(ZU|Z|U|BR)?$`)
|
||||
)
|
||||
|
||||
func asmCheck(pkg *Package) {
|
||||
if !vet("asmdecl") {
|
||||
return
|
||||
}
|
||||
|
||||
// No work if no assembly files.
|
||||
if !pkg.hasFileWithSuffix(".s") {
|
||||
return
|
||||
}
|
||||
|
||||
// Gather declarations. knownFunc[name][arch] is func description.
|
||||
knownFunc := make(map[string]map[string]*asmFunc)
|
||||
|
||||
for _, f := range pkg.files {
|
||||
if f.file != nil {
|
||||
for _, decl := range f.file.Decls {
|
||||
if decl, ok := decl.(*ast.FuncDecl); ok && decl.Body == nil {
|
||||
knownFunc[decl.Name.Name] = f.asmParseDecl(decl)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Files:
|
||||
for _, f := range pkg.files {
|
||||
if !strings.HasSuffix(f.name, ".s") {
|
||||
continue
|
||||
}
|
||||
Println("Checking file", f.name)
|
||||
|
||||
// Determine architecture from file name if possible.
|
||||
var arch string
|
||||
var archDef *asmArch
|
||||
for _, a := range arches {
|
||||
if strings.HasSuffix(f.name, "_"+a.name+".s") {
|
||||
arch = a.name
|
||||
archDef = a
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
lines := strings.SplitAfter(string(f.content), "\n")
|
||||
var (
|
||||
fn *asmFunc
|
||||
fnName string
|
||||
localSize, argSize int
|
||||
wroteSP bool
|
||||
haveRetArg bool
|
||||
retLine []int
|
||||
)
|
||||
|
||||
flushRet := func() {
|
||||
if fn != nil && fn.vars["ret"] != nil && !haveRetArg && len(retLine) > 0 {
|
||||
v := fn.vars["ret"]
|
||||
for _, line := range retLine {
|
||||
f.Badf(token.NoPos, "%s:%d: [%s] %s: RET without writing to %d-byte ret+%d(FP)", f.name, line, arch, fnName, v.size, v.off)
|
||||
}
|
||||
}
|
||||
retLine = nil
|
||||
}
|
||||
for lineno, line := range lines {
|
||||
lineno++
|
||||
|
||||
badf := func(format string, args ...interface{}) {
|
||||
f.Badf(token.NoPos, "%s:%d: [%s] %s: %s", f.name, lineno, arch, fnName, fmt.Sprintf(format, args...))
|
||||
}
|
||||
|
||||
if arch == "" {
|
||||
// Determine architecture from +build line if possible.
|
||||
if m := asmPlusBuild.FindStringSubmatch(line); m != nil {
|
||||
Fields:
|
||||
for _, fld := range strings.Fields(m[1]) {
|
||||
for _, a := range arches {
|
||||
if a.name == fld {
|
||||
arch = a.name
|
||||
archDef = a
|
||||
break Fields
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if m := asmTEXT.FindStringSubmatch(line); m != nil {
|
||||
flushRet()
|
||||
if arch == "" {
|
||||
f.Warnf(token.NoPos, "%s: cannot determine architecture for assembly file", f.name)
|
||||
continue Files
|
||||
}
|
||||
fnName = m[1]
|
||||
fn = knownFunc[m[1]][arch]
|
||||
if fn != nil {
|
||||
size, _ := strconv.Atoi(m[4])
|
||||
if size != fn.size && (m[2] != "7" && !strings.Contains(m[2], "NOSPLIT") || size != 0) {
|
||||
badf("wrong argument size %d; expected $...-%d", size, fn.size)
|
||||
}
|
||||
}
|
||||
localSize, _ = strconv.Atoi(m[3])
|
||||
localSize += archDef.intSize
|
||||
if archDef.lr {
|
||||
// Account for caller's saved LR
|
||||
localSize += archDef.intSize
|
||||
}
|
||||
argSize, _ = strconv.Atoi(m[4])
|
||||
if fn == nil && !strings.Contains(fnName, "<>") {
|
||||
badf("function %s missing Go declaration", fnName)
|
||||
}
|
||||
wroteSP = false
|
||||
haveRetArg = false
|
||||
continue
|
||||
} else if strings.Contains(line, "TEXT") && strings.Contains(line, "SB") {
|
||||
// function, but not visible from Go (didn't match asmTEXT), so stop checking
|
||||
flushRet()
|
||||
fn = nil
|
||||
fnName = ""
|
||||
continue
|
||||
}
|
||||
|
||||
if strings.Contains(line, "RET") {
|
||||
retLine = append(retLine, lineno)
|
||||
}
|
||||
|
||||
if fnName == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
if asmDATA.FindStringSubmatch(line) != nil {
|
||||
fn = nil
|
||||
}
|
||||
|
||||
if archDef == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
if strings.Contains(line, ", "+archDef.stack) || strings.Contains(line, ",\t"+archDef.stack) {
|
||||
wroteSP = true
|
||||
continue
|
||||
}
|
||||
|
||||
for _, m := range asmSP.FindAllStringSubmatch(line, -1) {
|
||||
if m[3] != archDef.stack || wroteSP {
|
||||
continue
|
||||
}
|
||||
off := 0
|
||||
if m[1] != "" {
|
||||
off, _ = strconv.Atoi(m[2])
|
||||
}
|
||||
if off >= localSize {
|
||||
if fn != nil {
|
||||
v := fn.varByOffset[off-localSize]
|
||||
if v != nil {
|
||||
badf("%s should be %s+%d(FP)", m[1], v.name, off-localSize)
|
||||
continue
|
||||
}
|
||||
}
|
||||
if off >= localSize+argSize {
|
||||
badf("use of %s points beyond argument frame", m[1])
|
||||
continue
|
||||
}
|
||||
badf("use of %s to access argument frame", m[1])
|
||||
}
|
||||
}
|
||||
|
||||
if fn == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, m := range asmUnnamedFP.FindAllStringSubmatch(line, -1) {
|
||||
off, _ := strconv.Atoi(m[2])
|
||||
v := fn.varByOffset[off]
|
||||
if v != nil {
|
||||
badf("use of unnamed argument %s; offset %d is %s+%d(FP)", m[1], off, v.name, v.off)
|
||||
} else {
|
||||
badf("use of unnamed argument %s", m[1])
|
||||
}
|
||||
}
|
||||
|
||||
for _, m := range asmNamedFP.FindAllStringSubmatch(line, -1) {
|
||||
name := m[1]
|
||||
off := 0
|
||||
if m[2] != "" {
|
||||
off, _ = strconv.Atoi(m[2])
|
||||
}
|
||||
if name == "ret" || strings.HasPrefix(name, "ret_") {
|
||||
haveRetArg = true
|
||||
}
|
||||
v := fn.vars[name]
|
||||
if v == nil {
|
||||
// Allow argframe+0(FP).
|
||||
if name == "argframe" && off == 0 {
|
||||
continue
|
||||
}
|
||||
v = fn.varByOffset[off]
|
||||
if v != nil {
|
||||
badf("unknown variable %s; offset %d is %s+%d(FP)", name, off, v.name, v.off)
|
||||
} else {
|
||||
badf("unknown variable %s", name)
|
||||
}
|
||||
continue
|
||||
}
|
||||
asmCheckVar(badf, fn, line, m[0], off, v)
|
||||
}
|
||||
}
|
||||
flushRet()
|
||||
}
|
||||
}
|
||||
|
||||
// asmParseDecl parses a function decl for expected assembly variables.
|
||||
func (f *File) asmParseDecl(decl *ast.FuncDecl) map[string]*asmFunc {
|
||||
var (
|
||||
arch *asmArch
|
||||
fn *asmFunc
|
||||
offset int
|
||||
failed bool
|
||||
)
|
||||
|
||||
addVar := func(outer string, v asmVar) {
|
||||
if vo := fn.vars[outer]; vo != nil {
|
||||
vo.inner = append(vo.inner, &v)
|
||||
}
|
||||
fn.vars[v.name] = &v
|
||||
for i := 0; i < v.size; i++ {
|
||||
fn.varByOffset[v.off+i] = &v
|
||||
}
|
||||
}
|
||||
|
||||
addParams := func(list []*ast.Field) {
|
||||
for i, fld := range list {
|
||||
// Determine alignment, size, and kind of type in declaration.
|
||||
var align, size int
|
||||
var kind asmKind
|
||||
names := fld.Names
|
||||
typ := f.gofmt(fld.Type)
|
||||
switch t := fld.Type.(type) {
|
||||
default:
|
||||
switch typ {
|
||||
default:
|
||||
f.Warnf(fld.Type.Pos(), "unknown assembly argument type %s", typ)
|
||||
failed = true
|
||||
return
|
||||
case "int8", "uint8", "byte", "bool":
|
||||
size = 1
|
||||
case "int16", "uint16":
|
||||
size = 2
|
||||
case "int32", "uint32", "float32":
|
||||
size = 4
|
||||
case "int64", "uint64", "float64":
|
||||
align = arch.maxAlign
|
||||
size = 8
|
||||
case "int", "uint":
|
||||
size = arch.intSize
|
||||
case "uintptr", "iword", "Word", "Errno", "unsafe.Pointer":
|
||||
size = arch.ptrSize
|
||||
case "string", "ErrorString":
|
||||
size = arch.ptrSize * 2
|
||||
align = arch.ptrSize
|
||||
kind = asmString
|
||||
}
|
||||
case *ast.ChanType, *ast.FuncType, *ast.MapType, *ast.StarExpr:
|
||||
size = arch.ptrSize
|
||||
case *ast.InterfaceType:
|
||||
align = arch.ptrSize
|
||||
size = 2 * arch.ptrSize
|
||||
if len(t.Methods.List) > 0 {
|
||||
kind = asmInterface
|
||||
} else {
|
||||
kind = asmEmptyInterface
|
||||
}
|
||||
case *ast.ArrayType:
|
||||
if t.Len == nil {
|
||||
size = arch.ptrSize + 2*arch.intSize
|
||||
align = arch.ptrSize
|
||||
kind = asmSlice
|
||||
break
|
||||
}
|
||||
f.Warnf(fld.Type.Pos(), "unsupported assembly argument type %s", typ)
|
||||
failed = true
|
||||
case *ast.StructType:
|
||||
f.Warnf(fld.Type.Pos(), "unsupported assembly argument type %s", typ)
|
||||
failed = true
|
||||
}
|
||||
if align == 0 {
|
||||
align = size
|
||||
}
|
||||
if kind == 0 {
|
||||
kind = asmKind(size)
|
||||
}
|
||||
offset += -offset & (align - 1)
|
||||
|
||||
// Create variable for each name being declared with this type.
|
||||
if len(names) == 0 {
|
||||
name := "unnamed"
|
||||
if decl.Type.Results != nil && len(decl.Type.Results.List) > 0 && &list[0] == &decl.Type.Results.List[0] && i == 0 {
|
||||
// Assume assembly will refer to single unnamed result as r.
|
||||
name = "ret"
|
||||
}
|
||||
names = []*ast.Ident{{Name: name}}
|
||||
}
|
||||
for _, id := range names {
|
||||
name := id.Name
|
||||
addVar("", asmVar{
|
||||
name: name,
|
||||
kind: kind,
|
||||
typ: typ,
|
||||
off: offset,
|
||||
size: size,
|
||||
})
|
||||
switch kind {
|
||||
case 8:
|
||||
if arch.ptrSize == 4 {
|
||||
w1, w2 := "lo", "hi"
|
||||
if arch.bigEndian {
|
||||
w1, w2 = w2, w1
|
||||
}
|
||||
addVar(name, asmVar{
|
||||
name: name + "_" + w1,
|
||||
kind: 4,
|
||||
typ: "half " + typ,
|
||||
off: offset,
|
||||
size: 4,
|
||||
})
|
||||
addVar(name, asmVar{
|
||||
name: name + "_" + w2,
|
||||
kind: 4,
|
||||
typ: "half " + typ,
|
||||
off: offset + 4,
|
||||
size: 4,
|
||||
})
|
||||
}
|
||||
|
||||
case asmEmptyInterface:
|
||||
addVar(name, asmVar{
|
||||
name: name + "_type",
|
||||
kind: asmKind(arch.ptrSize),
|
||||
typ: "interface type",
|
||||
off: offset,
|
||||
size: arch.ptrSize,
|
||||
})
|
||||
addVar(name, asmVar{
|
||||
name: name + "_data",
|
||||
kind: asmKind(arch.ptrSize),
|
||||
typ: "interface data",
|
||||
off: offset + arch.ptrSize,
|
||||
size: arch.ptrSize,
|
||||
})
|
||||
|
||||
case asmInterface:
|
||||
addVar(name, asmVar{
|
||||
name: name + "_itable",
|
||||
kind: asmKind(arch.ptrSize),
|
||||
typ: "interface itable",
|
||||
off: offset,
|
||||
size: arch.ptrSize,
|
||||
})
|
||||
addVar(name, asmVar{
|
||||
name: name + "_data",
|
||||
kind: asmKind(arch.ptrSize),
|
||||
typ: "interface data",
|
||||
off: offset + arch.ptrSize,
|
||||
size: arch.ptrSize,
|
||||
})
|
||||
|
||||
case asmSlice:
|
||||
addVar(name, asmVar{
|
||||
name: name + "_base",
|
||||
kind: asmKind(arch.ptrSize),
|
||||
typ: "slice base",
|
||||
off: offset,
|
||||
size: arch.ptrSize,
|
||||
})
|
||||
addVar(name, asmVar{
|
||||
name: name + "_len",
|
||||
kind: asmKind(arch.intSize),
|
||||
typ: "slice len",
|
||||
off: offset + arch.ptrSize,
|
||||
size: arch.intSize,
|
||||
})
|
||||
addVar(name, asmVar{
|
||||
name: name + "_cap",
|
||||
kind: asmKind(arch.intSize),
|
||||
typ: "slice cap",
|
||||
off: offset + arch.ptrSize + arch.intSize,
|
||||
size: arch.intSize,
|
||||
})
|
||||
|
||||
case asmString:
|
||||
addVar(name, asmVar{
|
||||
name: name + "_base",
|
||||
kind: asmKind(arch.ptrSize),
|
||||
typ: "string base",
|
||||
off: offset,
|
||||
size: arch.ptrSize,
|
||||
})
|
||||
addVar(name, asmVar{
|
||||
name: name + "_len",
|
||||
kind: asmKind(arch.intSize),
|
||||
typ: "string len",
|
||||
off: offset + arch.ptrSize,
|
||||
size: arch.intSize,
|
||||
})
|
||||
}
|
||||
offset += size
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
m := make(map[string]*asmFunc)
|
||||
for _, arch = range arches {
|
||||
fn = &asmFunc{
|
||||
arch: arch,
|
||||
vars: make(map[string]*asmVar),
|
||||
varByOffset: make(map[int]*asmVar),
|
||||
}
|
||||
offset = 0
|
||||
addParams(decl.Type.Params.List)
|
||||
if decl.Type.Results != nil && len(decl.Type.Results.List) > 0 {
|
||||
offset += -offset & (arch.maxAlign - 1)
|
||||
addParams(decl.Type.Results.List)
|
||||
}
|
||||
fn.size = offset
|
||||
m[arch.name] = fn
|
||||
}
|
||||
|
||||
if failed {
|
||||
return nil
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
// asmCheckVar checks a single variable reference.
|
||||
func asmCheckVar(badf func(string, ...interface{}), fn *asmFunc, line, expr string, off int, v *asmVar) {
|
||||
m := asmOpcode.FindStringSubmatch(line)
|
||||
if m == nil {
|
||||
if !strings.HasPrefix(strings.TrimSpace(line), "//") {
|
||||
badf("cannot find assembly opcode")
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Determine operand sizes from instruction.
|
||||
// Typically the suffix suffices, but there are exceptions.
|
||||
var src, dst, kind asmKind
|
||||
op := m[1]
|
||||
switch fn.arch.name + "." + op {
|
||||
case "386.FMOVLP":
|
||||
src, dst = 8, 4
|
||||
case "arm.MOVD":
|
||||
src = 8
|
||||
case "arm.MOVW":
|
||||
src = 4
|
||||
case "arm.MOVH", "arm.MOVHU":
|
||||
src = 2
|
||||
case "arm.MOVB", "arm.MOVBU":
|
||||
src = 1
|
||||
// LEA* opcodes don't really read the second arg.
|
||||
// They just take the address of it.
|
||||
case "386.LEAL":
|
||||
dst = 4
|
||||
case "amd64.LEAQ":
|
||||
dst = 8
|
||||
case "amd64p32.LEAL":
|
||||
dst = 4
|
||||
default:
|
||||
switch fn.arch.name {
|
||||
case "386", "amd64":
|
||||
if strings.HasPrefix(op, "F") && (strings.HasSuffix(op, "D") || strings.HasSuffix(op, "DP")) {
|
||||
// FMOVDP, FXCHD, etc
|
||||
src = 8
|
||||
break
|
||||
}
|
||||
if strings.HasPrefix(op, "F") && (strings.HasSuffix(op, "F") || strings.HasSuffix(op, "FP")) {
|
||||
// FMOVFP, FXCHF, etc
|
||||
src = 4
|
||||
break
|
||||
}
|
||||
if strings.HasSuffix(op, "SD") {
|
||||
// MOVSD, SQRTSD, etc
|
||||
src = 8
|
||||
break
|
||||
}
|
||||
if strings.HasSuffix(op, "SS") {
|
||||
// MOVSS, SQRTSS, etc
|
||||
src = 4
|
||||
break
|
||||
}
|
||||
if strings.HasPrefix(op, "SET") {
|
||||
// SETEQ, etc
|
||||
src = 1
|
||||
break
|
||||
}
|
||||
switch op[len(op)-1] {
|
||||
case 'B':
|
||||
src = 1
|
||||
case 'W':
|
||||
src = 2
|
||||
case 'L':
|
||||
src = 4
|
||||
case 'D', 'Q':
|
||||
src = 8
|
||||
}
|
||||
case "power64", "power64le":
|
||||
// Strip standard suffixes to reveal size letter.
|
||||
m := power64Suff.FindStringSubmatch(op)
|
||||
if m != nil {
|
||||
switch m[1][0] {
|
||||
case 'B':
|
||||
src = 1
|
||||
case 'H':
|
||||
src = 2
|
||||
case 'W':
|
||||
src = 4
|
||||
case 'D':
|
||||
src = 8
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if dst == 0 {
|
||||
dst = src
|
||||
}
|
||||
|
||||
// Determine whether the match we're holding
|
||||
// is the first or second argument.
|
||||
if strings.Index(line, expr) > strings.Index(line, ",") {
|
||||
kind = dst
|
||||
} else {
|
||||
kind = src
|
||||
}
|
||||
|
||||
vk := v.kind
|
||||
vt := v.typ
|
||||
switch vk {
|
||||
case asmInterface, asmEmptyInterface, asmString, asmSlice:
|
||||
// allow reference to first word (pointer)
|
||||
vk = v.inner[0].kind
|
||||
vt = v.inner[0].typ
|
||||
}
|
||||
|
||||
if off != v.off {
|
||||
var inner bytes.Buffer
|
||||
for i, vi := range v.inner {
|
||||
if len(v.inner) > 1 {
|
||||
fmt.Fprintf(&inner, ",")
|
||||
}
|
||||
fmt.Fprintf(&inner, " ")
|
||||
if i == len(v.inner)-1 {
|
||||
fmt.Fprintf(&inner, "or ")
|
||||
}
|
||||
fmt.Fprintf(&inner, "%s+%d(FP)", vi.name, vi.off)
|
||||
}
|
||||
badf("invalid offset %s; expected %s+%d(FP)%s", expr, v.name, v.off, inner.String())
|
||||
return
|
||||
}
|
||||
if kind != 0 && kind != vk {
|
||||
var inner bytes.Buffer
|
||||
if len(v.inner) > 0 {
|
||||
fmt.Fprintf(&inner, " containing")
|
||||
for i, vi := range v.inner {
|
||||
if i > 0 && len(v.inner) > 2 {
|
||||
fmt.Fprintf(&inner, ",")
|
||||
}
|
||||
fmt.Fprintf(&inner, " ")
|
||||
if i > 0 && i == len(v.inner)-1 {
|
||||
fmt.Fprintf(&inner, "and ")
|
||||
}
|
||||
fmt.Fprintf(&inner, "%s+%d(FP)", vi.name, vi.off)
|
||||
}
|
||||
}
|
||||
badf("invalid %s of %s; %s is %d-byte value%s", op, expr, vt, vk, inner.String())
|
||||
}
|
||||
}
|
@ -1,49 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
/*
|
||||
This file contains the code to check for useless assignments.
|
||||
*/
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"reflect"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("assign",
|
||||
"check for useless assignments",
|
||||
checkAssignStmt,
|
||||
assignStmt)
|
||||
}
|
||||
|
||||
// TODO: should also check for assignments to struct fields inside methods
|
||||
// that are on T instead of *T.
|
||||
|
||||
// checkAssignStmt checks for assignments of the form "<expr> = <expr>".
|
||||
// These are almost always useless, and even when they aren't they are usually a mistake.
|
||||
func checkAssignStmt(f *File, node ast.Node) {
|
||||
stmt := node.(*ast.AssignStmt)
|
||||
if stmt.Tok != token.ASSIGN {
|
||||
return // ignore :=
|
||||
}
|
||||
if len(stmt.Lhs) != len(stmt.Rhs) {
|
||||
// If LHS and RHS have different cardinality, they can't be the same.
|
||||
return
|
||||
}
|
||||
for i, lhs := range stmt.Lhs {
|
||||
rhs := stmt.Rhs[i]
|
||||
if reflect.TypeOf(lhs) != reflect.TypeOf(rhs) {
|
||||
continue // short-circuit the heavy-weight gofmt check
|
||||
}
|
||||
le := f.gofmt(lhs)
|
||||
re := f.gofmt(rhs)
|
||||
if le == re {
|
||||
f.Badf(stmt.Pos(), "self-assignment of %s to %s", re, le)
|
||||
}
|
||||
}
|
||||
}
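
The kind of statement this check reports, as a tiny sample program (not in the patch):

package main

func main() {
	x := 1
	x = x // reported: self-assignment of x to x
	_ = x
}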
|
@ -1,66 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("atomic",
|
||||
"check for common mistaken usages of the sync/atomic package",
|
||||
checkAtomicAssignment,
|
||||
assignStmt)
|
||||
}
|
||||
|
||||
// checkAtomicAssignment walks the assignment statement checking for common
|
||||
// mistaken usage of atomic package, such as: x = atomic.AddUint64(&x, 1)
|
||||
func checkAtomicAssignment(f *File, node ast.Node) {
|
||||
n := node.(*ast.AssignStmt)
|
||||
if len(n.Lhs) != len(n.Rhs) {
|
||||
return
|
||||
}
|
||||
|
||||
for i, right := range n.Rhs {
|
||||
call, ok := right.(*ast.CallExpr)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
sel, ok := call.Fun.(*ast.SelectorExpr)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
pkg, ok := sel.X.(*ast.Ident)
|
||||
if !ok || pkg.Name != "atomic" {
|
||||
continue
|
||||
}
|
||||
|
||||
switch sel.Sel.Name {
|
||||
case "AddInt32", "AddInt64", "AddUint32", "AddUint64", "AddUintptr":
|
||||
f.checkAtomicAddAssignment(n.Lhs[i], call)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// checkAtomicAddAssignment walks the atomic.Add* method calls checking for assigning the return value
|
||||
// to the same variable being used in the operation
|
||||
func (f *File) checkAtomicAddAssignment(left ast.Expr, call *ast.CallExpr) {
|
||||
if len(call.Args) != 2 {
|
||||
return
|
||||
}
|
||||
arg := call.Args[0]
|
||||
broken := false
|
||||
|
||||
if uarg, ok := arg.(*ast.UnaryExpr); ok && uarg.Op == token.AND {
|
||||
broken = f.gofmt(left) == f.gofmt(uarg.X)
|
||||
} else if star, ok := left.(*ast.StarExpr); ok {
|
||||
broken = f.gofmt(star.X) == f.gofmt(arg)
|
||||
}
|
||||
|
||||
if broken {
|
||||
f.Bad(left.Pos(), "direct assignment to atomic value")
|
||||
}
|
||||
}
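
A sample (not in the patch) of the mistaken pattern this check reports, next to the correct form:

package main

import "sync/atomic"

func main() {
	var x uint64
	x = atomic.AddUint64(&x, 1) // reported: direct assignment to atomic value
	atomic.AddUint64(&x, 1)     // correct: the add already updates x in place
}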
|
cmd/vet/bool.go
@ -1,186 +0,0 @@
|
||||
// Copyright 2014 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains boolean condition tests.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("bool",
|
||||
"check for mistakes involving boolean operators",
|
||||
checkBool,
|
||||
binaryExpr)
|
||||
}
|
||||
|
||||
func checkBool(f *File, n ast.Node) {
|
||||
e := n.(*ast.BinaryExpr)
|
||||
|
||||
var op boolOp
|
||||
switch e.Op {
|
||||
case token.LOR:
|
||||
op = or
|
||||
case token.LAND:
|
||||
op = and
|
||||
default:
|
||||
return
|
||||
}
|
||||
|
||||
comm := op.commutativeSets(e)
|
||||
for _, exprs := range comm {
|
||||
op.checkRedundant(f, exprs)
|
||||
op.checkSuspect(f, exprs)
|
||||
}
|
||||
}
|
||||
|
||||
type boolOp struct {
|
||||
name string
|
||||
tok token.Token // token corresponding to this operator
|
||||
badEq token.Token // token corresponding to the equality test that should not be used with this operator
|
||||
}
|
||||
|
||||
var (
|
||||
or = boolOp{"or", token.LOR, token.NEQ}
|
||||
and = boolOp{"and", token.LAND, token.EQL}
|
||||
)
|
||||
|
||||
// commutativeSets returns all side effect free sets of
|
||||
// expressions in e that are connected by op.
|
||||
// For example, given 'a || b || f() || c || d' with the or op,
|
||||
// commutativeSets returns {{b, a}, {d, c}}.
|
||||
func (op boolOp) commutativeSets(e *ast.BinaryExpr) [][]ast.Expr {
|
||||
exprs := op.split(e)
|
||||
|
||||
// Partition the slice of expressions into commutative sets.
|
||||
i := 0
|
||||
var sets [][]ast.Expr
|
||||
for j := 0; j <= len(exprs); j++ {
|
||||
if j == len(exprs) || hasSideEffects(exprs[j]) {
|
||||
if i < j {
|
||||
sets = append(sets, exprs[i:j])
|
||||
}
|
||||
i = j + 1
|
||||
}
|
||||
}
|
||||
|
||||
return sets
|
||||
}
|
||||
|
||||
// checkRedundant checks for expressions of the form
|
||||
// e && e
|
||||
// e || e
|
||||
// Exprs must contain only side effect free expressions.
|
||||
func (op boolOp) checkRedundant(f *File, exprs []ast.Expr) {
|
||||
seen := make(map[string]bool)
|
||||
for _, e := range exprs {
|
||||
efmt := f.gofmt(e)
|
||||
if seen[efmt] {
|
||||
f.Badf(e.Pos(), "redundant %s: %s %s %s", op.name, efmt, op.tok, efmt)
|
||||
} else {
|
||||
seen[efmt] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// checkSuspect checks for expressions of the form
|
||||
// x != c1 || x != c2
|
||||
// x == c1 && x == c2
|
||||
// where c1 and c2 are constant expressions.
|
||||
// If c1 and c2 are the same then it's redundant;
|
||||
// if c1 and c2 are different then it's always true or always false.
|
||||
// Exprs must contain only side effect free expressions.
|
||||
func (op boolOp) checkSuspect(f *File, exprs []ast.Expr) {
|
||||
// seen maps from expressions 'x' to equality expressions 'x != c'.
|
||||
seen := make(map[string]string)
|
||||
|
||||
for _, e := range exprs {
|
||||
bin, ok := e.(*ast.BinaryExpr)
|
||||
if !ok || bin.Op != op.badEq {
|
||||
continue
|
||||
}
|
||||
|
||||
// In order to avoid false positives, restrict to cases
|
||||
// in which one of the operands is constant. We're then
|
||||
// interested in the other operand.
|
||||
// In the rare case in which both operands are constant
|
||||
// (e.g. runtime.GOOS and "windows"), we'll only catch
|
||||
// mistakes if the LHS is repeated, which is how most
|
||||
// code is written.
|
||||
var x ast.Expr
|
||||
switch {
|
||||
case f.pkg.types[bin.Y].Value != nil:
|
||||
x = bin.X
|
||||
case f.pkg.types[bin.X].Value != nil:
|
||||
x = bin.Y
|
||||
default:
|
||||
continue
|
||||
}
|
||||
|
||||
// e is of the form 'x != c' or 'x == c'.
|
||||
xfmt := f.gofmt(x)
|
||||
efmt := f.gofmt(e)
|
||||
if prev, found := seen[xfmt]; found {
|
||||
// checkRedundant handles the case in which efmt == prev.
|
||||
if efmt != prev {
|
||||
f.Badf(e.Pos(), "suspect %s: %s %s %s", op.name, efmt, op.tok, prev)
|
||||
}
|
||||
} else {
|
||||
seen[xfmt] = efmt
|
||||
}
|
||||
}
|
||||
}
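
A sample program (not in the patch) with a condition this check reports as suspect, since it is true for every possible value:

package main

import "fmt"

func main() {
	x := 3
	if x != 1 || x != 2 { // reported as a suspect "or"
		fmt.Println("always reached")
	}
}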
|
||||
|
||||
// hasSideEffects reports whether evaluation of e has side effects.
|
||||
func hasSideEffects(e ast.Expr) bool {
|
||||
safe := true
|
||||
ast.Inspect(e, func(node ast.Node) bool {
|
||||
switch n := node.(type) {
|
||||
// Using CallExpr here will catch conversions
|
||||
// as well as function and method invocations.
|
||||
// We'll live with the false negatives for now.
|
||||
case *ast.CallExpr:
|
||||
safe = false
|
||||
return false
|
||||
case *ast.UnaryExpr:
|
||||
if n.Op == token.ARROW {
|
||||
safe = false
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
return !safe
|
||||
}
|
||||
|
||||
// split returns a slice of all subexpressions in e that are connected by op.
|
||||
// For example, given 'a || (b || c) || d' with the or op,
|
||||
// split returns []{d, c, b, a}.
|
||||
func (op boolOp) split(e ast.Expr) (exprs []ast.Expr) {
|
||||
for {
|
||||
e = unparen(e)
|
||||
if b, ok := e.(*ast.BinaryExpr); ok && b.Op == op.tok {
|
||||
exprs = append(exprs, op.split(b.Y)...)
|
||||
e = b.X
|
||||
} else {
|
||||
exprs = append(exprs, e)
|
||||
break
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// unparen returns e with any enclosing parentheses stripped.
|
||||
func unparen(e ast.Expr) ast.Expr {
|
||||
for {
|
||||
p, ok := e.(*ast.ParenExpr)
|
||||
if !ok {
|
||||
return e
|
||||
}
|
||||
e = p.X
|
||||
}
|
||||
}
|
@ -1,91 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
var (
|
||||
nl = []byte("\n")
|
||||
slashSlash = []byte("//")
|
||||
plusBuild = []byte("+build")
|
||||
)
|
||||
|
||||
// checkBuildTag checks that build tags are in the correct location and well-formed.
|
||||
func checkBuildTag(name string, data []byte) {
|
||||
if !vet("buildtags") {
|
||||
return
|
||||
}
|
||||
lines := bytes.SplitAfter(data, nl)
|
||||
|
||||
// Determine cutpoint where +build comments are no longer valid.
|
||||
// They are valid in leading // comments in the file followed by
|
||||
// a blank line.
|
||||
var cutoff int
|
||||
for i, line := range lines {
|
||||
line = bytes.TrimSpace(line)
|
||||
if len(line) == 0 {
|
||||
cutoff = i
|
||||
continue
|
||||
}
|
||||
if bytes.HasPrefix(line, slashSlash) {
|
||||
continue
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
for i, line := range lines {
|
||||
line = bytes.TrimSpace(line)
|
||||
if !bytes.HasPrefix(line, slashSlash) {
|
||||
continue
|
||||
}
|
||||
text := bytes.TrimSpace(line[2:])
|
||||
if bytes.HasPrefix(text, plusBuild) {
|
||||
fields := bytes.Fields(text)
|
||||
if !bytes.Equal(fields[0], plusBuild) {
|
||||
// Comment is something like +buildasdf not +build.
|
||||
fmt.Fprintf(os.Stderr, "%s:%d: possible malformed +build comment\n", name, i+1)
|
||||
continue
|
||||
}
|
||||
if i >= cutoff {
|
||||
fmt.Fprintf(os.Stderr, "%s:%d: +build comment must appear before package clause and be followed by a blank line\n", name, i+1)
|
||||
setExit(1)
|
||||
continue
|
||||
}
|
||||
// Check arguments.
|
||||
Args:
|
||||
for _, arg := range fields[1:] {
|
||||
for _, elem := range strings.Split(string(arg), ",") {
|
||||
if strings.HasPrefix(elem, "!!") {
|
||||
fmt.Fprintf(os.Stderr, "%s:%d: invalid double negative in build constraint: %s\n", name, i+1, arg)
|
||||
setExit(1)
|
||||
break Args
|
||||
}
|
||||
if strings.HasPrefix(elem, "!") {
|
||||
elem = elem[1:]
|
||||
}
|
||||
for _, c := range elem {
|
||||
if !unicode.IsLetter(c) && !unicode.IsDigit(c) && c != '_' && c != '.' {
|
||||
fmt.Fprintf(os.Stderr, "%s:%d: invalid non-alphanumeric build constraint: %s\n", name, i+1, arg)
|
||||
setExit(1)
|
||||
break Args
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
continue
|
||||
}
|
||||
// Comment with +build but not at beginning.
|
||||
if bytes.Contains(line, plusBuild) && i < cutoff {
|
||||
fmt.Fprintf(os.Stderr, "%s:%d: possible malformed +build comment\n", name, i+1)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
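
A sample header (not in the patch) laid out the way this check expects: the +build comment sits before the package clause and is followed by a blank line; the constraint itself is arbitrary.

// +build linux darwin

// Package scratch exists only to show the expected layout; the constraint
// above is arbitrary.
package scratch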
|
@ -1,125 +0,0 @@
|
||||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains the test for unkeyed struct literals.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"go/ast"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/cmd/vet/whitelist"
|
||||
)
|
||||
|
||||
var compositeWhiteList = flag.Bool("compositewhitelist", true, "use composite white list; for testing only")
|
||||
|
||||
func init() {
|
||||
register("composites",
|
||||
"check that composite literals used field-keyed elements",
|
||||
checkUnkeyedLiteral,
|
||||
compositeLit)
|
||||
}
|
||||
|
||||
// checkUnkeyedLiteral checks if a composite literal is a struct literal with
|
||||
// unkeyed fields.
|
||||
func checkUnkeyedLiteral(f *File, node ast.Node) {
|
||||
c := node.(*ast.CompositeLit)
|
||||
typ := c.Type
|
||||
for {
|
||||
if typ1, ok := c.Type.(*ast.ParenExpr); ok {
|
||||
typ = typ1
|
||||
continue
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
switch typ.(type) {
|
||||
case *ast.ArrayType:
|
||||
return
|
||||
case *ast.MapType:
|
||||
return
|
||||
case *ast.StructType:
|
||||
return // a literal struct type does not need to use keys
|
||||
case *ast.Ident:
|
||||
// A simple type name like t or T does not need keys either,
|
||||
// since it is almost certainly declared in the current package.
|
||||
// (The exception is names being used via import . "pkg", but
|
||||
// those are already breaking the Go 1 compatibility promise,
|
||||
// so not reporting potential additional breakage seems okay.)
|
||||
return
|
||||
}
|
||||
|
||||
// Otherwise the type is a selector like pkg.Name.
|
||||
// We only care if pkg.Name is a struct, not if it's a map, array, or slice.
|
||||
isStruct, typeString := f.pkg.isStruct(c)
|
||||
if !isStruct {
|
||||
return
|
||||
}
|
||||
|
||||
if typeString == "" { // isStruct doesn't know
|
||||
typeString = f.gofmt(typ)
|
||||
}
|
||||
|
||||
// It's a struct, or we can't tell it's not a struct because we don't have types.
|
||||
|
||||
// Check if the CompositeLit contains an unkeyed field.
|
||||
allKeyValue := true
|
||||
for _, e := range c.Elts {
|
||||
if _, ok := e.(*ast.KeyValueExpr); !ok {
|
||||
allKeyValue = false
|
||||
break
|
||||
}
|
||||
}
|
||||
if allKeyValue {
|
||||
return
|
||||
}
|
||||
|
||||
// Check that the CompositeLit's type has the form pkg.Typ.
|
||||
s, ok := c.Type.(*ast.SelectorExpr)
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
pkg, ok := s.X.(*ast.Ident)
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
|
||||
// Convert the package name to an import path, and compare to a whitelist.
|
||||
path := pkgPath(f, pkg.Name)
|
||||
if path == "" {
|
||||
f.Badf(c.Pos(), "unresolvable package for %s.%s literal", pkg.Name, s.Sel.Name)
|
||||
return
|
||||
}
|
||||
typeName := path + "." + s.Sel.Name
|
||||
if *compositeWhiteList && whitelist.UnkeyedLiteral[typeName] {
|
||||
return
|
||||
}
|
||||
|
||||
f.Bad(c.Pos(), typeString+" composite literal uses unkeyed fields")
|
||||
}
|
||||
|
||||
// pkgPath returns the import path "image/png" for the package name "png".
|
||||
//
|
||||
// This is based purely on syntax and convention, and not on the imported
|
||||
// package's contents. It will be incorrect if a package name differs from the
|
||||
// leaf element of the import path, or if the package was a dot import.
|
||||
func pkgPath(f *File, pkgName string) (path string) {
|
||||
for _, x := range f.file.Imports {
|
||||
s := strings.Trim(x.Path.Value, `"`)
|
||||
if x.Name != nil {
|
||||
// Catch `import pkgName "foo/bar"`.
|
||||
if x.Name.Name == pkgName {
|
||||
return s
|
||||
}
|
||||
} else {
|
||||
// Catch `import "pkgName"` or `import "foo/bar/pkgName"`.
|
||||
if s == pkgName || strings.HasSuffix(s, "/"+pkgName) {
|
||||
return s
|
||||
}
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
@ -1,155 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains the code to check that locks are not passed by value.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("copylocks",
|
||||
"check that locks are not passed by value",
|
||||
checkCopyLocks,
|
||||
funcDecl, rangeStmt)
|
||||
}
|
||||
|
||||
// checkCopyLocks checks whether node might
|
||||
// inadvertently copy a lock.
|
||||
func checkCopyLocks(f *File, node ast.Node) {
|
||||
switch node := node.(type) {
|
||||
case *ast.RangeStmt:
|
||||
checkCopyLocksRange(f, node)
|
||||
case *ast.FuncDecl:
|
||||
checkCopyLocksFunc(f, node)
|
||||
}
|
||||
}
|
||||
|
||||
// checkCopyLocksFunc checks whether a function might
|
||||
// inadvertently copy a lock, by checking whether
|
||||
// its receiver, parameters, or return values
|
||||
// are locks.
|
||||
func checkCopyLocksFunc(f *File, d *ast.FuncDecl) {
|
||||
if d.Recv != nil && len(d.Recv.List) > 0 {
|
||||
expr := d.Recv.List[0].Type
|
||||
if path := lockPath(f.pkg.typesPkg, f.pkg.types[expr].Type); path != nil {
|
||||
f.Badf(expr.Pos(), "%s passes Lock by value: %v", d.Name.Name, path)
|
||||
}
|
||||
}
|
||||
|
||||
if d.Type.Params != nil {
|
||||
for _, field := range d.Type.Params.List {
|
||||
expr := field.Type
|
||||
if path := lockPath(f.pkg.typesPkg, f.pkg.types[expr].Type); path != nil {
|
||||
f.Badf(expr.Pos(), "%s passes Lock by value: %v", d.Name.Name, path)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if d.Type.Results != nil {
|
||||
for _, field := range d.Type.Results.List {
|
||||
expr := field.Type
|
||||
if path := lockPath(f.pkg.typesPkg, f.pkg.types[expr].Type); path != nil {
|
||||
f.Badf(expr.Pos(), "%s returns Lock by value: %v", d.Name.Name, path)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// checkCopyLocksRange checks whether a range statement
|
||||
// might inadvertently copy a lock by checking whether
|
||||
// any of the range variables are locks.
|
||||
func checkCopyLocksRange(f *File, r *ast.RangeStmt) {
|
||||
checkCopyLocksRangeVar(f, r.Tok, r.Key)
|
||||
checkCopyLocksRangeVar(f, r.Tok, r.Value)
|
||||
}
|
||||
|
||||
func checkCopyLocksRangeVar(f *File, rtok token.Token, e ast.Expr) {
|
||||
if e == nil {
|
||||
return
|
||||
}
|
||||
id, isId := e.(*ast.Ident)
|
||||
if isId && id.Name == "_" {
|
||||
return
|
||||
}
|
||||
|
||||
var typ types.Type
|
||||
if rtok == token.DEFINE {
|
||||
if !isId {
|
||||
return
|
||||
}
|
||||
obj := f.pkg.defs[id]
|
||||
if obj == nil {
|
||||
return
|
||||
}
|
||||
typ = obj.Type()
|
||||
} else {
|
||||
typ = f.pkg.types[e].Type
|
||||
}
|
||||
|
||||
if typ == nil {
|
||||
return
|
||||
}
|
||||
if path := lockPath(f.pkg.typesPkg, typ); path != nil {
|
||||
f.Badf(e.Pos(), "range var %s copies Lock: %v", f.gofmt(e), path)
|
||||
}
|
||||
}
|
||||
|
||||
type typePath []types.Type
|
||||
|
||||
// String pretty-prints a typePath.
|
||||
func (path typePath) String() string {
|
||||
n := len(path)
|
||||
var buf bytes.Buffer
|
||||
for i := range path {
|
||||
if i > 0 {
|
||||
fmt.Fprint(&buf, " contains ")
|
||||
}
|
||||
// The human-readable path is in reverse order, outermost to innermost.
|
||||
fmt.Fprint(&buf, path[n-i-1].String())
|
||||
}
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
// lockPath returns a typePath describing the location of a lock value
|
||||
// contained in typ. If there is no contained lock, it returns nil.
|
||||
func lockPath(tpkg *types.Package, typ types.Type) typePath {
|
||||
if typ == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// We're only interested in the case in which the underlying
|
||||
// type is a struct. (Interfaces and pointers are safe to copy.)
|
||||
styp, ok := typ.Underlying().(*types.Struct)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
// We're looking for cases in which a reference to this type
|
||||
// can be locked, but a value cannot. This differentiates
|
||||
// embedded interfaces from embedded values.
|
||||
if plock := types.NewMethodSet(types.NewPointer(typ)).Lookup(tpkg, "Lock"); plock != nil {
|
||||
if lock := types.NewMethodSet(typ).Lookup(tpkg, "Lock"); lock == nil {
|
||||
return []types.Type{typ}
|
||||
}
|
||||
}
|
||||
|
||||
nfields := styp.NumFields()
|
||||
for i := 0; i < nfields; i++ {
|
||||
ftyp := styp.Field(i).Type()
|
||||
subpath := lockPath(tpkg, ftyp)
|
||||
if subpath != nil {
|
||||
return append(subpath, typ)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
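
A sample (not in the patch) of a method this check reports, since the value receiver copies a sync.Mutex on every call:

package main

import "sync"

type counter struct {
	mu sync.Mutex
	n  int
}

// Inc is reported ("Inc passes Lock by value") because the value receiver
// copies c.mu rather than locking the caller's mutex.
func (c counter) Inc() {
	c.mu.Lock()
	c.n++
	c.mu.Unlock()
}

func main() {
	var c counter
	c.Inc()
}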
|
@ -1,296 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Check for syntactically unreachable code.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("unreachable",
|
||||
"check for unreachable code",
|
||||
checkUnreachable,
|
||||
funcDecl, funcLit)
|
||||
}
|
||||
|
||||
type deadState struct {
|
||||
f *File
|
||||
hasBreak map[ast.Stmt]bool
|
||||
hasGoto map[string]bool
|
||||
labels map[string]ast.Stmt
|
||||
breakTarget ast.Stmt
|
||||
|
||||
reachable bool
|
||||
}
|
||||
|
||||
// checkUnreachable checks a function body for dead code.
|
||||
func checkUnreachable(f *File, node ast.Node) {
|
||||
var body *ast.BlockStmt
|
||||
switch n := node.(type) {
|
||||
case *ast.FuncDecl:
|
||||
body = n.Body
|
||||
case *ast.FuncLit:
|
||||
body = n.Body
|
||||
}
|
||||
if body == nil {
|
||||
return
|
||||
}
|
||||
|
||||
d := &deadState{
|
||||
f: f,
|
||||
hasBreak: make(map[ast.Stmt]bool),
|
||||
hasGoto: make(map[string]bool),
|
||||
labels: make(map[string]ast.Stmt),
|
||||
}
|
||||
|
||||
d.findLabels(body)
|
||||
|
||||
d.reachable = true
|
||||
d.findDead(body)
|
||||
}
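
A sample (not in the patch) of a statement this check flags:

package main

import "fmt"

func f() int {
	return 1
	fmt.Println("never reached") // reported: unreachable code
	return 2
}

func main() {
	fmt.Println(f())
}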
|
||||
|
||||
// findLabels gathers information about the labels defined and used by stmt
|
||||
// and about which statements break, whether a label is involved or not.
|
||||
func (d *deadState) findLabels(stmt ast.Stmt) {
|
||||
switch x := stmt.(type) {
|
||||
default:
|
||||
d.f.Warnf(x.Pos(), "internal error in findLabels: unexpected statement %T", x)
|
||||
|
||||
case *ast.AssignStmt,
|
||||
*ast.BadStmt,
|
||||
*ast.DeclStmt,
|
||||
*ast.DeferStmt,
|
||||
*ast.EmptyStmt,
|
||||
*ast.ExprStmt,
|
||||
*ast.GoStmt,
|
||||
*ast.IncDecStmt,
|
||||
*ast.ReturnStmt,
|
||||
*ast.SendStmt:
|
||||
// no statements inside
|
||||
|
||||
case *ast.BlockStmt:
|
||||
for _, stmt := range x.List {
|
||||
d.findLabels(stmt)
|
||||
}
|
||||
|
||||
case *ast.BranchStmt:
|
||||
switch x.Tok {
|
||||
case token.GOTO:
|
||||
if x.Label != nil {
|
||||
d.hasGoto[x.Label.Name] = true
|
||||
}
|
||||
|
||||
case token.BREAK:
|
||||
stmt := d.breakTarget
|
||||
if x.Label != nil {
|
||||
stmt = d.labels[x.Label.Name]
|
||||
}
|
||||
if stmt != nil {
|
||||
d.hasBreak[stmt] = true
|
||||
}
|
||||
}
|
||||
|
||||
case *ast.IfStmt:
|
||||
d.findLabels(x.Body)
|
||||
if x.Else != nil {
|
||||
d.findLabels(x.Else)
|
||||
}
|
||||
|
||||
case *ast.LabeledStmt:
|
||||
d.labels[x.Label.Name] = x.Stmt
|
||||
d.findLabels(x.Stmt)
|
||||
|
||||
// These cases are all the same, but the x.Body only works
|
||||
// when the specific type of x is known, so the cases cannot
|
||||
// be merged.
|
||||
case *ast.ForStmt:
|
||||
outer := d.breakTarget
|
||||
d.breakTarget = x
|
||||
d.findLabels(x.Body)
|
||||
d.breakTarget = outer
|
||||
|
||||
case *ast.RangeStmt:
|
||||
outer := d.breakTarget
|
||||
d.breakTarget = x
|
||||
d.findLabels(x.Body)
|
||||
d.breakTarget = outer
|
||||
|
||||
case *ast.SelectStmt:
|
||||
outer := d.breakTarget
|
||||
d.breakTarget = x
|
||||
d.findLabels(x.Body)
|
||||
d.breakTarget = outer
|
||||
|
||||
case *ast.SwitchStmt:
|
||||
outer := d.breakTarget
|
||||
d.breakTarget = x
|
||||
d.findLabels(x.Body)
|
||||
d.breakTarget = outer
|
||||
|
||||
case *ast.TypeSwitchStmt:
|
||||
outer := d.breakTarget
|
||||
d.breakTarget = x
|
||||
d.findLabels(x.Body)
|
||||
d.breakTarget = outer
|
||||
|
||||
case *ast.CommClause:
|
||||
for _, stmt := range x.Body {
|
||||
d.findLabels(stmt)
|
||||
}
|
||||
|
||||
case *ast.CaseClause:
|
||||
for _, stmt := range x.Body {
|
||||
d.findLabels(stmt)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// findDead walks the statement looking for dead code.
|
||||
// If d.reachable is false on entry, stmt itself is dead.
|
||||
// When findDead returns, d.reachable tells whether the
|
||||
// statement following stmt is reachable.
|
||||
func (d *deadState) findDead(stmt ast.Stmt) {
|
||||
// Is this a labeled goto target?
|
||||
// If so, assume it is reachable due to the goto.
|
||||
// This is slightly conservative, in that we don't
|
||||
// check that the goto is reachable, so
|
||||
// L: goto L
|
||||
// will not provoke a warning.
|
||||
// But it's good enough.
|
||||
if x, isLabel := stmt.(*ast.LabeledStmt); isLabel && d.hasGoto[x.Label.Name] {
|
||||
d.reachable = true
|
||||
}
|
||||
|
||||
if !d.reachable {
|
||||
switch stmt.(type) {
|
||||
case *ast.EmptyStmt:
|
||||
// do not warn about unreachable empty statements
|
||||
default:
|
||||
d.f.Bad(stmt.Pos(), "unreachable code")
|
||||
d.reachable = true // silence error about next statement
|
||||
}
|
||||
}
|
||||
|
||||
switch x := stmt.(type) {
|
||||
default:
|
||||
d.f.Warnf(x.Pos(), "internal error in findDead: unexpected statement %T", x)
|
||||
|
||||
case *ast.AssignStmt,
|
||||
*ast.BadStmt,
|
||||
*ast.DeclStmt,
|
||||
*ast.DeferStmt,
|
||||
*ast.EmptyStmt,
|
||||
*ast.GoStmt,
|
||||
*ast.IncDecStmt,
|
||||
*ast.SendStmt:
|
||||
// no control flow
|
||||
|
||||
case *ast.BlockStmt:
|
||||
for _, stmt := range x.List {
|
||||
d.findDead(stmt)
|
||||
}
|
||||
|
||||
case *ast.BranchStmt:
|
||||
switch x.Tok {
|
||||
case token.BREAK, token.GOTO, token.FALLTHROUGH:
|
||||
d.reachable = false
|
||||
case token.CONTINUE:
|
||||
// NOTE: We accept "continue" statements as terminating.
|
||||
// They are not necessary in the spec definition of terminating,
|
||||
// because a continue statement cannot be the final statement
|
||||
// before a return. But for the more general problem of syntactically
|
||||
// identifying dead code, continue redirects control flow just
|
||||
// like the other terminating statements.
|
||||
d.reachable = false
|
||||
}
|
||||
|
||||
case *ast.ExprStmt:
|
||||
// Call to panic?
|
||||
call, ok := x.X.(*ast.CallExpr)
|
||||
if ok {
|
||||
name, ok := call.Fun.(*ast.Ident)
|
||||
if ok && name.Name == "panic" && name.Obj == nil {
|
||||
d.reachable = false
|
||||
}
|
||||
}
|
||||
|
||||
case *ast.ForStmt:
|
||||
d.findDead(x.Body)
|
||||
d.reachable = x.Cond != nil || d.hasBreak[x]
|
||||
|
||||
case *ast.IfStmt:
|
||||
d.findDead(x.Body)
|
||||
if x.Else != nil {
|
||||
r := d.reachable
|
||||
d.reachable = true
|
||||
d.findDead(x.Else)
|
||||
d.reachable = d.reachable || r
|
||||
} else {
|
||||
// might not have executed if statement
|
||||
d.reachable = true
|
||||
}
|
||||
|
||||
case *ast.LabeledStmt:
|
||||
d.findDead(x.Stmt)
|
||||
|
||||
case *ast.RangeStmt:
|
||||
d.findDead(x.Body)
|
||||
d.reachable = true
|
||||
|
||||
case *ast.ReturnStmt:
|
||||
d.reachable = false
|
||||
|
||||
case *ast.SelectStmt:
|
||||
// NOTE: Unlike switch and type switch below, we don't care
|
||||
// whether a select has a default, because a select without a
|
||||
// default blocks until one of the cases can run. That's different
|
||||
// from a switch without a default, which behaves like it has
|
||||
// a default with an empty body.
|
||||
anyReachable := false
|
||||
for _, comm := range x.Body.List {
|
||||
d.reachable = true
|
||||
for _, stmt := range comm.(*ast.CommClause).Body {
|
||||
d.findDead(stmt)
|
||||
}
|
||||
anyReachable = anyReachable || d.reachable
|
||||
}
|
||||
d.reachable = anyReachable || d.hasBreak[x]
|
||||
|
||||
case *ast.SwitchStmt:
|
||||
anyReachable := false
|
||||
hasDefault := false
|
||||
for _, cas := range x.Body.List {
|
||||
cc := cas.(*ast.CaseClause)
|
||||
if cc.List == nil {
|
||||
hasDefault = true
|
||||
}
|
||||
d.reachable = true
|
||||
for _, stmt := range cc.Body {
|
||||
d.findDead(stmt)
|
||||
}
|
||||
anyReachable = anyReachable || d.reachable
|
||||
}
|
||||
d.reachable = anyReachable || d.hasBreak[x] || !hasDefault
|
||||
|
||||
case *ast.TypeSwitchStmt:
|
||||
anyReachable := false
|
||||
hasDefault := false
|
||||
for _, cas := range x.Body.List {
|
||||
cc := cas.(*ast.CaseClause)
|
||||
if cc.List == nil {
|
||||
hasDefault = true
|
||||
}
|
||||
d.reachable = true
|
||||
for _, stmt := range cc.Body {
|
||||
d.findDead(stmt)
|
||||
}
|
||||
anyReachable = anyReachable || d.reachable
|
||||
}
|
||||
d.reachable = anyReachable || d.hasBreak[x] || !hasDefault
|
||||
}
|
||||
}
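The file deleted above implements the -unreachable check: findLabels records goto and break targets, then findDead sweeps each function body with a single reachable flag and warns once per dead statement. A small, hypothetical illustration of what it reports:

package demo

func greet(name string) {
	if name == "" {
		return
	}
	println("hello, " + name)
	return
	println("bye") // follows a return; reported by -unreachable as "unreachable code"
}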
|
cmd/vet/doc.go (197 lines deleted)
@ -1,197 +0,0 @@
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

/*

Vet examines Go source code and reports suspicious constructs, such as Printf
calls whose arguments do not align with the format string. Vet uses heuristics
that do not guarantee all reports are genuine problems, but it can find errors
not caught by the compilers.

It can be invoked three ways:

By package, from the go tool:
	go vet package/path/name
vets the package whose path is provided.

By files:
	go tool vet source/directory/*.go
vets the files named, all of which must be in the same package.

By directory:
	go tool vet source/directory
recursively descends the directory, vetting each package it finds.

Vet's exit code is 2 for erroneous invocation of the tool, 1 if a
problem was reported, and 0 otherwise. Note that the tool does not
check every possible problem and depends on unreliable heuristics
so it should be used as guidance only, not as a firm indicator of
program correctness.

By default all checks are performed. If any flags are explicitly set
to true, only those tests are run. Conversely, if any flag is
explicitly set to false, only those tests are disabled.
Thus -printf=true runs the printf check, -printf=false runs all checks
except the printf check.

Available checks:

Assembly declarations

Flag: -asmdecl

Mismatches between assembly files and Go function declarations.

Useless assignments

Flag: -assign

Check for useless assignments.

Atomic mistakes

Flag: -atomic

Common mistaken usages of the sync/atomic package.

Boolean conditions

Flag: -bool

Mistakes involving boolean operators.

Build tags

Flag: -buildtags

Badly formed or misplaced +build tags.

Unkeyed composite literals

Flag: -composites

Composite struct literals that do not use the field-keyed syntax.

Copying locks

Flag: -copylocks

Locks that are erroneously passed by value.

Documentation examples

Flag: -example

Mistakes involving example tests, including examples with incorrect names or
function signatures, or that document identifiers not in the package.

Methods

Flag: -methods

Non-standard signatures for methods with familiar names, including:
	Format GobEncode GobDecode MarshalJSON MarshalXML
	Peek ReadByte ReadFrom ReadRune Scan Seek
	UnmarshalJSON UnreadByte UnreadRune WriteByte
	WriteTo

Nil function comparison

Flag: -nilfunc

Comparisons between functions and nil.

Printf family

Flag: -printf

Suspicious calls to functions in the Printf family, including any functions
with these names, disregarding case:
	Print Printf Println
	Fprint Fprintf Fprintln
	Sprint Sprintf Sprintln
	Error Errorf
	Fatal Fatalf
	Log Logf
	Panic Panicf Panicln
If the function name ends with an 'f', the function is assumed to take
a format descriptor string in the manner of fmt.Printf. If not, vet
complains about arguments that look like format descriptor strings.

It also checks for errors such as using a Writer as the first argument of
Printf.

Range loop variables

Flag: -rangeloops

Incorrect uses of range loop variables in closures.

Shadowed variables

Flag: -shadow=false (experimental; must be set explicitly)

Variables that may have been unintentionally shadowed.

Shifts

Flag: -shift

Shifts equal to or longer than the variable's length.

Struct tags

Flag: -structtags

Struct tags that do not follow the format understood by reflect.StructTag.Get.
Well-known encoding struct tags (json, xml) used with unexported fields.

Unreachable code

Flag: -unreachable

Unreachable code.

Misuse of unsafe Pointers

Flag: -unsafeptr

Likely incorrect uses of unsafe.Pointer to convert integers to pointers.
A conversion from uintptr to unsafe.Pointer is invalid if it implies that
there is a uintptr-typed word in memory that holds a pointer value,
because that word will be invisible to stack copying and to the garbage
collector.

Unused result of certain function calls

Flag: -unusedresult

Calls to well-known functions and methods that return a value that is
discarded. By default, this includes functions like fmt.Errorf and
fmt.Sprintf and methods like String and Error. The flags -unusedfuncs
and -unusedstringmethods control the set.

Other flags

These flags configure the behavior of vet:

	-all (default true)
		Check everything; disabled if any explicit check is requested.
	-v
		Verbose mode
	-printfuncs
		A comma-separated list of print-like functions to supplement
		the standard list. Each entry is in the form Name:N where N
		is the zero-based argument position of the first argument
		involved in the print: either the format or the first print
		argument for non-formatted prints. For example,
		if you have Warn and Warnf functions that take an
		io.Writer as their first argument, like Fprintf,
			-printfuncs=Warn:1,Warnf:1
	-shadowstrict
		Whether to be strict about shadowing; can be noisy.
	-test
		For testing only: sets -all and -shadow.
*/
package main // import "golang.org/x/tools/cmd/vet"
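As a rough illustration of the -printfuncs flag documented above, a user-defined Fprintf-style helper could be declared and vetted like this; the package name, function name, and invocation are made up for the example:

package logutil

import (
	"fmt"
	"io"
)

// Warnf writes a formatted warning. The format string is argument 1
// (zero-based), which is what the entry Warnf:1 declares.
func Warnf(w io.Writer, format string, args ...interface{}) {
	fmt.Fprintf(w, "warn: "+format+"\n", args...)
}

// The deleted tool would then be pointed at it roughly like:
//	go tool vet -printfuncs=Warnf:1 .
// so that a call such as Warnf(os.Stderr, "%d items", "three") gets the
// usual wrong-type report for the %d verb.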
|
@ -1,125 +0,0 @@
|
||||
// Copyright 2015 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("example",
|
||||
"check for common mistaken usages of documentation examples",
|
||||
checkExample,
|
||||
funcDecl)
|
||||
}
|
||||
|
||||
func isExampleSuffix(s string) bool {
|
||||
r, size := utf8.DecodeRuneInString(s)
|
||||
return size > 0 && unicode.IsLower(r)
|
||||
}
|
||||
|
||||
// checkExample walks the documentation example functions checking for common
|
||||
// mistakes of misnamed functions, failure to map functions to existing
|
||||
// identifiers, etc.
|
||||
func checkExample(f *File, node ast.Node) {
|
||||
if !strings.HasSuffix(f.name, "_test.go") {
|
||||
return
|
||||
}
|
||||
var (
|
||||
pkg = f.pkg
|
||||
pkgName = pkg.typesPkg.Name()
|
||||
scopes = []*types.Scope{pkg.typesPkg.Scope()}
|
||||
lookup = func(name string) types.Object {
|
||||
for _, scope := range scopes {
|
||||
if o := scope.Lookup(name); o != nil {
|
||||
return o
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
)
|
||||
if strings.HasSuffix(pkgName, "_test") {
|
||||
// Treat 'package foo_test' as an alias for 'package foo'.
|
||||
var (
|
||||
basePkg = strings.TrimSuffix(pkgName, "_test")
|
||||
pkg = f.pkg
|
||||
)
|
||||
for _, p := range pkg.typesPkg.Imports() {
|
||||
if p.Name() == basePkg {
|
||||
scopes = append(scopes, p.Scope())
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
fn, ok := node.(*ast.FuncDecl)
|
||||
if !ok {
|
||||
// Ignore non-functions.
|
||||
return
|
||||
}
|
||||
var (
|
||||
fnName = fn.Name.Name
|
||||
report = func(format string, args ...interface{}) { f.Badf(node.Pos(), format, args...) }
|
||||
)
|
||||
if fn.Recv != nil || !strings.HasPrefix(fnName, "Example") {
|
||||
// Ignore methods and types not named "Example".
|
||||
return
|
||||
}
|
||||
if params := fn.Type.Params; len(params.List) != 0 {
|
||||
report("%s should be niladic", fnName)
|
||||
}
|
||||
if results := fn.Type.Results; results != nil && len(results.List) != 0 {
|
||||
report("%s should return nothing", fnName)
|
||||
}
|
||||
if fnName == "Example" {
|
||||
// Nothing more to do.
|
||||
return
|
||||
}
|
||||
if filesRun && !includesNonTest {
|
||||
// The coherence checks between a test and the package it tests
|
||||
// will report false positives if no non-test files have
|
||||
// been provided.
|
||||
return
|
||||
}
|
||||
var (
|
||||
exName = strings.TrimPrefix(fnName, "Example")
|
||||
elems = strings.SplitN(exName, "_", 3)
|
||||
ident = elems[0]
|
||||
obj = lookup(ident)
|
||||
)
|
||||
if ident != "" && obj == nil {
|
||||
// Check ExampleFoo and ExampleBadFoo.
|
||||
report("%s refers to unknown identifier: %s", fnName, ident)
|
||||
// Abort since obj is absent and no subsequent checks can be performed.
|
||||
return
|
||||
}
|
||||
if elemCnt := strings.Count(exName, "_"); elemCnt == 0 {
|
||||
// Nothing more to do.
|
||||
return
|
||||
}
|
||||
mmbr := elems[1]
|
||||
if ident == "" {
|
||||
// Check Example_suffix and Example_BadSuffix.
|
||||
if residual := strings.TrimPrefix(exName, "_"); !isExampleSuffix(residual) {
|
||||
report("%s has malformed example suffix: %s", fnName, residual)
|
||||
}
|
||||
return
|
||||
}
|
||||
if !isExampleSuffix(mmbr) {
|
||||
// Check ExampleFoo_Method and ExampleFoo_BadMethod.
|
||||
if obj, _, _ := types.LookupFieldOrMethod(obj.Type(), true, obj.Pkg(), mmbr); obj == nil {
|
||||
report("%s refers to unknown field or method: %s.%s", fnName, ident, mmbr)
|
||||
}
|
||||
}
|
||||
if len(elems) == 3 && !isExampleSuffix(elems[2]) {
|
||||
// Check ExampleFoo_Method_suffix and ExampleFoo_Method_Badsuffix.
|
||||
report("%s has malformed example suffix: %s", fnName, elems[2])
|
||||
}
|
||||
return
|
||||
}
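checkExample above resolves the identifier embedded in an example function's name against the package scope (plus the base package when run from a foo_test package). A hypothetical test file, assuming a package demo that exports a type Buffer with a method Len, shows the naming rules it enforces:

// example_test.go, assumed to sit next to the hypothetical package demo.
package demo_test

import "fmt"

// ExampleBuffer_Len is accepted: Buffer and Len both resolve.
func ExampleBuffer_Len() {
	fmt.Println(0)
	// Output: 0
}

// ExampleBufer would be reported as "ExampleBufer refers to unknown identifier: Bufer",
// ExampleBuffer_len is accepted as a lower-case suffix, and ExampleBuffer_Size
// would be reported as referring to an unknown field or method.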
|
cmd/vet/main.go (500 lines deleted)
@ -1,500 +0,0 @@
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// NOTE: This version of vet is retired. Bug fixes only.
|
||||
// Vet now lives in the core repository.
|
||||
|
||||
// Vet is a simple checker for static errors in Go source code.
|
||||
// See doc.go for more information.
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"flag"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/build"
|
||||
"go/parser"
|
||||
"go/printer"
|
||||
"go/token"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
_ "golang.org/x/tools/go/gcimporter"
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
var (
|
||||
verbose = flag.Bool("v", false, "verbose")
|
||||
testFlag = flag.Bool("test", false, "for testing only: sets -all and -shadow")
|
||||
tags = flag.String("tags", "", "comma-separated list of build tags to apply when parsing")
|
||||
tagList = []string{} // exploded version of tags flag; set in main
|
||||
)
|
||||
|
||||
var exitCode = 0
|
||||
|
||||
// "all" is here only for the appearance of backwards compatibility.
|
||||
// It has no effect; the triState flags do the work.
|
||||
var all = flag.Bool("all", true, "check everything; disabled if any explicit check is requested")
|
||||
|
||||
// Flags to control which individual checks to perform.
|
||||
var report = map[string]*triState{
|
||||
// Only unusual checks are written here.
|
||||
// Most checks that operate during the AST walk are added by register.
|
||||
"asmdecl": triStateFlag("asmdecl", unset, "check assembly against Go declarations"),
|
||||
"buildtags": triStateFlag("buildtags", unset, "check that +build tags are valid"),
|
||||
}
|
||||
|
||||
// experimental records the flags enabling experimental features. These must be
|
||||
// requested explicitly; they are not enabled by -all.
|
||||
var experimental = map[string]bool{}
|
||||
|
||||
// setTrueCount record how many flags are explicitly set to true.
|
||||
var setTrueCount int
|
||||
|
||||
// dirsRun and filesRun indicate whether the vet is applied to directory or
|
||||
// file targets. The distinction affects which checks are run.
|
||||
var dirsRun, filesRun bool
|
||||
|
||||
// includesNonTest indicates whether the vet is applied to non-test targets.
|
||||
// Certain checks are relevant only if they touch both test and non-test files.
|
||||
var includesNonTest bool
|
||||
|
||||
// A triState is a boolean that knows whether it has been set to either true or false.
|
||||
// It is used to identify if a flag appears; the standard boolean flag cannot
|
||||
// distinguish missing from unset. It also satisfies flag.Value.
|
||||
type triState int
|
||||
|
||||
const (
|
||||
unset triState = iota
|
||||
setTrue
|
||||
setFalse
|
||||
)
|
||||
|
||||
func triStateFlag(name string, value triState, usage string) *triState {
|
||||
flag.Var(&value, name, usage)
|
||||
return &value
|
||||
}
|
||||
|
||||
// triState implements flag.Value, flag.Getter, and flag.boolFlag.
|
||||
// They work like boolean flags: we can say vet -printf as well as vet -printf=true
|
||||
func (ts *triState) Get() interface{} {
|
||||
return *ts == setTrue
|
||||
}
|
||||
|
||||
func (ts triState) isTrue() bool {
|
||||
return ts == setTrue
|
||||
}
|
||||
|
||||
func (ts *triState) Set(value string) error {
|
||||
b, err := strconv.ParseBool(value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if b {
|
||||
*ts = setTrue
|
||||
setTrueCount++
|
||||
} else {
|
||||
*ts = setFalse
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ts *triState) String() string {
|
||||
switch *ts {
|
||||
case unset:
|
||||
return "unset"
|
||||
case setTrue:
|
||||
return "true"
|
||||
case setFalse:
|
||||
return "false"
|
||||
}
|
||||
panic("not reached")
|
||||
}
|
||||
|
||||
func (ts triState) IsBoolFlag() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
// vet tells whether to report errors for the named check, a flag name.
|
||||
func vet(name string) bool {
|
||||
if *testFlag {
|
||||
return true
|
||||
}
|
||||
return report[name].isTrue()
|
||||
}
|
||||
|
||||
// setExit sets the value for os.Exit when it is called, later. It
|
||||
// remembers the highest value.
|
||||
func setExit(err int) {
|
||||
if err > exitCode {
|
||||
exitCode = err
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
// Each of these vars has a corresponding case in (*File).Visit.
|
||||
assignStmt *ast.AssignStmt
|
||||
binaryExpr *ast.BinaryExpr
|
||||
callExpr *ast.CallExpr
|
||||
compositeLit *ast.CompositeLit
|
||||
exprStmt *ast.ExprStmt
|
||||
field *ast.Field
|
||||
funcDecl *ast.FuncDecl
|
||||
funcLit *ast.FuncLit
|
||||
genDecl *ast.GenDecl
|
||||
interfaceType *ast.InterfaceType
|
||||
rangeStmt *ast.RangeStmt
|
||||
|
||||
// checkers is a two-level map.
|
||||
// The outer level is keyed by a nil pointer, one of the AST vars above.
|
||||
// The inner level is keyed by checker name.
|
||||
checkers = make(map[ast.Node]map[string]func(*File, ast.Node))
|
||||
)
|
||||
|
||||
func register(name, usage string, fn func(*File, ast.Node), types ...ast.Node) {
|
||||
report[name] = triStateFlag(name, unset, usage)
|
||||
for _, typ := range types {
|
||||
m := checkers[typ]
|
||||
if m == nil {
|
||||
m = make(map[string]func(*File, ast.Node))
|
||||
checkers[typ] = m
|
||||
}
|
||||
m[name] = fn
|
||||
}
|
||||
}
|
||||
|
||||
// Usage is a replacement usage function for the flags package.
|
||||
func Usage() {
|
||||
fmt.Fprintf(os.Stderr, "Usage of %s:\n", os.Args[0])
|
||||
fmt.Fprintf(os.Stderr, "\tvet [flags] directory...\n")
|
||||
fmt.Fprintf(os.Stderr, "\tvet [flags] files... # Must be a single package\n")
|
||||
fmt.Fprintf(os.Stderr, "For more information run\n")
|
||||
fmt.Fprintf(os.Stderr, "\tgodoc golang.org/x/tools/cmd/vet\n\n")
|
||||
fmt.Fprintf(os.Stderr, "Flags:\n")
|
||||
flag.PrintDefaults()
|
||||
os.Exit(2)
|
||||
}
|
||||
|
||||
// File is a wrapper for the state of a file used in the parser.
|
||||
// The parse tree walkers are all methods of this type.
|
||||
type File struct {
|
||||
pkg *Package
|
||||
fset *token.FileSet
|
||||
name string
|
||||
content []byte
|
||||
file *ast.File
|
||||
b bytes.Buffer // for use by methods
|
||||
|
||||
// The objects that are receivers of a "String() string" method.
|
||||
// This is used by the recursiveStringer method in print.go.
|
||||
stringers map[*ast.Object]bool
|
||||
|
||||
// Registered checkers to run.
|
||||
checkers map[ast.Node][]func(*File, ast.Node)
|
||||
}
|
||||
|
||||
func main() {
|
||||
flag.Usage = Usage
|
||||
flag.Parse()
|
||||
|
||||
// If any flag is set, we run only those checks requested.
|
||||
// If no flags are set true, set all the non-experimental ones not explicitly set (in effect, set the "-all" flag).
|
||||
if setTrueCount == 0 {
|
||||
for name, setting := range report {
|
||||
if *setting == unset && !experimental[name] {
|
||||
*setting = setTrue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tagList = strings.Split(*tags, ",")
|
||||
|
||||
initPrintFlags()
|
||||
initUnusedFlags()
|
||||
|
||||
if flag.NArg() == 0 {
|
||||
Usage()
|
||||
}
|
||||
for _, name := range flag.Args() {
|
||||
// Is it a directory?
|
||||
fi, err := os.Stat(name)
|
||||
if err != nil {
|
||||
warnf("error walking tree: %s", err)
|
||||
continue
|
||||
}
|
||||
if fi.IsDir() {
|
||||
dirsRun = true
|
||||
} else {
|
||||
filesRun = true
|
||||
if !strings.HasSuffix(name, "_test.go") {
|
||||
includesNonTest = true
|
||||
}
|
||||
}
|
||||
}
|
||||
if dirsRun && filesRun {
|
||||
Usage()
|
||||
}
|
||||
if dirsRun {
|
||||
for _, name := range flag.Args() {
|
||||
walkDir(name)
|
||||
}
|
||||
os.Exit(exitCode)
|
||||
}
|
||||
if !doPackage(".", flag.Args()) {
|
||||
warnf("no files checked")
|
||||
}
|
||||
os.Exit(exitCode)
|
||||
}
|
||||
|
||||
// prefixDirectory places the directory name on the beginning of each name in the list.
|
||||
func prefixDirectory(directory string, names []string) {
|
||||
if directory != "." {
|
||||
for i, name := range names {
|
||||
names[i] = filepath.Join(directory, name)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// doPackageDir analyzes the single package found in the directory, if there is one,
|
||||
// plus a test package, if there is one.
|
||||
func doPackageDir(directory string) {
|
||||
context := build.Default
|
||||
if len(context.BuildTags) != 0 {
|
||||
warnf("build tags %s previously set", context.BuildTags)
|
||||
}
|
||||
context.BuildTags = append(tagList, context.BuildTags...)
|
||||
|
||||
pkg, err := context.ImportDir(directory, 0)
|
||||
if err != nil {
|
||||
// If it's just that there are no go source files, that's fine.
|
||||
if _, nogo := err.(*build.NoGoError); nogo {
|
||||
return
|
||||
}
|
||||
// Non-fatal: we are doing a recursive walk and there may be other directories.
|
||||
warnf("cannot process directory %s: %s", directory, err)
|
||||
return
|
||||
}
|
||||
var names []string
|
||||
names = append(names, pkg.GoFiles...)
|
||||
names = append(names, pkg.CgoFiles...)
|
||||
names = append(names, pkg.TestGoFiles...) // These are also in the "foo" package.
|
||||
names = append(names, pkg.SFiles...)
|
||||
prefixDirectory(directory, names)
|
||||
doPackage(directory, names)
|
||||
// Is there also a "foo_test" package? If so, do that one as well.
|
||||
if len(pkg.XTestGoFiles) > 0 {
|
||||
names = pkg.XTestGoFiles
|
||||
prefixDirectory(directory, names)
|
||||
doPackage(directory, names)
|
||||
}
|
||||
}
|
||||
|
||||
type Package struct {
|
||||
path string
|
||||
defs map[*ast.Ident]types.Object
|
||||
uses map[*ast.Ident]types.Object
|
||||
selectors map[*ast.SelectorExpr]*types.Selection
|
||||
types map[ast.Expr]types.TypeAndValue
|
||||
spans map[types.Object]Span
|
||||
files []*File
|
||||
typesPkg *types.Package
|
||||
}
|
||||
|
||||
// doPackage analyzes the single package constructed from the named files.
|
||||
// It returns whether any files were checked.
|
||||
func doPackage(directory string, names []string) bool {
|
||||
var files []*File
|
||||
var astFiles []*ast.File
|
||||
fs := token.NewFileSet()
|
||||
for _, name := range names {
|
||||
data, err := ioutil.ReadFile(name)
|
||||
if err != nil {
|
||||
// Warn but continue to next package.
|
||||
warnf("%s: %s", name, err)
|
||||
return false
|
||||
}
|
||||
checkBuildTag(name, data)
|
||||
var parsedFile *ast.File
|
||||
if strings.HasSuffix(name, ".go") {
|
||||
parsedFile, err = parser.ParseFile(fs, name, data, 0)
|
||||
if err != nil {
|
||||
warnf("%s: %s", name, err)
|
||||
return false
|
||||
}
|
||||
astFiles = append(astFiles, parsedFile)
|
||||
}
|
||||
files = append(files, &File{fset: fs, content: data, name: name, file: parsedFile})
|
||||
}
|
||||
if len(astFiles) == 0 {
|
||||
return false
|
||||
}
|
||||
pkg := new(Package)
|
||||
pkg.path = astFiles[0].Name.Name
|
||||
pkg.files = files
|
||||
// Type check the package.
|
||||
err := pkg.check(fs, astFiles)
|
||||
if err != nil && *verbose {
|
||||
warnf("%s", err)
|
||||
}
|
||||
|
||||
// Check.
|
||||
chk := make(map[ast.Node][]func(*File, ast.Node))
|
||||
for typ, set := range checkers {
|
||||
for name, fn := range set {
|
||||
if vet(name) {
|
||||
chk[typ] = append(chk[typ], fn)
|
||||
}
|
||||
}
|
||||
}
|
||||
for _, file := range files {
|
||||
file.pkg = pkg
|
||||
file.checkers = chk
|
||||
if file.file != nil {
|
||||
file.walkFile(file.name, file.file)
|
||||
}
|
||||
}
|
||||
asmCheck(pkg)
|
||||
return true
|
||||
}
|
||||
|
||||
func visit(path string, f os.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
warnf("walk error: %s", err)
|
||||
return err
|
||||
}
|
||||
// One package per directory. Ignore the files themselves.
|
||||
if !f.IsDir() {
|
||||
return nil
|
||||
}
|
||||
doPackageDir(path)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (pkg *Package) hasFileWithSuffix(suffix string) bool {
|
||||
for _, f := range pkg.files {
|
||||
if strings.HasSuffix(f.name, suffix) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// walkDir recursively walks the tree looking for Go packages.
|
||||
func walkDir(root string) {
|
||||
filepath.Walk(root, visit)
|
||||
}
|
||||
|
||||
// errorf formats the error to standard error, adding program
|
||||
// identification and a newline, and exits.
|
||||
func errorf(format string, args ...interface{}) {
|
||||
fmt.Fprintf(os.Stderr, "vet: "+format+"\n", args...)
|
||||
os.Exit(2)
|
||||
}
|
||||
|
||||
// warnf formats the error to standard error, adding program
|
||||
// identification and a newline, but does not exit.
|
||||
func warnf(format string, args ...interface{}) {
|
||||
fmt.Fprintf(os.Stderr, "vet: "+format+"\n", args...)
|
||||
setExit(1)
|
||||
}
|
||||
|
||||
// Println is fmt.Println guarded by -v.
|
||||
func Println(args ...interface{}) {
|
||||
if !*verbose {
|
||||
return
|
||||
}
|
||||
fmt.Println(args...)
|
||||
}
|
||||
|
||||
// Printf is fmt.Printf guarded by -v.
|
||||
func Printf(format string, args ...interface{}) {
|
||||
if !*verbose {
|
||||
return
|
||||
}
|
||||
fmt.Printf(format+"\n", args...)
|
||||
}
|
||||
|
||||
// Bad reports an error and sets the exit code.
|
||||
func (f *File) Bad(pos token.Pos, args ...interface{}) {
|
||||
f.Warn(pos, args...)
|
||||
setExit(1)
|
||||
}
|
||||
|
||||
// Badf reports a formatted error and sets the exit code.
|
||||
func (f *File) Badf(pos token.Pos, format string, args ...interface{}) {
|
||||
f.Warnf(pos, format, args...)
|
||||
setExit(1)
|
||||
}
|
||||
|
||||
// loc returns a formatted representation of the position.
|
||||
func (f *File) loc(pos token.Pos) string {
|
||||
if pos == token.NoPos {
|
||||
return ""
|
||||
}
|
||||
// Do not print columns. Because the pos often points to the start of an
|
||||
// expression instead of the inner part with the actual error, the
|
||||
// precision can mislead.
|
||||
posn := f.fset.Position(pos)
|
||||
return fmt.Sprintf("%s:%d: ", posn.Filename, posn.Line)
|
||||
}
|
||||
|
||||
// Warn reports an error but does not set the exit code.
|
||||
func (f *File) Warn(pos token.Pos, args ...interface{}) {
|
||||
fmt.Fprint(os.Stderr, f.loc(pos)+fmt.Sprintln(args...))
|
||||
}
|
||||
|
||||
// Warnf reports a formatted error but does not set the exit code.
|
||||
func (f *File) Warnf(pos token.Pos, format string, args ...interface{}) {
|
||||
fmt.Fprintf(os.Stderr, f.loc(pos)+format+"\n", args...)
|
||||
}
|
||||
|
||||
// walkFile walks the file's tree.
|
||||
func (f *File) walkFile(name string, file *ast.File) {
|
||||
Println("Checking file", name)
|
||||
ast.Walk(f, file)
|
||||
}
|
||||
|
||||
// Visit implements the ast.Visitor interface.
|
||||
func (f *File) Visit(node ast.Node) ast.Visitor {
|
||||
var key ast.Node
|
||||
switch node.(type) {
|
||||
case *ast.AssignStmt:
|
||||
key = assignStmt
|
||||
case *ast.BinaryExpr:
|
||||
key = binaryExpr
|
||||
case *ast.CallExpr:
|
||||
key = callExpr
|
||||
case *ast.CompositeLit:
|
||||
key = compositeLit
|
||||
case *ast.ExprStmt:
|
||||
key = exprStmt
|
||||
case *ast.Field:
|
||||
key = field
|
||||
case *ast.FuncDecl:
|
||||
key = funcDecl
|
||||
case *ast.FuncLit:
|
||||
key = funcLit
|
||||
case *ast.GenDecl:
|
||||
key = genDecl
|
||||
case *ast.InterfaceType:
|
||||
key = interfaceType
|
||||
case *ast.RangeStmt:
|
||||
key = rangeStmt
|
||||
}
|
||||
for _, fn := range f.checkers[key] {
|
||||
fn(f, node)
|
||||
}
|
||||
return f
|
||||
}
|
||||
|
||||
// gofmt returns a string representation of the expression.
|
||||
func (f *File) gofmt(x ast.Expr) string {
|
||||
f.b.Reset()
|
||||
printer.Fprint(&f.b, f.fset, x)
|
||||
return f.b.String()
|
||||
}
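register and File.Visit above form the driver's dispatch loop: each check attaches itself to the AST node types it cares about, and the walker fans each visited node out to the matching functions. A self-contained sketch of that pattern, independent of vet and using made-up names:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

// A nil *ast.CallExpr serves as the map key for call-expression checks,
// mirroring the two-level checkers map in the deleted driver.
var callExpr *ast.CallExpr

var checkers = map[ast.Node][]func(ast.Node){}

func register(key ast.Node, fn func(ast.Node)) {
	checkers[key] = append(checkers[key], fn)
}

type walker struct{}

func (w walker) Visit(n ast.Node) ast.Visitor {
	var key ast.Node
	switch n.(type) {
	case *ast.CallExpr:
		key = callExpr
	}
	for _, fn := range checkers[key] {
		fn(n)
	}
	return w
}

func main() {
	register(callExpr, func(n ast.Node) {
		if id, ok := n.(*ast.CallExpr).Fun.(*ast.Ident); ok {
			fmt.Println("call to", id.Name)
		}
	})

	fset := token.NewFileSet()
	src := `package x
func f() { println("hi") }`
	file, err := parser.ParseFile(fset, "x.go", src, 0)
	if err != nil {
		panic(err)
	}
	ast.Walk(walker{}, file) // prints: call to println
}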
|
@ -1,182 +0,0 @@
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains the code to check canonical methods.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/printer"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("methods",
|
||||
"check that canonically named methods are canonically defined",
|
||||
checkCanonicalMethod,
|
||||
funcDecl, interfaceType)
|
||||
}
|
||||
|
||||
type MethodSig struct {
|
||||
args []string
|
||||
results []string
|
||||
}
|
||||
|
||||
// canonicalMethods lists the input and output types for Go methods
|
||||
// that are checked using dynamic interface checks. Because the
|
||||
// checks are dynamic, such methods would not cause a compile error
|
||||
// if they have the wrong signature: instead the dynamic check would
|
||||
// fail, sometimes mysteriously. If a method is found with a name listed
|
||||
// here but not the input/output types listed here, vet complains.
|
||||
//
|
||||
// A few of the canonical methods have very common names.
|
||||
// For example, a type might implement a Scan method that
|
||||
// has nothing to do with fmt.Scanner, but we still want to check
|
||||
// the methods that are intended to implement fmt.Scanner.
|
||||
// To do that, the arguments that have a = prefix are treated as
|
||||
// signals that the canonical meaning is intended: if a Scan
|
||||
// method doesn't have a fmt.ScanState as its first argument,
|
||||
// we let it go. But if it does have a fmt.ScanState, then the
|
||||
// rest has to match.
|
||||
var canonicalMethods = map[string]MethodSig{
|
||||
// "Flush": {{}, {"error"}}, // http.Flusher and jpeg.writer conflict
|
||||
"Format": {[]string{"=fmt.State", "rune"}, []string{}}, // fmt.Formatter
|
||||
"GobDecode": {[]string{"[]byte"}, []string{"error"}}, // gob.GobDecoder
|
||||
"GobEncode": {[]string{}, []string{"[]byte", "error"}}, // gob.GobEncoder
|
||||
"MarshalJSON": {[]string{}, []string{"[]byte", "error"}}, // json.Marshaler
|
||||
"MarshalXML": {[]string{"*xml.Encoder", "xml.StartElement"}, []string{"error"}}, // xml.Marshaler
|
||||
"Peek": {[]string{"=int"}, []string{"[]byte", "error"}}, // image.reader (matching bufio.Reader)
|
||||
"ReadByte": {[]string{}, []string{"byte", "error"}}, // io.ByteReader
|
||||
"ReadFrom": {[]string{"=io.Reader"}, []string{"int64", "error"}}, // io.ReaderFrom
|
||||
"ReadRune": {[]string{}, []string{"rune", "int", "error"}}, // io.RuneReader
|
||||
"Scan": {[]string{"=fmt.ScanState", "rune"}, []string{"error"}}, // fmt.Scanner
|
||||
"Seek": {[]string{"=int64", "int"}, []string{"int64", "error"}}, // io.Seeker
|
||||
"UnmarshalJSON": {[]string{"[]byte"}, []string{"error"}}, // json.Unmarshaler
|
||||
"UnmarshalXML": {[]string{"*xml.Decoder", "xml.StartElement"}, []string{"error"}}, // xml.Unmarshaler
|
||||
"UnreadByte": {[]string{}, []string{"error"}},
|
||||
"UnreadRune": {[]string{}, []string{"error"}},
|
||||
"WriteByte": {[]string{"byte"}, []string{"error"}}, // jpeg.writer (matching bufio.Writer)
|
||||
"WriteTo": {[]string{"=io.Writer"}, []string{"int64", "error"}}, // io.WriterTo
|
||||
}
|
||||
|
||||
func checkCanonicalMethod(f *File, node ast.Node) {
|
||||
switch n := node.(type) {
|
||||
case *ast.FuncDecl:
|
||||
if n.Recv != nil {
|
||||
canonicalMethod(f, n.Name, n.Type)
|
||||
}
|
||||
case *ast.InterfaceType:
|
||||
for _, field := range n.Methods.List {
|
||||
for _, id := range field.Names {
|
||||
canonicalMethod(f, id, field.Type.(*ast.FuncType))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func canonicalMethod(f *File, id *ast.Ident, t *ast.FuncType) {
|
||||
// Expected input/output.
|
||||
expect, ok := canonicalMethods[id.Name]
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
|
||||
// Actual input/output
|
||||
args := typeFlatten(t.Params.List)
|
||||
var results []ast.Expr
|
||||
if t.Results != nil {
|
||||
results = typeFlatten(t.Results.List)
|
||||
}
|
||||
|
||||
// Do the =s (if any) all match?
|
||||
if !f.matchParams(expect.args, args, "=") || !f.matchParams(expect.results, results, "=") {
|
||||
return
|
||||
}
|
||||
|
||||
// Everything must match.
|
||||
if !f.matchParams(expect.args, args, "") || !f.matchParams(expect.results, results, "") {
|
||||
expectFmt := id.Name + "(" + argjoin(expect.args) + ")"
|
||||
if len(expect.results) == 1 {
|
||||
expectFmt += " " + argjoin(expect.results)
|
||||
} else if len(expect.results) > 1 {
|
||||
expectFmt += " (" + argjoin(expect.results) + ")"
|
||||
}
|
||||
|
||||
f.b.Reset()
|
||||
if err := printer.Fprint(&f.b, f.fset, t); err != nil {
|
||||
fmt.Fprintf(&f.b, "<%s>", err)
|
||||
}
|
||||
actual := f.b.String()
|
||||
actual = strings.TrimPrefix(actual, "func")
|
||||
actual = id.Name + actual
|
||||
|
||||
f.Badf(id.Pos(), "method %s should have signature %s", actual, expectFmt)
|
||||
}
|
||||
}
|
||||
|
||||
func argjoin(x []string) string {
|
||||
y := make([]string, len(x))
|
||||
for i, s := range x {
|
||||
if s[0] == '=' {
|
||||
s = s[1:]
|
||||
}
|
||||
y[i] = s
|
||||
}
|
||||
return strings.Join(y, ", ")
|
||||
}
|
||||
|
||||
// Turn parameter list into slice of types
|
||||
// (in the ast, types are Exprs).
|
||||
// Have to handle f(int, bool) and f(x, y, z int)
|
||||
// so not a simple 1-to-1 conversion.
|
||||
func typeFlatten(l []*ast.Field) []ast.Expr {
|
||||
var t []ast.Expr
|
||||
for _, f := range l {
|
||||
if len(f.Names) == 0 {
|
||||
t = append(t, f.Type)
|
||||
continue
|
||||
}
|
||||
for _ = range f.Names {
|
||||
t = append(t, f.Type)
|
||||
}
|
||||
}
|
||||
return t
|
||||
}
|
||||
|
||||
// Does each type in expect with the given prefix match the corresponding type in actual?
|
||||
func (f *File) matchParams(expect []string, actual []ast.Expr, prefix string) bool {
|
||||
for i, x := range expect {
|
||||
if !strings.HasPrefix(x, prefix) {
|
||||
continue
|
||||
}
|
||||
if i >= len(actual) {
|
||||
return false
|
||||
}
|
||||
if !f.matchParamType(x, actual[i]) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
if prefix == "" && len(actual) > len(expect) {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Does this one type match?
|
||||
func (f *File) matchParamType(expect string, actual ast.Expr) bool {
|
||||
if strings.HasPrefix(expect, "=") {
|
||||
expect = expect[1:]
|
||||
}
|
||||
// Strip package name if we're in that package.
|
||||
if n := len(f.file.Name.Name); len(expect) > n && expect[:n] == f.file.Name.Name && expect[n] == '.' {
|
||||
expect = expect[n+1:]
|
||||
}
|
||||
|
||||
// Overkill but easy.
|
||||
f.b.Reset()
|
||||
printer.Fprint(&f.b, f.fset, actual)
|
||||
return f.b.String() == expect
|
||||
}
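The canonical-method check above compares signatures textually against the table of expected forms, so a familiar method name with the wrong types is caught even though the program still compiles. A hypothetical example of a declaration the -methods flag would report:

package demo

import "io"

type chunk struct{ data []byte }

// WriteTo has the familiar name but not the io.WriterTo signature
// (it should be WriteTo(io.Writer) (int64, error)); the deleted -methods
// check would report the mismatch, and io.Copy would silently skip the
// WriteTo fast path because the interface is not satisfied.
func (c *chunk) WriteTo(w io.Writer) (int, error) {
	return w.Write(c.data)
}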
|
@ -1,68 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
/*
|
||||
This file contains the code to check for useless function comparisons.
|
||||
A useless comparison is one like f == nil as opposed to f() == nil.
|
||||
*/
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("nilfunc",
|
||||
"check for comparisons between functions and nil",
|
||||
checkNilFuncComparison,
|
||||
binaryExpr)
|
||||
}
|
||||
|
||||
func checkNilFuncComparison(f *File, node ast.Node) {
|
||||
e := node.(*ast.BinaryExpr)
|
||||
|
||||
// Only want == or != comparisons.
|
||||
if e.Op != token.EQL && e.Op != token.NEQ {
|
||||
return
|
||||
}
|
||||
|
||||
// Only want comparisons with a nil identifier on one side.
|
||||
var e2 ast.Expr
|
||||
switch {
|
||||
case f.isNil(e.X):
|
||||
e2 = e.Y
|
||||
case f.isNil(e.Y):
|
||||
e2 = e.X
|
||||
default:
|
||||
return
|
||||
}
|
||||
|
||||
// Only want identifiers or selector expressions.
|
||||
var obj types.Object
|
||||
switch v := e2.(type) {
|
||||
case *ast.Ident:
|
||||
obj = f.pkg.uses[v]
|
||||
case *ast.SelectorExpr:
|
||||
obj = f.pkg.uses[v.Sel]
|
||||
default:
|
||||
return
|
||||
}
|
||||
|
||||
// Only want functions.
|
||||
if _, ok := obj.(*types.Func); !ok {
|
||||
return
|
||||
}
|
||||
|
||||
f.Badf(e.Pos(), "comparison of function %v %v nil is always %v", obj.Name(), e.Op, e.Op == token.NEQ)
|
||||
}
|
||||
|
||||
// isNil reports whether the provided expression is the built-in nil
|
||||
// identifier.
|
||||
func (f *File) isNil(e ast.Expr) bool {
|
||||
return f.pkg.types[e].Type == types.Typ[types.UntypedNil]
|
||||
}
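checkNilFuncComparison above fires only when one operand is the untyped nil and the other resolves to a *types.Func, catching comparisons whose result never varies. A small, hypothetical case:

package demo

import "os"

func closeQuietly(f *os.File) {
	// Meant to be f.Close() != nil (checking the returned error); the method
	// value itself is never nil, so this condition is always true and the
	// deleted -nilfunc check reports roughly:
	//	comparison of function Close != nil is always true
	if f.Close != nil {
		println("close failed?")
	}
}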
|
cmd/vet/print.go (587 lines deleted)
@ -1,587 +0,0 @@
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains the printf-checker.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"flag"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
|
||||
"golang.org/x/tools/go/exact"
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
var printfuncs = flag.String("printfuncs", "", "comma-separated list of print function names to check")
|
||||
|
||||
func init() {
|
||||
register("printf",
|
||||
"check printf-like invocations",
|
||||
checkFmtPrintfCall,
|
||||
funcDecl, callExpr)
|
||||
}
|
||||
|
||||
func initPrintFlags() {
|
||||
if *printfuncs == "" {
|
||||
return
|
||||
}
|
||||
for _, name := range strings.Split(*printfuncs, ",") {
|
||||
if len(name) == 0 {
|
||||
flag.Usage()
|
||||
}
|
||||
skip := 0
|
||||
if colon := strings.LastIndex(name, ":"); colon > 0 {
|
||||
var err error
|
||||
skip, err = strconv.Atoi(name[colon+1:])
|
||||
if err != nil {
|
||||
errorf(`illegal format for "Func:N" argument %q; %s`, name, err)
|
||||
}
|
||||
name = name[:colon]
|
||||
}
|
||||
name = strings.ToLower(name)
|
||||
if name[len(name)-1] == 'f' {
|
||||
printfList[name] = skip
|
||||
} else {
|
||||
printList[name] = skip
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// printfList records the formatted-print functions. The value is the location
|
||||
// of the format parameter. Names are lower-cased so the lookup is
|
||||
// case insensitive.
|
||||
var printfList = map[string]int{
|
||||
"errorf": 0,
|
||||
"fatalf": 0,
|
||||
"fprintf": 1,
|
||||
"logf": 0,
|
||||
"panicf": 0,
|
||||
"printf": 0,
|
||||
"sprintf": 0,
|
||||
}
|
||||
|
||||
// printList records the unformatted-print functions. The value is the location
|
||||
// of the first parameter to be printed. Names are lower-cased so the lookup is
|
||||
// case insensitive.
|
||||
var printList = map[string]int{
|
||||
"error": 0,
|
||||
"fatal": 0,
|
||||
"fprint": 1, "fprintln": 1,
|
||||
"log": 0,
|
||||
"panic": 0, "panicln": 0,
|
||||
"print": 0, "println": 0,
|
||||
"sprint": 0, "sprintln": 0,
|
||||
}
|
||||
|
||||
// checkCall triggers the print-specific checks if the call invokes a print function.
|
||||
func checkFmtPrintfCall(f *File, node ast.Node) {
|
||||
if d, ok := node.(*ast.FuncDecl); ok && isStringer(f, d) {
|
||||
// Remember we saw this.
|
||||
if f.stringers == nil {
|
||||
f.stringers = make(map[*ast.Object]bool)
|
||||
}
|
||||
if l := d.Recv.List; len(l) == 1 {
|
||||
if n := l[0].Names; len(n) == 1 {
|
||||
f.stringers[n[0].Obj] = true
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
call, ok := node.(*ast.CallExpr)
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
var Name string
|
||||
switch x := call.Fun.(type) {
|
||||
case *ast.Ident:
|
||||
Name = x.Name
|
||||
case *ast.SelectorExpr:
|
||||
Name = x.Sel.Name
|
||||
default:
|
||||
return
|
||||
}
|
||||
|
||||
name := strings.ToLower(Name)
|
||||
if skip, ok := printfList[name]; ok {
|
||||
f.checkPrintf(call, Name, skip)
|
||||
return
|
||||
}
|
||||
if skip, ok := printList[name]; ok {
|
||||
f.checkPrint(call, Name, skip)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// isStringer returns true if the provided declaration is a "String() string"
|
||||
// method, an implementation of fmt.Stringer.
|
||||
func isStringer(f *File, d *ast.FuncDecl) bool {
|
||||
return d.Recv != nil && d.Name.Name == "String" && d.Type.Results != nil &&
|
||||
len(d.Type.Params.List) == 0 && len(d.Type.Results.List) == 1 &&
|
||||
f.pkg.types[d.Type.Results.List[0].Type].Type == types.Typ[types.String]
|
||||
}
|
||||
|
||||
// formatState holds the parsed representation of a printf directive such as "%3.*[4]d".
|
||||
// It is constructed by parsePrintfVerb.
|
||||
type formatState struct {
|
||||
verb rune // the format verb: 'd' for "%d"
|
||||
format string // the full format directive from % through verb, "%.3d".
|
||||
name string // Printf, Sprintf etc.
|
||||
flags []byte // the list of # + etc.
|
||||
argNums []int // the successive argument numbers that are consumed, adjusted to refer to actual arg in call
|
||||
indexed bool // whether an indexing expression appears: %[1]d.
|
||||
firstArg int // Index of first argument after the format in the Printf call.
|
||||
// Used only during parse.
|
||||
file *File
|
||||
call *ast.CallExpr
|
||||
argNum int // Which argument we're expecting to format now.
|
||||
indexPending bool // Whether we have an indexed argument that has not resolved.
|
||||
nbytes int // number of bytes of the format string consumed.
|
||||
}
|
||||
|
||||
// checkPrintf checks a call to a formatted print routine such as Printf.
|
||||
// call.Args[formatIndex] is (well, should be) the format argument.
|
||||
func (f *File) checkPrintf(call *ast.CallExpr, name string, formatIndex int) {
|
||||
if formatIndex >= len(call.Args) {
|
||||
f.Bad(call.Pos(), "too few arguments in call to", name)
|
||||
return
|
||||
}
|
||||
lit := f.pkg.types[call.Args[formatIndex]].Value
|
||||
if lit == nil {
|
||||
if *verbose {
|
||||
f.Warn(call.Pos(), "can't check non-constant format in call to", name)
|
||||
}
|
||||
return
|
||||
}
|
||||
if lit.Kind() != exact.String {
|
||||
f.Badf(call.Pos(), "constant %v not a string in call to %s", lit, name)
|
||||
return
|
||||
}
|
||||
format := exact.StringVal(lit)
|
||||
firstArg := formatIndex + 1 // Arguments are immediately after format string.
|
||||
if !strings.Contains(format, "%") {
|
||||
if len(call.Args) > firstArg {
|
||||
f.Badf(call.Pos(), "no formatting directive in %s call", name)
|
||||
}
|
||||
return
|
||||
}
|
||||
// Hard part: check formats against args.
|
||||
argNum := firstArg
|
||||
indexed := false
|
||||
for i, w := 0, 0; i < len(format); i += w {
|
||||
w = 1
|
||||
if format[i] == '%' {
|
||||
state := f.parsePrintfVerb(call, name, format[i:], firstArg, argNum)
|
||||
if state == nil {
|
||||
return
|
||||
}
|
||||
w = len(state.format)
|
||||
if state.indexed {
|
||||
indexed = true
|
||||
}
|
||||
if !f.okPrintfArg(call, state) { // One error per format is enough.
|
||||
return
|
||||
}
|
||||
if len(state.argNums) > 0 {
|
||||
// Continue with the next sequential argument.
|
||||
argNum = state.argNums[len(state.argNums)-1] + 1
|
||||
}
|
||||
}
|
||||
}
|
||||
// Dotdotdot is hard.
|
||||
if call.Ellipsis.IsValid() && argNum >= len(call.Args)-1 {
|
||||
return
|
||||
}
|
||||
// If the arguments were direct indexed, we assume the programmer knows what's up.
|
||||
// Otherwise, there should be no leftover arguments.
|
||||
if !indexed && argNum != len(call.Args) {
|
||||
expect := argNum - firstArg
|
||||
numArgs := len(call.Args) - firstArg
|
||||
f.Badf(call.Pos(), "wrong number of args for format in %s call: %d needed but %d args", name, expect, numArgs)
|
||||
}
|
||||
}
|
||||
|
||||
// parseFlags accepts any printf flags.
|
||||
func (s *formatState) parseFlags() {
|
||||
for s.nbytes < len(s.format) {
|
||||
switch c := s.format[s.nbytes]; c {
|
||||
case '#', '0', '+', '-', ' ':
|
||||
s.flags = append(s.flags, c)
|
||||
s.nbytes++
|
||||
default:
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// scanNum advances through a decimal number if present.
|
||||
func (s *formatState) scanNum() {
|
||||
for ; s.nbytes < len(s.format); s.nbytes++ {
|
||||
c := s.format[s.nbytes]
|
||||
if c < '0' || '9' < c {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// parseIndex scans an index expression. It returns false if there is a syntax error.
|
||||
func (s *formatState) parseIndex() bool {
|
||||
if s.nbytes == len(s.format) || s.format[s.nbytes] != '[' {
|
||||
return true
|
||||
}
|
||||
// Argument index present.
|
||||
s.indexed = true
|
||||
s.nbytes++ // skip '['
|
||||
start := s.nbytes
|
||||
s.scanNum()
|
||||
if s.nbytes == len(s.format) || s.nbytes == start || s.format[s.nbytes] != ']' {
|
||||
s.file.Badf(s.call.Pos(), "illegal syntax for printf argument index")
|
||||
return false
|
||||
}
|
||||
arg32, err := strconv.ParseInt(s.format[start:s.nbytes], 10, 32)
|
||||
if err != nil {
|
||||
s.file.Badf(s.call.Pos(), "illegal syntax for printf argument index: %s", err)
|
||||
return false
|
||||
}
|
||||
s.nbytes++ // skip ']'
|
||||
arg := int(arg32)
|
||||
arg += s.firstArg - 1 // We want to zero-index the actual arguments.
|
||||
s.argNum = arg
|
||||
s.indexPending = true
|
||||
return true
|
||||
}
|
||||
|
||||
// parseNum scans a width or precision (or *). It returns false if there's a bad index expression.
|
||||
func (s *formatState) parseNum() bool {
|
||||
if s.nbytes < len(s.format) && s.format[s.nbytes] == '*' {
|
||||
if s.indexPending { // Absorb it.
|
||||
s.indexPending = false
|
||||
}
|
||||
s.nbytes++
|
||||
s.argNums = append(s.argNums, s.argNum)
|
||||
s.argNum++
|
||||
} else {
|
||||
s.scanNum()
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// parsePrecision scans for a precision. It returns false if there's a bad index expression.
|
||||
func (s *formatState) parsePrecision() bool {
|
||||
// If there's a period, there may be a precision.
|
||||
if s.nbytes < len(s.format) && s.format[s.nbytes] == '.' {
|
||||
s.flags = append(s.flags, '.') // Treat precision as a flag.
|
||||
s.nbytes++
|
||||
if !s.parseIndex() {
|
||||
return false
|
||||
}
|
||||
if !s.parseNum() {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// parsePrintfVerb looks the formatting directive that begins the format string
|
||||
// and returns a formatState that encodes what the directive wants, without looking
|
||||
// at the actual arguments present in the call. The result is nil if there is an error.
|
||||
func (f *File) parsePrintfVerb(call *ast.CallExpr, name, format string, firstArg, argNum int) *formatState {
|
||||
state := &formatState{
|
||||
format: format,
|
||||
name: name,
|
||||
flags: make([]byte, 0, 5),
|
||||
argNum: argNum,
|
||||
argNums: make([]int, 0, 1),
|
||||
nbytes: 1, // There's guaranteed to be a percent sign.
|
||||
indexed: false,
|
||||
firstArg: firstArg,
|
||||
file: f,
|
||||
call: call,
|
||||
}
|
||||
// There may be flags.
|
||||
state.parseFlags()
|
||||
indexPending := false
|
||||
// There may be an index.
|
||||
if !state.parseIndex() {
|
||||
return nil
|
||||
}
|
||||
// There may be a width.
|
||||
if !state.parseNum() {
|
||||
return nil
|
||||
}
|
||||
// There may be a precision.
|
||||
if !state.parsePrecision() {
|
||||
return nil
|
||||
}
|
||||
// Now a verb, possibly prefixed by an index (which we may already have).
|
||||
if !indexPending && !state.parseIndex() {
|
||||
return nil
|
||||
}
|
||||
if state.nbytes == len(state.format) {
|
||||
f.Badf(call.Pos(), "missing verb at end of format string in %s call", name)
|
||||
return nil
|
||||
}
|
||||
verb, w := utf8.DecodeRuneInString(state.format[state.nbytes:])
|
||||
state.verb = verb
|
||||
state.nbytes += w
|
||||
if verb != '%' {
|
||||
state.argNums = append(state.argNums, state.argNum)
|
||||
}
|
||||
state.format = state.format[:state.nbytes]
|
||||
return state
|
||||
}
|
||||
|
||||
// printfArgType encodes the types of expressions a printf verb accepts. It is a bitmask.
|
||||
type printfArgType int
|
||||
|
||||
const (
|
||||
argBool printfArgType = 1 << iota
|
||||
argInt
|
||||
argRune
|
||||
argString
|
||||
argFloat
|
||||
argComplex
|
||||
argPointer
|
||||
anyType printfArgType = ^0
|
||||
)
|
||||
|
||||
type printVerb struct {
|
||||
verb rune // User may provide verb through Formatter; could be a rune.
|
||||
flags string // known flags are all ASCII
|
||||
typ printfArgType
|
||||
}
|
||||
|
||||
// Common flag sets for printf verbs.
|
||||
const (
|
||||
noFlag = ""
|
||||
numFlag = " -+.0"
|
||||
sharpNumFlag = " -+.0#"
|
||||
allFlags = " -+.0#"
|
||||
)
|
||||
|
||||
// printVerbs identifies which flags are known to printf for each verb.
|
||||
// TODO: A type that implements Formatter may do what it wants, and vet
|
||||
// will complain incorrectly.
|
||||
var printVerbs = []printVerb{
|
||||
// '-' is a width modifier, always valid.
|
||||
// '.' is a precision for float, max width for strings.
|
||||
// '+' is required sign for numbers, Go format for %v.
|
||||
// '#' is alternate format for several verbs.
|
||||
// ' ' is spacer for numbers
|
||||
{'%', noFlag, 0},
|
||||
{'b', numFlag, argInt | argFloat | argComplex},
|
||||
{'c', "-", argRune | argInt},
|
||||
{'d', numFlag, argInt},
|
||||
{'e', numFlag, argFloat | argComplex},
|
||||
{'E', numFlag, argFloat | argComplex},
|
||||
{'f', numFlag, argFloat | argComplex},
|
||||
{'F', numFlag, argFloat | argComplex},
|
||||
{'g', numFlag, argFloat | argComplex},
|
||||
{'G', numFlag, argFloat | argComplex},
|
||||
{'o', sharpNumFlag, argInt},
|
||||
{'p', "-#", argPointer},
|
||||
{'q', " -+.0#", argRune | argInt | argString},
|
||||
{'s', " -+.0", argString},
|
||||
{'t', "-", argBool},
|
||||
{'T', "-", anyType},
|
||||
{'U', "-#", argRune | argInt},
|
||||
{'v', allFlags, anyType},
|
||||
{'x', sharpNumFlag, argRune | argInt | argString},
|
||||
{'X', sharpNumFlag, argRune | argInt | argString},
|
||||
}
|
||||
|
||||
// okPrintfArg compares the formatState to the arguments actually present,
// reporting any discrepancies it can discern. If the final argument is ellipsissed,
// there's little it can do for that.
func (f *File) okPrintfArg(call *ast.CallExpr, state *formatState) (ok bool) {
	var v printVerb
	found := false
	// Linear scan is fast enough for a small list.
	for _, v = range printVerbs {
		if v.verb == state.verb {
			found = true
			break
		}
	}
	if !found {
		f.Badf(call.Pos(), "unrecognized printf verb %q", state.verb)
		return false
	}
	for _, flag := range state.flags {
		if !strings.ContainsRune(v.flags, rune(flag)) {
			f.Badf(call.Pos(), "unrecognized printf flag for verb %q: %q", state.verb, flag)
			return false
		}
	}
	// Verb is good. If len(state.argNums)>trueArgs, we have something like %.*s and all
	// but the final arg must be an integer.
	trueArgs := 1
	if state.verb == '%' {
		trueArgs = 0
	}
	nargs := len(state.argNums)
	for i := 0; i < nargs-trueArgs; i++ {
		argNum := state.argNums[i]
		if !f.argCanBeChecked(call, i, true, state) {
			return
		}
		arg := call.Args[argNum]
		if !f.matchArgType(argInt, nil, arg) {
			f.Badf(call.Pos(), "arg %s for * in printf format not of type int", f.gofmt(arg))
			return false
		}
	}
	if state.verb == '%' {
		return true
	}
	argNum := state.argNums[len(state.argNums)-1]
	if !f.argCanBeChecked(call, len(state.argNums)-1, false, state) {
		return false
	}
	arg := call.Args[argNum]
	if !f.matchArgType(v.typ, nil, arg) {
		typeString := ""
		if typ := f.pkg.types[arg].Type; typ != nil {
			typeString = typ.String()
		}
		f.Badf(call.Pos(), "arg %s for printf verb %%%c of wrong type: %s", f.gofmt(arg), state.verb, typeString)
		return false
	}
	if v.typ&argString != 0 && v.verb != 'T' && !bytes.Contains(state.flags, []byte{'#'}) && f.recursiveStringer(arg) {
		f.Badf(call.Pos(), "arg %s for printf causes recursive call to String method", f.gofmt(arg))
		return false
	}
	return true
}

// recursiveStringer reports whether the provided argument is r or &r for the
// fmt.Stringer receiver identifier r.
func (f *File) recursiveStringer(e ast.Expr) bool {
	if len(f.stringers) == 0 {
		return false
	}
	var obj *ast.Object
	switch e := e.(type) {
	case *ast.Ident:
		obj = e.Obj
	case *ast.UnaryExpr:
		if id, ok := e.X.(*ast.Ident); ok && e.Op == token.AND {
			obj = id.Obj
		}
	}

	// It's unlikely to be a recursive stringer if it has a Format method.
	if typ := f.pkg.types[e].Type; typ != nil {
		// Not a perfect match; see issue 6259.
		if f.hasMethod(typ, "Format") {
			return false
		}
	}

	// We compare the underlying Object, which checks that the identifier
	// is the one we declared as the receiver for the String method in
	// which this printf appears.
	return f.stringers[obj]
}

// argCanBeChecked reports whether the specified argument is statically present;
// it may be beyond the list of arguments or in a terminal slice... argument, which
// means we can't see it.
func (f *File) argCanBeChecked(call *ast.CallExpr, formatArg int, isStar bool, state *formatState) bool {
	argNum := state.argNums[formatArg]
	if argNum < 0 {
		// Shouldn't happen, so catch it with prejudice.
		panic("negative arg num")
	}
	if argNum == 0 {
		f.Badf(call.Pos(), `index value [0] for %s("%s"); indexes start at 1`, state.name, state.format)
		return false
	}
	if argNum < len(call.Args)-1 {
		return true // Always OK.
	}
	if call.Ellipsis.IsValid() {
		return false // We just can't tell; there could be many more arguments.
	}
	if argNum < len(call.Args) {
		return true
	}
	// There are bad indexes in the format or there are fewer arguments than the format needs.
	// This is the argument number relative to the format: Printf("%s", "hi") will give 1 for the "hi".
	arg := argNum - state.firstArg + 1 // People think of arguments as 1-indexed.
	f.Badf(call.Pos(), `missing argument for %s("%s"): format reads arg %d, have only %d args`, state.name, state.format, arg, len(call.Args)-state.firstArg)
	return false
}

// checkPrint checks a call to an unformatted print routine such as Println.
// call.Args[firstArg] is the first argument to be printed.
func (f *File) checkPrint(call *ast.CallExpr, name string, firstArg int) {
	isLn := strings.HasSuffix(name, "ln")
	isF := strings.HasPrefix(name, "F")
	args := call.Args
	if name == "Log" && len(args) > 0 {
		// Special case: Don't complain about math.Log or cmplx.Log.
		// Not strictly necessary because the only complaint likely is for Log("%d")
		// but it feels wrong to check that math.Log is a good print function.
		if sel, ok := args[0].(*ast.SelectorExpr); ok {
			if x, ok := sel.X.(*ast.Ident); ok {
				if x.Name == "math" || x.Name == "cmplx" {
					return
				}
			}
		}
	}
	// check for Println(os.Stderr, ...)
	if firstArg == 0 && !isF && len(args) > 0 {
		if sel, ok := args[0].(*ast.SelectorExpr); ok {
			if x, ok := sel.X.(*ast.Ident); ok {
				if x.Name == "os" && strings.HasPrefix(sel.Sel.Name, "Std") {
					f.Badf(call.Pos(), "first argument to %s is %s.%s", name, x.Name, sel.Sel.Name)
				}
			}
		}
	}
	if len(args) <= firstArg {
		// If we have a call to a method called Error that satisfies the Error interface,
		// then it's ok. Otherwise it's something like (*T).Error from the testing package
		// and we need to check it.
		if name == "Error" && f.isErrorMethodCall(call) {
			return
		}
		// If it's an Error call now, it's probably for printing errors.
		if !isLn {
			// Check the signature to be sure: there are niladic functions called "error".
			if firstArg != 0 || f.numArgsInSignature(call) != firstArg {
				f.Badf(call.Pos(), "no args in %s call", name)
			}
		}
		return
	}
	arg := args[firstArg]
	if lit, ok := arg.(*ast.BasicLit); ok && lit.Kind == token.STRING {
		if strings.Contains(lit.Value, "%") {
			f.Badf(call.Pos(), "possible formatting directive in %s call", name)
		}
	}
	if isLn {
		// The last item, if a string, should not have a newline.
		arg = args[len(call.Args)-1]
		if lit, ok := arg.(*ast.BasicLit); ok && lit.Kind == token.STRING {
			if strings.HasSuffix(lit.Value, `\n"`) {
				f.Badf(call.Pos(), "%s call ends with newline", name)
			}
		}
	}
	for _, arg := range args {
		if f.recursiveStringer(arg) {
			f.Badf(call.Pos(), "arg %s for print causes recursive call to String method", f.gofmt(arg))
		}
	}
}
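For context, a small hypothetical program of the sort these printf checks flag; the type T and the literals are made up for illustration and are not part of the deleted files, and the comments only approximate the diagnostics above:

	package main

	import "fmt"

	type T int

	// String makes T a fmt.Stringer; formatting t with %v inside String is the
	// recursion that recursiveStringer is looking for.
	func (t T) String() string {
		return fmt.Sprintf("T(%v)", t) // arg t for printf causes recursive call to String method
	}

	func main() {
		fmt.Printf("%d\n", "hello")      // arg "hello" for printf verb %d of wrong type: string
		fmt.Printf("%s and %s\n", "one") // missing argument: format reads arg 2, have only 1 args
	}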
@@ -1,70 +0,0 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

/*
This file contains the code to check range loop variables bound inside function
literals that are deferred or launched in new goroutines. We only check
instances where the defer or go statement is the last statement in the loop
body, as otherwise we would need whole program analysis.

For example:

	for i, v := range s {
		go func() {
			println(i, v) // not what you might expect
		}()
	}

See: http://golang.org/doc/go_faq.html#closures_and_goroutines
*/

package main

import "go/ast"

func init() {
	register("rangeloops",
		"check that range loop variables are used correctly",
		checkRangeLoop,
		rangeStmt)
}

// checkRangeLoop walks the body of the provided range statement, checking if
// its index or value variables are used unsafely inside goroutines or deferred
// function literals.
func checkRangeLoop(f *File, node ast.Node) {
	n := node.(*ast.RangeStmt)
	key, _ := n.Key.(*ast.Ident)
	val, _ := n.Value.(*ast.Ident)
	if key == nil && val == nil {
		return
	}
	sl := n.Body.List
	if len(sl) == 0 {
		return
	}
	var last *ast.CallExpr
	switch s := sl[len(sl)-1].(type) {
	case *ast.GoStmt:
		last = s.Call
	case *ast.DeferStmt:
		last = s.Call
	default:
		return
	}
	lit, ok := last.Fun.(*ast.FuncLit)
	if !ok {
		return
	}
	ast.Inspect(lit.Body, func(n ast.Node) bool {
		id, ok := n.(*ast.Ident)
		if !ok || id.Obj == nil {
			return true
		}
		if key != nil && id.Obj == key.Obj || val != nil && id.Obj == val.Obj {
			f.Bad(id.Pos(), "range variable", id.Name, "captured by func literal")
		}
		return true
	})
}
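A hypothetical sketch of the pattern this rangeloops check reports and the usual fix; the slice s and the goroutine bodies are invented for illustration:

	package main

	import "fmt"

	func main() {
		s := []string{"a", "b", "c"}
		for _, v := range s {
			go func() {
				fmt.Println(v) // range variable v captured by func literal
			}()
		}
		for _, v := range s {
			v := v // a fresh v per iteration, so the capture below is safe and vet stays quiet
			go func() {
				fmt.Println(v)
			}()
		}
	}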
@@ -1,245 +0,0 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

/*
This file contains the code to check for shadowed variables.
A shadowed variable is a variable declared in an inner scope
with the same name and type as a variable in an outer scope,
and where the outer variable is mentioned after the inner one
is declared.

(This definition can be refined; the module generates too many
false positives and is not yet enabled by default.)

For example:

	func BadRead(f *os.File, buf []byte) error {
		var err error
		for {
			n, err := f.Read(buf) // shadows the function variable 'err'
			if err != nil {
				break // causes return of wrong value
			}
			foo(buf)
		}
		return err
	}

*/

package main

import (
	"flag"
	"go/ast"
	"go/token"

	"golang.org/x/tools/go/types"
)

var strictShadowing = flag.Bool("shadowstrict", false, "whether to be strict about shadowing; can be noisy")

func init() {
	register("shadow",
		"check for shadowed variables (experimental; must be set explicitly)",
		checkShadow,
		assignStmt, genDecl)
	experimental["shadow"] = true
}

func checkShadow(f *File, node ast.Node) {
	switch n := node.(type) {
	case *ast.AssignStmt:
		checkShadowAssignment(f, n)
	case *ast.GenDecl:
		checkShadowDecl(f, n)
	}
}

// Span stores the minimum range of byte positions in the file in which a
// given variable (types.Object) is mentioned. It is lexically defined: it spans
// from the beginning of its first mention to the end of its last mention.
// A variable is considered shadowed (if *strictShadowing is off) only if the
// shadowing variable is declared within the span of the shadowed variable.
// In other words, if a variable is shadowed but not used after the shadowed
// variable is declared, it is inconsequential and not worth complaining about.
// This simple check dramatically reduces the nuisance rate for the shadowing
// check, at least until something cleverer comes along.
//
// One wrinkle: A "naked return" is a silent use of a variable that the Span
// will not capture, but the compilers catch naked returns of shadowed
// variables so we don't need to.
//
// Cases this gets wrong (TODO):
// - If a for loop's continuation statement mentions a variable redeclared in
//   the block, we should complain about it but don't.
// - A variable declared inside a function literal can falsely be identified
//   as shadowing a variable in the outer function.
//
type Span struct {
	min token.Pos
	max token.Pos
}

// contains reports whether the position is inside the span.
func (s Span) contains(pos token.Pos) bool {
	return s.min <= pos && pos < s.max
}

// growSpan expands the span for the object to contain the instance represented
// by the identifier.
func (pkg *Package) growSpan(ident *ast.Ident, obj types.Object) {
	if *strictShadowing {
		return // No need
	}
	pos := ident.Pos()
	end := ident.End()
	span, ok := pkg.spans[obj]
	if ok {
		if span.min > pos {
			span.min = pos
		}
		if span.max < end {
			span.max = end
		}
	} else {
		span = Span{pos, end}
	}
	pkg.spans[obj] = span
}

// checkShadowAssignment checks for shadowing in a short variable declaration.
func checkShadowAssignment(f *File, a *ast.AssignStmt) {
	if a.Tok != token.DEFINE {
		return
	}
	if f.idiomaticShortRedecl(a) {
		return
	}
	for _, expr := range a.Lhs {
		ident, ok := expr.(*ast.Ident)
		if !ok {
			f.Badf(expr.Pos(), "invalid AST: short variable declaration of non-identifier")
			return
		}
		checkShadowing(f, ident)
	}
}

// idiomaticShortRedecl reports whether this short declaration can be ignored for
// the purposes of shadowing, that is, that any redeclarations it contains are deliberate.
func (f *File) idiomaticShortRedecl(a *ast.AssignStmt) bool {
	// Don't complain about deliberate redeclarations of the form
	//	i := i
	// Such constructs are idiomatic in range loops to create a new variable
	// for each iteration. Another example is
	//	switch n := n.(type)
	if len(a.Rhs) != len(a.Lhs) {
		return false
	}
	// We know it's an assignment, so the LHS must be all identifiers. (We check anyway.)
	for i, expr := range a.Lhs {
		lhs, ok := expr.(*ast.Ident)
		if !ok {
			f.Badf(expr.Pos(), "invalid AST: short variable declaration of non-identifier")
			return true // Don't do any more processing.
		}
		switch rhs := a.Rhs[i].(type) {
		case *ast.Ident:
			if lhs.Name != rhs.Name {
				return false
			}
		case *ast.TypeAssertExpr:
			if id, ok := rhs.X.(*ast.Ident); ok {
				if lhs.Name != id.Name {
					return false
				}
			}
		}
	}
	return true
}

// idiomaticRedecl reports whether this declaration spec can be ignored for
// the purposes of shadowing, that is, that any redeclarations it contains are deliberate.
func (f *File) idiomaticRedecl(d *ast.ValueSpec) bool {
	// Don't complain about deliberate redeclarations of the form
	//	var i, j = i, j
	if len(d.Names) != len(d.Values) {
		return false
	}
	for i, lhs := range d.Names {
		if rhs, ok := d.Values[i].(*ast.Ident); ok {
			if lhs.Name != rhs.Name {
				return false
			}
		}
	}
	return true
}

// checkShadowDecl checks for shadowing in a general variable declaration.
func checkShadowDecl(f *File, d *ast.GenDecl) {
	if d.Tok != token.VAR {
		return
	}
	for _, spec := range d.Specs {
		valueSpec, ok := spec.(*ast.ValueSpec)
		if !ok {
			f.Badf(spec.Pos(), "invalid AST: var GenDecl not ValueSpec")
			return
		}
		// Don't complain about deliberate redeclarations of the form
		//	var i = i
		if f.idiomaticRedecl(valueSpec) {
			return
		}
		for _, ident := range valueSpec.Names {
			checkShadowing(f, ident)
		}
	}
}

// checkShadowing checks whether the identifier shadows an identifier in an outer scope.
func checkShadowing(f *File, ident *ast.Ident) {
	if ident.Name == "_" {
		// Can't shadow the blank identifier.
		return
	}
	obj := f.pkg.defs[ident]
	if obj == nil {
		return
	}
	// obj.Parent.Parent is the surrounding scope. If we can find another declaration
	// starting from there, we have a shadowed identifier.
	_, shadowed := obj.Parent().Parent().LookupParent(obj.Name(), obj.Pos())
	if shadowed == nil {
		return
	}
	// Don't complain if it's shadowing a universe-declared identifier; that's fine.
	if shadowed.Parent() == types.Universe {
		return
	}
	if *strictShadowing {
		// The shadowed identifier must appear before this one to be an instance of shadowing.
		if shadowed.Pos() > ident.Pos() {
			return
		}
	} else {
		// Don't complain if the span of validity of the shadowed identifier doesn't include
		// the shadowing identifier.
		span, ok := f.pkg.spans[shadowed]
		if !ok {
			f.Badf(ident.Pos(), "internal error: no range for %s", ident.Name)
			return
		}
		if !span.contains(ident.Pos()) {
			return
		}
	}
	// Don't complain if the types differ: that implies the programmer really wants two different things.
	if types.Identical(obj.Type(), shadowed.Type()) {
		f.Badf(ident.Pos(), "declaration of %s shadows declaration at %s", obj.Name(), f.loc(shadowed.Pos()))
	}
}
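For reference, a hypothetical snippet that would trip this check when the experimental shadow check is enabled; step and work are invented names, and the comments only paraphrase the diagnostic:

	package main

	import (
		"errors"
		"fmt"
	)

	func step(i int) error {
		if i == 2 {
			return errors.New("step failed")
		}
		return nil
	}

	func work() error {
		var err error
		for i := 0; i < 3; i++ {
			err := step(i) // declaration of err shadows the declaration at work's first line
			if err != nil {
				break // the outer err is still nil, so work reports success
			}
		}
		return err
	}

	func main() {
		fmt.Println(work()) // prints <nil> even though step(2) failed
	}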
@@ -1,83 +0,0 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

/*
This file contains the code to check for suspicious shifts.
*/

package main

import (
	"go/ast"
	"go/token"

	"golang.org/x/tools/go/exact"
	"golang.org/x/tools/go/types"
)

func init() {
	register("shift",
		"check for useless shifts",
		checkShift,
		binaryExpr, assignStmt)
}

func checkShift(f *File, node ast.Node) {
	switch node := node.(type) {
	case *ast.BinaryExpr:
		if node.Op == token.SHL || node.Op == token.SHR {
			checkLongShift(f, node, node.X, node.Y)
		}
	case *ast.AssignStmt:
		if len(node.Lhs) != 1 || len(node.Rhs) != 1 {
			return
		}
		if node.Tok == token.SHL_ASSIGN || node.Tok == token.SHR_ASSIGN {
			checkLongShift(f, node, node.Lhs[0], node.Rhs[0])
		}
	}
}

// checkLongShift checks if shift or shift-assign operations shift by more than
// the length of the underlying variable.
func checkLongShift(f *File, node ast.Node, x, y ast.Expr) {
	v := f.pkg.types[y].Value
	if v == nil {
		return
	}
	amt, ok := exact.Int64Val(v)
	if !ok {
		return
	}
	t := f.pkg.types[x].Type
	if t == nil {
		return
	}
	b, ok := t.Underlying().(*types.Basic)
	if !ok {
		return
	}
	var size int64
	var msg string
	switch b.Kind() {
	case types.Uint8, types.Int8:
		size = 8
	case types.Uint16, types.Int16:
		size = 16
	case types.Uint32, types.Int32:
		size = 32
	case types.Uint64, types.Int64:
		size = 64
	case types.Int, types.Uint, types.Uintptr:
		// These types may be as small as 32 bits, but no smaller.
		size = 32
		msg = "might be "
	default:
		return
	}
	if amt >= size {
		ident := f.gofmt(x)
		f.Badf(node.Pos(), "%s %stoo small for shift of %d", ident, msg, amt)
	}
}
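A hypothetical example of the useless shifts this check is after; the variables b and n are made up, and the comments restate the message format above rather than quote real tool output:

	package main

	import "fmt"

	func main() {
		var b uint8 = 0xff
		var n int = 1
		fmt.Println(b << 8)  // b too small for shift of 8: every bit of a uint8 is shifted out
		fmt.Println(n << 32) // n might be too small for shift of 32: int may be only 32 bits wide
	}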
@@ -1,122 +0,0 @@
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// This file contains the test for canonical struct tags.

package main

import (
	"errors"
	"go/ast"
	"reflect"
	"strconv"
)

func init() {
	register("structtags",
		"check that struct field tags have canonical format and apply to exported fields as needed",
		checkCanonicalFieldTag,
		field)
}

// checkCanonicalFieldTag checks a struct field tag.
func checkCanonicalFieldTag(f *File, node ast.Node) {
	field := node.(*ast.Field)
	if field.Tag == nil {
		return
	}

	tag, err := strconv.Unquote(field.Tag.Value)
	if err != nil {
		f.Badf(field.Pos(), "unable to read struct tag %s", field.Tag.Value)
		return
	}

	if err := validateStructTag(tag); err != nil {
		f.Badf(field.Pos(), "struct field tag %s not compatible with reflect.StructTag.Get: %s", field.Tag.Value, err)
	}

	// Check for use of json or xml tags with unexported fields.

	// Embedded struct. Nothing to do for now, but that
	// may change, depending on what happens with issue 7363.
	if len(field.Names) == 0 {
		return
	}

	if field.Names[0].IsExported() {
		return
	}

	st := reflect.StructTag(tag)
	for _, enc := range [...]string{"json", "xml"} {
		if st.Get(enc) != "" {
			f.Badf(field.Pos(), "struct field %s has %s tag but is not exported", field.Names[0].Name, enc)
			return
		}
	}
}

var (
	errTagSyntax      = errors.New("bad syntax for struct tag pair")
	errTagKeySyntax   = errors.New("bad syntax for struct tag key")
	errTagValueSyntax = errors.New("bad syntax for struct tag value")
)

// validateStructTag parses the struct tag and returns an error if it is not
// in the canonical format, which is a space-separated list of key:"value"
// settings. The value may contain spaces.
func validateStructTag(tag string) error {
	// This code is based on the StructTag.Get code in package reflect.

	for tag != "" {
		// Skip leading space.
		i := 0
		for i < len(tag) && tag[i] == ' ' {
			i++
		}
		tag = tag[i:]
		if tag == "" {
			break
		}

		// Scan to colon. A space, a quote or a control character is a syntax error.
		// Strictly speaking, control chars include the range [0x7f, 0x9f], not just
		// [0x00, 0x1f], but in practice, we ignore the multi-byte control characters
		// as it is simpler to inspect the tag's bytes than the tag's runes.
		i = 0
		for i < len(tag) && tag[i] > ' ' && tag[i] != ':' && tag[i] != '"' && tag[i] != 0x7f {
			i++
		}
		if i == 0 {
			return errTagKeySyntax
		}
		if i+1 >= len(tag) || tag[i] != ':' {
			return errTagSyntax
		}
		if tag[i+1] != '"' {
			return errTagValueSyntax
		}
		tag = tag[i+1:]

		// Scan quoted string to find value.
		i = 1
		for i < len(tag) && tag[i] != '"' {
			if tag[i] == '\\' {
				i++
			}
			i++
		}
		if i >= len(tag) {
			return errTagValueSyntax
		}
		qvalue := string(tag[:i+1])
		tag = tag[i+1:]

		if _, err := strconv.Unquote(qvalue); err != nil {
			return errTagValueSyntax
		}
	}
	return nil
}
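For context, a hypothetical struct showing both problems this structtags check reports, plus how reflect behaves on the malformed tag; the User type and its fields are invented for illustration:

	package main

	import (
		"fmt"
		"reflect"
	)

	type User struct {
		Name  string `json:"name"`   // canonical key:"value" form
		Email string `json: "email"` // space after the colon: not the format reflect.StructTag.Get parses
		token string `json:"token"`  // json tag on an unexported field, which encoding/json never sees
	}

	func main() {
		f, _ := reflect.TypeOf(User{}).FieldByName("Email")
		fmt.Printf("%q\n", f.Tag.Get("json")) // "" - the malformed tag silently yields nothing
	}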
33
cmd/vet/testdata/asm.go
vendored
@@ -1,33 +0,0 @@
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore

// This file contains declarations to test the assembly in test_asm.s.

package testdata

func arg1(x int8, y uint8)
func arg2(x int16, y uint16)
func arg4(x int32, y uint32)
func arg8(x int64, y uint64)
func argint(x int, y uint)
func argptr(x *byte, y *byte, c chan int, m map[int]int, f func())
func argstring(x, y string)
func argslice(x, y []string)
func argiface(x interface{}, y interface {
	m()
})
func returnint() int
func returnbyte(x int) byte
func returnnamed(x byte) (r1 int, r2 int16, r3 string, r4 byte)
func returnintmissing() int
func leaf(x, y int) int

func noprof(x int)
func dupok(x int)
func nosplit(x int)
func rodata(x int)
func noptr(x int)
func wrapper(x int)
254
cmd/vet/testdata/asm1.s
vendored
@@ -1,254 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build amd64
|
||||
// +build vet_test
|
||||
|
||||
TEXT ·arg1(SB),0,$0-2
|
||||
MOVB x+0(FP), AX
|
||||
// MOVB x+0(FP), AX // commented out instructions used to panic
|
||||
MOVB y+1(FP), BX
|
||||
MOVW x+0(FP), AX // ERROR "\[amd64\] arg1: invalid MOVW of x\+0\(FP\); int8 is 1-byte value"
|
||||
MOVW y+1(FP), AX // ERROR "invalid MOVW of y\+1\(FP\); uint8 is 1-byte value"
|
||||
MOVL x+0(FP), AX // ERROR "invalid MOVL of x\+0\(FP\); int8 is 1-byte value"
|
||||
MOVL y+1(FP), AX // ERROR "invalid MOVL of y\+1\(FP\); uint8 is 1-byte value"
|
||||
MOVQ x+0(FP), AX // ERROR "invalid MOVQ of x\+0\(FP\); int8 is 1-byte value"
|
||||
MOVQ y+1(FP), AX // ERROR "invalid MOVQ of y\+1\(FP\); uint8 is 1-byte value"
|
||||
MOVB x+1(FP), AX // ERROR "invalid offset x\+1\(FP\); expected x\+0\(FP\)"
|
||||
MOVB y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+1\(FP\)"
|
||||
TESTB x+0(FP), AX
|
||||
TESTB y+1(FP), BX
|
||||
TESTW x+0(FP), AX // ERROR "invalid TESTW of x\+0\(FP\); int8 is 1-byte value"
|
||||
TESTW y+1(FP), AX // ERROR "invalid TESTW of y\+1\(FP\); uint8 is 1-byte value"
|
||||
TESTL x+0(FP), AX // ERROR "invalid TESTL of x\+0\(FP\); int8 is 1-byte value"
|
||||
TESTL y+1(FP), AX // ERROR "invalid TESTL of y\+1\(FP\); uint8 is 1-byte value"
|
||||
TESTQ x+0(FP), AX // ERROR "invalid TESTQ of x\+0\(FP\); int8 is 1-byte value"
|
||||
TESTQ y+1(FP), AX // ERROR "invalid TESTQ of y\+1\(FP\); uint8 is 1-byte value"
|
||||
TESTB x+1(FP), AX // ERROR "invalid offset x\+1\(FP\); expected x\+0\(FP\)"
|
||||
TESTB y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+1\(FP\)"
|
||||
MOVB 8(SP), AX // ERROR "8\(SP\) should be x\+0\(FP\)"
|
||||
MOVB 9(SP), AX // ERROR "9\(SP\) should be y\+1\(FP\)"
|
||||
MOVB 10(SP), AX // ERROR "use of 10\(SP\) points beyond argument frame"
|
||||
RET
|
||||
|
||||
TEXT ·arg2(SB),0,$0-4
|
||||
MOVB x+0(FP), AX // ERROR "arg2: invalid MOVB of x\+0\(FP\); int16 is 2-byte value"
|
||||
MOVB y+2(FP), AX // ERROR "invalid MOVB of y\+2\(FP\); uint16 is 2-byte value"
|
||||
MOVW x+0(FP), AX
|
||||
MOVW y+2(FP), BX
|
||||
MOVL x+0(FP), AX // ERROR "invalid MOVL of x\+0\(FP\); int16 is 2-byte value"
|
||||
MOVL y+2(FP), AX // ERROR "invalid MOVL of y\+2\(FP\); uint16 is 2-byte value"
|
||||
MOVQ x+0(FP), AX // ERROR "invalid MOVQ of x\+0\(FP\); int16 is 2-byte value"
|
||||
MOVQ y+2(FP), AX // ERROR "invalid MOVQ of y\+2\(FP\); uint16 is 2-byte value"
|
||||
MOVW x+2(FP), AX // ERROR "invalid offset x\+2\(FP\); expected x\+0\(FP\)"
|
||||
MOVW y+0(FP), AX // ERROR "invalid offset y\+0\(FP\); expected y\+2\(FP\)"
|
||||
TESTB x+0(FP), AX // ERROR "invalid TESTB of x\+0\(FP\); int16 is 2-byte value"
|
||||
TESTB y+2(FP), AX // ERROR "invalid TESTB of y\+2\(FP\); uint16 is 2-byte value"
|
||||
TESTW x+0(FP), AX
|
||||
TESTW y+2(FP), BX
|
||||
TESTL x+0(FP), AX // ERROR "invalid TESTL of x\+0\(FP\); int16 is 2-byte value"
|
||||
TESTL y+2(FP), AX // ERROR "invalid TESTL of y\+2\(FP\); uint16 is 2-byte value"
|
||||
TESTQ x+0(FP), AX // ERROR "invalid TESTQ of x\+0\(FP\); int16 is 2-byte value"
|
||||
TESTQ y+2(FP), AX // ERROR "invalid TESTQ of y\+2\(FP\); uint16 is 2-byte value"
|
||||
TESTW x+2(FP), AX // ERROR "invalid offset x\+2\(FP\); expected x\+0\(FP\)"
|
||||
TESTW y+0(FP), AX // ERROR "invalid offset y\+0\(FP\); expected y\+2\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·arg4(SB),0,$0-2 // ERROR "arg4: wrong argument size 2; expected \$\.\.\.-8"
|
||||
MOVB x+0(FP), AX // ERROR "invalid MOVB of x\+0\(FP\); int32 is 4-byte value"
|
||||
MOVB y+4(FP), BX // ERROR "invalid MOVB of y\+4\(FP\); uint32 is 4-byte value"
|
||||
MOVW x+0(FP), AX // ERROR "invalid MOVW of x\+0\(FP\); int32 is 4-byte value"
|
||||
MOVW y+4(FP), AX // ERROR "invalid MOVW of y\+4\(FP\); uint32 is 4-byte value"
|
||||
MOVL x+0(FP), AX
|
||||
MOVL y+4(FP), AX
|
||||
MOVQ x+0(FP), AX // ERROR "invalid MOVQ of x\+0\(FP\); int32 is 4-byte value"
|
||||
MOVQ y+4(FP), AX // ERROR "invalid MOVQ of y\+4\(FP\); uint32 is 4-byte value"
|
||||
MOVL x+4(FP), AX // ERROR "invalid offset x\+4\(FP\); expected x\+0\(FP\)"
|
||||
MOVL y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+4\(FP\)"
|
||||
TESTB x+0(FP), AX // ERROR "invalid TESTB of x\+0\(FP\); int32 is 4-byte value"
|
||||
TESTB y+4(FP), BX // ERROR "invalid TESTB of y\+4\(FP\); uint32 is 4-byte value"
|
||||
TESTW x+0(FP), AX // ERROR "invalid TESTW of x\+0\(FP\); int32 is 4-byte value"
|
||||
TESTW y+4(FP), AX // ERROR "invalid TESTW of y\+4\(FP\); uint32 is 4-byte value"
|
||||
TESTL x+0(FP), AX
|
||||
TESTL y+4(FP), AX
|
||||
TESTQ x+0(FP), AX // ERROR "invalid TESTQ of x\+0\(FP\); int32 is 4-byte value"
|
||||
TESTQ y+4(FP), AX // ERROR "invalid TESTQ of y\+4\(FP\); uint32 is 4-byte value"
|
||||
TESTL x+4(FP), AX // ERROR "invalid offset x\+4\(FP\); expected x\+0\(FP\)"
|
||||
TESTL y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+4\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·arg8(SB),7,$0-2 // ERROR "wrong argument size 2; expected \$\.\.\.-16"
|
||||
MOVB x+0(FP), AX // ERROR "invalid MOVB of x\+0\(FP\); int64 is 8-byte value"
|
||||
MOVB y+8(FP), BX // ERROR "invalid MOVB of y\+8\(FP\); uint64 is 8-byte value"
|
||||
MOVW x+0(FP), AX // ERROR "invalid MOVW of x\+0\(FP\); int64 is 8-byte value"
|
||||
MOVW y+8(FP), AX // ERROR "invalid MOVW of y\+8\(FP\); uint64 is 8-byte value"
|
||||
MOVL x+0(FP), AX // ERROR "invalid MOVL of x\+0\(FP\); int64 is 8-byte value"
|
||||
MOVL y+8(FP), AX // ERROR "invalid MOVL of y\+8\(FP\); uint64 is 8-byte value"
|
||||
MOVQ x+0(FP), AX
|
||||
MOVQ y+8(FP), AX
|
||||
MOVQ x+8(FP), AX // ERROR "invalid offset x\+8\(FP\); expected x\+0\(FP\)"
|
||||
MOVQ y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+8\(FP\)"
|
||||
TESTB x+0(FP), AX // ERROR "invalid TESTB of x\+0\(FP\); int64 is 8-byte value"
|
||||
TESTB y+8(FP), BX // ERROR "invalid TESTB of y\+8\(FP\); uint64 is 8-byte value"
|
||||
TESTW x+0(FP), AX // ERROR "invalid TESTW of x\+0\(FP\); int64 is 8-byte value"
|
||||
TESTW y+8(FP), AX // ERROR "invalid TESTW of y\+8\(FP\); uint64 is 8-byte value"
|
||||
TESTL x+0(FP), AX // ERROR "invalid TESTL of x\+0\(FP\); int64 is 8-byte value"
|
||||
TESTL y+8(FP), AX // ERROR "invalid TESTL of y\+8\(FP\); uint64 is 8-byte value"
|
||||
TESTQ x+0(FP), AX
|
||||
TESTQ y+8(FP), AX
|
||||
TESTQ x+8(FP), AX // ERROR "invalid offset x\+8\(FP\); expected x\+0\(FP\)"
|
||||
TESTQ y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+8\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·argint(SB),0,$0-2 // ERROR "wrong argument size 2; expected \$\.\.\.-16"
|
||||
MOVB x+0(FP), AX // ERROR "invalid MOVB of x\+0\(FP\); int is 8-byte value"
|
||||
MOVB y+8(FP), BX // ERROR "invalid MOVB of y\+8\(FP\); uint is 8-byte value"
|
||||
MOVW x+0(FP), AX // ERROR "invalid MOVW of x\+0\(FP\); int is 8-byte value"
|
||||
MOVW y+8(FP), AX // ERROR "invalid MOVW of y\+8\(FP\); uint is 8-byte value"
|
||||
MOVL x+0(FP), AX // ERROR "invalid MOVL of x\+0\(FP\); int is 8-byte value"
|
||||
MOVL y+8(FP), AX // ERROR "invalid MOVL of y\+8\(FP\); uint is 8-byte value"
|
||||
MOVQ x+0(FP), AX
|
||||
MOVQ y+8(FP), AX
|
||||
MOVQ x+8(FP), AX // ERROR "invalid offset x\+8\(FP\); expected x\+0\(FP\)"
|
||||
MOVQ y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+8\(FP\)"
|
||||
TESTB x+0(FP), AX // ERROR "invalid TESTB of x\+0\(FP\); int is 8-byte value"
|
||||
TESTB y+8(FP), BX // ERROR "invalid TESTB of y\+8\(FP\); uint is 8-byte value"
|
||||
TESTW x+0(FP), AX // ERROR "invalid TESTW of x\+0\(FP\); int is 8-byte value"
|
||||
TESTW y+8(FP), AX // ERROR "invalid TESTW of y\+8\(FP\); uint is 8-byte value"
|
||||
TESTL x+0(FP), AX // ERROR "invalid TESTL of x\+0\(FP\); int is 8-byte value"
|
||||
TESTL y+8(FP), AX // ERROR "invalid TESTL of y\+8\(FP\); uint is 8-byte value"
|
||||
TESTQ x+0(FP), AX
|
||||
TESTQ y+8(FP), AX
|
||||
TESTQ x+8(FP), AX // ERROR "invalid offset x\+8\(FP\); expected x\+0\(FP\)"
|
||||
TESTQ y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+8\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·argptr(SB),7,$0-2 // ERROR "wrong argument size 2; expected \$\.\.\.-40"
|
||||
MOVB x+0(FP), AX // ERROR "invalid MOVB of x\+0\(FP\); \*byte is 8-byte value"
|
||||
MOVB y+8(FP), BX // ERROR "invalid MOVB of y\+8\(FP\); \*byte is 8-byte value"
|
||||
MOVW x+0(FP), AX // ERROR "invalid MOVW of x\+0\(FP\); \*byte is 8-byte value"
|
||||
MOVW y+8(FP), AX // ERROR "invalid MOVW of y\+8\(FP\); \*byte is 8-byte value"
|
||||
MOVL x+0(FP), AX // ERROR "invalid MOVL of x\+0\(FP\); \*byte is 8-byte value"
|
||||
MOVL y+8(FP), AX // ERROR "invalid MOVL of y\+8\(FP\); \*byte is 8-byte value"
|
||||
MOVQ x+0(FP), AX
|
||||
MOVQ y+8(FP), AX
|
||||
MOVQ x+8(FP), AX // ERROR "invalid offset x\+8\(FP\); expected x\+0\(FP\)"
|
||||
MOVQ y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+8\(FP\)"
|
||||
TESTB x+0(FP), AX // ERROR "invalid TESTB of x\+0\(FP\); \*byte is 8-byte value"
|
||||
TESTB y+8(FP), BX // ERROR "invalid TESTB of y\+8\(FP\); \*byte is 8-byte value"
|
||||
TESTW x+0(FP), AX // ERROR "invalid TESTW of x\+0\(FP\); \*byte is 8-byte value"
|
||||
TESTW y+8(FP), AX // ERROR "invalid TESTW of y\+8\(FP\); \*byte is 8-byte value"
|
||||
TESTL x+0(FP), AX // ERROR "invalid TESTL of x\+0\(FP\); \*byte is 8-byte value"
|
||||
TESTL y+8(FP), AX // ERROR "invalid TESTL of y\+8\(FP\); \*byte is 8-byte value"
|
||||
TESTQ x+0(FP), AX
|
||||
TESTQ y+8(FP), AX
|
||||
TESTQ x+8(FP), AX // ERROR "invalid offset x\+8\(FP\); expected x\+0\(FP\)"
|
||||
TESTQ y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+8\(FP\)"
|
||||
MOVL c+16(FP), AX // ERROR "invalid MOVL of c\+16\(FP\); chan int is 8-byte value"
|
||||
MOVL m+24(FP), AX // ERROR "invalid MOVL of m\+24\(FP\); map\[int\]int is 8-byte value"
|
||||
MOVL f+32(FP), AX // ERROR "invalid MOVL of f\+32\(FP\); func\(\) is 8-byte value"
|
||||
RET
|
||||
|
||||
TEXT ·argstring(SB),0,$32 // ERROR "wrong argument size 0; expected \$\.\.\.-32"
|
||||
MOVW x+0(FP), AX // ERROR "invalid MOVW of x\+0\(FP\); string base is 8-byte value"
|
||||
MOVL x+0(FP), AX // ERROR "invalid MOVL of x\+0\(FP\); string base is 8-byte value"
|
||||
MOVQ x+0(FP), AX
|
||||
MOVW x_base+0(FP), AX // ERROR "invalid MOVW of x_base\+0\(FP\); string base is 8-byte value"
|
||||
MOVL x_base+0(FP), AX // ERROR "invalid MOVL of x_base\+0\(FP\); string base is 8-byte value"
|
||||
MOVQ x_base+0(FP), AX
|
||||
MOVW x_len+0(FP), AX // ERROR "invalid offset x_len\+0\(FP\); expected x_len\+8\(FP\)"
|
||||
MOVL x_len+0(FP), AX // ERROR "invalid offset x_len\+0\(FP\); expected x_len\+8\(FP\)"
|
||||
MOVQ x_len+0(FP), AX // ERROR "invalid offset x_len\+0\(FP\); expected x_len\+8\(FP\)"
|
||||
MOVW x_len+8(FP), AX // ERROR "invalid MOVW of x_len\+8\(FP\); string len is 8-byte value"
|
||||
MOVL x_len+8(FP), AX // ERROR "invalid MOVL of x_len\+8\(FP\); string len is 8-byte value"
|
||||
MOVQ x_len+8(FP), AX
|
||||
MOVQ y+0(FP), AX // ERROR "invalid offset y\+0\(FP\); expected y\+16\(FP\)"
|
||||
MOVQ y_len+8(FP), AX // ERROR "invalid offset y_len\+8\(FP\); expected y_len\+24\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·argslice(SB),0,$48 // ERROR "wrong argument size 0; expected \$\.\.\.-48"
|
||||
MOVW x+0(FP), AX // ERROR "invalid MOVW of x\+0\(FP\); slice base is 8-byte value"
|
||||
MOVL x+0(FP), AX // ERROR "invalid MOVL of x\+0\(FP\); slice base is 8-byte value"
|
||||
MOVQ x+0(FP), AX
|
||||
MOVW x_base+0(FP), AX // ERROR "invalid MOVW of x_base\+0\(FP\); slice base is 8-byte value"
|
||||
MOVL x_base+0(FP), AX // ERROR "invalid MOVL of x_base\+0\(FP\); slice base is 8-byte value"
|
||||
MOVQ x_base+0(FP), AX
|
||||
MOVW x_len+0(FP), AX // ERROR "invalid offset x_len\+0\(FP\); expected x_len\+8\(FP\)"
|
||||
MOVL x_len+0(FP), AX // ERROR "invalid offset x_len\+0\(FP\); expected x_len\+8\(FP\)"
|
||||
MOVQ x_len+0(FP), AX // ERROR "invalid offset x_len\+0\(FP\); expected x_len\+8\(FP\)"
|
||||
MOVW x_len+8(FP), AX // ERROR "invalid MOVW of x_len\+8\(FP\); slice len is 8-byte value"
|
||||
MOVL x_len+8(FP), AX // ERROR "invalid MOVL of x_len\+8\(FP\); slice len is 8-byte value"
|
||||
MOVQ x_len+8(FP), AX
|
||||
MOVW x_cap+0(FP), AX // ERROR "invalid offset x_cap\+0\(FP\); expected x_cap\+16\(FP\)"
|
||||
MOVL x_cap+0(FP), AX // ERROR "invalid offset x_cap\+0\(FP\); expected x_cap\+16\(FP\)"
|
||||
MOVQ x_cap+0(FP), AX // ERROR "invalid offset x_cap\+0\(FP\); expected x_cap\+16\(FP\)"
|
||||
MOVW x_cap+16(FP), AX // ERROR "invalid MOVW of x_cap\+16\(FP\); slice cap is 8-byte value"
|
||||
MOVL x_cap+16(FP), AX // ERROR "invalid MOVL of x_cap\+16\(FP\); slice cap is 8-byte value"
|
||||
MOVQ x_cap+16(FP), AX
|
||||
MOVQ y+0(FP), AX // ERROR "invalid offset y\+0\(FP\); expected y\+24\(FP\)"
|
||||
MOVQ y_len+8(FP), AX // ERROR "invalid offset y_len\+8\(FP\); expected y_len\+32\(FP\)"
|
||||
MOVQ y_cap+16(FP), AX // ERROR "invalid offset y_cap\+16\(FP\); expected y_cap\+40\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·argiface(SB),0,$0-32
|
||||
MOVW x+0(FP), AX // ERROR "invalid MOVW of x\+0\(FP\); interface type is 8-byte value"
|
||||
MOVL x+0(FP), AX // ERROR "invalid MOVL of x\+0\(FP\); interface type is 8-byte value"
|
||||
MOVQ x+0(FP), AX
|
||||
MOVW x_type+0(FP), AX // ERROR "invalid MOVW of x_type\+0\(FP\); interface type is 8-byte value"
|
||||
MOVL x_type+0(FP), AX // ERROR "invalid MOVL of x_type\+0\(FP\); interface type is 8-byte value"
|
||||
MOVQ x_type+0(FP), AX
|
||||
MOVQ x_itable+0(FP), AX // ERROR "unknown variable x_itable; offset 0 is x_type\+0\(FP\)"
|
||||
MOVQ x_itable+1(FP), AX // ERROR "unknown variable x_itable; offset 1 is x_type\+0\(FP\)"
|
||||
MOVW x_data+0(FP), AX // ERROR "invalid offset x_data\+0\(FP\); expected x_data\+8\(FP\)"
|
||||
MOVL x_data+0(FP), AX // ERROR "invalid offset x_data\+0\(FP\); expected x_data\+8\(FP\)"
|
||||
MOVQ x_data+0(FP), AX // ERROR "invalid offset x_data\+0\(FP\); expected x_data\+8\(FP\)"
|
||||
MOVW x_data+8(FP), AX // ERROR "invalid MOVW of x_data\+8\(FP\); interface data is 8-byte value"
|
||||
MOVL x_data+8(FP), AX // ERROR "invalid MOVL of x_data\+8\(FP\); interface data is 8-byte value"
|
||||
MOVQ x_data+8(FP), AX
|
||||
MOVW y+16(FP), AX // ERROR "invalid MOVW of y\+16\(FP\); interface itable is 8-byte value"
|
||||
MOVL y+16(FP), AX // ERROR "invalid MOVL of y\+16\(FP\); interface itable is 8-byte value"
|
||||
MOVQ y+16(FP), AX
|
||||
MOVW y_itable+16(FP), AX // ERROR "invalid MOVW of y_itable\+16\(FP\); interface itable is 8-byte value"
|
||||
MOVL y_itable+16(FP), AX // ERROR "invalid MOVL of y_itable\+16\(FP\); interface itable is 8-byte value"
|
||||
MOVQ y_itable+16(FP), AX
|
||||
MOVQ y_type+16(FP), AX // ERROR "unknown variable y_type; offset 16 is y_itable\+16\(FP\)"
|
||||
MOVW y_data+16(FP), AX // ERROR "invalid offset y_data\+16\(FP\); expected y_data\+24\(FP\)"
|
||||
MOVL y_data+16(FP), AX // ERROR "invalid offset y_data\+16\(FP\); expected y_data\+24\(FP\)"
|
||||
MOVQ y_data+16(FP), AX // ERROR "invalid offset y_data\+16\(FP\); expected y_data\+24\(FP\)"
|
||||
MOVW y_data+24(FP), AX // ERROR "invalid MOVW of y_data\+24\(FP\); interface data is 8-byte value"
|
||||
MOVL y_data+24(FP), AX // ERROR "invalid MOVL of y_data\+24\(FP\); interface data is 8-byte value"
|
||||
MOVQ y_data+24(FP), AX
|
||||
RET
|
||||
|
||||
TEXT ·returnint(SB),0,$0-8
|
||||
MOVB AX, ret+0(FP) // ERROR "invalid MOVB of ret\+0\(FP\); int is 8-byte value"
|
||||
MOVW AX, ret+0(FP) // ERROR "invalid MOVW of ret\+0\(FP\); int is 8-byte value"
|
||||
MOVL AX, ret+0(FP) // ERROR "invalid MOVL of ret\+0\(FP\); int is 8-byte value"
|
||||
MOVQ AX, ret+0(FP)
|
||||
MOVQ AX, ret+1(FP) // ERROR "invalid offset ret\+1\(FP\); expected ret\+0\(FP\)"
|
||||
MOVQ AX, r+0(FP) // ERROR "unknown variable r; offset 0 is ret\+0\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·returnbyte(SB),0,$0-9
|
||||
MOVQ x+0(FP), AX
|
||||
MOVB AX, ret+8(FP)
|
||||
MOVW AX, ret+8(FP) // ERROR "invalid MOVW of ret\+8\(FP\); byte is 1-byte value"
|
||||
MOVL AX, ret+8(FP) // ERROR "invalid MOVL of ret\+8\(FP\); byte is 1-byte value"
|
||||
MOVQ AX, ret+8(FP) // ERROR "invalid MOVQ of ret\+8\(FP\); byte is 1-byte value"
|
||||
MOVB AX, ret+7(FP) // ERROR "invalid offset ret\+7\(FP\); expected ret\+8\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·returnnamed(SB),0,$0-41
|
||||
MOVB x+0(FP), AX
|
||||
MOVQ AX, r1+8(FP)
|
||||
MOVW AX, r2+16(FP)
|
||||
MOVQ AX, r3+24(FP)
|
||||
MOVQ AX, r3_base+24(FP)
|
||||
MOVQ AX, r3_len+32(FP)
|
||||
MOVB AX, r4+40(FP)
|
||||
MOVL AX, r1+8(FP) // ERROR "invalid MOVL of r1\+8\(FP\); int is 8-byte value"
|
||||
RET
|
||||
|
||||
TEXT ·returnintmissing(SB),0,$0-8
|
||||
RET // ERROR "RET without writing to 8-byte ret\+0\(FP\)"
|
257
cmd/vet/testdata/asm2.s
vendored
@@ -1,257 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build 386
|
||||
// +build vet_test
|
||||
|
||||
TEXT ·arg1(SB),0,$0-2
|
||||
MOVB x+0(FP), AX
|
||||
MOVB y+1(FP), BX
|
||||
MOVW x+0(FP), AX // ERROR "\[386\] arg1: invalid MOVW of x\+0\(FP\); int8 is 1-byte value"
|
||||
MOVW y+1(FP), AX // ERROR "invalid MOVW of y\+1\(FP\); uint8 is 1-byte value"
|
||||
MOVL x+0(FP), AX // ERROR "invalid MOVL of x\+0\(FP\); int8 is 1-byte value"
|
||||
MOVL y+1(FP), AX // ERROR "invalid MOVL of y\+1\(FP\); uint8 is 1-byte value"
|
||||
MOVQ x+0(FP), AX // ERROR "invalid MOVQ of x\+0\(FP\); int8 is 1-byte value"
|
||||
MOVQ y+1(FP), AX // ERROR "invalid MOVQ of y\+1\(FP\); uint8 is 1-byte value"
|
||||
MOVB x+1(FP), AX // ERROR "invalid offset x\+1\(FP\); expected x\+0\(FP\)"
|
||||
MOVB y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+1\(FP\)"
|
||||
TESTB x+0(FP), AX
|
||||
TESTB y+1(FP), BX
|
||||
TESTW x+0(FP), AX // ERROR "invalid TESTW of x\+0\(FP\); int8 is 1-byte value"
|
||||
TESTW y+1(FP), AX // ERROR "invalid TESTW of y\+1\(FP\); uint8 is 1-byte value"
|
||||
TESTL x+0(FP), AX // ERROR "invalid TESTL of x\+0\(FP\); int8 is 1-byte value"
|
||||
TESTL y+1(FP), AX // ERROR "invalid TESTL of y\+1\(FP\); uint8 is 1-byte value"
|
||||
TESTQ x+0(FP), AX // ERROR "invalid TESTQ of x\+0\(FP\); int8 is 1-byte value"
|
||||
TESTQ y+1(FP), AX // ERROR "invalid TESTQ of y\+1\(FP\); uint8 is 1-byte value"
|
||||
TESTB x+1(FP), AX // ERROR "invalid offset x\+1\(FP\); expected x\+0\(FP\)"
|
||||
TESTB y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+1\(FP\)"
|
||||
MOVB 4(SP), AX // ERROR "4\(SP\) should be x\+0\(FP\)"
|
||||
MOVB 5(SP), AX // ERROR "5\(SP\) should be y\+1\(FP\)"
|
||||
MOVB 6(SP), AX // ERROR "use of 6\(SP\) points beyond argument frame"
|
||||
RET
|
||||
|
||||
TEXT ·arg2(SB),0,$0-4
|
||||
MOVB x+0(FP), AX // ERROR "arg2: invalid MOVB of x\+0\(FP\); int16 is 2-byte value"
|
||||
MOVB y+2(FP), AX // ERROR "invalid MOVB of y\+2\(FP\); uint16 is 2-byte value"
|
||||
MOVW x+0(FP), AX
|
||||
MOVW y+2(FP), BX
|
||||
MOVL x+0(FP), AX // ERROR "invalid MOVL of x\+0\(FP\); int16 is 2-byte value"
|
||||
MOVL y+2(FP), AX // ERROR "invalid MOVL of y\+2\(FP\); uint16 is 2-byte value"
|
||||
MOVQ x+0(FP), AX // ERROR "invalid MOVQ of x\+0\(FP\); int16 is 2-byte value"
|
||||
MOVQ y+2(FP), AX // ERROR "invalid MOVQ of y\+2\(FP\); uint16 is 2-byte value"
|
||||
MOVW x+2(FP), AX // ERROR "invalid offset x\+2\(FP\); expected x\+0\(FP\)"
|
||||
MOVW y+0(FP), AX // ERROR "invalid offset y\+0\(FP\); expected y\+2\(FP\)"
|
||||
TESTB x+0(FP), AX // ERROR "invalid TESTB of x\+0\(FP\); int16 is 2-byte value"
|
||||
TESTB y+2(FP), AX // ERROR "invalid TESTB of y\+2\(FP\); uint16 is 2-byte value"
|
||||
TESTW x+0(FP), AX
|
||||
TESTW y+2(FP), BX
|
||||
TESTL x+0(FP), AX // ERROR "invalid TESTL of x\+0\(FP\); int16 is 2-byte value"
|
||||
TESTL y+2(FP), AX // ERROR "invalid TESTL of y\+2\(FP\); uint16 is 2-byte value"
|
||||
TESTQ x+0(FP), AX // ERROR "invalid TESTQ of x\+0\(FP\); int16 is 2-byte value"
|
||||
TESTQ y+2(FP), AX // ERROR "invalid TESTQ of y\+2\(FP\); uint16 is 2-byte value"
|
||||
TESTW x+2(FP), AX // ERROR "invalid offset x\+2\(FP\); expected x\+0\(FP\)"
|
||||
TESTW y+0(FP), AX // ERROR "invalid offset y\+0\(FP\); expected y\+2\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·arg4(SB),0,$0-2 // ERROR "arg4: wrong argument size 2; expected \$\.\.\.-8"
|
||||
MOVB x+0(FP), AX // ERROR "invalid MOVB of x\+0\(FP\); int32 is 4-byte value"
|
||||
MOVB y+4(FP), BX // ERROR "invalid MOVB of y\+4\(FP\); uint32 is 4-byte value"
|
||||
MOVW x+0(FP), AX // ERROR "invalid MOVW of x\+0\(FP\); int32 is 4-byte value"
|
||||
MOVW y+4(FP), AX // ERROR "invalid MOVW of y\+4\(FP\); uint32 is 4-byte value"
|
||||
MOVL x+0(FP), AX
|
||||
MOVL y+4(FP), AX
|
||||
MOVQ x+0(FP), AX // ERROR "invalid MOVQ of x\+0\(FP\); int32 is 4-byte value"
|
||||
MOVQ y+4(FP), AX // ERROR "invalid MOVQ of y\+4\(FP\); uint32 is 4-byte value"
|
||||
MOVL x+4(FP), AX // ERROR "invalid offset x\+4\(FP\); expected x\+0\(FP\)"
|
||||
MOVL y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+4\(FP\)"
|
||||
TESTB x+0(FP), AX // ERROR "invalid TESTB of x\+0\(FP\); int32 is 4-byte value"
|
||||
TESTB y+4(FP), BX // ERROR "invalid TESTB of y\+4\(FP\); uint32 is 4-byte value"
|
||||
TESTW x+0(FP), AX // ERROR "invalid TESTW of x\+0\(FP\); int32 is 4-byte value"
|
||||
TESTW y+4(FP), AX // ERROR "invalid TESTW of y\+4\(FP\); uint32 is 4-byte value"
|
||||
TESTL x+0(FP), AX
|
||||
TESTL y+4(FP), AX
|
||||
TESTQ x+0(FP), AX // ERROR "invalid TESTQ of x\+0\(FP\); int32 is 4-byte value"
|
||||
TESTQ y+4(FP), AX // ERROR "invalid TESTQ of y\+4\(FP\); uint32 is 4-byte value"
|
||||
TESTL x+4(FP), AX // ERROR "invalid offset x\+4\(FP\); expected x\+0\(FP\)"
|
||||
TESTL y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+4\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·arg8(SB),7,$0-2 // ERROR "wrong argument size 2; expected \$\.\.\.-16"
|
||||
MOVB x+0(FP), AX // ERROR "invalid MOVB of x\+0\(FP\); int64 is 8-byte value"
|
||||
MOVB y+8(FP), BX // ERROR "invalid MOVB of y\+8\(FP\); uint64 is 8-byte value"
|
||||
MOVW x+0(FP), AX // ERROR "invalid MOVW of x\+0\(FP\); int64 is 8-byte value"
|
||||
MOVW y+8(FP), AX // ERROR "invalid MOVW of y\+8\(FP\); uint64 is 8-byte value"
|
||||
MOVL x+0(FP), AX // ERROR "invalid MOVL of x\+0\(FP\); int64 is 8-byte value containing x_lo\+0\(FP\) and x_hi\+4\(FP\)"
|
||||
MOVL x_lo+0(FP), AX
|
||||
MOVL x_hi+4(FP), AX
|
||||
MOVL y+8(FP), AX // ERROR "invalid MOVL of y\+8\(FP\); uint64 is 8-byte value containing y_lo\+8\(FP\) and y_hi\+12\(FP\)"
|
||||
MOVL y_lo+8(FP), AX
|
||||
MOVL y_hi+12(FP), AX
|
||||
MOVQ x+0(FP), AX
|
||||
MOVQ y+8(FP), AX
|
||||
MOVQ x+8(FP), AX // ERROR "invalid offset x\+8\(FP\); expected x\+0\(FP\)"
|
||||
MOVQ y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+8\(FP\)"
|
||||
TESTB x+0(FP), AX // ERROR "invalid TESTB of x\+0\(FP\); int64 is 8-byte value"
|
||||
TESTB y+8(FP), BX // ERROR "invalid TESTB of y\+8\(FP\); uint64 is 8-byte value"
|
||||
TESTW x+0(FP), AX // ERROR "invalid TESTW of x\+0\(FP\); int64 is 8-byte value"
|
||||
TESTW y+8(FP), AX // ERROR "invalid TESTW of y\+8\(FP\); uint64 is 8-byte value"
|
||||
TESTL x+0(FP), AX // ERROR "invalid TESTL of x\+0\(FP\); int64 is 8-byte value containing x_lo\+0\(FP\) and x_hi\+4\(FP\)"
|
||||
TESTL y+8(FP), AX // ERROR "invalid TESTL of y\+8\(FP\); uint64 is 8-byte value containing y_lo\+8\(FP\) and y_hi\+12\(FP\)"
|
||||
TESTQ x+0(FP), AX
|
||||
TESTQ y+8(FP), AX
|
||||
TESTQ x+8(FP), AX // ERROR "invalid offset x\+8\(FP\); expected x\+0\(FP\)"
|
||||
TESTQ y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+8\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·argint(SB),0,$0-2 // ERROR "wrong argument size 2; expected \$\.\.\.-8"
|
||||
MOVB x+0(FP), AX // ERROR "invalid MOVB of x\+0\(FP\); int is 4-byte value"
|
||||
MOVB y+4(FP), BX // ERROR "invalid MOVB of y\+4\(FP\); uint is 4-byte value"
|
||||
MOVW x+0(FP), AX // ERROR "invalid MOVW of x\+0\(FP\); int is 4-byte value"
|
||||
MOVW y+4(FP), AX // ERROR "invalid MOVW of y\+4\(FP\); uint is 4-byte value"
|
||||
MOVL x+0(FP), AX
|
||||
MOVL y+4(FP), AX
|
||||
MOVQ x+0(FP), AX // ERROR "invalid MOVQ of x\+0\(FP\); int is 4-byte value"
|
||||
MOVQ y+4(FP), AX // ERROR "invalid MOVQ of y\+4\(FP\); uint is 4-byte value"
|
||||
MOVQ x+4(FP), AX // ERROR "invalid offset x\+4\(FP\); expected x\+0\(FP\)"
|
||||
MOVQ y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+4\(FP\)"
|
||||
TESTB x+0(FP), AX // ERROR "invalid TESTB of x\+0\(FP\); int is 4-byte value"
|
||||
TESTB y+4(FP), BX // ERROR "invalid TESTB of y\+4\(FP\); uint is 4-byte value"
|
||||
TESTW x+0(FP), AX // ERROR "invalid TESTW of x\+0\(FP\); int is 4-byte value"
|
||||
TESTW y+4(FP), AX // ERROR "invalid TESTW of y\+4\(FP\); uint is 4-byte value"
|
||||
TESTL x+0(FP), AX
|
||||
TESTL y+4(FP), AX
|
||||
TESTQ x+0(FP), AX // ERROR "invalid TESTQ of x\+0\(FP\); int is 4-byte value"
|
||||
TESTQ y+4(FP), AX // ERROR "invalid TESTQ of y\+4\(FP\); uint is 4-byte value"
|
||||
TESTQ x+4(FP), AX // ERROR "invalid offset x\+4\(FP\); expected x\+0\(FP\)"
|
||||
TESTQ y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+4\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·argptr(SB),7,$0-2 // ERROR "wrong argument size 2; expected \$\.\.\.-20"
|
||||
MOVB x+0(FP), AX // ERROR "invalid MOVB of x\+0\(FP\); \*byte is 4-byte value"
|
||||
MOVB y+4(FP), BX // ERROR "invalid MOVB of y\+4\(FP\); \*byte is 4-byte value"
|
||||
MOVW x+0(FP), AX // ERROR "invalid MOVW of x\+0\(FP\); \*byte is 4-byte value"
|
||||
MOVW y+4(FP), AX // ERROR "invalid MOVW of y\+4\(FP\); \*byte is 4-byte value"
|
||||
MOVL x+0(FP), AX
|
||||
MOVL y+4(FP), AX
|
||||
MOVQ x+0(FP), AX // ERROR "invalid MOVQ of x\+0\(FP\); \*byte is 4-byte value"
|
||||
MOVQ y+4(FP), AX // ERROR "invalid MOVQ of y\+4\(FP\); \*byte is 4-byte value"
|
||||
MOVQ x+4(FP), AX // ERROR "invalid offset x\+4\(FP\); expected x\+0\(FP\)"
|
||||
MOVQ y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+4\(FP\)"
|
||||
TESTB x+0(FP), AX // ERROR "invalid TESTB of x\+0\(FP\); \*byte is 4-byte value"
|
||||
TESTB y+4(FP), BX // ERROR "invalid TESTB of y\+4\(FP\); \*byte is 4-byte value"
|
||||
TESTW x+0(FP), AX // ERROR "invalid TESTW of x\+0\(FP\); \*byte is 4-byte value"
|
||||
TESTW y+4(FP), AX // ERROR "invalid TESTW of y\+4\(FP\); \*byte is 4-byte value"
|
||||
TESTL x+0(FP), AX
|
||||
TESTL y+4(FP), AX
|
||||
TESTQ x+0(FP), AX // ERROR "invalid TESTQ of x\+0\(FP\); \*byte is 4-byte value"
|
||||
TESTQ y+4(FP), AX // ERROR "invalid TESTQ of y\+4\(FP\); \*byte is 4-byte value"
|
||||
TESTQ x+4(FP), AX // ERROR "invalid offset x\+4\(FP\); expected x\+0\(FP\)"
|
||||
TESTQ y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+4\(FP\)"
|
||||
MOVW c+8(FP), AX // ERROR "invalid MOVW of c\+8\(FP\); chan int is 4-byte value"
|
||||
MOVW m+12(FP), AX // ERROR "invalid MOVW of m\+12\(FP\); map\[int\]int is 4-byte value"
|
||||
MOVW f+16(FP), AX // ERROR "invalid MOVW of f\+16\(FP\); func\(\) is 4-byte value"
|
||||
RET
|
||||
|
||||
TEXT ·argstring(SB),0,$16 // ERROR "wrong argument size 0; expected \$\.\.\.-16"
|
||||
MOVW x+0(FP), AX // ERROR "invalid MOVW of x\+0\(FP\); string base is 4-byte value"
|
||||
MOVL x+0(FP), AX
|
||||
MOVQ x+0(FP), AX // ERROR "invalid MOVQ of x\+0\(FP\); string base is 4-byte value"
|
||||
MOVW x_base+0(FP), AX // ERROR "invalid MOVW of x_base\+0\(FP\); string base is 4-byte value"
|
||||
MOVL x_base+0(FP), AX
|
||||
MOVQ x_base+0(FP), AX // ERROR "invalid MOVQ of x_base\+0\(FP\); string base is 4-byte value"
|
||||
MOVW x_len+0(FP), AX // ERROR "invalid offset x_len\+0\(FP\); expected x_len\+4\(FP\)"
|
||||
MOVL x_len+0(FP), AX // ERROR "invalid offset x_len\+0\(FP\); expected x_len\+4\(FP\)"
|
||||
MOVQ x_len+0(FP), AX // ERROR "invalid offset x_len\+0\(FP\); expected x_len\+4\(FP\)"
|
||||
MOVW x_len+4(FP), AX // ERROR "invalid MOVW of x_len\+4\(FP\); string len is 4-byte value"
|
||||
MOVL x_len+4(FP), AX
|
||||
MOVQ x_len+4(FP), AX // ERROR "invalid MOVQ of x_len\+4\(FP\); string len is 4-byte value"
|
||||
MOVQ y+0(FP), AX // ERROR "invalid offset y\+0\(FP\); expected y\+8\(FP\)"
|
||||
MOVQ y_len+4(FP), AX // ERROR "invalid offset y_len\+4\(FP\); expected y_len\+12\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·argslice(SB),0,$24 // ERROR "wrong argument size 0; expected \$\.\.\.-24"
|
||||
MOVW x+0(FP), AX // ERROR "invalid MOVW of x\+0\(FP\); slice base is 4-byte value"
|
||||
MOVL x+0(FP), AX
|
||||
MOVQ x+0(FP), AX // ERROR "invalid MOVQ of x\+0\(FP\); slice base is 4-byte value"
|
||||
MOVW x_base+0(FP), AX // ERROR "invalid MOVW of x_base\+0\(FP\); slice base is 4-byte value"
|
||||
MOVL x_base+0(FP), AX
|
||||
MOVQ x_base+0(FP), AX // ERROR "invalid MOVQ of x_base\+0\(FP\); slice base is 4-byte value"
|
||||
MOVW x_len+0(FP), AX // ERROR "invalid offset x_len\+0\(FP\); expected x_len\+4\(FP\)"
|
||||
MOVL x_len+0(FP), AX // ERROR "invalid offset x_len\+0\(FP\); expected x_len\+4\(FP\)"
|
||||
MOVQ x_len+0(FP), AX // ERROR "invalid offset x_len\+0\(FP\); expected x_len\+4\(FP\)"
|
||||
MOVW x_len+4(FP), AX // ERROR "invalid MOVW of x_len\+4\(FP\); slice len is 4-byte value"
|
||||
MOVL x_len+4(FP), AX
|
||||
MOVQ x_len+4(FP), AX // ERROR "invalid MOVQ of x_len\+4\(FP\); slice len is 4-byte value"
|
||||
MOVW x_cap+0(FP), AX // ERROR "invalid offset x_cap\+0\(FP\); expected x_cap\+8\(FP\)"
|
||||
MOVL x_cap+0(FP), AX // ERROR "invalid offset x_cap\+0\(FP\); expected x_cap\+8\(FP\)"
|
||||
MOVQ x_cap+0(FP), AX // ERROR "invalid offset x_cap\+0\(FP\); expected x_cap\+8\(FP\)"
|
||||
MOVW x_cap+8(FP), AX // ERROR "invalid MOVW of x_cap\+8\(FP\); slice cap is 4-byte value"
|
||||
MOVL x_cap+8(FP), AX
|
||||
MOVQ x_cap+8(FP), AX // ERROR "invalid MOVQ of x_cap\+8\(FP\); slice cap is 4-byte value"
|
||||
MOVQ y+0(FP), AX // ERROR "invalid offset y\+0\(FP\); expected y\+12\(FP\)"
|
||||
MOVQ y_len+4(FP), AX // ERROR "invalid offset y_len\+4\(FP\); expected y_len\+16\(FP\)"
|
||||
MOVQ y_cap+8(FP), AX // ERROR "invalid offset y_cap\+8\(FP\); expected y_cap\+20\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·argiface(SB),0,$0-16
|
||||
MOVW x+0(FP), AX // ERROR "invalid MOVW of x\+0\(FP\); interface type is 4-byte value"
|
||||
MOVL x+0(FP), AX
|
||||
MOVQ x+0(FP), AX // ERROR "invalid MOVQ of x\+0\(FP\); interface type is 4-byte value"
|
||||
MOVW x_type+0(FP), AX // ERROR "invalid MOVW of x_type\+0\(FP\); interface type is 4-byte value"
|
||||
MOVL x_type+0(FP), AX
|
||||
MOVQ x_type+0(FP), AX // ERROR "invalid MOVQ of x_type\+0\(FP\); interface type is 4-byte value"
|
||||
MOVQ x_itable+0(FP), AX // ERROR "unknown variable x_itable; offset 0 is x_type\+0\(FP\)"
|
||||
MOVQ x_itable+1(FP), AX // ERROR "unknown variable x_itable; offset 1 is x_type\+0\(FP\)"
|
||||
MOVW x_data+0(FP), AX // ERROR "invalid offset x_data\+0\(FP\); expected x_data\+4\(FP\)"
|
||||
MOVL x_data+0(FP), AX // ERROR "invalid offset x_data\+0\(FP\); expected x_data\+4\(FP\)"
|
||||
MOVQ x_data+0(FP), AX // ERROR "invalid offset x_data\+0\(FP\); expected x_data\+4\(FP\)"
|
||||
MOVW x_data+4(FP), AX // ERROR "invalid MOVW of x_data\+4\(FP\); interface data is 4-byte value"
|
||||
MOVL x_data+4(FP), AX
|
||||
MOVQ x_data+4(FP), AX // ERROR "invalid MOVQ of x_data\+4\(FP\); interface data is 4-byte value"
|
||||
MOVW y+8(FP), AX // ERROR "invalid MOVW of y\+8\(FP\); interface itable is 4-byte value"
|
||||
MOVL y+8(FP), AX
|
||||
MOVQ y+8(FP), AX // ERROR "invalid MOVQ of y\+8\(FP\); interface itable is 4-byte value"
|
||||
MOVW y_itable+8(FP), AX // ERROR "invalid MOVW of y_itable\+8\(FP\); interface itable is 4-byte value"
|
||||
MOVL y_itable+8(FP), AX
|
||||
MOVQ y_itable+8(FP), AX // ERROR "invalid MOVQ of y_itable\+8\(FP\); interface itable is 4-byte value"
|
||||
MOVQ y_type+8(FP), AX // ERROR "unknown variable y_type; offset 8 is y_itable\+8\(FP\)"
|
||||
MOVW y_data+8(FP), AX // ERROR "invalid offset y_data\+8\(FP\); expected y_data\+12\(FP\)"
|
||||
MOVL y_data+8(FP), AX // ERROR "invalid offset y_data\+8\(FP\); expected y_data\+12\(FP\)"
|
||||
MOVQ y_data+8(FP), AX // ERROR "invalid offset y_data\+8\(FP\); expected y_data\+12\(FP\)"
|
||||
MOVW y_data+12(FP), AX // ERROR "invalid MOVW of y_data\+12\(FP\); interface data is 4-byte value"
|
||||
MOVL y_data+12(FP), AX
|
||||
MOVQ y_data+12(FP), AX // ERROR "invalid MOVQ of y_data\+12\(FP\); interface data is 4-byte value"
|
||||
RET
|
||||
|
||||
TEXT ·returnint(SB),0,$0-4
|
||||
MOVB AX, ret+0(FP) // ERROR "invalid MOVB of ret\+0\(FP\); int is 4-byte value"
|
||||
MOVW AX, ret+0(FP) // ERROR "invalid MOVW of ret\+0\(FP\); int is 4-byte value"
|
||||
MOVL AX, ret+0(FP)
|
||||
MOVQ AX, ret+0(FP) // ERROR "invalid MOVQ of ret\+0\(FP\); int is 4-byte value"
|
||||
MOVQ AX, ret+1(FP) // ERROR "invalid offset ret\+1\(FP\); expected ret\+0\(FP\)"
|
||||
MOVQ AX, r+0(FP) // ERROR "unknown variable r; offset 0 is ret\+0\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·returnbyte(SB),0,$0-5
|
||||
MOVL x+0(FP), AX
|
||||
MOVB AX, ret+4(FP)
|
||||
MOVW AX, ret+4(FP) // ERROR "invalid MOVW of ret\+4\(FP\); byte is 1-byte value"
|
||||
MOVL AX, ret+4(FP) // ERROR "invalid MOVL of ret\+4\(FP\); byte is 1-byte value"
|
||||
MOVQ AX, ret+4(FP) // ERROR "invalid MOVQ of ret\+4\(FP\); byte is 1-byte value"
|
||||
MOVB AX, ret+3(FP) // ERROR "invalid offset ret\+3\(FP\); expected ret\+4\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·returnnamed(SB),0,$0-21
|
||||
MOVB x+0(FP), AX
|
||||
MOVL AX, r1+4(FP)
|
||||
MOVW AX, r2+8(FP)
|
||||
MOVL AX, r3+12(FP)
|
||||
MOVL AX, r3_base+12(FP)
|
||||
MOVL AX, r3_len+16(FP)
|
||||
MOVB AX, r4+20(FP)
|
||||
MOVQ AX, r1+4(FP) // ERROR "invalid MOVQ of r1\+4\(FP\); int is 4-byte value"
|
||||
RET
|
||||
|
||||
TEXT ·returnintmissing(SB),0,$0-4
|
||||
RET // ERROR "RET without writing to 4-byte ret\+0\(FP\)"
|
178
cmd/vet/testdata/asm3.s
vendored
@@ -1,178 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build arm
|
||||
// +build vet_test
|
||||
|
||||
TEXT ·arg1(SB),0,$0-2
|
||||
MOVB x+0(FP), AX
|
||||
MOVB y+1(FP), BX
|
||||
MOVH x+0(FP), AX // ERROR "\[arm\] arg1: invalid MOVH of x\+0\(FP\); int8 is 1-byte value"
|
||||
MOVH y+1(FP), AX // ERROR "invalid MOVH of y\+1\(FP\); uint8 is 1-byte value"
|
||||
MOVW x+0(FP), AX // ERROR "invalid MOVW of x\+0\(FP\); int8 is 1-byte value"
|
||||
MOVW y+1(FP), AX // ERROR "invalid MOVW of y\+1\(FP\); uint8 is 1-byte value"
|
||||
MOVB x+1(FP), AX // ERROR "invalid offset x\+1\(FP\); expected x\+0\(FP\)"
|
||||
MOVB y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+1\(FP\)"
|
||||
MOVB 8(R13), AX // ERROR "8\(R13\) should be x\+0\(FP\)"
|
||||
MOVB 9(R13), AX // ERROR "9\(R13\) should be y\+1\(FP\)"
|
||||
MOVB 10(R13), AX // ERROR "use of 10\(R13\) points beyond argument frame"
|
||||
RET
|
||||
|
||||
TEXT ·arg2(SB),0,$0-4
|
||||
MOVB x+0(FP), AX // ERROR "arg2: invalid MOVB of x\+0\(FP\); int16 is 2-byte value"
|
||||
MOVB y+2(FP), AX // ERROR "invalid MOVB of y\+2\(FP\); uint16 is 2-byte value"
|
||||
MOVH x+0(FP), AX
|
||||
MOVH y+2(FP), BX
|
||||
MOVW x+0(FP), AX // ERROR "invalid MOVW of x\+0\(FP\); int16 is 2-byte value"
|
||||
MOVW y+2(FP), AX // ERROR "invalid MOVW of y\+2\(FP\); uint16 is 2-byte value"
|
||||
MOVH x+2(FP), AX // ERROR "invalid offset x\+2\(FP\); expected x\+0\(FP\)"
|
||||
MOVH y+0(FP), AX // ERROR "invalid offset y\+0\(FP\); expected y\+2\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·arg4(SB),0,$0-2 // ERROR "arg4: wrong argument size 2; expected \$\.\.\.-8"
|
||||
MOVB x+0(FP), AX // ERROR "invalid MOVB of x\+0\(FP\); int32 is 4-byte value"
|
||||
MOVB y+4(FP), BX // ERROR "invalid MOVB of y\+4\(FP\); uint32 is 4-byte value"
|
||||
MOVH x+0(FP), AX // ERROR "invalid MOVH of x\+0\(FP\); int32 is 4-byte value"
|
||||
MOVH y+4(FP), AX // ERROR "invalid MOVH of y\+4\(FP\); uint32 is 4-byte value"
|
||||
MOVW x+0(FP), AX
|
||||
MOVW y+4(FP), AX
|
||||
MOVW x+4(FP), AX // ERROR "invalid offset x\+4\(FP\); expected x\+0\(FP\)"
|
||||
MOVW y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+4\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·arg8(SB),7,$0-2 // ERROR "wrong argument size 2; expected \$\.\.\.-16"
|
||||
MOVB x+0(FP), AX // ERROR "invalid MOVB of x\+0\(FP\); int64 is 8-byte value"
|
||||
MOVB y+8(FP), BX // ERROR "invalid MOVB of y\+8\(FP\); uint64 is 8-byte value"
|
||||
MOVH x+0(FP), AX // ERROR "invalid MOVH of x\+0\(FP\); int64 is 8-byte value"
|
||||
MOVH y+8(FP), AX // ERROR "invalid MOVH of y\+8\(FP\); uint64 is 8-byte value"
|
||||
MOVW x+0(FP), AX // ERROR "invalid MOVW of x\+0\(FP\); int64 is 8-byte value containing x_lo\+0\(FP\) and x_hi\+4\(FP\)"
|
||||
MOVW x_lo+0(FP), AX
|
||||
MOVW x_hi+4(FP), AX
|
||||
MOVW y+8(FP), AX // ERROR "invalid MOVW of y\+8\(FP\); uint64 is 8-byte value containing y_lo\+8\(FP\) and y_hi\+12\(FP\)"
|
||||
MOVW y_lo+8(FP), AX
|
||||
MOVW y_hi+12(FP), AX
|
||||
MOVQ x+0(FP), AX
|
||||
MOVQ y+8(FP), AX
|
||||
MOVQ x+8(FP), AX // ERROR "invalid offset x\+8\(FP\); expected x\+0\(FP\)"
|
||||
MOVQ y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+8\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·argint(SB),0,$0-2 // ERROR "wrong argument size 2; expected \$\.\.\.-8"
|
||||
MOVB x+0(FP), AX // ERROR "invalid MOVB of x\+0\(FP\); int is 4-byte value"
|
||||
MOVB y+4(FP), BX // ERROR "invalid MOVB of y\+4\(FP\); uint is 4-byte value"
|
||||
MOVH x+0(FP), AX // ERROR "invalid MOVH of x\+0\(FP\); int is 4-byte value"
|
||||
MOVH y+4(FP), AX // ERROR "invalid MOVH of y\+4\(FP\); uint is 4-byte value"
|
||||
MOVW x+0(FP), AX
|
||||
MOVW y+4(FP), AX
|
||||
MOVQ x+4(FP), AX // ERROR "invalid offset x\+4\(FP\); expected x\+0\(FP\)"
|
||||
MOVQ y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+4\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·argptr(SB),7,$0-2 // ERROR "wrong argument size 2; expected \$\.\.\.-20"
|
||||
MOVB x+0(FP), AX // ERROR "invalid MOVB of x\+0\(FP\); \*byte is 4-byte value"
|
||||
MOVB y+4(FP), BX // ERROR "invalid MOVB of y\+4\(FP\); \*byte is 4-byte value"
|
||||
MOVH x+0(FP), AX // ERROR "invalid MOVH of x\+0\(FP\); \*byte is 4-byte value"
|
||||
MOVH y+4(FP), AX // ERROR "invalid MOVH of y\+4\(FP\); \*byte is 4-byte value"
|
||||
MOVW x+0(FP), AX
|
||||
MOVW y+4(FP), AX
|
||||
MOVQ x+4(FP), AX // ERROR "invalid offset x\+4\(FP\); expected x\+0\(FP\)"
|
||||
MOVQ y+2(FP), AX // ERROR "invalid offset y\+2\(FP\); expected y\+4\(FP\)"
|
||||
MOVH c+8(FP), AX // ERROR "invalid MOVH of c\+8\(FP\); chan int is 4-byte value"
|
||||
MOVH m+12(FP), AX // ERROR "invalid MOVH of m\+12\(FP\); map\[int\]int is 4-byte value"
|
||||
MOVH f+16(FP), AX // ERROR "invalid MOVH of f\+16\(FP\); func\(\) is 4-byte value"
|
||||
RET
|
||||
|
||||
TEXT ·argstring(SB),0,$16 // ERROR "wrong argument size 0; expected \$\.\.\.-16"
|
||||
MOVH x+0(FP), AX // ERROR "invalid MOVH of x\+0\(FP\); string base is 4-byte value"
|
||||
MOVW x+0(FP), AX
|
||||
MOVH x_base+0(FP), AX // ERROR "invalid MOVH of x_base\+0\(FP\); string base is 4-byte value"
|
||||
MOVW x_base+0(FP), AX
|
||||
MOVH x_len+0(FP), AX // ERROR "invalid offset x_len\+0\(FP\); expected x_len\+4\(FP\)"
|
||||
MOVW x_len+0(FP), AX // ERROR "invalid offset x_len\+0\(FP\); expected x_len\+4\(FP\)"
|
||||
MOVQ x_len+0(FP), AX // ERROR "invalid offset x_len\+0\(FP\); expected x_len\+4\(FP\)"
|
||||
MOVH x_len+4(FP), AX // ERROR "invalid MOVH of x_len\+4\(FP\); string len is 4-byte value"
|
||||
MOVW x_len+4(FP), AX
|
||||
MOVQ y+0(FP), AX // ERROR "invalid offset y\+0\(FP\); expected y\+8\(FP\)"
|
||||
MOVQ y_len+4(FP), AX // ERROR "invalid offset y_len\+4\(FP\); expected y_len\+12\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·argslice(SB),0,$24 // ERROR "wrong argument size 0; expected \$\.\.\.-24"
|
||||
MOVH x+0(FP), AX // ERROR "invalid MOVH of x\+0\(FP\); slice base is 4-byte value"
|
||||
MOVW x+0(FP), AX
|
||||
MOVH x_base+0(FP), AX // ERROR "invalid MOVH of x_base\+0\(FP\); slice base is 4-byte value"
|
||||
MOVW x_base+0(FP), AX
|
||||
MOVH x_len+0(FP), AX // ERROR "invalid offset x_len\+0\(FP\); expected x_len\+4\(FP\)"
|
||||
MOVW x_len+0(FP), AX // ERROR "invalid offset x_len\+0\(FP\); expected x_len\+4\(FP\)"
|
||||
MOVQ x_len+0(FP), AX // ERROR "invalid offset x_len\+0\(FP\); expected x_len\+4\(FP\)"
|
||||
MOVH x_len+4(FP), AX // ERROR "invalid MOVH of x_len\+4\(FP\); slice len is 4-byte value"
|
||||
MOVW x_len+4(FP), AX
|
||||
MOVH x_cap+0(FP), AX // ERROR "invalid offset x_cap\+0\(FP\); expected x_cap\+8\(FP\)"
|
||||
MOVW x_cap+0(FP), AX // ERROR "invalid offset x_cap\+0\(FP\); expected x_cap\+8\(FP\)"
|
||||
MOVQ x_cap+0(FP), AX // ERROR "invalid offset x_cap\+0\(FP\); expected x_cap\+8\(FP\)"
|
||||
MOVH x_cap+8(FP), AX // ERROR "invalid MOVH of x_cap\+8\(FP\); slice cap is 4-byte value"
|
||||
MOVW x_cap+8(FP), AX
|
||||
MOVQ y+0(FP), AX // ERROR "invalid offset y\+0\(FP\); expected y\+12\(FP\)"
|
||||
MOVQ y_len+4(FP), AX // ERROR "invalid offset y_len\+4\(FP\); expected y_len\+16\(FP\)"
|
||||
MOVQ y_cap+8(FP), AX // ERROR "invalid offset y_cap\+8\(FP\); expected y_cap\+20\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·argiface(SB),0,$0-16
|
||||
MOVH x+0(FP), AX // ERROR "invalid MOVH of x\+0\(FP\); interface type is 4-byte value"
|
||||
MOVW x+0(FP), AX
|
||||
MOVH x_type+0(FP), AX // ERROR "invalid MOVH of x_type\+0\(FP\); interface type is 4-byte value"
|
||||
MOVW x_type+0(FP), AX
|
||||
MOVQ x_itable+0(FP), AX // ERROR "unknown variable x_itable; offset 0 is x_type\+0\(FP\)"
|
||||
MOVQ x_itable+1(FP), AX // ERROR "unknown variable x_itable; offset 1 is x_type\+0\(FP\)"
|
||||
MOVH x_data+0(FP), AX // ERROR "invalid offset x_data\+0\(FP\); expected x_data\+4\(FP\)"
|
||||
MOVW x_data+0(FP), AX // ERROR "invalid offset x_data\+0\(FP\); expected x_data\+4\(FP\)"
|
||||
MOVQ x_data+0(FP), AX // ERROR "invalid offset x_data\+0\(FP\); expected x_data\+4\(FP\)"
|
||||
MOVH x_data+4(FP), AX // ERROR "invalid MOVH of x_data\+4\(FP\); interface data is 4-byte value"
|
||||
MOVW x_data+4(FP), AX
|
||||
MOVH y+8(FP), AX // ERROR "invalid MOVH of y\+8\(FP\); interface itable is 4-byte value"
|
||||
MOVW y+8(FP), AX
|
||||
MOVH y_itable+8(FP), AX // ERROR "invalid MOVH of y_itable\+8\(FP\); interface itable is 4-byte value"
|
||||
MOVW y_itable+8(FP), AX
|
||||
MOVQ y_type+8(FP), AX // ERROR "unknown variable y_type; offset 8 is y_itable\+8\(FP\)"
|
||||
MOVH y_data+8(FP), AX // ERROR "invalid offset y_data\+8\(FP\); expected y_data\+12\(FP\)"
|
||||
MOVW y_data+8(FP), AX // ERROR "invalid offset y_data\+8\(FP\); expected y_data\+12\(FP\)"
|
||||
MOVQ y_data+8(FP), AX // ERROR "invalid offset y_data\+8\(FP\); expected y_data\+12\(FP\)"
|
||||
MOVH y_data+12(FP), AX // ERROR "invalid MOVH of y_data\+12\(FP\); interface data is 4-byte value"
|
||||
MOVW y_data+12(FP), AX
|
||||
RET
|
||||
|
||||
TEXT ·returnint(SB),0,$0-4
|
||||
MOVB AX, ret+0(FP) // ERROR "invalid MOVB of ret\+0\(FP\); int is 4-byte value"
|
||||
MOVH AX, ret+0(FP) // ERROR "invalid MOVH of ret\+0\(FP\); int is 4-byte value"
|
||||
MOVW AX, ret+0(FP)
|
||||
MOVQ AX, ret+1(FP) // ERROR "invalid offset ret\+1\(FP\); expected ret\+0\(FP\)"
|
||||
MOVQ AX, r+0(FP) // ERROR "unknown variable r; offset 0 is ret\+0\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·returnbyte(SB),0,$0-5
|
||||
MOVW x+0(FP), AX
|
||||
MOVB AX, ret+4(FP)
|
||||
MOVH AX, ret+4(FP) // ERROR "invalid MOVH of ret\+4\(FP\); byte is 1-byte value"
|
||||
MOVW AX, ret+4(FP) // ERROR "invalid MOVW of ret\+4\(FP\); byte is 1-byte value"
|
||||
MOVB AX, ret+3(FP) // ERROR "invalid offset ret\+3\(FP\); expected ret\+4\(FP\)"
|
||||
RET
|
||||
|
||||
TEXT ·returnnamed(SB),0,$0-21
|
||||
MOVB x+0(FP), AX
|
||||
MOVW AX, r1+4(FP)
|
||||
MOVH AX, r2+8(FP)
|
||||
MOVW AX, r3+12(FP)
|
||||
MOVW AX, r3_base+12(FP)
|
||||
MOVW AX, r3_len+16(FP)
|
||||
MOVB AX, r4+20(FP)
|
||||
MOVB AX, r1+4(FP) // ERROR "invalid MOVB of r1\+4\(FP\); int is 4-byte value"
|
||||
RET
|
||||
|
||||
TEXT ·returnintmissing(SB),0,$0-4
|
||||
RET // ERROR "RET without writing to 4-byte ret\+0\(FP\)"
|
||||
|
||||
TEXT ·leaf(SB),0,$-4-12
|
||||
MOVW x+0(FP), AX
|
||||
MOVW y+4(FP), AX
|
||||
MOVW AX, ret+8(FP)
|
||||
RET
|
26 cmd/vet/testdata/asm4.s vendored
@ -1,26 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build amd64
|
||||
// +build vet_test
|
||||
|
||||
// Test cases for symbolic NOSPLIT etc. on TEXT symbols.
|
||||
|
||||
TEXT ·noprof(SB),NOPROF,$0-8
|
||||
RET
|
||||
|
||||
TEXT ·dupok(SB),DUPOK,$0-8
|
||||
RET
|
||||
|
||||
TEXT ·nosplit(SB),NOSPLIT,$0
|
||||
RET
|
||||
|
||||
TEXT ·rodata(SB),RODATA,$0-8
|
||||
RET
|
||||
|
||||
TEXT ·noptr(SB),NOPTR|NOSPLIT,$0
|
||||
RET
|
||||
|
||||
TEXT ·wrapper(SB),WRAPPER,$0-8
|
||||
RET
|
18 cmd/vet/testdata/assign.go vendored
@ -1,18 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains tests for the useless-assignment checker.
|
||||
|
||||
package testdata
|
||||
|
||||
type ST struct {
|
||||
x int
|
||||
}
|
||||
|
||||
func (s *ST) SetX(x int) {
|
||||
// Accidental self-assignment; it should be "s.x = x"
|
||||
x = x // ERROR "self-assignment of x to x"
|
||||
// Another mistake
|
||||
s.x = s.x // ERROR "self-assignment of s.x to s.x"
|
||||
}
|
43 cmd/vet/testdata/atomic.go vendored
@ -1,43 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains tests for the atomic checker.
|
||||
|
||||
package testdata
|
||||
|
||||
import (
|
||||
"sync/atomic"
|
||||
)
|
||||
|
||||
type Counter uint64
|
||||
|
||||
func AtomicTests() {
|
||||
x := uint64(1)
|
||||
x = atomic.AddUint64(&x, 1) // ERROR "direct assignment to atomic value"
|
||||
_, x = 10, atomic.AddUint64(&x, 1) // ERROR "direct assignment to atomic value"
|
||||
x, _ = atomic.AddUint64(&x, 1), 10 // ERROR "direct assignment to atomic value"
|
||||
|
||||
y := &x
|
||||
*y = atomic.AddUint64(y, 1) // ERROR "direct assignment to atomic value"
|
||||
|
||||
var su struct{ Counter uint64 }
|
||||
su.Counter = atomic.AddUint64(&su.Counter, 1) // ERROR "direct assignment to atomic value"
|
||||
z1 := atomic.AddUint64(&su.Counter, 1)
|
||||
_ = z1 // Avoid err "z declared and not used"
|
||||
|
||||
var sp struct{ Counter *uint64 }
|
||||
*sp.Counter = atomic.AddUint64(sp.Counter, 1) // ERROR "direct assignment to atomic value"
|
||||
z2 := atomic.AddUint64(sp.Counter, 1)
|
||||
_ = z2 // Avoid err "z declared and not used"
|
||||
|
||||
au := []uint64{10, 20}
|
||||
au[0] = atomic.AddUint64(&au[0], 1) // ERROR "direct assignment to atomic value"
|
||||
au[1] = atomic.AddUint64(&au[0], 1)
|
||||
|
||||
ap := []*uint64{&au[0], &au[1]}
|
||||
*ap[0] = atomic.AddUint64(ap[0], 1) // ERROR "direct assignment to atomic value"
|
||||
*ap[1] = atomic.AddUint64(ap[0], 1)
|
||||
|
||||
x = atomic.AddUint64() // Used to make vet crash; now silently ignored.
|
||||
}
|
113 cmd/vet/testdata/bool.go vendored
@ -1,113 +0,0 @@
|
||||
// Copyright 2014 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains tests for the bool checker.
|
||||
|
||||
package testdata
|
||||
|
||||
import "io"
|
||||
|
||||
func RatherStupidConditions() {
|
||||
var f, g func() int
|
||||
if f() == 0 || f() == 0 { // OK f might have side effects
|
||||
}
|
||||
if v, w := f(), g(); v == w || v == w { // ERROR "redundant or: v == w || v == w"
|
||||
}
|
||||
_ = f == nil || f == nil // ERROR "redundant or: f == nil || f == nil"
|
||||
|
||||
_ = i == byte(1) || i == byte(1) // TODO conversions are treated as if they may have side effects
|
||||
|
||||
var c chan int
|
||||
_ = 0 == <-c || 0 == <-c // OK subsequent receives may yield different values
|
||||
for i, j := <-c, <-c; i == j || i == j; i, j = <-c, <-c { // ERROR "redundant or: i == j || i == j"
|
||||
}
|
||||
|
||||
var i, j, k int
|
||||
_ = i+1 == 1 || i+1 == 1 // ERROR "redundant or: i\+1 == 1 || i\+1 == 1"
|
||||
_ = i == 1 || j+1 == i || i == 1 // ERROR "redundant or: i == 1 || i == 1"
|
||||
|
||||
_ = i == 1 || i == 1 || f() == 1 // ERROR "redundant or: i == 1 || i == 1"
|
||||
_ = i == 1 || f() == 1 || i == 1 // OK f may alter i as a side effect
|
||||
_ = f() == 1 || i == 1 || i == 1 // ERROR "redundant or: i == 1 || i == 1"
|
||||
|
||||
// Test partition edge cases
|
||||
_ = f() == 1 || i == 1 || i == 1 || j == 1 // ERROR "redundant or: i == 1 || i == 1"
|
||||
_ = f() == 1 || j == 1 || i == 1 || i == 1 // ERROR "redundant or: i == 1 || i == 1"
|
||||
_ = i == 1 || f() == 1 || i == 1 || i == 1 // ERROR "redundant or: i == 1 || i == 1"
|
||||
_ = i == 1 || i == 1 || f() == 1 || i == 1 // ERROR "redundant or: i == 1 || i == 1"
|
||||
_ = i == 1 || i == 1 || j == 1 || f() == 1 // ERROR "redundant or: i == 1 || i == 1"
|
||||
_ = j == 1 || i == 1 || i == 1 || f() == 1 // ERROR "redundant or: i == 1 || i == 1"
|
||||
_ = i == 1 || f() == 1 || f() == 1 || i == 1
|
||||
|
||||
_ = i == 1 || (i == 1 || i == 2) // ERROR "redundant or: i == 1 || i == 1"
|
||||
_ = i == 1 || (f() == 1 || i == 1) // OK f may alter i as a side effect
|
||||
_ = i == 1 || (i == 1 || f() == 1) // ERROR "redundant or: i == 1 || i == 1"
|
||||
_ = i == 1 || (i == 2 || (i == 1 || i == 3)) // ERROR "redundant or: i == 1 || i == 1"
|
||||
|
||||
var a, b bool
|
||||
_ = i == 1 || (a || (i == 1 || b)) // ERROR "redundant or: i == 1 || i == 1"
|
||||
|
||||
// Check that all redundant ors are flagged
|
||||
_ = j == 0 ||
|
||||
i == 1 ||
|
||||
f() == 1 ||
|
||||
j == 0 || // ERROR "redundant or: j == 0 || j == 0"
|
||||
i == 1 || // ERROR "redundant or: i == 1 || i == 1"
|
||||
i == 1 || // ERROR "redundant or: i == 1 || i == 1"
|
||||
i == 1 ||
|
||||
j == 0 ||
|
||||
k == 0
|
||||
|
||||
_ = i == 1*2*3 || i == 1*2*3 // ERROR "redundant or: i == 1\*2\*3 || i == 1\*2\*3"
|
||||
|
||||
// These test that redundant, suspect expressions do not trigger multiple errors.
|
||||
_ = i != 0 || i != 0 // ERROR "redundant or: i != 0 || i != 0"
|
||||
_ = i == 0 && i == 0 // ERROR "redundant and: i == 0 && i == 0"
|
||||
|
||||
// and is dual to or; check the basics and
|
||||
// let the or tests pull the rest of the weight.
|
||||
_ = 0 != <-c && 0 != <-c // OK subsequent receives may yield different values
|
||||
_ = f() != 0 && f() != 0 // OK f might have side effects
|
||||
_ = f != nil && f != nil // ERROR "redundant and: f != nil && f != nil"
|
||||
_ = i != 1 && i != 1 && f() != 1 // ERROR "redundant and: i != 1 && i != 1"
|
||||
_ = i != 1 && f() != 1 && i != 1 // OK f may alter i as a side effect
|
||||
_ = f() != 1 && i != 1 && i != 1 // ERROR "redundant and: i != 1 && i != 1"
|
||||
}
|
||||
|
||||
func RoyallySuspectConditions() {
|
||||
var i, j int
|
||||
|
||||
_ = i == 0 || i == 1 // OK
|
||||
_ = i != 0 || i != 1 // ERROR "suspect or: i != 0 || i != 1"
|
||||
_ = i != 0 || 1 != i // ERROR "suspect or: i != 0 || 1 != i"
|
||||
_ = 0 != i || 1 != i // ERROR "suspect or: 0 != i || 1 != i"
|
||||
_ = 0 != i || i != 1 // ERROR "suspect or: 0 != i || i != 1"
|
||||
|
||||
_ = (0 != i) || i != 1 // ERROR "suspect or: 0 != i || i != 1"
|
||||
|
||||
_ = i+3 != 7 || j+5 == 0 || i+3 != 9 // ERROR "suspect or: i\+3 != 7 || i\+3 != 9"
|
||||
|
||||
_ = i != 0 || j == 0 || i != 1 // ERROR "suspect or: i != 0 || i != 1"
|
||||
|
||||
_ = i != 0 || i != 1<<4 // ERROR "suspect or: i != 0 || i != 1<<4"
|
||||
|
||||
_ = i != 0 || j != 0
|
||||
_ = 0 != i || 0 != j
|
||||
|
||||
var s string
|
||||
_ = s != "one" || s != "the other" // ERROR "suspect or: s != .one. || s != .the other."
|
||||
|
||||
_ = "et" != "alii" || "et" != "cetera" // ERROR "suspect or: .et. != .alii. || .et. != .cetera."
|
||||
_ = "me gustas" != "tu" || "le gustas" != "tu" // OK we could catch this case, but it's not worth the code
|
||||
|
||||
var err error
|
||||
_ = err != nil || err != io.EOF // TODO catch this case?
|
||||
|
||||
// Sanity check and.
|
||||
_ = i != 0 && i != 1 // OK
|
||||
_ = i == 0 && i == 1 // ERROR "suspect and: i == 0 && i == 1"
|
||||
_ = i == 0 && 1 == i // ERROR "suspect and: i == 0 && 1 == i"
|
||||
_ = 0 == i && 1 == i // ERROR "suspect and: 0 == i && 1 == i"
|
||||
_ = 0 == i && i == 1 // ERROR "suspect and: 0 == i && i == 1"
|
||||
}
|
14 cmd/vet/testdata/buildtag.go vendored
@ -1,14 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains tests for the buildtag checker.
|
||||
|
||||
// +builder // ERROR "possible malformed \+build comment"
|
||||
// +build !ignore
|
||||
|
||||
package testdata
|
||||
|
||||
// +build toolate // ERROR "build comment must appear before package clause and be followed by a blank line"
|
||||
|
||||
var _ = 3
|
15 cmd/vet/testdata/buildtag_bad.go vendored
@ -1,15 +0,0 @@
|
||||
// This file contains misplaced or malformed build constraints.
|
||||
// The Go tool will skip it, because the constraints are invalid.
|
||||
// It serves only to test the tag checker during make test.
|
||||
|
||||
// Mention +build // ERROR "possible malformed \+build comment"
|
||||
|
||||
// +build !!bang // ERROR "invalid double negative in build constraint"
|
||||
// +build @#$ // ERROR "invalid non-alphanumeric build constraint"
|
||||
|
||||
// +build toolate // ERROR "build comment must appear before package clause and be followed by a blank line"
|
||||
package bad
|
||||
|
||||
// This is package 'bad' rather than 'main' so the erroneous build
|
||||
// tag doesn't end up looking like a package doc for the vet command
|
||||
// when examined by godoc.
|
63 cmd/vet/testdata/composite.go vendored
@ -1,63 +0,0 @@
|
||||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains tests for the untagged struct literal checker.
|
||||
|
||||
// This file contains the test for untagged struct literals.
|
||||
|
||||
package testdata
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"go/scanner"
|
||||
)
|
||||
|
||||
var Okay1 = []string{
|
||||
"Name",
|
||||
"Usage",
|
||||
"DefValue",
|
||||
}
|
||||
|
||||
var Okay2 = map[string]bool{
|
||||
"Name": true,
|
||||
"Usage": true,
|
||||
"DefValue": true,
|
||||
}
|
||||
|
||||
var Okay3 = struct {
|
||||
X string
|
||||
Y string
|
||||
Z string
|
||||
}{
|
||||
"Name",
|
||||
"Usage",
|
||||
"DefValue",
|
||||
}
|
||||
|
||||
type MyStruct struct {
|
||||
X string
|
||||
Y string
|
||||
Z string
|
||||
}
|
||||
|
||||
var Okay4 = MyStruct{
|
||||
"Name",
|
||||
"Usage",
|
||||
"DefValue",
|
||||
}
|
||||
|
||||
// Testing is awkward because we need to reference things from a separate package
|
||||
// to trigger the warnings.
|
||||
|
||||
var BadStructLiteralUsedInTests = flag.Flag{ // ERROR "unkeyed fields"
|
||||
"Name",
|
||||
"Usage",
|
||||
nil, // Value
|
||||
"DefValue",
|
||||
}
|
||||
|
||||
// Used to test the check for slices and arrays: If that test is disabled and
|
||||
// vet is run with --compositewhitelist=false, this line triggers an error.
|
||||
// Clumsy but sufficient.
|
||||
var scannerErrorListTest = scanner.ErrorList{nil, nil}
|
90 cmd/vet/testdata/copylock_func.go vendored
@ -1,90 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains tests for the copylock checker's
|
||||
// function declaration analysis.
|
||||
|
||||
package testdata
|
||||
|
||||
import "sync"
|
||||
|
||||
func OkFunc(*sync.Mutex) {}
|
||||
func BadFunc(sync.Mutex) {} // ERROR "BadFunc passes Lock by value: sync.Mutex"
|
||||
func OkRet() *sync.Mutex {}
|
||||
func BadRet() sync.Mutex {} // ERROR "BadRet returns Lock by value: sync.Mutex"
|
||||
|
||||
type EmbeddedRWMutex struct {
|
||||
sync.RWMutex
|
||||
}
|
||||
|
||||
func (*EmbeddedRWMutex) OkMeth() {}
|
||||
func (EmbeddedRWMutex) BadMeth() {} // ERROR "BadMeth passes Lock by value: testdata.EmbeddedRWMutex"
|
||||
func OkFunc(e *EmbeddedRWMutex) {}
|
||||
func BadFunc(EmbeddedRWMutex) {} // ERROR "BadFunc passes Lock by value: testdata.EmbeddedRWMutex"
|
||||
func OkRet() *EmbeddedRWMutex {}
|
||||
func BadRet() EmbeddedRWMutex {} // ERROR "BadRet returns Lock by value: testdata.EmbeddedRWMutex"
|
||||
|
||||
type FieldMutex struct {
|
||||
s sync.Mutex
|
||||
}
|
||||
|
||||
func (*FieldMutex) OkMeth() {}
|
||||
func (FieldMutex) BadMeth() {} // ERROR "BadMeth passes Lock by value: testdata.FieldMutex contains sync.Mutex"
|
||||
func OkFunc(*FieldMutex) {}
|
||||
func BadFunc(FieldMutex, int) {} // ERROR "BadFunc passes Lock by value: testdata.FieldMutex contains sync.Mutex"
|
||||
|
||||
type L0 struct {
|
||||
L1
|
||||
}
|
||||
|
||||
type L1 struct {
|
||||
l L2
|
||||
}
|
||||
|
||||
type L2 struct {
|
||||
sync.Mutex
|
||||
}
|
||||
|
||||
func (*L0) Ok() {}
|
||||
func (L0) Bad() {} // ERROR "Bad passes Lock by value: testdata.L0 contains testdata.L1 contains testdata.L2"
|
||||
|
||||
type EmbeddedMutexPointer struct {
|
||||
s *sync.Mutex // safe to copy this pointer
|
||||
}
|
||||
|
||||
func (*EmbeddedMutexPointer) Ok() {}
|
||||
func (EmbeddedMutexPointer) AlsoOk() {}
|
||||
func StillOk(EmbeddedMutexPointer) {}
|
||||
func LookinGood() EmbeddedMutexPointer {}
|
||||
|
||||
type EmbeddedLocker struct {
|
||||
sync.Locker // safe to copy interface values
|
||||
}
|
||||
|
||||
func (*EmbeddedLocker) Ok() {}
|
||||
func (EmbeddedLocker) AlsoOk() {}
|
||||
|
||||
type CustomLock struct{}
|
||||
|
||||
func (*CustomLock) Lock() {}
|
||||
func (*CustomLock) Unlock() {}
|
||||
|
||||
func Ok(*CustomLock) {}
|
||||
func Bad(CustomLock) {} // ERROR "Bad passes Lock by value: testdata.CustomLock"
|
||||
|
||||
// TODO: Unfortunate cases
|
||||
|
||||
// Non-ideal error message:
|
||||
// Since we're looking for Lock methods, sync.Once's underlying
|
||||
// sync.Mutex gets called out, but without any reference to the sync.Once.
|
||||
type LocalOnce sync.Once
|
||||
|
||||
func (LocalOnce) Bad() {} // ERROR "Bad passes Lock by value: testdata.LocalOnce contains sync.Mutex"
|
||||
|
||||
// False negative:
|
||||
// LocalMutex doesn't have a Lock method.
|
||||
// Nevertheless, it is probably a bad idea to pass it by value.
|
||||
type LocalMutex sync.Mutex
|
||||
|
||||
func (LocalMutex) Bad() {} // WANTED: An error here :(
|
67 cmd/vet/testdata/copylock_range.go vendored
@ -1,67 +0,0 @@
|
||||
// Copyright 2014 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains tests for the copylock checker's
|
||||
// range statement analysis.
|
||||
|
||||
package testdata
|
||||
|
||||
import "sync"
|
||||
|
||||
func rangeMutex() {
|
||||
var mu sync.Mutex
|
||||
var i int
|
||||
|
||||
var s []sync.Mutex
|
||||
for range s {
|
||||
}
|
||||
for i = range s {
|
||||
}
|
||||
for i := range s {
|
||||
}
|
||||
for i, _ = range s {
|
||||
}
|
||||
for i, _ := range s {
|
||||
}
|
||||
for _, mu = range s { // ERROR "range var mu copies Lock: sync.Mutex"
|
||||
}
|
||||
for _, m := range s { // ERROR "range var m copies Lock: sync.Mutex"
|
||||
}
|
||||
for i, mu = range s { // ERROR "range var mu copies Lock: sync.Mutex"
|
||||
}
|
||||
for i, m := range s { // ERROR "range var m copies Lock: sync.Mutex"
|
||||
}
|
||||
|
||||
var a [3]sync.Mutex
|
||||
for _, m := range a { // ERROR "range var m copies Lock: sync.Mutex"
|
||||
}
|
||||
|
||||
var m map[sync.Mutex]sync.Mutex
|
||||
for k := range m { // ERROR "range var k copies Lock: sync.Mutex"
|
||||
}
|
||||
for mu, _ = range m { // ERROR "range var mu copies Lock: sync.Mutex"
|
||||
}
|
||||
for k, _ := range m { // ERROR "range var k copies Lock: sync.Mutex"
|
||||
}
|
||||
for _, mu = range m { // ERROR "range var mu copies Lock: sync.Mutex"
|
||||
}
|
||||
for _, v := range m { // ERROR "range var v copies Lock: sync.Mutex"
|
||||
}
|
||||
|
||||
var c chan sync.Mutex
|
||||
for range c {
|
||||
}
|
||||
for mu = range c { // ERROR "range var mu copies Lock: sync.Mutex"
|
||||
}
|
||||
for v := range c { // ERROR "range var v copies Lock: sync.Mutex"
|
||||
}
|
||||
|
||||
// Test non-idents in range variables
|
||||
var t struct {
|
||||
i int
|
||||
mu sync.Mutex
|
||||
}
|
||||
for t.i, t.mu = range s { // ERROR "range var t.mu copies Lock: sync.Mutex"
|
||||
}
|
||||
}
|
2125 cmd/vet/testdata/deadcode.go vendored
File diff suppressed because it is too large
17 cmd/vet/testdata/divergent/buf.go vendored
@ -1,17 +0,0 @@
|
||||
// Test of examples with divergent packages.
|
||||
|
||||
// Package buf ...
|
||||
package buf
|
||||
|
||||
// Buf is a ...
|
||||
type Buf []byte
|
||||
|
||||
// Append ...
|
||||
func (*Buf) Append([]byte) {}
|
||||
|
||||
func (Buf) Reset() {}
|
||||
|
||||
func (Buf) Len() int { return 0 }
|
||||
|
||||
// DefaultBuf is a ...
|
||||
var DefaultBuf Buf
|
35 cmd/vet/testdata/divergent/buf_test.go vendored
@ -1,35 +0,0 @@
|
||||
// Test of examples with divergent packages.
|
||||
|
||||
package buf_test
|
||||
|
||||
func Example() {} // OK because is package-level.
|
||||
|
||||
func Example_suffix() // OK because refers to suffix annotation.
|
||||
|
||||
func Example_BadSuffix() // ERROR "Example_BadSuffix has malformed example suffix: BadSuffix"
|
||||
|
||||
func ExampleBuf() // OK because refers to known top-level type.
|
||||
|
||||
func ExampleBuf_Append() {} // OK because refers to known method.
|
||||
|
||||
func ExampleBuf_Clear() {} // ERROR "ExampleBuf_Clear refers to unknown field or method: Buf.Clear"
|
||||
|
||||
func ExampleBuf_suffix() {} // OK because refers to suffix annotation.
|
||||
|
||||
func ExampleBuf_Append_Bad() {} // ERROR "ExampleBuf_Append_Bad has malformed example suffix: Bad"
|
||||
|
||||
func ExampleBuf_Append_suffix() {} // OK because refers to known method with valid suffix.
|
||||
|
||||
func ExampleDefaultBuf() {} // OK because refers to top-level identifier.
|
||||
|
||||
func ExampleBuf_Reset() bool { return true } // ERROR "ExampleBuf_Reset should return nothing"
|
||||
|
||||
func ExampleBuf_Len(i int) {} // ERROR "ExampleBuf_Len should be niladic"
|
||||
|
||||
// "Puffer" is German for "Buffer".
|
||||
|
||||
func ExamplePuffer() // ERROR "ExamplePuffer refers to unknown identifier: Puffer"
|
||||
|
||||
func ExamplePuffer_Append() // ERROR "ExamplePuffer_Append refers to unknown identifier: Puffer"
|
||||
|
||||
func ExamplePuffer_suffix() // ERROR "ExamplePuffer_suffix refers to unknown identifier: Puffer"
|
48 cmd/vet/testdata/examples_test.go vendored
@ -1,48 +0,0 @@
|
||||
// Test of examples.
|
||||
|
||||
package testdata
|
||||
|
||||
// Buf is a ...
|
||||
type Buf []byte
|
||||
|
||||
// Append ...
|
||||
func (*Buf) Append([]byte) {}
|
||||
|
||||
func (Buf) Reset() {}
|
||||
|
||||
func (Buf) Len() int { return 0 }
|
||||
|
||||
// DefaultBuf is a ...
|
||||
var DefaultBuf Buf
|
||||
|
||||
func Example() {} // OK because is package-level.
|
||||
|
||||
func Example_goodSuffix() // OK because refers to suffix annotation.
|
||||
|
||||
func Example_BadSuffix() // ERROR "Example_BadSuffix has malformed example suffix: BadSuffix"
|
||||
|
||||
func ExampleBuf() // OK because refers to known top-level type.
|
||||
|
||||
func ExampleBuf_Append() {} // OK because refers to known method.
|
||||
|
||||
func ExampleBuf_Clear() {} // ERROR "ExampleBuf_Clear refers to unknown field or method: Buf.Clear"
|
||||
|
||||
func ExampleBuf_suffix() {} // OK because refers to suffix annotation.
|
||||
|
||||
func ExampleBuf_Append_Bad() {} // ERROR "ExampleBuf_Append_Bad has malformed example suffix: Bad"
|
||||
|
||||
func ExampleBuf_Append_suffix() {} // OK because refers to known method with valid suffix.
|
||||
|
||||
func ExampleDefaultBuf() {} // OK because refers to top-level identifier.
|
||||
|
||||
func ExampleBuf_Reset() bool { return true } // ERROR "ExampleBuf_Reset should return nothing"
|
||||
|
||||
func ExampleBuf_Len(i int) {} // ERROR "ExampleBuf_Len should be niladic"
|
||||
|
||||
// "Puffer" is German for "Buffer".
|
||||
|
||||
func ExamplePuffer() // ERROR "ExamplePuffer refers to unknown identifier: Puffer"
|
||||
|
||||
func ExamplePuffer_Append() // ERROR "ExamplePuffer_Append refers to unknown identifier: Puffer"
|
||||
|
||||
func ExamplePuffer_suffix() // ERROR "ExamplePuffer_suffix refers to unknown identifier: Puffer"
|
33 cmd/vet/testdata/incomplete/examples_test.go vendored
@ -1,33 +0,0 @@
|
||||
// Test of examples.
|
||||
|
||||
package testdata
|
||||
|
||||
func Example() {} // OK because is package-level.
|
||||
|
||||
func Example_suffix() // OK because refers to suffix annotation.
|
||||
|
||||
func Example_BadSuffix() // OK because non-test package was excluded. No false positives wanted.
|
||||
|
||||
func ExampleBuf() // OK because non-test package was excluded. No false positives wanted.
|
||||
|
||||
func ExampleBuf_Append() {} // OK because non-test package was excluded. No false positives wanted.
|
||||
|
||||
func ExampleBuf_Clear() {} // OK because non-test package was excluded. No false positives wanted.
|
||||
|
||||
func ExampleBuf_suffix() {} // OK because refers to suffix annotation.
|
||||
|
||||
func ExampleBuf_Append_Bad() {} // OK because non-test package was excluded. No false positives wanted.
|
||||
|
||||
func ExampleBuf_Append_suffix() {} // OK because refers to known method with valid suffix.
|
||||
|
||||
func ExampleBuf_Reset() bool { return true } // ERROR "ExampleBuf_Reset should return nothing"
|
||||
|
||||
func ExampleBuf_Len(i int) {} // ERROR "ExampleBuf_Len should be niladic"
|
||||
|
||||
// "Puffer" is German for "Buffer".
|
||||
|
||||
func ExamplePuffer() // OK because non-test package was excluded. No false positives wanted.
|
||||
|
||||
func ExamplePuffer_Append() // OK because non-test package was excluded. No false positives wanted.
|
||||
|
||||
func ExamplePuffer_suffix() // OK because non-test package was excluded. No false positives wanted.
|
22 cmd/vet/testdata/method.go vendored
@ -1,22 +0,0 @@
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains tests for the canonical method checker.
|
||||
|
||||
// This file contains the code to check canonical methods.
|
||||
|
||||
package testdata
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
type MethodTest int
|
||||
|
||||
func (t *MethodTest) Scan(x fmt.ScanState, c byte) { // ERROR "should have signature Scan"
|
||||
}
|
||||
|
||||
type MethodTestInterface interface {
|
||||
ReadByte() byte // ERROR "should have signature ReadByte"
|
||||
}
|
35 cmd/vet/testdata/nilfunc.go vendored
@ -1,35 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package testdata
|
||||
|
||||
func F() {}
|
||||
|
||||
type T struct {
|
||||
F func()
|
||||
}
|
||||
|
||||
func (T) M() {}
|
||||
|
||||
var Fv = F
|
||||
|
||||
func Comparison() {
|
||||
var t T
|
||||
var fn func()
|
||||
if fn == nil || Fv == nil || t.F == nil {
|
||||
// no error; these func vars or fields may be nil
|
||||
}
|
||||
if F == nil { // ERROR "comparison of function F == nil is always false"
|
||||
panic("can't happen")
|
||||
}
|
||||
if t.M == nil { // ERROR "comparison of function M == nil is always false"
|
||||
panic("can't happen")
|
||||
}
|
||||
if F != nil { // ERROR "comparison of function F != nil is always true"
|
||||
if t.M != nil { // ERROR "comparison of function M != nil is always true"
|
||||
return
|
||||
}
|
||||
}
|
||||
panic("can't happen")
|
||||
}
|
342 cmd/vet/testdata/print.go vendored
@ -1,342 +0,0 @@
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains tests for the printf checker.
|
||||
|
||||
package testdata
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"os"
|
||||
"unsafe" // just for test case printing unsafe.Pointer
|
||||
)
|
||||
|
||||
func UnsafePointerPrintfTest() {
|
||||
var up unsafe.Pointer
|
||||
fmt.Printf("%p, %x %X", up, up, up)
|
||||
}
|
||||
|
||||
// Error methods that do not satisfy the Error interface and should be checked.
|
||||
type errorTest1 int
|
||||
|
||||
func (errorTest1) Error(...interface{}) string {
|
||||
return "hi"
|
||||
}
|
||||
|
||||
type errorTest2 int // Analogous to testing's *T type.
|
||||
func (errorTest2) Error(...interface{}) {
|
||||
}
|
||||
|
||||
type errorTest3 int
|
||||
|
||||
func (errorTest3) Error() { // No return value.
|
||||
}
|
||||
|
||||
type errorTest4 int
|
||||
|
||||
func (errorTest4) Error() int { // Different return type.
|
||||
return 3
|
||||
}
|
||||
|
||||
type errorTest5 int
|
||||
|
||||
func (errorTest5) error() { // niladic; don't complain if no args (was bug)
|
||||
}
|
||||
|
||||
// This function never executes, but it serves as a simple test for the program.
|
||||
// Test with make test.
|
||||
func PrintfTests() {
|
||||
var b bool
|
||||
var i int
|
||||
var r rune
|
||||
var s string
|
||||
var x float64
|
||||
var p *int
|
||||
var imap map[int]int
|
||||
var fslice []float64
|
||||
var c complex64
|
||||
// Some good format/argtypes
|
||||
fmt.Printf("")
|
||||
fmt.Printf("%b %b %b", 3, i, x)
|
||||
fmt.Printf("%c %c %c %c", 3, i, 'x', r)
|
||||
fmt.Printf("%d %d %d", 3, i, imap)
|
||||
fmt.Printf("%e %e %e %e", 3e9, x, fslice, c)
|
||||
fmt.Printf("%E %E %E %E", 3e9, x, fslice, c)
|
||||
fmt.Printf("%f %f %f %f", 3e9, x, fslice, c)
|
||||
fmt.Printf("%F %F %F %F", 3e9, x, fslice, c)
|
||||
fmt.Printf("%g %g %g %g", 3e9, x, fslice, c)
|
||||
fmt.Printf("%G %G %G %G", 3e9, x, fslice, c)
|
||||
fmt.Printf("%b %b %b %b", 3e9, x, fslice, c)
|
||||
fmt.Printf("%o %o", 3, i)
|
||||
fmt.Printf("%p %p", p, nil)
|
||||
fmt.Printf("%q %q %q %q", 3, i, 'x', r)
|
||||
fmt.Printf("%s %s %s", "hi", s, []byte{65})
|
||||
fmt.Printf("%t %t", true, b)
|
||||
fmt.Printf("%T %T", 3, i)
|
||||
fmt.Printf("%U %U", 3, i)
|
||||
fmt.Printf("%v %v", 3, i)
|
||||
fmt.Printf("%x %x %x %x", 3, i, "hi", s)
|
||||
fmt.Printf("%X %X %X %X", 3, i, "hi", s)
|
||||
fmt.Printf("%.*s %d %g", 3, "hi", 23, 2.3)
|
||||
fmt.Printf("%s", &stringerv)
|
||||
fmt.Printf("%v", &stringerv)
|
||||
fmt.Printf("%T", &stringerv)
|
||||
fmt.Printf("%v", notstringerv)
|
||||
fmt.Printf("%T", notstringerv)
|
||||
fmt.Printf("%q", stringerarrayv)
|
||||
fmt.Printf("%v", stringerarrayv)
|
||||
fmt.Printf("%s", stringerarrayv)
|
||||
fmt.Printf("%v", notstringerarrayv)
|
||||
fmt.Printf("%T", notstringerarrayv)
|
||||
fmt.Printf("%d", new(Formatter))
|
||||
fmt.Printf("%*%", 2) // Ridiculous but allowed.
|
||||
fmt.Printf("%s", interface{}(nil)) // Nothing useful we can say.
|
||||
|
||||
fmt.Printf("%g", 1+2i)
|
||||
// Some bad format/argTypes
|
||||
fmt.Printf("%b", "hi") // ERROR "arg .hi. for printf verb %b of wrong type"
|
||||
fmt.Printf("%t", c) // ERROR "arg c for printf verb %t of wrong type"
|
||||
fmt.Printf("%t", 1+2i) // ERROR "arg 1 \+ 2i for printf verb %t of wrong type"
|
||||
fmt.Printf("%c", 2.3) // ERROR "arg 2.3 for printf verb %c of wrong type"
|
||||
fmt.Printf("%d", 2.3) // ERROR "arg 2.3 for printf verb %d of wrong type"
|
||||
fmt.Printf("%e", "hi") // ERROR "arg .hi. for printf verb %e of wrong type"
|
||||
fmt.Printf("%E", true) // ERROR "arg true for printf verb %E of wrong type"
|
||||
fmt.Printf("%f", "hi") // ERROR "arg .hi. for printf verb %f of wrong type"
|
||||
fmt.Printf("%F", 'x') // ERROR "arg 'x' for printf verb %F of wrong type"
|
||||
fmt.Printf("%g", "hi") // ERROR "arg .hi. for printf verb %g of wrong type"
|
||||
fmt.Printf("%g", imap) // ERROR "arg imap for printf verb %g of wrong type"
|
||||
fmt.Printf("%G", i) // ERROR "arg i for printf verb %G of wrong type"
|
||||
fmt.Printf("%o", x) // ERROR "arg x for printf verb %o of wrong type"
|
||||
fmt.Printf("%p", 23) // ERROR "arg 23 for printf verb %p of wrong type"
|
||||
fmt.Printf("%q", x) // ERROR "arg x for printf verb %q of wrong type"
|
||||
fmt.Printf("%s", b) // ERROR "arg b for printf verb %s of wrong type"
|
||||
fmt.Printf("%s", byte(65)) // ERROR "arg byte\(65\) for printf verb %s of wrong type"
|
||||
fmt.Printf("%t", 23) // ERROR "arg 23 for printf verb %t of wrong type"
|
||||
fmt.Printf("%U", x) // ERROR "arg x for printf verb %U of wrong type"
|
||||
fmt.Printf("%x", nil) // ERROR "arg nil for printf verb %x of wrong type"
|
||||
fmt.Printf("%X", 2.3) // ERROR "arg 2.3 for printf verb %X of wrong type"
|
||||
fmt.Printf("%s", stringerv) // ERROR "arg stringerv for printf verb %s of wrong type"
|
||||
fmt.Printf("%t", stringerv) // ERROR "arg stringerv for printf verb %t of wrong type"
|
||||
fmt.Printf("%q", notstringerv) // ERROR "arg notstringerv for printf verb %q of wrong type"
|
||||
fmt.Printf("%t", notstringerv) // ERROR "arg notstringerv for printf verb %t of wrong type"
|
||||
fmt.Printf("%t", stringerarrayv) // ERROR "arg stringerarrayv for printf verb %t of wrong type"
|
||||
fmt.Printf("%t", notstringerarrayv) // ERROR "arg notstringerarrayv for printf verb %t of wrong type"
|
||||
fmt.Printf("%q", notstringerarrayv) // ERROR "arg notstringerarrayv for printf verb %q of wrong type"
|
||||
fmt.Printf("%d", Formatter(true)) // correct (the type is responsible for formatting)
|
||||
fmt.Printf("%s", nonemptyinterface) // correct (the dynamic type of nonemptyinterface may be a stringer)
|
||||
fmt.Printf("%.*s %d %g", 3, "hi", 23, 'x') // ERROR "arg 'x' for printf verb %g of wrong type"
|
||||
fmt.Println() // not an error
|
||||
fmt.Println("%s", "hi") // ERROR "possible formatting directive in Println call"
|
||||
fmt.Printf("%s", "hi", 3) // ERROR "wrong number of args for format in Printf call"
|
||||
_ = fmt.Sprintf("%"+("s"), "hi", 3) // ERROR "wrong number of args for format in Sprintf call"
|
||||
fmt.Printf("%s%%%d", "hi", 3) // correct
|
||||
fmt.Printf("%08s", "woo") // correct
|
||||
fmt.Printf("% 8s", "woo") // correct
|
||||
fmt.Printf("%.*d", 3, 3) // correct
|
||||
fmt.Printf("%.*d", 3, 3, 3, 3) // ERROR "wrong number of args for format in Printf call.*4 args"
|
||||
fmt.Printf("%.*d", "hi", 3) // ERROR "arg .hi. for \* in printf format not of type int"
|
||||
fmt.Printf("%.*d", i, 3) // correct
|
||||
fmt.Printf("%.*d", s, 3) // ERROR "arg s for \* in printf format not of type int"
|
||||
fmt.Printf("%*%", 0.22) // ERROR "arg 0.22 for \* in printf format not of type int"
|
||||
fmt.Printf("%q %q", multi()...) // ok
|
||||
fmt.Printf("%#q", `blah`) // ok
|
||||
printf("now is the time", "buddy") // ERROR "no formatting directive"
|
||||
Printf("now is the time", "buddy") // ERROR "no formatting directive"
|
||||
Printf("hi") // ok
|
||||
const format = "%s %s\n"
|
||||
Printf(format, "hi", "there")
|
||||
Printf(format, "hi") // ERROR "missing argument for Printf..%s..: format reads arg 2, have only 1"
|
||||
Printf("%s %d %.3v %q", "str", 4) // ERROR "missing argument for Printf..%.3v..: format reads arg 3, have only 2"
|
||||
f := new(stringer)
|
||||
f.Warn(0, "%s", "hello", 3) // ERROR "possible formatting directive in Warn call"
|
||||
f.Warnf(0, "%s", "hello", 3) // ERROR "wrong number of args for format in Warnf call"
|
||||
f.Warnf(0, "%r", "hello") // ERROR "unrecognized printf verb"
|
||||
f.Warnf(0, "%#s", "hello") // ERROR "unrecognized printf flag"
|
||||
Printf("d%", 2) // ERROR "missing verb at end of format string in Printf call"
|
||||
Printf("%d", percentDV)
|
||||
Printf("%d", &percentDV)
|
||||
Printf("%d", notPercentDV) // ERROR "arg notPercentDV for printf verb %d of wrong type"
|
||||
Printf("%d", ¬PercentDV) // ERROR "arg ¬PercentDV for printf verb %d of wrong type"
|
||||
Printf("%p", ¬PercentDV) // Works regardless: we print it as a pointer.
|
||||
Printf("%s", percentSV)
|
||||
Printf("%s", &percentSV)
|
||||
// Good argument reorderings.
|
||||
Printf("%[1]d", 3)
|
||||
Printf("%[1]*d", 3, 1)
|
||||
Printf("%[2]*[1]d", 1, 3)
|
||||
Printf("%[2]*.[1]*[3]d", 2, 3, 4)
|
||||
fmt.Fprintf(os.Stderr, "%[2]*.[1]*[3]d", 2, 3, 4) // Use Fprintf to make sure we count arguments correctly.
|
||||
// Bad argument reorderings.
|
||||
Printf("%[xd", 3) // ERROR "illegal syntax for printf argument index"
|
||||
Printf("%[x]d", 3) // ERROR "illegal syntax for printf argument index"
|
||||
Printf("%[3]*s", "hi", 2) // ERROR "missing argument for Printf.* reads arg 3, have only 2"
|
||||
_ = fmt.Sprintf("%[3]d", 2) // ERROR "missing argument for Sprintf.* reads arg 3, have only 1"
|
||||
Printf("%[2]*.[1]*[3]d", 2, "hi", 4) // ERROR "arg .hi. for \* in printf format not of type int"
|
||||
Printf("%[0]s", "arg1") // ERROR "index value \[0\] for Printf.*; indexes start at 1"
|
||||
Printf("%[0]d", 1) // ERROR "index value \[0\] for Printf.*; indexes start at 1"
|
||||
// Something that satisfies the error interface.
|
||||
var e error
|
||||
fmt.Println(e.Error()) // ok
|
||||
// Something that looks like an error interface but isn't, such as the (*T).Error method
|
||||
// in the testing package.
|
||||
var et1 errorTest1
|
||||
fmt.Println(et1.Error()) // ERROR "no args in Error call"
|
||||
fmt.Println(et1.Error("hi")) // ok
|
||||
fmt.Println(et1.Error("%d", 3)) // ERROR "possible formatting directive in Error call"
|
||||
var et2 errorTest2
|
||||
et2.Error() // ERROR "no args in Error call"
|
||||
et2.Error("hi") // ok, not an error method.
|
||||
et2.Error("%d", 3) // ERROR "possible formatting directive in Error call"
|
||||
var et3 errorTest3
|
||||
et3.Error() // ok, not an error method.
|
||||
var et4 errorTest4
|
||||
et4.Error() // ok, not an error method.
|
||||
var et5 errorTest5
|
||||
et5.error() // ok, not an error method.
|
||||
// Bug: used to recur forever.
|
||||
Printf("%p %x", recursiveStructV, recursiveStructV.next)
|
||||
Printf("%p %x", recursiveStruct1V, recursiveStruct1V.next)
|
||||
Printf("%p %x", recursiveSliceV, recursiveSliceV)
|
||||
Printf("%p %x", recursiveMapV, recursiveMapV)
|
||||
// Special handling for Log.
|
||||
math.Log(3) // OK
|
||||
Log(3) // OK
|
||||
Log("%d", 3) // ERROR "possible formatting directive in Log call"
|
||||
Logf("%d", 3)
|
||||
Logf("%d", "hi") // ERROR "arg .hi. for printf verb %d of wrong type: untyped string"
|
||||
|
||||
}
|
||||
|
||||
// Printf is used by the test so we must declare it.
|
||||
func Printf(format string, args ...interface{}) {
|
||||
panic("don't call - testing only")
|
||||
}
|
||||
|
||||
// printf is used by the test so we must declare it.
|
||||
func printf(format string, args ...interface{}) {
|
||||
panic("don't call - testing only")
|
||||
}
|
||||
|
||||
// multi is used by the test.
|
||||
func multi() []interface{} {
|
||||
panic("don't call - testing only")
|
||||
}
|
||||
|
||||
type stringer float64
|
||||
|
||||
var stringerv stringer
|
||||
|
||||
func (*stringer) String() string {
|
||||
return "string"
|
||||
}
|
||||
|
||||
func (*stringer) Warn(int, ...interface{}) string {
|
||||
return "warn"
|
||||
}
|
||||
|
||||
func (*stringer) Warnf(int, string, ...interface{}) string {
|
||||
return "warnf"
|
||||
}
|
||||
|
||||
type notstringer struct {
|
||||
f float64
|
||||
}
|
||||
|
||||
var notstringerv notstringer
|
||||
|
||||
type stringerarray [4]float64
|
||||
|
||||
func (stringerarray) String() string {
|
||||
return "string"
|
||||
}
|
||||
|
||||
var stringerarrayv stringerarray
|
||||
|
||||
type notstringerarray [4]float64
|
||||
|
||||
var notstringerarrayv notstringerarray
|
||||
|
||||
var nonemptyinterface = interface {
|
||||
f()
|
||||
}(nil)
|
||||
|
||||
// A data type we can print with "%d".
|
||||
type percentDStruct struct {
|
||||
a int
|
||||
b []byte
|
||||
c *float64
|
||||
}
|
||||
|
||||
var percentDV percentDStruct
|
||||
|
||||
// A data type we cannot print correctly with "%d".
|
||||
type notPercentDStruct struct {
|
||||
a int
|
||||
b []byte
|
||||
c bool
|
||||
}
|
||||
|
||||
var notPercentDV notPercentDStruct
|
||||
|
||||
// A data type we can print with "%s".
|
||||
type percentSStruct struct {
|
||||
a string
|
||||
b []byte
|
||||
c stringerarray
|
||||
}
|
||||
|
||||
var percentSV percentSStruct
|
||||
|
||||
type recursiveStringer int
|
||||
|
||||
func (s recursiveStringer) String() string {
|
||||
_ = fmt.Sprintf("%d", s)
|
||||
_ = fmt.Sprintf("%#v", s)
|
||||
_ = fmt.Sprintf("%v", s) // ERROR "arg s for printf causes recursive call to String method"
|
||||
_ = fmt.Sprintf("%v", &s) // ERROR "arg &s for printf causes recursive call to String method"
|
||||
_ = fmt.Sprintf("%T", s) // ok; does not recursively call String
|
||||
return fmt.Sprintln(s) // ERROR "arg s for print causes recursive call to String method"
|
||||
}
|
||||
|
||||
type recursivePtrStringer int
|
||||
|
||||
func (p *recursivePtrStringer) String() string {
|
||||
_ = fmt.Sprintf("%v", *p)
|
||||
return fmt.Sprintln(p) // ERROR "arg p for print causes recursive call to String method"
|
||||
}
|
||||
|
||||
type Formatter bool
|
||||
|
||||
func (*Formatter) Format(fmt.State, rune) {
|
||||
}
|
||||
|
||||
type RecursiveSlice []RecursiveSlice
|
||||
|
||||
var recursiveSliceV = &RecursiveSlice{}
|
||||
|
||||
type RecursiveMap map[int]RecursiveMap
|
||||
|
||||
var recursiveMapV = make(RecursiveMap)
|
||||
|
||||
type RecursiveStruct struct {
|
||||
next *RecursiveStruct
|
||||
}
|
||||
|
||||
var recursiveStructV = &RecursiveStruct{}
|
||||
|
||||
type RecursiveStruct1 struct {
|
||||
next *Recursive2Struct
|
||||
}
|
||||
|
||||
type RecursiveStruct2 struct {
|
||||
next *Recursive1Struct
|
||||
}
|
||||
|
||||
var recursiveStruct1V = &RecursiveStruct1{}
|
||||
|
||||
// Fix for issue 7149: Missing return type on String method caused fault.
|
||||
func (int) String() {
|
||||
return ""
|
||||
}
|
59 cmd/vet/testdata/rangeloop.go vendored
@ -1,59 +0,0 @@
|
||||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains tests for the rangeloop checker.
|
||||
|
||||
package testdata
|
||||
|
||||
func RangeLoopTests() {
|
||||
var s []int
|
||||
for i, v := range s {
|
||||
go func() {
|
||||
println(i) // ERROR "range variable i captured by func literal"
|
||||
println(v) // ERROR "range variable v captured by func literal"
|
||||
}()
|
||||
}
|
||||
for i, v := range s {
|
||||
defer func() {
|
||||
println(i) // ERROR "range variable i captured by func literal"
|
||||
println(v) // ERROR "range variable v captured by func literal"
|
||||
}()
|
||||
}
|
||||
for i := range s {
|
||||
go func() {
|
||||
println(i) // ERROR "range variable i captured by func literal"
|
||||
}()
|
||||
}
|
||||
for _, v := range s {
|
||||
go func() {
|
||||
println(v) // ERROR "range variable v captured by func literal"
|
||||
}()
|
||||
}
|
||||
for i, v := range s {
|
||||
go func() {
|
||||
println(i, v)
|
||||
}()
|
||||
println("unfortunately, we don't catch the error above because of this statement")
|
||||
}
|
||||
for i, v := range s {
|
||||
go func(i, v int) {
|
||||
println(i, v)
|
||||
}(i, v)
|
||||
}
|
||||
for i, v := range s {
|
||||
i, v := i, v
|
||||
go func() {
|
||||
println(i, v)
|
||||
}()
|
||||
}
|
||||
// If the key of the range statement is not an identifier
|
||||
// the code should not panic (it used to).
|
||||
var x [2]int
|
||||
var f int
|
||||
for x[0], f = range s {
|
||||
go func() {
|
||||
_ = f // ERROR "range variable f captured by func literal"
|
||||
}()
|
||||
}
|
||||
}
|
54 cmd/vet/testdata/shadow.go vendored
@ -1,54 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains tests for the shadowed variable checker.
|
||||
// Some of these errors are caught by the compiler (shadowed return parameters for example)
|
||||
// but are nonetheless useful tests.
|
||||
|
||||
package testdata
|
||||
|
||||
import "os"
|
||||
|
||||
func ShadowRead(f *os.File, buf []byte) (err error) {
|
||||
var x int
|
||||
if f != nil {
|
||||
err := 3 // OK - different type.
|
||||
_ = err
|
||||
}
|
||||
if f != nil {
|
||||
_, err := f.Read(buf) // ERROR "declaration of err shadows declaration at testdata/shadow.go:13"
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
i := 3 // OK
|
||||
_ = i
|
||||
}
|
||||
if f != nil {
|
||||
var _, err = f.Read(buf) // ERROR "declaration of err shadows declaration at testdata/shadow.go:13"
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
for i := 0; i < 10; i++ {
|
||||
i := i // OK: obviously intentional idiomatic redeclaration
|
||||
go func() {
|
||||
println(i)
|
||||
}()
|
||||
}
|
||||
var shadowTemp interface{}
|
||||
switch shadowTemp := shadowTemp.(type) { // OK: obviously intentional idiomatic redeclaration
|
||||
case int:
|
||||
println("OK")
|
||||
_ = shadowTemp
|
||||
}
|
||||
if shadowTemp := shadowTemp; true { // OK: obviously intentional idiomatic redeclaration
|
||||
var f *os.File // OK because f is not mentioned later in the function.
|
||||
// The declaration of x is a shadow because x is mentioned below.
|
||||
var x int // ERROR "declaration of x shadows declaration at testdata/shadow.go:14"
|
||||
_, _, _ = x, f, shadowTemp
|
||||
}
|
||||
// Use a couple of variables to trigger shadowing errors.
|
||||
_, _ = err, x
|
||||
return
|
||||
}
|
78 cmd/vet/testdata/shift.go vendored
@ -1,78 +0,0 @@
|
||||
// Copyright 2014 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains tests for the suspicious shift checker.
|
||||
|
||||
package testdata
|
||||
|
||||
func ShiftTest() {
|
||||
var i8 int8
|
||||
_ = i8 << 7
|
||||
_ = (i8 + 1) << 8 // ERROR "\(i8 \+ 1\) too small for shift of 8"
|
||||
_ = i8 << (7 + 1) // ERROR "i8 too small for shift of 8"
|
||||
_ = i8 >> 8 // ERROR "i8 too small for shift of 8"
|
||||
i8 <<= 8 // ERROR "i8 too small for shift of 8"
|
||||
i8 >>= 8 // ERROR "i8 too small for shift of 8"
|
||||
var i16 int16
|
||||
_ = i16 << 15
|
||||
_ = i16 << 16 // ERROR "i16 too small for shift of 16"
|
||||
_ = i16 >> 16 // ERROR "i16 too small for shift of 16"
|
||||
i16 <<= 16 // ERROR "i16 too small for shift of 16"
|
||||
i16 >>= 16 // ERROR "i16 too small for shift of 16"
|
||||
var i32 int32
|
||||
_ = i32 << 31
|
||||
_ = i32 << 32 // ERROR "i32 too small for shift of 32"
|
||||
_ = i32 >> 32 // ERROR "i32 too small for shift of 32"
|
||||
i32 <<= 32 // ERROR "i32 too small for shift of 32"
|
||||
i32 >>= 32 // ERROR "i32 too small for shift of 32"
|
||||
var i64 int64
|
||||
_ = i64 << 63
|
||||
_ = i64 << 64 // ERROR "i64 too small for shift of 64"
|
||||
_ = i64 >> 64 // ERROR "i64 too small for shift of 64"
|
||||
i64 <<= 64 // ERROR "i64 too small for shift of 64"
|
||||
i64 >>= 64 // ERROR "i64 too small for shift of 64"
|
||||
var u8 uint8
|
||||
_ = u8 << 7
|
||||
_ = u8 << 8 // ERROR "u8 too small for shift of 8"
|
||||
_ = u8 >> 8 // ERROR "u8 too small for shift of 8"
|
||||
u8 <<= 8 // ERROR "u8 too small for shift of 8"
|
||||
u8 >>= 8 // ERROR "u8 too small for shift of 8"
|
||||
var u16 uint16
|
||||
_ = u16 << 15
|
||||
_ = u16 << 16 // ERROR "u16 too small for shift of 16"
|
||||
_ = u16 >> 16 // ERROR "u16 too small for shift of 16"
|
||||
u16 <<= 16 // ERROR "u16 too small for shift of 16"
|
||||
u16 >>= 16 // ERROR "u16 too small for shift of 16"
|
||||
var u32 uint32
|
||||
_ = u32 << 31
|
||||
_ = u32 << 32 // ERROR "u32 too small for shift of 32"
|
||||
_ = u32 >> 32 // ERROR "u32 too small for shift of 32"
|
||||
u32 <<= 32 // ERROR "u32 too small for shift of 32"
|
||||
u32 >>= 32 // ERROR "u32 too small for shift of 32"
|
||||
var u64 uint64
|
||||
_ = u64 << 63
|
||||
_ = u64 << 64 // ERROR "u64 too small for shift of 64"
|
||||
_ = u64 >> 64 // ERROR "u64 too small for shift of 64"
|
||||
u64 <<= 64 // ERROR "u64 too small for shift of 64"
|
||||
u64 >>= 64 // ERROR "u64 too small for shift of 64"
|
||||
_ = u64 << u64 // Non-constant shifts should succeed.
|
||||
var i int
|
||||
_ = i << 31
|
||||
_ = i << 32 // ERROR "i might be too small for shift of 32"
|
||||
_ = i >> 32 // ERROR "i might be too small for shift of 32"
|
||||
i <<= 32 // ERROR "i might be too small for shift of 32"
|
||||
i >>= 32 // ERROR "i might be too small for shift of 32"
|
||||
var u uint
|
||||
_ = u << 31
|
||||
_ = u << 32 // ERROR "u might be too small for shift of 32"
|
||||
_ = u >> 32 // ERROR "u might be too small for shift of 32"
|
||||
u <<= 32 // ERROR "u might be too small for shift of 32"
|
||||
u >>= 32 // ERROR "u might be too small for shift of 32"
|
||||
var p uintptr
|
||||
_ = p << 31
|
||||
_ = p << 32 // ERROR "p might be too small for shift of 32"
|
||||
_ = p >> 32 // ERROR "p might be too small for shift of 32"
|
||||
p <<= 32 // ERROR "p might be too small for shift of 32"
|
||||
p >>= 32 // ERROR "p might be too small for shift of 32"
|
||||
}
|
36 cmd/vet/testdata/structtag.go vendored
@ -1,36 +0,0 @@
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// This file contains the test for canonical struct tags.

package testdata

type StructTagTest struct {
	A   int "hello"           // ERROR "not compatible with reflect.StructTag.Get: bad syntax for struct tag pair"
	B   int "\tx:\"y\""       // ERROR "not compatible with reflect.StructTag.Get: bad syntax for struct tag key"
	C   int "x:\"y\"\tx:\"y\"" // ERROR "not compatible with reflect.StructTag.Get"
	D   int "x:`y`"           // ERROR "not compatible with reflect.StructTag.Get: bad syntax for struct tag value"
	E   int "ct\brl:\"char\"" // ERROR "not compatible with reflect.StructTag.Get: bad syntax for struct tag pair"
	F   int `:"emptykey"`     // ERROR "not compatible with reflect.StructTag.Get: bad syntax for struct tag key"
	G   int `x:"noEndQuote`   // ERROR "not compatible with reflect.StructTag.Get: bad syntax for struct tag value"
	H   int `x:"trunc\x0"`    // ERROR "not compatible with reflect.StructTag.Get: bad syntax for struct tag value"
	OK0 int `x:"y" u:"v" w:""`
	OK1 int `x:"y:z" u:"v" w:""` // note multiple colons.
	OK2 int "k0:\"values contain spaces\" k1:\"literal\ttabs\" k2:\"and\\tescaped\\tabs\""
	OK3 int `under_scores:"and" CAPS:"ARE_OK"`
}

type UnexportedEncodingTagTest struct {
	x int `json:"xx"` // ERROR "struct field x has json tag but is not exported"
	y int `xml:"yy"`  // ERROR "struct field y has xml tag but is not exported"
	z int
	A int `json:"aa" xml:"bb"`
}

type unexp struct{}

type JSONEmbeddedField struct {
	UnexportedEncodingTagTest `is:"embedded"`
	unexp                     `is:"embedded,notexported" json:"unexp"` // OK for now, see issue 7363
}
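(Editor's illustration, not part of the deleted file: the malformed tags above are exactly the ones reflect.StructTag.Get cannot parse, which is the behaviour the struct-tag check mirrors. The struct and field names below are hypothetical.)

package main

import (
	"fmt"
	"reflect"
)

type user struct {
	Name string `json:"name" xml:"nm"`
}

func main() {
	f, _ := reflect.TypeOf(user{}).FieldByName("Name")
	// Well-formed key:"value" pairs are retrievable by key.
	fmt.Println(f.Tag.Get("json"), f.Tag.Get("xml")) // name nm

	// A malformed tag (missing end quote) silently yields "" from Get,
	// which is why vet reports it at analysis time instead.
	fmt.Println(reflect.StructTag(`x:"noEndQuote`).Get("x") == "") // true
}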
10 cmd/vet/testdata/tagtest/file1.go vendored
@@ -1,10 +0,0 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build testtag

package main

func main() {
}
10 cmd/vet/testdata/tagtest/file2.go vendored
@@ -1,10 +0,0 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build !testtag

package main

func ignore() {
}
61 cmd/vet/testdata/unsafeptr.go vendored
@@ -1,61 +0,0 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package testdata

import (
	"reflect"
	"unsafe"
)

func f() {
	var x unsafe.Pointer
	var y uintptr
	x = unsafe.Pointer(y) // ERROR "possible misuse of unsafe.Pointer"
	y = uintptr(x)

	// only allowed pointer arithmetic is ptr +/- num.
	// num+ptr is technically okay but still flagged: write ptr+num instead.
	x = unsafe.Pointer(uintptr(x) + 1)
	x = unsafe.Pointer(1 + uintptr(x))          // ERROR "possible misuse of unsafe.Pointer"
	x = unsafe.Pointer(uintptr(x) + uintptr(x)) // ERROR "possible misuse of unsafe.Pointer"
	x = unsafe.Pointer(uintptr(x) - 1)
	x = unsafe.Pointer(1 - uintptr(x)) // ERROR "possible misuse of unsafe.Pointer"

	// certain uses of reflect are okay
	var v reflect.Value
	x = unsafe.Pointer(v.Pointer())
	x = unsafe.Pointer(v.UnsafeAddr())
	var s1 *reflect.StringHeader
	x = unsafe.Pointer(s1.Data)
	var s2 *reflect.SliceHeader
	x = unsafe.Pointer(s2.Data)
	var s3 reflect.StringHeader
	x = unsafe.Pointer(s3.Data) // ERROR "possible misuse of unsafe.Pointer"
	var s4 reflect.SliceHeader
	x = unsafe.Pointer(s4.Data) // ERROR "possible misuse of unsafe.Pointer"

	// but only in reflect
	var vv V
	x = unsafe.Pointer(vv.Pointer())    // ERROR "possible misuse of unsafe.Pointer"
	x = unsafe.Pointer(vv.UnsafeAddr()) // ERROR "possible misuse of unsafe.Pointer"
	var ss1 *StringHeader
	x = unsafe.Pointer(ss1.Data) // ERROR "possible misuse of unsafe.Pointer"
	var ss2 *SliceHeader
	x = unsafe.Pointer(ss2.Data) // ERROR "possible misuse of unsafe.Pointer"

}

type V interface {
	Pointer() uintptr
	UnsafeAddr() uintptr
}

type StringHeader struct {
	Data uintptr
}

type SliceHeader struct {
	Data uintptr
}
29 cmd/vet/testdata/unused.go vendored
@@ -1,29 +0,0 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// This file contains tests for the unusedresult checker.

package testdata

import (
	"bytes"
	"errors"
	"fmt"
)

func _() {
	fmt.Errorf("") // ERROR "result of fmt.Errorf call not used"
	_ = fmt.Errorf("")

	errors.New("") // ERROR "result of errors.New call not used"

	err := errors.New("")
	err.Error() // ERROR "result of \(error\).Error call not used"

	var buf bytes.Buffer
	buf.String() // ERROR "result of \(bytes.Buffer\).String call not used"

	fmt.Sprint("")  // ERROR "result of fmt.Sprint call not used"
	fmt.Sprintf("") // ERROR "result of fmt.Sprintf call not used"
}
369 cmd/vet/types.go
@@ -1,369 +0,0 @@
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains the pieces of the tool that use typechecking from the go/types package.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
// imports is the canonical map of imported packages we need for typechecking.
|
||||
// It is created during initialization.
|
||||
var imports = make(map[string]*types.Package)
|
||||
|
||||
var (
|
||||
errorType *types.Interface
|
||||
stringerType *types.Interface // possibly nil
|
||||
formatterType *types.Interface // possibly nil
|
||||
)
|
||||
|
||||
func init() {
|
||||
errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
|
||||
|
||||
if typ := importType("fmt", "Stringer"); typ != nil {
|
||||
stringerType = typ.Underlying().(*types.Interface)
|
||||
}
|
||||
|
||||
if typ := importType("fmt", "Formatter"); typ != nil {
|
||||
formatterType = typ.Underlying().(*types.Interface)
|
||||
}
|
||||
}
|
||||
|
||||
// importType returns the type denoted by the qualified identifier
|
||||
// path.name, and adds the respective package to the imports map
|
||||
// as a side effect. In case of an error, importType returns nil.
|
||||
func importType(path, name string) types.Type {
|
||||
pkg, err := types.DefaultImport(imports, path)
|
||||
if err != nil {
|
||||
// This can happen if the package at path hasn't been compiled yet.
|
||||
warnf("import failed: %v", err)
|
||||
return nil
|
||||
}
|
||||
if obj, ok := pkg.Scope().Lookup(name).(*types.TypeName); ok {
|
||||
return obj.Type()
|
||||
}
|
||||
warnf("invalid type name %q", name)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (pkg *Package) check(fs *token.FileSet, astFiles []*ast.File) error {
|
||||
pkg.defs = make(map[*ast.Ident]types.Object)
|
||||
pkg.uses = make(map[*ast.Ident]types.Object)
|
||||
pkg.selectors = make(map[*ast.SelectorExpr]*types.Selection)
|
||||
pkg.spans = make(map[types.Object]Span)
|
||||
pkg.types = make(map[ast.Expr]types.TypeAndValue)
|
||||
config := types.Config{
|
||||
// We provide the same packages map for all imports to ensure
|
||||
// that everybody sees identical packages for the given paths.
|
||||
Packages: imports,
|
||||
// By providing a Config with our own error function, it will continue
|
||||
// past the first error. There is no need for that function to do anything.
|
||||
Error: func(error) {},
|
||||
}
|
||||
info := &types.Info{
|
||||
Selections: pkg.selectors,
|
||||
Types: pkg.types,
|
||||
Defs: pkg.defs,
|
||||
Uses: pkg.uses,
|
||||
}
|
||||
typesPkg, err := config.Check(pkg.path, fs, astFiles, info)
|
||||
pkg.typesPkg = typesPkg
|
||||
// update spans
|
||||
for id, obj := range pkg.defs {
|
||||
pkg.growSpan(id, obj)
|
||||
}
|
||||
for id, obj := range pkg.uses {
|
||||
pkg.growSpan(id, obj)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// isStruct reports whether the composite literal c is a struct.
|
||||
// If it is not (probably a struct), it returns a printable form of the type.
|
||||
func (pkg *Package) isStruct(c *ast.CompositeLit) (bool, string) {
|
||||
// Check that the CompositeLit's type is a slice or array (which needs no field keys), if possible.
|
||||
typ := pkg.types[c].Type
|
||||
// If it's a named type, pull out the underlying type. If it's not, the Underlying
|
||||
// method returns the type itself.
|
||||
actual := typ
|
||||
if actual != nil {
|
||||
actual = actual.Underlying()
|
||||
}
|
||||
if actual == nil {
|
||||
// No type information available. Assume true, so we do the check.
|
||||
return true, ""
|
||||
}
|
||||
switch actual.(type) {
|
||||
case *types.Struct:
|
||||
return true, typ.String()
|
||||
default:
|
||||
return false, ""
|
||||
}
|
||||
}
|
||||
|
||||
// matchArgType reports an error if printf verb t is not appropriate
|
||||
// for operand arg.
|
||||
//
|
||||
// typ is used only for recursive calls; external callers must supply nil.
|
||||
//
|
||||
// (Recursion arises from the compound types {map,chan,slice} which
|
||||
// may be printed with %d etc. if that is appropriate for their element
|
||||
// types.)
|
||||
func (f *File) matchArgType(t printfArgType, typ types.Type, arg ast.Expr) bool {
|
||||
return f.matchArgTypeInternal(t, typ, arg, make(map[types.Type]bool))
|
||||
}
|
||||
|
||||
// matchArgTypeInternal is the internal version of matchArgType. It carries a map
|
||||
// remembering what types are in progress so we don't recur when faced with recursive
|
||||
// types or mutually recursive types.
|
||||
func (f *File) matchArgTypeInternal(t printfArgType, typ types.Type, arg ast.Expr, inProgress map[types.Type]bool) bool {
|
||||
// %v, %T accept any argument type.
|
||||
if t == anyType {
|
||||
return true
|
||||
}
|
||||
if typ == nil {
|
||||
// external call
|
||||
typ = f.pkg.types[arg].Type
|
||||
if typ == nil {
|
||||
return true // probably a type check problem
|
||||
}
|
||||
}
|
||||
// If the type implements fmt.Formatter, we have nothing to check.
|
||||
// formatterTyp may be nil - be conservative and check for Format method in that case.
|
||||
if formatterType != nil && types.Implements(typ, formatterType) || f.hasMethod(typ, "Format") {
|
||||
return true
|
||||
}
|
||||
// If we can use a string, might arg (dynamically) implement the Stringer or Error interface?
|
||||
if t&argString != 0 {
|
||||
if types.AssertableTo(errorType, typ) || stringerType != nil && types.AssertableTo(stringerType, typ) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
typ = typ.Underlying()
|
||||
if inProgress[typ] {
|
||||
// We're already looking at this type. The call that started it will take care of it.
|
||||
return true
|
||||
}
|
||||
inProgress[typ] = true
|
||||
|
||||
switch typ := typ.(type) {
|
||||
case *types.Signature:
|
||||
return t&argPointer != 0
|
||||
|
||||
case *types.Map:
|
||||
// Recur: map[int]int matches %d.
|
||||
return t&argPointer != 0 ||
|
||||
(f.matchArgTypeInternal(t, typ.Key(), arg, inProgress) && f.matchArgTypeInternal(t, typ.Elem(), arg, inProgress))
|
||||
|
||||
case *types.Chan:
|
||||
return t&argPointer != 0
|
||||
|
||||
case *types.Array:
|
||||
// Same as slice.
|
||||
if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 {
|
||||
return true // %s matches []byte
|
||||
}
|
||||
// Recur: []int matches %d.
|
||||
return t&argPointer != 0 || f.matchArgTypeInternal(t, typ.Elem().Underlying(), arg, inProgress)
|
||||
|
||||
case *types.Slice:
|
||||
// Same as array.
|
||||
if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 {
|
||||
return true // %s matches []byte
|
||||
}
|
||||
// Recur: []int matches %d. But watch out for
|
||||
// type T []T
|
||||
// If the element is a pointer type (type T[]*T), it's handled fine by the Pointer case below.
|
||||
return t&argPointer != 0 || f.matchArgTypeInternal(t, typ.Elem(), arg, inProgress)
|
||||
|
||||
case *types.Pointer:
|
||||
// Ugly, but dealing with an edge case: a known pointer to an invalid type,
|
||||
// probably something from a failed import.
|
||||
if typ.Elem().String() == "invalid type" {
|
||||
if *verbose {
|
||||
f.Warnf(arg.Pos(), "printf argument %v is pointer to invalid or unknown type", f.gofmt(arg))
|
||||
}
|
||||
return true // special case
|
||||
}
|
||||
// If it's actually a pointer with %p, it prints as one.
|
||||
if t == argPointer {
|
||||
return true
|
||||
}
|
||||
// If it's pointer to struct, that's equivalent in our analysis to whether we can print the struct.
|
||||
if str, ok := typ.Elem().Underlying().(*types.Struct); ok {
|
||||
return f.matchStructArgType(t, str, arg, inProgress)
|
||||
}
|
||||
// The rest can print with %p as pointers, or as integers with %x etc.
|
||||
return t&(argInt|argPointer) != 0
|
||||
|
||||
case *types.Struct:
|
||||
return f.matchStructArgType(t, typ, arg, inProgress)
|
||||
|
||||
case *types.Interface:
|
||||
// If the static type of the argument is empty interface, there's little we can do.
|
||||
// Example:
|
||||
// func f(x interface{}) { fmt.Printf("%s", x) }
|
||||
// Whether x is valid for %s depends on the type of the argument to f. One day
|
||||
// we will be able to do better. For now, we assume that empty interface is OK
|
||||
// but non-empty interfaces, with Stringer and Error handled above, are errors.
|
||||
return typ.NumMethods() == 0
|
||||
|
||||
case *types.Basic:
|
||||
switch typ.Kind() {
|
||||
case types.UntypedBool,
|
||||
types.Bool:
|
||||
return t&argBool != 0
|
||||
|
||||
case types.UntypedInt,
|
||||
types.Int,
|
||||
types.Int8,
|
||||
types.Int16,
|
||||
types.Int32,
|
||||
types.Int64,
|
||||
types.Uint,
|
||||
types.Uint8,
|
||||
types.Uint16,
|
||||
types.Uint32,
|
||||
types.Uint64,
|
||||
types.Uintptr:
|
||||
return t&argInt != 0
|
||||
|
||||
case types.UntypedFloat,
|
||||
types.Float32,
|
||||
types.Float64:
|
||||
return t&argFloat != 0
|
||||
|
||||
case types.UntypedComplex,
|
||||
types.Complex64,
|
||||
types.Complex128:
|
||||
return t&argComplex != 0
|
||||
|
||||
case types.UntypedString,
|
||||
types.String:
|
||||
return t&argString != 0
|
||||
|
||||
case types.UnsafePointer:
|
||||
return t&(argPointer|argInt) != 0
|
||||
|
||||
case types.UntypedRune:
|
||||
return t&(argInt|argRune) != 0
|
||||
|
||||
case types.UntypedNil:
|
||||
return t&argPointer != 0 // TODO?
|
||||
|
||||
case types.Invalid:
|
||||
if *verbose {
|
||||
f.Warnf(arg.Pos(), "printf argument %v has invalid or unknown type", f.gofmt(arg))
|
||||
}
|
||||
return true // Probably a type check problem.
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// hasBasicType reports whether x's type is a types.Basic with the given kind.
|
||||
func (f *File) hasBasicType(x ast.Expr, kind types.BasicKind) bool {
|
||||
t := f.pkg.types[x].Type
|
||||
if t != nil {
|
||||
t = t.Underlying()
|
||||
}
|
||||
b, ok := t.(*types.Basic)
|
||||
return ok && b.Kind() == kind
|
||||
}
|
||||
|
||||
// matchStructArgType reports whether all the elements of the struct match the expected
|
||||
// type. For instance, with "%d" all the elements must be printable with the "%d" format.
|
||||
func (f *File) matchStructArgType(t printfArgType, typ *types.Struct, arg ast.Expr, inProgress map[types.Type]bool) bool {
|
||||
for i := 0; i < typ.NumFields(); i++ {
|
||||
if !f.matchArgTypeInternal(t, typ.Field(i).Type(), arg, inProgress) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// numArgsInSignature tells how many formal arguments the function type
|
||||
// being called has.
|
||||
func (f *File) numArgsInSignature(call *ast.CallExpr) int {
|
||||
// Check the type of the function or method declaration
|
||||
typ := f.pkg.types[call.Fun].Type
|
||||
if typ == nil {
|
||||
return 0
|
||||
}
|
||||
// The type must be a signature, but be sure for safety.
|
||||
sig, ok := typ.(*types.Signature)
|
||||
if !ok {
|
||||
return 0
|
||||
}
|
||||
return sig.Params().Len()
|
||||
}
|
||||
|
||||
// isErrorMethodCall reports whether the call is of a method with signature
|
||||
// func Error() string
|
||||
// where "string" is the universe's string type. We know the method is called "Error".
|
||||
func (f *File) isErrorMethodCall(call *ast.CallExpr) bool {
|
||||
typ := f.pkg.types[call].Type
|
||||
if typ != nil {
|
||||
// We know it's called "Error", so just check the function signature
|
||||
// (stringerType has exactly one method, String).
|
||||
if stringerType != nil && stringerType.NumMethods() == 1 {
|
||||
return types.Identical(f.pkg.types[call.Fun].Type, stringerType.Method(0).Type())
|
||||
}
|
||||
}
|
||||
// Without types, we can still check by hand.
|
||||
// Is it a selector expression? Otherwise it's a function call, not a method call.
|
||||
sel, ok := call.Fun.(*ast.SelectorExpr)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
// The package is type-checked, so if there are no arguments, we're done.
|
||||
if len(call.Args) > 0 {
|
||||
return false
|
||||
}
|
||||
// Check the type of the method declaration
|
||||
typ = f.pkg.types[sel].Type
|
||||
if typ == nil {
|
||||
return false
|
||||
}
|
||||
// The type must be a signature, but be sure for safety.
|
||||
sig, ok := typ.(*types.Signature)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
// There must be a receiver for it to be a method call. Otherwise it is
|
||||
// a function, not something that satisfies the error interface.
|
||||
if sig.Recv() == nil {
|
||||
return false
|
||||
}
|
||||
// There must be no arguments. Already verified by type checking, but be thorough.
|
||||
if sig.Params().Len() > 0 {
|
||||
return false
|
||||
}
|
||||
// Finally the real questions.
|
||||
// There must be one result.
|
||||
if sig.Results().Len() != 1 {
|
||||
return false
|
||||
}
|
||||
// It must have return type "string" from the universe.
|
||||
return sig.Results().At(0).Type() == types.Typ[types.String]
|
||||
}
|
||||
|
||||
// hasMethod reports whether the type contains a method with the given name.
|
||||
// It is part of the workaround for Formatters and should be deleted when
|
||||
// that workaround is no longer necessary.
|
||||
// TODO: This could be better once issue 6259 is fixed.
|
||||
func (f *File) hasMethod(typ types.Type, name string) bool {
|
||||
// assume we have an addressable variable of type typ
|
||||
obj, _, _ := types.LookupFieldOrMethod(typ, true, f.pkg.typesPkg, name)
|
||||
_, ok := obj.(*types.Func)
|
||||
return ok
|
||||
}
|
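(Editor's sketch, not part of the deleted file: matchArgType and matchStructArgType above implement vet's printf verb/argument compatibility rules. The calls below illustrate the kinds of arguments they accept and reject; the diagnostics are paraphrased, not quoted.)

package main

import "fmt"

func main() {
	// Accepted: %d recurses into map and slice element types (the
	// *types.Map and *types.Slice cases above), so this map is fine.
	fmt.Printf("%d\n", map[int]int{1: 2})

	// Accepted: %s matches []byte via the byte-element special case.
	fmt.Printf("%s\n", []byte("hi"))

	// Rejected by vet: a string does not satisfy the integer verb %d
	// (the program still runs, printing a %!d(...) placeholder).
	fmt.Printf("%d\n", "not a number")
}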
@ -1,98 +0,0 @@
|
||||
// Copyright 2014 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Check for invalid uintptr -> unsafe.Pointer conversions.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("unsafeptr",
|
||||
"check for misuse of unsafe.Pointer",
|
||||
checkUnsafePointer,
|
||||
callExpr)
|
||||
}
|
||||
|
||||
func checkUnsafePointer(f *File, node ast.Node) {
|
||||
x := node.(*ast.CallExpr)
|
||||
if len(x.Args) != 1 {
|
||||
return
|
||||
}
|
||||
if f.hasBasicType(x.Fun, types.UnsafePointer) && f.hasBasicType(x.Args[0], types.Uintptr) && !f.isSafeUintptr(x.Args[0]) {
|
||||
f.Badf(x.Pos(), "possible misuse of unsafe.Pointer")
|
||||
}
|
||||
}
|
||||
|
||||
// isSafeUintptr reports whether x - already known to be a uintptr -
|
||||
// is safe to convert to unsafe.Pointer. It is safe if x is itself derived
|
||||
// directly from an unsafe.Pointer via conversion and pointer arithmetic
|
||||
// or if x is the result of reflect.Value.Pointer or reflect.Value.UnsafeAddr
|
||||
// or obtained from the Data field of a *reflect.SliceHeader or *reflect.StringHeader.
|
||||
func (f *File) isSafeUintptr(x ast.Expr) bool {
|
||||
switch x := x.(type) {
|
||||
case *ast.ParenExpr:
|
||||
return f.isSafeUintptr(x.X)
|
||||
|
||||
case *ast.SelectorExpr:
|
||||
switch x.Sel.Name {
|
||||
case "Data":
|
||||
// reflect.SliceHeader and reflect.StringHeader are okay,
|
||||
// but only if they are pointing at a real slice or string.
|
||||
// It's not okay to do:
|
||||
// var x SliceHeader
|
||||
// x.Data = uintptr(unsafe.Pointer(...))
|
||||
// ... use x ...
|
||||
// p := unsafe.Pointer(x.Data)
|
||||
// because in the middle the garbage collector doesn't
|
||||
// see x.Data as a pointer and so x.Data may be dangling
|
||||
// by the time we get to the conversion at the end.
|
||||
// For now approximate by saying that *Header is okay
|
||||
// but Header is not.
|
||||
pt, ok := f.pkg.types[x.X].Type.(*types.Pointer)
|
||||
if ok {
|
||||
t, ok := pt.Elem().(*types.Named)
|
||||
if ok && t.Obj().Pkg().Path() == "reflect" {
|
||||
switch t.Obj().Name() {
|
||||
case "StringHeader", "SliceHeader":
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
case *ast.CallExpr:
|
||||
switch len(x.Args) {
|
||||
case 0:
|
||||
// maybe call to reflect.Value.Pointer or reflect.Value.UnsafeAddr.
|
||||
sel, ok := x.Fun.(*ast.SelectorExpr)
|
||||
if !ok {
|
||||
break
|
||||
}
|
||||
switch sel.Sel.Name {
|
||||
case "Pointer", "UnsafeAddr":
|
||||
t, ok := f.pkg.types[sel.X].Type.(*types.Named)
|
||||
if ok && t.Obj().Pkg().Path() == "reflect" && t.Obj().Name() == "Value" {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
case 1:
|
||||
// maybe conversion of uintptr to unsafe.Pointer
|
||||
return f.hasBasicType(x.Fun, types.Uintptr) && f.hasBasicType(x.Args[0], types.UnsafePointer)
|
||||
}
|
||||
|
||||
case *ast.BinaryExpr:
|
||||
switch x.Op {
|
||||
case token.ADD, token.SUB:
|
||||
return f.isSafeUintptr(x.X) && !f.isSafeUintptr(x.Y)
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
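(Editor's sketch with hypothetical variable names, not part of the deleted file: isSafeUintptr above accepts a uintptr only when it is derived from an unsafe.Pointer, or from reflect, within the same expression. The flagged case mirrors the testdata earlier in this diff.)

package main

import "unsafe"

func main() {
	buf := [4]byte{1, 2, 3, 4}
	p := unsafe.Pointer(&buf)

	// Accepted: the uintptr is produced and consumed in one expression,
	// the ptr = unsafe.Pointer(uintptr(ptr) + offset) idiom.
	q := unsafe.Pointer(uintptr(p) + 1)

	// Flagged as possible misuse: the uintptr is stored in a variable
	// first, so the checker cannot tell where it came from.
	addr := uintptr(p) + 2
	r := unsafe.Pointer(addr)

	_, _ = q, r
}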
@ -1,94 +0,0 @@
|
||||
// Copyright 2015 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file defines the check for unused results of calls to certain
|
||||
// pure functions.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
var unusedFuncsFlag = flag.String("unusedfuncs",
|
||||
"errors.New,fmt.Errorf,fmt.Sprintf,fmt.Sprint,sort.Reverse",
|
||||
"comma-separated list of functions whose results must be used")
|
||||
|
||||
var unusedStringMethodsFlag = flag.String("unusedstringmethods",
|
||||
"Error,String",
|
||||
"comma-separated list of names of methods of type func() string whose results must be used")
|
||||
|
||||
func init() {
|
||||
register("unusedresult",
|
||||
"check for unused result of calls to functions in -unusedfuncs list and methods in -unusedstringmethods list",
|
||||
checkUnusedResult,
|
||||
exprStmt)
|
||||
}
|
||||
|
||||
// func() string
|
||||
var sigNoArgsStringResult = types.NewSignature(nil, nil,
|
||||
types.NewTuple(types.NewVar(token.NoPos, nil, "", types.Typ[types.String])),
|
||||
false)
|
||||
|
||||
var unusedFuncs = make(map[string]bool)
|
||||
var unusedStringMethods = make(map[string]bool)
|
||||
|
||||
func initUnusedFlags() {
|
||||
commaSplit := func(s string, m map[string]bool) {
|
||||
if s != "" {
|
||||
for _, name := range strings.Split(s, ",") {
|
||||
if len(name) == 0 {
|
||||
flag.Usage()
|
||||
}
|
||||
m[name] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
commaSplit(*unusedFuncsFlag, unusedFuncs)
|
||||
commaSplit(*unusedStringMethodsFlag, unusedStringMethods)
|
||||
}
|
||||
|
||||
func checkUnusedResult(f *File, n ast.Node) {
|
||||
call, ok := unparen(n.(*ast.ExprStmt).X).(*ast.CallExpr)
|
||||
if !ok {
|
||||
return // not a call statement
|
||||
}
|
||||
fun := unparen(call.Fun)
|
||||
|
||||
if f.pkg.types[fun].IsType() {
|
||||
return // a conversion, not a call
|
||||
}
|
||||
|
||||
selector, ok := fun.(*ast.SelectorExpr)
|
||||
if !ok {
|
||||
return // neither a method call nor a qualified ident
|
||||
}
|
||||
|
||||
sel, ok := f.pkg.selectors[selector]
|
||||
if ok && sel.Kind() == types.MethodVal {
|
||||
// method (e.g. foo.String())
|
||||
obj := sel.Obj().(*types.Func)
|
||||
sig := sel.Type().(*types.Signature)
|
||||
if types.Identical(sig, sigNoArgsStringResult) {
|
||||
if unusedStringMethods[obj.Name()] {
|
||||
f.Badf(call.Lparen, "result of (%s).%s call not used",
|
||||
sig.Recv().Type(), obj.Name())
|
||||
}
|
||||
}
|
||||
} else if !ok {
|
||||
// package-qualified function (e.g. fmt.Errorf)
|
||||
obj, _ := f.pkg.uses[selector.Sel]
|
||||
if obj, ok := obj.(*types.Func); ok {
|
||||
qname := obj.Pkg().Path() + "." + obj.Name()
|
||||
if unusedFuncs[qname] {
|
||||
f.Badf(call.Lparen, "result of %v call not used", qname)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
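(Editor's sketch, not part of the deleted file: the checker above recognises Error/String methods by comparing their type against sigNoArgsStringResult with types.Identical. This sketch builds the same signature with the standard library go/types, the package this commit migrates callers to; its behaviour is assumed to match the x/tools version used above.)

package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	// func() string - the shape of Error and String methods.
	want := types.NewSignature(nil, nil,
		types.NewTuple(types.NewVar(token.NoPos, nil, "", types.Typ[types.String])),
		false)

	// func() - a method of this type would not be reported.
	other := types.NewSignature(nil, nil, nil, false)

	fmt.Println(want)                         // func() string
	fmt.Println(types.Identical(want, other)) // false
	fmt.Println(types.Identical(want, want))  // true
}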
@ -1,141 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// No testdata on Android.
|
||||
|
||||
// +build !android
|
||||
|
||||
package main_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"testing"
|
||||
)
|
||||
|
||||
const (
|
||||
dataDir = "testdata"
|
||||
binary = "testvet.exe"
|
||||
)
|
||||
|
||||
func CanRun(t *testing.T) bool {
|
||||
// Plan 9 and Windows systems can't be guaranteed to have Perl and so can't run errchk.
|
||||
switch runtime.GOOS {
|
||||
case "plan9", "windows":
|
||||
t.Skip("skipping test; no Perl on %q", runtime.GOOS)
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func Build(t *testing.T) {
|
||||
// go build
|
||||
cmd := exec.Command("go", "build", "-o", binary)
|
||||
run(cmd, t)
|
||||
}
|
||||
|
||||
func Vet(t *testing.T, files []string) {
|
||||
errchk := filepath.Join(runtime.GOROOT(), "test", "errchk")
|
||||
flags := []string{
|
||||
"./" + binary,
|
||||
"-printfuncs=Warn:1,Warnf:1",
|
||||
"-test", // TODO: Delete once -shadow is part of -all.
|
||||
}
|
||||
cmd := exec.Command(errchk, append(flags, files...)...)
|
||||
if !run(cmd, t) {
|
||||
t.Fatal("vet command failed")
|
||||
}
|
||||
}
|
||||
|
||||
// Run this shell script, but do it in Go so it can be run by "go test".
|
||||
// go build -o testvet
|
||||
// $(GOROOT)/test/errchk ./testvet -shadow -printfuncs='Warn:1,Warnf:1' testdata/*.go testdata/*.s
|
||||
// rm testvet
|
||||
//
|
||||
|
||||
func TestVet(t *testing.T) {
|
||||
if !CanRun(t) {
|
||||
t.Skip("cannot run on this environment")
|
||||
}
|
||||
Build(t)
|
||||
defer os.Remove(binary)
|
||||
|
||||
// errchk ./testvet
|
||||
gos, err := filepath.Glob(filepath.Join(dataDir, "*.go"))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
asms, err := filepath.Glob(filepath.Join(dataDir, "*.s"))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
files := append(gos, asms...)
|
||||
Vet(t, files)
|
||||
}
|
||||
|
||||
func TestDivergentPackagesExamples(t *testing.T) {
|
||||
if !CanRun(t) {
|
||||
t.Skip("cannot run on this environment")
|
||||
}
|
||||
Build(t)
|
||||
defer os.Remove(binary)
|
||||
|
||||
// errchk ./testvet
|
||||
Vet(t, []string{"testdata/divergent/buf.go", "testdata/divergent/buf_test.go"})
|
||||
}
|
||||
|
||||
func TestIncompleteExamples(t *testing.T) {
|
||||
if !CanRun(t) {
|
||||
t.Skip("cannot run on this environment")
|
||||
}
|
||||
Build(t)
|
||||
defer os.Remove(binary)
|
||||
|
||||
// errchk ./testvet
|
||||
Vet(t, []string{"testdata/incomplete/examples_test.go"})
|
||||
}
|
||||
|
||||
func run(c *exec.Cmd, t *testing.T) bool {
|
||||
output, err := c.CombinedOutput()
|
||||
os.Stderr.Write(output)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
// Errchk delights by not returning non-zero status if it finds errors, so we look at the output.
|
||||
// It prints "BUG" if there is a failure.
|
||||
if !c.ProcessState.Success() {
|
||||
return false
|
||||
}
|
||||
return !bytes.Contains(output, []byte("BUG"))
|
||||
}
|
||||
|
||||
// TestTags verifies that the -tags argument controls which files to check.
|
||||
func TestTags(t *testing.T) {
|
||||
// go build
|
||||
cmd := exec.Command("go", "build", "-o", binary)
|
||||
run(cmd, t)
|
||||
|
||||
defer os.Remove(binary)
|
||||
|
||||
args := []string{
|
||||
"-tags=testtag",
|
||||
"-v", // We're going to look at the files it examines.
|
||||
"testdata/tagtest",
|
||||
}
|
||||
cmd = exec.Command("./"+binary, args...)
|
||||
output, err := cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
// file1 has testtag and file2 has !testtag.
|
||||
if !bytes.Contains(output, []byte(filepath.Join("tagtest", "file1.go"))) {
|
||||
t.Error("file1 was excluded, should be included")
|
||||
}
|
||||
if bytes.Contains(output, []byte(filepath.Join("tagtest", "file2.go"))) {
|
||||
t.Error("file2 was included, should be excluded")
|
||||
}
|
||||
}
|
@ -1,53 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package whitelist defines exceptions for the vet tool.
|
||||
package whitelist // import "golang.org/x/tools/cmd/vet/whitelist"
|
||||
|
||||
// UnkeyedLiteral are types that are actually slices, but
|
||||
// syntactically, we cannot tell whether the Typ in pkg.Typ{1, 2, 3}
|
||||
// is a slice or a struct, so we whitelist all the standard package
|
||||
// library's exported slice types.
|
||||
var UnkeyedLiteral = map[string]bool{
|
||||
/*
|
||||
find $GOROOT/src -type f | grep -v _test.go | xargs grep '^type.*\[\]' | \
|
||||
grep -v ' map\[' | sed 's,/[^/]*go.type,,' | sed 's,.*src/,,' | \
|
||||
sed 's, ,.,' | sed 's, .*,,' | grep -v '\.[a-z]' | \
|
||||
sort | awk '{ print "\"" $0 "\": true," }'
|
||||
*/
|
||||
"crypto/x509/pkix.RDNSequence": true,
|
||||
"crypto/x509/pkix.RelativeDistinguishedNameSET": true,
|
||||
"database/sql.RawBytes": true,
|
||||
"debug/macho.LoadBytes": true,
|
||||
"encoding/asn1.ObjectIdentifier": true,
|
||||
"encoding/asn1.RawContent": true,
|
||||
"encoding/json.RawMessage": true,
|
||||
"encoding/xml.CharData": true,
|
||||
"encoding/xml.Comment": true,
|
||||
"encoding/xml.Directive": true,
|
||||
"go/scanner.ErrorList": true,
|
||||
"image/color.Palette": true,
|
||||
"net.HardwareAddr": true,
|
||||
"net.IP": true,
|
||||
"net.IPMask": true,
|
||||
"sort.Float64Slice": true,
|
||||
"sort.IntSlice": true,
|
||||
"sort.StringSlice": true,
|
||||
"unicode.SpecialCase": true,
|
||||
|
||||
// These image and image/color struct types are frozen. We will never add fields to them.
|
||||
"image/color.Alpha16": true,
|
||||
"image/color.Alpha": true,
|
||||
"image/color.CMYK": true,
|
||||
"image/color.Gray16": true,
|
||||
"image/color.Gray": true,
|
||||
"image/color.NRGBA64": true,
|
||||
"image/color.NRGBA": true,
|
||||
"image/color.RGBA64": true,
|
||||
"image/color.RGBA": true,
|
||||
"image/color.YCbCr": true,
|
||||
"image.Point": true,
|
||||
"image.Rectangle": true,
|
||||
"image.Uniform": true,
|
||||
}
|
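(Editor's illustration, not part of the deleted file: UnkeyedLiteral above exempts exported standard-library types that are really slices from the unkeyed composite-literal check. http.Client below stands in for any cross-package struct that should use keyed fields.)

package main

import (
	"encoding/json"
	"fmt"
	"net"
	"net/http"
	"time"
)

func main() {
	// Whitelisted above: these exported names are slice types, so unkeyed
	// literals are idiomatic and the check stays quiet.
	raw := json.RawMessage{'{', '}'}
	ip := net.IP{127, 0, 0, 1}

	// A genuine struct from another package should use keyed fields; an
	// unkeyed http.Client literal is what the composite-literal check
	// exists to flag.
	client := http.Client{Timeout: 5 * time.Second}

	fmt.Println(raw, ip, client.Timeout)
}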
@ -1,126 +0,0 @@
|
||||
// Copyright 2014 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build !go1.5
|
||||
|
||||
// Package cha computes the call graph of a Go program using the Class
|
||||
// Hierarchy Analysis (CHA) algorithm.
|
||||
//
|
||||
// CHA was first described in "Optimization of Object-Oriented Programs
|
||||
// Using Static Class Hierarchy Analysis", Jeffrey Dean, David Grove,
|
||||
// and Craig Chambers, ECOOP'95.
|
||||
//
|
||||
// CHA is related to RTA (see go/callgraph/rta); the difference is that
|
||||
// CHA conservatively computes the entire "implements" relation between
|
||||
// interfaces and concrete types ahead of time, whereas RTA uses dynamic
|
||||
// programming to construct it on the fly as it encounters new functions
|
||||
// reachable from main. CHA may thus include spurious call edges for
|
||||
// types that haven't been instantiated yet, or types that are never
|
||||
// instantiated.
|
||||
//
|
||||
// Since CHA conservatively assumes that all functions are address-taken
|
||||
// and all concrete types are put into interfaces, it is sound to run on
|
||||
// partial programs, such as libraries without a main or test function.
|
||||
//
|
||||
package cha // import "golang.org/x/tools/go/callgraph/cha"
|
||||
|
||||
import (
|
||||
"golang.org/x/tools/go/callgraph"
|
||||
"golang.org/x/tools/go/ssa"
|
||||
"golang.org/x/tools/go/ssa/ssautil"
|
||||
"golang.org/x/tools/go/types"
|
||||
"golang.org/x/tools/go/types/typeutil"
|
||||
)
|
||||
|
||||
// CallGraph computes the call graph of the specified program using the
|
||||
// Class Hierarchy Analysis algorithm.
|
||||
//
|
||||
func CallGraph(prog *ssa.Program) *callgraph.Graph {
|
||||
cg := callgraph.New(nil) // TODO(adonovan) eliminate concept of rooted callgraph
|
||||
|
||||
allFuncs := ssautil.AllFunctions(prog)
|
||||
|
||||
// funcsBySig contains all functions, keyed by signature. It is
|
||||
// the effective set of address-taken functions used to resolve
|
||||
// a dynamic call of a particular signature.
|
||||
var funcsBySig typeutil.Map // value is []*ssa.Function
|
||||
|
||||
// methodsByName contains all methods,
|
||||
// grouped by name for efficient lookup.
|
||||
methodsByName := make(map[string][]*ssa.Function)
|
||||
|
||||
// methodsMemo records, for every abstract method call call I.f on
|
||||
// interface type I, the set of concrete methods C.f of all
|
||||
// types C that satisfy interface I.
|
||||
methodsMemo := make(map[*types.Func][]*ssa.Function)
|
||||
lookupMethods := func(m *types.Func) []*ssa.Function {
|
||||
methods, ok := methodsMemo[m]
|
||||
if !ok {
|
||||
I := m.Type().(*types.Signature).Recv().Type().Underlying().(*types.Interface)
|
||||
for _, f := range methodsByName[m.Name()] {
|
||||
C := f.Signature.Recv().Type() // named or *named
|
||||
if types.Implements(C, I) {
|
||||
methods = append(methods, f)
|
||||
}
|
||||
}
|
||||
methodsMemo[m] = methods
|
||||
}
|
||||
return methods
|
||||
}
|
||||
|
||||
for f := range allFuncs {
|
||||
if f.Signature.Recv() == nil {
|
||||
// Package initializers can never be address-taken.
|
||||
if f.Name() == "init" && f.Synthetic == "package initializer" {
|
||||
continue
|
||||
}
|
||||
funcs, _ := funcsBySig.At(f.Signature).([]*ssa.Function)
|
||||
funcs = append(funcs, f)
|
||||
funcsBySig.Set(f.Signature, funcs)
|
||||
} else {
|
||||
methodsByName[f.Name()] = append(methodsByName[f.Name()], f)
|
||||
}
|
||||
}
|
||||
|
||||
addEdge := func(fnode *callgraph.Node, site ssa.CallInstruction, g *ssa.Function) {
|
||||
gnode := cg.CreateNode(g)
|
||||
callgraph.AddEdge(fnode, site, gnode)
|
||||
}
|
||||
|
||||
addEdges := func(fnode *callgraph.Node, site ssa.CallInstruction, callees []*ssa.Function) {
|
||||
// Because every call to a highly polymorphic and
|
||||
// frequently used abstract method such as
|
||||
// (io.Writer).Write is assumed to call every concrete
|
||||
// Write method in the program, the call graph can
|
||||
// contain a lot of duplication.
|
||||
//
|
||||
// TODO(adonovan): opt: consider factoring the callgraph
|
||||
// API so that the Callers component of each edge is a
|
||||
// slice of nodes, not a singleton.
|
||||
for _, g := range callees {
|
||||
addEdge(fnode, site, g)
|
||||
}
|
||||
}
|
||||
|
||||
for f := range allFuncs {
|
||||
fnode := cg.CreateNode(f)
|
||||
for _, b := range f.Blocks {
|
||||
for _, instr := range b.Instrs {
|
||||
if site, ok := instr.(ssa.CallInstruction); ok {
|
||||
call := site.Common()
|
||||
if call.IsInvoke() {
|
||||
addEdges(fnode, site, lookupMethods(call.Method))
|
||||
} else if g := call.StaticCallee(); g != nil {
|
||||
addEdge(fnode, site, g)
|
||||
} else if _, ok := call.Value.(*ssa.Builtin); !ok {
|
||||
callees, _ := funcsBySig.At(call.Signature()).([]*ssa.Function)
|
||||
addEdges(fnode, site, callees)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return cg
|
||||
}
|
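(Editor's sketch, not part of the deleted file: the package comment above describes CHA's conservatism - every concrete type whose method set satisfies an interface is treated as a possible callee at every invoke-mode call of that interface.)

package main

import "fmt"

type Greeter interface{ Greet() string }

type English struct{}
type French struct{}

func (English) Greet() string { return "hello" }
func (French) Greet() string  { return "bonjour" }

func announce(g Greeter) { fmt.Println(g.Greet()) }

func main() {
	// Only English ever reaches the interface at run time, but CHA adds
	// call edges from g.Greet() to both English.Greet and French.Greet,
	// because it precomputes the whole "implements" relation instead of
	// tracking which types actually flow into Greeter.
	announce(English{})
}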
@ -1,112 +0,0 @@
|
||||
// Copyright 2014 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build !go1.5
|
||||
|
||||
// No testdata on Android.
|
||||
|
||||
// +build !android
|
||||
|
||||
package cha_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"io/ioutil"
|
||||
"sort"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"golang.org/x/tools/go/callgraph"
|
||||
"golang.org/x/tools/go/callgraph/cha"
|
||||
"golang.org/x/tools/go/loader"
|
||||
"golang.org/x/tools/go/ssa/ssautil"
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
var inputs = []string{
|
||||
"testdata/func.go",
|
||||
"testdata/iface.go",
|
||||
"testdata/recv.go",
|
||||
}
|
||||
|
||||
func expectation(f *ast.File) (string, token.Pos) {
|
||||
for _, c := range f.Comments {
|
||||
text := strings.TrimSpace(c.Text())
|
||||
if t := strings.TrimPrefix(text, "WANT:\n"); t != text {
|
||||
return t, c.Pos()
|
||||
}
|
||||
}
|
||||
return "", token.NoPos
|
||||
}
|
||||
|
||||
// TestCHA runs CHA on each file in inputs, prints the dynamic edges of
|
||||
// the call graph, and compares it with the golden results embedded in
|
||||
// the WANT comment at the end of the file.
|
||||
//
|
||||
func TestCHA(t *testing.T) {
|
||||
for _, filename := range inputs {
|
||||
content, err := ioutil.ReadFile(filename)
|
||||
if err != nil {
|
||||
t.Errorf("couldn't read file '%s': %s", filename, err)
|
||||
continue
|
||||
}
|
||||
|
||||
conf := loader.Config{
|
||||
ParserMode: parser.ParseComments,
|
||||
}
|
||||
f, err := conf.ParseFile(filename, content)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
continue
|
||||
}
|
||||
|
||||
want, pos := expectation(f)
|
||||
if pos == token.NoPos {
|
||||
t.Errorf("No WANT: comment in %s", filename)
|
||||
continue
|
||||
}
|
||||
|
||||
conf.CreateFromFiles("main", f)
|
||||
iprog, err := conf.Load()
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
continue
|
||||
}
|
||||
|
||||
prog := ssautil.CreateProgram(iprog, 0)
|
||||
mainPkg := prog.Package(iprog.Created[0].Pkg)
|
||||
prog.Build()
|
||||
|
||||
cg := cha.CallGraph(prog)
|
||||
|
||||
if got := printGraph(cg, mainPkg.Pkg); got != want {
|
||||
t.Errorf("%s: got:\n%s\nwant:\n%s",
|
||||
prog.Fset.Position(pos), got, want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func printGraph(cg *callgraph.Graph, from *types.Package) string {
|
||||
var edges []string
|
||||
callgraph.GraphVisitEdges(cg, func(e *callgraph.Edge) error {
|
||||
if strings.Contains(e.Description(), "dynamic") {
|
||||
edges = append(edges, fmt.Sprintf("%s --> %s",
|
||||
e.Caller.Func.RelString(from),
|
||||
e.Callee.Func.RelString(from)))
|
||||
}
|
||||
return nil
|
||||
})
|
||||
sort.Strings(edges)
|
||||
|
||||
var buf bytes.Buffer
|
||||
buf.WriteString("Dynamic calls\n")
|
||||
for _, edge := range edges {
|
||||
fmt.Fprintf(&buf, " %s\n", edge)
|
||||
}
|
||||
return strings.TrimSpace(buf.String())
|
||||
}
|
@ -1,461 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build !go1.5
|
||||
|
||||
// This package provides Rapid Type Analysis (RTA) for Go, a fast
|
||||
// algorithm for call graph construction and discovery of reachable code
|
||||
// (and hence dead code) and runtime types. The algorithm was first
|
||||
// described in:
|
||||
//
|
||||
// David F. Bacon and Peter F. Sweeney. 1996.
|
||||
// Fast static analysis of C++ virtual function calls. (OOPSLA '96)
|
||||
// http://doi.acm.org/10.1145/236337.236371
|
||||
//
|
||||
// The algorithm uses dynamic programming to tabulate the cross-product
|
||||
// of the set of known "address taken" functions with the set of known
|
||||
// dynamic calls of the same type. As each new address-taken function
|
||||
// is discovered, call graph edges are added from each known callsite,
|
||||
// and as each new call site is discovered, call graph edges are added
|
||||
// from it to each known address-taken function.
|
||||
//
|
||||
// A similar approach is used for dynamic calls via interfaces: it
|
||||
// tabulates the cross-product of the set of known "runtime types",
|
||||
// i.e. types that may appear in an interface value, or be derived from
|
||||
// one via reflection, with the set of known "invoke"-mode dynamic
|
||||
// calls. As each new "runtime type" is discovered, call edges are
|
||||
// added from the known call sites, and as each new call site is
|
||||
// discovered, call graph edges are added to each compatible
|
||||
// method.
|
||||
//
|
||||
// In addition, we must consider all exported methods of any runtime type
|
||||
// as reachable, since they may be called via reflection.
|
||||
//
|
||||
// Each time a newly added call edge causes a new function to become
|
||||
// reachable, the code of that function is analyzed for more call sites,
|
||||
// address-taken functions, and runtime types. The process continues
|
||||
// until a fixed point is achieved.
|
||||
//
|
||||
// The resulting call graph is less precise than one produced by pointer
|
||||
// analysis, but the algorithm is much faster. For example, running the
|
||||
// cmd/callgraph tool on its own source takes ~2.1s for RTA and ~5.4s
|
||||
// for points-to analysis.
|
||||
//
|
||||
package rta // import "golang.org/x/tools/go/callgraph/rta"
|
||||
|
||||
// TODO(adonovan): test it by connecting it to the interpreter and
|
||||
// replacing all "unreachable" functions by a special intrinsic, and
|
||||
// ensure that that intrinsic is never called.
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"golang.org/x/tools/go/callgraph"
|
||||
"golang.org/x/tools/go/ssa"
|
||||
"golang.org/x/tools/go/types"
|
||||
"golang.org/x/tools/go/types/typeutil"
|
||||
)
|
||||
|
||||
// A Result holds the results of Rapid Type Analysis, which includes the
|
||||
// set of reachable functions/methods, runtime types, and the call graph.
|
||||
//
|
||||
type Result struct {
|
||||
// CallGraph is the discovered callgraph.
|
||||
// It does not include edges for calls made via reflection.
|
||||
CallGraph *callgraph.Graph
|
||||
|
||||
// Reachable contains the set of reachable functions and methods.
|
||||
// This includes exported methods of runtime types, since
|
||||
// they may be accessed via reflection.
|
||||
// The value indicates whether the function is address-taken.
|
||||
//
|
||||
// (We wrap the bool in a struct to avoid inadvertent use of
|
||||
// "if Reachable[f] {" to test for set membership.)
|
||||
Reachable map[*ssa.Function]struct{ AddrTaken bool }
|
||||
|
||||
// RuntimeTypes contains the set of types that are needed at
|
||||
// runtime, for interfaces or reflection.
|
||||
//
|
||||
// The value indicates whether the type is inaccessible to reflection.
|
||||
// Consider:
|
||||
// type A struct{B}
|
||||
// fmt.Println(new(A))
|
||||
// Types *A, A and B are accessible to reflection, but the unnamed
|
||||
// type struct{B} is not.
|
||||
RuntimeTypes typeutil.Map
|
||||
}
|
||||
|
||||
// Working state of the RTA algorithm.
|
||||
type rta struct {
|
||||
result *Result
|
||||
|
||||
prog *ssa.Program
|
||||
|
||||
worklist []*ssa.Function // list of functions to visit
|
||||
|
||||
// addrTakenFuncsBySig contains all address-taken *Functions, grouped by signature.
|
||||
// Keys are *types.Signature, values are map[*ssa.Function]bool sets.
|
||||
addrTakenFuncsBySig typeutil.Map
|
||||
|
||||
// dynCallSites contains all dynamic "call"-mode call sites, grouped by signature.
|
||||
// Keys are *types.Signature, values are unordered []ssa.CallInstruction.
|
||||
dynCallSites typeutil.Map
|
||||
|
||||
// invokeSites contains all "invoke"-mode call sites, grouped by interface.
|
||||
// Keys are *types.Interface (never *types.Named),
|
||||
// Values are unordered []ssa.CallInstruction sets.
|
||||
invokeSites typeutil.Map
|
||||
|
||||
// The following two maps together define the subset of the
|
||||
// m:n "implements" relation needed by the algorithm.
|
||||
|
||||
// concreteTypes maps each concrete type to the set of interfaces that it implements.
|
||||
// Keys are types.Type, values are unordered []*types.Interface.
|
||||
// Only concrete types used as MakeInterface operands are included.
|
||||
concreteTypes typeutil.Map
|
||||
|
||||
// interfaceTypes maps each interface type to
|
||||
// the set of concrete types that implement it.
|
||||
// Keys are *types.Interface, values are unordered []types.Type.
|
||||
// Only interfaces used in "invoke"-mode CallInstructions are included.
|
||||
interfaceTypes typeutil.Map
|
||||
}
|
||||
|
||||
// addReachable marks a function as potentially callable at run-time,
|
||||
// and ensures that it gets processed.
|
||||
func (r *rta) addReachable(f *ssa.Function, addrTaken bool) {
|
||||
reachable := r.result.Reachable
|
||||
n := len(reachable)
|
||||
v := reachable[f]
|
||||
if addrTaken {
|
||||
v.AddrTaken = true
|
||||
}
|
||||
reachable[f] = v
|
||||
if len(reachable) > n {
|
||||
// First time seeing f. Add it to the worklist.
|
||||
r.worklist = append(r.worklist, f)
|
||||
}
|
||||
}
|
||||
|
||||
// addEdge adds the specified call graph edge, and marks it reachable.
|
||||
// addrTaken indicates whether to mark the callee as "address-taken".
|
||||
func (r *rta) addEdge(site ssa.CallInstruction, callee *ssa.Function, addrTaken bool) {
|
||||
r.addReachable(callee, addrTaken)
|
||||
|
||||
if g := r.result.CallGraph; g != nil {
|
||||
if site.Parent() == nil {
|
||||
panic(site)
|
||||
}
|
||||
from := g.CreateNode(site.Parent())
|
||||
to := g.CreateNode(callee)
|
||||
callgraph.AddEdge(from, site, to)
|
||||
}
|
||||
}
|
||||
|
||||
// ---------- addrTakenFuncs × dynCallSites ----------
|
||||
|
||||
// visitAddrTakenFunc is called each time we encounter an address-taken function f.
|
||||
func (r *rta) visitAddrTakenFunc(f *ssa.Function) {
|
||||
// Create two-level map (Signature -> Function -> bool).
|
||||
S := f.Signature
|
||||
funcs, _ := r.addrTakenFuncsBySig.At(S).(map[*ssa.Function]bool)
|
||||
if funcs == nil {
|
||||
funcs = make(map[*ssa.Function]bool)
|
||||
r.addrTakenFuncsBySig.Set(S, funcs)
|
||||
}
|
||||
if !funcs[f] {
|
||||
// First time seeing f.
|
||||
funcs[f] = true
|
||||
|
||||
// If we've seen any dyncalls of this type, mark it reachable,
|
||||
// and add call graph edges.
|
||||
sites, _ := r.dynCallSites.At(S).([]ssa.CallInstruction)
|
||||
for _, site := range sites {
|
||||
r.addEdge(site, f, true)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// visitDynCall is called each time we encounter a dynamic "call"-mode call.
|
||||
func (r *rta) visitDynCall(site ssa.CallInstruction) {
|
||||
S := site.Common().Signature()
|
||||
|
||||
// Record the call site.
|
||||
sites, _ := r.dynCallSites.At(S).([]ssa.CallInstruction)
|
||||
r.dynCallSites.Set(S, append(sites, site))
|
||||
|
||||
// For each function of signature S that we know is address-taken,
|
||||
// mark it reachable. We'll add the callgraph edges later.
|
||||
funcs, _ := r.addrTakenFuncsBySig.At(S).(map[*ssa.Function]bool)
|
||||
for g := range funcs {
|
||||
r.addEdge(site, g, true)
|
||||
}
|
||||
}
|
||||
|
||||
// ---------- concrete types × invoke sites ----------
|
||||
|
||||
// addInvokeEdge is called for each new pair (site, C) in the matrix.
|
||||
func (r *rta) addInvokeEdge(site ssa.CallInstruction, C types.Type) {
|
||||
// Ascertain the concrete method of C to be called.
|
||||
imethod := site.Common().Method
|
||||
cmethod := r.prog.MethodValue(r.prog.MethodSets.MethodSet(C).Lookup(imethod.Pkg(), imethod.Name()))
|
||||
r.addEdge(site, cmethod, true)
|
||||
}
|
||||
|
||||
// visitInvoke is called each time the algorithm encounters an "invoke"-mode call.
|
||||
func (r *rta) visitInvoke(site ssa.CallInstruction) {
|
||||
I := site.Common().Value.Type().Underlying().(*types.Interface)
|
||||
|
||||
// Record the invoke site.
|
||||
sites, _ := r.invokeSites.At(I).([]ssa.CallInstruction)
|
||||
r.invokeSites.Set(I, append(sites, site))
|
||||
|
||||
// Add callgraph edge for each existing
|
||||
// address-taken concrete type implementing I.
|
||||
for _, C := range r.implementations(I) {
|
||||
r.addInvokeEdge(site, C)
|
||||
}
|
||||
}
|
||||
|
||||
// ---------- main algorithm ----------
|
||||
|
||||
// visitFunc processes function f.
|
||||
func (r *rta) visitFunc(f *ssa.Function) {
|
||||
var space [32]*ssa.Value // preallocate space for common case
|
||||
|
||||
for _, b := range f.Blocks {
|
||||
for _, instr := range b.Instrs {
|
||||
rands := instr.Operands(space[:0])
|
||||
|
||||
switch instr := instr.(type) {
|
||||
case ssa.CallInstruction:
|
||||
call := instr.Common()
|
||||
if call.IsInvoke() {
|
||||
r.visitInvoke(instr)
|
||||
} else if g := call.StaticCallee(); g != nil {
|
||||
r.addEdge(instr, g, false)
|
||||
} else if _, ok := call.Value.(*ssa.Builtin); !ok {
|
||||
r.visitDynCall(instr)
|
||||
}
|
||||
|
||||
// Ignore the call-position operand when
|
||||
// looking for address-taken Functions.
|
||||
// Hack: assume this is rands[0].
|
||||
rands = rands[1:]
|
||||
|
||||
case *ssa.MakeInterface:
|
||||
r.addRuntimeType(instr.X.Type(), false)
|
||||
}
|
||||
|
||||
// Process all address-taken functions.
|
||||
for _, op := range rands {
|
||||
if g, ok := (*op).(*ssa.Function); ok {
|
||||
r.visitAddrTakenFunc(g)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Analyze performs Rapid Type Analysis, starting at the specified root
|
||||
// functions. It returns nil if no roots were specified.
|
||||
//
|
||||
// If buildCallGraph is true, Result.CallGraph will contain a call
|
||||
// graph; otherwise, only the other fields (reachable functions) are
|
||||
// populated.
|
||||
//
|
||||
func Analyze(roots []*ssa.Function, buildCallGraph bool) *Result {
|
||||
if len(roots) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
r := &rta{
|
||||
result: &Result{Reachable: make(map[*ssa.Function]struct{ AddrTaken bool })},
|
||||
prog: roots[0].Prog,
|
||||
}
|
||||
|
||||
if buildCallGraph {
|
||||
// TODO(adonovan): change callgraph API to eliminate the
|
||||
// notion of a distinguished root node. Some callgraphs
|
||||
// have many roots, or none.
|
||||
r.result.CallGraph = callgraph.New(roots[0])
|
||||
}
|
||||
|
||||
hasher := typeutil.MakeHasher()
|
||||
r.result.RuntimeTypes.SetHasher(hasher)
|
||||
r.addrTakenFuncsBySig.SetHasher(hasher)
|
||||
r.dynCallSites.SetHasher(hasher)
|
||||
r.invokeSites.SetHasher(hasher)
|
||||
r.concreteTypes.SetHasher(hasher)
|
||||
r.interfaceTypes.SetHasher(hasher)
|
||||
|
||||
// Visit functions, processing their instructions, and adding
|
||||
// new functions to the worklist, until a fixed point is
|
||||
// reached.
|
||||
var shadow []*ssa.Function // for efficiency, we double-buffer the worklist
|
||||
r.worklist = append(r.worklist, roots...)
|
||||
for len(r.worklist) > 0 {
|
||||
shadow, r.worklist = r.worklist, shadow[:0]
|
||||
for _, f := range shadow {
|
||||
r.visitFunc(f)
|
||||
}
|
||||
}
|
||||
return r.result
|
||||
}
|
||||
|
||||
// interfaces(C) returns all currently known interfaces implemented by C.
|
||||
func (r *rta) interfaces(C types.Type) []*types.Interface {
|
||||
// Ascertain set of interfaces C implements
|
||||
// and update 'implements' relation.
|
||||
var ifaces []*types.Interface
|
||||
r.interfaceTypes.Iterate(func(I types.Type, concs interface{}) {
|
||||
if I := I.(*types.Interface); types.Implements(C, I) {
|
||||
concs, _ := concs.([]types.Type)
|
||||
r.interfaceTypes.Set(I, append(concs, C))
|
||||
ifaces = append(ifaces, I)
|
||||
}
|
||||
})
|
||||
r.concreteTypes.Set(C, ifaces)
|
||||
return ifaces
|
||||
}
|
||||
|
||||
// implementations(I) returns all currently known concrete types that implement I.
|
||||
func (r *rta) implementations(I *types.Interface) []types.Type {
|
||||
var concs []types.Type
|
||||
if v := r.interfaceTypes.At(I); v != nil {
|
||||
concs = v.([]types.Type)
|
||||
} else {
|
||||
// First time seeing this interface.
|
||||
// Update the 'implements' relation.
|
||||
r.concreteTypes.Iterate(func(C types.Type, ifaces interface{}) {
|
||||
if types.Implements(C, I) {
|
||||
ifaces, _ := ifaces.([]*types.Interface)
|
||||
r.concreteTypes.Set(C, append(ifaces, I))
|
||||
concs = append(concs, C)
|
||||
}
|
||||
})
|
||||
r.interfaceTypes.Set(I, concs)
|
||||
}
|
||||
return concs
|
||||
}
|
||||
|
||||
// addRuntimeType is called for each concrete type that can be the
// dynamic type of some interface or reflect.Value.
// Adapted from needMethods in go/ssa/builder.go
//
func (r *rta) addRuntimeType(T types.Type, skip bool) {
	if prev, ok := r.result.RuntimeTypes.At(T).(bool); ok {
		if skip && !prev {
			r.result.RuntimeTypes.Set(T, skip)
		}
		return
	}
	r.result.RuntimeTypes.Set(T, skip)

	mset := r.prog.MethodSets.MethodSet(T)

	if _, ok := T.Underlying().(*types.Interface); !ok {
		// T is a new concrete type.
		for i, n := 0, mset.Len(); i < n; i++ {
			sel := mset.At(i)
			m := sel.Obj()

			if m.Exported() {
				// Exported methods are always potentially callable via reflection.
				r.addReachable(r.prog.MethodValue(sel), true)
			}
		}

		// Add callgraph edge for each existing dynamic
		// "invoke"-mode call via that interface.
		for _, I := range r.interfaces(T) {
			sites, _ := r.invokeSites.At(I).([]ssa.CallInstruction)
			for _, site := range sites {
				r.addInvokeEdge(site, T)
			}
		}
	}

	// Precondition: T is not a method signature (*Signature with Recv()!=nil).
	// Recursive case: skip => don't call makeMethods(T).
	// Each package maintains its own set of types it has visited.

	var n *types.Named
	switch T := T.(type) {
	case *types.Named:
		n = T
	case *types.Pointer:
		n, _ = T.Elem().(*types.Named)
	}
	if n != nil {
		owner := n.Obj().Pkg()
		if owner == nil {
			return // built-in error type
		}
	}

	// Recursion over signatures of each exported method.
	for i := 0; i < mset.Len(); i++ {
		if mset.At(i).Obj().Exported() {
			sig := mset.At(i).Type().(*types.Signature)
			r.addRuntimeType(sig.Params(), true)  // skip the Tuple itself
			r.addRuntimeType(sig.Results(), true) // skip the Tuple itself
		}
	}

	switch t := T.(type) {
	case *types.Basic:
		// nop

	case *types.Interface:
		// nop---handled by recursion over method set.

	case *types.Pointer:
		r.addRuntimeType(t.Elem(), false)

	case *types.Slice:
		r.addRuntimeType(t.Elem(), false)

	case *types.Chan:
		r.addRuntimeType(t.Elem(), false)

	case *types.Map:
		r.addRuntimeType(t.Key(), false)
		r.addRuntimeType(t.Elem(), false)

	case *types.Signature:
		if t.Recv() != nil {
			panic(fmt.Sprintf("Signature %s has Recv %s", t, t.Recv()))
		}
		r.addRuntimeType(t.Params(), true)  // skip the Tuple itself
		r.addRuntimeType(t.Results(), true) // skip the Tuple itself

	case *types.Named:
		// A pointer-to-named type can be derived from a named
		// type via reflection. It may have methods too.
		r.addRuntimeType(types.NewPointer(T), false)

		// Consider 'type T struct{S}' where S has methods.
		// Reflection provides no way to get from T to struct{S},
		// only to S, so the method set of struct{S} is unwanted,
		// so set 'skip' flag during recursion.
		r.addRuntimeType(t.Underlying(), true)

	case *types.Array:
		r.addRuntimeType(t.Elem(), false)

	case *types.Struct:
		for i, n := 0, t.NumFields(); i < n; i++ {
			r.addRuntimeType(t.Field(i).Type(), false)
		}

	case *types.Tuple:
		for i, n := 0, t.Len(); i < n; i++ {
			r.addRuntimeType(t.At(i).Type(), false)
		}

	default:
		panic(T)
	}
}
|
@ -1,141 +0,0 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build !go1.5

// No testdata on Android.

// +build !android

package rta_test

import (
	"bytes"
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"io/ioutil"
	"sort"
	"strings"
	"testing"

	"golang.org/x/tools/go/callgraph"
	"golang.org/x/tools/go/callgraph/rta"
	"golang.org/x/tools/go/loader"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
	"golang.org/x/tools/go/types"
)
|
||||
|
||||
var inputs = []string{
|
||||
"testdata/func.go",
|
||||
"testdata/rtype.go",
|
||||
"testdata/iface.go",
|
||||
}
|
||||
|
||||
func expectation(f *ast.File) (string, token.Pos) {
|
||||
for _, c := range f.Comments {
|
||||
text := strings.TrimSpace(c.Text())
|
||||
if t := strings.TrimPrefix(text, "WANT:\n"); t != text {
|
||||
return t, c.Pos()
|
||||
}
|
||||
}
|
||||
return "", token.NoPos
|
||||
}
|
||||
|
||||
// TestRTA runs RTA on each file in inputs, prints the results, and
|
||||
// compares it with the golden results embedded in the WANT comment at
|
||||
// the end of the file.
|
||||
//
|
||||
// The results string consists of two parts: the set of dynamic call
|
||||
// edges, "f --> g", one per line, and the set of reachable functions,
|
||||
// one per line. Each set is sorted.
|
||||
//
|
||||
func TestRTA(t *testing.T) {
|
||||
for _, filename := range inputs {
|
||||
content, err := ioutil.ReadFile(filename)
|
||||
if err != nil {
|
||||
t.Errorf("couldn't read file '%s': %s", filename, err)
|
||||
continue
|
||||
}
|
||||
|
||||
conf := loader.Config{
|
||||
ParserMode: parser.ParseComments,
|
||||
}
|
||||
f, err := conf.ParseFile(filename, content)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
continue
|
||||
}
|
||||
|
||||
want, pos := expectation(f)
|
||||
if pos == token.NoPos {
|
||||
t.Errorf("No WANT: comment in %s", filename)
|
||||
continue
|
||||
}
|
||||
|
||||
conf.CreateFromFiles("main", f)
|
||||
iprog, err := conf.Load()
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
continue
|
||||
}
|
||||
|
||||
prog := ssautil.CreateProgram(iprog, 0)
|
||||
mainPkg := prog.Package(iprog.Created[0].Pkg)
|
||||
prog.Build()
|
||||
|
||||
res := rta.Analyze([]*ssa.Function{
|
||||
mainPkg.Func("main"),
|
||||
mainPkg.Func("init"),
|
||||
}, true)
|
||||
|
||||
if got := printResult(res, mainPkg.Pkg); got != want {
|
||||
t.Errorf("%s: got:\n%s\nwant:\n%s",
|
||||
prog.Fset.Position(pos), got, want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func printResult(res *rta.Result, from *types.Package) string {
|
||||
var buf bytes.Buffer
|
||||
|
||||
writeSorted := func(ss []string) {
|
||||
sort.Strings(ss)
|
||||
for _, s := range ss {
|
||||
fmt.Fprintf(&buf, " %s\n", s)
|
||||
}
|
||||
}
|
||||
|
||||
buf.WriteString("Dynamic calls\n")
|
||||
var edges []string
|
||||
callgraph.GraphVisitEdges(res.CallGraph, func(e *callgraph.Edge) error {
|
||||
if strings.Contains(e.Description(), "dynamic") {
|
||||
edges = append(edges, fmt.Sprintf("%s --> %s",
|
||||
e.Caller.Func.RelString(from),
|
||||
e.Callee.Func.RelString(from)))
|
||||
}
|
||||
return nil
|
||||
})
|
||||
writeSorted(edges)
|
||||
|
||||
buf.WriteString("Reachable functions\n")
|
||||
var reachable []string
|
||||
for f := range res.Reachable {
|
||||
reachable = append(reachable, f.RelString(from))
|
||||
}
|
||||
writeSorted(reachable)
|
||||
|
||||
buf.WriteString("Reflect types\n")
|
||||
var rtypes []string
|
||||
res.RuntimeTypes.Iterate(func(key types.Type, value interface{}) {
|
||||
if value == false { // accessible to reflection
|
||||
rtypes = append(rtypes, types.TypeString(key, types.RelativeTo(from)))
|
||||
}
|
||||
})
|
||||
writeSorted(rtypes)
|
||||
|
||||
return strings.TrimSpace(buf.String())
|
||||
}
|
@ -1,920 +0,0 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package exact implements Values representing untyped
// Go constants and the corresponding operations. Values
// and operations have unlimited precision.
//
// A special Unknown value may be used when a value
// is unknown due to an error. Operations on unknown
// values produce unknown values unless specified
// otherwise.
//
package exact // import "golang.org/x/tools/go/exact"

import (
	"fmt"
	"go/token"
	"math/big"
	"strconv"
)
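// exampleUsage is an illustrative sketch added for this write-up; it is not
// part of the original file. It shows how clients typically combined Values
// using the factories and operations defined below (MakeInt64,
// MakeFromLiteral, BinaryOp, Compare).
func exampleUsage() {
	x := MakeInt64(3)
	y := MakeFromLiteral("1.5", token.FLOAT) // the exact rational 3/2
	sum := BinaryOp(x, token.ADD, y)         // 9/2; sum.Kind() == Float
	fmt.Println(sum)                         // "9/2"
	fmt.Println(Compare(sum, token.GTR, MakeInt64(4))) // true
}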
|
||||
|
||||
// Kind specifies the kind of value represented by a Value.
|
||||
type Kind int
|
||||
|
||||
// Implementation note: Kinds must be enumerated in
|
||||
// order of increasing "complexity" (used by match).
|
||||
|
||||
const (
|
||||
// unknown values
|
||||
Unknown Kind = iota
|
||||
|
||||
// non-numeric values
|
||||
Bool
|
||||
String
|
||||
|
||||
// numeric values
|
||||
Int
|
||||
Float
|
||||
Complex
|
||||
)
|
||||
|
||||
// A Value represents a mathematically exact value of a given Kind.
|
||||
type Value interface {
|
||||
// Kind returns the value kind; it is always the smallest
|
||||
// kind in which the value can be represented exactly.
|
||||
Kind() Kind
|
||||
|
||||
// String returns a human-readable form of the value.
|
||||
String() string
|
||||
|
||||
// Prevent external implementations.
|
||||
implementsValue()
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Implementations
|
||||
|
||||
type (
|
||||
unknownVal struct{}
|
||||
boolVal bool
|
||||
stringVal string
|
||||
int64Val int64
|
||||
intVal struct{ val *big.Int }
|
||||
floatVal struct{ val *big.Rat }
|
||||
complexVal struct{ re, im *big.Rat }
|
||||
)
|
||||
|
||||
func (unknownVal) Kind() Kind { return Unknown }
|
||||
func (boolVal) Kind() Kind { return Bool }
|
||||
func (stringVal) Kind() Kind { return String }
|
||||
func (int64Val) Kind() Kind { return Int }
|
||||
func (intVal) Kind() Kind { return Int }
|
||||
func (floatVal) Kind() Kind { return Float }
|
||||
func (complexVal) Kind() Kind { return Complex }
|
||||
|
||||
func (unknownVal) String() string { return "unknown" }
|
||||
func (x boolVal) String() string { return fmt.Sprintf("%v", bool(x)) }
|
||||
func (x stringVal) String() string { return strconv.Quote(string(x)) }
|
||||
func (x int64Val) String() string { return strconv.FormatInt(int64(x), 10) }
|
||||
func (x intVal) String() string { return x.val.String() }
|
||||
func (x floatVal) String() string { return x.val.String() }
|
||||
func (x complexVal) String() string { return fmt.Sprintf("(%s + %si)", x.re, x.im) }
|
||||
|
||||
func (unknownVal) implementsValue() {}
|
||||
func (boolVal) implementsValue() {}
|
||||
func (stringVal) implementsValue() {}
|
||||
func (int64Val) implementsValue() {}
|
||||
func (intVal) implementsValue() {}
|
||||
func (floatVal) implementsValue() {}
|
||||
func (complexVal) implementsValue() {}
|
||||
|
||||
// int64 bounds
|
||||
var (
|
||||
minInt64 = big.NewInt(-1 << 63)
|
||||
maxInt64 = big.NewInt(1<<63 - 1)
|
||||
)
|
||||
|
||||
func normInt(x *big.Int) Value {
|
||||
if minInt64.Cmp(x) <= 0 && x.Cmp(maxInt64) <= 0 {
|
||||
return int64Val(x.Int64())
|
||||
}
|
||||
return intVal{x}
|
||||
}
|
||||
|
||||
func normFloat(x *big.Rat) Value {
|
||||
if x.IsInt() {
|
||||
return normInt(x.Num())
|
||||
}
|
||||
return floatVal{x}
|
||||
}
|
||||
|
||||
func normComplex(re, im *big.Rat) Value {
|
||||
if im.Sign() == 0 {
|
||||
return normFloat(re)
|
||||
}
|
||||
return complexVal{re, im}
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Factories
|
||||
|
||||
// MakeUnknown returns the Unknown value.
|
||||
func MakeUnknown() Value { return unknownVal{} }
|
||||
|
||||
// MakeBool returns the Bool value for x.
|
||||
func MakeBool(b bool) Value { return boolVal(b) }
|
||||
|
||||
// MakeString returns the String value for x.
|
||||
func MakeString(s string) Value { return stringVal(s) }
|
||||
|
||||
// MakeInt64 returns the Int value for x.
|
||||
func MakeInt64(x int64) Value { return int64Val(x) }
|
||||
|
||||
// MakeUint64 returns the Int value for x.
|
||||
func MakeUint64(x uint64) Value { return normInt(new(big.Int).SetUint64(x)) }
|
||||
|
||||
// MakeFloat64 returns the numeric value for x.
|
||||
// If x is not finite, the result is unknown.
|
||||
func MakeFloat64(x float64) Value {
|
||||
if f := new(big.Rat).SetFloat64(x); f != nil {
|
||||
return normFloat(f)
|
||||
}
|
||||
return unknownVal{}
|
||||
}
|
||||
|
||||
// MakeFromLiteral returns the corresponding integer, floating-point,
|
||||
// imaginary, character, or string value for a Go literal string. The
|
||||
// result is nil if the literal string is invalid.
|
||||
func MakeFromLiteral(lit string, tok token.Token) Value {
|
||||
switch tok {
|
||||
case token.INT:
|
||||
if x, err := strconv.ParseInt(lit, 0, 64); err == nil {
|
||||
return int64Val(x)
|
||||
}
|
||||
if x, ok := new(big.Int).SetString(lit, 0); ok {
|
||||
return intVal{x}
|
||||
}
|
||||
|
||||
case token.FLOAT:
|
||||
if x, ok := new(big.Rat).SetString(lit); ok {
|
||||
return normFloat(x)
|
||||
}
|
||||
|
||||
case token.IMAG:
|
||||
if n := len(lit); n > 0 && lit[n-1] == 'i' {
|
||||
if im, ok := new(big.Rat).SetString(lit[0 : n-1]); ok {
|
||||
return normComplex(big.NewRat(0, 1), im)
|
||||
}
|
||||
}
|
||||
|
||||
case token.CHAR:
|
||||
if n := len(lit); n >= 2 {
|
||||
if code, _, _, err := strconv.UnquoteChar(lit[1:n-1], '\''); err == nil {
|
||||
return int64Val(code)
|
||||
}
|
||||
}
|
||||
|
||||
case token.STRING:
|
||||
if s, err := strconv.Unquote(lit); err == nil {
|
||||
return stringVal(s)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Accessors
|
||||
//
|
||||
// For unknown arguments the result is the zero value for the respective
|
||||
// accessor type, except for Sign, where the result is 1.
|
||||
|
||||
// BoolVal returns the Go boolean value of x, which must be a Bool or an Unknown.
|
||||
// If x is Unknown, the result is false.
|
||||
func BoolVal(x Value) bool {
|
||||
switch x := x.(type) {
|
||||
case boolVal:
|
||||
return bool(x)
|
||||
case unknownVal:
|
||||
return false
|
||||
}
|
||||
panic(fmt.Sprintf("%v not a Bool", x))
|
||||
}
|
||||
|
||||
// StringVal returns the Go string value of x, which must be a String or an Unknown.
|
||||
// If x is Unknown, the result is "".
|
||||
func StringVal(x Value) string {
|
||||
switch x := x.(type) {
|
||||
case stringVal:
|
||||
return string(x)
|
||||
case unknownVal:
|
||||
return ""
|
||||
}
|
||||
panic(fmt.Sprintf("%v not a String", x))
|
||||
}
|
||||
|
||||
// Int64Val returns the Go int64 value of x and whether the result is exact;
|
||||
// x must be an Int or an Unknown. If the result is not exact, its value is undefined.
|
||||
// If x is Unknown, the result is (0, false).
|
||||
func Int64Val(x Value) (int64, bool) {
|
||||
switch x := x.(type) {
|
||||
case int64Val:
|
||||
return int64(x), true
|
||||
case intVal:
|
||||
return x.val.Int64(), x.val.BitLen() <= 63
|
||||
case unknownVal:
|
||||
return 0, false
|
||||
}
|
||||
panic(fmt.Sprintf("%v not an Int", x))
|
||||
}
|
||||
|
||||
// Uint64Val returns the Go uint64 value of x and whether the result is exact;
|
||||
// x must be an Int or an Unknown. If the result is not exact, its value is undefined.
|
||||
// If x is Unknown, the result is (0, false).
|
||||
func Uint64Val(x Value) (uint64, bool) {
|
||||
switch x := x.(type) {
|
||||
case int64Val:
|
||||
return uint64(x), x >= 0
|
||||
case intVal:
|
||||
return x.val.Uint64(), x.val.Sign() >= 0 && x.val.BitLen() <= 64
|
||||
case unknownVal:
|
||||
return 0, false
|
||||
}
|
||||
panic(fmt.Sprintf("%v not an Int", x))
|
||||
}
|
||||
|
||||
// Float32Val is like Float64Val but for float32 instead of float64.
|
||||
func Float32Val(x Value) (float32, bool) {
|
||||
switch x := x.(type) {
|
||||
case int64Val:
|
||||
f := float32(x)
|
||||
return f, int64Val(f) == x
|
||||
case intVal:
|
||||
return ratToFloat32(new(big.Rat).SetFrac(x.val, int1))
|
||||
case floatVal:
|
||||
return ratToFloat32(x.val)
|
||||
case unknownVal:
|
||||
return 0, false
|
||||
}
|
||||
panic(fmt.Sprintf("%v not a Float", x))
|
||||
}
|
||||
|
||||
// Float64Val returns the nearest Go float64 value of x and whether the result is exact;
|
||||
// x must be numeric but not Complex, or Unknown. For values too small (too close to 0)
|
||||
// to represent as float64, Float64Val silently underflows to 0. The result sign always
|
||||
// matches the sign of x, even for 0.
|
||||
// If x is Unknown, the result is (0, false).
|
||||
func Float64Val(x Value) (float64, bool) {
|
||||
switch x := x.(type) {
|
||||
case int64Val:
|
||||
f := float64(int64(x))
|
||||
return f, int64Val(f) == x
|
||||
case intVal:
|
||||
return new(big.Rat).SetFrac(x.val, int1).Float64()
|
||||
case floatVal:
|
||||
return x.val.Float64()
|
||||
case unknownVal:
|
||||
return 0, false
|
||||
}
|
||||
panic(fmt.Sprintf("%v not a Float", x))
|
||||
}
|
||||
|
||||
// BitLen returns the number of bits required to represent
|
||||
// the absolute value x in binary representation; x must be an Int or an Unknown.
|
||||
// If x is Unknown, the result is 0.
|
||||
func BitLen(x Value) int {
|
||||
switch x := x.(type) {
|
||||
case int64Val:
|
||||
return new(big.Int).SetInt64(int64(x)).BitLen()
|
||||
case intVal:
|
||||
return x.val.BitLen()
|
||||
case unknownVal:
|
||||
return 0
|
||||
}
|
||||
panic(fmt.Sprintf("%v not an Int", x))
|
||||
}
|
||||
|
||||
// Sign returns -1, 0, or 1 depending on whether x < 0, x == 0, or x > 0;
|
||||
// x must be numeric or Unknown. For complex values x, the sign is 0 if x == 0,
|
||||
// otherwise it is != 0. If x is Unknown, the result is 1.
|
||||
func Sign(x Value) int {
|
||||
switch x := x.(type) {
|
||||
case int64Val:
|
||||
switch {
|
||||
case x < 0:
|
||||
return -1
|
||||
case x > 0:
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
case intVal:
|
||||
return x.val.Sign()
|
||||
case floatVal:
|
||||
return x.val.Sign()
|
||||
case complexVal:
|
||||
return x.re.Sign() | x.im.Sign()
|
||||
case unknownVal:
|
||||
return 1 // avoid spurious division by zero errors
|
||||
}
|
||||
panic(fmt.Sprintf("%v not numeric", x))
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Support for serializing/deserializing integers
|
||||
|
||||
const (
|
||||
// Compute the size of a Word in bytes.
|
||||
_m = ^big.Word(0)
|
||||
_log = _m>>8&1 + _m>>16&1 + _m>>32&1
|
||||
wordSize = 1 << _log
|
||||
)
|
||||
|
||||
// Bytes returns the bytes for the absolute value of x in little-
|
||||
// endian binary representation; x must be an Int.
|
||||
func Bytes(x Value) []byte {
|
||||
var val *big.Int
|
||||
switch x := x.(type) {
|
||||
case int64Val:
|
||||
val = new(big.Int).SetInt64(int64(x))
|
||||
case intVal:
|
||||
val = x.val
|
||||
default:
|
||||
panic(fmt.Sprintf("%v not an Int", x))
|
||||
}
|
||||
|
||||
words := val.Bits()
|
||||
bytes := make([]byte, len(words)*wordSize)
|
||||
|
||||
i := 0
|
||||
for _, w := range words {
|
||||
for j := 0; j < wordSize; j++ {
|
||||
bytes[i] = byte(w)
|
||||
w >>= 8
|
||||
i++
|
||||
}
|
||||
}
|
||||
// remove leading 0's
|
||||
for i > 0 && bytes[i-1] == 0 {
|
||||
i--
|
||||
}
|
||||
|
||||
return bytes[:i]
|
||||
}
|
||||
|
||||
// MakeFromBytes returns the Int value given the bytes of its little-endian
|
||||
// binary representation. An empty byte slice argument represents 0.
|
||||
func MakeFromBytes(bytes []byte) Value {
|
||||
words := make([]big.Word, (len(bytes)+(wordSize-1))/wordSize)
|
||||
|
||||
i := 0
|
||||
var w big.Word
|
||||
var s uint
|
||||
for _, b := range bytes {
|
||||
w |= big.Word(b) << s
|
||||
if s += 8; s == wordSize*8 {
|
||||
words[i] = w
|
||||
i++
|
||||
w = 0
|
||||
s = 0
|
||||
}
|
||||
}
|
||||
// store last word
|
||||
if i < len(words) {
|
||||
words[i] = w
|
||||
i++
|
||||
}
|
||||
// remove leading 0's
|
||||
for i > 0 && words[i-1] == 0 {
|
||||
i--
|
||||
}
|
||||
|
||||
return normInt(new(big.Int).SetBits(words[:i]))
|
||||
}
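// exampleBytesRoundTrip is an illustrative sketch added for this write-up; it
// is not part of the original file. 258 == 0x0102, so its little-endian
// encoding is the two bytes {2, 1}; most-significant zero bytes are stripped,
// and 0 is represented by the empty slice.
func exampleBytesRoundTrip() {
	b := Bytes(MakeInt64(258))
	fmt.Println(b)                // [2 1]
	fmt.Println(MakeFromBytes(b)) // 258
}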
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Support for disassembling fractions
|
||||
|
||||
// Num returns the numerator of x; x must be Int, Float, or Unknown.
|
||||
// If x is Unknown, the result is Unknown, otherwise it is an Int
|
||||
// with the same sign as x.
|
||||
func Num(x Value) Value {
|
||||
switch x := x.(type) {
|
||||
case unknownVal, int64Val, intVal:
|
||||
return x
|
||||
case floatVal:
|
||||
return normInt(x.val.Num())
|
||||
}
|
||||
panic(fmt.Sprintf("%v not Int or Float", x))
|
||||
}
|
||||
|
||||
// Denom returns the denominator of x; x must be Int, Float, or Unknown.
|
||||
// If x is Unknown, the result is Unknown, otherwise it is an Int >= 1.
|
||||
func Denom(x Value) Value {
|
||||
switch x := x.(type) {
|
||||
case unknownVal:
|
||||
return x
|
||||
case int64Val, intVal:
|
||||
return int64Val(1)
|
||||
case floatVal:
|
||||
return normInt(x.val.Denom())
|
||||
}
|
||||
panic(fmt.Sprintf("%v not Int or Float", x))
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Support for assembling/disassembling complex numbers
|
||||
|
||||
// MakeImag returns the numeric value x*i (possibly 0);
|
||||
// x must be Int, Float, or Unknown.
|
||||
// If x is Unknown, the result is Unknown.
|
||||
func MakeImag(x Value) Value {
|
||||
var im *big.Rat
|
||||
switch x := x.(type) {
|
||||
case unknownVal:
|
||||
return x
|
||||
case int64Val:
|
||||
im = big.NewRat(int64(x), 1)
|
||||
case intVal:
|
||||
im = new(big.Rat).SetFrac(x.val, int1)
|
||||
case floatVal:
|
||||
im = x.val
|
||||
default:
|
||||
panic(fmt.Sprintf("%v not Int or Float", x))
|
||||
}
|
||||
return normComplex(rat0, im)
|
||||
}
|
||||
|
||||
// Real returns the real part of x, which must be a numeric or unknown value.
|
||||
// If x is Unknown, the result is Unknown.
|
||||
func Real(x Value) Value {
|
||||
switch x := x.(type) {
|
||||
case unknownVal, int64Val, intVal, floatVal:
|
||||
return x
|
||||
case complexVal:
|
||||
return normFloat(x.re)
|
||||
}
|
||||
panic(fmt.Sprintf("%v not numeric", x))
|
||||
}
|
||||
|
||||
// Imag returns the imaginary part of x, which must be a numeric or unknown value.
|
||||
// If x is Unknown, the result is Unknown.
|
||||
func Imag(x Value) Value {
|
||||
switch x := x.(type) {
|
||||
case unknownVal:
|
||||
return x
|
||||
case int64Val, intVal, floatVal:
|
||||
return int64Val(0)
|
||||
case complexVal:
|
||||
return normFloat(x.im)
|
||||
}
|
||||
panic(fmt.Sprintf("%v not numeric", x))
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Operations
|
||||
|
||||
// is32bit reports whether x can be represented using 32 bits.
|
||||
func is32bit(x int64) bool {
|
||||
const s = 32
|
||||
return -1<<(s-1) <= x && x <= 1<<(s-1)-1
|
||||
}
|
||||
|
||||
// is63bit reports whether x can be represented using 63 bits.
|
||||
func is63bit(x int64) bool {
|
||||
const s = 63
|
||||
return -1<<(s-1) <= x && x <= 1<<(s-1)-1
|
||||
}
|
||||
|
||||
// UnaryOp returns the result of the unary expression op y.
|
||||
// The operation must be defined for the operand.
|
||||
// If size >= 0 it specifies the ^ (xor) result size in bytes.
|
||||
// If y is Unknown, the result is Unknown.
|
||||
//
|
||||
func UnaryOp(op token.Token, y Value, size int) Value {
|
||||
switch op {
|
||||
case token.ADD:
|
||||
switch y.(type) {
|
||||
case unknownVal, int64Val, intVal, floatVal, complexVal:
|
||||
return y
|
||||
}
|
||||
|
||||
case token.SUB:
|
||||
switch y := y.(type) {
|
||||
case unknownVal:
|
||||
return y
|
||||
case int64Val:
|
||||
if z := -y; z != y {
|
||||
return z // no overflow
|
||||
}
|
||||
return normInt(new(big.Int).Neg(big.NewInt(int64(y))))
|
||||
case intVal:
|
||||
return normInt(new(big.Int).Neg(y.val))
|
||||
case floatVal:
|
||||
return normFloat(new(big.Rat).Neg(y.val))
|
||||
case complexVal:
|
||||
return normComplex(new(big.Rat).Neg(y.re), new(big.Rat).Neg(y.im))
|
||||
}
|
||||
|
||||
case token.XOR:
|
||||
var z big.Int
|
||||
switch y := y.(type) {
|
||||
case unknownVal:
|
||||
return y
|
||||
case int64Val:
|
||||
z.Not(big.NewInt(int64(y)))
|
||||
case intVal:
|
||||
z.Not(y.val)
|
||||
default:
|
||||
goto Error
|
||||
}
|
||||
// For unsigned types, the result will be negative and
|
||||
// thus "too large": We must limit the result size to
|
||||
// the type's size.
|
||||
if size >= 0 {
|
||||
s := uint(size) * 8
|
||||
z.AndNot(&z, new(big.Int).Lsh(big.NewInt(-1), s)) // z &^= (-1)<<s
|
||||
}
|
||||
return normInt(&z)
|
||||
|
||||
case token.NOT:
|
||||
switch y := y.(type) {
|
||||
case unknownVal:
|
||||
return y
|
||||
case boolVal:
|
||||
return !y
|
||||
}
|
||||
}
|
||||
|
||||
Error:
|
||||
panic(fmt.Sprintf("invalid unary operation %s%v", op, y))
|
||||
}
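// exampleXorSize is an illustrative sketch added for this write-up; it is not
// part of the original file. It shows how the size argument truncates the
// ^ (xor) result, mimicking an unsigned operand type.
func exampleXorSize() {
	fmt.Println(UnaryOp(token.XOR, MakeInt64(0), -1)) // -1  (untyped: ^0 is -1)
	fmt.Println(UnaryOp(token.XOR, MakeInt64(0), 1))  // 255 (as for a uint8 operand)
}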
|
||||
|
||||
var (
|
||||
int1 = big.NewInt(1)
|
||||
rat0 = big.NewRat(0, 1)
|
||||
)
|
||||
|
||||
func ord(x Value) int {
|
||||
switch x.(type) {
|
||||
default:
|
||||
return 0
|
||||
case boolVal, stringVal:
|
||||
return 1
|
||||
case int64Val:
|
||||
return 2
|
||||
case intVal:
|
||||
return 3
|
||||
case floatVal:
|
||||
return 4
|
||||
case complexVal:
|
||||
return 5
|
||||
}
|
||||
}
|
||||
|
||||
// match returns the matching representation (same type) with the
|
||||
// smallest complexity for two values x and y. If one of them is
|
||||
// numeric, both of them must be numeric. If one of them is Unknown,
|
||||
// both results are Unknown.
|
||||
//
|
||||
func match(x, y Value) (_, _ Value) {
|
||||
if ord(x) > ord(y) {
|
||||
y, x = match(y, x)
|
||||
return x, y
|
||||
}
|
||||
// ord(x) <= ord(y)
|
||||
|
||||
switch x := x.(type) {
|
||||
case unknownVal:
|
||||
return x, x
|
||||
|
||||
case boolVal, stringVal, complexVal:
|
||||
return x, y
|
||||
|
||||
case int64Val:
|
||||
switch y := y.(type) {
|
||||
case int64Val:
|
||||
return x, y
|
||||
case intVal:
|
||||
return intVal{big.NewInt(int64(x))}, y
|
||||
case floatVal:
|
||||
return floatVal{big.NewRat(int64(x), 1)}, y
|
||||
case complexVal:
|
||||
return complexVal{big.NewRat(int64(x), 1), rat0}, y
|
||||
}
|
||||
|
||||
case intVal:
|
||||
switch y := y.(type) {
|
||||
case intVal:
|
||||
return x, y
|
||||
case floatVal:
|
||||
return floatVal{new(big.Rat).SetFrac(x.val, int1)}, y
|
||||
case complexVal:
|
||||
return complexVal{new(big.Rat).SetFrac(x.val, int1), rat0}, y
|
||||
}
|
||||
|
||||
case floatVal:
|
||||
switch y := y.(type) {
|
||||
case floatVal:
|
||||
return x, y
|
||||
case complexVal:
|
||||
return complexVal{x.val, rat0}, y
|
||||
}
|
||||
}
|
||||
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// BinaryOp returns the result of the binary expression x op y.
|
||||
// The operation must be defined for the operands. If one of the
|
||||
// operands is Unknown, the result is Unknown.
|
||||
// To force integer division of Int operands, use op == token.QUO_ASSIGN
|
||||
// instead of token.QUO; the result is guaranteed to be Int in this case.
|
||||
// Division by zero leads to a run-time panic.
|
||||
//
|
||||
func BinaryOp(x Value, op token.Token, y Value) Value {
|
||||
x, y = match(x, y)
|
||||
|
||||
switch x := x.(type) {
|
||||
case unknownVal:
|
||||
return x
|
||||
|
||||
case boolVal:
|
||||
y := y.(boolVal)
|
||||
switch op {
|
||||
case token.LAND:
|
||||
return x && y
|
||||
case token.LOR:
|
||||
return x || y
|
||||
}
|
||||
|
||||
case int64Val:
|
||||
a := int64(x)
|
||||
b := int64(y.(int64Val))
|
||||
var c int64
|
||||
switch op {
|
||||
case token.ADD:
|
||||
if !is63bit(a) || !is63bit(b) {
|
||||
return normInt(new(big.Int).Add(big.NewInt(a), big.NewInt(b)))
|
||||
}
|
||||
c = a + b
|
||||
case token.SUB:
|
||||
if !is63bit(a) || !is63bit(b) {
|
||||
return normInt(new(big.Int).Sub(big.NewInt(a), big.NewInt(b)))
|
||||
}
|
||||
c = a - b
|
||||
case token.MUL:
|
||||
if !is32bit(a) || !is32bit(b) {
|
||||
return normInt(new(big.Int).Mul(big.NewInt(a), big.NewInt(b)))
|
||||
}
|
||||
c = a * b
|
||||
case token.QUO:
|
||||
return normFloat(new(big.Rat).SetFrac(big.NewInt(a), big.NewInt(b)))
|
||||
case token.QUO_ASSIGN: // force integer division
|
||||
c = a / b
|
||||
case token.REM:
|
||||
c = a % b
|
||||
case token.AND:
|
||||
c = a & b
|
||||
case token.OR:
|
||||
c = a | b
|
||||
case token.XOR:
|
||||
c = a ^ b
|
||||
case token.AND_NOT:
|
||||
c = a &^ b
|
||||
default:
|
||||
goto Error
|
||||
}
|
||||
return int64Val(c)
|
||||
|
||||
case intVal:
|
||||
a := x.val
|
||||
b := y.(intVal).val
|
||||
var c big.Int
|
||||
switch op {
|
||||
case token.ADD:
|
||||
c.Add(a, b)
|
||||
case token.SUB:
|
||||
c.Sub(a, b)
|
||||
case token.MUL:
|
||||
c.Mul(a, b)
|
||||
case token.QUO:
|
||||
return normFloat(new(big.Rat).SetFrac(a, b))
|
||||
case token.QUO_ASSIGN: // force integer division
|
||||
c.Quo(a, b)
|
||||
case token.REM:
|
||||
c.Rem(a, b)
|
||||
case token.AND:
|
||||
c.And(a, b)
|
||||
case token.OR:
|
||||
c.Or(a, b)
|
||||
case token.XOR:
|
||||
c.Xor(a, b)
|
||||
case token.AND_NOT:
|
||||
c.AndNot(a, b)
|
||||
default:
|
||||
goto Error
|
||||
}
|
||||
return normInt(&c)
|
||||
|
||||
case floatVal:
|
||||
a := x.val
|
||||
b := y.(floatVal).val
|
||||
var c big.Rat
|
||||
switch op {
|
||||
case token.ADD:
|
||||
c.Add(a, b)
|
||||
case token.SUB:
|
||||
c.Sub(a, b)
|
||||
case token.MUL:
|
||||
c.Mul(a, b)
|
||||
case token.QUO:
|
||||
c.Quo(a, b)
|
||||
default:
|
||||
goto Error
|
||||
}
|
||||
return normFloat(&c)
|
||||
|
||||
case complexVal:
|
||||
y := y.(complexVal)
|
||||
a, b := x.re, x.im
|
||||
c, d := y.re, y.im
|
||||
var re, im big.Rat
|
||||
switch op {
|
||||
case token.ADD:
|
||||
// (a+c) + i(b+d)
|
||||
re.Add(a, c)
|
||||
im.Add(b, d)
|
||||
case token.SUB:
|
||||
// (a-c) + i(b-d)
|
||||
re.Sub(a, c)
|
||||
im.Sub(b, d)
|
||||
case token.MUL:
|
||||
// (ac-bd) + i(bc+ad)
|
||||
var ac, bd, bc, ad big.Rat
|
||||
ac.Mul(a, c)
|
||||
bd.Mul(b, d)
|
||||
bc.Mul(b, c)
|
||||
ad.Mul(a, d)
|
||||
re.Sub(&ac, &bd)
|
||||
im.Add(&bc, &ad)
|
||||
case token.QUO:
|
||||
// (ac+bd)/s + i(bc-ad)/s, with s = cc + dd
|
||||
var ac, bd, bc, ad, s, cc, dd big.Rat
|
||||
ac.Mul(a, c)
|
||||
bd.Mul(b, d)
|
||||
bc.Mul(b, c)
|
||||
ad.Mul(a, d)
|
||||
cc.Mul(c, c)
|
||||
dd.Mul(d, d)
|
||||
s.Add(&cc, &dd)
|
||||
re.Add(&ac, &bd)
|
||||
re.Quo(&re, &s)
|
||||
im.Sub(&bc, &ad)
|
||||
im.Quo(&im, &s)
|
||||
default:
|
||||
goto Error
|
||||
}
|
||||
return normComplex(&re, &im)
|
||||
|
||||
case stringVal:
|
||||
if op == token.ADD {
|
||||
return x + y.(stringVal)
|
||||
}
|
||||
}
|
||||
|
||||
Error:
|
||||
panic(fmt.Sprintf("invalid binary operation %v %s %v", x, op, y))
|
||||
}
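// exampleQuo is an illustrative sketch added for this write-up; it is not
// part of the original file. token.QUO yields an exact, possibly fractional,
// result, while token.QUO_ASSIGN forces integer division of Int operands.
func exampleQuo() {
	a, b := MakeInt64(5), MakeInt64(3)
	fmt.Println(BinaryOp(a, token.QUO, b))        // 5/3 (Float)
	fmt.Println(BinaryOp(a, token.QUO_ASSIGN, b)) // 1   (Int)
}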
|
||||
|
||||
// Shift returns the result of the shift expression x op s
|
||||
// with op == token.SHL or token.SHR (<< or >>). x must be
|
||||
// an Int or an Unknown. If x is Unknown, the result is x.
|
||||
//
|
||||
func Shift(x Value, op token.Token, s uint) Value {
|
||||
switch x := x.(type) {
|
||||
case unknownVal:
|
||||
return x
|
||||
|
||||
case int64Val:
|
||||
if s == 0 {
|
||||
return x
|
||||
}
|
||||
switch op {
|
||||
case token.SHL:
|
||||
z := big.NewInt(int64(x))
|
||||
return normInt(z.Lsh(z, s))
|
||||
case token.SHR:
|
||||
return x >> s
|
||||
}
|
||||
|
||||
case intVal:
|
||||
if s == 0 {
|
||||
return x
|
||||
}
|
||||
var z big.Int
|
||||
switch op {
|
||||
case token.SHL:
|
||||
return normInt(z.Lsh(x.val, s))
|
||||
case token.SHR:
|
||||
return normInt(z.Rsh(x.val, s))
|
||||
}
|
||||
}
|
||||
|
||||
panic(fmt.Sprintf("invalid shift %v %s %d", x, op, s))
|
||||
}
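// exampleShift is an illustrative sketch added for this write-up; it is not
// part of the original file. Shifts are exact, so results may exceed 64 bits.
func exampleShift() {
	fmt.Println(Shift(MakeInt64(1), token.SHL, 10))  // 1024
	fmt.Println(Shift(MakeInt64(1), token.SHL, 100)) // 1267650600228229401496703205376
}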
|
||||
|
||||
func cmpZero(x int, op token.Token) bool {
|
||||
switch op {
|
||||
case token.EQL:
|
||||
return x == 0
|
||||
case token.NEQ:
|
||||
return x != 0
|
||||
case token.LSS:
|
||||
return x < 0
|
||||
case token.LEQ:
|
||||
return x <= 0
|
||||
case token.GTR:
|
||||
return x > 0
|
||||
case token.GEQ:
|
||||
return x >= 0
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// Compare returns the result of the comparison x op y.
|
||||
// The comparison must be defined for the operands.
|
||||
// If one of the operands is Unknown, the result is
|
||||
// false.
|
||||
//
|
||||
func Compare(x Value, op token.Token, y Value) bool {
|
||||
x, y = match(x, y)
|
||||
|
||||
switch x := x.(type) {
|
||||
case unknownVal:
|
||||
return false
|
||||
|
||||
case boolVal:
|
||||
y := y.(boolVal)
|
||||
switch op {
|
||||
case token.EQL:
|
||||
return x == y
|
||||
case token.NEQ:
|
||||
return x != y
|
||||
}
|
||||
|
||||
case int64Val:
|
||||
y := y.(int64Val)
|
||||
switch op {
|
||||
case token.EQL:
|
||||
return x == y
|
||||
case token.NEQ:
|
||||
return x != y
|
||||
case token.LSS:
|
||||
return x < y
|
||||
case token.LEQ:
|
||||
return x <= y
|
||||
case token.GTR:
|
||||
return x > y
|
||||
case token.GEQ:
|
||||
return x >= y
|
||||
}
|
||||
|
||||
case intVal:
|
||||
return cmpZero(x.val.Cmp(y.(intVal).val), op)
|
||||
|
||||
case floatVal:
|
||||
return cmpZero(x.val.Cmp(y.(floatVal).val), op)
|
||||
|
||||
case complexVal:
|
||||
y := y.(complexVal)
|
||||
re := x.re.Cmp(y.re)
|
||||
im := x.im.Cmp(y.im)
|
||||
switch op {
|
||||
case token.EQL:
|
||||
return re == 0 && im == 0
|
||||
case token.NEQ:
|
||||
return re != 0 || im != 0
|
||||
}
|
||||
|
||||
case stringVal:
|
||||
y := y.(stringVal)
|
||||
switch op {
|
||||
case token.EQL:
|
||||
return x == y
|
||||
case token.NEQ:
|
||||
return x != y
|
||||
case token.LSS:
|
||||
return x < y
|
||||
case token.LEQ:
|
||||
return x <= y
|
||||
case token.GTR:
|
||||
return x > y
|
||||
case token.GEQ:
|
||||
return x >= y
|
||||
}
|
||||
}
|
||||
|
||||
panic(fmt.Sprintf("invalid comparison %v %s %v", x, op, y))
|
||||
}
|
@ -1,375 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package exact
|
||||
|
||||
import (
|
||||
"go/token"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// TODO(gri) expand this test framework
|
||||
|
||||
var opTests = []string{
|
||||
// unary operations
|
||||
`+ 0 = 0`,
|
||||
`+ ? = ?`,
|
||||
`- 1 = -1`,
|
||||
`- ? = ?`,
|
||||
`^ 0 = -1`,
|
||||
`^ ? = ?`,
|
||||
|
||||
`! true = false`,
|
||||
`! false = true`,
|
||||
`! ? = ?`,
|
||||
|
||||
// etc.
|
||||
|
||||
// binary operations
|
||||
`"" + "" = ""`,
|
||||
`"foo" + "" = "foo"`,
|
||||
`"" + "bar" = "bar"`,
|
||||
`"foo" + "bar" = "foobar"`,
|
||||
|
||||
`0 + 0 = 0`,
|
||||
`0 + 0.1 = 0.1`,
|
||||
`0 + 0.1i = 0.1i`,
|
||||
`0.1 + 0.9 = 1`,
|
||||
`1e100 + 1e100 = 2e100`,
|
||||
`? + 0 = ?`,
|
||||
`0 + ? = ?`,
|
||||
|
||||
`0 - 0 = 0`,
|
||||
`0 - 0.1 = -0.1`,
|
||||
`0 - 0.1i = -0.1i`,
|
||||
`1e100 - 1e100 = 0`,
|
||||
`? - 0 = ?`,
|
||||
`0 - ? = ?`,
|
||||
|
||||
`0 * 0 = 0`,
|
||||
`1 * 0.1 = 0.1`,
|
||||
`1 * 0.1i = 0.1i`,
|
||||
`1i * 1i = -1`,
|
||||
`? * 0 = ?`,
|
||||
`0 * ? = ?`,
|
||||
|
||||
`0 / 0 = "division_by_zero"`,
|
||||
`10 / 2 = 5`,
|
||||
`5 / 3 = 5/3`,
|
||||
`5i / 3i = 5/3`,
|
||||
`? / 0 = ?`,
|
||||
`0 / ? = ?`,
|
||||
|
||||
`0 % 0 = "runtime_error:_integer_divide_by_zero"`, // TODO(gri) should be the same as for /
|
||||
`10 % 3 = 1`,
|
||||
`? % 0 = ?`,
|
||||
`0 % ? = ?`,
|
||||
|
||||
`0 & 0 = 0`,
|
||||
`12345 & 0 = 0`,
|
||||
`0xff & 0xf = 0xf`,
|
||||
`? & 0 = ?`,
|
||||
`0 & ? = ?`,
|
||||
|
||||
`0 | 0 = 0`,
|
||||
`12345 | 0 = 12345`,
|
||||
`0xb | 0xa0 = 0xab`,
|
||||
`? | 0 = ?`,
|
||||
`0 | ? = ?`,
|
||||
|
||||
`0 ^ 0 = 0`,
|
||||
`1 ^ -1 = -2`,
|
||||
`? ^ 0 = ?`,
|
||||
`0 ^ ? = ?`,
|
||||
|
||||
`0 &^ 0 = 0`,
|
||||
`0xf &^ 1 = 0xe`,
|
||||
`1 &^ 0xf = 0`,
|
||||
// etc.
|
||||
|
||||
// shifts
|
||||
`0 << 0 = 0`,
|
||||
`1 << 10 = 1024`,
|
||||
`0 >> 0 = 0`,
|
||||
`1024 >> 10 == 1`,
|
||||
`? << 0 == ?`,
|
||||
`? >> 10 == ?`,
|
||||
// etc.
|
||||
|
||||
// comparisons
|
||||
`false == false = true`,
|
||||
`false == true = false`,
|
||||
`true == false = false`,
|
||||
`true == true = true`,
|
||||
|
||||
`false != false = false`,
|
||||
`false != true = true`,
|
||||
`true != false = true`,
|
||||
`true != true = false`,
|
||||
|
||||
`"foo" == "bar" = false`,
|
||||
`"foo" != "bar" = true`,
|
||||
`"foo" < "bar" = false`,
|
||||
`"foo" <= "bar" = false`,
|
||||
`"foo" > "bar" = true`,
|
||||
`"foo" >= "bar" = true`,
|
||||
|
||||
`0 == 0 = true`,
|
||||
`0 != 0 = false`,
|
||||
`0 < 10 = true`,
|
||||
`10 <= 10 = true`,
|
||||
`0 > 10 = false`,
|
||||
`10 >= 10 = true`,
|
||||
|
||||
`1/123456789 == 1/123456789 == true`,
|
||||
`1/123456789 != 1/123456789 == false`,
|
||||
`1/123456789 < 1/123456788 == true`,
|
||||
`1/123456788 <= 1/123456789 == false`,
|
||||
`0.11 > 0.11 = false`,
|
||||
`0.11 >= 0.11 = true`,
|
||||
|
||||
`? == 0 = false`,
|
||||
`? != 0 = false`,
|
||||
`? < 10 = false`,
|
||||
`? <= 10 = false`,
|
||||
`? > 10 = false`,
|
||||
`? >= 10 = false`,
|
||||
|
||||
`0 == ? = false`,
|
||||
`0 != ? = false`,
|
||||
`0 < ? = false`,
|
||||
`10 <= ? = false`,
|
||||
`0 > ? = false`,
|
||||
`10 >= ? = false`,
|
||||
|
||||
// etc.
|
||||
}
|
||||
|
||||
func TestOps(t *testing.T) {
|
||||
for _, test := range opTests {
|
||||
a := strings.Split(test, " ")
|
||||
i := 0 // operator index
|
||||
|
||||
var x, x0 Value
|
||||
switch len(a) {
|
||||
case 4:
|
||||
// unary operation
|
||||
case 5:
|
||||
// binary operation
|
||||
x, x0 = val(a[0]), val(a[0])
|
||||
i = 1
|
||||
default:
|
||||
t.Errorf("invalid test case: %s", test)
|
||||
continue
|
||||
}
|
||||
|
||||
op, ok := optab[a[i]]
|
||||
if !ok {
|
||||
panic("missing optab entry for " + a[i])
|
||||
}
|
||||
|
||||
y, y0 := val(a[i+1]), val(a[i+1])
|
||||
|
||||
got := doOp(x, op, y)
|
||||
want := val(a[i+3])
|
||||
if !eql(got, want) {
|
||||
t.Errorf("%s: got %s; want %s", test, got, want)
|
||||
}
|
||||
if x0 != nil && !eql(x, x0) {
|
||||
t.Errorf("%s: x changed to %s", test, x)
|
||||
}
|
||||
if !eql(y, y0) {
|
||||
t.Errorf("%s: y changed to %s", test, y)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func eql(x, y Value) bool {
|
||||
_, ux := x.(unknownVal)
|
||||
_, uy := y.(unknownVal)
|
||||
if ux || uy {
|
||||
return ux == uy
|
||||
}
|
||||
return Compare(x, token.EQL, y)
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Support functions
|
||||
|
||||
func val(lit string) Value {
|
||||
if len(lit) == 0 {
|
||||
return MakeUnknown()
|
||||
}
|
||||
|
||||
switch lit {
|
||||
case "?":
|
||||
return MakeUnknown()
|
||||
case "true":
|
||||
return MakeBool(true)
|
||||
case "false":
|
||||
return MakeBool(false)
|
||||
}
|
||||
|
||||
tok := token.INT
|
||||
switch first, last := lit[0], lit[len(lit)-1]; {
|
||||
case first == '"' || first == '`':
|
||||
tok = token.STRING
|
||||
lit = strings.Replace(lit, "_", " ", -1)
|
||||
case first == '\'':
|
||||
tok = token.CHAR
|
||||
case last == 'i':
|
||||
tok = token.IMAG
|
||||
default:
|
||||
if !strings.HasPrefix(lit, "0x") && strings.ContainsAny(lit, "./Ee") {
|
||||
tok = token.FLOAT
|
||||
}
|
||||
}
|
||||
|
||||
return MakeFromLiteral(lit, tok)
|
||||
}
|
||||
|
||||
var optab = map[string]token.Token{
|
||||
"!": token.NOT,
|
||||
|
||||
"+": token.ADD,
|
||||
"-": token.SUB,
|
||||
"*": token.MUL,
|
||||
"/": token.QUO,
|
||||
"%": token.REM,
|
||||
|
||||
"<<": token.SHL,
|
||||
">>": token.SHR,
|
||||
|
||||
"&": token.AND,
|
||||
"|": token.OR,
|
||||
"^": token.XOR,
|
||||
"&^": token.AND_NOT,
|
||||
|
||||
"==": token.EQL,
|
||||
"!=": token.NEQ,
|
||||
"<": token.LSS,
|
||||
"<=": token.LEQ,
|
||||
">": token.GTR,
|
||||
">=": token.GEQ,
|
||||
}
|
||||
|
||||
func panicHandler(v *Value) {
|
||||
switch p := recover().(type) {
|
||||
case nil:
|
||||
// nothing to do
|
||||
case string:
|
||||
*v = MakeString(p)
|
||||
case error:
|
||||
*v = MakeString(p.Error())
|
||||
default:
|
||||
panic(p)
|
||||
}
|
||||
}
|
||||
|
||||
func doOp(x Value, op token.Token, y Value) (z Value) {
|
||||
defer panicHandler(&z)
|
||||
|
||||
if x == nil {
|
||||
return UnaryOp(op, y, -1)
|
||||
}
|
||||
|
||||
switch op {
|
||||
case token.EQL, token.NEQ, token.LSS, token.LEQ, token.GTR, token.GEQ:
|
||||
return MakeBool(Compare(x, op, y))
|
||||
case token.SHL, token.SHR:
|
||||
s, _ := Int64Val(y)
|
||||
return Shift(x, op, uint(s))
|
||||
default:
|
||||
return BinaryOp(x, op, y)
|
||||
}
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Other tests
|
||||
|
||||
var fracTests = []string{
|
||||
"0 0 1",
|
||||
"1 1 1",
|
||||
"-1 -1 1",
|
||||
"1.2 6 5",
|
||||
"-0.991 -991 1000",
|
||||
"1e100 1e100 1",
|
||||
}
|
||||
|
||||
func TestFractions(t *testing.T) {
|
||||
for _, test := range fracTests {
|
||||
a := strings.Split(test, " ")
|
||||
if len(a) != 3 {
|
||||
t.Errorf("invalid test case: %s", test)
|
||||
continue
|
||||
}
|
||||
|
||||
x := val(a[0])
|
||||
n := val(a[1])
|
||||
d := val(a[2])
|
||||
|
||||
if got := Num(x); !eql(got, n) {
|
||||
t.Errorf("%s: got num = %s; want %s", test, got, n)
|
||||
}
|
||||
|
||||
if got := Denom(x); !eql(got, d) {
|
||||
t.Errorf("%s: got denom = %s; want %s", test, got, d)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var bytesTests = []string{
|
||||
"0",
|
||||
"1",
|
||||
"123456789",
|
||||
"123456789012345678901234567890123456789012345678901234567890",
|
||||
}
|
||||
|
||||
func TestBytes(t *testing.T) {
|
||||
for _, test := range bytesTests {
|
||||
x := val(test)
|
||||
bytes := Bytes(x)
|
||||
|
||||
// special case 0
|
||||
if Sign(x) == 0 && len(bytes) != 0 {
|
||||
t.Errorf("%s: got %v; want empty byte slice", test, bytes)
|
||||
}
|
||||
|
||||
if n := len(bytes); n > 0 && bytes[n-1] == 0 {
|
||||
t.Errorf("%s: got %v; want no leading 0 byte", test, bytes)
|
||||
}
|
||||
|
||||
if got := MakeFromBytes(bytes); !eql(got, x) {
|
||||
t.Errorf("%s: got %s; want %s (bytes = %v)", test, got, x, bytes)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestUnknown(t *testing.T) {
|
||||
u := MakeUnknown()
|
||||
var values = []Value{
|
||||
u,
|
||||
MakeBool(false), // token.ADD ok below, operation is never considered
|
||||
MakeString(""),
|
||||
MakeInt64(1),
|
||||
MakeFromLiteral("-1234567890123456789012345678901234567890", token.INT),
|
||||
MakeFloat64(1.2),
|
||||
MakeImag(MakeFloat64(1.2)),
|
||||
}
|
||||
for _, val := range values {
|
||||
x, y := val, u
|
||||
for i := range [2]int{} {
|
||||
if i == 1 {
|
||||
x, y = y, x
|
||||
}
|
||||
if got := BinaryOp(x, token.ADD, y); got.Kind() != Unknown {
|
||||
t.Errorf("%s + %s: got %s; want %s", x, y, got, u)
|
||||
}
|
||||
if got := Compare(x, token.EQL, y); got {
|
||||
t.Errorf("%s == %s: got true; want false", x, y)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -1,24 +0,0 @@
|
||||
// Copyright 2014 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build !go1.4
|
||||
|
||||
package exact
|
||||
|
||||
import (
|
||||
"math"
|
||||
"math/big"
|
||||
)
|
||||
|
||||
func ratToFloat32(x *big.Rat) (float32, bool) {
|
||||
// Before 1.4, there's no Rat.Float32.
|
||||
// Emulate it, albeit at the cost of
|
||||
// imprecision in corner cases.
|
||||
x64, exact := x.Float64()
|
||||
x32 := float32(x64)
|
||||
if math.IsInf(float64(x32), 0) {
|
||||
exact = false
|
||||
}
|
||||
return x32, exact
|
||||
}
|
@ -1,13 +0,0 @@
|
||||
// Copyright 2014 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build go1.4
|
||||
|
||||
package exact
|
||||
|
||||
import "math/big"
|
||||
|
||||
func ratToFloat32(x *big.Rat) (float32, bool) {
|
||||
return x.Float32()
|
||||
}
|
@ -1,95 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package gccgoimporter
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
// Information about a specific installation of gccgo.
|
||||
type GccgoInstallation struct {
|
||||
// Version of gcc (e.g. 4.8.0).
|
||||
GccVersion string
|
||||
|
||||
// Target triple (e.g. x86_64-unknown-linux-gnu).
|
||||
TargetTriple string
|
||||
|
||||
// Built-in library paths used by this installation.
|
||||
LibPaths []string
|
||||
}
|
||||
|
||||
// Ask the driver at the given path for information for this GccgoInstallation.
|
||||
func (inst *GccgoInstallation) InitFromDriver(gccgoPath string) (err error) {
|
||||
cmd := exec.Command(gccgoPath, "-###", "-S", "-x", "go", "-")
|
||||
stderr, err := cmd.StderrPipe()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
err = cmd.Start()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
scanner := bufio.NewScanner(stderr)
|
||||
for scanner.Scan() {
|
||||
line := scanner.Text()
|
||||
switch {
|
||||
case strings.HasPrefix(line, "Target: "):
|
||||
inst.TargetTriple = line[8:]
|
||||
|
||||
case line[0] == ' ':
|
||||
args := strings.Fields(line)
|
||||
for _, arg := range args[1:] {
|
||||
if strings.HasPrefix(arg, "-L") {
|
||||
inst.LibPaths = append(inst.LibPaths, arg[2:])
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stdout, err := exec.Command(gccgoPath, "-dumpversion").Output()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
inst.GccVersion = strings.TrimSpace(string(stdout))
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// Return the list of export search paths for this GccgoInstallation.
|
||||
func (inst *GccgoInstallation) SearchPaths() (paths []string) {
|
||||
for _, lpath := range inst.LibPaths {
|
||||
spath := filepath.Join(lpath, "go", inst.GccVersion)
|
||||
fi, err := os.Stat(spath)
|
||||
if err != nil || !fi.IsDir() {
|
||||
continue
|
||||
}
|
||||
paths = append(paths, spath)
|
||||
|
||||
spath = filepath.Join(spath, inst.TargetTriple)
|
||||
fi, err = os.Stat(spath)
|
||||
if err != nil || !fi.IsDir() {
|
||||
continue
|
||||
}
|
||||
paths = append(paths, spath)
|
||||
}
|
||||
|
||||
paths = append(paths, inst.LibPaths...)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// Return an importer that searches incpaths followed by the gcc installation's
|
||||
// built-in search paths and the current directory.
|
||||
func (inst *GccgoInstallation) GetImporter(incpaths []string, initmap map[*types.Package]InitData) types.Importer {
|
||||
return GetImporter(append(append(incpaths, inst.SearchPaths()...), "."), initmap)
|
||||
}
|
@ -1,194 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package gccgoimporter
|
||||
|
||||
import (
|
||||
"runtime"
|
||||
"testing"
|
||||
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
var importablePackages = [...]string{
|
||||
"archive/tar",
|
||||
"archive/zip",
|
||||
"bufio",
|
||||
"bytes",
|
||||
"compress/bzip2",
|
||||
"compress/flate",
|
||||
"compress/gzip",
|
||||
"compress/lzw",
|
||||
"compress/zlib",
|
||||
"container/heap",
|
||||
"container/list",
|
||||
"container/ring",
|
||||
"crypto/aes",
|
||||
"crypto/cipher",
|
||||
"crypto/des",
|
||||
"crypto/dsa",
|
||||
"crypto/ecdsa",
|
||||
"crypto/elliptic",
|
||||
"crypto",
|
||||
"crypto/hmac",
|
||||
"crypto/md5",
|
||||
"crypto/rand",
|
||||
"crypto/rc4",
|
||||
"crypto/rsa",
|
||||
"crypto/sha1",
|
||||
"crypto/sha256",
|
||||
"crypto/sha512",
|
||||
"crypto/subtle",
|
||||
"crypto/tls",
|
||||
"crypto/x509",
|
||||
"crypto/x509/pkix",
|
||||
"database/sql/driver",
|
||||
"database/sql",
|
||||
"debug/dwarf",
|
||||
"debug/elf",
|
||||
"debug/gosym",
|
||||
"debug/macho",
|
||||
"debug/pe",
|
||||
"encoding/ascii85",
|
||||
"encoding/asn1",
|
||||
"encoding/base32",
|
||||
"encoding/base64",
|
||||
"encoding/binary",
|
||||
"encoding/csv",
|
||||
"encoding/gob",
|
||||
"encoding",
|
||||
"encoding/hex",
|
||||
"encoding/json",
|
||||
"encoding/pem",
|
||||
"encoding/xml",
|
||||
"errors",
|
||||
"exp/proxy",
|
||||
"exp/terminal",
|
||||
"expvar",
|
||||
"flag",
|
||||
"fmt",
|
||||
"go/ast",
|
||||
"go/build",
|
||||
"go/doc",
|
||||
"go/format",
|
||||
"go/parser",
|
||||
"go/printer",
|
||||
"go/scanner",
|
||||
"go/token",
|
||||
"hash/adler32",
|
||||
"hash/crc32",
|
||||
"hash/crc64",
|
||||
"hash/fnv",
|
||||
"hash",
|
||||
"html",
|
||||
"html/template",
|
||||
"image/color",
|
||||
"image/color/palette",
|
||||
"image/draw",
|
||||
"image/gif",
|
||||
"image",
|
||||
"image/jpeg",
|
||||
"image/png",
|
||||
"index/suffixarray",
|
||||
"io",
|
||||
"io/ioutil",
|
||||
"log",
|
||||
"log/syslog",
|
||||
"math/big",
|
||||
"math/cmplx",
|
||||
"math",
|
||||
"math/rand",
|
||||
"mime",
|
||||
"mime/multipart",
|
||||
"net",
|
||||
"net/http/cgi",
|
||||
"net/http/cookiejar",
|
||||
"net/http/fcgi",
|
||||
"net/http",
|
||||
"net/http/httptest",
|
||||
"net/http/httputil",
|
||||
"net/http/pprof",
|
||||
"net/mail",
|
||||
"net/rpc",
|
||||
"net/rpc/jsonrpc",
|
||||
"net/smtp",
|
||||
"net/textproto",
|
||||
"net/url",
|
||||
"old/regexp",
|
||||
"old/template",
|
||||
"os/exec",
|
||||
"os",
|
||||
"os/signal",
|
||||
"os/user",
|
||||
"path/filepath",
|
||||
"path",
|
||||
"reflect",
|
||||
"regexp",
|
||||
"regexp/syntax",
|
||||
"runtime/debug",
|
||||
"runtime",
|
||||
"runtime/pprof",
|
||||
"sort",
|
||||
"strconv",
|
||||
"strings",
|
||||
"sync/atomic",
|
||||
"sync",
|
||||
"syscall",
|
||||
"testing",
|
||||
"testing/iotest",
|
||||
"testing/quick",
|
||||
"text/scanner",
|
||||
"text/tabwriter",
|
||||
"text/template",
|
||||
"text/template/parse",
|
||||
"time",
|
||||
"unicode",
|
||||
"unicode/utf16",
|
||||
"unicode/utf8",
|
||||
}
|
||||
|
||||
func TestInstallationImporter(t *testing.T) {
|
||||
// This test relies on gccgo being around, which it most likely will be if we
|
||||
// were compiled with gccgo.
|
||||
if runtime.Compiler != "gccgo" {
|
||||
t.Skip("This test needs gccgo")
|
||||
return
|
||||
}
|
||||
|
||||
var inst GccgoInstallation
|
||||
err := inst.InitFromDriver("gccgo")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
imp := inst.GetImporter(nil, nil)
|
||||
|
||||
// Ensure we don't regress the number of packages we can parse. First import
|
||||
// all packages into the same map and then each individually.
|
||||
pkgMap := make(map[string]*types.Package)
|
||||
for _, pkg := range importablePackages {
|
||||
_, err = imp(pkgMap, pkg)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
}
|
||||
|
||||
for _, pkg := range importablePackages {
|
||||
_, err = imp(make(map[string]*types.Package), pkg)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
}
|
||||
|
||||
// Test for certain specific entities in the imported data.
|
||||
for _, test := range [...]importerTest{
|
||||
{pkgpath: "io", name: "Reader", want: "type Reader interface{Read(p []uint8) (n int, err error)}"},
|
||||
{pkgpath: "io", name: "ReadWriter", want: "type ReadWriter interface{Reader; Writer}"},
|
||||
{pkgpath: "math", name: "Pi", want: "const Pi untyped float"},
|
||||
{pkgpath: "math", name: "Sin", want: "func Sin(x float64) float64"},
|
||||
{pkgpath: "sort", name: "Ints", want: "func Ints(a []int)"},
|
||||
{pkgpath: "unsafe", name: "Pointer", want: "type Pointer unsafe.Pointer"},
|
||||
} {
|
||||
runImporterTest(t, imp, nil, &test)
|
||||
}
|
||||
}
|
@ -1,205 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package gccgoimporter implements Import for gccgo-generated object files.
|
||||
package gccgoimporter // import "golang.org/x/tools/go/gccgoimporter"
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"debug/elf"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/go/importer"
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
// A PackageInit describes an imported package that needs initialization.
|
||||
type PackageInit struct {
|
||||
Name string // short package name
|
||||
InitFunc string // name of init function
|
||||
Priority int // priority of init function, see InitData.Priority
|
||||
}
|
||||
|
||||
// The gccgo-specific init data for a package.
|
||||
type InitData struct {
|
||||
// Initialization priority of this package relative to other packages.
|
||||
// This is based on the maximum depth of the package's dependency graph;
|
||||
// it is guaranteed to be greater than that of its dependencies.
|
||||
Priority int
|
||||
|
||||
// The list of packages which this package depends on to be initialized,
|
||||
// including itself if needed. This is the subset of the transitive closure of
|
||||
// the package's dependencies that need initialization.
|
||||
Inits []PackageInit
|
||||
}
|
||||
|
||||
// Locate the file from which to read export data.
|
||||
// This is intended to replicate the logic in gofrontend.
|
||||
func findExportFile(searchpaths []string, pkgpath string) (string, error) {
|
||||
for _, spath := range searchpaths {
|
||||
pkgfullpath := filepath.Join(spath, pkgpath)
|
||||
pkgdir, name := filepath.Split(pkgfullpath)
|
||||
|
||||
for _, filepath := range [...]string{
|
||||
pkgfullpath,
|
||||
pkgfullpath + ".gox",
|
||||
pkgdir + "lib" + name + ".so",
|
||||
pkgdir + "lib" + name + ".a",
|
||||
pkgfullpath + ".o",
|
||||
} {
|
||||
fi, err := os.Stat(filepath)
|
||||
if err == nil && !fi.IsDir() {
|
||||
return filepath, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return "", fmt.Errorf("%s: could not find export data (tried %s)", pkgpath, strings.Join(searchpaths, ":"))
|
||||
}
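// Added note for this write-up (not part of the original file): for a
// hypothetical search path "/usr/lib/go" and pkgpath "foo/bar", findExportFile
// probes, in order,
//
//	/usr/lib/go/foo/bar
//	/usr/lib/go/foo/bar.gox
//	/usr/lib/go/foo/libbar.so
//	/usr/lib/go/foo/libbar.a
//	/usr/lib/go/foo/bar.o
//
// and returns the first path that names a regular file.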
|
||||
|
||||
const (
|
||||
gccgov1Magic = "v1;\n"
|
||||
goimporterMagic = "\n$$ "
|
||||
archiveMagic = "!<ar"
|
||||
)
|
||||
|
||||
// Opens the export data file at the given path. If this is an ELF file,
|
||||
// searches for and opens the .go_export section. If this is an archive,
|
||||
// reads the export data from the first member, which is assumed to be an ELF file.
|
||||
// This is intended to replicate the logic in gofrontend.
|
||||
func openExportFile(fpath string) (reader io.ReadSeeker, closer io.Closer, err error) {
|
||||
f, err := os.Open(fpath)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
closer = f
|
||||
defer func() {
|
||||
if err != nil && closer != nil {
|
||||
f.Close()
|
||||
}
|
||||
}()
|
||||
|
||||
var magic [4]byte
|
||||
_, err = f.ReadAt(magic[:], 0)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
// reset to offset 0 - needed on Plan 9 (see issue #11265)
|
||||
// TODO: remove once issue #11265 has been resolved.
|
||||
_, err = f.Seek(0, 0)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
var elfreader io.ReaderAt
|
||||
switch string(magic[:]) {
|
||||
case gccgov1Magic, goimporterMagic:
|
||||
// Raw export data.
|
||||
reader = f
|
||||
return
|
||||
|
||||
case archiveMagic:
|
||||
// TODO(pcc): Read the archive directly instead of using "ar".
|
||||
f.Close()
|
||||
closer = nil
|
||||
|
||||
cmd := exec.Command("ar", "p", fpath)
|
||||
var out []byte
|
||||
out, err = cmd.Output()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
elfreader = bytes.NewReader(out)
|
||||
|
||||
default:
|
||||
elfreader = f
|
||||
}
|
||||
|
||||
ef, err := elf.NewFile(elfreader)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
sec := ef.Section(".go_export")
|
||||
if sec == nil {
|
||||
err = fmt.Errorf("%s: .go_export section not found", fpath)
|
||||
return
|
||||
}
|
||||
|
||||
reader = sec.Open()
|
||||
return
|
||||
}
|
||||
|
||||
func GetImporter(searchpaths []string, initmap map[*types.Package]InitData) types.Importer {
|
||||
return func(imports map[string]*types.Package, pkgpath string) (pkg *types.Package, err error) {
|
||||
if pkgpath == "unsafe" {
|
||||
return types.Unsafe, nil
|
||||
}
|
||||
|
||||
fpath, err := findExportFile(searchpaths, pkgpath)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
reader, closer, err := openExportFile(fpath)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
if closer != nil {
|
||||
defer closer.Close()
|
||||
}
|
||||
|
||||
var magic [4]byte
|
||||
_, err = reader.Read(magic[:])
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
_, err = reader.Seek(0, 0)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
switch string(magic[:]) {
|
||||
case gccgov1Magic:
|
||||
var p parser
|
||||
p.init(fpath, reader, imports)
|
||||
pkg = p.parsePackage()
|
||||
if initmap != nil {
|
||||
initmap[pkg] = p.initdata
|
||||
}
|
||||
|
||||
case goimporterMagic:
|
||||
var data []byte
|
||||
data, err = ioutil.ReadAll(reader)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
var n int
|
||||
n, pkg, err = importer.ImportData(imports, data)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
if initmap != nil {
|
||||
suffixreader := bytes.NewReader(data[n:])
|
||||
var p parser
|
||||
p.init(fpath, suffixreader, nil)
|
||||
p.parseInitData()
|
||||
initmap[pkg] = p.initdata
|
||||
}
|
||||
|
||||
default:
|
||||
err = fmt.Errorf("unrecognized magic string: %q", string(magic[:]))
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
}
|
@ -1,170 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package gccgoimporter
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"testing"
|
||||
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
type importerTest struct {
|
||||
pkgpath, name, want, wantval string
|
||||
wantinits []string
|
||||
}
|
||||
|
||||
func runImporterTest(t *testing.T, imp types.Importer, initmap map[*types.Package]InitData, test *importerTest) {
|
||||
pkg, err := imp(make(map[string]*types.Package), test.pkgpath)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
|
||||
if test.name != "" {
|
||||
obj := pkg.Scope().Lookup(test.name)
|
||||
if obj == nil {
|
||||
t.Errorf("%s: object not found", test.name)
|
||||
return
|
||||
}
|
||||
|
||||
got := types.ObjectString(obj, types.RelativeTo(pkg))
|
||||
if got != test.want {
|
||||
t.Errorf("%s: got %q; want %q", test.name, got, test.want)
|
||||
}
|
||||
|
||||
if test.wantval != "" {
|
||||
gotval := obj.(*types.Const).Val().String()
|
||||
if gotval != test.wantval {
|
||||
t.Errorf("%s: got val %q; want val %q", test.name, gotval, test.wantval)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(test.wantinits) > 0 {
|
||||
initdata := initmap[pkg]
|
||||
found := false
|
||||
// Check that the package's own init function has the package's priority
|
||||
for _, pkginit := range initdata.Inits {
|
||||
if pkginit.InitFunc == test.wantinits[0] {
|
||||
if initdata.Priority != pkginit.Priority {
|
||||
t.Errorf("%s: got self priority %d; want %d", test.pkgpath, pkginit.Priority, initdata.Priority)
|
||||
}
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !found {
|
||||
t.Errorf("%s: could not find expected function %q", test.pkgpath, test.wantinits[0])
|
||||
}
|
||||
|
||||
// Each init function in the list other than the first one is a
|
||||
// dependency of the function immediately before it. Check that
|
||||
// the init functions appear in descending priority order.
|
||||
priority := initdata.Priority
|
||||
for _, wantdepinit := range test.wantinits[1:] {
|
||||
found = false
|
||||
for _, pkginit := range initdata.Inits {
|
||||
if pkginit.InitFunc == wantdepinit {
|
||||
if priority <= pkginit.Priority {
|
||||
t.Errorf("%s: got dep priority %d; want less than %d", test.pkgpath, pkginit.Priority, priority)
|
||||
}
|
||||
found = true
|
||||
priority = pkginit.Priority
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !found {
|
||||
t.Errorf("%s: could not find expected function %q", test.pkgpath, wantdepinit)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var importerTests = [...]importerTest{
|
||||
{pkgpath: "pointer", name: "Int8Ptr", want: "type Int8Ptr *int8"},
|
||||
{pkgpath: "complexnums", name: "NN", want: "const NN untyped complex", wantval: "(-1/1 + -1/1i)"},
|
||||
{pkgpath: "complexnums", name: "NP", want: "const NP untyped complex", wantval: "(-1/1 + 1/1i)"},
|
||||
{pkgpath: "complexnums", name: "PN", want: "const PN untyped complex", wantval: "(1/1 + -1/1i)"},
|
||||
{pkgpath: "complexnums", name: "PP", want: "const PP untyped complex", wantval: "(1/1 + 1/1i)"},
|
||||
// TODO: enable this entry once bug has been tracked down
|
||||
//{pkgpath: "imports", wantinits: []string{"imports..import", "fmt..import", "math..import"}},
|
||||
}
|
||||
|
||||
func TestGoxImporter(t *testing.T) {
|
||||
if runtime.GOOS == "android" {
|
||||
t.Skipf("no testdata directory on %s", runtime.GOOS)
|
||||
}
|
||||
initmap := make(map[*types.Package]InitData)
|
||||
imp := GetImporter([]string{"testdata"}, initmap)
|
||||
|
||||
for _, test := range importerTests {
|
||||
runImporterTest(t, imp, initmap, &test)
|
||||
}
|
||||
}
|
||||
|
||||
func TestObjImporter(t *testing.T) {
|
||||
// This test relies on gccgo being around, which it most likely will be if we
|
||||
// were compiled with gccgo.
|
||||
if runtime.Compiler != "gccgo" {
|
||||
t.Skip("This test needs gccgo")
|
||||
return
|
||||
}
|
||||
|
||||
tmpdir, err := ioutil.TempDir("", "")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
initmap := make(map[*types.Package]InitData)
|
||||
imp := GetImporter([]string{tmpdir}, initmap)
|
||||
|
||||
artmpdir, err := ioutil.TempDir("", "")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
arinitmap := make(map[*types.Package]InitData)
|
||||
arimp := GetImporter([]string{artmpdir}, arinitmap)
|
||||
|
||||
for _, test := range importerTests {
|
||||
gofile := filepath.Join("testdata", test.pkgpath+".go")
|
||||
ofile := filepath.Join(tmpdir, test.pkgpath+".o")
|
||||
afile := filepath.Join(artmpdir, "lib"+test.pkgpath+".a")
|
||||
|
||||
cmd := exec.Command("gccgo", "-fgo-pkgpath="+test.pkgpath, "-c", "-o", ofile, gofile)
|
||||
out, err := cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
t.Logf("%s", out)
|
||||
t.Fatalf("gccgo %s failed: %s", gofile, err)
|
||||
}
|
||||
|
||||
runImporterTest(t, imp, initmap, &test)
|
||||
|
||||
cmd = exec.Command("ar", "cr", afile, ofile)
|
||||
out, err = cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
t.Logf("%s", out)
|
||||
t.Fatalf("ar cr %s %s failed: %s", afile, ofile, err)
|
||||
}
|
||||
|
||||
runImporterTest(t, arimp, arinitmap, &test)
|
||||
|
||||
if err = os.Remove(ofile); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if err = os.Remove(afile); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
if err = os.Remove(tmpdir); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
}
|
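The deleted test above verifies that a package's dependency init functions are recorded in strictly descending priority order. A standalone sketch of just that check, using a PackageInit shape that mirrors the struct defined in the deleted parser; the sample values are taken from the imports.gox test data further below:

// Sketch of the ordering check performed by runImporterTest above:
// dependency init functions must appear in strictly descending priority.
// PackageInit mirrors the struct from the deleted gccgoimporter package.
package main

import "fmt"

type PackageInit struct {
	Name     string
	InitFunc string
	Priority int
}

func descendingPriority(inits []PackageInit) bool {
	for i := 1; i < len(inits); i++ {
		if inits[i].Priority >= inits[i-1].Priority {
			return false
		}
	}
	return true
}

func main() {
	// Values taken from the imports.gox test data below.
	inits := []PackageInit{
		{Name: "imports", InitFunc: "imports..import", Priority: 7},
		{Name: "fmt", InitFunc: "fmt..import", Priority: 6},
		{Name: "math", InitFunc: "math..import", Priority: 1},
	}
	fmt.Println(descendingPriority(inits)) // true
}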
@ -1,856 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package gccgoimporter
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"go/token"
|
||||
"io"
|
||||
"strconv"
|
||||
"strings"
|
||||
"text/scanner"
|
||||
|
||||
"golang.org/x/tools/go/exact"
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
type parser struct {
|
||||
scanner scanner.Scanner
|
||||
tok rune // current token
|
||||
lit string // literal string; only valid for Ident, Int, String tokens
|
||||
pkgpath string // package path of imported package
|
||||
pkgname string // name of imported package
|
||||
pkg *types.Package // reference to imported package
|
||||
imports map[string]*types.Package // package path -> package object
|
||||
typeMap map[int]types.Type // type number -> type
|
||||
initdata InitData // package init priority data
|
||||
}
|
||||
|
||||
func (p *parser) init(filename string, src io.Reader, imports map[string]*types.Package) {
|
||||
p.scanner.Init(src)
|
||||
p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) }
|
||||
p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments
|
||||
p.scanner.Whitespace = 1<<'\t' | 1<<'\n' | 1<<' '
|
||||
p.scanner.Filename = filename // for good error messages
|
||||
p.next()
|
||||
p.imports = imports
|
||||
p.typeMap = make(map[int]types.Type)
|
||||
}
|
||||
|
||||
type importError struct {
|
||||
pos scanner.Position
|
||||
err error
|
||||
}
|
||||
|
||||
func (e importError) Error() string {
|
||||
return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err)
|
||||
}
|
||||
|
||||
func (p *parser) error(err interface{}) {
|
||||
if s, ok := err.(string); ok {
|
||||
err = errors.New(s)
|
||||
}
|
||||
// panic with a runtime.Error if err is not an error
|
||||
panic(importError{p.scanner.Pos(), err.(error)})
|
||||
}
|
||||
|
||||
func (p *parser) errorf(format string, args ...interface{}) {
|
||||
p.error(fmt.Errorf(format, args...))
|
||||
}
|
||||
|
||||
func (p *parser) expect(tok rune) string {
|
||||
lit := p.lit
|
||||
if p.tok != tok {
|
||||
p.errorf("expected %s, got %s (%s)", scanner.TokenString(tok), scanner.TokenString(p.tok), lit)
|
||||
}
|
||||
p.next()
|
||||
return lit
|
||||
}
|
||||
|
||||
func (p *parser) expectKeyword(keyword string) {
|
||||
lit := p.expect(scanner.Ident)
|
||||
if lit != keyword {
|
||||
p.errorf("expected keyword %s, got %q", keyword, lit)
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) parseString() string {
|
||||
str, err := strconv.Unquote(p.expect(scanner.String))
|
||||
if err != nil {
|
||||
p.error(err)
|
||||
}
|
||||
return str
|
||||
}
|
||||
|
||||
// unquotedString = { unquotedStringChar } .
|
||||
// unquotedStringChar = <neither a whitespace nor a ';' char> .
|
||||
func (p *parser) parseUnquotedString() string {
|
||||
if p.tok == scanner.EOF {
|
||||
p.error("unexpected EOF")
|
||||
}
|
||||
var buf bytes.Buffer
|
||||
buf.WriteString(p.scanner.TokenText())
|
||||
// This loop needs to examine each character before deciding whether to consume it. If we see a semicolon,
|
||||
// we need to let it be consumed by p.next().
|
||||
for ch := p.scanner.Peek(); ch != ';' && ch != scanner.EOF && p.scanner.Whitespace&(1<<uint(ch)) == 0; ch = p.scanner.Peek() {
|
||||
buf.WriteRune(ch)
|
||||
p.scanner.Next()
|
||||
}
|
||||
p.next()
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
func (p *parser) next() {
|
||||
p.tok = p.scanner.Scan()
|
||||
switch p.tok {
|
||||
case scanner.Ident, scanner.Int, scanner.Float, scanner.String, '·':
|
||||
p.lit = p.scanner.TokenText()
|
||||
default:
|
||||
p.lit = ""
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) parseQualifiedName() (path, name string) {
|
||||
return p.parseQualifiedNameStr(p.parseString())
|
||||
}
|
||||
|
||||
func (p *parser) parseUnquotedQualifiedName() (path, name string) {
|
||||
return p.parseQualifiedNameStr(p.parseUnquotedString())
|
||||
}
|
||||
|
||||
// qualifiedName = [ ["."] unquotedString "." ] unquotedString .
|
||||
//
|
||||
// The above production uses greedy matching.
|
||||
func (p *parser) parseQualifiedNameStr(unquotedName string) (pkgpath, name string) {
|
||||
parts := strings.Split(unquotedName, ".")
|
||||
if parts[0] == "" {
|
||||
parts = parts[1:]
|
||||
}
|
||||
|
||||
switch len(parts) {
|
||||
case 0:
|
||||
p.errorf("malformed qualified name: %q", unquotedName)
|
||||
case 1:
|
||||
// unqualified name
|
||||
pkgpath = p.pkgpath
|
||||
name = parts[0]
|
||||
default:
|
||||
// qualified name, which may contain periods
|
||||
pkgpath = strings.Join(parts[0:len(parts)-1], ".")
|
||||
name = parts[len(parts)-1]
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// getPkg returns the package for a given path. If the package is
|
||||
// not found but we have a package name, create the package and
|
||||
// add it to the p.imports map.
|
||||
//
|
||||
func (p *parser) getPkg(pkgpath, name string) *types.Package {
|
||||
// package unsafe is not in the imports map - handle explicitly
|
||||
if pkgpath == "unsafe" {
|
||||
return types.Unsafe
|
||||
}
|
||||
pkg := p.imports[pkgpath]
|
||||
if pkg == nil && name != "" {
|
||||
pkg = types.NewPackage(pkgpath, name)
|
||||
p.imports[pkgpath] = pkg
|
||||
}
|
||||
return pkg
|
||||
}
|
||||
|
||||
// parseExportedName is like parseQualifiedName, but
|
||||
// the package path is resolved to an imported *types.Package.
|
||||
//
|
||||
// ExportedName = string [string] .
|
||||
func (p *parser) parseExportedName() (pkg *types.Package, name string) {
|
||||
path, name := p.parseQualifiedName()
|
||||
var pkgname string
|
||||
if p.tok == scanner.String {
|
||||
pkgname = p.parseString()
|
||||
}
|
||||
pkg = p.getPkg(path, pkgname)
|
||||
if pkg == nil {
|
||||
p.errorf("package %s (path = %q) not found", name, path)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Name = QualifiedName | "?" .
|
||||
func (p *parser) parseName() string {
|
||||
if p.tok == '?' {
|
||||
// Anonymous.
|
||||
p.next()
|
||||
return ""
|
||||
}
|
||||
// The package path is redundant for us. Don't try to parse it.
|
||||
_, name := p.parseUnquotedQualifiedName()
|
||||
return name
|
||||
}
|
||||
|
||||
func deref(typ types.Type) types.Type {
|
||||
if p, _ := typ.(*types.Pointer); p != nil {
|
||||
typ = p.Elem()
|
||||
}
|
||||
return typ
|
||||
}
|
||||
|
||||
// Field = Name Type [string] .
|
||||
func (p *parser) parseField(pkg *types.Package) (field *types.Var, tag string) {
|
||||
name := p.parseName()
|
||||
typ := p.parseType(pkg)
|
||||
anon := false
|
||||
if name == "" {
|
||||
anon = true
|
||||
switch typ := deref(typ).(type) {
|
||||
case *types.Basic:
|
||||
name = typ.Name()
|
||||
case *types.Named:
|
||||
name = typ.Obj().Name()
|
||||
default:
|
||||
p.error("anonymous field expected")
|
||||
}
|
||||
}
|
||||
field = types.NewField(token.NoPos, pkg, name, typ, anon)
|
||||
if p.tok == scanner.String {
|
||||
tag = p.parseString()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Param = Name ["..."] Type .
|
||||
func (p *parser) parseParam(pkg *types.Package) (param *types.Var, isVariadic bool) {
|
||||
name := p.parseName()
|
||||
if p.tok == '.' {
|
||||
p.next()
|
||||
p.expect('.')
|
||||
p.expect('.')
|
||||
isVariadic = true
|
||||
}
|
||||
typ := p.parseType(pkg)
|
||||
if isVariadic {
|
||||
typ = types.NewSlice(typ)
|
||||
}
|
||||
param = types.NewParam(token.NoPos, pkg, name, typ)
|
||||
return
|
||||
}
|
||||
|
||||
// Var = Name Type .
|
||||
func (p *parser) parseVar(pkg *types.Package) *types.Var {
|
||||
name := p.parseName()
|
||||
return types.NewVar(token.NoPos, pkg, name, p.parseType(pkg))
|
||||
}
|
||||
|
||||
// ConstValue = string | "false" | "true" | ["-"] (int ["'"] | FloatOrComplex) .
|
||||
// FloatOrComplex = float ["i" | ("+"|"-") float "i"] .
|
||||
func (p *parser) parseConstValue() (val exact.Value, typ types.Type) {
|
||||
switch p.tok {
|
||||
case scanner.String:
|
||||
str := p.parseString()
|
||||
val = exact.MakeString(str)
|
||||
typ = types.Typ[types.UntypedString]
|
||||
return
|
||||
|
||||
case scanner.Ident:
|
||||
b := false
|
||||
switch p.lit {
|
||||
case "false":
|
||||
case "true":
|
||||
b = true
|
||||
|
||||
default:
|
||||
p.errorf("expected const value, got %s (%q)", scanner.TokenString(p.tok), p.lit)
|
||||
}
|
||||
|
||||
p.next()
|
||||
val = exact.MakeBool(b)
|
||||
typ = types.Typ[types.UntypedBool]
|
||||
return
|
||||
}
|
||||
|
||||
sign := ""
|
||||
if p.tok == '-' {
|
||||
p.next()
|
||||
sign = "-"
|
||||
}
|
||||
|
||||
switch p.tok {
|
||||
case scanner.Int:
|
||||
val = exact.MakeFromLiteral(sign+p.lit, token.INT)
|
||||
if val == nil {
|
||||
p.error("could not parse integer literal")
|
||||
}
|
||||
|
||||
p.next()
|
||||
if p.tok == '\'' {
|
||||
p.next()
|
||||
typ = types.Typ[types.UntypedRune]
|
||||
} else {
|
||||
typ = types.Typ[types.UntypedInt]
|
||||
}
|
||||
|
||||
case scanner.Float:
|
||||
re := sign + p.lit
|
||||
p.next()
|
||||
|
||||
var im string
|
||||
switch p.tok {
|
||||
case '+':
|
||||
p.next()
|
||||
im = p.expect(scanner.Float)
|
||||
|
||||
case '-':
|
||||
p.next()
|
||||
im = "-" + p.expect(scanner.Float)
|
||||
|
||||
case scanner.Ident:
|
||||
// re is in fact the imaginary component. Expect "i" below.
|
||||
im = re
|
||||
re = "0"
|
||||
|
||||
default:
|
||||
val = exact.MakeFromLiteral(re, token.FLOAT)
|
||||
if val == nil {
|
||||
p.error("could not parse float literal")
|
||||
}
|
||||
typ = types.Typ[types.UntypedFloat]
|
||||
return
|
||||
}
|
||||
|
||||
p.expectKeyword("i")
|
||||
reval := exact.MakeFromLiteral(re, token.FLOAT)
|
||||
if reval == nil {
|
||||
p.error("could not parse real component of complex literal")
|
||||
}
|
||||
imval := exact.MakeFromLiteral(im+"i", token.IMAG)
|
||||
if imval == nil {
|
||||
p.error("could not parse imag component of complex literal")
|
||||
}
|
||||
val = exact.BinaryOp(reval, token.ADD, imval)
|
||||
typ = types.Typ[types.UntypedComplex]
|
||||
|
||||
default:
|
||||
p.errorf("expected const value, got %s (%q)", scanner.TokenString(p.tok), p.lit)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// Const = Name [Type] "=" ConstValue .
|
||||
func (p *parser) parseConst(pkg *types.Package) *types.Const {
|
||||
name := p.parseName()
|
||||
var typ types.Type
|
||||
if p.tok == '<' {
|
||||
typ = p.parseType(pkg)
|
||||
}
|
||||
p.expect('=')
|
||||
val, vtyp := p.parseConstValue()
|
||||
if typ == nil {
|
||||
typ = vtyp
|
||||
}
|
||||
return types.NewConst(token.NoPos, pkg, name, typ, val)
|
||||
}
|
||||
|
||||
// TypeName = ExportedName .
|
||||
func (p *parser) parseTypeName() *types.TypeName {
|
||||
pkg, name := p.parseExportedName()
|
||||
scope := pkg.Scope()
|
||||
if obj := scope.Lookup(name); obj != nil {
|
||||
return obj.(*types.TypeName)
|
||||
}
|
||||
obj := types.NewTypeName(token.NoPos, pkg, name, nil)
|
||||
// a named type may be referred to before the underlying type
|
||||
// is known - set it up
|
||||
types.NewNamed(obj, nil, nil)
|
||||
scope.Insert(obj)
|
||||
return obj
|
||||
}
|
||||
|
||||
// NamedType = TypeName Type { Method } .
|
||||
// Method = "func" "(" Param ")" Name ParamList ResultList ";" .
|
||||
func (p *parser) parseNamedType(n int) types.Type {
|
||||
obj := p.parseTypeName()
|
||||
|
||||
pkg := obj.Pkg()
|
||||
typ := obj.Type()
|
||||
p.typeMap[n] = typ
|
||||
|
||||
nt, ok := typ.(*types.Named)
|
||||
if !ok {
|
||||
// This can happen for unsafe.Pointer, which is a TypeName holding a Basic type.
|
||||
pt := p.parseType(pkg)
|
||||
if pt != typ {
|
||||
p.error("unexpected underlying type for non-named TypeName")
|
||||
}
|
||||
return typ
|
||||
}
|
||||
|
||||
underlying := p.parseType(pkg)
|
||||
if nt.Underlying() == nil {
|
||||
nt.SetUnderlying(underlying.Underlying())
|
||||
}
|
||||
|
||||
for p.tok == scanner.Ident {
|
||||
// collect associated methods
|
||||
p.expectKeyword("func")
|
||||
p.expect('(')
|
||||
receiver, _ := p.parseParam(pkg)
|
||||
p.expect(')')
|
||||
name := p.parseName()
|
||||
params, isVariadic := p.parseParamList(pkg)
|
||||
results := p.parseResultList(pkg)
|
||||
p.expect(';')
|
||||
|
||||
sig := types.NewSignature(receiver, params, results, isVariadic)
|
||||
nt.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig))
|
||||
}
|
||||
|
||||
return nt
|
||||
}
|
||||
|
||||
func (p *parser) parseInt() int64 {
|
||||
lit := p.expect(scanner.Int)
|
||||
n, err := strconv.ParseInt(lit, 10, 0)
|
||||
if err != nil {
|
||||
p.error(err)
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
// ArrayOrSliceType = "[" [ int ] "]" Type .
|
||||
func (p *parser) parseArrayOrSliceType(pkg *types.Package) types.Type {
|
||||
p.expect('[')
|
||||
if p.tok == ']' {
|
||||
p.next()
|
||||
return types.NewSlice(p.parseType(pkg))
|
||||
}
|
||||
|
||||
n := p.parseInt()
|
||||
p.expect(']')
|
||||
return types.NewArray(p.parseType(pkg), n)
|
||||
}
|
||||
|
||||
// MapType = "map" "[" Type "]" Type .
|
||||
func (p *parser) parseMapType(pkg *types.Package) types.Type {
|
||||
p.expectKeyword("map")
|
||||
p.expect('[')
|
||||
key := p.parseType(pkg)
|
||||
p.expect(']')
|
||||
elem := p.parseType(pkg)
|
||||
return types.NewMap(key, elem)
|
||||
}
|
||||
|
||||
// ChanType = "chan" ["<-" | "-<"] Type .
|
||||
func (p *parser) parseChanType(pkg *types.Package) types.Type {
|
||||
p.expectKeyword("chan")
|
||||
dir := types.SendRecv
|
||||
switch p.tok {
|
||||
case '-':
|
||||
p.next()
|
||||
p.expect('<')
|
||||
dir = types.SendOnly
|
||||
|
||||
case '<':
|
||||
// don't consume '<' if it belongs to Type
|
||||
if p.scanner.Peek() == '-' {
|
||||
p.next()
|
||||
p.expect('-')
|
||||
dir = types.RecvOnly
|
||||
}
|
||||
}
|
||||
|
||||
return types.NewChan(dir, p.parseType(pkg))
|
||||
}
|
||||
|
||||
// StructType = "struct" "{" { Field } "}" .
|
||||
func (p *parser) parseStructType(pkg *types.Package) types.Type {
|
||||
p.expectKeyword("struct")
|
||||
|
||||
var fields []*types.Var
|
||||
var tags []string
|
||||
|
||||
p.expect('{')
|
||||
for p.tok != '}' && p.tok != scanner.EOF {
|
||||
field, tag := p.parseField(pkg)
|
||||
p.expect(';')
|
||||
fields = append(fields, field)
|
||||
tags = append(tags, tag)
|
||||
}
|
||||
p.expect('}')
|
||||
|
||||
return types.NewStruct(fields, tags)
|
||||
}
|
||||
|
||||
// ParamList = "(" [ { Parameter "," } Parameter ] ")" .
|
||||
func (p *parser) parseParamList(pkg *types.Package) (*types.Tuple, bool) {
|
||||
var list []*types.Var
|
||||
isVariadic := false
|
||||
|
||||
p.expect('(')
|
||||
for p.tok != ')' && p.tok != scanner.EOF {
|
||||
if len(list) > 0 {
|
||||
p.expect(',')
|
||||
}
|
||||
par, variadic := p.parseParam(pkg)
|
||||
list = append(list, par)
|
||||
if variadic {
|
||||
if isVariadic {
|
||||
p.error("... not on final argument")
|
||||
}
|
||||
isVariadic = true
|
||||
}
|
||||
}
|
||||
p.expect(')')
|
||||
|
||||
return types.NewTuple(list...), isVariadic
|
||||
}
|
||||
|
||||
// ResultList = Type | ParamList .
|
||||
func (p *parser) parseResultList(pkg *types.Package) *types.Tuple {
|
||||
switch p.tok {
|
||||
case '<':
|
||||
return types.NewTuple(types.NewParam(token.NoPos, pkg, "", p.parseType(pkg)))
|
||||
|
||||
case '(':
|
||||
params, _ := p.parseParamList(pkg)
|
||||
return params
|
||||
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// FunctionType = ParamList ResultList .
|
||||
func (p *parser) parseFunctionType(pkg *types.Package) *types.Signature {
|
||||
params, isVariadic := p.parseParamList(pkg)
|
||||
results := p.parseResultList(pkg)
|
||||
return types.NewSignature(nil, params, results, isVariadic)
|
||||
}
|
||||
|
||||
// Func = Name FunctionType .
|
||||
func (p *parser) parseFunc(pkg *types.Package) *types.Func {
|
||||
name := p.parseName()
|
||||
if strings.ContainsRune(name, '$') {
|
||||
// This is a Type$equal or Type$hash function, which we don't want to parse,
|
||||
// except for the types.
|
||||
p.discardDirectiveWhileParsingTypes(pkg)
|
||||
return nil
|
||||
}
|
||||
return types.NewFunc(token.NoPos, pkg, name, p.parseFunctionType(pkg))
|
||||
}
|
||||
|
||||
// InterfaceType = "interface" "{" { ("?" Type | Func) ";" } "}" .
|
||||
func (p *parser) parseInterfaceType(pkg *types.Package) types.Type {
|
||||
p.expectKeyword("interface")
|
||||
|
||||
var methods []*types.Func
|
||||
var typs []*types.Named
|
||||
|
||||
p.expect('{')
|
||||
for p.tok != '}' && p.tok != scanner.EOF {
|
||||
if p.tok == '?' {
|
||||
p.next()
|
||||
typs = append(typs, p.parseType(pkg).(*types.Named))
|
||||
} else {
|
||||
method := p.parseFunc(pkg)
|
||||
methods = append(methods, method)
|
||||
}
|
||||
p.expect(';')
|
||||
}
|
||||
p.expect('}')
|
||||
|
||||
return types.NewInterface(methods, typs)
|
||||
}
|
||||
|
||||
// PointerType = "*" ("any" | Type) .
|
||||
func (p *parser) parsePointerType(pkg *types.Package) types.Type {
|
||||
p.expect('*')
|
||||
if p.tok == scanner.Ident {
|
||||
p.expectKeyword("any")
|
||||
return types.Typ[types.UnsafePointer]
|
||||
}
|
||||
return types.NewPointer(p.parseType(pkg))
|
||||
}
|
||||
|
||||
// TypeDefinition = NamedType | MapType | ChanType | StructType | InterfaceType | PointerType | ArrayOrSliceType | FunctionType .
|
||||
func (p *parser) parseTypeDefinition(pkg *types.Package, n int) types.Type {
|
||||
var t types.Type
|
||||
switch p.tok {
|
||||
case scanner.String:
|
||||
t = p.parseNamedType(n)
|
||||
|
||||
case scanner.Ident:
|
||||
switch p.lit {
|
||||
case "map":
|
||||
t = p.parseMapType(pkg)
|
||||
|
||||
case "chan":
|
||||
t = p.parseChanType(pkg)
|
||||
|
||||
case "struct":
|
||||
t = p.parseStructType(pkg)
|
||||
|
||||
case "interface":
|
||||
t = p.parseInterfaceType(pkg)
|
||||
}
|
||||
|
||||
case '*':
|
||||
t = p.parsePointerType(pkg)
|
||||
|
||||
case '[':
|
||||
t = p.parseArrayOrSliceType(pkg)
|
||||
|
||||
case '(':
|
||||
t = p.parseFunctionType(pkg)
|
||||
}
|
||||
|
||||
p.typeMap[n] = t
|
||||
return t
|
||||
}
|
||||
|
||||
const (
|
||||
// From gofrontend/go/export.h
|
||||
// Note that these values are negative in the gofrontend and have been made positive
|
||||
// in the gccgoimporter.
|
||||
gccgoBuiltinINT8 = 1
|
||||
gccgoBuiltinINT16 = 2
|
||||
gccgoBuiltinINT32 = 3
|
||||
gccgoBuiltinINT64 = 4
|
||||
gccgoBuiltinUINT8 = 5
|
||||
gccgoBuiltinUINT16 = 6
|
||||
gccgoBuiltinUINT32 = 7
|
||||
gccgoBuiltinUINT64 = 8
|
||||
gccgoBuiltinFLOAT32 = 9
|
||||
gccgoBuiltinFLOAT64 = 10
|
||||
gccgoBuiltinINT = 11
|
||||
gccgoBuiltinUINT = 12
|
||||
gccgoBuiltinUINTPTR = 13
|
||||
gccgoBuiltinBOOL = 15
|
||||
gccgoBuiltinSTRING = 16
|
||||
gccgoBuiltinCOMPLEX64 = 17
|
||||
gccgoBuiltinCOMPLEX128 = 18
|
||||
gccgoBuiltinERROR = 19
|
||||
gccgoBuiltinBYTE = 20
|
||||
gccgoBuiltinRUNE = 21
|
||||
)
|
||||
|
||||
func lookupBuiltinType(typ int) types.Type {
|
||||
return [...]types.Type{
|
||||
gccgoBuiltinINT8: types.Typ[types.Int8],
|
||||
gccgoBuiltinINT16: types.Typ[types.Int16],
|
||||
gccgoBuiltinINT32: types.Typ[types.Int32],
|
||||
gccgoBuiltinINT64: types.Typ[types.Int64],
|
||||
gccgoBuiltinUINT8: types.Typ[types.Uint8],
|
||||
gccgoBuiltinUINT16: types.Typ[types.Uint16],
|
||||
gccgoBuiltinUINT32: types.Typ[types.Uint32],
|
||||
gccgoBuiltinUINT64: types.Typ[types.Uint64],
|
||||
gccgoBuiltinFLOAT32: types.Typ[types.Float32],
|
||||
gccgoBuiltinFLOAT64: types.Typ[types.Float64],
|
||||
gccgoBuiltinINT: types.Typ[types.Int],
|
||||
gccgoBuiltinUINT: types.Typ[types.Uint],
|
||||
gccgoBuiltinUINTPTR: types.Typ[types.Uintptr],
|
||||
gccgoBuiltinBOOL: types.Typ[types.Bool],
|
||||
gccgoBuiltinSTRING: types.Typ[types.String],
|
||||
gccgoBuiltinCOMPLEX64: types.Typ[types.Complex64],
|
||||
gccgoBuiltinCOMPLEX128: types.Typ[types.Complex128],
|
||||
gccgoBuiltinERROR: types.Universe.Lookup("error").Type(),
|
||||
gccgoBuiltinBYTE: types.Universe.Lookup("byte").Type(),
|
||||
gccgoBuiltinRUNE: types.Universe.Lookup("rune").Type(),
|
||||
}[typ]
|
||||
}
|
||||
|
||||
// Type = "<" "type" ( "-" int | int [ TypeDefinition ] ) ">" .
|
||||
func (p *parser) parseType(pkg *types.Package) (t types.Type) {
|
||||
p.expect('<')
|
||||
p.expectKeyword("type")
|
||||
|
||||
switch p.tok {
|
||||
case scanner.Int:
|
||||
n := p.parseInt()
|
||||
|
||||
if p.tok == '>' {
|
||||
t = p.typeMap[int(n)]
|
||||
} else {
|
||||
t = p.parseTypeDefinition(pkg, int(n))
|
||||
}
|
||||
|
||||
case '-':
|
||||
p.next()
|
||||
n := p.parseInt()
|
||||
t = lookupBuiltinType(int(n))
|
||||
|
||||
default:
|
||||
p.errorf("expected type number, got %s (%q)", scanner.TokenString(p.tok), p.lit)
|
||||
return nil
|
||||
}
|
||||
|
||||
p.expect('>')
|
||||
return
|
||||
}
|
||||
|
||||
// PackageInit = unquotedString unquotedString int .
|
||||
func (p *parser) parsePackageInit() PackageInit {
|
||||
name := p.parseUnquotedString()
|
||||
initfunc := p.parseUnquotedString()
|
||||
priority := int(p.parseInt())
|
||||
return PackageInit{Name: name, InitFunc: initfunc, Priority: priority}
|
||||
}
|
||||
|
||||
// Throw away tokens until we see a ';'. If we see a '<', attempt to parse as a type.
|
||||
func (p *parser) discardDirectiveWhileParsingTypes(pkg *types.Package) {
|
||||
for {
|
||||
switch p.tok {
|
||||
case ';':
|
||||
return
|
||||
case '<':
|
||||
p.parseType(p.pkg)
|
||||
case scanner.EOF:
|
||||
p.error("unexpected EOF")
|
||||
default:
|
||||
p.next()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Create the package if we have parsed both the package path and package name.
|
||||
func (p *parser) maybeCreatePackage() {
|
||||
if p.pkgname != "" && p.pkgpath != "" {
|
||||
p.pkg = p.getPkg(p.pkgpath, p.pkgname)
|
||||
}
|
||||
}
|
||||
|
||||
// InitDataDirective = "v1" ";" |
|
||||
// "priority" int ";" |
|
||||
// "init" { PackageInit } ";" |
|
||||
// "checksum" unquotedString ";" .
|
||||
func (p *parser) parseInitDataDirective() {
|
||||
if p.tok != scanner.Ident {
|
||||
// unexpected token kind; panic
|
||||
p.expect(scanner.Ident)
|
||||
}
|
||||
|
||||
switch p.lit {
|
||||
case "v1":
|
||||
p.next()
|
||||
p.expect(';')
|
||||
|
||||
case "priority":
|
||||
p.next()
|
||||
p.initdata.Priority = int(p.parseInt())
|
||||
p.expect(';')
|
||||
|
||||
case "init":
|
||||
p.next()
|
||||
for p.tok != ';' && p.tok != scanner.EOF {
|
||||
p.initdata.Inits = append(p.initdata.Inits, p.parsePackageInit())
|
||||
}
|
||||
p.expect(';')
|
||||
|
||||
case "checksum":
|
||||
// Don't let the scanner try to parse the checksum as a number.
|
||||
defer func(mode uint) {
|
||||
p.scanner.Mode = mode
|
||||
}(p.scanner.Mode)
|
||||
p.scanner.Mode &^= scanner.ScanInts | scanner.ScanFloats
|
||||
p.next()
|
||||
p.parseUnquotedString()
|
||||
p.expect(';')
|
||||
|
||||
default:
|
||||
p.errorf("unexpected identifier: %q", p.lit)
|
||||
}
|
||||
}
|
||||
|
||||
// Directive = InitDataDirective |
|
||||
// "package" unquotedString ";" |
|
||||
// "pkgpath" unquotedString ";" |
|
||||
// "import" unquotedString unquotedString string ";" |
|
||||
// "func" Func ";" |
|
||||
// "type" Type ";" |
|
||||
// "var" Var ";" |
|
||||
// "const" Const ";" .
|
||||
func (p *parser) parseDirective() {
|
||||
if p.tok != scanner.Ident {
|
||||
// unexpected token kind; panic
|
||||
p.expect(scanner.Ident)
|
||||
}
|
||||
|
||||
switch p.lit {
|
||||
case "v1", "priority", "init", "checksum":
|
||||
p.parseInitDataDirective()
|
||||
|
||||
case "package":
|
||||
p.next()
|
||||
p.pkgname = p.parseUnquotedString()
|
||||
p.maybeCreatePackage()
|
||||
p.expect(';')
|
||||
|
||||
case "pkgpath":
|
||||
p.next()
|
||||
p.pkgpath = p.parseUnquotedString()
|
||||
p.maybeCreatePackage()
|
||||
p.expect(';')
|
||||
|
||||
case "import":
|
||||
p.next()
|
||||
pkgname := p.parseUnquotedString()
|
||||
pkgpath := p.parseUnquotedString()
|
||||
p.getPkg(pkgpath, pkgname)
|
||||
p.parseString()
|
||||
p.expect(';')
|
||||
|
||||
case "func":
|
||||
p.next()
|
||||
fun := p.parseFunc(p.pkg)
|
||||
if fun != nil {
|
||||
p.pkg.Scope().Insert(fun)
|
||||
}
|
||||
p.expect(';')
|
||||
|
||||
case "type":
|
||||
p.next()
|
||||
p.parseType(p.pkg)
|
||||
p.expect(';')
|
||||
|
||||
case "var":
|
||||
p.next()
|
||||
v := p.parseVar(p.pkg)
|
||||
p.pkg.Scope().Insert(v)
|
||||
p.expect(';')
|
||||
|
||||
case "const":
|
||||
p.next()
|
||||
c := p.parseConst(p.pkg)
|
||||
p.pkg.Scope().Insert(c)
|
||||
p.expect(';')
|
||||
|
||||
default:
|
||||
p.errorf("unexpected identifier: %q", p.lit)
|
||||
}
|
||||
}
|
||||
|
||||
// Package = { Directive } .
|
||||
func (p *parser) parsePackage() *types.Package {
|
||||
for p.tok != scanner.EOF {
|
||||
p.parseDirective()
|
||||
}
|
||||
for _, typ := range p.typeMap {
|
||||
if it, ok := typ.(*types.Interface); ok {
|
||||
it.Complete()
|
||||
}
|
||||
}
|
||||
p.pkg.MarkComplete()
|
||||
return p.pkg
|
||||
}
|
||||
|
||||
// InitData = { InitDataDirective } .
|
||||
func (p *parser) parseInitData() {
|
||||
for p.tok != scanner.EOF {
|
||||
p.parseInitDataDirective()
|
||||
}
|
||||
}
|
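The deleted parser above is a hand-written parser for gccgo's textual export data, driven by the standard text/scanner package. A minimal sketch of that scanning pattern over a made-up two-directive input (identifier-led directives terminated by ';'):

// Sketch of the text/scanner-driven style used by the deleted parser.
// The input string is hypothetical.
package main

import (
	"fmt"
	"strings"
	"text/scanner"
)

func main() {
	src := "v1; priority 7;"
	var s scanner.Scanner
	s.Init(strings.NewReader(src))
	s.Mode = scanner.ScanIdents | scanner.ScanInts
	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
		if tok == ';' {
			fmt.Println("; (end of directive)")
			continue
		}
		fmt.Printf("%-6s %q\n", scanner.TokenString(tok), s.TokenText())
	}
}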
@ -1,73 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package gccgoimporter
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"strings"
|
||||
"testing"
|
||||
"text/scanner"
|
||||
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
var typeParserTests = []struct {
|
||||
id, typ, want, underlying, methods string
|
||||
}{
|
||||
{id: "foo", typ: "<type -1>", want: "int8"},
|
||||
{id: "foo", typ: "<type 1 *<type -19>>", want: "*error"},
|
||||
{id: "foo", typ: "<type 1 *any>", want: "unsafe.Pointer"},
|
||||
{id: "foo", typ: "<type 1 \"Bar\" <type 2 *<type 1>>>", want: "foo.Bar", underlying: "*foo.Bar"},
|
||||
{id: "foo", typ: "<type 1 \"bar.Foo\" \"bar\" <type -1> func (? <type 1>) M (); >", want: "bar.Foo", underlying: "int8", methods: "func (bar.Foo).M()"},
|
||||
{id: "foo", typ: "<type 1 \".bar.foo\" \"bar\" <type -1>>", want: "bar.foo", underlying: "int8"},
|
||||
{id: "foo", typ: "<type 1 []<type -1>>", want: "[]int8"},
|
||||
{id: "foo", typ: "<type 1 [42]<type -1>>", want: "[42]int8"},
|
||||
{id: "foo", typ: "<type 1 map [<type -1>] <type -2>>", want: "map[int8]int16"},
|
||||
{id: "foo", typ: "<type 1 chan <type -1>>", want: "chan int8"},
|
||||
{id: "foo", typ: "<type 1 chan <- <type -1>>", want: "<-chan int8"},
|
||||
{id: "foo", typ: "<type 1 chan -< <type -1>>", want: "chan<- int8"},
|
||||
{id: "foo", typ: "<type 1 struct { I8 <type -1>; I16 <type -2> \"i16\"; }>", want: "struct{I8 int8; I16 int16 \"i16\"}"},
|
||||
{id: "foo", typ: "<type 1 interface { Foo (a <type -1>, b <type -2>) <type -1>; Bar (? <type -2>, ? ...<type -1>) (? <type -2>, ? <type -1>); Baz (); }>", want: "interface{Bar(int16, ...int8) (int16, int8); Baz(); Foo(a int8, b int16) int8}"},
|
||||
{id: "foo", typ: "<type 1 (? <type -1>) <type -2>>", want: "func(int8) int16"},
|
||||
}
|
||||
|
||||
func TestTypeParser(t *testing.T) {
|
||||
for _, test := range typeParserTests {
|
||||
var p parser
|
||||
p.init("test.gox", strings.NewReader(test.typ), make(map[string]*types.Package))
|
||||
p.pkgname = test.id
|
||||
p.pkgpath = test.id
|
||||
p.maybeCreatePackage()
|
||||
typ := p.parseType(p.pkg)
|
||||
|
||||
if p.tok != scanner.EOF {
|
||||
t.Errorf("expected full parse, stopped at %q", p.lit)
|
||||
}
|
||||
|
||||
got := typ.String()
|
||||
if got != test.want {
|
||||
t.Errorf("got type %q, expected %q", got, test.want)
|
||||
}
|
||||
|
||||
if test.underlying != "" {
|
||||
underlying := typ.Underlying().String()
|
||||
if underlying != test.underlying {
|
||||
t.Errorf("got underlying type %q, expected %q", underlying, test.underlying)
|
||||
}
|
||||
}
|
||||
|
||||
if test.methods != "" {
|
||||
nt := typ.(*types.Named)
|
||||
var buf bytes.Buffer
|
||||
for i := 0; i != nt.NumMethods(); i++ {
|
||||
buf.WriteString(nt.Method(i).String())
|
||||
}
|
||||
methods := buf.String()
|
||||
if methods != test.methods {
|
||||
t.Errorf("got methods %q, expected %q", methods, test.methods)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
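The expected strings in the table above are the printed forms produced by go/types (the x/tools copy mirrored the standard library here). A small sketch, using the standard go/types package, that builds the cyclic "foo.Bar" case (type Bar *Bar) and prints its name and underlying type:

// Sketch: building the "foo.Bar" case with the standard go/types package
// and printing it, matching the expectations in the test table above.
package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	pkg := types.NewPackage("foo", "foo")
	obj := types.NewTypeName(token.NoPos, pkg, "Bar", nil)
	named := types.NewNamed(obj, nil, nil)       // underlying set below
	named.SetUnderlying(types.NewPointer(named)) // Bar's underlying is *Bar
	fmt.Println(named)              // foo.Bar
	fmt.Println(named.Underlying()) // *foo.Bar
}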
go/gccgoimporter/testdata/complexnums.go (vendored)

@ -1,6 +0,0 @@
|
||||
package complexnums
|
||||
|
||||
const NN = -1 - 1i
|
||||
const NP = -1 + 1i
|
||||
const PN = 1 - 1i
|
||||
const PP = 1 + 1i
|
go/gccgoimporter/testdata/complexnums.gox (vendored)
@ -1,8 +0,0 @@
|
||||
v1;
|
||||
package complexnums;
|
||||
pkgpath complexnums;
|
||||
priority 1;
|
||||
const NN = -0.1E1-0.1E1i ;
|
||||
const NP = -0.1E1+0.1E1i ;
|
||||
const PN = 0.1E1-0.1E1i ;
|
||||
const PP = 0.1E1+0.1E1i ;
|
go/gccgoimporter/testdata/imports.go (vendored)
@ -1,5 +0,0 @@
|
||||
package imports
|
||||
|
||||
import "fmt"
|
||||
|
||||
var Hello = fmt.Sprintf("Hello, world")
|
go/gccgoimporter/testdata/imports.gox (vendored)
@ -1,7 +0,0 @@
|
||||
v1;
|
||||
package imports;
|
||||
pkgpath imports;
|
||||
priority 7;
|
||||
import fmt fmt "fmt";
|
||||
init imports imports..import 7 math math..import 1 runtime runtime..import 1 strconv strconv..import 2 io io..import 3 reflect reflect..import 3 syscall syscall..import 3 time time..import 4 os os..import 5 fmt fmt..import 6;
|
||||
var Hello <type -16>;
|
go/gccgoimporter/testdata/pointer.go (vendored)
@ -1,3 +0,0 @@
|
||||
package pointer
|
||||
|
||||
type Int8Ptr *int8
|
go/gccgoimporter/testdata/pointer.gox (vendored)
@ -1,4 +0,0 @@
|
||||
v1;
|
||||
package pointer;
|
||||
pkgpath pointer;
|
||||
type <type 1 "Int8Ptr" <type 2 *<type -1>>>;
|
@ -1,108 +0,0 @@
|
||||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file implements FindExportData.
|
||||
|
||||
package gcimporter
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func readGopackHeader(r *bufio.Reader) (name string, size int, err error) {
|
||||
// See $GOROOT/include/ar.h.
|
||||
hdr := make([]byte, 16+12+6+6+8+10+2)
|
||||
_, err = io.ReadFull(r, hdr)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
// leave for debugging
|
||||
if false {
|
||||
fmt.Printf("header: %s", hdr)
|
||||
}
|
||||
s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10]))
|
||||
size, err = strconv.Atoi(s)
|
||||
if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' {
|
||||
err = errors.New("invalid archive header")
|
||||
return
|
||||
}
|
||||
name = strings.TrimSpace(string(hdr[:16]))
|
||||
return
|
||||
}
|
||||
|
||||
// FindExportData positions the reader r at the beginning of the
|
||||
// export data section of an underlying GC-created object/archive
|
||||
// file by reading from it. The reader must be positioned at the
|
||||
// start of the file before calling this function.
|
||||
//
|
||||
func FindExportData(r *bufio.Reader) (err error) {
|
||||
// Read first line to make sure this is an object file.
|
||||
line, err := r.ReadSlice('\n')
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
if string(line) == "!<arch>\n" {
|
||||
// Archive file. Scan to __.PKGDEF.
|
||||
var name string
|
||||
var size int
|
||||
if name, size, err = readGopackHeader(r); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
// Optional leading __.GOSYMDEF or __.SYMDEF.
|
||||
// Read and discard.
|
||||
if name == "__.SYMDEF" || name == "__.GOSYMDEF" {
|
||||
const block = 4096
|
||||
tmp := make([]byte, block)
|
||||
for size > 0 {
|
||||
n := size
|
||||
if n > block {
|
||||
n = block
|
||||
}
|
||||
if _, err = io.ReadFull(r, tmp[:n]); err != nil {
|
||||
return
|
||||
}
|
||||
size -= n
|
||||
}
|
||||
|
||||
if name, size, err = readGopackHeader(r); err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// First real entry should be __.PKGDEF.
|
||||
if name != "__.PKGDEF" {
|
||||
err = errors.New("go archive is missing __.PKGDEF")
|
||||
return
|
||||
}
|
||||
|
||||
// Read first line of __.PKGDEF data, so that line
|
||||
// is once again the first line of the input.
|
||||
if line, err = r.ReadSlice('\n'); err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Now at __.PKGDEF in archive or still at beginning of file.
|
||||
// Either way, line should begin with "go object ".
|
||||
if !strings.HasPrefix(string(line), "go object ") {
|
||||
err = errors.New("not a go object file")
|
||||
return
|
||||
}
|
||||
|
||||
// Skip over object header to export data.
|
||||
// Begins after first line with $$.
|
||||
for line[0] != '$' {
|
||||
if line, err = r.ReadSlice('\n'); err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
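FindExportData above skips the object-file (or archive) header and leaves the reader positioned at the export data, which begins after the first line starting with "$$". A rough sketch of that positioning step for a plain object file (the archive case additionally needs the __.PKGDEF scan shown above); the file name is hypothetical:

// Sketch: skip header lines until the "$$" export-data marker, as
// FindExportData above does for a plain gc object file.
package main

import (
	"bufio"
	"fmt"
	"log"
	"os"
	"strings"
)

func main() {
	f, err := os.Open("example.o") // hypothetical gc object file
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	r := bufio.NewReader(f)
	for {
		line, err := r.ReadString('\n')
		if err != nil {
			log.Fatal("export data marker not found: ", err)
		}
		if strings.HasPrefix(line, "$$") {
			break // r now reads the export data section
		}
	}
	fmt.Println("positioned at export data")
}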
@ -1,995 +0,0 @@
|
||||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package gcimporter implements Import for gc-generated object files.
|
||||
// Importing this package installs Import as go/types.DefaultImport.
|
||||
package gcimporter // import "golang.org/x/tools/go/gcimporter"
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"fmt"
|
||||
"go/build"
|
||||
"go/token"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"text/scanner"
|
||||
|
||||
"golang.org/x/tools/go/exact"
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
// debugging/development support
|
||||
const debug = false
|
||||
|
||||
func init() {
|
||||
types.DefaultImport = Import
|
||||
}
|
||||
|
||||
var pkgExts = [...]string{".a", ".5", ".6", ".7", ".8", ".9"}
|
||||
|
||||
// FindPkg returns the filename and unique package id for an import
|
||||
// path based on package information provided by build.Import (using
|
||||
// the build.Default build.Context).
|
||||
// If no file was found, an empty filename is returned.
|
||||
//
|
||||
func FindPkg(path, srcDir string) (filename, id string) {
|
||||
if len(path) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
id = path
|
||||
var noext string
|
||||
switch {
|
||||
default:
|
||||
// "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x"
|
||||
// Don't require the source files to be present.
|
||||
bp, _ := build.Import(path, srcDir, build.FindOnly|build.AllowBinary)
|
||||
if bp.PkgObj == "" {
|
||||
return
|
||||
}
|
||||
noext = strings.TrimSuffix(bp.PkgObj, ".a")
|
||||
|
||||
case build.IsLocalImport(path):
|
||||
// "./x" -> "/this/directory/x.ext", "/this/directory/x"
|
||||
noext = filepath.Join(srcDir, path)
|
||||
id = noext
|
||||
|
||||
case filepath.IsAbs(path):
|
||||
// for completeness only - go/build.Import
|
||||
// does not support absolute imports
|
||||
// "/x" -> "/x.ext", "/x"
|
||||
noext = path
|
||||
}
|
||||
|
||||
// try extensions
|
||||
for _, ext := range pkgExts {
|
||||
filename = noext + ext
|
||||
if f, err := os.Stat(filename); err == nil && !f.IsDir() {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
filename = "" // not found
|
||||
return
|
||||
}
|
||||
|
||||
// ImportData imports a package by reading the gc-generated export data,
|
||||
// adds the corresponding package object to the packages map indexed by id,
|
||||
// and returns the object.
|
||||
//
|
||||
// The packages map must contain all packages already imported. The data
|
||||
// reader position must be the beginning of the export data section. The
|
||||
// filename is only used in error messages.
|
||||
//
|
||||
// If packages[id] contains the completely imported package, that package
|
||||
// can be used directly, and there is no need to call this function (but
|
||||
// there is also no harm but for extra time used).
|
||||
//
|
||||
func ImportData(packages map[string]*types.Package, filename, id string, data io.Reader) (pkg *types.Package, err error) {
|
||||
// support for parser error handling
|
||||
defer func() {
|
||||
switch r := recover().(type) {
|
||||
case nil:
|
||||
// nothing to do
|
||||
case importError:
|
||||
err = r
|
||||
default:
|
||||
panic(r) // internal error
|
||||
}
|
||||
}()
|
||||
|
||||
var p parser
|
||||
p.init(filename, id, data, packages)
|
||||
pkg = p.parseExport()
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// Import imports a gc-generated package given its import path, adds the
|
||||
// corresponding package object to the packages map, and returns the object.
|
||||
// Local import paths are interpreted relative to the current working directory.
|
||||
// The packages map must contain all packages already imported.
|
||||
//
|
||||
func Import(packages map[string]*types.Package, path string) (pkg *types.Package, err error) {
|
||||
if path == "unsafe" {
|
||||
return types.Unsafe, nil
|
||||
}
|
||||
|
||||
srcDir := "."
|
||||
if build.IsLocalImport(path) {
|
||||
srcDir, err = os.Getwd()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
filename, id := FindPkg(path, srcDir)
|
||||
if filename == "" {
|
||||
err = fmt.Errorf("can't find import: %s", id)
|
||||
return
|
||||
}
|
||||
|
||||
// no need to re-import if the package was imported completely before
|
||||
if pkg = packages[id]; pkg != nil && pkg.Complete() {
|
||||
return
|
||||
}
|
||||
|
||||
// open file
|
||||
f, err := os.Open(filename)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
defer func() {
|
||||
f.Close()
|
||||
if err != nil {
|
||||
// add file name to error
|
||||
err = fmt.Errorf("reading export data: %s: %v", filename, err)
|
||||
}
|
||||
}()
|
||||
|
||||
buf := bufio.NewReader(f)
|
||||
if err = FindExportData(buf); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
pkg, err = ImportData(packages, filename, id, buf)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Parser
|
||||
|
||||
// TODO(gri) Imported objects don't have position information.
|
||||
// Ideally use the debug table line info; alternatively
|
||||
// create some fake position (or the position of the
|
||||
// import). That way error messages referring to imported
|
||||
// objects can print meaningful information.
|
||||
|
||||
// parser parses the exports inside a gc compiler-produced
|
||||
// object/archive file and populates its scope with the results.
|
||||
type parser struct {
|
||||
scanner scanner.Scanner
|
||||
tok rune // current token
|
||||
lit string // literal string; only valid for Ident, Int, String tokens
|
||||
id string // package id of imported package
|
||||
sharedPkgs map[string]*types.Package // package id -> package object (across importer)
|
||||
localPkgs map[string]*types.Package // package id -> package object (just this package)
|
||||
}
|
||||
|
||||
func (p *parser) init(filename, id string, src io.Reader, packages map[string]*types.Package) {
|
||||
p.scanner.Init(src)
|
||||
p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) }
|
||||
p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanChars | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments
|
||||
p.scanner.Whitespace = 1<<'\t' | 1<<' '
|
||||
p.scanner.Filename = filename // for good error messages
|
||||
p.next()
|
||||
p.id = id
|
||||
p.sharedPkgs = packages
|
||||
if debug {
|
||||
// check consistency of packages map
|
||||
for _, pkg := range packages {
|
||||
if pkg.Name() == "" {
|
||||
fmt.Printf("no package name for %s\n", pkg.Path())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) next() {
|
||||
p.tok = p.scanner.Scan()
|
||||
switch p.tok {
|
||||
case scanner.Ident, scanner.Int, scanner.Char, scanner.String, '·':
|
||||
p.lit = p.scanner.TokenText()
|
||||
default:
|
||||
p.lit = ""
|
||||
}
|
||||
if debug {
|
||||
fmt.Printf("%s: %q -> %q\n", scanner.TokenString(p.tok), p.scanner.TokenText(), p.lit)
|
||||
}
|
||||
}
|
||||
|
||||
func declTypeName(pkg *types.Package, name string) *types.TypeName {
|
||||
scope := pkg.Scope()
|
||||
if obj := scope.Lookup(name); obj != nil {
|
||||
return obj.(*types.TypeName)
|
||||
}
|
||||
obj := types.NewTypeName(token.NoPos, pkg, name, nil)
|
||||
// a named type may be referred to before the underlying type
|
||||
// is known - set it up
|
||||
types.NewNamed(obj, nil, nil)
|
||||
scope.Insert(obj)
|
||||
return obj
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Error handling
|
||||
|
||||
// Internal errors are boxed as importErrors.
|
||||
type importError struct {
|
||||
pos scanner.Position
|
||||
err error
|
||||
}
|
||||
|
||||
func (e importError) Error() string {
|
||||
return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err)
|
||||
}
|
||||
|
||||
func (p *parser) error(err interface{}) {
|
||||
if s, ok := err.(string); ok {
|
||||
err = errors.New(s)
|
||||
}
|
||||
// panic with a runtime.Error if err is not an error
|
||||
panic(importError{p.scanner.Pos(), err.(error)})
|
||||
}
|
||||
|
||||
func (p *parser) errorf(format string, args ...interface{}) {
|
||||
p.error(fmt.Sprintf(format, args...))
|
||||
}
|
||||
|
||||
func (p *parser) expect(tok rune) string {
|
||||
lit := p.lit
|
||||
if p.tok != tok {
|
||||
p.errorf("expected %s, got %s (%s)", scanner.TokenString(tok), scanner.TokenString(p.tok), lit)
|
||||
}
|
||||
p.next()
|
||||
return lit
|
||||
}
|
||||
|
||||
func (p *parser) expectSpecial(tok string) {
|
||||
sep := 'x' // not white space
|
||||
i := 0
|
||||
for i < len(tok) && p.tok == rune(tok[i]) && sep > ' ' {
|
||||
sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
|
||||
p.next()
|
||||
i++
|
||||
}
|
||||
if i < len(tok) {
|
||||
p.errorf("expected %q, got %q", tok, tok[0:i])
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) expectKeyword(keyword string) {
|
||||
lit := p.expect(scanner.Ident)
|
||||
if lit != keyword {
|
||||
p.errorf("expected keyword %s, got %q", keyword, lit)
|
||||
}
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Qualified and unqualified names
|
||||
|
||||
// PackageId = string_lit .
|
||||
//
|
||||
func (p *parser) parsePackageId() string {
|
||||
id, err := strconv.Unquote(p.expect(scanner.String))
|
||||
if err != nil {
|
||||
p.error(err)
|
||||
}
|
||||
// id == "" stands for the imported package id
|
||||
// (only known at time of package installation)
|
||||
if id == "" {
|
||||
id = p.id
|
||||
}
|
||||
return id
|
||||
}
|
||||
|
||||
// PackageName = ident .
|
||||
//
|
||||
func (p *parser) parsePackageName() string {
|
||||
return p.expect(scanner.Ident)
|
||||
}
|
||||
|
||||
// dotIdentifier = ( ident | '·' ) { ident | int | '·' } .
|
||||
func (p *parser) parseDotIdent() string {
|
||||
ident := ""
|
||||
if p.tok != scanner.Int {
|
||||
sep := 'x' // not white space
|
||||
for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' {
|
||||
ident += p.lit
|
||||
sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
|
||||
p.next()
|
||||
}
|
||||
}
|
||||
if ident == "" {
|
||||
p.expect(scanner.Ident) // use expect() for error handling
|
||||
}
|
||||
return ident
|
||||
}
|
||||
|
||||
// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) .
|
||||
//
|
||||
func (p *parser) parseQualifiedName() (id, name string) {
|
||||
p.expect('@')
|
||||
id = p.parsePackageId()
|
||||
p.expect('.')
|
||||
// Per rev f280b8a485fd (10/2/2013), qualified names may be used for anonymous fields.
|
||||
if p.tok == '?' {
|
||||
p.next()
|
||||
} else {
|
||||
name = p.parseDotIdent()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// getPkg returns the package for a given id. If the package is
|
||||
// not found but we have a package name, create the package and
|
||||
// add it to the p.localPkgs and p.sharedPkgs maps.
|
||||
//
|
||||
// id identifies a package, usually by a canonical package path like
|
||||
// "encoding/json" but possibly by a non-canonical import path like
|
||||
// "./json".
|
||||
//
|
||||
func (p *parser) getPkg(id, name string) *types.Package {
|
||||
// package unsafe is not in the packages maps - handle explicitly
|
||||
if id == "unsafe" {
|
||||
return types.Unsafe
|
||||
}
|
||||
|
||||
pkg := p.localPkgs[id]
|
||||
if pkg == nil && name != "" {
|
||||
// first import of id from this package
|
||||
pkg = p.sharedPkgs[id]
|
||||
if pkg == nil {
|
||||
// first import of id by this importer
|
||||
pkg = types.NewPackage(id, name)
|
||||
p.sharedPkgs[id] = pkg
|
||||
}
|
||||
|
||||
if p.localPkgs == nil {
|
||||
p.localPkgs = make(map[string]*types.Package)
|
||||
}
|
||||
p.localPkgs[id] = pkg
|
||||
}
|
||||
return pkg
|
||||
}
|
||||
|
||||
// parseExportedName is like parseQualifiedName, but
|
||||
// the package id is resolved to an imported *types.Package.
|
||||
//
|
||||
func (p *parser) parseExportedName() (pkg *types.Package, name string) {
|
||||
id, name := p.parseQualifiedName()
|
||||
pkg = p.getPkg(id, "")
|
||||
if pkg == nil {
|
||||
p.errorf("%s package not found", id)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Types
|
||||
|
||||
// BasicType = identifier .
|
||||
//
|
||||
func (p *parser) parseBasicType() types.Type {
|
||||
id := p.expect(scanner.Ident)
|
||||
obj := types.Universe.Lookup(id)
|
||||
if obj, ok := obj.(*types.TypeName); ok {
|
||||
return obj.Type()
|
||||
}
|
||||
p.errorf("not a basic type: %s", id)
|
||||
return nil
|
||||
}
|
||||
|
||||
// ArrayType = "[" int_lit "]" Type .
|
||||
//
|
||||
func (p *parser) parseArrayType() types.Type {
|
||||
// "[" already consumed and lookahead known not to be "]"
|
||||
lit := p.expect(scanner.Int)
|
||||
p.expect(']')
|
||||
elem := p.parseType()
|
||||
n, err := strconv.ParseInt(lit, 10, 64)
|
||||
if err != nil {
|
||||
p.error(err)
|
||||
}
|
||||
return types.NewArray(elem, n)
|
||||
}
|
||||
|
||||
// MapType = "map" "[" Type "]" Type .
|
||||
//
|
||||
func (p *parser) parseMapType() types.Type {
|
||||
p.expectKeyword("map")
|
||||
p.expect('[')
|
||||
key := p.parseType()
|
||||
p.expect(']')
|
||||
elem := p.parseType()
|
||||
return types.NewMap(key, elem)
|
||||
}
|
||||
|
||||
// Name = identifier | "?" | QualifiedName .
|
||||
//
|
||||
// If materializePkg is set, the returned package is guaranteed to be set.
|
||||
// For fully qualified names, the returned package may be a fake package
|
||||
// (without name, scope, and not in the p.imports map), created for the
|
||||
// sole purpose of providing a package path. Fake packages are created
|
||||
// when the package id is not found in the p.imports map; in that case
|
||||
// we cannot create a real package because we don't have a package name.
|
||||
// For non-qualified names, the returned package is the imported package.
|
||||
//
|
||||
func (p *parser) parseName(materializePkg bool) (pkg *types.Package, name string) {
|
||||
switch p.tok {
|
||||
case scanner.Ident:
|
||||
pkg = p.sharedPkgs[p.id]
|
||||
name = p.lit
|
||||
p.next()
|
||||
case '?':
|
||||
// anonymous
|
||||
pkg = p.sharedPkgs[p.id]
|
||||
p.next()
|
||||
case '@':
|
||||
// exported name prefixed with package path
|
||||
var id string
|
||||
id, name = p.parseQualifiedName()
|
||||
if materializePkg {
|
||||
// we don't have a package name - if the package
|
||||
// doesn't exist yet, create a fake package instead
|
||||
pkg = p.getPkg(id, "")
|
||||
if pkg == nil {
|
||||
pkg = types.NewPackage(id, "")
|
||||
}
|
||||
}
|
||||
default:
|
||||
p.error("name expected")
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func deref(typ types.Type) types.Type {
|
||||
if p, _ := typ.(*types.Pointer); p != nil {
|
||||
return p.Elem()
|
||||
}
|
||||
return typ
|
||||
}
|
||||
|
||||
// Field = Name Type [ string_lit ] .
|
||||
//
|
||||
func (p *parser) parseField() (*types.Var, string) {
|
||||
pkg, name := p.parseName(true)
|
||||
typ := p.parseType()
|
||||
anonymous := false
|
||||
if name == "" {
|
||||
// anonymous field - typ must be T or *T and T must be a type name
|
||||
switch typ := deref(typ).(type) {
|
||||
case *types.Basic: // basic types are named types
|
||||
pkg = nil
|
||||
name = typ.Name()
|
||||
case *types.Named:
|
||||
name = typ.Obj().Name()
|
||||
default:
|
||||
p.errorf("anonymous field expected")
|
||||
}
|
||||
anonymous = true
|
||||
}
|
||||
tag := ""
|
||||
if p.tok == scanner.String {
|
||||
s := p.expect(scanner.String)
|
||||
var err error
|
||||
tag, err = strconv.Unquote(s)
|
||||
if err != nil {
|
||||
p.errorf("invalid struct tag %s: %s", s, err)
|
||||
}
|
||||
}
|
||||
return types.NewField(token.NoPos, pkg, name, typ, anonymous), tag
|
||||
}
|
||||
|
||||
// StructType = "struct" "{" [ FieldList ] "}" .
|
||||
// FieldList = Field { ";" Field } .
|
||||
//
|
||||
func (p *parser) parseStructType() types.Type {
|
||||
var fields []*types.Var
|
||||
var tags []string
|
||||
|
||||
p.expectKeyword("struct")
|
||||
p.expect('{')
|
||||
for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ {
|
||||
if i > 0 {
|
||||
p.expect(';')
|
||||
}
|
||||
fld, tag := p.parseField()
|
||||
if tag != "" && tags == nil {
|
||||
tags = make([]string, i)
|
||||
}
|
||||
if tags != nil {
|
||||
tags = append(tags, tag)
|
||||
}
|
||||
fields = append(fields, fld)
|
||||
}
|
||||
p.expect('}')
|
||||
|
||||
return types.NewStruct(fields, tags)
|
||||
}
|
||||
|
||||
// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] .
|
||||
//
|
||||
func (p *parser) parseParameter() (par *types.Var, isVariadic bool) {
|
||||
_, name := p.parseName(false)
|
||||
// remove gc-specific parameter numbering
|
||||
if i := strings.Index(name, "·"); i >= 0 {
|
||||
name = name[:i]
|
||||
}
|
||||
if p.tok == '.' {
|
||||
p.expectSpecial("...")
|
||||
isVariadic = true
|
||||
}
|
||||
typ := p.parseType()
|
||||
if isVariadic {
|
||||
typ = types.NewSlice(typ)
|
||||
}
|
||||
// ignore argument tag (e.g. "noescape")
|
||||
if p.tok == scanner.String {
|
||||
p.next()
|
||||
}
|
||||
// TODO(gri) should we provide a package?
|
||||
par = types.NewVar(token.NoPos, nil, name, typ)
|
||||
return
|
||||
}
|
||||
|
||||
// Parameters = "(" [ ParameterList ] ")" .
|
||||
// ParameterList = { Parameter "," } Parameter .
|
||||
//
|
||||
func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) {
|
||||
p.expect('(')
|
||||
for p.tok != ')' && p.tok != scanner.EOF {
|
||||
if len(list) > 0 {
|
||||
p.expect(',')
|
||||
}
|
||||
par, variadic := p.parseParameter()
|
||||
list = append(list, par)
|
||||
if variadic {
|
||||
if isVariadic {
|
||||
p.error("... not on final argument")
|
||||
}
|
||||
isVariadic = true
|
||||
}
|
||||
}
|
||||
p.expect(')')
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// Signature = Parameters [ Result ] .
|
||||
// Result = Type | Parameters .
|
||||
//
|
||||
func (p *parser) parseSignature(recv *types.Var) *types.Signature {
|
||||
params, isVariadic := p.parseParameters()
|
||||
|
||||
// optional result type
|
||||
var results []*types.Var
|
||||
if p.tok == '(' {
|
||||
var variadic bool
|
||||
results, variadic = p.parseParameters()
|
||||
if variadic {
|
||||
p.error("... not permitted on result type")
|
||||
}
|
||||
}
|
||||
|
||||
return types.NewSignature(recv, types.NewTuple(params...), types.NewTuple(results...), isVariadic)
|
||||
}
|
||||
|
||||
// InterfaceType = "interface" "{" [ MethodList ] "}" .
|
||||
// MethodList = Method { ";" Method } .
|
||||
// Method = Name Signature .
|
||||
//
|
||||
// The methods of embedded interfaces are always "inlined"
|
||||
// by the compiler and thus embedded interfaces are never
|
||||
// visible in the export data.
|
||||
//
|
||||
func (p *parser) parseInterfaceType() types.Type {
|
||||
var methods []*types.Func
|
||||
|
||||
p.expectKeyword("interface")
|
||||
p.expect('{')
|
||||
for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ {
|
||||
if i > 0 {
|
||||
p.expect(';')
|
||||
}
|
||||
pkg, name := p.parseName(true)
|
||||
sig := p.parseSignature(nil)
|
||||
methods = append(methods, types.NewFunc(token.NoPos, pkg, name, sig))
|
||||
}
|
||||
p.expect('}')
|
||||
|
||||
// Complete requires the type's embedded interfaces to be fully defined,
|
||||
// but we do not define any
|
||||
return types.NewInterface(methods, nil).Complete()
|
||||
}
|
||||
|
||||
// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
|
||||
//
|
||||
func (p *parser) parseChanType() types.Type {
|
||||
dir := types.SendRecv
|
||||
if p.tok == scanner.Ident {
|
||||
p.expectKeyword("chan")
|
||||
if p.tok == '<' {
|
||||
p.expectSpecial("<-")
|
||||
dir = types.SendOnly
|
||||
}
|
||||
} else {
|
||||
p.expectSpecial("<-")
|
||||
p.expectKeyword("chan")
|
||||
dir = types.RecvOnly
|
||||
}
|
||||
elem := p.parseType()
|
||||
return types.NewChan(dir, elem)
|
||||
}
|
||||
|
||||
// Type =
|
||||
// BasicType | TypeName | ArrayType | SliceType | StructType |
|
||||
// PointerType | FuncType | InterfaceType | MapType | ChanType |
|
||||
// "(" Type ")" .
|
||||
//
|
||||
// BasicType = ident .
|
||||
// TypeName = ExportedName .
|
||||
// SliceType = "[" "]" Type .
|
||||
// PointerType = "*" Type .
|
||||
// FuncType = "func" Signature .
|
||||
//
|
||||
func (p *parser) parseType() types.Type {
|
||||
switch p.tok {
|
||||
case scanner.Ident:
|
||||
switch p.lit {
|
||||
default:
|
||||
return p.parseBasicType()
|
||||
case "struct":
|
||||
return p.parseStructType()
|
||||
case "func":
|
||||
// FuncType
|
||||
p.next()
|
||||
return p.parseSignature(nil)
|
||||
case "interface":
|
||||
return p.parseInterfaceType()
|
||||
case "map":
|
||||
return p.parseMapType()
|
||||
case "chan":
|
||||
return p.parseChanType()
|
||||
}
|
||||
case '@':
|
||||
// TypeName
|
||||
pkg, name := p.parseExportedName()
|
||||
return declTypeName(pkg, name).Type()
|
||||
case '[':
|
||||
p.next() // look ahead
|
||||
if p.tok == ']' {
|
||||
// SliceType
|
||||
p.next()
|
||||
return types.NewSlice(p.parseType())
|
||||
}
|
||||
return p.parseArrayType()
|
||||
case '*':
|
||||
// PointerType
|
||||
p.next()
|
||||
return types.NewPointer(p.parseType())
|
||||
case '<':
|
||||
return p.parseChanType()
|
||||
case '(':
|
||||
// "(" Type ")"
|
||||
p.next()
|
||||
typ := p.parseType()
|
||||
p.expect(')')
|
||||
return typ
|
||||
}
|
||||
p.errorf("expected type, got %s (%q)", scanner.TokenString(p.tok), p.lit)
|
||||
return nil
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Declarations
|
||||
|
||||
// ImportDecl = "import" PackageName PackageId .
|
||||
//
|
||||
func (p *parser) parseImportDecl() {
|
||||
p.expectKeyword("import")
|
||||
name := p.parsePackageName()
|
||||
p.getPkg(p.parsePackageId(), name)
|
||||
}
|
||||
|
||||
// int_lit = [ "+" | "-" ] { "0" ... "9" } .
|
||||
//
|
||||
func (p *parser) parseInt() string {
|
||||
s := ""
|
||||
switch p.tok {
|
||||
case '-':
|
||||
s = "-"
|
||||
p.next()
|
||||
case '+':
|
||||
p.next()
|
||||
}
|
||||
return s + p.expect(scanner.Int)
|
||||
}
|
||||
|
||||
// number = int_lit [ "p" int_lit ] .
|
||||
//
|
||||
func (p *parser) parseNumber() (typ *types.Basic, val exact.Value) {
|
||||
// mantissa
|
||||
mant := exact.MakeFromLiteral(p.parseInt(), token.INT)
|
||||
if mant == nil {
|
||||
panic("invalid mantissa")
|
||||
}
|
||||
|
||||
if p.lit == "p" {
|
||||
// exponent (base 2)
|
||||
p.next()
|
||||
exp, err := strconv.ParseInt(p.parseInt(), 10, 0)
|
||||
if err != nil {
|
||||
p.error(err)
|
||||
}
|
||||
if exp < 0 {
|
||||
denom := exact.MakeInt64(1)
|
||||
denom = exact.Shift(denom, token.SHL, uint(-exp))
|
||||
typ = types.Typ[types.UntypedFloat]
|
||||
val = exact.BinaryOp(mant, token.QUO, denom)
|
||||
return
|
||||
}
|
||||
if exp > 0 {
|
||||
mant = exact.Shift(mant, token.SHL, uint(exp))
|
||||
}
|
||||
typ = types.Typ[types.UntypedFloat]
|
||||
val = mant
|
||||
return
|
||||
}
|
||||
|
||||
typ = types.Typ[types.UntypedInt]
|
||||
val = mant
|
||||
return
|
||||
}
|
||||
|
||||
// ConstDecl = "const" ExportedName [ Type ] "=" Literal .
|
||||
// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit .
|
||||
// bool_lit = "true" | "false" .
|
||||
// complex_lit = "(" float_lit "+" float_lit "i" ")" .
|
||||
// rune_lit = "(" int_lit "+" int_lit ")" .
|
||||
// string_lit = `"` { unicode_char } `"` .
|
||||
//
|
||||
func (p *parser) parseConstDecl() {
|
||||
p.expectKeyword("const")
|
||||
pkg, name := p.parseExportedName()
|
||||
|
||||
var typ0 types.Type
|
||||
if p.tok != '=' {
|
||||
typ0 = p.parseType()
|
||||
}
|
||||
|
||||
p.expect('=')
|
||||
var typ types.Type
|
||||
var val exact.Value
|
||||
switch p.tok {
|
||||
case scanner.Ident:
|
||||
// bool_lit
|
||||
if p.lit != "true" && p.lit != "false" {
|
||||
p.error("expected true or false")
|
||||
}
|
||||
typ = types.Typ[types.UntypedBool]
|
||||
val = exact.MakeBool(p.lit == "true")
|
||||
p.next()
|
||||
|
||||
case '-', scanner.Int:
|
||||
// int_lit
|
||||
typ, val = p.parseNumber()
|
||||
|
||||
case '(':
|
||||
// complex_lit or rune_lit
|
||||
p.next()
|
||||
if p.tok == scanner.Char {
|
||||
p.next()
|
||||
p.expect('+')
|
||||
typ = types.Typ[types.UntypedRune]
|
||||
_, val = p.parseNumber()
|
||||
p.expect(')')
|
||||
break
|
||||
}
|
||||
_, re := p.parseNumber()
|
||||
p.expect('+')
|
||||
_, im := p.parseNumber()
|
||||
p.expectKeyword("i")
|
||||
p.expect(')')
|
||||
typ = types.Typ[types.UntypedComplex]
|
||||
val = exact.BinaryOp(re, token.ADD, exact.MakeImag(im))
|
||||
|
||||
case scanner.Char:
|
||||
// rune_lit
|
||||
typ = types.Typ[types.UntypedRune]
|
||||
val = exact.MakeFromLiteral(p.lit, token.CHAR)
|
||||
p.next()
|
||||
|
||||
case scanner.String:
|
||||
// string_lit
|
||||
typ = types.Typ[types.UntypedString]
|
||||
val = exact.MakeFromLiteral(p.lit, token.STRING)
|
||||
p.next()
|
||||
|
||||
default:
|
||||
p.errorf("expected literal got %s", scanner.TokenString(p.tok))
|
||||
}
|
||||
|
||||
if typ0 == nil {
|
||||
typ0 = typ
|
||||
}
|
||||
|
||||
pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, typ0, val))
|
||||
}
|
||||
|
||||
// TypeDecl = "type" ExportedName Type .
|
||||
//
|
||||
func (p *parser) parseTypeDecl() {
|
||||
p.expectKeyword("type")
|
||||
pkg, name := p.parseExportedName()
|
||||
obj := declTypeName(pkg, name)
|
||||
|
||||
// The type object may have been imported before and thus already
|
||||
// have a type associated with it. We still need to parse the type
|
||||
// structure, but throw it away if the object already has a type.
|
||||
// This ensures that all imports refer to the same type object for
|
||||
// a given type declaration.
|
||||
typ := p.parseType()
|
||||
|
||||
if name := obj.Type().(*types.Named); name.Underlying() == nil {
|
||||
name.SetUnderlying(typ)
|
||||
}
|
||||
}
|
||||
|
||||
// VarDecl = "var" ExportedName Type .
|
||||
//
|
||||
func (p *parser) parseVarDecl() {
|
||||
p.expectKeyword("var")
|
||||
pkg, name := p.parseExportedName()
|
||||
typ := p.parseType()
|
||||
pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, typ))
|
||||
}
|
||||
|
||||
// Func = Signature [ Body ] .
|
||||
// Body = "{" ... "}" .
|
||||
//
|
||||
func (p *parser) parseFunc(recv *types.Var) *types.Signature {
|
||||
sig := p.parseSignature(recv)
|
||||
if p.tok == '{' {
|
||||
p.next()
|
||||
for i := 1; i > 0; p.next() {
|
||||
switch p.tok {
|
||||
case '{':
|
||||
i++
|
||||
case '}':
|
||||
i--
|
||||
}
|
||||
}
|
||||
}
|
||||
return sig
|
||||
}
|
||||
|
||||
// MethodDecl = "func" Receiver Name Func .
|
||||
// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" .
|
||||
//
|
||||
func (p *parser) parseMethodDecl() {
|
||||
// "func" already consumed
|
||||
p.expect('(')
|
||||
recv, _ := p.parseParameter() // receiver
|
||||
p.expect(')')
|
||||
|
||||
// determine receiver base type object
|
||||
base := deref(recv.Type()).(*types.Named)
|
||||
|
||||
// parse method name, signature, and possibly inlined body
|
||||
_, name := p.parseName(true)
|
||||
sig := p.parseFunc(recv)
|
||||
|
||||
// methods always belong to the same package as the base type object
|
||||
pkg := base.Obj().Pkg()
|
||||
|
||||
// add method to type unless type was imported before
|
||||
// and method exists already
|
||||
// TODO(gri) This leads to a quadratic algorithm - ok for now because method counts are small.
|
||||
base.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig))
|
||||
}
|
||||
|
||||
// FuncDecl = "func" ExportedName Func .
|
||||
//
|
||||
func (p *parser) parseFuncDecl() {
|
||||
// "func" already consumed
|
||||
pkg, name := p.parseExportedName()
|
||||
typ := p.parseFunc(nil)
|
||||
pkg.Scope().Insert(types.NewFunc(token.NoPos, pkg, name, typ))
|
||||
}
|
||||
|
||||
// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
|
||||
//
|
||||
func (p *parser) parseDecl() {
|
||||
if p.tok == scanner.Ident {
|
||||
switch p.lit {
|
||||
case "import":
|
||||
p.parseImportDecl()
|
||||
case "const":
|
||||
p.parseConstDecl()
|
||||
case "type":
|
||||
p.parseTypeDecl()
|
||||
case "var":
|
||||
p.parseVarDecl()
|
||||
case "func":
|
||||
p.next() // look ahead
|
||||
if p.tok == '(' {
|
||||
p.parseMethodDecl()
|
||||
} else {
|
||||
p.parseFuncDecl()
|
||||
}
|
||||
}
|
||||
}
|
||||
p.expect('\n')
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Export
|
||||
|
||||
// Export = "PackageClause { Decl } "$$" .
|
||||
// PackageClause = "package" PackageName [ "safe" ] "\n" .
|
||||
//
|
||||
func (p *parser) parseExport() *types.Package {
|
||||
p.expectKeyword("package")
|
||||
name := p.parsePackageName()
|
||||
if p.tok == scanner.Ident && p.lit == "safe" {
|
||||
// package was compiled with -u option - ignore
|
||||
p.next()
|
||||
}
|
||||
p.expect('\n')
|
||||
|
||||
pkg := p.getPkg(p.id, name)
|
||||
|
||||
for p.tok != '$' && p.tok != scanner.EOF {
|
||||
p.parseDecl()
|
||||
}
|
||||
|
||||
if ch := p.scanner.Peek(); p.tok != '$' || ch != '$' {
|
||||
// don't call next()/expect() since reading past the
|
||||
// export data may cause scanner errors (e.g. NUL chars)
|
||||
p.errorf("expected '$$', got %s %c", scanner.TokenString(p.tok), ch)
|
||||
}
|
||||
|
||||
if n := p.scanner.ErrorCount; n != 0 {
|
||||
p.errorf("expected no scanner errors, got %d", n)
|
||||
}
|
||||
|
||||
// Record all referenced packages as imports.
|
||||
var imports []*types.Package
|
||||
for id, pkg2 := range p.localPkgs {
|
||||
if id == p.id {
|
||||
continue // avoid self-edge
|
||||
}
|
||||
imports = append(imports, pkg2)
|
||||
}
|
||||
sort.Sort(byPath(imports))
|
||||
pkg.SetImports(imports)
|
||||
|
||||
// package was imported completely and without errors
|
||||
pkg.MarkComplete()
|
||||
|
||||
return pkg
|
||||
}
|
||||
|
||||
type byPath []*types.Package
|
||||
|
||||
func (a byPath) Len() int { return len(a) }
|
||||
func (a byPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
|
||||
func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() }
|
@ -1,242 +0,0 @@
|
||||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package gcimporter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/build"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
// skipSpecialPlatforms causes the test to be skipped for platforms where
|
||||
// builders (build.golang.org) don't have access to compiled packages for
|
||||
// import.
|
||||
func skipSpecialPlatforms(t *testing.T) {
|
||||
switch platform := runtime.GOOS + "-" + runtime.GOARCH; platform {
|
||||
case "nacl-amd64p32",
|
||||
"nacl-386",
|
||||
"nacl-arm",
|
||||
"darwin-arm",
|
||||
"darwin-arm64":
|
||||
t.Skipf("no compiled packages available for import on %s", platform)
|
||||
}
|
||||
}
|
||||
|
||||
var gcPath string // Go compiler path
|
||||
|
||||
func init() {
|
||||
if char, err := build.ArchChar(runtime.GOARCH); err == nil {
|
||||
gcPath = filepath.Join(build.ToolDir, char+"g")
|
||||
return
|
||||
}
|
||||
gcPath = "unknown-GOARCH-compiler"
|
||||
}
|
||||
|
||||
func compile(t *testing.T, dirname, filename string) string {
|
||||
cmd := exec.Command(gcPath, filename)
|
||||
cmd.Dir = dirname
|
||||
out, err := cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
t.Logf("%s", out)
|
||||
t.Fatalf("%s %s failed: %s", gcPath, filename, err)
|
||||
}
|
||||
archCh, _ := build.ArchChar(runtime.GOARCH)
|
||||
// filename should end with ".go"
|
||||
return filepath.Join(dirname, filename[:len(filename)-2]+archCh)
|
||||
}
|
||||
|
||||
// Use the same global imports map for all tests. The effect is
|
||||
// as if all tested packages were imported into a single package.
|
||||
var imports = make(map[string]*types.Package)
|
||||
|
||||
func testPath(t *testing.T, path string) *types.Package {
|
||||
t0 := time.Now()
|
||||
pkg, err := Import(imports, path)
|
||||
if err != nil {
|
||||
t.Errorf("testPath(%s): %s", path, err)
|
||||
return nil
|
||||
}
|
||||
t.Logf("testPath(%s): %v", path, time.Since(t0))
|
||||
return pkg
|
||||
}
|
||||
|
||||
const maxTime = 30 * time.Second
|
||||
|
||||
func testDir(t *testing.T, dir string, endTime time.Time) (nimports int) {
|
||||
dirname := filepath.Join(runtime.GOROOT(), "pkg", runtime.GOOS+"_"+runtime.GOARCH, dir)
|
||||
list, err := ioutil.ReadDir(dirname)
|
||||
if err != nil {
|
||||
t.Fatalf("testDir(%s): %s", dirname, err)
|
||||
}
|
||||
for _, f := range list {
|
||||
if time.Now().After(endTime) {
|
||||
t.Log("testing time used up")
|
||||
return
|
||||
}
|
||||
switch {
|
||||
case !f.IsDir():
|
||||
// try extensions
|
||||
for _, ext := range pkgExts {
|
||||
if strings.HasSuffix(f.Name(), ext) {
|
||||
name := f.Name()[0 : len(f.Name())-len(ext)] // remove extension
|
||||
if testPath(t, filepath.Join(dir, name)) != nil {
|
||||
nimports++
|
||||
}
|
||||
}
|
||||
}
|
||||
case f.IsDir():
|
||||
nimports += testDir(t, filepath.Join(dir, f.Name()), endTime)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func TestImport(t *testing.T) {
|
||||
// This package only handles gc export data.
|
||||
if runtime.Compiler != "gc" {
|
||||
t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler)
|
||||
return
|
||||
}
|
||||
|
||||
// On cross-compile builds, the path will not exist.
|
||||
// Need to use GOHOSTOS, which is not available.
|
||||
if _, err := os.Stat(gcPath); err != nil {
|
||||
t.Skipf("skipping test: %v", err)
|
||||
}
|
||||
|
||||
if outFn := compile(t, "testdata", "exports.go"); outFn != "" {
|
||||
defer os.Remove(outFn)
|
||||
}
|
||||
|
||||
nimports := 0
|
||||
if pkg := testPath(t, "./testdata/exports"); pkg != nil {
|
||||
nimports++
|
||||
// The package's Imports should include all the types
|
||||
// referenced by the exportdata, which may be more than
|
||||
// the import statements in the package's source, but
|
||||
// fewer than the transitive closure of dependencies.
|
||||
want := `[package ast ("go/ast") package token ("go/token") package runtime ("runtime")]`
|
||||
got := fmt.Sprint(pkg.Imports())
|
||||
if got != want {
|
||||
t.Errorf(`Package("exports").Imports() = %s, want %s`, got, want)
|
||||
}
|
||||
}
|
||||
nimports += testDir(t, "", time.Now().Add(maxTime)) // installed packages
|
||||
t.Logf("tested %d imports", nimports)
|
||||
}
|
||||
|
||||
var importedObjectTests = []struct {
|
||||
name string
|
||||
want string
|
||||
}{
|
||||
{"unsafe.Pointer", "type Pointer unsafe.Pointer"},
|
||||
{"math.Pi", "const Pi untyped float"},
|
||||
{"io.Reader", "type Reader interface{Read(p []byte) (n int, err error)}"},
|
||||
{"io.ReadWriter", "type ReadWriter interface{Read(p []byte) (n int, err error); Write(p []byte) (n int, err error)}"},
|
||||
{"math.Sin", "func Sin(x float64) float64"},
|
||||
// TODO(gri) add more tests
|
||||
}
|
||||
|
||||
func TestImportedTypes(t *testing.T) {
|
||||
skipSpecialPlatforms(t)
|
||||
|
||||
// This package only handles gc export data.
|
||||
if runtime.Compiler != "gc" {
|
||||
t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler)
|
||||
return
|
||||
}
|
||||
|
||||
for _, test := range importedObjectTests {
|
||||
s := strings.Split(test.name, ".")
|
||||
if len(s) != 2 {
|
||||
t.Fatal("inconsistent test data")
|
||||
}
|
||||
importPath := s[0]
|
||||
objName := s[1]
|
||||
|
||||
pkg, err := Import(imports, importPath)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
continue
|
||||
}
|
||||
|
||||
obj := pkg.Scope().Lookup(objName)
|
||||
if obj == nil {
|
||||
t.Errorf("%s: object not found", test.name)
|
||||
continue
|
||||
}
|
||||
|
||||
got := types.ObjectString(obj, types.RelativeTo(pkg))
|
||||
if got != test.want {
|
||||
t.Errorf("%s: got %q; want %q", test.name, got, test.want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestIssue5815(t *testing.T) {
|
||||
skipSpecialPlatforms(t)
|
||||
|
||||
// This package only handles gc export data.
|
||||
if runtime.Compiler != "gc" {
|
||||
t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler)
|
||||
return
|
||||
}
|
||||
|
||||
pkg, err := Import(make(map[string]*types.Package), "strings")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
scope := pkg.Scope()
|
||||
for _, name := range scope.Names() {
|
||||
obj := scope.Lookup(name)
|
||||
if obj.Pkg() == nil {
|
||||
t.Errorf("no pkg for %s", obj)
|
||||
}
|
||||
if tname, _ := obj.(*types.TypeName); tname != nil {
|
||||
named := tname.Type().(*types.Named)
|
||||
for i := 0; i < named.NumMethods(); i++ {
|
||||
m := named.Method(i)
|
||||
if m.Pkg() == nil {
|
||||
t.Errorf("no pkg for %s", m)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Smoke test to ensure that imported methods get the correct package.
|
||||
func TestCorrectMethodPackage(t *testing.T) {
|
||||
skipSpecialPlatforms(t)
|
||||
|
||||
// This package only handles gc export data.
|
||||
if runtime.Compiler != "gc" {
|
||||
t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler)
|
||||
return
|
||||
}
|
||||
|
||||
imports := make(map[string]*types.Package)
|
||||
_, err := Import(imports, "net/http")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
mutex := imports["sync"].Scope().Lookup("Mutex").(*types.TypeName).Type()
|
||||
mset := types.NewMethodSet(types.NewPointer(mutex)) // methods of *sync.Mutex
|
||||
sel := mset.Lookup(nil, "Lock")
|
||||
lock := sel.Obj().(*types.Func)
|
||||
if got, want := lock.Pkg().Path(), "sync"; got != want {
|
||||
t.Errorf("got package path %q; want %q", got, want)
|
||||
}
|
||||
}
|
go/gcimporter/testdata/exports.go (vendored, 89 lines deleted)
@ -1,89 +0,0 @@
|
||||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file is used to generate an object file which
|
||||
// serves as test file for gcimporter_test.go.
|
||||
|
||||
package exports
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
)
|
||||
|
||||
// Issue 3682: Correctly read dotted identifiers from export data.
|
||||
const init1 = 0
|
||||
|
||||
func init() {}
|
||||
|
||||
const (
|
||||
C0 int = 0
|
||||
C1 = 3.14159265
|
||||
C2 = 2.718281828i
|
||||
C3 = -123.456e-789
|
||||
C4 = +123.456E+789
|
||||
C5 = 1234i
|
||||
C6 = "foo\n"
|
||||
C7 = `bar\n`
|
||||
)
|
||||
|
||||
type (
|
||||
T1 int
|
||||
T2 [10]int
|
||||
T3 []int
|
||||
T4 *int
|
||||
T5 chan int
|
||||
T6a chan<- int
|
||||
T6b chan (<-chan int)
|
||||
T6c chan<- (chan int)
|
||||
T7 <-chan *ast.File
|
||||
T8 struct{}
|
||||
T9 struct {
|
||||
a int
|
||||
b, c float32
|
||||
d []string `go:"tag"`
|
||||
}
|
||||
T10 struct {
|
||||
T8
|
||||
T9
|
||||
_ *T10
|
||||
}
|
||||
T11 map[int]string
|
||||
T12 interface{}
|
||||
T13 interface {
|
||||
m1()
|
||||
m2(int) float32
|
||||
}
|
||||
T14 interface {
|
||||
T12
|
||||
T13
|
||||
m3(x ...struct{}) []T9
|
||||
}
|
||||
T15 func()
|
||||
T16 func(int)
|
||||
T17 func(x int)
|
||||
T18 func() float32
|
||||
T19 func() (x float32)
|
||||
T20 func(...interface{})
|
||||
T21 struct{ next *T21 }
|
||||
T22 struct{ link *T23 }
|
||||
T23 struct{ link *T22 }
|
||||
T24 *T24
|
||||
T25 *T26
|
||||
T26 *T27
|
||||
T27 *T25
|
||||
T28 func(T28) T28
|
||||
)
|
||||
|
||||
var (
|
||||
V0 int
|
||||
V1 = -991.0
|
||||
)
|
||||
|
||||
func F1() {}
|
||||
func F2(x int) {}
|
||||
func F3() int { return 0 }
|
||||
func F4() float32 { return 0 }
|
||||
func F5(a, b, c int, u, v, w struct{ x, y T1 }, more ...interface{}) (p, q, r chan<- T10)
|
||||
|
||||
func (p *T1) M1()
|
@ -1,462 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package importer
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/binary"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/go/exact"
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
// debugging support
|
||||
const (
|
||||
debug = false // emit debugging data
|
||||
trace = false // print emitted data
|
||||
)
|
||||
|
||||
// format returns a byte indicating the low-level encoding/decoding format
|
||||
// (debug vs product).
|
||||
func format() byte {
|
||||
if debug {
|
||||
return 'd'
|
||||
}
|
||||
return 'p'
|
||||
}
|
||||
|
||||
// ExportData serializes the interface (exported package objects)
|
||||
// of package pkg and returns the corresponding data. The export
|
||||
// format is described elsewhere (TODO).
|
||||
func ExportData(pkg *types.Package) []byte {
|
||||
p := exporter{
|
||||
data: append([]byte(magic), format()),
|
||||
pkgIndex: make(map[*types.Package]int),
|
||||
typIndex: make(map[types.Type]int),
|
||||
}
|
||||
|
||||
// populate typIndex with predeclared types
|
||||
for _, t := range predeclared {
|
||||
p.typIndex[t] = len(p.typIndex)
|
||||
}
|
||||
|
||||
if trace {
|
||||
p.tracef("export %s\n", pkg.Name())
|
||||
defer p.tracef("\n")
|
||||
}
|
||||
|
||||
p.string(version)
|
||||
|
||||
p.pkg(pkg)
|
||||
|
||||
// collect exported objects from package scope
|
||||
var list []types.Object
|
||||
scope := pkg.Scope()
|
||||
for _, name := range scope.Names() {
|
||||
if exported(name) {
|
||||
list = append(list, scope.Lookup(name))
|
||||
}
|
||||
}
|
||||
|
||||
// write objects
|
||||
p.int(len(list))
|
||||
for _, obj := range list {
|
||||
p.obj(obj)
|
||||
}
|
||||
|
||||
return p.data
|
||||
}
|
||||
|
||||
type exporter struct {
|
||||
data []byte
|
||||
pkgIndex map[*types.Package]int
|
||||
typIndex map[types.Type]int
|
||||
|
||||
// tracing support
|
||||
indent string
|
||||
}
|
||||
|
||||
func (p *exporter) pkg(pkg *types.Package) {
|
||||
if trace {
|
||||
p.tracef("package { ")
|
||||
defer p.tracef("} ")
|
||||
}
|
||||
|
||||
if pkg == nil {
|
||||
panic("unexpected nil pkg")
|
||||
}
|
||||
|
||||
// if the package was seen before, write its index (>= 0)
|
||||
if i, ok := p.pkgIndex[pkg]; ok {
|
||||
p.int(i)
|
||||
return
|
||||
}
|
||||
p.pkgIndex[pkg] = len(p.pkgIndex)
|
||||
|
||||
// otherwise, write the package tag (< 0) and package data
|
||||
p.int(packageTag)
|
||||
p.string(pkg.Name())
|
||||
p.string(pkg.Path())
|
||||
}
|
||||
|
||||
func (p *exporter) obj(obj types.Object) {
|
||||
if trace {
|
||||
p.tracef("object %s {\n", obj.Name())
|
||||
defer p.tracef("}\n")
|
||||
}
|
||||
|
||||
switch obj := obj.(type) {
|
||||
case *types.Const:
|
||||
p.int(constTag)
|
||||
p.string(obj.Name())
|
||||
p.typ(obj.Type())
|
||||
p.value(obj.Val())
|
||||
case *types.TypeName:
|
||||
p.int(typeTag)
|
||||
// name is written by corresponding named type
|
||||
p.typ(obj.Type().(*types.Named))
|
||||
case *types.Var:
|
||||
p.int(varTag)
|
||||
p.string(obj.Name())
|
||||
p.typ(obj.Type())
|
||||
case *types.Func:
|
||||
p.int(funcTag)
|
||||
p.string(obj.Name())
|
||||
p.typ(obj.Type())
|
||||
default:
|
||||
panic(fmt.Sprintf("unexpected object type %T", obj))
|
||||
}
|
||||
}
|
||||
|
||||
func (p *exporter) value(x exact.Value) {
|
||||
if trace {
|
||||
p.tracef("value { ")
|
||||
defer p.tracef("} ")
|
||||
}
|
||||
|
||||
switch kind := x.Kind(); kind {
|
||||
case exact.Bool:
|
||||
tag := falseTag
|
||||
if exact.BoolVal(x) {
|
||||
tag = trueTag
|
||||
}
|
||||
p.int(tag)
|
||||
case exact.Int:
|
||||
if i, ok := exact.Int64Val(x); ok {
|
||||
p.int(int64Tag)
|
||||
p.int64(i)
|
||||
return
|
||||
}
|
||||
p.int(floatTag)
|
||||
p.float(x)
|
||||
case exact.Float:
|
||||
p.int(fractionTag)
|
||||
p.fraction(x)
|
||||
case exact.Complex:
|
||||
p.int(complexTag)
|
||||
p.fraction(exact.Real(x))
|
||||
p.fraction(exact.Imag(x))
|
||||
case exact.String:
|
||||
p.int(stringTag)
|
||||
p.string(exact.StringVal(x))
|
||||
default:
|
||||
panic(fmt.Sprintf("unexpected value kind %d", kind))
|
||||
}
|
||||
}
|
||||
|
||||
func (p *exporter) float(x exact.Value) {
|
||||
sign := exact.Sign(x)
|
||||
p.int(sign)
|
||||
if sign == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
p.ufloat(x)
|
||||
}
|
||||
|
||||
func (p *exporter) fraction(x exact.Value) {
|
||||
sign := exact.Sign(x)
|
||||
p.int(sign)
|
||||
if sign == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
p.ufloat(exact.Num(x))
|
||||
p.ufloat(exact.Denom(x))
|
||||
}
|
||||
|
||||
// ufloat writes abs(x) in the form of a binary exponent
// followed by its mantissa bytes; x must be != 0.
|
||||
func (p *exporter) ufloat(x exact.Value) {
|
||||
mant := exact.Bytes(x)
|
||||
exp8 := -1
|
||||
for i, b := range mant {
|
||||
if b != 0 {
|
||||
exp8 = i
|
||||
break
|
||||
}
|
||||
}
|
||||
if exp8 < 0 {
|
||||
panic(fmt.Sprintf("%s has no mantissa", x))
|
||||
}
|
||||
p.int(exp8 * 8)
|
||||
p.bytes(mant[exp8:])
|
||||
}
|
||||
|
||||
func (p *exporter) typ(typ types.Type) {
|
||||
if trace {
|
||||
p.tracef("type {\n")
|
||||
defer p.tracef("}\n")
|
||||
}
|
||||
|
||||
// if the type was seen before, write its index (>= 0)
|
||||
if i, ok := p.typIndex[typ]; ok {
|
||||
p.int(i)
|
||||
return
|
||||
}
|
||||
p.typIndex[typ] = len(p.typIndex)
|
||||
|
||||
// otherwise, write the type tag (< 0) and type data
|
||||
switch t := typ.(type) {
|
||||
case *types.Array:
|
||||
p.int(arrayTag)
|
||||
p.int64(t.Len())
|
||||
p.typ(t.Elem())
|
||||
|
||||
case *types.Slice:
|
||||
p.int(sliceTag)
|
||||
p.typ(t.Elem())
|
||||
|
||||
case *types.Struct:
|
||||
p.int(structTag)
|
||||
n := t.NumFields()
|
||||
p.int(n)
|
||||
for i := 0; i < n; i++ {
|
||||
p.field(t.Field(i))
|
||||
p.string(t.Tag(i))
|
||||
}
|
||||
|
||||
case *types.Pointer:
|
||||
p.int(pointerTag)
|
||||
p.typ(t.Elem())
|
||||
|
||||
case *types.Signature:
|
||||
p.int(signatureTag)
|
||||
p.signature(t)
|
||||
|
||||
case *types.Interface:
|
||||
p.int(interfaceTag)
|
||||
|
||||
// write embedded interfaces
|
||||
m := t.NumEmbeddeds()
|
||||
p.int(m)
|
||||
for i := 0; i < m; i++ {
|
||||
p.typ(t.Embedded(i))
|
||||
}
|
||||
|
||||
// write methods
|
||||
n := t.NumExplicitMethods()
|
||||
p.int(n)
|
||||
for i := 0; i < n; i++ {
|
||||
m := t.ExplicitMethod(i)
|
||||
p.qualifiedName(m.Pkg(), m.Name())
|
||||
p.typ(m.Type())
|
||||
}
|
||||
|
||||
case *types.Map:
|
||||
p.int(mapTag)
|
||||
p.typ(t.Key())
|
||||
p.typ(t.Elem())
|
||||
|
||||
case *types.Chan:
|
||||
p.int(chanTag)
|
||||
p.int(int(t.Dir()))
|
||||
p.typ(t.Elem())
|
||||
|
||||
case *types.Named:
|
||||
p.int(namedTag)
|
||||
|
||||
// write type object
|
||||
obj := t.Obj()
|
||||
p.string(obj.Name())
|
||||
p.pkg(obj.Pkg())
|
||||
|
||||
// write underlying type
|
||||
p.typ(t.Underlying())
|
||||
|
||||
// write associated methods
|
||||
n := t.NumMethods()
|
||||
p.int(n)
|
||||
for i := 0; i < n; i++ {
|
||||
m := t.Method(i)
|
||||
p.string(m.Name())
|
||||
p.typ(m.Type())
|
||||
}
|
||||
|
||||
default:
|
||||
panic("unreachable")
|
||||
}
|
||||
}
|
||||
|
||||
func (p *exporter) field(f *types.Var) {
|
||||
// anonymous fields have "" name
|
||||
name := ""
|
||||
if !f.Anonymous() {
|
||||
name = f.Name()
|
||||
}
|
||||
|
||||
// qualifiedName will always emit the field package for
|
||||
// anonymous fields because "" is not an exported name.
|
||||
p.qualifiedName(f.Pkg(), name)
|
||||
p.typ(f.Type())
|
||||
}
|
||||
|
||||
func (p *exporter) qualifiedName(pkg *types.Package, name string) {
|
||||
p.string(name)
|
||||
// exported names don't need package
|
||||
if !exported(name) {
|
||||
if pkg == nil {
|
||||
panic(fmt.Sprintf("nil package for unexported qualified name %s", name))
|
||||
}
|
||||
p.pkg(pkg)
|
||||
}
|
||||
}
|
||||
|
||||
func (p *exporter) signature(sig *types.Signature) {
|
||||
// We need the receiver information (T vs *T)
|
||||
// for methods associated with named types.
|
||||
// We do not record interface receiver types in the
|
||||
// export data because 1) the importer can derive them
|
||||
// from the interface type and 2) they create cycles
|
||||
// in the type graph.
|
||||
if recv := sig.Recv(); recv != nil {
|
||||
if _, ok := recv.Type().Underlying().(*types.Interface); !ok {
|
||||
// 1-element tuple
|
||||
p.int(1)
|
||||
p.param(recv)
|
||||
} else {
|
||||
// 0-element tuple
|
||||
p.int(0)
|
||||
}
|
||||
} else {
|
||||
// 0-element tuple
|
||||
p.int(0)
|
||||
}
|
||||
p.tuple(sig.Params())
|
||||
p.tuple(sig.Results())
|
||||
if sig.Variadic() {
|
||||
p.int(1)
|
||||
} else {
|
||||
p.int(0)
|
||||
}
|
||||
}
|
||||
|
||||
func (p *exporter) param(v *types.Var) {
|
||||
p.string(v.Name())
|
||||
p.typ(v.Type())
|
||||
}
|
||||
|
||||
func (p *exporter) tuple(t *types.Tuple) {
|
||||
n := t.Len()
|
||||
p.int(n)
|
||||
for i := 0; i < n; i++ {
|
||||
p.param(t.At(i))
|
||||
}
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// encoders
|
||||
|
||||
func (p *exporter) string(s string) {
|
||||
p.bytes([]byte(s)) // (could be inlined if extra allocation matters)
|
||||
}
|
||||
|
||||
func (p *exporter) int(x int) {
|
||||
p.int64(int64(x))
|
||||
}
|
||||
|
||||
func (p *exporter) int64(x int64) {
|
||||
if debug {
|
||||
p.marker('i')
|
||||
}
|
||||
|
||||
if trace {
|
||||
p.tracef("%d ", x)
|
||||
}
|
||||
|
||||
p.rawInt64(x)
|
||||
}
|
||||
|
||||
func (p *exporter) bytes(b []byte) {
|
||||
if debug {
|
||||
p.marker('b')
|
||||
}
|
||||
|
||||
if trace {
|
||||
p.tracef("%q ", b)
|
||||
}
|
||||
|
||||
p.rawInt64(int64(len(b)))
|
||||
if len(b) > 0 {
|
||||
p.data = append(p.data, b...)
|
||||
}
|
||||
}
|
||||
|
||||
// marker emits a marker byte and position information which makes
|
||||
// it easy for a reader to detect if it is "out of sync". Used for
|
||||
// debug format only.
|
||||
func (p *exporter) marker(m byte) {
|
||||
if debug {
|
||||
p.data = append(p.data, m)
|
||||
p.rawInt64(int64(len(p.data)))
|
||||
}
|
||||
}
|
||||
|
||||
// rawInt64 should only be used by low-level encoders
|
||||
func (p *exporter) rawInt64(x int64) {
|
||||
var tmp [binary.MaxVarintLen64]byte
|
||||
n := binary.PutVarint(tmp[:], x)
|
||||
p.data = append(p.data, tmp[:n]...)
|
||||
}
|
||||
|
||||
// utility functions
|
||||
|
||||
func (p *exporter) tracef(format string, args ...interface{}) {
|
||||
// rewrite format string to take care of indentation
|
||||
const indent = ". "
|
||||
if strings.IndexAny(format, "{}\n") >= 0 {
|
||||
var buf bytes.Buffer
|
||||
for i := 0; i < len(format); i++ {
|
||||
// no need to deal with runes
|
||||
ch := format[i]
|
||||
switch ch {
|
||||
case '{':
|
||||
p.indent += indent
|
||||
case '}':
|
||||
p.indent = p.indent[:len(p.indent)-len(indent)]
|
||||
if i+1 < len(format) && format[i+1] == '\n' {
|
||||
buf.WriteByte('\n')
|
||||
buf.WriteString(p.indent)
|
||||
buf.WriteString("} ")
|
||||
i++
|
||||
continue
|
||||
}
|
||||
}
|
||||
buf.WriteByte(ch)
|
||||
if ch == '\n' {
|
||||
buf.WriteString(p.indent)
|
||||
}
|
||||
}
|
||||
format = buf.String()
|
||||
}
|
||||
fmt.Printf(format, args...)
|
||||
}
|
||||
|
||||
func exported(name string) bool {
|
||||
return ast.IsExported(name)
|
||||
}
|
@ -1,456 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This implementation is loosely based on the algorithm described
|
||||
// in: "On the linearization of graphs and writing symbol files",
|
||||
// by R. Griesemer, Technical Report 156, ETH Zürich, 1991.
|
||||
|
||||
// Package importer implements an exporter and importer for Go export data.
|
||||
package importer // import "golang.org/x/tools/go/importer"
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"fmt"
|
||||
"go/token"
|
||||
|
||||
"golang.org/x/tools/go/exact"
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
// ImportData imports a package from the serialized package data
|
||||
// and returns the number of bytes consumed and a reference to the package.
|
||||
// If data is obviously malformed, an error is returned but in
|
||||
// general it is not recommended to call ImportData on untrusted
|
||||
// data.
|
||||
func ImportData(imports map[string]*types.Package, data []byte) (int, *types.Package, error) {
|
||||
datalen := len(data)
|
||||
|
||||
// check magic string
|
||||
var s string
|
||||
if len(data) >= len(magic) {
|
||||
s = string(data[:len(magic)])
|
||||
data = data[len(magic):]
|
||||
}
|
||||
if s != magic {
|
||||
return 0, nil, fmt.Errorf("incorrect magic string: got %q; want %q", s, magic)
|
||||
}
|
||||
|
||||
// check low-level encoding format
|
||||
var m byte = 'm' // missing format
|
||||
if len(data) > 0 {
|
||||
m = data[0]
|
||||
data = data[1:]
|
||||
}
|
||||
if m != format() {
|
||||
return 0, nil, fmt.Errorf("incorrect low-level encoding format: got %c; want %c", m, format())
|
||||
}
|
||||
|
||||
p := importer{
|
||||
data: data,
|
||||
datalen: datalen,
|
||||
imports: imports,
|
||||
}
|
||||
|
||||
// populate typList with predeclared types
|
||||
for _, t := range predeclared {
|
||||
p.typList = append(p.typList, t)
|
||||
}
|
||||
|
||||
if v := p.string(); v != version {
|
||||
return 0, nil, fmt.Errorf("unknown version: got %s; want %s", v, version)
|
||||
}
|
||||
|
||||
pkg := p.pkg()
|
||||
if debug && p.pkgList[0] != pkg {
|
||||
panic("imported packaged not found in pkgList[0]")
|
||||
}
|
||||
|
||||
// read objects
|
||||
n := p.int()
|
||||
for i := 0; i < n; i++ {
|
||||
p.obj(pkg)
|
||||
}
|
||||
|
||||
// complete interfaces
|
||||
for _, typ := range p.typList {
|
||||
if it, ok := typ.(*types.Interface); ok {
|
||||
it.Complete()
|
||||
}
|
||||
}
|
||||
|
||||
// package was imported completely and without errors
|
||||
pkg.MarkComplete()
|
||||
|
||||
return p.consumed(), pkg, nil
|
||||
}
|
||||
|
||||
type importer struct {
|
||||
data []byte
|
||||
datalen int
|
||||
imports map[string]*types.Package
|
||||
pkgList []*types.Package
|
||||
typList []types.Type
|
||||
}
|
||||
|
||||
func (p *importer) pkg() *types.Package {
|
||||
// if the package was seen before, i is its index (>= 0)
|
||||
i := p.int()
|
||||
if i >= 0 {
|
||||
return p.pkgList[i]
|
||||
}
|
||||
|
||||
// otherwise, i is the package tag (< 0)
|
||||
if i != packageTag {
|
||||
panic(fmt.Sprintf("unexpected package tag %d", i))
|
||||
}
|
||||
|
||||
// read package data
|
||||
name := p.string()
|
||||
path := p.string()
|
||||
|
||||
// if the package was imported before, use that one; otherwise create a new one
|
||||
pkg := p.imports[path]
|
||||
if pkg == nil {
|
||||
pkg = types.NewPackage(path, name)
|
||||
p.imports[path] = pkg
|
||||
}
|
||||
p.pkgList = append(p.pkgList, pkg)
|
||||
|
||||
return pkg
|
||||
}
|
||||
|
||||
func (p *importer) obj(pkg *types.Package) {
|
||||
var obj types.Object
|
||||
switch tag := p.int(); tag {
|
||||
case constTag:
|
||||
obj = types.NewConst(token.NoPos, pkg, p.string(), p.typ(), p.value())
|
||||
case typeTag:
|
||||
// type object is added to scope via respective named type
|
||||
_ = p.typ().(*types.Named)
|
||||
return
|
||||
case varTag:
|
||||
obj = types.NewVar(token.NoPos, pkg, p.string(), p.typ())
|
||||
case funcTag:
|
||||
obj = types.NewFunc(token.NoPos, pkg, p.string(), p.typ().(*types.Signature))
|
||||
default:
|
||||
panic(fmt.Sprintf("unexpected object tag %d", tag))
|
||||
}
|
||||
|
||||
if alt := pkg.Scope().Insert(obj); alt != nil {
|
||||
panic(fmt.Sprintf("%s already declared", alt.Name()))
|
||||
}
|
||||
}
|
||||
|
||||
func (p *importer) value() exact.Value {
|
||||
switch kind := exact.Kind(p.int()); kind {
|
||||
case falseTag:
|
||||
return exact.MakeBool(false)
|
||||
case trueTag:
|
||||
return exact.MakeBool(true)
|
||||
case int64Tag:
|
||||
return exact.MakeInt64(p.int64())
|
||||
case floatTag:
|
||||
return p.float()
|
||||
case fractionTag:
|
||||
return p.fraction()
|
||||
case complexTag:
|
||||
re := p.fraction()
|
||||
im := p.fraction()
|
||||
return exact.BinaryOp(re, token.ADD, exact.MakeImag(im))
|
||||
case stringTag:
|
||||
return exact.MakeString(p.string())
|
||||
default:
|
||||
panic(fmt.Sprintf("unexpected value kind %d", kind))
|
||||
}
|
||||
}
|
||||
|
||||
func (p *importer) float() exact.Value {
|
||||
sign := p.int()
|
||||
if sign == 0 {
|
||||
return exact.MakeInt64(0)
|
||||
}
|
||||
|
||||
x := p.ufloat()
|
||||
if sign < 0 {
|
||||
x = exact.UnaryOp(token.SUB, x, 0)
|
||||
}
|
||||
return x
|
||||
}
|
||||
|
||||
func (p *importer) fraction() exact.Value {
|
||||
sign := p.int()
|
||||
if sign == 0 {
|
||||
return exact.MakeInt64(0)
|
||||
}
|
||||
|
||||
x := exact.BinaryOp(p.ufloat(), token.QUO, p.ufloat())
|
||||
if sign < 0 {
|
||||
x = exact.UnaryOp(token.SUB, x, 0)
|
||||
}
|
||||
return x
|
||||
}
|
||||
|
||||
func (p *importer) ufloat() exact.Value {
|
||||
exp := p.int()
|
||||
x := exact.MakeFromBytes(p.bytes())
|
||||
switch {
|
||||
case exp < 0:
|
||||
d := exact.Shift(exact.MakeInt64(1), token.SHL, uint(-exp))
|
||||
x = exact.BinaryOp(x, token.QUO, d)
|
||||
case exp > 0:
|
||||
x = exact.Shift(x, token.SHL, uint(exp))
|
||||
}
|
||||
return x
|
||||
}
|
||||
|
||||
func (p *importer) record(t types.Type) {
|
||||
p.typList = append(p.typList, t)
|
||||
}
|
||||
|
||||
func (p *importer) typ() types.Type {
|
||||
// if the type was seen before, i is its index (>= 0)
|
||||
i := p.int()
|
||||
if i >= 0 {
|
||||
return p.typList[i]
|
||||
}
|
||||
|
||||
// otherwise, i is the type tag (< 0)
|
||||
switch i {
|
||||
case arrayTag:
|
||||
t := new(types.Array)
|
||||
p.record(t)
|
||||
|
||||
n := p.int64()
|
||||
*t = *types.NewArray(p.typ(), n)
|
||||
return t
|
||||
|
||||
case sliceTag:
|
||||
t := new(types.Slice)
|
||||
p.record(t)
|
||||
|
||||
*t = *types.NewSlice(p.typ())
|
||||
return t
|
||||
|
||||
case structTag:
|
||||
t := new(types.Struct)
|
||||
p.record(t)
|
||||
|
||||
n := p.int()
|
||||
fields := make([]*types.Var, n)
|
||||
tags := make([]string, n)
|
||||
for i := range fields {
|
||||
fields[i] = p.field()
|
||||
tags[i] = p.string()
|
||||
}
|
||||
*t = *types.NewStruct(fields, tags)
|
||||
return t
|
||||
|
||||
case pointerTag:
|
||||
t := new(types.Pointer)
|
||||
p.record(t)
|
||||
|
||||
*t = *types.NewPointer(p.typ())
|
||||
return t
|
||||
|
||||
case signatureTag:
|
||||
t := new(types.Signature)
|
||||
p.record(t)
|
||||
|
||||
*t = *p.signature()
|
||||
return t
|
||||
|
||||
case interfaceTag:
|
||||
// Create a dummy entry in the type list. This is safe because we
// cannot expect the interface type to appear in a cycle, as any
// such cycle must contain a named type which would have been
// defined earlier.
|
||||
n := len(p.typList)
|
||||
p.record(nil)
|
||||
|
||||
// read embedded interfaces
|
||||
embeddeds := make([]*types.Named, p.int())
|
||||
for i := range embeddeds {
|
||||
embeddeds[i] = p.typ().(*types.Named)
|
||||
}
|
||||
|
||||
// read methods
|
||||
methods := make([]*types.Func, p.int())
|
||||
for i := range methods {
|
||||
pkg, name := p.qualifiedName()
|
||||
methods[i] = types.NewFunc(token.NoPos, pkg, name, p.typ().(*types.Signature))
|
||||
}
|
||||
|
||||
t := types.NewInterface(methods, embeddeds)
|
||||
p.typList[n] = t
|
||||
return t
|
||||
|
||||
case mapTag:
|
||||
t := new(types.Map)
|
||||
p.record(t)
|
||||
|
||||
*t = *types.NewMap(p.typ(), p.typ())
|
||||
return t
|
||||
|
||||
case chanTag:
|
||||
t := new(types.Chan)
|
||||
p.record(t)
|
||||
|
||||
*t = *types.NewChan(types.ChanDir(p.int()), p.typ())
|
||||
return t
|
||||
|
||||
case namedTag:
|
||||
// read type object
|
||||
name := p.string()
|
||||
pkg := p.pkg()
|
||||
scope := pkg.Scope()
|
||||
obj := scope.Lookup(name)
|
||||
|
||||
// if the object doesn't exist yet, create and insert it
|
||||
if obj == nil {
|
||||
obj = types.NewTypeName(token.NoPos, pkg, name, nil)
|
||||
scope.Insert(obj)
|
||||
}
|
||||
|
||||
// associate new named type with obj if it doesn't exist yet
|
||||
t0 := types.NewNamed(obj.(*types.TypeName), nil, nil)
|
||||
|
||||
// but record the existing type, if any
|
||||
t := obj.Type().(*types.Named)
|
||||
p.record(t)
|
||||
|
||||
// read underlying type
|
||||
t0.SetUnderlying(p.typ())
|
||||
|
||||
// read associated methods
|
||||
for i, n := 0, p.int(); i < n; i++ {
|
||||
t0.AddMethod(types.NewFunc(token.NoPos, pkg, p.string(), p.typ().(*types.Signature)))
|
||||
}
|
||||
|
||||
return t
|
||||
|
||||
default:
|
||||
panic(fmt.Sprintf("unexpected type tag %d", i))
|
||||
}
|
||||
}
|
||||
|
||||
func deref(typ types.Type) types.Type {
|
||||
if p, _ := typ.(*types.Pointer); p != nil {
|
||||
return p.Elem()
|
||||
}
|
||||
return typ
|
||||
}
|
||||
|
||||
func (p *importer) field() *types.Var {
|
||||
pkg, name := p.qualifiedName()
|
||||
typ := p.typ()
|
||||
|
||||
anonymous := false
|
||||
if name == "" {
|
||||
// anonymous field - typ must be T or *T and T must be a type name
|
||||
switch typ := deref(typ).(type) {
|
||||
case *types.Basic: // basic types are named types
|
||||
pkg = nil
|
||||
name = typ.Name()
|
||||
case *types.Named:
|
||||
obj := typ.Obj()
|
||||
name = obj.Name()
|
||||
// correct the field package for anonymous fields
|
||||
if exported(name) {
|
||||
pkg = p.pkgList[0]
|
||||
}
|
||||
default:
|
||||
panic("anonymous field expected")
|
||||
}
|
||||
anonymous = true
|
||||
}
|
||||
|
||||
return types.NewField(token.NoPos, pkg, name, typ, anonymous)
|
||||
}
|
||||
|
||||
func (p *importer) qualifiedName() (*types.Package, string) {
|
||||
name := p.string()
|
||||
pkg := p.pkgList[0] // exported names assume current package
|
||||
if !exported(name) {
|
||||
pkg = p.pkg()
|
||||
}
|
||||
return pkg, name
|
||||
}
|
||||
|
||||
func (p *importer) signature() *types.Signature {
|
||||
var recv *types.Var
|
||||
if p.int() != 0 {
|
||||
recv = p.param()
|
||||
}
|
||||
return types.NewSignature(recv, p.tuple(), p.tuple(), p.int() != 0)
|
||||
}
|
||||
|
||||
func (p *importer) param() *types.Var {
|
||||
return types.NewVar(token.NoPos, nil, p.string(), p.typ())
|
||||
}
|
||||
|
||||
func (p *importer) tuple() *types.Tuple {
|
||||
vars := make([]*types.Var, p.int())
|
||||
for i := range vars {
|
||||
vars[i] = p.param()
|
||||
}
|
||||
return types.NewTuple(vars...)
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// decoders
|
||||
|
||||
func (p *importer) string() string {
|
||||
return string(p.bytes())
|
||||
}
|
||||
|
||||
func (p *importer) int() int {
|
||||
return int(p.int64())
|
||||
}
|
||||
|
||||
func (p *importer) int64() int64 {
|
||||
if debug {
|
||||
p.marker('i')
|
||||
}
|
||||
|
||||
return p.rawInt64()
|
||||
}
|
||||
|
||||
// Note: bytes() returns the respective byte slice w/o copy.
|
||||
func (p *importer) bytes() []byte {
|
||||
if debug {
|
||||
p.marker('b')
|
||||
}
|
||||
|
||||
var b []byte
|
||||
if n := int(p.rawInt64()); n > 0 {
|
||||
b = p.data[:n]
|
||||
p.data = p.data[n:]
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
func (p *importer) marker(want byte) {
|
||||
if debug {
|
||||
if got := p.data[0]; got != want {
|
||||
panic(fmt.Sprintf("incorrect marker: got %c; want %c (pos = %d)", got, want, p.consumed()))
|
||||
}
|
||||
p.data = p.data[1:]
|
||||
|
||||
pos := p.consumed()
|
||||
if n := int(p.rawInt64()); n != pos {
|
||||
panic(fmt.Sprintf("incorrect position: got %d; want %d", n, pos))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// rawInt64 should only be used by low-level decoders
|
||||
func (p *importer) rawInt64() int64 {
|
||||
i, n := binary.Varint(p.data)
|
||||
p.data = p.data[n:]
|
||||
return i
|
||||
}
|
||||
|
||||
func (p *importer) consumed() int {
|
||||
return p.datalen - len(p.data)
|
||||
}
|
@ -1,387 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package importer
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/build"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"sort"
|
||||
"strconv"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"golang.org/x/tools/go/gcimporter"
|
||||
"golang.org/x/tools/go/types"
|
||||
)
|
||||
|
||||
var fset = token.NewFileSet()
|
||||
|
||||
var tests = []string{
|
||||
`package p`,
|
||||
|
||||
// consts
|
||||
`package p; const X = true`,
|
||||
`package p; const X, y, Z = true, false, 0 != 0`,
|
||||
`package p; const ( A float32 = 1<<iota; B; C; D)`,
|
||||
`package p; const X = "foo"`,
|
||||
`package p; const X string = "foo"`,
|
||||
`package p; const X = 0`,
|
||||
`package p; const X = -42`,
|
||||
`package p; const X = 3.14159265`,
|
||||
`package p; const X = -1e-10`,
|
||||
`package p; const X = 1.2 + 2.3i`,
|
||||
`package p; const X = -1i`,
|
||||
`package p; import "math"; const Pi = math.Pi`,
|
||||
`package p; import m "math"; const Pi = m.Pi`,
|
||||
|
||||
// types
|
||||
`package p; type T int`,
|
||||
`package p; type T [10]int`,
|
||||
`package p; type T []int`,
|
||||
`package p; type T struct{}`,
|
||||
`package p; type T struct{x int}`,
|
||||
`package p; type T *int`,
|
||||
`package p; type T func()`,
|
||||
`package p; type T *T`,
|
||||
`package p; type T interface{}`,
|
||||
`package p; type T interface{ foo() }`,
|
||||
`package p; type T interface{ m() T }`,
|
||||
// TODO(gri) disabled for now - import/export works but
|
||||
// types.Type.String() used in the test cannot handle cases
|
||||
// like this yet
|
||||
// `package p; type T interface{ m() interface{T} }`,
|
||||
`package p; type T map[string]bool`,
|
||||
`package p; type T chan int`,
|
||||
`package p; type T <-chan complex64`,
|
||||
`package p; type T chan<- map[int]string`,
|
||||
// test case for issue 8177
|
||||
`package p; type T1 interface { F(T2) }; type T2 interface { T1 }`,
|
||||
|
||||
// vars
|
||||
`package p; var X int`,
|
||||
`package p; var X, Y, Z struct{f int "tag"}`,
|
||||
|
||||
// funcs
|
||||
`package p; func F()`,
|
||||
`package p; func F(x int, y struct{}) bool`,
|
||||
`package p; type T int; func (*T) F(x int, y struct{}) T`,
|
||||
|
||||
// selected special cases
|
||||
`package p; type T int`,
|
||||
`package p; type T uint8`,
|
||||
`package p; type T byte`,
|
||||
`package p; type T error`,
|
||||
`package p; import "net/http"; type T http.Client`,
|
||||
`package p; import "net/http"; type ( T1 http.Client; T2 struct { http.Client } )`,
|
||||
`package p; import "unsafe"; type ( T1 unsafe.Pointer; T2 unsafe.Pointer )`,
|
||||
`package p; import "unsafe"; type T struct { p unsafe.Pointer }`,
|
||||
}
|
||||
|
||||
func TestImportSrc(t *testing.T) {
|
||||
for _, src := range tests {
|
||||
pkg, err := pkgForSource(src)
|
||||
if err != nil {
|
||||
t.Errorf("typecheck failed: %s", err)
|
||||
continue
|
||||
}
|
||||
testExportImport(t, pkg, "")
|
||||
}
|
||||
}
|
||||
|
||||
func TestImportStdLib(t *testing.T) {
|
||||
start := time.Now()
|
||||
|
||||
libs, err := stdLibs()
|
||||
if err != nil {
|
||||
t.Fatalf("could not compute list of std libraries: %s", err)
|
||||
}
|
||||
if len(libs) < 100 {
|
||||
t.Fatalf("only %d std libraries found - something's not right", len(libs))
|
||||
}
|
||||
|
||||
// make sure printed go/types types and gc-imported types
|
||||
// can be compared reasonably well
|
||||
types.GcCompatibilityMode = true
|
||||
|
||||
var totSize, totGcSize int
|
||||
for _, lib := range libs {
|
||||
// limit run time for short tests
|
||||
if testing.Short() && time.Since(start) >= 750*time.Millisecond {
|
||||
return
|
||||
}
|
||||
if lib == "cmd/internal/objfile" || lib == "net/http" {
|
||||
// gcimporter doesn't support vendored imports.
|
||||
// TODO(gri): fix.
|
||||
continue
|
||||
}
|
||||
|
||||
pkg, err := pkgForPath(lib)
|
||||
switch err := err.(type) {
|
||||
case nil:
|
||||
// ok
|
||||
case *build.NoGoError:
|
||||
// no Go files - ignore
|
||||
continue
|
||||
default:
|
||||
t.Errorf("typecheck failed: %s", err)
|
||||
continue
|
||||
}
|
||||
|
||||
size, gcsize := testExportImport(t, pkg, lib)
|
||||
if gcsize == 0 {
|
||||
// if gc import didn't happen, assume same size
|
||||
// (and avoid division by zero below)
|
||||
gcsize = size
|
||||
}
|
||||
|
||||
if testing.Verbose() {
|
||||
fmt.Printf("%s\t%d\t%d\t%d%%\n", lib, size, gcsize, int(float64(size)*100/float64(gcsize)))
|
||||
}
|
||||
totSize += size
|
||||
totGcSize += gcsize
|
||||
}
|
||||
|
||||
if testing.Verbose() {
|
||||
fmt.Printf("\n%d\t%d\t%d%%\n", totSize, totGcSize, int(float64(totSize)*100/float64(totGcSize)))
|
||||
}
|
||||
|
||||
types.GcCompatibilityMode = false
|
||||
}
|
||||
|
||||
func testExportImport(t *testing.T, pkg0 *types.Package, path string) (size, gcsize int) {
|
||||
data := ExportData(pkg0)
|
||||
size = len(data)
|
||||
|
||||
imports := make(map[string]*types.Package)
|
||||
n, pkg1, err := ImportData(imports, data)
|
||||
if err != nil {
|
||||
t.Errorf("package %s: import failed: %s", pkg0.Name(), err)
|
||||
return
|
||||
}
|
||||
if n != size {
|
||||
t.Errorf("package %s: not all input data consumed", pkg0.Name())
|
||||
return
|
||||
}
|
||||
|
||||
s0 := pkgString(pkg0)
|
||||
s1 := pkgString(pkg1)
|
||||
if s1 != s0 {
|
||||
t.Errorf("package %s: \nimport got:\n%s\nwant:\n%s\n", pkg0.Name(), s1, s0)
|
||||
}
|
||||
|
||||
// If we have a standard library, compare also against the gcimported package.
|
||||
if path == "" {
|
||||
return // not std library
|
||||
}
|
||||
|
||||
gcdata, err := gcExportData(path)
|
||||
if err != nil {
|
||||
if pkg0.Name() == "main" {
|
||||
return // no export data present for main package
|
||||
}
|
||||
t.Errorf("package %s: couldn't get export data: %s", pkg0.Name(), err)
|
||||
}
|
||||
gcsize = len(gcdata)
|
||||
|
||||
imports = make(map[string]*types.Package)
|
||||
pkg2, err := gcImportData(imports, gcdata, path)
|
||||
if err != nil {
|
||||
t.Errorf("package %s: gcimport failed: %s", pkg0.Name(), err)
|
||||
return
|
||||
}
|
||||
|
||||
s2 := pkgString(pkg2)
|
||||
if s2 != s0 {
|
||||
t.Errorf("package %s: \ngcimport got:\n%s\nwant:\n%s\n", pkg0.Name(), s2, s0)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func pkgForSource(src string) (*types.Package, error) {
|
||||
f, err := parser.ParseFile(fset, "", src, 0)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return typecheck("import-test", f)
|
||||
}
|
||||
|
||||
func pkgForPath(path string) (*types.Package, error) {
|
||||
// collect filenames
|
||||
ctxt := build.Default
|
||||
pkginfo, err := ctxt.Import(path, "", 0)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
filenames := append(pkginfo.GoFiles, pkginfo.CgoFiles...)
|
||||
|
||||
// parse files
|
||||
files := make([]*ast.File, len(filenames))
|
||||
for i, filename := range filenames {
|
||||
var err error
|
||||
files[i], err = parser.ParseFile(fset, filepath.Join(pkginfo.Dir, filename), nil, 0)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return typecheck(path, files...)
|
||||
}
|
||||
|
||||
var defaultConf = types.Config{
|
||||
// we only care about exports and thus can ignore function bodies
|
||||
IgnoreFuncBodies: true,
|
||||
// work around C imports if possible
|
||||
FakeImportC: true,
|
||||
// strconv exports IntSize as a constant. The type-checker must
// use the same word size; otherwise the results of the type-checker
// and the gc imports differ. We don't care about alignment
|
||||
// since none of the tests have exported constants depending
|
||||
// on alignment (see also issue 8366).
|
||||
Sizes: &types.StdSizes{WordSize: strconv.IntSize / 8, MaxAlign: 8},
|
||||
}
|
||||
|
||||
func typecheck(path string, files ...*ast.File) (*types.Package, error) {
|
||||
return defaultConf.Check(path, fset, files, nil)
|
||||
}
|
||||
|
||||
// pkgString returns a string representation of a package's exported interface.
|
||||
func pkgString(pkg *types.Package) string {
|
||||
var buf bytes.Buffer
|
||||
|
||||
fmt.Fprintf(&buf, "package %s\n", pkg.Name())
|
||||
|
||||
scope := pkg.Scope()
|
||||
for _, name := range scope.Names() {
|
||||
if exported(name) {
|
||||
obj := scope.Lookup(name)
|
||||
buf.WriteString(obj.String())
|
||||
|
||||
switch obj := obj.(type) {
|
||||
case *types.Const:
|
||||
// For now only print constant values if they are not float
|
||||
// or complex. This permits comparing go/types results with
|
||||
// gc-generated gcimported package interfaces.
|
||||
info := obj.Type().Underlying().(*types.Basic).Info()
|
||||
if info&types.IsFloat == 0 && info&types.IsComplex == 0 {
|
||||
fmt.Fprintf(&buf, " = %s", obj.Val())
|
||||
}
|
||||
|
||||
case *types.TypeName:
|
||||
// Print associated methods.
|
||||
// Basic types (e.g., unsafe.Pointer) have *types.Basic
|
||||
// type rather than *types.Named; so we need to check.
|
||||
if typ, _ := obj.Type().(*types.Named); typ != nil {
|
||||
if n := typ.NumMethods(); n > 0 {
|
||||
// Sort methods by name so that we get the
|
||||
// same order independent of whether the
// methods were imported or came directly
// from the source.
|
||||
// TODO(gri) This should probably be done
|
||||
// in go/types.
|
||||
list := make([]*types.Func, n)
|
||||
for i := 0; i < n; i++ {
|
||||
list[i] = typ.Method(i)
|
||||
}
|
||||
sort.Sort(byName(list))
|
||||
|
||||
buf.WriteString("\nmethods (\n")
|
||||
for _, m := range list {
|
||||
fmt.Fprintf(&buf, "\t%s\n", m)
|
||||
}
|
||||
buf.WriteString(")")
|
||||
}
|
||||
}
|
||||
}
|
||||
buf.WriteByte('\n')
|
||||
}
|
||||
}
|
||||
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
var stdLibRoot = filepath.Join(runtime.GOROOT(), "src") + string(filepath.Separator)
|
||||
|
||||
// The following std libraries are excluded from the stdLibs list.
|
||||
var excluded = map[string]bool{
|
||||
"builtin": true, // contains type declarations with cycles
|
||||
"unsafe": true, // contains fake declarations
|
||||
}
|
||||
|
||||
// stdLibs returns the list of standard library package paths.
|
||||
func stdLibs() (list []string, err error) {
|
||||
err = filepath.Walk(stdLibRoot, func(path string, info os.FileInfo, err error) error {
|
||||
if err == nil && info.IsDir() {
|
||||
// testdata directories don't contain importable libraries
|
||||
if info.Name() == "testdata" {
|
||||
return filepath.SkipDir
|
||||
}
|
||||
pkgPath := path[len(stdLibRoot):] // remove stdLibRoot
|
||||
if len(pkgPath) > 0 && !excluded[pkgPath] {
|
||||
list = append(list, pkgPath)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
type byName []*types.Func
|
||||
|
||||
func (a byName) Len() int { return len(a) }
|
||||
func (a byName) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
|
||||
func (a byName) Less(i, j int) bool { return a[i].Name() < a[j].Name() }
|
||||
|
||||
// gcExportData returns the gc-generated export data for the given path.
|
||||
// It is based on a trimmed-down version of gcimporter.Import which does
|
||||
// not do the actual import, does not handle package unsafe, and assumes
|
||||
// that path is a correct standard library package path (no canonicalization,
|
||||
// or handling of local import paths).
|
||||
func gcExportData(path string) ([]byte, error) {
|
||||
filename, id := gcimporter.FindPkg(path, "")
|
||||
if filename == "" {
|
||||
return nil, fmt.Errorf("can't find import: %s", path)
|
||||
}
|
||||
if id != path {
|
||||
panic("path should be canonicalized")
|
||||
}
|
||||
|
||||
f, err := os.Open(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
buf := bufio.NewReader(f)
|
||||
if err = gcimporter.FindExportData(buf); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var data []byte
|
||||
for {
|
||||
line, err := buf.ReadBytes('\n')
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
data = append(data, line...)
|
||||
// export data ends in "$$\n"
|
||||
if len(line) == 3 && line[0] == '$' && line[1] == '$' {
|
||||
return data, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func gcImportData(imports map[string]*types.Package, data []byte, path string) (*types.Package, error) {
|
||||
filename := fmt.Sprintf("<filename for %s>", path) // so we have a decent error message if necessary
|
||||
return gcimporter.ImportData(imports, filename, path, bufio.NewReader(bytes.NewBuffer(data)))
|
||||
}
|
@ -1,84 +0,0 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package importer

import "golang.org/x/tools/go/types"

const (
magic = "\n$$ exports $$\n"
version = "v0"
)

// Tags. Must be < 0.
const (
// Packages
packageTag = -(iota + 1)

// Objects
constTag
typeTag
varTag
funcTag

// Types
arrayTag
sliceTag
structTag
pointerTag
signatureTag
interfaceTag
mapTag
chanTag
namedTag

// Values
falseTag
trueTag
int64Tag
floatTag
fractionTag
complexTag
stringTag
)

var predeclared = []types.Type{
// basic types
types.Typ[types.Bool],
types.Typ[types.Int],
types.Typ[types.Int8],
types.Typ[types.Int16],
types.Typ[types.Int32],
types.Typ[types.Int64],
types.Typ[types.Uint],
types.Typ[types.Uint8],
types.Typ[types.Uint16],
types.Typ[types.Uint32],
types.Typ[types.Uint64],
types.Typ[types.Uintptr],
types.Typ[types.Float32],
types.Typ[types.Float64],
types.Typ[types.Complex64],
types.Typ[types.Complex128],
types.Typ[types.String],

// untyped types
types.Typ[types.UntypedBool],
types.Typ[types.UntypedInt],
types.Typ[types.UntypedRune],
types.Typ[types.UntypedFloat],
types.Typ[types.UntypedComplex],
types.Typ[types.UntypedString],
types.Typ[types.UntypedNil],

// package unsafe
types.Typ[types.UnsafePointer],

// aliases
types.Universe.Lookup("byte").Type(),
types.Universe.Lookup("rune").Type(),

// error
types.Universe.Lookup("error").Type(),
}
@ -1,210 +0,0 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build !go1.5

package loader

// This file handles cgo preprocessing of files containing `import "C"`.
//
// DESIGN
//
// The approach taken is to run the cgo processor on the package's
// CgoFiles and parse the output, faking the filenames of the
// resulting ASTs so that the synthetic file containing the C types is
// called "C" (e.g. "~/go/src/net/C") and the preprocessed files
// have their original names (e.g. "~/go/src/net/cgo_unix.go"),
// not the names of the actual temporary files.
//
// The advantage of this approach is its fidelity to 'go build'. The
// downside is that the token.Position.Offset for each AST node is
// incorrect, being an offset within the temporary file. Line numbers
// should still be correct because of the //line comments.
//
// The logic of this file is mostly plundered from the 'go build'
// tool, which also invokes the cgo preprocessor.
//
//
// REJECTED ALTERNATIVE
//
// An alternative approach that we explored is to extend go/types'
// Importer mechanism to provide the identity of the importing package
// so that each time `import "C"` appears it resolves to a different
// synthetic package containing just the objects needed in that case.
// The loader would invoke cgo but parse only the cgo_types.go file
// defining the package-level objects, discarding the other files
// resulting from preprocessing.
//
// The benefit of this approach would have been that source-level
// syntax information would correspond exactly to the original cgo
// file, with no preprocessing involved, making source tools like
// godoc, oracle, and eg happy. However, the approach was rejected
// due to the additional complexity it would impose on go/types. (It
// made for a beautiful demo, though.)
//
// cgo files, despite their *.go extension, are not legal Go source
// files per the specification since they may refer to unexported
// members of package "C" such as C.int. Also, a function such as
// C.getpwent has in effect two types, one matching its C type and one
// which additionally returns (errno C.int). The cgo preprocessor
// uses name mangling to distinguish these two functions in the
// processed code, but go/types would need to duplicate this logic in
// its handling of function calls, analogous to the treatment of map
// lookups in which y=m[k] and y,ok=m[k] are both legal.

import (
"fmt"
"go/ast"
"go/build"
"go/parser"
"go/token"
"io/ioutil"
"log"
"os"
"os/exec"
"path/filepath"
"regexp"
"strings"
)

// processCgoFiles invokes the cgo preprocessor on bp.CgoFiles, parses
// the output and returns the resulting ASTs.
//
func processCgoFiles(bp *build.Package, fset *token.FileSet, DisplayPath func(path string) string, mode parser.Mode) ([]*ast.File, error) {
tmpdir, err := ioutil.TempDir("", strings.Replace(bp.ImportPath, "/", "_", -1)+"_C")
if err != nil {
return nil, err
}
defer os.RemoveAll(tmpdir)

pkgdir := bp.Dir
if DisplayPath != nil {
pkgdir = DisplayPath(pkgdir)
}

cgoFiles, cgoDisplayFiles, err := runCgo(bp, pkgdir, tmpdir)
if err != nil {
return nil, err
}
var files []*ast.File
for i := range cgoFiles {
rd, err := os.Open(cgoFiles[i])
if err != nil {
return nil, err
}
display := filepath.Join(bp.Dir, cgoDisplayFiles[i])
f, err := parser.ParseFile(fset, display, rd, mode)
rd.Close()
if err != nil {
return nil, err
}
files = append(files, f)
}
return files, nil
}

var cgoRe = regexp.MustCompile(`[/\\:]`)

// runCgo invokes the cgo preprocessor on bp.CgoFiles and returns two
// lists of files: the resulting processed files (in temporary
// directory tmpdir) and the corresponding names of the unprocessed files.
//
// runCgo is adapted from (*builder).cgo in
// $GOROOT/src/cmd/go/build.go, but these features are unsupported:
// Objective C, CGOPKGPATH, CGO_FLAGS.
//
func runCgo(bp *build.Package, pkgdir, tmpdir string) (files, displayFiles []string, err error) {
cgoCPPFLAGS, _, _, cgoLDFLAGS := cflags(bp, true)
_, cgoexeCFLAGS, _, _ := cflags(bp, false)

if len(bp.CgoPkgConfig) > 0 {
pcCFLAGS, pcLDFLAGS, err := pkgConfigFlags(bp)
if err != nil {
return nil, nil, err
}
cgoCPPFLAGS = append(cgoCPPFLAGS, pcCFLAGS...)
cgoLDFLAGS = append(cgoLDFLAGS, pcLDFLAGS...)
}

// Allows including _cgo_export.h from .[ch] files in the package.
cgoCPPFLAGS = append(cgoCPPFLAGS, "-I", tmpdir)

// _cgo_gotypes.go (displayed "C") contains the type definitions.
files = append(files, filepath.Join(tmpdir, "_cgo_gotypes.go"))
displayFiles = append(displayFiles, "C")
for _, fn := range bp.CgoFiles {
// "foo.cgo1.go" (displayed "foo.go") is the processed Go source.
f := cgoRe.ReplaceAllString(fn[:len(fn)-len("go")], "_")
files = append(files, filepath.Join(tmpdir, f+"cgo1.go"))
displayFiles = append(displayFiles, fn)
}

var cgoflags []string
if bp.Goroot && bp.ImportPath == "runtime/cgo" {
cgoflags = append(cgoflags, "-import_runtime_cgo=false")
}
if bp.Goroot && bp.ImportPath == "runtime/race" || bp.ImportPath == "runtime/cgo" {
cgoflags = append(cgoflags, "-import_syscall=false")
}

args := stringList(
"go", "tool", "cgo", "-objdir", tmpdir, cgoflags, "--",
cgoCPPFLAGS, cgoLDFLAGS, cgoexeCFLAGS, bp.CgoFiles,
)
if false {
log.Printf("Running cgo for package %q: %s (dir=%s)", bp.ImportPath, args, pkgdir)
}
cmd := exec.Command(args[0], args[1:]...)
cmd.Dir = pkgdir
cmd.Stdout = os.Stderr
cmd.Stderr = os.Stderr
if err := cmd.Run(); err != nil {
return nil, nil, fmt.Errorf("cgo failed: %s: %s", args, err)
}

return files, displayFiles, nil
}

// -- unmodified from 'go build' ---------------------------------------

// Return the flags to use when invoking the C or C++ compilers, or cgo.
func cflags(p *build.Package, def bool) (cppflags, cflags, cxxflags, ldflags []string) {
var defaults string
if def {
defaults = "-g -O2"
}

cppflags = stringList(envList("CGO_CPPFLAGS", ""), p.CgoCPPFLAGS)
cflags = stringList(envList("CGO_CFLAGS", defaults), p.CgoCFLAGS)
cxxflags = stringList(envList("CGO_CXXFLAGS", defaults), p.CgoCXXFLAGS)
ldflags = stringList(envList("CGO_LDFLAGS", defaults), p.CgoLDFLAGS)
return
}

// envList returns the value of the given environment variable broken
// into fields, using the default value when the variable is empty.
func envList(key, def string) []string {
v := os.Getenv(key)
if v == "" {
v = def
}
return strings.Fields(v)
}

// stringList's arguments should be a sequence of string or []string values.
// stringList flattens them into a single []string.
func stringList(args ...interface{}) []string {
var x []string
for _, arg := range args {
switch arg := arg.(type) {
case []string:
x = append(x, arg...)
case string:
x = append(x, arg)
default:
panic("stringList: invalid argument")
}
}
return x
}
@ -1,173 +0,0 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build !go1.5
// +build !windows

package loader_test

import (
"fmt"
"go/token"
"log"
"path/filepath"
"runtime"
"sort"

"golang.org/x/tools/go/loader"
)

func printProgram(prog *loader.Program) {
// Created packages are the initial packages specified by a call
// to CreateFromFilenames or CreateFromFiles.
var names []string
for _, info := range prog.Created {
names = append(names, info.Pkg.Path())
}
fmt.Printf("created: %s\n", names)

// Imported packages are the initial packages specified by a
// call to Import or ImportWithTests.
names = nil
for _, info := range prog.Imported {
names = append(names, info.Pkg.Path())
}
sort.Strings(names)
fmt.Printf("imported: %s\n", names)

// InitialPackages contains the union of created and imported.
names = nil
for _, info := range prog.InitialPackages() {
names = append(names, info.Pkg.Path())
}
sort.Strings(names)
fmt.Printf("initial: %s\n", names)

// AllPackages contains all initial packages and their dependencies.
names = nil
for pkg := range prog.AllPackages {
names = append(names, pkg.Path())
}
sort.Strings(names)
fmt.Printf("all: %s\n", names)
}

func printFilenames(fset *token.FileSet, info *loader.PackageInfo) {
var names []string
for _, f := range info.Files {
names = append(names, filepath.Base(fset.File(f.Pos()).Name()))
}
fmt.Printf("%s.Files: %s\n", info.Pkg.Path(), names)
}

// This example loads a set of packages and all of their dependencies
// from a typical command-line. FromArgs parses a command line and
// makes calls to the other methods of Config shown in the examples that
// follow.
func ExampleConfig_FromArgs() {
args := []string{"mytool", "unicode/utf8", "errors", "runtime", "--", "foo", "bar"}
const wantTests = false

var conf loader.Config
rest, err := conf.FromArgs(args[1:], wantTests)
prog, err := conf.Load()
if err != nil {
log.Fatal(err)
}

fmt.Printf("rest: %s\n", rest)
printProgram(prog)
// Output:
// rest: [foo bar]
// created: []
// imported: [errors runtime unicode/utf8]
// initial: [errors runtime unicode/utf8]
// all: [errors runtime unicode/utf8 unsafe]
}

// This example creates and type-checks a single package (without tests)
// from a list of filenames, and loads all of its dependencies.
func ExampleConfig_CreateFromFilenames() {
var conf loader.Config
filename := filepath.Join(runtime.GOROOT(), "src/container/heap/heap.go")
conf.CreateFromFilenames("container/heap", filename)
prog, err := conf.Load()
if err != nil {
log.Fatal(err)
}

printProgram(prog)
// Output:
// created: [container/heap]
// imported: []
// initial: [container/heap]
// all: [container/heap sort]
}

// In the examples below, for stability, the chosen packages are
// relatively small, platform-independent, and low-level (and thus
// infrequently changing).
// The strconv package has internal and external tests.

const hello = `package main

import "fmt"

func main() {
fmt.Println("Hello, world.")
}
`

// This example creates and type-checks a package from a list of
// already-parsed files, and loads all its dependencies.
func ExampleConfig_CreateFromFiles() {
var conf loader.Config
f, err := conf.ParseFile("hello.go", hello)
if err != nil {
log.Fatal(err)
}
conf.CreateFromFiles("hello", f)
prog, err := conf.Load()
if err != nil {
log.Fatal(err)
}

printProgram(prog)
printFilenames(prog.Fset, prog.Package("strconv"))
// Output:
// created: [hello]
// imported: []
// initial: [hello]
// all: [errors fmt hello io math os reflect runtime strconv sync sync/atomic syscall time unicode/utf8 unsafe]
// strconv.Files: [atob.go atof.go atoi.go decimal.go extfloat.go ftoa.go isprint.go itoa.go quote.go]
}

// This example imports three packages, including the tests for one of
// them, and loads all their dependencies.
func ExampleConfig_Import() {
// ImportWithTest("strconv") causes strconv to include
// internal_test.go, and creates an external test package,
// strconv_test.
// (Compare with the example of CreateFromFiles.)

var conf loader.Config
conf.Import("unicode/utf8")
conf.Import("errors")
conf.ImportWithTests("strconv")
prog, err := conf.Load()
if err != nil {
log.Fatal(err)
}

printProgram(prog)
printFilenames(prog.Fset, prog.Package("strconv"))
printFilenames(prog.Fset, prog.Package("strconv_test"))
// Output:
// created: [strconv_test]
// imported: [errors strconv unicode/utf8]
// initial: [errors strconv strconv_test unicode/utf8]
// all: [bufio bytes errors flag fmt io math math/rand os reflect runtime runtime/pprof sort strconv strconv_test strings sync sync/atomic syscall testing text/tabwriter time unicode unicode/utf8 unsafe]
// strconv.Files: [atob.go atof.go atoi.go decimal.go extfloat.go ftoa.go isprint.go itoa.go quote.go internal_test.go]
// strconv_test.Files: [atob_test.go atof_test.go atoi_test.go decimal_test.go fp_test.go ftoa_test.go itoa_test.go quote_example_test.go quote_test.go strconv_test.go]
}
File diff suppressed because it is too large
@ -1,676 +0,0 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build !go1.5

// No testdata on Android.

// +build !android

package loader_test

import (
"fmt"
"go/build"
"path/filepath"
"reflect"
"sort"
"strings"
"sync"
"testing"

"golang.org/x/tools/go/buildutil"
"golang.org/x/tools/go/loader"
)

// TestFromArgs checks that conf.FromArgs populates conf correctly.
// It does no I/O.
func TestFromArgs(t *testing.T) {
type result struct {
Err string
Rest []string
ImportPkgs map[string]bool
CreatePkgs []loader.PkgSpec
}
for _, test := range []struct {
args []string
tests bool
want result
}{
// Mix of existing and non-existent packages.
{
args: []string{"nosuchpkg", "errors"},
want: result{
ImportPkgs: map[string]bool{"errors": false, "nosuchpkg": false},
},
},
// Same, with -test flag.
{
args: []string{"nosuchpkg", "errors"},
tests: true,
want: result{
ImportPkgs: map[string]bool{"errors": true, "nosuchpkg": true},
},
},
// Surplus arguments.
{
args: []string{"fmt", "errors", "--", "surplus"},
want: result{
Rest: []string{"surplus"},
ImportPkgs: map[string]bool{"errors": false, "fmt": false},
},
},
// Ad hoc package specified as *.go files.
{
args: []string{"foo.go", "bar.go"},
want: result{CreatePkgs: []loader.PkgSpec{{
Filenames: []string{"foo.go", "bar.go"},
}}},
},
// Mixture of *.go and import paths.
{
args: []string{"foo.go", "fmt"},
want: result{
Err: "named files must be .go files: fmt",
},
},
} {
var conf loader.Config
rest, err := conf.FromArgs(test.args, test.tests)
got := result{
Rest: rest,
ImportPkgs: conf.ImportPkgs,
CreatePkgs: conf.CreatePkgs,
}
if err != nil {
got.Err = err.Error()
}
if !reflect.DeepEqual(got, test.want) {
t.Errorf("FromArgs(%q) = %+v, want %+v", test.args, got, test.want)
}
}
}

func TestLoad_NoInitialPackages(t *testing.T) {
var conf loader.Config

const wantErr = "no initial packages were loaded"

prog, err := conf.Load()
if err == nil {
t.Errorf("Load succeeded unexpectedly, want %q", wantErr)
} else if err.Error() != wantErr {
t.Errorf("Load failed with wrong error %q, want %q", err, wantErr)
}
if prog != nil {
t.Errorf("Load unexpectedly returned a Program")
}
}

func TestLoad_MissingInitialPackage(t *testing.T) {
var conf loader.Config
conf.Import("nosuchpkg")
conf.Import("errors")

const wantErr = "couldn't load packages due to errors: nosuchpkg"

prog, err := conf.Load()
if err == nil {
t.Errorf("Load succeeded unexpectedly, want %q", wantErr)
} else if err.Error() != wantErr {
t.Errorf("Load failed with wrong error %q, want %q", err, wantErr)
}
if prog != nil {
t.Errorf("Load unexpectedly returned a Program")
}
}

func TestLoad_MissingInitialPackage_AllowErrors(t *testing.T) {
var conf loader.Config
conf.AllowErrors = true
conf.Import("nosuchpkg")
conf.ImportWithTests("errors")

prog, err := conf.Load()
if err != nil {
t.Errorf("Load failed unexpectedly: %v", err)
}
if prog == nil {
t.Fatalf("Load returned a nil Program")
}
if got, want := created(prog), "errors_test"; got != want {
t.Errorf("Created = %s, want %s", got, want)
}
if got, want := imported(prog), "errors"; got != want {
t.Errorf("Imported = %s, want %s", got, want)
}
}

func TestCreateUnnamedPackage(t *testing.T) {
var conf loader.Config
conf.CreateFromFilenames("")
prog, err := conf.Load()
if err != nil {
t.Fatalf("Load failed: %v", err)
}
if got, want := fmt.Sprint(prog.InitialPackages()), "[(unnamed)]"; got != want {
t.Errorf("InitialPackages = %s, want %s", got, want)
}
}

func TestLoad_MissingFileInCreatedPackage(t *testing.T) {
var conf loader.Config
conf.CreateFromFilenames("", "missing.go")

const wantErr = "couldn't load packages due to errors: (unnamed)"

prog, err := conf.Load()
if prog != nil {
t.Errorf("Load unexpectedly returned a Program")
}
if err == nil {
t.Fatalf("Load succeeded unexpectedly, want %q", wantErr)
}
if err.Error() != wantErr {
t.Fatalf("Load failed with wrong error %q, want %q", err, wantErr)
}
}

func TestLoad_MissingFileInCreatedPackage_AllowErrors(t *testing.T) {
conf := loader.Config{AllowErrors: true}
conf.CreateFromFilenames("", "missing.go")

prog, err := conf.Load()
if err != nil {
t.Errorf("Load failed: %v", err)
}
if got, want := fmt.Sprint(prog.InitialPackages()), "[(unnamed)]"; got != want {
t.Fatalf("InitialPackages = %s, want %s", got, want)
}
}

func TestLoad_ParseError(t *testing.T) {
var conf loader.Config
conf.CreateFromFilenames("badpkg", "testdata/badpkgdecl.go")

const wantErr = "couldn't load packages due to errors: badpkg"

prog, err := conf.Load()
if prog != nil {
t.Errorf("Load unexpectedly returned a Program")
}
if err == nil {
t.Fatalf("Load succeeded unexpectedly, want %q", wantErr)
}
if err.Error() != wantErr {
t.Fatalf("Load failed with wrong error %q, want %q", err, wantErr)
}
}

func TestLoad_ParseError_AllowErrors(t *testing.T) {
var conf loader.Config
conf.AllowErrors = true
conf.CreateFromFilenames("badpkg", "testdata/badpkgdecl.go")

prog, err := conf.Load()
if err != nil {
t.Errorf("Load failed unexpectedly: %v", err)
}
if prog == nil {
t.Fatalf("Load returned a nil Program")
}
if got, want := created(prog), "badpkg"; got != want {
t.Errorf("Created = %s, want %s", got, want)
}

badpkg := prog.Created[0]
if len(badpkg.Files) != 1 {
t.Errorf("badpkg has %d files, want 1", len(badpkg.Files))
}
wantErr := filepath.Join("testdata", "badpkgdecl.go") + ":1:34: expected 'package', found 'EOF'"
if !hasError(badpkg.Errors, wantErr) {
t.Errorf("badpkg.Errors = %v, want %s", badpkg.Errors, wantErr)
}
}

func TestLoad_FromSource_Success(t *testing.T) {
var conf loader.Config
conf.CreateFromFilenames("P", "testdata/a.go", "testdata/b.go")

prog, err := conf.Load()
if err != nil {
t.Errorf("Load failed unexpectedly: %v", err)
}
if prog == nil {
t.Fatalf("Load returned a nil Program")
}
if got, want := created(prog), "P"; got != want {
t.Errorf("Created = %s, want %s", got, want)
}
}

func TestLoad_FromImports_Success(t *testing.T) {
var conf loader.Config
conf.ImportWithTests("fmt")
conf.ImportWithTests("errors")

prog, err := conf.Load()
if err != nil {
t.Errorf("Load failed unexpectedly: %v", err)
}
if prog == nil {
t.Fatalf("Load returned a nil Program")
}
if got, want := created(prog), "errors_test fmt_test"; got != want {
t.Errorf("Created = %q, want %s", got, want)
}
if got, want := imported(prog), "errors fmt"; got != want {
t.Errorf("Imported = %s, want %s", got, want)
}
// Check set of transitive packages.
// There are >30 and the set may grow over time, so only check a few.
want := map[string]bool{
"strings": true,
"time": true,
"runtime": true,
"testing": true,
"unicode": true,
}
for _, path := range all(prog) {
delete(want, path)
}
if len(want) > 0 {
t.Errorf("AllPackages is missing these keys: %q", keys(want))
}
}

func TestLoad_MissingIndirectImport(t *testing.T) {
pkgs := map[string]string{
"a": `package a; import _ "b"`,
"b": `package b; import _ "c"`,
}
conf := loader.Config{Build: fakeContext(pkgs)}
conf.Import("a")

const wantErr = "couldn't load packages due to errors: b"

prog, err := conf.Load()
if err == nil {
t.Errorf("Load succeeded unexpectedly, want %q", wantErr)
} else if err.Error() != wantErr {
t.Errorf("Load failed with wrong error %q, want %q", err, wantErr)
}
if prog != nil {
t.Errorf("Load unexpectedly returned a Program")
}
}

func TestLoad_BadDependency_AllowErrors(t *testing.T) {
for _, test := range []struct {
descr string
pkgs map[string]string
wantPkgs string
}{

{
descr: "missing dependency",
pkgs: map[string]string{
"a": `package a; import _ "b"`,
"b": `package b; import _ "c"`,
},
wantPkgs: "a b",
},
{
descr: "bad package decl in dependency",
pkgs: map[string]string{
"a": `package a; import _ "b"`,
"b": `package b; import _ "c"`,
"c": `package`,
},
wantPkgs: "a b",
},
{
descr: "parse error in dependency",
pkgs: map[string]string{
"a": `package a; import _ "b"`,
"b": `package b; import _ "c"`,
"c": `package c; var x = `,
},
wantPkgs: "a b c",
},
} {
conf := loader.Config{
AllowErrors: true,
Build: fakeContext(test.pkgs),
}
conf.Import("a")

prog, err := conf.Load()
if err != nil {
t.Errorf("%s: Load failed unexpectedly: %v", test.descr, err)
}
if prog == nil {
t.Fatalf("%s: Load returned a nil Program", test.descr)
}

if got, want := imported(prog), "a"; got != want {
t.Errorf("%s: Imported = %s, want %s", test.descr, got, want)
}
if got := all(prog); strings.Join(got, " ") != test.wantPkgs {
t.Errorf("%s: AllPackages = %s, want %s", test.descr, got, test.wantPkgs)
}
}
}

func TestCwd(t *testing.T) {
ctxt := fakeContext(map[string]string{"one/two/three": `package three`})
for _, test := range []struct {
cwd, arg, want string
}{
{cwd: "/go/src/one", arg: "./two/three", want: "one/two/three"},
{cwd: "/go/src/one", arg: "../one/two/three", want: "one/two/three"},
{cwd: "/go/src/one", arg: "one/two/three", want: "one/two/three"},
{cwd: "/go/src/one/two/three", arg: ".", want: "one/two/three"},
{cwd: "/go/src/one", arg: "two/three", want: ""},
} {
conf := loader.Config{
Cwd: test.cwd,
Build: ctxt,
}
conf.Import(test.arg)

var got string
prog, err := conf.Load()
if prog != nil {
got = imported(prog)
}
if got != test.want {
t.Errorf("Load(%s) from %s: Imported = %s, want %s",
test.arg, test.cwd, got, test.want)
if err != nil {
t.Errorf("Load failed: %v", err)
}
}
}
}

// TODO(adonovan): more Load tests:
//
// failures:
// - to parse package decl of *_test.go files
// - to parse package decl of external *_test.go files
// - to parse whole of *_test.go files
// - to parse whole of external *_test.go files
// - to open a *.go file during import scanning
// - to import from binary

// features:
// - InitialPackages
// - PackageCreated hook
// - TypeCheckFuncBodies hook

func TestTransitivelyErrorFreeFlag(t *testing.T) {
// Create an minimal custom build.Context
// that fakes the following packages:
//
// a --> b --> c! c has an error
// \ d and e are transitively error-free.
// e --> d
//
// Each package [a-e] consists of one file, x.go.
pkgs := map[string]string{
"a": `package a; import (_ "b"; _ "e")`,
"b": `package b; import _ "c"`,
"c": `package c; func f() { _ = int(false) }`, // type error within function body
"d": `package d;`,
"e": `package e; import _ "d"`,
}
conf := loader.Config{
AllowErrors: true,
Build: fakeContext(pkgs),
}
conf.Import("a")

prog, err := conf.Load()
if err != nil {
t.Errorf("Load failed: %s", err)
}
if prog == nil {
t.Fatalf("Load returned nil *Program")
}

for pkg, info := range prog.AllPackages {
var wantErr, wantTEF bool
switch pkg.Path() {
case "a", "b":
case "c":
wantErr = true
case "d", "e":
wantTEF = true
default:
t.Errorf("unexpected package: %q", pkg.Path())
continue
}

if (info.Errors != nil) != wantErr {
if wantErr {
t.Errorf("Package %q.Error = nil, want error", pkg.Path())
} else {
t.Errorf("Package %q has unexpected Errors: %v",
pkg.Path(), info.Errors)
}
}

if info.TransitivelyErrorFree != wantTEF {
t.Errorf("Package %q.TransitivelyErrorFree=%t, want %t",
pkg.Path(), info.TransitivelyErrorFree, wantTEF)
}
}
}

// Test that syntax (scan/parse), type, and loader errors are recorded
// (in PackageInfo.Errors) and reported (via Config.TypeChecker.Error).
func TestErrorReporting(t *testing.T) {
pkgs := map[string]string{
"a": `package a; import (_ "b"; _ "c"); var x int = false`,
"b": `package b; 'syntax error!`,
}
conf := loader.Config{
AllowErrors: true,
Build: fakeContext(pkgs),
}
var mu sync.Mutex
var allErrors []error
conf.TypeChecker.Error = func(err error) {
mu.Lock()
allErrors = append(allErrors, err)
mu.Unlock()
}
conf.Import("a")

prog, err := conf.Load()
if err != nil {
t.Errorf("Load failed: %s", err)
}
if prog == nil {
t.Fatalf("Load returned nil *Program")
}

// TODO(adonovan): test keys of ImportMap.

// Check errors recorded in each PackageInfo.
for pkg, info := range prog.AllPackages {
switch pkg.Path() {
case "a":
if !hasError(info.Errors, "cannot convert false") {
t.Errorf("a.Errors = %v, want bool conversion (type) error", info.Errors)
}
if !hasError(info.Errors, "could not import c") {
t.Errorf("a.Errors = %v, want import (loader) error", info.Errors)
}
case "b":
if !hasError(info.Errors, "rune literal not terminated") {
t.Errorf("b.Errors = %v, want unterminated literal (syntax) error", info.Errors)
}
}
}

// Check errors reported via error handler.
if !hasError(allErrors, "cannot convert false") ||
!hasError(allErrors, "rune literal not terminated") ||
!hasError(allErrors, "could not import c") {
t.Errorf("allErrors = %v, want syntax, type and loader errors", allErrors)
}
}

func TestCycles(t *testing.T) {
for _, test := range []struct {
descr string
ctxt *build.Context
wantErr string
}{
{
"self-cycle",
fakeContext(map[string]string{
"main": `package main; import _ "selfcycle"`,
"selfcycle": `package selfcycle; import _ "selfcycle"`,
}),
`import cycle: selfcycle -> selfcycle`,
},
{
"three-package cycle",
fakeContext(map[string]string{
"main": `package main; import _ "a"`,
"a": `package a; import _ "b"`,
"b": `package b; import _ "c"`,
"c": `package c; import _ "a"`,
}),
`import cycle: c -> a -> b -> c`,
},
{
"self-cycle in dependency of test file",
buildutil.FakeContext(map[string]map[string]string{
"main": {
"main.go": `package main`,
"main_test.go": `package main; import _ "a"`,
},
"a": {
"a.go": `package a; import _ "a"`,
},
}),
`import cycle: a -> a`,
},
// TODO(adonovan): fix: these fail
// {
// "two-package cycle in dependency of test file",
// buildutil.FakeContext(map[string]map[string]string{
// "main": {
// "main.go": `package main`,
// "main_test.go": `package main; import _ "a"`,
// },
// "a": {
// "a.go": `package a; import _ "main"`,
// },
// }),
// `import cycle: main -> a -> main`,
// },
// {
// "self-cycle in augmented package",
// buildutil.FakeContext(map[string]map[string]string{
// "main": {
// "main.go": `package main`,
// "main_test.go": `package main; import _ "main"`,
// },
// }),
// `import cycle: main -> main`,
// },
} {
conf := loader.Config{
AllowErrors: true,
Build: test.ctxt,
}
var mu sync.Mutex
var allErrors []error
conf.TypeChecker.Error = func(err error) {
mu.Lock()
allErrors = append(allErrors, err)
mu.Unlock()
}
conf.ImportWithTests("main")

prog, err := conf.Load()
if err != nil {
t.Errorf("%s: Load failed: %s", test.descr, err)
}
if prog == nil {
t.Fatalf("%s: Load returned nil *Program", test.descr)
}

if !hasError(allErrors, test.wantErr) {
t.Errorf("%s: Load() errors = %q, want %q",
test.descr, allErrors, test.wantErr)
}
}

// TODO(adonovan):
// - Test that in a legal test cycle, none of the symbols
// defined by augmentation are visible via import.
}

// ---- utilities ----

// Simplifying wrapper around buildutil.FakeContext for single-file packages.
func fakeContext(pkgs map[string]string) *build.Context {
pkgs2 := make(map[string]map[string]string)
for path, content := range pkgs {
pkgs2[path] = map[string]string{"x.go": content}
}
return buildutil.FakeContext(pkgs2)
}

func hasError(errors []error, substr string) bool {
for _, err := range errors {
if strings.Contains(err.Error(), substr) {
return true
}
}
return false
}

func keys(m map[string]bool) (keys []string) {
for key := range m {
keys = append(keys, key)
}
sort.Strings(keys)
return
}

// Returns all loaded packages.
func all(prog *loader.Program) []string {
var pkgs []string
for _, info := range prog.AllPackages {
pkgs = append(pkgs, info.Pkg.Path())
}
sort.Strings(pkgs)
return pkgs
}

// Returns initially imported packages, as a string.
func imported(prog *loader.Program) string {
var pkgs []string
for _, info := range prog.Imported {
pkgs = append(pkgs, info.Pkg.Path())
}
sort.Strings(pkgs)
return strings.Join(pkgs, " ")
}

// Returns initially created packages, as a string.
func created(prog *loader.Program) string {
var pkgs []string
for _, info := range prog.Created {
pkgs = append(pkgs, info.Pkg.Path())
}
return strings.Join(pkgs, " ")
}
@ -1,197 +0,0 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build !go1.5

package loader_test

// This file enumerates all packages beneath $GOROOT, loads them, plus
// their external tests if any, runs the type checker on them, and
// prints some summary information.

import (
"bytes"
"fmt"
"go/ast"
"go/build"
"go/token"
"io/ioutil"
"path/filepath"
"runtime"
"strings"
"testing"
"time"

"golang.org/x/tools/go/buildutil"
"golang.org/x/tools/go/loader"
"golang.org/x/tools/go/types"
)

func TestStdlib(t *testing.T) {
if runtime.GOOS == "android" {
t.Skipf("incomplete std lib on %s", runtime.GOOS)
}

runtime.GC()
t0 := time.Now()
var memstats runtime.MemStats
runtime.ReadMemStats(&memstats)
alloc := memstats.Alloc

// Load, parse and type-check the program.
ctxt := build.Default // copy
ctxt.GOPATH = "" // disable GOPATH
conf := loader.Config{Build: &ctxt}
for _, path := range buildutil.AllPackages(conf.Build) {
conf.ImportWithTests(path)
}

prog, err := conf.Load()
if err != nil {
t.Fatalf("Load failed: %v", err)
}

t1 := time.Now()
runtime.GC()
runtime.ReadMemStats(&memstats)

numPkgs := len(prog.AllPackages)
if want := 205; numPkgs < want {
t.Errorf("Loaded only %d packages, want at least %d", numPkgs, want)
}

// Dump package members.
if false {
for pkg := range prog.AllPackages {
fmt.Printf("Package %s:\n", pkg.Path())
scope := pkg.Scope()
qualifier := types.RelativeTo(pkg)
for _, name := range scope.Names() {
if ast.IsExported(name) {
fmt.Printf("\t%s\n", types.ObjectString(scope.Lookup(name), qualifier))
}
}
fmt.Println()
}
}

// Check that Test functions for io/ioutil, regexp and
// compress/bzip2 are all simultaneously present.
// (The apparent cycle formed when augmenting all three of
// these packages by their tests was the original motivation
// for reporting b/7114.)
//
// compress/bzip2.TestBitReader in bzip2_test.go imports io/ioutil
// io/ioutil.TestTempFile in tempfile_test.go imports regexp
// regexp.TestRE2Search in exec_test.go imports compress/bzip2
for _, test := range []struct{ pkg, fn string }{
{"io/ioutil", "TestTempFile"},
{"regexp", "TestRE2Search"},
{"compress/bzip2", "TestBitReader"},
} {
info := prog.Imported[test.pkg]
if info == nil {
t.Errorf("failed to load package %q", test.pkg)
continue
}
obj, _ := info.Pkg.Scope().Lookup(test.fn).(*types.Func)
if obj == nil {
t.Errorf("package %q has no func %q", test.pkg, test.fn)
continue
}
}

// Dump some statistics.

// determine line count
var lineCount int
prog.Fset.Iterate(func(f *token.File) bool {
lineCount += f.LineCount()
return true
})

t.Log("GOMAXPROCS: ", runtime.GOMAXPROCS(0))
t.Log("#Source lines: ", lineCount)
t.Log("Load/parse/typecheck: ", t1.Sub(t0))
t.Log("#MB: ", int64(memstats.Alloc-alloc)/1000000)
}

func TestCgoOption(t *testing.T) {
switch runtime.GOOS {
// On these systems, the net and os/user packages don't use cgo
// or the std library is incomplete (Android).
case "android", "plan9", "solaris", "windows":
t.Skipf("no cgo or incomplete std lib on %s", runtime.GOOS)
}
// In nocgo builds (e.g. linux-amd64-nocgo),
// there is no "runtime/cgo" package,
// so cgo-generated Go files will have a failing import.
if !build.Default.CgoEnabled {
return
}
// Test that we can load cgo-using packages with
// CGO_ENABLED=[01], which causes go/build to select pure
// Go/native implementations, respectively, based on build
// tags.
//
// Each entry specifies a package-level object and the generic
// file expected to define it when cgo is disabled.
// When cgo is enabled, the exact file is not specified (since
// it varies by platform), but must differ from the generic one.
//
// The test also loads the actual file to verify that the
// object is indeed defined at that location.
for _, test := range []struct {
pkg, name, genericFile string
}{
{"net", "cgoLookupHost", "cgo_stub.go"},
{"os/user", "lookupId", "lookup_stubs.go"},
} {
ctxt := build.Default
for _, ctxt.CgoEnabled = range []bool{false, true} {
conf := loader.Config{Build: &ctxt}
conf.Import(test.pkg)
prog, err := conf.Load()
if err != nil {
t.Errorf("Load failed: %v", err)
continue
}
info := prog.Imported[test.pkg]
if info == nil {
t.Errorf("package %s not found", test.pkg)
continue
}
obj := info.Pkg.Scope().Lookup(test.name)
if obj == nil {
t.Errorf("no object %s.%s", test.pkg, test.name)
continue
}
posn := prog.Fset.Position(obj.Pos())
t.Logf("%s: %s (CgoEnabled=%t)", posn, obj, ctxt.CgoEnabled)

gotFile := filepath.Base(posn.Filename)
filesMatch := gotFile == test.genericFile

if ctxt.CgoEnabled && filesMatch {
t.Errorf("CGO_ENABLED=1: %s found in %s, want native file",
obj, gotFile)
} else if !ctxt.CgoEnabled && !filesMatch {
t.Errorf("CGO_ENABLED=0: %s found in %s, want %s",
obj, gotFile, test.genericFile)
}

// Load the file and check the object is declared at the right place.
b, err := ioutil.ReadFile(posn.Filename)
if err != nil {
t.Errorf("can't read %s: %s", posn.Filename, err)
continue
}
line := string(bytes.Split(b, []byte("\n"))[posn.Line-1])
ident := line[posn.Column-1:]
if !strings.HasPrefix(ident, test.name) {
t.Errorf("%s: %s not declared here (looking at %q)", posn, obj, ident)
}
}
}
}
Some files were not shown because too many files have changed in this diff