[dev.regabi] cmd/compile: backport changes from dev.typeparams (9456804)

This CL backports a bunch of changes that landed on dev.typeparams,
but are not dependent on types2 or generics. By backporting, we reduce
the divergence between development branches, hopefully improving test
coverage and reducing risk of merge conflicts.

Updates #43866.

Change-Id: I382510855c9b5fac52b17066e44a00bd07fe86f5
Reviewed-on: https://go-review.googlesource.com/c/go/+/286172
Trust: Matthew Dempsky <mdempsky@google.com>
Trust: Robert Griesemer <gri@golang.org>
Run-TryBot: Matthew Dempsky <mdempsky@google.com>
Reviewed-by: Robert Griesemer <gri@golang.org>
Author: Matthew Dempsky
Date:   2021-01-24 10:52:00 -08:00
Parent: d05d6fab32
Commit: 063c72f06d
14 changed files with 404 additions and 336 deletions

View File

@@ -0,0 +1,94 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package dwarfgen
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/internal/src"
)
// A ScopeMarker tracks scope nesting and boundaries for later use
// during DWARF generation.
type ScopeMarker struct {
parents []ir.ScopeID
marks []ir.Mark
}
// checkPos validates the given position and returns the current scope.
func (m *ScopeMarker) checkPos(pos src.XPos) ir.ScopeID {
if !pos.IsKnown() {
base.Fatalf("unknown scope position")
}
if len(m.marks) == 0 {
return 0
}
last := &m.marks[len(m.marks)-1]
if xposBefore(pos, last.Pos) {
base.FatalfAt(pos, "non-monotonic scope positions\n\t%v: previous scope position", base.FmtPos(last.Pos))
}
return last.Scope
}
// Push records a transition to a new child scope of the current scope.
func (m *ScopeMarker) Push(pos src.XPos) {
current := m.checkPos(pos)
m.parents = append(m.parents, current)
child := ir.ScopeID(len(m.parents))
m.marks = append(m.marks, ir.Mark{Pos: pos, Scope: child})
}
// Pop records a transition back to the current scope's parent.
func (m *ScopeMarker) Pop(pos src.XPos) {
current := m.checkPos(pos)
parent := m.parents[current-1]
m.marks = append(m.marks, ir.Mark{Pos: pos, Scope: parent})
}
// Unpush removes the current scope, which must be empty.
func (m *ScopeMarker) Unpush() {
i := len(m.marks) - 1
current := m.marks[i].Scope
if current != ir.ScopeID(len(m.parents)) {
base.FatalfAt(m.marks[i].Pos, "current scope is not empty")
}
m.parents = m.parents[:current-1]
m.marks = m.marks[:i]
}
// WriteTo writes the recorded scope marks to the given function,
// and resets the marker for reuse.
func (m *ScopeMarker) WriteTo(fn *ir.Func) {
m.compactMarks()
fn.Parents = make([]ir.ScopeID, len(m.parents))
copy(fn.Parents, m.parents)
m.parents = m.parents[:0]
fn.Marks = make([]ir.Mark, len(m.marks))
copy(fn.Marks, m.marks)
m.marks = m.marks[:0]
}
func (m *ScopeMarker) compactMarks() {
n := 0
for _, next := range m.marks {
if n > 0 && next.Pos == m.marks[n-1].Pos {
m.marks[n-1].Scope = next.Scope
continue
}
m.marks[n] = next
n++
}
m.marks = m.marks[:n]
}

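For readers reconstructing how these marks encode nesting, the sketch below is a self-contained analogue of ScopeMarker, not part of this commit: it drops positions and the compiler's ir types and keeps only the parents/marks bookkeeping, so it can be run on its own.

package main

import "fmt"

// scopeMarker mirrors the bookkeeping above: parents[id-1] is the parent of
// scope id, and marks records each scope transition in order.
type scopeMarker struct {
	parents []int
	marks   []int
}

// push opens a child of the current scope and records the transition.
func (m *scopeMarker) push() int {
	parent := 0
	if n := len(m.marks); n > 0 {
		parent = m.marks[n-1]
	}
	m.parents = append(m.parents, parent)
	child := len(m.parents)
	m.marks = append(m.marks, child)
	return child
}

// pop records a transition back to the current scope's parent.
func (m *scopeMarker) pop() {
	current := m.marks[len(m.marks)-1]
	m.marks = append(m.marks, m.parents[current-1])
}

func main() {
	var m scopeMarker
	outer := m.push() // enter an outer block
	inner := m.push() // enter a nested block
	m.pop()           // leave the nested block
	m.pop()           // leave the outer block
	fmt.Println(outer, inner, m.parents, m.marks) // 1 2 [0 1] [1 2 1 0]
}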
View File

@@ -5,18 +5,20 @@
package noder
import (
"errors"
"fmt"
"go/constant"
"os"
"path"
pathpkg "path"
"runtime"
"sort"
"strconv"
"strings"
"unicode"
"unicode/utf8"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/syntax"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/archive"
@@ -38,160 +40,157 @@ func islocalname(name string) bool {
strings.HasPrefix(name, "../") || name == ".."
}
func findpkg(name string) (file string, ok bool) {
if islocalname(name) {
func openPackage(path string) (*os.File, error) {
if islocalname(path) {
if base.Flag.NoLocalImports {
return "", false
return nil, errors.New("local imports disallowed")
}
if base.Flag.Cfg.PackageFile != nil {
file, ok = base.Flag.Cfg.PackageFile[name]
return file, ok
return os.Open(base.Flag.Cfg.PackageFile[path])
}
// try .a before .6. important for building libraries:
// if there is an array.6 in the array.a library,
// want to find all of array.a, not just array.6.
file = fmt.Sprintf("%s.a", name)
if _, err := os.Stat(file); err == nil {
return file, true
// try .a before .o. important for building libraries:
// if there is an array.o in the array.a library,
// want to find all of array.a, not just array.o.
if file, err := os.Open(fmt.Sprintf("%s.a", path)); err == nil {
return file, nil
}
file = fmt.Sprintf("%s.o", name)
if _, err := os.Stat(file); err == nil {
return file, true
if file, err := os.Open(fmt.Sprintf("%s.o", path)); err == nil {
return file, nil
}
return "", false
return nil, errors.New("file not found")
}
// local imports should be canonicalized already.
// don't want to see "encoding/../encoding/base64"
// as different from "encoding/base64".
if q := path.Clean(name); q != name {
base.Errorf("non-canonical import path %q (should be %q)", name, q)
return "", false
if q := pathpkg.Clean(path); q != path {
return nil, fmt.Errorf("non-canonical import path %q (should be %q)", path, q)
}
if base.Flag.Cfg.PackageFile != nil {
file, ok = base.Flag.Cfg.PackageFile[name]
return file, ok
return os.Open(base.Flag.Cfg.PackageFile[path])
}
for _, dir := range base.Flag.Cfg.ImportDirs {
file = fmt.Sprintf("%s/%s.a", dir, name)
if _, err := os.Stat(file); err == nil {
return file, true
if file, err := os.Open(fmt.Sprintf("%s/%s.a", dir, path)); err == nil {
return file, nil
}
file = fmt.Sprintf("%s/%s.o", dir, name)
if _, err := os.Stat(file); err == nil {
return file, true
if file, err := os.Open(fmt.Sprintf("%s/%s.o", dir, path)); err == nil {
return file, nil
}
}
if objabi.GOROOT != "" {
suffix := ""
suffixsep := ""
if base.Flag.InstallSuffix != "" {
suffixsep = "_"
suffix = base.Flag.InstallSuffix
suffix = "_" + base.Flag.InstallSuffix
} else if base.Flag.Race {
suffixsep = "_"
suffix = "race"
suffix = "_race"
} else if base.Flag.MSan {
suffixsep = "_"
suffix = "msan"
suffix = "_msan"
}
file = fmt.Sprintf("%s/pkg/%s_%s%s%s/%s.a", objabi.GOROOT, objabi.GOOS, objabi.GOARCH, suffixsep, suffix, name)
if _, err := os.Stat(file); err == nil {
return file, true
if file, err := os.Open(fmt.Sprintf("%s/pkg/%s_%s%s/%s.a", objabi.GOROOT, objabi.GOOS, objabi.GOARCH, suffix, path)); err == nil {
return file, nil
}
file = fmt.Sprintf("%s/pkg/%s_%s%s%s/%s.o", objabi.GOROOT, objabi.GOOS, objabi.GOARCH, suffixsep, suffix, name)
if _, err := os.Stat(file); err == nil {
return file, true
if file, err := os.Open(fmt.Sprintf("%s/pkg/%s_%s%s/%s.o", objabi.GOROOT, objabi.GOOS, objabi.GOARCH, suffix, path)); err == nil {
return file, nil
}
}
return "", false
return nil, errors.New("file not found")
}
// myheight tracks the local package's height based on packages
// imported so far.
var myheight int
func importfile(f constant.Value) *types.Pkg {
if f.Kind() != constant.String {
base.Errorf("import path must be a string")
return nil
}
path_ := constant.StringVal(f)
if len(path_) == 0 {
base.Errorf("import path is empty")
return nil
}
if isbadimport(path_, false) {
return nil
}
// resolveImportPath resolves an import path as it appears in a Go
// source file to the package's full path.
func resolveImportPath(path string) (string, error) {
// The package name main is no longer reserved,
// but we reserve the import path "main" to identify
// the main package, just as we reserve the import
// path "math" to identify the standard math package.
if path_ == "main" {
base.Errorf("cannot import \"main\"")
base.ErrorExit()
if path == "main" {
return "", errors.New("cannot import \"main\"")
}
if base.Ctxt.Pkgpath != "" && path_ == base.Ctxt.Pkgpath {
base.Errorf("import %q while compiling that package (import cycle)", path_)
base.ErrorExit()
if base.Ctxt.Pkgpath != "" && path == base.Ctxt.Pkgpath {
return "", fmt.Errorf("import %q while compiling that package (import cycle)", path)
}
if mapped, ok := base.Flag.Cfg.ImportMap[path_]; ok {
path_ = mapped
if mapped, ok := base.Flag.Cfg.ImportMap[path]; ok {
path = mapped
}
if path_ == "unsafe" {
return ir.Pkgs.Unsafe
}
if islocalname(path_) {
if path_[0] == '/' {
base.Errorf("import path cannot be absolute path")
return nil
if islocalname(path) {
if path[0] == '/' {
return "", errors.New("import path cannot be absolute path")
}
prefix := base.Ctxt.Pathname
if base.Flag.D != "" {
prefix = base.Flag.D
prefix := base.Flag.D
if prefix == "" {
// Questionable, but when -D isn't specified, historically we
// resolve local import paths relative to the compiler's current
// directory, not the respective source file's directory.
prefix = base.Ctxt.Pathname
}
path_ = path.Join(prefix, path_)
path = pathpkg.Join(prefix, path)
if isbadimport(path_, true) {
return nil
if err := checkImportPath(path, true); err != nil {
return "", err
}
}
file, found := findpkg(path_)
if !found {
base.Errorf("can't find import: %q", path_)
base.ErrorExit()
return path, nil
}
// TODO(mdempsky): Return an error instead.
func importfile(decl *syntax.ImportDecl) *types.Pkg {
if decl.Path.Kind != syntax.StringLit {
base.Errorf("import path must be a string")
return nil
}
importpkg := types.NewPkg(path_, "")
if importpkg.Imported {
return importpkg
}
importpkg.Imported = true
imp, err := bio.Open(file)
path, err := strconv.Unquote(decl.Path.Value)
if err != nil {
base.Errorf("can't open import: %q: %v", path_, err)
base.Errorf("import path must be a string")
return nil
}
if err := checkImportPath(path, false); err != nil {
base.Errorf("%s", err.Error())
return nil
}
path, err = resolveImportPath(path)
if err != nil {
base.Errorf("%s", err)
return nil
}
importpkg := types.NewPkg(path, "")
if importpkg.Direct {
return importpkg // already fully loaded
}
importpkg.Direct = true
typecheck.Target.Imports = append(typecheck.Target.Imports, importpkg)
if path == "unsafe" {
return importpkg // initialized with universe
}
f, err := openPackage(path)
if err != nil {
base.Errorf("could not import %q: %v", path, err)
base.ErrorExit()
}
imp := bio.NewReader(f)
defer imp.Close()
file := f.Name()
// check object header
p, err := imp.ReadString('\n')
@@ -261,12 +260,12 @@ func importfile(f constant.Value) *types.Pkg {
var fingerprint goobj.FingerprintType
switch c {
case '\n':
base.Errorf("cannot import %s: old export format no longer supported (recompile library)", path_)
base.Errorf("cannot import %s: old export format no longer supported (recompile library)", path)
return nil
case 'B':
if base.Debug.Export != 0 {
fmt.Printf("importing %s (%s)\n", path_, file)
fmt.Printf("importing %s (%s)\n", path, file)
}
imp.ReadByte() // skip \n after $$B
@@ -285,17 +284,17 @@ func importfile(f constant.Value) *types.Pkg {
fingerprint = typecheck.ReadImports(importpkg, imp)
default:
base.Errorf("no import in %q", path_)
base.Errorf("no import in %q", path)
base.ErrorExit()
}
// assume files move (get installed) so don't record the full path
if base.Flag.Cfg.PackageFile != nil {
// If using a packageFile map, assume path_ can be recorded directly.
base.Ctxt.AddImport(path_, fingerprint)
base.Ctxt.AddImport(path, fingerprint)
} else {
// For file "/Users/foo/go/pkg/darwin_amd64/math.a" record "math.a".
base.Ctxt.AddImport(file[len(file)-len(path_)-len(".a"):], fingerprint)
base.Ctxt.AddImport(file[len(file)-len(path)-len(".a"):], fingerprint)
}
if importpkg.Height >= myheight {
@@ -315,47 +314,37 @@ var reservedimports = []string{
"type",
}
func isbadimport(path string, allowSpace bool) bool {
func checkImportPath(path string, allowSpace bool) error {
if path == "" {
return errors.New("import path is empty")
}
if strings.Contains(path, "\x00") {
base.Errorf("import path contains NUL")
return true
return errors.New("import path contains NUL")
}
for _, ri := range reservedimports {
if path == ri {
base.Errorf("import path %q is reserved and cannot be used", path)
return true
return fmt.Errorf("import path %q is reserved and cannot be used", path)
}
}
for _, r := range path {
if r == utf8.RuneError {
base.Errorf("import path contains invalid UTF-8 sequence: %q", path)
return true
}
if r < 0x20 || r == 0x7f {
base.Errorf("import path contains control character: %q", path)
return true
}
if r == '\\' {
base.Errorf("import path contains backslash; use slash: %q", path)
return true
}
if !allowSpace && unicode.IsSpace(r) {
base.Errorf("import path contains space character: %q", path)
return true
}
if strings.ContainsRune("!\"#$%&'()*,:;<=>?[]^`{|}", r) {
base.Errorf("import path contains invalid character '%c': %q", r, path)
return true
switch {
case r == utf8.RuneError:
return fmt.Errorf("import path contains invalid UTF-8 sequence: %q", path)
case r < 0x20 || r == 0x7f:
return fmt.Errorf("import path contains control character: %q", path)
case r == '\\':
return fmt.Errorf("import path contains backslash; use slash: %q", path)
case !allowSpace && unicode.IsSpace(r):
return fmt.Errorf("import path contains space character: %q", path)
case strings.ContainsRune("!\"#$%&'()*,:;<=>?[]^`{|}", r):
return fmt.Errorf("import path contains invalid character '%c': %q", r, path)
}
}
return false
return nil
}
func pkgnotused(lineno src.XPos, path string, name string) {

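The refactor above replaces the base.Errorf-plus-boolean style of findpkg/isbadimport with functions that return errors. As a rough, self-contained approximation of the new checkImportPath (an illustrative sketch only: the reserved-path check and the surrounding importer are omitted), the character validation works like this:

package main

import (
	"errors"
	"fmt"
	"strings"
	"unicode"
	"unicode/utf8"
)

// checkImportPath approximates the validation shown in the diff above,
// returning an error instead of reporting it directly.
func checkImportPath(path string, allowSpace bool) error {
	if path == "" {
		return errors.New("import path is empty")
	}
	if strings.Contains(path, "\x00") {
		return errors.New("import path contains NUL")
	}
	for _, r := range path {
		switch {
		case r == utf8.RuneError:
			return fmt.Errorf("import path contains invalid UTF-8 sequence: %q", path)
		case r < 0x20 || r == 0x7f:
			return fmt.Errorf("import path contains control character: %q", path)
		case r == '\\':
			return fmt.Errorf("import path contains backslash; use slash: %q", path)
		case !allowSpace && unicode.IsSpace(r):
			return fmt.Errorf("import path contains space character: %q", path)
		case strings.ContainsRune("!\"#$%&'()*,:;<=>?[]^`{|}", r):
			return fmt.Errorf("import path contains invalid character '%c': %q", r, path)
		}
	}
	return nil
}

func main() {
	fmt.Println(checkImportPath("unicode/utf8", false)) // <nil>
	fmt.Println(checkImportPath("bad path", false))     // space character error
}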
View File

@@ -17,6 +17,7 @@ import (
"unicode/utf8"
"cmd/compile/internal/base"
"cmd/compile/internal/dwarfgen"
"cmd/compile/internal/ir"
"cmd/compile/internal/syntax"
"cmd/compile/internal/typecheck"
@@ -27,40 +28,26 @@ import (
func LoadPackage(filenames []string) {
base.Timer.Start("fe", "parse")
lines := ParseFiles(filenames)
base.Timer.Stop()
base.Timer.AddEvent(int64(lines), "lines")
// Typecheck.
Package()
mode := syntax.CheckBranches
// With all user code typechecked, it's now safe to verify unused dot imports.
CheckDotImports()
base.ExitIfErrors()
}
// ParseFiles concurrently parses files into *syntax.File structures.
// Each declaration in every *syntax.File is converted to a syntax tree
// and its root represented by *Node is appended to Target.Decls.
// Returns the total count of parsed lines.
func ParseFiles(filenames []string) uint {
noders := make([]*noder, 0, len(filenames))
// Limit the number of simultaneously open files.
sem := make(chan struct{}, runtime.GOMAXPROCS(0)+10)
for _, filename := range filenames {
p := &noder{
basemap: make(map[*syntax.PosBase]*src.PosBase),
noders := make([]*noder, len(filenames))
for i, filename := range filenames {
p := noder{
err: make(chan syntax.Error),
trackScopes: base.Flag.Dwarf,
}
noders = append(noders, p)
noders[i] = &p
go func(filename string) {
filename := filename
go func() {
sem <- struct{}{}
defer func() { <-sem }()
defer close(p.err)
base := syntax.NewFileBase(filename)
fbase := syntax.NewFileBase(filename)
f, err := os.Open(filename)
if err != nil {
@@ -69,8 +56,8 @@ func ParseFiles(filenames []string) uint {
}
defer f.Close()
p.file, _ = syntax.Parse(base, f, p.error, p.pragma, syntax.CheckBranches) // errors are tracked via p.error
}(filename)
p.file, _ = syntax.Parse(fbase, f, p.error, p.pragma, mode) // errors are tracked via p.error
}()
}
var lines uint
@@ -78,30 +65,27 @@ func ParseFiles(filenames []string) uint {
for e := range p.err {
p.errorAt(e.Pos, "%s", e.Msg)
}
p.node()
lines += p.file.Lines
p.file = nil // release memory
if base.SyntaxErrors() != 0 {
base.ErrorExit()
}
// Always run CheckDclstack here, even when debug_dclstack is not set, as a sanity measure.
types.CheckDclstack()
}
base.Timer.AddEvent(int64(lines), "lines")
for _, p := range noders {
p.node()
p.file = nil // release memory
}
if base.SyntaxErrors() != 0 {
base.ErrorExit()
}
types.CheckDclstack()
for _, p := range noders {
p.processPragmas()
}
// Typecheck.
types.LocalPkg.Height = myheight
return lines
}
func Package() {
typecheck.DeclareUniverse()
typecheck.TypecheckAllowed = true
// Process top-level declarations in phases.
@@ -166,44 +150,10 @@ func Package() {
}
// Phase 5: With all user code type-checked, it's now safe to verify map keys.
// With all user code typechecked, it's now safe to verify unused dot imports.
typecheck.CheckMapKeys()
}
// makeSrcPosBase translates from a *syntax.PosBase to a *src.PosBase.
func (p *noder) makeSrcPosBase(b0 *syntax.PosBase) *src.PosBase {
// fast path: most likely PosBase hasn't changed
if p.basecache.last == b0 {
return p.basecache.base
}
b1, ok := p.basemap[b0]
if !ok {
fn := b0.Filename()
if b0.IsFileBase() {
b1 = src.NewFileBase(fn, absFilename(fn))
} else {
// line directive base
p0 := b0.Pos()
p0b := p0.Base()
if p0b == b0 {
panic("infinite recursion in makeSrcPosBase")
}
p1 := src.MakePos(p.makeSrcPosBase(p0b), p0.Line(), p0.Col())
b1 = src.NewLinePragmaBase(p1, fn, fileh(fn), b0.Line(), b0.Col())
}
p.basemap[b0] = b1
}
// update cache
p.basecache.last = b0
p.basecache.base = b1
return b1
}
func (p *noder) makeXPos(pos syntax.Pos) (_ src.XPos) {
return base.Ctxt.PosTable.XPos(src.MakePos(p.makeSrcPosBase(pos.Base()), pos.Line(), pos.Col()))
CheckDotImports()
base.ExitIfErrors()
}
func (p *noder) errorAt(pos syntax.Pos, format string, args ...interface{}) {
@@ -221,31 +171,33 @@ func absFilename(name string) string {
// noder transforms package syntax's AST into a Node tree.
type noder struct {
basemap map[*syntax.PosBase]*src.PosBase
basecache struct {
last *syntax.PosBase
base *src.PosBase
}
posMap
file *syntax.File
linknames []linkname
pragcgobuf [][]string
err chan syntax.Error
scope ir.ScopeID
importedUnsafe bool
importedEmbed bool
trackScopes bool
// scopeVars is a stack tracking the number of variables declared in the
// current function at the moment each open scope was opened.
trackScopes bool
scopeVars []int
funcState *funcState
}
// funcState tracks all per-function state to make handling nested
// functions easier.
type funcState struct {
// scopeVars is a stack tracking the number of variables declared in
// the current function at the moment each open scope was opened.
scopeVars []int
marker dwarfgen.ScopeMarker
lastCloseScopePos syntax.Pos
}
func (p *noder) funcBody(fn *ir.Func, block *syntax.BlockStmt) {
oldScope := p.scope
p.scope = 0
outerFuncState := p.funcState
p.funcState = new(funcState)
typecheck.StartFuncBody(fn)
if block != nil {
@@ -260,62 +212,34 @@ func (p *noder) funcBody(fn *ir.Func, block *syntax.BlockStmt) {
}
typecheck.FinishFuncBody()
p.scope = oldScope
p.funcState.marker.WriteTo(fn)
p.funcState = outerFuncState
}
func (p *noder) openScope(pos syntax.Pos) {
fs := p.funcState
types.Markdcl()
if p.trackScopes {
ir.CurFunc.Parents = append(ir.CurFunc.Parents, p.scope)
p.scopeVars = append(p.scopeVars, len(ir.CurFunc.Dcl))
p.scope = ir.ScopeID(len(ir.CurFunc.Parents))
p.markScope(pos)
fs.scopeVars = append(fs.scopeVars, len(ir.CurFunc.Dcl))
fs.marker.Push(p.makeXPos(pos))
}
}
func (p *noder) closeScope(pos syntax.Pos) {
p.lastCloseScopePos = pos
fs := p.funcState
fs.lastCloseScopePos = pos
types.Popdcl()
if p.trackScopes {
scopeVars := p.scopeVars[len(p.scopeVars)-1]
p.scopeVars = p.scopeVars[:len(p.scopeVars)-1]
scopeVars := fs.scopeVars[len(fs.scopeVars)-1]
fs.scopeVars = fs.scopeVars[:len(fs.scopeVars)-1]
if scopeVars == len(ir.CurFunc.Dcl) {
// no variables were declared in this scope, so we can retract it.
if int(p.scope) != len(ir.CurFunc.Parents) {
base.Fatalf("scope tracking inconsistency, no variables declared but scopes were not retracted")
}
p.scope = ir.CurFunc.Parents[p.scope-1]
ir.CurFunc.Parents = ir.CurFunc.Parents[:len(ir.CurFunc.Parents)-1]
nmarks := len(ir.CurFunc.Marks)
ir.CurFunc.Marks[nmarks-1].Scope = p.scope
prevScope := ir.ScopeID(0)
if nmarks >= 2 {
prevScope = ir.CurFunc.Marks[nmarks-2].Scope
}
if ir.CurFunc.Marks[nmarks-1].Scope == prevScope {
ir.CurFunc.Marks = ir.CurFunc.Marks[:nmarks-1]
}
return
fs.marker.Unpush()
} else {
fs.marker.Pop(p.makeXPos(pos))
}
p.scope = ir.CurFunc.Parents[p.scope-1]
p.markScope(pos)
}
}
func (p *noder) markScope(pos syntax.Pos) {
xpos := p.makeXPos(pos)
if i := len(ir.CurFunc.Marks); i > 0 && ir.CurFunc.Marks[i-1].Pos == xpos {
ir.CurFunc.Marks[i-1].Scope = p.scope
} else {
ir.CurFunc.Marks = append(ir.CurFunc.Marks, ir.Mark{Pos: xpos, Scope: p.scope})
}
}
@@ -324,7 +248,7 @@ func (p *noder) markScope(pos syntax.Pos) {
// "if" statements, as their implicit blocks always end at the same
// position as an explicit block.
func (p *noder) closeAnotherScope() {
p.closeScope(p.lastCloseScopePos)
p.closeScope(p.funcState.lastCloseScopePos)
}
// linkname records a //go:linkname directive.
@@ -335,7 +259,6 @@ type linkname struct {
}
func (p *noder) node() {
types.Block = 1
p.importedUnsafe = false
p.importedEmbed = false
@@ -404,7 +327,7 @@ func (p *noder) decls(decls []syntax.Decl) (l []ir.Node) {
}
func (p *noder) importDecl(imp *syntax.ImportDecl) {
if imp.Path.Bad {
if imp.Path == nil || imp.Path.Bad {
return // avoid follow-on errors if there was a syntax error
}
@@ -412,7 +335,7 @@ func (p *noder) importDecl(imp *syntax.ImportDecl) {
p.checkUnused(pragma)
}
ipkg := importfile(p.basicLit(imp.Path))
ipkg := importfile(imp)
if ipkg == nil {
if base.Errors() == 0 {
base.Fatalf("phase error in import")
@@ -427,11 +350,6 @@ func (p *noder) importDecl(imp *syntax.ImportDecl) {
p.importedEmbed = true
}
if !ipkg.Direct {
typecheck.Target.Imports = append(typecheck.Target.Imports, ipkg)
}
ipkg.Direct = true
var my *types.Sym
if imp.LocalPkgName != nil {
my = p.name(imp.LocalPkgName)
@@ -465,20 +383,7 @@ func (p *noder) varDecl(decl *syntax.VarDecl) []ir.Node {
exprs := p.exprList(decl.Values)
if pragma, ok := decl.Pragma.(*pragmas); ok {
if len(pragma.Embeds) > 0 {
if !p.importedEmbed {
// This check can't be done when building the list pragma.Embeds
// because that list is created before the noder starts walking over the file,
// so at that point it hasn't seen the imports.
// We're left to check now, just before applying the //go:embed lines.
for _, e := range pragma.Embeds {
p.errorAt(e.Pos, "//go:embed only allowed in Go files that import \"embed\"")
}
} else {
varEmbed(p, names, typ, exprs, pragma.Embeds)
}
pragma.Embeds = nil
}
varEmbed(p.makeXPos, names[0], decl, pragma, p.importedEmbed)
p.checkUnused(pragma)
}
@@ -1126,9 +1031,16 @@ func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) ir.Node {
case *syntax.DeclStmt:
return ir.NewBlockStmt(src.NoXPos, p.decls(stmt.DeclList))
case *syntax.AssignStmt:
if stmt.Rhs == syntax.ImplicitOne {
one := constant.MakeInt64(1)
pos := p.pos(stmt)
n := ir.NewAssignOpStmt(pos, p.binOp(stmt.Op), p.expr(stmt.Lhs), ir.NewBasicLit(pos, one))
n.IncDec = true
return n
}
if stmt.Op != 0 && stmt.Op != syntax.Def {
n := ir.NewAssignOpStmt(p.pos(stmt), p.binOp(stmt.Op), p.expr(stmt.Lhs), p.expr(stmt.Rhs))
n.IncDec = stmt.Rhs == syntax.ImplicitOne
return n
}
@@ -1588,15 +1500,6 @@ func (p *noder) wrapname(n syntax.Node, x ir.Node) ir.Node {
return x
}
func (p *noder) pos(n syntax.Node) src.XPos {
// TODO(gri): orig.Pos() should always be known - fix package syntax
xpos := base.Pos
if pos := n.Pos(); pos.IsKnown() {
xpos = p.makeXPos(pos)
}
return xpos
}
func (p *noder) setlineno(n syntax.Node) {
if n != nil {
base.Pos = p.pos(n)
@@ -1923,48 +1826,41 @@ func oldname(s *types.Sym) ir.Node {
return n
}
func varEmbed(p *noder, names []*ir.Name, typ ir.Ntype, exprs []ir.Node, embeds []pragmaEmbed) {
haveEmbed := false
for _, decl := range p.file.DeclList {
imp, ok := decl.(*syntax.ImportDecl)
if !ok {
// imports always come first
break
}
path, _ := strconv.Unquote(imp.Path.Value)
if path == "embed" {
haveEmbed = true
break
}
func varEmbed(makeXPos func(syntax.Pos) src.XPos, name *ir.Name, decl *syntax.VarDecl, pragma *pragmas, haveEmbed bool) {
if pragma.Embeds == nil {
return
}
pos := embeds[0].Pos
pragmaEmbeds := pragma.Embeds
pragma.Embeds = nil
pos := makeXPos(pragmaEmbeds[0].Pos)
if !haveEmbed {
p.errorAt(pos, "invalid go:embed: missing import \"embed\"")
base.ErrorfAt(pos, "go:embed only allowed in Go files that import \"embed\"")
return
}
if len(names) > 1 {
p.errorAt(pos, "go:embed cannot apply to multiple vars")
if len(decl.NameList) > 1 {
base.ErrorfAt(pos, "go:embed cannot apply to multiple vars")
return
}
if len(exprs) > 0 {
p.errorAt(pos, "go:embed cannot apply to var with initializer")
if decl.Values != nil {
base.ErrorfAt(pos, "go:embed cannot apply to var with initializer")
return
}
if typ == nil {
// Should not happen, since len(exprs) == 0 now.
p.errorAt(pos, "go:embed cannot apply to var without type")
if decl.Type == nil {
// Should not happen, since Values == nil now.
base.ErrorfAt(pos, "go:embed cannot apply to var without type")
return
}
if typecheck.DeclContext != ir.PEXTERN {
p.errorAt(pos, "go:embed cannot apply to var inside func")
base.ErrorfAt(pos, "go:embed cannot apply to var inside func")
return
}
v := names[0]
typecheck.Target.Embeds = append(typecheck.Target.Embeds, v)
v.Embed = new([]ir.Embed)
for _, e := range embeds {
*v.Embed = append(*v.Embed, ir.Embed{Pos: p.makeXPos(e.Pos), Patterns: e.Patterns})
var embeds []ir.Embed
for _, e := range pragmaEmbeds {
embeds = append(embeds, ir.Embed{Pos: makeXPos(e.Pos), Patterns: e.Patterns})
}
typecheck.Target.Embeds = append(typecheck.Target.Embeds, name)
name.Embed = &embeds
}

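The varEmbed rewrite above reports the user-facing //go:embed rules at the declaration site: the file must import "embed", and the directive must apply to a single package-level variable that has a type and no initializer. A minimal user-level example that satisfies those rules (illustrative only; it assumes a hello.txt file sits next to the source file):

package main

import (
	_ "embed" // //go:embed is only allowed in Go files that import "embed"
	"fmt"
)

//go:embed hello.txt
var greeting string // one variable, with a type, no initializer, at package scope

func main() {
	fmt.Println(greeting)
}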
View File

@@ -0,0 +1,83 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package noder
import (
"cmd/compile/internal/base"
"cmd/compile/internal/syntax"
"cmd/internal/src"
)
// A posMap handles mapping from syntax.Pos to src.XPos.
type posMap struct {
bases map[*syntax.PosBase]*src.PosBase
cache struct {
last *syntax.PosBase
base *src.PosBase
}
}
type poser interface{ Pos() syntax.Pos }
type ender interface{ End() syntax.Pos }
func (m *posMap) pos(p poser) src.XPos { return m.makeXPos(p.Pos()) }
func (m *posMap) end(p ender) src.XPos { return m.makeXPos(p.End()) }
func (m *posMap) makeXPos(pos syntax.Pos) src.XPos {
if !pos.IsKnown() {
// TODO(mdempsky): Investigate restoring base.Fatalf.
return src.NoXPos
}
posBase := m.makeSrcPosBase(pos.Base())
return base.Ctxt.PosTable.XPos(src.MakePos(posBase, pos.Line(), pos.Col()))
}
// makeSrcPosBase translates from a *syntax.PosBase to a *src.PosBase.
func (m *posMap) makeSrcPosBase(b0 *syntax.PosBase) *src.PosBase {
// fast path: most likely PosBase hasn't changed
if m.cache.last == b0 {
return m.cache.base
}
b1, ok := m.bases[b0]
if !ok {
fn := b0.Filename()
if b0.IsFileBase() {
b1 = src.NewFileBase(fn, absFilename(fn))
} else {
// line directive base
p0 := b0.Pos()
p0b := p0.Base()
if p0b == b0 {
panic("infinite recursion in makeSrcPosBase")
}
p1 := src.MakePos(m.makeSrcPosBase(p0b), p0.Line(), p0.Col())
b1 = src.NewLinePragmaBase(p1, fn, fileh(fn), b0.Line(), b0.Col())
}
if m.bases == nil {
m.bases = make(map[*syntax.PosBase]*src.PosBase)
}
m.bases[b0] = b1
}
// update cache
m.cache.last = b0
m.cache.base = b1
return b1
}
func (m *posMap) join(other *posMap) {
if m.bases == nil {
m.bases = make(map[*syntax.PosBase]*src.PosBase)
}
for k, v := range other.bases {
if m.bases[k] != nil {
base.Fatalf("duplicate posmap bases")
}
m.bases[k] = v
}
}

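posMap's makeSrcPosBase pairs a one-entry cache with a lazily allocated map, so the common case of repeated positions from the same base skips the map lookup entirely. The sketch below is a standalone analogue of that lookup pattern (illustrative only; it uses string keys and int values instead of *syntax.PosBase and *src.PosBase):

package main

import "fmt"

// memo mimics posMap's lookup strategy: a one-entry cache in front of a
// lazily allocated map, so repeated lookups of the same key skip the map.
type memo struct {
	m    map[string]int
	last string
	val  int
	miss int // number of full map lookups, for demonstration
}

func (c *memo) get(k string) int {
	// fast path: most likely the key hasn't changed since the last call
	if c.m != nil && c.last == k {
		return c.val
	}
	c.miss++
	v, ok := c.m[k]
	if !ok {
		if c.m == nil {
			c.m = make(map[string]int)
		}
		v = len(c.m) + 1 // assign the next id
		c.m[k] = v
	}
	// update cache
	c.last, c.val = k, v
	return v
}

func main() {
	var c memo
	fmt.Println(c.get("a"), c.get("a"), c.get("b"), c.get("a")) // 1 1 2 1
	fmt.Println("map lookups:", c.miss)                         // 3
}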
View File

@@ -791,7 +791,7 @@ func dcommontype(lsym *obj.LSym, t *types.Type) int {
// TrackSym returns the symbol for tracking use of field/method f, assumed
// to be a member of struct/interface type t.
func TrackSym(t *types.Type, f *types.Field) *obj.LSym {
return base.PkgLinksym("go.track", t.ShortString() + "." + f.Sym.Name, obj.ABI0)
return base.PkgLinksym("go.track", t.ShortString()+"."+f.Sym.Name, obj.ABI0)
}
func TypeSymPrefix(prefix string, t *types.Type) *types.Sym {

View File

@@ -304,10 +304,13 @@ func checkembeddedtype(t *types.Type) {
}
}
func fakeRecvField() *types.Field {
// TODO(mdempsky): Move to package types.
func FakeRecv() *types.Field {
return types.NewField(src.NoXPos, nil, types.FakeRecvType())
}
var fakeRecvField = FakeRecv
var funcStack []funcStackEnt // stack of previous values of ir.CurFunc/DeclContext
type funcStackEnt struct {

View File

@@ -174,7 +174,7 @@ func fnpkg(fn *ir.Name) *types.Pkg {
// closurename generates a new unique name for a closure within
// outerfunc.
func closurename(outerfunc *ir.Func) *types.Sym {
func ClosureName(outerfunc *ir.Func) *types.Sym {
outer := "glob."
prefix := "func"
gen := &globClosgen
@@ -309,7 +309,7 @@ func tcClosure(clo *ir.ClosureExpr, top int) {
// explicitly in (*inlsubst).node()).
inTypeCheckInl := ir.CurFunc != nil && ir.CurFunc.Body == nil
if !inTypeCheckInl {
fn.Nname.SetSym(closurename(ir.CurFunc))
fn.Nname.SetSym(ClosureName(ir.CurFunc))
ir.MarkFunc(fn.Nname)
}
Func(fn)

View File

@@ -127,10 +127,9 @@ func NodNil() ir.Node {
return n
}
// in T.field
// find missing fields that
// will give shortest unique addressing.
// modify the tree with missing type names.
// AddImplicitDots finds missing fields in obj.field that
// will give the shortest unique addressing and
// modifies the tree with missing field names.
func AddImplicitDots(n *ir.SelectorExpr) *ir.SelectorExpr {
n.X = typecheck(n.X, ctxType|ctxExpr)
if n.X.Diag() {

View File

@@ -1674,10 +1674,10 @@ func CheckMapKeys() {
mapqueue = nil
}
// typegen tracks the number of function-scoped defined types that
// TypeGen tracks the number of function-scoped defined types that
// have been declared. It's used to generate unique linker symbols for
// their runtime type descriptors.
var typegen int32
var TypeGen int32
func typecheckdeftype(n *ir.Name) {
if base.EnableTrace && base.Flag.LowerT {
@@ -1686,8 +1686,8 @@ func typecheckdeftype(n *ir.Name) {
t := types.NewNamed(n)
if n.Curfn != nil {
typegen++
t.Vargen = typegen
TypeGen++
t.Vargen = TypeGen
}
if n.Pragma()&ir.NotInHeap != 0 {

View File

@@ -31,8 +31,7 @@ type Pkg struct {
// height of their imported packages.
Height int
Imported bool // export data of this package was parsed
Direct bool // imported directly
Direct bool // imported directly
}
// NewPkg returns a new Pkg for the given package path and name.

View File

@@ -12,7 +12,7 @@ import (
// Declaration stack & operations
var blockgen int32 = 1 // max block number
var Block int32 // current block number
var Block int32 = 1 // current block number
// A dsym stores a symbol's shadowed declaration so that it can be
// restored once the block scope ends.

View File

@@ -49,6 +49,11 @@ func Walk(fn *ir.Func) {
if base.Flag.Cfg.Instrumenting {
instrument(fn)
}
// Eagerly compute sizes of all variables for SSA.
for _, n := range fn.Dcl {
types.CalcSize(n.Type())
}
}
// walkRecv walks an ORECV node.

View File

@@ -809,9 +809,9 @@ type Link struct {
Errors int
RegArgs []RegArg
InParallel bool // parallel backend phase in effect
UseBASEntries bool // use Base Address Selection Entries in location lists and PC ranges
IsAsm bool // is the source assembly language, which may contain surprising idioms (e.g., call tables)
InParallel bool // parallel backend phase in effect
UseBASEntries bool // use Base Address Selection Entries in location lists and PC ranges
IsAsm bool // is the source assembly language, which may contain surprising idioms (e.g., call tables)
// state for writing objects
Text []*LSym

View File

@@ -8,7 +8,7 @@
package main
import _ "unicode//utf8" // GC_ERROR "non-canonical import path .unicode//utf8. \(should be .unicode/utf8.\)" "can't find import: .unicode//utf8."
import _ "unicode//utf8" // GC_ERROR "non-canonical import path .unicode//utf8. \(should be .unicode/utf8.\)"
func main() {
}