[dev.typeparams] all: merge dev.regabi (77365c5) into dev.typeparams

Conflicts:

- src/cmd/compile/internal/gc/main.go
- test/fixedbugs/issue15055.go

Merge List:

+ 2021-01-05 77365c5ed7 [dev.regabi] cmd/compile: add Name.Canonical and move Byval
+ 2021-01-05 e09783cbc0 [dev.regabi] cmd/compile: make ir.StaticValue safer
+ 2021-01-05 9aa950c407 [dev.regabi] cmd/compile: make ir.OuterValue safer
+ 2021-01-05 eb626409d1 [dev.regabi] cmd/compile: simplify CaptureVars
+ 2021-01-05 c28ca67a96 [dev.regabi] cmd/compile: fix ir.Dump for []*CaseClause, etc
+ 2021-01-04 f24e40c14a [dev.regabi] cmd/compile: remove Name.Class_ accessors
+ 2021-01-04 d89705e087 [dev.regabi] cmd/compile: fix re-export of parameters
+ 2021-01-04 290b4154b7 [dev.regabi] cmd/compile: fix ICE due to large uint64 constants
+ 2021-01-04 a30fd52884 [dev.regabi] cmd/compile: use ir.NewNameAt in SubstArgTypes
+ 2021-01-03 8fc44cf0fa [dev.regabi] cmd/compile: remove a couple CloneName calls
+ 2021-01-03 907a4bfdc7 [dev.regabi] cmd/compile: fix map assignment order
+ 2021-01-03 f2e6dab048 [dev.regabi] cmd/compile: remove walkReturn "common case" path
+ 2021-01-03 d36a6bf44d [dev.regabi] cmd/compile: improve walkReturn common case
+ 2021-01-03 a317067d65 [dev.regabi] cmd/compile: improve ascompatee
+ 2021-01-03 5d80a590a2 [dev.regabi] cmd/compile: simplify walkReturn
+ 2021-01-03 bb1b6c95c2 [dev.regabi] cmd/compile: remove Node.{,Set}Walkdef
+ 2021-01-03 57c426c9a5 [dev.regabi] cmd/compile: tighten typecheckdef to *ir.Name
+ 2021-01-03 b1747756e3 [dev.regabi] cmd/compile: reorganize escape analysis somewhat
+ 2021-01-02 f2538033c0 [dev.regabi] cmd/compile: remove Nodes.Set [generated]
+ 2021-01-02 2f2d4b4e68 [dev.regabi] cmd/compile: remove {Ptr,Set}Init from Node interface
+ 2021-01-01 1544a03198 [dev.regabi] cmd/compile: refactor redundant type conversion [generated]
+ 2021-01-01 7958a23ea3 [dev.regabi] cmd/compile: use *ir.Name where possible in inl.go
+ 2021-01-01 bfa97ba48f [dev.regabi] test: add another closure test case
+ 2021-01-01 67ad695416 [dev.regabi] cmd/compile: split escape analysis state
+ 2021-01-01 fad9a8b528 [dev.regabi] cmd/compile: simplify inlining of closures
+ 2021-01-01 7d55669847 [dev.regabi] cmd/compile: simplify dwarfgen.declPos
+ 2021-01-01 9ed1577779 [dev.regabi] cmd/compile: remove Func.ClosureEnter
+ 2021-01-01 ece345aa69 [dev.regabi] cmd/compile: expand documentation for Func.Closure{Vars,Enter}
+ 2021-01-01 6ddbc75efd [dev.regabi] cmd/compile: earlier deadcode removal
+ 2021-01-01 68e6fa4f68 [dev.regabi] cmd/compile: fix package-initialization order
+ 2021-01-01 3a4474cdfd [dev.regabi] cmd/compile: some more manual shuffling
+ 2021-01-01 0f1d2129c4 [dev.regabi] cmd/compile: reshuffle type-checking code [generated]
+ 2021-01-01 b8fd3440cd [dev.regabi] cmd/compile: report unused variables during typecheck
+ 2021-01-01 fd22df9905 [dev.regabi] cmd/compile: remove idempotent Name() calls [generated]
+ 2020-12-31 dfbcff80c6 [dev.regabi] cmd/compile: make copyExpr return *ir.Name directly
+ 2020-12-31 77fd81a3e6 [dev.regabi] cmd/compile: use names for keep alive variables in function call
+ 2020-12-31 8fe1197654 [dev.regabi] cmd/compile: remove Name.orig
+ 2020-12-31 477b049060 [dev.regabi] cmd/compile: fix printing of method expressions
+ 2020-12-30 178c667db2 [dev.regabi] cmd/compile: fix OSLICEARR comments
+ 2020-12-30 f0d99def5b [dev.regabi] cmd/compile: add newline to ir.Dump
+ 2020-12-30 451693af71 [dev.regabi] cmd/compile: simplify typecheckdef
+ 2020-12-30 0c1a899a6c [dev.regabi] cmd/compile: fix defined-pointer method call check
+ 2020-12-30 f9b67f76a5 [dev.regabi] cmd/compile: change ir.DoChildren to use bool result type
+ 2020-12-30 499851bac8 [dev.regabi] cmd/compile: generalize ir/mknode.go
+ 2020-12-30 82ab3d1448 [dev.regabi] cmd/compile: use *ir.Name for Decl.X
+ 2020-12-30 9958b7ed3e [dev.regabi] cmd/compile: unexport ir.FmtNode
+ 2020-12-29 f5816624cd [dev.regabi] cmd/compile: change AddrExpr.Alloc to AddrExpr.Prealloc
+ 2020-12-29 850aa7c60c [dev.regabi] cmd/compile: use *ir.Name instead of ir.Node for CaseClause.Var
+ 2020-12-29 37babc97bb [dev.regabi] cmd/compile: allow visitor visits *ir.Name
+ 2020-12-29 5cf3c87fa6 [dev.regabi] cmd/compile: generate case/comm clause functions in mknode.go
+ 2020-12-29 b3e1ec97fd [dev.regabi] cmd/compile: move new addrtaken bit back to the old name
+ 2020-12-29 0620c674dd [dev.regabi] cmd/compile: remove original addrtaken bit
+ 2020-12-29 0523d525ae [dev.regabi] cmd/compile: separate out address taken computation from typechecker
+ 2020-12-29 9ea272e5ec [dev.regabi] cmd/compile: simplify ir.Func somewhat
+ 2020-12-29 e40cb4d4ae [dev.regabi] cmd/compile: remove more unused code
+ 2020-12-29 6f30c95048 [dev.regabi] cmd/compile: remove unneeded indirection
+ 2020-12-29 171fc6f223 [dev.regabi] cmd/compile: remove workarounds for go/constant issues
+ 2020-12-29 33801cdc62 [dev.regabi] cmd/compile: use Ntype where possible
+ 2020-12-29 82ad3083f8 [dev.regabi] cmd/compile: remove typ from AssignOpStmt
+ 2020-12-29 e34c44a7c4 [dev.regabi] cmd/compile: refactoring typecheck arith
+ 2020-12-29 a5ec920160 [dev.regabi] cmd/compile: more Linksym cleanup
+ 2020-12-29 ec59b197d5 [dev.regabi] cmd/compile: rewrite to use linksym helpers [generated]
+ 2020-12-29 25c613c02d [dev.regabi] cmd/compile: add Linksym helpers
+ 2020-12-29 289da2b33e [dev.regabi] cmd/compile: move Node.Opt to Name
+ 2020-12-29 6acbae4fcc [dev.regabi] cmd/compile: address some ir TODOs
+ 2020-12-29 4629f6a51d [dev.regabi] cmd/compile: merge {Selector,CallPart,Method}Expr
+ 2020-12-29 e563715b30 [dev.regabi] cmd/compile: remove Sym.Importdef
+ 2020-12-29 3f370b75fb [dev.regabi] cmd/compile: cleanup //go:generate directives

Change-Id: Ibb254630924ac5873ebda6762ceb066f54a82649
Matthew Dempsky, 2021-01-05 10:10:56 -08:00
commit a8fe098a12
88 changed files with 3411 additions and 2776 deletions

@ -0,0 +1,152 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package deadcode
import (
"go/constant"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
)
func Func(fn *ir.Func) {
stmts(&fn.Body)
if len(fn.Body) == 0 {
return
}
for _, n := range fn.Body {
if len(n.Init()) > 0 {
return
}
switch n.Op() {
case ir.OIF:
n := n.(*ir.IfStmt)
if !ir.IsConst(n.Cond, constant.Bool) || len(n.Body) > 0 || len(n.Else) > 0 {
return
}
case ir.OFOR:
n := n.(*ir.ForStmt)
if !ir.IsConst(n.Cond, constant.Bool) || ir.BoolVal(n.Cond) {
return
}
default:
return
}
}
fn.Body = []ir.Node{ir.NewBlockStmt(base.Pos, nil)}
}
func stmts(nn *ir.Nodes) {
var lastLabel = -1
for i, n := range *nn {
if n != nil && n.Op() == ir.OLABEL {
lastLabel = i
}
}
for i, n := range *nn {
// Cut is set to true when all nodes after i'th position
// should be removed.
// In other words, it marks whole slice "tail" as dead.
cut := false
if n == nil {
continue
}
if n.Op() == ir.OIF {
n := n.(*ir.IfStmt)
n.Cond = expr(n.Cond)
if ir.IsConst(n.Cond, constant.Bool) {
var body ir.Nodes
if ir.BoolVal(n.Cond) {
n.Else = ir.Nodes{}
body = n.Body
} else {
n.Body = ir.Nodes{}
body = n.Else
}
// If "then" or "else" branch ends with panic or return statement,
// it is safe to remove all statements after this node.
// isterminating is not used to avoid goto-related complications.
// We must be careful not to deadcode-remove labels, as they
// might be the target of a goto. See issue 28616.
if body := body; len(body) != 0 {
switch body[len(body)-1].Op() {
case ir.ORETURN, ir.ORETJMP, ir.OPANIC:
if i > lastLabel {
cut = true
}
}
}
}
}
if len(n.Init()) != 0 {
stmts(n.(ir.InitNode).PtrInit())
}
switch n.Op() {
case ir.OBLOCK:
n := n.(*ir.BlockStmt)
stmts(&n.List)
case ir.OFOR:
n := n.(*ir.ForStmt)
stmts(&n.Body)
case ir.OIF:
n := n.(*ir.IfStmt)
stmts(&n.Body)
stmts(&n.Else)
case ir.ORANGE:
n := n.(*ir.RangeStmt)
stmts(&n.Body)
case ir.OSELECT:
n := n.(*ir.SelectStmt)
for _, cas := range n.Cases {
stmts(&cas.Body)
}
case ir.OSWITCH:
n := n.(*ir.SwitchStmt)
for _, cas := range n.Cases {
stmts(&cas.Body)
}
}
if cut {
*nn = (*nn)[:i+1]
break
}
}
}
func expr(n ir.Node) ir.Node {
// Perform dead-code elimination on short-circuited boolean
// expressions involving constants with the intent of
// producing a constant 'if' condition.
switch n.Op() {
case ir.OANDAND:
n := n.(*ir.LogicalExpr)
n.X = expr(n.X)
n.Y = expr(n.Y)
if ir.IsConst(n.X, constant.Bool) {
if ir.BoolVal(n.X) {
return n.Y // true && x => x
} else {
return n.X // false && x => false
}
}
case ir.OOROR:
n := n.(*ir.LogicalExpr)
n.X = expr(n.X)
n.Y = expr(n.Y)
if ir.IsConst(n.X, constant.Bool) {
if ir.BoolVal(n.X) {
return n.X // true || x => true
} else {
return n.Y // false || x => x
}
}
}
return n
}
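
The rewrite rules in expr (true && x => x, false && x => false, true || x
=> true, false || x => x) are sound because && and || short-circuit in Go,
so the discarded operand could never have executed anyway. A small
illustration of the whole pass on a hypothetical input (not code from this
CL):

	package p

	func expensive() bool { println("side effect"); return true }

	func f() {
		if false && expensive() { // expr folds the condition to plain false;
			println("never runs") // stmts then empties this branch
		}
		for false { // constant-false loop: trivially dead
		}
	}

After stmts runs, every top-level statement left in f is a constant-false
if or for with empty branches and no init list, so Func replaces the
entire body with a single empty block.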

@ -26,7 +26,7 @@ func Info(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.Scope,
fn := curfn.(*ir.Func)
if fn.Nname != nil {
expect := fn.Sym().Linksym()
expect := fn.Linksym()
if fnsym.ABI() == obj.ABI0 {
expect = fn.Sym().LinksymABI0()
}
@ -76,7 +76,7 @@ func Info(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.Scope,
if n.Op() != ir.ONAME { // might be OTYPE or OLITERAL
continue
}
switch n.Class_ {
switch n.Class {
case ir.PAUTO:
if !n.Used() {
// Text == nil -> generating abstract function
@ -90,7 +90,7 @@ func Info(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.Scope,
continue
}
apdecls = append(apdecls, n)
fnsym.Func().RecordAutoType(reflectdata.TypeSym(n.Type()).Linksym())
fnsym.Func().RecordAutoType(reflectdata.TypeLinksym(n.Type()))
}
}
@ -127,24 +127,7 @@ func Info(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.Scope,
}
func declPos(decl *ir.Name) src.XPos {
if decl.Name().Defn != nil && (decl.Name().Captured() || decl.Name().Byval()) {
// It's not clear which position is correct for captured variables here:
// * decl.Pos is the wrong position for captured variables, in the inner
// function, but it is the right position in the outer function.
// * decl.Name.Defn is nil for captured variables that were arguments
// on the outer function, however the decl.Pos for those seems to be
// correct.
// * decl.Name.Defn is the "wrong" thing for variables declared in the
// header of a type switch, it's their position in the header, rather
// than the position of the case statement. In principle this is the
// right thing, but here we prefer the latter because it makes each
// instance of the header variable local to the lexical block of its
// case statement.
// This code is probably wrong for type switch variables that are also
// captured.
return decl.Name().Defn.Pos()
}
return decl.Pos()
return decl.Canonical().Pos()
}
// createDwarfVars process fn, returning a list of DWARF variables and the
@ -185,7 +168,7 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir
if c == '.' || n.Type().IsUntyped() {
continue
}
if n.Class_ == ir.PPARAM && !ssagen.TypeOK(n.Type()) {
if n.Class == ir.PPARAM && !ssagen.TypeOK(n.Type()) {
// SSA-able args get location lists, and may move in and
// out of registers, so those are handled elsewhere.
// Autos and named output params seem to get handled
@ -200,10 +183,10 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir
typename := dwarf.InfoPrefix + types.TypeSymName(n.Type())
decls = append(decls, n)
abbrev := dwarf.DW_ABRV_AUTO_LOCLIST
isReturnValue := (n.Class_ == ir.PPARAMOUT)
if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT {
isReturnValue := (n.Class == ir.PPARAMOUT)
if n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT {
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
} else if n.Class_ == ir.PAUTOHEAP {
} else if n.Class == ir.PAUTOHEAP {
// If dcl in question has been promoted to heap, do a bit
// of extra work to recover original class (auto or param);
// see issue 30908. This ensures that we get the proper
@ -211,17 +194,17 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir
// misleading location for the param (we want pointer-to-heap
// and not stack).
// TODO(thanm): generate a better location expression
stackcopy := n.Name().Stackcopy
if stackcopy != nil && (stackcopy.Class_ == ir.PPARAM || stackcopy.Class_ == ir.PPARAMOUT) {
stackcopy := n.Stackcopy
if stackcopy != nil && (stackcopy.Class == ir.PPARAM || stackcopy.Class == ir.PPARAMOUT) {
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
isReturnValue = (stackcopy.Class_ == ir.PPARAMOUT)
isReturnValue = (stackcopy.Class == ir.PPARAMOUT)
}
}
inlIndex := 0
if base.Flag.GenDwarfInl > 1 {
if n.Name().InlFormal() || n.Name().InlLocal() {
if n.InlFormal() || n.InlLocal() {
inlIndex = posInlIndex(n.Pos()) + 1
if n.Name().InlFormal() {
if n.InlFormal() {
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
}
}
@ -240,7 +223,7 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir
ChildIndex: -1,
})
// Record the go type of n to ensure that it gets emitted by the linker.
fnsym.Func().RecordAutoType(reflectdata.TypeSym(n.Type()).Linksym())
fnsym.Func().RecordAutoType(reflectdata.TypeLinksym(n.Type()))
}
return decls, vars
@ -289,7 +272,7 @@ func createSimpleVar(fnsym *obj.LSym, n *ir.Name) *dwarf.Var {
var abbrev int
var offs int64
switch n.Class_ {
switch n.Class {
case ir.PAUTO:
offs = n.FrameOffset()
abbrev = dwarf.DW_ABRV_AUTO
@ -305,16 +288,16 @@ func createSimpleVar(fnsym *obj.LSym, n *ir.Name) *dwarf.Var {
abbrev = dwarf.DW_ABRV_PARAM
offs = n.FrameOffset() + base.Ctxt.FixedFrameSize()
default:
base.Fatalf("createSimpleVar unexpected class %v for node %v", n.Class_, n)
base.Fatalf("createSimpleVar unexpected class %v for node %v", n.Class, n)
}
typename := dwarf.InfoPrefix + types.TypeSymName(n.Type())
delete(fnsym.Func().Autot, reflectdata.TypeSym(n.Type()).Linksym())
delete(fnsym.Func().Autot, reflectdata.TypeLinksym(n.Type()))
inlIndex := 0
if base.Flag.GenDwarfInl > 1 {
if n.Name().InlFormal() || n.Name().InlLocal() {
if n.InlFormal() || n.InlLocal() {
inlIndex = posInlIndex(n.Pos()) + 1
if n.Name().InlFormal() {
if n.InlFormal() {
abbrev = dwarf.DW_ABRV_PARAM
}
}
@ -322,8 +305,8 @@ func createSimpleVar(fnsym *obj.LSym, n *ir.Name) *dwarf.Var {
declpos := base.Ctxt.InnermostPos(declPos(n))
return &dwarf.Var{
Name: n.Sym().Name,
IsReturnValue: n.Class_ == ir.PPARAMOUT,
IsInlFormal: n.Name().InlFormal(),
IsReturnValue: n.Class == ir.PPARAMOUT,
IsInlFormal: n.InlFormal(),
Abbrev: abbrev,
StackOffset: int32(offs),
Type: base.Ctxt.Lookup(typename),
@ -367,7 +350,7 @@ func createComplexVar(fnsym *obj.LSym, fn *ir.Func, varID ssa.VarID) *dwarf.Var
n := debug.Vars[varID]
var abbrev int
switch n.Class_ {
switch n.Class {
case ir.PAUTO:
abbrev = dwarf.DW_ABRV_AUTO_LOCLIST
case ir.PPARAM, ir.PPARAMOUT:
@ -376,14 +359,14 @@ func createComplexVar(fnsym *obj.LSym, fn *ir.Func, varID ssa.VarID) *dwarf.Var
return nil
}
gotype := reflectdata.TypeSym(n.Type()).Linksym()
gotype := reflectdata.TypeLinksym(n.Type())
delete(fnsym.Func().Autot, gotype)
typename := dwarf.InfoPrefix + gotype.Name[len("type."):]
inlIndex := 0
if base.Flag.GenDwarfInl > 1 {
if n.Name().InlFormal() || n.Name().InlLocal() {
if n.InlFormal() || n.InlLocal() {
inlIndex = posInlIndex(n.Pos()) + 1
if n.Name().InlFormal() {
if n.InlFormal() {
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
}
}
@ -391,8 +374,8 @@ func createComplexVar(fnsym *obj.LSym, fn *ir.Func, varID ssa.VarID) *dwarf.Var
declpos := base.Ctxt.InnermostPos(n.Pos())
dvar := &dwarf.Var{
Name: n.Sym().Name,
IsReturnValue: n.Class_ == ir.PPARAMOUT,
IsInlFormal: n.Name().InlFormal(),
IsReturnValue: n.Class == ir.PPARAMOUT,
IsInlFormal: n.InlFormal(),
Abbrev: abbrev,
Type: base.Ctxt.Lookup(typename),
// The stack offset is used as a sorting key, so for decomposed
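
Several hunks above rewrite fn.Sym().Linksym() to fn.Linksym() and
reflectdata.TypeSym(t).Linksym() to reflectdata.TypeLinksym(t), per the
"add Linksym helpers" CL in the merge list. A plausible shape for those
helpers, inferred from the call sites rather than quoted from the CL:

	// In package ir: one-line wrappers over the Sym().Linksym() chain.
	func (f *Func) Linksym() *obj.LSym { return f.Sym().Linksym() }
	func (n *Name) Linksym() *obj.LSym { return n.Sym().Linksym() }

	// In package reflectdata:
	func TypeLinksym(t *types.Type) *obj.LSym { return TypeSym(t).Linksym() }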

@ -85,20 +85,29 @@ import (
// u[2], etc. However, we do record the implicit dereference involved
// in indexing a slice.
type escape struct {
// A batch holds escape analysis state that's shared across an entire
// batch of functions being analyzed at once.
type batch struct {
allLocs []*location
labels map[*types.Sym]labelState // known labels
curfn *ir.Func
heapLoc location
blankLoc location
}
// An escape holds state specific to a single function being analyzed
// within a batch.
type escape struct {
*batch
curfn *ir.Func // function being analyzed
labels map[*types.Sym]labelState // known labels
// loopDepth counts the current loop nesting depth within
// curfn. It increments within each "for" loop and at each
// label with a corresponding backwards "goto" (i.e.,
// unstructured loop).
loopDepth int
heapLoc location
blankLoc location
}
// A location represents an abstract location that stores a Go
@ -165,12 +174,16 @@ func Fmt(n ir.Node) string {
text = fmt.Sprintf("esc(%d)", n.Esc())
}
if e, ok := n.Opt().(*location); ok && e.loopDepth != 0 {
if text != "" {
text += " "
if n.Op() == ir.ONAME {
n := n.(*ir.Name)
if loc, ok := n.Opt.(*location); ok && loc.loopDepth != 0 {
if text != "" {
text += " "
}
text += fmt.Sprintf("ld(%d)", loc.loopDepth)
}
text += fmt.Sprintf("ld(%d)", e.loopDepth)
}
return text
}
@ -183,23 +196,33 @@ func Batch(fns []*ir.Func, recursive bool) {
}
}
var e escape
e.heapLoc.escapes = true
var b batch
b.heapLoc.escapes = true
// Construct data-flow graph from syntax trees.
for _, fn := range fns {
e.initFunc(fn)
b.initFunc(fn)
}
for _, fn := range fns {
e.walkFunc(fn)
if !fn.IsHiddenClosure() {
b.walkFunc(fn)
}
}
e.curfn = nil
e.walkAll()
e.finish(fns)
b.walkAll()
b.finish(fns)
}
func (e *escape) initFunc(fn *ir.Func) {
func (b *batch) with(fn *ir.Func) *escape {
return &escape{
batch: b,
curfn: fn,
loopDepth: 1,
}
}
func (b *batch) initFunc(fn *ir.Func) {
e := b.with(fn)
if fn.Esc() != escFuncUnknown {
base.Fatalf("unexpected node: %v", fn)
}
@ -208,9 +231,6 @@ func (e *escape) initFunc(fn *ir.Func) {
ir.Dump("escAnalyze", fn)
}
e.curfn = fn
e.loopDepth = 1
// Allocate locations for local variables.
for _, dcl := range fn.Dcl {
if dcl.Op() == ir.ONAME {
@ -219,7 +239,8 @@ func (e *escape) initFunc(fn *ir.Func) {
}
}
func (e *escape) walkFunc(fn *ir.Func) {
func (b *batch) walkFunc(fn *ir.Func) {
e := b.with(fn)
fn.SetEsc(escFuncStarted)
// Identify labels that mark the head of an unstructured loop.
@ -242,8 +263,6 @@ func (e *escape) walkFunc(fn *ir.Func) {
}
})
e.curfn = fn
e.loopDepth = 1
e.block(fn.Body)
if len(e.labels) != 0 {
@ -349,54 +368,52 @@ func (e *escape) stmt(n ir.Node) {
case ir.ORANGE:
// for Key, Value = range X { Body }
n := n.(*ir.RangeStmt)
// X is evaluated outside the loop.
tmp := e.newLoc(nil, false)
e.expr(tmp.asHole(), n.X)
e.loopDepth++
e.addr(n.Key)
k := e.addr(n.Value)
ks := e.addrs([]ir.Node{n.Key, n.Value})
if n.X.Type().IsArray() {
e.flow(ks[1].note(n, "range"), tmp)
} else {
e.flow(ks[1].deref(n, "range-deref"), tmp)
}
e.block(n.Body)
e.loopDepth--
// X is evaluated outside the loop.
if n.X.Type().IsArray() {
k = k.note(n, "range")
} else {
k = k.deref(n, "range-deref")
}
e.expr(e.later(k), n.X)
case ir.OSWITCH:
n := n.(*ir.SwitchStmt)
typesw := n.Tag != nil && n.Tag.Op() == ir.OTYPESW
var ks []hole
for _, cas := range n.Cases { // cases
if typesw && n.Tag.(*ir.TypeSwitchGuard).Tag != nil {
cv := cas.Var
k := e.dcl(cv) // type switch variables have no ODCL.
if cv.Type().HasPointers() {
ks = append(ks, k.dotType(cv.Type(), cas, "switch case"))
if guard, ok := n.Tag.(*ir.TypeSwitchGuard); ok {
var ks []hole
if guard.Tag != nil {
for _, cas := range n.Cases {
cv := cas.Var
k := e.dcl(cv) // type switch variables have no ODCL.
if cv.Type().HasPointers() {
ks = append(ks, k.dotType(cv.Type(), cas, "switch case"))
}
}
}
e.discards(cas.List)
e.block(cas.Body)
}
if typesw {
e.expr(e.teeHole(ks...), n.Tag.(*ir.TypeSwitchGuard).X)
} else {
e.discard(n.Tag)
}
for _, cas := range n.Cases {
e.discards(cas.List)
e.block(cas.Body)
}
case ir.OSELECT:
n := n.(*ir.SelectStmt)
for _, cas := range n.Cases {
e.stmt(cas.Comm)
e.block(cas.Body)
}
case ir.OSELRECV2:
n := n.(*ir.AssignListStmt)
e.assign(n.Lhs[0], n.Rhs[0], "selrecv", n)
e.assign(n.Lhs[1], nil, "selrecv", n)
case ir.ORECV:
// TODO(mdempsky): Consider e.discard(n.Left).
n := n.(*ir.UnaryExpr)
@ -408,28 +425,24 @@ func (e *escape) stmt(n ir.Node) {
case ir.OAS:
n := n.(*ir.AssignStmt)
e.assign(n.X, n.Y, "assign", n)
e.assignList([]ir.Node{n.X}, []ir.Node{n.Y}, "assign", n)
case ir.OASOP:
n := n.(*ir.AssignOpStmt)
e.assign(n.X, n.Y, "assign", n)
// TODO(mdempsky): Worry about OLSH/ORSH?
e.assignList([]ir.Node{n.X}, []ir.Node{n.Y}, "assign", n)
case ir.OAS2:
n := n.(*ir.AssignListStmt)
for i, nl := range n.Lhs {
e.assign(nl, n.Rhs[i], "assign-pair", n)
}
e.assignList(n.Lhs, n.Rhs, "assign-pair", n)
case ir.OAS2DOTTYPE: // v, ok = x.(type)
n := n.(*ir.AssignListStmt)
e.assign(n.Lhs[0], n.Rhs[0], "assign-pair-dot-type", n)
e.assign(n.Lhs[1], nil, "assign-pair-dot-type", n)
e.assignList(n.Lhs, n.Rhs, "assign-pair-dot-type", n)
case ir.OAS2MAPR: // v, ok = m[k]
n := n.(*ir.AssignListStmt)
e.assign(n.Lhs[0], n.Rhs[0], "assign-pair-mapr", n)
e.assign(n.Lhs[1], nil, "assign-pair-mapr", n)
case ir.OAS2RECV: // v, ok = <-ch
e.assignList(n.Lhs, n.Rhs, "assign-pair-mapr", n)
case ir.OAS2RECV, ir.OSELRECV2: // v, ok = <-ch
n := n.(*ir.AssignListStmt)
e.assign(n.Lhs[0], n.Rhs[0], "assign-pair-receive", n)
e.assign(n.Lhs[1], nil, "assign-pair-receive", n)
e.assignList(n.Lhs, n.Rhs, "assign-pair-receive", n)
case ir.OAS2FUNC:
n := n.(*ir.AssignListStmt)
@ -438,9 +451,11 @@ func (e *escape) stmt(n ir.Node) {
case ir.ORETURN:
n := n.(*ir.ReturnStmt)
results := e.curfn.Type().Results().FieldSlice()
for i, v := range n.Results {
e.assign(ir.AsNode(results[i].Nname), v, "return", n)
dsts := make([]ir.Node, len(results))
for i, res := range results {
dsts[i] = res.Nname.(*ir.Name)
}
e.assignList(dsts, n.Results, "return", n)
case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER, ir.OCLOSE, ir.OCOPY, ir.ODELETE, ir.OPANIC, ir.OPRINT, ir.OPRINTN, ir.ORECOVER:
e.call(nil, n, nil)
case ir.OGO, ir.ODEFER:
@ -504,7 +519,7 @@ func (e *escape) exprSkipInit(k hole, n ir.Node) {
case ir.ONAME:
n := n.(*ir.Name)
if n.Class_ == ir.PFUNC || n.Class_ == ir.PEXTERN {
if n.Class == ir.PFUNC || n.Class == ir.PEXTERN {
return
}
e.flow(k, e.oldLoc(n))
@ -612,10 +627,10 @@ func (e *escape) exprSkipInit(k hole, n ir.Node) {
// Flow the receiver argument to both the closure and
// to the receiver parameter.
n := n.(*ir.CallPartExpr)
n := n.(*ir.SelectorExpr)
closureK := e.spill(k, n)
m := n.Method
m := n.Selection
// We don't know how the method value will be called
// later, so conservatively assume the result
@ -676,9 +691,13 @@ func (e *escape) exprSkipInit(k hole, n ir.Node) {
case ir.OCLOSURE:
n := n.(*ir.ClosureExpr)
k = e.spill(k, n)
if fn := n.Func; fn.IsHiddenClosure() {
e.walkFunc(fn)
}
// Link addresses of captured variables to closure.
k = e.spill(k, n)
for _, v := range n.Func.ClosureVars {
k := k
if !v.Byval() {
@ -772,13 +791,13 @@ func (e *escape) addr(n ir.Node) hole {
base.Fatalf("unexpected addr: %v", n)
case ir.ONAME:
n := n.(*ir.Name)
if n.Class_ == ir.PEXTERN {
if n.Class == ir.PEXTERN {
break
}
k = e.oldLoc(n).asHole()
case ir.ONAMEOFFSET:
n := n.(*ir.NameOffsetExpr)
e.addr(n.Name_)
k = e.addr(n.Name_)
case ir.ODOT:
n := n.(*ir.SelectorExpr)
k = e.addr(n.X)
@ -798,10 +817,6 @@ func (e *escape) addr(n ir.Node) hole {
e.assignHeap(n.Index, "key of map put", n)
}
if !n.Type().HasPointers() {
k = e.discardHole()
}
return k
}
@ -813,6 +828,16 @@ func (e *escape) addrs(l ir.Nodes) []hole {
return ks
}
func (e *escape) assignList(dsts, srcs []ir.Node, why string, where ir.Node) {
for i, dst := range dsts {
var src ir.Node
if i < len(srcs) {
src = srcs[i]
}
e.assign(dst, src, why, where)
}
}
// assign evaluates the assignment dst = src.
func (e *escape) assign(dst, src ir.Node, why string, where ir.Node) {
// Filter out some no-op assignments for escape analysis.
@ -874,7 +899,7 @@ func (e *escape) call(ks []hole, call, where ir.Node) {
switch call.Op() {
case ir.OCALLFUNC:
switch v := ir.StaticValue(call.X); {
case v.Op() == ir.ONAME && v.(*ir.Name).Class_ == ir.PFUNC:
case v.Op() == ir.ONAME && v.(*ir.Name).Class == ir.PFUNC:
fn = v.(*ir.Name)
case v.Op() == ir.OCLOSURE:
fn = v.(*ir.ClosureExpr).Func.Nname
@ -1097,7 +1122,7 @@ func (e *escape) teeHole(ks ...hole) hole {
return loc.asHole()
}
func (e *escape) dcl(n ir.Node) hole {
func (e *escape) dcl(n *ir.Name) hole {
loc := e.oldLoc(n)
loc.loopDepth = e.loopDepth
return loc.asHole()
@ -1121,19 +1146,6 @@ func (e *escape) later(k hole) hole {
return loc.asHole()
}
// canonicalNode returns the canonical *Node that n logically
// represents.
func canonicalNode(n ir.Node) ir.Node {
if n != nil && n.Op() == ir.ONAME && n.Name().IsClosureVar() {
n = n.Name().Defn
if n.Name().IsClosureVar() {
base.Fatalf("still closure var")
}
}
return n
}
func (e *escape) newLoc(n ir.Node, transient bool) *location {
if e.curfn == nil {
base.Fatalf("e.curfn isn't set")
@ -1142,7 +1154,9 @@ func (e *escape) newLoc(n ir.Node, transient bool) *location {
base.ErrorfAt(n.Pos(), "%v is incomplete (or unallocatable); stack allocation disallowed", n.Type())
}
n = canonicalNode(n)
if n != nil && n.Op() == ir.ONAME {
n = n.(*ir.Name).Canonical()
}
loc := &location{
n: n,
curfn: e.curfn,
@ -1151,15 +1165,17 @@ func (e *escape) newLoc(n ir.Node, transient bool) *location {
}
e.allLocs = append(e.allLocs, loc)
if n != nil {
if n.Op() == ir.ONAME && n.Name().Curfn != e.curfn {
if n.Op() == ir.ONAME {
n := n.(*ir.Name)
base.Fatalf("curfn mismatch: %v != %v", n.Name().Curfn, e.curfn)
}
if n.Curfn != e.curfn {
base.Fatalf("curfn mismatch: %v != %v", n.Curfn, e.curfn)
}
if n.Opt() != nil {
base.Fatalf("%v already has a location", n)
if n.Opt != nil {
base.Fatalf("%v already has a location", n)
}
n.Opt = loc
}
n.SetOpt(loc)
if why := HeapAllocReason(n); why != "" {
e.flow(e.heapHole().addr(n, why), loc)
@ -1168,9 +1184,8 @@ func (e *escape) newLoc(n ir.Node, transient bool) *location {
return loc
}
func (e *escape) oldLoc(n ir.Node) *location {
n = canonicalNode(n)
return n.Opt().(*location)
func (b *batch) oldLoc(n *ir.Name) *location {
return n.Canonical().Opt.(*location)
}
func (l *location) asHole() hole {
@ -1210,7 +1225,7 @@ func (e *escape) discardHole() hole { return e.blankLoc.asHole() }
// walkAll computes the minimal dereferences between all pairs of
// locations.
func (e *escape) walkAll() {
func (b *batch) walkAll() {
// We use a work queue to keep track of locations that we need
// to visit, and repeatedly walk until we reach a fixed point.
//
@ -1220,7 +1235,7 @@ func (e *escape) walkAll() {
// happen at most once. So we take Θ(len(e.allLocs)) walks.
// LIFO queue, has enough room for e.allLocs and e.heapLoc.
todo := make([]*location, 0, len(e.allLocs)+1)
todo := make([]*location, 0, len(b.allLocs)+1)
enqueue := func(loc *location) {
if !loc.queued {
todo = append(todo, loc)
@ -1228,10 +1243,10 @@ func (e *escape) walkAll() {
}
}
for _, loc := range e.allLocs {
for _, loc := range b.allLocs {
enqueue(loc)
}
enqueue(&e.heapLoc)
enqueue(&b.heapLoc)
var walkgen uint32
for len(todo) > 0 {
@ -1240,13 +1255,13 @@ func (e *escape) walkAll() {
root.queued = false
walkgen++
e.walkOne(root, walkgen, enqueue)
b.walkOne(root, walkgen, enqueue)
}
}
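// Worked micro-example of the derefs metric (illustration, not CL text):
// an address-of edge carries derefs = -1 and a dereference carries +1, so
//
//	p := &x // edge x -> p with derefs = -1
//	y := *p // edge p -> y with derefs = +1
//
// gives a minimal derefs of 0 from x to y: y holds a copy of x's value.
// A path whose total stays negative means the destination can hold x's
// address, which forces x to escape when the destination outlives it.
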
// walkOne computes the minimal number of dereferences from root to
// all other locations.
func (e *escape) walkOne(root *location, walkgen uint32, enqueue func(*location)) {
func (b *batch) walkOne(root *location, walkgen uint32, enqueue func(*location)) {
// The data flow graph has negative edges (from addressing
// operations), so we use the Bellman-Ford algorithm. However,
// we don't have to worry about infinite negative cycles since
@ -1281,7 +1296,7 @@ func (e *escape) walkOne(root *location, walkgen uint32, enqueue func(*location)
}
}
if e.outlives(root, l) {
if b.outlives(root, l) {
// l's value flows to root. If l is a function
// parameter and root is the heap or a
// corresponding result parameter, then record
@ -1290,12 +1305,13 @@ func (e *escape) walkOne(root *location, walkgen uint32, enqueue func(*location)
if l.isName(ir.PPARAM) {
if (logopt.Enabled() || base.Flag.LowerM >= 2) && !l.escapes {
if base.Flag.LowerM >= 2 {
fmt.Printf("%s: parameter %v leaks to %s with derefs=%d:\n", base.FmtPos(l.n.Pos()), l.n, e.explainLoc(root), derefs)
fmt.Printf("%s: parameter %v leaks to %s with derefs=%d:\n", base.FmtPos(l.n.Pos()), l.n, b.explainLoc(root), derefs)
}
explanation := e.explainPath(root, l)
explanation := b.explainPath(root, l)
if logopt.Enabled() {
logopt.LogOpt(l.n.Pos(), "leak", "escape", ir.FuncName(e.curfn),
fmt.Sprintf("parameter %v leaks to %s with derefs=%d", l.n, e.explainLoc(root), derefs), explanation)
var e_curfn *ir.Func // TODO(mdempsky): Fix.
logopt.LogOpt(l.n.Pos(), "leak", "escape", ir.FuncName(e_curfn),
fmt.Sprintf("parameter %v leaks to %s with derefs=%d", l.n, b.explainLoc(root), derefs), explanation)
}
}
l.leakTo(root, derefs)
@ -1309,9 +1325,10 @@ func (e *escape) walkOne(root *location, walkgen uint32, enqueue func(*location)
if base.Flag.LowerM >= 2 {
fmt.Printf("%s: %v escapes to heap:\n", base.FmtPos(l.n.Pos()), l.n)
}
explanation := e.explainPath(root, l)
explanation := b.explainPath(root, l)
if logopt.Enabled() {
logopt.LogOpt(l.n.Pos(), "escape", "escape", ir.FuncName(e.curfn), fmt.Sprintf("%v escapes to heap", l.n), explanation)
var e_curfn *ir.Func // TODO(mdempsky): Fix.
logopt.LogOpt(l.n.Pos(), "escape", "escape", ir.FuncName(e_curfn), fmt.Sprintf("%v escapes to heap", l.n), explanation)
}
}
l.escapes = true
@ -1337,7 +1354,7 @@ func (e *escape) walkOne(root *location, walkgen uint32, enqueue func(*location)
}
// explainPath prints an explanation of how src flows to the walk root.
func (e *escape) explainPath(root, src *location) []*logopt.LoggedOpt {
func (b *batch) explainPath(root, src *location) []*logopt.LoggedOpt {
visited := make(map[*location]bool)
pos := base.FmtPos(src.n.Pos())
var explanation []*logopt.LoggedOpt
@ -1356,7 +1373,7 @@ func (e *escape) explainPath(root, src *location) []*logopt.LoggedOpt {
base.Fatalf("path inconsistency: %v != %v", edge.src, src)
}
explanation = e.explainFlow(pos, dst, src, edge.derefs, edge.notes, explanation)
explanation = b.explainFlow(pos, dst, src, edge.derefs, edge.notes, explanation)
if dst == root {
break
@ -1367,14 +1384,14 @@ func (e *escape) explainPath(root, src *location) []*logopt.LoggedOpt {
return explanation
}
func (e *escape) explainFlow(pos string, dst, srcloc *location, derefs int, notes *note, explanation []*logopt.LoggedOpt) []*logopt.LoggedOpt {
func (b *batch) explainFlow(pos string, dst, srcloc *location, derefs int, notes *note, explanation []*logopt.LoggedOpt) []*logopt.LoggedOpt {
ops := "&"
if derefs >= 0 {
ops = strings.Repeat("*", derefs)
}
print := base.Flag.LowerM >= 2
flow := fmt.Sprintf(" flow: %s = %s%v:", e.explainLoc(dst), ops, e.explainLoc(srcloc))
flow := fmt.Sprintf(" flow: %s = %s%v:", b.explainLoc(dst), ops, b.explainLoc(srcloc))
if print {
fmt.Printf("%s:%s\n", pos, flow)
}
@ -1385,7 +1402,8 @@ func (e *escape) explainFlow(pos string, dst, srcloc *location, derefs int, note
} else if srcloc != nil && srcloc.n != nil {
epos = srcloc.n.Pos()
}
explanation = append(explanation, logopt.NewLoggedOpt(epos, "escflow", "escape", ir.FuncName(e.curfn), flow))
var e_curfn *ir.Func // TODO(mdempsky): Fix.
explanation = append(explanation, logopt.NewLoggedOpt(epos, "escflow", "escape", ir.FuncName(e_curfn), flow))
}
for note := notes; note != nil; note = note.next {
@ -1393,15 +1411,16 @@ func (e *escape) explainFlow(pos string, dst, srcloc *location, derefs int, note
fmt.Printf("%s: from %v (%v) at %s\n", pos, note.where, note.why, base.FmtPos(note.where.Pos()))
}
if logopt.Enabled() {
explanation = append(explanation, logopt.NewLoggedOpt(note.where.Pos(), "escflow", "escape", ir.FuncName(e.curfn),
var e_curfn *ir.Func // TODO(mdempsky): Fix.
explanation = append(explanation, logopt.NewLoggedOpt(note.where.Pos(), "escflow", "escape", ir.FuncName(e_curfn),
fmt.Sprintf(" from %v (%v)", note.where, note.why)))
}
}
return explanation
}
func (e *escape) explainLoc(l *location) string {
if l == &e.heapLoc {
func (b *batch) explainLoc(l *location) string {
if l == &b.heapLoc {
return "{heap}"
}
if l.n == nil {
@ -1416,7 +1435,7 @@ func (e *escape) explainLoc(l *location) string {
// outlives reports whether values stored in l may survive beyond
// other's lifetime if stack allocated.
func (e *escape) outlives(l, other *location) bool {
func (b *batch) outlives(l, other *location) bool {
// The heap outlives everything.
if l.escapes {
return true
@ -1497,7 +1516,7 @@ func (l *location) leakTo(sink *location, derefs int) {
l.paramEsc.AddHeap(derefs)
}
func (e *escape) finish(fns []*ir.Func) {
func (b *batch) finish(fns []*ir.Func) {
// Record parameter tags for package export data.
for _, fn := range fns {
fn.SetEsc(escFuncTagged)
@ -1506,17 +1525,20 @@ func (e *escape) finish(fns []*ir.Func) {
for _, fs := range &types.RecvsParams {
for _, f := range fs(fn.Type()).Fields().Slice() {
narg++
f.Note = e.paramTag(fn, narg, f)
f.Note = b.paramTag(fn, narg, f)
}
}
}
for _, loc := range e.allLocs {
for _, loc := range b.allLocs {
n := loc.n
if n == nil {
continue
}
n.SetOpt(nil)
if n.Op() == ir.ONAME {
n := n.(*ir.Name)
n.Opt = nil
}
// Update n.Esc based on escape analysis results.
@ -1526,7 +1548,8 @@ func (e *escape) finish(fns []*ir.Func) {
base.WarnfAt(n.Pos(), "%v escapes to heap", n)
}
if logopt.Enabled() {
logopt.LogOpt(n.Pos(), "escape", "escape", ir.FuncName(e.curfn))
var e_curfn *ir.Func // TODO(mdempsky): Fix.
logopt.LogOpt(n.Pos(), "escape", "escape", ir.FuncName(e_curfn))
}
}
n.SetEsc(ir.EscHeap)
@ -1542,7 +1565,7 @@ func (e *escape) finish(fns []*ir.Func) {
n := n.(*ir.ClosureExpr)
n.SetTransient(true)
case ir.OCALLPART:
n := n.(*ir.CallPartExpr)
n := n.(*ir.SelectorExpr)
n.SetTransient(true)
case ir.OSLICELIT:
n := n.(*ir.CompLitExpr)
@ -1554,7 +1577,7 @@ func (e *escape) finish(fns []*ir.Func) {
}
func (l *location) isName(c ir.Class) bool {
return l.n != nil && l.n.Op() == ir.ONAME && l.n.(*ir.Name).Class_ == c
return l.n != nil && l.n.Op() == ir.ONAME && l.n.(*ir.Name).Class == c
}
const numEscResults = 7
@ -1847,7 +1870,7 @@ func HeapAllocReason(n ir.Node) string {
// Parameters are always passed via the stack.
if n.Op() == ir.ONAME {
n := n.(*ir.Name)
if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT {
if n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT {
return ""
}
}
@ -1863,7 +1886,7 @@ func HeapAllocReason(n ir.Node) string {
if n.Op() == ir.OCLOSURE && typecheck.ClosureType(n.(*ir.ClosureExpr)).Size() >= ir.MaxImplicitStackVarSize {
return "too large for stack"
}
if n.Op() == ir.OCALLPART && typecheck.PartialCallType(n.(*ir.CallPartExpr)).Size() >= ir.MaxImplicitStackVarSize {
if n.Op() == ir.OCALLPART && typecheck.PartialCallType(n.(*ir.SelectorExpr)).Size() >= ir.MaxImplicitStackVarSize {
return "too large for stack"
}
@ -1904,7 +1927,7 @@ func addrescapes(n ir.Node) {
// if this is a tmpname (PAUTO), it was tagged by tmpname as not escaping.
// on PPARAM it means something different.
if n.Class_ == ir.PAUTO && n.Esc() == ir.EscNever {
if n.Class == ir.PAUTO && n.Esc() == ir.EscNever {
break
}
@ -1914,7 +1937,7 @@ func addrescapes(n ir.Node) {
break
}
if n.Class_ != ir.PPARAM && n.Class_ != ir.PPARAMOUT && n.Class_ != ir.PAUTO {
if n.Class != ir.PPARAM && n.Class != ir.PPARAMOUT && n.Class != ir.PAUTO {
break
}
@ -1968,7 +1991,7 @@ func moveToHeap(n *ir.Name) {
if base.Flag.CompilingRuntime {
base.Errorf("%v escapes to heap, not allowed in runtime", n)
}
if n.Class_ == ir.PAUTOHEAP {
if n.Class == ir.PAUTOHEAP {
ir.Dump("n", n)
base.Fatalf("double move to heap")
}
@ -1987,7 +2010,7 @@ func moveToHeap(n *ir.Name) {
// Parameters have a local stack copy used at function start/end
// in addition to the copy in the heap that may live longer than
// the function.
if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT {
if n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT {
if n.FrameOffset() == types.BADWIDTH {
base.Fatalf("addrescapes before param assignment")
}
@ -1999,9 +2022,9 @@ func moveToHeap(n *ir.Name) {
stackcopy := typecheck.NewName(n.Sym())
stackcopy.SetType(n.Type())
stackcopy.SetFrameOffset(n.FrameOffset())
stackcopy.Class_ = n.Class_
stackcopy.Class = n.Class
stackcopy.Heapaddr = heapaddr
if n.Class_ == ir.PPARAMOUT {
if n.Class == ir.PPARAMOUT {
// Make sure the pointer to the heap copy is kept live throughout the function.
// The function could panic at any point, and then a defer could recover.
// Thus, we need the pointer to the heap copy always available so the
@ -2023,7 +2046,7 @@ func moveToHeap(n *ir.Name) {
}
// Parameters are before locals, so can stop early.
// This limits the search even in functions with many local variables.
if d.Class_ == ir.PAUTO {
if d.Class == ir.PAUTO {
break
}
}
@ -2034,7 +2057,7 @@ func moveToHeap(n *ir.Name) {
}
// Modify n in place so that uses of n now mean indirection of the heapaddr.
n.Class_ = ir.PAUTOHEAP
n.Class = ir.PAUTOHEAP
n.SetFrameOffset(0)
n.Heapaddr = heapaddr
n.SetEsc(ir.EscHeap)
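// Illustration (hypothetical input, not CL text): in
//
//	func f() *int { x := 1; return &x }
//
// escape analysis reaches moveToHeap(x): x's class becomes PAUTOHEAP,
// x.Heapaddr points at the heap copy, and every later use of x compiles
// as an indirection through Heapaddr, per the comment above.
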
@ -2052,7 +2075,7 @@ const UnsafeUintptrNote = "unsafe-uintptr"
// marked go:uintptrescapes.
const UintptrEscapesNote = "uintptr-escapes"
func (e *escape) paramTag(fn *ir.Func, narg int, f *types.Field) string {
func (b *batch) paramTag(fn *ir.Func, narg int, f *types.Field) string {
name := func() string {
if f.Sym != nil {
return f.Sym.Name
@ -2122,8 +2145,8 @@ func (e *escape) paramTag(fn *ir.Func, narg int, f *types.Field) string {
return esc.Encode()
}
n := ir.AsNode(f.Nname)
loc := e.oldLoc(n)
n := f.Nname.(*ir.Name)
loc := b.oldLoc(n)
esc := loc.paramEsc
esc.Optimize()
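
The canonicalNode helper deleted above survives as a method on *ir.Name:
newLoc now calls n.(*ir.Name).Canonical() and oldLoc reads
n.Canonical().Opt, matching "add Name.Canonical and move Byval" in the
merge list. A sketch of the method, mirroring the deleted helper rather
than quoting the CL:

	// Canonical returns the Name that n logically represents: for a
	// closure variable, that is the captured Name in the enclosing
	// function.
	func (n *Name) Canonical() *Name {
		if n.IsClosureVar() {
			n = n.Defn.(*Name)
			if n.IsClosureVar() {
				base.Fatalf("still closure var")
			}
		}
		return n
	}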

@ -7,7 +7,6 @@ package gc
import (
"bufio"
"cmd/compile/internal/base"
"cmd/compile/internal/reflectdata"
"cmd/compile/internal/ssagen"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
@ -39,13 +38,7 @@ func TestMain(m *testing.M) {
base.Ctxt.Bso = bufio.NewWriter(os.Stdout)
types.PtrSize = ssagen.Arch.LinkArch.PtrSize
types.RegSize = ssagen.Arch.LinkArch.RegSize
types.TypeLinkSym = func(t *types.Type) *obj.LSym {
return reflectdata.TypeSym(t).Linksym()
}
types.TypeLinkSym = func(t *types.Type) *obj.LSym {
return reflectdata.TypeSym(t).Linksym()
}
typecheck.Init()
typecheck.InitUniverse()
os.Exit(m.Run())
}

@ -21,7 +21,7 @@ import (
func mkParamResultField(t *types.Type, s *types.Sym, which ir.Class) *types.Field {
field := types.NewField(src.NoXPos, s, t)
n := typecheck.NewName(s)
n.Class_ = which
n.Class = which
field.Nname = n
n.SetType(t)
return field

@ -83,7 +83,7 @@ func compile(fn *ir.Func) {
// because symbols must be allocated before the parallel
// phase of the compiler.
for _, n := range fn.Dcl {
switch n.Class_ {
switch n.Class {
case ir.PPARAM, ir.PPARAMOUT, ir.PAUTO:
if liveness.ShouldTrack(n) && n.Addrtaken() {
reflectdata.WriteType(n.Type())
@ -174,5 +174,5 @@ func isInlinableButNotInlined(fn *ir.Func) bool {
if fn.Sym() == nil {
return true
}
return !fn.Sym().Linksym().WasInlined()
return !fn.Linksym().WasInlined()
}

@ -83,7 +83,7 @@ type exporter struct {
func (p *exporter) markObject(n ir.Node) {
if n.Op() == ir.ONAME {
n := n.(*ir.Name)
if n.Class_ == ir.PFUNC {
if n.Class == ir.PFUNC {
inline.Inline_Flood(n, typecheck.Export)
}
}

@ -2,14 +2,13 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:generate go run mkbuiltin.go
package gc
import (
"bufio"
"bytes"
"cmd/compile/internal/base"
"cmd/compile/internal/deadcode"
"cmd/compile/internal/devirtualize"
"cmd/compile/internal/dwarfgen"
"cmd/compile/internal/escape"
@ -192,9 +191,6 @@ func Main(archInit func(*ssagen.ArchInfo)) {
types.PtrSize = ssagen.Arch.LinkArch.PtrSize
types.RegSize = ssagen.Arch.LinkArch.RegSize
types.MaxWidth = ssagen.Arch.MAXWIDTH
types.TypeLinkSym = func(t *types.Type) *obj.LSym {
return reflectdata.TypeSym(t).Linksym()
}
typecheck.Target = new(ir.Package)
@ -204,37 +200,60 @@ func Main(archInit func(*ssagen.ArchInfo)) {
base.AutogeneratedPos = makePos(src.NewFileBase("<autogenerated>", "<autogenerated>"), 1, 0)
types.TypeLinkSym = func(t *types.Type) *obj.LSym {
return reflectdata.TypeSym(t).Linksym()
}
typecheck.Init()
typecheck.InitUniverse()
// Parse input.
base.Timer.Start("fe", "parse")
lines := noder.ParseFiles(flag.Args())
ssagen.CgoSymABIs()
base.Timer.Stop()
base.Timer.AddEvent(int64(lines), "lines")
if base.Flag.G != 0 && base.Flag.G < 3 {
// can only parse generic code for now
base.ExitIfErrors()
return
}
// Parse and typecheck input.
noder.LoadPackage(flag.Args())
dwarfgen.RecordPackageName()
// Typecheck.
typecheck.Package()
// With all user code typechecked, it's now safe to verify unused dot imports.
noder.CheckDotImports()
base.ExitIfErrors()
ssagen.CgoSymABIs()
// Build init task.
if initTask := pkginit.Task(); initTask != nil {
typecheck.Export(initTask)
}
// Eliminate some obviously dead code.
// Must happen after typechecking.
for _, n := range typecheck.Target.Decls {
if n.Op() == ir.ODCLFUNC {
deadcode.Func(n.(*ir.Func))
}
}
// Compute Addrtaken for names.
// We need to wait until typechecking is done so that when we see &x[i]
// we know that x has its address taken if x is an array, but not if x is a slice.
// We compute Addrtaken in bulk here.
// After this phase, we maintain Addrtaken incrementally.
if typecheck.DirtyAddrtaken {
typecheck.ComputeAddrtaken(typecheck.Target.Decls)
typecheck.DirtyAddrtaken = false
}
typecheck.IncrementalAddrtaken = true
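// Example (illustration, not CL text): in
//
//	var a [4]int
//	var s []int
//	p := &a[2] // must mark a Addrtaken: the pointer points into a itself
//	q := &s[2] // must not mark s: it points into the backing array
//
// the distinction depends on the type of x in &x[i], which is why this
// bulk pass has to wait until typechecking is done.
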
// Decide how to capture closed variables.
// This needs to run before escape analysis,
// because variables captured by value do not escape.
base.Timer.Start("fe", "capturevars")
for _, n := range typecheck.Target.Decls {
if n.Op() == ir.ODCLFUNC {
n := n.(*ir.Func)
if n.OClosure != nil {
ir.CurFunc = n
typecheck.CaptureVars(n)
}
}
}
typecheck.CaptureVarsComplete = true
ir.CurFunc = nil
if base.Debug.TypecheckInl != 0 {
// Typecheck imported function bodies if Debug.l > 1,
// otherwise lazily when used or re-exported.
typecheck.AllImportedBodies()
}
// Inlining
base.Timer.Start("fe", "inlining")
if base.Flag.LowerL != 0 {
@ -285,6 +304,7 @@ func Main(archInit func(*ssagen.ArchInfo)) {
// Prepare for SSA compilation.
// This must be before peekitabs, because peekitabs
// can trigger function compilation.
typecheck.InitRuntime()
ssagen.InitConfig()
// Just before compilation, compile itabs found on

@ -148,8 +148,8 @@ func dumpdata() {
dumpglobls(typecheck.Target.Externs[numExterns:])
if reflectdata.ZeroSize > 0 {
zero := ir.Pkgs.Map.Lookup("zero")
objw.Global(zero.Linksym(), int32(reflectdata.ZeroSize), obj.DUPOK|obj.RODATA)
zero := ir.Pkgs.Map.Lookup("zero").Linksym()
objw.Global(zero, int32(reflectdata.ZeroSize), obj.DUPOK|obj.RODATA)
}
addGCLocals()
@ -188,7 +188,7 @@ func dumpGlobal(n *ir.Name) {
if n.Type() == nil {
base.Fatalf("external %v nil type\n", n)
}
if n.Class_ == ir.PFUNC {
if n.Class == ir.PFUNC {
return
}
if n.Sym().Pkg != types.LocalPkg {
@ -260,18 +260,18 @@ func addGCLocals() {
}
}
func ggloblnod(nam ir.Node) {
s := nam.Sym().Linksym()
s.Gotype = reflectdata.TypeSym(nam.Type()).Linksym()
func ggloblnod(nam *ir.Name) {
s := nam.Linksym()
s.Gotype = reflectdata.TypeLinksym(nam.Type())
flags := 0
if nam.Name().Readonly() {
if nam.Readonly() {
flags = obj.RODATA
}
if nam.Type() != nil && !nam.Type().HasPointers() {
flags |= obj.NOPTR
}
base.Ctxt.Globl(s, nam.Type().Width, flags)
if nam.Name().LibfuzzerExtraCounter() {
if nam.LibfuzzerExtraCounter() {
s.Type = objabi.SLIBFUZZER_EXTRA_COUNTER
}
if nam.Sym().Linkname != "" {

@ -199,8 +199,8 @@ func Inline_Flood(n *ir.Name, exportsym func(*ir.Name)) {
if n == nil {
return
}
if n.Op() != ir.ONAME || n.Class_ != ir.PFUNC {
base.Fatalf("inlFlood: unexpected %v, %v, %v", n, n.Op(), n.Class_)
if n.Op() != ir.ONAME || n.Class != ir.PFUNC {
base.Fatalf("inlFlood: unexpected %v, %v, %v", n, n.Op(), n.Class)
}
fn := n.Func
if fn == nil {
@ -227,7 +227,7 @@ func Inline_Flood(n *ir.Name, exportsym func(*ir.Name)) {
case ir.ONAME:
n := n.(*ir.Name)
switch n.Class_ {
switch n.Class {
case ir.PFUNC:
Inline_Flood(n, exportsym)
exportsym(n)
@ -265,7 +265,7 @@ var errBudget = errors.New("too expensive")
func (v *hairyVisitor) tooHairy(fn *ir.Func) bool {
v.do = v.doNode // cache closure
err := ir.DoChildren(fn, v.do)
err := errChildren(fn, v.do)
if err != nil {
v.reason = err.Error()
return true
@ -292,7 +292,7 @@ func (v *hairyVisitor) doNode(n ir.Node) error {
// runtime.throw is a "cheap call" like panic in normal code.
if n.X.Op() == ir.ONAME {
name := n.X.(*ir.Name)
if name.Class_ == ir.PFUNC && types.IsRuntimePkg(name.Sym().Pkg) {
if name.Class == ir.PFUNC && types.IsRuntimePkg(name.Sym().Pkg) {
fn := name.Sym().Name
if fn == "getcallerpc" || fn == "getcallersp" {
return errors.New("call to " + fn)
@ -393,13 +393,13 @@ func (v *hairyVisitor) doNode(n ir.Node) error {
if ir.IsConst(n.Cond, constant.Bool) {
// This if and the condition cost nothing.
// TODO(rsc): It seems strange that we visit the dead branch.
if err := ir.DoList(n.Init(), v.do); err != nil {
if err := errList(n.Init(), v.do); err != nil {
return err
}
if err := ir.DoList(n.Body, v.do); err != nil {
if err := errList(n.Body, v.do); err != nil {
return err
}
if err := ir.DoList(n.Else, v.do); err != nil {
if err := errList(n.Else, v.do); err != nil {
return err
}
return nil
@ -407,7 +407,7 @@ func (v *hairyVisitor) doNode(n ir.Node) error {
case ir.ONAME:
n := n.(*ir.Name)
if n.Class_ == ir.PAUTO {
if n.Class == ir.PAUTO {
v.usedLocals[n] = true
}
@ -419,6 +419,9 @@ func (v *hairyVisitor) doNode(n ir.Node) error {
case ir.OCALLPART, ir.OSLICELIT:
v.budget-- // Hack for toolstash -cmp.
case ir.OMETHEXPR:
v.budget++ // Hack for toolstash -cmp.
}
v.budget--
@ -428,7 +431,7 @@ func (v *hairyVisitor) doNode(n ir.Node) error {
return errBudget
}
return ir.DoChildren(n, v.do)
return errChildren(n, v.do)
}
func isBigFunc(fn *ir.Func) bool {
@ -541,7 +544,7 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
if as := n; as.Op() == ir.OAS2FUNC {
as := as.(*ir.AssignListStmt)
if as.Rhs[0].Op() == ir.OINLCALL {
as.Rhs.Set(inlconv2list(as.Rhs[0].(*ir.InlinedCallExpr)))
as.Rhs = inlconv2list(as.Rhs[0].(*ir.InlinedCallExpr))
as.SetOp(ir.OAS2)
as.SetTypecheck(0)
n = typecheck.Stmt(as)
@ -613,18 +616,18 @@ func inlCallee(fn ir.Node) *ir.Func {
fn = ir.StaticValue(fn)
switch fn.Op() {
case ir.OMETHEXPR:
fn := fn.(*ir.MethodExpr)
fn := fn.(*ir.SelectorExpr)
n := ir.MethodExprName(fn)
// Check that receiver type matches fn.Left.
// Check that receiver type matches fn.X.
// TODO(mdempsky): Handle implicit dereference
// of pointer receiver argument?
if n == nil || !types.Identical(n.Type().Recv().Type, fn.T) {
if n == nil || !types.Identical(n.Type().Recv().Type, fn.X.Type()) {
return nil
}
return n.Func
case ir.ONAME:
fn := fn.(*ir.Name)
if fn.Class_ == ir.PFUNC {
if fn.Class == ir.PFUNC {
return fn.Func
}
case ir.OCLOSURE:
@ -636,13 +639,15 @@ func inlCallee(fn ir.Node) *ir.Func {
return nil
}
func inlParam(t *types.Field, as ir.Node, inlvars map[*ir.Name]ir.Node) ir.Node {
n := ir.AsNode(t.Nname)
if n == nil || ir.IsBlank(n) {
func inlParam(t *types.Field, as ir.InitNode, inlvars map[*ir.Name]*ir.Name) ir.Node {
if t.Nname == nil {
return ir.BlankNode
}
inlvar := inlvars[n.(*ir.Name)]
n := t.Nname.(*ir.Name)
if ir.IsBlank(n) {
return ir.BlankNode
}
inlvar := inlvars[n]
if inlvar == nil {
base.Fatalf("missing inlvar for %v", n)
}
@ -736,7 +741,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
callee := n.X
for callee.Op() == ir.OCONVNOP {
conv := callee.(*ir.ConvExpr)
ninit.Append(conv.PtrInit().Take()...)
ninit.Append(ir.TakeInit(conv)...)
callee = conv.X
}
if callee.Op() != ir.ONAME && callee.Op() != ir.OCLOSURE && callee.Op() != ir.OMETHEXPR {
@ -745,52 +750,16 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
}
// Make temp names to use instead of the originals.
inlvars := make(map[*ir.Name]ir.Node)
inlvars := make(map[*ir.Name]*ir.Name)
// record formals/locals for later post-processing
var inlfvars []ir.Node
// Handle captured variables when inlining closures.
if c := fn.OClosure; c != nil {
for _, v := range fn.ClosureVars {
if v.Op() == ir.OXXX {
continue
}
o := v.Outer
// make sure the outer param matches the inlining location
// NB: if we enabled inlining of functions containing OCLOSURE or refined
// the reassigned check via some sort of copy propagation this would most
// likely need to be changed to a loop to walk up to the correct Param
if o == nil || o.Curfn != ir.CurFunc {
base.Fatalf("%v: unresolvable capture %v %v\n", ir.Line(n), fn, v)
}
if v.Byval() {
iv := typecheck.Expr(inlvar(v))
ninit.Append(ir.NewDecl(base.Pos, ir.ODCL, iv))
ninit.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, iv, o)))
inlvars[v] = iv
} else {
addr := typecheck.NewName(typecheck.Lookup("&" + v.Sym().Name))
addr.SetType(types.NewPtr(v.Type()))
ia := typecheck.Expr(inlvar(addr))
ninit.Append(ir.NewDecl(base.Pos, ir.ODCL, ia))
ninit.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, ia, typecheck.NodAddr(o))))
inlvars[addr] = ia
// When capturing by reference, all occurrences of the captured var
// must be substituted with a dereference of the temporary address
inlvars[v] = typecheck.Expr(ir.NewStarExpr(base.Pos, ia))
}
}
}
var inlfvars []*ir.Name
for _, ln := range fn.Inl.Dcl {
if ln.Op() != ir.ONAME {
continue
}
if ln.Class_ == ir.PPARAMOUT { // return values handled below.
if ln.Class == ir.PPARAMOUT { // return values handled below.
continue
}
if ir.IsParamStackCopy(ln) { // ignore the on-stack copy of a parameter that moved to the heap
@ -800,10 +769,10 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
// nothing should have moved to the heap yet.
base.Fatalf("impossible: %v", ln)
}
inlf := typecheck.Expr(inlvar(ln))
inlf := typecheck.Expr(inlvar(ln)).(*ir.Name)
inlvars[ln] = inlf
if base.Flag.GenDwarfInl > 0 {
if ln.Class_ == ir.PPARAM {
if ln.Class == ir.PPARAM {
inlf.Name().SetInlFormal(true)
} else {
inlf.Name().SetInlLocal(true)
@ -828,11 +797,11 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
// temporaries for return values.
var retvars []ir.Node
for i, t := range fn.Type().Results().Fields().Slice() {
var m ir.Node
if n := ir.AsNode(t.Nname); n != nil && !ir.IsBlank(n) && !strings.HasPrefix(n.Sym().Name, "~r") {
n := n.(*ir.Name)
var m *ir.Name
if nn := t.Nname; nn != nil && !ir.IsBlank(nn.(*ir.Name)) && !strings.HasPrefix(nn.Sym().Name, "~r") {
n := nn.(*ir.Name)
m = inlvar(n)
m = typecheck.Expr(m)
m = typecheck.Expr(m).(*ir.Name)
inlvars[n] = m
delayretvars = false // found a named result parameter
} else {
@ -897,8 +866,8 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
vas.Y = typecheck.NodNil()
vas.Y.SetType(param.Type)
} else {
lit := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(param.Type).(ir.Ntype), nil)
lit.List.Set(varargs)
lit := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(param.Type), nil)
lit.List = varargs
vas.Y = lit
}
}
@ -914,7 +883,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
if !delayretvars {
// Zero the return parameters.
for _, n := range retvars {
ninit.Append(ir.NewDecl(base.Pos, ir.ODCL, n))
ninit.Append(ir.NewDecl(base.Pos, ir.ODCL, n.(*ir.Name)))
ras := ir.NewAssignStmt(base.Pos, n, nil)
ninit.Append(typecheck.Stmt(ras))
}
@ -929,7 +898,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
parent = b.InliningIndex()
}
sym := fn.Sym().Linksym()
sym := fn.Linksym()
newIndex := base.Ctxt.InlTree.Add(parent, n.Pos(), sym)
// Add an inline mark just before the inlined body.
@ -975,9 +944,9 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
//dumplist("ninit post", ninit);
call := ir.NewInlinedCallExpr(base.Pos, nil, nil)
call.PtrInit().Set(ninit)
call.Body.Set(body)
call.ReturnVars.Set(retvars)
*call.PtrInit() = ninit
call.Body = body
call.ReturnVars = retvars
call.SetType(n.Type())
call.SetTypecheck(1)
@ -999,27 +968,27 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
// Every time we expand a function we generate a new set of tmpnames,
// PAUTO's in the calling functions, and link them off of the
// PPARAM's, PAUTOS and PPARAMOUTs of the called function.
func inlvar(var_ ir.Node) ir.Node {
func inlvar(var_ *ir.Name) *ir.Name {
if base.Flag.LowerM > 3 {
fmt.Printf("inlvar %+v\n", var_)
}
n := typecheck.NewName(var_.Sym())
n.SetType(var_.Type())
n.Class_ = ir.PAUTO
n.Class = ir.PAUTO
n.SetUsed(true)
n.Curfn = ir.CurFunc // the calling function, not the called one
n.SetAddrtaken(var_.Name().Addrtaken())
n.SetAddrtaken(var_.Addrtaken())
ir.CurFunc.Dcl = append(ir.CurFunc.Dcl, n)
return n
}
// Synthesize a variable to store the inlined function's results in.
func retvar(t *types.Field, i int) ir.Node {
func retvar(t *types.Field, i int) *ir.Name {
n := typecheck.NewName(typecheck.LookupNum("~R", i))
n.SetType(t.Type)
n.Class_ = ir.PAUTO
n.Class = ir.PAUTO
n.SetUsed(true)
n.Curfn = ir.CurFunc // the calling function, not the called one
ir.CurFunc.Dcl = append(ir.CurFunc.Dcl, n)
@ -1031,7 +1000,7 @@ func retvar(t *types.Field, i int) ir.Node {
func argvar(t *types.Type, i int) ir.Node {
n := typecheck.NewName(typecheck.LookupNum("~arg", i))
n.SetType(t.Elem())
n.Class_ = ir.PAUTO
n.Class = ir.PAUTO
n.SetUsed(true)
n.Curfn = ir.CurFunc // the calling function, not the called one
ir.CurFunc.Dcl = append(ir.CurFunc.Dcl, n)
@ -1051,7 +1020,7 @@ type inlsubst struct {
// "return" statement.
delayretvars bool
inlvars map[*ir.Name]ir.Node
inlvars map[*ir.Name]*ir.Name
// bases maps from original PosBase to PosBase with an extra
// inlined call frame.
@ -1085,6 +1054,25 @@ func (subst *inlsubst) node(n ir.Node) ir.Node {
switch n.Op() {
case ir.ONAME:
n := n.(*ir.Name)
// Handle captured variables when inlining closures.
if n.IsClosureVar() {
o := n.Outer
// make sure the outer param matches the inlining location
// NB: if we enabled inlining of functions containing OCLOSURE or refined
// the reassigned check via some sort of copy propagation this would most
// likely need to be changed to a loop to walk up to the correct Param
if o == nil || o.Curfn != ir.CurFunc {
base.Fatalf("%v: unresolvable capture %v\n", ir.Line(n), n)
}
if base.Flag.LowerM > 2 {
fmt.Printf("substituting captured name %+v -> %+v\n", n, o)
}
return o
}
if inlvar := subst.inlvars[n]; inlvar != nil { // These will be set during inlnode
if base.Flag.LowerM > 2 {
fmt.Printf("substituting name %+v -> %+v\n", n, inlvar)
@ -1098,7 +1086,7 @@ func (subst *inlsubst) node(n ir.Node) ir.Node {
return n
case ir.OMETHEXPR:
n := n.(*ir.MethodExpr)
n := n.(*ir.SelectorExpr)
return n
case ir.OLITERAL, ir.ONIL, ir.OTYPE:
@ -1108,15 +1096,6 @@ func (subst *inlsubst) node(n ir.Node) ir.Node {
if n.Sym() != nil {
return n
}
if n, ok := n.(*ir.Name); ok && n.Op() == ir.OLITERAL {
// This happens for unnamed OLITERAL.
// which should really not be a *Name, but for now it is.
// ir.Copy(n) is not allowed generally and would panic below,
// but it's OK in this situation.
n = n.CloneName()
n.SetPos(subst.updatedPos(n.Pos()))
return n
}
case ir.ORETURN:
// Since we don't handle bodies with closures,
@ -1132,11 +1111,11 @@ func (subst *inlsubst) node(n ir.Node) ir.Node {
for _, n := range subst.retvars {
as.Lhs.Append(n)
}
as.Rhs.Set(subst.list(n.Results))
as.Rhs = subst.list(n.Results)
if subst.delayretvars {
for _, n := range as.Lhs {
as.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, n))
as.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, n.(*ir.Name)))
n.Name().Defn = as
}
}
@ -1151,7 +1130,7 @@ func (subst *inlsubst) node(n ir.Node) ir.Node {
n := n.(*ir.BranchStmt)
m := ir.Copy(n).(*ir.BranchStmt)
m.SetPos(subst.updatedPos(m.Pos()))
m.PtrInit().Set(nil)
*m.PtrInit() = nil
p := fmt.Sprintf("%s·%d", n.Label.Name, inlgen)
m.Label = typecheck.Lookup(p)
return m
@ -1160,7 +1139,7 @@ func (subst *inlsubst) node(n ir.Node) ir.Node {
n := n.(*ir.LabelStmt)
m := ir.Copy(n).(*ir.LabelStmt)
m.SetPos(subst.updatedPos(m.Pos()))
m.PtrInit().Set(nil)
*m.PtrInit() = nil
p := fmt.Sprintf("%s·%d", n.Label.Name, inlgen)
m.Label = typecheck.Lookup(p)
return m
@ -1191,7 +1170,7 @@ func (subst *inlsubst) updatedPos(xpos src.XPos) src.XPos {
func pruneUnusedAutos(ll []*ir.Name, vis *hairyVisitor) []*ir.Name {
s := make([]*ir.Name, 0, len(ll))
for _, n := range ll {
if n.Class_ == ir.PAUTO {
if n.Class == ir.PAUTO {
if _, found := vis.usedLocals[n]; !found {
continue
}
@ -1211,3 +1190,22 @@ func numNonClosures(list []*ir.Func) int {
}
return count
}
// TODO(mdempsky): Update inl.go to use ir.DoChildren directly.
func errChildren(n ir.Node, do func(ir.Node) error) (err error) {
ir.DoChildren(n, func(x ir.Node) bool {
err = do(x)
return err != nil
})
return
}
func errList(list []ir.Node, do func(ir.Node) error) error {
for _, x := range list {
if x != nil {
if err := do(x); err != nil {
return err
}
}
}
return nil
}
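These two shims adapt the error-based walker callbacks still used in inl.go to the new bool-based ir.DoChildren protocol. A minimal sketch of a caller, assuming it sits in the same package as errChildren (the walker itself is illustrative, not part of this CL):

	var errFound = errors.New("found")

	// containsRecover reports whether the tree rooted at fn contains a recover().
	func containsRecover(fn ir.Node) bool {
		var do func(ir.Node) error
		do = func(n ir.Node) error {
			if n.Op() == ir.ORECOVER {
				return errFound // any non-nil error stops the walk early
			}
			return errChildren(n, do)
		}
		return do(fn) == errFound
	}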


@ -1,4 +1,4 @@
// Code generated by "stringer -type=Class"; DO NOT EDIT.
// Code generated by "stringer -type=Class name.go"; DO NOT EDIT.
package ir


@ -25,6 +25,14 @@ type OrigNode interface {
SetOrig(Node)
}
// origNode may be embedded into a Node to make it implement OrigNode.
type origNode struct {
orig Node `mknode:"-"`
}
func (n *origNode) Orig() Node { return n.orig }
func (n *origNode) SetOrig(o Node) { n.orig = o }
// Orig returns the “original” node for n.
// If n implements OrigNode, Orig returns n.Orig().
// Otherwise Orig returns n itself.
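The consumer side is unchanged: callers either assert to OrigNode themselves or go through the package-level Orig helper described above. A minimal sketch, assuming package ir:

	func describeOrig(n Node) {
		if o, ok := n.(OrigNode); ok {
			_ = o.Orig() // e.g. the expression as written, before constant folding
		}
		_ = Orig(n) // same lookup, but falls back to n itself for plain nodes
	}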


@ -14,27 +14,6 @@ import (
"go/token"
)
func maybeDo(x Node, err error, do func(Node) error) error {
if x != nil && err == nil {
err = do(x)
}
return err
}
func maybeDoList(x Nodes, err error, do func(Node) error) error {
if err == nil {
err = DoList(x, do)
}
return err
}
func maybeEdit(x Node, edit func(Node) Node) Node {
if x == nil {
return x
}
return edit(x)
}
// An Expr is a Node that can appear as an expression.
type Expr interface {
Node
@ -48,8 +27,7 @@ type Expr interface {
type miniExpr struct {
miniNode
typ *types.Type
init Nodes // TODO(rsc): Don't require every Node to have an init
opt interface{} // TODO(rsc): Don't require every Node to have an opt?
init Nodes // TODO(rsc): Don't require every Node to have an init
flags bitset8
}
@ -59,14 +37,13 @@ const (
miniExprTransient
miniExprBounded
miniExprImplicit // for use by implementations; not supported by every Expr
miniExprCheckPtr
)
func (*miniExpr) isExpr() {}
func (n *miniExpr) Type() *types.Type { return n.typ }
func (n *miniExpr) SetType(x *types.Type) { n.typ = x }
func (n *miniExpr) Opt() interface{} { return n.opt }
func (n *miniExpr) SetOpt(x interface{}) { n.opt = x }
func (n *miniExpr) HasCall() bool { return n.flags&miniExprHasCall != 0 }
func (n *miniExpr) SetHasCall(b bool) { n.flags.set(miniExprHasCall, b) }
func (n *miniExpr) NonNil() bool { return n.flags&miniExprNonNil != 0 }
@ -79,16 +56,6 @@ func (n *miniExpr) Init() Nodes { return n.init }
func (n *miniExpr) PtrInit() *Nodes { return &n.init }
func (n *miniExpr) SetInit(x Nodes) { n.init = x }
func toNtype(x Node) Ntype {
if x == nil {
return nil
}
if _, ok := x.(Ntype); !ok {
Dump("not Ntype", x)
}
return x.(Ntype)
}
// An AddStringExpr is a string concatenation List[0] + List[1] + ... + List[len(List)-1].
type AddStringExpr struct {
miniExpr
@ -100,7 +67,7 @@ func NewAddStringExpr(pos src.XPos, list []Node) *AddStringExpr {
n := &AddStringExpr{}
n.pos = pos
n.op = OADDSTR
n.List.Set(list)
n.List = list
return n
}
@ -108,8 +75,8 @@ func NewAddStringExpr(pos src.XPos, list []Node) *AddStringExpr {
// It may end up being a normal address-of or an allocation of a composite literal.
type AddrExpr struct {
miniExpr
X Node
Alloc Node // preallocated storage if any
X Node
Prealloc *Name // preallocated storage if any
}
func NewAddrExpr(pos src.XPos, x Node) *AddrExpr {
@ -191,14 +158,14 @@ const (
// A CallExpr is a function call X(Args).
type CallExpr struct {
miniExpr
orig Node
X Node
Args Nodes
Rargs Nodes // TODO(rsc): Delete.
Body Nodes // TODO(rsc): Delete.
IsDDD bool
Use CallUse
NoInline bool
origNode
X Node
Args Nodes
Rargs Nodes // TODO(rsc): Delete.
KeepAlive []*Name // vars to be kept alive until call returns
IsDDD bool
Use CallUse
NoInline bool
}
func NewCallExpr(pos src.XPos, op Op, fun Node, args []Node) *CallExpr {
@ -206,15 +173,12 @@ func NewCallExpr(pos src.XPos, op Op, fun Node, args []Node) *CallExpr {
n.pos = pos
n.orig = n
n.SetOp(op)
n.Args.Set(args)
n.Args = args
return n
}
func (*CallExpr) isStmt() {}
func (n *CallExpr) Orig() Node { return n.orig }
func (n *CallExpr) SetOrig(x Node) { n.orig = x }
func (n *CallExpr) SetOp(op Op) {
switch op {
default:
@ -225,30 +189,10 @@ func (n *CallExpr) SetOp(op Op) {
}
}
// A CallPartExpr is a method expression X.Method (uncalled).
type CallPartExpr struct {
miniExpr
Func *Func
X Node
Method *types.Field
Prealloc *Name
}
func NewCallPartExpr(pos src.XPos, x Node, method *types.Field, fn *Func) *CallPartExpr {
n := &CallPartExpr{Func: fn, X: x, Method: method}
n.op = OCALLPART
n.pos = pos
n.typ = fn.Type()
n.Func = fn
return n
}
func (n *CallPartExpr) Sym() *types.Sym { return n.Method.Sym }
// A ClosureExpr is a function literal expression.
type ClosureExpr struct {
miniExpr
Func *Func
Func *Func `mknode:"-"`
Prealloc *Name
}
@ -276,7 +220,7 @@ func NewClosureRead(typ *types.Type, offset int64) *ClosureReadExpr {
// Before type-checking, the type is Ntype.
type CompLitExpr struct {
miniExpr
orig Node
origNode
Ntype Ntype
List Nodes // initialized values
Prealloc *Name
@ -287,13 +231,11 @@ func NewCompLitExpr(pos src.XPos, op Op, typ Ntype, list []Node) *CompLitExpr {
n := &CompLitExpr{Ntype: typ}
n.pos = pos
n.SetOp(op)
n.List.Set(list)
n.List = list
n.orig = n
return n
}
func (n *CompLitExpr) Orig() Node { return n.orig }
func (n *CompLitExpr) SetOrig(x Node) { n.orig = x }
func (n *CompLitExpr) Implicit() bool { return n.flags&miniExprImplicit != 0 }
func (n *CompLitExpr) SetImplicit(b bool) { n.flags.set(miniExprImplicit, b) }
@ -308,14 +250,15 @@ func (n *CompLitExpr) SetOp(op Op) {
type ConstExpr struct {
miniExpr
val constant.Value
orig Node
origNode
val constant.Value
}
func NewConstExpr(val constant.Value, orig Node) Node {
n := &ConstExpr{orig: orig, val: val}
n := &ConstExpr{val: val}
n.op = OLITERAL
n.pos = orig.Pos()
n.orig = orig
n.SetType(orig.Type())
n.SetTypecheck(orig.Typecheck())
n.SetDiag(orig.Diag())
@ -323,8 +266,6 @@ func NewConstExpr(val constant.Value, orig Node) Node {
}
func (n *ConstExpr) Sym() *types.Sym { return n.orig.Sym() }
func (n *ConstExpr) Orig() Node { return n.orig }
func (n *ConstExpr) SetOrig(orig Node) { panic(n.no("SetOrig")) }
func (n *ConstExpr) Val() constant.Value { return n.val }
// A ConvExpr is a conversion Type(X).
@ -344,6 +285,8 @@ func NewConvExpr(pos src.XPos, op Op, typ *types.Type, x Node) *ConvExpr {
func (n *ConvExpr) Implicit() bool { return n.flags&miniExprImplicit != 0 }
func (n *ConvExpr) SetImplicit(b bool) { n.flags.set(miniExprImplicit, b) }
func (n *ConvExpr) CheckPtr() bool { return n.flags&miniExprCheckPtr != 0 }
func (n *ConvExpr) SetCheckPtr(b bool) { n.flags.set(miniExprCheckPtr, b) }
func (n *ConvExpr) SetOp(op Op) {
switch op {
@ -421,8 +364,8 @@ func NewInlinedCallExpr(pos src.XPos, body, retvars []Node) *InlinedCallExpr {
n := &InlinedCallExpr{}
n.pos = pos
n.op = OINLCALL
n.Body.Set(body)
n.ReturnVars.Set(retvars)
n.Body = body
n.ReturnVars = retvars
return n
}
@ -476,24 +419,6 @@ func (n *MakeExpr) SetOp(op Op) {
}
}
// A MethodExpr is a method expression T.M (where T is a type).
type MethodExpr struct {
miniExpr
T *types.Type
Method *types.Field
FuncName_ *Name
}
func NewMethodExpr(pos src.XPos, t *types.Type, method *types.Field) *MethodExpr {
n := &MethodExpr{T: t, Method: method}
n.pos = pos
n.op = OMETHEXPR
return n
}
func (n *MethodExpr) FuncName() *Name { return n.FuncName_ }
func (n *MethodExpr) Sym() *types.Sym { panic("MethodExpr.Sym") }
// A NilExpr represents the predefined untyped constant nil.
// (It may be copied and assigned a type, though.)
type NilExpr struct {
@ -567,12 +492,13 @@ func NewNameOffsetExpr(pos src.XPos, name *Name, offset int64, typ *types.Type)
return n
}
// A SelectorExpr is a selector expression X.Sym.
// A SelectorExpr is a selector expression X.Sel.
type SelectorExpr struct {
miniExpr
X Node
Sel *types.Sym
Selection *types.Field
Prealloc *Name // preallocated storage for OCALLPART, if any
}
func NewSelectorExpr(pos src.XPos, op Op, x Node, sel *types.Sym) *SelectorExpr {
@ -586,7 +512,7 @@ func (n *SelectorExpr) SetOp(op Op) {
switch op {
default:
panic(n.no("SetOp " + op.String()))
case ODOT, ODOTPTR, ODOTMETH, ODOTINTER, OXDOT:
case OXDOT, ODOT, ODOTPTR, ODOTMETH, ODOTINTER, OCALLPART, OMETHEXPR:
n.op = op
}
}
@ -596,6 +522,16 @@ func (n *SelectorExpr) Implicit() bool { return n.flags&miniExprImplicit !=
func (n *SelectorExpr) SetImplicit(b bool) { n.flags.set(miniExprImplicit, b) }
func (n *SelectorExpr) Offset() int64 { return n.Selection.Offset }
func (n *SelectorExpr) FuncName() *Name {
if n.Op() != OMETHEXPR {
panic(n.no("FuncName"))
}
fn := NewNameAt(n.Selection.Pos, MethodSym(n.X.Type(), n.Sel))
fn.Class = PFUNC
fn.SetType(n.Type())
return fn
}
// Before type-checking, bytes.Buffer is a SelectorExpr.
// After type-checking it becomes a Name.
func (*SelectorExpr) CanBeNtype() {}
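For orientation: with CallPartExpr and MethodExpr folded in, both method values and method expressions are now SelectorExprs, distinguished only by op. In user-level terms (ordinary Go, for illustration):

	type T struct{}

	func (T) M() {}

	var t T
	var mv = t.M // method value: OCALLPART, X is the receiver expression
	var me = T.M // method expression: OMETHEXPR, FuncName synthesizes the PFUNC Name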
@ -687,8 +623,13 @@ func (n *StarExpr) SetOTYPE(t *types.Type) {
type TypeAssertExpr struct {
miniExpr
X Node
Ntype Node // TODO: Should be Ntype, but reused as address of type structure
Itab Nodes // Itab[0] is itab
Ntype Ntype
// Runtime type information provided by walkDotType.
// Caution: These aren't always populated; see walkDotType.
SrcType *AddrExpr `mknode:"-"` // *runtime._type for X's type
DstType *AddrExpr `mknode:"-"` // *runtime._type for Type
Itab *AddrExpr `mknode:"-"` // *runtime.itab for Type implementing X's type
}
func NewTypeAssertExpr(pos src.XPos, x Node, typ Ntype) *TypeAssertExpr {
@ -795,7 +736,7 @@ func IsAddressable(n Node) bool {
case ONAME:
n := n.(*Name)
if n.Class_ == PFUNC {
if n.Class == PFUNC {
return false
}
return true
@ -830,11 +771,11 @@ func staticValue1(nn Node) Node {
return nil
}
n := nn.(*Name)
if n.Class_ != PAUTO || n.Name().Addrtaken() {
if n.Class != PAUTO {
return nil
}
defn := n.Name().Defn
defn := n.Defn
if defn == nil {
return nil
}
@ -882,23 +823,46 @@ func reassigned(name *Name) bool {
if name.Curfn == nil {
return true
}
return Any(name.Curfn, func(n Node) bool {
// TODO(mdempsky): This is inefficient and becoming increasingly
// unwieldy. Figure out a way to generalize escape analysis's
// reassignment detection for use by inlining and devirtualization.
// isName reports whether n is a reference to name.
isName := func(x Node) bool {
n, ok := x.(*Name)
return ok && n.Canonical() == name
}
var do func(n Node) bool
do = func(n Node) bool {
switch n.Op() {
case OAS:
n := n.(*AssignStmt)
if n.X == name && n != name.Defn {
if isName(n.X) && n != name.Defn {
return true
}
case OAS2, OAS2FUNC, OAS2MAPR, OAS2DOTTYPE, OAS2RECV, OSELRECV2:
n := n.(*AssignListStmt)
for _, p := range n.Lhs {
if p == name && n != name.Defn {
if isName(p) && n != name.Defn {
return true
}
}
case OADDR:
n := n.(*AddrExpr)
if isName(OuterValue(n.X)) {
return true
}
case OCLOSURE:
n := n.(*ClosureExpr)
if Any(n.Func, do) {
return true
}
}
return false
})
}
return Any(name.Curfn, do)
}
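The rewrite matters for correctness: the old version compared nodes by pointer identity, which missed writes made through a closure's own copy of a captured variable, and it ignored address-taking entirely. In user-level terms, reassigned must now report true for x in both situations below (ordinary Go, for illustration):

	func f() {
		x := 1 // x.Defn is this assignment
		g := func() {
			x = 2 // write through the closure's copy; isName catches it via Canonical
		}
		p := &x // OADDR: x may be reassigned through p, so treat it as reassigned
		_, _ = g, p
	}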
// IsIntrinsicCall reports whether the compiler back end will treat the call as an intrinsic operation.
@ -1089,13 +1053,8 @@ func MethodExprName(n Node) *Name {
// MethodExprFunc is like MethodExprName, but returns the types.Field instead.
func MethodExprFunc(n Node) *types.Field {
switch n.Op() {
case ODOTMETH:
case ODOTMETH, OMETHEXPR, OCALLPART:
return n.(*SelectorExpr).Selection
case OMETHEXPR:
return n.(*MethodExpr).Method
case OCALLPART:
n := n.(*CallPartExpr)
return n.Method
}
base.Fatalf("unexpected node: %v (%v)", n, n.Op())
panic("unreachable")


@ -128,7 +128,7 @@ func (o Op) Format(s fmt.State, verb rune) {
// %L Go syntax followed by " (type T)" if type is known.
// %+v Debug syntax, as in Dump.
//
func FmtNode(n Node, s fmt.State, verb rune) {
func fmtNode(n Node, s fmt.State, verb rune) {
// %+v prints Dump.
// Otherwise we print Go syntax.
if s.Flag('+') && verb == 'v' {
@ -216,6 +216,7 @@ var OpPrec = []int{
OTINTER: 8,
OTMAP: 8,
OTSTRUCT: 8,
OTYPE: 8,
OINDEXMAP: 8,
OINDEX: 8,
OSLICE: 8,
@ -232,6 +233,7 @@ var OpPrec = []int{
ODOT: 8,
OXDOT: 8,
OCALLPART: 8,
OMETHEXPR: 8,
OPLUS: 7,
ONOT: 7,
OBITNOT: 7,
@ -551,8 +553,8 @@ func exprFmt(n Node, s fmt.State, prec int) {
}
nprec := OpPrec[n.Op()]
if n.Op() == OTYPE && n.Sym() != nil {
nprec = 8
if n.Op() == OTYPE && n.Type().IsPtr() {
nprec = OpPrec[ODEREF]
}
if prec > nprec {
@ -630,10 +632,6 @@ func exprFmt(n Node, s fmt.State, prec int) {
case OPACK, ONONAME:
fmt.Fprint(s, n.Sym())
case OMETHEXPR:
n := n.(*MethodExpr)
fmt.Fprint(s, n.FuncName().Sym())
case ONAMEOFFSET:
n := n.(*NameOffsetExpr)
fmt.Fprintf(s, "(%v)(%v@%d)", n.Type(), n.Name_, n.Offset_)
@ -749,16 +747,7 @@ func exprFmt(n Node, s fmt.State, prec int) {
n := n.(*StructKeyExpr)
fmt.Fprintf(s, "%v:%v", n.Field, n.Value)
case OCALLPART:
n := n.(*CallPartExpr)
exprFmt(n.X, s, nprec)
if n.Method.Sym == nil {
fmt.Fprint(s, ".<nil>")
return
}
fmt.Fprintf(s, ".%s", n.Method.Sym.Name)
case OXDOT, ODOT, ODOTPTR, ODOTINTER, ODOTMETH:
case OXDOT, ODOT, ODOTPTR, ODOTINTER, ODOTMETH, OCALLPART, OMETHEXPR:
n := n.(*SelectorExpr)
exprFmt(n.X, s, nprec)
if n.Sel == nil {
@ -991,7 +980,7 @@ func (l Nodes) Format(s fmt.State, verb rune) {
// Dump prints the message s followed by a debug dump of n.
func Dump(s string, n Node) {
fmt.Printf("%s [%p]%+v", s, n, n)
fmt.Printf("%s [%p]%+v\n", s, n, n)
}
// DumpList prints the message s followed by a debug dump of each node in the list.
@ -1160,12 +1149,6 @@ func dumpNode(w io.Writer, n Node, depth int) {
}
return
case OMETHEXPR:
n := n.(*MethodExpr)
fmt.Fprintf(w, "%+v-%+v", n.Op(), n.FuncName().Sym())
dumpNodeHeader(w, n)
return
case OASOP:
n := n.(*AssignOpStmt)
fmt.Fprintf(w, "%+v-%+v", n.Op(), n.AsOp)
@ -1254,10 +1237,25 @@ func dumpNode(w io.Writer, n Node, depth int) {
fmt.Fprintf(w, "%+v-%s", n.Op(), name)
}
dumpNodes(w, val, depth+1)
default:
if vf.Kind() == reflect.Slice && vf.Type().Elem().Implements(nodeType) {
if vf.Len() == 0 {
continue
}
if name != "" {
indent(w, depth)
fmt.Fprintf(w, "%+v-%s", n.Op(), name)
}
for i, n := 0, vf.Len(); i < n; i++ {
dumpNode(w, vf.Index(i).Interface().(Node), depth+1)
}
}
}
}
}
var nodeType = reflect.TypeOf((*Node)(nil)).Elem()
func dumpNodes(w io.Writer, list Nodes, depth int) {
if len(list) == 0 {
fmt.Fprintf(w, " <nil>")


@ -49,7 +49,6 @@ import (
// pointer from the Func back to the OCALLPART.
type Func struct {
miniNode
typ *types.Type
Body Nodes
Iota int64
@ -66,9 +65,15 @@ type Func struct {
// include closurevars until transformclosure runs.
Dcl []*Name
ClosureEnter Nodes // list of ONAME nodes (or OADDR-of-ONAME nodes, for output parameters) of captured variables
ClosureType Node // closure representation type
ClosureVars []*Name // closure params; each has closurevar set
ClosureType Ntype // closure representation type
// ClosureVars lists the free variables that are used within a
// function literal, but formally declared in an enclosing
// function. The variables in this slice are the closure function's
// own copy of the variables, which are used within its function
// body. They will also each have IsClosureVar set, and will have
// Byval set if they're captured by value.
ClosureVars []*Name
// Parents records the parent scope of each scope within a
// function. The root scope (0) has no parent, so the i'th
@ -78,7 +83,7 @@ type Func struct {
// Marks records scope boundary changes.
Marks []Mark
FieldTrack map[*types.Sym]struct{}
FieldTrack map[*obj.LSym]struct{}
DebugInfo interface{}
LSym *obj.LSym
@ -116,15 +121,13 @@ func NewFunc(pos src.XPos) *Func {
func (f *Func) isStmt() {}
func (f *Func) Type() *types.Type { return f.typ }
func (f *Func) SetType(x *types.Type) { f.typ = x }
func (n *Func) copy() Node { panic(n.no("copy")) }
func (n *Func) doChildren(do func(Node) bool) bool { return doNodes(n.Body, do) }
func (n *Func) editChildren(edit func(Node) Node) { editNodes(n.Body, edit) }
func (f *Func) Sym() *types.Sym {
if f.Nname != nil {
return f.Nname.Sym()
}
return nil
}
func (f *Func) Type() *types.Type { return f.Nname.Type() }
func (f *Func) Sym() *types.Sym { return f.Nname.Sym() }
func (f *Func) Linksym() *obj.LSym { return f.Nname.Linksym() }
// An Inline holds fields used for function bodies that can be inlined.
type Inline struct {
@ -240,24 +243,13 @@ func FuncSymName(s *types.Sym) string {
return s.Name + "·f"
}
// NewFuncNameAt generates a new name node for a function or method.
func NewFuncNameAt(pos src.XPos, s *types.Sym, fn *Func) *Name {
if fn.Nname != nil {
base.Fatalf("newFuncName - already have name")
}
n := NewNameAt(pos, s)
n.SetFunc(fn)
fn.Nname = n
return n
}
// MarkFunc marks a node as a function.
func MarkFunc(n *Name) {
if n.Op() != ONAME || n.Class_ != Pxxx {
if n.Op() != ONAME || n.Class != Pxxx {
base.Fatalf("expected ONAME/Pxxx node, got %v", n)
}
n.Class_ = PFUNC
n.Class = PFUNC
n.Sym().SetFunc(true)
}


@ -54,20 +54,13 @@ func (n *miniNode) Esc() uint16 { return n.esc }
func (n *miniNode) SetEsc(x uint16) { n.esc = x }
const (
miniWalkdefShift = 0
miniWalkdefShift = 0 // TODO(mdempsky): Move to Name.flags.
miniTypecheckShift = 2
miniDiag = 1 << 4
miniHasCall = 1 << 5 // for miniStmt
)
func (n *miniNode) Walkdef() uint8 { return n.bits.get2(miniWalkdefShift) }
func (n *miniNode) Typecheck() uint8 { return n.bits.get2(miniTypecheckShift) }
func (n *miniNode) SetWalkdef(x uint8) {
if x > 3 {
panic(fmt.Sprintf("cannot SetWalkdef %d", x))
}
n.bits.set2(miniWalkdefShift, x)
}
func (n *miniNode) SetTypecheck(x uint8) {
if x > 3 {
panic(fmt.Sprintf("cannot SetTypecheck %d", x))
@ -80,13 +73,7 @@ func (n *miniNode) SetDiag(x bool) { n.bits.set(miniDiag, x) }
// Empty, immutable graph structure.
func (n *miniNode) Init() Nodes { return Nodes{} }
func (n *miniNode) PtrInit() *Nodes { return &immutableEmptyNodes }
func (n *miniNode) SetInit(x Nodes) {
if x != nil {
panic(n.no("SetInit"))
}
}
func (n *miniNode) Init() Nodes { return Nodes{} }
// Additional functionality unavailable.
@ -102,5 +89,3 @@ func (n *miniNode) HasCall() bool { return false }
func (n *miniNode) SetHasCall(bool) { panic(n.no("SetHasCall")) }
func (n *miniNode) NonNil() bool { return false }
func (n *miniNode) MarkNonNil() { panic(n.no("MarkNonNil")) }
func (n *miniNode) Opt() interface{} { return nil }
func (n *miniNode) SetOpt(interface{}) { panic(n.no("SetOpt")) }


@ -13,11 +13,16 @@ import (
"go/types"
"io/ioutil"
"log"
"reflect"
"sort"
"strings"
"golang.org/x/tools/go/packages"
)
var irPkg *types.Package
var buf bytes.Buffer
func main() {
cfg := &packages.Config{
Mode: packages.NeedSyntax | packages.NeedTypes,
@ -26,116 +31,59 @@ func main() {
if err != nil {
log.Fatal(err)
}
irPkg = pkgs[0].Types
pkg := pkgs[0].Types
scope := pkg.Scope()
lookup := func(name string) *types.Named {
return scope.Lookup(name).(*types.TypeName).Type().(*types.Named)
}
nodeType := lookup("Node")
ntypeType := lookup("Ntype")
nodesType := lookup("Nodes")
slicePtrCaseClauseType := types.NewSlice(types.NewPointer(lookup("CaseClause")))
slicePtrCommClauseType := types.NewSlice(types.NewPointer(lookup("CommClause")))
ptrFieldType := types.NewPointer(lookup("Field"))
slicePtrFieldType := types.NewSlice(ptrFieldType)
ptrIdentType := types.NewPointer(lookup("Ident"))
var buf bytes.Buffer
fmt.Fprintln(&buf, "// Code generated by mknode.go. DO NOT EDIT.")
fmt.Fprintln(&buf)
fmt.Fprintln(&buf, "package ir")
fmt.Fprintln(&buf)
fmt.Fprintln(&buf, `import "fmt"`)
scope := irPkg.Scope()
for _, name := range scope.Names() {
if strings.HasPrefix(name, "mini") {
continue
}
obj, ok := scope.Lookup(name).(*types.TypeName)
if !ok {
continue
}
typName := obj.Name()
typ, ok := obj.Type().(*types.Named).Underlying().(*types.Struct)
if !ok {
continue
}
if strings.HasPrefix(typName, "mini") || !hasMiniNode(typ) {
typ := obj.Type().(*types.Named)
if !implementsNode(types.NewPointer(typ)) {
continue
}
fmt.Fprintf(&buf, "\n")
fmt.Fprintf(&buf, "func (n *%s) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }\n", name)
fmt.Fprintf(&buf, "func (n *%s) Format(s fmt.State, verb rune) { fmtNode(n, s, verb) }\n", name)
switch name {
case "Name":
fmt.Fprintf(&buf, "func (n *%s) copy() Node {panic(\"%s.copy\")}\n", name, name)
default:
fmt.Fprintf(&buf, "func (n *%s) copy() Node { c := *n\n", name)
forNodeFields(typName, typ, func(name string, is func(types.Type) bool) {
switch {
case is(nodesType):
fmt.Fprintf(&buf, "c.%s = c.%s.Copy()\n", name, name)
case is(slicePtrCaseClauseType):
fmt.Fprintf(&buf, "c.%s = copyCases(c.%s)\n", name, name)
case is(slicePtrCommClauseType):
fmt.Fprintf(&buf, "c.%s = copyComms(c.%s)\n", name, name)
case is(ptrFieldType):
fmt.Fprintf(&buf, "if c.%s != nil { c.%s = c.%s.copy() }\n", name, name, name)
case is(slicePtrFieldType):
fmt.Fprintf(&buf, "c.%s = copyFields(c.%s)\n", name, name)
}
})
fmt.Fprintf(&buf, "return &c }\n")
case "Name", "Func":
// Too specialized to automate.
continue
}
fmt.Fprintf(&buf, "func (n *%s) doChildren(do func(Node) error) error { var err error\n", name)
forNodeFields(typName, typ, func(name string, is func(types.Type) bool) {
switch {
case is(ptrIdentType):
fmt.Fprintf(&buf, "if n.%s != nil { err = maybeDo(n.%s, err, do) }\n", name, name)
case is(nodeType), is(ntypeType):
fmt.Fprintf(&buf, "err = maybeDo(n.%s, err, do)\n", name)
case is(nodesType):
fmt.Fprintf(&buf, "err = maybeDoList(n.%s, err, do)\n", name)
case is(slicePtrCaseClauseType):
fmt.Fprintf(&buf, "err = maybeDoCases(n.%s, err, do)\n", name)
case is(slicePtrCommClauseType):
fmt.Fprintf(&buf, "err = maybeDoComms(n.%s, err, do)\n", name)
case is(ptrFieldType):
fmt.Fprintf(&buf, "err = maybeDoField(n.%s, err, do)\n", name)
case is(slicePtrFieldType):
fmt.Fprintf(&buf, "err = maybeDoFields(n.%s, err, do)\n", name)
}
})
fmt.Fprintf(&buf, "return err }\n")
forNodeFields(typ,
"func (n *%[1]s) copy() Node { c := *n\n",
"",
"c.%[1]s = copy%[2]s(c.%[1]s)",
"return &c }\n")
fmt.Fprintf(&buf, "func (n *%s) editChildren(edit func(Node) Node) {\n", name)
forNodeFields(typName, typ, func(name string, is func(types.Type) bool) {
switch {
case is(ptrIdentType):
fmt.Fprintf(&buf, "if n.%s != nil { n.%s = edit(n.%s).(*Ident) }\n", name, name, name)
case is(nodeType):
fmt.Fprintf(&buf, "n.%s = maybeEdit(n.%s, edit)\n", name, name)
case is(ntypeType):
fmt.Fprintf(&buf, "n.%s = toNtype(maybeEdit(n.%s, edit))\n", name, name)
case is(nodesType):
fmt.Fprintf(&buf, "editList(n.%s, edit)\n", name)
case is(slicePtrCaseClauseType):
fmt.Fprintf(&buf, "editCases(n.%s, edit)\n", name)
case is(slicePtrCommClauseType):
fmt.Fprintf(&buf, "editComms(n.%s, edit)\n", name)
case is(ptrFieldType):
fmt.Fprintf(&buf, "editField(n.%s, edit)\n", name)
case is(slicePtrFieldType):
fmt.Fprintf(&buf, "editFields(n.%s, edit)\n", name)
}
})
fmt.Fprintf(&buf, "}\n")
forNodeFields(typ,
"func (n *%[1]s) doChildren(do func(Node) bool) bool {\n",
"if n.%[1]s != nil && do(n.%[1]s) { return true }",
"if do%[2]s(n.%[1]s, do) { return true }",
"return false }\n")
forNodeFields(typ,
"func (n *%[1]s) editChildren(edit func(Node) Node) {\n",
"if n.%[1]s != nil { n.%[1]s = edit(n.%[1]s).(%[2]s) }",
"edit%[2]s(n.%[1]s, edit)",
"}\n")
}
makeHelpers()
out, err := format.Source(buf.Bytes())
if err != nil {
// write out mangled source so we can see the bug.
@ -148,40 +96,131 @@ func main() {
}
}
func forNodeFields(typName string, typ *types.Struct, f func(name string, is func(types.Type) bool)) {
for i, n := 0, typ.NumFields(); i < n; i++ {
v := typ.Field(i)
if v.Embedded() {
if typ, ok := v.Type().Underlying().(*types.Struct); ok {
forNodeFields(typName, typ, f)
continue
}
}
switch typName {
case "Func":
if strings.ToLower(strings.TrimSuffix(v.Name(), "_")) != "body" {
continue
}
case "Name":
continue
}
switch v.Name() {
case "orig":
continue
}
f(v.Name(), func(t types.Type) bool { return types.Identical(t, v.Type()) })
// needHelper maps needed slice helpers from their base name to their
// respective slice-element type.
var needHelper = map[string]string{}
func makeHelpers() {
var names []string
for name := range needHelper {
names = append(names, name)
}
sort.Strings(names)
for _, name := range names {
fmt.Fprintf(&buf, sliceHelperTmpl, name, needHelper[name])
}
}
func hasMiniNode(typ *types.Struct) bool {
for i, n := 0, typ.NumFields(); i < n; i++ {
v := typ.Field(i)
if v.Name() == "miniNode" {
const sliceHelperTmpl = `
func copy%[1]s(list []%[2]s) []%[2]s {
if list == nil {
return nil
}
c := make([]%[2]s, len(list))
copy(c, list)
return c
}
func do%[1]s(list []%[2]s, do func(Node) bool) bool {
for _, x := range list {
if x != nil && do(x) {
return true
}
if v.Embedded() {
if typ, ok := v.Type().Underlying().(*types.Struct); ok && hasMiniNode(typ) {
return true
}
return false
}
func edit%[1]s(list []%[2]s, edit func(Node) Node) {
for i, x := range list {
if x != nil {
list[i] = edit(x).(%[2]s)
}
}
}
`
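To make the template concrete: a node field of type []*CaseClause records needHelper["CaseClauses"] = "*CaseClause", and the template then expands mechanically to the helpers below, replacing the hand-written copyCases/editCases versions deleted later in this CL (reconstructed expansion, not copied from the generated file):

	func copyCaseClauses(list []*CaseClause) []*CaseClause {
		if list == nil {
			return nil
		}
		c := make([]*CaseClause, len(list))
		copy(c, list)
		return c
	}

	func doCaseClauses(list []*CaseClause, do func(Node) bool) bool {
		for _, x := range list {
			if x != nil && do(x) {
				return true
			}
		}
		return false
	}

	func editCaseClauses(list []*CaseClause, edit func(Node) Node) {
		for i, x := range list {
			if x != nil {
				list[i] = edit(x).(*CaseClause)
			}
		}
	}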
func forNodeFields(named *types.Named, prologue, singleTmpl, sliceTmpl, epilogue string) {
fmt.Fprintf(&buf, prologue, named.Obj().Name())
anyField(named.Underlying().(*types.Struct), func(f *types.Var) bool {
if f.Embedded() {
return false
}
name, typ := f.Name(), f.Type()
slice, _ := typ.Underlying().(*types.Slice)
if slice != nil {
typ = slice.Elem()
}
tmpl, what := singleTmpl, types.TypeString(typ, types.RelativeTo(irPkg))
if implementsNode(typ) {
if slice != nil {
helper := strings.TrimPrefix(what, "*") + "s"
needHelper[helper] = what
tmpl, what = sliceTmpl, helper
}
} else if what == "*Field" {
// Special case for *Field.
tmpl = sliceTmpl
if slice != nil {
what = "Fields"
} else {
what = "Field"
}
} else {
return false
}
if tmpl == "" {
return false
}
// Allow template to not use all arguments without
// upsetting fmt.Printf.
s := fmt.Sprintf(tmpl+"\x00 %[1]s %[2]s", name, what)
fmt.Fprintln(&buf, s[:strings.LastIndex(s, "\x00")])
return false
})
fmt.Fprintf(&buf, epilogue)
}
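Putting the three per-type templates together: for a simple node such as BlockStmt (miniStmt plus a List Nodes field), the generated methods come out shaped as follows; the embedded init Nodes field is included because anyField recurses through embedded structs (reconstructed from the templates above, not copied from node_gen.go):

	func (n *BlockStmt) copy() Node {
		c := *n
		c.init = copyNodes(c.init)
		c.List = copyNodes(c.List)
		return &c
	}

	func (n *BlockStmt) doChildren(do func(Node) bool) bool {
		if doNodes(n.init, do) {
			return true
		}
		if doNodes(n.List, do) {
			return true
		}
		return false
	}

	func (n *BlockStmt) editChildren(edit func(Node) Node) {
		editNodes(n.init, edit)
		editNodes(n.List, edit)
	}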
func implementsNode(typ types.Type) bool {
if _, ok := typ.Underlying().(*types.Interface); ok {
// TODO(mdempsky): Check the interface implements Node.
// Worst case, node_gen.go will fail to compile if we're wrong.
return true
}
if ptr, ok := typ.(*types.Pointer); ok {
if str, ok := ptr.Elem().Underlying().(*types.Struct); ok {
return anyField(str, func(f *types.Var) bool {
return f.Embedded() && f.Name() == "miniNode"
})
}
}
return false
}
func anyField(typ *types.Struct, pred func(f *types.Var) bool) bool {
for i, n := 0, typ.NumFields(); i < n; i++ {
if value, ok := reflect.StructTag(typ.Tag(i)).Lookup("mknode"); ok {
if value != "-" {
panic(fmt.Sprintf("unexpected tag value: %q", value))
}
continue
}
f := typ.Field(i)
if pred(f) {
return true
}
if f.Embedded() {
if typ, ok := f.Type().Underlying().(*types.Struct); ok {
if anyField(typ, pred) {
return true
}
}
}
}


@ -7,8 +7,10 @@ package ir
import (
"cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/objabi"
"cmd/internal/src"
"fmt"
"go/constant"
)
@ -34,16 +36,16 @@ func (*Ident) CanBeNtype() {}
// Name holds Node fields used only by named nodes (ONAME, OTYPE, some OLITERAL).
type Name struct {
miniExpr
BuiltinOp Op // uint8
Class_ Class // uint8
flags bitset16
BuiltinOp Op // uint8
Class Class // uint8
pragma PragmaFlag // int16
flags bitset16
sym *types.Sym
Func *Func
Offset_ int64
val constant.Value
orig Node
Embed *[]Embed // list of embedded files, for ONAME var
Opt interface{} // for use by escape analysis
Embed *[]Embed // list of embedded files, for ONAME var
PkgName *PkgName // real package for import . names
// For a local variable (not param) or extern, the initializing assignment (OAS or OAS2).
@ -141,11 +143,9 @@ type Name struct {
func (n *Name) isExpr() {}
// CloneName makes a cloned copy of the name.
// It's not ir.Copy(n) because in general that operation is a mistake on names,
// which uniquely identify variables.
// Callers must use n.CloneName to make clear they intend to create a separate name.
func (n *Name) CloneName() *Name { c := *n; return &c }
func (n *Name) copy() Node { panic(n.no("copy")) }
func (n *Name) doChildren(do func(Node) bool) bool { return false }
func (n *Name) editChildren(edit func(Node) Node) {}
// TypeDefn returns the type definition for a named OTYPE.
// That is, given "type T Defn", it returns Defn.
@ -213,7 +213,6 @@ func newNameAt(pos src.XPos, op Op, sym *types.Sym) *Name {
n := new(Name)
n.op = op
n.pos = pos
n.orig = n
n.sym = sym
return n
}
@ -223,8 +222,6 @@ func (n *Name) Sym() *types.Sym { return n.sym }
func (n *Name) SetSym(x *types.Sym) { n.sym = x }
func (n *Name) SubOp() Op { return n.BuiltinOp }
func (n *Name) SetSubOp(x Op) { n.BuiltinOp = x }
func (n *Name) Class() Class { return n.Class_ }
func (n *Name) SetClass(x Class) { n.Class_ = x }
func (n *Name) SetFunc(x *Func) { n.Func = x }
func (n *Name) Offset() int64 { panic("Name.Offset") }
func (n *Name) SetOffset(x int64) {
@ -236,6 +233,15 @@ func (n *Name) FrameOffset() int64 { return n.Offset_ }
func (n *Name) SetFrameOffset(x int64) { n.Offset_ = x }
func (n *Name) Iota() int64 { return n.Offset_ }
func (n *Name) SetIota(x int64) { n.Offset_ = x }
func (n *Name) Walkdef() uint8 { return n.bits.get2(miniWalkdefShift) }
func (n *Name) SetWalkdef(x uint8) {
if x > 3 {
panic(fmt.Sprintf("cannot SetWalkdef %d", x))
}
n.bits.set2(miniWalkdefShift, x)
}
func (n *Name) Linksym() *obj.LSym { return n.sym.Linksym() }
func (*Name) CanBeNtype() {}
func (*Name) CanBeAnSSASym() {}
@ -273,7 +279,6 @@ const (
func (n *Name) Captured() bool { return n.flags&nameCaptured != 0 }
func (n *Name) Readonly() bool { return n.flags&nameReadonly != 0 }
func (n *Name) Byval() bool { return n.flags&nameByval != 0 }
func (n *Name) Needzero() bool { return n.flags&nameNeedzero != 0 }
func (n *Name) AutoTemp() bool { return n.flags&nameAutoTemp != 0 }
func (n *Name) Used() bool { return n.flags&nameUsed != 0 }
@ -288,7 +293,6 @@ func (n *Name) LibfuzzerExtraCounter() bool { return n.flags&nameLibfuzzerExtraC
func (n *Name) SetCaptured(b bool) { n.flags.set(nameCaptured, b) }
func (n *Name) setReadonly(b bool) { n.flags.set(nameReadonly, b) }
func (n *Name) SetByval(b bool) { n.flags.set(nameByval, b) }
func (n *Name) SetNeedzero(b bool) { n.flags.set(nameNeedzero, b) }
func (n *Name) SetAutoTemp(b bool) { n.flags.set(nameAutoTemp, b) }
func (n *Name) SetUsed(b bool) { n.flags.set(nameUsed, b) }
@ -306,11 +310,11 @@ func (n *Name) MarkReadonly() {
if n.Op() != ONAME {
base.Fatalf("Node.MarkReadonly %v", n.Op())
}
n.Name().setReadonly(true)
n.setReadonly(true)
// Mark the linksym as readonly immediately
// so that the SSA backend can use this information.
// It will be overridden later during dumpglobls.
n.Sym().Linksym().Type = objabi.SRODATA
n.Linksym().Type = objabi.SRODATA
}
// Val returns the constant.Value for the node.
@ -321,8 +325,7 @@ func (n *Name) Val() constant.Value {
return n.val
}
// SetVal sets the constant.Value for the node,
// which must not have been used with SetOpt.
// SetVal sets the constant.Value for the node.
func (n *Name) SetVal(v constant.Value) {
if n.op != OLITERAL {
panic(n.no("SetVal"))
@ -331,6 +334,33 @@ func (n *Name) SetVal(v constant.Value) {
n.val = v
}
// Canonical returns the logical declaration that n represents. If n
// is a closure variable, then Canonical returns the original Name as
// it appears in the function that immediately contains the
// declaration. Otherwise, Canonical simply returns n itself.
func (n *Name) Canonical() *Name {
if n.IsClosureVar() {
n = n.Defn.(*Name)
if n.IsClosureVar() {
base.Fatalf("recursive closure variable: %v", n)
}
}
return n
}
func (n *Name) SetByval(b bool) {
if n.Canonical() != n {
base.Fatalf("SetByval called on non-canonical variable: %v", n)
}
n.flags.set(nameByval, b)
}
func (n *Name) Byval() bool {
// We require byval to be set on the canonical variable, but we
// allow it to be accessed from any instance.
return n.Canonical().flags&nameByval != 0
}
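In user-level terms: each function literal holds its own *Name for a captured variable, linked to the enclosing declaration through Defn, and Canonical follows that single link back (ordinary Go, for illustration):

	func outer() func() int {
		v := 0 // the canonical Name; IsClosureVar reports false here
		return func() int {
			// Inside the literal, v is a distinct Name with IsClosureVar set and
			// Defn pointing at outer's v, so Canonical() returns outer's v.
			v++
			return v
		}
	}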
// SameSource reports whether two nodes refer to the same source
// element.
//
@ -373,7 +403,7 @@ func DeclaredBy(x, stmt Node) bool {
// called declaration contexts.
type Class uint8
//go:generate stringer -type=Class
//go:generate stringer -type=Class name.go
const (
Pxxx Class = iota // no class; used during ssa conversion to indicate pseudo-variables
PEXTERN // global variables
@ -418,7 +448,7 @@ func IsParamStackCopy(n Node) bool {
return false
}
name := n.(*Name)
return (name.Class_ == PPARAM || name.Class_ == PPARAMOUT) && name.Heapaddr != nil
return (name.Class == PPARAM || name.Class == PPARAMOUT) && name.Heapaddr != nil
}
// IsParamHeapCopy reports whether this is the on-heap copy of
@ -428,7 +458,7 @@ func IsParamHeapCopy(n Node) bool {
return false
}
name := n.(*Name)
return name.Class_ == PAUTOHEAP && name.Name().Stackcopy != nil
return name.Class == PAUTOHEAP && name.Stackcopy != nil
}
var RegFP *Name


@ -28,14 +28,12 @@ type Node interface {
// For making copies. For Copy and SepCopy.
copy() Node
doChildren(func(Node) error) error
doChildren(func(Node) bool) bool
editChildren(func(Node) Node)
// Abstract graph structure, for generic traversals.
Op() Op
Init() Nodes
PtrInit() *Nodes
SetInit(x Nodes)
// Fields specific to certain Ops only.
Type() *types.Type
@ -48,10 +46,6 @@ type Node interface {
// Storage for analysis passes.
Esc() uint16
SetEsc(x uint16)
Walkdef() uint8
SetWalkdef(x uint8)
Opt() interface{}
SetOpt(x interface{})
Diag() bool
SetDiag(x bool)
Typecheck() uint8
@ -92,7 +86,21 @@ func MayBeShared(n Node) bool {
return false
}
//go:generate stringer -type=Op -trimprefix=O
type InitNode interface {
Node
PtrInit() *Nodes
SetInit(x Nodes)
}
func TakeInit(n Node) Nodes {
init := n.Init()
if len(init) != 0 {
n.(InitNode).SetInit(nil)
}
return init
}
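TakeInit lets callers detach a node's init list without first checking whether the node can carry one. A minimal sketch, assuming package ir (flatten is a made-up helper for illustration):

	// flatten splices stmt's init statements into out ahead of stmt itself.
	func flatten(stmt Node, out *Nodes) {
		out.Append(TakeInit(stmt)...) // a no-op for nodes with the immutable empty init
		out.Append(stmt)
	}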
//go:generate stringer -type=Op -trimprefix=O node.go
type Op uint8
@ -220,10 +228,10 @@ const (
OPAREN // (Left)
OSEND // Left <- Right
OSLICE // Left[List[0] : List[1]] (Left is untypechecked or slice)
OSLICEARR // Left[List[0] : List[1]] (Left is array)
OSLICEARR // Left[List[0] : List[1]] (Left is pointer to array)
OSLICESTR // Left[List[0] : List[1]] (Left is string)
OSLICE3 // Left[List[0] : List[1] : List[2]] (Left is untypedchecked or slice)
OSLICE3ARR // Left[List[0] : List[1] : List[2]] (Left is array)
OSLICE3ARR // Left[List[0] : List[1] : List[2]] (Left is pointer to array)
OSLICEHEADER // sliceheader{Left, List[0], List[1]} (Left is unsafe.Pointer, List[0] is length, List[1] is capacity)
ORECOVER // recover()
ORECV // <-Left
@ -313,35 +321,11 @@ const (
// a slice to save space.
type Nodes []Node
// immutableEmptyNodes is an immutable, empty Nodes list.
// The methods that would modify it panic instead.
var immutableEmptyNodes = Nodes{}
func (n *Nodes) mutate() {
if n == &immutableEmptyNodes {
panic("immutable Nodes.Set")
}
}
// Set sets n to a slice.
// This takes ownership of the slice.
func (n *Nodes) Set(s []Node) {
if n == &immutableEmptyNodes {
if len(s) == 0 {
// Allow immutableEmptyNodes.Set(nil) (a no-op).
return
}
n.mutate()
}
*n = s
}
// Append appends entries to Nodes.
func (n *Nodes) Append(a ...Node) {
if len(a) == 0 {
return
}
n.mutate()
*n = append(*n, a...)
}
@ -351,7 +335,6 @@ func (n *Nodes) Prepend(a ...Node) {
if len(a) == 0 {
return
}
n.mutate()
*n = append(a, *n...)
}
@ -546,15 +529,16 @@ func SetPos(n Node) src.XPos {
// The result of InitExpr MUST be assigned back to n, e.g.
// n.Left = InitExpr(init, n.Left)
func InitExpr(init []Node, n Node) Node {
func InitExpr(init []Node, expr Node) Node {
if len(init) == 0 {
return n
return expr
}
if MayBeShared(n) {
n, ok := expr.(InitNode)
if !ok || MayBeShared(n) {
// Introduce OCONVNOP to hold init list.
old := n
n = NewConvExpr(base.Pos, OCONVNOP, nil, old)
n.SetType(old.Type())
n = NewConvExpr(base.Pos, OCONVNOP, nil, expr)
n.SetType(expr.Type())
n.SetTypecheck(1)
}
@ -584,7 +568,10 @@ func OuterValue(n Node) Node {
continue
case OINDEX:
nn := nn.(*IndexExpr)
if nn.X.Type() != nil && nn.X.Type().IsArray() {
if nn.X.Type() == nil {
base.Fatalf("OuterValue needs type for %v", nn.X)
}
if nn.X.Type().IsArray() {
n = nn.X
continue
}
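OuterValue peels selectors and array indexes off an expression to reach the variable ultimately being accessed, which is exactly what the new OADDR case in reassigned relies on. For illustration (ordinary Go):

	var s struct{ a [4]int }
	_ = &s.a[1] // OuterValue walks the OINDEX and ODOT back to s, so s counts as address-taken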

File diff suppressed because it is too large.

@ -1,4 +1,4 @@
// Code generated by "stringer -type=Op -trimprefix=O"; DO NOT EDIT.
// Code generated by "stringer -type=Op -trimprefix=O node.go"; DO NOT EDIT.
package ir


@ -87,7 +87,7 @@ func (v *bottomUpVisitor) visit(n *Func) uint32 {
Visit(n, func(n Node) {
switch n.Op() {
case ONAME:
if n := n.(*Name); n.Class_ == PFUNC {
if n := n.(*Name); n.Class == PFUNC {
do(n.Defn)
}
case ODOTMETH, OCALLPART, OMETHEXPR:


@ -20,8 +20,8 @@ func TestSizeof(t *testing.T) {
_32bit uintptr // size on 32bit platforms
_64bit uintptr // size on 64bit platforms
}{
{Func{}, 200, 352},
{Name{}, 132, 232},
{Func{}, 184, 320},
{Name{}, 124, 216},
}
for _, tt := range tests {


@ -13,10 +13,10 @@ import (
// A Decl is a declaration of a const, type, or var. (A declared func is a Func.)
type Decl struct {
miniNode
X Node // the thing being declared
X *Name // the thing being declared
}
func NewDecl(pos src.XPos, op Op, x Node) *Decl {
func NewDecl(pos src.XPos, op Op, x *Name) *Decl {
n := &Decl{X: x}
n.pos = pos
switch op {
@ -70,8 +70,8 @@ func NewAssignListStmt(pos src.XPos, op Op, lhs, rhs []Node) *AssignListStmt {
n := &AssignListStmt{}
n.pos = pos
n.SetOp(op)
n.Lhs.Set(lhs)
n.Rhs.Set(rhs)
n.Lhs = lhs
n.Rhs = rhs
return n
}
@ -112,7 +112,6 @@ func (n *AssignStmt) SetOp(op Op) {
// An AssignOpStmt is an AsOp= assignment statement: X AsOp= Y.
type AssignOpStmt struct {
miniStmt
typ *types.Type
X Node
AsOp Op // OADD etc
Y Node
@ -126,9 +125,6 @@ func NewAssignOpStmt(pos src.XPos, asOp Op, x, y Node) *AssignOpStmt {
return n
}
func (n *AssignOpStmt) Type() *types.Type { return n.typ }
func (n *AssignOpStmt) SetType(x *types.Type) { n.typ = x }
// A BlockStmt is a block: { List }.
type BlockStmt struct {
miniStmt
@ -145,7 +141,7 @@ func NewBlockStmt(pos src.XPos, list []Node) *BlockStmt {
}
}
n.op = OBLOCK
n.List.Set(list)
n.List = list
return n
}
@ -176,7 +172,7 @@ func (n *BranchStmt) Sym() *types.Sym { return n.Label }
// A CaseClause is a case statement in a switch or select: case List: Body.
type CaseClause struct {
miniStmt
Var Node // declared variable for this case in type switch
Var *Name // declared variable for this case in type switch
List Nodes // list of expressions for switch, early select
Body Nodes
}
@ -188,36 +184,6 @@ func NewCaseStmt(pos src.XPos, list, body []Node) *CaseClause {
return n
}
// TODO(mdempsky): Generate these with mknode.go.
func copyCases(list []*CaseClause) []*CaseClause {
if list == nil {
return nil
}
c := make([]*CaseClause, len(list))
copy(c, list)
return c
}
func maybeDoCases(list []*CaseClause, err error, do func(Node) error) error {
if err != nil {
return err
}
for _, x := range list {
if x != nil {
if err := do(x); err != nil {
return err
}
}
}
return nil
}
func editCases(list []*CaseClause, edit func(Node) Node) {
for i, x := range list {
if x != nil {
list[i] = edit(x).(*CaseClause)
}
}
}
type CommClause struct {
miniStmt
Comm Node // communication case
@ -231,36 +197,6 @@ func NewCommStmt(pos src.XPos, comm Node, body []Node) *CommClause {
return n
}
// TODO(mdempsky): Generate these with mknode.go.
func copyComms(list []*CommClause) []*CommClause {
if list == nil {
return nil
}
c := make([]*CommClause, len(list))
copy(c, list)
return c
}
func maybeDoComms(list []*CommClause, err error, do func(Node) error) error {
if err != nil {
return err
}
for _, x := range list {
if x != nil {
if err := do(x); err != nil {
return err
}
}
}
return nil
}
func editComms(list []*CommClause, edit func(Node) Node) {
for i, x := range list {
if x != nil {
list[i] = edit(x).(*CommClause)
}
}
}
// A ForStmt is a non-range for loop: for Init; Cond; Post { Body }
// Op can be OFOR or OFORUNTIL (!Cond).
type ForStmt struct {
@ -280,7 +216,7 @@ func NewForStmt(pos src.XPos, init Node, cond, post Node, body []Node) *ForStmt
if init != nil {
n.init = []Node{init}
}
n.Body.Set(body)
n.Body = body
return n
}
@ -326,8 +262,8 @@ func NewIfStmt(pos src.XPos, cond Node, body, els []Node) *IfStmt {
n := &IfStmt{Cond: cond}
n.pos = pos
n.op = OIF
n.Body.Set(body)
n.Else.Set(els)
n.Body = body
n.Else = els
return n
}
@ -379,15 +315,15 @@ func NewRangeStmt(pos src.XPos, key, value, x Node, body []Node) *RangeStmt {
n := &RangeStmt{X: x, Key: key, Value: value}
n.pos = pos
n.op = ORANGE
n.Body.Set(body)
n.Body = body
return n
}
// A ReturnStmt is a return statement.
type ReturnStmt struct {
miniStmt
orig Node // for typecheckargs rewrite
Results Nodes // return list
origNode // for typecheckargs rewrite
Results Nodes // return list
}
func NewReturnStmt(pos src.XPos, results []Node) *ReturnStmt {
@ -395,13 +331,10 @@ func NewReturnStmt(pos src.XPos, results []Node) *ReturnStmt {
n.pos = pos
n.op = ORETURN
n.orig = n
n.Results.Set(results)
n.Results = results
return n
}
func (n *ReturnStmt) Orig() Node { return n.orig }
func (n *ReturnStmt) SetOrig(x Node) { n.orig = x }
// A SelectStmt is a block: { Cases }.
type SelectStmt struct {
miniStmt


@ -46,7 +46,7 @@ func (n *miniType) Type() *types.Type { return n.typ }
// setOTYPE also records t.Nod = self if t.Nod is not already set.
// (Some types are shared by multiple OTYPE nodes, so only
// the first such node is used as t.Nod.)
func (n *miniType) setOTYPE(t *types.Type, self Node) {
func (n *miniType) setOTYPE(t *types.Type, self Ntype) {
if n.typ != nil {
panic(n.op.String() + " SetType: type already set")
}
@ -61,11 +61,11 @@ func (n *miniType) Implicit() bool { return false } // for Format OTYPE
// A ChanType represents a chan Elem syntax with the direction Dir.
type ChanType struct {
miniType
Elem Node
Elem Ntype
Dir types.ChanDir
}
func NewChanType(pos src.XPos, elem Node, dir types.ChanDir) *ChanType {
func NewChanType(pos src.XPos, elem Ntype, dir types.ChanDir) *ChanType {
n := &ChanType{Elem: elem, Dir: dir}
n.op = OTCHAN
n.pos = pos
@ -80,11 +80,11 @@ func (n *ChanType) SetOTYPE(t *types.Type) {
// A MapType represents a map[Key]Value type syntax.
type MapType struct {
miniType
Key Node
Elem Node
Key Ntype
Elem Ntype
}
func NewMapType(pos src.XPos, key, elem Node) *MapType {
func NewMapType(pos src.XPos, key, elem Ntype) *MapType {
n := &MapType{Key: key, Elem: elem}
n.op = OTMAP
n.pos = pos
@ -185,45 +185,28 @@ func (f *Field) String() string {
return typ
}
func (f *Field) copy() *Field {
// TODO(mdempsky): Make Field a Node again so these can be generated?
// Fields are Nodes in go/ast and cmd/compile/internal/syntax.
func copyField(f *Field) *Field {
if f == nil {
return nil
}
c := *f
return &c
}
func copyFields(list []*Field) []*Field {
out := make([]*Field, len(list))
copy(out, list)
for i, f := range out {
out[i] = f.copy()
func doField(f *Field, do func(Node) bool) bool {
if f == nil {
return false
}
return out
if f.Decl != nil && do(f.Decl) {
return true
}
if f.Ntype != nil && do(f.Ntype) {
return true
}
return false
}
func maybeDoField(f *Field, err error, do func(Node) error) error {
if f != nil {
if err == nil && f.Decl != nil {
err = do(f.Decl)
}
if err == nil && f.Ntype != nil {
err = do(f.Ntype)
}
}
return err
}
func maybeDoFields(list []*Field, err error, do func(Node) error) error {
if err != nil {
return err
}
for _, f := range list {
err = maybeDoField(f, err, do)
if err != nil {
return err
}
}
return err
}
func editField(f *Field, edit func(Node) Node) {
if f == nil {
return
@ -232,10 +215,25 @@ func editField(f *Field, edit func(Node) Node) {
f.Decl = edit(f.Decl).(*Name)
}
if f.Ntype != nil {
f.Ntype = toNtype(edit(f.Ntype))
f.Ntype = edit(f.Ntype).(Ntype)
}
}
func copyFields(list []*Field) []*Field {
out := make([]*Field, len(list))
for i, f := range list {
out[i] = copyField(f)
}
return out
}
func doFields(list []*Field, do func(Node) bool) bool {
for _, x := range list {
if doField(x, do) {
return true
}
}
return false
}
func editFields(list []*Field, edit func(Node) Node) {
for _, f := range list {
editField(f, edit)
@ -246,11 +244,11 @@ func editFields(list []*Field, edit func(Node) Node) {
// If DDD is true, it's the ...Elem at the end of a function list.
type SliceType struct {
miniType
Elem Node
Elem Ntype
DDD bool
}
func NewSliceType(pos src.XPos, elem Node) *SliceType {
func NewSliceType(pos src.XPos, elem Ntype) *SliceType {
n := &SliceType{Elem: elem}
n.op = OTSLICE
n.pos = pos
@ -267,11 +265,11 @@ func (n *SliceType) SetOTYPE(t *types.Type) {
type ArrayType struct {
miniType
Len Node
Elem Node
Elem Ntype
}
func NewArrayType(pos src.XPos, size Node, elem Node) *ArrayType {
n := &ArrayType{Len: size, Elem: elem}
func NewArrayType(pos src.XPos, len Node, elem Ntype) *ArrayType {
n := &ArrayType{Len: len, Elem: elem}
n.op = OTARRAY
n.pos = pos
return n


@ -4,23 +4,18 @@
// IR visitors for walking the IR tree.
//
// The lowest level helpers are DoChildren and EditChildren,
// which nodes help implement (TODO(rsc): eventually) and
// provide control over whether and when recursion happens
// during the walk of the IR.
// The lowest level helpers are DoChildren and EditChildren, which
// nodes help implement and provide control over whether and when
// recursion happens during the walk of the IR.
//
// Although these are both useful directly, two simpler patterns
// are fairly common and also provided: Inspect and Scan.
// are fairly common and also provided: Visit and Any.
package ir
import (
"errors"
)
// DoChildren calls do(x) on each of n's non-nil child nodes x.
// If any call returns a non-nil error, DoChildren stops and returns that error.
// Otherwise, DoChildren returns nil.
// If any call returns true, DoChildren stops and returns true.
// Otherwise, DoChildren returns false.
//
// Note that DoChildren(n, do) only calls do(x) for n's immediate children.
// If x's children should be processed, then do(x) must call DoChildren(x, do).
@ -28,32 +23,32 @@ import (
// DoChildren allows constructing general traversals of the IR graph
// that can stop early if needed. The most general usage is:
//
// var do func(ir.Node) error
// do = func(x ir.Node) error {
// var do func(ir.Node) bool
// do = func(x ir.Node) bool {
// ... processing BEFORE visiting children ...
// if ... should visit children ... {
// ir.DoChildren(x, do)
// ... processing AFTER visiting children ...
// }
// if ... should stop parent DoChildren call from visiting siblings ... {
// return non-nil error
// return true
// }
// return nil
// return false
// }
// do(root)
//
// Since DoChildren does not generate any errors itself, if the do function
// never wants to stop the traversal, it can assume that DoChildren itself
// will always return nil, simplifying to:
// Since DoChildren does not return true itself, if the do function
// never wants to stop the traversal, it can assume that DoChildren
// itself will always return false, simplifying to:
//
// var do func(ir.Node) error
// do = func(x ir.Node) error {
// var do func(ir.Node) bool
// do = func(x ir.Node) bool {
// ... processing BEFORE visiting children ...
// if ... should visit children ... {
// ir.DoChildren(x, do)
// }
// ... processing AFTER visiting children ...
// return nil
// return false
// }
// do(root)
//
@ -61,14 +56,15 @@ import (
// only processing before visiting children and never stopping:
//
// func Visit(n ir.Node, visit func(ir.Node)) {
// var do func(ir.Node) error
// do = func(x ir.Node) error {
// if n == nil {
// return
// }
// var do func(ir.Node) bool
// do = func(x ir.Node) bool {
// visit(x)
// return ir.DoChildren(x, do)
// }
// if n != nil {
// visit(n)
// }
// do(n)
// }
//
// The Any function illustrates a different simplification of the pattern,
@ -76,57 +72,40 @@ import (
// a node x for which cond(x) returns true, at which point the entire
// traversal stops and returns true.
//
// func Any(n ir.Node, find cond(ir.Node)) bool {
// stop := errors.New("stop")
// var do func(ir.Node) error
// do = func(x ir.Node) error {
// if cond(x) {
// return stop
// }
// return ir.DoChildren(x, do)
// func Any(n ir.Node, cond func(ir.Node) bool) bool {
// if n == nil {
// return false
// }
// return do(n) == stop
// var do func(ir.Node) bool
// do = func(x ir.Node) bool {
// return cond(x) || ir.DoChildren(x, do)
// }
// return do(n)
// }
//
// Visit and Any are presented above as examples of how to use
// DoChildren effectively, but of course, usage that fits within the
// simplifications captured by Visit or Any will be best served
// by directly calling the ones provided by this package.
func DoChildren(n Node, do func(Node) error) error {
func DoChildren(n Node, do func(Node) bool) bool {
if n == nil {
return nil
return false
}
return n.doChildren(do)
}
// DoList calls f on each non-nil node x in the list, in list order.
// If any call returns a non-nil error, DoList stops and returns that error.
// Otherwise DoList returns nil.
//
// Note that DoList only calls do on the nodes in the list, not their children.
// If x's children should be processed, do(x) must call DoChildren(x, do) itself.
func DoList(list Nodes, do func(Node) error) error {
for _, x := range list {
if x != nil {
if err := do(x); err != nil {
return err
}
}
}
return nil
}
// Visit visits each non-nil node x in the IR tree rooted at n
// in a depth-first preorder traversal, calling visit on each node visited.
func Visit(n Node, visit func(Node)) {
var do func(Node) error
do = func(x Node) error {
if n == nil {
return
}
var do func(Node) bool
do = func(x Node) bool {
visit(x)
return DoChildren(x, do)
}
if n != nil {
do(n)
}
do(n)
}
// VisitList calls Visit(x, visit) for each node x in the list.
@ -136,8 +115,6 @@ func VisitList(list Nodes, visit func(Node)) {
}
}
var stop = errors.New("stop")
// Any looks for a non-nil node x in the IR tree rooted at n
// for which cond(x) returns true.
// Any considers nodes in a depth-first, preorder traversal.
@ -148,14 +125,11 @@ func Any(n Node, cond func(Node) bool) bool {
if n == nil {
return false
}
var do func(Node) error
do = func(x Node) error {
if cond(x) {
return stop
}
return DoChildren(x, do)
var do func(Node) bool
do = func(x Node) bool {
return cond(x) || DoChildren(x, do)
}
return do(n) == stop
return do(n)
}
// AnyList calls Any(x, cond) for each node x in the list, in order.
@ -210,16 +184,3 @@ func EditChildren(n Node, edit func(Node) Node) {
}
n.editChildren(edit)
}
// editList calls edit on each non-nil node x in the list,
// saving the result of edit back into the list.
//
// Note that editList only calls edit on the nodes in the list, not their children.
// If x's children should be processed, edit(x) must call EditChildren(x, edit) itself.
func editList(list Nodes, edit func(Node) Node) {
for i, x := range list {
if x != nil {
list[i] = edit(x)
}
}
}


@ -181,7 +181,7 @@ type progeffectscache struct {
// nor do we care about empty structs (handled by the pointer check),
// nor do we care about the fake PAUTOHEAP variables.
func ShouldTrack(n *ir.Name) bool {
return (n.Class_ == ir.PAUTO || n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT) && n.Type().HasPointers()
return (n.Class == ir.PAUTO || n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT) && n.Type().HasPointers()
}
// getvariables returns the list of on-stack variables that we need to track
@ -208,7 +208,7 @@ func (lv *liveness) initcache() {
lv.cache.initialized = true
for i, node := range lv.vars {
switch node.Class_ {
switch node.Class {
case ir.PPARAM:
// A return instruction with a p.to is a tail return, which brings
// the stack pointer back up (if it ever went down) and then jumps
@ -255,7 +255,7 @@ func (lv *liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
// variable" ICEs (issue 19632).
switch v.Op {
case ssa.OpVarDef, ssa.OpVarKill, ssa.OpVarLive, ssa.OpKeepAlive:
if !n.Name().Used() {
if !n.Used() {
return -1, 0
}
}
@ -386,7 +386,7 @@ func (lv *liveness) pointerMap(liveout bitvec.BitVec, vars []*ir.Name, args, loc
break
}
node := vars[i]
switch node.Class_ {
switch node.Class {
case ir.PAUTO:
typebits.Set(node.Type(), node.FrameOffset()+lv.stkptrsize, locals)
@ -687,12 +687,12 @@ func (lv *liveness) epilogue() {
// don't need to keep the stack copy live?
if lv.fn.HasDefer() {
for i, n := range lv.vars {
if n.Class_ == ir.PPARAMOUT {
if n.Name().IsOutputParamHeapAddr() {
if n.Class == ir.PPARAMOUT {
if n.IsOutputParamHeapAddr() {
// Just to be paranoid. Heap addresses are PAUTOs.
base.Fatalf("variable %v both output param and heap output param", n)
}
if n.Name().Heapaddr != nil {
if n.Heapaddr != nil {
// If this variable moved to the heap, then
// its stack copy is not live.
continue
@ -700,21 +700,21 @@ func (lv *liveness) epilogue() {
// Note: zeroing is handled by zeroResults in walk.go.
livedefer.Set(int32(i))
}
if n.Name().IsOutputParamHeapAddr() {
if n.IsOutputParamHeapAddr() {
// This variable will be overwritten early in the function
// prologue (from the result of a mallocgc) but we need to
// zero it in case that malloc causes a stack scan.
n.Name().SetNeedzero(true)
n.SetNeedzero(true)
livedefer.Set(int32(i))
}
if n.Name().OpenDeferSlot() {
if n.OpenDeferSlot() {
// Open-coded defer args slots must be live
// everywhere in a function, since a panic can
// occur (almost) anywhere. Because it is live
// everywhere, it must be zeroed on entry.
livedefer.Set(int32(i))
// It was already marked as Needzero when created.
if !n.Name().Needzero() {
if !n.Needzero() {
base.Fatalf("all pointer-containing defer arg slots should have Needzero set")
}
}
@ -785,7 +785,7 @@ func (lv *liveness) epilogue() {
if !liveout.Get(int32(i)) {
continue
}
if n.Class_ == ir.PPARAM {
if n.Class == ir.PPARAM {
continue // ok
}
base.Fatalf("bad live variable at entry of %v: %L", lv.fn.Nname, n)
@ -818,7 +818,7 @@ func (lv *liveness) epilogue() {
// the only things that can possibly be live are the
// input parameters.
for j, n := range lv.vars {
if n.Class_ != ir.PPARAM && lv.stackMaps[0].Get(int32(j)) {
if n.Class != ir.PPARAM && lv.stackMaps[0].Get(int32(j)) {
lv.f.Fatalf("%v %L recorded as live on entry", lv.fn.Nname, n)
}
}
@ -1063,7 +1063,7 @@ func (lv *liveness) emit() (argsSym, liveSym *obj.LSym) {
// (Nodes without pointers aren't in lv.vars; see livenessShouldTrack.)
var maxArgNode *ir.Name
for _, n := range lv.vars {
switch n.Class_ {
switch n.Class {
case ir.PPARAM, ir.PPARAMOUT:
if maxArgNode == nil || n.FrameOffset() > maxArgNode.FrameOffset() {
maxArgNode = n


@ -2,8 +2,6 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:generate go run mkbuiltin.go
package noder
import (


@ -28,6 +28,26 @@ import (
"cmd/internal/src"
)
func LoadPackage(filenames []string) {
base.Timer.Start("fe", "parse")
lines := ParseFiles(filenames)
base.Timer.Stop()
base.Timer.AddEvent(int64(lines), "lines")
if base.Flag.G != 0 && base.Flag.G < 3 {
// can only parse generic code for now
base.ExitIfErrors()
return
}
// Typecheck.
Package()
// With all user code typechecked, it's now safe to verify unused dot imports.
CheckDotImports()
base.ExitIfErrors()
}
// ParseFiles concurrently parses files into *syntax.File structures.
// Each declaration in every *syntax.File is converted to a syntax tree
// and its root represented by *Node is appended to Target.Decls.
@ -170,6 +190,69 @@ func ParseFiles(filenames []string) (lines uint) {
return
}
func Package() {
typecheck.DeclareUniverse()
typecheck.TypecheckAllowed = true
// Process top-level declarations in phases.
// Phase 1: const, type, and names and types of funcs.
// This will gather all the information about types
// and methods but doesn't depend on any of it.
//
// We also defer type alias declarations until phase 2
// to avoid cycles like #18640.
// TODO(gri) Remove this again once we have a fix for #25838.
// Don't use range--typecheck can add closures to Target.Decls.
base.Timer.Start("fe", "typecheck", "top1")
for i := 0; i < len(typecheck.Target.Decls); i++ {
n := typecheck.Target.Decls[i]
if op := n.Op(); op != ir.ODCL && op != ir.OAS && op != ir.OAS2 && (op != ir.ODCLTYPE || !n.(*ir.Decl).X.Alias()) {
typecheck.Target.Decls[i] = typecheck.Stmt(n)
}
}
// Phase 2: Variable assignments.
// To check interface assignments, depends on phase 1.
// Don't use range--typecheck can add closures to Target.Decls.
base.Timer.Start("fe", "typecheck", "top2")
for i := 0; i < len(typecheck.Target.Decls); i++ {
n := typecheck.Target.Decls[i]
if op := n.Op(); op == ir.ODCL || op == ir.OAS || op == ir.OAS2 || op == ir.ODCLTYPE && n.(*ir.Decl).X.Alias() {
typecheck.Target.Decls[i] = typecheck.Stmt(n)
}
}
// Phase 3: Type check function bodies.
// Don't use range--typecheck can add closures to Target.Decls.
base.Timer.Start("fe", "typecheck", "func")
var fcount int64
for i := 0; i < len(typecheck.Target.Decls); i++ {
n := typecheck.Target.Decls[i]
if n.Op() == ir.ODCLFUNC {
typecheck.FuncBody(n.(*ir.Func))
fcount++
}
}
// Phase 4: Check external declarations.
// TODO(mdempsky): This should be handled when type checking their
// corresponding ODCL nodes.
base.Timer.Start("fe", "typecheck", "externdcls")
for i, n := range typecheck.Target.Externs {
if n.Op() == ir.ONAME {
typecheck.Target.Externs[i] = typecheck.Expr(typecheck.Target.Externs[i])
}
}
// Phase 5: With all user code type-checked, it's now safe to verify map keys.
typecheck.CheckMapKeys()
}
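The indexed loops in each phase are deliberate: a range loop snapshots the slice header, so closures appended to Target.Decls mid-phase would never be visited. A minimal sketch of the difference, in plain Go with nothing compiler-specific assumed:

    package main

    import "fmt"

    func main() {
        work := []string{"a", "b"}

        // range snapshots the slice header: "c" is appended during
        // iteration but never visited.
        for _, w := range work {
            if w == "a" {
                work = append(work, "c")
            }
        }

        // An indexed loop re-reads len(work) every iteration, so it also
        // processes items appended along the way; this is the property the
        // typechecking phases rely on.
        for i := 0; i < len(work); i++ {
            fmt.Println(work[i]) // a, b, c
        }
    }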
// Temporary import helper to get type2-based type-checking going.
type gcimports struct {
packages map[string]*types2.Package
@ -301,7 +384,7 @@ func (p *noder) funcBody(fn *ir.Func, block *syntax.BlockStmt) {
if body == nil {
body = []ir.Node{ir.NewBlockStmt(base.Pos, nil)}
}
fn.Body.Set(body)
fn.Body = body
base.Pos = p.makeXPos(block.Rbrace)
fn.Endlineno = base.Pos
@ -530,8 +613,48 @@ func (p *noder) varDecl(decl *syntax.VarDecl) []ir.Node {
p.checkUnused(pragma)
}
var init []ir.Node
p.setlineno(decl)
return typecheck.DeclVars(names, typ, exprs)
if len(names) > 1 && len(exprs) == 1 {
as2 := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, exprs)
for _, v := range names {
as2.Lhs.Append(v)
typecheck.Declare(v, typecheck.DeclContext)
v.Ntype = typ
v.Defn = as2
if ir.CurFunc != nil {
init = append(init, ir.NewDecl(base.Pos, ir.ODCL, v))
}
}
return append(init, as2)
}
for i, v := range names {
var e ir.Node
if i < len(exprs) {
e = exprs[i]
}
typecheck.Declare(v, typecheck.DeclContext)
v.Ntype = typ
if ir.CurFunc != nil {
init = append(init, ir.NewDecl(base.Pos, ir.ODCL, v))
}
as := ir.NewAssignStmt(base.Pos, v, e)
init = append(init, as)
if e != nil || ir.CurFunc == nil {
v.Defn = as
}
}
if len(exprs) != 0 && len(names) != len(exprs) {
base.Errorf("assignment mismatch: %d variables but %d values", len(names), len(exprs))
}
return init
}
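The two code paths above correspond to the declaration shapes below; the sketch is ordinary Go annotated with the IR each form produces (names and values are illustrative only):

    package vardecl

    func swap(a, b int) (int, int) { return b, a }

    var x, y = swap(1, 2) // len(names) > 1, len(exprs) == 1: one OAS2 assigns both
    var a, b = 1, 2       // matched counts: one OAS per variable
    var p, q int          // no initializers: OAS with nil RHS, i.e. zero values

    // var s, t = 1, 2, 3 // rejected: "assignment mismatch: 2 variables but 3 values"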
// constState tracks state between constant specifiers within a
@ -657,7 +780,8 @@ func (p *noder) funcDecl(fun *syntax.FuncDecl) ir.Node {
name = ir.BlankNode.Sym() // filled in by typecheckfunc
}
f.Nname = ir.NewFuncNameAt(p.pos(fun.Name), name, f)
f.Nname = ir.NewNameAt(p.pos(fun.Name), name)
f.Nname.Func = f
f.Nname.Defn = f
f.Nname.Ntype = t
@ -787,7 +911,7 @@ func (p *noder) expr(expr syntax.Expr) ir.Node {
for i, e := range l {
l[i] = p.wrapname(expr.ElemList[i], e)
}
n.List.Set(l)
n.List = l
base.Pos = p.makeXPos(expr.Rbrace)
return n
case *syntax.KeyValueExpr:
@ -1143,8 +1267,8 @@ func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) ir.Node {
if list, ok := stmt.Lhs.(*syntax.ListExpr); ok && len(list.ElemList) != 1 || len(rhs) != 1 {
n := ir.NewAssignListStmt(p.pos(stmt), ir.OAS2, nil, nil)
n.Def = stmt.Op == syntax.Def
n.Lhs.Set(p.assignList(stmt.Lhs, n, n.Def))
n.Rhs.Set(rhs)
n.Lhs = p.assignList(stmt.Lhs, n, n.Def)
n.Rhs = rhs
return n
}
@ -1191,10 +1315,10 @@ func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) ir.Node {
n := ir.NewReturnStmt(p.pos(stmt), p.exprList(stmt.Results))
if len(n.Results) == 0 && ir.CurFunc != nil {
for _, ln := range ir.CurFunc.Dcl {
if ln.Class_ == ir.PPARAM {
if ln.Class == ir.PPARAM {
continue
}
if ln.Class_ != ir.PPARAMOUT {
if ln.Class != ir.PPARAMOUT {
break
}
if ln.Sym().Def != ln {
@ -1215,7 +1339,7 @@ func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) ir.Node {
panic("unhandled Stmt")
}
func (p *noder) assignList(expr syntax.Expr, defn ir.Node, colas bool) []ir.Node {
func (p *noder) assignList(expr syntax.Expr, defn ir.InitNode, colas bool) []ir.Node {
if !colas {
return p.exprList(expr)
}
@ -1291,7 +1415,7 @@ func (p *noder) ifStmt(stmt *syntax.IfStmt) ir.Node {
e := p.stmt(stmt.Else)
if e.Op() == ir.OBLOCK {
e := e.(*ir.BlockStmt)
n.Else.Set(e.List)
n.Else = e.List
} else {
n.Else = []ir.Node{e}
}
@ -1316,7 +1440,7 @@ func (p *noder) forStmt(stmt *syntax.ForStmt) ir.Node {
n.Value = lhs[1]
}
}
n.Body.Set(p.blockStmt(stmt.Body))
n.Body = p.blockStmt(stmt.Body)
p.closeAnotherScope()
return n
}
@ -1374,7 +1498,7 @@ func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch *ir.TypeSwitch
body = body[:len(body)-1]
}
n.Body.Set(p.stmtsFall(body, true))
n.Body = p.stmtsFall(body, true)
if l := len(n.Body); l > 0 && n.Body[l-1].Op() == ir.OFALL {
if tswitch != nil {
base.Errorf("cannot fallthrough in type switch")
@ -1875,7 +1999,9 @@ func (p *noder) funcLit(expr *syntax.FuncLit) ir.Node {
fn := ir.NewFunc(p.pos(expr))
fn.SetIsHiddenClosure(ir.CurFunc != nil)
fn.Nname = ir.NewFuncNameAt(p.pos(expr), ir.BlankNode.Sym(), fn) // filled in by typecheckclosure
fn.Nname = ir.NewNameAt(p.pos(expr), ir.BlankNode.Sym()) // filled in by typecheckclosure
fn.Nname.Func = fn
fn.Nname.Ntype = xtype
fn.Nname.Defn = fn
@ -1965,18 +2091,18 @@ func oldname(s *types.Sym) ir.Node {
// the := it looks like a reference to the outer x so we'll
// make x a closure variable unnecessarily.
n := n.(*ir.Name)
c := n.Name().Innermost
c := n.Innermost
if c == nil || c.Curfn != ir.CurFunc {
// Do not have a closure var for the active closure yet; make one.
c = typecheck.NewName(s)
c.Class_ = ir.PAUTOHEAP
c.Class = ir.PAUTOHEAP
c.SetIsClosureVar(true)
c.Defn = n
// Link into list of active closure variables.
// Popped from list in func funcLit.
c.Outer = n.Name().Innermost
n.Name().Innermost = c
c.Outer = n.Innermost
n.Innermost = c
ir.CurFunc.ClosureVars = append(ir.CurFunc.ClosureVars, c)
}
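The Innermost/Outer bookkeeping above is what makes ordinary capture work. In source terms, a minimal example assuming nothing beyond standard Go semantics:

    package main

    import "fmt"

    func main() {
        x := 0
        inc := func() {
            // Referencing x here makes oldname create a closure variable
            // (PAUTOHEAP, IsClosureVar) whose Defn is the outer x, threaded
            // onto x's Innermost/Outer chain.
            x++
        }
        inc()
        inc()
        fmt.Println(x) // 2: the closure shares the variable, not a copy
    }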


@ -6,6 +6,7 @@ package pkginit
import (
"cmd/compile/internal/base"
"cmd/compile/internal/deadcode"
"cmd/compile/internal/ir"
"cmd/compile/internal/objw"
"cmd/compile/internal/typecheck"
@ -31,10 +32,10 @@ func Task() *ir.Name {
if n.Op() == ir.ONONAME {
continue
}
if n.Op() != ir.ONAME || n.(*ir.Name).Class_ != ir.PEXTERN {
if n.Op() != ir.ONAME || n.(*ir.Name).Class != ir.PEXTERN {
base.Fatalf("bad inittask: %v", n)
}
deps = append(deps, n.(*ir.Name).Sym().Linksym())
deps = append(deps, n.(*ir.Name).Linksym())
}
// Make a function that contains all the initialization statements.
@ -48,7 +49,7 @@ func Task() *ir.Name {
fn.Dcl = append(fn.Dcl, typecheck.InitTodoFunc.Dcl...)
typecheck.InitTodoFunc.Dcl = nil
fn.Body.Set(nf)
fn.Body = nf
typecheck.FinishFuncBody()
typecheck.Func(fn)
@ -56,7 +57,7 @@ func Task() *ir.Name {
typecheck.Stmts(nf)
ir.CurFunc = nil
typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
fns = append(fns, initializers.Linksym())
fns = append(fns, fn.Linksym())
}
if typecheck.InitTodoFunc.Dcl != nil {
// We only generate temps using initTodo if there
@ -68,13 +69,16 @@ func Task() *ir.Name {
// Record user init functions.
for _, fn := range typecheck.Target.Inits {
// Must happen after initOrder; see #43444.
deadcode.Func(fn)
// Skip init functions with empty bodies.
if len(fn.Body) == 1 {
if stmt := fn.Body[0]; stmt.Op() == ir.OBLOCK && len(stmt.(*ir.BlockStmt).List) == 0 {
continue
}
}
fns = append(fns, fn.Nname.Sym().Linksym())
fns = append(fns, fn.Nname.Linksym())
}
if len(deps) == 0 && len(fns) == 0 && types.LocalPkg.Name != "main" && types.LocalPkg.Name != "runtime" {
@ -85,9 +89,9 @@ func Task() *ir.Name {
sym := typecheck.Lookup(".inittask")
task := typecheck.NewName(sym)
task.SetType(types.Types[types.TUINT8]) // fake type
task.Class_ = ir.PEXTERN
task.Class = ir.PEXTERN
sym.Def = task
lsym := sym.Linksym()
lsym := task.Linksym()
ot := 0
ot = objw.Uintptr(lsym, ot, 0) // state: not initialized yet
ot = objw.Uintptr(lsym, ot, uint64(len(deps)))
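The objw.Uintptr calls above lay out the ".inittask" record the runtime walks at startup. A sketch of that layout as a struct, with field names assumed from the runtime's initTask and shown for orientation only:

    type initTask struct {
        state uintptr // 0 = uninitialized, 1 = in progress, 2 = done
        ndeps uintptr // count of dependency-task addresses that follow
        nfns  uintptr // count of init-function addresses after the deps
        // followed in memory by ndeps task pointers, then nfns func pointers
    }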


@ -140,7 +140,7 @@ func (o *InitOrder) processAssign(n ir.Node) {
defn := dep.Defn
// Skip dependencies on functions (PFUNC) and
// variables already initialized (InitDone).
if dep.Class_ != ir.PEXTERN || o.order[defn] == orderDone {
if dep.Class != ir.PEXTERN || o.order[defn] == orderDone {
continue
}
o.order[n]++
@ -197,14 +197,14 @@ func (o *InitOrder) findInitLoopAndExit(n *ir.Name, path *[]*ir.Name) {
// There might be multiple loops involving n; by sorting
// references, we deterministically pick the one reported.
refers := collectDeps(n.Name().Defn, false).Sorted(func(ni, nj *ir.Name) bool {
refers := collectDeps(n.Defn, false).Sorted(func(ni, nj *ir.Name) bool {
return ni.Pos().Before(nj.Pos())
})
*path = append(*path, n)
for _, ref := range refers {
// Short-circuit variables that were initialized.
if ref.Class_ == ir.PEXTERN && o.order[ref.Defn] == orderDone {
if ref.Class == ir.PEXTERN && o.order[ref.Defn] == orderDone {
continue
}
@ -221,7 +221,7 @@ func reportInitLoopAndExit(l []*ir.Name) {
// the start.
i := -1
for j, n := range l {
if n.Class_ == ir.PEXTERN && (i == -1 || n.Pos().Before(l[i].Pos())) {
if n.Class == ir.PEXTERN && (i == -1 || n.Pos().Before(l[i].Pos())) {
i = j
}
}
@ -291,7 +291,7 @@ func (d *initDeps) visit(n ir.Node) {
switch n.Op() {
case ir.ONAME:
n := n.(*ir.Name)
switch n.Class_ {
switch n.Class {
case ir.PEXTERN, ir.PFUNC:
d.foundDep(n)
}
@ -324,7 +324,7 @@ func (d *initDeps) foundDep(n *ir.Name) {
return
}
d.seen.Add(n)
if d.transitive && n.Class_ == ir.PFUNC {
if d.transitive && n.Class == ir.PFUNC {
d.inspectList(n.Defn.(*ir.Func).Body)
}
}
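The transitive walk into PFUNC bodies matters because a package-level variable can depend on another only through a function it calls. A small illustrative package:

    package initorder

    var a = f() // a depends on f (PFUNC)...
    var b = 2

    // ...and, through f's body, on b (PEXTERN), so b must be initialized
    // before a even though a never names b directly.
    func f() int { return b }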


@ -104,7 +104,7 @@ func genhash(t *types.Type) *obj.LSym {
// For other sizes of plain memory, we build a closure
// that calls memhash_varlen. The size of the memory is
// encoded in the first slot of the closure.
closure := types.TypeSymLookup(fmt.Sprintf(".hashfunc%d", t.Width)).Linksym()
closure := TypeLinksymLookup(fmt.Sprintf(".hashfunc%d", t.Width))
if len(closure.P) > 0 { // already generated
return closure
}
@ -120,7 +120,7 @@ func genhash(t *types.Type) *obj.LSym {
break
}
closure := TypeSymPrefix(".hashfunc", t).Linksym()
closure := TypeLinksymPrefix(".hashfunc", t)
if len(closure.P) > 0 { // already generated
return closure
}
@ -255,7 +255,7 @@ func genhash(t *types.Type) *obj.LSym {
// Build closure. It doesn't close over any variables, so
// it contains just the function pointer.
objw.SymPtr(closure, 0, sym.Linksym(), 0)
objw.SymPtr(closure, 0, fn.Linksym(), 0)
objw.Global(closure, int32(types.PtrSize), obj.DUPOK|obj.RODATA)
return closure
@ -347,7 +347,7 @@ func geneq(t *types.Type) *obj.LSym {
case types.AMEM:
// make equality closure. The size of the type
// is encoded in the closure.
closure := types.TypeSymLookup(fmt.Sprintf(".eqfunc%d", t.Width)).Linksym()
closure := TypeLinksymLookup(fmt.Sprintf(".eqfunc%d", t.Width))
if len(closure.P) != 0 {
return closure
}
@ -363,7 +363,7 @@ func geneq(t *types.Type) *obj.LSym {
break
}
closure := TypeSymPrefix(".eqfunc", t).Linksym()
closure := TypeLinksymPrefix(".eqfunc", t)
if len(closure.P) > 0 { // already generated
return closure
}
@ -634,7 +634,7 @@ func geneq(t *types.Type) *obj.LSym {
typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
// Generate a closure which points at the function we just generated.
objw.SymPtr(closure, 0, sym.Linksym(), 0)
objw.SymPtr(closure, 0, fn.Linksym(), 0)
objw.Global(closure, int32(types.PtrSize), obj.DUPOK|obj.RODATA)
return closure
}
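For plain memory the ".hashfuncN"/".eqfuncN" closures built above carry the size next to the function pointer, while types with their own generated func get a one-word closure. An illustrative shape, not the real emitted symbol:

    type varlenClosure struct {
        fn   uintptr // &runtime.memhash_varlen or &runtime.memequal_varlen
        size uintptr // t.Width; the varlen helper reads it from the closure
    }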


@ -52,13 +52,13 @@ var (
signatslice []*types.Type
itabs []itabEntry
ptabs []ptabEntry
ptabs []*ir.Name
)
type typeSig struct {
name *types.Sym
isym *types.Sym
tsym *types.Sym
isym *obj.LSym
tsym *obj.LSym
type_ *types.Type
mtype *types.Type
}
@ -327,21 +327,19 @@ func methods(t *types.Type) []*typeSig {
// generating code if necessary.
var ms []*typeSig
for _, f := range mt.AllMethods().Slice() {
if f.Sym == nil {
base.Fatalf("method with no sym on %v", mt)
}
if !f.IsMethod() {
base.Fatalf("non-method on %v method %v %v\n", mt, f.Sym, f)
base.Fatalf("non-method on %v method %v %v", mt, f.Sym, f)
}
if f.Type.Recv() == nil {
base.Fatalf("receiver with no type on %v method %v %v\n", mt, f.Sym, f)
base.Fatalf("receiver with no type on %v method %v %v", mt, f.Sym, f)
}
if f.Nointerface() {
continue
}
method := f.Sym
if method == nil {
break
}
// get receiver type for this particular method.
// if pointer receiver but non-pointer t and
// this is not an embedded pointer inside a struct,
@ -351,29 +349,13 @@ func methods(t *types.Type) []*typeSig {
}
sig := &typeSig{
name: method,
isym: ir.MethodSym(it, method),
tsym: ir.MethodSym(t, method),
name: f.Sym,
isym: methodWrapper(it, f),
tsym: methodWrapper(t, f),
type_: typecheck.NewMethodType(f.Type, t),
mtype: typecheck.NewMethodType(f.Type, nil),
}
ms = append(ms, sig)
this := f.Type.Recv().Type
if !sig.isym.Siggen() {
sig.isym.SetSiggen(true)
if !types.Identical(this, it) {
genwrapper(it, f, sig.isym)
}
}
if !sig.tsym.Siggen() {
sig.tsym.SetSiggen(true)
if !types.Identical(this, t) {
genwrapper(t, f, sig.tsym)
}
}
}
return ms
@ -407,11 +389,7 @@ func imethods(t *types.Type) []*typeSig {
// IfaceType.Method is not in the reflect data.
// Generate the method body, so that compiled
// code can refer to it.
isym := ir.MethodSym(t, f.Sym)
if !isym.Siggen() {
isym.SetSiggen(true)
genwrapper(t, f, isym)
}
methodWrapper(t, f)
}
return methods
@ -636,8 +614,8 @@ func dextratypeData(lsym *obj.LSym, ot int, t *types.Type) int {
ot = objw.SymPtrOff(lsym, ot, nsym)
ot = dmethodptrOff(lsym, ot, WriteType(a.mtype))
ot = dmethodptrOff(lsym, ot, a.isym.Linksym())
ot = dmethodptrOff(lsym, ot, a.tsym.Linksym())
ot = dmethodptrOff(lsym, ot, a.isym)
ot = dmethodptrOff(lsym, ot, a.tsym)
}
return ot
}
@ -812,8 +790,8 @@ func dcommontype(lsym *obj.LSym, t *types.Type) int {
// TrackSym returns the symbol for tracking use of field/method f, assumed
// to be a member of struct/interface type t.
func TrackSym(t *types.Type, f *types.Field) *types.Sym {
return ir.Pkgs.Track.Lookup(t.ShortString() + "." + f.Sym.Name)
func TrackSym(t *types.Type, f *types.Field) *obj.LSym {
return ir.Pkgs.Track.Lookup(t.ShortString() + "." + f.Sym.Name).Linksym()
}
func TypeSymPrefix(prefix string, t *types.Type) *types.Sym {
@ -845,12 +823,24 @@ func TypeSym(t *types.Type) *types.Sym {
return s
}
func TypeLinksymPrefix(prefix string, t *types.Type) *obj.LSym {
return TypeSymPrefix(prefix, t).Linksym()
}
func TypeLinksymLookup(name string) *obj.LSym {
return types.TypeSymLookup(name).Linksym()
}
func TypeLinksym(t *types.Type) *obj.LSym {
return TypeSym(t).Linksym()
}
func TypePtr(t *types.Type) *ir.AddrExpr {
s := TypeSym(t)
if s.Def == nil {
n := ir.NewNameAt(src.NoXPos, s)
n.SetType(types.Types[types.TUINT8])
n.Class_ = ir.PEXTERN
n.Class = ir.PEXTERN
n.SetTypecheck(1)
s.Def = n
}
@ -869,10 +859,10 @@ func ITabAddr(t, itype *types.Type) *ir.AddrExpr {
if s.Def == nil {
n := typecheck.NewName(s)
n.SetType(types.Types[types.TUINT8])
n.Class_ = ir.PEXTERN
n.Class = ir.PEXTERN
n.SetTypecheck(1)
s.Def = n
itabs = append(itabs, itabEntry{t: t, itype: itype, lsym: s.Linksym()})
itabs = append(itabs, itabEntry{t: t, itype: itype, lsym: n.Linksym()})
}
n := typecheck.NodAddr(ir.AsNode(s.Def))
@ -1269,7 +1259,7 @@ func genfun(t, it *types.Type) []*obj.LSym {
// so we can find the intersect in a single pass
for _, m := range methods {
if m.name == sigs[0].name {
out = append(out, m.isym.Linksym())
out = append(out, m.isym)
sigs = sigs[1:]
if len(sigs) == 0 {
break
@ -1378,8 +1368,12 @@ func WriteTabs() {
// name nameOff
// typ typeOff // pointer to symbol
// }
nsym := dname(p.s.Name, "", nil, true)
tsym := WriteType(p.t)
nsym := dname(p.Sym().Name, "", nil, true)
t := p.Type()
if p.Class != ir.PFUNC {
t = types.NewPtr(t)
}
tsym := WriteType(t)
ot = objw.SymPtrOff(s, ot, nsym)
ot = objw.SymPtrOff(s, ot, tsym)
// Plugin exports symbols as interfaces. Mark their types
@ -1391,7 +1385,7 @@ func WriteTabs() {
ot = 0
s = base.Ctxt.Lookup("go.plugin.exports")
for _, p := range ptabs {
ot = objw.SymPtr(s, ot, p.s.Linksym(), 0)
ot = objw.SymPtr(s, ot, p.Linksym(), 0)
}
objw.Global(s, int32(ot), int16(obj.RODATA))
}
@ -1571,7 +1565,7 @@ func dgcprog(t *types.Type) (*obj.LSym, int64) {
if t.Width == types.BADWIDTH {
base.Fatalf("dgcprog: %v badwidth", t)
}
lsym := TypeSymPrefix(".gcprog", t).Linksym()
lsym := TypeLinksymPrefix(".gcprog", t)
var p gcProg
p.init(lsym)
p.emit(t, 0)
@ -1680,7 +1674,7 @@ func ZeroAddr(size int64) ir.Node {
if s.Def == nil {
x := typecheck.NewName(s)
x.SetType(types.Types[types.TUINT8])
x.Class_ = ir.PEXTERN
x.Class = ir.PEXTERN
x.SetTypecheck(1)
s.Def = x
}
@ -1710,13 +1704,7 @@ func CollectPTabs() {
if s.Pkg.Name != "main" {
continue
}
if n.Type().Kind() == types.TFUNC && n.Class_ == ir.PFUNC {
// function
ptabs = append(ptabs, ptabEntry{s: s, t: s.Def.Type()})
} else {
// variable
ptabs = append(ptabs, ptabEntry{s: s, t: types.NewPtr(s.Def.Type())})
}
ptabs = append(ptabs, n)
}
}
@ -1740,22 +1728,28 @@ func CollectPTabs() {
//
// rcvr - U
// method - M func (t T)(), a TFIELD type struct
// newnam - the eventual mangled name of this function
func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
if false && base.Flag.LowerR != 0 {
fmt.Printf("genwrapper rcvrtype=%v method=%v newnam=%v\n", rcvr, method, newnam)
func methodWrapper(rcvr *types.Type, method *types.Field) *obj.LSym {
newnam := ir.MethodSym(rcvr, method.Sym)
lsym := newnam.Linksym()
if newnam.Siggen() {
return lsym
}
newnam.SetSiggen(true)
if types.Identical(rcvr, method.Type.Recv().Type) {
return lsym
}
// Only generate (*T).M wrappers for T.M in T's own package.
if rcvr.IsPtr() && rcvr.Elem() == method.Type.Recv().Type &&
rcvr.Elem().Sym() != nil && rcvr.Elem().Sym().Pkg != types.LocalPkg {
return
return lsym
}
// Only generate I.M wrappers for I in I's own package
// but keep doing it for error.Error (was issue #29304).
if rcvr.IsInterface() && rcvr.Sym() != nil && rcvr.Sym().Pkg != types.LocalPkg && rcvr != types.ErrorType {
return
return lsym
}
base.Pos = base.AutogeneratedPos
@ -1804,7 +1798,7 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
} else {
fn.SetWrapper(true) // ignore frame for panic+recover matching
call := ir.NewCallExpr(base.Pos, ir.OCALL, dot, nil)
call.Args.Set(ir.ParamNames(tfn.Type()))
call.Args = ir.ParamNames(tfn.Type())
call.IsDDD = tfn.Type().IsVariadic()
if method.Type.NumResults() > 0 {
ret := ir.NewReturnStmt(base.Pos, nil)
@ -1815,10 +1809,6 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
}
}
if false && base.Flag.LowerR != 0 {
ir.DumpList("genwrapper body", fn.Body)
}
typecheck.FinishFuncBody()
if base.Debug.DclStack != 0 {
types.CheckDclstack()
@ -1838,6 +1828,8 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
ir.CurFunc = nil
typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
return lsym
}
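At the source level, what methodWrapper produces is morally the following, using a hypothetical type T (the real code builds IR directly and caches the wrapper via Siggen):

    package p

    type T struct{ n int }

    func (t T) M() int { return t.n }

    // methodWrapper(rcvr = *T, method = M) synthesizes, in effect,
    //
    //     func (t *T) M() int { return (*t).M() }
    //
    // which cannot be spelled in user code, and returns that wrapper's
    // *obj.LSym so callers can reference it directly.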
var ZeroSize int64
@ -1845,7 +1837,7 @@ var ZeroSize int64
// MarkTypeUsedInInterface marks that type t is converted to an interface.
// This information is used in the linker in dead method elimination.
func MarkTypeUsedInInterface(t *types.Type, from *obj.LSym) {
tsym := TypeSym(t).Linksym()
tsym := TypeLinksym(t)
// Emit a marker relocation. The linker will know the type is converted
// to an interface if "from" is reachable.
r := obj.Addrel(from)
@ -1858,7 +1850,7 @@ func MarkTypeUsedInInterface(t *types.Type, from *obj.LSym) {
func MarkUsedIfaceMethod(n *ir.CallExpr) {
dot := n.X.(*ir.SelectorExpr)
ityp := dot.X.Type()
tsym := TypeSym(ityp).Linksym()
tsym := TypeLinksym(ityp)
r := obj.Addrel(ir.CurFunc.LSym)
r.Sym = tsym
// dot.Xoffset is the method index * Widthptr (the offset of code pointer


@ -148,7 +148,7 @@ func elimDeadAutosGeneric(f *Func) {
case OpAddr, OpLocalAddr:
// Propagate the address if it points to an auto.
n, ok := v.Aux.(*ir.Name)
if !ok || n.Class() != ir.PAUTO {
if !ok || n.Class != ir.PAUTO {
return
}
if addr[v] == nil {
@ -159,7 +159,7 @@ func elimDeadAutosGeneric(f *Func) {
case OpVarDef, OpVarKill:
// v should be eliminated if we eliminate the auto.
n, ok := v.Aux.(*ir.Name)
if !ok || n.Class() != ir.PAUTO {
if !ok || n.Class != ir.PAUTO {
return
}
if elim[v] == nil {
@ -175,7 +175,7 @@ func elimDeadAutosGeneric(f *Func) {
// may not be used by the inline code, but will be used by
// panic processing).
n, ok := v.Aux.(*ir.Name)
if !ok || n.Class() != ir.PAUTO {
if !ok || n.Class != ir.PAUTO {
return
}
if !used[n] {
@ -307,7 +307,7 @@ func elimUnreadAutos(f *Func) {
if !ok {
continue
}
if n.Class() != ir.PAUTO {
if n.Class != ir.PAUTO {
continue
}


@ -70,7 +70,7 @@ func (TestFrontend) StringData(s string) *obj.LSym {
}
func (TestFrontend) Auto(pos src.XPos, t *types.Type) *ir.Name {
n := ir.NewNameAt(pos, &types.Sym{Name: "aFakeAuto"})
n.SetClass(ir.PAUTO)
n.Class = ir.PAUTO
return n
}
func (d TestFrontend) SplitString(s LocalSlot) (LocalSlot, LocalSlot) {


@ -5,6 +5,7 @@
package ssa
import (
"cmd/compile/internal/reflectdata"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/objabi"
@ -270,11 +271,11 @@ func writebarrier(f *Func) {
case OpMoveWB:
fn = typedmemmove
val = w.Args[1]
typ = w.Aux.(*types.Type).Symbol()
typ = reflectdata.TypeLinksym(w.Aux.(*types.Type))
nWBops--
case OpZeroWB:
fn = typedmemclr
typ = w.Aux.(*types.Type).Symbol()
typ = reflectdata.TypeLinksym(w.Aux.(*types.Type))
nWBops--
case OpVarDef, OpVarLive, OpVarKill:
}


@ -2,8 +2,6 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:generate go run mkbuiltin.go
package ssagen
import (
@ -168,7 +166,7 @@ func selectLSym(f *ir.Func, hasBody bool) {
f.LSym = nam.Sym().LinksymABI0()
needABIWrapper, wrapperABI = true, obj.ABIInternal
} else {
f.LSym = nam.Sym().Linksym()
f.LSym = nam.Linksym()
// No ABI override. Check that the symbol is
// using the expected ABI.
want := obj.ABIInternal
@ -305,7 +303,7 @@ func makeABIWrapper(f *ir.Func, wrapperABI obj.ABI) {
tail = ir.NewBranchStmt(base.Pos, ir.ORETJMP, f.Nname.Sym())
} else {
call := ir.NewCallExpr(base.Pos, ir.OCALL, f.Nname, nil)
call.Args.Set(ir.ParamNames(tfn.Type()))
call.Args = ir.ParamNames(tfn.Type())
call.IsDDD = tfn.Type().IsVariadic()
tail = call
if tfn.Type().NumResults() > 0 {

View File

@ -76,7 +76,7 @@ func (c *nowritebarrierrecChecker) findExtraCalls(nn ir.Node) {
return
}
fn := n.X.(*ir.Name)
if fn.Class_ != ir.PFUNC || fn.Name().Defn == nil {
if fn.Class != ir.PFUNC || fn.Defn == nil {
return
}
if !types.IsRuntimePkg(fn.Sym().Pkg) || fn.Sym().Name != "systemstack" {
@ -88,7 +88,7 @@ func (c *nowritebarrierrecChecker) findExtraCalls(nn ir.Node) {
switch arg.Op() {
case ir.ONAME:
arg := arg.(*ir.Name)
callee = arg.Name().Defn.(*ir.Func)
callee = arg.Defn.(*ir.Func)
case ir.OCLOSURE:
arg := arg.(*ir.ClosureExpr)
callee = arg.Func
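The two argument shapes handled above, in source form; systemstack here is a stand-in for the runtime function:

    package nwbr

    func systemstack(fn func()) { fn() }

    func work() {}

    func caller() {
        systemstack(work)              // ir.ONAME: callee recovered from arg.Defn
        systemstack(func() { work() }) // ir.OCLOSURE: callee is arg.Func
    }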


@ -34,11 +34,11 @@ import (
// the top of the stack and increasing in size.
// Non-autos sort on offset.
func cmpstackvarlt(a, b *ir.Name) bool {
if (a.Class_ == ir.PAUTO) != (b.Class_ == ir.PAUTO) {
return b.Class_ == ir.PAUTO
if (a.Class == ir.PAUTO) != (b.Class == ir.PAUTO) {
return b.Class == ir.PAUTO
}
if a.Class_ != ir.PAUTO {
if a.Class != ir.PAUTO {
return a.FrameOffset() < b.FrameOffset()
}
@ -79,14 +79,14 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
// Mark the PAUTO's unused.
for _, ln := range fn.Dcl {
if ln.Class_ == ir.PAUTO {
if ln.Class == ir.PAUTO {
ln.SetUsed(false)
}
}
for _, l := range f.RegAlloc {
if ls, ok := l.(ssa.LocalSlot); ok {
ls.N.Name().SetUsed(true)
ls.N.SetUsed(true)
}
}
@ -94,14 +94,14 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
for _, b := range f.Blocks {
for _, v := range b.Values {
if n, ok := v.Aux.(*ir.Name); ok {
switch n.Class_ {
switch n.Class {
case ir.PPARAM, ir.PPARAMOUT:
// Don't modify nodfp; it is a global.
if n != ir.RegFP {
n.Name().SetUsed(true)
n.SetUsed(true)
}
case ir.PAUTO:
n.Name().SetUsed(true)
n.SetUsed(true)
}
}
if !scratchUsed {
@ -120,7 +120,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
// Reassign stack offsets of the locals that are used.
lastHasPtr := false
for i, n := range fn.Dcl {
if n.Op() != ir.ONAME || n.Class_ != ir.PAUTO {
if n.Op() != ir.ONAME || n.Class != ir.PAUTO {
continue
}
if !n.Used() {
@ -207,7 +207,7 @@ func init() {
func StackOffset(slot ssa.LocalSlot) int32 {
n := slot.N
var off int64
switch n.Class_ {
switch n.Class {
case ir.PAUTO:
off = n.FrameOffset()
if base.Ctxt.FixedFrameSize() == 0 {
@ -225,7 +225,7 @@ func StackOffset(slot ssa.LocalSlot) int32 {
// fieldtrack adds R_USEFIELD relocations to fnsym to record any
// struct fields that it used.
func fieldtrack(fnsym *obj.LSym, tracked map[*types.Sym]struct{}) {
func fieldtrack(fnsym *obj.LSym, tracked map[*obj.LSym]struct{}) {
if fnsym == nil {
return
}
@ -233,24 +233,18 @@ func fieldtrack(fnsym *obj.LSym, tracked map[*types.Sym]struct{}) {
return
}
trackSyms := make([]*types.Sym, 0, len(tracked))
trackSyms := make([]*obj.LSym, 0, len(tracked))
for sym := range tracked {
trackSyms = append(trackSyms, sym)
}
sort.Sort(symByName(trackSyms))
sort.Slice(trackSyms, func(i, j int) bool { return trackSyms[i].Name < trackSyms[j].Name })
for _, sym := range trackSyms {
r := obj.Addrel(fnsym)
r.Sym = sym.Linksym()
r.Sym = sym
r.Type = objabi.R_USEFIELD
}
}
type symByName []*types.Sym
func (a symByName) Len() int { return len(a) }
func (a symByName) Less(i, j int) bool { return a[i].Name < a[j].Name }
func (a symByName) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
// largeStack is info about a function whose stack frame is too large (rare).
type largeStack struct {
locals int64


@ -46,7 +46,7 @@ func TestCmpstackvar(t *testing.T) {
n := typecheck.NewName(s)
n.SetType(t)
n.SetFrameOffset(xoffset)
n.Class_ = cl
n.Class = cl
return n
}
testdata := []struct {
@ -161,7 +161,7 @@ func TestStackvarSort(t *testing.T) {
n := typecheck.NewName(s)
n.SetType(t)
n.SetFrameOffset(xoffset)
n.Class_ = cl
n.Class = cl
return n
}
inp := []*ir.Name{


@ -436,7 +436,7 @@ func buildssa(fn *ir.Func, worker int) *ssa.Func {
var args []ssa.Param
var results []ssa.Param
for _, n := range fn.Dcl {
switch n.Class_ {
switch n.Class {
case ir.PPARAM:
s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
args = append(args, ssa.Param{Type: n.Type(), Offset: int32(n.FrameOffset())})
@ -457,13 +457,13 @@ func buildssa(fn *ir.Func, worker int) *ssa.Func {
case ir.PFUNC:
// local function - already handled by frontend
default:
s.Fatalf("local variable with class %v unimplemented", n.Class_)
s.Fatalf("local variable with class %v unimplemented", n.Class)
}
}
// Populate SSAable arguments.
for _, n := range fn.Dcl {
if n.Class_ == ir.PPARAM && s.canSSA(n) {
if n.Class == ir.PPARAM && s.canSSA(n) {
v := s.newValue0A(ssa.OpArg, n.Type(), n)
s.vars[n] = v
s.addNamedValue(n, v) // This helps with debugging information, not needed for compilation itself.
@ -1166,7 +1166,7 @@ func (s *state) stmt(n ir.Node) {
case ir.OCALLINTER:
n := n.(*ir.CallExpr)
s.callResult(n, callNormal)
if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class_ == ir.PFUNC {
if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class == ir.PFUNC {
if fn := n.X.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
n.X.Sym().Pkg == ir.Pkgs.Runtime && (fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" || fn == "panicmakeslicelen" || fn == "panicmakeslicecap") {
m := s.mem()
@ -1242,7 +1242,7 @@ func (s *state) stmt(n ir.Node) {
case ir.ODCL:
n := n.(*ir.Decl)
if n.X.(*ir.Name).Class_ == ir.PAUTOHEAP {
if n.X.Class == ir.PAUTOHEAP {
s.Fatalf("DCL %v", n)
}
@ -1634,7 +1634,7 @@ func (s *state) stmt(n ir.Node) {
if !v.Addrtaken() {
s.Fatalf("VARLIVE variable %v must have Addrtaken set", v)
}
switch v.Class_ {
switch v.Class {
case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT:
default:
s.Fatalf("VARLIVE variable %v must be Auto or Arg", v)
@ -2106,13 +2106,13 @@ func (s *state) expr(n ir.Node) *ssa.Value {
return s.newValue3(ssa.OpSliceMake, n.Type(), ptr, len, len)
case ir.OCFUNC:
n := n.(*ir.UnaryExpr)
aux := n.X.Sym().Linksym()
aux := n.X.(*ir.Name).Linksym()
return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb)
case ir.ONAME:
n := n.(*ir.Name)
if n.Class_ == ir.PFUNC {
if n.Class == ir.PFUNC {
// "value" of a function is the address of the function's closure
sym := staticdata.FuncSym(n.Sym()).Linksym()
sym := staticdata.FuncLinksym(n)
return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
}
if s.canSSA(n) {
@ -3003,7 +3003,7 @@ func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
if inplace {
if sn.Op() == ir.ONAME {
sn := sn.(*ir.Name)
if sn.Class_ != ir.PEXTERN {
if sn.Class != ir.PEXTERN {
// Tell liveness we're about to build a new slice
s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, sn, s.mem())
}
@ -3222,7 +3222,7 @@ func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask
// If this assignment clobbers an entire local variable, then emit
// OpVarDef so liveness analysis knows the variable is redefined.
if base := clobberBase(left); base.Op() == ir.ONAME && base.(*ir.Name).Class_ != ir.PEXTERN && skip == 0 {
if base := clobberBase(left); base.Op() == ir.ONAME && base.(*ir.Name).Class != ir.PEXTERN && skip == 0 {
s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, base.(*ir.Name), s.mem(), !ir.IsAutoTmp(base))
}
@ -4385,7 +4385,7 @@ func (s *state) openDeferRecord(n *ir.CallExpr) {
closureVal := s.expr(fn)
closure := s.openDeferSave(nil, fn.Type(), closureVal)
opendefer.closureNode = closure.Aux.(*ir.Name)
if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class_ == ir.PFUNC) {
if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) {
opendefer.closure = closure
}
} else if n.Op() == ir.OCALLMETH {
@ -4578,7 +4578,7 @@ func (s *state) openDeferExit() {
call = s.newValue3A(ssa.OpClosureCall, types.TypeMem, aux, codeptr, v, s.mem())
}
} else {
aux := ssa.StaticAuxCall(fn.Sym().Linksym(), ACArgs, ACResults)
aux := ssa.StaticAuxCall(fn.(*ir.Name).Linksym(), ACArgs, ACResults)
if testLateExpansion {
callArgs = append(callArgs, s.mem())
call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
@ -4651,7 +4651,7 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
switch n.Op() {
case ir.OCALLFUNC:
testLateExpansion = k != callDeferStack && ssa.LateCallExpansionEnabledWithin(s.f)
if k == callNormal && fn.Op() == ir.ONAME && fn.(*ir.Name).Class_ == ir.PFUNC {
if k == callNormal && fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC {
fn := fn.(*ir.Name)
sym = fn.Sym()
break
@ -4867,7 +4867,9 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
s.vars[memVar] = call
}
// Insert OVARLIVE nodes
s.stmtList(n.Body)
for _, name := range n.KeepAlive {
s.stmt(ir.NewUnaryExpr(n.Pos(), ir.OVARLIVE, name))
}
// Finish block for defers
if k == callDefer || k == callDeferStack {
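This hunk does not show what populates n.KeepAlive, but the user-visible analogue of the same liveness pinning is runtime.KeepAlive; a sketch under that assumption:

    package main

    import (
        "os"
        "runtime"
    )

    func main() {
        f, err := os.Open(os.DevNull)
        if err != nil {
            return
        }
        fd := f.Fd()
        _ = fd               // use the raw descriptor while f must stay live
        runtime.KeepAlive(f) // the pin; conceptually an OVARLIVE for f
    }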
@ -4956,10 +4958,10 @@ func (s *state) addr(n ir.Node) *ssa.Value {
fallthrough
case ir.ONAME:
n := n.(*ir.Name)
switch n.Class_ {
switch n.Class {
case ir.PEXTERN:
// global variable
v := s.entryNewValue1A(ssa.OpAddr, t, n.Sym().Linksym(), s.sb)
v := s.entryNewValue1A(ssa.OpAddr, t, n.Linksym(), s.sb)
// TODO: Make OpAddr use AuxInt as well as Aux.
if offset != 0 {
v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, offset, v)
@ -4985,7 +4987,7 @@ func (s *state) addr(n ir.Node) *ssa.Value {
// that cse works on their addresses
return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), true)
default:
s.Fatalf("variable address class %v not implemented", n.Class_)
s.Fatalf("variable address class %v not implemented", n.Class)
return nil
}
case ir.ORESULT:
@ -5094,10 +5096,10 @@ func (s *state) canSSAName(name *ir.Name) bool {
if ir.IsParamHeapCopy(name) {
return false
}
if name.Class_ == ir.PAUTOHEAP {
if name.Class == ir.PAUTOHEAP {
s.Fatalf("canSSA of PAUTOHEAP %v", name)
}
switch name.Class_ {
switch name.Class {
case ir.PEXTERN:
return false
case ir.PPARAMOUT:
@ -5115,7 +5117,7 @@ func (s *state) canSSAName(name *ir.Name) bool {
return false
}
}
if name.Class_ == ir.PPARAM && name.Sym() != nil && name.Sym().Name == ".this" {
if name.Class == ir.PPARAM && name.Sym() != nil && name.Sym().Name == ".this" {
// wrappers generated by genwrapper need to update
// the .this pointer in place.
// TODO: treat as a PPARAMOUT?
@ -5978,8 +5980,8 @@ func (s *state) floatToUint(cvttab *f2uCvtTab, n ir.Node, x *ssa.Value, ft, tt *
// commaok indicates whether to panic or return a bool.
// If commaok is false, resok will be nil.
func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
iface := s.expr(n.X) // input interface
target := s.expr(n.Ntype) // target type
iface := s.expr(n.X) // input interface
target := s.expr(n.DstType) // target type
byteptr := s.f.Config.Types.BytePtr
if n.Type().IsInterface() {
@ -6086,7 +6088,7 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
targetITab = target
} else {
// Looking for pointer to itab for target type and source interface.
targetITab = s.expr(n.Itab[0])
targetITab = s.expr(n.Itab)
}
var tmp ir.Node // temporary for use with large types
@ -6113,7 +6115,7 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
if !commaok {
// on failure, panic by calling panicdottype
s.startBlock(bFail)
taddr := s.expr(n.Ntype.(*ir.AddrExpr).Alloc)
taddr := s.expr(n.SrcType)
if n.X.Type().IsEmptyInterface() {
s.rtcall(ir.Syms.PanicdottypeE, false, nil, itab, target, taddr)
} else {
@ -6208,7 +6210,7 @@ func (s *state) mem() *ssa.Value {
}
func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
if n.Class_ == ir.Pxxx {
if n.Class == ir.Pxxx {
// Don't track our marker nodes (memVar etc.).
return
}
@ -6216,12 +6218,12 @@ func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
// Don't track temporary variables.
return
}
if n.Class_ == ir.PPARAMOUT {
if n.Class == ir.PPARAMOUT {
// Don't track named output values. This prevents return values
// from being assigned too early. See #14591 and #14762. TODO: allow this.
return
}
loc := ssa.LocalSlot{N: n.Name(), Type: n.Type(), Off: 0}
loc := ssa.LocalSlot{N: n, Type: n.Type(), Off: 0}
values, ok := s.f.NamedValues[loc]
if !ok {
s.f.Names = append(s.f.Names, loc)
@ -6739,8 +6741,8 @@ func defframe(s *State, e *ssafn) {
if !n.Needzero() {
continue
}
if n.Class_ != ir.PAUTO {
e.Fatalf(n.Pos(), "needzero class %d", n.Class_)
if n.Class != ir.PAUTO {
e.Fatalf(n.Pos(), "needzero class %d", n.Class)
}
if n.Type().Size()%int64(types.PtrSize) != 0 || n.FrameOffset()%int64(types.PtrSize) != 0 || n.Type().Size() == 0 {
e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset_)
@ -6824,14 +6826,14 @@ func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
a.Name = obj.NAME_EXTERN
a.Sym = n
case *ir.Name:
if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT {
if n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT {
a.Name = obj.NAME_PARAM
a.Sym = ir.Orig(n).Sym().Linksym()
a.Sym = ir.Orig(n).(*ir.Name).Linksym()
a.Offset += n.FrameOffset()
break
}
a.Name = obj.NAME_AUTO
a.Sym = n.Sym().Linksym()
a.Sym = n.Linksym()
a.Offset += n.FrameOffset()
default:
v.Fatalf("aux in %s not implemented %#v", v, v.Aux)
@ -6963,10 +6965,10 @@ func CheckLoweredGetClosurePtr(v *ssa.Value) {
func AddrAuto(a *obj.Addr, v *ssa.Value) {
n, off := ssa.AutoVar(v)
a.Type = obj.TYPE_MEM
a.Sym = n.Sym().Linksym()
a.Sym = n.Linksym()
a.Reg = int16(Arch.REGSP)
a.Offset = n.FrameOffset() + off
if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT {
if n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT {
a.Name = obj.NAME_PARAM
} else {
a.Name = obj.NAME_AUTO
@ -6979,7 +6981,7 @@ func (s *State) AddrScratch(a *obj.Addr) {
}
a.Type = obj.TYPE_MEM
a.Name = obj.NAME_AUTO
a.Sym = s.ScratchFpMem.Sym().Linksym()
a.Sym = s.ScratchFpMem.Linksym()
a.Reg = int16(Arch.REGSP)
a.Offset = s.ScratchFpMem.Offset_
}
@ -7196,7 +7198,7 @@ func (e *ssafn) DerefItab(it *obj.LSym, offset int64) *obj.LSym {
func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
node := parent.N
if node.Class_ != ir.PAUTO || node.Name().Addrtaken() {
if node.Class != ir.PAUTO || node.Addrtaken() {
// addressed things and non-autos retain their parents (i.e., cannot truly be split)
return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset}
}
@ -7206,7 +7208,7 @@ func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t
s.Def = n
ir.AsNode(s.Def).Name().SetUsed(true)
n.SetType(t)
n.Class_ = ir.PAUTO
n.Class = ir.PAUTO
n.SetEsc(ir.EscNever)
n.Curfn = e.curfn
e.curfn.Dcl = append(e.curfn.Dcl, n)


@ -37,8 +37,8 @@ func InitAddr(n *ir.Name, noff int64, a *ir.Name, aoff int64) {
if a.Op() != ir.ONAME {
base.Fatalf("addrsym a op %v", a.Op())
}
s := n.Sym().Linksym()
s.WriteAddr(base.Ctxt, noff, types.PtrSize, a.Sym().Linksym(), aoff)
s := n.Linksym()
s.WriteAddr(base.Ctxt, noff, types.PtrSize, a.Linksym(), aoff)
}
// InitFunc writes the static address of f to n. f must be a global function.
@ -50,21 +50,21 @@ func InitFunc(n *ir.Name, noff int64, f *ir.Name) {
if n.Sym() == nil {
base.Fatalf("pfuncsym nil n sym")
}
if f.Class_ != ir.PFUNC {
base.Fatalf("pfuncsym class not PFUNC %d", f.Class_)
if f.Class != ir.PFUNC {
base.Fatalf("pfuncsym class not PFUNC %d", f.Class)
}
s := n.Sym().Linksym()
s.WriteAddr(base.Ctxt, noff, types.PtrSize, FuncSym(f.Sym()).Linksym(), 0)
s := n.Linksym()
s.WriteAddr(base.Ctxt, noff, types.PtrSize, FuncLinksym(f), 0)
}
// InitSlice writes a static slice symbol {&arr, lencap, lencap} to n+noff.
// InitSlice does not modify n.
func InitSlice(n *ir.Name, noff int64, arr *ir.Name, lencap int64) {
s := n.Sym().Linksym()
s := n.Linksym()
if arr.Op() != ir.ONAME {
base.Fatalf("slicesym non-name arr %v", arr)
}
s.WriteAddr(base.Ctxt, noff, types.PtrSize, arr.Sym().Linksym(), 0)
s.WriteAddr(base.Ctxt, noff, types.PtrSize, arr.Linksym(), 0)
s.WriteInt(base.Ctxt, noff+types.SliceLenOffset, types.PtrSize, lencap)
s.WriteInt(base.Ctxt, noff+types.SliceCapOffset, types.PtrSize, lencap)
}
@ -141,7 +141,7 @@ func fileStringSym(pos src.XPos, file string, readonly bool, hash []byte) (*obj.
if readonly {
sym = StringSym(pos, string(data))
} else {
sym = slicedata(pos, string(data)).Sym().Linksym()
sym = slicedata(pos, string(data)).Linksym()
}
if len(hash) > 0 {
sum := sha256.Sum256(data)
@ -189,7 +189,7 @@ func fileStringSym(pos src.XPos, file string, readonly bool, hash []byte) (*obj.
} else {
// Emit a zero-length data symbol
// and then fix up length and content to use file.
symdata = slicedata(pos, "").Sym().Linksym()
symdata = slicedata(pos, "").Linksym()
symdata.Size = size
symdata.Type = objabi.SNOPTRDATA
info := symdata.NewFileInfo()
@ -209,7 +209,7 @@ func slicedata(pos src.XPos, s string) *ir.Name {
symnode := typecheck.NewName(sym)
sym.Def = symnode
lsym := sym.Linksym()
lsym := symnode.Linksym()
off := dstringdata(lsym, 0, s, pos, "slice")
objw.Global(lsym, int32(off), obj.NOPTR|obj.LOCAL)
@ -258,6 +258,13 @@ func FuncSym(s *types.Sym) *types.Sym {
return sf
}
func FuncLinksym(n *ir.Name) *obj.LSym {
if n.Op() != ir.ONAME || n.Class != ir.PFUNC {
base.Fatalf("expected func name: %v", n)
}
return FuncSym(n.Sym()).Linksym()
}
// NeedFuncSym ensures that s·f is exported.
// It is only used with -dynlink.
// When not compiling for dynamic linking,
@ -311,7 +318,7 @@ func InitConst(n *ir.Name, noff int64, c ir.Node, wid int) {
if c.Op() != ir.OLITERAL {
base.Fatalf("litsym c op %v", c.Op())
}
s := n.Sym().Linksym()
s := n.Linksym()
switch u := c.Val(); u.Kind() {
case constant.Bool:
i := int64(obj.Bool2int(constant.BoolVal(u)))
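InitConst writes a constant's bytes straight into the variable's data symbol, so globals like these cost no init-time code. An illustrative package (byte patterns assume little-endian):

    package staticexample

    var flag bool = true // symbol data: 01
    var word int64 = 42  // symbol data: 2a 00 00 00 00 00 00 00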


@ -145,7 +145,7 @@ func WriteEmbed(v *ir.Name) {
if err != nil {
base.ErrorfAt(v.Pos(), "embed %s: %v", file, err)
}
sym := v.Sym().Linksym()
sym := v.Linksym()
off := 0
off = objw.SymPtr(sym, off, fsym, 0) // data string
off = objw.Uintptr(sym, off, uint64(size)) // len
@ -187,7 +187,7 @@ func WriteEmbed(v *ir.Name) {
}
}
objw.Global(slicedata, int32(off), obj.RODATA|obj.LOCAL)
sym := v.Sym().Linksym()
sym := v.Linksym()
objw.SymPtr(sym, 0, slicedata, 0)
}
}


@ -78,15 +78,12 @@ func (s *Schedule) tryStaticInit(nn ir.Node) bool {
// like staticassign but we are copying an already
// initialized value r.
func (s *Schedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *types.Type) bool {
if rn.Class_ == ir.PFUNC {
if rn.Class == ir.PFUNC {
// TODO if roff != 0 { panic }
staticdata.InitFunc(l, loff, rn)
return true
}
if rn.Class_ != ir.PEXTERN || rn.Sym().Pkg != types.LocalPkg {
return false
}
if rn.Defn == nil { // probably zeroed but perhaps supplied externally and of unknown value
if rn.Class != ir.PEXTERN || rn.Sym().Pkg != types.LocalPkg {
return false
}
if rn.Defn.Op() != ir.OAS {
@ -95,8 +92,16 @@ func (s *Schedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *types.Ty
if rn.Type().IsString() { // perhaps overwritten by cmd/link -X (#34675)
return false
}
if rn.Embed != nil {
return false
}
orig := rn
r := rn.Defn.(*ir.AssignStmt).Y
if r == nil {
// No explicit initialization value. Probably zeroed but perhaps
// supplied externally and of unknown value.
return false
}
for r.Op() == ir.OCONVNOP && !types.Identical(r.Type(), typ) {
r = r.(*ir.ConvExpr).X
@ -104,7 +109,7 @@ func (s *Schedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *types.Ty
switch r.Op() {
case ir.OMETHEXPR:
r = r.(*ir.MethodExpr).FuncName()
r = r.(*ir.SelectorExpr).FuncName()
fallthrough
case ir.ONAME:
r := r.(*ir.Name)
@ -165,7 +170,7 @@ func (s *Schedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *types.Ty
}
x := e.Expr
if x.Op() == ir.OMETHEXPR {
x = x.(*ir.MethodExpr).FuncName()
x = x.(*ir.SelectorExpr).FuncName()
}
if x.Op() == ir.ONAME && s.staticcopy(l, loff+e.Xoffset, x.(*ir.Name), typ) {
continue
@ -185,6 +190,11 @@ func (s *Schedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *types.Ty
}
func (s *Schedule) StaticAssign(l *ir.Name, loff int64, r ir.Node, typ *types.Type) bool {
if r == nil {
// No explicit initialization value. Either zero or supplied
// externally.
return true
}
for r.Op() == ir.OCONVNOP {
r = r.(*ir.ConvExpr).X
}
@ -195,7 +205,7 @@ func (s *Schedule) StaticAssign(l *ir.Name, loff int64, r ir.Node, typ *types.Ty
return s.staticcopy(l, loff, r, typ)
case ir.OMETHEXPR:
r := r.(*ir.MethodExpr)
r := r.(*ir.SelectorExpr)
return s.staticcopy(l, loff, r.FuncName(), typ)
case ir.ONIL:
@ -236,7 +246,7 @@ func (s *Schedule) StaticAssign(l *ir.Name, loff int64, r ir.Node, typ *types.Ty
case ir.OSTR2BYTES:
r := r.(*ir.ConvExpr)
if l.Class_ == ir.PEXTERN && r.X.Op() == ir.OLITERAL {
if l.Class == ir.PEXTERN && r.X.Op() == ir.OLITERAL {
sval := ir.StringVal(r.X)
staticdata.InitSliceBytes(l, loff, sval)
return true
@ -313,7 +323,7 @@ func (s *Schedule) StaticAssign(l *ir.Name, loff int64, r ir.Node, typ *types.Ty
return val.Op() == ir.ONIL
}
reflectdata.MarkTypeUsedInInterface(val.Type(), l.Sym().Linksym())
reflectdata.MarkTypeUsedInInterface(val.Type(), l.Linksym())
var itab *ir.AddrExpr
if typ.IsEmptyInterface() {
@ -445,7 +455,7 @@ func StaticName(t *types.Type) *ir.Name {
statuniqgen++
typecheck.Declare(n, ir.PEXTERN)
n.SetType(t)
n.Sym().Linksym().Set(obj.AttrLocal, true)
n.Linksym().Set(obj.AttrLocal, true)
return n
}
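The point of staticcopy, for orientation: when one global's initializer is just another already-static global, its data is copied at compile time instead of emitting init code. An illustrative (and assumed-to-qualify) source pattern:

    package staticexample

    var src = [3]int{1, 2, 3}
    var dst = src // RHS is an ONAME with a static Defn: copy data, no init func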
@ -461,7 +471,7 @@ func StaticLoc(n ir.Node) (name *ir.Name, offset int64, ok bool) {
return n, 0, true
case ir.OMETHEXPR:
n := n.(*ir.MethodExpr)
n := n.(*ir.SelectorExpr)
return StaticLoc(n.FuncName())
case ir.ODOT:


@ -1,9 +1,37 @@
// Code generated by "stringer -type Operator -linecomment"; DO NOT EDIT.
// Code generated by "stringer -type Operator -linecomment tokens.go"; DO NOT EDIT.
package syntax
import "strconv"
func _() {
// An "invalid array index" compiler error signifies that the constant values have changed.
// Re-run the stringer command to generate them again.
var x [1]struct{}
_ = x[Def-1]
_ = x[Not-2]
_ = x[Recv-3]
_ = x[OrOr-4]
_ = x[AndAnd-5]
_ = x[Eql-6]
_ = x[Neq-7]
_ = x[Lss-8]
_ = x[Leq-9]
_ = x[Gtr-10]
_ = x[Geq-11]
_ = x[Add-12]
_ = x[Sub-13]
_ = x[Or-14]
_ = x[Xor-15]
_ = x[Mul-16]
_ = x[Div-17]
_ = x[Rem-18]
_ = x[And-19]
_ = x[AndNot-20]
_ = x[Shl-21]
_ = x[Shr-22]
}
const _Operator_name = ":!<-||&&==!=<<=>>=+-|^*/%&&^<<>>"
var _Operator_index = [...]uint8{0, 1, 2, 4, 6, 8, 10, 12, 13, 15, 16, 18, 19, 20, 21, 22, 23, 24, 25, 26, 28, 30, 32}


@ -1,9 +1,62 @@
// Code generated by "stringer -type token -linecomment"; DO NOT EDIT.
// Code generated by "stringer -type token -linecomment tokens.go"; DO NOT EDIT.
package syntax
import "strconv"
func _() {
// An "invalid array index" compiler error signifies that the constant values have changed.
// Re-run the stringer command to generate them again.
var x [1]struct{}
_ = x[_EOF-1]
_ = x[_Name-2]
_ = x[_Literal-3]
_ = x[_Operator-4]
_ = x[_AssignOp-5]
_ = x[_IncOp-6]
_ = x[_Assign-7]
_ = x[_Define-8]
_ = x[_Arrow-9]
_ = x[_Star-10]
_ = x[_Lparen-11]
_ = x[_Lbrack-12]
_ = x[_Lbrace-13]
_ = x[_Rparen-14]
_ = x[_Rbrack-15]
_ = x[_Rbrace-16]
_ = x[_Comma-17]
_ = x[_Semi-18]
_ = x[_Colon-19]
_ = x[_Dot-20]
_ = x[_DotDotDot-21]
_ = x[_Break-22]
_ = x[_Case-23]
_ = x[_Chan-24]
_ = x[_Const-25]
_ = x[_Continue-26]
_ = x[_Default-27]
_ = x[_Defer-28]
_ = x[_Else-29]
_ = x[_Fallthrough-30]
_ = x[_For-31]
_ = x[_Func-32]
_ = x[_Go-33]
_ = x[_Goto-34]
_ = x[_If-35]
_ = x[_Import-36]
_ = x[_Interface-37]
_ = x[_Map-38]
_ = x[_Package-39]
_ = x[_Range-40]
_ = x[_Return-41]
_ = x[_Select-42]
_ = x[_Struct-43]
_ = x[_Switch-44]
_ = x[_Type-45]
_ = x[_Var-46]
_ = x[tokenCount-47]
}
const _token_name = "EOFnameliteralopop=opop=:=<-*([{)]},;:....breakcasechanconstcontinuedefaultdeferelsefallthroughforfuncgogotoifimportinterfacemappackagerangereturnselectstructswitchtypevar"
var _token_index = [...]uint8{0, 3, 7, 14, 16, 19, 23, 24, 26, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 42, 47, 51, 55, 60, 68, 75, 80, 84, 95, 98, 102, 104, 108, 110, 116, 125, 128, 135, 140, 146, 152, 158, 164, 168, 171, 171}


@ -6,7 +6,7 @@ package syntax
type token uint
//go:generate stringer -type token -linecomment
//go:generate stringer -type token -linecomment tokens.go
const (
_ token = iota
@ -105,7 +105,7 @@ const (
type Operator uint
//go:generate stringer -type Operator -linecomment
//go:generate stringer -type Operator -linecomment tokens.go
const (
_ Operator = iota


@ -509,7 +509,7 @@ func EvalConst(n ir.Node) ir.Node {
}
nl := ir.Copy(n).(*ir.AddStringExpr)
nl.List.Set(s[i:i2])
nl.List = s[i:i2]
newList = append(newList, OrigConst(nl, constant.MakeString(strings.Join(strs, ""))))
i = i2 - 1
} else {
@ -518,7 +518,7 @@ func EvalConst(n ir.Node) ir.Node {
}
nn := ir.Copy(n).(*ir.AddStringExpr)
nn.List.Set(newList)
nn.List = newList
return nn
case ir.OCAP, ir.OLEN:
@ -564,20 +564,11 @@ func EvalConst(n ir.Node) ir.Node {
return n
}
func makeInt(i *big.Int) constant.Value {
if i.IsInt64() {
return constant.Make(i.Int64()) // workaround #42640 (Int64Val(Make(big.NewInt(10))) returns (10, false), not (10, true))
}
return constant.Make(i)
}
func makeFloat64(f float64) constant.Value {
if math.IsInf(f, 0) {
base.Fatalf("infinity is not a valid constant")
}
v := constant.MakeFloat64(f)
v = constant.ToFloat(v) // workaround #42641 (MakeFloat64(0).Kind() returns Int, not Float)
return v
return constant.MakeFloat64(f)
}
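The deleted branches worked around the two go/constant bugs cited in the removed comments; dropping them implies both are fixed upstream. A runnable check of the two behaviors:

    package main

    import (
        "fmt"
        "go/constant"
        "math/big"
    )

    func main() {
        v := constant.Make(big.NewInt(10))
        i, exact := constant.Int64Val(v)
        fmt.Println(i, exact) // printed "10 false" before the #42640 fix

        f := constant.MakeFloat64(0)
        fmt.Println(f.Kind() == constant.Float) // false before the #42641 fix
    }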
func makeComplex(real, imag constant.Value) constant.Value {


@ -15,64 +15,7 @@ import (
"cmd/internal/src"
)
var DeclContext ir.Class // PEXTERN/PAUTO
func AssignDefn(left []ir.Node, defn ir.Node) {
for _, n := range left {
if n.Sym() != nil {
n.Sym().SetUniq(true)
}
}
var nnew, nerr int
for i, n := range left {
if ir.IsBlank(n) {
continue
}
if !assignableName(n) {
base.ErrorfAt(defn.Pos(), "non-name %v on left side of :=", n)
nerr++
continue
}
if !n.Sym().Uniq() {
base.ErrorfAt(defn.Pos(), "%v repeated on left side of :=", n.Sym())
n.SetDiag(true)
nerr++
continue
}
n.Sym().SetUniq(false)
if n.Sym().Block == types.Block {
continue
}
nnew++
n := NewName(n.Sym())
Declare(n, DeclContext)
n.Defn = defn
defn.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, n))
left[i] = n
}
if nnew == 0 && nerr == 0 {
base.ErrorfAt(defn.Pos(), "no new variables on left side of :=")
}
}
// := declarations
func assignableName(n ir.Node) bool {
switch n.Op() {
case ir.ONAME,
ir.ONONAME,
ir.OPACK,
ir.OTYPE,
ir.OLITERAL:
return n.Sym() != nil
}
return false
}
var DeclContext ir.Class = ir.PEXTERN // PEXTERN/PAUTO
func DeclFunc(sym *types.Sym, tfn ir.Ntype) *ir.Func {
if tfn.Op() != ir.OTFUNC {
@ -80,7 +23,8 @@ func DeclFunc(sym *types.Sym, tfn ir.Ntype) *ir.Func {
}
fn := ir.NewFunc(base.Pos)
fn.Nname = ir.NewFuncNameAt(base.Pos, sym, fn)
fn.Nname = ir.NewNameAt(base.Pos, sym)
fn.Nname.Func = fn
fn.Nname.Defn = fn
fn.Nname.Ntype = tfn
ir.MarkFunc(fn.Nname)
@ -89,60 +33,6 @@ func DeclFunc(sym *types.Sym, tfn ir.Ntype) *ir.Func {
return fn
}
// declare variables from grammar
// new_name_list (type | [type] = expr_list)
func DeclVars(vl []*ir.Name, t ir.Ntype, el []ir.Node) []ir.Node {
var init []ir.Node
doexpr := len(el) > 0
if len(el) == 1 && len(vl) > 1 {
e := el[0]
as2 := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
as2.Rhs = []ir.Node{e}
for _, v := range vl {
as2.Lhs.Append(v)
Declare(v, DeclContext)
v.Ntype = t
v.Defn = as2
if ir.CurFunc != nil {
init = append(init, ir.NewDecl(base.Pos, ir.ODCL, v))
}
}
return append(init, as2)
}
for i, v := range vl {
var e ir.Node
if doexpr {
if i >= len(el) {
base.Errorf("assignment mismatch: %d variables but %d values", len(vl), len(el))
break
}
e = el[i]
}
Declare(v, DeclContext)
v.Ntype = t
if e != nil || ir.CurFunc != nil || ir.IsBlank(v) {
if ir.CurFunc != nil {
init = append(init, ir.NewDecl(base.Pos, ir.ODCL, v))
}
as := ir.NewAssignStmt(base.Pos, v, e)
init = append(init, as)
if e != nil {
v.Defn = as
}
}
}
if len(el) > len(vl) {
base.Errorf("assignment mismatch: %d variables but %d values", len(vl), len(el))
}
return init
}
// Declare records that Node n declares symbol n.Sym in the specified
// declaration context.
func Declare(n *ir.Name, ctxt ir.Class) {
@ -201,7 +91,7 @@ func Declare(n *ir.Name, ctxt ir.Class) {
s.Lastlineno = base.Pos
s.Def = n
n.Vargen = int32(gen)
n.Class_ = ctxt
n.Class = ctxt
if ctxt == ir.PFUNC {
n.Sym().SetFunc(true)
}
@ -565,7 +455,7 @@ func TempAt(pos src.XPos, curfn *ir.Func, t *types.Type) *ir.Name {
n := ir.NewNameAt(pos, s)
s.Def = n
n.SetType(t)
n.Class_ = ir.PAUTO
n.Class = ir.PAUTO
n.SetEsc(ir.EscNever)
n.Curfn = curfn
n.SetUsed(true)
@ -596,6 +486,9 @@ func NewMethodType(sig *types.Type, recv *types.Type) *types.Type {
nrecvs++
}
// TODO(mdempsky): Move this function to types.
// TODO(mdempsky): Preserve positions, names, and package from sig+recv.
params := make([]*types.Field, nrecvs+sig.Params().Fields().Len())
if recv != nil {
params[0] = types.NewField(base.Pos, nil, recv)


@ -31,12 +31,8 @@ func importconst(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type, val
// ipkg is the package being imported
func importfunc(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) *ir.Name {
n := importobj(ipkg, pos, s, ir.ONAME, ir.PFUNC, t)
fn := ir.NewFunc(pos)
fn.SetType(t)
n.SetFunc(fn)
fn.Nname = n
n.Func = ir.NewFunc(pos)
n.Func.Nname = n
return n
}
@ -57,9 +53,8 @@ func importsym(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Cl
}
n := ir.NewDeclNameAt(pos, op, s)
n.Class_ = ctxt // TODO(mdempsky): Move this into NewDeclNameAt too?
n.Class = ctxt // TODO(mdempsky): Move this into NewDeclNameAt too?
s.SetPkgDef(n)
s.Importdef = ipkg
return n
}


@ -35,14 +35,6 @@ func tcAddr(n *ir.AddrExpr) ir.Node {
if ir.Orig(r) != r {
base.Fatalf("found non-orig name node %v", r) // TODO(mdempsky): What does this mean?
}
r.Name().SetAddrtaken(true)
if r.Name().IsClosureVar() && !CaptureVarsComplete {
// Mark the original variable as Addrtaken so that capturevars
// knows not to pass it by value.
// But if the capturevars phase is complete, don't touch it,
// in case l.Name's containing function has not yet been compiled.
r.Name().Defn.Name().SetAddrtaken(true)
}
}
n.X = DefaultLit(n.X, nil)
if n.X.Type() == nil {
@ -55,103 +47,50 @@ func tcAddr(n *ir.AddrExpr) ir.Node {
return n
}
// tcArith typechecks a binary arithmetic expression.
func tcArith(n ir.Node) ir.Node {
var l, r ir.Node
var setLR func()
switch n := n.(type) {
case *ir.AssignOpStmt:
l, r = n.X, n.Y
setLR = func() { n.X = l; n.Y = r }
case *ir.BinaryExpr:
l, r = n.X, n.Y
setLR = func() { n.X = l; n.Y = r }
case *ir.LogicalExpr:
l, r = n.X, n.Y
setLR = func() { n.X = l; n.Y = r }
}
l = Expr(l)
r = Expr(r)
setLR()
if l.Type() == nil || r.Type() == nil {
n.SetType(nil)
return n
}
op := n.Op()
if n.Op() == ir.OASOP {
n := n.(*ir.AssignOpStmt)
checkassign(n, l)
if n.IncDec && !okforarith[l.Type().Kind()] {
base.Errorf("invalid operation: %v (non-numeric type %v)", n, l.Type())
n.SetType(nil)
return n
}
// TODO(marvin): Fix Node.EType type union.
op = n.AsOp
}
if op == ir.OLSH || op == ir.ORSH {
r = DefaultLit(r, types.Types[types.TUINT])
setLR()
t := r.Type()
if !t.IsInteger() {
base.Errorf("invalid operation: %v (shift count type %v, must be integer)", n, r.Type())
n.SetType(nil)
return n
}
if t.IsSigned() && !types.AllowsGoVersion(curpkg(), 1, 13) {
base.ErrorfVers("go1.13", "invalid operation: %v (signed shift count type %v)", n, r.Type())
n.SetType(nil)
return n
}
t = l.Type()
if t != nil && t.Kind() != types.TIDEAL && !t.IsInteger() {
base.Errorf("invalid operation: %v (shift of type %v)", n, t)
n.SetType(nil)
return n
}
// no defaultlit for left
// the outer context gives the type
n.SetType(l.Type())
if (l.Type() == types.UntypedFloat || l.Type() == types.UntypedComplex) && r.Op() == ir.OLITERAL {
n.SetType(types.UntypedInt)
}
return n
func tcShift(n, l, r ir.Node) (ir.Node, ir.Node, *types.Type) {
if l.Type() == nil || r.Type() == nil {
return l, r, nil
}
// For "x == x && len(s)", it's better to report that "len(s)" (type int)
// can't be used with "&&" than to report that "x == x" (type untyped bool)
// can't be converted to int (see issue #41500).
if n.Op() == ir.OANDAND || n.Op() == ir.OOROR {
n := n.(*ir.LogicalExpr)
if !n.X.Type().IsBoolean() {
base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op(), typekind(n.X.Type()))
n.SetType(nil)
return n
}
if !n.Y.Type().IsBoolean() {
base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op(), typekind(n.Y.Type()))
n.SetType(nil)
return n
}
r = DefaultLit(r, types.Types[types.TUINT])
t := r.Type()
if !t.IsInteger() {
base.Errorf("invalid operation: %v (shift count type %v, must be integer)", n, r.Type())
return l, r, nil
}
if t.IsSigned() && !types.AllowsGoVersion(curpkg(), 1, 13) {
base.ErrorfVers("go1.13", "invalid operation: %v (signed shift count type %v)", n, r.Type())
return l, r, nil
}
t = l.Type()
if t != nil && t.Kind() != types.TIDEAL && !t.IsInteger() {
base.Errorf("invalid operation: %v (shift of type %v)", n, t)
return l, r, nil
}
// ideal mixed with non-ideal
// no defaultlit for left
// the outer context gives the type
t = l.Type()
if (l.Type() == types.UntypedFloat || l.Type() == types.UntypedComplex) && r.Op() == ir.OLITERAL {
t = types.UntypedInt
}
return l, r, t
}
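The final special case in tcShift gives an untyped float or complex left operand an untyped int result when the count is a literal, matching the spec's constant-shift rule. For example:

    package shiftrules

    // 1.0 is an untyped float constant but representable as an integer, so
    // the shift is legal and k is the untyped integer constant 4.
    const k = 1.0 << 2

    // A non-integral constant still fails:
    // const bad = 1.5 << 1 // rejected: constant truncated to integer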
// tcArith typechecks operands of a binary arithmetic expression.
// The result of tcArith MUST be assigned back to original operands,
// t is the type of the expression, and should be set by the caller. e.g:
// n.X, n.Y, t = tcArith(n, op, n.X, n.Y)
// n.SetType(t)
func tcArith(n ir.Node, op ir.Op, l, r ir.Node) (ir.Node, ir.Node, *types.Type) {
l, r = defaultlit2(l, r, false)
setLR()
if l.Type() == nil || r.Type() == nil {
n.SetType(nil)
return n
return l, r, nil
}
t := l.Type()
if t.Kind() == types.TIDEAL {
t = r.Type()
}
et := t.Kind()
if et == types.TIDEAL {
et = types.TINT
}
aop := ir.OXXX
if iscmp[n.Op()] && t.Kind() != types.TIDEAL && !types.Identical(l.Type(), r.Type()) {
// comparison is okay as long as one side is
@ -167,15 +106,13 @@ func tcArith(n ir.Node) ir.Node {
if aop != ir.OXXX {
if r.Type().IsInterface() && !l.Type().IsInterface() && !types.IsComparable(l.Type()) {
base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(l.Type()))
n.SetType(nil)
return n
return l, r, nil
}
types.CalcSize(l.Type())
if r.Type().IsInterface() == l.Type().IsInterface() || l.Type().Width >= 1<<16 {
l = ir.NewConvExpr(base.Pos, aop, r.Type(), l)
l.SetTypecheck(1)
setLR()
}
t = r.Type()
@ -188,34 +125,28 @@ func tcArith(n ir.Node) ir.Node {
if aop != ir.OXXX {
if l.Type().IsInterface() && !r.Type().IsInterface() && !types.IsComparable(r.Type()) {
base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(r.Type()))
n.SetType(nil)
return n
return l, r, nil
}
types.CalcSize(r.Type())
if r.Type().IsInterface() == l.Type().IsInterface() || r.Type().Width >= 1<<16 {
r = ir.NewConvExpr(base.Pos, aop, l.Type(), r)
r.SetTypecheck(1)
setLR()
}
t = l.Type()
}
}
et = t.Kind()
}
if t.Kind() != types.TIDEAL && !types.Identical(l.Type(), r.Type()) {
l, r = defaultlit2(l, r, true)
if l.Type() == nil || r.Type() == nil {
n.SetType(nil)
return n
return l, r, nil
}
if l.Type().IsInterface() == r.Type().IsInterface() || aop == 0 {
base.Errorf("invalid operation: %v (mismatched types %v and %v)", n, l.Type(), r.Type())
n.SetType(nil)
return n
return l, r, nil
}
}
@ -224,85 +155,46 @@ func tcArith(n ir.Node) ir.Node {
}
if dt := defaultType(t); !okfor[op][dt.Kind()] {
base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(t))
n.SetType(nil)
return n
return l, r, nil
}
// okfor allows any array == array, map == map, func == func.
// restrict to slice/map/func == nil and nil == slice/map/func.
if l.Type().IsArray() && !types.IsComparable(l.Type()) {
base.Errorf("invalid operation: %v (%v cannot be compared)", n, l.Type())
n.SetType(nil)
return n
return l, r, nil
}
if l.Type().IsSlice() && !ir.IsNil(l) && !ir.IsNil(r) {
base.Errorf("invalid operation: %v (slice can only be compared to nil)", n)
n.SetType(nil)
return n
return l, r, nil
}
if l.Type().IsMap() && !ir.IsNil(l) && !ir.IsNil(r) {
base.Errorf("invalid operation: %v (map can only be compared to nil)", n)
n.SetType(nil)
return n
return l, r, nil
}
if l.Type().Kind() == types.TFUNC && !ir.IsNil(l) && !ir.IsNil(r) {
base.Errorf("invalid operation: %v (func can only be compared to nil)", n)
n.SetType(nil)
return n
return l, r, nil
}
if l.Type().IsStruct() {
if f := types.IncomparableField(l.Type()); f != nil {
base.Errorf("invalid operation: %v (struct containing %v cannot be compared)", n, f.Type)
n.SetType(nil)
return n
return l, r, nil
}
}
if iscmp[n.Op()] {
t = types.UntypedBool
n.SetType(t)
if con := EvalConst(n); con.Op() == ir.OLITERAL {
return con
}
l, r = defaultlit2(l, r, true)
setLR()
return n
}
if et == types.TSTRING && n.Op() == ir.OADD {
// create or update OADDSTR node with list of strings in x + y + z + (w + v) + ...
n := n.(*ir.BinaryExpr)
var add *ir.AddStringExpr
if l.Op() == ir.OADDSTR {
add = l.(*ir.AddStringExpr)
add.SetPos(n.Pos())
} else {
add = ir.NewAddStringExpr(n.Pos(), []ir.Node{l})
}
if r.Op() == ir.OADDSTR {
r := r.(*ir.AddStringExpr)
add.List.Append(r.List.Take()...)
} else {
add.List.Append(r)
}
add.SetType(t)
return add
}
if (op == ir.ODIV || op == ir.OMOD) && ir.IsConst(r, constant.Int) {
if constant.Sign(r.Val()) == 0 {
base.Errorf("division by zero")
n.SetType(nil)
return n
return l, r, nil
}
}
n.SetType(t)
return n
return l, r, t
}
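
As a plain-Go illustration of the OADDSTR flattening and the constant-division check above (a sketch with made-up names, not from the patch):

	package main

	import "fmt"

	func main() {
		w, v := "w", "v"
		s := "x" + "y" + "z" + (w + v) // collected into a single OADDSTR node, not a tree of adds
		fmt.Println(s)                 // xyzwv
		// _ = 1 / 0                   // rejected at compile time: "division by zero"
	}
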
// The result of tcCompLit MUST be assigned back to n, e.g.
@ -330,7 +222,7 @@ func tcCompLit(n *ir.CompLitExpr) (res ir.Node) {
// Need to handle [...]T arrays specially.
if array, ok := n.Ntype.(*ir.ArrayType); ok && array.Elem != nil && array.Len == nil {
array.Elem = typecheck(array.Elem, ctxType)
array.Elem = typecheckNtype(array.Elem)
elemType := array.Elem.Type()
if elemType == nil {
n.SetType(nil)
@ -343,7 +235,7 @@ func tcCompLit(n *ir.CompLitExpr) (res ir.Node) {
return n
}
n.Ntype = ir.Node(typecheck(n.Ntype, ctxType)).(ir.Ntype)
n.Ntype = typecheckNtype(n.Ntype)
t := n.Ntype.Type()
if t == nil {
n.SetType(nil)
@ -626,10 +518,8 @@ func tcDot(n *ir.SelectorExpr, top int) ir.Node {
}
if (n.Op() == ir.ODOTINTER || n.Op() == ir.ODOTMETH) && top&ctxCallee == 0 {
// Create top-level function.
fn := makepartialcall(n)
return ir.NewCallPartExpr(n.Pos(), n.X, n.Selection, fn)
n.SetOp(ir.OCALLPART)
n.SetType(MethodValueWrapper(n).Type())
}
return n
}
@ -651,7 +541,7 @@ func tcDotType(n *ir.TypeAssertExpr) ir.Node {
}
if n.Ntype != nil {
n.Ntype = typecheck(n.Ntype, ctxType)
n.Ntype = typecheckNtype(n.Ntype)
n.SetType(n.Ntype.Type())
n.Ntype = nil
if n.Type() == nil {


@ -21,7 +21,7 @@ func MakeDotArgs(typ *types.Type, args []ir.Node) ir.Node {
n = NodNil()
n.SetType(typ)
} else {
lit := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ).(ir.Ntype), nil)
lit := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ), nil)
lit.List.Append(args...)
lit.SetImplicit(true)
n = lit
@ -52,7 +52,7 @@ func FixVariadicCall(call *ir.CallExpr) {
extra[i] = nil // allow GC
}
call.Args.Set(append(args[:vi], slice))
call.Args = append(args[:vi], slice)
call.IsDDD = true
}
@ -91,7 +91,7 @@ func ClosureType(clo *ir.ClosureExpr) *types.Type {
// PartialCallType returns the struct type used to hold all the information
// needed in the closure for n (n must be a OCALLPART node).
// The address of a variable of the returned type can be cast to a func.
func PartialCallType(n *ir.CallPartExpr) *types.Type {
func PartialCallType(n *ir.SelectorExpr) *types.Type {
t := types.NewStruct(types.NoPkg, []*types.Field{
types.NewField(base.Pos, Lookup("F"), types.Types[types.TUINTPTR]),
types.NewField(base.Pos, Lookup("R"), n.X.Type()),
@ -106,56 +106,24 @@ func PartialCallType(n *ir.CallPartExpr) *types.Type {
// We use value capturing for values <= 128 bytes that are never reassigned
// after capturing (effectively constant).
func CaptureVars(fn *ir.Func) {
lno := base.Pos
base.Pos = fn.Pos()
cvars := fn.ClosureVars
out := cvars[:0]
for _, v := range cvars {
if v.Type() == nil {
// If v.Type is nil, it means v looked like it
// was going to be used in the closure, but
// isn't. This happens in struct literals like
// s{f: x} where we can't distinguish whether
// f is a field identifier or expression until
// resolving s.
continue
}
out = append(out, v)
// type check the & of closed variables outside the closure,
// so that the outer frame also grabs them and knows they escape.
types.CalcSize(v.Type())
var outer ir.Node
outer = v.Outer
for _, v := range fn.ClosureVars {
outermost := v.Defn.(*ir.Name)
// out parameters will be assigned to implicitly upon return.
if outermost.Class_ != ir.PPARAMOUT && !outermost.Name().Addrtaken() && !outermost.Name().Assigned() && v.Type().Width <= 128 {
v.SetByval(true)
if outermost.Class != ir.PPARAMOUT && !outermost.Addrtaken() && !outermost.Assigned() && outermost.Type().Size() <= 128 {
outermost.SetByval(true)
} else {
outermost.Name().SetAddrtaken(true)
outer = NodAddr(outer)
outermost.SetAddrtaken(true)
}
if base.Flag.LowerM > 1 {
var name *types.Sym
if v.Curfn != nil && v.Curfn.Nname != nil {
name = v.Curfn.Sym()
}
how := "ref"
if v.Byval() {
how = "value"
}
base.WarnfAt(v.Pos(), "%v capturing by %s: %v (addr=%v assign=%v width=%d)", name, how, v.Sym(), outermost.Name().Addrtaken(), outermost.Name().Assigned(), int32(v.Type().Width))
base.WarnfAt(v.Pos(), "%v capturing by %s: %v (addr=%v assign=%v width=%d)", v.Curfn, how, v, outermost.Addrtaken(), outermost.Assigned(), v.Type().Size())
}
outer = Expr(outer)
fn.ClosureEnter.Append(outer)
}
fn.ClosureVars = out
base.Pos = lno
}
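
A source-level sketch of the capture decision above (illustrative only; the variable names are made up): a small variable that is never reassigned and never has its address taken can be captured by value, anything else by reference:

	package main

	import "fmt"

	func main() {
		a := 1 // only read inside the closure and well under 128 bytes: eligible for capture by value
		b := 1 // reassigned inside the closure: must be captured by reference

		f := func() int {
			b++
			return a + b
		}
		fmt.Println(f()) // 3
	}
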
// Lazy typechecking of imported bodies. For local functions, caninl will set ->typecheck
@ -163,6 +131,21 @@ func CaptureVars(fn *ir.Func) {
func ImportedBody(fn *ir.Func) {
lno := ir.SetPos(fn.Nname)
// When we load an inlined body, we need to allow OADDR
// operations on untyped expressions. We will fix the
// addrtaken flags on all the arguments of the OADDR with the
// computeAddrtaken call below (after we typecheck the body).
// TODO: export/import types and addrtaken marks along with inlined bodies,
// so this will be unnecessary.
IncrementalAddrtaken = false
defer func() {
if DirtyAddrtaken {
ComputeAddrtaken(fn.Inl.Body) // compute addrtaken marks once types are available
DirtyAddrtaken = false
}
IncrementalAddrtaken = true
}()
ImportBody(fn)
// typecheckinl is only for imported functions;
@ -247,9 +230,17 @@ func closurename(outerfunc *ir.Func) *types.Sym {
// globClosgen is like Func.Closgen, but for the global scope.
var globClosgen int32
// makepartialcall returns a DCLFUNC node representing the wrapper function (*-fm) needed
// for partial calls.
func makepartialcall(dot *ir.SelectorExpr) *ir.Func {
// MethodValueWrapper returns the DCLFUNC node representing the
// wrapper function (*-fm) needed for the given method value. If the
// wrapper function hasn't been created yet, it is created and added
// to Target.Decls.
//
// TODO(mdempsky): Move into walk. This isn't part of type checking.
func MethodValueWrapper(dot *ir.SelectorExpr) *ir.Func {
if dot.Op() != ir.OCALLPART {
base.Fatalf("MethodValueWrapper: unexpected %v (%v)", dot, dot.Op())
}
t0 := dot.Type()
meth := dot.Sel
rcvrtype := dot.X.Type()
@ -296,7 +287,7 @@ func makepartialcall(dot *ir.SelectorExpr) *ir.Func {
}
call := ir.NewCallExpr(base.Pos, ir.OCALL, ir.NewSelectorExpr(base.Pos, ir.OXDOT, ptr, meth), nil)
call.Args.Set(ir.ParamNames(tfn.Type()))
call.Args = ir.ParamNames(tfn.Type())
call.IsDDD = tfn.Type().IsVariadic()
if t0.NumResults() != 0 {
ret := ir.NewReturnStmt(base.Pos, nil)
@ -306,7 +297,7 @@ func makepartialcall(dot *ir.SelectorExpr) *ir.Func {
body = append(body, call)
}
fn.Body.Set(body)
fn.Body = body
FinishFuncBody()
Func(fn)
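
For context, the kind of expression MethodValueWrapper serves (a made-up example): a method value captures its receiver, and the compiler emits a *-fm wrapper function that supplies the receiver on each call:

	package main

	import "fmt"

	type T struct{ n int }

	func (t T) Get() int { return t.n }

	func main() {
		f := T{42}.Get   // method value: the receiver T{42} is captured by the wrapper
		fmt.Println(f()) // 42
	}
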
@ -334,7 +325,7 @@ func tcClosure(clo *ir.ClosureExpr, top int) {
fn.Iota = x
}
fn.ClosureType = typecheck(fn.ClosureType, ctxType)
fn.ClosureType = typecheckNtype(fn.ClosureType)
clo.SetType(fn.ClosureType.Type())
fn.SetClosureCalled(top&ctxCallee != 0)
@ -379,6 +370,25 @@ func tcClosure(clo *ir.ClosureExpr, top int) {
ir.CurFunc = oldfn
}
out := 0
for _, v := range fn.ClosureVars {
if v.Type() == nil {
// If v.Type is nil, it means v looked like it was going to be
// used in the closure, but isn't. This happens in struct
// literals like s{f: x} where we can't distinguish whether f is
// a field identifier or expression until resolving s.
continue
}
// type check closed variables outside the closure, so that the
// outer frame also captures them.
Expr(v.Outer)
fn.ClosureVars[out] = v
out++
}
fn.ClosureVars = fn.ClosureVars[:out]
Target.Decls = append(Target.Decls, fn)
}
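
The s{f: x} ambiguity handled in the loop above can be seen in plain Go (illustrative sketch; the type and function names are invented): whether f inside the literal is a captured variable or a field name depends on what s resolves to:

	package p

	type s1 struct{ f int }
	type s2 map[int]int

	func examples(f, x int) (func() s1, func() s2) {
		return func() s1 { return s1{f: x} }, // f names the struct field, not the parameter
			func() s2 { return s2{f: x} } // f is the captured parameter, used as a map key
	}
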
@ -391,7 +401,7 @@ func tcFunc(n *ir.Func) {
}
for _, ln := range n.Dcl {
if ln.Op() == ir.ONAME && (ln.Class_ == ir.PPARAM || ln.Class_ == ir.PPARAMOUT) {
if ln.Op() == ir.ONAME && (ln.Class == ir.PPARAM || ln.Class == ir.PPARAMOUT) {
ln.Decldepth = 1
}
}
@ -401,7 +411,6 @@ func tcFunc(n *ir.Func) {
if t == nil {
return
}
n.SetType(t)
rcvr := t.Recv()
if rcvr != nil && n.Shortname != nil {
m := addmethod(n, n.Shortname, t, true, n.Pragma&ir.Nointerface != 0)
@ -782,7 +791,7 @@ func tcMake(n *ir.CallExpr) ir.Node {
return n
}
n.Args.Set(nil)
n.Args = nil
l := args[0]
l = typecheck(l, ctxType)
t := l.Type()


@ -430,7 +430,7 @@ func (p *iexporter) doDecl(n *ir.Name) {
switch n.Op() {
case ir.ONAME:
switch n.Class_ {
switch n.Class {
case ir.PEXTERN:
// Variable.
w.tag('V')
@ -450,7 +450,7 @@ func (p *iexporter) doDecl(n *ir.Name) {
w.funcExt(n)
default:
base.Fatalf("unexpected class: %v, %v", n, n.Class_)
base.Fatalf("unexpected class: %v, %v", n, n.Class)
}
case ir.OLITERAL:
@ -574,6 +574,11 @@ func (w *exportWriter) pos(pos src.XPos) {
}
func (w *exportWriter) pkg(pkg *types.Pkg) {
// TODO(mdempsky): Add flag to types.Pkg to mark pseudo-packages.
if pkg == ir.Pkgs.Go {
base.Fatalf("export of pseudo-package: %q", pkg.Path)
}
// Ensure any referenced packages are declared in the main index.
w.p.allPkgs[pkg] = true
@ -936,7 +941,7 @@ func (w *exportWriter) mpfloat(v constant.Value, typ *types.Type) {
if acc != big.Exact {
base.Fatalf("mantissa scaling failed for %f (%s)", f, acc)
}
w.mpint(makeInt(manti), typ)
w.mpint(constant.Make(manti), typ)
if manti.Sign() != 0 {
w.int64(exp)
}
@ -1067,7 +1072,7 @@ func (w *exportWriter) stmt(n ir.Node) {
n := n.(*ir.Decl)
w.op(ir.ODCL)
w.pos(n.X.Pos())
w.localName(n.X.(*ir.Name))
w.localName(n.X)
w.typ(n.X.Type())
case ir.OAS:
@ -1187,7 +1192,7 @@ func (w *exportWriter) caseList(cases []*ir.CaseClause, namedTypeSwitch bool) {
w.pos(cas.Pos())
w.stmtList(cas.List)
if namedTypeSwitch {
w.localName(cas.Var.(*ir.Name))
w.localName(cas.Var)
}
w.stmtList(cas.Body)
}
@ -1252,21 +1257,10 @@ func (w *exportWriter) expr(n ir.Node) {
w.pos(n.Pos())
w.value(n.Type(), n.Val())
case ir.OMETHEXPR:
// Special case: explicit name of func (*T) method(...) is turned into pkg.(*T).method,
// but for export, this should be rendered as (*pkg.T).meth.
// These nodes have the special property that they are names with a left OTYPE and a right ONAME.
n := n.(*ir.MethodExpr)
w.op(ir.OXDOT)
w.pos(n.Pos())
w.op(ir.OTYPE)
w.typ(n.T) // n.Left.Op == OTYPE
w.selector(n.Method.Sym)
case ir.ONAME:
// Package scope name.
n := n.(*ir.Name)
if (n.Class_ == ir.PEXTERN || n.Class_ == ir.PFUNC) && !ir.IsBlank(n) {
if (n.Class == ir.PEXTERN || n.Class == ir.PFUNC) && !ir.IsBlank(n) {
w.op(ir.ONONAME)
w.qualifiedIdent(n)
break
@ -1336,15 +1330,7 @@ func (w *exportWriter) expr(n ir.Node) {
// case OSTRUCTKEY:
// unreachable - handled in case OSTRUCTLIT by elemList
case ir.OCALLPART:
// An OCALLPART is an OXDOT before type checking.
n := n.(*ir.CallPartExpr)
w.op(ir.OXDOT)
w.pos(n.Pos())
w.expr(n.X)
w.selector(n.Method.Sym)
case ir.OXDOT, ir.ODOT, ir.ODOTPTR, ir.ODOTINTER, ir.ODOTMETH:
case ir.OXDOT, ir.ODOT, ir.ODOTPTR, ir.ODOTINTER, ir.ODOTMETH, ir.OCALLPART, ir.OMETHEXPR:
n := n.(*ir.SelectorExpr)
w.op(ir.OXDOT)
w.pos(n.Pos())
@ -1540,14 +1526,18 @@ func (w *exportWriter) localName(n *ir.Name) {
// PPARAM/PPARAMOUT, because we only want to include vargen in
// non-param names.
var v int32
if n.Class_ == ir.PAUTO || (n.Class_ == ir.PAUTOHEAP && n.Name().Stackcopy == nil) {
v = n.Name().Vargen
if n.Class == ir.PAUTO || (n.Class == ir.PAUTOHEAP && n.Stackcopy == nil) {
v = n.Vargen
}
w.localIdent(n.Sym(), v)
}
func (w *exportWriter) localIdent(s *types.Sym, v int32) {
if w.currPkg == nil {
base.Fatalf("missing currPkg")
}
// Anonymous parameters.
if s == nil {
w.string("")
@ -1572,8 +1562,8 @@ func (w *exportWriter) localIdent(s *types.Sym, v int32) {
name = fmt.Sprintf("%s·%d", name, v)
}
if !types.IsExported(name) && s.Pkg != w.currPkg {
base.Fatalf("weird package in name: %v => %v, not %q", s, name, w.currPkg.Path)
if s.Pkg != w.currPkg {
base.Fatalf("weird package in name: %v => %v from %q, not %q", s, name, s.Pkg.Path, w.currPkg.Path)
}
w.string(name)


@ -327,16 +327,17 @@ func (r *importReader) doDecl(sym *types.Sym) *ir.Name {
ms := make([]*types.Field, r.uint64())
for i := range ms {
mpos := r.pos()
msym := r.ident()
msym := r.selector()
recv := r.param()
mtyp := r.signature(recv)
fn := ir.NewFunc(mpos)
fn.SetType(mtyp)
m := ir.NewFuncNameAt(mpos, ir.MethodSym(recv.Type, msym), fn)
m.SetType(mtyp)
m.Class_ = ir.PFUNC
// methodSym already marked m.Sym as a function.
m := ir.NewNameAt(mpos, ir.MethodSym(recv.Type, msym))
m.Class = ir.PFUNC
m.SetType(mtyp)
m.Func = ir.NewFunc(mpos)
m.Func.Nname = m
f := types.NewField(mpos, msym, mtyp)
f.Nname = m
@ -372,7 +373,7 @@ func (p *importReader) value(typ *types.Type) constant.Value {
case constant.Int:
var i big.Int
p.mpint(&i, typ)
return makeInt(&i)
return constant.Make(&i)
case constant.Float:
return p.float(typ)
case constant.Complex:
@ -433,18 +434,21 @@ func (p *importReader) float(typ *types.Type) constant.Value {
return constant.Make(&f)
}
func (r *importReader) ident() *types.Sym {
func (r *importReader) ident(selector bool) *types.Sym {
name := r.string()
if name == "" {
return nil
}
pkg := r.currPkg
if types.IsExported(name) {
if selector && types.IsExported(name) {
pkg = types.LocalPkg
}
return pkg.Lookup(name)
}
func (r *importReader) localIdent() *types.Sym { return r.ident(false) }
func (r *importReader) selector() *types.Sym { return r.ident(true) }
func (r *importReader) qualifiedIdent() *ir.Ident {
name := r.string()
pkg := r.pkg()
@ -533,7 +537,7 @@ func (r *importReader) typ1() *types.Type {
fs := make([]*types.Field, r.uint64())
for i := range fs {
pos := r.pos()
sym := r.ident()
sym := r.selector()
typ := r.typ()
emb := r.bool()
note := r.string()
@ -562,7 +566,7 @@ func (r *importReader) typ1() *types.Type {
methods := make([]*types.Field, r.uint64())
for i := range methods {
pos := r.pos()
sym := r.ident()
sym := r.selector()
typ := r.signature(fakeRecvField())
methods[i] = types.NewField(pos, sym, typ)
@ -598,7 +602,7 @@ func (r *importReader) paramList() []*types.Field {
}
func (r *importReader) param() *types.Field {
return types.NewField(r.pos(), r.ident(), r.typ())
return types.NewField(r.pos(), r.localIdent(), r.typ())
}
func (r *importReader) bool() bool {
@ -778,17 +782,17 @@ func (r *importReader) caseList(switchExpr ir.Node) []*ir.CaseClause {
cases := make([]*ir.CaseClause, r.uint64())
for i := range cases {
cas := ir.NewCaseStmt(r.pos(), nil, nil)
cas.List.Set(r.stmtList())
cas.List = r.stmtList()
if namedTypeSwitch {
// Note: per-case variables will have distinct, dotted
// names after import. That's okay: swt.go only needs
// Sym for diagnostics anyway.
caseVar := ir.NewNameAt(cas.Pos(), r.ident())
caseVar := ir.NewNameAt(cas.Pos(), r.localIdent())
Declare(caseVar, DeclContext)
cas.Var = caseVar
caseVar.Defn = switchExpr
}
cas.Body.Set(r.stmtList())
cas.Body = r.stmtList()
cases[i] = cas
}
return cases
@ -850,7 +854,7 @@ func (r *importReader) node() ir.Node {
return r.qualifiedIdent()
case ir.ONAME:
return r.ident().Def.(*ir.Name)
return r.localIdent().Def.(*ir.Name)
// case OPACK, ONONAME:
// unreachable - should have been resolved by typechecking
@ -861,7 +865,7 @@ func (r *importReader) node() ir.Node {
case ir.OTYPESW:
pos := r.pos()
var tag *ir.Ident
if s := r.ident(); s != nil {
if s := r.localIdent(); s != nil {
tag = ir.NewIdent(pos, s)
}
return ir.NewTypeSwitchGuard(pos, tag, r.expr())
@ -898,7 +902,7 @@ func (r *importReader) node() ir.Node {
case ir.OXDOT:
// see parser.new_dotname
return ir.NewSelectorExpr(r.pos(), ir.OXDOT, r.expr(), r.ident())
return ir.NewSelectorExpr(r.pos(), ir.OXDOT, r.expr(), r.selector())
// case ODOTTYPE, ODOTTYPE2:
// unreachable - mapped to case ODOTTYPE below by exporter
@ -931,7 +935,7 @@ func (r *importReader) node() ir.Node {
case ir.OCOPY, ir.OCOMPLEX, ir.OREAL, ir.OIMAG, ir.OAPPEND, ir.OCAP, ir.OCLOSE, ir.ODELETE, ir.OLEN, ir.OMAKE, ir.ONEW, ir.OPANIC, ir.ORECOVER, ir.OPRINT, ir.OPRINTN:
n := builtinCall(r.pos(), op)
n.Args.Set(r.exprList())
n.Args = r.exprList()
if op == ir.OAPPEND {
n.IsDDD = r.bool()
}
@ -944,7 +948,7 @@ func (r *importReader) node() ir.Node {
pos := r.pos()
init := r.stmtList()
n := ir.NewCallExpr(pos, ir.OCALL, r.expr(), r.exprList())
n.PtrInit().Set(init)
*n.PtrInit() = init
n.IsDDD = r.bool()
return n
@ -988,7 +992,7 @@ func (r *importReader) node() ir.Node {
// statements
case ir.ODCL:
pos := r.pos()
lhs := ir.NewDeclNameAt(pos, ir.ONAME, r.ident())
lhs := ir.NewDeclNameAt(pos, ir.ONAME, r.localIdent())
lhs.SetType(r.typ())
Declare(lhs, ir.PAUTO)
@ -1032,14 +1036,14 @@ func (r *importReader) node() ir.Node {
case ir.OIF:
pos, init := r.pos(), r.stmtList()
n := ir.NewIfStmt(pos, r.expr(), r.stmtList(), r.stmtList())
n.PtrInit().Set(init)
*n.PtrInit() = init
return n
case ir.OFOR:
pos, init := r.pos(), r.stmtList()
cond, post := r.exprsOrNil()
n := ir.NewForStmt(pos, nil, cond, post, r.stmtList())
n.PtrInit().Set(init)
*n.PtrInit() = init
return n
case ir.ORANGE:
@ -1051,7 +1055,7 @@ func (r *importReader) node() ir.Node {
pos := r.pos()
init := r.stmtList()
n := ir.NewSelectStmt(pos, r.commList())
n.PtrInit().Set(init)
*n.PtrInit() = init
return n
case ir.OSWITCH:
@ -1059,7 +1063,7 @@ func (r *importReader) node() ir.Node {
init := r.stmtList()
x, _ := r.exprsOrNil()
n := ir.NewSwitchStmt(pos, x, r.caseList(x))
n.PtrInit().Set(init)
*n.PtrInit() = init
return n
// case OCASE:
@ -1099,7 +1103,7 @@ func (r *importReader) op() ir.Op {
func (r *importReader) fieldList() []ir.Node {
list := make([]ir.Node, r.uint64())
for i := range list {
list[i] = ir.NewStructKeyExpr(r.pos(), r.ident(), r.expr())
list[i] = ir.NewStructKeyExpr(r.pos(), r.selector(), r.expr())
}
return list
}


@ -631,7 +631,7 @@ func tcSwitchType(n *ir.SwitchStmt) {
nvar := ncase.Var
nvar.SetType(vt)
if vt != nil {
nvar = AssignExpr(nvar)
nvar = AssignExpr(nvar).(*ir.Name)
} else {
// Clause variable is broken; prevent typechecking.
nvar.SetTypecheck(1)


@ -43,6 +43,9 @@ func NewFuncParams(tl *types.Type, mustname bool) []*ir.Field {
// invent a name so that we can refer to it in the trampoline
s = LookupNum(".anon", gen)
gen++
} else if s != nil && s.Pkg != types.LocalPkg {
// TODO(mdempsky): Preserve original position, name, and package.
s = Lookup(s.Name)
}
a := ir.NewField(base.Pos, s, nil, t.Type)
a.Pos = t.Pos
@ -67,9 +70,57 @@ func NodAddr(n ir.Node) *ir.AddrExpr {
// nodAddrPos returns a node representing &n at position pos.
func NodAddrAt(pos src.XPos, n ir.Node) *ir.AddrExpr {
n = markAddrOf(n)
return ir.NewAddrExpr(pos, n)
}
func markAddrOf(n ir.Node) ir.Node {
if IncrementalAddrtaken {
// We can only do incremental addrtaken computation when it is ok
// to typecheck the argument of the OADDR. That's only safe after the
// main typecheck has completed.
// The argument to OADDR needs to be typechecked because &x[i] takes
// the address of x if x is an array, but not if x is a slice.
// Note: outervalue doesn't work correctly until n is typechecked.
n = typecheck(n, ctxExpr)
if x := ir.OuterValue(n); x.Op() == ir.ONAME {
x.Name().SetAddrtaken(true)
}
} else {
// Remember that we built an OADDR without computing the Addrtaken bit for
// its argument. We'll do that later in bulk using computeAddrtaken.
DirtyAddrtaken = true
}
return n
}
// If IncrementalAddrtaken is false, we do not compute Addrtaken for an OADDR Node
// when it is built. The Addrtaken bits are set in bulk by computeAddrtaken.
// If IncrementalAddrtaken is true, then when an OADDR Node is built the Addrtaken
// field of its argument is updated immediately.
var IncrementalAddrtaken = false
// If DirtyAddrtaken is true, then there are OADDR nodes whose corresponding
// arguments have not yet been marked as Addrtaken.
var DirtyAddrtaken = false
func ComputeAddrtaken(top []ir.Node) {
for _, n := range top {
ir.Visit(n, func(n ir.Node) {
if n.Op() == ir.OADDR {
if x := ir.OuterValue(n.(*ir.AddrExpr).X); x.Op() == ir.ONAME {
x.Name().SetAddrtaken(true)
if x.Name().IsClosureVar() {
// Mark the original variable as Addrtaken so that capturevars
// knows not to pass it by value.
x.Name().Defn.Name().SetAddrtaken(true)
}
}
}
})
}
}
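
The array/slice distinction described in the comments above, in user code (an illustrative sketch): &x[i] marks x itself as addrtaken only when x is an array:

	package p

	func f() (*int, *int) {
		var a [4]int
		pa := &a[1] // takes the address of the array a, so a is marked addrtaken

		s := make([]int, 4)
		ps := &s[1] // addresses the backing array; the slice header s is not addrtaken

		return pa, ps
	}
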
func NodNil() ir.Node {
n := ir.NewNilExpr(base.Pos)
n.SetType(types.Types[types.TNIL])


@ -26,12 +26,12 @@ func LookupRuntime(name string) *ir.Name {
// The result of SubstArgTypes MUST be assigned back to old, e.g.
// n.Left = SubstArgTypes(n.Left, t1, t2)
func SubstArgTypes(old *ir.Name, types_ ...*types.Type) *ir.Name {
n := old.CloneName()
for _, t := range types_ {
types.CalcSize(t)
}
n.SetType(types.SubstAny(n.Type(), &types_))
n := ir.NewNameAt(old.Pos(), old.Sym())
n.Class = old.Class
n.SetType(types.SubstAny(old.Type(), &types_))
if len(types_) > 0 {
base.Fatalf("substArgTypes: too many argument types")
}
@ -61,15 +61,13 @@ func Lookup(name string) *types.Sym {
return types.LocalPkg.Lookup(name)
}
// loadsys loads the definitions for the low-level runtime functions,
// InitRuntime loads the definitions for the low-level runtime functions,
// so that the compiler can generate calls to them,
// but does not make them visible to user code.
func loadsys() {
func InitRuntime() {
base.Timer.Start("fe", "loadsys")
types.Block = 1
inimport = true
TypecheckAllowed = true
typs := runtimeTypes()
for _, d := range &runtimeDecls {
sym := ir.Pkgs.Runtime.Lookup(d.name)
@ -83,9 +81,6 @@ func loadsys() {
base.Fatalf("unhandled declaration tag %v", d.tag)
}
}
TypecheckAllowed = false
inimport = false
}
// LookupRuntimeFunc looks up Go function name in package runtime. This function


@ -14,7 +14,7 @@ import (
// tcArrayType typechecks an OTARRAY node.
func tcArrayType(n *ir.ArrayType) ir.Node {
n.Elem = typecheck(n.Elem, ctxType)
n.Elem = typecheckNtype(n.Elem)
if n.Elem.Type() == nil {
return n
}
@ -59,7 +59,7 @@ func tcArrayType(n *ir.ArrayType) ir.Node {
// tcChanType typechecks an OTCHAN node.
func tcChanType(n *ir.ChanType) ir.Node {
n.Elem = typecheck(n.Elem, ctxType)
n.Elem = typecheckNtype(n.Elem)
l := n.Elem
if l.Type() == nil {
return n
@ -103,7 +103,7 @@ func tcInterfaceType(n *ir.InterfaceType) ir.Node {
n.SetOTYPE(types.Types[types.TINTER])
return n
}
lno := base.Pos
methods := tcFields(n.Methods, nil)
base.Pos = lno
@ -114,8 +114,8 @@ func tcInterfaceType(n *ir.InterfaceType) ir.Node {
// tcMapType typechecks an OTMAP node.
func tcMapType(n *ir.MapType) ir.Node {
n.Key = typecheck(n.Key, ctxType)
n.Elem = typecheck(n.Elem, ctxType)
n.Key = typecheckNtype(n.Key)
n.Elem = typecheckNtype(n.Elem)
l := n.Key
r := n.Elem
if l.Type() == nil || r.Type() == nil {
@ -134,7 +134,7 @@ func tcMapType(n *ir.MapType) ir.Node {
// tcSliceType typechecks an OTSLICE node.
func tcSliceType(n *ir.SliceType) ir.Node {
n.Elem = typecheck(n.Elem, ctxType)
n.Elem = typecheckNtype(n.Elem)
if n.Elem.Type() == nil {
return n
}


@ -31,97 +31,6 @@ var (
NeedRuntimeType = func(*types.Type) {}
)
func Init() {
initUniverse()
DeclContext = ir.PEXTERN
base.Timer.Start("fe", "loadsys")
loadsys()
}
func Package() {
declareUniverse()
TypecheckAllowed = true
// Process top-level declarations in phases.
// Phase 1: const, type, and names and types of funcs.
// This will gather all the information about types
// and methods but doesn't depend on any of it.
//
// We also defer type alias declarations until phase 2
// to avoid cycles like #18640.
// TODO(gri) Remove this again once we have a fix for #25838.
// Don't use range--typecheck can add closures to Target.Decls.
base.Timer.Start("fe", "typecheck", "top1")
for i := 0; i < len(Target.Decls); i++ {
n := Target.Decls[i]
if op := n.Op(); op != ir.ODCL && op != ir.OAS && op != ir.OAS2 && (op != ir.ODCLTYPE || !n.(*ir.Decl).X.Name().Alias()) {
Target.Decls[i] = Stmt(n)
}
}
// Phase 2: Variable assignments.
// To check interface assignments, depends on phase 1.
// Don't use range--typecheck can add closures to Target.Decls.
base.Timer.Start("fe", "typecheck", "top2")
for i := 0; i < len(Target.Decls); i++ {
n := Target.Decls[i]
if op := n.Op(); op == ir.ODCL || op == ir.OAS || op == ir.OAS2 || op == ir.ODCLTYPE && n.(*ir.Decl).X.Name().Alias() {
Target.Decls[i] = Stmt(n)
}
}
// Phase 3: Type check function bodies.
// Don't use range--typecheck can add closures to Target.Decls.
base.Timer.Start("fe", "typecheck", "func")
var fcount int64
for i := 0; i < len(Target.Decls); i++ {
n := Target.Decls[i]
if n.Op() == ir.ODCLFUNC {
FuncBody(n.(*ir.Func))
fcount++
}
}
// Phase 4: Check external declarations.
// TODO(mdempsky): This should be handled when type checking their
// corresponding ODCL nodes.
base.Timer.Start("fe", "typecheck", "externdcls")
for i, n := range Target.Externs {
if n.Op() == ir.ONAME {
Target.Externs[i] = Expr(Target.Externs[i])
}
}
// Phase 5: With all user code type-checked, it's now safe to verify map keys.
CheckMapKeys()
// Phase 6: Decide how to capture closed variables.
// This needs to run before escape analysis,
// because variables captured by value do not escape.
base.Timer.Start("fe", "capturevars")
for _, n := range Target.Decls {
if n.Op() == ir.ODCLFUNC {
n := n.(*ir.Func)
if n.OClosure != nil {
ir.CurFunc = n
CaptureVars(n)
}
}
}
CaptureVarsComplete = true
ir.CurFunc = nil
if base.Debug.TypecheckInl != 0 {
// Typecheck imported function bodies if Debug.l > 1,
// otherwise lazily when used or re-exported.
AllImportedBodies()
}
}
func AssignExpr(n ir.Node) ir.Node { return typecheck(n, ctxExpr|ctxAssign) }
func Expr(n ir.Node) ir.Node { return typecheck(n, ctxExpr) }
func Stmt(n ir.Node) ir.Node { return typecheck(n, ctxStmt) }
@ -152,13 +61,11 @@ func FuncBody(n *ir.Func) {
decldepth = 1
errorsBefore := base.Errors()
Stmts(n.Body)
CheckUnused(n)
CheckReturn(n)
if base.Errors() > errorsBefore {
n.Body.Set(nil) // type errors; do not compile
n.Body = nil // type errors; do not compile
}
// Now that we've checked whether n terminates,
// we can eliminate some obviously dead code.
deadcode(n)
}
var importlist []*ir.Func
@ -230,7 +137,7 @@ const (
// marks variables that escape the local frame.
// rewrites n.Op to be more specific in some cases.
var typecheckdefstack []ir.Node
var typecheckdefstack []*ir.Name
// Resolve ONONAME to definition, if any.
func Resolve(n ir.Node) (res ir.Node) {
@ -567,26 +474,8 @@ func indexlit(n ir.Node) ir.Node {
// typecheck1 should ONLY be called from typecheck.
func typecheck1(n ir.Node, top int) ir.Node {
switch n.Op() {
case ir.OLITERAL, ir.ONAME, ir.ONONAME, ir.OTYPE:
if n.Sym() == nil {
return n
}
if n.Op() == ir.ONAME {
n := n.(*ir.Name)
if n.BuiltinOp != 0 && top&ctxCallee == 0 {
base.Errorf("use of builtin %v not in function call", n.Sym())
n.SetType(nil)
return n
}
}
if n, ok := n.(*ir.Name); ok {
typecheckdef(n)
if n.Op() == ir.ONONAME {
n.SetType(nil)
return n
}
}
switch n.Op() {
@ -595,22 +484,37 @@ func typecheck1(n ir.Node, top int) ir.Node {
base.Fatalf("typecheck %v", n.Op())
panic("unreachable")
// names
case ir.OLITERAL:
if n.Type() == nil && n.Val().Kind() == constant.String {
base.Fatalf("string literal missing type")
if n.Sym() == nil && n.Type() == nil {
base.Fatalf("literal missing type: %v", n)
}
return n
case ir.ONIL, ir.ONONAME:
case ir.ONIL:
return n
// names
case ir.ONONAME:
if !n.Diag() {
// Note: adderrorname looks for this string and
// adds context about the outer expression
base.ErrorfAt(n.Pos(), "undefined: %v", n.Sym())
n.SetDiag(true)
}
n.SetType(nil)
return n
case ir.ONAME:
n := n.(*ir.Name)
if n.Name().Decldepth == 0 {
n.Name().Decldepth = decldepth
if n.Decldepth == 0 {
n.Decldepth = decldepth
}
if n.BuiltinOp != 0 {
if top&ctxCallee == 0 {
base.Errorf("use of builtin %v not in function call", n.Sym())
n.SetType(nil)
return n
}
return n
}
if top&ctxAssign == 0 {
@ -620,7 +524,7 @@ func typecheck1(n ir.Node, top int) ir.Node {
n.SetType(nil)
return n
}
n.Name().SetUsed(true)
n.SetUsed(true)
}
return n
@ -636,9 +540,6 @@ func typecheck1(n ir.Node, top int) ir.Node {
// types (ODEREF is with exprs)
case ir.OTYPE:
if n.Type() == nil {
return n
}
return n
case ir.OTSLICE:
@ -672,28 +573,98 @@ func typecheck1(n ir.Node, top int) ir.Node {
case ir.ODEREF:
n := n.(*ir.StarExpr)
return tcStar(n, top)
// arithmetic exprs
case ir.OASOP,
ir.OADD,
ir.OAND,
ir.OANDAND,
ir.OANDNOT,
ir.ODIV,
ir.OEQ,
ir.OGE,
ir.OGT,
ir.OLE,
ir.OLT,
ir.OLSH,
ir.ORSH,
ir.OMOD,
ir.OMUL,
ir.ONE,
ir.OOR,
ir.OOROR,
ir.OSUB,
ir.OXOR:
return tcArith(n)
// x op= y
case ir.OASOP:
n := n.(*ir.AssignOpStmt)
n.X, n.Y = Expr(n.X), Expr(n.Y)
checkassign(n, n.X)
if n.IncDec && !okforarith[n.X.Type().Kind()] {
base.Errorf("invalid operation: %v (non-numeric type %v)", n, n.X.Type())
return n
}
switch n.AsOp {
case ir.OLSH, ir.ORSH:
n.X, n.Y, _ = tcShift(n, n.X, n.Y)
case ir.OADD, ir.OAND, ir.OANDNOT, ir.ODIV, ir.OMOD, ir.OMUL, ir.OOR, ir.OSUB, ir.OXOR:
n.X, n.Y, _ = tcArith(n, n.AsOp, n.X, n.Y)
default:
base.Fatalf("invalid assign op: %v", n.AsOp)
}
return n
// logical operators
case ir.OANDAND, ir.OOROR:
n := n.(*ir.LogicalExpr)
n.X, n.Y = Expr(n.X), Expr(n.Y)
// For "x == x && len(s)", it's better to report that "len(s)" (type int)
// can't be used with "&&" than to report that "x == x" (type untyped bool)
// can't be converted to int (see issue #41500).
if !n.X.Type().IsBoolean() {
base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op(), typekind(n.X.Type()))
n.SetType(nil)
return n
}
if !n.Y.Type().IsBoolean() {
base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op(), typekind(n.Y.Type()))
n.SetType(nil)
return n
}
l, r, t := tcArith(n, n.Op(), n.X, n.Y)
n.X, n.Y = l, r
n.SetType(t)
return n
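
The issue #41500 situation referenced above, spelled out (this sketch is intentionally invalid; the comments show the preferred diagnostic):

	package p

	func f(x, s string) bool {
		// With the operand check above, the error points at len(s):
		//   invalid operation: ... (operator && not defined on int)
		// rather than complaining that the untyped bool "x == x"
		// cannot be converted.
		return x == x && len(s) // does not compile
	}
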
// shift operators
case ir.OLSH, ir.ORSH:
n := n.(*ir.BinaryExpr)
n.X, n.Y = Expr(n.X), Expr(n.Y)
l, r, t := tcShift(n, n.X, n.Y)
n.X, n.Y = l, r
n.SetType(t)
return n
// comparison operators
case ir.OEQ, ir.OGE, ir.OGT, ir.OLE, ir.OLT, ir.ONE:
n := n.(*ir.BinaryExpr)
n.X, n.Y = Expr(n.X), Expr(n.Y)
l, r, t := tcArith(n, n.Op(), n.X, n.Y)
if t != nil {
n.X, n.Y = l, r
n.SetType(types.UntypedBool)
if con := EvalConst(n); con.Op() == ir.OLITERAL {
return con
}
n.X, n.Y = defaultlit2(l, r, true)
}
return n
// binary operators
case ir.OADD, ir.OAND, ir.OANDNOT, ir.ODIV, ir.OMOD, ir.OMUL, ir.OOR, ir.OSUB, ir.OXOR:
n := n.(*ir.BinaryExpr)
n.X, n.Y = Expr(n.X), Expr(n.Y)
l, r, t := tcArith(n, n.Op(), n.X, n.Y)
if t != nil && t.Kind() == types.TSTRING && n.Op() == ir.OADD {
// create or update OADDSTR node with list of strings in x + y + z + (w + v) + ...
var add *ir.AddStringExpr
if l.Op() == ir.OADDSTR {
add = l.(*ir.AddStringExpr)
add.SetPos(n.Pos())
} else {
add = ir.NewAddStringExpr(n.Pos(), []ir.Node{l})
}
if r.Op() == ir.OADDSTR {
r := r.(*ir.AddStringExpr)
add.List.Append(r.List.Take()...)
} else {
add.List.Append(r)
}
add.SetType(t)
return add
}
n.X, n.Y = l, r
n.SetType(t)
return n
case ir.OBITNOT, ir.ONEG, ir.ONOT, ir.OPLUS:
n := n.(*ir.UnaryExpr)
@ -925,12 +896,12 @@ func typecheck1(n ir.Node, top int) ir.Node {
case ir.ODCLCONST:
n := n.(*ir.Decl)
n.X = Expr(n.X)
n.X = Expr(n.X).(*ir.Name)
return n
case ir.ODCLTYPE:
n := n.(*ir.Decl)
n.X = typecheck(n.X, ctxType)
n.X = typecheck(n.X, ctxType).(*ir.Name)
types.CheckSize(n.X.Type())
return n
}
@ -940,7 +911,7 @@ func typecheck1(n ir.Node, top int) ir.Node {
// Each must execute its own return n.
}
func typecheckargs(n ir.Node) {
func typecheckargs(n ir.InitNode) {
var list []ir.Node
switch n := n.(type) {
default:
@ -997,9 +968,9 @@ func typecheckargs(n ir.Node) {
switch n := n.(type) {
case *ir.CallExpr:
n.Args.Set(list)
n.Args = list
case *ir.ReturnStmt:
n.Results.Set(list)
n.Results = list
}
n.PtrInit().Append(Stmt(as))
@ -1176,19 +1147,16 @@ func typecheckMethodExpr(n *ir.SelectorExpr) (res ir.Node) {
return n
}
me := ir.NewMethodExpr(n.Pos(), n.X.Type(), m)
me.SetType(NewMethodType(m.Type, n.X.Type()))
f := NewName(ir.MethodSym(t, m.Sym))
f.Class_ = ir.PFUNC
f.SetType(me.Type())
me.FuncName_ = f
n.SetOp(ir.OMETHEXPR)
n.Selection = m
n.SetType(NewMethodType(m.Type, n.X.Type()))
// Issue 25065. Make sure that we emit the symbol for a local method.
if base.Ctxt.Flag_dynlink && !inimport && (t.Sym() == nil || t.Sym().Pkg == types.LocalPkg) {
NeedFuncSym(me.FuncName_.Sym())
NeedFuncSym(n.FuncName().Sym())
}
return me
return n
}
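
A method expression of the kind handled above (made-up example): T.Get denotes an ordinary function whose first parameter is the receiver, which is what the OMETHEXPR selector now represents directly:

	package main

	import "fmt"

	type T struct{ n int }

	func (t T) Get() int { return t.n }

	func main() {
		f := T.Get            // method expression: a func(T) int with an explicit receiver argument
		fmt.Println(f(T{42})) // 42
	}
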
func derefall(t *types.Type) *types.Type {
@ -1245,6 +1213,7 @@ func lookdot(n *ir.SelectorExpr, t *types.Type, dostrcmp int) *types.Field {
// Already in the process of diagnosing an error.
return f2
}
orig := n.X
tt := n.X.Type()
types.CalcSize(tt)
rcvr := f2.Type.Recv().Type
@ -1275,20 +1244,28 @@ func lookdot(n *ir.SelectorExpr, t *types.Type, dostrcmp int) *types.Field {
}
}
implicit, ll := n.Implicit(), n.X
for ll != nil && (ll.Op() == ir.ODOT || ll.Op() == ir.ODOTPTR || ll.Op() == ir.ODEREF) {
switch l := ll.(type) {
// Check that we haven't implicitly dereferenced any defined pointer types.
for x := n.X; ; {
var inner ir.Node
implicit := false
switch x := x.(type) {
case *ir.AddrExpr:
inner, implicit = x.X, x.Implicit()
case *ir.SelectorExpr:
implicit, ll = l.Implicit(), l.X
inner, implicit = x.X, x.Implicit()
case *ir.StarExpr:
implicit, ll = l.Implicit(), l.X
inner, implicit = x.X, x.Implicit()
}
}
if implicit && ll.Type().IsPtr() && ll.Type().Sym() != nil && ll.Type().Sym().Def != nil && ir.AsNode(ll.Type().Sym().Def).Op() == ir.OTYPE {
// It is invalid to automatically dereference a named pointer type when selecting a method.
// Make n.Left == ll to clarify error message.
n.X = ll
return nil
if !implicit {
break
}
if inner.Type().Sym() != nil && (x.Op() == ir.ODEREF || x.Op() == ir.ODOTPTR) {
// Found an implicit dereference of a defined pointer type.
// Restore n.X for better error message.
n.X = orig
return nil
}
x = inner
}
n.Selection = f2
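
The defined-pointer rule this loop enforces, at the source level (illustrative; the commented call is the one the compiler rejects):

	package p

	type T struct{}

	func (T) M() {}

	type P *T

	func f(p P) {
		// p.M() // invalid: selecting M may not implicitly dereference the defined pointer type P
		(*p).M() // an explicit dereference is fine
	}
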
@ -1422,7 +1399,7 @@ notenough:
// Method expressions have the form T.M, and the compiler has
// rewritten those to ONAME nodes but left T in Left.
if call.Op() == ir.OMETHEXPR {
call := call.(*ir.MethodExpr)
call := call.(*ir.SelectorExpr)
base.Errorf("not enough arguments in call to method expression %v%s", call, details)
} else {
base.Errorf("not enough arguments in call to %v%s", call, details)
@ -1635,14 +1612,22 @@ func checklvalue(n ir.Node, verb string) {
}
func checkassign(stmt ir.Node, n ir.Node) {
// have already complained about n being invalid
if n.Type() == nil {
if base.Errors() == 0 {
base.Fatalf("expected an error about %v", n)
}
return
}
// Variables declared in ORANGE are assigned on every iteration.
if !ir.DeclaredBy(n, stmt) || stmt.Op() == ir.ORANGE {
r := ir.OuterValue(n)
if r.Op() == ir.ONAME {
r := r.(*ir.Name)
r.Name().SetAssigned(true)
if r.Name().IsClosureVar() {
r.Name().Defn.Name().SetAssigned(true)
r.SetAssigned(true)
if r.IsClosureVar() {
r.Defn.Name().SetAssigned(true)
}
}
}
@ -1656,11 +1641,6 @@ func checkassign(stmt ir.Node, n ir.Node) {
return
}
// have already complained about n being invalid
if n.Type() == nil {
return
}
switch {
case n.Op() == ir.ODOT && n.(*ir.SelectorExpr).X.Op() == ir.OINDEXMAP:
base.Errorf("cannot assign to struct field %v in map", n)
@ -1706,8 +1686,8 @@ func stringtoruneslit(n *ir.ConvExpr) ir.Node {
i++
}
nn := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(n.Type()).(ir.Ntype), nil)
nn.List.Set(l)
nn := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(n.Type()), nil)
nn.List = l
return Expr(nn)
}
@ -1755,30 +1735,26 @@ func typecheckdeftype(n *ir.Name) {
types.ResumeCheckSize()
}
func typecheckdef(n ir.Node) {
func typecheckdef(n *ir.Name) {
if base.EnableTrace && base.Flag.LowerT {
defer tracePrint("typecheckdef", n)(nil)
}
lno := ir.SetPos(n)
if n.Op() == ir.ONONAME {
if !n.Diag() {
n.SetDiag(true)
// Note: adderrorname looks for this string and
// adds context about the outer expression
base.ErrorfAt(base.Pos, "undefined: %v", n.Sym())
}
base.Pos = lno
return
}
if n.Walkdef() == 1 {
base.Pos = lno
return
}
if n.Type() != nil { // builtin
// Mark as Walkdef so that if n.SetType(nil) is called later, we
// won't try walking again.
if got := n.Walkdef(); got != 0 {
base.Fatalf("unexpected walkdef: %v", got)
}
n.SetWalkdef(1)
return
}
lno := ir.SetPos(n)
typecheckdefstack = append(typecheckdefstack, n)
if n.Walkdef() == 2 {
base.FlushErrors()
@ -1793,27 +1769,23 @@ func typecheckdef(n ir.Node) {
n.SetWalkdef(2)
if n.Type() != nil || n.Sym() == nil { // builtin or no name
goto ret
}
switch n.Op() {
default:
base.Fatalf("typecheckdef %v", n.Op())
case ir.OLITERAL:
if n.Name().Ntype != nil {
n.Name().Ntype = typecheckNtype(n.Name().Ntype)
n.SetType(n.Name().Ntype.Type())
n.Name().Ntype = nil
if n.Ntype != nil {
n.Ntype = typecheckNtype(n.Ntype)
n.SetType(n.Ntype.Type())
n.Ntype = nil
if n.Type() == nil {
n.SetDiag(true)
goto ret
}
}
e := n.Name().Defn
n.Name().Defn = nil
e := n.Defn
n.Defn = nil
if e == nil {
ir.Dump("typecheckdef nil defn", n)
base.ErrorfAt(n.Pos(), "xxx")
@ -1856,10 +1828,9 @@ func typecheckdef(n ir.Node) {
}
case ir.ONAME:
n := n.(*ir.Name)
if n.Name().Ntype != nil {
n.Name().Ntype = typecheckNtype(n.Name().Ntype)
n.SetType(n.Name().Ntype.Type())
if n.Ntype != nil {
n.Ntype = typecheckNtype(n.Ntype)
n.SetType(n.Ntype.Type())
if n.Type() == nil {
n.SetDiag(true)
goto ret
@ -1869,7 +1840,7 @@ func typecheckdef(n ir.Node) {
if n.Type() != nil {
break
}
if n.Name().Defn == nil {
if n.Defn == nil {
if n.BuiltinOp != 0 { // like OPRINTN
break
}
@ -1884,16 +1855,15 @@ func typecheckdef(n ir.Node) {
base.Fatalf("var without type, init: %v", n.Sym())
}
if n.Name().Defn.Op() == ir.ONAME {
n.Name().Defn = Expr(n.Name().Defn)
n.SetType(n.Name().Defn.Type())
if n.Defn.Op() == ir.ONAME {
n.Defn = Expr(n.Defn)
n.SetType(n.Defn.Type())
break
}
n.Name().Defn = Stmt(n.Name().Defn) // fills in n.Type
n.Defn = Stmt(n.Defn) // fills in n.Type
case ir.OTYPE:
n := n.(*ir.Name)
if n.Alias() {
// Type alias declaration: Simply use the rhs type - no need
// to create a new type.
@ -1970,8 +1940,8 @@ func markBreak(fn *ir.Func) {
var labels map[*types.Sym]ir.Node
var implicit ir.Node
var mark func(ir.Node) error
mark = func(n ir.Node) error {
var mark func(ir.Node) bool
mark = func(n ir.Node) bool {
switch n.Op() {
default:
ir.DoChildren(n, mark)
@ -2011,7 +1981,7 @@ func markBreak(fn *ir.Func) {
}
implicit = old
}
return nil
return false
}
mark(fn)
@ -2122,6 +2092,39 @@ func isTermNode(n ir.Node) bool {
return false
}
// CheckUnused checks for any declared variables that weren't used.
func CheckUnused(fn *ir.Func) {
// Only report unused variables if we haven't seen any type-checking
// errors yet.
if base.Errors() != 0 {
return
}
// Propagate the used flag for typeswitch variables up to the NONAME in its definition.
for _, ln := range fn.Dcl {
if ln.Op() == ir.ONAME && ln.Class == ir.PAUTO && ln.Used() {
if guard, ok := ln.Defn.(*ir.TypeSwitchGuard); ok {
guard.Used = true
}
}
}
for _, ln := range fn.Dcl {
if ln.Op() != ir.ONAME || ln.Class != ir.PAUTO || ln.Used() {
continue
}
if defn, ok := ln.Defn.(*ir.TypeSwitchGuard); ok {
if defn.Used {
continue
}
base.ErrorfAt(defn.Tag.Pos(), "%v declared but not used", ln.Sym())
defn.Used = true // suppress repeats
} else {
base.ErrorfAt(ln.Pos(), "%v declared but not used", ln.Sym())
}
}
}
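
What CheckUnused reports, in user terms (an intentionally invalid sketch; the expected errors are shown as comments):

	package p

	func f(x interface{}) {
		y := 1 // error: y declared but not used
		switch v := x.(type) { // error reported at this guard, since no case uses v
		case int:
		}
	}
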
// CheckReturn makes sure that fn terminates appropriately.
func CheckReturn(fn *ir.Func) {
if fn.Type().NumResults() != 0 && len(fn.Body) != 0 {
@ -2132,150 +2135,12 @@ func CheckReturn(fn *ir.Func) {
}
}
func deadcode(fn *ir.Func) {
deadcodeslice(&fn.Body)
if len(fn.Body) == 0 {
return
}
for _, n := range fn.Body {
if len(n.Init()) > 0 {
return
}
switch n.Op() {
case ir.OIF:
n := n.(*ir.IfStmt)
if !ir.IsConst(n.Cond, constant.Bool) || len(n.Body) > 0 || len(n.Else) > 0 {
return
}
case ir.OFOR:
n := n.(*ir.ForStmt)
if !ir.IsConst(n.Cond, constant.Bool) || ir.BoolVal(n.Cond) {
return
}
default:
return
}
}
fn.Body.Set([]ir.Node{ir.NewBlockStmt(base.Pos, nil)})
}
func deadcodeslice(nn *ir.Nodes) {
var lastLabel = -1
for i, n := range *nn {
if n != nil && n.Op() == ir.OLABEL {
lastLabel = i
}
}
for i, n := range *nn {
// Cut is set to true when all nodes after the i'th position
// should be removed.
// In other words, it marks the whole slice "tail" as dead.
cut := false
if n == nil {
continue
}
if n.Op() == ir.OIF {
n := n.(*ir.IfStmt)
n.Cond = deadcodeexpr(n.Cond)
if ir.IsConst(n.Cond, constant.Bool) {
var body ir.Nodes
if ir.BoolVal(n.Cond) {
n.Else = ir.Nodes{}
body = n.Body
} else {
n.Body = ir.Nodes{}
body = n.Else
}
// If "then" or "else" branch ends with panic or return statement,
// it is safe to remove all statements after this node.
// isterminating is not used to avoid goto-related complications.
// We must be careful not to deadcode-remove labels, as they
// might be the target of a goto. See issue 28616.
if body := body; len(body) != 0 {
switch body[(len(body) - 1)].Op() {
case ir.ORETURN, ir.ORETJMP, ir.OPANIC:
if i > lastLabel {
cut = true
}
}
}
}
}
deadcodeslice(n.PtrInit())
switch n.Op() {
case ir.OBLOCK:
n := n.(*ir.BlockStmt)
deadcodeslice(&n.List)
case ir.OFOR:
n := n.(*ir.ForStmt)
deadcodeslice(&n.Body)
case ir.OIF:
n := n.(*ir.IfStmt)
deadcodeslice(&n.Body)
deadcodeslice(&n.Else)
case ir.ORANGE:
n := n.(*ir.RangeStmt)
deadcodeslice(&n.Body)
case ir.OSELECT:
n := n.(*ir.SelectStmt)
for _, cas := range n.Cases {
deadcodeslice(&cas.Body)
}
case ir.OSWITCH:
n := n.(*ir.SwitchStmt)
for _, cas := range n.Cases {
deadcodeslice(&cas.Body)
}
}
if cut {
nn.Set((*nn)[:i+1])
break
}
}
}
func deadcodeexpr(n ir.Node) ir.Node {
// Perform dead-code elimination on short-circuited boolean
// expressions involving constants with the intent of
// producing a constant 'if' condition.
switch n.Op() {
case ir.OANDAND:
n := n.(*ir.LogicalExpr)
n.X = deadcodeexpr(n.X)
n.Y = deadcodeexpr(n.Y)
if ir.IsConst(n.X, constant.Bool) {
if ir.BoolVal(n.X) {
return n.Y // true && x => x
} else {
return n.X // false && x => false
}
}
case ir.OOROR:
n := n.(*ir.LogicalExpr)
n.X = deadcodeexpr(n.X)
n.Y = deadcodeexpr(n.Y)
if ir.IsConst(n.X, constant.Bool) {
if ir.BoolVal(n.X) {
return n.X // true || x => true
} else {
return n.Y // false || x => x
}
}
}
return n
}
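
A source-level view of the short-circuit folding that deadcodeexpr performed (illustrative; the constant and function are made up):

	package main

	const debug = false

	func expensive() bool { println("side effect"); return true }

	func main() {
		// "debug && expensive()" folds to "false", so the branch and the call disappear.
		if debug && expensive() {
			println("unreachable")
		}
	}
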
// getIotaValue returns the current value for "iota",
// or -1 if not within a ConstSpec.
func getIotaValue() int64 {
if i := len(typecheckdefstack); i > 0 {
if x := typecheckdefstack[i-1]; x.Op() == ir.OLITERAL {
return x.(*ir.Name).Iota()
return x.Iota()
}
}


@ -90,8 +90,8 @@ var unsafeFuncs = [...]struct {
{"Sizeof", ir.OSIZEOF},
}
// initUniverse initializes the universe block.
func initUniverse() {
// InitUniverse initializes the universe block.
func InitUniverse() {
if types.PtrSize == 0 {
base.Fatalf("typeinit before betypeinit")
}
@ -336,8 +336,8 @@ func makeErrorInterface() *types.Type {
return types.NewInterface(types.NoPkg, []*types.Field{method})
}
// declareUniverse makes the universe block visible within the current package.
func declareUniverse() {
// DeclareUniverse makes the universe block visible within the current package.
func DeclareUniverse() {
// Operationally, this is similar to a dot import of builtinpkg, except
// that we silently skip symbols that are already declared in the
// package block rather than emitting a redeclared symbol error.
@ -357,6 +357,6 @@ func declareUniverse() {
ir.RegFP = NewName(Lookup(".fp"))
ir.RegFP.SetType(types.Types[types.TINT32])
ir.RegFP.Class_ = ir.PPARAM
ir.RegFP.Class = ir.PPARAM
ir.RegFP.SetUsed(true)
}


@ -20,7 +20,7 @@ func TestSizeof(t *testing.T) {
_32bit uintptr // size on 32bit platforms
_64bit uintptr // size on 64bit platforms
}{
{Sym{}, 48, 80},
{Sym{}, 44, 72},
{Type{}, 56, 96},
{Map{}, 20, 40},
{Forward{}, 20, 32},


@ -27,8 +27,7 @@ import (
// NOTE: In practice, things can be messier than the description above
// for various reasons (historical, convenience).
type Sym struct {
Importdef *Pkg // where imported definition was found
Linkname string // link name
Linkname string // link name
Pkg *Pkg
Name string // object name
@ -75,6 +74,10 @@ func (sym *Sym) LinksymName() string {
return sym.Pkg.Prefix + "." + sym.Name
}
// Deprecated: This method should not be used directly. Instead, use a
// higher-level abstraction that directly returns the linker symbol
// for a named object. For example, reflectdata.TypeLinksym(t) instead
// of reflectdata.TypeSym(t).Linksym().
func (sym *Sym) Linksym() *obj.LSym {
if sym == nil {
return nil


@ -6,7 +6,6 @@ package types
import (
"cmd/compile/internal/base"
"cmd/internal/obj"
"cmd/internal/src"
"fmt"
"sync"
@ -1532,10 +1531,6 @@ func (t *Type) HasPointers() bool {
return true
}
func (t *Type) Symbol() *obj.LSym {
return TypeLinkSym(t)
}
// Tie returns 'T' if t is a concrete type,
// 'I' if t is an interface type, and 'E' if t is an empty interface type.
// It is used to build calls to the conv* and assert* runtime routines.


@ -4,19 +4,8 @@
package types
import (
"cmd/internal/obj"
)
const BADWIDTH = -1000000000
// The following variables must be initialized early by the frontend.
// They are here to break import cycles.
// TODO(gri) eliminate these dependencies.
var (
TypeLinkSym func(*Type) *obj.LSym
)
type bitset8 uint8
func (f *bitset8) set(mask uint8, b bool) {


@ -17,7 +17,7 @@ import (
// walkAssign walks an OAS (AssignExpr) or OASOP (AssignOpExpr) node.
func walkAssign(init *ir.Nodes, n ir.Node) ir.Node {
init.Append(n.PtrInit().Take()...)
init.Append(ir.TakeInit(n)...)
var left, right ir.Node
switch n.Op() {
@ -124,7 +124,7 @@ func walkAssignDotType(n *ir.AssignListStmt, init *ir.Nodes) ir.Node {
// walkAssignFunc walks an OAS2FUNC node.
func walkAssignFunc(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
init.Append(n.PtrInit().Take()...)
init.Append(ir.TakeInit(n)...)
r := n.Rhs[0]
walkExprListSafe(n.Lhs, init)
@ -142,15 +142,13 @@ func walkAssignFunc(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
// walkAssignList walks an OAS2 node.
func walkAssignList(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
init.Append(n.PtrInit().Take()...)
walkExprListSafe(n.Lhs, init)
walkExprListSafe(n.Rhs, init)
return ir.NewBlockStmt(src.NoXPos, ascompatee(ir.OAS, n.Lhs, n.Rhs, init))
init.Append(ir.TakeInit(n)...)
return ir.NewBlockStmt(src.NoXPos, ascompatee(ir.OAS, n.Lhs, n.Rhs))
}
// walkAssignMapRead walks an OAS2MAPR node.
func walkAssignMapRead(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
init.Append(n.PtrInit().Take()...)
init.Append(ir.TakeInit(n)...)
r := n.Rhs[0].(*ir.IndexExpr)
walkExprListSafe(n.Lhs, init)
@ -213,7 +211,7 @@ func walkAssignMapRead(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
// walkAssignRecv walks an OAS2RECV node.
func walkAssignRecv(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
init.Append(n.PtrInit().Take()...)
init.Append(ir.TakeInit(n)...)
r := n.Rhs[0].(*ir.UnaryExpr) // recv
walkExprListSafe(n.Lhs, init)
@ -232,56 +230,21 @@ func walkAssignRecv(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
// walkReturn walks an ORETURN node.
func walkReturn(n *ir.ReturnStmt) ir.Node {
ir.CurFunc.NumReturns++
fn := ir.CurFunc
fn.NumReturns++
if len(n.Results) == 0 {
return n
}
if (ir.HasNamedResults(ir.CurFunc) && len(n.Results) > 1) || paramoutheap(ir.CurFunc) {
// assign to the function out parameters,
// so that ascompatee can fix up conflicts
var rl []ir.Node
for _, ln := range ir.CurFunc.Dcl {
cl := ln.Class_
if cl == ir.PAUTO || cl == ir.PAUTOHEAP {
break
}
if cl == ir.PPARAMOUT {
var ln ir.Node = ln
if ir.IsParamStackCopy(ln) {
ln = walkExpr(typecheck.Expr(ir.NewStarExpr(base.Pos, ln.Name().Heapaddr)), nil)
}
rl = append(rl, ln)
}
}
if got, want := len(n.Results), len(rl); got != want {
// order should have rewritten multi-value function calls
// with explicit OAS2FUNC nodes.
base.Fatalf("expected %v return arguments, have %v", want, got)
}
// move function calls out, to make ascompatee's job easier.
walkExprListSafe(n.Results, n.PtrInit())
n.Results.Set(ascompatee(n.Op(), rl, n.Results, n.PtrInit()))
return n
results := fn.Type().Results().FieldSlice()
dsts := make([]ir.Node, len(results))
for i, v := range results {
// TODO(mdempsky): typecheck should have already checked the result variables.
dsts[i] = typecheck.AssignExpr(v.Nname.(*ir.Name))
}
walkExprList(n.Results, n.PtrInit())
// For each return parameter (lhs), assign the corresponding result (rhs).
lhs := ir.CurFunc.Type().Results()
rhs := n.Results
res := make([]ir.Node, lhs.NumFields())
for i, nl := range lhs.FieldSlice() {
nname := ir.AsNode(nl.Nname)
if ir.IsParamHeapCopy(nname) {
nname = nname.Name().Stackcopy
}
a := ir.NewAssignStmt(base.Pos, nname, rhs[i])
res[i] = convas(a, n.PtrInit())
}
n.Results.Set(res)
n.Results = ascompatee(n.Op(), dsts, n.Results)
return n
}
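
In source terms, walkReturn now lowers every return into assignments to the declared result parameters (illustrative example with made-up names):

	package main

	import "fmt"

	func div(a, b int) (q, r int) {
		// effectively rewritten as: q, r = a/b, a%b; return
		return a / b, a % b
	}

	func main() {
		fmt.Println(div(7, 2)) // 3 1
	}
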
@ -342,20 +305,12 @@ func ascompatet(nl ir.Nodes, nr *types.Type) []ir.Node {
// ascompatee checks that an expression list can be assigned
// to an expression list, as in
// expr-list = expr-list
func ascompatee(op ir.Op, nl, nr []ir.Node, init *ir.Nodes) []ir.Node {
func ascompatee(op ir.Op, nl, nr []ir.Node) []ir.Node {
// cannot happen: should have been rejected during type checking
if len(nl) != len(nr) {
base.Fatalf("assignment operands mismatch: %+v / %+v", ir.Nodes(nl), ir.Nodes(nr))
}
// ensure order of evaluation for function calls
for i := range nl {
nl[i] = safeExpr(nl[i], init)
}
for i := range nr {
nr[i] = safeExpr(nr[i], init)
}
var assigned ir.NameSet
var memWrite bool
@ -376,27 +331,22 @@ func ascompatee(op ir.Op, nl, nr []ir.Node, init *ir.Nodes) []ir.Node {
// If a needed expression may be affected by an
// earlier assignment, make an early copy of that
// expression and use the copy instead.
var early []ir.Node
var early ir.Nodes
save := func(np *ir.Node) {
if n := *np; affected(n) {
tmp := ir.Node(typecheck.Temp(n.Type()))
as := typecheck.Stmt(ir.NewAssignStmt(base.Pos, tmp, n))
early = append(early, as)
*np = tmp
*np = copyExpr(n, n.Type(), &early)
}
}
var late []ir.Node
for i, l := range nl {
r := nr[i]
var late ir.Nodes
for i, lorig := range nl {
l, r := lorig, nr[i]
// Do not generate 'x = x' during return. See issue 4014.
if op == ir.ORETURN && ir.SameSafeExpr(l, r) {
continue
}
as := ir.NewAssignStmt(base.Pos, l, r)
// Save subexpressions needed on left side.
// Drill through non-dereferences.
for {
@ -438,11 +388,11 @@ func ascompatee(op ir.Op, nl, nr []ir.Node, init *ir.Nodes) []ir.Node {
}
// Save expression on right side.
save(&as.Y)
save(&r)
late = append(late, convas(as, init))
appendWalkStmt(&late, convas(ir.NewAssignStmt(base.Pos, lorig, r), &late))
if name == nil || name.Addrtaken() || name.Class_ == ir.PEXTERN || name.Class_ == ir.PAUTOHEAP {
if name == nil || name.Addrtaken() || name.Class == ir.PEXTERN || name.Class == ir.PAUTOHEAP {
memWrite = true
continue
}
@ -450,10 +400,16 @@ func ascompatee(op ir.Op, nl, nr []ir.Node, init *ir.Nodes) []ir.Node {
// We can ignore assignments to blank.
continue
}
if op == ir.ORETURN && types.OrigSym(name.Sym()) == nil {
// We can also ignore assignments to anonymous result
// parameters. These can't appear in expressions anyway.
continue
}
assigned.Add(name)
}
return append(early, late...)
early.Append(late.Take()...)
return early
}
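
The parallel-assignment semantics ascompatee preserves, in plain Go (illustrative): every right-hand side sees the values from before the statement, which is why an affected operand is copied early:

	package main

	import "fmt"

	func main() {
		a, b := 1, 2
		// Both right-hand sides are evaluated with the old values; a is
		// copied to a temporary before "a = b" can clobber it.
		a, b = b, a+b
		fmt.Println(a, b) // 2 3
	}
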
// readsMemory reports whether the evaluation n directly reads from
@ -462,7 +418,7 @@ func readsMemory(n ir.Node) bool {
switch n.Op() {
case ir.ONAME:
n := n.(*ir.Name)
return n.Class_ == ir.PEXTERN || n.Class_ == ir.PAUTOHEAP || n.Addrtaken()
return n.Class == ir.PEXTERN || n.Class == ir.PAUTOHEAP || n.Addrtaken()
case ir.OADD,
ir.OAND,

View File

@ -206,7 +206,7 @@ func walkCopy(n *ir.BinaryExpr, init *ir.Nodes, runtimecall bool) ir.Node {
// walkDelete walks an ODELETE node.
func walkDelete(init *ir.Nodes, n *ir.CallExpr) ir.Node {
init.Append(n.PtrInit().Take()...)
init.Append(ir.TakeInit(n)...)
map_ := n.Args[0]
key := n.Args[1]
map_ = walkExpr(map_, init)
@ -531,7 +531,7 @@ func walkPrint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
t = append(t, n)
}
t = append(t, ir.NewString("\n"))
nn.Args.Set(t)
nn.Args = t
}
// Collapse runs of constant strings.
@ -551,7 +551,7 @@ func walkPrint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
i++
}
}
nn.Args.Set(t)
nn.Args = t
calls := []ir.Node{mkcall("printlock", nil, init)}
for i, n := range nn.Args {
@ -653,7 +653,7 @@ func walkPrint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
walkExprList(calls, init)
r := ir.NewBlockStmt(base.Pos, nil)
r.List.Set(calls)
r.List = calls
return walkStmt(typecheck.Stmt(r))
}


@ -52,7 +52,7 @@ func Closure(fn *ir.Func) {
v = addr
}
v.Class_ = ir.PPARAM
v.Class = ir.PPARAM
decls = append(decls, v)
fld := types.NewField(src.NoXPos, v.Sym(), v.Type())
@ -67,7 +67,7 @@ func Closure(fn *ir.Func) {
}
types.CalcSize(f.Type())
fn.SetType(f.Type()) // update type of ODCLFUNC
fn.Nname.SetType(f.Type()) // update type of ODCLFUNC
} else {
// The closure is not called, so it is going to stay as a closure.
var body []ir.Node
@ -84,7 +84,7 @@ func Closure(fn *ir.Func) {
if v.Byval() && v.Type().Width <= int64(2*types.PtrSize) {
// If it is a small variable captured by value, downgrade it to PAUTO.
v.Class_ = ir.PAUTO
v.Class = ir.PAUTO
fn.Dcl = append(fn.Dcl, v)
body = append(body, ir.NewAssignStmt(base.Pos, v, cr))
} else {
@ -92,7 +92,7 @@ func Closure(fn *ir.Func) {
// and initialize in entry prologue.
addr := typecheck.NewName(typecheck.Lookup("&" + v.Sym().Name))
addr.SetType(types.NewPtr(v.Type()))
addr.Class_ = ir.PAUTO
addr.Class = ir.PAUTO
addr.SetUsed(true)
addr.Curfn = fn
fn.Dcl = append(fn.Dcl, addr)
@ -107,7 +107,7 @@ func Closure(fn *ir.Func) {
if len(body) > 0 {
typecheck.Stmts(body)
fn.Enter.Set(body)
fn.Enter = body
fn.SetNeedctxt(true)
}
}
@ -129,9 +129,9 @@ func walkClosure(clo *ir.ClosureExpr, init *ir.Nodes) ir.Node {
typ := typecheck.ClosureType(clo)
clos := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ).(ir.Ntype), nil)
clos := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ), nil)
clos.SetEsc(clo.Esc())
clos.List.Set(append([]ir.Node{ir.NewUnaryExpr(base.Pos, ir.OCFUNC, fn.Nname)}, fn.ClosureEnter...))
clos.List = append([]ir.Node{ir.NewUnaryExpr(base.Pos, ir.OCFUNC, fn.Nname)}, closureArgs(clo)...)
addr := typecheck.NodAddr(clos)
addr.SetEsc(clo.Esc())
@ -144,14 +144,34 @@ func walkClosure(clo *ir.ClosureExpr, init *ir.Nodes) ir.Node {
if !types.Identical(typ, x.Type()) {
panic("closure type does not match order's assigned type")
}
addr.Alloc = x
addr.Prealloc = x
clo.Prealloc = nil
}
return walkExpr(cfn, init)
}
func walkCallPart(n *ir.CallPartExpr, init *ir.Nodes) ir.Node {
// closureArgs returns a slice of expressions that can be used to
// initialize the given closure's free variables. These correspond
// one-to-one with the variables in clo.Func.ClosureVars, and will be
// either an ONAME node (if the variable is captured by value) or an
// OADDR-of-ONAME node (if not).
func closureArgs(clo *ir.ClosureExpr) []ir.Node {
fn := clo.Func
args := make([]ir.Node, len(fn.ClosureVars))
for i, v := range fn.ClosureVars {
var outer ir.Node
outer = v.Outer
if !v.Byval() {
outer = typecheck.NodAddrAt(fn.Pos(), outer)
}
args[i] = typecheck.Expr(outer)
}
return args
}
func walkCallPart(n *ir.SelectorExpr, init *ir.Nodes) ir.Node {
// Create closure in the form of a composite literal.
// For x.M with receiver (x) type T, the generated code looks like:
//
@ -174,9 +194,9 @@ func walkCallPart(n *ir.CallPartExpr, init *ir.Nodes) ir.Node {
typ := typecheck.PartialCallType(n)
clos := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ).(ir.Ntype), nil)
clos := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ), nil)
clos.SetEsc(n.Esc())
clos.List = []ir.Node{ir.NewUnaryExpr(base.Pos, ir.OCFUNC, n.Func.Nname), n.X}
clos.List = []ir.Node{ir.NewUnaryExpr(base.Pos, ir.OCFUNC, typecheck.MethodValueWrapper(n).Nname), n.X}
addr := typecheck.NodAddr(clos)
addr.SetEsc(n.Esc())
@ -189,7 +209,7 @@ func walkCallPart(n *ir.CallPartExpr, init *ir.Nodes) ir.Node {
if !types.Identical(typ, x.Type()) {
panic("partial call type does not match order's assigned type")
}
addr.Alloc = x
addr.Prealloc = x
n.Prealloc = nil
}
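
walkCallPart lowers a method value x.M into a composite literal pairing a wrapper function (here obtained via typecheck.MethodValueWrapper) with the receiver, which is why the receiver is evaluated once, when the method value is created:

```go
package main

import "fmt"

type T struct{ n int }

func (t T) Get() int { return t.n }

func main() {
	x := T{n: 1}

	get := x.Get // receiver x is copied into the method value here

	x.n = 2
	fmt.Println(get(), x.Get()) // 1 2
}
```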

View File

@ -59,7 +59,7 @@ func (c initContext) String() string {
func readonlystaticname(t *types.Type) *ir.Name {
n := staticinit.StaticName(t)
n.MarkReadonly()
n.Sym().Linksym().Set(obj.AttrContentAddressable, true)
n.Linksym().Set(obj.AttrContentAddressable, true)
return n
}
@ -68,7 +68,7 @@ func isSimpleName(nn ir.Node) bool {
return false
}
n := nn.(*ir.Name)
return n.Class_ != ir.PAUTOHEAP && n.Class_ != ir.PEXTERN
return n.Class != ir.PAUTOHEAP && n.Class != ir.PEXTERN
}
func litas(l ir.Node, r ir.Node, init *ir.Nodes) {
@ -294,7 +294,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
// copy static to slice
var_ = typecheck.AssignExpr(var_)
name, offset, ok := staticinit.StaticLoc(var_)
if !ok || name.Class_ != ir.PEXTERN {
if !ok || name.Class != ir.PEXTERN {
base.Fatalf("slicelit: %v", var_)
}
staticdata.InitSlice(name, offset, vstat, t.NumElem())
@ -539,7 +539,7 @@ func anylit(n ir.Node, var_ ir.Node, init *ir.Nodes) {
appendWalkStmt(init, ir.NewAssignStmt(base.Pos, var_, n))
case ir.OMETHEXPR:
n := n.(*ir.MethodExpr)
n := n.(*ir.SelectorExpr)
anylit(n.FuncName(), var_, init)
case ir.OPTRLIT:
@ -549,10 +549,10 @@ func anylit(n ir.Node, var_ ir.Node, init *ir.Nodes) {
}
var r ir.Node
if n.Alloc != nil {
if n.Prealloc != nil {
// n.Right is stack temporary used as backing store.
appendWalkStmt(init, ir.NewAssignStmt(base.Pos, n.Alloc, nil)) // zero backing store, just in case (#18410)
r = typecheck.NodAddr(n.Alloc)
appendWalkStmt(init, ir.NewAssignStmt(base.Pos, n.Prealloc, nil)) // zero backing store, just in case (#18410)
r = typecheck.NodAddr(n.Prealloc)
} else {
r = ir.NewUnaryExpr(base.Pos, ir.ONEW, ir.TypeNode(n.X.Type()))
r.SetEsc(n.Esc())
@ -657,7 +657,7 @@ func genAsStatic(as *ir.AssignStmt) {
}
name, offset, ok := staticinit.StaticLoc(as.X)
if !ok || (name.Class_ != ir.PEXTERN && as.X != ir.BlankNode) {
if !ok || (name.Class != ir.PEXTERN && as.X != ir.BlankNode) {
base.Fatalf("genAsStatic: lhs %v", as.X)
}
@ -666,7 +666,7 @@ func genAsStatic(as *ir.AssignStmt) {
staticdata.InitConst(name, offset, r, int(r.Type().Width))
return
case ir.OMETHEXPR:
r := r.(*ir.MethodExpr)
r := r.(*ir.SelectorExpr)
staticdata.InitFunc(name, offset, r.FuncName())
return
case ir.ONAME:
@ -674,7 +674,7 @@ func genAsStatic(as *ir.AssignStmt) {
if r.Offset_ != 0 {
base.Fatalf("genAsStatic %+v", as)
}
if r.Class_ == ir.PFUNC {
if r.Class == ir.PFUNC {
staticdata.InitFunc(name, offset, r)
return
}

View File

@ -68,12 +68,12 @@ func walkConvInterface(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
if ir.Names.Staticuint64s == nil {
ir.Names.Staticuint64s = typecheck.NewName(ir.Pkgs.Runtime.Lookup("staticuint64s"))
ir.Names.Staticuint64s.Class_ = ir.PEXTERN
ir.Names.Staticuint64s.Class = ir.PEXTERN
// The actual type is [256]uint64, but we use [256*8]uint8 so we can address
// individual bytes.
ir.Names.Staticuint64s.SetType(types.NewArray(types.Types[types.TUINT8], 256*8))
ir.Names.Zerobase = typecheck.NewName(ir.Pkgs.Runtime.Lookup("zerobase"))
ir.Names.Zerobase.Class_ = ir.PEXTERN
ir.Names.Zerobase.Class = ir.PEXTERN
ir.Names.Zerobase.SetType(types.Types[types.TUINTPTR])
}
@ -98,7 +98,7 @@ func walkConvInterface(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
xe := ir.NewIndexExpr(base.Pos, ir.Names.Staticuint64s, index)
xe.SetBounded(true)
value = xe
case n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class_ == ir.PEXTERN && n.X.(*ir.Name).Readonly():
case n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class == ir.PEXTERN && n.X.(*ir.Name).Readonly():
// n.Left is a readonly global; use it directly.
value = n.X
case !fromType.IsInterface() && n.Esc() == ir.EscNone && fromType.Width <= 1024:
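
staticuint64s is a read-only runtime table that backs conversions of small integer values to interfaces, avoiding a heap allocation. Whether a particular conversion allocates depends on the compiler version and escape analysis, but the effect can usually be observed with testing.AllocsPerRun; a sketch, not a guaranteed result:

```go
package main

import (
	"fmt"
	"testing"
)

func main() {
	var sink interface{}

	allocs := testing.AllocsPerRun(1000, func() {
		var b uint8 = 42
		sink = b // small integer: data pointer aims at the static table
	})

	fmt.Println(allocs, sink) // typically 0 42
}
```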
@ -438,18 +438,14 @@ func walkCheckPtrAlignment(n *ir.ConvExpr, init *ir.Nodes, count ir.Node) ir.Nod
}
func walkCheckPtrArithmetic(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
// Calling cheapexpr(n, init) below leads to a recursive call
// to walkexpr, which leads us back here again. Use n.Opt to
// Calling cheapexpr(n, init) below leads to a recursive call to
// walkexpr, which leads us back here again. Use n.Checkptr to
// prevent infinite loops.
if opt := n.Opt(); opt == &walkCheckPtrArithmeticMarker {
if n.CheckPtr() {
return n
} else if opt != nil {
// We use n.Opt() here because today it's not used for OCONVNOP. If that changes,
// there's no guarantee that temporarily replacing it is safe, so just hard fail here.
base.Fatalf("unexpected Opt: %v", opt)
}
n.SetOpt(&walkCheckPtrArithmeticMarker)
defer n.SetOpt(nil)
n.SetCheckPtr(true)
defer n.SetCheckPtr(false)
// TODO(mdempsky): Make stricter. We only need to exempt
// reflect.Value.Pointer and reflect.Value.UnsafeAddr.
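
The change swaps the n.Opt marker trick for a dedicated CheckPtr flag, but the guard pattern is the same: set the flag before work that can re-enter, clear it on the way out, and return early if it is already set. A toy version, with a hypothetical Node type:

```go
package main

import "fmt"

// Node is a hypothetical stand-in for the compiler's conversion node.
type Node struct {
	checkPtr bool
}

// walk sets the guard flag before doing work that can re-enter walk
// on the same node, and clears it again on the way out.
func walk(n *Node) {
	if n.checkPtr {
		return // already in progress further up the stack
	}
	n.checkPtr = true
	defer func() { n.checkPtr = false }()

	fmt.Println("processing node")
	walk(n) // the re-entrant call bottoms out at the guard above
}

func main() {
	walk(&Node{})
}
```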

View File

@ -26,7 +26,7 @@ func walkExpr(n ir.Node, init *ir.Nodes) ir.Node {
return n
}
if init == n.PtrInit() {
if n, ok := n.(ir.InitNode); ok && init == n.PtrInit() {
// not okay to use n->ninit when walking n,
// because we might replace n with some other node
// and would lose the init list.
@ -35,7 +35,7 @@ func walkExpr(n ir.Node, init *ir.Nodes) ir.Node {
if len(n.Init()) != 0 {
walkStmtList(n.Init())
init.Append(n.PtrInit().Take()...)
init.Append(ir.TakeInit(n)...)
}
lno := ir.SetPos(n)
@ -52,9 +52,9 @@ func walkExpr(n ir.Node, init *ir.Nodes) ir.Node {
base.Fatalf("expression has untyped type: %+v", n)
}
if n.Op() == ir.ONAME && n.(*ir.Name).Class_ == ir.PAUTOHEAP {
if n.Op() == ir.ONAME && n.(*ir.Name).Class == ir.PAUTOHEAP {
n := n.(*ir.Name)
nn := ir.NewStarExpr(base.Pos, n.Name().Heapaddr)
nn := ir.NewStarExpr(base.Pos, n.Heapaddr)
nn.X.MarkNonNil()
return walkExpr(typecheck.Expr(nn), init)
}
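
ir.TakeInit, used throughout this merge, returns a node's init list and clears it in a single step, replacing the PtrInit().Take() two-step. The idea, sketched on a toy node type (the real helper operates on the ir.InitNode interface):

```go
package main

import "fmt"

type Node struct {
	init []string // statements to run before the node, toy representation
}

// takeInit detaches and returns n's init list, leaving it empty,
// so the statements cannot accidentally be walked twice.
func takeInit(n *Node) []string {
	init := n.init
	n.init = nil
	return init
}

func main() {
	n := &Node{init: []string{"tmp := f()", "check(tmp)"}}

	var out []string
	out = append(out, takeInit(n)...)

	fmt.Println(out, len(n.init)) // [tmp := f() check(tmp)] 0
}
```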
@ -100,7 +100,7 @@ func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
case ir.OMETHEXPR:
// TODO(mdempsky): Do this right after type checking.
n := n.(*ir.MethodExpr)
n := n.(*ir.SelectorExpr)
return n.FuncName()
case ir.ONOT, ir.ONEG, ir.OPLUS, ir.OBITNOT, ir.OREAL, ir.OIMAG, ir.OSPTR, ir.OITAB, ir.OIDATA:
@ -306,7 +306,7 @@ func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
return walkClosure(n.(*ir.ClosureExpr), init)
case ir.OCALLPART:
return walkCallPart(n.(*ir.CallPartExpr), init)
return walkCallPart(n.(*ir.SelectorExpr), init)
}
// No return! Each case must return (or panic),
@ -359,7 +359,7 @@ func safeExpr(n ir.Node, init *ir.Nodes) ir.Node {
if len(n.Init()) != 0 {
walkStmtList(n.Init())
init.Append(n.PtrInit().Take()...)
init.Append(ir.TakeInit(n)...)
}
switch n.Op() {
@ -477,7 +477,7 @@ func walkAddString(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
cat := typecheck.LookupRuntime(fn)
r := ir.NewCallExpr(base.Pos, ir.OCALL, cat, nil)
r.Args.Set(args)
r.Args = args
r1 := typecheck.Expr(r)
r1 = walkExpr(r1, init)
r1.SetType(n.Type())
@ -498,8 +498,7 @@ func walkCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
// Prepend captured variables to argument list.
clo := n.X.(*ir.ClosureExpr)
n.Args.Prepend(clo.Func.ClosureEnter...)
clo.Func.ClosureEnter.Set(nil)
n.Args.Prepend(closureArgs(clo)...)
// Replace OCLOSURE with ONAME/PFUNC.
n.X = clo.Func.Nname
@ -563,8 +562,8 @@ func walkCall1(n *ir.CallExpr, init *ir.Nodes) {
}
}
n.Args.Set(tempAssigns)
n.Rargs.Set(args)
n.Args = tempAssigns
n.Rargs = args
}
// walkDivMod walks an ODIV or OMOD node.
@ -639,12 +638,13 @@ func walkDot(n *ir.SelectorExpr, init *ir.Nodes) ir.Node {
func walkDotType(n *ir.TypeAssertExpr, init *ir.Nodes) ir.Node {
n.X = walkExpr(n.X, init)
// Set up interface type addresses for back end.
n.Ntype = reflectdata.TypePtr(n.Type())
n.DstType = reflectdata.TypePtr(n.Type())
if n.Op() == ir.ODOTTYPE {
n.Ntype.(*ir.AddrExpr).Alloc = reflectdata.TypePtr(n.X.Type())
n.SrcType = reflectdata.TypePtr(n.X.Type())
}
if !n.Type().IsInterface() && !n.X.Type().IsEmptyInterface() {
n.Itab = []ir.Node{reflectdata.ITabAddr(n.Type(), n.X.Type())}
n.Itab = reflectdata.ITabAddr(n.Type(), n.X.Type())
}
return n
}
@ -974,7 +974,7 @@ func usefield(n *ir.SelectorExpr) {
sym := reflectdata.TrackSym(outer, field)
if ir.CurFunc.FieldTrack == nil {
ir.CurFunc.FieldTrack = make(map[*types.Sym]struct{})
ir.CurFunc.FieldTrack = make(map[*obj.LSym]struct{})
}
ir.CurFunc.FieldTrack[sym] = struct{}{}
}

View File

@ -102,7 +102,7 @@ func (o *orderState) newTemp(t *types.Type, clear bool) *ir.Name {
// copyExpr behaves like newTemp but also emits
// code to initialize the temporary to the value n.
func (o *orderState) copyExpr(n ir.Node) ir.Node {
func (o *orderState) copyExpr(n ir.Node) *ir.Name {
return o.copyExpr1(n, false)
}
@ -235,7 +235,7 @@ func (o *orderState) safeExpr(n ir.Node) ir.Node {
// because we emit explicit VARKILL instructions marking the end of those
// temporaries' lifetimes.
func isaddrokay(n ir.Node) bool {
return ir.IsAddressable(n) && (n.Op() != ir.ONAME || n.(*ir.Name).Class_ == ir.PEXTERN || ir.IsAutoTmp(n))
return ir.IsAddressable(n) && (n.Op() != ir.ONAME || n.(*ir.Name).Class == ir.PEXTERN || ir.IsAutoTmp(n))
}
// addrTemp ensures that n is okay to pass by address to runtime routines.
@ -406,7 +406,7 @@ func (o *orderState) edge() {
// Create a new uint8 counter to be allocated in section
// __libfuzzer_extra_counters.
counter := staticinit.StaticName(types.Types[types.TUINT8])
counter.Name().SetLibfuzzerExtraCounter(true)
counter.SetLibfuzzerExtraCounter(true)
// counter += 1
incr := ir.NewAssignOpStmt(base.Pos, ir.OADD, counter, ir.NewInt(1))
@ -423,7 +423,7 @@ func orderBlock(n *ir.Nodes, free map[string][]*ir.Name) {
order.edge()
order.stmtList(*n)
order.cleanTemp(mark)
n.Set(order.out)
*n = order.out
}
// exprInPlace orders the side effects in *np and
@ -466,8 +466,7 @@ func (o *orderState) init(n ir.Node) {
}
return
}
o.stmtList(n.Init())
n.PtrInit().Set(nil)
o.stmtList(ir.TakeInit(n))
}
// call orders the call expression n.
@ -517,8 +516,8 @@ func (o *orderState) call(nn ir.Node) {
if arg.X.Type().IsUnsafePtr() {
x := o.copyExpr(arg.X)
arg.X = x
x.Name().SetAddrtaken(true) // ensure SSA keeps the x variable
n.Body.Append(typecheck.Stmt(ir.NewUnaryExpr(base.Pos, ir.OVARLIVE, x)))
x.SetAddrtaken(true) // ensure SSA keeps the x variable
n.KeepAlive = append(n.KeepAlive, x)
}
}
}
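
The new KeepAlive list plays the same role for lowered runtime calls that runtime.KeepAlive plays in user code: it keeps a variable live across a call that received an unsafe.Pointer derived from it, so the pointee cannot be collected mid-call. The user-level pattern, for reference:

```go
package main

import (
	"fmt"
	"runtime"
	"unsafe"
)

type buf struct{ data [64]byte }

func main() {
	b := new(buf)
	p := unsafe.Pointer(&b.data)

	// Pass the raw pointer somewhere the GC cannot see through...
	fmt.Printf("%p\n", p)

	// ...and keep b live until this point, so the memory p refers to
	// is not reclaimed mid-use.
	runtime.KeepAlive(b)
}
```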
@ -538,21 +537,7 @@ func (o *orderState) call(nn ir.Node) {
}
}
// mapAssign appends n to o.out, introducing temporaries
// to make sure that all map assignments have the form m[k] = x.
// (Note: expr has already been called on n, so we know k is addressable.)
//
// If n is the multiple assignment form ..., m[k], ... = ..., x, ..., the rewrite is
// t1 = m
// t2 = k
// ...., t3, ... = ..., x, ...
// t1[t2] = t3
//
// The temporaries t1, t2 are needed in case the ... being assigned
// contain m or k. They are usually unnecessary, but in the unnecessary
// cases they are also typically registerizable, so not much harm done.
// And this only applies to the multiple-assignment form.
// We could do a more precise analysis if needed, like in walk.go.
// mapAssign appends n to o.out.
func (o *orderState) mapAssign(n ir.Node) {
switch n.Op() {
default:
@ -573,28 +558,7 @@ func (o *orderState) mapAssign(n ir.Node) {
case ir.OAS2, ir.OAS2DOTTYPE, ir.OAS2MAPR, ir.OAS2FUNC:
n := n.(*ir.AssignListStmt)
var post []ir.Node
for i, m := range n.Lhs {
switch {
case m.Op() == ir.OINDEXMAP:
m := m.(*ir.IndexExpr)
if !ir.IsAutoTmp(m.X) {
m.X = o.copyExpr(m.X)
}
if !ir.IsAutoTmp(m.Index) {
m.Index = o.copyExpr(m.Index)
}
fallthrough
case base.Flag.Cfg.Instrumenting && n.Op() == ir.OAS2FUNC && !ir.IsBlank(m):
t := o.newTemp(m.Type(), false)
n.Lhs[i] = t
a := ir.NewAssignStmt(base.Pos, m, t)
post = append(post, typecheck.Stmt(a))
}
}
o.out = append(o.out, n)
o.out = append(o.out, post...)
}
}
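
The deleted comment described temporaries that preserve the spec's two-phase assignment order: all operands are evaluated first, then the assignments are carried out left to right (this is what the issue23017 test added later in this merge exercises). The observable guarantee:

```go
package main

import "fmt"

func main() {
	m := map[int]int{}
	var p *int

	defer func() {
		recover()                 // the nil *p store panics...
		fmt.Println(len(m), m[2]) // ...but prints 1 42: m[2] = 42 already happened
	}()

	// Assignments are carried out left to right, so the map write
	// completes before the panicking pointer write.
	m[2], *p = 42, 2
}
```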
@ -938,8 +902,7 @@ func (o *orderState) stmt(n ir.Node) {
if !ir.IsAutoTmp(recv.X) {
recv.X = o.copyExpr(recv.X)
}
init := *r.PtrInit()
r.PtrInit().Set(nil)
init := ir.TakeInit(r)
colas := r.Def
do := func(i int, t *types.Type) {
@ -955,7 +918,7 @@ func (o *orderState) stmt(n ir.Node) {
if len(init) > 0 && init[0].Op() == ir.ODCL && init[0].(*ir.Decl).X == n {
init = init[1:]
}
dcl := typecheck.Stmt(ir.NewDecl(base.Pos, ir.ODCL, n))
dcl := typecheck.Stmt(ir.NewDecl(base.Pos, ir.ODCL, n.(*ir.Name)))
ncas.PtrInit().Append(dcl)
}
tmp := o.newTemp(t, t.HasPointers())
@ -1000,8 +963,7 @@ func (o *orderState) stmt(n ir.Node) {
// TODO(mdempsky): Is this actually necessary?
// walkselect appears to walk Ninit.
cas.Body.Prepend(cas.Init()...)
cas.PtrInit().Set(nil)
cas.Body.Prepend(ir.TakeInit(cas)...)
}
o.out = append(o.out, n)
@ -1236,9 +1198,9 @@ func (o *orderState) expr1(n, lhs ir.Node) ir.Node {
// If left-hand side doesn't cause a short-circuit, issue right-hand side.
nif := ir.NewIfStmt(base.Pos, r, nil, nil)
if n.Op() == ir.OANDAND {
nif.Body.Set(gen)
nif.Body = gen
} else {
nif.Else.Set(gen)
nif.Else = gen
}
o.out = append(o.out, nif)
return r
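
This is the usual short-circuit lowering: the left operand is evaluated into a temporary r, and the right-hand side's effects (gen) run under an if, in the body for && and in the else branch for ||. Hand-written:

```go
package main

import "fmt"

func a() bool { fmt.Println("a"); return false }
func b() bool { fmt.Println("b"); return true }

func main() {
	// r := a() && b() is lowered to approximately:
	r := a()
	if r { // for ||, b would run in the else branch instead
		r = b()
	}
	fmt.Println(r) // prints "a" then "false"; b never runs
}
```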
@ -1310,7 +1272,7 @@ func (o *orderState) expr1(n, lhs ir.Node) ir.Node {
return n
case ir.OCALLPART:
n := n.(*ir.CallPartExpr)
n := n.(*ir.SelectorExpr)
n.X = o.expr(n.X, nil)
if n.Transient() {
t := typecheck.PartialCallType(n)
@ -1404,7 +1366,7 @@ func (o *orderState) expr1(n, lhs ir.Node) ir.Node {
statics = append(statics, r)
}
n.List.Set(statics)
n.List = statics
if len(dynamics) == 0 {
return n
@ -1451,8 +1413,8 @@ func (o *orderState) as2(n *ir.AssignListStmt) {
o.out = append(o.out, n)
as := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
as.Lhs.Set(left)
as.Rhs.Set(tmplist)
as.Lhs = left
as.Rhs = tmplist
o.stmt(typecheck.Stmt(as))
}

View File

@ -8,13 +8,14 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/ssagen"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/src"
"cmd/internal/sys"
)
func instrument(fn *ir.Func) {
if fn.Pragma&ir.Norace != 0 || (fn.Sym().Linksym() != nil && fn.Sym().Linksym().ABIWrapper()) {
if fn.Pragma&ir.Norace != 0 || (fn.Linksym() != nil && fn.Linksym().ABIWrapper()) {
return
}
@ -36,7 +37,10 @@ func instrument(fn *ir.Func) {
// This only works for amd64. This will not
// work on arm or others that might support
// race in the future.
nodpc := ir.RegFP.CloneName()
nodpc := ir.NewNameAt(src.NoXPos, typecheck.Lookup(".fp"))
nodpc.Class = ir.PPARAM
nodpc.SetUsed(true)
nodpc.SetType(types.Types[types.TUINTPTR])
nodpc.SetFrameOffset(int64(-types.PtrSize))
fn.Dcl = append(fn.Dcl, nodpc)

View File

@ -210,7 +210,7 @@ func walkRange(nrange *ir.RangeStmt) ir.Node {
a.SetTypecheck(1)
a.Lhs = []ir.Node{hv1, hb}
a.Rhs = []ir.Node{ir.NewUnaryExpr(base.Pos, ir.ORECV, ha)}
*nfor.Cond.PtrInit() = []ir.Node{a}
nfor.Cond = ir.InitExpr([]ir.Node{a}, nfor.Cond)
if v1 == nil {
body = nil
} else {
@ -429,7 +429,7 @@ func arrayClear(loop *ir.RangeStmt, v1, v2, a ir.Node) ir.Node {
// i = len(a) - 1
// }
n := ir.NewIfStmt(base.Pos, nil, nil, nil)
n.Body.Set(nil)
n.Body = nil
n.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), ir.NewInt(0))
// hp = &a[0]
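
For context, the walkRange hunk above builds the desugared form of ranging over a channel, with the receive placed in the loop condition's init (now attached via ir.InitExpr). The source-level equivalence, roughly:

```go
package main

import "fmt"

func main() {
	ch := make(chan int, 2)
	ch <- 1
	ch <- 2
	close(ch)

	// for v := range ch { ... } is lowered to approximately:
	for {
		hv1, hb := <-ch // the init of the loop condition
		if !hb {
			break
		}
		v := hv1
		fmt.Println(v)
	}
}
```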

View File

@ -17,13 +17,12 @@ func walkSelect(sel *ir.SelectStmt) {
base.Fatalf("double walkselect")
}
init := sel.Init()
sel.PtrInit().Set(nil)
init := ir.TakeInit(sel)
init = append(init, walkSelectCases(sel.Cases)...)
sel.Cases = nil
sel.Compiled.Set(init)
sel.Compiled = init
walkStmtList(sel.Compiled)
base.Pos = lno
@ -45,8 +44,7 @@ func walkSelectCases(cases []*ir.CommClause) []ir.Node {
l := cas.Init()
if cas.Comm != nil { // not default:
n := cas.Comm
l = append(l, n.Init()...)
n.PtrInit().Set(nil)
l = append(l, ir.TakeInit(n)...)
switch n.Op() {
default:
base.Fatalf("select %v", n.Op())
@ -106,7 +104,7 @@ func walkSelectCases(cases []*ir.CommClause) []ir.Node {
n := cas.Comm
ir.SetPos(n)
r := ir.NewIfStmt(base.Pos, nil, nil, nil)
r.PtrInit().Set(cas.Init())
*r.PtrInit() = cas.Init()
var call ir.Node
switch n.Op() {
default:
@ -138,8 +136,8 @@ func walkSelectCases(cases []*ir.CommClause) []ir.Node {
}
r.Cond = typecheck.Expr(call)
r.Body.Set(cas.Body)
r.Else.Set(append(dflt.Init(), dflt.Body...))
r.Body = cas.Body
r.Else = append(dflt.Init(), dflt.Body...)
return []ir.Node{r, ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)}
}
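
A select containing one communication case plus a default compiles down to an if around a non-blocking runtime helper (selectnbsend or selectnbrecv) instead of the general select machinery. The two forms below behave identically; trySend merely stands in for the runtime helper:

```go
package main

import "fmt"

func main() {
	ch := make(chan int) // unbuffered: sends block

	// The general form...
	select {
	case ch <- 1:
		fmt.Println("sent")
	default:
		fmt.Println("would block")
	}

	// ...is compiled like a non-blocking attempt guarded by an if.
	if trySend(ch, 1) {
		fmt.Println("sent")
	} else {
		fmt.Println("would block")
	}
}

// trySend stands in for the runtime's selectnbsend.
func trySend(ch chan int, v int) bool {
	select {
	case ch <- v:
		return true
	default:
		return false
	}
}
```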
@ -171,8 +169,7 @@ func walkSelectCases(cases []*ir.CommClause) []ir.Node {
for _, cas := range cases {
ir.SetPos(cas)
init = append(init, cas.Init()...)
cas.PtrInit().Set(nil)
init = append(init, ir.TakeInit(cas)...)
n := cas.Comm
if n == nil { // default:

View File

@ -55,19 +55,18 @@ func walkStmt(n ir.Node) ir.Node {
if n.Typecheck() == 0 {
base.Fatalf("missing typecheck: %+v", n)
}
init := n.Init()
n.PtrInit().Set(nil)
init := ir.TakeInit(n)
n = walkExpr(n, &init)
if n.Op() == ir.ONAME {
// copy rewrote to a statement list and a temp for the length.
// Throw away the temp to avoid plain values as statements.
n = ir.NewBlockStmt(n.Pos(), init)
init.Set(nil)
init = nil
}
if len(init) > 0 {
switch n.Op() {
case ir.OAS, ir.OAS2, ir.OBLOCK:
n.PtrInit().Prepend(init...)
n.(ir.InitNode).PtrInit().Prepend(init...)
default:
init.Append(n)
@ -176,12 +175,12 @@ func walkStmtList(s []ir.Node) {
// walkDecl walks an ODCL node.
func walkDecl(n *ir.Decl) ir.Node {
v := n.X.(*ir.Name)
if v.Class_ == ir.PAUTOHEAP {
v := n.X
if v.Class == ir.PAUTOHEAP {
if base.Flag.CompilingRuntime {
base.Errorf("%v escapes to heap, not allowed in runtime", v)
}
nn := ir.NewAssignStmt(base.Pos, v.Name().Heapaddr, callnew(v.Type()))
nn := ir.NewAssignStmt(base.Pos, v.Heapaddr, callnew(v.Type()))
nn.Def = true
return walkStmt(typecheck.Stmt(nn))
}
@ -191,9 +190,8 @@ func walkDecl(n *ir.Decl) ir.Node {
// walkFor walks an OFOR or OFORUNTIL node.
func walkFor(n *ir.ForStmt) ir.Node {
if n.Cond != nil {
walkStmtList(n.Cond.Init())
init := n.Cond.Init()
n.Cond.PtrInit().Set(nil)
init := ir.TakeInit(n.Cond)
walkStmtList(init)
n.Cond = walkExpr(n.Cond, &init)
n.Cond = ir.InitExpr(init, n.Cond)
}
@ -228,7 +226,7 @@ func walkGoDefer(n *ir.GoDeferStmt) ir.Node {
case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER:
call := call.(*ir.CallExpr)
if len(call.Body) > 0 {
if len(call.KeepAlive) > 0 {
n.Call = wrapCall(call, &init)
} else {
n.Call = walkExpr(call, &init)
@ -257,7 +255,7 @@ func walkIf(n *ir.IfStmt) ir.Node {
func wrapCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
if len(n.Init()) != 0 {
walkStmtList(n.Init())
init.Append(n.PtrInit().Take()...)
init.Append(ir.TakeInit(n)...)
}
isBuiltinCall := n.Op() != ir.OCALLFUNC && n.Op() != ir.OCALLMETH && n.Op() != ir.OCALLINTER
@ -267,7 +265,7 @@ func wrapCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
last := len(n.Args) - 1
if va := n.Args[last]; va.Op() == ir.OSLICELIT {
va := va.(*ir.CompLitExpr)
n.Args.Set(append(n.Args[:last], va.List...))
n.Args = append(n.Args[:last], va.List...)
n.IsDDD = false
}
}
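
When the final argument of the wrapped call is a slice literal spread with ..., it is flattened into ordinary arguments and the call loses its IsDDD flag, which matches the source-level equivalence:

```go
package main

import "fmt"

func sum(xs ...int) (t int) {
	for _, x := range xs {
		t += x
	}
	return
}

func main() {
	// A slice-literal spread...
	a := sum([]int{1, 2, 3}...)
	// ...is equivalent to passing the elements directly.
	b := sum(1, 2, 3)
	fmt.Println(a, b) // 6 6
}
```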

View File

@ -201,10 +201,15 @@ func (s *exprSwitch) flush() {
// Merge consecutive integer cases.
if s.exprname.Type().IsInteger() {
consecutive := func(last, next constant.Value) bool {
delta := constant.BinaryOp(next, token.SUB, last)
return constant.Compare(delta, token.EQL, constant.MakeInt64(1))
}
merged := cc[:1]
for _, c := range cc[1:] {
last := &merged[len(merged)-1]
if last.jmp == c.jmp && ir.Int64Val(last.hi)+1 == ir.Int64Val(c.lo) {
if last.jmp == c.jmp && consecutive(last.hi.Val(), c.lo.Val()) {
last.hi = c.lo
} else {
merged = append(merged, c)
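
The old comparison went through ir.Int64Val, which is unsafe once case values exceed MaxInt64 (issue #43480, whose test appears at the end of this merge); the new consecutive helper stays in go/constant arbitrary-precision arithmetic. A standalone check of the same logic:

```go
package main

import (
	"fmt"
	"go/constant"
	"go/token"
)

// consecutive reports whether next == last+1, computed with
// arbitrary-precision constants so large uint64 values are safe.
func consecutive(last, next constant.Value) bool {
	delta := constant.BinaryOp(next, token.SUB, last)
	return constant.Compare(delta, token.EQL, constant.MakeInt64(1))
}

func main() {
	a := constant.MakeUint64(1<<64 - 2) // would overflow int64
	b := constant.MakeUint64(1<<64 - 1)
	fmt.Println(consecutive(a, b)) // true, with no int64 overflow
}
```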
@ -440,7 +445,7 @@ type typeClause struct {
body ir.Nodes
}
func (s *typeSwitch) Add(pos src.XPos, typ *types.Type, caseVar, jmp ir.Node) {
func (s *typeSwitch) Add(pos src.XPos, typ *types.Type, caseVar *ir.Name, jmp ir.Node) {
var body ir.Nodes
if caseVar != nil {
l := []ir.Node{
@ -450,7 +455,7 @@ func (s *typeSwitch) Add(pos src.XPos, typ *types.Type, caseVar, jmp ir.Node) {
typecheck.Stmts(l)
body.Append(l...)
} else {
caseVar = ir.BlankNode
caseVar = ir.BlankNode.(*ir.Name)
}
// cv, ok = iface.(type)

View File

@ -37,36 +37,6 @@ func Walk(fn *ir.Func) {
lno := base.Pos
// Final typecheck for any unused variables.
for i, ln := range fn.Dcl {
if ln.Op() == ir.ONAME && (ln.Class_ == ir.PAUTO || ln.Class_ == ir.PAUTOHEAP) {
ln = typecheck.AssignExpr(ln).(*ir.Name)
fn.Dcl[i] = ln
}
}
// Propagate the used flag for typeswitch variables up to the NONAME in its definition.
for _, ln := range fn.Dcl {
if ln.Op() == ir.ONAME && (ln.Class_ == ir.PAUTO || ln.Class_ == ir.PAUTOHEAP) && ln.Defn != nil && ln.Defn.Op() == ir.OTYPESW && ln.Used() {
ln.Defn.(*ir.TypeSwitchGuard).Used = true
}
}
for _, ln := range fn.Dcl {
if ln.Op() != ir.ONAME || (ln.Class_ != ir.PAUTO && ln.Class_ != ir.PAUTOHEAP) || ln.Sym().Name[0] == '&' || ln.Used() {
continue
}
if defn, ok := ln.Defn.(*ir.TypeSwitchGuard); ok {
if defn.Used {
continue
}
base.ErrorfAt(defn.Tag.Pos(), "%v declared but not used", ln.Sym())
defn.Used = true // suppress repeats
} else {
base.ErrorfAt(ln.Pos(), "%v declared but not used", ln.Sym())
}
}
base.Pos = lno
if base.Errors() > errorsBefore {
return
@ -91,7 +61,7 @@ func Walk(fn *ir.Func) {
func paramoutheap(fn *ir.Func) bool {
for _, ln := range fn.Dcl {
switch ln.Class_ {
switch ln.Class {
case ir.PPARAMOUT:
if ir.IsParamStackCopy(ln) || ln.Addrtaken() {
return true
@ -111,8 +81,7 @@ func walkRecv(n *ir.UnaryExpr) ir.Node {
if n.Typecheck() == 0 {
base.Fatalf("missing typecheck: %+v", n)
}
init := n.Init()
n.PtrInit().Set(nil)
init := ir.TakeInit(n)
n.X = walkExpr(n.X, &init)
call := walkExpr(mkcall1(chanfn("chanrecv1", 2, n.X.Type()), nil, &init, n.X, typecheck.NodNil()), &init)
@ -167,8 +136,8 @@ func paramstoheap(params *types.Type) []ir.Node {
}
if stackcopy := v.Name().Stackcopy; stackcopy != nil {
nn = append(nn, walkStmt(ir.NewDecl(base.Pos, ir.ODCL, v)))
if stackcopy.Class_ == ir.PPARAM {
nn = append(nn, walkStmt(ir.NewDecl(base.Pos, ir.ODCL, v.(*ir.Name))))
if stackcopy.Class == ir.PPARAM {
nn = append(nn, walkStmt(typecheck.Stmt(ir.NewAssignStmt(base.Pos, v, stackcopy))))
}
}
@ -216,7 +185,7 @@ func returnsfromheap(params *types.Type) []ir.Node {
if v == nil {
continue
}
if stackcopy := v.Name().Stackcopy; stackcopy != nil && stackcopy.Class_ == ir.PPARAMOUT {
if stackcopy := v.Name().Stackcopy; stackcopy != nil && stackcopy.Class == ir.PPARAMOUT {
nn = append(nn, walkStmt(typecheck.Stmt(ir.NewAssignStmt(base.Pos, stackcopy, v))))
}
}
@ -377,8 +346,6 @@ func walkAppendArgs(n *ir.CallExpr, init *ir.Nodes) {
var wrapCall_prgen int
var walkCheckPtrArithmeticMarker byte
// appendWalkStmt typechecks and walks stmt and then appends it to init.
func appendWalkStmt(init *ir.Nodes, stmt ir.Node) {
op := stmt.Op()

View File

@ -9,6 +9,8 @@
package main
var never bool
func main() {
{
type X struct {
@ -115,4 +117,16 @@ func main() {
panic("g() != 2")
}
}
{
var g func() int
q := 0
q, g = 1, func() int { return q }
if never {
g = func() int { return 2 }
}
if g() != 1 {
panic("g() != 1")
}
}
}

View File

@ -12,6 +12,8 @@ func main() {
_ = string("a", "b", nil) // ERROR "too many arguments (to conversion to string: string\(.a., .b., nil\))?"
_ = []byte() // ERROR "missing argument (to conversion to \[\]byte: \(\[\]byte\)\(\))?"
_ = string() // ERROR "missing argument (to conversion to string: string\(\))?"
_ = *int() // ERROR "missing argument (to conversion to int: int\(\))?"
_ = (*int)() // ERROR "missing argument (to conversion to \*int: \(\*int\)\(\))?"
_ = name("a", 1, 3.3) // ERROR "too many arguments (to conversion to name: name\(.a., 1, 3.3\))?"
_ = map[string]string(nil, nil) // ERROR "too many arguments (to conversion to map\[string\]string: \(map\[string\]string\)\(nil, nil\))?"
}

View File

@ -0,0 +1,113 @@
// run
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// assignment order in multiple assignments.
// See issue #23017
package main
import "fmt"
func main() {}
func init() {
var m = map[int]int{}
var p *int
defer func() {
recover()
check(1, len(m))
check(42, m[2])
}()
m[2], *p = 42, 2
}
func init() {
var m = map[int]int{}
p := []int{}
defer func() {
recover()
check(1, len(m))
check(2, m[2])
}()
m[2], p[1] = 2, 2
}
func init() {
type P struct{ i int }
var m = map[int]int{}
var p *P
defer func() {
recover()
check(1, len(m))
check(3, m[2])
}()
m[2], p.i = 3, 2
}
func init() {
type T struct{ i int }
var x T
p := &x
p, p.i = new(T), 4
check(4, x.i)
}
func init() {
var m map[int]int
var a int
var p = &a
defer func() {
recover()
check(5, *p)
}()
*p, m[2] = 5, 2
}
var g int
func init() {
var m map[int]int
defer func() {
recover()
check(0, g)
}()
m[0], g = 1, 2
}
func init() {
type T struct{ x struct{ y int } }
var x T
p := &x
p, p.x.y = new(T), 7
check(7, x.x.y)
check(0, p.x.y)
}
func init() {
type T *struct{ x struct{ y int } }
x := struct{ y int }{0}
var q T = &struct{ x struct{ y int } }{x}
p := q
p, p.x.y = nil, 7
check(7, q.x.y)
}
func init() {
x, y := 1, 2
x, y = y, x
check(2, x)
check(1, y)
}
func check(want, got int) {
if want != got {
panic(fmt.Sprintf("wanted %d, but got %d", want, got))
}
}

View File

@ -0,0 +1,124 @@
// errorcheck
// Copyright 2020 The Go Authors. All rights reserved. Use of this
// source code is governed by a BSD-style license that can be found in
// the LICENSE file.
package p
type T int
func (T) Mv() {}
func (*T) Mp() {}
type P1 struct{ T }
type P2 struct{ *T }
type P3 *struct{ T }
type P4 *struct{ *T }
func _() {
{
var p P1
p.Mv()
(&p).Mv()
(*&p).Mv()
p.Mp()
(&p).Mp()
(*&p).Mp()
}
{
var p P2
p.Mv()
(&p).Mv()
(*&p).Mv()
p.Mp()
(&p).Mp()
(*&p).Mp()
}
{
var p P3
p.Mv() // ERROR "undefined"
(&p).Mv() // ERROR "undefined"
(*&p).Mv() // ERROR "undefined"
(**&p).Mv()
(*p).Mv()
(&*p).Mv()
p.Mp() // ERROR "undefined"
(&p).Mp() // ERROR "undefined"
(*&p).Mp() // ERROR "undefined"
(**&p).Mp()
(*p).Mp()
(&*p).Mp()
}
{
var p P4
p.Mv() // ERROR "undefined"
(&p).Mv() // ERROR "undefined"
(*&p).Mv() // ERROR "undefined"
(**&p).Mv()
(*p).Mv()
(&*p).Mv()
p.Mp() // ERROR "undefined"
(&p).Mp() // ERROR "undefined"
(*&p).Mp() // ERROR "undefined"
(**&p).Mp()
(*p).Mp()
(&*p).Mp()
}
}
func _() {
type P5 struct{ T }
type P6 struct{ *T }
type P7 *struct{ T }
type P8 *struct{ *T }
{
var p P5
p.Mv()
(&p).Mv()
(*&p).Mv()
p.Mp()
(&p).Mp()
(*&p).Mp()
}
{
var p P6
p.Mv()
(&p).Mv()
(*&p).Mv()
p.Mp()
(&p).Mp()
(*&p).Mp()
}
{
var p P7
p.Mv() // ERROR "undefined"
(&p).Mv() // ERROR "undefined"
(*&p).Mv() // ERROR "undefined"
(**&p).Mv()
(*p).Mv()
(&*p).Mv()
p.Mp() // ERROR "undefined"
(&p).Mp() // ERROR "undefined"
(*&p).Mp() // ERROR "undefined"
(**&p).Mp()
(*p).Mp()
(&*p).Mp()
}
{
var p P8
p.Mv() // ERROR "undefined"
(&p).Mv() // ERROR "undefined"
(*&p).Mv() // ERROR "undefined"
(**&p).Mv()
(*p).Mv()
(&*p).Mv()
p.Mp() // ERROR "undefined"
(&p).Mp() // ERROR "undefined"
(*&p).Mp() // ERROR "undefined"
(**&p).Mp()
(*p).Mp()
(&*p).Mp()
}
}

View File

@ -0,0 +1,25 @@
// errorcheck
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package p
import "time"
type T int
func (T) Mv() {}
func (*T) Mp() {}
var _ = []int{
T.Mv, // ERROR "cannot use T\.Mv|incompatible type"
(*T).Mv, // ERROR "cannot use \(\*T\)\.Mv|incompatible type"
(*T).Mp, // ERROR "cannot use \(\*T\)\.Mp|incompatible type"
time.Time.GobEncode, // ERROR "cannot use time\.Time\.GobEncode|incompatible type"
(*time.Time).GobEncode, // ERROR "cannot use \(\*time\.Time\)\.GobEncode|incompatible type"
(*time.Time).GobDecode, // ERROR "cannot use \(\*time\.Time\)\.GobDecode|incompatible type"
}

View File

@ -0,0 +1,28 @@
// run
package main
var sp = ""
func f(name string, _ ...interface{}) int {
print(sp, name)
sp = " "
return 0
}
var a = f("a", x)
var b = f("b", y)
var c = f("c", z)
var d = func() int {
if false {
_ = z
}
return f("d")
}()
var e = f("e")
var x int
var y int = 42
var z int = func() int { return 42 }()
func main() { println() }

View File

@ -0,0 +1 @@
e a b c d

View File

@ -0,0 +1,27 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package a
type Here struct{ stuff int }
type Info struct{ Dir string }
func New() Here { return Here{} }
func (h Here) Dir(p string) (Info, error)
type I interface{ M(x string) }
type T = struct {
Here
I
}
var X T
var A = (*T).Dir
var B = T.Dir
var C = X.Dir
var D = (*T).M
var E = T.M
var F = X.M

View File

@ -0,0 +1,38 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package b
import "./a"
var Here = a.New()
var Dir = Here.Dir
type T = struct {
a.Here
a.I
}
var X T
// Test exporting the type of method values for anonymous structs with
// promoted methods.
var A = a.A
var B = a.B
var C = a.C
var D = a.D
var E = a.E
var F = a.F
var G = (*a.T).Dir
var H = a.T.Dir
var I = a.X.Dir
var J = (*a.T).M
var K = a.T.M
var L = a.X.M
var M = (*T).Dir
var N = T.Dir
var O = X.Dir
var P = (*T).M
var Q = T.M
var R = X.M

View File

@ -0,0 +1,7 @@
// compiledir
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ignored

View File

@ -0,0 +1,33 @@
// run
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Issue #43480: ICE on large uint64 constants in switch cases.
package main
func isPow10(x uint64) bool {
switch x {
case 1e0, 1e1, 1e2, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9,
1e10, 1e11, 1e12, 1e13, 1e14, 1e15, 1e16, 1e17, 1e18, 1e19:
return true
}
return false
}
func main() {
var x uint64 = 1
for {
if !isPow10(x) || isPow10(x-1) || isPow10(x+1) {
panic(x)
}
next := x * 10
if next/10 != x {
break // overflow
}
x = next
}
}