CL 35554 taught order.go to use static variables for constants that needed
to be addressable for runtime routines. However, there is one class of
runtime routines that do not actually need an addressable value: fast map
access routines. This CL teaches order.go to avoid using static variables
for addressability in those cases. Instead, it avoids introducing a temp at
all, which the backend would just have to optimize away.

Fixes #19015.

Change-Id: I5ef780c604fac3fb48dabb23a344435e283cb832
Reviewed-on: https://go-review.googlesource.com/36693
Reviewed-by: Keith Randall <khr@golang.org>
Reviewed-by: Cherry Zhang <cherryyz@google.com>

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"cmd/internal/obj"
	"cmd/internal/sys"
	"fmt"
	"strings"
)

// The constant is known to runtime.
const (
	tmpstringbufsize = 32
)

func walk(fn *Node) {
	Curfn = fn

	if Debug['W'] != 0 {
		s := fmt.Sprintf("\nbefore %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Nbody)
	}

	lno := lineno

	// Final typecheck for any unused variables.
	for i, ln := range fn.Func.Dcl {
		if ln.Op == ONAME && (ln.Class == PAUTO || ln.Class == PAUTOHEAP) {
			ln = typecheck(ln, Erv|Easgn)
			fn.Func.Dcl[i] = ln
		}
	}

	// Propagate the used flag for typeswitch variables up to the NONAME in its definition.
	for _, ln := range fn.Func.Dcl {
		if ln.Op == ONAME && (ln.Class == PAUTO || ln.Class == PAUTOHEAP) && ln.Name.Defn != nil && ln.Name.Defn.Op == OTYPESW && ln.Used {
			ln.Name.Defn.Left.Used = true
		}
	}

	for _, ln := range fn.Func.Dcl {
		if ln.Op != ONAME || (ln.Class != PAUTO && ln.Class != PAUTOHEAP) || ln.Sym.Name[0] == '&' || ln.Used {
			continue
		}
		if defn := ln.Name.Defn; defn != nil && defn.Op == OTYPESW {
			if defn.Left.Used {
				continue
			}
			lineno = defn.Left.Pos
			yyerror("%v declared and not used", ln.Sym)
			defn.Left.Used = true // suppress repeats
		} else {
			lineno = ln.Pos
			yyerror("%v declared and not used", ln.Sym)
		}
	}

	lineno = lno
	if nerrors != 0 {
		return
	}
	walkstmtlist(Curfn.Nbody.Slice())
	if Debug['W'] != 0 {
		s := fmt.Sprintf("after walk %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Nbody)
	}

	heapmoves()
	if Debug['W'] != 0 && Curfn.Func.Enter.Len() > 0 {
		s := fmt.Sprintf("enter %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Func.Enter)
	}
}

func walkstmtlist(s []*Node) {
	for i := range s {
		s[i] = walkstmt(s[i])
	}
}

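// samelist reports whether a and b have the same length and contain the same nodes in the same order.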
func samelist(a, b []*Node) bool {
	if len(a) != len(b) {
		return false
	}
	for i, n := range a {
		if n != b[i] {
			return false
		}
	}
	return true
}

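// paramoutheap reports whether fn has a result parameter that is heap-allocated or has had its address taken.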
func paramoutheap(fn *Node) bool {
	for _, ln := range fn.Func.Dcl {
		switch ln.Class {
		case PPARAMOUT:
			if ln.isParamStackCopy() || ln.Addrtaken {
				return true
			}

		case PAUTO:
			// stop early - parameters are over
			return false
		}
	}

	return false
}

// adds "adjust" to all the argument locations for the call n.
// n must be a defer or go node that has already been walked.
func adjustargs(n *Node, adjust int) {
	var arg *Node
	var lhs *Node

	callfunc := n.Left
	for _, arg = range callfunc.List.Slice() {
		if arg.Op != OAS {
			yyerror("call arg not assignment")
		}
		lhs = arg.Left
		if lhs.Op == ONAME {
			// This is a temporary introduced by reorder1.
			// The real store to the stack appears later in the arg list.
			continue
		}

		if lhs.Op != OINDREGSP {
			yyerror("call argument store does not use OINDREGSP")
		}

		// can't really check this in machine-indep code.
		//if(lhs->val.u.reg != D_SP)
		//	yyerror("call arg assign not indreg(SP)");
		lhs.Xoffset += int64(adjust)
	}
}

// The result of walkstmt MUST be assigned back to n, e.g.
// n.Left = walkstmt(n.Left)
func walkstmt(n *Node) *Node {
	if n == nil {
		return n
	}
	if n.IsStatic { // don't walk, generated by anylit.
		return n
	}

	setlineno(n)

	walkstmtlist(n.Ninit.Slice())

	switch n.Op {
	default:
		if n.Op == ONAME {
			yyerror("%v is not a top level statement", n.Sym)
		} else {
			yyerror("%v is not a top level statement", n.Op)
		}
		Dump("nottop", n)

	case OAS,
		OASOP,
		OAS2,
		OAS2DOTTYPE,
		OAS2RECV,
		OAS2FUNC,
		OAS2MAPR,
		OCLOSE,
		OCOPY,
		OCALLMETH,
		OCALLINTER,
		OCALL,
		OCALLFUNC,
		ODELETE,
		OSEND,
		OPRINT,
		OPRINTN,
		OPANIC,
		OEMPTY,
		ORECOVER,
		OGETG:
		if n.Typecheck == 0 {
			Fatalf("missing typecheck: %+v", n)
		}
		wascopy := n.Op == OCOPY
		init := n.Ninit
		n.Ninit.Set(nil)
		n = walkexpr(n, &init)
		n = addinit(n, init.Slice())
		if wascopy && n.Op == OCONVNOP {
			n.Op = OEMPTY // don't leave plain values as statements.
		}

	// special case for a receive where we throw away
	// the value received.
	case ORECV:
		if n.Typecheck == 0 {
			Fatalf("missing typecheck: %+v", n)
		}
		init := n.Ninit
		n.Ninit.Set(nil)

		n.Left = walkexpr(n.Left, &init)
		n = mkcall1(chanfn("chanrecv1", 2, n.Left.Type), nil, &init, typename(n.Left.Type), n.Left, nodnil())
		n = walkexpr(n, &init)

		n = addinit(n, init.Slice())

	case OBREAK,
		OCONTINUE,
		OFALL,
		OGOTO,
		OLABEL,
		ODCLCONST,
		ODCLTYPE,
		OCHECKNIL,
		OVARKILL,
		OVARLIVE:
		break

	case ODCL:
		v := n.Left
		if v.Class == PAUTOHEAP {
			if compiling_runtime {
				yyerror("%v escapes to heap, not allowed in runtime.", v)
			}
			if prealloc[v] == nil {
				prealloc[v] = callnew(v.Type)
			}
			nn := nod(OAS, v.Name.Param.Heapaddr, prealloc[v])
			nn.Colas = true
			nn = typecheck(nn, Etop)
			return walkstmt(nn)
		}

	case OBLOCK:
		walkstmtlist(n.List.Slice())

	case OXCASE:
		yyerror("case statement out of place")
		n.Op = OCASE
		fallthrough

	case OCASE:
		n.Right = walkstmt(n.Right)

	case ODEFER:
		hasdefer = true
		switch n.Left.Op {
		case OPRINT, OPRINTN:
			n.Left = walkprintfunc(n.Left, &n.Ninit)

		case OCOPY:
			n.Left = copyany(n.Left, &n.Ninit, true)

		default:
			n.Left = walkexpr(n.Left, &n.Ninit)
		}

		// make room for size & fn arguments.
		adjustargs(n, 2*Widthptr)

	case OFOR:
		if n.Left != nil {
			walkstmtlist(n.Left.Ninit.Slice())
			init := n.Left.Ninit
			n.Left.Ninit.Set(nil)
			n.Left = walkexpr(n.Left, &init)
			n.Left = addinit(n.Left, init.Slice())
		}

		n.Right = walkstmt(n.Right)
		walkstmtlist(n.Nbody.Slice())

	case OIF:
		n.Left = walkexpr(n.Left, &n.Ninit)
		walkstmtlist(n.Nbody.Slice())
		walkstmtlist(n.Rlist.Slice())

	case OPROC:
		switch n.Left.Op {
		case OPRINT, OPRINTN:
			n.Left = walkprintfunc(n.Left, &n.Ninit)

		case OCOPY:
			n.Left = copyany(n.Left, &n.Ninit, true)

		default:
			n.Left = walkexpr(n.Left, &n.Ninit)
		}

		// make room for size & fn arguments.
		adjustargs(n, 2*Widthptr)

	case ORETURN:
		walkexprlist(n.List.Slice(), &n.Ninit)
		if n.List.Len() == 0 {
			break
		}
		if (Curfn.Type.FuncType().Outnamed && n.List.Len() > 1) || paramoutheap(Curfn) {
			// assign to the function out parameters,
			// so that reorder3 can fix up conflicts
			var rl []*Node

			var cl Class
			for _, ln := range Curfn.Func.Dcl {
				cl = ln.Class
				if cl == PAUTO || cl == PAUTOHEAP {
					break
				}
				if cl == PPARAMOUT {
					if ln.isParamStackCopy() {
						ln = walkexpr(typecheck(nod(OIND, ln.Name.Param.Heapaddr, nil), Erv), nil)
					}
					rl = append(rl, ln)
				}
			}

			if got, want := n.List.Len(), len(rl); got != want {
				// order should have rewritten multi-value function calls
				// with explicit OAS2FUNC nodes.
				Fatalf("expected %v return arguments, have %v", want, got)
			}

			if samelist(rl, n.List.Slice()) {
				// special return in disguise
				n.List.Set(nil)

				break
			}

			// move function calls out, to make reorder3's job easier.
			walkexprlistsafe(n.List.Slice(), &n.Ninit)

			ll := ascompatee(n.Op, rl, n.List.Slice(), &n.Ninit)
			n.List.Set(reorder3(ll))
			ls := n.List.Slice()
			for i, n := range ls {
				ls[i] = applywritebarrier(n)
			}
			break
		}

		ll := ascompatte(n.Op, nil, false, Curfn.Type.Results(), n.List.Slice(), 1, &n.Ninit)
		n.List.Set(ll)

	case ORETJMP:
		break

	case OSELECT:
		walkselect(n)

	case OSWITCH:
		walkswitch(n)

	case ORANGE:
		walkrange(n)

	case OXFALL:
		yyerror("fallthrough statement out of place")
		n.Op = OFALL
	}

	if n.Op == ONAME {
		Fatalf("walkstmt ended up with name: %+v", n)
	}
	return n
}

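// isSmallMakeSlice reports whether n is an OMAKESLICE with constant length and capacity
// whose total element storage is under 64 kB, small enough for the caller to allocate
// the backing array on the stack when the slice does not escape.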
func isSmallMakeSlice(n *Node) bool {
|
|
if n.Op != OMAKESLICE {
|
|
return false
|
|
}
|
|
l := n.Left
|
|
r := n.Right
|
|
if r == nil {
|
|
r = l
|
|
}
|
|
t := n.Type
|
|
|
|
return smallintconst(l) && smallintconst(r) && (t.Elem().Width == 0 || r.Int64() < (1<<16)/t.Elem().Width)
|
|
}
|
|
|
|
// walk the whole tree of the body of an
|
|
// expression or simple statement.
|
|
// the types of expressions are calculated.
|
|
// compile-time constants are evaluated.
|
|
// complex side effects like statements are appended to init
|
|
func walkexprlist(s []*Node, init *Nodes) {
|
|
for i := range s {
|
|
s[i] = walkexpr(s[i], init)
|
|
}
|
|
}
|
|
|
|
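// walkexprlistsafe is like walkexprlist, but first applies safeexpr to each
// expression so it can be reused without re-evaluating its side effects.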
func walkexprlistsafe(s []*Node, init *Nodes) {
|
|
for i, n := range s {
|
|
s[i] = safeexpr(n, init)
|
|
s[i] = walkexpr(s[i], init)
|
|
}
|
|
}
|
|
|
|
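// walkexprlistcheap is like walkexprlist, but first applies cheapexpr to each
// expression, copying anything non-trivial into a temporary.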
func walkexprlistcheap(s []*Node, init *Nodes) {
|
|
for i, n := range s {
|
|
s[i] = cheapexpr(n, init)
|
|
s[i] = walkexpr(s[i], init)
|
|
}
|
|
}
|
|
|
|
// Build name of function for interface conversion.
|
|
// Not all names are possible
|
|
// (e.g., we'll never generate convE2E or convE2I or convI2E).
|
|
func convFuncName(from, to *Type) string {
|
|
tkind := to.iet()
|
|
switch from.iet() {
|
|
case 'I':
|
|
switch tkind {
|
|
case 'I':
|
|
return "convI2I"
|
|
}
|
|
case 'T':
|
|
switch tkind {
|
|
case 'E':
|
|
return "convT2E"
|
|
case 'I':
|
|
return "convT2I"
|
|
}
|
|
}
|
|
Fatalf("unknown conv func %c2%c", from.iet(), to.iet())
|
|
panic("unreachable")
|
|
}
|
|
|
|
// The result of walkexpr MUST be assigned back to n, e.g.
|
|
// n.Left = walkexpr(n.Left, init)
|
|
func walkexpr(n *Node, init *Nodes) *Node {
|
|
if n == nil {
|
|
return n
|
|
}
|
|
|
|
if init == &n.Ninit {
|
|
// not okay to use n->ninit when walking n,
|
|
// because we might replace n with some other node
|
|
// and would lose the init list.
|
|
Fatalf("walkexpr init == &n->ninit")
|
|
}
|
|
|
|
if n.Ninit.Len() != 0 {
|
|
walkstmtlist(n.Ninit.Slice())
|
|
init.AppendNodes(&n.Ninit)
|
|
}
|
|
|
|
lno := setlineno(n)
|
|
|
|
if Debug['w'] > 1 {
|
|
Dump("walk-before", n)
|
|
}
|
|
|
|
if n.Typecheck != 1 {
|
|
Fatalf("missed typecheck: %+v", n)
|
|
}
|
|
|
|
if n.Op == ONAME && n.Class == PAUTOHEAP {
|
|
nn := nod(OIND, n.Name.Param.Heapaddr, nil)
|
|
nn = typecheck(nn, Erv)
|
|
nn = walkexpr(nn, init)
|
|
nn.Left.NonNil = true
|
|
return nn
|
|
}
|
|
|
|
opswitch:
|
|
switch n.Op {
|
|
default:
|
|
Dump("walk", n)
|
|
Fatalf("walkexpr: switch 1 unknown op %+S", n)
|
|
|
|
case OTYPE,
|
|
ONONAME,
|
|
OINDREGSP,
|
|
OEMPTY,
|
|
OGETG:
|
|
|
|
case ONOT,
|
|
OMINUS,
|
|
OPLUS,
|
|
OCOM,
|
|
OREAL,
|
|
OIMAG,
|
|
ODOTMETH,
|
|
ODOTINTER:
|
|
n.Left = walkexpr(n.Left, init)
|
|
|
|
case OIND:
|
|
n.Left = walkexpr(n.Left, init)
|
|
|
|
case ODOT:
|
|
usefield(n)
|
|
n.Left = walkexpr(n.Left, init)
|
|
|
|
case ODOTPTR:
|
|
usefield(n)
|
|
if n.Op == ODOTPTR && n.Left.Type.Elem().Width == 0 {
|
|
// No actual copy will be generated, so emit an explicit nil check.
|
|
n.Left = cheapexpr(n.Left, init)
|
|
|
|
checknil(n.Left, init)
|
|
}
|
|
|
|
n.Left = walkexpr(n.Left, init)
|
|
|
|
case OEFACE:
|
|
n.Left = walkexpr(n.Left, init)
|
|
n.Right = walkexpr(n.Right, init)
|
|
|
|
case OSPTR, OITAB, OIDATA:
|
|
n.Left = walkexpr(n.Left, init)
|
|
|
|
case OLEN, OCAP:
|
|
n.Left = walkexpr(n.Left, init)
|
|
|
|
// replace len(*[10]int) with 10.
|
|
// delayed until now to preserve side effects.
|
|
t := n.Left.Type
|
|
|
|
if t.IsPtr() {
|
|
t = t.Elem()
|
|
}
|
|
if t.IsArray() {
|
|
safeexpr(n.Left, init)
|
|
Nodconst(n, n.Type, t.NumElem())
|
|
n.Typecheck = 1
|
|
}
|
|
|
|
case OLSH, ORSH:
|
|
n.Left = walkexpr(n.Left, init)
|
|
n.Right = walkexpr(n.Right, init)
|
|
t := n.Left.Type
|
|
n.Bounded = bounded(n.Right, 8*t.Width)
|
|
if Debug['m'] != 0 && n.Etype != 0 && !Isconst(n.Right, CTINT) {
|
|
Warn("shift bounds check elided")
|
|
}
|
|
|
|
case OAND,
|
|
OSUB,
|
|
OHMUL,
|
|
OMUL,
|
|
OLT,
|
|
OLE,
|
|
OGE,
|
|
OGT,
|
|
OADD,
|
|
OOR,
|
|
OXOR:
|
|
n.Left = walkexpr(n.Left, init)
|
|
n.Right = walkexpr(n.Right, init)
|
|
|
|
case OCOMPLEX:
|
|
// Use results from call expression as arguments for complex.
|
|
if n.Left == nil && n.Right == nil {
|
|
n.Left = n.List.First()
|
|
n.Right = n.List.Second()
|
|
}
|
|
n.Left = walkexpr(n.Left, init)
|
|
n.Right = walkexpr(n.Right, init)
|
|
|
|
case OEQ, ONE:
|
|
n.Left = walkexpr(n.Left, init)
|
|
n.Right = walkexpr(n.Right, init)
|
|
|
|
// Disable safemode while compiling this code: the code we
|
|
// generate internally can refer to unsafe.Pointer.
|
|
// In this case it can happen if we need to generate an ==
|
|
// for a struct containing a reflect.Value, which itself has
|
|
// an unexported field of type unsafe.Pointer.
|
|
old_safemode := safemode
|
|
safemode = false
|
|
n = walkcompare(n, init)
|
|
safemode = old_safemode
|
|
|
|
case OANDAND, OOROR:
|
|
n.Left = walkexpr(n.Left, init)
|
|
|
|
// cannot put side effects from n.Right on init,
|
|
// because they cannot run before n.Left is checked.
|
|
// save elsewhere and store on the eventual n.Right.
|
|
var ll Nodes
|
|
|
|
n.Right = walkexpr(n.Right, &ll)
|
|
n.Right = addinit(n.Right, ll.Slice())
|
|
n = walkinrange(n, init)
|
|
|
|
case OPRINT, OPRINTN:
|
|
walkexprlist(n.List.Slice(), init)
|
|
n = walkprint(n, init)
|
|
|
|
case OPANIC:
|
|
n = mkcall("gopanic", nil, init, n.Left)
|
|
|
|
case ORECOVER:
|
|
n = mkcall("gorecover", n.Type, init, nod(OADDR, nodfp, nil))
|
|
|
|
case OLITERAL:
|
|
n.Addable = true
|
|
|
|
case OCLOSUREVAR, OCFUNC:
|
|
n.Addable = true
|
|
|
|
case ONAME:
|
|
n.Addable = true
|
|
|
|
case OCALLINTER:
|
|
usemethod(n)
|
|
t := n.Left.Type
|
|
if n.List.Len() != 0 && n.List.First().Op == OAS {
|
|
break
|
|
}
|
|
n.Left = walkexpr(n.Left, init)
|
|
walkexprlist(n.List.Slice(), init)
|
|
ll := ascompatte(n.Op, n, n.Isddd, t.Params(), n.List.Slice(), 0, init)
|
|
n.List.Set(reorder1(ll))
|
|
|
|
case OCALLFUNC:
|
|
if n.Left.Op == OCLOSURE {
|
|
// Transform direct call of a closure to call of a normal function.
|
|
// transformclosure already did all preparation work.
|
|
|
|
// Prepend captured variables to argument list.
|
|
n.List.Prepend(n.Left.Func.Enter.Slice()...)
|
|
|
|
n.Left.Func.Enter.Set(nil)
|
|
|
|
// Replace OCLOSURE with ONAME/PFUNC.
|
|
n.Left = n.Left.Func.Closure.Func.Nname
|
|
|
|
// Update type of OCALLFUNC node.
|
|
// Output arguments have not changed, but their offsets could have.
|
|
if n.Left.Type.Results().NumFields() == 1 {
|
|
n.Type = n.Left.Type.Results().Field(0).Type
|
|
} else {
|
|
n.Type = n.Left.Type.Results()
|
|
}
|
|
}
|
|
|
|
t := n.Left.Type
|
|
if n.List.Len() != 0 && n.List.First().Op == OAS {
|
|
break
|
|
}
|
|
|
|
n.Left = walkexpr(n.Left, init)
|
|
walkexprlist(n.List.Slice(), init)
|
|
|
|
ll := ascompatte(n.Op, n, n.Isddd, t.Params(), n.List.Slice(), 0, init)
|
|
n.List.Set(reorder1(ll))
|
|
|
|
case OCALLMETH:
|
|
t := n.Left.Type
|
|
if n.List.Len() != 0 && n.List.First().Op == OAS {
|
|
break
|
|
}
|
|
n.Left = walkexpr(n.Left, init)
|
|
walkexprlist(n.List.Slice(), init)
|
|
ll := ascompatte(n.Op, n, false, t.Recvs(), []*Node{n.Left.Left}, 0, init)
|
|
lr := ascompatte(n.Op, n, n.Isddd, t.Params(), n.List.Slice(), 0, init)
|
|
ll = append(ll, lr...)
|
|
n.Left.Left = nil
|
|
ullmancalc(n.Left)
|
|
n.List.Set(reorder1(ll))
|
|
|
|
case OAS:
|
|
init.AppendNodes(&n.Ninit)
|
|
|
|
n.Left = walkexpr(n.Left, init)
|
|
n.Left = safeexpr(n.Left, init)
|
|
|
|
if oaslit(n, init) {
|
|
break
|
|
}
|
|
|
|
if n.Right == nil {
|
|
// TODO(austin): Check all "implicit zeroing"
|
|
break
|
|
}
|
|
|
|
if !instrumenting && iszero(n.Right) && !needwritebarrier(n.Left, n.Right) {
|
|
break
|
|
}
|
|
|
|
switch n.Right.Op {
|
|
default:
|
|
n.Right = walkexpr(n.Right, init)
|
|
|
|
case ORECV:
|
|
// x = <-c; n.Left is x, n.Right.Left is c.
|
|
// orderstmt made sure x is addressable.
|
|
n.Right.Left = walkexpr(n.Right.Left, init)
|
|
|
|
n1 := nod(OADDR, n.Left, nil)
|
|
r := n.Right.Left // the channel
|
|
n = mkcall1(chanfn("chanrecv1", 2, r.Type), nil, init, typename(r.Type), r, n1)
|
|
n = walkexpr(n, init)
|
|
break opswitch
|
|
|
|
case OAPPEND:
|
|
// x = append(...)
|
|
r := n.Right
|
|
if r.Type.Elem().NotInHeap {
|
|
yyerror("%v is go:notinheap; heap allocation disallowed", r.Type.Elem())
|
|
}
|
|
if r.Isddd {
|
|
r = appendslice(r, init) // also works for append(slice, string).
|
|
} else {
|
|
r = walkappend(r, init, n)
|
|
}
|
|
n.Right = r
|
|
if r.Op == OAPPEND {
|
|
// Left in place for back end.
|
|
// Do not add a new write barrier.
|
|
break opswitch
|
|
}
|
|
// Otherwise, lowered for race detector.
|
|
// Treat as ordinary assignment.
|
|
}
|
|
|
|
if n.Left != nil && n.Right != nil {
|
|
static := n.IsStatic
|
|
n = convas(n, init)
|
|
n.IsStatic = static
|
|
n = applywritebarrier(n)
|
|
}
|
|
|
|
case OAS2:
|
|
init.AppendNodes(&n.Ninit)
|
|
walkexprlistsafe(n.List.Slice(), init)
|
|
walkexprlistsafe(n.Rlist.Slice(), init)
|
|
ll := ascompatee(OAS, n.List.Slice(), n.Rlist.Slice(), init)
|
|
ll = reorder3(ll)
|
|
for i, n := range ll {
|
|
ll[i] = applywritebarrier(n)
|
|
}
|
|
n = liststmt(ll)
|
|
|
|
// a,b,... = fn()
|
|
case OAS2FUNC:
|
|
init.AppendNodes(&n.Ninit)
|
|
|
|
r := n.Rlist.First()
|
|
walkexprlistsafe(n.List.Slice(), init)
|
|
r = walkexpr(r, init)
|
|
|
|
if isIntrinsicCall(r) {
|
|
n.Rlist.Set1(r)
|
|
break
|
|
}
|
|
init.Append(r)
|
|
|
|
ll := ascompatet(n.Op, n.List, r.Type)
|
|
for i, n := range ll {
|
|
ll[i] = applywritebarrier(n)
|
|
}
|
|
n = liststmt(ll)
|
|
|
|
// x, y = <-c
|
|
// orderstmt made sure x is addressable.
|
|
case OAS2RECV:
|
|
init.AppendNodes(&n.Ninit)
|
|
|
|
r := n.Rlist.First()
|
|
walkexprlistsafe(n.List.Slice(), init)
|
|
r.Left = walkexpr(r.Left, init)
|
|
var n1 *Node
|
|
if isblank(n.List.First()) {
|
|
n1 = nodnil()
|
|
} else {
|
|
n1 = nod(OADDR, n.List.First(), nil)
|
|
}
|
|
n1.Etype = 1 // addr does not escape
|
|
fn := chanfn("chanrecv2", 2, r.Left.Type)
|
|
ok := n.List.Second()
|
|
call := mkcall1(fn, ok.Type, init, typename(r.Left.Type), r.Left, n1)
|
|
n = nod(OAS, ok, call)
|
|
n = typecheck(n, Etop)
|
|
|
|
// a,b = m[i];
|
|
case OAS2MAPR:
|
|
init.AppendNodes(&n.Ninit)
|
|
|
|
r := n.Rlist.First()
|
|
walkexprlistsafe(n.List.Slice(), init)
|
|
r.Left = walkexpr(r.Left, init)
|
|
r.Right = walkexpr(r.Right, init)
|
|
t := r.Left.Type
|
|
|
|
_, p := mapaccessfast(t)
|
|
var key *Node
|
|
if p != "" {
|
|
// fast versions take key by value
|
|
key = r.Right
|
|
} else {
|
|
// standard version takes key by reference
|
|
// orderexpr made sure key is addressable.
|
|
key = nod(OADDR, r.Right, nil)
|
|
p = "mapaccess2"
|
|
}
|
|
|
|
// from:
|
|
// a,b = m[i]
|
|
// to:
|
|
// var,b = mapaccess2*(t, m, i)
|
|
// a = *var
|
|
a := n.List.First()
|
|
|
|
if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero
|
|
fn := mapfn(p, t)
|
|
r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key)
|
|
} else {
|
|
fn := mapfn("mapaccess2_fat", t)
|
|
z := zeroaddr(w)
|
|
r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key, z)
|
|
}
|
|
|
|
// mapaccess2* returns a typed bool, but due to spec changes,
|
|
// the boolean result of i.(T) is now untyped so we make it the
|
|
// same type as the variable on the lhs.
|
|
if ok := n.List.Second(); !isblank(ok) && ok.Type.IsBoolean() {
|
|
r.Type.Field(1).Type = ok.Type
|
|
}
|
|
n.Rlist.Set1(r)
|
|
n.Op = OAS2FUNC
|
|
|
|
// don't generate a = *var if a is _
|
|
if !isblank(a) {
|
|
var_ := temp(ptrto(t.Val()))
|
|
var_.Typecheck = 1
|
|
var_.NonNil = true // mapaccess always returns a non-nil pointer
|
|
n.List.SetIndex(0, var_)
|
|
n = walkexpr(n, init)
|
|
init.Append(n)
|
|
n = nod(OAS, a, nod(OIND, var_, nil))
|
|
}
|
|
|
|
n = typecheck(n, Etop)
|
|
n = walkexpr(n, init)
|
|
|
|
case ODELETE:
|
|
init.AppendNodes(&n.Ninit)
|
|
map_ := n.List.First()
|
|
key := n.List.Second()
|
|
map_ = walkexpr(map_, init)
|
|
key = walkexpr(key, init)
|
|
|
|
// orderstmt made sure key is addressable.
|
|
key = nod(OADDR, key, nil)
|
|
|
|
t := map_.Type
|
|
n = mkcall1(mapfndel("mapdelete", t), nil, init, typename(t), map_, key)
|
|
|
|
case OAS2DOTTYPE:
|
|
walkexprlistsafe(n.List.Slice(), init)
|
|
e := n.Rlist.First() // i.(T)
|
|
e.Left = walkexpr(e.Left, init)
|
|
|
|
case ODOTTYPE, ODOTTYPE2:
|
|
n.Left = walkexpr(n.Left, init)
|
|
|
|
case OCONVIFACE:
|
|
n.Left = walkexpr(n.Left, init)
|
|
|
|
// Optimize convT2E or convT2I as a two-word copy when T is pointer-shaped.
|
|
if isdirectiface(n.Left.Type) {
|
|
var t *Node
|
|
if n.Type.IsEmptyInterface() {
|
|
t = typename(n.Left.Type)
|
|
} else {
|
|
t = itabname(n.Left.Type, n.Type)
|
|
}
|
|
l := nod(OEFACE, t, n.Left)
|
|
l.Type = n.Type
|
|
l.Typecheck = n.Typecheck
|
|
n = l
|
|
break
|
|
}
|
|
|
|
if staticbytes == nil {
|
|
staticbytes = newname(Pkglookup("staticbytes", Runtimepkg))
|
|
staticbytes.Class = PEXTERN
|
|
staticbytes.Type = typArray(Types[TUINT8], 256)
|
|
zerobase = newname(Pkglookup("zerobase", Runtimepkg))
|
|
zerobase.Class = PEXTERN
|
|
zerobase.Type = Types[TUINTPTR]
|
|
}
|
|
|
|
// Optimize convT2{E,I} for many cases in which T is not pointer-shaped,
|
|
// by using an existing addressable value identical to n.Left
|
|
// or creating one on the stack.
|
|
var value *Node
|
|
switch {
|
|
case n.Left.Type.Size() == 0:
|
|
// n.Left is zero-sized. Use zerobase.
|
|
value = zerobase
|
|
case n.Left.Type.IsBoolean() || (n.Left.Type.Size() == 1 && n.Left.Type.IsInteger()):
|
|
// n.Left is a bool/byte. Use staticbytes[n.Left].
|
|
value = nod(OINDEX, staticbytes, byteindex(n.Left))
|
|
value.Bounded = true
|
|
case n.Left.Class == PEXTERN && n.Left.Name != nil && n.Left.Name.Readonly:
|
|
// n.Left is a readonly global; use it directly.
|
|
value = n.Left
|
|
case !n.Left.Type.IsInterface() && n.Esc == EscNone && n.Left.Type.Width <= 1024:
|
|
// n.Left does not escape. Use a stack temporary initialized to n.Left.
|
|
value = temp(n.Left.Type)
|
|
init.Append(typecheck(nod(OAS, value, n.Left), Etop))
|
|
}
|
|
|
|
if value != nil {
|
|
// Value is identical to n.Left.
|
|
// Construct the interface directly: {type/itab, &value}.
|
|
var t *Node
|
|
if n.Type.IsEmptyInterface() {
|
|
t = typename(n.Left.Type)
|
|
} else {
|
|
t = itabname(n.Left.Type, n.Type)
|
|
}
|
|
l := nod(OEFACE, t, typecheck(nod(OADDR, value, nil), Erv))
|
|
l.Type = n.Type
|
|
l.Typecheck = n.Typecheck
|
|
n = l
|
|
break
|
|
}
|
|
|
|
// Implement interface to empty interface conversion.
|
|
// tmp = i.itab
|
|
// if tmp != nil {
|
|
// tmp = tmp.type
|
|
// }
|
|
// e = iface{tmp, i.data}
|
|
if n.Type.IsEmptyInterface() && n.Left.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() {
|
|
// Evaluate the input interface.
|
|
c := temp(n.Left.Type)
|
|
init.Append(nod(OAS, c, n.Left))
|
|
|
|
// Get the itab out of the interface.
|
|
tmp := temp(ptrto(Types[TUINT8]))
|
|
init.Append(nod(OAS, tmp, typecheck(nod(OITAB, c, nil), Erv)))
|
|
|
|
// Get the type out of the itab.
|
|
nif := nod(OIF, typecheck(nod(ONE, tmp, nodnil()), Erv), nil)
|
|
nif.Nbody.Set1(nod(OAS, tmp, itabType(tmp)))
|
|
init.Append(nif)
|
|
|
|
// Build the result.
|
|
e := nod(OEFACE, tmp, ifaceData(c, ptrto(Types[TUINT8])))
|
|
e.Type = n.Type // assign type manually, typecheck doesn't understand OEFACE.
|
|
e.Typecheck = 1
|
|
n = e
|
|
break
|
|
}
|
|
|
|
var ll []*Node
|
|
if n.Type.IsEmptyInterface() {
|
|
if !n.Left.Type.IsInterface() {
|
|
ll = append(ll, typename(n.Left.Type))
|
|
}
|
|
} else {
|
|
if n.Left.Type.IsInterface() {
|
|
ll = append(ll, typename(n.Type))
|
|
} else {
|
|
ll = append(ll, itabname(n.Left.Type, n.Type))
|
|
}
|
|
}
|
|
|
|
if n.Left.Type.IsInterface() {
|
|
ll = append(ll, n.Left)
|
|
} else {
|
|
// regular types are passed by reference to avoid C vararg calls
|
|
// orderexpr arranged for n.Left to be a temporary for all
|
|
// the conversions it could see. comparison of an interface
|
|
// with a non-interface, especially in a switch on interface value
|
|
// with non-interface cases, is not visible to orderstmt, so we
|
|
// have to fall back on allocating a temp here.
|
|
if islvalue(n.Left) {
|
|
ll = append(ll, nod(OADDR, n.Left, nil))
|
|
} else {
|
|
ll = append(ll, nod(OADDR, copyexpr(n.Left, n.Left.Type, init), nil))
|
|
}
|
|
dowidth(n.Left.Type)
|
|
}
|
|
|
|
fn := syslook(convFuncName(n.Left.Type, n.Type))
|
|
fn = substArgTypes(fn, n.Left.Type, n.Type)
|
|
dowidth(fn.Type)
|
|
n = nod(OCALL, fn, nil)
|
|
n.List.Set(ll)
|
|
n = typecheck(n, Erv)
|
|
n = walkexpr(n, init)
|
|
|
|
case OCONV, OCONVNOP:
|
|
if Thearch.LinkArch.Family == sys.ARM || Thearch.LinkArch.Family == sys.MIPS {
|
|
if n.Left.Type.IsFloat() {
|
|
if n.Type.Etype == TINT64 {
|
|
n = mkcall("float64toint64", n.Type, init, conv(n.Left, Types[TFLOAT64]))
|
|
break
|
|
}
|
|
|
|
if n.Type.Etype == TUINT64 {
|
|
n = mkcall("float64touint64", n.Type, init, conv(n.Left, Types[TFLOAT64]))
|
|
break
|
|
}
|
|
}
|
|
|
|
if n.Type.IsFloat() {
|
|
if n.Left.Type.Etype == TINT64 {
|
|
n = conv(mkcall("int64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TINT64])), n.Type)
|
|
break
|
|
}
|
|
|
|
if n.Left.Type.Etype == TUINT64 {
|
|
n = conv(mkcall("uint64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT64])), n.Type)
|
|
break
|
|
}
|
|
}
|
|
}
|
|
|
|
if Thearch.LinkArch.Family == sys.I386 {
|
|
if n.Left.Type.IsFloat() {
|
|
if n.Type.Etype == TINT64 {
|
|
n = mkcall("float64toint64", n.Type, init, conv(n.Left, Types[TFLOAT64]))
|
|
break
|
|
}
|
|
|
|
if n.Type.Etype == TUINT64 {
|
|
n = mkcall("float64touint64", n.Type, init, conv(n.Left, Types[TFLOAT64]))
|
|
break
|
|
}
|
|
if n.Type.Etype == TUINT32 || n.Type.Etype == TUINT || n.Type.Etype == TUINTPTR {
|
|
n = mkcall("float64touint32", n.Type, init, conv(n.Left, Types[TFLOAT64]))
|
|
break
|
|
}
|
|
}
|
|
if n.Type.IsFloat() {
|
|
if n.Left.Type.Etype == TINT64 {
|
|
n = conv(mkcall("int64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TINT64])), n.Type)
|
|
break
|
|
}
|
|
|
|
if n.Left.Type.Etype == TUINT64 {
|
|
n = conv(mkcall("uint64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT64])), n.Type)
|
|
break
|
|
}
|
|
if n.Left.Type.Etype == TUINT32 || n.Left.Type.Etype == TUINT || n.Left.Type.Etype == TUINTPTR {
|
|
n = conv(mkcall("uint32tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT32])), n.Type)
|
|
break
|
|
}
|
|
}
|
|
}
|
|
|
|
n.Left = walkexpr(n.Left, init)
|
|
|
|
case OANDNOT:
|
|
n.Left = walkexpr(n.Left, init)
|
|
n.Op = OAND
|
|
n.Right = nod(OCOM, n.Right, nil)
|
|
n.Right = typecheck(n.Right, Erv)
|
|
n.Right = walkexpr(n.Right, init)
|
|
|
|
case ODIV, OMOD:
|
|
n.Left = walkexpr(n.Left, init)
|
|
n.Right = walkexpr(n.Right, init)
|
|
|
|
// rewrite complex div into function call.
|
|
et := n.Left.Type.Etype
|
|
|
|
if isComplex[et] && n.Op == ODIV {
|
|
t := n.Type
|
|
n = mkcall("complex128div", Types[TCOMPLEX128], init, conv(n.Left, Types[TCOMPLEX128]), conv(n.Right, Types[TCOMPLEX128]))
|
|
n = conv(n, t)
|
|
break
|
|
}
|
|
|
|
// Nothing to do for float divisions.
|
|
if isFloat[et] {
|
|
break
|
|
}
|
|
|
|
// Try rewriting as shifts or magic multiplies.
|
|
n = walkdiv(n, init)
|
|
|
|
// rewrite 64-bit div and mod into function calls
|
|
// on 32-bit architectures.
|
|
switch n.Op {
|
|
case OMOD, ODIV:
|
|
if Widthreg >= 8 || (et != TUINT64 && et != TINT64) {
|
|
break opswitch
|
|
}
|
|
var fn string
|
|
if et == TINT64 {
|
|
fn = "int64"
|
|
} else {
|
|
fn = "uint64"
|
|
}
|
|
if n.Op == ODIV {
|
|
fn += "div"
|
|
} else {
|
|
fn += "mod"
|
|
}
|
|
n = mkcall(fn, n.Type, init, conv(n.Left, Types[et]), conv(n.Right, Types[et]))
|
|
}
|
|
|
|
case OINDEX:
|
|
n.Left = walkexpr(n.Left, init)
|
|
|
|
// save the original node for bounds checking elision.
|
|
// If it was an ODIV/OMOD, walk might rewrite it.
|
|
r := n.Right
|
|
|
|
n.Right = walkexpr(n.Right, init)
|
|
|
|
// if range of type cannot exceed static array bound,
|
|
// disable bounds check.
|
|
if n.Bounded {
|
|
break
|
|
}
|
|
t := n.Left.Type
|
|
if t != nil && t.IsPtr() {
|
|
t = t.Elem()
|
|
}
|
|
if t.IsArray() {
|
|
n.Bounded = bounded(r, t.NumElem())
|
|
if Debug['m'] != 0 && n.Bounded && !Isconst(n.Right, CTINT) {
|
|
Warn("index bounds check elided")
|
|
}
|
|
if smallintconst(n.Right) && !n.Bounded {
|
|
yyerror("index out of bounds")
|
|
}
|
|
} else if Isconst(n.Left, CTSTR) {
|
|
n.Bounded = bounded(r, int64(len(n.Left.Val().U.(string))))
|
|
if Debug['m'] != 0 && n.Bounded && !Isconst(n.Right, CTINT) {
|
|
Warn("index bounds check elided")
|
|
}
|
|
if smallintconst(n.Right) && !n.Bounded {
|
|
yyerror("index out of bounds")
|
|
}
|
|
}
|
|
|
|
if Isconst(n.Right, CTINT) {
|
|
if n.Right.Val().U.(*Mpint).CmpInt64(0) < 0 || n.Right.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 {
|
|
yyerror("index out of bounds")
|
|
}
|
|
}
|
|
|
|
case OINDEXMAP:
|
|
// Replace m[k] with *map{access1,assign}(maptype, m, &k)
|
|
n.Left = walkexpr(n.Left, init)
|
|
n.Right = walkexpr(n.Right, init)
|
|
map_ := n.Left
|
|
key := n.Right
|
|
t := map_.Type
|
|
if n.Etype == 1 {
|
|
// This m[k] expression is on the left-hand side of an assignment.
|
|
// orderexpr made sure key is addressable.
|
|
key = nod(OADDR, key, nil)
|
|
n = mkcall1(mapfn("mapassign", t), nil, init, typename(t), map_, key)
|
|
} else {
|
|
// m[k] is not the target of an assignment.
|
|
p, _ := mapaccessfast(t)
|
|
if p == "" {
|
|
// standard version takes key by reference.
|
|
// orderexpr made sure key is addressable.
|
|
key = nod(OADDR, key, nil)
|
|
p = "mapaccess1"
|
|
}
|
|
|
|
if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero
|
|
n = mkcall1(mapfn(p, t), ptrto(t.Val()), init, typename(t), map_, key)
|
|
} else {
|
|
p = "mapaccess1_fat"
|
|
z := zeroaddr(w)
|
|
n = mkcall1(mapfn(p, t), ptrto(t.Val()), init, typename(t), map_, key, z)
|
|
}
|
|
}
|
|
n.Type = ptrto(t.Val())
|
|
n.NonNil = true // mapaccess1* and mapassign always return non-nil pointers.
|
|
n = nod(OIND, n, nil)
|
|
n.Type = t.Val()
|
|
n.Typecheck = 1
|
|
|
|
case ORECV:
|
|
Fatalf("walkexpr ORECV") // should see inside OAS only
|
|
|
|
case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR:
|
|
n.Left = walkexpr(n.Left, init)
|
|
low, high, max := n.SliceBounds()
|
|
low = walkexpr(low, init)
|
|
if low != nil && iszero(low) {
|
|
// Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
|
|
low = nil
|
|
}
|
|
high = walkexpr(high, init)
|
|
max = walkexpr(max, init)
|
|
n.SetSliceBounds(low, high, max)
|
|
if n.Op.IsSlice3() {
|
|
if max != nil && max.Op == OCAP && samesafeexpr(n.Left, max.Left) {
|
|
// Reduce x[i:j:cap(x)] to x[i:j].
|
|
if n.Op == OSLICE3 {
|
|
n.Op = OSLICE
|
|
} else {
|
|
n.Op = OSLICEARR
|
|
}
|
|
n = reduceSlice(n)
|
|
}
|
|
} else {
|
|
n = reduceSlice(n)
|
|
}
|
|
|
|
case OADDR:
|
|
n.Left = walkexpr(n.Left, init)
|
|
|
|
case ONEW:
|
|
if n.Esc == EscNone {
|
|
if n.Type.Elem().Width >= 1<<16 {
|
|
Fatalf("large ONEW with EscNone: %v", n)
|
|
}
|
|
r := temp(n.Type.Elem())
|
|
r = nod(OAS, r, nil) // zero temp
|
|
r = typecheck(r, Etop)
|
|
init.Append(r)
|
|
r = nod(OADDR, r.Left, nil)
|
|
r = typecheck(r, Erv)
|
|
n = r
|
|
} else {
|
|
n = callnew(n.Type.Elem())
|
|
}
|
|
|
|
case OCMPSTR:
|
|
// s + "badgerbadgerbadger" == "badgerbadgerbadger"
|
|
if (Op(n.Etype) == OEQ || Op(n.Etype) == ONE) && Isconst(n.Right, CTSTR) && n.Left.Op == OADDSTR && n.Left.List.Len() == 2 && Isconst(n.Left.List.Second(), CTSTR) && strlit(n.Right) == strlit(n.Left.List.Second()) {
|
|
// TODO(marvin): Fix Node.EType type union.
|
|
r := nod(Op(n.Etype), nod(OLEN, n.Left.List.First(), nil), nodintconst(0))
|
|
r = typecheck(r, Erv)
|
|
r = walkexpr(r, init)
|
|
r.Type = n.Type
|
|
n = r
|
|
break
|
|
}
|
|
|
|
// Rewrite comparisons to short constant strings as length+byte-wise comparisons.
|
|
var cs, ncs *Node // const string, non-const string
|
|
switch {
|
|
case Isconst(n.Left, CTSTR) && Isconst(n.Right, CTSTR):
|
|
// ignore; will be constant evaluated
|
|
case Isconst(n.Left, CTSTR):
|
|
cs = n.Left
|
|
ncs = n.Right
|
|
case Isconst(n.Right, CTSTR):
|
|
cs = n.Right
|
|
ncs = n.Left
|
|
}
|
|
if cs != nil {
|
|
cmp := Op(n.Etype)
|
|
// maxRewriteLen was chosen empirically.
|
|
// It is the value that minimizes cmd/go file size
|
|
// across most architectures.
|
|
// See the commit description for CL 26758 for details.
|
|
maxRewriteLen := 6
|
|
var and Op
|
|
switch cmp {
|
|
case OEQ:
|
|
and = OANDAND
|
|
case ONE:
|
|
and = OOROR
|
|
default:
|
|
// Don't do byte-wise comparisons for <, <=, etc.
|
|
// They're fairly complicated.
|
|
// Length-only checks are ok, though.
|
|
maxRewriteLen = 0
|
|
}
|
|
if s := cs.Val().U.(string); len(s) <= maxRewriteLen {
|
|
if len(s) > 0 {
|
|
ncs = safeexpr(ncs, init)
|
|
}
|
|
// TODO(marvin): Fix Node.EType type union.
|
|
r := nod(cmp, nod(OLEN, ncs, nil), nodintconst(int64(len(s))))
|
|
for i := 0; i < len(s); i++ {
|
|
cb := nodintconst(int64(s[i]))
|
|
ncb := nod(OINDEX, ncs, nodintconst(int64(i)))
|
|
r = nod(and, r, nod(cmp, ncb, cb))
|
|
}
|
|
r = typecheck(r, Erv)
|
|
r = walkexpr(r, init)
|
|
r.Type = n.Type
|
|
n = r
|
|
break
|
|
}
|
|
}
|
|
|
|
var r *Node
|
|
// TODO(marvin): Fix Node.EType type union.
|
|
if Op(n.Etype) == OEQ || Op(n.Etype) == ONE {
|
|
// prepare for rewrite below
|
|
n.Left = cheapexpr(n.Left, init)
|
|
n.Right = cheapexpr(n.Right, init)
|
|
|
|
r = mkcall("eqstring", Types[TBOOL], init, conv(n.Left, Types[TSTRING]), conv(n.Right, Types[TSTRING]))
|
|
|
|
// quick check of len before full compare for == or !=
|
|
// eqstring assumes that the lengths are equal
|
|
// TODO(marvin): Fix Node.EType type union.
|
|
if Op(n.Etype) == OEQ {
|
|
// len(left) == len(right) && eqstring(left, right)
|
|
r = nod(OANDAND, nod(OEQ, nod(OLEN, n.Left, nil), nod(OLEN, n.Right, nil)), r)
|
|
} else {
|
|
// len(left) != len(right) || !eqstring(left, right)
|
|
r = nod(ONOT, r, nil)
|
|
r = nod(OOROR, nod(ONE, nod(OLEN, n.Left, nil), nod(OLEN, n.Right, nil)), r)
|
|
}
|
|
|
|
r = typecheck(r, Erv)
|
|
r = walkexpr(r, nil)
|
|
} else {
|
|
// sys_cmpstring(s1, s2) :: 0
|
|
r = mkcall("cmpstring", Types[TINT], init, conv(n.Left, Types[TSTRING]), conv(n.Right, Types[TSTRING]))
|
|
// TODO(marvin): Fix Node.EType type union.
|
|
r = nod(Op(n.Etype), r, nodintconst(0))
|
|
}
|
|
|
|
r = typecheck(r, Erv)
|
|
if !n.Type.IsBoolean() {
|
|
Fatalf("cmp %v", n.Type)
|
|
}
|
|
r.Type = n.Type
|
|
n = r
|
|
|
|
case OADDSTR:
|
|
n = addstr(n, init)
|
|
|
|
case OAPPEND:
|
|
// order should make sure we only see OAS(node, OAPPEND), which we handle above.
|
|
Fatalf("append outside assignment")
|
|
|
|
case OCOPY:
|
|
n = copyany(n, init, instrumenting && !compiling_runtime)
|
|
|
|
// cannot use chanfn - closechan takes any, not chan any
|
|
case OCLOSE:
|
|
fn := syslook("closechan")
|
|
|
|
fn = substArgTypes(fn, n.Left.Type)
|
|
n = mkcall1(fn, nil, init, n.Left)
|
|
|
|
case OMAKECHAN:
|
|
n = mkcall1(chanfn("makechan", 1, n.Type), n.Type, init, typename(n.Type), conv(n.Left, Types[TINT64]))
|
|
|
|
case OMAKEMAP:
|
|
t := n.Type
|
|
|
|
a := nodnil() // hmap buffer
|
|
r := nodnil() // bucket buffer
|
|
if n.Esc == EscNone {
|
|
// Allocate hmap buffer on stack.
|
|
var_ := temp(hmap(t))
|
|
|
|
a = nod(OAS, var_, nil) // zero temp
|
|
a = typecheck(a, Etop)
|
|
init.Append(a)
|
|
a = nod(OADDR, var_, nil)
|
|
|
|
// Allocate one bucket on stack.
|
|
// Maximum key/value size is 128 bytes, larger objects
|
|
// are stored with an indirection. So max bucket size is 2048+eps.
|
|
var_ = temp(mapbucket(t))
|
|
|
|
r = nod(OAS, var_, nil) // zero temp
|
|
r = typecheck(r, Etop)
|
|
init.Append(r)
|
|
r = nod(OADDR, var_, nil)
|
|
}
|
|
|
|
fn := syslook("makemap")
|
|
fn = substArgTypes(fn, hmap(t), mapbucket(t), t.Key(), t.Val())
|
|
n = mkcall1(fn, n.Type, init, typename(n.Type), conv(n.Left, Types[TINT64]), a, r)
|
|
|
|
case OMAKESLICE:
|
|
l := n.Left
|
|
r := n.Right
|
|
if r == nil {
|
|
r = safeexpr(l, init)
|
|
l = r
|
|
}
|
|
t := n.Type
|
|
if n.Esc == EscNone {
|
|
if !isSmallMakeSlice(n) {
|
|
Fatalf("non-small OMAKESLICE with EscNone: %v", n)
|
|
}
|
|
// var arr [r]T
|
|
// n = arr[:l]
|
|
t = typArray(t.Elem(), nonnegintconst(r)) // [r]T
|
|
var_ := temp(t)
|
|
a := nod(OAS, var_, nil) // zero temp
|
|
a = typecheck(a, Etop)
|
|
init.Append(a)
|
|
r := nod(OSLICE, var_, nil) // arr[:l]
|
|
r.SetSliceBounds(nil, l, nil)
|
|
r = conv(r, n.Type) // in case n.Type is named.
|
|
r = typecheck(r, Erv)
|
|
r = walkexpr(r, init)
|
|
n = r
|
|
} else {
|
|
// n escapes; set up a call to makeslice.
|
|
// When len and cap can fit into int, use makeslice instead of
|
|
// makeslice64, which is faster and shorter on 32 bit platforms.
|
|
|
|
if t.Elem().NotInHeap {
|
|
yyerror("%v is go:notinheap; heap allocation disallowed", t.Elem())
|
|
}
|
|
|
|
len, cap := l, r
|
|
|
|
fnname := "makeslice64"
|
|
argtype := Types[TINT64]
|
|
|
|
// typechecking guarantees that TIDEAL len/cap are positive and fit in an int.
|
|
// The case of len or cap overflow when converting TUINT or TUINTPTR to TINT
|
|
// will be handled by the negative range checks in makeslice during runtime.
|
|
if (len.Type.IsKind(TIDEAL) || maxintval[len.Type.Etype].Cmp(maxintval[TUINT]) <= 0) &&
|
|
(cap.Type.IsKind(TIDEAL) || maxintval[cap.Type.Etype].Cmp(maxintval[TUINT]) <= 0) {
|
|
fnname = "makeslice"
|
|
argtype = Types[TINT]
|
|
}
|
|
|
|
fn := syslook(fnname)
|
|
fn = substArgTypes(fn, t.Elem()) // any-1
|
|
n = mkcall1(fn, t, init, typename(t.Elem()), conv(len, argtype), conv(cap, argtype))
|
|
}
|
|
|
|
case ORUNESTR:
|
|
a := nodnil()
|
|
if n.Esc == EscNone {
|
|
t := typArray(Types[TUINT8], 4)
|
|
var_ := temp(t)
|
|
a = nod(OADDR, var_, nil)
|
|
}
|
|
|
|
// intstring(*[4]byte, rune)
|
|
n = mkcall("intstring", n.Type, init, a, conv(n.Left, Types[TINT64]))
|
|
|
|
case OARRAYBYTESTR:
|
|
a := nodnil()
|
|
if n.Esc == EscNone {
|
|
// Create temporary buffer for string on stack.
|
|
t := typArray(Types[TUINT8], tmpstringbufsize)
|
|
|
|
a = nod(OADDR, temp(t), nil)
|
|
}
|
|
|
|
// slicebytetostring(*[32]byte, []byte) string;
|
|
n = mkcall("slicebytetostring", n.Type, init, a, n.Left)
|
|
|
|
// slicebytetostringtmp([]byte) string;
|
|
case OARRAYBYTESTRTMP:
|
|
n.Left = walkexpr(n.Left, init)
|
|
|
|
if !instrumenting {
|
|
// Let the backend handle OARRAYBYTESTRTMP directly
|
|
// to avoid a function call to slicebytetostringtmp.
|
|
break
|
|
}
|
|
|
|
n = mkcall("slicebytetostringtmp", n.Type, init, n.Left)
|
|
|
|
// slicerunetostring(*[32]byte, []rune) string;
|
|
case OARRAYRUNESTR:
|
|
a := nodnil()
|
|
|
|
if n.Esc == EscNone {
|
|
// Create temporary buffer for string on stack.
|
|
t := typArray(Types[TUINT8], tmpstringbufsize)
|
|
|
|
a = nod(OADDR, temp(t), nil)
|
|
}
|
|
|
|
n = mkcall("slicerunetostring", n.Type, init, a, n.Left)
|
|
|
|
// stringtoslicebyte(*[32]byte, string) []byte;
|
|
case OSTRARRAYBYTE:
|
|
a := nodnil()
|
|
|
|
if n.Esc == EscNone {
|
|
// Create temporary buffer for slice on stack.
|
|
t := typArray(Types[TUINT8], tmpstringbufsize)
|
|
|
|
a = nod(OADDR, temp(t), nil)
|
|
}
|
|
|
|
n = mkcall("stringtoslicebyte", n.Type, init, a, conv(n.Left, Types[TSTRING]))
|
|
|
|
case OSTRARRAYBYTETMP:
|
|
// []byte(string) conversion that creates a slice
|
|
// referring to the actual string bytes.
|
|
// This conversion is handled later by the backend and
|
|
// is only for use by internal compiler optimizations
|
|
// that know that the slice won't be mutated.
|
|
// The only such case today is:
|
|
// for i, c := range []byte(string)
|
|
n.Left = walkexpr(n.Left, init)
|
|
|
|
// stringtoslicerune(*[32]rune, string) []rune
|
|
case OSTRARRAYRUNE:
|
|
a := nodnil()
|
|
|
|
if n.Esc == EscNone {
|
|
// Create temporary buffer for slice on stack.
|
|
t := typArray(Types[TINT32], tmpstringbufsize)
|
|
|
|
a = nod(OADDR, temp(t), nil)
|
|
}
|
|
|
|
n = mkcall("stringtoslicerune", n.Type, init, a, n.Left)
|
|
|
|
// ifaceeq(i1 any-1, i2 any-2) (ret bool);
|
|
case OCMPIFACE:
|
|
if !eqtype(n.Left.Type, n.Right.Type) {
|
|
Fatalf("ifaceeq %v %v %v", n.Op, n.Left.Type, n.Right.Type)
|
|
}
|
|
var fn *Node
|
|
if n.Left.Type.IsEmptyInterface() {
|
|
fn = syslook("efaceeq")
|
|
} else {
|
|
fn = syslook("ifaceeq")
|
|
}
|
|
|
|
n.Right = cheapexpr(n.Right, init)
|
|
n.Left = cheapexpr(n.Left, init)
|
|
fn = substArgTypes(fn, n.Right.Type, n.Left.Type)
|
|
r := mkcall1(fn, n.Type, init, n.Left, n.Right)
|
|
// TODO(marvin): Fix Node.EType type union.
|
|
if Op(n.Etype) == ONE {
|
|
r = nod(ONOT, r, nil)
|
|
}
|
|
|
|
// check itable/type before full compare.
|
|
// TODO(marvin): Fix Node.EType type union.
|
|
if Op(n.Etype) == OEQ {
|
|
r = nod(OANDAND, nod(OEQ, nod(OITAB, n.Left, nil), nod(OITAB, n.Right, nil)), r)
|
|
} else {
|
|
r = nod(OOROR, nod(ONE, nod(OITAB, n.Left, nil), nod(OITAB, n.Right, nil)), r)
|
|
}
|
|
r = typecheck(r, Erv)
|
|
r = walkexpr(r, init)
|
|
r.Type = n.Type
|
|
n = r
|
|
|
|
case OARRAYLIT, OSLICELIT, OMAPLIT, OSTRUCTLIT, OPTRLIT:
|
|
if n.Op == OSTRUCTLIT && iszero(n) && !instrumenting { // TODO: SSA doesn't yet handle ARRAYLIT with length > 1
|
|
break
|
|
}
|
|
if isStaticCompositeLiteral(n) {
|
|
// n can be directly represented in the read-only data section.
|
|
// Make direct reference to the static data. See issue 12841.
|
|
vstat := staticname(n.Type)
|
|
vstat.Name.Readonly = true
|
|
fixedlit(inInitFunction, initKindStatic, n, vstat, init)
|
|
n = vstat
|
|
n = typecheck(n, Erv)
|
|
break
|
|
}
|
|
var_ := temp(n.Type)
|
|
anylit(n, var_, init)
|
|
n = var_
|
|
|
|
case OSEND:
|
|
n1 := n.Right
|
|
n1 = assignconv(n1, n.Left.Type.Elem(), "chan send")
|
|
n1 = walkexpr(n1, init)
|
|
n1 = nod(OADDR, n1, nil)
|
|
n = mkcall1(chanfn("chansend1", 2, n.Left.Type), nil, init, typename(n.Left.Type), n.Left, n1)
|
|
|
|
case OCLOSURE:
|
|
n = walkclosure(n, init)
|
|
|
|
case OCALLPART:
|
|
n = walkpartialcall(n, init)
|
|
}
|
|
|
|
// Expressions that are constant at run time but not
|
|
// considered const by the language spec are not turned into
|
|
// constants until walk. For example, if n is y%1 == 0, the
|
|
// walk of y%1 may have replaced it by 0.
|
|
// Check whether n with its updated args is itself now a constant.
|
|
t := n.Type
|
|
|
|
evconst(n)
|
|
n.Type = t
|
|
if n.Op == OLITERAL {
|
|
n = typecheck(n, Erv)
|
|
}
|
|
|
|
ullmancalc(n)
|
|
|
|
if Debug['w'] != 0 && n != nil {
|
|
Dump("walk", n)
|
|
}
|
|
|
|
lineno = lno
|
|
return n
|
|
}
|
|
|
|
// TODO(josharian): combine this with its caller and simplify
|
|
func reduceSlice(n *Node) *Node {
|
|
low, high, max := n.SliceBounds()
|
|
if high != nil && high.Op == OLEN && samesafeexpr(n.Left, high.Left) {
|
|
// Reduce x[i:len(x)] to x[i:].
|
|
high = nil
|
|
}
|
|
n.SetSliceBounds(low, high, max)
|
|
if (n.Op == OSLICE || n.Op == OSLICESTR) && low == nil && high == nil {
|
|
// Reduce x[:] to x.
|
|
if Debug_slice > 0 {
|
|
Warn("slice: omit slice operation")
|
|
}
|
|
return n.Left
|
|
}
|
|
return n
|
|
}
|
|
|
|
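// ascompatee1 builds the single assignment l = r for ascompatee, leaving map
// assignments (OINDEXMAP on the left) unconverted so reorder3 can still process them.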
func ascompatee1(op Op, l *Node, r *Node, init *Nodes) *Node {
|
|
// convas will turn map assigns into function calls,
|
|
// making it impossible for reorder3 to work.
|
|
n := nod(OAS, l, r)
|
|
|
|
if l.Op == OINDEXMAP {
|
|
return n
|
|
}
|
|
|
|
return convas(n, init)
|
|
}
|
|
|
|
func ascompatee(op Op, nl, nr []*Node, init *Nodes) []*Node {
|
|
// check assign expression list to
|
|
// an expression list. called in
|
|
// expr-list = expr-list
|
|
|
|
// ensure order of evaluation for function calls
|
|
for i := range nl {
|
|
nl[i] = safeexpr(nl[i], init)
|
|
}
|
|
for i1 := range nr {
|
|
nr[i1] = safeexpr(nr[i1], init)
|
|
}
|
|
|
|
var nn []*Node
|
|
i := 0
|
|
for ; i < len(nl); i++ {
|
|
if i >= len(nr) {
|
|
break
|
|
}
|
|
// Do not generate 'x = x' during return. See issue 4014.
|
|
if op == ORETURN && samesafeexpr(nl[i], nr[i]) {
|
|
continue
|
|
}
|
|
nn = append(nn, ascompatee1(op, nl[i], nr[i], init))
|
|
}
|
|
|
|
// cannot happen: caller checked that lists had same length
|
|
if i < len(nl) || i < len(nr) {
|
|
var nln, nrn Nodes
|
|
nln.Set(nl)
|
|
nrn.Set(nr)
|
|
yyerror("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.Func.Nname.Sym.Name)
|
|
}
|
|
return nn
|
|
}
|
|
|
|
// l is an lv and rt is the type of an rv
|
|
// return true if this implies a function call
|
|
// evaluating the lv or a function call
|
|
// in the conversion of the types
|
|
func fncall(l *Node, rt *Type) bool {
|
|
if l.Ullman >= UINF || l.Op == OINDEXMAP {
|
|
return true
|
|
}
|
|
var r Node
|
|
if needwritebarrier(l, &r) {
|
|
return true
|
|
}
|
|
if eqtype(l.Type, rt) {
|
|
return false
|
|
}
|
|
return true
|
|
}
|
|
|
|
// check assign type list to
|
|
// an expression list. called in
|
|
// expr-list = func()
|
|
func ascompatet(op Op, nl Nodes, nr *Type) []*Node {
|
|
r, saver := iterFields(nr)
|
|
|
|
var nn, mm Nodes
|
|
var ullmanOverflow bool
|
|
var i int
|
|
for i = 0; i < nl.Len(); i++ {
|
|
if r == nil {
|
|
break
|
|
}
|
|
l := nl.Index(i)
|
|
if isblank(l) {
|
|
r = saver.Next()
|
|
continue
|
|
}
|
|
|
|
// any lv that causes a fn call must be
|
|
// deferred until all the return arguments
|
|
// have been pulled from the output arguments
|
|
if fncall(l, r.Type) {
|
|
tmp := temp(r.Type)
|
|
tmp = typecheck(tmp, Erv)
|
|
a := nod(OAS, l, tmp)
|
|
a = convas(a, &mm)
|
|
mm.Append(a)
|
|
l = tmp
|
|
}
|
|
|
|
a := nod(OAS, l, nodarg(r, 0))
|
|
a = convas(a, &nn)
|
|
ullmancalc(a)
|
|
if a.Ullman >= UINF {
|
|
Dump("ascompatet ucount", a)
|
|
ullmanOverflow = true
|
|
}
|
|
|
|
nn.Append(a)
|
|
r = saver.Next()
|
|
}
|
|
|
|
if i < nl.Len() || r != nil {
|
|
yyerror("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields())
|
|
}
|
|
|
|
if ullmanOverflow {
|
|
Fatalf("ascompatet: too many function calls evaluating parameters")
|
|
}
|
|
return append(nn.Slice(), mm.Slice()...)
|
|
}
|
|
|
|
// package all the arguments that match a ... T parameter into a []T.
|
|
func mkdotargslice(lr0, nn []*Node, l *Field, fp int, init *Nodes, ddd *Node) []*Node {
|
|
esc := uint16(EscUnknown)
|
|
if ddd != nil {
|
|
esc = ddd.Esc
|
|
}
|
|
|
|
tslice := typSlice(l.Type.Elem())
|
|
|
|
var n *Node
|
|
if len(lr0) == 0 {
|
|
n = nodnil()
|
|
n.Type = tslice
|
|
} else {
|
|
n = nod(OCOMPLIT, nil, typenod(tslice))
|
|
if ddd != nil && prealloc[ddd] != nil {
|
|
prealloc[n] = prealloc[ddd] // temporary to use
|
|
}
|
|
n.List.Set(lr0)
|
|
n.Esc = esc
|
|
n = typecheck(n, Erv)
|
|
if n.Type == nil {
|
|
Fatalf("mkdotargslice: typecheck failed")
|
|
}
|
|
n = walkexpr(n, init)
|
|
}
|
|
|
|
a := nod(OAS, nodarg(l, fp), n)
|
|
nn = append(nn, convas(a, init))
|
|
return nn
|
|
}
|
|
|
|
// helpers for shape errors
|
|
func dumptypes(nl *Type, what string) string {
|
|
s := ""
|
|
for _, l := range nl.Fields().Slice() {
|
|
if s != "" {
|
|
s += ", "
|
|
}
|
|
s += fldconv(l, 0)
|
|
}
|
|
if s == "" {
|
|
s = fmt.Sprintf("[no arguments %s]", what)
|
|
}
|
|
return s
|
|
}
|
|
|
|
func dumpnodetypes(l []*Node, what string) string {
|
|
s := ""
|
|
for _, r := range l {
|
|
if s != "" {
|
|
s += ", "
|
|
}
|
|
s += r.Type.String()
|
|
}
|
|
if s == "" {
|
|
s = fmt.Sprintf("[no arguments %s]", what)
|
|
}
|
|
return s
|
|
}
|
|
|
|
// check assign expression list to
|
|
// a type list. called in
|
|
// return expr-list
|
|
// func(expr-list)
|
|
func ascompatte(op Op, call *Node, isddd bool, nl *Type, lr []*Node, fp int, init *Nodes) []*Node {
|
|
lr0 := lr
|
|
l, savel := iterFields(nl)
|
|
var r *Node
|
|
if len(lr) > 0 {
|
|
r = lr[0]
|
|
}
|
|
var nn []*Node
|
|
|
|
// f(g()) where g has multiple return values
|
|
if r != nil && len(lr) <= 1 && r.Type.IsFuncArgStruct() {
|
|
// optimization - can do block copy
|
|
if eqtypenoname(r.Type, nl) {
|
|
arg := nodarg(nl, fp)
|
|
r = nod(OCONVNOP, r, nil)
|
|
r.Type = arg.Type
|
|
nn = []*Node{convas(nod(OAS, arg, r), init)}
|
|
goto ret
|
|
}
|
|
|
|
// conversions involved.
|
|
// copy into temporaries.
|
|
var alist []*Node
|
|
|
|
for _, l := range r.Type.Fields().Slice() {
|
|
tmp := temp(l.Type)
|
|
alist = append(alist, tmp)
|
|
}
|
|
|
|
a := nod(OAS2, nil, nil)
|
|
a.List.Set(alist)
|
|
a.Rlist.Set(lr)
|
|
a = typecheck(a, Etop)
|
|
a = walkstmt(a)
|
|
init.Append(a)
|
|
lr = alist
|
|
r = lr[0]
|
|
l, savel = iterFields(nl)
|
|
}
|
|
|
|
for {
|
|
if l != nil && l.Isddd {
|
|
// the ddd parameter must be last
|
|
ll := savel.Next()
|
|
|
|
if ll != nil {
|
|
yyerror("... must be last argument")
|
|
}
|
|
|
|
// special case --
|
|
// only if we are assigning a single ddd
|
|
// argument to a ddd parameter then it is
|
|
// passed through unencapsulated
|
|
if r != nil && len(lr) <= 1 && isddd && eqtype(l.Type, r.Type) {
|
|
a := nod(OAS, nodarg(l, fp), r)
|
|
a = convas(a, init)
|
|
nn = append(nn, a)
|
|
break
|
|
}
|
|
|
|
// normal case -- make a slice of all
|
|
// remaining arguments and pass it to
|
|
// the ddd parameter.
|
|
nn = mkdotargslice(lr, nn, l, fp, init, call.Right)
|
|
|
|
break
|
|
}
|
|
|
|
if l == nil || r == nil {
|
|
if l != nil || r != nil {
|
|
l1 := dumptypes(nl, "expected")
|
|
l2 := dumpnodetypes(lr0, "given")
|
|
if l != nil {
|
|
yyerror("not enough arguments to %v\n\t%s\n\t%s", op, l1, l2)
|
|
} else {
|
|
yyerror("too many arguments to %v\n\t%s\n\t%s", op, l1, l2)
|
|
}
|
|
}
|
|
|
|
break
|
|
}
|
|
|
|
a := nod(OAS, nodarg(l, fp), r)
|
|
a = convas(a, init)
|
|
nn = append(nn, a)
|
|
|
|
l = savel.Next()
|
|
r = nil
|
|
lr = lr[1:]
|
|
if len(lr) > 0 {
|
|
r = lr[0]
|
|
}
|
|
}
|
|
|
|
ret:
|
|
for _, n := range nn {
|
|
n.Typecheck = 1
|
|
}
|
|
return nn
|
|
}
|
|
|
|
// generate code for print
|
|
func walkprint(nn *Node, init *Nodes) *Node {
|
|
var r *Node
|
|
var n *Node
|
|
var on *Node
|
|
var t *Type
|
|
var et EType
|
|
|
|
op := nn.Op
|
|
all := nn.List
|
|
var calls []*Node
|
|
notfirst := false
|
|
|
|
// Hoist all the argument evaluation up before the lock.
|
|
walkexprlistcheap(all.Slice(), init)
|
|
|
|
calls = append(calls, mkcall("printlock", nil, init))
|
|
for i1, n1 := range all.Slice() {
|
|
if notfirst {
|
|
calls = append(calls, mkcall("printsp", nil, init))
|
|
}
|
|
|
|
notfirst = op == OPRINTN
|
|
|
|
n = n1
|
|
if n.Op == OLITERAL {
|
|
switch n.Val().Ctype() {
|
|
case CTRUNE:
|
|
n = defaultlit(n, runetype)
|
|
|
|
case CTINT:
|
|
n = defaultlit(n, Types[TINT64])
|
|
|
|
case CTFLT:
|
|
n = defaultlit(n, Types[TFLOAT64])
|
|
}
|
|
}
|
|
|
|
if n.Op != OLITERAL && n.Type != nil && n.Type.Etype == TIDEAL {
|
|
n = defaultlit(n, Types[TINT64])
|
|
}
|
|
n = defaultlit(n, nil)
|
|
all.SetIndex(i1, n)
|
|
if n.Type == nil || n.Type.Etype == TFORW {
|
|
continue
|
|
}
|
|
|
|
t = n.Type
|
|
et = n.Type.Etype
|
|
if n.Type.IsInterface() {
|
|
if n.Type.IsEmptyInterface() {
|
|
on = syslook("printeface")
|
|
} else {
|
|
on = syslook("printiface")
|
|
}
|
|
on = substArgTypes(on, n.Type) // any-1
|
|
} else if n.Type.IsPtr() || et == TCHAN || et == TMAP || et == TFUNC || et == TUNSAFEPTR {
|
|
on = syslook("printpointer")
|
|
on = substArgTypes(on, n.Type) // any-1
|
|
} else if n.Type.IsSlice() {
|
|
on = syslook("printslice")
|
|
on = substArgTypes(on, n.Type) // any-1
|
|
} else if isInt[et] {
|
|
if et == TUINT64 {
|
|
if (t.Sym.Pkg == Runtimepkg || compiling_runtime) && t.Sym.Name == "hex" {
|
|
on = syslook("printhex")
|
|
} else {
|
|
on = syslook("printuint")
|
|
}
|
|
} else {
|
|
on = syslook("printint")
|
|
}
|
|
} else if isFloat[et] {
|
|
on = syslook("printfloat")
|
|
} else if isComplex[et] {
|
|
on = syslook("printcomplex")
|
|
} else if et == TBOOL {
|
|
on = syslook("printbool")
|
|
} else if et == TSTRING {
|
|
on = syslook("printstring")
|
|
} else {
|
|
badtype(OPRINT, n.Type, nil)
|
|
continue
|
|
}
|
|
|
|
t = on.Type.Params().Field(0).Type
|
|
|
|
if !eqtype(t, n.Type) {
|
|
n = nod(OCONV, n, nil)
|
|
n.Type = t
|
|
}
|
|
|
|
r = nod(OCALL, on, nil)
|
|
r.List.Append(n)
|
|
calls = append(calls, r)
|
|
}
|
|
|
|
if op == OPRINTN {
|
|
calls = append(calls, mkcall("printnl", nil, nil))
|
|
}
|
|
|
|
calls = append(calls, mkcall("printunlock", nil, init))
|
|
|
|
typecheckslice(calls, Etop)
|
|
walkexprlist(calls, init)
|
|
|
|
r = nod(OEMPTY, nil, nil)
|
|
r = typecheck(r, Etop)
|
|
r = walkexpr(r, init)
|
|
r.Ninit.Set(calls)
|
|
return r
|
|
}
|
|
|
|
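// callnew returns a node for a call to runtime.newobject that allocates
// a zeroed value of type t and yields a non-nil *t.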
func callnew(t *Type) *Node {
|
|
if t.NotInHeap {
|
|
yyerror("%v is go:notinheap; heap allocation disallowed", t)
|
|
}
|
|
dowidth(t)
|
|
fn := syslook("newobject")
|
|
fn = substArgTypes(fn, t)
|
|
v := mkcall1(fn, ptrto(t), nil, typename(t))
|
|
v.NonNil = true
|
|
return v
|
|
}
|
|
|
|
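// iscallret reports whether n refers to the result slot of a function call,
// i.e. whether its outermost value is an OINDREGSP node.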
func iscallret(n *Node) bool {
|
|
n = outervalue(n)
|
|
return n.Op == OINDREGSP
|
|
}
|
|
|
|
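// isstack reports whether n names a value that lives on the stack
// (a local, parameter, result, or call argument/result slot).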
func isstack(n *Node) bool {
|
|
n = outervalue(n)
|
|
|
|
// If n is *autotmp and autotmp = &foo, replace n with foo.
|
|
// We introduce such temps when initializing struct literals.
|
|
if n.Op == OIND && n.Left.Op == ONAME && n.Left.IsAutoTmp() {
|
|
defn := n.Left.Name.Defn
|
|
if defn != nil && defn.Op == OAS && defn.Right.Op == OADDR {
|
|
n = defn.Right.Left
|
|
}
|
|
}
|
|
|
|
switch n.Op {
|
|
case OINDREGSP:
|
|
return true
|
|
|
|
case ONAME:
|
|
switch n.Class {
|
|
case PAUTO, PPARAM, PPARAMOUT:
|
|
return true
|
|
}
|
|
}
|
|
|
|
return false
|
|
}
|
|
|
|
// Do we need a write barrier for the assignment l = r?
|
|
func needwritebarrier(l *Node, r *Node) bool {
|
|
if !use_writebarrier {
|
|
return false
|
|
}
|
|
|
|
if l == nil || isblank(l) {
|
|
return false
|
|
}
|
|
|
|
// No write barrier for write of non-pointers.
|
|
dowidth(l.Type)
|
|
|
|
if !haspointers(l.Type) {
|
|
return false
|
|
}
|
|
|
|
// No write barrier for write to stack.
|
|
if isstack(l) {
|
|
return false
|
|
}
|
|
|
|
// No write barrier if this is a pointer to a go:notinheap
|
|
// type, since the write barrier's inheap(ptr) check will fail.
|
|
if l.Type.IsPtr() && l.Type.Elem().NotInHeap {
|
|
return false
|
|
}
|
|
|
|
// Implicit zeroing is still zeroing, so it needs write
|
|
// barriers. In practice, these are all to stack variables
|
|
// (even if isstack isn't smart enough to figure that out), so
|
|
// they'll be eliminated by the backend.
|
|
if r == nil {
|
|
return true
|
|
}
|
|
|
|
// Ignore no-op conversions when making decision.
|
|
// Ensures that xp = unsafe.Pointer(&x) is treated
|
|
// the same as xp = &x.
|
|
for r.Op == OCONVNOP {
|
|
r = r.Left
|
|
}
|
|
|
|
// TODO: We can eliminate write barriers if we know *both* the
|
|
// current and new content of the slot must already be shaded.
|
|
// We know a pointer is shaded if it's nil, or points to
|
|
// static data, a global (variable or function), or the stack.
|
|
// The nil optimization could be particularly useful for
|
|
// writes to just-allocated objects. Unfortunately, knowing
|
|
// the "current" value of the slot requires flow analysis.
|
|
|
|
// No write barrier for storing address of stack values,
|
|
// which are guaranteed only to be written to the stack.
|
|
if r.Op == OADDR && isstack(r.Left) {
|
|
return false
|
|
}
|
|
|
|
// Otherwise, be conservative and use write barrier.
|
|
return true
|
|
}
|
|
|
|
// TODO(rsc): Perhaps componentgen should run before this.
|
|
|
|
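// applywritebarrier rewrites the assignment n to OASWB when needwritebarrier
// reports that the store requires a write barrier.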
func applywritebarrier(n *Node) *Node {
|
|
if n.Left != nil && n.Right != nil && needwritebarrier(n.Left, n.Right) {
|
|
if Debug_wb > 1 {
|
|
Warnl(n.Pos, "marking %v for barrier", n.Left)
|
|
}
|
|
n.Op = OASWB
|
|
return n
|
|
}
|
|
return n
|
|
}
|
|
|
|
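// convas prepares the assignment n for walking: it converts n.Right to the type
// of n.Left when the types differ and recomputes n's Ullman number.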
func convas(n *Node, init *Nodes) *Node {
	if n.Op != OAS {
		Fatalf("convas: not OAS %v", n.Op)
	}

	n.Typecheck = 1

	var lt *Type
	var rt *Type
	if n.Left == nil || n.Right == nil {
		goto out
	}

	lt = n.Left.Type
	rt = n.Right.Type
	if lt == nil || rt == nil {
		goto out
	}

	if isblank(n.Left) {
		n.Right = defaultlit(n.Right, nil)
		goto out
	}

	if !eqtype(lt, rt) {
		n.Right = assignconv(n.Right, lt, "assignment")
		n.Right = walkexpr(n.Right, init)
	}

out:
	ullmancalc(n)
	return n
}

// reorder1 comes from ascompat[te]: it orders the evaluation of the
// actual function arguments in a call
//	f(a, b)
// If there is exactly one function call among the arguments, it is
// simply evaluated first. Otherwise, all calls but the last are
// evaluated into temporary variables, then the last call, then the
// remaining argument assignments.
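// For example (illustration only), for f(g(), h(), i) with argument slots
// a1, a2, a3, the list [a1 = g(), a2 = h(), a3 = i] is reordered to
//	tmp := g(); a2 = h(); a1 = tmp; a3 = i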
func reorder1(all []*Node) []*Node {
	c := 0 // function calls
	t := 0 // total parameters

	for _, n := range all {
		t++
		ullmancalc(n)
		if n.Ullman >= UINF {
			c++
		}
	}

	if c == 0 || t == 1 {
		return all
	}

	var g []*Node // fncalls assigned to tempnames
	var f *Node   // last fncall assigned to stack
	var r []*Node // non fncalls and tempnames assigned to stack
	d := 0
	var a *Node
	for _, n := range all {
		if n.Ullman < UINF {
			r = append(r, n)
			continue
		}

		d++
		if d == c {
			f = n
			continue
		}

		// make assignment of fncall to tempname
		a = temp(n.Right.Type)

		a = nod(OAS, a, n.Right)
		g = append(g, a)

		// put normal arg assignment on list
		// with fncall replaced by tempname
		n.Right = a.Left

		r = append(r, n)
	}

	if f != nil {
		g = append(g, f)
	}
	return append(g, r...)
}

// from ascompat[ee]
|
|
// a,b = c,d
|
|
// simultaneous assignment. there cannot
|
|
// be later use of an earlier lvalue.
|
|
//
|
|
// function calls have been removed.
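// For example (illustration only), for the swap a, b = b, a this produces
//	tmp := a; a = b; b = tmp
// because the second assignment's right-hand side would otherwise read the
// already-updated a.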
|
|
func reorder3(all []*Node) []*Node {
|
|
var l *Node
|
|
|
|
// If a needed expression may be affected by an
|
|
// earlier assignment, make an early copy of that
|
|
// expression and use the copy instead.
|
|
var early []*Node
|
|
|
|
var mapinit Nodes
|
|
for i, n := range all {
|
|
l = n.Left
|
|
|
|
// Save subexpressions needed on left side.
|
|
// Drill through non-dereferences.
|
|
for {
|
|
if l.Op == ODOT || l.Op == OPAREN {
|
|
l = l.Left
|
|
continue
|
|
}
|
|
|
|
if l.Op == OINDEX && l.Left.Type.IsArray() {
|
|
l.Right = reorder3save(l.Right, all, i, &early)
|
|
l = l.Left
|
|
continue
|
|
}
|
|
|
|
break
|
|
}
|
|
|
|
switch l.Op {
|
|
default:
|
|
Fatalf("reorder3 unexpected lvalue %#v", l.Op)
|
|
|
|
case ONAME:
|
|
break
|
|
|
|
case OINDEX, OINDEXMAP:
|
|
l.Left = reorder3save(l.Left, all, i, &early)
|
|
l.Right = reorder3save(l.Right, all, i, &early)
|
|
if l.Op == OINDEXMAP {
|
|
all[i] = convas(all[i], &mapinit)
|
|
}
|
|
|
|
case OIND, ODOTPTR:
|
|
l.Left = reorder3save(l.Left, all, i, &early)
|
|
}
|
|
|
|
// Save expression on right side.
|
|
all[i].Right = reorder3save(all[i].Right, all, i, &early)
|
|
}
|
|
|
|
early = append(mapinit.Slice(), early...)
|
|
return append(early, all...)
|
|
}
|
|
|
|
// if the evaluation of *np would be affected by the
|
|
// assignments in all up to but not including the ith assignment,
|
|
// copy into a temporary during *early and
|
|
// replace *np with that temp.
|
|
// The result of reorder3save MUST be assigned back to n, e.g.
|
|
// n.Left = reorder3save(n.Left, all, i, early)
|
|
func reorder3save(n *Node, all []*Node, i int, early *[]*Node) *Node {
|
|
if !aliased(n, all, i) {
|
|
return n
|
|
}
|
|
|
|
q := temp(n.Type)
|
|
q = nod(OAS, q, n)
|
|
q = typecheck(q, Etop)
|
|
*early = append(*early, q)
|
|
return q.Left
|
|
}
|
|
|
|
// what's the outer value that a write to n affects?
|
|
// outer value means containing struct or array.
|
|
func outervalue(n *Node) *Node {
|
|
for {
|
|
if n.Op == OXDOT {
|
|
Fatalf("OXDOT in walk")
|
|
}
|
|
if n.Op == ODOT || n.Op == OPAREN || n.Op == OCONVNOP {
|
|
n = n.Left
|
|
continue
|
|
}
|
|
|
|
if n.Op == OINDEX && n.Left.Type != nil && n.Left.Type.IsArray() {
|
|
n = n.Left
|
|
continue
|
|
}
|
|
|
|
break
|
|
}
|
|
|
|
return n
|
|
}
|
|
|
|
// Is it possible that the computation of n might be
|
|
// affected by writes in as up to but not including the ith element?
|
|
func aliased(n *Node, all []*Node, i int) bool {
|
|
if n == nil {
|
|
return false
|
|
}
|
|
|
|
// Treat all fields of a struct as referring to the whole struct.
|
|
// We could do better but we would have to keep track of the fields.
|
|
for n.Op == ODOT {
|
|
n = n.Left
|
|
}
|
|
|
|
// Look for obvious aliasing: a variable being assigned
|
|
// during the all list and appearing in n.
|
|
// Also record whether there are any writes to main memory.
|
|
// Also record whether there are any writes to variables
|
|
// whose addresses have been taken.
|
|
memwrite := 0
|
|
|
|
varwrite := 0
|
|
var a *Node
|
|
for _, an := range all[:i] {
|
|
a = outervalue(an.Left)
|
|
|
|
for a.Op == ODOT {
|
|
a = a.Left
|
|
}
|
|
|
|
if a.Op != ONAME {
|
|
memwrite = 1
|
|
continue
|
|
}
|
|
|
|
switch n.Class {
|
|
default:
|
|
varwrite = 1
|
|
continue
|
|
|
|
case PAUTO, PPARAM, PPARAMOUT:
|
|
if n.Addrtaken {
|
|
varwrite = 1
|
|
continue
|
|
}
|
|
|
|
if vmatch2(a, n) {
|
|
// Direct hit.
|
|
return true
|
|
}
|
|
}
|
|
}
|
|
|
|
// The variables being written do not appear in n.
|
|
// However, n might refer to computed addresses
|
|
// that are being written.
|
|
|
|
// If no computed addresses are affected by the writes, no aliasing.
|
|
if memwrite == 0 && varwrite == 0 {
|
|
return false
|
|
}
|
|
|
|
// If n does not refer to computed addresses
|
|
// (that is, if n only refers to variables whose addresses
|
|
// have not been taken), no aliasing.
|
|
if varexpr(n) {
|
|
return false
|
|
}
|
|
|
|
// Otherwise, both the writes and n refer to computed memory addresses.
|
|
// Assume that they might conflict.
|
|
return true
|
|
}
|
|
|
|
// does the evaluation of n only refer to variables
|
|
// whose addresses have not been taken?
|
|
// (and no other memory)
|
|
func varexpr(n *Node) bool {
|
|
if n == nil {
|
|
return true
|
|
}
|
|
|
|
switch n.Op {
|
|
case OLITERAL:
|
|
return true
|
|
|
|
case ONAME:
|
|
switch n.Class {
|
|
case PAUTO, PPARAM, PPARAMOUT:
|
|
if !n.Addrtaken {
|
|
return true
|
|
}
|
|
}
|
|
|
|
return false
|
|
|
|
case OADD,
|
|
OSUB,
|
|
OOR,
|
|
OXOR,
|
|
OMUL,
|
|
ODIV,
|
|
OMOD,
|
|
OLSH,
|
|
ORSH,
|
|
OAND,
|
|
OANDNOT,
|
|
OPLUS,
|
|
OMINUS,
|
|
OCOM,
|
|
OPAREN,
|
|
OANDAND,
|
|
OOROR,
|
|
OCONV,
|
|
OCONVNOP,
|
|
OCONVIFACE,
|
|
ODOTTYPE:
|
|
return varexpr(n.Left) && varexpr(n.Right)
|
|
|
|
case ODOT: // but not ODOTPTR
|
|
// Should have been handled in aliased.
|
|
Fatalf("varexpr unexpected ODOT")
|
|
}
|
|
|
|
// Be conservative.
|
|
return false
|
|
}
|
|
|
|
// is the name l mentioned in r?
|
|
func vmatch2(l *Node, r *Node) bool {
|
|
if r == nil {
|
|
return false
|
|
}
|
|
switch r.Op {
|
|
// match each right given left
|
|
case ONAME:
|
|
return l == r
|
|
|
|
case OLITERAL:
|
|
return false
|
|
}
|
|
|
|
if vmatch2(l, r.Left) {
|
|
return true
|
|
}
|
|
if vmatch2(l, r.Right) {
|
|
return true
|
|
}
|
|
for _, n := range r.List.Slice() {
|
|
if vmatch2(l, n) {
|
|
return true
|
|
}
|
|
}
|
|
return false
|
|
}
|
|
|
|
// is any name mentioned in l also mentioned in r?
|
|
// called by sinit.go
|
|
func vmatch1(l *Node, r *Node) bool {
|
|
// isolate all left sides
|
|
if l == nil || r == nil {
|
|
return false
|
|
}
|
|
switch l.Op {
|
|
case ONAME:
|
|
switch l.Class {
|
|
case PPARAM, PAUTO:
|
|
break
|
|
|
|
// assignment to non-stack variable
|
|
// must be delayed if right has function calls.
|
|
default:
|
|
if r.Ullman >= UINF {
|
|
return true
|
|
}
|
|
}
|
|
|
|
return vmatch2(l, r)
|
|
|
|
case OLITERAL:
|
|
return false
|
|
}
|
|
|
|
if vmatch1(l.Left, r) {
|
|
return true
|
|
}
|
|
if vmatch1(l.Right, r) {
|
|
return true
|
|
}
|
|
for _, n := range l.List.Slice() {
|
|
if vmatch1(n, r) {
|
|
return true
|
|
}
|
|
}
|
|
return false
|
|
}
|
|
|
|
// paramstoheap returns code to allocate memory for heap-escaped parameters
// and to copy non-result parameters' values from the stack.
// It also zero-initializes the stack memory of result parameters,
// since a deferred function may observe them during a panic.
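// For example (illustration only), for a parameter x whose address escapes,
// v below names the heap copy of x and v.Name.Param.Stackcopy names the
// incoming stack slot; roughly, the generated prologue declares the heap
// copy and then assigns the stack value into it.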
func paramstoheap(params *Type) []*Node {
	var nn []*Node
	for _, t := range params.Fields().Slice() {
		// For precise stacks, the garbage collector assumes results
		// are always live, so zero them always.
		if params.StructType().Funarg == FunargResults {
			// Defer might stop a panic and show the
			// return values as they exist at the time of panic.
			// Make sure to zero them on entry to the function.
			nn = append(nn, nod(OAS, nodarg(t, 1), nil))
		}

		v := t.Nname
		if v != nil && v.Sym != nil && strings.HasPrefix(v.Sym.Name, "~r") { // unnamed result
			v = nil
		}
		if v == nil {
			continue
		}

		if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil {
			nn = append(nn, walkstmt(nod(ODCL, v, nil)))
			if stackcopy.Class == PPARAM {
				nn = append(nn, walkstmt(typecheck(nod(OAS, v, stackcopy), Etop)))
			}
		}
	}

	return nn
}

// returnsfromheap returns code to copy values for heap-escaped parameters
// back to the stack.
func returnsfromheap(params *Type) []*Node {
	var nn []*Node
	for _, t := range params.Fields().Slice() {
		v := t.Nname
		if v == nil {
			continue
		}
		if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil && stackcopy.Class == PPARAMOUT {
			nn = append(nn, walkstmt(typecheck(nod(OAS, stackcopy, v), Etop)))
		}
	}

	return nn
}

// heapmoves generates code to handle migrating heap-escaped parameters
// between the stack and the heap. The generated code is added to Curfn's
// Enter and Exit lists.
func heapmoves() {
	lno := lineno
	lineno = Curfn.Pos
	nn := paramstoheap(Curfn.Type.Recvs())
	nn = append(nn, paramstoheap(Curfn.Type.Params())...)
	nn = append(nn, paramstoheap(Curfn.Type.Results())...)
	Curfn.Func.Enter.Append(nn...)
	lineno = Curfn.Func.Endlineno
	Curfn.Func.Exit.Append(returnsfromheap(Curfn.Type.Results())...)
	lineno = lno
}

func vmkcall(fn *Node, t *Type, init *Nodes, va []*Node) *Node {
	if fn.Type == nil || fn.Type.Etype != TFUNC {
		Fatalf("mkcall %v %v", fn, fn.Type)
	}

	n := fn.Type.Params().NumFields()

	r := nod(OCALL, fn, nil)
	r.List.Set(va[:n])
	if fn.Type.Results().NumFields() > 0 {
		r = typecheck(r, Erv|Efnstruct)
	} else {
		r = typecheck(r, Etop)
	}
	r = walkexpr(r, init)
	r.Type = t
	return r
}

func mkcall(name string, t *Type, init *Nodes, args ...*Node) *Node {
	return vmkcall(syslook(name), t, init, args)
}

func mkcall1(fn *Node, t *Type, init *Nodes, args ...*Node) *Node {
	return vmkcall(fn, t, init, args)
}

func conv(n *Node, t *Type) *Node {
	if eqtype(n.Type, t) {
		return n
	}
	n = nod(OCONV, n, nil)
	n.Type = t
	n = typecheck(n, Erv)
	return n
}

// byteindex converts n, which is byte-sized, to a uint8.
// We cannot use conv, because we allow converting bool to uint8 here,
// which is forbidden in user code.
func byteindex(n *Node) *Node {
	if eqtype(n.Type, Types[TUINT8]) {
		return n
	}
	n = nod(OCONV, n, nil)
	n.Type = Types[TUINT8]
	n.Typecheck = 1
	return n
}

func chanfn(name string, n int, t *Type) *Node {
	if !t.IsChan() {
		Fatalf("chanfn %v", t)
	}
	fn := syslook(name)
	switch n {
	default:
		Fatalf("chanfn %d", n)
	case 1:
		fn = substArgTypes(fn, t.Elem())
	case 2:
		fn = substArgTypes(fn, t.Elem(), t.Elem())
	}
	return fn
}

func mapfn(name string, t *Type) *Node {
	if !t.IsMap() {
		Fatalf("mapfn %v", t)
	}
	fn := syslook(name)
	fn = substArgTypes(fn, t.Key(), t.Val(), t.Key(), t.Val())
	return fn
}

func mapfndel(name string, t *Type) *Node {
	if !t.IsMap() {
		Fatalf("mapfndel %v", t)
	}
	fn := syslook(name)
	fn = substArgTypes(fn, t.Key(), t.Val(), t.Key())
	return fn
}

// mapaccessfast returns the names of the fast map access runtime routines for t.
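// Callers are expected to fall back to the generic mapaccess1/mapaccess2
// routines when empty strings are returned, i.e. when the map's value type
// is too large or its key type has no specialized routine.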
func mapaccessfast(t *Type) (access1, access2 string) {
	// Check ../../runtime/hashmap.go:maxValueSize before changing.
	if t.Val().Width > 128 {
		return "", ""
	}
	switch algtype(t.Key()) {
	case AMEM32:
		return "mapaccess1_fast32", "mapaccess2_fast32"
	case AMEM64:
		return "mapaccess1_fast64", "mapaccess2_fast64"
	case ASTRING:
		return "mapaccess1_faststr", "mapaccess2_faststr"
	}
	return "", ""
}

func writebarrierfn(name string, l *Type, r *Type) *Node {
	fn := syslook(name)
	fn = substArgTypes(fn, l, r)
	return fn
}

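// addstr lowers a string concatenation a+b+... (OADDSTR): with five or fewer
// operands it calls the runtime's concatstring2..concatstring5 directly;
// longer concatenations pass the operands as a []string to concatstrings.
// When the result does not escape and the constant operands are small, a
// temporary stack buffer is passed for building the result.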
func addstr(n *Node, init *Nodes) *Node {
|
|
// orderexpr rewrote OADDSTR to have a list of strings.
|
|
c := n.List.Len()
|
|
|
|
if c < 2 {
|
|
yyerror("addstr count %d too small", c)
|
|
}
|
|
|
|
buf := nodnil()
|
|
if n.Esc == EscNone {
|
|
sz := int64(0)
|
|
for _, n1 := range n.List.Slice() {
|
|
if n1.Op == OLITERAL {
|
|
sz += int64(len(n1.Val().U.(string)))
|
|
}
|
|
}
|
|
|
|
// Don't allocate the buffer if the result won't fit.
|
|
if sz < tmpstringbufsize {
|
|
// Create temporary buffer for result string on stack.
|
|
t := typArray(Types[TUINT8], tmpstringbufsize)
|
|
|
|
buf = nod(OADDR, temp(t), nil)
|
|
}
|
|
}
|
|
|
|
// build list of string arguments
|
|
args := []*Node{buf}
|
|
for _, n2 := range n.List.Slice() {
|
|
args = append(args, conv(n2, Types[TSTRING]))
|
|
}
|
|
|
|
var fn string
|
|
if c <= 5 {
|
|
// small numbers of strings use direct runtime helpers.
|
|
// note: orderexpr knows this cutoff too.
|
|
fn = fmt.Sprintf("concatstring%d", c)
|
|
} else {
|
|
// large numbers of strings are passed to the runtime as a slice.
|
|
fn = "concatstrings"
|
|
|
|
t := typSlice(Types[TSTRING])
|
|
slice := nod(OCOMPLIT, nil, typenod(t))
|
|
if prealloc[n] != nil {
|
|
prealloc[slice] = prealloc[n]
|
|
}
|
|
slice.List.Set(args[1:]) // skip buf arg
|
|
args = []*Node{buf, slice}
|
|
slice.Esc = EscNone
|
|
}
|
|
|
|
cat := syslook(fn)
|
|
r := nod(OCALL, cat, nil)
|
|
r.List.Set(args)
|
|
r = typecheck(r, Erv)
|
|
r = walkexpr(r, init)
|
|
r.Type = n.Type
|
|
|
|
return r
|
|
}
|
|
|
|
// expand append(l1, l2...) to
|
|
// init {
|
|
// s := l1
|
|
// n := len(s) + len(l2)
|
|
// // Compare as uint so growslice can panic on overflow.
|
|
// if uint(n) > uint(cap(s)) {
|
|
// s = growslice(s, n)
|
|
// }
|
|
// s = s[:n]
|
|
// memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
|
|
// }
|
|
// s
|
|
//
|
|
// l2 is allowed to be a string.
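// Depending on the element type, the copy step is emitted as a call to
// typedslicecopy (elements contain pointers), to slicecopy/slicestringcopy
// (when instrumenting outside the runtime), or as a plain memmove.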
|
|
func appendslice(n *Node, init *Nodes) *Node {
|
|
walkexprlistsafe(n.List.Slice(), init)
|
|
|
|
// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
|
|
// and n are name or literal, but those may index the slice we're
|
|
// modifying here. Fix explicitly.
|
|
ls := n.List.Slice()
|
|
for i1, n1 := range ls {
|
|
ls[i1] = cheapexpr(n1, init)
|
|
}
|
|
|
|
l1 := n.List.First()
|
|
l2 := n.List.Second()
|
|
|
|
var l []*Node
|
|
|
|
// var s []T
|
|
s := temp(l1.Type)
|
|
l = append(l, nod(OAS, s, l1)) // s = l1
|
|
|
|
// n := len(s) + len(l2)
|
|
nn := temp(Types[TINT])
|
|
l = append(l, nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), nod(OLEN, l2, nil))))
|
|
|
|
// if uint(n) > uint(cap(s))
|
|
nif := nod(OIF, nil, nil)
|
|
nif.Left = nod(OGT, nod(OCONV, nn, nil), nod(OCONV, nod(OCAP, s, nil), nil))
|
|
nif.Left.Left.Type = Types[TUINT]
|
|
nif.Left.Right.Type = Types[TUINT]
|
|
|
|
// instantiate growslice(Type*, []any, int) []any
|
|
fn := syslook("growslice")
|
|
fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem())
|
|
|
|
// s = growslice(T, s, n)
|
|
nif.Nbody.Set1(nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(s.Type.Elem()), s, nn)))
|
|
l = append(l, nif)
|
|
|
|
// s = s[:n]
|
|
nt := nod(OSLICE, s, nil)
|
|
nt.SetSliceBounds(nil, nn, nil)
|
|
nt.Etype = 1
|
|
l = append(l, nod(OAS, s, nt))
|
|
|
|
if haspointers(l1.Type.Elem()) {
|
|
// copy(s[len(l1):], l2)
|
|
nptr1 := nod(OSLICE, s, nil)
|
|
nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
|
|
nptr1.Etype = 1
|
|
nptr2 := l2
|
|
fn := syslook("typedslicecopy")
|
|
fn = substArgTypes(fn, l1.Type, l2.Type)
|
|
var ln Nodes
|
|
ln.Set(l)
|
|
nt := mkcall1(fn, Types[TINT], &ln, typename(l1.Type.Elem()), nptr1, nptr2)
|
|
l = append(ln.Slice(), nt)
|
|
} else if instrumenting && !compiling_runtime {
|
|
// rely on runtime to instrument copy.
|
|
// copy(s[len(l1):], l2)
|
|
nptr1 := nod(OSLICE, s, nil)
|
|
nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
|
|
nptr1.Etype = 1
|
|
nptr2 := l2
|
|
var fn *Node
|
|
if l2.Type.IsString() {
|
|
fn = syslook("slicestringcopy")
|
|
} else {
|
|
fn = syslook("slicecopy")
|
|
}
|
|
fn = substArgTypes(fn, l1.Type, l2.Type)
|
|
var ln Nodes
|
|
ln.Set(l)
|
|
nt := mkcall1(fn, Types[TINT], &ln, nptr1, nptr2, nodintconst(s.Type.Elem().Width))
|
|
l = append(ln.Slice(), nt)
|
|
} else {
|
|
// memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
|
|
nptr1 := nod(OINDEX, s, nod(OLEN, l1, nil))
|
|
nptr1.Bounded = true
|
|
|
|
nptr1 = nod(OADDR, nptr1, nil)
|
|
|
|
nptr2 := nod(OSPTR, l2, nil)
|
|
|
|
fn := syslook("memmove")
|
|
fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem())
|
|
|
|
var ln Nodes
|
|
ln.Set(l)
|
|
nwid := cheapexpr(conv(nod(OLEN, l2, nil), Types[TUINTPTR]), &ln)
|
|
|
|
nwid = nod(OMUL, nwid, nodintconst(s.Type.Elem().Width))
|
|
nt := mkcall1(fn, nil, &ln, nptr1, nptr2, nwid)
|
|
l = append(ln.Slice(), nt)
|
|
}
|
|
|
|
typecheckslice(l, Etop)
|
|
walkstmtlist(l)
|
|
init.Append(l...)
|
|
return s
|
|
}
|
|
|
|
// Rewrite append(src, x, y, z) so that any side effects in
|
|
// x, y, z (including runtime panics) are evaluated in
|
|
// initialization statements before the append.
|
|
// For normal code generation, stop there and leave the
|
|
// rest to cgen_append.
|
|
//
|
|
// For race detector, expand append(src, a [, b]* ) to
|
|
//
|
|
// init {
|
|
// s := src
|
|
// const argc = len(args) - 1
|
|
// if cap(s) - len(s) < argc {
|
|
// s = growslice(s, len(s)+argc)
|
|
// }
|
|
// n := len(s)
|
|
// s = s[:n+argc]
|
|
// s[n] = a
|
|
// s[n+1] = b
|
|
// ...
|
|
// }
|
|
// s
|
|
func walkappend(n *Node, init *Nodes, dst *Node) *Node {
|
|
if !samesafeexpr(dst, n.List.First()) {
|
|
n.List.SetIndex(0, safeexpr(n.List.Index(0), init))
|
|
n.List.SetIndex(0, walkexpr(n.List.Index(0), init))
|
|
}
|
|
walkexprlistsafe(n.List.Slice()[1:], init)
|
|
|
|
// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
|
|
// and n are name or literal, but those may index the slice we're
|
|
// modifying here. Fix explicitly.
|
|
// Using cheapexpr also makes sure that the evaluation
|
|
// of all arguments (and especially any panics) happen
|
|
// before we begin to modify the slice in a visible way.
|
|
ls := n.List.Slice()[1:]
|
|
for i, n := range ls {
|
|
ls[i] = cheapexpr(n, init)
|
|
}
|
|
|
|
nsrc := n.List.First()
|
|
|
|
argc := n.List.Len() - 1
|
|
if argc < 1 {
|
|
return nsrc
|
|
}
|
|
|
|
// General case, with no function calls left as arguments.
|
|
// Leave for gen, except that instrumentation requires old form.
|
|
if !instrumenting || compiling_runtime {
|
|
return n
|
|
}
|
|
|
|
var l []*Node
|
|
|
|
ns := temp(nsrc.Type)
|
|
l = append(l, nod(OAS, ns, nsrc)) // s = src
|
|
|
|
na := nodintconst(int64(argc)) // const argc
|
|
nx := nod(OIF, nil, nil) // if cap(s) - len(s) < argc
|
|
nx.Left = nod(OLT, nod(OSUB, nod(OCAP, ns, nil), nod(OLEN, ns, nil)), na)
|
|
|
|
fn := syslook("growslice") // growslice(<type>, old []T, mincap int) (ret []T)
|
|
fn = substArgTypes(fn, ns.Type.Elem(), ns.Type.Elem())
|
|
|
|
nx.Nbody.Set1(nod(OAS, ns,
|
|
mkcall1(fn, ns.Type, &nx.Ninit, typename(ns.Type.Elem()), ns,
|
|
nod(OADD, nod(OLEN, ns, nil), na))))
|
|
|
|
l = append(l, nx)
|
|
|
|
nn := temp(Types[TINT])
|
|
l = append(l, nod(OAS, nn, nod(OLEN, ns, nil))) // n = len(s)
|
|
|
|
nx = nod(OSLICE, ns, nil) // ...s[:n+argc]
|
|
nx.SetSliceBounds(nil, nod(OADD, nn, na), nil)
|
|
nx.Etype = 1
|
|
l = append(l, nod(OAS, ns, nx)) // s = s[:n+argc]
|
|
|
|
ls = n.List.Slice()[1:]
|
|
for i, n := range ls {
|
|
nx = nod(OINDEX, ns, nn) // s[n] ...
|
|
nx.Bounded = true
|
|
l = append(l, nod(OAS, nx, n)) // s[n] = arg
|
|
if i+1 < len(ls) {
|
|
l = append(l, nod(OAS, nn, nod(OADD, nn, nodintconst(1)))) // n = n + 1
|
|
}
|
|
}
|
|
|
|
typecheckslice(l, Etop)
|
|
walkstmtlist(l)
|
|
init.Append(l...)
|
|
return ns
|
|
}
|
|
|
|
// Lower copy(a, b) to a memmove call or a runtime call.
|
|
//
|
|
// init {
|
|
// n := len(a)
|
|
// if n > len(b) { n = len(b) }
|
|
// memmove(a.ptr, b.ptr, n*sizeof(elem(a)))
|
|
// }
|
|
// n;
|
|
//
|
|
// Also works if b is a string.
|
|
//
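// When the elements contain pointers, the copy is lowered to a typedslicecopy
// call; when runtimecall is set, it becomes a slicecopy/slicestringcopy call;
// otherwise it is open-coded as the length computation plus a memmove, as
// sketched above.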
|
|
func copyany(n *Node, init *Nodes, runtimecall bool) *Node {
|
|
if haspointers(n.Left.Type.Elem()) {
|
|
fn := writebarrierfn("typedslicecopy", n.Left.Type, n.Right.Type)
|
|
return mkcall1(fn, n.Type, init, typename(n.Left.Type.Elem()), n.Left, n.Right)
|
|
}
|
|
|
|
if runtimecall {
|
|
var fn *Node
|
|
if n.Right.Type.IsString() {
|
|
fn = syslook("slicestringcopy")
|
|
} else {
|
|
fn = syslook("slicecopy")
|
|
}
|
|
fn = substArgTypes(fn, n.Left.Type, n.Right.Type)
|
|
return mkcall1(fn, n.Type, init, n.Left, n.Right, nodintconst(n.Left.Type.Elem().Width))
|
|
}
|
|
|
|
n.Left = walkexpr(n.Left, init)
|
|
n.Right = walkexpr(n.Right, init)
|
|
nl := temp(n.Left.Type)
|
|
nr := temp(n.Right.Type)
|
|
var l []*Node
|
|
l = append(l, nod(OAS, nl, n.Left))
|
|
l = append(l, nod(OAS, nr, n.Right))
|
|
|
|
nfrm := nod(OSPTR, nr, nil)
|
|
nto := nod(OSPTR, nl, nil)
|
|
|
|
nlen := temp(Types[TINT])
|
|
|
|
// n = len(to)
|
|
l = append(l, nod(OAS, nlen, nod(OLEN, nl, nil)))
|
|
|
|
// if n > len(frm) { n = len(frm) }
|
|
nif := nod(OIF, nil, nil)
|
|
|
|
nif.Left = nod(OGT, nlen, nod(OLEN, nr, nil))
|
|
nif.Nbody.Append(nod(OAS, nlen, nod(OLEN, nr, nil)))
|
|
l = append(l, nif)
|
|
|
|
// Call memmove.
|
|
fn := syslook("memmove")
|
|
|
|
fn = substArgTypes(fn, nl.Type.Elem(), nl.Type.Elem())
|
|
nwid := temp(Types[TUINTPTR])
|
|
l = append(l, nod(OAS, nwid, conv(nlen, Types[TUINTPTR])))
|
|
nwid = nod(OMUL, nwid, nodintconst(nl.Type.Elem().Width))
|
|
l = append(l, mkcall1(fn, nil, init, nto, nfrm, nwid))
|
|
|
|
typecheckslice(l, Etop)
|
|
walkstmtlist(l)
|
|
init.Append(l...)
|
|
return nlen
|
|
}
|
|
|
|
func eqfor(t *Type, needsize *int) *Node {
	// Should only arrive here with large memory or
	// a struct/array containing a non-memory field/element.
	// Small memory is handled inline, and single non-memory
	// is handled during type check (OCMPSTR etc).
	switch a, _ := algtype1(t); a {
	case AMEM:
		n := syslook("memequal")
		n = substArgTypes(n, t, t)
		*needsize = 1
		return n
	case ASPECIAL:
		sym := typesymprefix(".eq", t)
		n := newname(sym)
		n.Class = PFUNC
		ntype := nod(OTFUNC, nil, nil)
		ntype.List.Append(nod(ODCLFIELD, nil, typenod(ptrto(t))))
		ntype.List.Append(nod(ODCLFIELD, nil, typenod(ptrto(t))))
		ntype.Rlist.Append(nod(ODCLFIELD, nil, typenod(Types[TBOOL])))
		ntype = typecheck(ntype, Etype)
		n.Type = ntype.Type
		*needsize = 0
		return n
	}
	Fatalf("eqfor %v", t)
	return nil
}

// The result of walkcompare MUST be assigned back to n, e.g.
|
|
// n.Left = walkcompare(n.Left, init)
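// Comparisons of an interface value against a concrete value are split into
// a type test and a data comparison. Small arrays and structs are compared
// inline, element by element (field by field), joined with && for == and
// || for !=; larger ones call the type's equality algorithm via eqfor.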
|
|
func walkcompare(n *Node, init *Nodes) *Node {
|
|
// Given interface value l and concrete value r, rewrite
|
|
// l == r
|
|
// into types-equal && data-equal.
|
|
// This is efficient, avoids allocations, and avoids runtime calls.
|
|
var l, r *Node
|
|
if n.Left.Type.IsInterface() && !n.Right.Type.IsInterface() {
|
|
l = n.Left
|
|
r = n.Right
|
|
} else if !n.Left.Type.IsInterface() && n.Right.Type.IsInterface() {
|
|
l = n.Right
|
|
r = n.Left
|
|
}
|
|
|
|
if l != nil {
|
|
// Handle both == and !=.
|
|
eq := n.Op
|
|
var andor Op
|
|
if eq == OEQ {
|
|
andor = OANDAND
|
|
} else {
|
|
andor = OOROR
|
|
}
|
|
// Check for types equal.
|
|
// For empty interface, this is:
|
|
// l.tab == type(r)
|
|
// For non-empty interface, this is:
|
|
// l.tab != nil && l.tab._type == type(r)
|
|
var eqtype *Node
|
|
tab := nod(OITAB, l, nil)
|
|
rtyp := typename(r.Type)
|
|
if l.Type.IsEmptyInterface() {
|
|
tab.Type = ptrto(Types[TUINT8])
|
|
tab.Typecheck = 1
|
|
eqtype = nod(eq, tab, rtyp)
|
|
} else {
|
|
nonnil := nod(brcom(eq), nodnil(), tab)
|
|
match := nod(eq, itabType(tab), rtyp)
|
|
eqtype = nod(andor, nonnil, match)
|
|
}
|
|
// Check for data equal.
|
|
eqdata := nod(eq, ifaceData(l, r.Type), r)
|
|
// Put it all together.
|
|
expr := nod(andor, eqtype, eqdata)
|
|
n = finishcompare(n, expr, init)
|
|
return n
|
|
}
|
|
|
|
// Must be comparison of array or struct.
|
|
// Otherwise back end handles it.
|
|
// While we're here, decide whether to
|
|
// inline or call an eq alg.
|
|
t := n.Left.Type
|
|
var inline bool
|
|
switch t.Etype {
|
|
default:
|
|
return n
|
|
case TARRAY:
|
|
inline = t.NumElem() <= 1 || (t.NumElem() <= 4 && issimple[t.Elem().Etype])
|
|
case TSTRUCT:
|
|
inline = t.NumFields() <= 4
|
|
}
|
|
|
|
cmpl := n.Left
|
|
for cmpl != nil && cmpl.Op == OCONVNOP {
|
|
cmpl = cmpl.Left
|
|
}
|
|
cmpr := n.Right
|
|
for cmpr != nil && cmpr.Op == OCONVNOP {
|
|
cmpr = cmpr.Left
|
|
}
|
|
|
|
// Chose not to inline. Call equality function directly.
|
|
if !inline {
|
|
if isvaluelit(cmpl) {
|
|
var_ := temp(cmpl.Type)
|
|
anylit(cmpl, var_, init)
|
|
cmpl = var_
|
|
}
|
|
if isvaluelit(cmpr) {
|
|
var_ := temp(cmpr.Type)
|
|
anylit(cmpr, var_, init)
|
|
cmpr = var_
|
|
}
|
|
if !islvalue(cmpl) || !islvalue(cmpr) {
|
|
Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr)
|
|
}
|
|
|
|
// eq algs take pointers
|
|
pl := temp(ptrto(t))
|
|
al := nod(OAS, pl, nod(OADDR, cmpl, nil))
|
|
al.Right.Etype = 1 // addr does not escape
|
|
al = typecheck(al, Etop)
|
|
init.Append(al)
|
|
|
|
pr := temp(ptrto(t))
|
|
ar := nod(OAS, pr, nod(OADDR, cmpr, nil))
|
|
ar.Right.Etype = 1 // addr does not escape
|
|
ar = typecheck(ar, Etop)
|
|
init.Append(ar)
|
|
|
|
var needsize int
|
|
call := nod(OCALL, eqfor(t, &needsize), nil)
|
|
call.List.Append(pl)
|
|
call.List.Append(pr)
|
|
if needsize != 0 {
|
|
call.List.Append(nodintconst(t.Width))
|
|
}
|
|
res := call
|
|
if n.Op != OEQ {
|
|
res = nod(ONOT, res, nil)
|
|
}
|
|
n = finishcompare(n, res, init)
|
|
return n
|
|
}
|
|
|
|
// inline: build boolean expression comparing element by element
|
|
andor := OANDAND
|
|
if n.Op == ONE {
|
|
andor = OOROR
|
|
}
|
|
var expr *Node
|
|
compare := func(el, er *Node) {
|
|
a := nod(n.Op, el, er)
|
|
if expr == nil {
|
|
expr = a
|
|
} else {
|
|
expr = nod(andor, expr, a)
|
|
}
|
|
}
|
|
cmpl = safeexpr(cmpl, init)
|
|
cmpr = safeexpr(cmpr, init)
|
|
if t.IsStruct() {
|
|
for _, f := range t.Fields().Slice() {
|
|
sym := f.Sym
|
|
if isblanksym(sym) {
|
|
continue
|
|
}
|
|
compare(
|
|
nodSym(OXDOT, cmpl, sym),
|
|
nodSym(OXDOT, cmpr, sym),
|
|
)
|
|
}
|
|
} else {
|
|
for i := 0; int64(i) < t.NumElem(); i++ {
|
|
compare(
|
|
nod(OINDEX, cmpl, nodintconst(int64(i))),
|
|
nod(OINDEX, cmpr, nodintconst(int64(i))),
|
|
)
|
|
}
|
|
}
|
|
if expr == nil {
|
|
expr = nodbool(n.Op == OEQ)
|
|
}
|
|
n = finishcompare(n, expr, init)
|
|
return n
|
|
}
|
|
|
|
// The result of finishcompare MUST be assigned back to n, e.g.
//	n.Left = finishcompare(n.Left, x, init)
func finishcompare(n, r *Node, init *Nodes) *Node {
	// Use nn here to avoid passing r to typecheck.
	nn := r
	nn = typecheck(nn, Erv)
	nn = walkexpr(nn, init)
	r = nn
	if r.Type != n.Type {
		r = nod(OCONVNOP, r, nil)
		r.Type = n.Type
		r.Typecheck = 1
		nn = r
	}
	return nn
}

// isIntOrdering reports whether n is a <, ≤, >, or ≥ ordering between integers.
|
|
func (n *Node) isIntOrdering() bool {
|
|
switch n.Op {
|
|
case OLE, OLT, OGE, OGT:
|
|
default:
|
|
return false
|
|
}
|
|
return n.Left.Type.IsInteger() && n.Right.Type.IsInteger()
|
|
}
|
|
|
|
// walkinrange optimizes integer-in-range checks, such as 4 <= x && x < 10.
|
|
// n must be an OANDAND or OOROR node.
|
|
// The result of walkinrange MUST be assigned back to n, e.g.
|
|
// n.Left = walkinrange(n.Left)
|
|
func walkinrange(n *Node, init *Nodes) *Node {
|
|
// We are looking for something equivalent to a opl b OP b opr c, where:
|
|
// * a, b, and c have integer type
|
|
// * b is side-effect-free
|
|
// * opl and opr are each < or ≤
|
|
// * OP is &&
|
|
l := n.Left
|
|
r := n.Right
|
|
if !l.isIntOrdering() || !r.isIntOrdering() {
|
|
return n
|
|
}
|
|
|
|
// Find b, if it exists, and rename appropriately.
|
|
// Input is: l.Left l.Op l.Right ANDAND/OROR r.Left r.Op r.Right
|
|
// Output is: a opl b(==x) ANDAND/OROR b(==x) opr c
|
|
a, opl, b := l.Left, l.Op, l.Right
|
|
x, opr, c := r.Left, r.Op, r.Right
|
|
for i := 0; ; i++ {
|
|
if samesafeexpr(b, x) {
|
|
break
|
|
}
|
|
if i == 3 {
|
|
// Tried all permutations and couldn't find an appropriate b == x.
|
|
return n
|
|
}
|
|
if i&1 == 0 {
|
|
a, opl, b = b, brrev(opl), a
|
|
} else {
|
|
x, opr, c = c, brrev(opr), x
|
|
}
|
|
}
|
|
|
|
// If n.Op is ||, apply de Morgan.
|
|
// Negate the internal ops now; we'll negate the top level op at the end.
|
|
// Henceforth assume &&.
|
|
negateResult := n.Op == OOROR
|
|
if negateResult {
|
|
opl = brcom(opl)
|
|
opr = brcom(opr)
|
|
}
|
|
|
|
cmpdir := func(o Op) int {
|
|
switch o {
|
|
case OLE, OLT:
|
|
return -1
|
|
case OGE, OGT:
|
|
return +1
|
|
}
|
|
Fatalf("walkinrange cmpdir %v", o)
|
|
return 0
|
|
}
|
|
if cmpdir(opl) != cmpdir(opr) {
|
|
// Not a range check; something like b < a && b < c.
|
|
return n
|
|
}
|
|
|
|
switch opl {
|
|
case OGE, OGT:
|
|
// We have something like a > b && b ≥ c.
|
|
// Switch and reverse ops and rename constants,
|
|
// to make it look like a ≤ b && b < c.
|
|
a, c = c, a
|
|
opl, opr = brrev(opr), brrev(opl)
|
|
}
|
|
|
|
// We must ensure that c-a is non-negative.
|
|
// For now, require a and c to be constants.
|
|
// In the future, we could also support a == 0 and c == len/cap(...).
|
|
// Unfortunately, by this point, most len/cap expressions have been
|
|
// stored into temporary variables.
|
|
if !Isconst(a, CTINT) || !Isconst(c, CTINT) {
|
|
return n
|
|
}
|
|
|
|
if opl == OLT {
|
|
// We have a < b && ...
|
|
// We need a ≤ b && ... to safely use unsigned comparison tricks.
|
|
// If a is not the maximum constant for b's type,
|
|
// we can increment a and switch to ≤.
|
|
if a.Int64() >= maxintval[b.Type.Etype].Int64() {
|
|
return n
|
|
}
|
|
a = nodintconst(a.Int64() + 1)
|
|
opl = OLE
|
|
}
|
|
|
|
bound := c.Int64() - a.Int64()
|
|
if bound < 0 {
|
|
// Bad news. Something like 5 <= x && x < 3.
|
|
// Rare in practice, and we still need to generate side-effects,
|
|
// so just leave it alone.
|
|
return n
|
|
}
|
|
|
|
// We have a ≤ b && b < c (or a ≤ b && b ≤ c).
|
|
// This is equivalent to (a-a) ≤ (b-a) && (b-a) < (c-a),
|
|
// which is equivalent to 0 ≤ (b-a) && (b-a) < (c-a),
|
|
// which is equivalent to uint(b-a) < uint(c-a).
|
|
ut := b.Type.toUnsigned()
|
|
lhs := conv(nod(OSUB, b, a), ut)
|
|
rhs := nodintconst(bound)
|
|
if negateResult {
|
|
// Negate top level.
|
|
opr = brcom(opr)
|
|
}
|
|
cmp := nod(opr, lhs, rhs)
|
|
cmp.Pos = n.Pos
|
|
cmp = addinit(cmp, l.Ninit.Slice())
|
|
cmp = addinit(cmp, r.Ninit.Slice())
|
|
// Typecheck the AST rooted at cmp...
|
|
cmp = typecheck(cmp, Erv)
|
|
// ...but then reset cmp's type to match n's type.
|
|
cmp.Type = n.Type
|
|
cmp = walkexpr(cmp, init)
|
|
return cmp
|
|
}
|
|
|
|
// walkdiv rewrites division by a constant as less expensive
|
|
// operations.
|
|
// The result of walkdiv MUST be assigned back to n, e.g.
|
|
// n.Left = walkdiv(n.Left, init)
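// For example (illustration only), for an int32 x the expression x/4 is
// rewritten to
//	(x + int32(uint32(x>>31)>>30)) >> 2
// which rounds toward zero for negative x, while division by other small
// constants becomes a high multiplication by a precomputed "magic"
// constant followed by shifts.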
|
|
func walkdiv(n *Node, init *Nodes) *Node {
|
|
// if >= 0, nr is 1<<pow // 1 if nr is negative.
|
|
|
|
if n.Right.Op != OLITERAL {
|
|
return n
|
|
}
|
|
|
|
// nr is a constant.
|
|
nl := cheapexpr(n.Left, init)
|
|
|
|
nr := n.Right
|
|
|
|
// special cases of mod/div
|
|
// by a constant
|
|
w := int(nl.Type.Width * 8)
|
|
|
|
s := 0 // 1 if nr is negative.
|
|
pow := powtwo(nr) // if >= 0, nr is 1<<pow
|
|
if pow >= 1000 {
|
|
// negative power of 2
|
|
s = 1
|
|
|
|
pow -= 1000
|
|
}
|
|
|
|
if pow+1 >= w {
|
|
// divisor too large.
|
|
return n
|
|
}
|
|
|
|
if pow < 0 {
|
|
// try to do division by multiply by (2^w)/d
|
|
// see hacker's delight chapter 10
|
|
// TODO: support 64-bit magic multiply here.
|
|
var m Magic
|
|
m.W = w
|
|
|
|
if nl.Type.IsSigned() {
|
|
m.Sd = nr.Int64()
|
|
smagic(&m)
|
|
} else {
|
|
m.Ud = uint64(nr.Int64())
|
|
umagic(&m)
|
|
}
|
|
|
|
if m.Bad != 0 {
|
|
return n
|
|
}
|
|
|
|
// We have a quick division method so use it
|
|
// for modulo too.
|
|
if n.Op == OMOD {
|
|
// rewrite as A%B = A - (A/B*B).
|
|
n1 := nod(ODIV, nl, nr)
|
|
|
|
n2 := nod(OMUL, n1, nr)
|
|
n = nod(OSUB, nl, n2)
|
|
goto ret
|
|
}
|
|
|
|
switch simtype[nl.Type.Etype] {
|
|
default:
|
|
return n
|
|
|
|
// n1 = nl * magic >> w (HMUL)
|
|
case TUINT8, TUINT16, TUINT32:
|
|
var nc Node
|
|
|
|
Nodconst(&nc, nl.Type, int64(m.Um))
|
|
n1 := nod(OHMUL, nl, &nc)
|
|
n1 = typecheck(n1, Erv)
|
|
if m.Ua != 0 {
|
|
// Select a Go type with (at least) twice the width.
|
|
var twide *Type
|
|
switch simtype[nl.Type.Etype] {
|
|
default:
|
|
return n
|
|
|
|
case TUINT8, TUINT16:
|
|
twide = Types[TUINT32]
|
|
|
|
case TUINT32:
|
|
twide = Types[TUINT64]
|
|
|
|
case TINT8, TINT16:
|
|
twide = Types[TINT32]
|
|
|
|
case TINT32:
|
|
twide = Types[TINT64]
|
|
}
|
|
|
|
// add numerator (might overflow).
|
|
// n2 = (n1 + nl)
|
|
n2 := nod(OADD, conv(n1, twide), conv(nl, twide))
|
|
|
|
// shift by m.s
|
|
var nc Node
|
|
|
|
Nodconst(&nc, Types[TUINT], int64(m.S))
|
|
n = conv(nod(ORSH, n2, &nc), nl.Type)
|
|
} else {
|
|
// n = n1 >> m.s
|
|
var nc Node
|
|
|
|
Nodconst(&nc, Types[TUINT], int64(m.S))
|
|
n = nod(ORSH, n1, &nc)
|
|
}
|
|
|
|
// n1 = nl * magic >> w
|
|
case TINT8, TINT16, TINT32:
|
|
var nc Node
|
|
|
|
Nodconst(&nc, nl.Type, m.Sm)
|
|
n1 := nod(OHMUL, nl, &nc)
|
|
n1 = typecheck(n1, Erv)
|
|
if m.Sm < 0 {
|
|
// add the numerator.
|
|
n1 = nod(OADD, n1, nl)
|
|
}
|
|
|
|
// shift by m.s
|
|
var ns Node
|
|
|
|
Nodconst(&ns, Types[TUINT], int64(m.S))
|
|
n2 := conv(nod(ORSH, n1, &ns), nl.Type)
|
|
|
|
// add 1 iff n1 is negative.
|
|
var nneg Node
|
|
|
|
Nodconst(&nneg, Types[TUINT], int64(w)-1)
|
|
n3 := nod(ORSH, nl, &nneg) // n4 = -1 iff n1 is negative.
|
|
n = nod(OSUB, n2, n3)
|
|
|
|
// apply sign.
|
|
if m.Sd < 0 {
|
|
n = nod(OMINUS, n, nil)
|
|
}
|
|
}
|
|
|
|
goto ret
|
|
}
|
|
|
|
switch pow {
|
|
case 0:
|
|
if n.Op == OMOD {
|
|
// nl % 1 is zero.
|
|
Nodconst(n, n.Type, 0)
|
|
} else if s != 0 {
|
|
// divide by -1
|
|
n.Op = OMINUS
|
|
|
|
n.Right = nil
|
|
} else {
|
|
// divide by 1
|
|
n = nl
|
|
}
|
|
|
|
default:
|
|
if n.Type.IsSigned() {
|
|
if n.Op == OMOD {
|
|
// signed modulo 2^pow is like ANDing
|
|
// with the last pow bits, but if nl < 0,
|
|
// nl & (2^pow-1) is (nl+1)%2^pow - 1.
|
|
var nc Node
|
|
|
|
Nodconst(&nc, Types[simtype[TUINT]], int64(w)-1)
|
|
n1 := nod(ORSH, nl, &nc) // n1 = -1 iff nl < 0.
|
|
if pow == 1 {
|
|
n1 = typecheck(n1, Erv)
|
|
n1 = cheapexpr(n1, init)
|
|
|
|
// n = (nl+ε)&1 -ε where ε=1 iff nl<0.
|
|
n2 := nod(OSUB, nl, n1)
|
|
|
|
var nc Node
|
|
Nodconst(&nc, nl.Type, 1)
|
|
n3 := nod(OAND, n2, &nc)
|
|
n = nod(OADD, n3, n1)
|
|
} else {
|
|
// n = (nl+ε)&(nr-1) - ε where ε=2^pow-1 iff nl<0.
|
|
var nc Node
|
|
|
|
Nodconst(&nc, nl.Type, (1<<uint(pow))-1)
|
|
n2 := nod(OAND, n1, &nc) // n2 = 2^pow-1 iff nl<0.
|
|
n2 = typecheck(n2, Erv)
|
|
n2 = cheapexpr(n2, init)
|
|
|
|
n3 := nod(OADD, nl, n2)
|
|
n4 := nod(OAND, n3, &nc)
|
|
n = nod(OSUB, n4, n2)
|
|
}
|
|
|
|
break
|
|
} else {
|
|
// arithmetic right shift does not give the correct rounding.
|
|
// if nl >= 0, nl >> n == nl / nr
|
|
// if nl < 0, we want to add 2^n-1 first.
|
|
var nc Node
|
|
|
|
Nodconst(&nc, Types[simtype[TUINT]], int64(w)-1)
|
|
n1 := nod(ORSH, nl, &nc) // n1 = -1 iff nl < 0.
|
|
if pow == 1 {
|
|
// nl+1 is nl-(-1)
|
|
n.Left = nod(OSUB, nl, n1)
|
|
} else {
|
|
// Do a logical right shift on -1 to keep pow bits.
|
|
var nc Node
|
|
|
|
Nodconst(&nc, Types[simtype[TUINT]], int64(w)-int64(pow))
|
|
n2 := nod(ORSH, conv(n1, nl.Type.toUnsigned()), &nc)
|
|
n.Left = nod(OADD, nl, conv(n2, nl.Type))
|
|
}
|
|
|
|
// n = (nl + 2^pow-1) >> pow
|
|
n.Op = ORSH
|
|
|
|
var n2 Node
|
|
Nodconst(&n2, Types[simtype[TUINT]], int64(pow))
|
|
n.Right = &n2
|
|
n.Typecheck = 0
|
|
}
|
|
|
|
if s != 0 {
|
|
n = nod(OMINUS, n, nil)
|
|
}
|
|
break
|
|
}
|
|
|
|
var nc Node
|
|
if n.Op == OMOD {
|
|
// n = nl & (nr-1)
|
|
n.Op = OAND
|
|
|
|
Nodconst(&nc, nl.Type, nr.Int64()-1)
|
|
} else {
|
|
// n = nl >> pow
|
|
n.Op = ORSH
|
|
|
|
Nodconst(&nc, Types[simtype[TUINT]], int64(pow))
|
|
}
|
|
|
|
n.Typecheck = 0
|
|
n.Right = &nc
|
|
}
|
|
|
|
goto ret
|
|
|
|
ret:
|
|
n = typecheck(n, Erv)
|
|
n = walkexpr(n, init)
|
|
return n
|
|
}
|
|
|
|
// bounded reports whether the integer expression n must be in the range [0, max).
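// For example (illustration only), bounded(i&3, 8) is true for any integer
// i, and bounded(x%8, 8) is true for unsigned x.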
|
|
func bounded(n *Node, max int64) bool {
|
|
if n.Type == nil || !n.Type.IsInteger() {
|
|
return false
|
|
}
|
|
|
|
sign := n.Type.IsSigned()
|
|
bits := int32(8 * n.Type.Width)
|
|
|
|
if smallintconst(n) {
|
|
v := n.Int64()
|
|
return 0 <= v && v < max
|
|
}
|
|
|
|
switch n.Op {
|
|
case OAND:
|
|
v := int64(-1)
|
|
if smallintconst(n.Left) {
|
|
v = n.Left.Int64()
|
|
} else if smallintconst(n.Right) {
|
|
v = n.Right.Int64()
|
|
}
|
|
|
|
if 0 <= v && v < max {
|
|
return true
|
|
}
|
|
|
|
case OMOD:
|
|
if !sign && smallintconst(n.Right) {
|
|
v := n.Right.Int64()
|
|
if 0 <= v && v <= max {
|
|
return true
|
|
}
|
|
}
|
|
|
|
case ODIV:
|
|
if !sign && smallintconst(n.Right) {
|
|
v := n.Right.Int64()
|
|
for bits > 0 && v >= 2 {
|
|
bits--
|
|
v >>= 1
|
|
}
|
|
}
|
|
|
|
case ORSH:
|
|
if !sign && smallintconst(n.Right) {
|
|
v := n.Right.Int64()
|
|
if v > int64(bits) {
|
|
return true
|
|
}
|
|
bits -= int32(v)
|
|
}
|
|
}
|
|
|
|
if !sign && bits <= 62 && 1<<uint(bits) <= max {
|
|
return true
|
|
}
|
|
|
|
return false
|
|
}
|
|
|
|
// usemethod checks interface method calls for uses of reflect.Type.Method or MethodByName.
|
|
func usemethod(n *Node) {
|
|
t := n.Left.Type
|
|
|
|
// Looking for either of:
|
|
// Method(int) reflect.Method
|
|
// MethodByName(string) (reflect.Method, bool)
|
|
//
|
|
// TODO(crawshaw): improve precision of match by working out
|
|
// how to check the method name.
|
|
if n := t.Params().NumFields(); n != 1 {
|
|
return
|
|
}
|
|
if n := t.Results().NumFields(); n != 1 && n != 2 {
|
|
return
|
|
}
|
|
p0 := t.Params().Field(0)
|
|
res0 := t.Results().Field(0)
|
|
var res1 *Field
|
|
if t.Results().NumFields() == 2 {
|
|
res1 = t.Results().Field(1)
|
|
}
|
|
|
|
if res1 == nil {
|
|
if p0.Type.Etype != TINT {
|
|
return
|
|
}
|
|
} else {
|
|
if !p0.Type.IsString() {
|
|
return
|
|
}
|
|
if !res1.Type.IsBoolean() {
|
|
return
|
|
}
|
|
}
|
|
if res0.Type.String() != "reflect.Method" {
|
|
return
|
|
}
|
|
|
|
Curfn.Func.ReflectMethod = true
|
|
}
|
|
|
|
func usefield(n *Node) {
|
|
if obj.Fieldtrack_enabled == 0 {
|
|
return
|
|
}
|
|
|
|
switch n.Op {
|
|
default:
|
|
Fatalf("usefield %v", n.Op)
|
|
|
|
case ODOT, ODOTPTR:
|
|
break
|
|
}
|
|
if n.Sym == nil {
|
|
// No field name. This DOTPTR was built by the compiler for access
|
|
// to runtime data structures. Ignore.
|
|
return
|
|
}
|
|
|
|
t := n.Left.Type
|
|
if t.IsPtr() {
|
|
t = t.Elem()
|
|
}
|
|
field := dotField[typeSym{t.Orig, n.Sym}]
|
|
if field == nil {
|
|
Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym)
|
|
}
|
|
if !strings.Contains(field.Note, "go:\"track\"") {
|
|
return
|
|
}
|
|
|
|
outer := n.Left.Type
|
|
if outer.IsPtr() {
|
|
outer = outer.Elem()
|
|
}
|
|
if outer.Sym == nil {
|
|
yyerror("tracked field must be in named struct type")
|
|
}
|
|
if !exportname(field.Sym.Name) {
|
|
yyerror("tracked field must be exported (upper case)")
|
|
}
|
|
|
|
sym := tracksym(outer, field)
|
|
if Curfn.Func.FieldTrack == nil {
|
|
Curfn.Func.FieldTrack = make(map[*Sym]struct{})
|
|
}
|
|
Curfn.Func.FieldTrack[sym] = struct{}{}
|
|
}
|
|
|
|
func candiscardlist(l Nodes) bool {
|
|
for _, n := range l.Slice() {
|
|
if !candiscard(n) {
|
|
return false
|
|
}
|
|
}
|
|
return true
|
|
}
|
|
|
|
func candiscard(n *Node) bool {
|
|
if n == nil {
|
|
return true
|
|
}
|
|
|
|
switch n.Op {
|
|
default:
|
|
return false
|
|
|
|
// Discardable as long as the subpieces are.
|
|
case ONAME,
|
|
ONONAME,
|
|
OTYPE,
|
|
OPACK,
|
|
OLITERAL,
|
|
OADD,
|
|
OSUB,
|
|
OOR,
|
|
OXOR,
|
|
OADDSTR,
|
|
OADDR,
|
|
OANDAND,
|
|
OARRAYBYTESTR,
|
|
OARRAYRUNESTR,
|
|
OSTRARRAYBYTE,
|
|
OSTRARRAYRUNE,
|
|
OCAP,
|
|
OCMPIFACE,
|
|
OCMPSTR,
|
|
OCOMPLIT,
|
|
OMAPLIT,
|
|
OSTRUCTLIT,
|
|
OARRAYLIT,
|
|
OSLICELIT,
|
|
OPTRLIT,
|
|
OCONV,
|
|
OCONVIFACE,
|
|
OCONVNOP,
|
|
ODOT,
|
|
OEQ,
|
|
ONE,
|
|
OLT,
|
|
OLE,
|
|
OGT,
|
|
OGE,
|
|
OKEY,
|
|
OSTRUCTKEY,
|
|
OLEN,
|
|
OMUL,
|
|
OLSH,
|
|
ORSH,
|
|
OAND,
|
|
OANDNOT,
|
|
ONEW,
|
|
ONOT,
|
|
OCOM,
|
|
OPLUS,
|
|
OMINUS,
|
|
OOROR,
|
|
OPAREN,
|
|
ORUNESTR,
|
|
OREAL,
|
|
OIMAG,
|
|
OCOMPLEX:
|
|
break
|
|
|
|
// Discardable as long as we know it's not division by zero.
|
|
case ODIV, OMOD:
|
|
if Isconst(n.Right, CTINT) && n.Right.Val().U.(*Mpint).CmpInt64(0) != 0 {
|
|
break
|
|
}
|
|
if Isconst(n.Right, CTFLT) && n.Right.Val().U.(*Mpflt).CmpFloat64(0) != 0 {
|
|
break
|
|
}
|
|
return false
|
|
|
|
// Discardable as long as we know it won't fail because of a bad size.
|
|
case OMAKECHAN, OMAKEMAP:
|
|
if Isconst(n.Left, CTINT) && n.Left.Val().U.(*Mpint).CmpInt64(0) == 0 {
|
|
break
|
|
}
|
|
return false
|
|
|
|
// Difficult to tell what sizes are okay.
|
|
case OMAKESLICE:
|
|
return false
|
|
}
|
|
|
|
if !candiscard(n.Left) || !candiscard(n.Right) || !candiscardlist(n.Ninit) || !candiscardlist(n.Nbody) || !candiscardlist(n.List) || !candiscardlist(n.Rlist) {
|
|
return false
|
|
}
|
|
|
|
return true
|
|
}
|
|
|
|
// rewrite
|
|
// print(x, y, z)
|
|
// into
|
|
// func(a1, a2, a3) {
|
|
// print(a1, a2, a3)
|
|
// }(x, y, z)
|
|
// and same for println.
|
|
|
|
var walkprintfunc_prgen int
|
|
|
|
// The result of walkprintfunc MUST be assigned back to n, e.g.
|
|
// n.Left = walkprintfunc(n.Left, init)
|
|
func walkprintfunc(n *Node, init *Nodes) *Node {
|
|
if n.Ninit.Len() != 0 {
|
|
walkstmtlist(n.Ninit.Slice())
|
|
init.AppendNodes(&n.Ninit)
|
|
}
|
|
|
|
t := nod(OTFUNC, nil, nil)
|
|
num := 0
|
|
var printargs []*Node
|
|
var a *Node
|
|
var buf string
|
|
for _, n1 := range n.List.Slice() {
|
|
buf = fmt.Sprintf("a%d", num)
|
|
num++
|
|
a = nod(ODCLFIELD, newname(lookup(buf)), typenod(n1.Type))
|
|
t.List.Append(a)
|
|
printargs = append(printargs, a.Left)
|
|
}
|
|
|
|
fn := nod(ODCLFUNC, nil, nil)
|
|
walkprintfunc_prgen++
|
|
buf = fmt.Sprintf("print·%d", walkprintfunc_prgen)
|
|
fn.Func.Nname = newname(lookup(buf))
|
|
fn.Func.Nname.Name.Defn = fn
|
|
fn.Func.Nname.Name.Param.Ntype = t
|
|
declare(fn.Func.Nname, PFUNC)
|
|
|
|
oldfn := Curfn
|
|
Curfn = nil
|
|
funchdr(fn)
|
|
|
|
a = nod(n.Op, nil, nil)
|
|
a.List.Set(printargs)
|
|
a = typecheck(a, Etop)
|
|
a = walkstmt(a)
|
|
|
|
fn.Nbody.Set1(a)
|
|
|
|
funcbody(fn)
|
|
|
|
fn = typecheck(fn, Etop)
|
|
typecheckslice(fn.Nbody.Slice(), Etop)
|
|
xtop = append(xtop, fn)
|
|
Curfn = oldfn
|
|
|
|
a = nod(OCALL, nil, nil)
|
|
a.Left = fn.Func.Nname
|
|
a.List.Set(n.List.Slice())
|
|
a = typecheck(a, Etop)
|
|
a = walkexpr(a, init)
|
|
return a
|
|
}
|