// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"cmd/internal/obj"
	"fmt"
	"strings"
)

var mpzero Mpint

// The constant is known to runtime.
const (
	tmpstringbufsize = 32
)

func walk(fn *Node) {
	Curfn = fn

	if Debug['W'] != 0 {
		s := fmt.Sprintf("\nbefore %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Nbody)
	}

	lno := lineno

	// Final typecheck for any unused variables.
	// It's hard to be on the heap when not-used, but best to be consistent about &~PHEAP here and below.
	for i, ln := range fn.Func.Dcl {
		if ln.Op == ONAME && ln.Class&^PHEAP == PAUTO {
			typecheck(&ln, Erv|Easgn)
			fn.Func.Dcl[i] = ln
		}
	}

	// Propagate the used flag for typeswitch variables up to the NONAME in its definition.
	for _, ln := range fn.Func.Dcl {
		if ln.Op == ONAME && ln.Class&^PHEAP == PAUTO && ln.Name.Defn != nil && ln.Name.Defn.Op == OTYPESW && ln.Used {
			ln.Name.Defn.Left.Used = true
		}
	}

	// Report declared-and-not-used errors.
	for _, ln := range fn.Func.Dcl {
		if ln.Op != ONAME || ln.Class&^PHEAP != PAUTO || ln.Sym.Name[0] == '&' || ln.Used {
			continue
		}
		if defn := ln.Name.Defn; defn != nil && defn.Op == OTYPESW {
			if defn.Left.Used {
				continue
			}
			lineno = defn.Left.Lineno
			Yyerror("%v declared and not used", ln.Sym)
			defn.Left.Used = true // suppress repeats
		} else {
			lineno = ln.Lineno
			Yyerror("%v declared and not used", ln.Sym)
		}
	}

	lineno = lno
	if nerrors != 0 {
		return
	}

	walkstmtlist(Curfn.Nbody)
	if Debug['W'] != 0 {
		s := fmt.Sprintf("after walk %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Nbody)
	}

	heapmoves()
	if Debug['W'] != 0 && len(Curfn.Func.Enter.Slice()) > 0 {
		s := fmt.Sprintf("enter %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Func.Enter)
	}
}

func walkstmtlist(l nodesOrNodeList) {
	for it := nodeSeqIterate(l); !it.Done(); it.Next() {
		walkstmt(it.P())
	}
}

func walkstmtslice(l []*Node) {
	for i := range l {
		walkstmt(&l[i])
	}
}

func samelist(a *NodeList, b *NodeList) bool {
	for ; a != nil && b != nil; a, b = a.Next, b.Next {
		if a.N != b.N {
			return false
		}
	}
	return a == b
}

func paramoutheap(fn *Node) bool {
	for _, ln := range fn.Func.Dcl {
		switch ln.Class {
		case PPARAMOUT, PPARAMOUT | PHEAP:
			return ln.Addrtaken

		// stop early - parameters are over
		case PAUTO, PAUTO | PHEAP:
			return false
		}
	}

	return false
}

// adds "adjust" to all the argument locations for the call n.
// n must be a defer or go node that has already been walked.
func adjustargs(n *Node, adjust int) {
	var arg *Node
	var lhs *Node

	callfunc := n.Left
	for argsit := nodeSeqIterate(callfunc.List); !argsit.Done(); argsit.Next() {
		arg = argsit.N()
		if arg.Op != OAS {
			Yyerror("call arg not assignment")
		}
		lhs = arg.Left
		if lhs.Op == ONAME {
			// This is a temporary introduced by reorder1.
			// The real store to the stack appears later in the arg list.
			continue
		}

		if lhs.Op != OINDREG {
			Yyerror("call argument store does not use OINDREG")
		}

		// can't really check this in machine-indep code.
		//if(lhs->val.u.reg != D_SP)
		//	yyerror("call arg assign not indreg(SP)");
		lhs.Xoffset += int64(adjust)
	}
}

func walkstmt(np **Node) {
	n := *np
	if n == nil {
		return
	}
	if n.Dodata == 2 { // don't walk, generated by anylit.
return } setlineno(n) walkstmtlist(n.Ninit) switch n.Op { default: if n.Op == ONAME { Yyerror("%v is not a top level statement", n.Sym) } else { Yyerror("%v is not a top level statement", Oconv(n.Op, 0)) } Dump("nottop", n) case OAS, OASOP, OAS2, OAS2DOTTYPE, OAS2RECV, OAS2FUNC, OAS2MAPR, OCLOSE, OCOPY, OCALLMETH, OCALLINTER, OCALL, OCALLFUNC, ODELETE, OSEND, OPRINT, OPRINTN, OPANIC, OEMPTY, ORECOVER, OGETG: if n.Typecheck == 0 { Fatalf("missing typecheck: %v", Nconv(n, obj.FmtSign)) } init := n.Ninit setNodeSeq(&n.Ninit, nil) walkexpr(&n, &init) addinit(&n, init) if (*np).Op == OCOPY && n.Op == OCONVNOP { n.Op = OEMPTY // don't leave plain values as statements. } // special case for a receive where we throw away // the value received. case ORECV: if n.Typecheck == 0 { Fatalf("missing typecheck: %v", Nconv(n, obj.FmtSign)) } init := n.Ninit setNodeSeq(&n.Ninit, nil) walkexpr(&n.Left, &init) n = mkcall1(chanfn("chanrecv1", 2, n.Left.Type), nil, &init, typename(n.Left.Type), n.Left, nodnil()) walkexpr(&n, &init) addinit(&n, init) case OBREAK, ODCL, OCONTINUE, OFALL, OGOTO, OLABEL, ODCLCONST, ODCLTYPE, OCHECKNIL, OVARKILL, OVARLIVE: break case OBLOCK: walkstmtlist(n.List) case OXCASE: Yyerror("case statement out of place") n.Op = OCASE fallthrough case OCASE: walkstmt(&n.Right) case ODEFER: hasdefer = true switch n.Left.Op { case OPRINT, OPRINTN: walkprintfunc(&n.Left, &n.Ninit) case OCOPY: n.Left = copyany(n.Left, &n.Ninit, true) default: walkexpr(&n.Left, &n.Ninit) } // make room for size & fn arguments. adjustargs(n, 2*Widthptr) case OFOR: if n.Left != nil { walkstmtlist(n.Left.Ninit) init := n.Left.Ninit setNodeSeq(&n.Left.Ninit, nil) walkexpr(&n.Left, &init) addinit(&n.Left, init) } walkstmt(&n.Right) walkstmtlist(n.Nbody) case OIF: walkexpr(&n.Left, &n.Ninit) walkstmtlist(n.Nbody) walkstmtlist(n.Rlist) case OPROC: switch n.Left.Op { case OPRINT, OPRINTN: walkprintfunc(&n.Left, &n.Ninit) case OCOPY: n.Left = copyany(n.Left, &n.Ninit, true) default: walkexpr(&n.Left, &n.Ninit) } // make room for size & fn arguments. adjustargs(n, 2*Widthptr) case ORETURN: walkexprlist(n.List, &n.Ninit) if nodeSeqLen(n.List) == 0 { break } if (Curfn.Type.Outnamed && nodeSeqLen(n.List) > 1) || paramoutheap(Curfn) { // assign to the function out parameters, // so that reorder3 can fix up conflicts var rl *NodeList var cl Class for _, ln := range Curfn.Func.Dcl { cl = ln.Class &^ PHEAP if cl == PAUTO { break } if cl == PPARAMOUT { rl = list(rl, ln) } } if got, want := nodeSeqLen(n.List), nodeSeqLen(rl); got != want { // order should have rewritten multi-value function calls // with explicit OAS2FUNC nodes. Fatalf("expected %v return arguments, have %v", want, got) } if samelist(rl, n.List) { // special return in disguise setNodeSeq(&n.List, nil) break } // move function calls out, to make reorder3's job easier. 
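		// (walkexprlistsafe below pulls those calls out.) For illustration,
		// a sketch of this rewrite with invented names: in
		//	func f() (x, y int) { return g(), h() }
		// when there is more than one named result, or a result may live on
		// the heap, the return is rewritten into explicit assignments to the
		// out parameters followed by a bare return, roughly
		//	x = g(); y = h(); return
		// which is the form reorder3 then fixes up for conflicts.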
walkexprlistsafe(n.List, &n.Ninit) ll := ascompatee(n.Op, rl, n.List, &n.Ninit) setNodeSeq(&n.List, reorder3(ll)) for it := nodeSeqIterate(n.List); !it.Done(); it.Next() { *it.P() = applywritebarrier(it.N()) } break } ll := ascompatte(n.Op, nil, false, Getoutarg(Curfn.Type), n.List, 1, &n.Ninit) setNodeSeq(&n.List, ll) case ORETJMP: break case OSELECT: walkselect(n) case OSWITCH: walkswitch(n) case ORANGE: walkrange(n) case OXFALL: Yyerror("fallthrough statement out of place") n.Op = OFALL } if n.Op == ONAME { Fatalf("walkstmt ended up with name: %v", Nconv(n, obj.FmtSign)) } *np = n } func isSmallMakeSlice(n *Node) bool { if n.Op != OMAKESLICE { return false } l := n.Left r := n.Right if r == nil { r = l } t := n.Type return Smallintconst(l) && Smallintconst(r) && (t.Type.Width == 0 || Mpgetfix(r.Val().U.(*Mpint)) < (1<<16)/t.Type.Width) } // walk the whole tree of the body of an // expression or simple statement. // the types expressions are calculated. // compile-time constants are evaluated. // complex side effects like statements are appended to init func walkexprlist(l nodesOrNodeList, init nodesOrNodeListPtr) { for it := nodeSeqIterate(l); !it.Done(); it.Next() { walkexpr(it.P(), init) } } func walkexprlistsafe(l nodesOrNodeList, init nodesOrNodeListPtr) { for it := nodeSeqIterate(l); !it.Done(); it.Next() { *it.P() = safeexpr(it.N(), init) walkexpr(it.P(), init) } } func walkexprlistcheap(l nodesOrNodeList, init nodesOrNodeListPtr) { for it := nodeSeqIterate(l); !it.Done(); it.Next() { *it.P() = cheapexpr(it.N(), init) walkexpr(it.P(), init) } } // Build name of function: convI2E etc. // Not all names are possible // (e.g., we'll never generate convE2E or convE2I). func convFuncName(from, to *Type) string { tkind := to.iet() switch from.iet() { case 'I': switch tkind { case 'E': return "convI2E" case 'I': return "convI2I" } case 'T': switch tkind { case 'E': return "convT2E" case 'I': return "convT2I" } } Fatalf("unknown conv func %c2%c", from.iet(), to.iet()) panic("unreachable") } // Build name of function: assertI2E etc. // If with2suffix is true, the form ending in "2" is returned". func assertFuncName(from, to *Type, with2suffix bool) string { l := len("assertX2X2") if !with2suffix { l-- } tkind := to.iet() switch from.iet() { case 'E': switch tkind { case 'I': return "assertE2I2"[:l] case 'E': return "assertE2E2"[:l] case 'T': return "assertE2T2"[:l] } case 'I': switch tkind { case 'I': return "assertI2I2"[:l] case 'E': return "assertI2E2"[:l] case 'T': return "assertI2T2"[:l] } } Fatalf("unknown assert func %c2%c", from.iet(), to.iet()) panic("unreachable") } func walkexpr(np **Node, init nodesOrNodeListPtr) { n := *np if n == nil { return } if init == &n.Ninit { // not okay to use n->ninit when walking n, // because we might replace n with some other node // and would lose the init list. 
Fatalf("walkexpr init == &n->ninit") } if nodeSeqLen(n.Ninit) != 0 { walkstmtlist(n.Ninit) appendNodeSeq(init, n.Ninit) setNodeSeq(&n.Ninit, nil) } // annoying case - not typechecked if n.Op == OKEY { walkexpr(&n.Left, init) walkexpr(&n.Right, init) return } lno := setlineno(n) if Debug['w'] > 1 { Dump("walk-before", n) } if n.Typecheck != 1 { Fatalf("missed typecheck: %v\n", Nconv(n, obj.FmtSign)) } opswitch: switch n.Op { default: Dump("walk", n) Fatalf("walkexpr: switch 1 unknown op %v", Nconv(n, obj.FmtShort|obj.FmtSign)) case OTYPE, ONONAME, OINDREG, OEMPTY, OPARAM, OGETG: case ONOT, OMINUS, OPLUS, OCOM, OREAL, OIMAG, ODOTMETH, ODOTINTER: walkexpr(&n.Left, init) case OIND: walkexpr(&n.Left, init) case ODOT: usefield(n) walkexpr(&n.Left, init) case ODOTPTR: usefield(n) if n.Op == ODOTPTR && n.Left.Type.Type.Width == 0 { // No actual copy will be generated, so emit an explicit nil check. n.Left = cheapexpr(n.Left, init) checknil(n.Left, init) } walkexpr(&n.Left, init) case OEFACE: walkexpr(&n.Left, init) walkexpr(&n.Right, init) case OSPTR, OITAB: walkexpr(&n.Left, init) case OLEN, OCAP: walkexpr(&n.Left, init) // replace len(*[10]int) with 10. // delayed until now to preserve side effects. t := n.Left.Type if Isptr[t.Etype] { t = t.Type } if Isfixedarray(t) { safeexpr(n.Left, init) Nodconst(n, n.Type, t.Bound) n.Typecheck = 1 } case OLSH, ORSH: walkexpr(&n.Left, init) walkexpr(&n.Right, init) t := n.Left.Type n.Bounded = bounded(n.Right, 8*t.Width) if Debug['m'] != 0 && n.Etype != 0 && !Isconst(n.Right, CTINT) { Warn("shift bounds check elided") } // Use results from call expression as arguments for complex. case OAND, OSUB, OHMUL, OLT, OLE, OGE, OGT, OADD, OCOMPLEX, OLROT: if n.Op == OCOMPLEX && n.Left == nil && n.Right == nil { n.Left = nodeSeqFirst(n.List) n.Right = nodeSeqSecond(n.List) } walkexpr(&n.Left, init) walkexpr(&n.Right, init) case OOR, OXOR: walkexpr(&n.Left, init) walkexpr(&n.Right, init) walkrotate(&n) case OEQ, ONE: walkexpr(&n.Left, init) walkexpr(&n.Right, init) // Disable safemode while compiling this code: the code we // generate internally can refer to unsafe.Pointer. // In this case it can happen if we need to generate an == // for a struct containing a reflect.Value, which itself has // an unexported field of type unsafe.Pointer. old_safemode := safemode safemode = 0 walkcompare(&n, init) safemode = old_safemode case OANDAND, OOROR: walkexpr(&n.Left, init) // cannot put side effects from n.Right on init, // because they cannot run before n.Left is checked. // save elsewhere and store on the eventual n.Right. var ll *NodeList walkexpr(&n.Right, &ll) addinit(&n.Right, ll) case OPRINT, OPRINTN: walkexprlist(n.List, init) n = walkprint(n, init) case OPANIC: n = mkcall("gopanic", nil, init, n.Left) case ORECOVER: n = mkcall("gorecover", n.Type, init, Nod(OADDR, nodfp, nil)) case OLITERAL: n.Addable = true case OCLOSUREVAR, OCFUNC: n.Addable = true case ONAME: if n.Class&PHEAP == 0 && n.Class != PPARAMREF { n.Addable = true } case OCALLINTER: t := n.Left.Type if nodeSeqLen(n.List) != 0 && nodeSeqFirst(n.List).Op == OAS { break } walkexpr(&n.Left, init) walkexprlist(n.List, init) ll := ascompatte(n.Op, n, n.Isddd, getinarg(t), n.List, 0, init) setNodeSeq(&n.List, reorder1(ll)) case OCALLFUNC: if n.Left.Op == OCLOSURE { // Transform direct call of a closure to call of a normal function. // transformclosure already did all preparation work. // Prepend captured variables to argument list. 
setNodeSeq(&n.List, concat(n.Left.Func.Enter.NodeList(), n.List)) n.Left.Func.Enter.Set(nil) // Replace OCLOSURE with ONAME/PFUNC. n.Left = n.Left.Func.Closure.Func.Nname // Update type of OCALLFUNC node. // Output arguments had not changed, but their offsets could. if n.Left.Type.Outtuple == 1 { t := getoutargx(n.Left.Type).Type if t.Etype == TFIELD { t = t.Type } n.Type = t } else { n.Type = getoutargx(n.Left.Type) } } t := n.Left.Type if nodeSeqLen(n.List) != 0 && nodeSeqFirst(n.List).Op == OAS { break } walkexpr(&n.Left, init) walkexprlist(n.List, init) if n.Left.Op == ONAME && n.Left.Sym.Name == "Sqrt" && n.Left.Sym.Pkg.Path == "math" { switch Thearch.Thechar { case '5', '6', '7': n.Op = OSQRT n.Left = nodeSeqFirst(n.List) setNodeSeq(&n.List, nil) break opswitch } } ll := ascompatte(n.Op, n, n.Isddd, getinarg(t), n.List, 0, init) setNodeSeq(&n.List, reorder1(ll)) case OCALLMETH: t := n.Left.Type if nodeSeqLen(n.List) != 0 && nodeSeqFirst(n.List).Op == OAS { break } walkexpr(&n.Left, init) walkexprlist(n.List, init) ll := ascompatte(n.Op, n, false, getthis(t), list1(n.Left.Left), 0, init) lr := ascompatte(n.Op, n, n.Isddd, getinarg(t), n.List, 0, init) ll = concat(ll, lr) n.Left.Left = nil ullmancalc(n.Left) setNodeSeq(&n.List, reorder1(ll)) case OAS: appendNodeSeq(init, n.Ninit) setNodeSeq(&n.Ninit, nil) walkexpr(&n.Left, init) n.Left = safeexpr(n.Left, init) if oaslit(n, init) { break } if n.Right == nil || iszero(n.Right) && !instrumenting { break } switch n.Right.Op { default: walkexpr(&n.Right, init) case ODOTTYPE: // TODO(rsc): The Isfat is for consistency with componentgen and orderexpr. // It needs to be removed in all three places. // That would allow inlining x.(struct{*int}) the same as x.(*int). if isdirectiface(n.Right.Type) && !Isfat(n.Right.Type) && !instrumenting { // handled directly during cgen walkexpr(&n.Right, init) break } // x = i.(T); n.Left is x, n.Right.Left is i. // orderstmt made sure x is addressable. walkexpr(&n.Right.Left, init) n1 := Nod(OADDR, n.Left, nil) r := n.Right // i.(T) if Debug_typeassert > 0 { Warn("type assertion not inlined") } fn := syslook(assertFuncName(r.Left.Type, r.Type, false)) substArgTypes(&fn, r.Left.Type, r.Type) n = mkcall1(fn, nil, init, typename(r.Type), r.Left, n1) walkexpr(&n, init) break opswitch case ORECV: // x = <-c; n.Left is x, n.Right.Left is c. // orderstmt made sure x is addressable. walkexpr(&n.Right.Left, init) n1 := Nod(OADDR, n.Left, nil) r := n.Right.Left // the channel n = mkcall1(chanfn("chanrecv1", 2, r.Type), nil, init, typename(r.Type), r, n1) walkexpr(&n, init) break opswitch case OAPPEND: // x = append(...) r := n.Right if r.Isddd { r = appendslice(r, init) // also works for append(slice, string). } else { r = walkappend(r, init, n) } n.Right = r if r.Op == OAPPEND { // Left in place for back end. // Do not add a new write barrier. break opswitch } // Otherwise, lowered for race detector. // Treat as ordinary assignment. } if n.Left != nil && n.Right != nil { r := convas(Nod(OAS, n.Left, n.Right), init) r.Dodata = n.Dodata n = r n = applywritebarrier(n) } case OAS2: appendNodeSeq(init, n.Ninit) setNodeSeq(&n.Ninit, nil) walkexprlistsafe(n.List, init) walkexprlistsafe(n.Rlist, init) ll := ascompatee(OAS, n.List, n.Rlist, init) ll = reorder3(ll) for lr := ll; lr != nil; lr = lr.Next { lr.N = applywritebarrier(lr.N) } n = liststmt(ll) // a,b,... 
= fn() case OAS2FUNC: appendNodeSeq(init, n.Ninit) setNodeSeq(&n.Ninit, nil) r := nodeSeqFirst(n.Rlist) walkexprlistsafe(n.List, init) walkexpr(&r, init) ll := ascompatet(n.Op, n.List, &r.Type, 0, init) for lr := ll; lr != nil; lr = lr.Next { lr.N = applywritebarrier(lr.N) } n = liststmt(concat(list1(r), ll)) // x, y = <-c // orderstmt made sure x is addressable. case OAS2RECV: appendNodeSeq(init, n.Ninit) setNodeSeq(&n.Ninit, nil) r := nodeSeqFirst(n.Rlist) walkexprlistsafe(n.List, init) walkexpr(&r.Left, init) var n1 *Node if isblank(nodeSeqFirst(n.List)) { n1 = nodnil() } else { n1 = Nod(OADDR, nodeSeqFirst(n.List), nil) } n1.Etype = 1 // addr does not escape fn := chanfn("chanrecv2", 2, r.Left.Type) r = mkcall1(fn, nodeSeqSecond(n.List).Type, init, typename(r.Left.Type), r.Left, n1) n = Nod(OAS, nodeSeqSecond(n.List), r) typecheck(&n, Etop) // a,b = m[i]; case OAS2MAPR: appendNodeSeq(init, n.Ninit) setNodeSeq(&n.Ninit, nil) r := nodeSeqFirst(n.Rlist) walkexprlistsafe(n.List, init) walkexpr(&r.Left, init) walkexpr(&r.Right, init) t := r.Left.Type p := "" if t.Type.Width <= 128 { // Check ../../runtime/hashmap.go:maxValueSize before changing. switch algtype(t.Down) { case AMEM32: p = "mapaccess2_fast32" case AMEM64: p = "mapaccess2_fast64" case ASTRING: p = "mapaccess2_faststr" } } var key *Node if p != "" { // fast versions take key by value key = r.Right } else { // standard version takes key by reference // orderexpr made sure key is addressable. key = Nod(OADDR, r.Right, nil) p = "mapaccess2" } // from: // a,b = m[i] // to: // var,b = mapaccess2*(t, m, i) // a = *var a := nodeSeqFirst(n.List) fn := mapfn(p, t) r = mkcall1(fn, getoutargx(fn.Type), init, typename(t), r.Left, key) // mapaccess2* returns a typed bool, but due to spec changes, // the boolean result of i.(T) is now untyped so we make it the // same type as the variable on the lhs. if !isblank(nodeSeqSecond(n.List)) { r.Type.Type.Down.Type = nodeSeqSecond(n.List).Type } setNodeSeq(&n.Rlist, list1(r)) n.Op = OAS2FUNC // don't generate a = *var if a is _ if !isblank(a) { var_ := temp(Ptrto(t.Type)) var_.Typecheck = 1 it := nodeSeqIterate(n.List) *it.P() = var_ walkexpr(&n, init) appendNodeSeqNode(init, n) n = Nod(OAS, a, Nod(OIND, var_, nil)) } typecheck(&n, Etop) walkexpr(&n, init) // TODO: ptr is always non-nil, so disable nil check for this OIND op. case ODELETE: appendNodeSeq(init, n.Ninit) setNodeSeq(&n.Ninit, nil) map_ := nodeSeqFirst(n.List) key := nodeSeqSecond(n.List) walkexpr(&map_, init) walkexpr(&key, init) // orderstmt made sure key is addressable. key = Nod(OADDR, key, nil) t := map_.Type n = mkcall1(mapfndel("mapdelete", t), nil, init, typename(t), map_, key) case OAS2DOTTYPE: e := nodeSeqFirst(n.Rlist) // i.(T) // TODO(rsc): The Isfat is for consistency with componentgen and orderexpr. // It needs to be removed in all three places. // That would allow inlining x.(struct{*int}) the same as x.(*int). if isdirectiface(e.Type) && !Isfat(e.Type) && !instrumenting { // handled directly during gen. walkexprlistsafe(n.List, init) walkexpr(&e.Left, init) break } // res, ok = i.(T) // orderstmt made sure a is addressable. appendNodeSeq(init, n.Ninit) setNodeSeq(&n.Ninit, nil) walkexprlistsafe(n.List, init) walkexpr(&e.Left, init) t := e.Type // T from := e.Left // i oktype := Types[TBOOL] ok := nodeSeqSecond(n.List) if !isblank(ok) { oktype = ok.Type } fromKind := from.Type.iet() toKind := t.iet() // Avoid runtime calls in a few cases of the form _, ok := i.(T). 
// This is faster and shorter and allows the corresponding assertX2X2 // routines to skip nil checks on their last argument. if isblank(nodeSeqFirst(n.List)) { var fast *Node switch { case fromKind == 'E' && toKind == 'T': tab := Nod(OITAB, from, nil) // type:eface::tab:iface typ := Nod(OCONVNOP, typename(t), nil) typ.Type = Ptrto(Types[TUINTPTR]) fast = Nod(OEQ, tab, typ) case fromKind == 'I' && toKind == 'E', fromKind == 'E' && toKind == 'E': tab := Nod(OITAB, from, nil) fast = Nod(ONE, nodnil(), tab) } if fast != nil { if Debug_typeassert > 0 { Warn("type assertion (ok only) inlined") } n = Nod(OAS, ok, fast) typecheck(&n, Etop) break } } var resptr *Node // &res if isblank(nodeSeqFirst(n.List)) { resptr = nodnil() } else { resptr = Nod(OADDR, nodeSeqFirst(n.List), nil) } resptr.Etype = 1 // addr does not escape if Debug_typeassert > 0 { Warn("type assertion not inlined") } fn := syslook(assertFuncName(from.Type, t, true)) substArgTypes(&fn, from.Type, t) call := mkcall1(fn, oktype, init, typename(t), from, resptr) n = Nod(OAS, ok, call) typecheck(&n, Etop) case ODOTTYPE, ODOTTYPE2: if !isdirectiface(n.Type) || Isfat(n.Type) { Fatalf("walkexpr ODOTTYPE") // should see inside OAS only } walkexpr(&n.Left, init) case OCONVIFACE: walkexpr(&n.Left, init) // Optimize convT2E as a two-word copy when T is pointer-shaped. if isnilinter(n.Type) && isdirectiface(n.Left.Type) { l := Nod(OEFACE, typename(n.Left.Type), n.Left) l.Type = n.Type l.Typecheck = n.Typecheck n = l break } var ll *NodeList if !Isinter(n.Left.Type) { ll = list(ll, typename(n.Left.Type)) } if !isnilinter(n.Type) { ll = list(ll, typename(n.Type)) } if !Isinter(n.Left.Type) && !isnilinter(n.Type) { sym := Pkglookup(Tconv(n.Left.Type, obj.FmtLeft)+"."+Tconv(n.Type, obj.FmtLeft), itabpkg) if sym.Def == nil { l := Nod(ONAME, nil, nil) l.Sym = sym l.Type = Ptrto(Types[TUINT8]) l.Addable = true l.Class = PEXTERN l.Xoffset = 0 sym.Def = l ggloblsym(sym, int32(Widthptr), obj.DUPOK|obj.NOPTR) } l := Nod(OADDR, sym.Def, nil) l.Addable = true ll = list(ll, l) if isdirectiface(n.Left.Type) { // For pointer types, we can make a special form of optimization // // These statements are put onto the expression init list: // Itab *tab = atomicloadtype(&cache); // if(tab == nil) // tab = typ2Itab(type, itype, &cache); // // The CONVIFACE expression is replaced with this: // OEFACE{tab, ptr}; l := temp(Ptrto(Types[TUINT8])) n1 := Nod(OAS, l, sym.Def) typecheck(&n1, Etop) appendNodeSeqNode(init, n1) fn := syslook("typ2Itab") n1 = Nod(OCALL, fn, nil) setNodeSeq(&n1.List, ll) typecheck(&n1, Erv) walkexpr(&n1, init) n2 := Nod(OIF, nil, nil) n2.Left = Nod(OEQ, l, nodnil()) n2.Nbody.Set([]*Node{Nod(OAS, l, n1)}) n2.Likely = -1 typecheck(&n2, Etop) appendNodeSeqNode(init, n2) l = Nod(OEFACE, l, n.Left) l.Typecheck = n.Typecheck l.Type = n.Type n = l break } } if Isinter(n.Left.Type) { ll = list(ll, n.Left) } else { // regular types are passed by reference to avoid C vararg calls // orderexpr arranged for n.Left to be a temporary for all // the conversions it could see. comparison of an interface // with a non-interface, especially in a switch on interface value // with non-interface cases, is not visible to orderstmt, so we // have to fall back on allocating a temp here. if islvalue(n.Left) { ll = list(ll, Nod(OADDR, n.Left, nil)) } else { ll = list(ll, Nod(OADDR, copyexpr(n.Left, n.Left.Type, init), nil)) } dowidth(n.Left.Type) r := nodnil() if n.Esc == EscNone && n.Left.Type.Width <= 1024 { // Allocate stack buffer for value stored in interface. 
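			// Sketch of the resulting call for a non-escaping concrete value v
			// of type T (names invented): the conversion
			//	var e interface{} = v
			// lowers to roughly
			//	var buf T                      // zeroed stack temporary
			//	e = convT2E(type·T, &v, &buf)
			// so the runtime can store v into buf rather than allocating.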
r = temp(n.Left.Type) r = Nod(OAS, r, nil) // zero temp typecheck(&r, Etop) appendNodeSeqNode(init, r) r = Nod(OADDR, r.Left, nil) typecheck(&r, Erv) } ll = list(ll, r) } fn := syslook(convFuncName(n.Left.Type, n.Type)) if !Isinter(n.Left.Type) { substArgTypes(&fn, n.Left.Type, n.Left.Type, n.Type) } else { substArgTypes(&fn, n.Left.Type, n.Type) } dowidth(fn.Type) n = Nod(OCALL, fn, nil) setNodeSeq(&n.List, ll) typecheck(&n, Erv) walkexpr(&n, init) case OCONV, OCONVNOP: if Thearch.Thechar == '5' { if Isfloat[n.Left.Type.Etype] { if n.Type.Etype == TINT64 { n = mkcall("float64toint64", n.Type, init, conv(n.Left, Types[TFLOAT64])) break } if n.Type.Etype == TUINT64 { n = mkcall("float64touint64", n.Type, init, conv(n.Left, Types[TFLOAT64])) break } } if Isfloat[n.Type.Etype] { if n.Left.Type.Etype == TINT64 { n = mkcall("int64tofloat64", n.Type, init, conv(n.Left, Types[TINT64])) break } if n.Left.Type.Etype == TUINT64 { n = mkcall("uint64tofloat64", n.Type, init, conv(n.Left, Types[TUINT64])) break } } } walkexpr(&n.Left, init) case OANDNOT: walkexpr(&n.Left, init) n.Op = OAND n.Right = Nod(OCOM, n.Right, nil) typecheck(&n.Right, Erv) walkexpr(&n.Right, init) case OMUL: walkexpr(&n.Left, init) walkexpr(&n.Right, init) walkmul(&n, init) case ODIV, OMOD: walkexpr(&n.Left, init) walkexpr(&n.Right, init) // rewrite complex div into function call. et := n.Left.Type.Etype if Iscomplex[et] && n.Op == ODIV { t := n.Type n = mkcall("complex128div", Types[TCOMPLEX128], init, conv(n.Left, Types[TCOMPLEX128]), conv(n.Right, Types[TCOMPLEX128])) n = conv(n, t) break } // Nothing to do for float divisions. if Isfloat[et] { break } // Try rewriting as shifts or magic multiplies. walkdiv(&n, init) // rewrite 64-bit div and mod into function calls // on 32-bit architectures. switch n.Op { case OMOD, ODIV: if Widthreg >= 8 || (et != TUINT64 && et != TINT64) { break opswitch } var fn string if et == TINT64 { fn = "int64" } else { fn = "uint64" } if n.Op == ODIV { fn += "div" } else { fn += "mod" } n = mkcall(fn, n.Type, init, conv(n.Left, Types[et]), conv(n.Right, Types[et])) } case OINDEX: walkexpr(&n.Left, init) // save the original node for bounds checking elision. // If it was a ODIV/OMOD walk might rewrite it. r := n.Right walkexpr(&n.Right, init) // if range of type cannot exceed static array bound, // disable bounds check. if n.Bounded { break } t := n.Left.Type if t != nil && Isptr[t.Etype] { t = t.Type } if Isfixedarray(t) { n.Bounded = bounded(r, t.Bound) if Debug['m'] != 0 && n.Bounded && !Isconst(n.Right, CTINT) { Warn("index bounds check elided") } if Smallintconst(n.Right) && !n.Bounded { Yyerror("index out of bounds") } } else if Isconst(n.Left, CTSTR) { n.Bounded = bounded(r, int64(len(n.Left.Val().U.(string)))) if Debug['m'] != 0 && n.Bounded && !Isconst(n.Right, CTINT) { Warn("index bounds check elided") } if Smallintconst(n.Right) { if !n.Bounded { Yyerror("index out of bounds") } else { // replace "abc"[1] with 'b'. // delayed until now because "abc"[1] is not // an ideal constant. v := Mpgetfix(n.Right.Val().U.(*Mpint)) Nodconst(n, n.Type, int64(n.Left.Val().U.(string)[v])) n.Typecheck = 1 } } } if Isconst(n.Right, CTINT) { if Mpcmpfixfix(n.Right.Val().U.(*Mpint), &mpzero) < 0 || Mpcmpfixfix(n.Right.Val().U.(*Mpint), Maxintval[TINT]) > 0 { Yyerror("index out of bounds") } } case OINDEXMAP: if n.Etype == 1 { break } walkexpr(&n.Left, init) walkexpr(&n.Right, init) t := n.Left.Type p := "" if t.Type.Width <= 128 { // Check ../../runtime/hashmap.go:maxValueSize before changing. 
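		// Overall shape of this lowering (sketch): reading m[k] becomes
		//	*mapaccess1*(maptype, m, &k)
		// with the _fast32/_fast64/_faststr variants chosen below taking k by
		// value; a missing key yields a pointer to a zeroed value.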
switch algtype(t.Down) { case AMEM32: p = "mapaccess1_fast32" case AMEM64: p = "mapaccess1_fast64" case ASTRING: p = "mapaccess1_faststr" } } var key *Node if p != "" { // fast versions take key by value key = n.Right } else { // standard version takes key by reference. // orderexpr made sure key is addressable. key = Nod(OADDR, n.Right, nil) p = "mapaccess1" } n = mkcall1(mapfn(p, t), Ptrto(t.Type), init, typename(t), n.Left, key) n = Nod(OIND, n, nil) n.Type = t.Type n.Typecheck = 1 case ORECV: Fatalf("walkexpr ORECV") // should see inside OAS only case OSLICE, OSLICEARR, OSLICESTR: walkexpr(&n.Left, init) walkexpr(&n.Right.Left, init) if n.Right.Left != nil && iszero(n.Right.Left) { // Reduce x[0:j] to x[:j]. n.Right.Left = nil } walkexpr(&n.Right.Right, init) n = reduceSlice(n) case OSLICE3, OSLICE3ARR: walkexpr(&n.Left, init) walkexpr(&n.Right.Left, init) if n.Right.Left != nil && iszero(n.Right.Left) { // Reduce x[0:j:k] to x[:j:k]. n.Right.Left = nil } walkexpr(&n.Right.Right.Left, init) walkexpr(&n.Right.Right.Right, init) r := n.Right.Right.Right if r != nil && r.Op == OCAP && samesafeexpr(n.Left, r.Left) { // Reduce x[i:j:cap(x)] to x[i:j]. n.Right.Right = n.Right.Right.Left if n.Op == OSLICE3 { n.Op = OSLICE } else { n.Op = OSLICEARR } n = reduceSlice(n) } case OADDR: walkexpr(&n.Left, init) case ONEW: if n.Esc == EscNone { if n.Type.Type.Width >= 1<<16 { Fatalf("large ONEW with EscNone: %v", n) } r := temp(n.Type.Type) r = Nod(OAS, r, nil) // zero temp typecheck(&r, Etop) appendNodeSeqNode(init, r) r = Nod(OADDR, r.Left, nil) typecheck(&r, Erv) n = r } else { n = callnew(n.Type.Type) } // If one argument to the comparison is an empty string, // comparing the lengths instead will yield the same result // without the function call. case OCMPSTR: if (Isconst(n.Left, CTSTR) && len(n.Left.Val().U.(string)) == 0) || (Isconst(n.Right, CTSTR) && len(n.Right.Val().U.(string)) == 0) { // TODO(marvin): Fix Node.EType type union. r := Nod(Op(n.Etype), Nod(OLEN, n.Left, nil), Nod(OLEN, n.Right, nil)) typecheck(&r, Erv) walkexpr(&r, init) r.Type = n.Type n = r break } // s + "badgerbadgerbadger" == "badgerbadgerbadger" if (Op(n.Etype) == OEQ || Op(n.Etype) == ONE) && Isconst(n.Right, CTSTR) && n.Left.Op == OADDSTR && nodeSeqLen(n.Left.List) == 2 && Isconst(nodeSeqSecond(n.Left.List), CTSTR) && strlit(n.Right) == strlit(nodeSeqSecond(n.Left.List)) { // TODO(marvin): Fix Node.EType type union. r := Nod(Op(n.Etype), Nod(OLEN, nodeSeqFirst(n.Left.List), nil), Nodintconst(0)) typecheck(&r, Erv) walkexpr(&r, init) r.Type = n.Type n = r break } var r *Node // TODO(marvin): Fix Node.EType type union. if Op(n.Etype) == OEQ || Op(n.Etype) == ONE { // prepare for rewrite below n.Left = cheapexpr(n.Left, init) n.Right = cheapexpr(n.Right, init) r = mkcall("eqstring", Types[TBOOL], init, conv(n.Left, Types[TSTRING]), conv(n.Right, Types[TSTRING])) // quick check of len before full compare for == or != // eqstring assumes that the lengths are equal // TODO(marvin): Fix Node.EType type union. 
if Op(n.Etype) == OEQ { // len(left) == len(right) && eqstring(left, right) r = Nod(OANDAND, Nod(OEQ, Nod(OLEN, n.Left, nil), Nod(OLEN, n.Right, nil)), r) } else { // len(left) != len(right) || !eqstring(left, right) r = Nod(ONOT, r, nil) r = Nod(OOROR, Nod(ONE, Nod(OLEN, n.Left, nil), Nod(OLEN, n.Right, nil)), r) } typecheck(&r, Erv) walkexpr(&r, nil) } else { // sys_cmpstring(s1, s2) :: 0 r = mkcall("cmpstring", Types[TINT], init, conv(n.Left, Types[TSTRING]), conv(n.Right, Types[TSTRING])) // TODO(marvin): Fix Node.EType type union. r = Nod(Op(n.Etype), r, Nodintconst(0)) } typecheck(&r, Erv) if n.Type.Etype != TBOOL { Fatalf("cmp %v", n.Type) } r.Type = n.Type n = r case OADDSTR: n = addstr(n, init) case OAPPEND: // order should make sure we only see OAS(node, OAPPEND), which we handle above. Fatalf("append outside assignment") case OCOPY: n = copyany(n, init, instrumenting) // cannot use chanfn - closechan takes any, not chan any case OCLOSE: fn := syslook("closechan") substArgTypes(&fn, n.Left.Type) n = mkcall1(fn, nil, init, n.Left) case OMAKECHAN: n = mkcall1(chanfn("makechan", 1, n.Type), n.Type, init, typename(n.Type), conv(n.Left, Types[TINT64])) case OMAKEMAP: t := n.Type a := nodnil() // hmap buffer r := nodnil() // bucket buffer if n.Esc == EscNone { // Allocate hmap buffer on stack. var_ := temp(hmap(t)) a = Nod(OAS, var_, nil) // zero temp typecheck(&a, Etop) appendNodeSeqNode(init, a) a = Nod(OADDR, var_, nil) // Allocate one bucket on stack. // Maximum key/value size is 128 bytes, larger objects // are stored with an indirection. So max bucket size is 2048+eps. var_ = temp(mapbucket(t)) r = Nod(OAS, var_, nil) // zero temp typecheck(&r, Etop) appendNodeSeqNode(init, r) r = Nod(OADDR, var_, nil) } fn := syslook("makemap") substArgTypes(&fn, hmap(t), mapbucket(t), t.Down, t.Type) n = mkcall1(fn, n.Type, init, typename(n.Type), conv(n.Left, Types[TINT64]), a, r) case OMAKESLICE: l := n.Left r := n.Right if r == nil { r = safeexpr(l, init) l = r } t := n.Type if n.Esc == EscNone { if !isSmallMakeSlice(n) { Fatalf("non-small OMAKESLICE with EscNone: %v", n) } // var arr [r]T // n = arr[:l] t = aindex(r, t.Type) // [r]T var_ := temp(t) a := Nod(OAS, var_, nil) // zero temp typecheck(&a, Etop) appendNodeSeqNode(init, a) r := Nod(OSLICE, var_, Nod(OKEY, nil, l)) // arr[:l] r = conv(r, n.Type) // in case n.Type is named. typecheck(&r, Erv) walkexpr(&r, init) n = r } else { // makeslice(t *Type, nel int64, max int64) (ary []any) fn := syslook("makeslice") substArgTypes(&fn, t.Type) // any-1 n = mkcall1(fn, n.Type, init, typename(n.Type), conv(l, Types[TINT64]), conv(r, Types[TINT64])) } case ORUNESTR: a := nodnil() if n.Esc == EscNone { t := aindex(Nodintconst(4), Types[TUINT8]) var_ := temp(t) a = Nod(OADDR, var_, nil) } // intstring(*[4]byte, rune) n = mkcall("intstring", n.Type, init, a, conv(n.Left, Types[TINT64])) case OARRAYBYTESTR: a := nodnil() if n.Esc == EscNone { // Create temporary buffer for string on stack. t := aindex(Nodintconst(tmpstringbufsize), Types[TUINT8]) a = Nod(OADDR, temp(t), nil) } // slicebytetostring(*[32]byte, []byte) string; n = mkcall("slicebytetostring", n.Type, init, a, n.Left) // slicebytetostringtmp([]byte) string; case OARRAYBYTESTRTMP: n = mkcall("slicebytetostringtmp", n.Type, init, n.Left) // slicerunetostring(*[32]byte, []rune) string; case OARRAYRUNESTR: a := nodnil() if n.Esc == EscNone { // Create temporary buffer for string on stack. 
t := aindex(Nodintconst(tmpstringbufsize), Types[TUINT8]) a = Nod(OADDR, temp(t), nil) } n = mkcall("slicerunetostring", n.Type, init, a, n.Left) // stringtoslicebyte(*32[byte], string) []byte; case OSTRARRAYBYTE: a := nodnil() if n.Esc == EscNone { // Create temporary buffer for slice on stack. t := aindex(Nodintconst(tmpstringbufsize), Types[TUINT8]) a = Nod(OADDR, temp(t), nil) } n = mkcall("stringtoslicebyte", n.Type, init, a, conv(n.Left, Types[TSTRING])) // stringtoslicebytetmp(string) []byte; case OSTRARRAYBYTETMP: n = mkcall("stringtoslicebytetmp", n.Type, init, conv(n.Left, Types[TSTRING])) // stringtoslicerune(*[32]rune, string) []rune case OSTRARRAYRUNE: a := nodnil() if n.Esc == EscNone { // Create temporary buffer for slice on stack. t := aindex(Nodintconst(tmpstringbufsize), Types[TINT32]) a = Nod(OADDR, temp(t), nil) } n = mkcall("stringtoslicerune", n.Type, init, a, n.Left) // ifaceeq(i1 any-1, i2 any-2) (ret bool); case OCMPIFACE: if !Eqtype(n.Left.Type, n.Right.Type) { Fatalf("ifaceeq %v %v %v", Oconv(n.Op, 0), n.Left.Type, n.Right.Type) } var fn *Node if isnilinter(n.Left.Type) { fn = syslook("efaceeq") } else { fn = syslook("ifaceeq") } n.Right = cheapexpr(n.Right, init) n.Left = cheapexpr(n.Left, init) substArgTypes(&fn, n.Right.Type, n.Left.Type) r := mkcall1(fn, n.Type, init, n.Left, n.Right) // TODO(marvin): Fix Node.EType type union. if Op(n.Etype) == ONE { r = Nod(ONOT, r, nil) } // check itable/type before full compare. // TODO(marvin): Fix Node.EType type union. if Op(n.Etype) == OEQ { r = Nod(OANDAND, Nod(OEQ, Nod(OITAB, n.Left, nil), Nod(OITAB, n.Right, nil)), r) } else { r = Nod(OOROR, Nod(ONE, Nod(OITAB, n.Left, nil), Nod(OITAB, n.Right, nil)), r) } typecheck(&r, Erv) walkexpr(&r, init) r.Type = n.Type n = r case OARRAYLIT, OMAPLIT, OSTRUCTLIT, OPTRLIT: var_ := temp(n.Type) anylit(0, n, var_, init) n = var_ case OSEND: n1 := n.Right n1 = assignconv(n1, n.Left.Type.Type, "chan send") walkexpr(&n1, init) n1 = Nod(OADDR, n1, nil) n = mkcall1(chanfn("chansend1", 2, n.Left.Type), nil, init, typename(n.Left.Type), n.Left, n1) case OCLOSURE: n = walkclosure(n, init) case OCALLPART: n = walkpartialcall(n, init) } // Expressions that are constant at run time but not // considered const by the language spec are not turned into // constants until walk. For example, if n is y%1 == 0, the // walk of y%1 may have replaced it by 0. // Check whether n with its updated args is itself now a constant. t := n.Type evconst(n) n.Type = t if n.Op == OLITERAL { typecheck(&n, Erv) } ullmancalc(n) if Debug['w'] != 0 && n != nil { Dump("walk", n) } lineno = lno *np = n } func reduceSlice(n *Node) *Node { r := n.Right.Right if r != nil && r.Op == OLEN && samesafeexpr(n.Left, r.Left) { // Reduce x[i:len(x)] to x[i:]. n.Right.Right = nil } if (n.Op == OSLICE || n.Op == OSLICESTR) && n.Right.Left == nil && n.Right.Right == nil { // Reduce x[:] to x. if Debug_slice > 0 { Warn("slice: omit slice operation") } return n.Left } return n } func ascompatee1(op Op, l *Node, r *Node, init nodesOrNodeListPtr) *Node { // convas will turn map assigns into function calls, // making it impossible for reorder3 to work. n := Nod(OAS, l, r) if l.Op == OINDEXMAP { return n } return convas(n, init) } func ascompatee(op Op, nl *NodeList, nr *NodeList, init nodesOrNodeListPtr) *NodeList { // check assign expression list to // a expression list. 
called in // expr-list = expr-list // ensure order of evaluation for function calls for ll := nl; ll != nil; ll = ll.Next { ll.N = safeexpr(ll.N, init) } for lr := nr; lr != nil; lr = lr.Next { lr.N = safeexpr(lr.N, init) } var nn *NodeList ll := nl lr := nr for ; ll != nil && lr != nil; ll, lr = ll.Next, lr.Next { // Do not generate 'x = x' during return. See issue 4014. if op == ORETURN && ll.N == lr.N { continue } nn = list(nn, ascompatee1(op, ll.N, lr.N, init)) } // cannot happen: caller checked that lists had same length if ll != nil || lr != nil { Yyerror("error in shape across %v %v %v / %d %d [%s]", Hconv(nl, obj.FmtSign), Oconv(op, 0), Hconv(nr, obj.FmtSign), count(nl), count(nr), Curfn.Func.Nname.Sym.Name) } return nn } // l is an lv and rt is the type of an rv // return 1 if this implies a function call // evaluating the lv or a function call // in the conversion of the types func fncall(l *Node, rt *Type) bool { if l.Ullman >= UINF || l.Op == OINDEXMAP { return true } var r Node if needwritebarrier(l, &r) { return true } if Eqtype(l.Type, rt) { return false } return true } func ascompatet(op Op, nl *NodeList, nr **Type, fp int, init nodesOrNodeListPtr) *NodeList { var l *Node var tmp *Node var a *Node var ll *NodeList var saver Iter // check assign type list to // a expression list. called in // expr-list = func() r := Structfirst(&saver, nr) var nn *NodeList var mm *NodeList ucount := 0 for ll = nl; ll != nil; ll = ll.Next { if r == nil { break } l = ll.N if isblank(l) { r = structnext(&saver) continue } // any lv that causes a fn call must be // deferred until all the return arguments // have been pulled from the output arguments if fncall(l, r.Type) { tmp = temp(r.Type) typecheck(&tmp, Erv) a = Nod(OAS, l, tmp) a = convas(a, init) mm = list(mm, a) l = tmp } a = Nod(OAS, l, nodarg(r, fp)) a = convas(a, init) ullmancalc(a) if a.Ullman >= UINF { Dump("ascompatet ucount", a) ucount++ } nn = list(nn, a) r = structnext(&saver) } if ll != nil || r != nil { Yyerror("ascompatet: assignment count mismatch: %d = %d", count(nl), structcount(*nr)) } if ucount != 0 { Fatalf("ascompatet: too many function calls evaluating parameters") } return concat(nn, mm) } // package all the arguments that match a ... T parameter into a []T. 
func mkdotargslice(lr0 nodesOrNodeList, nn *NodeList, l *Type, fp int, init nodesOrNodeListPtr, ddd *Node) *NodeList { esc := uint16(EscUnknown) if ddd != nil { esc = ddd.Esc } tslice := typ(TARRAY) tslice.Type = l.Type.Type tslice.Bound = -1 var n *Node if nodeSeqLen(lr0) == 0 { n = nodnil() n.Type = tslice } else { n = Nod(OCOMPLIT, nil, typenod(tslice)) if ddd != nil && prealloc[ddd] != nil { prealloc[n] = prealloc[ddd] // temporary to use } setNodeSeq(&n.List, lr0) n.Esc = esc typecheck(&n, Erv) if n.Type == nil { Fatalf("mkdotargslice: typecheck failed") } walkexpr(&n, init) } a := Nod(OAS, nodarg(l, fp), n) nn = list(nn, convas(a, init)) return nn } // helpers for shape errors func dumptypes(nl **Type, what string) string { var savel Iter fmt_ := "" fmt_ += "\t" first := 1 for l := Structfirst(&savel, nl); l != nil; l = structnext(&savel) { if first != 0 { first = 0 } else { fmt_ += ", " } fmt_ += Tconv(l, 0) } if first != 0 { fmt_ += fmt.Sprintf("[no arguments %s]", what) } return fmt_ } func dumpnodetypes(l nodesOrNodeList, what string) string { var r *Node fmt_ := "" fmt_ += "\t" first := 1 for it := nodeSeqIterate(l); !it.Done(); it.Next() { r = it.N() if first != 0 { first = 0 } else { fmt_ += ", " } fmt_ += Tconv(r.Type, 0) } if first != 0 { fmt_ += fmt.Sprintf("[no arguments %s]", what) } return fmt_ } // check assign expression list to // a type list. called in // return expr-list // func(expr-list) func ascompatte(op Op, call *Node, isddd bool, nl **Type, lr nodesOrNodeList, fp int, init nodesOrNodeListPtr) *NodeList { var savel Iter lr0 := lr l := Structfirst(&savel, nl) var r *Node if nodeSeqLen(lr) > 0 { r = nodeSeqFirst(lr) } var nn *NodeList // f(g()) where g has multiple return values var a *Node var l2 string var ll *Type var l1 string var lrit nodeSeqIterator if r != nil && nodeSeqLen(lr) <= 1 && r.Type.Etype == TSTRUCT && r.Type.Funarg { // optimization - can do block copy if eqtypenoname(r.Type, *nl) { a := nodarg(*nl, fp) r = Nod(OCONVNOP, r, nil) r.Type = a.Type nn = list1(convas(Nod(OAS, a, r), init)) goto ret } // conversions involved. // copy into temporaries. var alist *NodeList for l := Structfirst(&savel, &r.Type); l != nil; l = structnext(&savel) { a = temp(l.Type) alist = list(alist, a) } a = Nod(OAS2, nil, nil) setNodeSeq(&a.List, alist) setNodeSeq(&a.Rlist, lr) typecheck(&a, Etop) walkstmt(&a) appendNodeSeqNode(init, a) lr = alist r = nodeSeqFirst(lr) l = Structfirst(&savel, nl) } lrit = nodeSeqIterate(lr) loop: if l != nil && l.Isddd { // the ddd parameter must be last ll = structnext(&savel) if ll != nil { Yyerror("... must be last argument") } // special case -- // only if we are assigning a single ddd // argument to a ddd parameter then it is // passed thru unencapsulated if r != nil && lrit.Len() <= 1 && isddd && Eqtype(l.Type, r.Type) { a = Nod(OAS, nodarg(l, fp), r) a = convas(a, init) nn = list(nn, a) goto ret } // normal case -- make a slice of all // remaining arguments and pass it to // the ddd parameter. 
nn = mkdotargslice(lrit.Seq(), nn, l, fp, init, call.Right) goto ret } if l == nil || r == nil { if l != nil || r != nil { l1 = dumptypes(nl, "expected") l2 = dumpnodetypes(lr0, "given") if l != nil { Yyerror("not enough arguments to %v\n%s\n%s", Oconv(op, 0), l1, l2) } else { Yyerror("too many arguments to %v\n%s\n%s", Oconv(op, 0), l1, l2) } } goto ret } a = Nod(OAS, nodarg(l, fp), r) a = convas(a, init) nn = list(nn, a) l = structnext(&savel) r = nil lrit.Next() if !lrit.Done() { r = lrit.N() } goto loop ret: for lrit = nodeSeqIterate(nn); !lrit.Done(); lrit.Next() { lrit.N().Typecheck = 1 } return nn } // generate code for print func walkprint(nn *Node, init nodesOrNodeListPtr) *Node { var r *Node var n *Node var on *Node var t *Type var et EType op := nn.Op all := nn.List var calls *NodeList notfirst := false // Hoist all the argument evaluation up before the lock. walkexprlistcheap(all, init) calls = list(calls, mkcall("printlock", nil, init)) for it := nodeSeqIterate(all); !it.Done(); it.Next() { if notfirst { calls = list(calls, mkcall("printsp", nil, init)) } notfirst = op == OPRINTN n = it.N() if n.Op == OLITERAL { switch n.Val().Ctype() { case CTRUNE: defaultlit(&n, runetype) case CTINT: defaultlit(&n, Types[TINT64]) case CTFLT: defaultlit(&n, Types[TFLOAT64]) } } if n.Op != OLITERAL && n.Type != nil && n.Type.Etype == TIDEAL { defaultlit(&n, Types[TINT64]) } defaultlit(&n, nil) *it.P() = n if n.Type == nil || n.Type.Etype == TFORW { continue } t = n.Type et = n.Type.Etype if Isinter(n.Type) { if isnilinter(n.Type) { on = syslook("printeface") } else { on = syslook("printiface") } substArgTypes(&on, n.Type) // any-1 } else if Isptr[et] || et == TCHAN || et == TMAP || et == TFUNC || et == TUNSAFEPTR { on = syslook("printpointer") substArgTypes(&on, n.Type) // any-1 } else if Isslice(n.Type) { on = syslook("printslice") substArgTypes(&on, n.Type) // any-1 } else if Isint[et] { if et == TUINT64 { if (t.Sym.Pkg == Runtimepkg || compiling_runtime != 0) && t.Sym.Name == "hex" { on = syslook("printhex") } else { on = syslook("printuint") } } else { on = syslook("printint") } } else if Isfloat[et] { on = syslook("printfloat") } else if Iscomplex[et] { on = syslook("printcomplex") } else if et == TBOOL { on = syslook("printbool") } else if et == TSTRING { on = syslook("printstring") } else { badtype(OPRINT, n.Type, nil) continue } t = *getinarg(on.Type) if t != nil { t = t.Type } if t != nil { t = t.Type } if !Eqtype(t, n.Type) { n = Nod(OCONV, n, nil) n.Type = t } r = Nod(OCALL, on, nil) appendNodeSeqNode(&r.List, n) calls = list(calls, r) } if op == OPRINTN { calls = list(calls, mkcall("printnl", nil, nil)) } calls = list(calls, mkcall("printunlock", nil, init)) typechecklist(calls, Etop) walkexprlist(calls, init) r = Nod(OEMPTY, nil, nil) typecheck(&r, Etop) walkexpr(&r, init) setNodeSeq(&r.Ninit, calls) return r } func callnew(t *Type) *Node { dowidth(t) fn := syslook("newobject") substArgTypes(&fn, t) return mkcall1(fn, Ptrto(t), nil, typename(t)) } func iscallret(n *Node) bool { n = outervalue(n) return n.Op == OINDREG && n.Reg == int16(Thearch.REGSP) } func isstack(n *Node) bool { n = outervalue(n) // If n is *autotmp and autotmp = &foo, replace n with foo. // We introduce such temps when initializing struct literals. 
if n.Op == OIND && n.Left.Op == ONAME && strings.HasPrefix(n.Left.Sym.Name, "autotmp_") { defn := n.Left.Name.Defn if defn != nil && defn.Op == OAS && defn.Right.Op == OADDR { n = defn.Right.Left } } switch n.Op { case OINDREG: return n.Reg == int16(Thearch.REGSP) case ONAME: switch n.Class { case PAUTO, PPARAM, PPARAMOUT: return true } } return false } func isglobal(n *Node) bool { n = outervalue(n) switch n.Op { case ONAME: switch n.Class { case PEXTERN: return true } } return false } // Do we need a write barrier for the assignment l = r? func needwritebarrier(l *Node, r *Node) bool { if use_writebarrier == 0 { return false } if l == nil || isblank(l) { return false } // No write barrier for write of non-pointers. dowidth(l.Type) if !haspointers(l.Type) { return false } // No write barrier for write to stack. if isstack(l) { return false } // No write barrier for implicit zeroing. if r == nil { return false } // Ignore no-op conversions when making decision. // Ensures that xp = unsafe.Pointer(&x) is treated // the same as xp = &x. for r.Op == OCONVNOP { r = r.Left } // No write barrier for zeroing or initialization to constant. if iszero(r) || r.Op == OLITERAL { return false } // No write barrier for storing static (read-only) data. if r.Op == ONAME && strings.HasPrefix(r.Sym.Name, "statictmp_") { return false } // No write barrier for storing address of stack values, // which are guaranteed only to be written to the stack. if r.Op == OADDR && isstack(r.Left) { return false } // No write barrier for storing address of global, which // is live no matter what. if r.Op == OADDR && isglobal(r.Left) { return false } // Otherwise, be conservative and use write barrier. return true } // TODO(rsc): Perhaps componentgen should run before this. func applywritebarrier(n *Node) *Node { if n.Left != nil && n.Right != nil && needwritebarrier(n.Left, n.Right) { if Debug_wb > 1 { Warnl(n.Lineno, "marking %v for barrier", Nconv(n.Left, 0)) } n.Op = OASWB return n } return n } func convas(n *Node, init nodesOrNodeListPtr) *Node { if n.Op != OAS { Fatalf("convas: not OAS %v", Oconv(n.Op, 0)) } n.Typecheck = 1 var lt *Type var rt *Type if n.Left == nil || n.Right == nil { goto out } lt = n.Left.Type rt = n.Right.Type if lt == nil || rt == nil { goto out } if isblank(n.Left) { defaultlit(&n.Right, nil) goto out } if n.Left.Op == OINDEXMAP { map_ := n.Left.Left key := n.Left.Right val := n.Right walkexpr(&map_, init) walkexpr(&key, init) walkexpr(&val, init) // orderexpr made sure key and val are addressable. key = Nod(OADDR, key, nil) val = Nod(OADDR, val, nil) n = mkcall1(mapfn("mapassign1", map_.Type), nil, init, typename(map_.Type), map_, key, val) goto out } if !Eqtype(lt, rt) { n.Right = assignconv(n.Right, lt, "assignment") walkexpr(&n.Right, init) } out: ullmancalc(n) return n } // from ascompat[te] // evaluating actual function arguments. // f(a,b) // if there is exactly one function expr, // then it is done first. 
otherwise must // make temp variables func reorder1(all *NodeList) *NodeList { var n *Node c := 0 // function calls t := 0 // total parameters for l := all; l != nil; l = l.Next { n = l.N t++ ullmancalc(n) if n.Ullman >= UINF { c++ } } if c == 0 || t == 1 { return all } var g *NodeList // fncalls assigned to tempnames var f *Node // last fncall assigned to stack var r *NodeList // non fncalls and tempnames assigned to stack d := 0 var a *Node for l := all; l != nil; l = l.Next { n = l.N if n.Ullman < UINF { r = list(r, n) continue } d++ if d == c { f = n continue } // make assignment of fncall to tempname a = temp(n.Right.Type) a = Nod(OAS, a, n.Right) g = list(g, a) // put normal arg assignment on list // with fncall replaced by tempname n.Right = a.Left r = list(r, n) } if f != nil { g = list(g, f) } return concat(g, r) } // from ascompat[ee] // a,b = c,d // simultaneous assignment. there cannot // be later use of an earlier lvalue. // // function calls have been removed. func reorder3(all *NodeList) *NodeList { var l *Node // If a needed expression may be affected by an // earlier assignment, make an early copy of that // expression and use the copy instead. var early *NodeList var mapinit *NodeList for list := all; list != nil; list = list.Next { l = list.N.Left // Save subexpressions needed on left side. // Drill through non-dereferences. for { if l.Op == ODOT || l.Op == OPAREN { l = l.Left continue } if l.Op == OINDEX && Isfixedarray(l.Left.Type) { reorder3save(&l.Right, all, list, &early) l = l.Left continue } break } switch l.Op { default: Fatalf("reorder3 unexpected lvalue %v", Oconv(l.Op, obj.FmtSharp)) case ONAME: break case OINDEX, OINDEXMAP: reorder3save(&l.Left, all, list, &early) reorder3save(&l.Right, all, list, &early) if l.Op == OINDEXMAP { list.N = convas(list.N, &mapinit) } case OIND, ODOTPTR: reorder3save(&l.Left, all, list, &early) } // Save expression on right side. reorder3save(&list.N.Right, all, list, &early) } early = concat(mapinit, early) return concat(early, all) } // if the evaluation of *np would be affected by the // assignments in all up to but not including stop, // copy into a temporary during *early and // replace *np with that temp. func reorder3save(np **Node, all *NodeList, stop *NodeList, early **NodeList) { n := *np if !aliased(n, all, stop) { return } q := temp(n.Type) q = Nod(OAS, q, n) typecheck(&q, Etop) *early = list(*early, q) *np = q.Left } // what's the outer value that a write to n affects? // outer value means containing struct or array. func outervalue(n *Node) *Node { for { if n.Op == OXDOT { Fatalf("OXDOT in walk") } if n.Op == ODOT || n.Op == OPAREN || n.Op == OCONVNOP { n = n.Left continue } if n.Op == OINDEX && Isfixedarray(n.Left.Type) { n = n.Left continue } break } return n } // Is it possible that the computation of n might be // affected by writes in as up to but not including stop? func aliased(n *Node, all *NodeList, stop *NodeList) bool { if n == nil { return false } // Look for obvious aliasing: a variable being assigned // during the all list and appearing in n. // Also record whether there are any writes to main memory. // Also record whether there are any writes to variables // whose addresses have been taken. memwrite := 0 varwrite := 0 var a *Node for l := all; l != stop; l = l.Next { a = outervalue(l.N.Left) if a.Op != ONAME { memwrite = 1 continue } switch n.Class { default: varwrite = 1 continue case PAUTO, PPARAM, PPARAMOUT: if n.Addrtaken { varwrite = 1 continue } if vmatch2(a, n) { // Direct hit. 
return true } } } // The variables being written do not appear in n. // However, n might refer to computed addresses // that are being written. // If no computed addresses are affected by the writes, no aliasing. if memwrite == 0 && varwrite == 0 { return false } // If n does not refer to computed addresses // (that is, if n only refers to variables whose addresses // have not been taken), no aliasing. if varexpr(n) { return false } // Otherwise, both the writes and n refer to computed memory addresses. // Assume that they might conflict. return true } // does the evaluation of n only refer to variables // whose addresses have not been taken? // (and no other memory) func varexpr(n *Node) bool { if n == nil { return true } switch n.Op { case OLITERAL: return true case ONAME: switch n.Class { case PAUTO, PPARAM, PPARAMOUT: if !n.Addrtaken { return true } } return false case OADD, OSUB, OOR, OXOR, OMUL, ODIV, OMOD, OLSH, ORSH, OAND, OANDNOT, OPLUS, OMINUS, OCOM, OPAREN, OANDAND, OOROR, ODOT, // but not ODOTPTR OCONV, OCONVNOP, OCONVIFACE, ODOTTYPE: return varexpr(n.Left) && varexpr(n.Right) } // Be conservative. return false } // is the name l mentioned in r? func vmatch2(l *Node, r *Node) bool { if r == nil { return false } switch r.Op { // match each right given left case ONAME: return l == r case OLITERAL: return false } if vmatch2(l, r.Left) { return true } if vmatch2(l, r.Right) { return true } for it := nodeSeqIterate(r.List); !it.Done(); it.Next() { if vmatch2(l, it.N()) { return true } } return false } // is any name mentioned in l also mentioned in r? // called by sinit.go func vmatch1(l *Node, r *Node) bool { // isolate all left sides if l == nil || r == nil { return false } switch l.Op { case ONAME: switch l.Class { case PPARAM, PPARAMREF, PAUTO: break // assignment to non-stack variable // must be delayed if right has function calls. default: if r.Ullman >= UINF { return true } } return vmatch2(l, r) case OLITERAL: return false } if vmatch1(l.Left, r) { return true } if vmatch1(l.Right, r) { return true } for it := nodeSeqIterate(l); !it.Done(); it.Next() { if vmatch1(it.N(), r) { return true } } return false } // walk through argin parameters. // generate and return code to allocate // copies of escaped parameters to the heap. func paramstoheap(argin **Type, out int) []*Node { var savet Iter var v *Node var as *Node var nn []*Node for t := Structfirst(&savet, argin); t != nil; t = structnext(&savet) { v = t.Nname if v != nil && v.Sym != nil && v.Sym.Name[0] == '~' && v.Sym.Name[1] == 'r' { // unnamed result v = nil } // For precise stacks, the garbage collector assumes results // are always live, so zero them always. if out != 0 { // Defer might stop a panic and show the // return values as they exist at the time of panic. // Make sure to zero them on entry to the function. 
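		// (The OAS with a nil right-hand side appended just below is that
		// zeroing store.) For an escaping parameter or result x of type T,
		// the code generated further down is roughly (sketch):
		//	&x = new(T)         // x's Heapaddr gets a fresh heap cell
		//	x = x·stackcopy     // copy the incoming stack value (params only)
		// with returnsfromheap emitting the reverse copy for results at exit.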
nn = append(nn, Nod(OAS, nodarg(t, -1), nil)) } if v == nil || v.Class&PHEAP == 0 { continue } // generate allocation & copying code if compiling_runtime != 0 { Yyerror("%v escapes to heap, not allowed in runtime.", v) } if prealloc[v] == nil { prealloc[v] = callnew(v.Type) } nn = append(nn, Nod(OAS, v.Name.Heapaddr, prealloc[v])) if v.Class&^PHEAP != PPARAMOUT { as = Nod(OAS, v, v.Name.Param.Stackparam) v.Name.Param.Stackparam.Typecheck = 1 typecheck(&as, Etop) as = applywritebarrier(as) nn = append(nn, as) } } return nn } // walk through argout parameters copying back to stack func returnsfromheap(argin **Type) []*Node { var savet Iter var v *Node var nn []*Node for t := Structfirst(&savet, argin); t != nil; t = structnext(&savet) { v = t.Nname if v == nil || v.Class != PHEAP|PPARAMOUT { continue } nn = append(nn, Nod(OAS, v.Name.Param.Stackparam, v)) } return nn } // take care of migrating any function in/out args // between the stack and the heap. adds code to // curfn's before and after lists. func heapmoves() { lno := lineno lineno = Curfn.Lineno nn := paramstoheap(getthis(Curfn.Type), 0) nn = append(nn, paramstoheap(getinarg(Curfn.Type), 0)...) nn = append(nn, paramstoheap(Getoutarg(Curfn.Type), 1)...) Curfn.Func.Enter.Append(nn...) lineno = Curfn.Func.Endlineno Curfn.Func.Exit.Append(returnsfromheap(Getoutarg(Curfn.Type))...) lineno = lno } func vmkcall(fn *Node, t *Type, init nodesOrNodeListPtr, va []*Node) *Node { if fn.Type == nil || fn.Type.Etype != TFUNC { Fatalf("mkcall %v %v", fn, fn.Type) } n := fn.Type.Intuple r := Nod(OCALL, fn, nil) setNodeSeq(&r.List, va[:n]) if fn.Type.Outtuple > 0 { typecheck(&r, Erv|Efnstruct) } else { typecheck(&r, Etop) } walkexpr(&r, init) r.Type = t return r } func mkcall(name string, t *Type, init nodesOrNodeListPtr, args ...*Node) *Node { return vmkcall(syslook(name), t, init, args) } func mkcall1(fn *Node, t *Type, init nodesOrNodeListPtr, args ...*Node) *Node { return vmkcall(fn, t, init, args) } func conv(n *Node, t *Type) *Node { if Eqtype(n.Type, t) { return n } n = Nod(OCONV, n, nil) n.Type = t typecheck(&n, Erv) return n } func chanfn(name string, n int, t *Type) *Node { if t.Etype != TCHAN { Fatalf("chanfn %v", t) } fn := syslook(name) switch n { default: Fatalf("chanfn %d", n) case 1: substArgTypes(&fn, t.Type) case 2: substArgTypes(&fn, t.Type, t.Type) } return fn } func mapfn(name string, t *Type) *Node { if t.Etype != TMAP { Fatalf("mapfn %v", t) } fn := syslook(name) substArgTypes(&fn, t.Down, t.Type, t.Down, t.Type) return fn } func mapfndel(name string, t *Type) *Node { if t.Etype != TMAP { Fatalf("mapfn %v", t) } fn := syslook(name) substArgTypes(&fn, t.Down, t.Type, t.Down) return fn } func writebarrierfn(name string, l *Type, r *Type) *Node { fn := syslook(name) substArgTypes(&fn, l, r) return fn } func addstr(n *Node, init nodesOrNodeListPtr) *Node { // orderexpr rewrote OADDSTR to have a list of strings. c := nodeSeqLen(n.List) if c < 2 { Yyerror("addstr count %d too small", c) } buf := nodnil() if n.Esc == EscNone { sz := int64(0) for it := nodeSeqIterate(n.List); !it.Done(); it.Next() { if it.N().Op == OLITERAL { sz += int64(len(it.N().Val().U.(string))) } } // Don't allocate the buffer if the result won't fit. if sz < tmpstringbufsize { // Create temporary buffer for result string on stack. 
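			// Sketch of the overall lowering done by this function:
			//	s := a + b + c              // OADDSTR with List = [a, b, c]
			// becomes roughly
			//	s = concatstring3(&buf, a, b, c)
			// where buf is this optional stack buffer; six or more operands are
			// instead passed as a slice to concatstrings(&buf, []string{...}).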
			t := aindex(Nodintconst(tmpstringbufsize), Types[TUINT8])
			buf = Nod(OADDR, temp(t), nil)
		}
	}

	// build list of string arguments
	args := []*Node{buf}
	for it := nodeSeqIterate(n.List); !it.Done(); it.Next() {
		args = append(args, conv(it.N(), Types[TSTRING]))
	}

	var fn string
	if c <= 5 {
		// small numbers of strings use direct runtime helpers.
		// note: orderexpr knows this cutoff too.
		fn = fmt.Sprintf("concatstring%d", c)
	} else {
		// large numbers of strings are passed to the runtime as a slice.
		fn = "concatstrings"

		t := typ(TARRAY)
		t.Type = Types[TSTRING]
		t.Bound = -1
		slice := Nod(OCOMPLIT, nil, typenod(t))
		if prealloc[n] != nil {
			prealloc[slice] = prealloc[n]
		}
		setNodeSeq(&slice.List, args[1:]) // skip buf arg
		args = []*Node{buf}
		args = append(args, slice)
		slice.Esc = EscNone
	}

	cat := syslook(fn)
	r := Nod(OCALL, cat, nil)
	setNodeSeq(&r.List, args)
	typecheck(&r, Erv)
	walkexpr(&r, init)
	r.Type = n.Type

	return r
}

// expand append(l1, l2...) to
//   init {
//     s := l1
//     if n := len(l1) + len(l2) - cap(s); n > 0 {
//       s = growslice_n(s, n)
//     }
//     s = s[:len(l1)+len(l2)]
//     memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
//   }
//   s
//
// l2 is allowed to be a string.
func appendslice(n *Node, init nodesOrNodeListPtr) *Node {
	walkexprlistsafe(n.List, init)

	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
	// and n are name or literal, but those may index the slice we're
	// modifying here. Fix explicitly.
	for it := nodeSeqIterate(n.List); !it.Done(); it.Next() {
		*it.P() = cheapexpr(it.N(), init)
	}

	l1 := nodeSeqFirst(n.List)
	l2 := nodeSeqSecond(n.List)

	s := temp(l1.Type) // var s []T
	var l []*Node
	l = append(l, Nod(OAS, s, l1)) // s = l1

	nt := temp(Types[TINT])

	nif := Nod(OIF, nil, nil)

	// n := len(s) + len(l2) - cap(s)
	setNodeSeq(&nif.Ninit, list1(Nod(OAS, nt, Nod(OSUB, Nod(OADD, Nod(OLEN, s, nil), Nod(OLEN, l2, nil)), Nod(OCAP, s, nil)))))

	nif.Left = Nod(OGT, nt, Nodintconst(0))

	// instantiate growslice_n(Type*, []any, int) []any
	fn := syslook("growslice_n") //   growslice_n(<type>, old []T, n int64) (ret []T)
	substArgTypes(&fn, s.Type.Type, s.Type.Type)

	// s = growslice_n(T, s, n)
	nif.Nbody.Set([]*Node{Nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(s.Type), s, nt))})

	l = append(l, nif)

	if haspointers(l1.Type.Type) {
		// copy(s[len(l1):len(l1)+len(l2)], l2)
		nptr1 := Nod(OSLICE, s, Nod(OKEY, Nod(OLEN, l1, nil), Nod(OADD, Nod(OLEN, l1, nil), Nod(OLEN, l2, nil))))

		nptr1.Etype = 1
		nptr2 := l2
		fn := syslook("typedslicecopy")
		substArgTypes(&fn, l1.Type, l2.Type)
		nt := mkcall1(fn, Types[TINT], &l, typename(l1.Type.Type), nptr1, nptr2)
		l = append(l, nt)
	} else if instrumenting {
		// rely on runtime to instrument copy.
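		// (Illustrative note, not in the original source: with -race or -msan
		// the copy is emitted as a call to the runtime's slicecopy or
		// slicestringcopy helper rather than a raw memmove, so the runtime
		// performs the instrumentation for the copied memory itself.)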
		// copy(s[len(l1):len(l1)+len(l2)], l2)
		nptr1 := Nod(OSLICE, s, Nod(OKEY, Nod(OLEN, l1, nil), Nod(OADD, Nod(OLEN, l1, nil), Nod(OLEN, l2, nil))))

		nptr1.Etype = 1
		nptr2 := l2
		var fn *Node
		if l2.Type.Etype == TSTRING {
			fn = syslook("slicestringcopy")
		} else {
			fn = syslook("slicecopy")
		}
		substArgTypes(&fn, l1.Type, l2.Type)
		nt := mkcall1(fn, Types[TINT], &l, nptr1, nptr2, Nodintconst(s.Type.Type.Width))
		l = append(l, nt)
	} else {
		// memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
		nptr1 := Nod(OINDEX, s, Nod(OLEN, l1, nil))

		nptr1.Bounded = true
		nptr1 = Nod(OADDR, nptr1, nil)

		nptr2 := Nod(OSPTR, l2, nil)

		fn := syslook("memmove")
		substArgTypes(&fn, s.Type.Type, s.Type.Type)

		nwid := cheapexpr(conv(Nod(OLEN, l2, nil), Types[TUINTPTR]), &l)

		nwid = Nod(OMUL, nwid, Nodintconst(s.Type.Type.Width))
		nt := mkcall1(fn, nil, &l, nptr1, nptr2, nwid)
		l = append(l, nt)
	}

	// s = s[:len(l1)+len(l2)]
	nt = Nod(OADD, Nod(OLEN, l1, nil), Nod(OLEN, l2, nil))

	nt = Nod(OSLICE, s, Nod(OKEY, nil, nt))
	nt.Etype = 1
	l = append(l, Nod(OAS, s, nt))

	typechecklist(l, Etop)
	walkstmtlist(l)
	appendNodeSeq(init, l)
	return s
}

// Rewrite append(src, x, y, z) so that any side effects in
// x, y, z (including runtime panics) are evaluated in
// initialization statements before the append.
// For normal code generation, stop there and leave the
// rest to cgen_append.
//
// For race detector, expand append(src, a [, b]* ) to
//
//   init {
//     s := src
//     const argc = len(args) - 1
//     if cap(s) - len(s) < argc {
//       s = growslice(s, len(s)+argc)
//     }
//     n := len(s)
//     s = s[:n+argc]
//     s[n] = a
//     s[n+1] = b
//     ...
//   }
//   s
func walkappend(n *Node, init nodesOrNodeListPtr, dst *Node) *Node {
	if !samesafeexpr(dst, n.List.N) {
		it := nodeSeqIterate(n.List)
		*it.P() = safeexpr(it.N(), init)
		walkexpr(it.P(), init)
	}

	it := nodeSeqIterate(n.List)
	it.Next()
	walkexprlistsafe(it.Seq(), init)

	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
	// and n are name or literal, but those may index the slice we're
	// modifying here. Fix explicitly.
	// Using cheapexpr also makes sure that the evaluation
	// of all arguments (and especially any panics) happen
	// before we begin to modify the slice in a visible way.
	it = nodeSeqIterate(n.List)
	it.Next()
	for ; !it.Done(); it.Next() {
		*it.P() = cheapexpr(it.N(), init)
	}

	nsrc := nodeSeqFirst(n.List)

	// Resolve slice type of multi-valued return.
	if Istype(nsrc.Type, TSTRUCT) {
		nsrc.Type = nsrc.Type.Type.Type
	}
	argc := nodeSeqLen(n.List) - 1
	if argc < 1 {
		return nsrc
	}

	// General case, with no function calls left as arguments.
	// Leave for gen, except that instrumentation requires old form.
	if !instrumenting {
		return n
	}

	var l []*Node

	ns := temp(nsrc.Type)
	l = append(l, Nod(OAS, ns, nsrc)) // s = src

	na := Nodintconst(int64(argc)) // const argc
	nx := Nod(OIF, nil, nil)       // if cap(s) - len(s) < argc
	nx.Left = Nod(OLT, Nod(OSUB, Nod(OCAP, ns, nil), Nod(OLEN, ns, nil)), na)

	fn := syslook("growslice") //   growslice(<type>, old []T, mincap int) (ret []T)
	substArgTypes(&fn, ns.Type.Type, ns.Type.Type)

	nx.Nbody.Set([]*Node{Nod(OAS, ns, mkcall1(fn, ns.Type, &nx.Ninit, typename(ns.Type), ns, Nod(OADD, Nod(OLEN, ns, nil), na)))})

	l = append(l, nx)

	nn := temp(Types[TINT])
	l = append(l, Nod(OAS, nn, Nod(OLEN, ns, nil))) // n = len(s)

	nx = Nod(OSLICE, ns, Nod(OKEY, nil, Nod(OADD, nn, na))) // ...s[:n+argc]
	nx.Etype = 1
	l = append(l, Nod(OAS, ns, nx)) // s = s[:n+argc]

	it = nodeSeqIterate(n.List)
	it.Next()
	for ; !it.Done(); it.Next() {
		nx = Nod(OINDEX, ns, nn) // s[n] ...
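		// (Illustrative note, not in the original source: for append(s, a, b)
		// this loop emits "s[n] = a; n = n + 1; s[n] = b", matching the
		// expansion sketched in the comment above walkappend.)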
		nx.Bounded = true
		l = append(l, Nod(OAS, nx, it.N())) // s[n] = arg
		if it.Len() > 1 {
			l = append(l, Nod(OAS, nn, Nod(OADD, nn, Nodintconst(1)))) // n = n + 1
		}
	}

	typechecklist(l, Etop)
	walkstmtlist(l)
	appendNodeSeq(init, l)
	return ns
}

// Lower copy(a, b) to a memmove call or a runtime call.
//
// init {
//   n := len(a)
//   if n > len(b) { n = len(b) }
//   memmove(a.ptr, b.ptr, n*sizeof(elem(a)))
// }
// n;
//
// Also works if b is a string.
//
func copyany(n *Node, init nodesOrNodeListPtr, runtimecall bool) *Node {
	if haspointers(n.Left.Type.Type) {
		fn := writebarrierfn("typedslicecopy", n.Left.Type, n.Right.Type)
		return mkcall1(fn, n.Type, init, typename(n.Left.Type.Type), n.Left, n.Right)
	}

	if runtimecall {
		var fn *Node
		if n.Right.Type.Etype == TSTRING {
			fn = syslook("slicestringcopy")
		} else {
			fn = syslook("slicecopy")
		}
		substArgTypes(&fn, n.Left.Type, n.Right.Type)
		return mkcall1(fn, n.Type, init, n.Left, n.Right, Nodintconst(n.Left.Type.Type.Width))
	}

	walkexpr(&n.Left, init)
	walkexpr(&n.Right, init)
	nl := temp(n.Left.Type)
	nr := temp(n.Right.Type)
	var l *NodeList
	l = list(l, Nod(OAS, nl, n.Left))
	l = list(l, Nod(OAS, nr, n.Right))

	nfrm := Nod(OSPTR, nr, nil)
	nto := Nod(OSPTR, nl, nil)

	nlen := temp(Types[TINT])

	// n = len(to)
	l = list(l, Nod(OAS, nlen, Nod(OLEN, nl, nil)))

	// if n > len(frm) { n = len(frm) }
	nif := Nod(OIF, nil, nil)

	nif.Left = Nod(OGT, nlen, Nod(OLEN, nr, nil))
	nif.Nbody.Append(Nod(OAS, nlen, Nod(OLEN, nr, nil)))
	l = list(l, nif)

	// Call memmove.
	fn := syslook("memmove")

	substArgTypes(&fn, nl.Type.Type, nl.Type.Type)
	nwid := temp(Types[TUINTPTR])
	l = list(l, Nod(OAS, nwid, conv(nlen, Types[TUINTPTR])))
	nwid = Nod(OMUL, nwid, Nodintconst(nl.Type.Type.Width))
	l = list(l, mkcall1(fn, nil, init, nto, nfrm, nwid))

	typechecklist(l, Etop)
	walkstmtlist(l)
	appendNodeSeq(init, l)
	return nlen
}

func eqfor(t *Type, needsize *int) *Node {
	// Should only arrive here with large memory or
	// a struct/array containing a non-memory field/element.
	// Small memory is handled inline, and single non-memory
	// is handled during type check (OCMPSTR etc).
	a := algtype1(t, nil)

	if a != AMEM && a != -1 {
		Fatalf("eqfor %v", t)
	}

	if a == AMEM {
		n := syslook("memequal")
		substArgTypes(&n, t, t)
		*needsize = 1
		return n
	}

	sym := typesymprefix(".eq", t)
	n := newname(sym)
	n.Class = PFUNC
	ntype := Nod(OTFUNC, nil, nil)
	appendNodeSeqNode(&ntype.List, Nod(ODCLFIELD, nil, typenod(Ptrto(t))))
	appendNodeSeqNode(&ntype.List, Nod(ODCLFIELD, nil, typenod(Ptrto(t))))
	appendNodeSeqNode(&ntype.Rlist, Nod(ODCLFIELD, nil, typenod(Types[TBOOL])))
	typecheck(&ntype, Etype)
	n.Type = ntype.Type
	*needsize = 0
	return n
}

func countfield(t *Type) int {
	n := 0
	for t1 := t.Type; t1 != nil; t1 = t1.Down {
		n++
	}
	return n
}

func walkcompare(np **Node, init nodesOrNodeListPtr) {
	n := *np

	// Given interface value l and concrete value r, rewrite
	//   l == r
	// to
	//   x, ok := l.(type(r)); ok && x == r
	// Handle != similarly.
	// This avoids the allocation that would be required
	// to convert r to l for comparison.
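	// (Illustrative example, not in the original source: with i of an
	// interface type and s of type string, "i == s" becomes roughly
	//   x, ok := i.(string); ok && x == s
	// so no interface value needs to be allocated for s.)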
	var l *Node
	var r *Node
	if Isinter(n.Left.Type) && !Isinter(n.Right.Type) {
		l = n.Left
		r = n.Right
	} else if !Isinter(n.Left.Type) && Isinter(n.Right.Type) {
		l = n.Right
		r = n.Left
	}

	if l != nil {
		x := temp(r.Type)
		if haspointers(r.Type) {
			a := Nod(OAS, x, nil)
			typecheck(&a, Etop)
			appendNodeSeqNode(init, a)
		}
		ok := temp(Types[TBOOL])

		// l.(type(r))
		a := Nod(ODOTTYPE, l, nil)

		a.Type = r.Type

		// x, ok := l.(type(r))
		expr := Nod(OAS2, nil, nil)

		appendNodeSeqNode(&expr.List, x)
		appendNodeSeqNode(&expr.List, ok)
		appendNodeSeqNode(&expr.Rlist, a)
		typecheck(&expr, Etop)
		walkexpr(&expr, init)

		if n.Op == OEQ {
			r = Nod(OANDAND, ok, Nod(OEQ, x, r))
		} else {
			r = Nod(OOROR, Nod(ONOT, ok, nil), Nod(ONE, x, r))
		}
		appendNodeSeqNode(init, expr)
		finishcompare(np, n, r, init)
		return
	}

	// Must be comparison of array or struct.
	// Otherwise back end handles it.
	t := n.Left.Type

	switch t.Etype {
	default:
		return

	case TARRAY:
		if Isslice(t) {
			return
		}

	case TSTRUCT:
		break
	}

	cmpl := n.Left
	for cmpl != nil && cmpl.Op == OCONVNOP {
		cmpl = cmpl.Left
	}
	cmpr := n.Right
	for cmpr != nil && cmpr.Op == OCONVNOP {
		cmpr = cmpr.Left
	}

	if !islvalue(cmpl) || !islvalue(cmpr) {
		Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr)
	}

	l = temp(Ptrto(t))
	a := Nod(OAS, l, Nod(OADDR, cmpl, nil))
	a.Right.Etype = 1 // addr does not escape
	typecheck(&a, Etop)
	appendNodeSeqNode(init, a)

	r = temp(Ptrto(t))
	a = Nod(OAS, r, Nod(OADDR, cmpr, nil))
	a.Right.Etype = 1 // addr does not escape
	typecheck(&a, Etop)
	appendNodeSeqNode(init, a)

	var andor Op = OANDAND
	if n.Op == ONE {
		andor = OOROR
	}

	var expr *Node
	if t.Etype == TARRAY && t.Bound <= 4 && issimple[t.Type.Etype] {
		// Four or fewer elements of a basic type.
		// Unroll comparisons.
		var li *Node

		var ri *Node
		for i := 0; int64(i) < t.Bound; i++ {
			li = Nod(OINDEX, l, Nodintconst(int64(i)))
			ri = Nod(OINDEX, r, Nodintconst(int64(i)))
			a = Nod(n.Op, li, ri)
			if expr == nil {
				expr = a
			} else {
				expr = Nod(andor, expr, a)
			}
		}

		if expr == nil {
			expr = Nodbool(n.Op == OEQ)
		}
		finishcompare(np, n, expr, init)
		return
	}

	if t.Etype == TARRAY {
		// Zero- or single-element array, of any type.
		switch t.Bound {
		case 0:
			finishcompare(np, n, Nodbool(n.Op == OEQ), init)
			return

		case 1:
			l0 := Nod(OINDEX, l, Nodintconst(0))
			r0 := Nod(OINDEX, r, Nodintconst(0))
			a := Nod(n.Op, l0, r0)
			finishcompare(np, n, a, init)
			return
		}
	}

	if t.Etype == TSTRUCT && countfield(t) <= 4 {
		// Struct of four or fewer fields.
		// Inline comparisons.
		var li *Node

		var ri *Node
		for t1 := t.Type; t1 != nil; t1 = t1.Down {
			if isblanksym(t1.Sym) {
				continue
			}
			li = Nod(OXDOT, l, newname(t1.Sym))
			ri = Nod(OXDOT, r, newname(t1.Sym))
			a = Nod(n.Op, li, ri)
			if expr == nil {
				expr = a
			} else {
				expr = Nod(andor, expr, a)
			}
		}

		if expr == nil {
			expr = Nodbool(n.Op == OEQ)
		}
		finishcompare(np, n, expr, init)
		return
	}

	// Chose not to inline. Call equality function directly.
	var needsize int

	call := Nod(OCALL, eqfor(t, &needsize), nil)

	appendNodeSeqNode(&call.List, l)
	appendNodeSeqNode(&call.List, r)
	if needsize != 0 {
		appendNodeSeqNode(&call.List, Nodintconst(t.Width))
	}
	r = call
	if n.Op != OEQ {
		r = Nod(ONOT, r, nil)
	}
	finishcompare(np, n, r, init)
	return
}

func finishcompare(np **Node, n, r *Node, init nodesOrNodeListPtr) {
	// Using np here to avoid passing &r to typecheck.
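	// (Illustrative note, not in the original source: r holds the rewritten
	// comparison; it is typechecked and walked through *np so the result lands
	// in the caller's node, and the OCONVNOP below re-attaches n's original
	// type, without emitting any code, if the rewrite changed it.)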
	*np = r
	typecheck(np, Erv)
	walkexpr(np, init)
	r = *np
	if r.Type != n.Type {
		r = Nod(OCONVNOP, r, nil)
		r.Type = n.Type
		r.Typecheck = 1
		*np = r
	}
}

func samecheap(a *Node, b *Node) bool {
	var ar *Node
	var br *Node
	for a != nil && b != nil && a.Op == b.Op {
		switch a.Op {
		default:
			return false

		case ONAME:
			return a == b

		case ODOT, ODOTPTR:
			ar = a.Right
			br = b.Right
			if ar.Op != ONAME || br.Op != ONAME || ar.Sym != br.Sym {
				return false
			}

		case OINDEX:
			ar = a.Right
			br = b.Right
			if !Isconst(ar, CTINT) || !Isconst(br, CTINT) || Mpcmpfixfix(ar.Val().U.(*Mpint), br.Val().U.(*Mpint)) != 0 {
				return false
			}
		}

		a = a.Left
		b = b.Left
	}

	return false
}

func walkrotate(np **Node) {
	if Thearch.Thechar == '0' || Thearch.Thechar == '7' || Thearch.Thechar == '9' {
		return
	}

	n := *np

	// Want << | >> or >> | << or << ^ >> or >> ^ << on unsigned value.
	l := n.Left

	r := n.Right
	if (n.Op != OOR && n.Op != OXOR) || (l.Op != OLSH && l.Op != ORSH) || (r.Op != OLSH && r.Op != ORSH) || n.Type == nil || Issigned[n.Type.Etype] || l.Op == r.Op {
		return
	}

	// Want same, side effect-free expression on lhs of both shifts.
	if !samecheap(l.Left, r.Left) {
		return
	}

	// Constants adding to width?
	w := int(l.Type.Width * 8)

	if Smallintconst(l.Right) && Smallintconst(r.Right) {
		sl := int(Mpgetfix(l.Right.Val().U.(*Mpint)))
		if sl >= 0 {
			sr := int(Mpgetfix(r.Right.Val().U.(*Mpint)))
			if sr >= 0 && sl+sr == w {
				// Rewrite left shift half to left rotate.
				if l.Op == OLSH {
					n = l
				} else {
					n = r
				}
				n.Op = OLROT

				// Remove rotate 0 and rotate w.
				s := int(Mpgetfix(n.Right.Val().U.(*Mpint)))

				if s == 0 || s == w {
					n = n.Left
				}
				*np = n
				return
			}
		}
		return
	}

	// TODO: Could allow s and 32-s if s is bounded (maybe s&31 and 32-s&31).
	return
}

// walkmul rewrites integer multiplication by powers of two as shifts.
func walkmul(np **Node, init nodesOrNodeListPtr) {
	n := *np
	if !Isint[n.Type.Etype] {
		return
	}

	var nr *Node
	var nl *Node
	if n.Right.Op == OLITERAL {
		nl = n.Left
		nr = n.Right
	} else if n.Left.Op == OLITERAL {
		nl = n.Right
		nr = n.Left
	} else {
		return
	}

	neg := 0

	// x*0 is 0 (and side effects of x).
	var pow int
	var w int
	if Mpgetfix(nr.Val().U.(*Mpint)) == 0 {
		cheapexpr(nl, init)
		Nodconst(n, n.Type, 0)
		goto ret
	}

	// nr is a constant.
	pow = powtwo(nr)

	if pow < 0 {
		return
	}
	if pow >= 1000 {
		// negative power of 2, like -16
		neg = 1

		pow -= 1000
	}

	w = int(nl.Type.Width * 8)
	if pow+1 >= w { // too big, shouldn't happen
		return
	}

	nl = cheapexpr(nl, init)

	if pow == 0 {
		// x*1 is x
		n = nl

		goto ret
	}

	n = Nod(OLSH, nl, Nodintconst(int64(pow)))

ret:
	if neg != 0 {
		n = Nod(OMINUS, n, nil)
	}

	typecheck(&n, Erv)
	walkexpr(&n, init)
	*np = n
}

// walkdiv rewrites division by a constant as less expensive
// operations.
func walkdiv(np **Node, init nodesOrNodeListPtr) {
	// if >= 0, nr is 1<<pow // 1 if nr is negative.

	// TODO(minux)
	if Thearch.Thechar == '0' || Thearch.Thechar == '7' || Thearch.Thechar == '9' {
		return
	}

	n := *np
	if n.Right.Op != OLITERAL {
		return
	}

	// nr is a constant.
	nl := cheapexpr(n.Left, init)

	nr := n.Right

	// special cases of mod/div
	// by a constant
	w := int(nl.Type.Width * 8)

	s := 0            // 1 if nr is negative.
	pow := powtwo(nr) // if >= 0, nr is 1<<pow
	if pow >= 1000 {
		// negative power of 2
		s = 1

		pow -= 1000
	}

	if pow+1 >= w { // divisor too large.
		return
	}

	if pow < 0 {
		// try to do division by multiply by (2^w)/d
		// see hacker's delight chapter 10
		// TODO: support 64-bit magic multiply here.
		var m Magic
		m.W = w

		if Issigned[nl.Type.Etype] {
			m.Sd = Mpgetfix(nr.Val().U.(*Mpint))
			Smagic(&m)
		} else {
			m.Ud = uint64(Mpgetfix(nr.Val().U.(*Mpint)))
			Umagic(&m)
		}

		if m.Bad != 0 {
			return
		}

		// We have a quick division method so use it
		// for modulo too.
		if n.Op == OMOD {
			// rewrite as A%B = A - (A/B*B).
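			// (Illustrative example, not in the original source: for nl % 7
			// this builds nl - (nl/7)*7; the inner division is itself lowered
			// by this same magic-multiply rewrite when the new tree is walked.)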
			n1 := Nod(ODIV, nl, nr)

			n2 := Nod(OMUL, n1, nr)
			n = Nod(OSUB, nl, n2)
			goto ret
		}

		switch Simtype[nl.Type.Etype] {
		default:
			return

			// n1 = nl * magic >> w (HMUL)
		case TUINT8, TUINT16, TUINT32:
			nc := Nod(OXXX, nil, nil)

			Nodconst(nc, nl.Type, int64(m.Um))
			n1 := Nod(OHMUL, nl, nc)
			typecheck(&n1, Erv)
			if m.Ua != 0 {
				// Select a Go type with (at least) twice the width.
				var twide *Type
				switch Simtype[nl.Type.Etype] {
				default:
					return

				case TUINT8, TUINT16:
					twide = Types[TUINT32]

				case TUINT32:
					twide = Types[TUINT64]

				case TINT8, TINT16:
					twide = Types[TINT32]

				case TINT32:
					twide = Types[TINT64]
				}

				// add numerator (might overflow).
				// n2 = (n1 + nl)
				n2 := Nod(OADD, conv(n1, twide), conv(nl, twide))

				// shift by m.s
				nc := Nod(OXXX, nil, nil)

				Nodconst(nc, Types[TUINT], int64(m.S))
				n = conv(Nod(ORSH, n2, nc), nl.Type)
			} else {
				// n = n1 >> m.s
				nc := Nod(OXXX, nil, nil)

				Nodconst(nc, Types[TUINT], int64(m.S))
				n = Nod(ORSH, n1, nc)
			}

			// n1 = nl * magic >> w
		case TINT8, TINT16, TINT32:
			nc := Nod(OXXX, nil, nil)

			Nodconst(nc, nl.Type, m.Sm)
			n1 := Nod(OHMUL, nl, nc)
			typecheck(&n1, Erv)
			if m.Sm < 0 {
				// add the numerator.
				n1 = Nod(OADD, n1, nl)
			}

			// shift by m.s
			nc = Nod(OXXX, nil, nil)

			Nodconst(nc, Types[TUINT], int64(m.S))
			n2 := conv(Nod(ORSH, n1, nc), nl.Type)

			// add 1 iff n1 is negative.
			nc = Nod(OXXX, nil, nil)

			Nodconst(nc, Types[TUINT], int64(w)-1)
			n3 := Nod(ORSH, nl, nc) // n4 = -1 iff n1 is negative.
			n = Nod(OSUB, n2, n3)

			// apply sign.
			if m.Sd < 0 {
				n = Nod(OMINUS, n, nil)
			}
		}

		goto ret
	}

	switch pow {
	case 0:
		if n.Op == OMOD {
			// nl % 1 is zero.
			Nodconst(n, n.Type, 0)
		} else if s != 0 {
			// divide by -1
			n.Op = OMINUS

			n.Right = nil
		} else {
			// divide by 1
			n = nl
		}

	default:
		if Issigned[n.Type.Etype] {
			if n.Op == OMOD {
				// signed modulo 2^pow is like ANDing
				// with the last pow bits, but if nl < 0,
				// nl & (2^pow-1) is (nl+1)%2^pow - 1.
				nc := Nod(OXXX, nil, nil)

				Nodconst(nc, Types[Simtype[TUINT]], int64(w)-1)
				n1 := Nod(ORSH, nl, nc) // n1 = -1 iff nl < 0.
				if pow == 1 {
					typecheck(&n1, Erv)
					n1 = cheapexpr(n1, init)

					// n = (nl+ε)&1 -ε where ε=1 iff nl<0.
					n2 := Nod(OSUB, nl, n1)

					nc := Nod(OXXX, nil, nil)
					Nodconst(nc, nl.Type, 1)
					n3 := Nod(OAND, n2, nc)
					n = Nod(OADD, n3, n1)
				} else {
					// n = (nl+ε)&(nr-1) - ε where ε=2^pow-1 iff nl<0.
					nc := Nod(OXXX, nil, nil)

					Nodconst(nc, nl.Type, (1<<uint(pow))-1)
					n2 := Nod(OAND, Nod(OADD, nl, n1), nc)
					n = Nod(OSUB, n2, n1)
				}
			} else {
				// if nl >= 0, nl >> n == nl / nr
				// if nl < 0, we want to add 2^n-1 first.
				nc := Nod(OXXX, nil, nil)

				Nodconst(nc, Types[Simtype[TUINT]], int64(w)-1)
				n1 := Nod(ORSH, nl, nc) // n1 = -1 iff nl < 0.
				if pow == 1 {
					// nl+1 is nl-(-1)
					n.Left = Nod(OSUB, nl, n1)
				} else {
					// Do a logical right shift on -1 to keep pow bits.
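					// (Illustrative example, not in the original source: with
					// w == 32 and pow == 3, n1 is -1 for negative nl; shifting
					// it as an unsigned value right by w-pow == 29 leaves
					// 7 == 2^3-1, the rounding bias added to nl before the
					// arithmetic shift below.)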
					nc := Nod(OXXX, nil, nil)

					Nodconst(nc, Types[Simtype[TUINT]], int64(w)-int64(pow))
					n2 := Nod(ORSH, conv(n1, tounsigned(nl.Type)), nc)
					n.Left = Nod(OADD, nl, conv(n2, nl.Type))
				}

				// n = (nl + 2^pow-1) >> pow
				n.Op = ORSH

				nc = Nod(OXXX, nil, nil)
				Nodconst(nc, Types[Simtype[TUINT]], int64(pow))
				n.Right = nc
				n.Typecheck = 0
			}

			if s != 0 {
				n = Nod(OMINUS, n, nil)
			}
			break
		}

		nc := Nod(OXXX, nil, nil)
		if n.Op == OMOD {
			// n = nl & (nr-1)
			n.Op = OAND

			Nodconst(nc, nl.Type, Mpgetfix(nr.Val().U.(*Mpint))-1)
		} else {
			// n = nl >> pow
			n.Op = ORSH

			Nodconst(nc, Types[Simtype[TUINT]], int64(pow))
		}

		n.Typecheck = 0
		n.Right = nc
	}

	goto ret

ret:
	typecheck(&n, Erv)
	walkexpr(&n, init)
	*np = n
}

// return 1 if integer n must be in range [0, max), 0 otherwise
func bounded(n *Node, max int64) bool {
	if n.Type == nil || !Isint[n.Type.Etype] {
		return false
	}

	sign := Issigned[n.Type.Etype]
	bits := int32(8 * n.Type.Width)

	if Smallintconst(n) {
		v := Mpgetfix(n.Val().U.(*Mpint))
		return 0 <= v && v < max
	}

	switch n.Op {
	case OAND:
		v := int64(-1)
		if Smallintconst(n.Left) {
			v = Mpgetfix(n.Left.Val().U.(*Mpint))
		} else if Smallintconst(n.Right) {
			v = Mpgetfix(n.Right.Val().U.(*Mpint))
		}

		if 0 <= v && v < max {
			return true
		}

	case OMOD:
		if !sign && Smallintconst(n.Right) {
			v := Mpgetfix(n.Right.Val().U.(*Mpint))
			if 0 <= v && v <= max {
				return true
			}
		}

	case ODIV:
		if !sign && Smallintconst(n.Right) {
			v := Mpgetfix(n.Right.Val().U.(*Mpint))
			for bits > 0 && v >= 2 {
				bits--
				v >>= 1
			}
		}

	case ORSH:
		if !sign && Smallintconst(n.Right) {
			v := Mpgetfix(n.Right.Val().U.(*Mpint))
			if v > int64(bits) {
				return true
			}
			bits -= int32(v)
		}
	}

	if !sign && bits <= 62 && 1<<uint(bits) <= max {
		return true
	}

	return false
}
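// (Illustrative example, not in the original source: for an expression like
// x&7, bounded(n, 8) reports true because the mask limits the value to
// [0, 8); the walk pass uses answers like this to decide when a runtime
// check can be omitted.)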