Mirror of https://github.com/golang/go.git (synced 2025-05-05 15:43:04 +00:00)
[dev.fuzz] all: merge master (af72ddf) into dev.fuzz
This now includes the fix in CL 350729, which means we no longer need to
skip the test in dev.fuzz.

Conflicts:

- src/cmd/compile/internal/noder/unified_test.go

Merge List:

+ 2021-09-20 af72ddfcd7 cmd/compile: extend dump-to-file to handle "genssa" (asm) case.
+ 2021-09-20 3c764babe7 cmd/go: write go.mod requirements more consistently for go 1.17+
+ 2021-09-20 6268468e02 cmd/link: generate DIE for types referenced only through dictionaries
+ 2021-09-20 6acac8b685 cmd/compile: delay all transforms for generic funcs/methods
+ 2021-09-20 988f18d61d go/types: export Named._Orig as Named.Origin
+ 2021-09-20 b6dddaccd7 cmd/compile: fix transform.AssignOp to deal with tricky case
+ 2021-09-20 9e60c37147 cmd/compile: document register-based ABI for ppc64
+ 2021-09-20 79159f2e83 cmd/compile: fix simplification rules on arm/arm64
+ 2021-09-20 eff27e858b cmd/compile: ensure constant shift amounts are in range for arm
+ 2021-09-20 9ebe7c8ec6 go/test: add a test for issue 48344
+ 2021-09-20 6f35430faa cmd/compile: allow rotates to be merged with logical ops on arm64
+ 2021-09-20 2d9b486420 cmd/compile: update doc at top of iexport.go on the changes for typeparams
+ 2021-09-20 a81b0dc6ee cmd/compile: rename instType -> instanceType
+ 2021-09-20 119213566a cmd/cgo: remove hardcoded '-pie' ldflag for linux/arm
+ 2021-09-20 a83a558733 cmd/compile: fix export/import of range loop.
+ 2021-09-19 315dbd10c9 cmd/compile: fold double negate on arm64
+ 2021-09-19 83b36ffb10 cmd/compile: implement constant rotates on arm64
+ 2021-09-19 771b8ea4f4 cmd/compile: fix missing markHiddenClosureDead in deadcode pass
+ 2021-09-18 c894b442d1 net/rpc: remove warnings on incompatible methods at registration
+ 2021-09-17 4b654c0eec cmd/compile: SSA ".this" variable
+ 2021-09-17 f01721efb9 cmd/compile: remove self copies in tail-call wrappers
+ 2021-09-17 163871feb1 time: re-add space-padded day of year to docs
+ 2021-09-17 ac7c34767d time: support fractional timezone minutes in MarshalBinary
+ 2021-09-17 07b30a4f77 cmd/compile: delay transformAssign if lhs/rhs have typeparam
+ 2021-09-17 c10b980220 cmd/compile: restore tail call for method wrappers
+ 2021-09-17 50e4508269 cmd/compile: fix import/export of Init and Def fields.
+ 2021-09-17 3fa35b5f97 go/types: ensure that we always get a new signature in expandNamed
+ 2021-09-17 3fa7dbeff5 cmd/go: fix GOARCH value in GOAMD64 docs
+ 2021-09-17 974b0166d6 syscall: implement Pipe using pipe2 syscall on all linux platforms
+ 2021-09-17 1a49dcb82f syscall: remove //sysnb comment generating Setreuid for linux/arm64
+ 2021-09-17 cea7a71d40 cmd/compile: fix generic type handling in crawler
+ 2021-09-17 74e384f50d internal/poll: inject a hook into the runtime finalizer to count the closed pipes
+ 2021-09-17 323c6f74d3 log: don't format if writing to io.Discard
+ 2021-09-17 7f36ef0aff cmd/compile/internal/noder: hide TestUnifiedCompare behind -cmp flag
+ 2021-09-17 70493b3eb0 runtime/cgo: save and restore X3 (aka GP) for crosscall1 on riscv64
+ 2021-09-17 6d02ce8584 runtime: fix prettyprinting of parametric types in gdb
+ 2021-09-17 6602c86a38 cmd/internal/obj/riscv: improve instruction validation
+ 2021-09-17 14e812bfc5 syscall: do not use handle lists on windows when NoInheritHandles is true
+ 2021-09-16 8d2a9c32a2 all: remove incorrectly repeated words in comments
+ 2021-09-16 af9da137a9 A+C: update name to real name and add to AUTHORS
+ 2021-09-16 265b59aefd cmd/cgo: for godefs, don't let field prefix removal cause duplicates
+ 2021-09-16 4efdaa7bc7 testing: skip panics when picking the line number for decoration
+ 2021-09-16 e09dcc211a go/types, types2: add an additional shift test case
+ 2021-09-16 5402b4376c spec: fix incorrect type in a shift example
+ 2021-09-16 d09e09bc61 cmd/compile: fixing writebarrier.go for -G=3
+ 2021-09-16 bcdc61d830 cmd/compile: preserve statements better in expandCalls
+ 2021-09-16 48e2b1ea91 cmd/compile: fix LocResults formatting
+ 2021-09-16 b1bedc0774 cmd/go: add GOAMD64 environment variable
+ 2021-09-16 04f5116c98 cmd/go: clean paths before checking same directory
+ 2021-09-16 e7dbe3908e cmd/cgo: add missing tab in exports for a result of void
+ 2021-09-15 cfa233d76b cmd/compile: remove unneeded early transforms, with dictionary change
+ 2021-09-15 59a9a035ff cmd/compile: switch to computing dict format on instantiated functions
+ 2021-09-15 0edc6c4fa0 cmd/internal/obj/ppc64: generate prologue code compatible with new ABI
+ 2021-09-15 03df68d3c3 runtime: fix setting of cpu features for amd64
+ 2021-09-15 6196979365 cmd/go/internal/modload: prevent tidy downgrading disambiguating modules
+ 2021-09-15 72bb8185b5 cmd/compile: emit DWARF info about dictionary entries
+ 2021-09-15 5b48fca1fa cmd/compile: mark wrapper functions with DW_AT_trampoline
+ 2021-09-15 e4dfd788e6 go/internal/gcimporter,cmd/compile: minor clean-up in iimport.go
+ 2021-09-15 4847c47cb8 cmd/compile/internal/types2: eliminate Named.instPos
+ 2021-09-15 3100f54f20 cmd/compile/internal/types2: merge Named type loading and expansion
+ 2021-09-15 738cebb174 cmd/compile/internal/types2: implement Identical for *Union types
+ 2021-09-15 b26d325cb1 cmd/compile/internal/types2: remove some unnecessary loading/expansion of Named types
+ 2021-09-15 9fc28892cb cmd/compile/internal/types2: export TypeHash, return value without blanks
+ 2021-09-15 2da3375e9b runtime: in adjustTimers back up as far as necessary
+ 2021-09-15 c7f2f51fed cmd/go: remove subcommand prefix from error messages
+ 2021-09-15 0bb40b08c4 go/types: implement Identical for *Union types
+ 2021-09-15 cb4e1de021 go/types: minor cleanup of instantiation
+ 2021-09-15 a0f3129466 go/types: instantiate methods when instantiating Named types
+ 2021-09-14 bf26e43d0f go/types: eliminate Named.instPos
+ 2021-09-14 2933c451a0 go/types: merge Named type loading and expansion
+ 2021-09-14 137543bb93 cmd/compile: set IsShape based on type being in the Shapes pkg
+ 2021-09-14 3a72175cdc cmd/compile: fix test/typeparam/mdempsky/4.go for -G=3
+ 2021-09-14 b2c04f0d48 runtime: avoid loop variable capture in test
+ 2021-09-14 181e8cde30 go/internal/gcimporter: remove outdated comment
+ 2021-09-14 8699425b55 syscall: remove use of IN_KUBERNETES in test
+ 2021-09-14 b3c6de9dcd cmd/internal/obj/ppc64: allow VR register arguments to VS registers
+ 2021-09-14 ee91bb8319 cmd/compile: prevent typecheck importer reading type parameter twice
+ 2021-09-14 2953cd0083 go/internal/gcimporter: prevent importReader reading type parameter twice
+ 2021-09-14 b8c802b116 cmd/compile: prevent importReader reading type parameter twice
+ 2021-09-14 4a4221e818 all: remove some unused code
+ 2021-09-14 71adc658de runtime: change time.now to ABIInternal
+ 2021-09-14 146e8d4994 reflect: use Value.Len instead of conversion to slice header
+ 2021-09-13 9a58aa267e spec: fix prose about terminating statements
+ 2021-09-13 42057e9848 cmd/compile: save the note of fields when translating struct
+ 2021-09-13 960d036f8f cmd/go: add missing parenthesis in a call to "PrintVersion"
+ 2021-09-13 81a4fe6fd2 cmd/link/internal/ld: re-enable DWARF tests on solaris/illumos
+ 2021-09-13 f93a63addb reflect: add a floating point section to DeepEqual tests
+ 2021-09-13 a0c409cbc8 reflect: add fast paths for common, simple Kinds to DeepEqual
+ 2021-09-13 ac40c9872f reflect: fix _faststr optimization
+ 2021-09-13 c8a58f29dc cmd/go: add test to check for a potential workspace loading issue
+ 2021-09-13 e74e363a6b strings: add Clone function
+ 2021-09-13 bced369a50 cmd/link: minor code cleanup in dwarf gen
+ 2021-09-13 c3b217a0e5 cmd/go: document 'go install cmd@version' ignores vendor directories
+ 2021-09-12 ad97d204f0 go/types: remove some unnecessary loading/expansion of Named types
+ 2021-09-12 0d8a4bfc96 bufio: add Writer.AvailableBuffer
+ 2021-09-11 23832ba2e2 reflect: optimize for maps with string keys
+ 2021-09-11 a50225a0dc bufio: make Reader.Reset and Writer.Reset work on the zero value
+ 2021-09-10 cf2fe5d6f1 doc/asm: fix HTML markup
+ 2021-09-10 1bf2cd1291 debug/elf: retain original error message when getSymbols fails.
+ 2021-09-10 5a4b9f9494 time: reference -tags=timetzdata in testing panic
+ 2021-09-10 025308fe08 testing: increase alternation precedence
+ 2021-09-10 5a94a90d84 cmd/compile/internal/types2: better error message for invalid array decls
+ 2021-09-10 da1aa65053 cmd/compile/internal/syntax: correct follow token for type parameter lists
+ 2021-09-10 96ab854ab0 cmd/compile/internal: better AST line highlight in ssa.html
+ 2021-09-10 90c5660616 embed: guarantee the returned file of FS.Open implements io.Seeker
+ 2021-09-10 c69f5c0d76 cmd/compile: add support for Abs and Copysign intrinsics on riscv64
+ 2021-09-10 2091bd3f26 cmd/compile: simiplify arm64 bitfield optimizations
+ 2021-09-09 b32209d22d cmd/compile: fix test case for unified IR (fix build)
+ 2021-09-09 1a708bcf1d cmd/compile: don't crash while reporting invalid alias cycle
+ 2021-09-09 426ff3746f cmd/cgo, runtime/cgo: avoid GCC/clang conversion warnings
+ 2021-09-09 73483df406 cmd/compile/internal/syntax: better error message for missing type constraint
+ 2021-09-09 e1c3f2158f time: propagate "," separator for fractional seconds into Format
+ 2021-09-09 c981874a5a cmd/compile: fix implement for closure in a global assignment
+ 2021-09-09 2c4f389c02 cmd/link: enable internal linker in more cases for ppc64le
+ 2021-09-09 fb84e99eb7 test: add compiler regress tests for #46461
+ 2021-09-09 b9e1a24581 cmd/compile: fix case where init info of OAS node is dropped
+ 2021-09-09 f9271e4f85 go/types, types2: rename RParams -> RecvTypeParams
+ 2021-09-09 ea434450c2 reflect: add hooks for dealing with narrow width floats
+ 2021-09-09 a53e3d5f88 net: deprecate (net.Error).Temporary
+ 2021-09-09 19457a58e5 cmd/compile: stenciled conversions might be NOPs
+ 2021-09-09 a295b3cec8 test: re-enable AsmCheck tests for types2-based frontends
+ 2021-09-09 66f0d35f71 go/types: reduce number of delayed functions
+ 2021-09-09 d2a77f1c76 go/types: handle recursive type parameter constraints
+ 2021-09-09 9e1eea6f8b go/types: detect constraint type inference cycles
+ 2021-09-09 b86e8dd0f3 test/typeparam: fix issue48094b test build
+ 2021-09-09 c84f3a4004 syscall: drop fallback to pipe in Pipe on linux/arm
+ 2021-09-09 376a079762 cmd/compile: fix unified IR panic when expanding nested inline function
+ 2021-09-09 6edc57983a internal/poll: report open fds when TestSplicePipePool fails
+ 2021-09-09 2481f6e367 cmd/compile: fix wrong instantiated type for embedded receiver
+ 2021-09-09 d62866ef79 cmd/compile: move checkptr alignment to SSA generation
+ 2021-09-09 8fad81cd62 cmd/compile: fold handling OCONV logic to separate function
+ 2021-09-09 9cbc76bdf9 cmd/internal/obj/arm64: add checks for incorrect use of REGTMP register
+ 2021-09-09 42563f89d7 cmd/compile: remove 'ext' fields from unified IR reader/writer types
+ 2021-09-09 4c52eac49b cmd/compile: simplify value coding for unified IR
+ 2021-09-09 e30a09013b cmd/compile: extrapolate $GOROOT in unified IR
+ 2021-09-08 a1f6208e56 go/types, types2: add Environment to Config
+ 2021-09-08 f5f8a911d8 cmd/compile/internal/types2: spell out 'Type' in type parameter APIs
+ 2021-09-08 bff39cf6cb cmd/compile: add automated rewrite cycle detection
+ 2021-09-08 b61e1ed863 cmd/compile/internal/types2: temporarily pin the Checker to Interface during checking
+ 2021-09-08 47f3e1e02c cmd/compile/internal/types2: move NewTypeParam off of Checker
+ 2021-09-08 ccc927b8f6 cmd/compile/internal/types2: move typeHash to environment.go
+ 2021-09-08 30e9bfbcef cmd/compile/internal/types2: implement deduplication of instances using the Environment
+ 2021-09-08 0406d3a8e5 go/ast: rename MultiIndexExpr to IndexListExpr

Change-Id: I7f917d45b0507c122c212305144b0b455618ff54
This commit is contained in: commit c6c884be38

AUTHORS (1 line changed)
@@ -1479,6 +1479,7 @@ Zheng Dayu <davidzheng23@gmail.com>
Zhongtao Chen <chenzhongtao@126.com>
Zhou Peng <p@ctriple.cn>
Ziad Hatahet <hatahet@gmail.com>
Zizhao Zhang <btw515wolf2@gmail.com>
Zorion Arrizabalaga <zorionk@gmail.com>
Максим Федосеев <max.faceless.frei@gmail.com>
Роман Хавроненко <hagen1778@gmail.com>
@@ -1109,7 +1109,6 @@ Ian Lance Taylor <iant@golang.org>
Ian Leue <ian@appboy.com>
Ian Mckay <iann0036@gmail.com>
Ian Tay <iantay@google.com>
Ian Woolf <btw515wolf2@gmail.com>
Ian Zapolsky <ianzapolsky@gmail.com>
Ibrahim AshShohail <ibra.sho@gmail.com>
Icarus Sparry <golang@icarus.freeuk.com>
@@ -2749,6 +2748,7 @@ Zhongwei Yao <zhongwei.yao@arm.com>
Zhou Peng <p@ctriple.cn>
Ziad Hatahet <hatahet@gmail.com>
Ziheng Liu <lzhfromustc@gmail.com>
Zizhao Zhang <btw515wolf2@gmail.com>
Zorion Arrizabalaga <zorionk@gmail.com>
Zvonimir Pavlinovic <zpavlinovic@google.com>
Zyad A. Ali <zyad.ali.me@gmail.com>
@@ -125,8 +125,8 @@ it is a distinct program, so there are some differences.
One is in constant evaluation.
Constant expressions in the assembler are parsed using Go's operator
precedence, not the C-like precedence of the original.
Thus <code>3&1<<2</code> is 4, not 0—it parses as <code>(3&1)<<2</code>
not <code>3&(1<<2)</code>.
Thus <code>3&1<<2</code> is 4, not 0—it parses as <code>(3&1)<<2</code>
not <code>3&(1<<2)</code>.
Also, constants are always evaluated as 64-bit unsigned integers.
Thus <code>-2</code> is not the integer value minus two,
but the unsigned 64-bit integer with the same bit pattern.
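As a quick sanity check of the precedence rule described above, a tiny Go program (an illustrative sketch, not part of this change) prints the same value the assembler documentation quotes:

	package main

	import "fmt"

	func main() {
		// Go precedence: & and << bind at the same level and associate
		// left to right, so 3&1<<2 parses as (3&1)<<2.
		fmt.Println(3 & 1 << 2) // 4
	}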
@@ -914,8 +914,6 @@ This assembler is used by GOARCH values ppc64 and ppc64le.
Reference: <a href="/pkg/cmd/internal/obj/ppc64">Go PPC64 Assembly Instructions Reference Manual</a>
</p>

</ul>

<h3 id="s390x">IBM z/Architecture, a.k.a. s390x</h3>

<p>
@@ -1,6 +1,6 @@
<!--{
	"Title": "The Go Programming Language Specification",
	"Subtitle": "Version of Aug 23, 2021",
	"Subtitle": "Version of Sep 16, 2021",
	"Path": "/ref/spec"
}-->

@@ -3614,7 +3614,7 @@ var i = 1<<s // 1 has type int
var j int32 = 1<<s // 1 has type int32; j == 0
var k = uint64(1<<s) // 1 has type uint64; k == 1<<33
var m int = 1.0<<s // 1.0 has type int; m == 1<<33
var n = 1.0<<s == j // 1.0 has type int; n == true
var n = 1.0<<s == j // 1.0 has type int32; n == true
var o = 1<<s == 2<<s // 1 and 2 have type int; o == false
var p = 1<<s == 1<<33 // 1 has type int; p == true
var u = 1.0<<s // illegal: 1.0 has type float64, cannot shift
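The corrected comment can be verified with a small, self-contained program (a hypothetical sketch; `s` mirrors the `var s uint = 33` declared earlier in the spec's example):

	package main

	import "fmt"

	func main() {
		var s uint = 33
		var j int32 = 1 << s // 1 takes type int32; the runtime shift overflows, so j == 0
		n := 1.0<<s == j     // 1.0 takes j's type, int32 (the point of this fix); n == true
		fmt.Println(j, n)    // 0 true
	}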
@@ -4561,9 +4561,8 @@ SimpleStmt = EmptyStmt | ExpressionStmt | SendStmt | IncDecStmt | Assignment | S
<h3 id="Terminating_statements">Terminating statements</h3>

<p>
A <i>terminating statement</i> prevents execution of all statements that lexically
appear after it in the same <a href="#Blocks">block</a>. The following statements
are terminating:
A <i>terminating statement</i> interrupts the regular flow of control in
a <a href="#Blocks">block</a>. The following statements are terminating:
</p>

<ol>
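For readers new to the term, a minimal Go illustration (an assumed example, not spec text) of a function whose body ends in a terminating statement:

	package main

	import "fmt"

	// countdown ends in a "for" statement with no condition, which is a
	// terminating statement, so no return is required after the loop.
	func countdown(n int) int {
		for {
			if n == 0 {
				return n
			}
			n--
		}
	}

	func main() { fmt.Println(countdown(3)) } // 0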
misc/cgo/testgodefs/testdata/issue48396.go (new file, 18 lines, vendored)
@@ -0,0 +1,18 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//
// +build ignore

package main

/*
// from <linux/kcm.h>
struct issue48396 {
	int fd;
	int bpf_fd;
};
*/
import "C"

type Issue48396 C.struct_issue48396
misc/cgo/testgodefs/testdata/main.go (3 lines changed, vendored)
@@ -28,6 +28,9 @@ var v7 = S{}
// Test that #define'd type is fully defined
var _ = issue38649{X: 0}

// Test that prefixes do not cause duplicate field names.
var _ = Issue48396{Fd: 1, Bpf_fd: 2}

func main() {
	pass := true
@@ -25,6 +25,7 @@ var filePrefixes = []string{
	"issue37621",
	"issue38649",
	"issue39534",
	"issue48396",
}

func TestGoDefs(t *testing.T) {
@@ -68,7 +68,12 @@ func (b *Reader) Size() int { return len(b.buf) }

// Reset discards any buffered data, resets all state, and switches
// the buffered reader to read from r.
// Calling Reset on the zero value of Reader initializes the internal buffer
// to the default size.
func (b *Reader) Reset(r io.Reader) {
	if b.buf == nil {
		b.buf = make([]byte, defaultBufSize)
	}
	b.reset(b.buf, r)
}

@@ -590,7 +595,12 @@ func (b *Writer) Size() int { return len(b.buf) }

// Reset discards any unflushed buffered data, clears any error, and
// resets b to write its output to w.
// Calling Reset on the zero value of Writer initializes the internal buffer
// to the default size.
func (b *Writer) Reset(w io.Writer) {
	if b.buf == nil {
		b.buf = make([]byte, defaultBufSize)
	}
	b.err = nil
	b.n = 0
	b.wr = w
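A short usage sketch of the zero-value behaviour documented above (illustrative only; it simply exercises the exported API added in CL a50225a0dc):

	package main

	import (
		"bufio"
		"fmt"
		"os"
		"strings"
	)

	func main() {
		// A zero Reader or Writer is now usable: Reset allocates the
		// default-sized buffer on first use.
		var r bufio.Reader
		r.Reset(strings.NewReader("hello\n"))
		line, _ := r.ReadString('\n')

		var w bufio.Writer
		w.Reset(os.Stdout)
		fmt.Fprint(&w, line)
		w.Flush() // prints "hello"
	}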
@@ -623,6 +633,14 @@ func (b *Writer) Flush() error {
// Available returns how many bytes are unused in the buffer.
func (b *Writer) Available() int { return len(b.buf) - b.n }

// AvailableBuffer returns an empty buffer with b.Available() capacity.
// This buffer is intended to be appended to and
// passed to an immediately succeeding Write call.
// The buffer is only valid until the next write operation on b.
func (b *Writer) AvailableBuffer() []byte {
	return b.buf[b.n:][:0]
}

// Buffered returns the number of bytes that have been written into the current buffer.
func (b *Writer) Buffered() int { return b.n }
@@ -10,6 +10,8 @@ import (
	"errors"
	"fmt"
	"io"
	"math/rand"
	"strconv"
	"strings"
	"testing"
	"testing/iotest"
@@ -608,6 +610,37 @@ func TestWriter(t *testing.T) {
	}
}

func TestWriterAppend(t *testing.T) {
	got := new(bytes.Buffer)
	var want []byte
	rn := rand.New(rand.NewSource(0))
	w := NewWriterSize(got, 64)
	for i := 0; i < 100; i++ {
		// Obtain a buffer to append to.
		b := w.AvailableBuffer()
		if w.Available() != cap(b) {
			t.Fatalf("Available() = %v, want %v", w.Available(), cap(b))
		}

		// While not recommended, it is valid to append to a shifted buffer.
		// This forces Write to copy the the input.
		if rn.Intn(8) == 0 && cap(b) > 0 {
			b = b[1:1:cap(b)]
		}

		// Append a random integer of varying width.
		n := int64(rn.Intn(1 << rn.Intn(30)))
		want = append(strconv.AppendInt(want, n, 10), ' ')
		b = append(strconv.AppendInt(b, n, 10), ' ')
		w.Write(b)
	}
	w.Flush()

	if !bytes.Equal(got.Bytes(), want) {
		t.Errorf("output mismatch:\ngot %s\nwant %s", got.Bytes(), want)
	}
}

// Check that write errors are returned properly.

type errorWriterTest struct {
@@ -1312,6 +1345,7 @@ func TestReaderReset(t *testing.T) {
	if string(buf) != "foo" {
		t.Errorf("buf = %q; want foo", buf)
	}

	r.Reset(strings.NewReader("bar bar"))
	all, err := io.ReadAll(r)
	if err != nil {
@@ -1320,12 +1354,23 @@ func TestReaderReset(t *testing.T) {
	if string(all) != "bar bar" {
		t.Errorf("ReadAll = %q; want bar bar", all)
	}

	*r = Reader{} // zero out the Reader
	r.Reset(strings.NewReader("bar bar"))
	all, err = io.ReadAll(r)
	if err != nil {
		t.Fatal(err)
	}
	if string(all) != "bar bar" {
		t.Errorf("ReadAll = %q; want bar bar", all)
	}
}

func TestWriterReset(t *testing.T) {
	var buf1, buf2 bytes.Buffer
	var buf1, buf2, buf3 bytes.Buffer
	w := NewWriter(&buf1)
	w.WriteString("foo")

	w.Reset(&buf2) // and not flushed
	w.WriteString("bar")
	w.Flush()
@@ -1335,6 +1380,17 @@ func TestWriterReset(t *testing.T) {
	if buf2.String() != "bar" {
		t.Errorf("buf2 = %q; want bar", buf2.String())
	}

	*w = Writer{} // zero out the Writer
	w.Reset(&buf3) // and not flushed
	w.WriteString("bar")
	w.Flush()
	if buf1.String() != "" {
		t.Errorf("buf1 = %q; want empty", buf1.String())
	}
	if buf3.String() != "bar" {
		t.Errorf("buf3 = %q; want bar", buf3.String())
	}
}

func TestReaderDiscard(t *testing.T) {
@@ -20,6 +20,18 @@ func ExampleWriter() {
	// Output: Hello, world!
}

func ExampleWriter_AvailableBuffer() {
	w := bufio.NewWriter(os.Stdout)
	for _, i := range []int64{1, 2, 3, 4} {
		b := w.AvailableBuffer()
		b = strconv.AppendInt(b, i, 10)
		b = append(b, ' ')
		w.Write(b)
	}
	w.Flush()
	// Output: 1 2 3 4
}

// The simplest use of a Scanner, to read standard input as a set of lines.
func ExampleScanner_lines() {
	scanner := bufio.NewScanner(os.Stdin)
src/cmd/asm/internal/asm/testdata/arm64error.s (21 lines changed, vendored)
@ -406,12 +406,12 @@ TEXT errors(SB),$0
|
||||
VBIF V0.D2, V1.D2, V2.D2 // ERROR "invalid arrangement"
|
||||
VUADDW V9.B8, V12.H8, V14.B8 // ERROR "invalid arrangement"
|
||||
VUADDW2 V9.B8, V12.S4, V14.S4 // ERROR "operand mismatch"
|
||||
VUMAX V1.D2, V2.D2, V3.D2 // ERROR "invalid arrangement"
|
||||
VUMIN V1.D2, V2.D2, V3.D2 // ERROR "invalid arrangement"
|
||||
VUMAX V1.D2, V2.D2, V3.D2 // ERROR "invalid arrangement"
|
||||
VUMIN V1.D2, V2.D2, V3.D2 // ERROR "invalid arrangement"
|
||||
VUMAX V1.B8, V2.B8, V3.B16 // ERROR "operand mismatch"
|
||||
VUMIN V1.H4, V2.S4, V3.H4 // ERROR "operand mismatch"
|
||||
VSLI $64, V7.D2, V8.D2 // ERROR "shift out of range"
|
||||
VUSRA $0, V7.D2, V8.D2 // ERROR "shift out of range"
|
||||
VUSRA $0, V7.D2, V8.D2 // ERROR "shift out of range"
|
||||
CASPD (R3, R4), (R2), (R8, R9) // ERROR "source register pair must start from even register"
|
||||
CASPD (R2, R3), (R2), (R9, R10) // ERROR "destination register pair must start from even register"
|
||||
CASPD (R2, R4), (R2), (R8, R9) // ERROR "source register pair must be contiguous"
|
||||
@ -419,8 +419,15 @@ TEXT errors(SB),$0
|
||||
ADD R1>>2, RSP, R3 // ERROR "illegal combination"
|
||||
ADDS R2<<3, R3, RSP // ERROR "unexpected SP reference"
|
||||
CMP R1<<5, RSP // ERROR "the left shift amount out of range 0 to 4"
|
||||
MOVD.P y+8(FP), R1 // ERROR "illegal combination"
|
||||
MOVD.W x-8(SP), R1 // ERROR "illegal combination"
|
||||
LDP.P x+8(FP), (R0, R1) // ERROR "illegal combination"
|
||||
LDP.W x+8(SP), (R0, R1) // ERROR "illegal combination"
|
||||
MOVD.P y+8(FP), R1 // ERROR "illegal combination"
|
||||
MOVD.W x-8(SP), R1 // ERROR "illegal combination"
|
||||
LDP.P x+8(FP), (R0, R1) // ERROR "illegal combination"
|
||||
LDP.W x+8(SP), (R0, R1) // ERROR "illegal combination"
|
||||
ADD $0x1234567, R27, R3 // ERROR "cannot use REGTMP as source"
|
||||
ADD $0x3fffffffc000, R27, R5 // ERROR "cannot use REGTMP as source"
|
||||
AND $0x22220000, R27, R4 // ERROR "cannot use REGTMP as source"
|
||||
ANDW $0x6006000060060, R27, R5 // ERROR "cannot use REGTMP as source"
|
||||
STP (R3, R4), 0x1234567(R27) // ERROR "REGTMP used in large offset store"
|
||||
LDP 0x1234567(R27), (R3, R4) // ERROR "REGTMP used in large offset load"
|
||||
STP (R26, R27), 700(R2) // ERROR "cannot use REGTMP as source"
|
||||
RET
|
||||
|
15
src/cmd/asm/internal/asm/testdata/ppc64.s
vendored
15
src/cmd/asm/internal/asm/testdata/ppc64.s
vendored
@ -649,6 +649,8 @@ TEXT asmtest(SB),DUPOK|NOSPLIT,$0
|
||||
LXVB16X (R3)(R4), VS1 // 7c241ed8
|
||||
LXVW4X (R3)(R4), VS1 // 7c241e18
|
||||
LXV 16(R3), VS1 // f4230011
|
||||
LXV 16(R3), VS33 // f4230019
|
||||
LXV 16(R3), V1 // f4230019
|
||||
LXVL R3, R4, VS1 // 7c23221a
|
||||
LXVLL R3, R4, VS1 // 7c23225a
|
||||
LXVX R3, R4, VS1 // 7c232218
|
||||
@ -668,8 +670,13 @@ TEXT asmtest(SB),DUPOK|NOSPLIT,$0
|
||||
MTFPRD R3, F0 // 7c030166
|
||||
MFVRD V0, R3 // 7c030067
|
||||
MFVSRLD VS63,R4 // 7fe40267
|
||||
MFVSRLD V31,R4 // 7fe40267
|
||||
MFVSRWZ VS33,R4 // 7c2400e7
|
||||
MFVSRWZ V1,R4 // 7c2400e7
|
||||
MTVSRD R3, VS1 // 7c230166
|
||||
MTVSRDD R3, R4, VS1 // 7c232366
|
||||
MTVSRDD R3, R4, VS33 // 7c232367
|
||||
MTVSRDD R3, R4, V1 // 7c232367
|
||||
MTVRD R3, V13 // 7da30167
|
||||
MTVSRWA R4, VS31 // 7fe401a6
|
||||
MTVSRWS R4, VS32 // 7c040327
|
||||
@ -678,6 +685,8 @@ TEXT asmtest(SB),DUPOK|NOSPLIT,$0
|
||||
XXBRW VS1, VS2 // f04f0f6c
|
||||
XXBRH VS2, VS3 // f067176c
|
||||
XXLAND VS1, VS2, VS3 // f0611410
|
||||
XXLAND V1, V2, V3 // f0611417
|
||||
XXLAND VS33, VS34, VS35 // f0611417
|
||||
XXLANDC VS1, VS2, VS3 // f0611450
|
||||
XXLEQV VS0, VS1, VS2 // f0400dd0
|
||||
XXLNAND VS0, VS1, VS2 // f0400d90
|
||||
@ -687,11 +696,17 @@ TEXT asmtest(SB),DUPOK|NOSPLIT,$0
|
||||
XXLORQ VS1, VS2, VS3 // f0611490
|
||||
XXLXOR VS1, VS2, VS3 // f06114d0
|
||||
XXSEL VS1, VS2, VS3, VS4 // f08110f0
|
||||
XXSEL VS33, VS34, VS35, VS36 // f08110ff
|
||||
XXSEL V1, V2, V3, V4 // f08110ff
|
||||
XXMRGHW VS1, VS2, VS3 // f0611090
|
||||
XXMRGLW VS1, VS2, VS3 // f0611190
|
||||
XXSPLTW VS1, $1, VS2 // f0410a90
|
||||
XXSPLTW VS33, $1, VS34 // f0410a93
|
||||
XXSPLTW V1, $1, V2 // f0410a93
|
||||
XXPERM VS1, VS2, VS3 // f06110d0
|
||||
XXSLDWI VS1, VS2, $1, VS3 // f0611110
|
||||
XXSLDWI V1, V2, $1, V3 // f0611117
|
||||
XXSLDWI VS33, VS34, $1, VS35 // f0611117
|
||||
XSCVDPSP VS1, VS2 // f0400c24
|
||||
XVCVDPSP VS1, VS2 // f0400e24
|
||||
XSCVSXDDP VS1, VS2 // f0400de0
|
||||
|
2
src/cmd/asm/internal/asm/testdata/riscv64.s
vendored
2
src/cmd/asm/internal/asm/testdata/riscv64.s
vendored
@ -382,10 +382,12 @@ start:
|
||||
SNEZ X15, X15 // b337f000
|
||||
|
||||
// F extension
|
||||
FABSS F0, F1 // d3200020
|
||||
FNEGS F0, F1 // d3100020
|
||||
FNES F0, F1, X7 // d3a300a093c31300
|
||||
|
||||
// D extension
|
||||
FABSD F0, F1 // d3200022
|
||||
FNEGD F0, F1 // d3100022
|
||||
FNED F0, F1, X5 // d3a200a293c21200
|
||||
FLTD F0, F1, X5 // d39200a2
|
||||
|
@ -3030,6 +3030,31 @@ func upper(s string) string {
|
||||
// so that all fields are exported.
|
||||
func godefsFields(fld []*ast.Field) {
|
||||
prefix := fieldPrefix(fld)
|
||||
|
||||
// Issue 48396: check for duplicate field names.
|
||||
if prefix != "" {
|
||||
names := make(map[string]bool)
|
||||
fldLoop:
|
||||
for _, f := range fld {
|
||||
for _, n := range f.Names {
|
||||
name := n.Name
|
||||
if name == "_" {
|
||||
continue
|
||||
}
|
||||
if name != prefix {
|
||||
name = strings.TrimPrefix(n.Name, prefix)
|
||||
}
|
||||
name = upper(name)
|
||||
if names[name] {
|
||||
// Field name conflict: don't remove prefix.
|
||||
prefix = ""
|
||||
break fldLoop
|
||||
}
|
||||
names[name] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
npad := 0
|
||||
for _, f := range fld {
|
||||
for _, n := range f.Names {
|
||||
|
@ -1054,9 +1054,10 @@ func (p *Package) writeExports(fgo2, fm, fgcc, fgcch io.Writer) {
|
||||
|
||||
fmt.Fprintf(fm, "void _cgoexp%s_%s(void* p){}\n", cPrefix, exp.ExpName)
|
||||
|
||||
fmt.Fprintf(fgo2, "\t")
|
||||
|
||||
if gccResult != "void" {
|
||||
// Write results back to frame.
|
||||
fmt.Fprintf(fgo2, "\t")
|
||||
forFieldList(fntype.Results,
|
||||
func(i int, aname string, atype ast.Expr) {
|
||||
if i > 0 {
|
||||
@ -1458,10 +1459,10 @@ const gccProlog = `
|
||||
(have a negative array count) and an inscrutable error will come
|
||||
out of the compiler and hopefully mention "name".
|
||||
*/
|
||||
#define __cgo_compile_assert_eq(x, y, name) typedef char name[(x-y)*(x-y)*-2+1];
|
||||
#define __cgo_compile_assert_eq(x, y, name) typedef char name[(x-y)*(x-y)*-2UL+1UL];
|
||||
|
||||
/* Check at compile time that the sizes we use match our expectations. */
|
||||
#define __cgo_size_assert(t, n) __cgo_compile_assert_eq(sizeof(t), n, _cgo_sizeof_##t##_is_not_##n)
|
||||
#define __cgo_size_assert(t, n) __cgo_compile_assert_eq(sizeof(t), (size_t)n, _cgo_sizeof_##t##_is_not_##n)
|
||||
|
||||
__cgo_size_assert(char, 1)
|
||||
__cgo_size_assert(short, 2)
|
||||
|
@@ -627,6 +627,105 @@ modifying or saving the FPCR.
Functions are allowed to modify it between calls (as long as they
restore it), but as of this writing Go code never does.

### ppc64 architecture

The ppc64 architecture uses R3 – R10 and R14 – R17 for integer arguments
and results.

It uses F1 – F12 for floating-point arguments and results.

Register R31 is a permanent scratch register in Go.

Special-purpose registers used within Go generated code and Go
assembly code are as follows:

| Register | Call meaning | Return meaning | Body meaning |
| --- | --- | --- | --- |
| R0 | Zero value | Same | Same |
| R1 | Stack pointer | Same | Same |
| R2 | TOC register | Same | Same |
| R11 | Closure context pointer | Scratch | Scratch |
| R12 | Function address on indirect calls | Scratch | Scratch |
| R13 | TLS pointer | Same | Same |
| R20,R21 | Scratch | Scratch | Used by duffcopy, duffzero |
| R30 | Current goroutine | Same | Same |
| R31 | Scratch | Scratch | Scratch |
| LR | Link register | Link register | Scratch |

*Rationale*: These register meanings are compatible with Go’s
stack-based calling convention.

The link register, LR, holds the function return
address at the function entry and is set to the correct return
address before exiting the function. It is also used
in some cases as the function address when doing an indirect call.

The register R2 contains the address of the TOC (table of contents) which
contains data or code addresses used when generating position independent
code. Non-Go code generated when using cgo contains TOC-relative addresses
which depend on R2 holding a valid TOC. Go code compiled with -shared or
-dynlink initializes and maintains R2 and uses it in some cases for
function calls; Go code compiled without these options does not modify R2.

When making a function call R12 contains the function address for use by the
code to generate R2 at the beginning of the function. R12 can be used for
other purposes within the body of the function, such as trampoline generation.

R20 and R21 are used in duffcopy and duffzero which could be generated
before arguments are saved so should not be used for register arguments.

The Count register CTR can be used as the call target for some branch instructions.
It holds the return address when preemption has occurred.

On PPC64 when a float32 is loaded it becomes a float64 in the register, which is
different from other platforms and that needs to be recognized by the internal
implementation of reflection so that float32 arguments are passed correctly.

Registers R18 - R29 and F13 - F31 are considered scratch registers.

#### Stack layout

The stack pointer, R1, grows down and is aligned to 8 bytes in Go, but changed
to 16 bytes when calling cgo.

A function's stack frame, after the frame is created, is laid out as
follows:

    +------------------------------+
    | ... locals ...               |
    | ... outgoing arguments ...   |
    | 24  TOC register R2 save     | When compiled with -shared/-dynlink
    | 16  Unused in Go             | Not used in Go
    |  8  CR save                  | nonvolatile CR fields
    |  0  return PC                | ← R1 points to
    +------------------------------+ ↓ lower addresses

The "return PC" is loaded to the link register, LR, as part of the
ppc64 `BL` operations.

On entry to a non-leaf function, the stack frame size is subtracted from R1 to
create its stack frame, and saves the value of LR at the bottom of the frame.

A leaf function that does not require any stack space does not modify R1 and
does not save LR.

*NOTE*: We might need to save the frame pointer on the stack as
in the PPC64 ELF v2 ABI so Go can inter-operate with platform debuggers
and profilers.

This stack layout is used by both register-based (ABIInternal) and
stack-based (ABI0) calling conventions.

#### Flags

The condition register consists of 8 condition code register fields
CR0-CR7. Go generated code only sets and uses CR0, commonly set by
compare functions and use to determine the target of a conditional
branch. The generated code does not set or use CR1-CR7.

The floating point status and control register (FPSCR) is initialized
to 0 by the kernel at startup of the Go program and not changed by
the Go generated code.

## Future directions

### Spill path improvements
@ -1008,7 +1008,7 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
|
||||
}
|
||||
r := v.Reg()
|
||||
getgFromTLS(s, r)
|
||||
case ssa.OpAMD64CALLstatic:
|
||||
case ssa.OpAMD64CALLstatic, ssa.OpAMD64CALLtail:
|
||||
if s.ABI == obj.ABI0 && v.Aux.(*ssa.AuxCall).Fn.ABI() == obj.ABIInternal {
|
||||
// zeroing X15 when entering ABIInternal from ABI0
|
||||
if buildcfg.GOOS != "plan9" { // do not use SSE on Plan 9
|
||||
@ -1017,6 +1017,10 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
|
||||
// set G register from TLS
|
||||
getgFromTLS(s, x86.REG_R14)
|
||||
}
|
||||
if v.Op == ssa.OpAMD64CALLtail {
|
||||
s.TailCall(v)
|
||||
break
|
||||
}
|
||||
s.Call(v)
|
||||
if s.ABI == obj.ABIInternal && v.Aux.(*ssa.AuxCall).Fn.ABI() == obj.ABI0 {
|
||||
// zeroing X15 when entering ABIInternal from ABI0
|
||||
@ -1314,22 +1318,9 @@ func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
|
||||
p.To.Type = obj.TYPE_BRANCH
|
||||
s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
|
||||
}
|
||||
case ssa.BlockExit:
|
||||
case ssa.BlockExit, ssa.BlockRetJmp:
|
||||
case ssa.BlockRet:
|
||||
s.Prog(obj.ARET)
|
||||
case ssa.BlockRetJmp:
|
||||
if s.ABI == obj.ABI0 && b.Aux.(*obj.LSym).ABI() == obj.ABIInternal {
|
||||
// zeroing X15 when entering ABIInternal from ABI0
|
||||
if buildcfg.GOOS != "plan9" { // do not use SSE on Plan 9
|
||||
opregreg(s, x86.AXORPS, x86.REG_X15, x86.REG_X15)
|
||||
}
|
||||
// set G register from TLS
|
||||
getgFromTLS(s, x86.REG_R14)
|
||||
}
|
||||
p := s.Prog(obj.ARET)
|
||||
p.To.Type = obj.TYPE_MEM
|
||||
p.To.Name = obj.NAME_EXTERN
|
||||
p.To.Sym = b.Aux.(*obj.LSym)
|
||||
|
||||
case ssa.BlockAMD64EQF:
|
||||
s.CombJump(b, next, &eqfJumps)
|
||||
|
@ -696,6 +696,8 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
|
||||
p.To.Reg = v.Reg()
|
||||
case ssa.OpARMCALLstatic, ssa.OpARMCALLclosure, ssa.OpARMCALLinter:
|
||||
s.Call(v)
|
||||
case ssa.OpARMCALLtail:
|
||||
s.TailCall(v)
|
||||
case ssa.OpARMCALLudiv:
|
||||
p := s.Prog(obj.ACALL)
|
||||
p.To.Type = obj.TYPE_MEM
|
||||
@ -936,17 +938,11 @@ func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
|
||||
s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
|
||||
}
|
||||
|
||||
case ssa.BlockExit:
|
||||
case ssa.BlockExit, ssa.BlockRetJmp:
|
||||
|
||||
case ssa.BlockRet:
|
||||
s.Prog(obj.ARET)
|
||||
|
||||
case ssa.BlockRetJmp:
|
||||
p := s.Prog(obj.ARET)
|
||||
p.To.Type = obj.TYPE_MEM
|
||||
p.To.Name = obj.NAME_EXTERN
|
||||
p.To.Sym = b.Aux.(*obj.LSym)
|
||||
|
||||
case ssa.BlockARMEQ, ssa.BlockARMNE,
|
||||
ssa.BlockARMLT, ssa.BlockARMGE,
|
||||
ssa.BlockARMLE, ssa.BlockARMGT,
|
||||
|
@ -315,6 +315,8 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
|
||||
genshift(s, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_LR, v.AuxInt)
|
||||
case ssa.OpARM64MVNshiftRA, ssa.OpARM64NEGshiftRA:
|
||||
genshift(s, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_AR, v.AuxInt)
|
||||
case ssa.OpARM64MVNshiftRO:
|
||||
genshift(s, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_ROR, v.AuxInt)
|
||||
case ssa.OpARM64ADDshiftLL,
|
||||
ssa.OpARM64SUBshiftLL,
|
||||
ssa.OpARM64ANDshiftLL,
|
||||
@ -342,6 +344,13 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
|
||||
ssa.OpARM64ORNshiftRA,
|
||||
ssa.OpARM64BICshiftRA:
|
||||
genshift(s, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_AR, v.AuxInt)
|
||||
case ssa.OpARM64ANDshiftRO,
|
||||
ssa.OpARM64ORshiftRO,
|
||||
ssa.OpARM64XORshiftRO,
|
||||
ssa.OpARM64EONshiftRO,
|
||||
ssa.OpARM64ORNshiftRO,
|
||||
ssa.OpARM64BICshiftRO:
|
||||
genshift(s, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_ROR, v.AuxInt)
|
||||
case ssa.OpARM64MOVDconst:
|
||||
p := s.Prog(v.Op.Asm())
|
||||
p.From.Type = obj.TYPE_CONST
|
||||
@ -389,6 +398,8 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
|
||||
genshift(s, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_LR, v.AuxInt)
|
||||
case ssa.OpARM64CMPshiftRA, ssa.OpARM64CMNshiftRA, ssa.OpARM64TSTshiftRA:
|
||||
genshift(s, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_AR, v.AuxInt)
|
||||
case ssa.OpARM64TSTshiftRO:
|
||||
genshift(s, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_ROR, v.AuxInt)
|
||||
case ssa.OpARM64MOVDaddr:
|
||||
p := s.Prog(arm64.AMOVD)
|
||||
p.From.Type = obj.TYPE_ADDR
|
||||
@ -1046,6 +1057,8 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
|
||||
p4.To.SetTarget(p)
|
||||
case ssa.OpARM64CALLstatic, ssa.OpARM64CALLclosure, ssa.OpARM64CALLinter:
|
||||
s.Call(v)
|
||||
case ssa.OpARM64CALLtail:
|
||||
s.TailCall(v)
|
||||
case ssa.OpARM64LoweredWB:
|
||||
p := s.Prog(obj.ACALL)
|
||||
p.To.Type = obj.TYPE_MEM
|
||||
@ -1241,17 +1254,11 @@ func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
|
||||
s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
|
||||
}
|
||||
|
||||
case ssa.BlockExit:
|
||||
case ssa.BlockExit, ssa.BlockRetJmp:
|
||||
|
||||
case ssa.BlockRet:
|
||||
s.Prog(obj.ARET)
|
||||
|
||||
case ssa.BlockRetJmp:
|
||||
p := s.Prog(obj.ARET)
|
||||
p.To.Type = obj.TYPE_MEM
|
||||
p.To.Name = obj.NAME_EXTERN
|
||||
p.To.Sym = b.Aux.(*obj.LSym)
|
||||
|
||||
case ssa.BlockARM64EQ, ssa.BlockARM64NE,
|
||||
ssa.BlockARM64LT, ssa.BlockARM64GE,
|
||||
ssa.BlockARM64LE, ssa.BlockARM64GT,
|
||||
|
@@ -117,6 +117,7 @@ func stmts(nn *ir.Nodes) {
		}

		if cut {
			ir.VisitList((*nn)[i+1:len(*nn)], markHiddenClosureDead)
			*nn = (*nn)[:i+1]
			break
		}
@ -217,6 +217,7 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir
|
||||
DeclCol: declpos.RelCol(),
|
||||
InlIndex: int32(inlIndex),
|
||||
ChildIndex: -1,
|
||||
DictIndex: n.DictIndex,
|
||||
})
|
||||
// Record go type of to insure that it gets emitted by the linker.
|
||||
fnsym.Func().RecordAutoType(reflectdata.TypeLinksym(n.Type()))
|
||||
@ -374,6 +375,7 @@ func createSimpleVar(fnsym *obj.LSym, n *ir.Name) *dwarf.Var {
|
||||
DeclCol: declpos.RelCol(),
|
||||
InlIndex: int32(inlIndex),
|
||||
ChildIndex: -1,
|
||||
DictIndex: n.DictIndex,
|
||||
}
|
||||
}
|
||||
|
||||
@ -478,6 +480,7 @@ func createComplexVar(fnsym *obj.LSym, fn *ir.Func, varID ssa.VarID) *dwarf.Var
|
||||
DeclCol: declpos.RelCol(),
|
||||
InlIndex: int32(inlIndex),
|
||||
ChildIndex: -1,
|
||||
DictIndex: n.DictIndex,
|
||||
}
|
||||
list := debug.LocationLists[varID]
|
||||
if len(list) != 0 {
|
||||
|
@ -180,7 +180,8 @@ func (e *escape) stmt(n ir.Node) {
|
||||
e.goDeferStmt(n)
|
||||
|
||||
case ir.OTAILCALL:
|
||||
// TODO(mdempsky): Treat like a normal call? esc.go used to just ignore it.
|
||||
n := n.(*ir.TailCallStmt)
|
||||
e.call(nil, n.Call)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -72,7 +72,7 @@ const (
|
||||
structType
|
||||
interfaceType
|
||||
typeParamType
|
||||
instType
|
||||
instanceType
|
||||
unionType
|
||||
)
|
||||
|
||||
@ -314,21 +314,21 @@ func (r *importReader) obj(name string) {
|
||||
tparams = r.tparamList()
|
||||
}
|
||||
sig := r.signature(nil)
|
||||
sig.SetTParams(tparams)
|
||||
sig.SetTypeParams(tparams)
|
||||
r.declare(types2.NewFunc(pos, r.currPkg, name, sig))
|
||||
|
||||
case 'T', 'U':
|
||||
var tparams []*types2.TypeParam
|
||||
if tag == 'U' {
|
||||
tparams = r.tparamList()
|
||||
}
|
||||
|
||||
// Types can be recursive. We need to setup a stub
|
||||
// declaration before recursing.
|
||||
obj := types2.NewTypeName(pos, r.currPkg, name, nil)
|
||||
named := types2.NewNamed(obj, nil, nil)
|
||||
named.SetTParams(tparams)
|
||||
// Declare obj before calling r.tparamList, so the new type name is recognized
|
||||
// if used in the constraint of one of its own typeparams (see #48280).
|
||||
r.declare(obj)
|
||||
if tag == 'U' {
|
||||
tparams := r.tparamList()
|
||||
named.SetTypeParams(tparams)
|
||||
}
|
||||
|
||||
underlying := r.p.typAt(r.uint64(), named).Underlying()
|
||||
named.SetUnderlying(underlying)
|
||||
@ -343,13 +343,13 @@ func (r *importReader) obj(name string) {
|
||||
// If the receiver has any targs, set those as the
|
||||
// rparams of the method (since those are the
|
||||
// typeparams being used in the method sig/body).
|
||||
targs := baseType(msig.Recv().Type()).TArgs()
|
||||
targs := baseType(msig.Recv().Type()).TypeArgs()
|
||||
if targs.Len() > 0 {
|
||||
rparams := make([]*types2.TypeParam, targs.Len())
|
||||
for i := range rparams {
|
||||
rparams[i] = types2.AsTypeParam(targs.At(i))
|
||||
}
|
||||
msig.SetRParams(rparams)
|
||||
msig.SetRecvTypeParams(rparams)
|
||||
}
|
||||
|
||||
named.AddMethod(types2.NewFunc(mpos, r.currPkg, mname, msig))
|
||||
@ -365,7 +365,7 @@ func (r *importReader) obj(name string) {
|
||||
}
|
||||
name0, sub := parseSubscript(name)
|
||||
tn := types2.NewTypeName(pos, r.currPkg, name0, nil)
|
||||
t := (*types2.Checker)(nil).NewTypeParam(tn, nil)
|
||||
t := types2.NewTypeParam(tn, nil)
|
||||
if sub == 0 {
|
||||
errorf("missing subscript")
|
||||
}
|
||||
@ -646,7 +646,7 @@ func (r *importReader) doType(base *types2.Named) types2.Type {
|
||||
r.p.doDecl(pkg, name)
|
||||
return r.p.tparamIndex[id]
|
||||
|
||||
case instType:
|
||||
case instanceType:
|
||||
if r.p.exportVersion < iexportVersionGenerics {
|
||||
errorf("unexpected instantiation type")
|
||||
}
|
||||
@ -661,7 +661,7 @@ func (r *importReader) doType(base *types2.Named) types2.Type {
|
||||
baseType := r.typ()
|
||||
// The imported instantiated type doesn't include any methods, so
|
||||
// we must always use the methods of the base (orig) type.
|
||||
// TODO provide a non-nil *Checker
|
||||
// TODO provide a non-nil *Environment
|
||||
t, _ := types2.Instantiate(nil, baseType, targs, false)
|
||||
return t
|
||||
|
||||
|
@ -544,6 +544,9 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
|
||||
call := call.(*ir.CallExpr)
|
||||
call.NoInline = true
|
||||
}
|
||||
case ir.OTAILCALL:
|
||||
n := n.(*ir.TailCallStmt)
|
||||
n.Call.NoInline = true // Not inline a tail call for now. Maybe we could inline it just like RETURN fn(arg)?
|
||||
|
||||
// TODO do them here (or earlier),
|
||||
// so escape analysis can avoid more heapmoves.
|
||||
|
@ -570,11 +570,10 @@ func (*SelectorExpr) CanBeNtype() {}
|
||||
// A SliceExpr is a slice expression X[Low:High] or X[Low:High:Max].
|
||||
type SliceExpr struct {
|
||||
miniExpr
|
||||
X Node
|
||||
Low Node
|
||||
High Node
|
||||
Max Node
|
||||
CheckPtrCall *CallExpr `mknode:"-"`
|
||||
X Node
|
||||
Low Node
|
||||
High Node
|
||||
Max Node
|
||||
}
|
||||
|
||||
func NewSliceExpr(pos src.XPos, op Op, x, low, high, max Node) *SliceExpr {
|
||||
|
@ -386,7 +386,7 @@ func stmtFmt(n Node, s fmt.State) {
|
||||
|
||||
case OTAILCALL:
|
||||
n := n.(*TailCallStmt)
|
||||
fmt.Fprintf(s, "tailcall %v", n.Target)
|
||||
fmt.Fprintf(s, "tailcall %v", n.Call)
|
||||
|
||||
case OINLMARK:
|
||||
n := n.(*InlineMarkStmt)
|
||||
@ -559,7 +559,7 @@ func exprFmt(n Node, s fmt.State, prec int) {
|
||||
}
|
||||
|
||||
nprec := OpPrec[n.Op()]
|
||||
if n.Op() == OTYPE && n.Type().IsPtr() {
|
||||
if n.Op() == OTYPE && n.Type() != nil && n.Type().IsPtr() {
|
||||
nprec = OpPrec[ODEREF]
|
||||
}
|
||||
|
||||
@ -1147,6 +1147,7 @@ func dumpNodeHeader(w io.Writer, n Node) {
|
||||
}
|
||||
// TODO(mdempsky): Print line pragma details too.
|
||||
file := filepath.Base(pos.Filename())
|
||||
// Note: this output will be parsed by ssa/html.go:(*HTMLWriter).WriteAST. Keep in sync.
|
||||
fmt.Fprintf(w, "%s:%d:%d", file, pos.Line(), pos.Col())
|
||||
}
|
||||
}
|
||||
|
@@ -62,7 +62,7 @@ const (

func (n *miniNode) Typecheck() uint8 { return n.bits.get2(miniTypecheckShift) }
func (n *miniNode) SetTypecheck(x uint8) {
	if x > 3 {
	if x > 2 {
		panic(fmt.Sprintf("cannot SetTypecheck %d", x))
	}
	n.bits.set2(miniTypecheckShift, x)
@ -40,6 +40,7 @@ type Name struct {
|
||||
Class Class // uint8
|
||||
pragma PragmaFlag // int16
|
||||
flags bitset16
|
||||
DictIndex uint16 // index of the dictionary entry describing the type of this variable declaration plus 1
|
||||
sym *types.Sym
|
||||
Func *Func // TODO(austin): nil for I.M, eqFor, hashfor, and hashmem
|
||||
Offset_ int64
|
||||
|
@ -1331,15 +1331,15 @@ func (n *TailCallStmt) doChildren(do func(Node) bool) bool {
|
||||
if doNodes(n.init, do) {
|
||||
return true
|
||||
}
|
||||
if n.Target != nil && do(n.Target) {
|
||||
if n.Call != nil && do(n.Call) {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
func (n *TailCallStmt) editChildren(edit func(Node) Node) {
|
||||
editNodes(n.init, edit)
|
||||
if n.Target != nil {
|
||||
n.Target = edit(n.Target).(*Name)
|
||||
if n.Call != nil {
|
||||
n.Call = edit(n.Call).(*CallExpr)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -385,14 +385,11 @@ func NewSwitchStmt(pos src.XPos, tag Node, cases []*CaseClause) *SwitchStmt {
|
||||
// code generation to jump directly to another function entirely.
|
||||
type TailCallStmt struct {
|
||||
miniStmt
|
||||
Target *Name
|
||||
Call *CallExpr // the underlying call
|
||||
}
|
||||
|
||||
func NewTailCallStmt(pos src.XPos, target *Name) *TailCallStmt {
|
||||
if target.Op() != ONAME || target.Class != PFUNC {
|
||||
base.FatalfAt(pos, "tail call to non-func %v", target)
|
||||
}
|
||||
n := &TailCallStmt{Target: target}
|
||||
func NewTailCallStmt(pos src.XPos, call *CallExpr) *TailCallStmt {
|
||||
n := &TailCallStmt{Call: call}
|
||||
n.pos = pos
|
||||
n.op = OTAILCALL
|
||||
return n
|
||||
|
@ -11,33 +11,34 @@ import (
|
||||
|
||||
// Syms holds known symbols.
|
||||
var Syms struct {
|
||||
AssertE2I *obj.LSym
|
||||
AssertE2I2 *obj.LSym
|
||||
AssertI2I *obj.LSym
|
||||
AssertI2I2 *obj.LSym
|
||||
Deferproc *obj.LSym
|
||||
DeferprocStack *obj.LSym
|
||||
Deferreturn *obj.LSym
|
||||
Duffcopy *obj.LSym
|
||||
Duffzero *obj.LSym
|
||||
GCWriteBarrier *obj.LSym
|
||||
Goschedguarded *obj.LSym
|
||||
Growslice *obj.LSym
|
||||
Msanread *obj.LSym
|
||||
Msanwrite *obj.LSym
|
||||
Msanmove *obj.LSym
|
||||
Newobject *obj.LSym
|
||||
Newproc *obj.LSym
|
||||
Panicdivide *obj.LSym
|
||||
Panicshift *obj.LSym
|
||||
PanicdottypeE *obj.LSym
|
||||
PanicdottypeI *obj.LSym
|
||||
Panicnildottype *obj.LSym
|
||||
Panicoverflow *obj.LSym
|
||||
Raceread *obj.LSym
|
||||
Racereadrange *obj.LSym
|
||||
Racewrite *obj.LSym
|
||||
Racewriterange *obj.LSym
|
||||
AssertE2I *obj.LSym
|
||||
AssertE2I2 *obj.LSym
|
||||
AssertI2I *obj.LSym
|
||||
AssertI2I2 *obj.LSym
|
||||
CheckPtrAlignment *obj.LSym
|
||||
Deferproc *obj.LSym
|
||||
DeferprocStack *obj.LSym
|
||||
Deferreturn *obj.LSym
|
||||
Duffcopy *obj.LSym
|
||||
Duffzero *obj.LSym
|
||||
GCWriteBarrier *obj.LSym
|
||||
Goschedguarded *obj.LSym
|
||||
Growslice *obj.LSym
|
||||
Msanread *obj.LSym
|
||||
Msanwrite *obj.LSym
|
||||
Msanmove *obj.LSym
|
||||
Newobject *obj.LSym
|
||||
Newproc *obj.LSym
|
||||
Panicdivide *obj.LSym
|
||||
Panicshift *obj.LSym
|
||||
PanicdottypeE *obj.LSym
|
||||
PanicdottypeI *obj.LSym
|
||||
Panicnildottype *obj.LSym
|
||||
Panicoverflow *obj.LSym
|
||||
Raceread *obj.LSym
|
||||
Racereadrange *obj.LSym
|
||||
Racewrite *obj.LSym
|
||||
Racewriterange *obj.LSym
|
||||
// Wasm
|
||||
SigPanic *obj.LSym
|
||||
Staticuint64s *obj.LSym
|
||||
|
@ -475,6 +475,8 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
|
||||
p6.To.SetTarget(p2)
|
||||
case ssa.OpMIPSCALLstatic, ssa.OpMIPSCALLclosure, ssa.OpMIPSCALLinter:
|
||||
s.Call(v)
|
||||
case ssa.OpMIPSCALLtail:
|
||||
s.TailCall(v)
|
||||
case ssa.OpMIPSLoweredWB:
|
||||
p := s.Prog(obj.ACALL)
|
||||
p.To.Type = obj.TYPE_MEM
|
||||
@ -841,14 +843,9 @@ func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
|
||||
p.To.Type = obj.TYPE_BRANCH
|
||||
s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
|
||||
}
|
||||
case ssa.BlockExit:
|
||||
case ssa.BlockExit, ssa.BlockRetJmp:
|
||||
case ssa.BlockRet:
|
||||
s.Prog(obj.ARET)
|
||||
case ssa.BlockRetJmp:
|
||||
p := s.Prog(obj.ARET)
|
||||
p.To.Type = obj.TYPE_MEM
|
||||
p.To.Name = obj.NAME_EXTERN
|
||||
p.To.Sym = b.Aux.(*obj.LSym)
|
||||
case ssa.BlockMIPSEQ, ssa.BlockMIPSNE,
|
||||
ssa.BlockMIPSLTZ, ssa.BlockMIPSGEZ,
|
||||
ssa.BlockMIPSLEZ, ssa.BlockMIPSGTZ,
|
||||
|
@ -491,6 +491,8 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
|
||||
p6.To.SetTarget(p2)
|
||||
case ssa.OpMIPS64CALLstatic, ssa.OpMIPS64CALLclosure, ssa.OpMIPS64CALLinter:
|
||||
s.Call(v)
|
||||
case ssa.OpMIPS64CALLtail:
|
||||
s.TailCall(v)
|
||||
case ssa.OpMIPS64LoweredWB:
|
||||
p := s.Prog(obj.ACALL)
|
||||
p.To.Type = obj.TYPE_MEM
|
||||
@ -808,14 +810,9 @@ func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
|
||||
p.To.Type = obj.TYPE_BRANCH
|
||||
s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
|
||||
}
|
||||
case ssa.BlockExit:
|
||||
case ssa.BlockExit, ssa.BlockRetJmp:
|
||||
case ssa.BlockRet:
|
||||
s.Prog(obj.ARET)
|
||||
case ssa.BlockRetJmp:
|
||||
p := s.Prog(obj.ARET)
|
||||
p.To.Type = obj.TYPE_MEM
|
||||
p.To.Name = obj.NAME_EXTERN
|
||||
p.To.Sym = b.Aux.(*obj.LSym)
|
||||
case ssa.BlockMIPS64EQ, ssa.BlockMIPS64NE,
|
||||
ssa.BlockMIPS64LTZ, ssa.BlockMIPS64GEZ,
|
||||
ssa.BlockMIPS64LEZ, ssa.BlockMIPS64GTZ,
|
||||
|
@ -190,7 +190,7 @@ func (g *irgen) typeDecl(out *ir.Nodes, decl *syntax.TypeDecl) {
|
||||
// object to new type pragmas.]
|
||||
ntyp.SetUnderlying(g.typeExpr(decl.Type))
|
||||
|
||||
tparams := otyp.(*types2.Named).TParams()
|
||||
tparams := otyp.(*types2.Named).TypeParams()
|
||||
if n := tparams.Len(); n > 0 {
|
||||
rparams := make([]*types.Type, n)
|
||||
for i := range rparams {
|
||||
|
@ -255,7 +255,8 @@ func (r *decoder) strings() []string {
|
||||
return res
|
||||
}
|
||||
|
||||
func (r *decoder) rawValue() constant.Value {
|
||||
func (r *decoder) value() constant.Value {
|
||||
r.sync(syncValue)
|
||||
isComplex := r.bool()
|
||||
val := r.scalar()
|
||||
if isComplex {
|
||||
|
@ -237,7 +237,8 @@ func (w *encoder) strings(ss []string) {
|
||||
}
|
||||
}
|
||||
|
||||
func (w *encoder) rawValue(val constant.Value) {
|
||||
func (w *encoder) value(val constant.Value) {
|
||||
w.sync(syncValue)
|
||||
if w.bool(val.Kind() == constant.Complex) {
|
||||
w.scalar(constant.Real(val))
|
||||
w.scalar(constant.Imag(val))
|
||||
|
@ -250,44 +250,6 @@ func (g *irgen) selectorExpr(pos src.XPos, typ types2.Type, expr *syntax.Selecto
|
||||
// only be fully transformed once it has an instantiated type.
|
||||
n := ir.NewSelectorExpr(pos, ir.OXDOT, x, typecheck.Lookup(expr.Sel.Value))
|
||||
typed(g.typ(typ), n)
|
||||
|
||||
// Fill in n.Selection for a generic method reference or a bound
|
||||
// interface method, even though we won't use it directly, since it
|
||||
// is useful for analysis. Specifically do not fill in for fields or
|
||||
// other interfaces methods (method call on an interface value), so
|
||||
// n.Selection being non-nil means a method reference for a generic
|
||||
// type or a method reference due to a bound.
|
||||
obj2 := g.info.Selections[expr].Obj()
|
||||
sig := types2.AsSignature(obj2.Type())
|
||||
if sig == nil || sig.Recv() == nil {
|
||||
return n
|
||||
}
|
||||
index := g.info.Selections[expr].Index()
|
||||
last := index[len(index)-1]
|
||||
// recvType is the receiver of the method being called. Because of the
|
||||
// way methods are imported, g.obj(obj2) doesn't work across
|
||||
// packages, so we have to lookup the method via the receiver type.
|
||||
recvType := deref2(sig.Recv().Type())
|
||||
if types2.AsInterface(recvType.Underlying()) != nil {
|
||||
fieldType := n.X.Type()
|
||||
for _, ix := range index[:len(index)-1] {
|
||||
fieldType = deref(fieldType).Field(ix).Type
|
||||
}
|
||||
if fieldType.Kind() == types.TTYPEPARAM {
|
||||
n.Selection = fieldType.Bound().AllMethods().Index(last)
|
||||
//fmt.Printf(">>>>> %v: Bound call %v\n", base.FmtPos(pos), n.Sel)
|
||||
} else {
|
||||
assert(fieldType.Kind() == types.TINTER)
|
||||
//fmt.Printf(">>>>> %v: Interface call %v\n", base.FmtPos(pos), n.Sel)
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
recvObj := types2.AsNamed(recvType).Obj()
|
||||
recv := g.pkg(recvObj.Pkg()).Lookup(recvObj.Name()).Def
|
||||
n.Selection = recv.Type().Methods().Index(last)
|
||||
//fmt.Printf(">>>>> %v: Method call %v\n", base.FmtPos(pos), n.Sel)
|
||||
|
||||
return n
|
||||
}
|
||||
|
||||
@ -344,7 +306,7 @@ func (g *irgen) selectorExpr(pos src.XPos, typ types2.Type, expr *syntax.Selecto
|
||||
if wantPtr {
|
||||
recvType2Base = types2.AsPointer(recvType2).Elem()
|
||||
}
|
||||
if types2.AsNamed(recvType2Base).TParams().Len() > 0 {
|
||||
if types2.AsNamed(recvType2Base).TypeParams().Len() > 0 {
|
||||
// recvType2 is the original generic type that is
|
||||
// instantiated for this method call.
|
||||
// selinfo.Recv() is the instantiated type
|
||||
@ -360,12 +322,10 @@ func (g *irgen) selectorExpr(pos src.XPos, typ types2.Type, expr *syntax.Selecto
|
||||
n.(*ir.SelectorExpr).Selection.Nname = method
|
||||
typed(method.Type(), n)
|
||||
|
||||
// selinfo.Targs() are the types used to
|
||||
// instantiate the type of receiver
|
||||
targs2 := getTargs(selinfo)
|
||||
targs := make([]ir.Node, targs2.Len())
|
||||
xt := deref(x.Type())
|
||||
targs := make([]ir.Node, len(xt.RParams()))
|
||||
for i := range targs {
|
||||
targs[i] = ir.TypeNode(g.typ(targs2.At(i)))
|
||||
targs[i] = ir.TypeNode(xt.RParams()[i])
|
||||
}
|
||||
|
||||
// Create function instantiation with the type
|
||||
@ -388,16 +348,6 @@ func (g *irgen) selectorExpr(pos src.XPos, typ types2.Type, expr *syntax.Selecto
|
||||
return n
|
||||
}
|
||||
|
||||
// getTargs gets the targs associated with the receiver of a selected method
|
||||
func getTargs(selinfo *types2.Selection) *types2.TypeList {
|
||||
r := deref2(selinfo.Recv())
|
||||
n := types2.AsNamed(r)
|
||||
if n == nil {
|
||||
base.Fatalf("Incorrect type for selinfo %v", selinfo)
|
||||
}
|
||||
return n.TArgs()
|
||||
}
|
||||
|
||||
func (g *irgen) exprList(expr syntax.Expr) []ir.Node {
|
||||
return g.exprs(unpackListExpr(expr))
|
||||
}
|
||||
@ -440,9 +390,10 @@ func (g *irgen) compLit(typ types2.Type, lit *syntax.CompositeLit) ir.Node {
|
||||
} else {
|
||||
key = g.expr(elem.Key)
|
||||
}
|
||||
exprs[i] = ir.NewKeyExpr(g.pos(elem), key, g.expr(elem.Value))
|
||||
value := wrapname(g.pos(elem.Value), g.expr(elem.Value))
|
||||
exprs[i] = ir.NewKeyExpr(g.pos(elem), key, value)
|
||||
default:
|
||||
exprs[i] = g.expr(elem)
|
||||
exprs[i] = wrapname(g.pos(elem), g.expr(elem))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -95,16 +95,12 @@ func Binary(pos src.XPos, op ir.Op, typ *types.Type, x, y ir.Node) ir.Node {
|
||||
return typed(x.Type(), ir.NewLogicalExpr(pos, op, x, y))
|
||||
case ir.OADD:
|
||||
n := ir.NewBinaryExpr(pos, op, x, y)
|
||||
if x.Type().HasTParam() || y.Type().HasTParam() {
|
||||
// Delay transformAdd() if either arg has a type param,
|
||||
// since it needs to know the exact types to decide whether
|
||||
// to transform OADD to OADDSTR.
|
||||
n.SetType(typ)
|
||||
n.SetTypecheck(3)
|
||||
return n
|
||||
}
|
||||
typed(typ, n)
|
||||
return transformAdd(n)
|
||||
r := ir.Node(n)
|
||||
if !delayTransform() {
|
||||
r = transformAdd(n)
|
||||
}
|
||||
return r
|
||||
default:
|
||||
return typed(x.Type(), ir.NewBinaryExpr(pos, op, x, y))
|
||||
}
|
||||
@ -189,17 +185,6 @@ func Call(pos src.XPos, typ *types.Type, fun ir.Node, args []ir.Node, dots bool)
|
||||
// A function instantiation (even if fully concrete) shouldn't be
|
||||
// transformed yet, because we need to add the dictionary during the
|
||||
// transformation.
|
||||
//
|
||||
// However, if we have a function type (even though it is
|
||||
// parameterized), then we can add in any needed CONVIFACE nodes via
|
||||
// typecheckaste(). We need to call transformArgs() to deal first
|
||||
// with the f(g(()) case where g returns multiple return values. We
|
||||
// can't do anything if fun is a type param (which is probably
|
||||
// described by a structural constraint)
|
||||
if fun.Type().Kind() == types.TFUNC {
|
||||
transformArgs(n)
|
||||
typecheckaste(ir.OCALL, fun, n.IsDDD, fun.Type().Params(), n.Args, true)
|
||||
}
|
||||
return typed(typ, n)
|
||||
}
|
||||
|
||||
@ -212,22 +197,10 @@ func Call(pos src.XPos, typ *types.Type, fun ir.Node, args []ir.Node, dots bool)
|
||||
|
||||
func Compare(pos src.XPos, typ *types.Type, op ir.Op, x, y ir.Node) ir.Node {
|
||||
n := ir.NewBinaryExpr(pos, op, x, y)
|
||||
if x.Type().HasTParam() || y.Type().HasTParam() {
|
||||
xIsInt := x.Type().IsInterface()
|
||||
yIsInt := y.Type().IsInterface()
|
||||
if !(xIsInt && !yIsInt || !xIsInt && yIsInt) {
|
||||
// If either arg is a type param, then we can still do the
|
||||
// transformCompare() if we know that one arg is an interface
|
||||
// and the other is not. Otherwise, we delay
|
||||
// transformCompare(), since it needs to know the exact types
|
||||
// to decide on any needed conversions.
|
||||
n.SetType(typ)
|
||||
n.SetTypecheck(3)
|
||||
return n
|
||||
}
|
||||
}
|
||||
typed(typ, n)
|
||||
transformCompare(n)
|
||||
if !delayTransform() {
|
||||
transformCompare(n)
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
@ -299,15 +272,11 @@ func method(typ *types.Type, index int) *types.Field {
|
||||
|
||||
func Index(pos src.XPos, typ *types.Type, x, index ir.Node) ir.Node {
|
||||
n := ir.NewIndexExpr(pos, x, index)
|
||||
if x.Type().HasTParam() {
|
||||
// transformIndex needs to know exact type
|
||||
n.SetType(typ)
|
||||
n.SetTypecheck(3)
|
||||
return n
|
||||
}
|
||||
typed(typ, n)
|
||||
// transformIndex will modify n.Type() for OINDEXMAP.
|
||||
transformIndex(n)
|
||||
if !delayTransform() {
|
||||
// transformIndex will modify n.Type() for OINDEXMAP.
|
||||
transformIndex(n)
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
@ -317,14 +286,10 @@ func Slice(pos src.XPos, typ *types.Type, x, low, high, max ir.Node) ir.Node {
|
||||
op = ir.OSLICE3
|
||||
}
|
||||
n := ir.NewSliceExpr(pos, op, x, low, high, max)
|
||||
if x.Type().HasTParam() {
|
||||
// transformSlice needs to know if x.Type() is a string or an array or a slice.
|
||||
n.SetType(typ)
|
||||
n.SetTypecheck(3)
|
||||
return n
|
||||
}
|
||||
typed(typ, n)
|
||||
transformSlice(n)
|
||||
if !delayTransform() {
|
||||
transformSlice(n)
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
@ -366,3 +331,9 @@ func IncDec(pos src.XPos, op ir.Op, x ir.Node) *ir.AssignOpStmt {
|
||||
}
|
||||
return ir.NewAssignOpStmt(pos, op, x, bl)
|
||||
}
|
||||
|
||||
// delayTransform returns true if we should delay all transforms, because we are
|
||||
// creating the nodes for a generic function/method.
|
||||
func delayTransform() bool {
|
||||
return ir.CurFunc != nil && ir.CurFunc.Type().HasTParam()
|
||||
}
|
||||
|
@ -43,12 +43,12 @@ var haveLegacyImports = false
|
||||
// for an imported package by overloading writeNewExportFunc, then
|
||||
// that payload will be mapped into memory and passed to
|
||||
// newReadImportFunc.
|
||||
var newReadImportFunc = func(data string, pkg1 *types.Pkg, check *types2.Checker, packages map[string]*types2.Package) (pkg2 *types2.Package, err error) {
|
||||
var newReadImportFunc = func(data string, pkg1 *types.Pkg, env *types2.Environment, packages map[string]*types2.Package) (pkg2 *types2.Package, err error) {
|
||||
panic("unexpected new export data payload")
|
||||
}
|
||||
|
||||
type gcimports struct {
|
||||
check *types2.Checker
|
||||
env *types2.Environment
|
||||
packages map[string]*types2.Package
|
||||
}
|
||||
|
||||
@ -61,7 +61,7 @@ func (m *gcimports) ImportFrom(path, srcDir string, mode types2.ImportMode) (*ty
|
||||
panic("mode must be 0")
|
||||
}
|
||||
|
||||
_, pkg, err := readImportFile(path, typecheck.Target, m.check, m.packages)
|
||||
_, pkg, err := readImportFile(path, typecheck.Target, m.env, m.packages)
|
||||
return pkg, err
|
||||
}
|
||||
|
||||
@ -224,7 +224,7 @@ func parseImportPath(pathLit *syntax.BasicLit) (string, error) {
|
||||
// readImportFile reads the import file for the given package path and
|
||||
// returns its types.Pkg representation. If packages is non-nil, the
|
||||
// types2.Package representation is also returned.
|
||||
func readImportFile(path string, target *ir.Package, check *types2.Checker, packages map[string]*types2.Package) (pkg1 *types.Pkg, pkg2 *types2.Package, err error) {
|
||||
func readImportFile(path string, target *ir.Package, env *types2.Environment, packages map[string]*types2.Package) (pkg1 *types.Pkg, pkg2 *types2.Package, err error) {
|
||||
path, err = resolveImportPath(path)
|
||||
if err != nil {
|
||||
return
|
||||
@ -279,7 +279,7 @@ func readImportFile(path string, target *ir.Package, check *types2.Checker, pack
|
||||
return
|
||||
}
|
||||
|
||||
pkg2, err = newReadImportFunc(data, pkg1, check, packages)
|
||||
pkg2, err = newReadImportFunc(data, pkg1, env, packages)
|
||||
} else {
|
||||
// We only have old data. Oh well, fall back to the legacy importers.
|
||||
haveLegacyImports = true
|
||||
|
@ -34,10 +34,13 @@ func checkFiles(noders []*noder) (posMap, *types2.Package, *types2.Info) {
|
||||
}
|
||||
|
||||
// typechecking
|
||||
env := types2.NewEnvironment()
|
||||
importer := gcimports{
|
||||
env: env,
|
||||
packages: map[string]*types2.Package{"unsafe": types2.Unsafe},
|
||||
}
|
||||
conf := types2.Config{
|
||||
Environment: env,
|
||||
GoVersion: base.Flag.Lang,
|
||||
IgnoreLabels: true, // parser already checked via syntax.CheckBranches mode
|
||||
CompilerErrorMessages: true, // use error strings matching existing compiler errors
|
||||
@ -60,9 +63,7 @@ func checkFiles(noders []*noder) (posMap, *types2.Package, *types2.Info) {
|
||||
// expand as needed
|
||||
}
|
||||
|
||||
pkg := types2.NewPackage(base.Ctxt.Pkgpath, "")
|
||||
importer.check = types2.NewChecker(&conf, pkg, info)
|
||||
err := importer.check.Files(files)
|
||||
pkg, err := conf.Check(base.Ctxt.Pkgpath, files, info)
|
||||
|
||||
base.ExitIfErrors()
|
||||
if err != nil {
|
||||
@ -96,39 +97,42 @@ func check2(noders []*noder) {
|
||||
}
|
||||
}
|
||||
|
||||
// gfInfo is information gathered on a generic function.
|
||||
type gfInfo struct {
|
||||
tparams []*types.Type
|
||||
// dictInfo is the dictionary format for an instantiation of a generic function with
|
||||
// particular shapes. shapeParams, derivedTypes, subDictCalls, and itabConvs describe
|
||||
// the actual dictionary entries in order, and the remaining fields are other info
|
||||
// needed in doing dictionary processing during compilation.
|
||||
type dictInfo struct {
|
||||
// Types substituted for the type parameters, which are shape types.
|
||||
shapeParams []*types.Type
|
||||
// All types derived from those typeparams used in the instantiation.
|
||||
derivedTypes []*types.Type
|
||||
// Nodes in generic function that requires a subdictionary. Includes
|
||||
// Nodes in the instantiation that requires a subdictionary. Includes
|
||||
// method and function calls (OCALL), function values (OFUNCINST), method
|
||||
// values/expressions (OXDOT).
|
||||
subDictCalls []ir.Node
|
||||
// Nodes in generic functions that are a conversion from a typeparam/derived
|
||||
// Nodes in the instantiation that are a conversion from a typeparam/derived
|
||||
// type to a specific interface.
|
||||
itabConvs []ir.Node
|
||||
|
||||
// Mapping from each shape type that substitutes a type param, to its
|
||||
// type bound (which is also substitued with shapes if it is parameterized)
|
||||
shapeToBound map[*types.Type]*types.Type
|
||||
|
||||
// For type switches on nonempty interfaces, a map from OTYPE entries of
|
||||
// HasTParam type, to the interface type we're switching from.
|
||||
// TODO: what if the type we're switching from is a shape type?
|
||||
// HasShape type, to the interface type we're switching from.
|
||||
type2switchType map[ir.Node]*types.Type
|
||||
}
|
||||
|
||||
// instInfo is information gathered on an gcshape (or fully concrete)
|
||||
// instantiation of a function.
|
||||
type instInfo struct {
|
||||
fun *ir.Func // The instantiated function (with body)
|
||||
dictParam *ir.Name // The node inside fun that refers to the dictionary param
|
||||
|
||||
gf *ir.Name // The associated generic function
|
||||
gfInfo *gfInfo
|
||||
|
||||
startSubDict int // Start of dict entries for subdictionaries
|
||||
startItabConv int // Start of dict entries for itab conversions
|
||||
dictLen int // Total number of entries in dictionary
|
||||
}
|
||||
|
||||
// Map from nodes in instantiated fun (OCALL, OCALLMETHOD, OFUNCINST, and
|
||||
// OMETHEXPR) to the associated dictionary entry for a sub-dictionary
|
||||
dictEntryMap map[ir.Node]int
|
||||
// instInfo is information gathered on an shape instantiation of a function.
|
||||
type instInfo struct {
|
||||
fun *ir.Func // The instantiated function (with body)
|
||||
dictParam *ir.Name // The node inside fun that refers to the dictionary param
|
||||
|
||||
dictInfo *dictInfo
|
||||
}
|
||||
|
||||
type irgen struct {
|
||||
@ -154,13 +158,8 @@ type irgen struct {
|
||||
|
||||
dnum int // for generating unique dictionary variables
|
||||
|
||||
// Map from generic function to information about its type params, derived
|
||||
// types, and subdictionaries.
|
||||
gfInfoMap map[*types.Sym]*gfInfo
|
||||
|
||||
// Map from a name of function that been instantiated to information about
|
||||
// its instantiated function, associated generic function/method, and the
|
||||
// mapping from IR nodes to dictionary entries.
|
||||
// its instantiated function (including dictionary format).
|
||||
instInfoMap map[*types.Sym]*instInfo
|
||||
|
||||
// dictionary syms which we need to finish, by writing out any itabconv
|
||||
@ -178,10 +177,11 @@ func (g *irgen) later(fn func()) {
|
||||
}
|
||||
|
||||
type delayInfo struct {
|
||||
gf *ir.Name
|
||||
targs []*types.Type
|
||||
sym *types.Sym
|
||||
off int
|
||||
gf *ir.Name
|
||||
targs []*types.Type
|
||||
sym *types.Sym
|
||||
off int
|
||||
isMeth bool
|
||||
}
|
||||
|
||||
type typeDelayInfo struct {
|
||||
|
@ -1537,7 +1537,7 @@ func (p *noder) mkname(name *syntax.Name) ir.Node {
|
||||
return mkname(p.name(name))
|
||||
}
|
||||
|
||||
func (p *noder) wrapname(n syntax.Node, x ir.Node) ir.Node {
|
||||
func wrapname(pos src.XPos, x ir.Node) ir.Node {
|
||||
// These nodes do not carry line numbers.
|
||||
// Introduce a wrapper node to give them the correct line.
|
||||
switch x.Op() {
|
||||
@ -1547,13 +1547,17 @@ func (p *noder) wrapname(n syntax.Node, x ir.Node) ir.Node {
|
||||
}
|
||||
fallthrough
|
||||
case ir.ONAME, ir.ONONAME, ir.OPACK:
|
||||
p := ir.NewParenExpr(p.pos(n), x)
|
||||
p := ir.NewParenExpr(pos, x)
|
||||
p.SetImplicit(true)
|
||||
return p
|
||||
}
|
||||
return x
|
||||
}
|
||||
|
||||
func (p *noder) wrapname(n syntax.Node, x ir.Node) ir.Node {
|
||||
return wrapname(p.pos(n), x)
|
||||
}
|
||||
|
||||
func (p *noder) setlineno(n syntax.Node) {
|
||||
if n != nil {
|
||||
base.Pos = p.pos(n)
|
||||
|
@ -10,6 +10,7 @@ import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/constant"
|
||||
"internal/buildcfg"
|
||||
"strings"
|
||||
|
||||
"cmd/compile/internal/base"
|
||||
@ -78,8 +79,6 @@ type reader struct {
|
||||
|
||||
p *pkgReader
|
||||
|
||||
ext *reader
|
||||
|
||||
dict *readerDict
|
||||
|
||||
// TODO(mdempsky): The state below is all specific to reading
|
||||
@ -194,15 +193,32 @@ func (pr *pkgReader) posBaseIdx(idx int) *src.PosBase {
|
||||
r := pr.newReader(relocPosBase, idx, syncPosBase)
|
||||
var b *src.PosBase
|
||||
|
||||
filename := r.string()
|
||||
absFilename := r.string()
|
||||
filename := absFilename
|
||||
|
||||
// For build artifact stability, the export data format only
|
||||
// contains the "absolute" filename as returned by objabi.AbsFile.
|
||||
// However, some tests (e.g., test/run.go's asmcheck tests) expect
|
||||
// to see the full, original filename printed out. Re-expanding
|
||||
// "$GOROOT" to buildcfg.GOROOT is a close-enough approximation to
|
||||
// satisfy this.
|
||||
//
|
||||
// TODO(mdempsky): De-duplicate this logic with similar logic in
|
||||
// cmd/link/internal/ld's expandGoroot. However, this will probably
|
||||
// require being more consistent about when we use native vs UNIX
|
||||
// file paths.
|
||||
const dollarGOROOT = "$GOROOT"
|
||||
if strings.HasPrefix(filename, dollarGOROOT) {
|
||||
filename = buildcfg.GOROOT + filename[len(dollarGOROOT):]
|
||||
}
|
||||
|
||||
if r.bool() {
|
||||
b = src.NewFileBase(filename, filename)
|
||||
b = src.NewFileBase(filename, absFilename)
|
||||
} else {
|
||||
pos := r.pos0()
|
||||
line := r.uint()
|
||||
col := r.uint()
|
||||
b = src.NewLinePragmaBase(pos, filename, filename, line, col)
|
||||
b = src.NewLinePragmaBase(pos, filename, absFilename, line, col)
|
||||
}
|
||||
|
||||
pr.posBases[idx] = b
|
||||
@ -568,10 +584,10 @@ func (pr *pkgReader) objIdx(idx int, implicits, explicits []*types.Type) ir.Node
|
||||
dict := pr.objDictIdx(sym, idx, implicits, explicits)
|
||||
|
||||
r := pr.newReader(relocObj, idx, syncObject1)
|
||||
r.ext = pr.newReader(relocObjExt, idx, syncObject1)
|
||||
rext := pr.newReader(relocObjExt, idx, syncObject1)
|
||||
|
||||
r.dict = dict
|
||||
r.ext.dict = dict
|
||||
rext.dict = dict
|
||||
|
||||
sym = r.mangle(sym)
|
||||
if !sym.IsBlank() && sym.Def != nil {
|
||||
@ -608,7 +624,8 @@ func (pr *pkgReader) objIdx(idx int, implicits, explicits []*types.Type) ir.Node
|
||||
|
||||
case objConst:
|
||||
name := do(ir.OLITERAL, false)
|
||||
typ, val := r.value()
|
||||
typ := r.typ()
|
||||
val := FixValue(typ, r.value())
|
||||
setType(name, typ)
|
||||
setValue(name, val)
|
||||
return name
|
||||
@ -623,7 +640,7 @@ func (pr *pkgReader) objIdx(idx int, implicits, explicits []*types.Type) ir.Node
|
||||
name.Func = ir.NewFunc(r.pos())
|
||||
name.Func.Nname = name
|
||||
|
||||
r.ext.funcExt(name)
|
||||
rext.funcExt(name)
|
||||
return name
|
||||
|
||||
case objType:
|
||||
@ -632,7 +649,7 @@ func (pr *pkgReader) objIdx(idx int, implicits, explicits []*types.Type) ir.Node
|
||||
setType(name, typ)
|
||||
|
||||
// Important: We need to do this before SetUnderlying.
|
||||
r.ext.typeExt(name)
|
||||
rext.typeExt(name)
|
||||
|
||||
// We need to defer CheckSize until we've called SetUnderlying to
|
||||
// handle recursive types.
|
||||
@ -642,7 +659,7 @@ func (pr *pkgReader) objIdx(idx int, implicits, explicits []*types.Type) ir.Node
|
||||
|
||||
methods := make([]*types.Field, r.len())
|
||||
for i := range methods {
|
||||
methods[i] = r.method()
|
||||
methods[i] = r.method(rext)
|
||||
}
|
||||
if len(methods) != 0 {
|
||||
typ.Methods().Set(methods)
|
||||
@ -655,7 +672,7 @@ func (pr *pkgReader) objIdx(idx int, implicits, explicits []*types.Type) ir.Node
|
||||
case objVar:
|
||||
name := do(ir.ONAME, false)
|
||||
setType(name, r.typ())
|
||||
r.ext.varExt(name)
|
||||
rext.varExt(name)
|
||||
return name
|
||||
}
|
||||
}
|
||||
@ -737,13 +754,7 @@ func (r *reader) typeParamNames() {
|
||||
}
|
||||
}
|
||||
|
||||
func (r *reader) value() (*types.Type, constant.Value) {
|
||||
r.sync(syncValue)
|
||||
typ := r.typ()
|
||||
return typ, FixValue(typ, r.rawValue())
|
||||
}
|
||||
|
||||
func (r *reader) method() *types.Field {
|
||||
func (r *reader) method(rext *reader) *types.Field {
|
||||
r.sync(syncMethod)
|
||||
pos := r.pos()
|
||||
pkg, sym := r.selector()
|
||||
@ -759,7 +770,7 @@ func (r *reader) method() *types.Field {
|
||||
name.Func = ir.NewFunc(r.pos())
|
||||
name.Func.Nname = name
|
||||
|
||||
r.ext.funcExt(name)
|
||||
rext.funcExt(name)
|
||||
|
||||
meth := types.NewField(name.Func.Pos(), sym, typ)
|
||||
meth.Nname = name
|
||||
@ -916,6 +927,11 @@ var bodyReader = map[*ir.Func]pkgReaderIndex{}
|
||||
// constructed.
|
||||
var todoBodies []*ir.Func
|
||||
|
||||
// todoBodiesDone signals that we constructed all function in todoBodies.
|
||||
// This is necessary to prevent reader.addBody adds thing to todoBodies
|
||||
// when nested inlining happens.
|
||||
var todoBodiesDone = false
|
||||
|
||||
func (r *reader) addBody(fn *ir.Func) {
|
||||
pri := pkgReaderIndex{r.p, r.reloc(relocBody), r.dict}
|
||||
bodyReader[fn] = pri
|
||||
@ -926,7 +942,7 @@ func (r *reader) addBody(fn *ir.Func) {
|
||||
return
|
||||
}
|
||||
|
||||
if r.curfn == nil {
|
||||
if r.curfn == nil && !todoBodiesDone {
|
||||
todoBodies = append(todoBodies, fn)
|
||||
return
|
||||
}
|
||||
@ -1538,7 +1554,8 @@ func (r *reader) expr() (res ir.Node) {
|
||||
|
||||
case exprConst:
|
||||
pos := r.pos()
|
||||
typ, val := r.value()
|
||||
typ := r.typ()
|
||||
val := FixValue(typ, r.value())
|
||||
op := r.op()
|
||||
orig := r.string()
|
||||
return typecheck.Expr(OrigConst(pos, typ, val, op, orig))
|
||||
|
@ -7,8 +7,6 @@
|
||||
package noder
|
||||
|
||||
import (
|
||||
"go/constant"
|
||||
|
||||
"cmd/compile/internal/base"
|
||||
"cmd/compile/internal/syntax"
|
||||
"cmd/compile/internal/types2"
|
||||
@ -18,7 +16,7 @@ import (
|
||||
type pkgReader2 struct {
|
||||
pkgDecoder
|
||||
|
||||
check *types2.Checker
|
||||
env *types2.Environment
|
||||
imports map[string]*types2.Package
|
||||
|
||||
posBases []*syntax.PosBase
|
||||
@ -26,11 +24,11 @@ type pkgReader2 struct {
|
||||
typs []types2.Type
|
||||
}
|
||||
|
||||
func readPackage2(check *types2.Checker, imports map[string]*types2.Package, input pkgDecoder) *types2.Package {
|
||||
func readPackage2(env *types2.Environment, imports map[string]*types2.Package, input pkgDecoder) *types2.Package {
|
||||
pr := pkgReader2{
|
||||
pkgDecoder: input,
|
||||
|
||||
check: check,
|
||||
env: env,
|
||||
imports: imports,
|
||||
|
||||
posBases: make([]*syntax.PosBase, input.numElems(relocPosBase)),
|
||||
@ -233,7 +231,7 @@ func (r *reader2) doTyp() (res types2.Type) {
|
||||
obj, targs := r.obj()
|
||||
name := obj.(*types2.TypeName)
|
||||
if len(targs) != 0 {
|
||||
t, _ := types2.Instantiate(types2.NewEnvironment(r.p.check), name.Type(), targs, false)
|
||||
t, _ := types2.Instantiate(r.p.env, name.Type(), targs, false)
|
||||
return t
|
||||
}
|
||||
return name.Type()
|
||||
@ -388,14 +386,15 @@ func (pr *pkgReader2) objIdx(idx int) (*types2.Package, string) {
|
||||
|
||||
case objConst:
|
||||
pos := r.pos()
|
||||
typ, val := r.value()
|
||||
typ := r.typ()
|
||||
val := r.value()
|
||||
return types2.NewConst(pos, objPkg, objName, typ, val)
|
||||
|
||||
case objFunc:
|
||||
pos := r.pos()
|
||||
tparams := r.typeParamNames()
|
||||
sig := r.signature(nil)
|
||||
sig.SetTParams(tparams)
|
||||
sig.SetTypeParams(tparams)
|
||||
return types2.NewFunc(pos, objPkg, objName, sig)
|
||||
|
||||
case objType:
|
||||
@ -428,11 +427,6 @@ func (pr *pkgReader2) objIdx(idx int) (*types2.Package, string) {
|
||||
return objPkg, objName
|
||||
}
|
||||
|
||||
func (r *reader2) value() (types2.Type, constant.Value) {
|
||||
r.sync(syncValue)
|
||||
return r.typ(), r.rawValue()
|
||||
}
|
||||
|
||||
func (pr *pkgReader2) objDictIdx(idx int) *reader2Dict {
|
||||
r := pr.newReader(relocObjDict, idx, syncObject1)
|
||||
|
||||
@ -481,7 +475,7 @@ func (r *reader2) typeParamNames() []*types2.TypeParam {
|
||||
pkg, name := r.localIdent()
|
||||
|
||||
tname := types2.NewTypeName(pos, pkg, name, nil)
|
||||
r.dict.tparams[i] = r.p.check.NewTypeParam(tname, nil)
|
||||
r.dict.tparams[i] = types2.NewTypeParam(tname, nil)
|
||||
}
|
||||
|
||||
for i, bound := range r.dict.bounds {
|
||||
@ -498,7 +492,7 @@ func (r *reader2) method() *types2.Func {
|
||||
|
||||
rparams := r.typeParamNames()
|
||||
sig := r.signature(r.param())
|
||||
sig.SetRParams(rparams)
|
||||
sig.SetRecvTypeParams(rparams)
|
||||
|
||||
_ = r.pos() // TODO(mdempsky): Remove; this is a hacker for linker.go.
|
||||
return types2.NewFunc(pos, pkg, name, sig)
|
||||
|
File diff suppressed because it is too large
Load Diff
@ -37,16 +37,12 @@ func (g *irgen) stmt(stmt syntax.Stmt) ir.Node {
|
||||
case *syntax.BlockStmt:
|
||||
return ir.NewBlockStmt(g.pos(stmt), g.blockStmt(stmt))
|
||||
case *syntax.ExprStmt:
|
||||
return g.expr(stmt.X)
|
||||
return wrapname(g.pos(stmt.X), g.expr(stmt.X))
|
||||
case *syntax.SendStmt:
|
||||
n := ir.NewSendStmt(g.pos(stmt), g.expr(stmt.Chan), g.expr(stmt.Value))
|
||||
if n.Chan.Type().HasTParam() || n.Value.Type().HasTParam() {
|
||||
// Delay transforming the send if the channel or value
|
||||
// have a type param.
|
||||
n.SetTypecheck(3)
|
||||
return n
|
||||
if !delayTransform() {
|
||||
transformSend(n)
|
||||
}
|
||||
transformSend(n)
|
||||
n.SetTypecheck(1)
|
||||
return n
|
||||
case *syntax.DeclStmt:
|
||||
@ -66,11 +62,9 @@ func (g *irgen) stmt(stmt syntax.Stmt) ir.Node {
|
||||
lhs := g.expr(stmt.Lhs)
|
||||
n = ir.NewAssignOpStmt(g.pos(stmt), op, lhs, rhs)
|
||||
}
|
||||
if n.X.Typecheck() == 3 {
|
||||
n.SetTypecheck(3)
|
||||
return n
|
||||
if !delayTransform() {
|
||||
transformAsOp(n)
|
||||
}
|
||||
transformAsOp(n)
|
||||
n.SetTypecheck(1)
|
||||
return n
|
||||
}
|
||||
@ -79,49 +73,24 @@ func (g *irgen) stmt(stmt syntax.Stmt) ir.Node {
|
||||
rhs := g.exprList(stmt.Rhs)
|
||||
names, lhs := g.assignList(stmt.Lhs, stmt.Op == syntax.Def)
|
||||
|
||||
// We must delay transforming the assign statement if any of the
|
||||
// lhs or rhs nodes are also delayed, since transformAssign needs
|
||||
// to know the types of the left and right sides in various cases.
|
||||
delay := false
|
||||
for _, e := range lhs {
|
||||
if e.Typecheck() == 3 {
|
||||
delay = true
|
||||
break
|
||||
}
|
||||
}
|
||||
for _, e := range rhs {
|
||||
if e.Typecheck() == 3 {
|
||||
delay = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if len(lhs) == 1 && len(rhs) == 1 {
|
||||
n := ir.NewAssignStmt(g.pos(stmt), lhs[0], rhs[0])
|
||||
n.Def = initDefn(n, names)
|
||||
|
||||
if delay {
|
||||
earlyTransformAssign(n, lhs, rhs)
|
||||
if !delayTransform() {
|
||||
lhs, rhs := []ir.Node{n.X}, []ir.Node{n.Y}
|
||||
transformAssign(n, lhs, rhs)
|
||||
n.X, n.Y = lhs[0], rhs[0]
|
||||
n.SetTypecheck(3)
|
||||
return n
|
||||
}
|
||||
|
||||
lhs, rhs := []ir.Node{n.X}, []ir.Node{n.Y}
|
||||
transformAssign(n, lhs, rhs)
|
||||
n.X, n.Y = lhs[0], rhs[0]
|
||||
n.SetTypecheck(1)
|
||||
return n
|
||||
}
|
||||
|
||||
n := ir.NewAssignListStmt(g.pos(stmt), ir.OAS2, lhs, rhs)
|
||||
n.Def = initDefn(n, names)
|
||||
if delay {
|
||||
earlyTransformAssign(n, lhs, rhs)
|
||||
n.SetTypecheck(3)
|
||||
return n
|
||||
if !delayTransform() {
|
||||
transformAssign(n, n.Lhs, n.Rhs)
|
||||
}
|
||||
transformAssign(n, n.Lhs, n.Rhs)
|
||||
n.SetTypecheck(1)
|
||||
return n
|
||||
|
||||
@ -131,21 +100,9 @@ func (g *irgen) stmt(stmt syntax.Stmt) ir.Node {
|
||||
return ir.NewGoDeferStmt(g.pos(stmt), g.tokOp(int(stmt.Tok), callOps[:]), g.expr(stmt.Call))
|
||||
case *syntax.ReturnStmt:
|
||||
n := ir.NewReturnStmt(g.pos(stmt), g.exprList(stmt.Results))
|
||||
for _, e := range n.Results {
|
||||
if e.Type().HasTParam() {
|
||||
// Delay transforming the return statement if any of the
|
||||
// return values have a type param.
|
||||
if !ir.HasNamedResults(ir.CurFunc) {
|
||||
transformArgs(n)
|
||||
// But add CONVIFACE nodes where needed if
|
||||
// any of the return values have interface type.
|
||||
typecheckaste(ir.ORETURN, nil, false, ir.CurFunc.Type().Results(), n.Results, true)
|
||||
}
|
||||
n.SetTypecheck(3)
|
||||
return n
|
||||
}
|
||||
if !delayTransform() {
|
||||
transformReturn(n)
|
||||
}
|
||||
transformReturn(n)
|
||||
n.SetTypecheck(1)
|
||||
return n
|
||||
case *syntax.IfStmt:
|
||||
@ -154,7 +111,10 @@ func (g *irgen) stmt(stmt syntax.Stmt) ir.Node {
|
||||
return g.forStmt(stmt)
|
||||
case *syntax.SelectStmt:
|
||||
n := g.selectStmt(stmt)
|
||||
transformSelect(n.(*ir.SelectStmt))
|
||||
|
||||
if !delayTransform() {
|
||||
transformSelect(n.(*ir.SelectStmt))
|
||||
}
|
||||
n.SetTypecheck(1)
|
||||
return n
|
||||
case *syntax.SwitchStmt:
|
||||
|
@ -157,7 +157,7 @@ func transformCall(n *ir.CallExpr) {
|
||||
n.SetOp(ir.OCALLFUNC)
|
||||
}
|
||||
|
||||
typecheckaste(ir.OCALL, n.X, n.IsDDD, t.Params(), n.Args, false)
|
||||
typecheckaste(ir.OCALL, n.X, n.IsDDD, t.Params(), n.Args)
|
||||
if l.Op() == ir.ODOTMETH && len(deref(n.X.Type().Recv().Type).RParams()) == 0 {
|
||||
typecheck.FixMethodCall(n)
|
||||
}
|
||||
@ -195,7 +195,7 @@ func transformCompare(n *ir.BinaryExpr) {
|
||||
aop, _ := typecheck.Assignop(lt, rt)
|
||||
if aop != ir.OXXX {
|
||||
types.CalcSize(lt)
|
||||
if lt.HasTParam() || rt.IsInterface() == lt.IsInterface() || lt.Size() >= 1<<16 {
|
||||
if lt.HasShape() || rt.IsInterface() == lt.IsInterface() || lt.Size() >= 1<<16 {
|
||||
l = ir.NewConvExpr(base.Pos, aop, rt, l)
|
||||
l.SetTypecheck(1)
|
||||
}
|
||||
@ -365,59 +365,6 @@ assignOK:
|
||||
}
|
||||
}
|
||||
|
||||
// Version of transformAssign that can run on generic code that adds CONVIFACE calls
|
||||
// as needed (and rewrites multi-value calls).
|
||||
func earlyTransformAssign(stmt ir.Node, lhs, rhs []ir.Node) {
|
||||
cr := len(rhs)
|
||||
if len(rhs) == 1 {
|
||||
if rtyp := rhs[0].Type(); rtyp != nil && rtyp.IsFuncArgStruct() {
|
||||
cr = rtyp.NumFields()
|
||||
}
|
||||
}
|
||||
|
||||
// x,y,z = f()
|
||||
_, isCallExpr := rhs[0].(*ir.CallExpr)
|
||||
if isCallExpr && cr > len(rhs) {
|
||||
stmt := stmt.(*ir.AssignListStmt)
|
||||
stmt.SetOp(ir.OAS2FUNC)
|
||||
r := rhs[0].(*ir.CallExpr)
|
||||
rtyp := r.Type()
|
||||
|
||||
mismatched := false
|
||||
failed := false
|
||||
for i := range lhs {
|
||||
result := rtyp.Field(i).Type
|
||||
|
||||
if lhs[i].Type() == nil || result == nil {
|
||||
failed = true
|
||||
} else if lhs[i] != ir.BlankNode && !types.Identical(lhs[i].Type(), result) {
|
||||
mismatched = true
|
||||
}
|
||||
}
|
||||
if mismatched && !failed {
|
||||
typecheck.RewriteMultiValueCall(stmt, r)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// x, ok = y
|
||||
if len(lhs) != len(rhs) {
|
||||
assert(len(lhs) == 2 && len(rhs) == 1)
|
||||
// TODO(danscales): deal with case where x or ok is an interface
|
||||
// type. We want to add CONVIFACE now, but that is tricky, because
|
||||
// the rhs may be AS2MAPR, AS2RECV, etc. which has two result values,
|
||||
// and that is not rewritten until the order phase (o.stmt, as2ok).
|
||||
return
|
||||
}
|
||||
|
||||
// Check for interface conversion on each assignment
|
||||
for i, r := range rhs {
|
||||
if lhs[i].Type() != nil && lhs[i].Type().IsInterface() {
|
||||
rhs[i] = assignconvfn(r, lhs[i].Type())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Corresponds to typecheck.typecheckargs. Really just deals with multi-value calls.
|
||||
func transformArgs(n ir.InitNode) {
|
||||
var list []ir.Node
|
||||
@ -457,11 +404,15 @@ func assignconvfn(n ir.Node, t *types.Type) ir.Node {
|
||||
return n
|
||||
}
|
||||
|
||||
if types.Identical(n.Type(), t) {
|
||||
if n.Op() == ir.OPAREN {
|
||||
n = n.(*ir.ParenExpr).X
|
||||
}
|
||||
|
||||
if types.IdenticalStrict(n.Type(), t) {
|
||||
return n
|
||||
}
|
||||
|
||||
op, why := typecheck.Assignop(n.Type(), t)
|
||||
op, why := Assignop(n.Type(), t)
|
||||
if op == ir.OXXX {
|
||||
base.Fatalf("found illegal assignment %+v -> %+v; %s", n.Type(), t, why)
|
||||
}
|
||||
@ -472,11 +423,35 @@ func assignconvfn(n ir.Node, t *types.Type) ir.Node {
|
||||
return r
|
||||
}
|
||||
|
||||
func Assignop(src, dst *types.Type) (ir.Op, string) {
|
||||
if src == dst {
|
||||
return ir.OCONVNOP, ""
|
||||
}
|
||||
if src == nil || dst == nil || src.Kind() == types.TFORW || dst.Kind() == types.TFORW || src.Underlying() == nil || dst.Underlying() == nil {
|
||||
return ir.OXXX, ""
|
||||
}
|
||||
|
||||
// 1. src type is identical to dst (taking shapes into account)
|
||||
if types.Identical(src, dst) {
|
||||
// We already know from assignconvfn above that IdenticalStrict(src,
|
||||
// dst) is false, so the types are not exactly the same and one of
|
||||
// src or dst is a shape. If dst is an interface (which means src is
|
||||
// an interface too), we need a real OCONVIFACE op; otherwise we need a
|
||||
// OCONVNOP. See issue #48453.
|
||||
if dst.IsInterface() {
|
||||
return ir.OCONVIFACE, ""
|
||||
} else {
|
||||
return ir.OCONVNOP, ""
|
||||
}
|
||||
}
|
||||
return typecheck.Assignop1(src, dst)
|
||||
}
|
||||
|
||||
// Corresponds to typecheck.typecheckaste, but we add an extra flag convifaceOnly
|
||||
// only. If convifaceOnly is true, we only do interface conversion. We use this to do
|
||||
// early insertion of CONVIFACE nodes during noder2, when the function or args may
|
||||
// have typeparams.
|
||||
func typecheckaste(op ir.Op, call ir.Node, isddd bool, tstruct *types.Type, nl ir.Nodes, convifaceOnly bool) {
|
||||
func typecheckaste(op ir.Op, call ir.Node, isddd bool, tstruct *types.Type, nl ir.Nodes) {
|
||||
var t *types.Type
|
||||
var i int
|
||||
|
||||
@ -495,7 +470,7 @@ func typecheckaste(op ir.Op, call ir.Node, isddd bool, tstruct *types.Type, nl i
|
||||
if isddd {
|
||||
n = nl[i]
|
||||
ir.SetPos(n)
|
||||
if n.Type() != nil && (!convifaceOnly || t.IsInterface()) {
|
||||
if n.Type() != nil {
|
||||
nl[i] = assignconvfn(n, t)
|
||||
}
|
||||
return
|
||||
@ -505,7 +480,7 @@ func typecheckaste(op ir.Op, call ir.Node, isddd bool, tstruct *types.Type, nl i
|
||||
for ; i < len(nl); i++ {
|
||||
n = nl[i]
|
||||
ir.SetPos(n)
|
||||
if n.Type() != nil && (!convifaceOnly || t.IsInterface()) {
|
||||
if n.Type() != nil {
|
||||
nl[i] = assignconvfn(n, t.Elem())
|
||||
}
|
||||
}
|
||||
@ -514,7 +489,7 @@ func typecheckaste(op ir.Op, call ir.Node, isddd bool, tstruct *types.Type, nl i
|
||||
|
||||
n = nl[i]
|
||||
ir.SetPos(n)
|
||||
if n.Type() != nil && (!convifaceOnly || t.IsInterface()) {
|
||||
if n.Type() != nil {
|
||||
nl[i] = assignconvfn(n, t)
|
||||
}
|
||||
i++
|
||||
@ -536,7 +511,7 @@ func transformReturn(rs *ir.ReturnStmt) {
|
||||
return
|
||||
}
|
||||
|
||||
typecheckaste(ir.ORETURN, nil, false, ir.CurFunc.Type().Results(), nl, false)
|
||||
typecheckaste(ir.ORETURN, nil, false, ir.CurFunc.Type().Results(), nl)
|
||||
}
|
||||
|
||||
// transformSelect transforms a select node, creating an assignment list as needed
|
||||
@ -554,6 +529,7 @@ func transformSelect(sel *ir.SelectStmt) {
|
||||
}
|
||||
selrecv.Def = def
|
||||
selrecv.SetTypecheck(1)
|
||||
selrecv.SetInit(n.Init())
|
||||
ncase.Comm = selrecv
|
||||
}
|
||||
switch n.Op() {
|
||||
|
@ -91,7 +91,7 @@ func (g *irgen) typ0(typ types2.Type) *types.Type {
|
||||
// since that is the only use of a generic type that doesn't
|
||||
// involve instantiation. We just translate the named type in the
|
||||
// normal way below using g.obj().
|
||||
if typ.TParams() != nil && typ.TArgs() != nil {
|
||||
if typ.TypeParams() != nil && typ.TypeArgs() != nil {
|
||||
// typ is an instantiation of a defined (named) generic type.
|
||||
// This instantiation should also be a defined (named) type.
|
||||
// types2 gives us the substituted type in t.Underlying()
|
||||
@ -101,7 +101,7 @@ func (g *irgen) typ0(typ types2.Type) *types.Type {
|
||||
//
|
||||
// When converted to types.Type, typ has a unique name,
|
||||
// based on the names of the type arguments.
|
||||
instName := g.instTypeName2(typ.Obj().Name(), typ.TArgs())
|
||||
instName := g.instTypeName2(typ.Obj().Name(), typ.TypeArgs())
|
||||
s := g.pkg(typ.Obj().Pkg()).Lookup(instName)
|
||||
if s.Def != nil {
|
||||
// We have already encountered this instantiation.
|
||||
@ -135,7 +135,7 @@ func (g *irgen) typ0(typ types2.Type) *types.Type {
|
||||
// non-generic types used to instantiate this type. We'll
|
||||
// use these when instantiating the methods of the
|
||||
// instantiated type.
|
||||
targs := typ.TArgs()
|
||||
targs := typ.TypeArgs()
|
||||
rparams := make([]*types.Type, targs.Len())
|
||||
for i := range rparams {
|
||||
rparams[i] = g.typ1(targs.At(i))
|
||||
@ -272,7 +272,7 @@ func (g *irgen) typ0(typ types2.Type) *types.Type {
|
||||
// instantiated types, and for actually generating the methods for instantiated
|
||||
// types.
|
||||
func (g *irgen) fillinMethods(typ *types2.Named, ntyp *types.Type) {
|
||||
targs2 := typ.TArgs()
|
||||
targs2 := typ.TypeArgs()
|
||||
targs := make([]*types.Type, targs2.Len())
|
||||
for i := range targs {
|
||||
targs[i] = g.typ1(targs2.At(i))
|
||||
@ -296,7 +296,7 @@ func (g *irgen) fillinMethods(typ *types2.Named, ntyp *types.Type) {
|
||||
// generic type, so we have to do a substitution to get
|
||||
// the name/type of the method of the instantiated type,
|
||||
// using m.Type().RParams() and typ.TArgs()
|
||||
inst2 := g.instTypeName2("", typ.TArgs())
|
||||
inst2 := g.instTypeName2("", typ.TypeArgs())
|
||||
name := meth.Sym().Name
|
||||
i1 := strings.Index(name, "[")
|
||||
i2 := strings.Index(name[i1:], "]")
|
||||
@ -309,7 +309,7 @@ func (g *irgen) fillinMethods(typ *types2.Named, ntyp *types.Type) {
|
||||
meth2 = newsym.Def.(*ir.Name)
|
||||
} else {
|
||||
meth2 = ir.NewNameAt(meth.Pos(), newsym)
|
||||
rparams := types2.AsSignature(m.Type()).RParams()
|
||||
rparams := types2.AsSignature(m.Type()).RecvTypeParams()
|
||||
tparams := make([]*types.Type, rparams.Len())
|
||||
for i := range tparams {
|
||||
tparams[i] = g.typ1(rparams.At(i))
|
||||
@ -336,7 +336,7 @@ func (g *irgen) fillinMethods(typ *types2.Named, ntyp *types.Type) {
|
||||
}
|
||||
|
||||
func (g *irgen) signature(recv *types.Field, sig *types2.Signature) *types.Type {
|
||||
tparams2 := sig.TParams()
|
||||
tparams2 := sig.TypeParams()
|
||||
tparams := make([]*types.Field, tparams2.Len())
|
||||
for i := range tparams {
|
||||
tp := tparams2.At(i).Obj()
|
||||
|
@ -78,12 +78,12 @@ func unified(noders []*noder) {
|
||||
base.Errorf("cannot use -G and -d=quirksmode together")
|
||||
}
|
||||
|
||||
newReadImportFunc = func(data string, pkg1 *types.Pkg, check *types2.Checker, packages map[string]*types2.Package) (pkg2 *types2.Package, err error) {
|
||||
newReadImportFunc = func(data string, pkg1 *types.Pkg, env *types2.Environment, packages map[string]*types2.Package) (pkg2 *types2.Package, err error) {
|
||||
pr := newPkgDecoder(pkg1.Path, data)
|
||||
|
||||
// Read package descriptors for both types2 and compiler backend.
|
||||
readPackage(newPkgReader(pr), pkg1)
|
||||
pkg2 = readPackage2(check, packages, pr)
|
||||
pkg2 = readPackage2(env, packages, pr)
|
||||
return
|
||||
}
|
||||
|
||||
@ -106,7 +106,6 @@ func unified(noders []*noder) {
|
||||
readPackage(localPkgReader, types.LocalPkg)
|
||||
|
||||
r := localPkgReader.newReader(relocMeta, privateRootIdx, syncPrivate)
|
||||
r.ext = r
|
||||
r.pkgInit(types.LocalPkg, target)
|
||||
|
||||
// Type-check any top-level assignments. We ignore non-assignments
|
||||
@ -137,6 +136,7 @@ func unified(noders []*noder) {
|
||||
}
|
||||
}
|
||||
todoBodies = nil
|
||||
todoBodiesDone = true
|
||||
|
||||
// Check that nothing snuck past typechecking.
|
||||
for _, n := range target.Decls {
|
||||
@ -190,7 +190,6 @@ func writePkgStub(noders []*noder) string {
|
||||
|
||||
{
|
||||
w := privateRootWriter
|
||||
w.ext = w
|
||||
w.pkgInit(noders)
|
||||
w.flush()
|
||||
}
|
||||
|
@ -16,6 +16,7 @@ import (
|
||||
)
|
||||
|
||||
var (
|
||||
flagCmp = flag.Bool("cmp", false, "enable TestUnifiedCompare")
|
||||
flagPkgs = flag.String("pkgs", "std", "list of packages to compare (ignored in -short mode)")
|
||||
flagAll = flag.Bool("all", false, "enable testing of all GOOS/GOARCH targets")
|
||||
flagParallel = flag.Bool("parallel", false, "test GOOS/GOARCH targets in parallel")
|
||||
@ -37,7 +38,12 @@ var (
|
||||
// command's -run flag for subtest matching is recommended for less
|
||||
// powerful machines.
|
||||
func TestUnifiedCompare(t *testing.T) {
|
||||
t.Skip("TODO(#48265): this fails on testing/internal/testdeps, possibly due to type aliases. Fix before merging to master.")
|
||||
// TODO(mdempsky): Either re-enable or delete. Disabled for now to
|
||||
// avoid impeding others' forward progress.
|
||||
if !*flagCmp {
|
||||
t.Skip("skipping TestUnifiedCompare (use -cmp to enable)")
|
||||
}
|
||||
|
||||
targets, err := exec.Command("go", "tool", "dist", "list").Output()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
|
@ -75,14 +75,6 @@ type writer struct {
|
||||
|
||||
encoder
|
||||
|
||||
// For writing out object descriptions, ext points to the extension
|
||||
// writer for where we can write the compiler's private extension
|
||||
// details for the object.
|
||||
//
|
||||
// TODO(mdempsky): This is a little hacky, but works easiest with
|
||||
// the way things are currently.
|
||||
ext *writer
|
||||
|
||||
// TODO(mdempsky): We should be able to prune localsIdx whenever a
|
||||
// scope closes, and then maybe we can just use the same map for
|
||||
// storing the TypeParams too (as their TypeName instead).
|
||||
@ -299,16 +291,16 @@ func (pw *pkgWriter) typIdx(typ types2.Type, dict *writerDict) typeInfo {
|
||||
// Type aliases can refer to uninstantiated generic types, so we
|
||||
// might see len(TParams) != 0 && len(TArgs) == 0 here.
|
||||
// TODO(mdempsky): Revisit after #46477 is resolved.
|
||||
assert(typ.TParams().Len() == typ.TArgs().Len() || typ.TArgs().Len() == 0)
|
||||
assert(typ.TypeParams().Len() == typ.TypeArgs().Len() || typ.TypeArgs().Len() == 0)
|
||||
|
||||
// TODO(mdempsky): Why do we need to loop here?
|
||||
orig := typ
|
||||
for orig.TArgs() != nil {
|
||||
for orig.TypeArgs() != nil {
|
||||
orig = orig.Orig()
|
||||
}
|
||||
|
||||
w.code(typeNamed)
|
||||
w.obj(orig.Obj(), typ.TArgs())
|
||||
w.obj(orig.Obj(), typ.TypeArgs())
|
||||
|
||||
case *types2.TypeParam:
|
||||
index := func() int {
|
||||
@ -345,7 +337,7 @@ func (pw *pkgWriter) typIdx(typ types2.Type, dict *writerDict) typeInfo {
|
||||
w.typ(typ.Elem())
|
||||
|
||||
case *types2.Signature:
|
||||
assert(typ.TParams() == nil)
|
||||
assert(typ.TypeParams() == nil)
|
||||
w.code(typeSignature)
|
||||
w.signature(typ)
|
||||
|
||||
@ -405,7 +397,7 @@ func (w *writer) interfaceType(typ *types2.Interface) {
|
||||
for i := 0; i < typ.NumExplicitMethods(); i++ {
|
||||
m := typ.ExplicitMethod(i)
|
||||
sig := m.Type().(*types2.Signature)
|
||||
assert(sig.TParams() == nil)
|
||||
assert(sig.TypeParams() == nil)
|
||||
|
||||
w.pos(m)
|
||||
w.selector(m)
|
||||
@ -504,21 +496,21 @@ func (pw *pkgWriter) objIdx(obj types2.Object) int {
|
||||
}
|
||||
|
||||
w := pw.newWriter(relocObj, syncObject1)
|
||||
w.ext = pw.newWriter(relocObjExt, syncObject1)
|
||||
wext := pw.newWriter(relocObjExt, syncObject1)
|
||||
wname := pw.newWriter(relocName, syncObject1)
|
||||
wdict := pw.newWriter(relocObjDict, syncObject1)
|
||||
|
||||
pw.globalsIdx[obj] = w.idx // break cycles
|
||||
assert(w.ext.idx == w.idx)
|
||||
assert(wext.idx == w.idx)
|
||||
assert(wname.idx == w.idx)
|
||||
assert(wdict.idx == w.idx)
|
||||
|
||||
w.dict = dict
|
||||
w.ext.dict = dict
|
||||
wext.dict = dict
|
||||
|
||||
code := w.doObj(obj)
|
||||
code := w.doObj(wext, obj)
|
||||
w.flush()
|
||||
w.ext.flush()
|
||||
wext.flush()
|
||||
|
||||
wname.qualifiedIdent(obj)
|
||||
wname.code(code)
|
||||
@ -530,7 +522,7 @@ func (pw *pkgWriter) objIdx(obj types2.Object) int {
|
||||
return w.idx
|
||||
}
|
||||
|
||||
func (w *writer) doObj(obj types2.Object) codeObj {
|
||||
func (w *writer) doObj(wext *writer, obj types2.Object) codeObj {
|
||||
if obj.Pkg() != w.p.curpkg {
|
||||
return objStub
|
||||
}
|
||||
@ -542,7 +534,8 @@ func (w *writer) doObj(obj types2.Object) codeObj {
|
||||
|
||||
case *types2.Const:
|
||||
w.pos(obj)
|
||||
w.value(obj.Type(), obj.Val())
|
||||
w.typ(obj.Type())
|
||||
w.value(obj.Val())
|
||||
return objConst
|
||||
|
||||
case *types2.Func:
|
||||
@ -551,10 +544,10 @@ func (w *writer) doObj(obj types2.Object) codeObj {
|
||||
sig := obj.Type().(*types2.Signature)
|
||||
|
||||
w.pos(obj)
|
||||
w.typeParamNames(sig.TParams())
|
||||
w.typeParamNames(sig.TypeParams())
|
||||
w.signature(sig)
|
||||
w.pos(decl)
|
||||
w.ext.funcExt(obj)
|
||||
wext.funcExt(obj)
|
||||
return objFunc
|
||||
|
||||
case *types2.TypeName:
|
||||
@ -568,16 +561,16 @@ func (w *writer) doObj(obj types2.Object) codeObj {
|
||||
}
|
||||
|
||||
named := obj.Type().(*types2.Named)
|
||||
assert(named.TArgs() == nil)
|
||||
assert(named.TypeArgs() == nil)
|
||||
|
||||
w.pos(obj)
|
||||
w.typeParamNames(named.TParams())
|
||||
w.ext.typeExt(obj)
|
||||
w.typeParamNames(named.TypeParams())
|
||||
wext.typeExt(obj)
|
||||
w.typExpr(decl.Type)
|
||||
|
||||
w.len(named.NumMethods())
|
||||
for i := 0; i < named.NumMethods(); i++ {
|
||||
w.method(named.Method(i))
|
||||
w.method(wext, named.Method(i))
|
||||
}
|
||||
|
||||
return objType
|
||||
@ -585,7 +578,7 @@ func (w *writer) doObj(obj types2.Object) codeObj {
|
||||
case *types2.Var:
|
||||
w.pos(obj)
|
||||
w.typ(obj.Type())
|
||||
w.ext.varExt(obj)
|
||||
wext.varExt(obj)
|
||||
return objVar
|
||||
}
|
||||
}
|
||||
@ -598,12 +591,6 @@ func (w *writer) typExpr(expr syntax.Expr) {
|
||||
w.typ(tv.Type)
|
||||
}
|
||||
|
||||
func (w *writer) value(typ types2.Type, val constant.Value) {
|
||||
w.sync(syncValue)
|
||||
w.typ(typ)
|
||||
w.rawValue(val)
|
||||
}
|
||||
|
||||
// objDict writes the dictionary needed for reading the given object.
|
||||
func (w *writer) objDict(obj types2.Object, dict *writerDict) {
|
||||
// TODO(mdempsky): Split objDict into multiple entries? reader.go
|
||||
@ -642,7 +629,7 @@ func (w *writer) objDict(obj types2.Object, dict *writerDict) {
|
||||
assert(len(dict.funcs) == nfuncs)
|
||||
}
|
||||
|
||||
func (w *writer) typeParamNames(tparams *types2.TParamList) {
|
||||
func (w *writer) typeParamNames(tparams *types2.TypeParamList) {
|
||||
w.sync(syncTypeParamNames)
|
||||
|
||||
ntparams := tparams.Len()
|
||||
@ -653,7 +640,7 @@ func (w *writer) typeParamNames(tparams *types2.TParamList) {
|
||||
}
|
||||
}
|
||||
|
||||
func (w *writer) method(meth *types2.Func) {
|
||||
func (w *writer) method(wext *writer, meth *types2.Func) {
|
||||
decl, ok := w.p.funDecls[meth]
|
||||
assert(ok)
|
||||
sig := meth.Type().(*types2.Signature)
|
||||
@ -661,12 +648,12 @@ func (w *writer) method(meth *types2.Func) {
|
||||
w.sync(syncMethod)
|
||||
w.pos(meth)
|
||||
w.selector(meth)
|
||||
w.typeParamNames(sig.RParams())
|
||||
w.typeParamNames(sig.RecvTypeParams())
|
||||
w.param(sig.Recv())
|
||||
w.signature(sig)
|
||||
|
||||
w.pos(decl) // XXX: Hack to workaround linker limitations.
|
||||
w.ext.funcExt(meth)
|
||||
wext.funcExt(meth)
|
||||
}
|
||||
|
||||
// qualifiedIdent writes out the name of an object declared at package
|
||||
@ -1199,7 +1186,8 @@ func (w *writer) expr(expr syntax.Expr) {
|
||||
|
||||
w.code(exprConst)
|
||||
w.pos(pos)
|
||||
w.value(tv.Type, tv.Value)
|
||||
w.typ(tv.Type)
|
||||
w.value(tv.Value)
|
||||
|
||||
// TODO(mdempsky): These details are only important for backend
|
||||
// diagnostics. Explore writing them out separately.
|
||||
@ -1677,7 +1665,7 @@ func (w *writer) pkgDecl(decl syntax.Decl) {
|
||||
obj := w.p.info.Defs[decl.Name].(*types2.Func)
|
||||
sig := obj.Type().(*types2.Signature)
|
||||
|
||||
if sig.RParams() != nil || sig.TParams() != nil {
|
||||
if sig.RecvTypeParams() != nil || sig.TypeParams() != nil {
|
||||
break // skip generic functions
|
||||
}
|
||||
|
||||
@ -1711,7 +1699,7 @@ func (w *writer) pkgDecl(decl syntax.Decl) {
|
||||
// TODO(mdempsky): Revisit after #46477 is resolved.
|
||||
if name.IsAlias() {
|
||||
named, ok := name.Type().(*types2.Named)
|
||||
if ok && named.TParams().Len() != 0 && named.TArgs().Len() == 0 {
|
||||
if ok && named.TypeParams().Len() != 0 && named.TypeArgs().Len() == 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
@ -1858,17 +1846,17 @@ func fieldIndex(info *types2.Info, str *types2.Struct, key *syntax.Name) int {
|
||||
}
|
||||
|
||||
// objTypeParams returns the type parameters on the given object.
|
||||
func objTypeParams(obj types2.Object) *types2.TParamList {
|
||||
func objTypeParams(obj types2.Object) *types2.TypeParamList {
|
||||
switch obj := obj.(type) {
|
||||
case *types2.Func:
|
||||
sig := obj.Type().(*types2.Signature)
|
||||
if sig.Recv() != nil {
|
||||
return sig.RParams()
|
||||
return sig.RecvTypeParams()
|
||||
}
|
||||
return sig.TParams()
|
||||
return sig.TypeParams()
|
||||
case *types2.TypeName:
|
||||
if !obj.IsAlias() {
|
||||
return obj.Type().(*types2.Named).TParams()
|
||||
return obj.Type().(*types2.Named).TypeParams()
|
||||
}
|
||||
}
|
||||
return nil
|
||||
|
@ -1829,6 +1829,9 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
|
||||
case ssa.OpPPC64CALLstatic:
|
||||
s.Call(v)
|
||||
|
||||
case ssa.OpPPC64CALLtail:
|
||||
s.TailCall(v)
|
||||
|
||||
case ssa.OpPPC64CALLclosure, ssa.OpPPC64CALLinter:
|
||||
p := s.Prog(ppc64.AMOVD)
|
||||
p.From.Type = obj.TYPE_REG
|
||||
@ -1980,14 +1983,9 @@ func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
|
||||
p.To.Type = obj.TYPE_BRANCH
|
||||
s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
|
||||
}
|
||||
case ssa.BlockExit:
|
||||
case ssa.BlockExit, ssa.BlockRetJmp:
|
||||
case ssa.BlockRet:
|
||||
s.Prog(obj.ARET)
|
||||
case ssa.BlockRetJmp:
|
||||
p := s.Prog(obj.AJMP)
|
||||
p.To.Type = obj.TYPE_MEM
|
||||
p.To.Name = obj.NAME_EXTERN
|
||||
p.To.Sym = b.Aux.(*obj.LSym)
|
||||
|
||||
case ssa.BlockPPC64EQ, ssa.BlockPPC64NE,
|
||||
ssa.BlockPPC64LT, ssa.BlockPPC64GE,
|
||||
|
@ -7,7 +7,6 @@ package reflectdata
|
||||
import (
|
||||
"encoding/binary"
|
||||
"fmt"
|
||||
"internal/buildcfg"
|
||||
"os"
|
||||
"sort"
|
||||
"strings"
|
||||
@ -1869,15 +1868,11 @@ func methodWrapper(rcvr *types.Type, method *types.Field, forItab bool) *obj.LSy
|
||||
// Disable tailcall for RegabiArgs for now. The IR does not connect the
|
||||
// arguments with the OTAILCALL node, and the arguments are not marshaled
|
||||
// correctly.
|
||||
if !base.Flag.Cfg.Instrumenting && rcvr.IsPtr() && methodrcvr.IsPtr() && method.Embedded != 0 && !types.IsInterfaceMethod(method.Type) && !(base.Ctxt.Arch.Name == "ppc64le" && base.Ctxt.Flag_dynlink) && !buildcfg.Experiment.RegabiArgs && !generic {
|
||||
// generate tail call: adjust pointer receiver and jump to embedded method.
|
||||
left := dot.X // skip final .M
|
||||
if !left.Type().IsPtr() {
|
||||
left = typecheck.NodAddr(left)
|
||||
}
|
||||
as := ir.NewAssignStmt(base.Pos, nthis, typecheck.ConvNop(left, rcvr))
|
||||
fn.Body.Append(as)
|
||||
fn.Body.Append(ir.NewTailCallStmt(base.Pos, method.Nname.(*ir.Name)))
|
||||
if !base.Flag.Cfg.Instrumenting && rcvr.IsPtr() && methodrcvr.IsPtr() && method.Embedded != 0 && !types.IsInterfaceMethod(method.Type) && !(base.Ctxt.Arch.Name == "ppc64le" && base.Ctxt.Flag_dynlink) && !generic {
|
||||
call := ir.NewCallExpr(base.Pos, ir.OCALL, dot, nil)
|
||||
call.Args = ir.ParamNames(tfn.Type())
|
||||
call.IsDDD = tfn.Type().IsVariadic()
|
||||
fn.Body.Append(ir.NewTailCallStmt(base.Pos, call))
|
||||
} else {
|
||||
fn.SetWrapper(true) // ignore frame for panic+recover matching
|
||||
var call *ir.CallExpr
|
||||
@ -1921,7 +1916,7 @@ func methodWrapper(rcvr *types.Type, method *types.Field, forItab bool) *obj.LSy
|
||||
// Target method uses shaped names.
|
||||
targs2 := make([]*types.Type, len(targs))
|
||||
for i, t := range targs {
|
||||
targs2[i] = typecheck.Shapify(t)
|
||||
targs2[i] = typecheck.Shapify(t, i)
|
||||
}
|
||||
targs = targs2
|
||||
|
||||
|
@ -272,7 +272,8 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
|
||||
ssa.OpRISCV64FADDS, ssa.OpRISCV64FSUBS, ssa.OpRISCV64FMULS, ssa.OpRISCV64FDIVS,
|
||||
ssa.OpRISCV64FEQS, ssa.OpRISCV64FNES, ssa.OpRISCV64FLTS, ssa.OpRISCV64FLES,
|
||||
ssa.OpRISCV64FADDD, ssa.OpRISCV64FSUBD, ssa.OpRISCV64FMULD, ssa.OpRISCV64FDIVD,
|
||||
ssa.OpRISCV64FEQD, ssa.OpRISCV64FNED, ssa.OpRISCV64FLTD, ssa.OpRISCV64FLED:
|
||||
ssa.OpRISCV64FEQD, ssa.OpRISCV64FNED, ssa.OpRISCV64FLTD, ssa.OpRISCV64FLED,
|
||||
ssa.OpRISCV64FSGNJD:
|
||||
r := v.Reg()
|
||||
r1 := v.Args[0].Reg()
|
||||
r2 := v.Args[1].Reg()
|
||||
@ -329,7 +330,7 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
|
||||
p.SetRestArgs([]obj.Addr{{Type: obj.TYPE_REG, Reg: r3}})
|
||||
p.To.Type = obj.TYPE_REG
|
||||
p.To.Reg = r
|
||||
case ssa.OpRISCV64FSQRTS, ssa.OpRISCV64FNEGS, ssa.OpRISCV64FSQRTD, ssa.OpRISCV64FNEGD,
|
||||
case ssa.OpRISCV64FSQRTS, ssa.OpRISCV64FNEGS, ssa.OpRISCV64FABSD, ssa.OpRISCV64FSQRTD, ssa.OpRISCV64FNEGD,
|
||||
ssa.OpRISCV64FMVSX, ssa.OpRISCV64FMVDX,
|
||||
ssa.OpRISCV64FCVTSW, ssa.OpRISCV64FCVTSL, ssa.OpRISCV64FCVTWS, ssa.OpRISCV64FCVTLS,
|
||||
ssa.OpRISCV64FCVTDW, ssa.OpRISCV64FCVTDL, ssa.OpRISCV64FCVTWD, ssa.OpRISCV64FCVTLD, ssa.OpRISCV64FCVTDS, ssa.OpRISCV64FCVTSD,
|
||||
@ -412,6 +413,8 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
|
||||
p.To.Reg = v.Reg()
|
||||
case ssa.OpRISCV64CALLstatic, ssa.OpRISCV64CALLclosure, ssa.OpRISCV64CALLinter:
|
||||
s.Call(v)
|
||||
case ssa.OpRISCV64CALLtail:
|
||||
s.TailCall(v)
|
||||
case ssa.OpRISCV64LoweredWB:
|
||||
p := s.Prog(obj.ACALL)
|
||||
p.To.Type = obj.TYPE_MEM
|
||||
@ -724,14 +727,9 @@ func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
|
||||
p.To.Type = obj.TYPE_BRANCH
|
||||
s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
|
||||
}
|
||||
case ssa.BlockExit:
|
||||
case ssa.BlockExit, ssa.BlockRetJmp:
|
||||
case ssa.BlockRet:
|
||||
s.Prog(obj.ARET)
|
||||
case ssa.BlockRetJmp:
|
||||
p := s.Prog(obj.ARET)
|
||||
p.To.Type = obj.TYPE_MEM
|
||||
p.To.Name = obj.NAME_EXTERN
|
||||
p.To.Sym = b.Aux.(*obj.LSym)
|
||||
case ssa.BlockRISCV64BEQ, ssa.BlockRISCV64BEQZ, ssa.BlockRISCV64BNE, ssa.BlockRISCV64BNEZ,
|
||||
ssa.BlockRISCV64BLT, ssa.BlockRISCV64BLEZ, ssa.BlockRISCV64BGE, ssa.BlockRISCV64BGEZ,
|
||||
ssa.BlockRISCV64BLTZ, ssa.BlockRISCV64BGTZ, ssa.BlockRISCV64BLTU, ssa.BlockRISCV64BGEU:
|
||||
|
@ -556,6 +556,8 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
|
||||
p.To.Reg = v.Reg()
|
||||
case ssa.OpS390XCALLstatic, ssa.OpS390XCALLclosure, ssa.OpS390XCALLinter:
|
||||
s.Call(v)
|
||||
case ssa.OpS390XCALLtail:
|
||||
s.TailCall(v)
|
||||
case ssa.OpS390XLoweredWB:
|
||||
p := s.Prog(obj.ACALL)
|
||||
p.To.Type = obj.TYPE_MEM
|
||||
@ -899,17 +901,11 @@ func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
|
||||
s.Br(s390x.ABR, b.Succs[0].Block())
|
||||
}
|
||||
return
|
||||
case ssa.BlockExit:
|
||||
case ssa.BlockExit, ssa.BlockRetJmp:
|
||||
return
|
||||
case ssa.BlockRet:
|
||||
s.Prog(obj.ARET)
|
||||
return
|
||||
case ssa.BlockRetJmp:
|
||||
p := s.Prog(s390x.ABR)
|
||||
p.To.Type = obj.TYPE_MEM
|
||||
p.To.Name = obj.NAME_EXTERN
|
||||
p.To.Sym = b.Aux.(*obj.LSym)
|
||||
return
|
||||
}
|
||||
|
||||
// Handle s390x-specific blocks. These blocks all have a
|
||||
|
@ -66,9 +66,6 @@ func checkFunc(f *Func) {
|
||||
if !b.Controls[0].Type.IsMemory() {
|
||||
f.Fatalf("retjmp block %s has non-memory control value %s", b, b.Controls[0].LongString())
|
||||
}
|
||||
if b.Aux == nil {
|
||||
f.Fatalf("retjmp block %s has nil Aux field", b)
|
||||
}
|
||||
case BlockPlain:
|
||||
if len(b.Succs) != 1 {
|
||||
f.Fatalf("plain block %s len(Succs)==%d, want 1", b, len(b.Succs))
|
||||
|
@ -10,9 +10,11 @@ import (
|
||||
"fmt"
|
||||
"hash/crc32"
|
||||
"internal/buildcfg"
|
||||
"io"
|
||||
"log"
|
||||
"math/rand"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"runtime"
|
||||
"sort"
|
||||
@ -59,7 +61,7 @@ func Compile(f *Func) {
|
||||
printFunc(f)
|
||||
}
|
||||
f.HTMLWriter.WritePhase("start", "start")
|
||||
if BuildDump != "" && BuildDump == f.Name {
|
||||
if BuildDump[f.Name] {
|
||||
f.dumpFile("build")
|
||||
}
|
||||
if checkEnabled {
|
||||
@ -163,25 +165,37 @@ func Compile(f *Func) {
|
||||
phaseName = ""
|
||||
}
|
||||
|
||||
// dumpFile creates a file from the phase name and function name
|
||||
// Dumping is done to files to avoid buffering huge strings before
|
||||
// output.
|
||||
func (f *Func) dumpFile(phaseName string) {
|
||||
// DumpFileForPhase creates a file from the function name and phase name,
|
||||
// warning and returning nil if this is not possible.
|
||||
func (f *Func) DumpFileForPhase(phaseName string) io.WriteCloser {
|
||||
f.dumpFileSeq++
|
||||
fname := fmt.Sprintf("%s_%02d__%s.dump", f.Name, int(f.dumpFileSeq), phaseName)
|
||||
fname = strings.Replace(fname, " ", "_", -1)
|
||||
fname = strings.Replace(fname, "/", "_", -1)
|
||||
fname = strings.Replace(fname, ":", "_", -1)
|
||||
|
||||
if ssaDir := os.Getenv("GOSSADIR"); ssaDir != "" {
|
			fname = filepath.Join(ssaDir, fname)
		}

		fi, err := os.Create(fname)
		if err != nil {
			f.Warnl(src.NoXPos, "Unable to create after-phase dump file %s", fname)
			return
			return nil
		}
		return fi
	}

		p := stringFuncPrinter{w: fi}
		fprintFunc(p, f)
		fi.Close()
// dumpFile creates a file from the phase name and function name
// Dumping is done to files to avoid buffering huge strings before
// output.
func (f *Func) dumpFile(phaseName string) {
	fi := f.DumpFileForPhase(phaseName)
	if fi != nil {
		p := stringFuncPrinter{w: fi}
		fprintFunc(p, f)
		fi.Close()
	}
}

type pass struct {
@@ -224,7 +238,9 @@ var IntrinsicsDisable bool
var BuildDebug int
var BuildTest int
var BuildStats int
var BuildDump string // name of function to dump after initial build of ssa
var BuildDump map[string]bool = make(map[string]bool) // names of functions to dump after initial build of ssa

var GenssaDump map[string]bool = make(map[string]bool) // names of functions to dump after ssa has been converted to asm

// PhaseOption sets the specified flag in the specified ssa phase,
// returning empty string if this was successful or a string explaining
@@ -248,7 +264,7 @@ func PhaseOption(phase, flag string, val int, valString string) string {
	switch phase {
	case "", "help":
		lastcr := 0
		phasenames := " check, all, build, intrinsics"
		phasenames := " check, all, build, intrinsics, genssa"
		for _, p := range passes {
			pn := strings.Replace(p.name, " ", "_", -1)
			if len(pn)+len(phasenames)-lastcr > 70 {
@@ -278,6 +294,7 @@ where:

Phase "all" supports flags "time", "mem", and "dump".
Phase "intrinsics" supports flags "on", "off", and "debug".
Phase "genssa" (assembly generation) supports the flag "dump".

If the "dump" flag is specified, the output is written on a file named
<phase>__<function_name>_<seq>.dump; otherwise it is directed to stdout.
@@ -339,10 +356,11 @@ commas. For example:
		case "dump":
			alldump = val != 0
			if alldump {
				BuildDump = valString
				BuildDump[valString] = true
				GenssaDump[valString] = true
			}
		default:
			return fmt.Sprintf("Did not find a flag matching %s in -d=ssa/%s debug option", flag, phase)
			return fmt.Sprintf("Did not find a flag matching %s in -d=ssa/%s debug option (expected ssa/all/{time,mem,dump=function_name})", flag, phase)
		}
	}

@@ -355,7 +373,7 @@ commas. For example:
		case "debug":
			IntrinsicsDebug = val
		default:
			return fmt.Sprintf("Did not find a flag matching %s in -d=ssa/%s debug option", flag, phase)
			return fmt.Sprintf("Did not find a flag matching %s in -d=ssa/%s debug option (expected ssa/intrinsics/{on,off,debug})", flag, phase)
		}
		return ""
	}
@@ -368,9 +386,18 @@ commas. For example:
		case "stats":
			BuildStats = val
		case "dump":
			BuildDump = valString
			BuildDump[valString] = true
		default:
			return fmt.Sprintf("Did not find a flag matching %s in -d=ssa/%s debug option", flag, phase)
			return fmt.Sprintf("Did not find a flag matching %s in -d=ssa/%s debug option (expected ssa/build/{debug,test,stats,dump=function_name})", flag, phase)
		}
		return ""
	}
	if phase == "genssa" {
		switch flag {
		case "dump":
			GenssaDump[valString] = true
		default:
			return fmt.Sprintf("Did not find a flag matching %s in -d=ssa/%s debug option (expected ssa/genssa/dump=function_name)", flag, phase)
		}
		return ""
	}

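For orientation, a minimal sketch of driving the new genssa dump from a small Go program, the same way the new test below does: build with -gcflags=-d=ssa/genssa/dump=<function> and point GOSSADIR at a directory that receives a file named like <function>_01__genssa.dump. The source path and the function name "sayhi" are placeholders, not part of the change.

// Sketch only: invoke the new ssa/genssa/dump option as the test below does.
// "sayhi" and the source path are placeholders.
package main

import (
	"fmt"
	"os"
	"os/exec"
	"path/filepath"
)

func main() {
	tmp, err := os.MkdirTemp("", "genssa-dump")
	if err != nil {
		panic(err)
	}
	defer os.RemoveAll(tmp)

	cmd := exec.Command("go", "build", "-o", filepath.Join(tmp, "foo.o"),
		"-gcflags=-d=ssa/genssa/dump=sayhi", "testdata/sayhi.go")
	cmd.Env = append(os.Environ(), "GOSSADIR="+tmp) // dump files are written here
	if out, err := cmd.CombinedOutput(); err != nil {
		panic(fmt.Sprintf("build failed: %v\n%s", err, out))
	}
	fmt.Println("dump written under", tmp) // e.g. sayhi_01__genssa.dump
}
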
src/cmd/compile/internal/ssa/debug_lines_test.go (new file, 213 lines)
@@ -0,0 +1,213 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ssa_test

import (
	"bufio"
	"bytes"
	"flag"
	"runtime"
	"sort"

	// "flag"
	"fmt"
	"internal/testenv"
	"io/ioutil"
	"os"
	"os/exec"
	"path/filepath"
	"reflect"
	"regexp"
	"strconv"
	"testing"
)

// Matches lines in genssa output that are marked "isstmt", and the parenthesized plus-prefixed line number is a submatch
var asmLine *regexp.Regexp = regexp.MustCompile(`^\s[vb][0-9]+\s+[0-9]+\s\(\+([0-9]+)\)`)

// this matches e.g. ` v123456789 000007 (+9876654310) MOVUPS X15, ""..autotmp_2-32(SP)`

// Matches lines in genssa output that describe an inlined file (on a Unix filesystem). Note it expects an unadventurous choice of basename.
var inlineLine *regexp.Regexp = regexp.MustCompile(`^#\s/.*/[-a-zA-Z0-9_]+\.go:([0-9]+)`)

// this matches e.g. # /pa/inline-dumpxxxx.go:6

var testGoArchFlag = flag.String("arch", "", "run test for specified architecture")

func testGoArch() string {
	if *testGoArchFlag == "" {
		return runtime.GOARCH
	}
	return *testGoArchFlag
}

func TestDebugLines(t *testing.T) {
	if runtime.GOOS == "windows" {
		t.Skip("Windows lacks $HOME which complicates workaround for 'missing $GOPATH'") // $HOME needed to work around #43938
	}
	// This test is potentially fragile, the goal is that debugging should step properly through "sayhi"
	// If the blocks are reordered in a way that changes the statement order but execution flows correctly,
	// then rearrange the expected numbers. Register abi and not-register-abi also have different sequences,
	// at least for now.

	switch testGoArch() {
	case "arm64", "amd64": // register ABI
		testDebugLines(t, "sayhi.go", "sayhi", []int{8, 9, 10, 11})

	case "arm", "386": // probably not register ABI for a while
		testDebugLines(t, "sayhi.go", "sayhi", []int{9, 10, 11})

	default: // expect ppc64le and riscv will pick up register ABI soonish, not sure about others
		t.Skip("skipped for many architectures, also changes w/ register ABI")
	}
}

func TestInlineLines(t *testing.T) {
	if runtime.GOOS == "windows" {
		t.Skip("Windows lacks $HOME which complicates workaround for 'missing $GOPATH'") // $HOME needed to work around #43938
	}
	if runtime.GOARCH != "amd64" && *testGoArchFlag == "" {
		// As of september 2021, works for everything except mips64, but still potentially fragile
		t.Skip("only runs for amd64 unless -arch explicitly supplied")
	}

	want := [][]int{{3}, {4, 10}, {4, 10, 16}, {4, 10}, {4, 11, 16}, {4, 11}, {4}, {5, 10}, {5, 10, 16}, {5, 10}, {5, 11, 16}, {5, 11}, {5}}
	testInlineStack(t, "inline-dump.go", "f", want)
}

func compileAndDump(t *testing.T, file, function, moreGCFlags string) []byte {
	testenv.MustHaveGoBuild(t)

	tmpdir, err := ioutil.TempDir("", "debug_lines_test")
	if err != nil {
		panic(fmt.Sprintf("Problem creating TempDir, error %v", err))
	}
	if testing.Verbose() {
		fmt.Printf("Preserving temporary directory %s\n", tmpdir)
	} else {
		defer os.RemoveAll(tmpdir)
	}

	source, err := filepath.Abs(filepath.Join("testdata", file))
	if err != nil {
		panic(fmt.Sprintf("Could not get abspath of testdata directory and file, %v", err))
	}

	cmd := exec.Command(testenv.GoToolPath(t), "build", "-o", "foo.o", "-gcflags=-d=ssa/genssa/dump="+function+" "+moreGCFlags, source)
	cmd.Dir = tmpdir
	cmd.Env = replaceEnv(cmd.Env, "GOSSADIR", tmpdir)
	cmd.Env = replaceEnv(cmd.Env, "HOME", os.Getenv("HOME")) // workaround for #43938
	testGoos := "linux" // default to linux
	if testGoArch() == "wasm" {
		testGoos = "js"
	}
	cmd.Env = replaceEnv(cmd.Env, "GOOS", testGoos)
	cmd.Env = replaceEnv(cmd.Env, "GOARCH", testGoArch())

	if testing.Verbose() {
		fmt.Printf("About to run %s\n", asCommandLine("", cmd))
	}

	var stdout, stderr bytes.Buffer
	cmd.Stdout = &stdout
	cmd.Stderr = &stderr

	if err := cmd.Run(); err != nil {
		t.Fatalf("error running cmd %s: %v\nstdout:\n%sstderr:\n%s\n", asCommandLine("", cmd), err, stdout.String(), stderr.String())
	}

	if s := stderr.String(); s != "" {
		t.Fatalf("Wanted empty stderr, instead got:\n%s\n", s)
	}

	dumpFile := filepath.Join(tmpdir, function+"_01__genssa.dump")
	dumpBytes, err := os.ReadFile(dumpFile)
	if err != nil {
		t.Fatalf("Could not read dump file %s, err=%v", dumpFile, err)
	}
	return dumpBytes
}

func sortInlineStacks(x [][]int) {
	sort.Slice(x, func(i, j int) bool {
		if len(x[i]) != len(x[j]) {
			return len(x[i]) < len(x[j])
		}
		for k := range x[i] {
			if x[i][k] != x[j][k] {
				return x[i][k] < x[j][k]
			}
		}
		return false
	})
}

// testInlineStack ensures that inlining is described properly in the comments in the dump file
func testInlineStack(t *testing.T, file, function string, wantStacks [][]int) {
	// this is an inlining reporting test, not an optimization test. -N makes it less fragile
	dumpBytes := compileAndDump(t, file, function, "-N")
	dump := bufio.NewScanner(bytes.NewReader(dumpBytes))
	dumpLineNum := 0
	var gotStmts []int
	var gotStacks [][]int
	for dump.Scan() {
		line := dump.Text()
		dumpLineNum++
		matches := inlineLine.FindStringSubmatch(line)
		if len(matches) == 2 {
			stmt, err := strconv.ParseInt(matches[1], 10, 32)
			if err != nil {
				t.Fatalf("Expected to parse a line number but saw %s instead on dump line #%d, error %v", matches[1], dumpLineNum, err)
			}
			if testing.Verbose() {
				fmt.Printf("Saw stmt# %d for submatch '%s' on dump line #%d = '%s'\n", stmt, matches[1], dumpLineNum, line)
			}
			gotStmts = append(gotStmts, int(stmt))
		} else if len(gotStmts) > 0 {
			gotStacks = append(gotStacks, gotStmts)
			gotStmts = nil
		}
	}
	if len(gotStmts) > 0 {
		gotStacks = append(gotStacks, gotStmts)
		gotStmts = nil
	}
	sortInlineStacks(gotStacks)
	sortInlineStacks(wantStacks)
	if !reflect.DeepEqual(wantStacks, gotStacks) {
		t.Errorf("wanted inlines %+v but got %+v", wantStacks, gotStacks)
	}

}

// testDebugLines compiles testdata/<file> with flags -N -l and -d=ssa/genssa/dump=<function>
// then verifies that the statement-marked lines in that file are the same as those in wantStmts
// These files must all be short because this is super-fragile.
// "go build" is run in a temporary directory that is normally deleted, unless -test.v
func testDebugLines(t *testing.T, file, function string, wantStmts []int) {
	dumpBytes := compileAndDump(t, file, function, "-N -l")
	dump := bufio.NewScanner(bytes.NewReader(dumpBytes))
	var gotStmts []int
	dumpLineNum := 0
	for dump.Scan() {
		line := dump.Text()
		dumpLineNum++
		matches := asmLine.FindStringSubmatch(line)
		if len(matches) == 2 {
			stmt, err := strconv.ParseInt(matches[1], 10, 32)
			if err != nil {
				t.Fatalf("Expected to parse a line number but saw %s instead on dump line #%d, error %v", matches[1], dumpLineNum, err)
			}
			if testing.Verbose() {
				fmt.Printf("Saw stmt# %d for submatch '%s' on dump line #%d = '%s'\n", stmt, matches[1], dumpLineNum, line)
			}
			gotStmts = append(gotStmts, int(stmt))
		}
	}
	if !reflect.DeepEqual(wantStmts, gotStmts) {
		t.Errorf("wanted stmts %v but got %v", wantStmts, gotStmts)
	}

}

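As a quick check of what the two patterns above capture, here is a small standalone sketch that runs them against sample lines; the inline example is the one quoted in the comment in the file, while the genssa line is made up but representative, not actual compiler output.

// Sketch: apply the same regular expressions the test uses to sample lines.
package main

import (
	"fmt"
	"regexp"
)

var (
	asmLine    = regexp.MustCompile(`^\s[vb][0-9]+\s+[0-9]+\s\(\+([0-9]+)\)`)
	inlineLine = regexp.MustCompile(`^#\s/.*/[-a-zA-Z0-9_]+\.go:([0-9]+)`)
)

func main() {
	if m := asmLine.FindStringSubmatch(" v37 00005 (+9)\tMOVQ\tAX, BX"); m != nil {
		fmt.Println("statement line:", m[1]) // "9"
	}
	if m := inlineLine.FindStringSubmatch("# /pa/inline-dumpxxxx.go:6"); m != nil {
		fmt.Println("inlined file line:", m[1]) // "6"
	}
}
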
@@ -176,7 +176,7 @@ func (c *registerCursor) hasRegs() bool {
type expandState struct {
	f          *Func
	abi1       *abi.ABIConfig
	debug      bool
	debug      int // odd values log lost statement markers, so likely settings are 1 (stmts), 2 (expansion), and 3 (both)
	canSSAType func(*types.Type) bool
	regSize    int64
	sp         *Value
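The switch from debug bool to debug int lets one option carry two verbosity channels, per the comment above: odd values (bit 0) report lost statement markers, while values above 1 add the expansion trace. A tiny sketch of that convention, with made-up names:

// Sketch of the debug-level convention described above; names are illustrative.
package main

import "fmt"

type state struct{ debug int }

func (s *state) traceExpansion(msg string) {
	if s.debug > 1 { // 2 or 3: verbose expansion trace
		fmt.Println(msg)
	}
}

func (s *state) reportLostStmt(name string) {
	if s.debug&1 != 0 { // 1 or 3: report lost statement markers
		fmt.Println("Lost statement marker in", name)
	}
}

func main() {
	s := &state{debug: 3}
	s.traceExpansion("rewriteSelect(v12)")
	s.reportLostStmt("pkg.Func")
}
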
@ -302,7 +302,7 @@ func (x *expandState) Printf(format string, a ...interface{}) (n int, err error)
|
||||
//
|
||||
// TODO when registers really arrive, must also decompose anything split across two registers or registers and memory.
|
||||
func (x *expandState) rewriteSelect(leaf *Value, selector *Value, offset int64, regOffset Abi1RO) []*LocalSlot {
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.indent(3)
|
||||
defer x.indent(-3)
|
||||
x.Printf("rewriteSelect(%s; %s; memOff=%d; regOff=%d)\n", leaf.LongString(), selector.LongString(), offset, regOffset)
|
||||
@ -325,7 +325,7 @@ func (x *expandState) rewriteSelect(leaf *Value, selector *Value, offset int64,
|
||||
} else {
|
||||
x.f.Fatalf("Unexpected %s type, selector=%s, leaf=%s\n", selector.Op.String(), selector.LongString(), leaf.LongString())
|
||||
}
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.Printf("---%s, break\n", selector.Op.String())
|
||||
}
|
||||
case OpArg:
|
||||
@ -335,7 +335,7 @@ func (x *expandState) rewriteSelect(leaf *Value, selector *Value, offset int64,
|
||||
} else {
|
||||
x.f.Fatalf("Unexpected OpArg type, selector=%s, leaf=%s\n", selector.LongString(), leaf.LongString())
|
||||
}
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.Printf("---OpArg, break\n")
|
||||
}
|
||||
break
|
||||
@ -381,7 +381,7 @@ func (x *expandState) rewriteSelect(leaf *Value, selector *Value, offset int64,
|
||||
// This case removes that StructSelect.
|
||||
if leafType != selector.Type {
|
||||
if x.f.Config.SoftFloat && selector.Type.IsFloat() {
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.Printf("---OpLoad, break\n")
|
||||
}
|
||||
break // softfloat pass will take care of that
|
||||
@ -468,7 +468,7 @@ func (x *expandState) rewriteSelect(leaf *Value, selector *Value, offset int64,
|
||||
} else {
|
||||
w := call.Block.NewValue2(leaf.Pos, OpLoad, leafType, off, call)
|
||||
leaf.copyOf(w)
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.Printf("---new %s\n", w.LongString())
|
||||
}
|
||||
}
|
||||
@ -687,7 +687,7 @@ func (x *expandState) decomposeArg(pos src.XPos, b *Block, source, mem *Value, t
|
||||
panic(fmt.Errorf("offset %d of requested register %d should be zero, source=%s", offs[loadRegOffset], loadRegOffset, source.LongString()))
|
||||
}
|
||||
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.Printf("decompose arg %s has %d locs\n", source.LongString(), len(locs))
|
||||
}
|
||||
|
||||
@ -836,7 +836,7 @@ func (x *expandState) decomposeLoad(pos src.XPos, b *Block, source, mem *Value,
|
||||
// pos and b locate the store instruction, source is the "base" of the value input,
|
||||
// mem is the input mem, t is the type in question, and offArg and offStore are the offsets from the respective bases.
|
||||
func storeOneArg(x *expandState, pos src.XPos, b *Block, locs []*LocalSlot, suffix string, source, mem *Value, t *types.Type, argOffset, storeOffset int64, loadRegOffset Abi1RO, storeRc registerCursor) *Value {
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.indent(3)
|
||||
defer x.indent(-3)
|
||||
x.Printf("storeOneArg(%s; %s; %s; aO=%d; sO=%d; lrO=%d; %s)\n", source.LongString(), mem.String(), t.String(), argOffset, storeOffset, loadRegOffset, storeRc.String())
|
||||
@ -877,7 +877,7 @@ func storeTwoLoad(x *expandState, pos src.XPos, b *Block, source, mem *Value, t1
|
||||
// stores of non-aggregate types. It recursively walks up a chain of selectors until it reaches a Load or an Arg.
|
||||
// If it does not reach a Load or an Arg, nothing happens; this allows a little freedom in phase ordering.
|
||||
func (x *expandState) storeArgOrLoad(pos src.XPos, b *Block, source, mem *Value, t *types.Type, storeOffset int64, loadRegOffset Abi1RO, storeRc registerCursor) *Value {
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.indent(3)
|
||||
defer x.indent(-3)
|
||||
x.Printf("storeArgOrLoad(%s; %s; %s; %d; %s)\n", source.LongString(), mem.String(), t.String(), storeOffset, storeRc.String())
|
||||
@ -1060,7 +1060,7 @@ func (x *expandState) storeArgOrLoad(pos src.XPos, b *Block, source, mem *Value,
|
||||
dst := x.offsetFrom(b, storeRc.storeDest, storeOffset, types.NewPtr(t))
|
||||
s = b.NewValue3A(pos, OpStore, types.TypeMem, t, dst, source, mem)
|
||||
}
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.Printf("-->storeArg returns %s, storeRc=%s\n", s.LongString(), storeRc.String())
|
||||
}
|
||||
return s
|
||||
@ -1071,18 +1071,23 @@ func (x *expandState) storeArgOrLoad(pos src.XPos, b *Block, source, mem *Value,
|
||||
// to account for any parameter stores required.
|
||||
// Any of the old Args that have their use count fall to zero are marked OpInvalid.
|
||||
func (x *expandState) rewriteArgs(v *Value, firstArg int) {
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.indent(3)
|
||||
defer x.indent(-3)
|
||||
x.Printf("rewriteArgs(%s; %d)\n", v.LongString(), firstArg)
|
||||
}
|
||||
// Thread the stores on the memory arg
|
||||
aux := v.Aux.(*AuxCall)
|
||||
pos := v.Pos.WithNotStmt()
|
||||
m0 := v.MemoryArg()
|
||||
mem := m0
|
||||
newArgs := []*Value{}
|
||||
oldArgs := []*Value{}
|
||||
sp := x.sp
|
||||
if v.Op == OpTailLECall {
|
||||
// For tail call, we unwind the frame before the call so we'll use the caller's
|
||||
// SP.
|
||||
sp = x.f.Entry.NewValue0(src.NoXPos, OpGetCallerSP, x.typs.Uintptr)
|
||||
}
|
||||
for i, a := range v.Args[firstArg : len(v.Args)-1] { // skip leading non-parameter SSA Args and trailing mem SSA Arg.
|
||||
oldArgs = append(oldArgs, a)
|
||||
auxI := int64(i)
|
||||
@ -1093,9 +1098,20 @@ func (x *expandState) rewriteArgs(v *Value, firstArg int) {
|
||||
if a.MemoryArg() != m0 {
|
||||
x.f.Fatalf("Op...LECall and OpDereference have mismatched mem, %s and %s", v.LongString(), a.LongString())
|
||||
}
|
||||
if v.Op == OpTailLECall {
|
||||
// It's common for a tail call passing the same arguments (e.g. method wrapper),
|
||||
// so this would be a self copy. Detect this and optimize it out.
|
||||
a0 := a.Args[0]
|
||||
if a0.Op == OpLocalAddr {
|
||||
n := a0.Aux.(*ir.Name)
|
||||
if n.Class == ir.PPARAM && n.FrameOffset()+x.f.Config.ctxt.FixedFrameSize() == aOffset {
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
// "Dereference" of addressed (probably not-SSA-eligible) value becomes Move
|
||||
// TODO(register args) this will be more complicated with registers in the picture.
|
||||
mem = x.rewriteDereference(v.Block, x.sp, a, mem, aOffset, aux.SizeOfArg(auxI), aType, pos)
|
||||
mem = x.rewriteDereference(v.Block, sp, a, mem, aOffset, aux.SizeOfArg(auxI), aType, a.Pos)
|
||||
} else {
|
||||
var rc registerCursor
|
||||
var result *[]*Value
|
||||
@ -1105,11 +1121,19 @@ func (x *expandState) rewriteArgs(v *Value, firstArg int) {
|
||||
} else {
|
||||
aOffset = aux.OffsetOfArg(auxI)
|
||||
}
|
||||
if x.debug {
|
||||
if v.Op == OpTailLECall && a.Op == OpArg && a.AuxInt == 0 {
|
||||
// It's common for a tail call passing the same arguments (e.g. method wrapper),
|
||||
// so this would be a self copy. Detect this and optimize it out.
|
||||
n := a.Aux.(*ir.Name)
|
||||
if n.Class == ir.PPARAM && n.FrameOffset()+x.f.Config.ctxt.FixedFrameSize() == aOffset {
|
||||
continue
|
||||
}
|
||||
}
|
||||
if x.debug > 1 {
|
||||
x.Printf("...storeArg %s, %v, %d\n", a.LongString(), aType, aOffset)
|
||||
}
|
||||
rc.init(aRegs, aux.abiInfo, result, x.sp)
|
||||
mem = x.storeArgOrLoad(pos, v.Block, a, mem, aType, aOffset, 0, rc)
|
||||
rc.init(aRegs, aux.abiInfo, result, sp)
|
||||
mem = x.storeArgOrLoad(a.Pos, v.Block, a, mem, aType, aOffset, 0, rc)
|
||||
}
|
||||
}
|
||||
var preArgStore [2]*Value
|
||||
@ -1120,16 +1144,31 @@ func (x *expandState) rewriteArgs(v *Value, firstArg int) {
|
||||
v.AddArg(mem)
|
||||
for _, a := range oldArgs {
|
||||
if a.Uses == 0 {
|
||||
if x.debug {
|
||||
x.Printf("...marking %v unused\n", a.LongString())
|
||||
}
|
||||
a.invalidateRecursively()
|
||||
x.invalidateRecursively(a)
|
||||
}
|
||||
}
|
||||
|
||||
	return
}

func (x *expandState) invalidateRecursively(a *Value) {
	var s string
	if x.debug > 0 {
		plus := " "
		if a.Pos.IsStmt() == src.PosIsStmt {
			plus = " +"
		}
		s = a.String() + plus + a.Pos.LineNumber() + " " + a.LongString()
		if x.debug > 1 {
			x.Printf("...marking %v unused\n", s)
		}
	}
	lost := a.invalidateRecursively()
	if x.debug&1 != 0 && lost { // For odd values of x.debug, do this.
		x.Printf("Lost statement marker in %s on former %s\n", base.Ctxt.Pkgpath+"."+x.f.Name, s)
	}
}
|
||||
|
||||
// expandCalls converts LE (Late Expansion) calls that act like they receive value args into a lower-level form
|
||||
// that is more oriented to a platform's ABI. The SelectN operations that extract results are rewritten into
|
||||
// more appropriate forms, and any StructMake or ArrayMake inputs are decomposed until non-struct values are
|
||||
@ -1148,7 +1187,7 @@ func expandCalls(f *Func) {
|
||||
x := &expandState{
|
||||
f: f,
|
||||
abi1: f.ABI1,
|
||||
debug: f.pass.debug > 0,
|
||||
debug: f.pass.debug,
|
||||
canSSAType: f.fe.CanSSA,
|
||||
regSize: f.Config.RegSize,
|
||||
sp: sp,
|
||||
@ -1170,7 +1209,7 @@ func expandCalls(f *Func) {
|
||||
x.loRo, x.hiRo = 0, 1
|
||||
}
|
||||
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.Printf("\nexpandsCalls(%s)\n", f.Name)
|
||||
}
|
||||
|
||||
@ -1193,7 +1232,7 @@ func expandCalls(f *Func) {
|
||||
for _, v := range b.Values {
|
||||
firstArg := 0
|
||||
switch v.Op {
|
||||
case OpStaticLECall:
|
||||
case OpStaticLECall, OpTailLECall:
|
||||
case OpInterLECall:
|
||||
firstArg = 1
|
||||
case OpClosureLECall:
|
||||
@ -1210,9 +1249,8 @@ func expandCalls(f *Func) {
|
||||
m0 := v.MemoryArg()
|
||||
mem := m0
|
||||
aux := f.OwnAux
|
||||
pos := v.Pos.WithNotStmt()
|
||||
allResults := []*Value{}
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.Printf("multiValueExit rewriting %s\n", v.LongString())
|
||||
}
|
||||
var oldArgs []*Value
|
||||
@ -1233,7 +1271,7 @@ func expandCalls(f *Func) {
|
||||
}
|
||||
continue
|
||||
}
|
||||
mem = x.rewriteDereference(v.Block, auxBase, a, mem, auxOffset, auxSize, auxType, pos)
|
||||
mem = x.rewriteDereference(v.Block, auxBase, a, mem, auxOffset, auxSize, auxType, a.Pos)
|
||||
} else {
|
||||
if a.Op == OpLoad && a.Args[0].Op == OpLocalAddr {
|
||||
addr := a.Args[0] // This is a self-move. // TODO(register args) do what here for registers?
|
||||
@ -1257,13 +1295,13 @@ func expandCalls(f *Func) {
|
||||
b.SetControl(v)
|
||||
for _, a := range oldArgs {
|
||||
if a.Uses == 0 {
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.Printf("...marking %v unused\n", a.LongString())
|
||||
}
|
||||
a.invalidateRecursively()
|
||||
x.invalidateRecursively(a)
|
||||
}
|
||||
}
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.Printf("...multiValueExit new result %s\n", v.LongString())
|
||||
}
|
||||
x.indent(-3)
|
||||
@ -1317,7 +1355,7 @@ func expandCalls(f *Func) {
|
||||
switch w.Op {
|
||||
case OpStructSelect, OpArraySelect, OpSelectN, OpArg:
|
||||
val2Preds[w] += 1
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.Printf("v2p[%s] = %d\n", w.LongString(), val2Preds[w])
|
||||
}
|
||||
}
|
||||
@ -1326,7 +1364,7 @@ func expandCalls(f *Func) {
|
||||
case OpSelectN:
|
||||
if _, ok := val2Preds[v]; !ok {
|
||||
val2Preds[v] = 0
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.Printf("v2p[%s] = %d\n", v.LongString(), val2Preds[v])
|
||||
}
|
||||
}
|
||||
@ -1337,7 +1375,7 @@ func expandCalls(f *Func) {
|
||||
}
|
||||
if _, ok := val2Preds[v]; !ok {
|
||||
val2Preds[v] = 0
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.Printf("v2p[%s] = %d\n", v.LongString(), val2Preds[v])
|
||||
}
|
||||
}
|
||||
@ -1451,7 +1489,7 @@ func expandCalls(f *Func) {
|
||||
if dupe == nil {
|
||||
x.commonSelectors[sk] = v
|
||||
} else if x.sdom.IsAncestorEq(dupe.Block, v.Block) {
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.Printf("Duplicate, make %s copy of %s\n", v, dupe)
|
||||
}
|
||||
v.copyOf(dupe)
|
||||
@ -1467,12 +1505,12 @@ func expandCalls(f *Func) {
|
||||
|
||||
// Rewrite selectors.
|
||||
for i, v := range allOrdered {
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
b := v.Block
|
||||
x.Printf("allOrdered[%d] = b%d, %s, uses=%d\n", i, b.ID, v.LongString(), v.Uses)
|
||||
}
|
||||
if v.Uses == 0 {
|
||||
v.invalidateRecursively()
|
||||
x.invalidateRecursively(v)
|
||||
continue
|
||||
}
|
||||
if v.Op == OpCopy {
|
||||
@ -1512,6 +1550,10 @@ func expandCalls(f *Func) {
|
||||
v.Op = OpStaticCall
|
||||
rts := abi.RegisterTypes(v.Aux.(*AuxCall).abiInfo.OutParams())
|
||||
v.Type = types.NewResults(append(rts, types.TypeMem))
|
||||
case OpTailLECall:
|
||||
v.Op = OpTailCall
|
||||
rts := abi.RegisterTypes(v.Aux.(*AuxCall).abiInfo.OutParams())
|
||||
v.Type = types.NewResults(append(rts, types.TypeMem))
|
||||
case OpClosureLECall:
|
||||
v.Op = OpClosureCall
|
||||
rts := abi.RegisterTypes(v.Aux.(*AuxCall).abiInfo.OutParams())
|
||||
@ -1583,7 +1625,7 @@ func expandCalls(f *Func) {
|
||||
v.SetArg(i, aa)
|
||||
for a.Uses == 0 {
|
||||
b := a.Args[0]
|
||||
a.invalidateRecursively()
|
||||
x.invalidateRecursively(a)
|
||||
a = b
|
||||
}
|
||||
}
|
||||
@ -1619,7 +1661,7 @@ func expandCalls(f *Func) {
|
||||
// rewriteArgToMemOrRegs converts OpArg v in-place into the register version of v,
|
||||
// if that is appropriate.
|
||||
func (x *expandState) rewriteArgToMemOrRegs(v *Value) *Value {
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.indent(3)
|
||||
defer x.indent(-3)
|
||||
x.Printf("rewriteArgToMemOrRegs(%s)\n", v.LongString())
|
||||
@ -1650,7 +1692,7 @@ func (x *expandState) rewriteArgToMemOrRegs(v *Value) *Value {
|
||||
default:
|
||||
panic(badVal("Saw unexpanded OpArg", v))
|
||||
}
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.Printf("-->%s\n", v.LongString())
|
||||
}
|
||||
return v
|
||||
@ -1660,7 +1702,7 @@ func (x *expandState) rewriteArgToMemOrRegs(v *Value) *Value {
|
||||
// or rewrites it into a copy of the appropriate OpArgXXX. The actual OpArgXXX is determined by combining baseArg (an OpArg)
|
||||
// with offset, regOffset, and t to determine which portion of it to reference (either all or a part, in memory or in registers).
|
||||
func (x *expandState) newArgToMemOrRegs(baseArg, toReplace *Value, offset int64, regOffset Abi1RO, t *types.Type, pos src.XPos) *Value {
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.indent(3)
|
||||
defer x.indent(-3)
|
||||
x.Printf("newArgToMemOrRegs(base=%s; toReplace=%s; t=%s; memOff=%d; regOff=%d)\n", baseArg.String(), toReplace.LongString(), t.String(), offset, regOffset)
|
||||
@ -1696,7 +1738,7 @@ func (x *expandState) newArgToMemOrRegs(baseArg, toReplace *Value, offset int64,
|
||||
if toReplace != nil {
|
||||
toReplace.copyOf(w)
|
||||
}
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.Printf("-->%s\n", w.LongString())
|
||||
}
|
||||
return w
|
||||
@ -1727,7 +1769,7 @@ func (x *expandState) newArgToMemOrRegs(baseArg, toReplace *Value, offset int64,
|
||||
if toReplace != nil {
|
||||
toReplace.copyOf(w)
|
||||
}
|
||||
if x.debug {
|
||||
if x.debug > 1 {
|
||||
x.Printf("-->%s\n", w.LongString())
|
||||
}
|
||||
return w
|
||||
|
@@ -43,7 +43,7 @@ type Func struct {
	logfiles       map[string]writeSyncer
	HTMLWriter     *HTMLWriter    // html writer, for debugging
	DebugTest      bool           // default true unless $GOSSAHASH != ""; as a debugging aid, make new code conditional on this and use GOSSAHASH to binary search for failing cases
	PrintOrHtmlSSA bool           // true if GOSSAFUNC matches, true even if fe.Log() (spew phase results to stdout) is false.
	PrintOrHtmlSSA bool           // true if GOSSAFUNC matches, true even if fe.Log() (spew phase results to stdout) is false. There's an odd dependence on this in debug.go for method logf.
	ruleMatches    map[string]int // number of times countRule was called during compilation for any given string
	ABI0           *abi.ABIConfig // A copy, for no-sync access
	ABI1           *abi.ABIConfig // A copy, for no-sync access
@@ -317,6 +317,7 @@
(StaticCall ...) => (CALLstatic ...)
(ClosureCall ...) => (CALLclosure ...)
(InterCall ...) => (CALLinter ...)
(TailCall ...) => (CALLtail ...)

// Miscellaneous
(IsNonNil p) => (SETNE (TESTL p p))
@@ -455,6 +455,7 @@ func init() {
	},

	{name: "CALLstatic", argLength: 1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call static function aux.(*obj.LSym). arg0=mem, auxint=argsize, returns mem
	{name: "CALLtail", argLength: 1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // tail call static function aux.(*obj.LSym). arg0=mem, auxint=argsize, returns mem
	{name: "CALLclosure", argLength: 3, reg: regInfo{inputs: []regMask{gpsp, buildReg("DX"), 0}, clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call function via closure. arg0=codeptr, arg1=closure, arg2=mem, auxint=argsize, returns mem
	{name: "CALLinter", argLength: 2, reg: regInfo{inputs: []regMask{gp}, clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call fn by pointer. arg0=codeptr, arg1=mem, auxint=argsize, returns mem
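The CALLtail op added above is the target of the (TailCall ...) lowering; per the merge list, its main producer is the compiler-generated wrapper for a promoted method, which can now jump straight to the wrapped method rather than calling it. An illustrative, hand-written example of code that causes such a wrapper to be synthesized (the wrapper itself is generated by the compiler, and the type names here are made up):

// Sketch: a promoted method whose generated wrapper is a candidate for the
// tail-call lowering above. Names are illustrative only.
package main

import "fmt"

type shower interface{ Show() }

type Inner struct{ n int }

func (i Inner) Show() { fmt.Println("n =", i.n) }

type Outer struct{ Inner } // Show is promoted; the compiler emits a wrapper

func main() {
	var s shower = Outer{Inner{n: 7}}
	s.Show() // dispatches through the synthesized Outer.Show wrapper
}
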
@@ -408,6 +408,7 @@
(StaticCall ...) => (CALLstatic ...)
(ClosureCall ...) => (CALLclosure ...)
(InterCall ...) => (CALLinter ...)
(TailCall ...) => (CALLtail ...)

// Lowering conditional moves
// If the condition is a SETxx, we can just run a CMOV from the comparison that was
@@ -765,6 +765,7 @@ func init() {

	// With a register ABI, the actual register info for these instructions (i.e., what is used in regalloc) is augmented with per-call-site bindings of additional arguments to specific in and out registers.
	{name: "CALLstatic", argLength: -1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call static function aux.(*obj.LSym). last arg=mem, auxint=argsize, returns mem
	{name: "CALLtail", argLength: -1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // tail call static function aux.(*obj.LSym). last arg=mem, auxint=argsize, returns mem
	{name: "CALLclosure", argLength: -1, reg: regInfo{inputs: []regMask{gpsp, buildReg("DX"), 0}, clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call function via closure. arg0=codeptr, arg1=closure, last arg=mem, auxint=argsize, returns mem
	{name: "CALLinter", argLength: -1, reg: regInfo{inputs: []regMask{gp}, clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call fn by pointer. arg0=codeptr, last arg=mem, auxint=argsize, returns mem

@@ -351,6 +351,7 @@
(StaticCall ...) => (CALLstatic ...)
(ClosureCall ...) => (CALLclosure ...)
(InterCall ...) => (CALLinter ...)
(TailCall ...) => (CALLtail ...)

// checks
(NilCheck ...) => (LoweredNilCheck ...)
@@ -497,9 +498,9 @@
(XOR x (MOVWconst [c])) => (XORconst [c] x)
(BIC x (MOVWconst [c])) => (BICconst [c] x)

(SLL x (MOVWconst [c])) => (SLLconst x [c&31]) // Note: I don't think we ever generate bad constant shifts (i.e. c>=32)
(SRL x (MOVWconst [c])) => (SRLconst x [c&31])
(SRA x (MOVWconst [c])) => (SRAconst x [c&31])
(SLL x (MOVWconst [c])) && 0 <= c && c < 32 => (SLLconst x [c])
(SRL x (MOVWconst [c])) && 0 <= c && c < 32 => (SRLconst x [c])
(SRA x (MOVWconst [c])) && 0 <= c && c < 32 => (SRAconst x [c])

(CMP x (MOVWconst [c])) => (CMPconst [c] x)
(CMP (MOVWconst [c]) x) => (InvertFlags (CMPconst [c] x))
@@ -507,6 +508,8 @@
(TST x (MOVWconst [c])) => (TSTconst [c] x)
(TEQ x (MOVWconst [c])) => (TEQconst [c] x)

(SRR x (MOVWconst [c])) => (SRRconst x [c&31])

// Canonicalize the order of arguments to comparisons - helps with CSE.
(CMP x y) && canonLessThan(x,y) => (InvertFlags (CMP y x))

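The new `0 <= c && c < 32` conditions stop these rules from folding an out-of-range constant into an immediate shift: masking the count with c&31, as the old rules did, would turn a shift by 35 into a shift by 3, whereas Go defines over-wide shifts to produce 0 (or all sign bits for a signed right shift). A small reminder of the language semantics the guarded rules must preserve:

// Sketch: Go's defined behavior for shift counts at or beyond the operand
// width, which the guarded ARM rules above must not change.
package main

import "fmt"

func main() {
	var x uint32 = 0xFFFFFFFF
	var y int32 = -1
	s := uint(35)           // the old c&31 masking would have treated this as 3
	fmt.Println(x<<s, x>>s) // 0 0: everything is shifted out
	fmt.Println(y >> s)     // -1: arithmetic shift keeps the sign bit
}
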
@ -1072,60 +1075,60 @@
|
||||
(CMNshiftRL x (MOVWconst [c]) [d]) => (CMNconst x [int32(uint32(c)>>uint64(d))])
|
||||
(CMNshiftRA x (MOVWconst [c]) [d]) => (CMNconst x [c>>uint64(d)])
|
||||
|
||||
(ADDshiftLLreg x y (MOVWconst [c])) => (ADDshiftLL x y [c])
|
||||
(ADDshiftRLreg x y (MOVWconst [c])) => (ADDshiftRL x y [c])
|
||||
(ADDshiftRAreg x y (MOVWconst [c])) => (ADDshiftRA x y [c])
|
||||
(ADCshiftLLreg x y (MOVWconst [c]) flags) => (ADCshiftLL x y [c] flags)
|
||||
(ADCshiftRLreg x y (MOVWconst [c]) flags) => (ADCshiftRL x y [c] flags)
|
||||
(ADCshiftRAreg x y (MOVWconst [c]) flags) => (ADCshiftRA x y [c] flags)
|
||||
(ADDSshiftLLreg x y (MOVWconst [c])) => (ADDSshiftLL x y [c])
|
||||
(ADDSshiftRLreg x y (MOVWconst [c])) => (ADDSshiftRL x y [c])
|
||||
(ADDSshiftRAreg x y (MOVWconst [c])) => (ADDSshiftRA x y [c])
|
||||
(SUBshiftLLreg x y (MOVWconst [c])) => (SUBshiftLL x y [c])
|
||||
(SUBshiftRLreg x y (MOVWconst [c])) => (SUBshiftRL x y [c])
|
||||
(SUBshiftRAreg x y (MOVWconst [c])) => (SUBshiftRA x y [c])
|
||||
(SBCshiftLLreg x y (MOVWconst [c]) flags) => (SBCshiftLL x y [c] flags)
|
||||
(SBCshiftRLreg x y (MOVWconst [c]) flags) => (SBCshiftRL x y [c] flags)
|
||||
(SBCshiftRAreg x y (MOVWconst [c]) flags) => (SBCshiftRA x y [c] flags)
|
||||
(SUBSshiftLLreg x y (MOVWconst [c])) => (SUBSshiftLL x y [c])
|
||||
(SUBSshiftRLreg x y (MOVWconst [c])) => (SUBSshiftRL x y [c])
|
||||
(SUBSshiftRAreg x y (MOVWconst [c])) => (SUBSshiftRA x y [c])
|
||||
(RSBshiftLLreg x y (MOVWconst [c])) => (RSBshiftLL x y [c])
|
||||
(RSBshiftRLreg x y (MOVWconst [c])) => (RSBshiftRL x y [c])
|
||||
(RSBshiftRAreg x y (MOVWconst [c])) => (RSBshiftRA x y [c])
|
||||
(RSCshiftLLreg x y (MOVWconst [c]) flags) => (RSCshiftLL x y [c] flags)
|
||||
(RSCshiftRLreg x y (MOVWconst [c]) flags) => (RSCshiftRL x y [c] flags)
|
||||
(RSCshiftRAreg x y (MOVWconst [c]) flags) => (RSCshiftRA x y [c] flags)
|
||||
(RSBSshiftLLreg x y (MOVWconst [c])) => (RSBSshiftLL x y [c])
|
||||
(RSBSshiftRLreg x y (MOVWconst [c])) => (RSBSshiftRL x y [c])
|
||||
(RSBSshiftRAreg x y (MOVWconst [c])) => (RSBSshiftRA x y [c])
|
||||
(ANDshiftLLreg x y (MOVWconst [c])) => (ANDshiftLL x y [c])
|
||||
(ANDshiftRLreg x y (MOVWconst [c])) => (ANDshiftRL x y [c])
|
||||
(ANDshiftRAreg x y (MOVWconst [c])) => (ANDshiftRA x y [c])
|
||||
(ORshiftLLreg x y (MOVWconst [c])) => (ORshiftLL x y [c])
|
||||
(ORshiftRLreg x y (MOVWconst [c])) => (ORshiftRL x y [c])
|
||||
(ORshiftRAreg x y (MOVWconst [c])) => (ORshiftRA x y [c])
|
||||
(XORshiftLLreg x y (MOVWconst [c])) => (XORshiftLL x y [c])
|
||||
(XORshiftRLreg x y (MOVWconst [c])) => (XORshiftRL x y [c])
|
||||
(XORshiftRAreg x y (MOVWconst [c])) => (XORshiftRA x y [c])
|
||||
(BICshiftLLreg x y (MOVWconst [c])) => (BICshiftLL x y [c])
|
||||
(BICshiftRLreg x y (MOVWconst [c])) => (BICshiftRL x y [c])
|
||||
(BICshiftRAreg x y (MOVWconst [c])) => (BICshiftRA x y [c])
|
||||
(MVNshiftLLreg x (MOVWconst [c])) => (MVNshiftLL x [c])
|
||||
(MVNshiftRLreg x (MOVWconst [c])) => (MVNshiftRL x [c])
|
||||
(MVNshiftRAreg x (MOVWconst [c])) => (MVNshiftRA x [c])
|
||||
(CMPshiftLLreg x y (MOVWconst [c])) => (CMPshiftLL x y [c])
|
||||
(CMPshiftRLreg x y (MOVWconst [c])) => (CMPshiftRL x y [c])
|
||||
(CMPshiftRAreg x y (MOVWconst [c])) => (CMPshiftRA x y [c])
|
||||
(TSTshiftLLreg x y (MOVWconst [c])) => (TSTshiftLL x y [c])
|
||||
(TSTshiftRLreg x y (MOVWconst [c])) => (TSTshiftRL x y [c])
|
||||
(TSTshiftRAreg x y (MOVWconst [c])) => (TSTshiftRA x y [c])
|
||||
(TEQshiftLLreg x y (MOVWconst [c])) => (TEQshiftLL x y [c])
|
||||
(TEQshiftRLreg x y (MOVWconst [c])) => (TEQshiftRL x y [c])
|
||||
(TEQshiftRAreg x y (MOVWconst [c])) => (TEQshiftRA x y [c])
|
||||
(CMNshiftLLreg x y (MOVWconst [c])) => (CMNshiftLL x y [c])
|
||||
(CMNshiftRLreg x y (MOVWconst [c])) => (CMNshiftRL x y [c])
|
||||
(CMNshiftRAreg x y (MOVWconst [c])) => (CMNshiftRA x y [c])
|
||||
(ADDshiftLLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (ADDshiftLL x y [c])
|
||||
(ADDshiftRLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (ADDshiftRL x y [c])
|
||||
(ADDshiftRAreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (ADDshiftRA x y [c])
|
||||
(ADCshiftLLreg x y (MOVWconst [c]) flags) && 0 <= c && c < 32 => (ADCshiftLL x y [c] flags)
|
||||
(ADCshiftRLreg x y (MOVWconst [c]) flags) && 0 <= c && c < 32 => (ADCshiftRL x y [c] flags)
|
||||
(ADCshiftRAreg x y (MOVWconst [c]) flags) && 0 <= c && c < 32 => (ADCshiftRA x y [c] flags)
|
||||
(ADDSshiftLLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (ADDSshiftLL x y [c])
|
||||
(ADDSshiftRLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (ADDSshiftRL x y [c])
|
||||
(ADDSshiftRAreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (ADDSshiftRA x y [c])
|
||||
(SUBshiftLLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (SUBshiftLL x y [c])
|
||||
(SUBshiftRLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (SUBshiftRL x y [c])
|
||||
(SUBshiftRAreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (SUBshiftRA x y [c])
|
||||
(SBCshiftLLreg x y (MOVWconst [c]) flags) && 0 <= c && c < 32 => (SBCshiftLL x y [c] flags)
|
||||
(SBCshiftRLreg x y (MOVWconst [c]) flags) && 0 <= c && c < 32 => (SBCshiftRL x y [c] flags)
|
||||
(SBCshiftRAreg x y (MOVWconst [c]) flags) && 0 <= c && c < 32 => (SBCshiftRA x y [c] flags)
|
||||
(SUBSshiftLLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (SUBSshiftLL x y [c])
|
||||
(SUBSshiftRLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (SUBSshiftRL x y [c])
|
||||
(SUBSshiftRAreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (SUBSshiftRA x y [c])
|
||||
(RSBshiftLLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (RSBshiftLL x y [c])
|
||||
(RSBshiftRLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (RSBshiftRL x y [c])
|
||||
(RSBshiftRAreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (RSBshiftRA x y [c])
|
||||
(RSCshiftLLreg x y (MOVWconst [c]) flags) && 0 <= c && c < 32 => (RSCshiftLL x y [c] flags)
|
||||
(RSCshiftRLreg x y (MOVWconst [c]) flags) && 0 <= c && c < 32 => (RSCshiftRL x y [c] flags)
|
||||
(RSCshiftRAreg x y (MOVWconst [c]) flags) && 0 <= c && c < 32 => (RSCshiftRA x y [c] flags)
|
||||
(RSBSshiftLLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (RSBSshiftLL x y [c])
|
||||
(RSBSshiftRLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (RSBSshiftRL x y [c])
|
||||
(RSBSshiftRAreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (RSBSshiftRA x y [c])
|
||||
(ANDshiftLLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (ANDshiftLL x y [c])
|
||||
(ANDshiftRLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (ANDshiftRL x y [c])
|
||||
(ANDshiftRAreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (ANDshiftRA x y [c])
|
||||
(ORshiftLLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (ORshiftLL x y [c])
|
||||
(ORshiftRLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (ORshiftRL x y [c])
|
||||
(ORshiftRAreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (ORshiftRA x y [c])
|
||||
(XORshiftLLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (XORshiftLL x y [c])
|
||||
(XORshiftRLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (XORshiftRL x y [c])
|
||||
(XORshiftRAreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (XORshiftRA x y [c])
|
||||
(BICshiftLLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (BICshiftLL x y [c])
|
||||
(BICshiftRLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (BICshiftRL x y [c])
|
||||
(BICshiftRAreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (BICshiftRA x y [c])
|
||||
(MVNshiftLLreg x (MOVWconst [c])) && 0 <= c && c < 32 => (MVNshiftLL x [c])
|
||||
(MVNshiftRLreg x (MOVWconst [c])) && 0 <= c && c < 32 => (MVNshiftRL x [c])
|
||||
(MVNshiftRAreg x (MOVWconst [c])) && 0 <= c && c < 32 => (MVNshiftRA x [c])
|
||||
(CMPshiftLLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (CMPshiftLL x y [c])
|
||||
(CMPshiftRLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (CMPshiftRL x y [c])
|
||||
(CMPshiftRAreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (CMPshiftRA x y [c])
|
||||
(TSTshiftLLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (TSTshiftLL x y [c])
|
||||
(TSTshiftRLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (TSTshiftRL x y [c])
|
||||
(TSTshiftRAreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (TSTshiftRA x y [c])
|
||||
(TEQshiftLLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (TEQshiftLL x y [c])
|
||||
(TEQshiftRLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (TEQshiftRL x y [c])
|
||||
(TEQshiftRAreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (TEQshiftRA x y [c])
|
||||
(CMNshiftLLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (CMNshiftLL x y [c])
|
||||
(CMNshiftRLreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (CMNshiftRL x y [c])
|
||||
(CMNshiftRAreg x y (MOVWconst [c])) && 0 <= c && c < 32 => (CMNshiftRA x y [c])
|
||||
|
||||
// Generate rotates
|
||||
(ADDshiftLL [c] (SRLconst x [32-c]) x) => (SRRconst [32-c] x)
|
||||
@ -1135,7 +1138,6 @@
|
||||
( ORshiftRL [c] (SLLconst x [32-c]) x) => (SRRconst [ c] x)
|
||||
(XORshiftRL [c] (SLLconst x [32-c]) x) => (SRRconst [ c] x)
|
||||
|
||||
(RotateLeft32 x (MOVWconst [c])) => (SRRconst [-c&31] x)
|
||||
(RotateLeft16 <t> x (MOVWconst [c])) => (Or16 (Lsh16x32 <t> x (MOVWconst [c&15])) (Rsh16Ux32 <t> x (MOVWconst [-c&15])))
|
||||
(RotateLeft8 <t> x (MOVWconst [c])) => (Or8 (Lsh8x32 <t> x (MOVWconst [c&7])) (Rsh8Ux32 <t> x (MOVWconst [-c&7])))
|
||||
(RotateLeft32 x y) => (SRR x (RSBconst [0] <y.Type> y))
|
||||
@ -1237,24 +1239,24 @@
|
||||
(AND x (MVN y)) => (BIC x y)
|
||||
|
||||
// simplification with *shift ops
|
||||
(SUBshiftLL x (SLLconst x [c]) [d]) && c==d => (MOVWconst [0])
|
||||
(SUBshiftRL x (SRLconst x [c]) [d]) && c==d => (MOVWconst [0])
|
||||
(SUBshiftRA x (SRAconst x [c]) [d]) && c==d => (MOVWconst [0])
|
||||
(RSBshiftLL x (SLLconst x [c]) [d]) && c==d => (MOVWconst [0])
|
||||
(RSBshiftRL x (SRLconst x [c]) [d]) && c==d => (MOVWconst [0])
|
||||
(RSBshiftRA x (SRAconst x [c]) [d]) && c==d => (MOVWconst [0])
|
||||
(ANDshiftLL x y:(SLLconst x [c]) [d]) && c==d => y
|
||||
(ANDshiftRL x y:(SRLconst x [c]) [d]) && c==d => y
|
||||
(ANDshiftRA x y:(SRAconst x [c]) [d]) && c==d => y
|
||||
(ORshiftLL x y:(SLLconst x [c]) [d]) && c==d => y
|
||||
(ORshiftRL x y:(SRLconst x [c]) [d]) && c==d => y
|
||||
(ORshiftRA x y:(SRAconst x [c]) [d]) && c==d => y
|
||||
(XORshiftLL x (SLLconst x [c]) [d]) && c==d => (MOVWconst [0])
|
||||
(XORshiftRL x (SRLconst x [c]) [d]) && c==d => (MOVWconst [0])
|
||||
(XORshiftRA x (SRAconst x [c]) [d]) && c==d => (MOVWconst [0])
|
||||
(BICshiftLL x (SLLconst x [c]) [d]) && c==d => (MOVWconst [0])
|
||||
(BICshiftRL x (SRLconst x [c]) [d]) && c==d => (MOVWconst [0])
|
||||
(BICshiftRA x (SRAconst x [c]) [d]) && c==d => (MOVWconst [0])
|
||||
(SUBshiftLL (SLLconst x [c]) x [c]) => (MOVWconst [0])
|
||||
(SUBshiftRL (SRLconst x [c]) x [c]) => (MOVWconst [0])
|
||||
(SUBshiftRA (SRAconst x [c]) x [c]) => (MOVWconst [0])
|
||||
(RSBshiftLL (SLLconst x [c]) x [c]) => (MOVWconst [0])
|
||||
(RSBshiftRL (SRLconst x [c]) x [c]) => (MOVWconst [0])
|
||||
(RSBshiftRA (SRAconst x [c]) x [c]) => (MOVWconst [0])
|
||||
(ANDshiftLL y:(SLLconst x [c]) x [c]) => y
|
||||
(ANDshiftRL y:(SRLconst x [c]) x [c]) => y
|
||||
(ANDshiftRA y:(SRAconst x [c]) x [c]) => y
|
||||
(ORshiftLL y:(SLLconst x [c]) x [c]) => y
|
||||
(ORshiftRL y:(SRLconst x [c]) x [c]) => y
|
||||
(ORshiftRA y:(SRAconst x [c]) x [c]) => y
|
||||
(XORshiftLL (SLLconst x [c]) x [c]) => (MOVWconst [0])
|
||||
(XORshiftRL (SRLconst x [c]) x [c]) => (MOVWconst [0])
|
||||
(XORshiftRA (SRAconst x [c]) x [c]) => (MOVWconst [0])
|
||||
(BICshiftLL (SLLconst x [c]) x [c]) => (MOVWconst [0])
|
||||
(BICshiftRL (SRLconst x [c]) x [c]) => (MOVWconst [0])
|
||||
(BICshiftRA (SRAconst x [c]) x [c]) => (MOVWconst [0])
|
||||
(AND x (MVNshiftLL y [c])) => (BICshiftLL x y [c])
|
||||
(AND x (MVNshiftRL y [c])) => (BICshiftRL x y [c])
|
||||
(AND x (MVNshiftRA y [c])) => (BICshiftRA x y [c])
|
||||
|
@ -503,6 +503,7 @@
|
||||
(StaticCall ...) => (CALLstatic ...)
|
||||
(ClosureCall ...) => (CALLclosure ...)
|
||||
(InterCall ...) => (CALLinter ...)
|
||||
(TailCall ...) => (CALLtail ...)
|
||||
|
||||
// checks
|
||||
(NilCheck ...) => (LoweredNilCheck ...)
|
||||
@ -1174,6 +1175,9 @@
|
||||
(CMPW x (MOVDconst [c])) => (CMPWconst [int32(c)] x)
|
||||
(CMPW (MOVDconst [c]) x) => (InvertFlags (CMPWconst [int32(c)] x))
|
||||
|
||||
(ROR x (MOVDconst [c])) => (RORconst x [c&63])
|
||||
(RORW x (MOVDconst [c])) => (RORWconst x [c&31])
|
||||
|
||||
// Canonicalize the order of arguments to comparisons - helps with CSE.
|
||||
((CMP|CMPW) x y) && canonLessThan(x,y) => (InvertFlags ((CMP|CMPW) y x))
|
||||
|
||||
@ -1359,6 +1363,7 @@
|
||||
(XOR x (MVN y)) => (EON x y)
|
||||
(OR x (MVN y)) => (ORN x y)
|
||||
(MVN (XOR x y)) => (EON x y)
|
||||
(NEG (NEG x)) => x
|
||||
|
||||
(CSEL [cc] (MOVDconst [-1]) (MOVDconst [0]) flag) => (CSETM [cc] flag)
|
||||
(CSEL [cc] (MOVDconst [0]) (MOVDconst [-1]) flag) => (CSETM [arm64Negate(cc)] flag)
|
||||
@ -1596,6 +1601,7 @@
|
||||
(MVN x:(SLLconst [c] y)) && clobberIfDead(x) => (MVNshiftLL [c] y)
|
||||
(MVN x:(SRLconst [c] y)) && clobberIfDead(x) => (MVNshiftRL [c] y)
|
||||
(MVN x:(SRAconst [c] y)) && clobberIfDead(x) => (MVNshiftRA [c] y)
|
||||
(MVN x:(RORconst [c] y)) && clobberIfDead(x) => (MVNshiftRO [c] y)
|
||||
(ADD x0 x1:(SLLconst [c] y)) && clobberIfDead(x1) => (ADDshiftLL x0 y [c])
|
||||
(ADD x0 x1:(SRLconst [c] y)) && clobberIfDead(x1) => (ADDshiftRL x0 y [c])
|
||||
(ADD x0 x1:(SRAconst [c] y)) && clobberIfDead(x1) => (ADDshiftRA x0 y [c])
|
||||
@ -1605,21 +1611,27 @@
|
||||
(AND x0 x1:(SLLconst [c] y)) && clobberIfDead(x1) => (ANDshiftLL x0 y [c])
|
||||
(AND x0 x1:(SRLconst [c] y)) && clobberIfDead(x1) => (ANDshiftRL x0 y [c])
|
||||
(AND x0 x1:(SRAconst [c] y)) && clobberIfDead(x1) => (ANDshiftRA x0 y [c])
|
||||
(AND x0 x1:(RORconst [c] y)) && clobberIfDead(x1) => (ANDshiftRO x0 y [c])
|
||||
(OR x0 x1:(SLLconst [c] y)) && clobberIfDead(x1) => (ORshiftLL x0 y [c]) // useful for combined load
|
||||
(OR x0 x1:(SRLconst [c] y)) && clobberIfDead(x1) => (ORshiftRL x0 y [c])
|
||||
(OR x0 x1:(SRAconst [c] y)) && clobberIfDead(x1) => (ORshiftRA x0 y [c])
|
||||
(OR x0 x1:(RORconst [c] y)) && clobberIfDead(x1) => (ORshiftRO x0 y [c])
|
||||
(XOR x0 x1:(SLLconst [c] y)) && clobberIfDead(x1) => (XORshiftLL x0 y [c])
|
||||
(XOR x0 x1:(SRLconst [c] y)) && clobberIfDead(x1) => (XORshiftRL x0 y [c])
|
||||
(XOR x0 x1:(SRAconst [c] y)) && clobberIfDead(x1) => (XORshiftRA x0 y [c])
|
||||
(XOR x0 x1:(RORconst [c] y)) && clobberIfDead(x1) => (XORshiftRO x0 y [c])
|
||||
(BIC x0 x1:(SLLconst [c] y)) && clobberIfDead(x1) => (BICshiftLL x0 y [c])
|
||||
(BIC x0 x1:(SRLconst [c] y)) && clobberIfDead(x1) => (BICshiftRL x0 y [c])
|
||||
(BIC x0 x1:(SRAconst [c] y)) && clobberIfDead(x1) => (BICshiftRA x0 y [c])
|
||||
(BIC x0 x1:(RORconst [c] y)) && clobberIfDead(x1) => (BICshiftRO x0 y [c])
|
||||
(ORN x0 x1:(SLLconst [c] y)) && clobberIfDead(x1) => (ORNshiftLL x0 y [c])
|
||||
(ORN x0 x1:(SRLconst [c] y)) && clobberIfDead(x1) => (ORNshiftRL x0 y [c])
|
||||
(ORN x0 x1:(SRAconst [c] y)) && clobberIfDead(x1) => (ORNshiftRA x0 y [c])
|
||||
(ORN x0 x1:(RORconst [c] y)) && clobberIfDead(x1) => (ORNshiftRO x0 y [c])
|
||||
(EON x0 x1:(SLLconst [c] y)) && clobberIfDead(x1) => (EONshiftLL x0 y [c])
|
||||
(EON x0 x1:(SRLconst [c] y)) && clobberIfDead(x1) => (EONshiftRL x0 y [c])
|
||||
(EON x0 x1:(SRAconst [c] y)) && clobberIfDead(x1) => (EONshiftRA x0 y [c])
|
||||
(EON x0 x1:(RORconst [c] y)) && clobberIfDead(x1) => (EONshiftRO x0 y [c])
|
||||
(CMP x0 x1:(SLLconst [c] y)) && clobberIfDead(x1) => (CMPshiftLL x0 y [c])
|
||||
(CMP x0:(SLLconst [c] y) x1) && clobberIfDead(x0) => (InvertFlags (CMPshiftLL x1 y [c]))
|
||||
(CMP x0 x1:(SRLconst [c] y)) && clobberIfDead(x1) => (CMPshiftRL x0 y [c])
|
||||
@ -1632,6 +1644,7 @@
|
||||
(TST x0 x1:(SLLconst [c] y)) && clobberIfDead(x1) => (TSTshiftLL x0 y [c])
|
||||
(TST x0 x1:(SRLconst [c] y)) && clobberIfDead(x1) => (TSTshiftRL x0 y [c])
|
||||
(TST x0 x1:(SRAconst [c] y)) && clobberIfDead(x1) => (TSTshiftRA x0 y [c])
|
||||
(TST x0 x1:(RORconst [c] y)) && clobberIfDead(x1) => (TSTshiftRO x0 y [c])
|
||||
|
||||
// prefer *const ops to *shift ops
|
||||
(ADDshiftLL (MOVDconst [c]) x [d]) => (ADDconst [c] (SLLconst <x.Type> x [d]))
|
||||
@ -1640,12 +1653,15 @@
|
||||
(ANDshiftLL (MOVDconst [c]) x [d]) => (ANDconst [c] (SLLconst <x.Type> x [d]))
|
||||
(ANDshiftRL (MOVDconst [c]) x [d]) => (ANDconst [c] (SRLconst <x.Type> x [d]))
|
||||
(ANDshiftRA (MOVDconst [c]) x [d]) => (ANDconst [c] (SRAconst <x.Type> x [d]))
|
||||
(ANDshiftRO (MOVDconst [c]) x [d]) => (ANDconst [c] (RORconst <x.Type> x [d]))
|
||||
(ORshiftLL (MOVDconst [c]) x [d]) => (ORconst [c] (SLLconst <x.Type> x [d]))
|
||||
(ORshiftRL (MOVDconst [c]) x [d]) => (ORconst [c] (SRLconst <x.Type> x [d]))
|
||||
(ORshiftRA (MOVDconst [c]) x [d]) => (ORconst [c] (SRAconst <x.Type> x [d]))
|
||||
(ORshiftRO (MOVDconst [c]) x [d]) => (ORconst [c] (RORconst <x.Type> x [d]))
|
||||
(XORshiftLL (MOVDconst [c]) x [d]) => (XORconst [c] (SLLconst <x.Type> x [d]))
|
||||
(XORshiftRL (MOVDconst [c]) x [d]) => (XORconst [c] (SRLconst <x.Type> x [d]))
|
||||
(XORshiftRA (MOVDconst [c]) x [d]) => (XORconst [c] (SRAconst <x.Type> x [d]))
|
||||
(XORshiftRO (MOVDconst [c]) x [d]) => (XORconst [c] (RORconst <x.Type> x [d]))
|
||||
(CMPshiftLL (MOVDconst [c]) x [d]) => (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d])))
|
||||
(CMPshiftRL (MOVDconst [c]) x [d]) => (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d])))
|
||||
(CMPshiftRA (MOVDconst [c]) x [d]) => (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d])))
|
||||
@ -1655,11 +1671,13 @@
|
||||
(TSTshiftLL (MOVDconst [c]) x [d]) => (TSTconst [c] (SLLconst <x.Type> x [d]))
|
||||
(TSTshiftRL (MOVDconst [c]) x [d]) => (TSTconst [c] (SRLconst <x.Type> x [d]))
|
||||
(TSTshiftRA (MOVDconst [c]) x [d]) => (TSTconst [c] (SRAconst <x.Type> x [d]))
|
||||
(TSTshiftRO (MOVDconst [c]) x [d]) => (TSTconst [c] (RORconst <x.Type> x [d]))
|
||||
|
||||
// constant folding in *shift ops
|
||||
(MVNshiftLL (MOVDconst [c]) [d]) => (MOVDconst [^int64(uint64(c)<<uint64(d))])
|
||||
(MVNshiftRL (MOVDconst [c]) [d]) => (MOVDconst [^int64(uint64(c)>>uint64(d))])
|
||||
(MVNshiftRA (MOVDconst [c]) [d]) => (MOVDconst [^(c>>uint64(d))])
|
||||
(MVNshiftRO (MOVDconst [c]) [d]) => (MOVDconst [^rotateRight64(c, d)])
|
||||
(NEGshiftLL (MOVDconst [c]) [d]) => (MOVDconst [-int64(uint64(c)<<uint64(d))])
|
||||
(NEGshiftRL (MOVDconst [c]) [d]) => (MOVDconst [-int64(uint64(c)>>uint64(d))])
|
||||
(NEGshiftRA (MOVDconst [c]) [d]) => (MOVDconst [-(c>>uint64(d))])
|
||||
@ -1672,21 +1690,27 @@
|
||||
(ANDshiftLL x (MOVDconst [c]) [d]) => (ANDconst x [int64(uint64(c)<<uint64(d))])
|
||||
(ANDshiftRL x (MOVDconst [c]) [d]) => (ANDconst x [int64(uint64(c)>>uint64(d))])
|
||||
(ANDshiftRA x (MOVDconst [c]) [d]) => (ANDconst x [c>>uint64(d)])
|
||||
(ANDshiftRO x (MOVDconst [c]) [d]) => (ANDconst x [rotateRight64(c, d)])
|
||||
(ORshiftLL x (MOVDconst [c]) [d]) => (ORconst x [int64(uint64(c)<<uint64(d))])
|
||||
(ORshiftRL x (MOVDconst [c]) [d]) => (ORconst x [int64(uint64(c)>>uint64(d))])
|
||||
(ORshiftRA x (MOVDconst [c]) [d]) => (ORconst x [c>>uint64(d)])
|
||||
(ORshiftRO x (MOVDconst [c]) [d]) => (ORconst x [rotateRight64(c, d)])
|
||||
(XORshiftLL x (MOVDconst [c]) [d]) => (XORconst x [int64(uint64(c)<<uint64(d))])
|
||||
(XORshiftRL x (MOVDconst [c]) [d]) => (XORconst x [int64(uint64(c)>>uint64(d))])
|
||||
(XORshiftRA x (MOVDconst [c]) [d]) => (XORconst x [c>>uint64(d)])
|
||||
(XORshiftRO x (MOVDconst [c]) [d]) => (XORconst x [rotateRight64(c, d)])
|
||||
(BICshiftLL x (MOVDconst [c]) [d]) => (ANDconst x [^int64(uint64(c)<<uint64(d))])
|
||||
(BICshiftRL x (MOVDconst [c]) [d]) => (ANDconst x [^int64(uint64(c)>>uint64(d))])
|
||||
(BICshiftRA x (MOVDconst [c]) [d]) => (ANDconst x [^(c>>uint64(d))])
|
||||
(BICshiftRO x (MOVDconst [c]) [d]) => (ANDconst x [^rotateRight64(c, d)])
|
||||
(ORNshiftLL x (MOVDconst [c]) [d]) => (ORconst x [^int64(uint64(c)<<uint64(d))])
|
||||
(ORNshiftRL x (MOVDconst [c]) [d]) => (ORconst x [^int64(uint64(c)>>uint64(d))])
|
||||
(ORNshiftRA x (MOVDconst [c]) [d]) => (ORconst x [^(c>>uint64(d))])
|
||||
(ORNshiftRO x (MOVDconst [c]) [d]) => (ORconst x [^rotateRight64(c, d)])
|
||||
(EONshiftLL x (MOVDconst [c]) [d]) => (XORconst x [^int64(uint64(c)<<uint64(d))])
|
||||
(EONshiftRL x (MOVDconst [c]) [d]) => (XORconst x [^int64(uint64(c)>>uint64(d))])
|
||||
(EONshiftRA x (MOVDconst [c]) [d]) => (XORconst x [^(c>>uint64(d))])
|
||||
(EONshiftRO x (MOVDconst [c]) [d]) => (XORconst x [^rotateRight64(c, d)])
|
||||
(CMPshiftLL x (MOVDconst [c]) [d]) => (CMPconst x [int64(uint64(c)<<uint64(d))])
|
||||
(CMPshiftRL x (MOVDconst [c]) [d]) => (CMPconst x [int64(uint64(c)>>uint64(d))])
|
||||
(CMPshiftRA x (MOVDconst [c]) [d]) => (CMPconst x [c>>uint64(d)])
|
||||
@ -1696,29 +1720,36 @@
|
||||
(TSTshiftLL x (MOVDconst [c]) [d]) => (TSTconst x [int64(uint64(c)<<uint64(d))])
|
||||
(TSTshiftRL x (MOVDconst [c]) [d]) => (TSTconst x [int64(uint64(c)>>uint64(d))])
|
||||
(TSTshiftRA x (MOVDconst [c]) [d]) => (TSTconst x [c>>uint64(d)])
|
||||
(TSTshiftRO x (MOVDconst [c]) [d]) => (TSTconst x [rotateRight64(c, d)])
|
||||
|
||||
// simplification with *shift ops
|
||||
(SUBshiftLL x (SLLconst x [c]) [d]) && c==d => (MOVDconst [0])
|
||||
(SUBshiftRL x (SRLconst x [c]) [d]) && c==d => (MOVDconst [0])
|
||||
(SUBshiftRA x (SRAconst x [c]) [d]) && c==d => (MOVDconst [0])
|
||||
(ANDshiftLL x y:(SLLconst x [c]) [d]) && c==d => y
|
||||
(ANDshiftRL x y:(SRLconst x [c]) [d]) && c==d => y
|
||||
(ANDshiftRA x y:(SRAconst x [c]) [d]) && c==d => y
|
||||
(ORshiftLL x y:(SLLconst x [c]) [d]) && c==d => y
|
||||
(ORshiftRL x y:(SRLconst x [c]) [d]) && c==d => y
|
||||
(ORshiftRA x y:(SRAconst x [c]) [d]) && c==d => y
|
||||
(XORshiftLL x (SLLconst x [c]) [d]) && c==d => (MOVDconst [0])
|
||||
(XORshiftRL x (SRLconst x [c]) [d]) && c==d => (MOVDconst [0])
|
||||
(XORshiftRA x (SRAconst x [c]) [d]) && c==d => (MOVDconst [0])
|
||||
(BICshiftLL x (SLLconst x [c]) [d]) && c==d => (MOVDconst [0])
|
||||
(BICshiftRL x (SRLconst x [c]) [d]) && c==d => (MOVDconst [0])
|
||||
(BICshiftRA x (SRAconst x [c]) [d]) && c==d => (MOVDconst [0])
|
||||
(EONshiftLL x (SLLconst x [c]) [d]) && c==d => (MOVDconst [-1])
|
||||
(EONshiftRL x (SRLconst x [c]) [d]) && c==d => (MOVDconst [-1])
|
||||
(EONshiftRA x (SRAconst x [c]) [d]) && c==d => (MOVDconst [-1])
|
||||
(ORNshiftLL x (SLLconst x [c]) [d]) && c==d => (MOVDconst [-1])
|
||||
(ORNshiftRL x (SRLconst x [c]) [d]) && c==d => (MOVDconst [-1])
|
||||
(ORNshiftRA x (SRAconst x [c]) [d]) && c==d => (MOVDconst [-1])
|
||||
(SUBshiftLL (SLLconst x [c]) x [c]) => (MOVDconst [0])
|
||||
(SUBshiftRL (SRLconst x [c]) x [c]) => (MOVDconst [0])
|
||||
(SUBshiftRA (SRAconst x [c]) x [c]) => (MOVDconst [0])
|
||||
(ANDshiftLL y:(SLLconst x [c]) x [c]) => y
|
||||
(ANDshiftRL y:(SRLconst x [c]) x [c]) => y
|
||||
(ANDshiftRA y:(SRAconst x [c]) x [c]) => y
|
||||
(ANDshiftRO y:(RORconst x [c]) x [c]) => y
|
||||
(ORshiftLL y:(SLLconst x [c]) x [c]) => y
|
||||
(ORshiftRL y:(SRLconst x [c]) x [c]) => y
|
||||
(ORshiftRA y:(SRAconst x [c]) x [c]) => y
|
||||
(ORshiftRO y:(RORconst x [c]) x [c]) => y
|
||||
(XORshiftLL (SLLconst x [c]) x [c]) => (MOVDconst [0])
|
||||
(XORshiftRL (SRLconst x [c]) x [c]) => (MOVDconst [0])
|
||||
(XORshiftRA (SRAconst x [c]) x [c]) => (MOVDconst [0])
|
||||
(XORshiftRO (RORconst x [c]) x [c]) => (MOVDconst [0])
|
||||
(BICshiftLL (SLLconst x [c]) x [c]) => (MOVDconst [0])
|
||||
(BICshiftRL (SRLconst x [c]) x [c]) => (MOVDconst [0])
|
||||
(BICshiftRA (SRAconst x [c]) x [c]) => (MOVDconst [0])
|
||||
(BICshiftRO (RORconst x [c]) x [c]) => (MOVDconst [0])
|
||||
(EONshiftLL (SLLconst x [c]) x [c]) => (MOVDconst [-1])
|
||||
(EONshiftRL (SRLconst x [c]) x [c]) => (MOVDconst [-1])
|
||||
(EONshiftRA (SRAconst x [c]) x [c]) => (MOVDconst [-1])
|
||||
(EONshiftRO (RORconst x [c]) x [c]) => (MOVDconst [-1])
|
||||
(ORNshiftLL (SLLconst x [c]) x [c]) => (MOVDconst [-1])
|
||||
(ORNshiftRL (SRLconst x [c]) x [c]) => (MOVDconst [-1])
|
||||
(ORNshiftRA (SRAconst x [c]) x [c]) => (MOVDconst [-1])
|
||||
(ORNshiftRO (RORconst x [c]) x [c]) => (MOVDconst [-1])
|
||||
|
||||
// Generate rotates with const shift
|
||||
(ADDshiftLL [c] (SRLconst x [64-c]) x) => (RORconst [64-c] x)
|
||||
@ -1824,6 +1855,7 @@
|
||||
// sbfiz
|
||||
// (x << lc) >> rc
|
||||
(SRAconst [rc] (SLLconst [lc] x)) && lc > rc => (SBFIZ [armBFAuxInt(lc-rc, 64-lc)] x)
|
||||
// int64(x << lc)
|
||||
(MOVWreg (SLLconst [lc] x)) && lc < 32 => (SBFIZ [armBFAuxInt(lc, 32-lc)] x)
|
||||
(MOVHreg (SLLconst [lc] x)) && lc < 16 => (SBFIZ [armBFAuxInt(lc, 16-lc)] x)
|
||||
(MOVBreg (SLLconst [lc] x)) && lc < 8 => (SBFIZ [armBFAuxInt(lc, 8-lc)] x)
|
||||
@ -1835,6 +1867,7 @@
|
||||
// sbfx
|
||||
// (x << lc) >> rc
|
||||
(SRAconst [rc] (SLLconst [lc] x)) && lc <= rc => (SBFX [armBFAuxInt(rc-lc, 64-rc)] x)
|
||||
// int64(x) >> rc
|
||||
(SRAconst [rc] (MOVWreg x)) && rc < 32 => (SBFX [armBFAuxInt(rc, 32-rc)] x)
|
||||
(SRAconst [rc] (MOVHreg x)) && rc < 16 => (SBFX [armBFAuxInt(rc, 16-rc)] x)
|
||||
(SRAconst [rc] (MOVBreg x)) && rc < 8 => (SBFX [armBFAuxInt(rc, 8-rc)] x)
|
||||
@ -1851,42 +1884,43 @@
|
||||
=> (SBFX [armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc)] x)
|
||||
|
||||
// ubfiz
|
||||
// (x << lc) >> rc
|
||||
(SRLconst [rc] (SLLconst [lc] x)) && lc > rc => (UBFIZ [armBFAuxInt(lc-rc, 64-lc)] x)
|
||||
// uint64(x) << lc
|
||||
(SLLconst [lc] (MOVWUreg x)) => (UBFIZ [armBFAuxInt(lc, min(32, 64-lc))] x)
|
||||
(SLLconst [lc] (MOVHUreg x)) => (UBFIZ [armBFAuxInt(lc, min(16, 64-lc))] x)
|
||||
(SLLconst [lc] (MOVBUreg x)) => (UBFIZ [armBFAuxInt(lc, min(8, 64-lc))] x)
|
||||
// uint64(x << lc)
|
||||
(MOVWUreg (SLLconst [lc] x)) && lc < 32 => (UBFIZ [armBFAuxInt(lc, 32-lc)] x)
|
||||
(MOVHUreg (SLLconst [lc] x)) && lc < 16 => (UBFIZ [armBFAuxInt(lc, 16-lc)] x)
|
||||
(MOVBUreg (SLLconst [lc] x)) && lc < 8 => (UBFIZ [armBFAuxInt(lc, 8-lc)] x)
|
||||
|
||||
// merge ANDconst into ubfiz
|
||||
// (x & ac) << sc
|
||||
(SLLconst [sc] (ANDconst [ac] x)) && isARM64BFMask(sc, ac, 0)
|
||||
=> (UBFIZ [armBFAuxInt(sc, arm64BFWidth(ac, 0))] x)
|
||||
(SLLconst [sc] (MOVWUreg x)) && isARM64BFMask(sc, 1<<32-1, 0) => (UBFIZ [armBFAuxInt(sc, 32)] x)
|
||||
(SLLconst [sc] (MOVHUreg x)) && isARM64BFMask(sc, 1<<16-1, 0) => (UBFIZ [armBFAuxInt(sc, 16)] x)
|
||||
(SLLconst [sc] (MOVBUreg x)) && isARM64BFMask(sc, 1<<8-1, 0) => (UBFIZ [armBFAuxInt(sc, 8)] x)
|
||||
// (x << sc) & ac
|
||||
(ANDconst [ac] (SLLconst [sc] x)) && isARM64BFMask(sc, ac, sc)
|
||||
=> (UBFIZ [armBFAuxInt(sc, arm64BFWidth(ac, sc))] x)
|
||||
(MOVWUreg (SLLconst [sc] x)) && isARM64BFMask(sc, 1<<32-1, sc)
|
||||
=> (UBFIZ [armBFAuxInt(sc, arm64BFWidth(1<<32-1, sc))] x)
|
||||
(MOVHUreg (SLLconst [sc] x)) && isARM64BFMask(sc, 1<<16-1, sc)
|
||||
=> (UBFIZ [armBFAuxInt(sc, arm64BFWidth(1<<16-1, sc))] x)
|
||||
(MOVBUreg (SLLconst [sc] x)) && isARM64BFMask(sc, 1<<8-1, sc)
|
||||
=> (UBFIZ [armBFAuxInt(sc, arm64BFWidth(1<<8-1, sc))] x)
|
||||
// (x << lc) >> rc
|
||||
(SRLconst [rc] (SLLconst [lc] x)) && lc > rc => (UBFIZ [armBFAuxInt(lc-rc, 64-lc)] x)
|
||||
|
||||
// ubfx
|
||||
// (x << lc) >> rc
|
||||
(SRLconst [rc] (SLLconst [lc] x)) && lc < rc => (UBFX [armBFAuxInt(rc-lc, 64-rc)] x)
|
||||
// uint64(x) >> rc
|
||||
(SRLconst [rc] (MOVWUreg x)) && rc < 32 => (UBFX [armBFAuxInt(rc, 32-rc)] x)
|
||||
(SRLconst [rc] (MOVHUreg x)) && rc < 16 => (UBFX [armBFAuxInt(rc, 16-rc)] x)
|
||||
(SRLconst [rc] (MOVBUreg x)) && rc < 8 => (UBFX [armBFAuxInt(rc, 8-rc)] x)
|
||||
// uint64(x >> rc)
|
||||
(MOVWUreg (SRLconst [rc] x)) && rc < 32 => (UBFX [armBFAuxInt(rc, 32)] x)
|
||||
(MOVHUreg (SRLconst [rc] x)) && rc < 16 => (UBFX [armBFAuxInt(rc, 16)] x)
|
||||
(MOVBUreg (SRLconst [rc] x)) && rc < 8 => (UBFX [armBFAuxInt(rc, 8)] x)
|
||||
// merge ANDconst into ubfx
|
||||
// (x >> sc) & ac
|
||||
(ANDconst [ac] (SRLconst [sc] x)) && isARM64BFMask(sc, ac, 0)
|
||||
=> (UBFX [armBFAuxInt(sc, arm64BFWidth(ac, 0))] x)
|
||||
(MOVWUreg (SRLconst [sc] x)) && isARM64BFMask(sc, 1<<32-1, 0) => (UBFX [armBFAuxInt(sc, 32)] x)
|
||||
(MOVHUreg (SRLconst [sc] x)) && isARM64BFMask(sc, 1<<16-1, 0) => (UBFX [armBFAuxInt(sc, 16)] x)
|
||||
(MOVBUreg (SRLconst [sc] x)) && isARM64BFMask(sc, 1<<8-1, 0) => (UBFX [armBFAuxInt(sc, 8)] x)
|
||||
// (x & ac) >> sc
|
||||
(SRLconst [sc] (ANDconst [ac] x)) && isARM64BFMask(sc, ac, sc)
|
||||
=> (UBFX [armBFAuxInt(sc, arm64BFWidth(ac, sc))] x)
|
||||
(SRLconst [sc] (MOVWUreg x)) && isARM64BFMask(sc, 1<<32-1, sc)
|
||||
=> (UBFX [armBFAuxInt(sc, arm64BFWidth(1<<32-1, sc))] x)
|
||||
(SRLconst [sc] (MOVHUreg x)) && isARM64BFMask(sc, 1<<16-1, sc)
|
||||
=> (UBFX [armBFAuxInt(sc, arm64BFWidth(1<<16-1, sc))] x)
|
||||
(SRLconst [sc] (MOVBUreg x)) && isARM64BFMask(sc, 1<<8-1, sc)
|
||||
=> (UBFX [armBFAuxInt(sc, arm64BFWidth(1<<8-1, sc))] x)
|
||||
// (x << lc) >> rc
|
||||
(SRLconst [rc] (SLLconst [lc] x)) && lc < rc => (UBFX [armBFAuxInt(rc-lc, 64-rc)] x)
|
||||
|
||||
// merge ubfx and zero-extension into ubfx
|
||||
(MOVWUreg (UBFX [bfc] x)) && bfc.getARM64BFwidth() <= 32 => (UBFX [bfc] x)
|
||||
|
@ -302,6 +302,7 @@ func init() {
|
||||
{name: "MVNshiftLL", argLength: 1, reg: gp11, asm: "MVN", aux: "Int64"}, // ^(arg0<<auxInt), auxInt should be in the range 0 to 63.
|
||||
{name: "MVNshiftRL", argLength: 1, reg: gp11, asm: "MVN", aux: "Int64"}, // ^(arg0>>auxInt), unsigned shift, auxInt should be in the range 0 to 63.
|
||||
{name: "MVNshiftRA", argLength: 1, reg: gp11, asm: "MVN", aux: "Int64"}, // ^(arg0>>auxInt), signed shift, auxInt should be in the range 0 to 63.
|
||||
{name: "MVNshiftRO", argLength: 1, reg: gp11, asm: "MVN", aux: "Int64"}, // ^(arg0 ROR auxInt), signed shift, auxInt should be in the range 0 to 63.
|
||||
{name: "NEGshiftLL", argLength: 1, reg: gp11, asm: "NEG", aux: "Int64"}, // -(arg0<<auxInt), auxInt should be in the range 0 to 63.
|
||||
{name: "NEGshiftRL", argLength: 1, reg: gp11, asm: "NEG", aux: "Int64"}, // -(arg0>>auxInt), unsigned shift, auxInt should be in the range 0 to 63.
|
||||
{name: "NEGshiftRA", argLength: 1, reg: gp11, asm: "NEG", aux: "Int64"}, // -(arg0>>auxInt), signed shift, auxInt should be in the range 0 to 63.
|
||||
@ -314,21 +315,27 @@ func init() {
|
||||
{name: "ANDshiftLL", argLength: 2, reg: gp21, asm: "AND", aux: "Int64"}, // arg0 & (arg1<<auxInt), auxInt should be in the range 0 to 63.
|
||||
{name: "ANDshiftRL", argLength: 2, reg: gp21, asm: "AND", aux: "Int64"}, // arg0 & (arg1>>auxInt), unsigned shift, auxInt should be in the range 0 to 63.
|
||||
{name: "ANDshiftRA", argLength: 2, reg: gp21, asm: "AND", aux: "Int64"}, // arg0 & (arg1>>auxInt), signed shift, auxInt should be in the range 0 to 63.
|
||||
{name: "ANDshiftRO", argLength: 2, reg: gp21, asm: "AND", aux: "Int64"}, // arg0 & (arg1 ROR auxInt), signed shift, auxInt should be in the range 0 to 63.
|
||||
{name: "ORshiftLL", argLength: 2, reg: gp21, asm: "ORR", aux: "Int64"}, // arg0 | arg1<<auxInt, auxInt should be in the range 0 to 63.
|
||||
{name: "ORshiftRL", argLength: 2, reg: gp21, asm: "ORR", aux: "Int64"}, // arg0 | arg1>>auxInt, unsigned shift, auxInt should be in the range 0 to 63.
|
||||
{name: "ORshiftRA", argLength: 2, reg: gp21, asm: "ORR", aux: "Int64"}, // arg0 | arg1>>auxInt, signed shift, auxInt should be in the range 0 to 63.
|
||||
{name: "ORshiftRO", argLength: 2, reg: gp21, asm: "ORR", aux: "Int64"}, // arg0 | arg1 ROR auxInt, signed shift, auxInt should be in the range 0 to 63.
|
||||
{name: "XORshiftLL", argLength: 2, reg: gp21, asm: "EOR", aux: "Int64"}, // arg0 ^ arg1<<auxInt, auxInt should be in the range 0 to 63.
|
||||
{name: "XORshiftRL", argLength: 2, reg: gp21, asm: "EOR", aux: "Int64"}, // arg0 ^ arg1>>auxInt, unsigned shift, auxInt should be in the range 0 to 63.
|
||||
{name: "XORshiftRA", argLength: 2, reg: gp21, asm: "EOR", aux: "Int64"}, // arg0 ^ arg1>>auxInt, signed shift, auxInt should be in the range 0 to 63.
|
||||
{name: "XORshiftRO", argLength: 2, reg: gp21, asm: "EOR", aux: "Int64"}, // arg0 ^ arg1 ROR auxInt, signed shift, auxInt should be in the range 0 to 63.
|
||||
{name: "BICshiftLL", argLength: 2, reg: gp21, asm: "BIC", aux: "Int64"}, // arg0 &^ (arg1<<auxInt), auxInt should be in the range 0 to 63.
|
||||
{name: "BICshiftRL", argLength: 2, reg: gp21, asm: "BIC", aux: "Int64"}, // arg0 &^ (arg1>>auxInt), unsigned shift, auxInt should be in the range 0 to 63.
|
||||
{name: "BICshiftRA", argLength: 2, reg: gp21, asm: "BIC", aux: "Int64"}, // arg0 &^ (arg1>>auxInt), signed shift, auxInt should be in the range 0 to 63.
|
||||
{name: "BICshiftRO", argLength: 2, reg: gp21, asm: "BIC", aux: "Int64"}, // arg0 &^ (arg1 ROR auxInt), signed shift, auxInt should be in the range 0 to 63.
|
||||
{name: "EONshiftLL", argLength: 2, reg: gp21, asm: "EON", aux: "Int64"}, // arg0 ^ ^(arg1<<auxInt), auxInt should be in the range 0 to 63.
|
||||
{name: "EONshiftRL", argLength: 2, reg: gp21, asm: "EON", aux: "Int64"}, // arg0 ^ ^(arg1>>auxInt), unsigned shift, auxInt should be in the range 0 to 63.
|
||||
{name: "EONshiftRA", argLength: 2, reg: gp21, asm: "EON", aux: "Int64"}, // arg0 ^ ^(arg1>>auxInt), signed shift, auxInt should be in the range 0 to 63.
|
||||
{name: "EONshiftRO", argLength: 2, reg: gp21, asm: "EON", aux: "Int64"}, // arg0 ^ ^(arg1 ROR auxInt), signed shift, auxInt should be in the range 0 to 63.
|
||||
{name: "ORNshiftLL", argLength: 2, reg: gp21, asm: "ORN", aux: "Int64"}, // arg0 | ^(arg1<<auxInt), auxInt should be in the range 0 to 63.
|
||||
{name: "ORNshiftRL", argLength: 2, reg: gp21, asm: "ORN", aux: "Int64"}, // arg0 | ^(arg1>>auxInt), unsigned shift, auxInt should be in the range 0 to 63.
|
||||
{name: "ORNshiftRA", argLength: 2, reg: gp21, asm: "ORN", aux: "Int64"}, // arg0 | ^(arg1>>auxInt), signed shift, auxInt should be in the range 0 to 63.
|
||||
{name: "ORNshiftRO", argLength: 2, reg: gp21, asm: "ORN", aux: "Int64"}, // arg0 | ^(arg1 ROR auxInt), signed shift, auxInt should be in the range 0 to 63.
|
||||
{name: "CMPshiftLL", argLength: 2, reg: gp2flags, asm: "CMP", aux: "Int64", typ: "Flags"}, // arg0 compare to arg1<<auxInt, auxInt should be in the range 0 to 63.
|
||||
{name: "CMPshiftRL", argLength: 2, reg: gp2flags, asm: "CMP", aux: "Int64", typ: "Flags"}, // arg0 compare to arg1>>auxInt, unsigned shift, auxInt should be in the range 0 to 63.
|
||||
{name: "CMPshiftRA", argLength: 2, reg: gp2flags, asm: "CMP", aux: "Int64", typ: "Flags"}, // arg0 compare to arg1>>auxInt, signed shift, auxInt should be in the range 0 to 63.
|
||||
@ -338,6 +345,7 @@ func init() {
|
||||
{name: "TSTshiftLL", argLength: 2, reg: gp2flags, asm: "TST", aux: "Int64", typ: "Flags"}, // (arg0 & arg1<<auxInt) compare to 0, auxInt should be in the range 0 to 63.
|
||||
{name: "TSTshiftRL", argLength: 2, reg: gp2flags, asm: "TST", aux: "Int64", typ: "Flags"}, // (arg0 & arg1>>auxInt) compare to 0, unsigned shift, auxInt should be in the range 0 to 63.
|
||||
{name: "TSTshiftRA", argLength: 2, reg: gp2flags, asm: "TST", aux: "Int64", typ: "Flags"}, // (arg0 & arg1>>auxInt) compare to 0, signed shift, auxInt should be in the range 0 to 63.
|
||||
{name: "TSTshiftRO", argLength: 2, reg: gp2flags, asm: "TST", aux: "Int64", typ: "Flags"}, // (arg0 & arg1 ROR auxInt) compare to 0, signed shift, auxInt should be in the range 0 to 63.
|
||||
|
||||
// bitfield ops
|
||||
// for all bitfield ops lsb is auxInt>>8, width is auxInt&0xff
|
||||
@ -484,6 +492,7 @@ func init() {
|
||||
|
||||
// function calls
|
||||
{name: "CALLstatic", argLength: -1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call static function aux.(*obj.LSym). last arg=mem, auxint=argsize, returns mem
|
||||
{name: "CALLtail", argLength: -1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // tail call static function aux.(*obj.LSym). last arg=mem, auxint=argsize, returns mem
|
||||
{name: "CALLclosure", argLength: -1, reg: regInfo{inputs: []regMask{gpsp, buildReg("R26"), 0}, clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call function via closure. arg0=codeptr, arg1=closure, last arg=mem, auxint=argsize, returns mem
|
||||
{name: "CALLinter", argLength: -1, reg: regInfo{inputs: []regMask{gp}, clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call fn by pointer. arg0=codeptr, last arg=mem, auxint=argsize, returns mem
|
||||
|
||||
|
@ -228,14 +228,15 @@ func init() {
|
||||
|
||||
// shifts
|
||||
{name: "SLL", argLength: 2, reg: gp21, asm: "SLL"}, // arg0 << arg1, shift amount is mod 256
|
||||
{name: "SLLconst", argLength: 1, reg: gp11, asm: "SLL", aux: "Int32"}, // arg0 << auxInt
|
||||
{name: "SLLconst", argLength: 1, reg: gp11, asm: "SLL", aux: "Int32"}, // arg0 << auxInt, 0 <= auxInt < 32
|
||||
{name: "SRL", argLength: 2, reg: gp21, asm: "SRL"}, // arg0 >> arg1, unsigned, shift amount is mod 256
|
||||
{name: "SRLconst", argLength: 1, reg: gp11, asm: "SRL", aux: "Int32"}, // arg0 >> auxInt, unsigned
|
||||
{name: "SRLconst", argLength: 1, reg: gp11, asm: "SRL", aux: "Int32"}, // arg0 >> auxInt, unsigned, 0 <= auxInt < 32
|
||||
{name: "SRA", argLength: 2, reg: gp21, asm: "SRA"}, // arg0 >> arg1, signed, shift amount is mod 256
|
||||
{name: "SRAconst", argLength: 1, reg: gp11, asm: "SRA", aux: "Int32"}, // arg0 >> auxInt, signed
|
||||
{name: "SRAconst", argLength: 1, reg: gp11, asm: "SRA", aux: "Int32"}, // arg0 >> auxInt, signed, 0 <= auxInt < 32
|
||||
{name: "SRR", argLength: 2, reg: gp21}, // arg0 right rotate by arg1 bits
|
||||
{name: "SRRconst", argLength: 1, reg: gp11, aux: "Int32"}, // arg0 right rotate by auxInt bits
|
||||
{name: "SRRconst", argLength: 1, reg: gp11, aux: "Int32"}, // arg0 right rotate by auxInt bits, 0 <= auxInt < 32
|
||||
|
||||
// auxInt for all of these satisfy 0 <= auxInt < 32
|
||||
{name: "ADDshiftLL", argLength: 2, reg: gp21, asm: "ADD", aux: "Int32"}, // arg0 + arg1<<auxInt
|
||||
{name: "ADDshiftRL", argLength: 2, reg: gp21, asm: "ADD", aux: "Int32"}, // arg0 + arg1>>auxInt, unsigned shift
|
||||
{name: "ADDshiftRA", argLength: 2, reg: gp21, asm: "ADD", aux: "Int32"}, // arg0 + arg1>>auxInt, signed shift
|
||||
@ -431,6 +432,7 @@ func init() {
|
||||
|
||||
// function calls
|
||||
{name: "CALLstatic", argLength: 1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call static function aux.(*obj.LSym). arg0=mem, auxint=argsize, returns mem
|
||||
{name: "CALLtail", argLength: 1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // tail call static function aux.(*obj.LSym). arg0=mem, auxint=argsize, returns mem
|
||||
{name: "CALLclosure", argLength: 3, reg: regInfo{inputs: []regMask{gpsp, buildReg("R7"), 0}, clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call function via closure. arg0=codeptr, arg1=closure, arg2=mem, auxint=argsize, returns mem
|
||||
{name: "CALLinter", argLength: 2, reg: regInfo{inputs: []regMask{gp}, clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call fn by pointer. arg0=codeptr, arg1=mem, auxint=argsize, returns mem
|
||||
|
||||
|
@ -334,6 +334,7 @@
|
||||
(StaticCall ...) => (CALLstatic ...)
|
||||
(ClosureCall ...) => (CALLclosure ...)
|
||||
(InterCall ...) => (CALLinter ...)
|
||||
(TailCall ...) => (CALLtail ...)
|
||||
|
||||
// atomic intrinsics
|
||||
(AtomicLoad(8|32) ...) => (LoweredAtomicLoad(8|32) ...)
|
||||
|
@ -379,6 +379,7 @@
|
||||
(StaticCall ...) => (CALLstatic ...)
|
||||
(ClosureCall ...) => (CALLclosure ...)
|
||||
(InterCall ...) => (CALLinter ...)
|
||||
(TailCall ...) => (CALLtail ...)
|
||||
|
||||
// atomic intrinsics
|
||||
(AtomicLoad(8|32|64) ...) => (LoweredAtomicLoad(8|32|64) ...)
|
||||
|
@ -276,6 +276,7 @@ func init() {
|
||||
|
||||
// function calls
|
||||
{name: "CALLstatic", argLength: 1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call static function aux.(*obj.LSym). arg0=mem, auxint=argsize, returns mem
|
||||
{name: "CALLtail", argLength: 1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // tail call static function aux.(*obj.LSym). arg0=mem, auxint=argsize, returns mem
|
||||
{name: "CALLclosure", argLength: 3, reg: regInfo{inputs: []regMask{gpsp, buildReg("R22"), 0}, clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call function via closure. arg0=codeptr, arg1=closure, arg2=mem, auxint=argsize, returns mem
|
||||
{name: "CALLinter", argLength: 2, reg: regInfo{inputs: []regMask{gp}, clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call fn by pointer. arg0=codeptr, arg1=mem, auxint=argsize, returns mem
|
||||
|
||||
|
@ -258,6 +258,7 @@ func init() {
|
||||
|
||||
// function calls
|
||||
{name: "CALLstatic", argLength: 1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call static function aux.(*obj.LSym). arg0=mem, auxint=argsize, returns mem
|
||||
{name: "CALLtail", argLength: 1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // tail call static function aux.(*obj.LSym). arg0=mem, auxint=argsize, returns mem
|
||||
{name: "CALLclosure", argLength: 3, reg: regInfo{inputs: []regMask{gpsp, buildReg("R22"), 0}, clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call function via closure. arg0=codeptr, arg1=closure, arg2=mem, auxint=argsize, returns mem
|
||||
{name: "CALLinter", argLength: 2, reg: regInfo{inputs: []regMask{gp}, clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call fn by pointer. arg0=codeptr, arg1=mem, auxint=argsize, returns mem
|
||||
|
||||
|
@ -670,6 +670,7 @@
|
||||
(StaticCall ...) => (CALLstatic ...)
|
||||
(ClosureCall ...) => (CALLclosure ...)
|
||||
(InterCall ...) => (CALLinter ...)
|
||||
(TailCall ...) => (CALLtail ...)
|
||||
|
||||
// Miscellaneous
|
||||
(GetClosurePtr ...) => (LoweredGetClosurePtr ...)
|
||||
|
@ -429,6 +429,7 @@ func init() {
|
||||
{name: "LoweredRound64F", argLength: 1, reg: fp11, resultInArg0: true, zeroWidth: true},
|
||||
|
||||
{name: "CALLstatic", argLength: 1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call static function aux.(*obj.LSym). arg0=mem, auxint=argsize, returns mem
|
||||
{name: "CALLtail", argLength: 1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // tail call static function aux.(*obj.LSym). arg0=mem, auxint=argsize, returns mem
|
||||
{name: "CALLclosure", argLength: 3, reg: regInfo{inputs: []regMask{callptr, ctxt, 0}, clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call function via closure. arg0=codeptr, arg1=closure, arg2=mem, auxint=argsize, returns mem
|
||||
{name: "CALLinter", argLength: 2, reg: regInfo{inputs: []regMask{callptr}, clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call fn by pointer. arg0=codeptr, arg1=mem, auxint=argsize, returns mem
|
||||
|
||||
|
@ -96,6 +96,10 @@
|
||||
(Sqrt ...) => (FSQRTD ...)
|
||||
(Sqrt32 ...) => (FSQRTS ...)
|
||||
|
||||
(Copysign ...) => (FSGNJD ...)
|
||||
|
||||
(Abs ...) => (FABSD ...)
|
||||
|
||||
(FMA ...) => (FMADDD ...)
|
||||
|
||||
// Sign and zero extension.
|
||||
@ -542,6 +546,7 @@
|
||||
(StaticCall ...) => (CALLstatic ...)
|
||||
(ClosureCall ...) => (CALLclosure ...)
|
||||
(InterCall ...) => (CALLinter ...)
|
||||
(TailCall ...) => (CALLtail ...)
|
||||
|
||||
// Atomic Intrinsics
|
||||
(AtomicLoad8 ...) => (LoweredAtomicLoad8 ...)
|
||||
|
@ -241,6 +241,7 @@ func init() {
|
||||
|
||||
// Calls
|
||||
{name: "CALLstatic", argLength: 1, reg: call, aux: "CallOff", call: true}, // call static function aux.(*gc.Sym). arg0=mem, auxint=argsize, returns mem
|
||||
{name: "CALLtail", argLength: 1, reg: call, aux: "CallOff", call: true}, // tail call static function aux.(*gc.Sym). arg0=mem, auxint=argsize, returns mem
|
||||
{name: "CALLclosure", argLength: 3, reg: callClosure, aux: "CallOff", call: true}, // call function via closure. arg0=codeptr, arg1=closure, arg2=mem, auxint=argsize, returns mem
|
||||
{name: "CALLinter", argLength: 2, reg: callInter, aux: "CallOff", call: true}, // call fn by pointer. arg0=codeptr, arg1=mem, auxint=argsize, returns mem
|
||||
|
||||
@ -432,6 +433,8 @@ func init() {
|
||||
{name: "FNMSUBD", argLength: 3, reg: fp31, asm: "FNMSUBD", commutative: true, typ: "Float64"}, // -(arg0 * arg1) - arg2
|
||||
{name: "FSQRTD", argLength: 1, reg: fp11, asm: "FSQRTD", typ: "Float64"}, // sqrt(arg0)
|
||||
{name: "FNEGD", argLength: 1, reg: fp11, asm: "FNEGD", typ: "Float64"}, // -arg0
|
||||
{name: "FABSD", argLength: 1, reg: fp11, asm: "FABSD", typ: "Float64"}, // abs(arg0)
|
||||
{name: "FSGNJD", argLength: 2, reg: fp21, asm: "FSGNJD", typ: "Float64"}, // copy sign of arg1 to arg0
|
||||
{name: "FMVDX", argLength: 1, reg: gpfp, asm: "FMVDX", typ: "Float64"}, // reinterpret arg0 as float
|
||||
{name: "FCVTDW", argLength: 1, reg: gpfp, asm: "FCVTDW", typ: "Float64"}, // float64(low 32 bits of arg0)
|
||||
{name: "FCVTDL", argLength: 1, reg: gpfp, asm: "FCVTDL", typ: "Float64"}, // float64(arg0)
|
||||
|
@ -434,6 +434,7 @@
|
||||
(StaticCall ...) => (CALLstatic ...)
|
||||
(ClosureCall ...) => (CALLclosure ...)
|
||||
(InterCall ...) => (CALLinter ...)
|
||||
(TailCall ...) => (CALLtail ...)
|
||||
|
||||
// Miscellaneous
|
||||
(IsNonNil p) => (LOCGR {s390x.NotEqual} (MOVDconst [0]) (MOVDconst [1]) (CMPconst p [0]))
|
||||
|
@ -480,6 +480,7 @@ func init() {
|
||||
{name: "CLEAR", argLength: 2, reg: regInfo{inputs: []regMask{ptr, 0}}, asm: "CLEAR", aux: "SymValAndOff", typ: "Mem", clobberFlags: true, faultOnNilArg0: true, symEffect: "Write"},
|
||||
|
||||
{name: "CALLstatic", argLength: 1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call static function aux.(*obj.LSym). arg0=mem, auxint=argsize, returns mem
|
||||
{name: "CALLtail", argLength: 1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // tail call static function aux.(*obj.LSym). arg0=mem, auxint=argsize, returns mem
|
||||
{name: "CALLclosure", argLength: 3, reg: regInfo{inputs: []regMask{ptrsp, buildReg("R12"), 0}, clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call function via closure. arg0=codeptr, arg1=closure, arg2=mem, auxint=argsize, returns mem
|
||||
{name: "CALLinter", argLength: 2, reg: regInfo{inputs: []regMask{ptr}, clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call fn by pointer. arg0=codeptr, arg1=mem, auxint=argsize, returns mem
|
||||
|
||||
|
@ -307,6 +307,7 @@
|
||||
(StaticCall ...) => (LoweredStaticCall ...)
|
||||
(ClosureCall ...) => (LoweredClosureCall ...)
|
||||
(InterCall ...) => (LoweredInterCall ...)
|
||||
(TailCall ...) => (LoweredTailCall ...)
|
||||
|
||||
// Miscellaneous
|
||||
(Convert ...) => (LoweredConvert ...)
|
||||
|
@ -124,6 +124,7 @@ func init() {
|
||||
|
||||
var WasmOps = []opData{
|
||||
{name: "LoweredStaticCall", argLength: 1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", call: true}, // call static function aux.(*obj.LSym). arg0=mem, auxint=argsize, returns mem
|
||||
{name: "LoweredTailCall", argLength: 1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", call: true}, // tail call static function aux.(*obj.LSym). arg0=mem, auxint=argsize, returns mem
|
||||
{name: "LoweredClosureCall", argLength: 3, reg: regInfo{inputs: []regMask{gp, gp, 0}, clobbers: callerSave}, aux: "CallOff", call: true}, // call function via closure. arg0=codeptr, arg1=closure, arg2=mem, auxint=argsize, returns mem
|
||||
{name: "LoweredInterCall", argLength: 2, reg: regInfo{inputs: []regMask{gp}, clobbers: callerSave}, aux: "CallOff", call: true}, // call fn by pointer. arg0=codeptr, arg1=mem, auxint=argsize, returns mem
|
||||
|
||||
|
@ -106,7 +106,7 @@ var genericOps = []opData{
|
||||
|
||||
// For shifts, AxB means the shifted value has A bits and the shift amount has B bits.
|
||||
// Shift amounts are considered unsigned.
|
||||
// If arg1 is known to be less than the number of bits in arg0,
|
||||
// If arg1 is known to be nonnegative and less than the number of bits in arg0,
|
||||
// then auxInt may be set to 1.
|
||||
// This enables better code generation on some platforms.
|
||||
{name: "Lsh8x8", argLength: 2, aux: "Bool"}, // arg0 << arg1
|
||||
@ -417,10 +417,12 @@ var genericOps = []opData{
|
||||
{name: "ClosureCall", argLength: -1, aux: "CallOff", call: true}, // arg0=code pointer, arg1=context ptr, arg2..argN-1 are register inputs, argN=memory. auxint=arg size. Returns Result of register results, plus memory.
|
||||
{name: "StaticCall", argLength: -1, aux: "CallOff", call: true}, // call function aux.(*obj.LSym), arg0..argN-1 are register inputs, argN=memory. auxint=arg size. Returns Result of register results, plus memory.
|
||||
{name: "InterCall", argLength: -1, aux: "CallOff", call: true}, // interface call. arg0=code pointer, arg1..argN-1 are register inputs, argN=memory, auxint=arg size. Returns Result of register results, plus memory.
|
||||
{name: "TailCall", argLength: -1, aux: "CallOff", call: true}, // tail call function aux.(*obj.LSym), arg0..argN-1 are register inputs, argN=memory. auxint=arg size. Returns Result of register results, plus memory.
|
||||
|
||||
{name: "ClosureLECall", argLength: -1, aux: "CallOff", call: true}, // late-expanded closure call. arg0=code pointer, arg1=context ptr, arg2..argN-1 are inputs, argN is mem. auxint = arg size. Result is tuple of result(s), plus mem.
|
||||
{name: "StaticLECall", argLength: -1, aux: "CallOff", call: true}, // late-expanded static call function aux.(*ssa.AuxCall.Fn). arg0..argN-1 are inputs, argN is mem. auxint = arg size. Result is tuple of result(s), plus mem.
|
||||
{name: "InterLECall", argLength: -1, aux: "CallOff", call: true}, // late-expanded interface call. arg0=code pointer, arg1..argN-1 are inputs, argN is mem. auxint = arg size. Result is tuple of result(s), plus mem.
|
||||
{name: "TailLECall", argLength: -1, aux: "CallOff", call: true}, // late-expanded static tail call function aux.(*ssa.AuxCall.Fn). arg0..argN-1 are inputs, argN is mem. auxint = arg size. Result is tuple of result(s), plus mem.
|
||||
|
||||
// Conversions: signed extensions, zero (unsigned) extensions, truncations
|
||||
{name: "SignExt8to16", argLength: 1, typ: "Int16"},
|
||||
@ -638,7 +640,7 @@ var genericBlocks = []blockData{
|
||||
{name: "If", controls: 1}, // if Controls[0] goto Succs[0] else goto Succs[1]
|
||||
{name: "Defer", controls: 1}, // Succs[0]=defer queued, Succs[1]=defer recovered. Controls[0] is call op (of memory type)
|
||||
{name: "Ret", controls: 1}, // no successors, Controls[0] value is memory result
|
||||
{name: "RetJmp", controls: 1}, // no successors, Controls[0] value is memory result, jumps to b.Aux.(*gc.Sym)
|
||||
{name: "RetJmp", controls: 1}, // no successors, Controls[0] value is a tail call
|
||||
{name: "Exit", controls: 1}, // no successors, Controls[0] value generates a panic
|
||||
|
||||
// transient block state used for dead code removal
|
||||
|
@ -903,15 +903,12 @@ func (w *HTMLWriter) WriteAST(phase string, buf *bytes.Buffer) {
|
||||
if strings.HasPrefix(l, "buildssa") {
|
||||
escaped = fmt.Sprintf("<b>%v</b>", l)
|
||||
} else {
|
||||
// Parse the line number from the format l(123).
|
||||
idx := strings.Index(l, " l(")
|
||||
if idx != -1 {
|
||||
subl := l[idx+3:]
|
||||
idxEnd := strings.Index(subl, ")")
|
||||
if idxEnd != -1 {
|
||||
if _, err := strconv.Atoi(subl[:idxEnd]); err == nil {
|
||||
lineNo = subl[:idxEnd]
|
||||
}
|
||||
// Parse the line number from the format file:line:col.
|
||||
// See the implementation in ir/fmt.go:dumpNodeHeader.
|
||||
sl := strings.Split(l, ":")
|
||||
if len(sl) >= 3 {
|
||||
if _, err := strconv.Atoi(sl[len(sl)-2]); err == nil {
|
||||
lineNo = sl[len(sl)-2]
|
||||
}
|
||||
}
|
||||
escaped = html.EscapeString(l)
|
||||
@ -1221,7 +1218,7 @@ func (p htmlFuncPrinter) startBlock(b *Block, reachable bool) {
|
||||
}
|
||||
}
|
||||
|
||||
func (p htmlFuncPrinter) endBlock(b *Block) {
|
||||
func (p htmlFuncPrinter) endBlock(b *Block, reachable bool) {
|
||||
if len(b.Values) > 0 { // end list of values
|
||||
io.WriteString(p.w, "</ul>")
|
||||
io.WriteString(p.w, "</li>")
|
||||
|
@ -91,8 +91,8 @@ func (t LocPair) String() string {
|
||||
type LocResults []Location
|
||||
|
||||
func (t LocResults) String() string {
|
||||
s := "<"
|
||||
a := ""
|
||||
s := ""
|
||||
a := "<"
|
||||
for _, r := range t {
|
||||
a += s
|
||||
s = ","
|
||||
|
@ -515,6 +515,7 @@ const (
|
||||
Op386DUFFZERO
|
||||
Op386REPSTOSL
|
||||
Op386CALLstatic
|
||||
Op386CALLtail
|
||||
Op386CALLclosure
|
||||
Op386CALLinter
|
||||
Op386DUFFCOPY
|
||||
@ -993,6 +994,7 @@ const (
|
||||
OpAMD64DUFFZERO
|
||||
OpAMD64REPSTOSQ
|
||||
OpAMD64CALLstatic
|
||||
OpAMD64CALLtail
|
||||
OpAMD64CALLclosure
|
||||
OpAMD64CALLinter
|
||||
OpAMD64DUFFCOPY
|
||||
@ -1269,6 +1271,7 @@ const (
|
||||
OpARMCMOVWLSconst
|
||||
OpARMSRAcond
|
||||
OpARMCALLstatic
|
||||
OpARMCALLtail
|
||||
OpARMCALLclosure
|
||||
OpARMCALLinter
|
||||
OpARMLoweredNilCheck
|
||||
@ -1409,6 +1412,7 @@ const (
|
||||
OpARM64MVNshiftLL
|
||||
OpARM64MVNshiftRL
|
||||
OpARM64MVNshiftRA
|
||||
OpARM64MVNshiftRO
|
||||
OpARM64NEGshiftLL
|
||||
OpARM64NEGshiftRL
|
||||
OpARM64NEGshiftRA
|
||||
@ -1421,21 +1425,27 @@ const (
|
||||
OpARM64ANDshiftLL
|
||||
OpARM64ANDshiftRL
|
||||
OpARM64ANDshiftRA
|
||||
OpARM64ANDshiftRO
|
||||
OpARM64ORshiftLL
|
||||
OpARM64ORshiftRL
|
||||
OpARM64ORshiftRA
|
||||
OpARM64ORshiftRO
|
||||
OpARM64XORshiftLL
|
||||
OpARM64XORshiftRL
|
||||
OpARM64XORshiftRA
|
||||
OpARM64XORshiftRO
|
||||
OpARM64BICshiftLL
|
||||
OpARM64BICshiftRL
|
||||
OpARM64BICshiftRA
|
||||
OpARM64BICshiftRO
|
||||
OpARM64EONshiftLL
|
||||
OpARM64EONshiftRL
|
||||
OpARM64EONshiftRA
|
||||
OpARM64EONshiftRO
|
||||
OpARM64ORNshiftLL
|
||||
OpARM64ORNshiftRL
|
||||
OpARM64ORNshiftRA
|
||||
OpARM64ORNshiftRO
|
||||
OpARM64CMPshiftLL
|
||||
OpARM64CMPshiftRL
|
||||
OpARM64CMPshiftRA
|
||||
@ -1445,6 +1455,7 @@ const (
|
||||
OpARM64TSTshiftLL
|
||||
OpARM64TSTshiftRL
|
||||
OpARM64TSTshiftRA
|
||||
OpARM64TSTshiftRO
|
||||
OpARM64BFI
|
||||
OpARM64BFXIL
|
||||
OpARM64SBFIZ
|
||||
@ -1552,6 +1563,7 @@ const (
|
||||
OpARM64CSNEG
|
||||
OpARM64CSETM
|
||||
OpARM64CALLstatic
|
||||
OpARM64CALLtail
|
||||
OpARM64CALLclosure
|
||||
OpARM64CALLinter
|
||||
OpARM64LoweredNilCheck
|
||||
@ -1697,6 +1709,7 @@ const (
|
||||
OpMIPSMOVFD
|
||||
OpMIPSMOVDF
|
||||
OpMIPSCALLstatic
|
||||
OpMIPSCALLtail
|
||||
OpMIPSCALLclosure
|
||||
OpMIPSCALLinter
|
||||
OpMIPSLoweredAtomicLoad8
|
||||
@ -1813,6 +1826,7 @@ const (
|
||||
OpMIPS64MOVFD
|
||||
OpMIPS64MOVDF
|
||||
OpMIPS64CALLstatic
|
||||
OpMIPS64CALLtail
|
||||
OpMIPS64CALLclosure
|
||||
OpMIPS64CALLinter
|
||||
OpMIPS64DUFFZERO
|
||||
@ -2025,6 +2039,7 @@ const (
|
||||
OpPPC64LoweredRound32F
|
||||
OpPPC64LoweredRound64F
|
||||
OpPPC64CALLstatic
|
||||
OpPPC64CALLtail
|
||||
OpPPC64CALLclosure
|
||||
OpPPC64CALLinter
|
||||
OpPPC64LoweredZero
|
||||
@ -2128,6 +2143,7 @@ const (
|
||||
OpRISCV64SLTIU
|
||||
OpRISCV64MOVconvert
|
||||
OpRISCV64CALLstatic
|
||||
OpRISCV64CALLtail
|
||||
OpRISCV64CALLclosure
|
||||
OpRISCV64CALLinter
|
||||
OpRISCV64DUFFZERO
|
||||
@ -2183,6 +2199,8 @@ const (
|
||||
OpRISCV64FNMSUBD
|
||||
OpRISCV64FSQRTD
|
||||
OpRISCV64FNEGD
|
||||
OpRISCV64FABSD
|
||||
OpRISCV64FSGNJD
|
||||
OpRISCV64FMVDX
|
||||
OpRISCV64FCVTDW
|
||||
OpRISCV64FCVTDL
|
||||
@ -2384,6 +2402,7 @@ const (
|
||||
OpS390XMOVDstoreconst
|
||||
OpS390XCLEAR
|
||||
OpS390XCALLstatic
|
||||
OpS390XCALLtail
|
||||
OpS390XCALLclosure
|
||||
OpS390XCALLinter
|
||||
OpS390XInvertFlags
|
||||
@ -2437,6 +2456,7 @@ const (
|
||||
OpS390XLoweredZero
|
||||
|
||||
OpWasmLoweredStaticCall
|
||||
OpWasmLoweredTailCall
|
||||
OpWasmLoweredClosureCall
|
||||
OpWasmLoweredInterCall
|
||||
OpWasmLoweredAddr
|
||||
@ -2783,9 +2803,11 @@ const (
|
||||
OpClosureCall
|
||||
OpStaticCall
|
||||
OpInterCall
|
||||
OpTailCall
|
||||
OpClosureLECall
|
||||
OpStaticLECall
|
||||
OpInterLECall
|
||||
OpTailLECall
|
||||
OpSignExt8to16
|
||||
OpSignExt8to32
|
||||
OpSignExt8to64
|
||||
@ -5904,6 +5926,16 @@ var opcodeTable = [...]opInfo{
|
||||
clobbers: 65519, // AX CX DX BX BP SI DI X0 X1 X2 X3 X4 X5 X6 X7
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CALLtail",
|
||||
auxType: auxCallOff,
|
||||
argLen: 1,
|
||||
clobberFlags: true,
|
||||
call: true,
|
||||
reg: regInfo{
|
||||
clobbers: 65519, // AX CX DX BX BP SI DI X0 X1 X2 X3 X4 X5 X6 X7
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CALLclosure",
|
||||
auxType: auxCallOff,
|
||||
@ -13101,6 +13133,16 @@ var opcodeTable = [...]opInfo{
|
||||
clobbers: 2147483631, // AX CX DX BX BP SI DI R8 R9 R10 R11 R12 R13 g R15 X0 X1 X2 X3 X4 X5 X6 X7 X8 X9 X10 X11 X12 X13 X14
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CALLtail",
|
||||
auxType: auxCallOff,
|
||||
argLen: -1,
|
||||
clobberFlags: true,
|
||||
call: true,
|
||||
reg: regInfo{
|
||||
clobbers: 2147483631, // AX CX DX BX BP SI DI R8 R9 R10 R11 R12 R13 g R15 X0 X1 X2 X3 X4 X5 X6 X7 X8 X9 X10 X11 X12 X13 X14
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CALLclosure",
|
||||
auxType: auxCallOff,
|
||||
@ -16937,6 +16979,16 @@ var opcodeTable = [...]opInfo{
|
||||
clobbers: 4294924287, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 g R12 R14 F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CALLtail",
|
||||
auxType: auxCallOff,
|
||||
argLen: 1,
|
||||
clobberFlags: true,
|
||||
call: true,
|
||||
reg: regInfo{
|
||||
clobbers: 4294924287, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 g R12 R14 F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CALLclosure",
|
||||
auxType: auxCallOff,
|
||||
@ -18763,6 +18815,20 @@ var opcodeTable = [...]opInfo{
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "MVNshiftRO",
|
||||
auxType: auxInt64,
|
||||
argLen: 1,
|
||||
asm: arm64.AMVN,
|
||||
reg: regInfo{
|
||||
inputs: []inputInfo{
|
||||
{0, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
|
||||
},
|
||||
outputs: []outputInfo{
|
||||
{0, 670826495}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 R30
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "NEGshiftLL",
|
||||
auxType: auxInt64,
|
||||
@ -18940,6 +19006,21 @@ var opcodeTable = [...]opInfo{
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "ANDshiftRO",
|
||||
auxType: auxInt64,
|
||||
argLen: 2,
|
||||
asm: arm64.AAND,
|
||||
reg: regInfo{
|
||||
inputs: []inputInfo{
|
||||
{0, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
|
||||
{1, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
|
||||
},
|
||||
outputs: []outputInfo{
|
||||
{0, 670826495}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 R30
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "ORshiftLL",
|
||||
auxType: auxInt64,
|
||||
@ -18985,6 +19066,21 @@ var opcodeTable = [...]opInfo{
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "ORshiftRO",
|
||||
auxType: auxInt64,
|
||||
argLen: 2,
|
||||
asm: arm64.AORR,
|
||||
reg: regInfo{
|
||||
inputs: []inputInfo{
|
||||
{0, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
|
||||
{1, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
|
||||
},
|
||||
outputs: []outputInfo{
|
||||
{0, 670826495}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 R30
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "XORshiftLL",
|
||||
auxType: auxInt64,
|
||||
@ -19030,6 +19126,21 @@ var opcodeTable = [...]opInfo{
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "XORshiftRO",
|
||||
auxType: auxInt64,
|
||||
argLen: 2,
|
||||
asm: arm64.AEOR,
|
||||
reg: regInfo{
|
||||
inputs: []inputInfo{
|
||||
{0, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
|
||||
{1, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
|
||||
},
|
||||
outputs: []outputInfo{
|
||||
{0, 670826495}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 R30
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "BICshiftLL",
|
||||
auxType: auxInt64,
|
||||
@ -19075,6 +19186,21 @@ var opcodeTable = [...]opInfo{
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "BICshiftRO",
|
||||
auxType: auxInt64,
|
||||
argLen: 2,
|
||||
asm: arm64.ABIC,
|
||||
reg: regInfo{
|
||||
inputs: []inputInfo{
|
||||
{0, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
|
||||
{1, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
|
||||
},
|
||||
outputs: []outputInfo{
|
||||
{0, 670826495}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 R30
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "EONshiftLL",
|
||||
auxType: auxInt64,
|
||||
@ -19120,6 +19246,21 @@ var opcodeTable = [...]opInfo{
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "EONshiftRO",
|
||||
auxType: auxInt64,
|
||||
argLen: 2,
|
||||
asm: arm64.AEON,
|
||||
reg: regInfo{
|
||||
inputs: []inputInfo{
|
||||
{0, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
|
||||
{1, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
|
||||
},
|
||||
outputs: []outputInfo{
|
||||
{0, 670826495}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 R30
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "ORNshiftLL",
|
||||
auxType: auxInt64,
|
||||
@ -19165,6 +19306,21 @@ var opcodeTable = [...]opInfo{
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "ORNshiftRO",
|
||||
auxType: auxInt64,
|
||||
argLen: 2,
|
||||
asm: arm64.AORN,
|
||||
reg: regInfo{
|
||||
inputs: []inputInfo{
|
||||
{0, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
|
||||
{1, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
|
||||
},
|
||||
outputs: []outputInfo{
|
||||
{0, 670826495}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 R30
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CMPshiftLL",
|
||||
auxType: auxInt64,
|
||||
@ -19273,6 +19429,18 @@ var opcodeTable = [...]opInfo{
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "TSTshiftRO",
|
||||
auxType: auxInt64,
|
||||
argLen: 2,
|
||||
asm: arm64.ATST,
|
||||
reg: regInfo{
|
||||
inputs: []inputInfo{
|
||||
{0, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
|
||||
{1, 805044223}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "BFI",
|
||||
auxType: auxARM64BitField,
|
||||
@ -20704,6 +20872,16 @@ var opcodeTable = [...]opInfo{
|
||||
clobbers: 9223372035512336383, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30 F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15 F16 F17 F18 F19 F20 F21 F22 F23 F24 F25 F26 F27 F28 F29 F30 F31
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CALLtail",
|
||||
auxType: auxCallOff,
|
||||
argLen: -1,
|
||||
clobberFlags: true,
|
||||
call: true,
|
||||
reg: regInfo{
|
||||
clobbers: 9223372035512336383, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R19 R20 R21 R22 R23 R24 R25 R26 g R30 F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15 F16 F17 F18 F19 F20 F21 F22 F23 F24 F25 F26 F27 F28 F29 F30 F31
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CALLclosure",
|
||||
auxType: auxCallOff,
|
||||
@ -22637,6 +22815,16 @@ var opcodeTable = [...]opInfo{
|
||||
clobbers: 140737421246462, // R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R18 R19 R20 R21 R22 R24 R25 R28 g R31 F0 F2 F4 F6 F8 F10 F12 F14 F16 F18 F20 F22 F24 F26 F28 F30 HI LO
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CALLtail",
|
||||
auxType: auxCallOff,
|
||||
argLen: 1,
|
||||
clobberFlags: true,
|
||||
call: true,
|
||||
reg: regInfo{
|
||||
clobbers: 140737421246462, // R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R18 R19 R20 R21 R22 R24 R25 R28 g R31 F0 F2 F4 F6 F8 F10 F12 F14 F16 F18 F20 F22 F24 F26 F28 F30 HI LO
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CALLclosure",
|
||||
auxType: auxCallOff,
|
||||
@ -24196,6 +24384,16 @@ var opcodeTable = [...]opInfo{
|
||||
clobbers: 4611686018393833470, // R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R18 R19 R20 R21 R22 R24 R25 g R31 F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15 F16 F17 F18 F19 F20 F21 F22 F23 F24 F25 F26 F27 F28 F29 F30 F31 HI LO
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CALLtail",
|
||||
auxType: auxCallOff,
|
||||
argLen: 1,
|
||||
clobberFlags: true,
|
||||
call: true,
|
||||
reg: regInfo{
|
||||
clobbers: 4611686018393833470, // R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R18 R19 R20 R21 R22 R24 R25 g R31 F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15 F16 F17 F18 F19 F20 F21 F22 F23 F24 F25 F26 F27 F28 F29 F30 F31 HI LO
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CALLclosure",
|
||||
auxType: auxCallOff,
|
||||
@ -27024,6 +27222,16 @@ var opcodeTable = [...]opInfo{
|
||||
clobbers: 576460745860964344, // R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R14 R15 R16 R17 R18 R19 R20 R21 R22 R23 R24 R25 R26 R27 R28 R29 g F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15 F16 F17 F18 F19 F20 F21 F22 F23 F24 F25 F26
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CALLtail",
|
||||
auxType: auxCallOff,
|
||||
argLen: 1,
|
||||
clobberFlags: true,
|
||||
call: true,
|
||||
reg: regInfo{
|
||||
clobbers: 576460745860964344, // R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R14 R15 R16 R17 R18 R19 R20 R21 R22 R23 R24 R25 R26 R27 R28 R29 g F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15 F16 F17 F18 F19 F20 F21 F22 F23 F24 F25 F26
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CALLclosure",
|
||||
auxType: auxCallOff,
|
||||
@ -28430,6 +28638,15 @@ var opcodeTable = [...]opInfo{
|
||||
clobbers: 9223372035781033972, // X3 X5 X6 X7 X8 X9 X10 X11 X12 X13 X14 X15 X16 X17 X18 X19 X20 X21 X22 X23 X24 X25 X26 g X28 X29 X30 F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15 F16 F17 F18 F19 F20 F21 F22 F23 F24 F25 F26 F27 F28 F29 F30 F31
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CALLtail",
|
||||
auxType: auxCallOff,
|
||||
argLen: 1,
|
||||
call: true,
|
||||
reg: regInfo{
|
||||
clobbers: 9223372035781033972, // X3 X5 X6 X7 X8 X9 X10 X11 X12 X13 X14 X15 X16 X17 X18 X19 X20 X21 X22 X23 X24 X25 X26 g X28 X29 X30 F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15 F16 F17 F18 F19 F20 F21 F22 F23 F24 F25 F26 F27 F28 F29 F30 F31
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CALLclosure",
|
||||
auxType: auxCallOff,
|
||||
@ -29187,6 +29404,33 @@ var opcodeTable = [...]opInfo{
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "FABSD",
|
||||
argLen: 1,
|
||||
asm: riscv.AFABSD,
|
||||
reg: regInfo{
|
||||
inputs: []inputInfo{
|
||||
{0, 9223372034707292160}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15 F16 F17 F18 F19 F20 F21 F22 F23 F24 F25 F26 F27 F28 F29 F30 F31
|
||||
},
|
||||
outputs: []outputInfo{
|
||||
{0, 9223372034707292160}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15 F16 F17 F18 F19 F20 F21 F22 F23 F24 F25 F26 F27 F28 F29 F30 F31
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "FSGNJD",
|
||||
argLen: 2,
|
||||
asm: riscv.AFSGNJD,
|
||||
reg: regInfo{
|
||||
inputs: []inputInfo{
|
||||
{0, 9223372034707292160}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15 F16 F17 F18 F19 F20 F21 F22 F23 F24 F25 F26 F27 F28 F29 F30 F31
|
||||
{1, 9223372034707292160}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15 F16 F17 F18 F19 F20 F21 F22 F23 F24 F25 F26 F27 F28 F29 F30 F31
|
||||
},
|
||||
outputs: []outputInfo{
|
||||
{0, 9223372034707292160}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15 F16 F17 F18 F19 F20 F21 F22 F23 F24 F25 F26 F27 F28 F29 F30 F31
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "FMVDX",
|
||||
argLen: 1,
|
||||
@ -32158,6 +32402,16 @@ var opcodeTable = [...]opInfo{
|
||||
clobbers: 4294933503, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R11 R12 g R14 F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CALLtail",
|
||||
auxType: auxCallOff,
|
||||
argLen: 1,
|
||||
clobberFlags: true,
|
||||
call: true,
|
||||
reg: regInfo{
|
||||
clobbers: 4294933503, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R11 R12 g R14 F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CALLclosure",
|
||||
auxType: auxCallOff,
|
||||
@ -32828,6 +33082,15 @@ var opcodeTable = [...]opInfo{
|
||||
clobbers: 844424930131967, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15 F16 F17 F18 F19 F20 F21 F22 F23 F24 F25 F26 F27 F28 F29 F30 F31 g
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "LoweredTailCall",
|
||||
auxType: auxCallOff,
|
||||
argLen: 1,
|
||||
call: true,
|
||||
reg: regInfo{
|
||||
clobbers: 844424930131967, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15 F16 F17 F18 F19 F20 F21 F22 F23 F24 F25 F26 F27 F28 F29 F30 F31 g
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "LoweredClosureCall",
|
||||
auxType: auxCallOff,
|
||||
@ -35604,6 +35867,13 @@ var opcodeTable = [...]opInfo{
|
||||
call: true,
|
||||
generic: true,
|
||||
},
|
||||
{
|
||||
name: "TailCall",
|
||||
auxType: auxCallOff,
|
||||
argLen: -1,
|
||||
call: true,
|
||||
generic: true,
|
||||
},
|
||||
{
|
||||
name: "ClosureLECall",
|
||||
auxType: auxCallOff,
|
||||
@ -35625,6 +35895,13 @@ var opcodeTable = [...]opInfo{
|
||||
call: true,
|
||||
generic: true,
|
||||
},
|
||||
{
|
||||
name: "TailLECall",
|
||||
auxType: auxCallOff,
|
||||
argLen: -1,
|
||||
call: true,
|
||||
generic: true,
|
||||
},
|
||||
{
|
||||
name: "SignExt8to16",
|
||||
argLen: 1,
|
||||
|
@ -6,6 +6,7 @@ package ssa
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"cmd/internal/src"
|
||||
"crypto/sha256"
|
||||
"fmt"
|
||||
"io"
|
||||
@ -17,22 +18,30 @@ func printFunc(f *Func) {
|
||||
|
||||
func hashFunc(f *Func) []byte {
|
||||
h := sha256.New()
|
||||
p := stringFuncPrinter{w: h}
|
||||
p := stringFuncPrinter{w: h, printDead: true}
|
||||
fprintFunc(p, f)
|
||||
return h.Sum(nil)
|
||||
}
|
||||
|
||||
func (f *Func) String() string {
|
||||
var buf bytes.Buffer
|
||||
p := stringFuncPrinter{w: &buf}
|
||||
p := stringFuncPrinter{w: &buf, printDead: true}
|
||||
fprintFunc(p, f)
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
// rewriteHash returns a hash of f suitable for detecting rewrite cycles.
|
||||
func (f *Func) rewriteHash() string {
|
||||
h := sha256.New()
|
||||
p := stringFuncPrinter{w: h, printDead: false}
|
||||
fprintFunc(p, f)
|
||||
return fmt.Sprintf("%x", h.Sum(nil))
|
||||
}
|
||||
|
||||
type funcPrinter interface {
|
||||
header(f *Func)
|
||||
startBlock(b *Block, reachable bool)
|
||||
endBlock(b *Block)
|
||||
endBlock(b *Block, reachable bool)
|
||||
value(v *Value, live bool)
|
||||
startDepCycle()
|
||||
endDepCycle()
|
||||
@ -40,7 +49,8 @@ type funcPrinter interface {
|
||||
}
|
||||
|
||||
type stringFuncPrinter struct {
|
||||
w io.Writer
|
||||
w io.Writer
|
||||
printDead bool
|
||||
}
|
||||
|
||||
func (p stringFuncPrinter) header(f *Func) {
|
||||
@ -50,6 +60,9 @@ func (p stringFuncPrinter) header(f *Func) {
|
||||
}
|
||||
|
||||
func (p stringFuncPrinter) startBlock(b *Block, reachable bool) {
|
||||
if !p.printDead && !reachable {
|
||||
return
|
||||
}
|
||||
fmt.Fprintf(p.w, " b%d:", b.ID)
|
||||
if len(b.Preds) > 0 {
|
||||
io.WriteString(p.w, " <-")
|
||||
@ -64,14 +77,33 @@ func (p stringFuncPrinter) startBlock(b *Block, reachable bool) {
|
||||
io.WriteString(p.w, "\n")
|
||||
}
|
||||
|
||||
func (p stringFuncPrinter) endBlock(b *Block) {
|
||||
func (p stringFuncPrinter) endBlock(b *Block, reachable bool) {
|
||||
if !p.printDead && !reachable {
|
||||
return
|
||||
}
|
||||
fmt.Fprintln(p.w, " "+b.LongString())
|
||||
}
|
||||
|
||||
func StmtString(p src.XPos) string {
|
||||
linenumber := "(?) "
|
||||
if p.IsKnown() {
|
||||
pfx := ""
|
||||
if p.IsStmt() == src.PosIsStmt {
|
||||
pfx = "+"
|
||||
}
|
||||
if p.IsStmt() == src.PosNotStmt {
|
||||
pfx = "-"
|
||||
}
|
||||
linenumber = fmt.Sprintf("(%s%d) ", pfx, p.Line())
|
||||
}
|
||||
return linenumber
|
||||
}
|
||||
|
||||
func (p stringFuncPrinter) value(v *Value, live bool) {
|
||||
fmt.Fprint(p.w, " ")
|
||||
//fmt.Fprint(p.w, v.Block.Func.fe.Pos(v.Pos))
|
||||
//fmt.Fprint(p.w, ": ")
|
||||
if !p.printDead && !live {
|
||||
return
|
||||
}
|
||||
fmt.Fprintf(p.w, " %s", StmtString(v.Pos))
|
||||
fmt.Fprint(p.w, v.LongString())
|
||||
if !live {
|
||||
fmt.Fprint(p.w, " DEAD")
|
||||
@ -103,7 +135,7 @@ func fprintFunc(p funcPrinter, f *Func) {
|
||||
p.value(v, live[v.ID])
|
||||
printed[v.ID] = true
|
||||
}
|
||||
p.endBlock(b)
|
||||
p.endBlock(b, reachable[b.ID])
|
||||
continue
|
||||
}
|
||||
|
||||
@ -151,7 +183,7 @@ func fprintFunc(p funcPrinter, f *Func) {
|
||||
}
|
||||
}
|
||||
|
||||
p.endBlock(b)
|
||||
p.endBlock(b, reachable[b.ID])
|
||||
}
|
||||
for _, name := range f.Names {
|
||||
p.named(*name, f.NamedValues[*name])
|
||||
|
@ -36,6 +36,8 @@ func applyRewrite(f *Func, rb blockRewriter, rv valueRewriter, deadcode deadValu
|
||||
if debug > 1 {
|
||||
fmt.Printf("%s: rewriting for %s\n", f.pass.name, f.Name)
|
||||
}
|
||||
var iters int
|
||||
var states map[string]bool
|
||||
for {
|
||||
change := false
|
||||
for _, b := range f.Blocks {
|
||||
@ -146,6 +148,30 @@ func applyRewrite(f *Func, rb blockRewriter, rv valueRewriter, deadcode deadValu
|
||||
if !change {
|
||||
break
|
||||
}
|
||||
iters++
|
||||
if iters > 1000 || debug >= 2 {
|
||||
// We've done a suspiciously large number of rewrites (or we're in debug mode).
|
||||
// As of Sep 2021, 90% of rewrites complete in 4 iterations or fewer
|
||||
// and the maximum value encountered during make.bash is 12.
|
||||
// Start checking for cycles. (This is too expensive to do routinely.)
|
||||
if states == nil {
|
||||
states = make(map[string]bool)
|
||||
}
|
||||
h := f.rewriteHash()
|
||||
if _, ok := states[h]; ok {
|
||||
// We've found a cycle.
|
||||
// To diagnose it, set debug to 2 and start again,
|
||||
// so that we'll print all rules applied until we complete another cycle.
|
||||
// If debug is already >= 2, we've already done that, so it's time to crash.
|
||||
if debug < 2 {
|
||||
debug = 2
|
||||
states = make(map[string]bool)
|
||||
} else {
|
||||
f.Fatalf("rewrite cycle detected")
|
||||
}
|
||||
}
|
||||
states[h] = true
|
||||
}
|
||||
}
|
||||
// remove clobbered values
|
||||
for _, b := range f.Blocks {
|
||||
@ -1541,12 +1567,16 @@ func rotateLeft32(v, rotate int64) int64 {
|
||||
return int64(bits.RotateLeft32(uint32(v), int(rotate)))
|
||||
}
|
||||
|
||||
func rotateRight64(v, rotate int64) int64 {
|
||||
return int64(bits.RotateLeft64(uint64(v), int(-rotate)))
|
||||
}
|
||||
|
||||
// encodes the lsb and width for arm(64) bitfield ops into the expected auxInt format.
|
||||
func armBFAuxInt(lsb, width int64) arm64BitField {
|
||||
if lsb < 0 || lsb > 63 {
|
||||
panic("ARM(64) bit field lsb constant out of range")
|
||||
}
|
||||
if width < 1 || width > 64 {
|
||||
if width < 1 || lsb+width > 64 {
|
||||
panic("ARM(64) bit field width constant out of range")
|
||||
}
|
||||
return arm64BitField(width | lsb<<8)
|
||||
|
@ -652,6 +652,9 @@ func rewriteValue386(v *Value) bool {
|
||||
case OpSubPtr:
|
||||
v.Op = Op386SUBL
|
||||
return true
|
||||
case OpTailCall:
|
||||
v.Op = Op386CALLtail
|
||||
return true
|
||||
case OpTrunc16to8:
|
||||
v.Op = OpCopy
|
||||
return true
|
||||
|
@ -1103,6 +1103,9 @@ func rewriteValueAMD64(v *Value) bool {
|
||||
case OpSubPtr:
|
||||
v.Op = OpAMD64SUBQ
|
||||
return true
|
||||
case OpTailCall:
|
||||
v.Op = OpAMD64CALLtail
|
||||
return true
|
||||
case OpTrunc:
|
||||
return rewriteValueAMD64_OpTrunc(v)
|
||||
case OpTrunc16to8:
|
||||
|
File diff suppressed because it is too large (2 files)
@ -544,6 +544,9 @@ func rewriteValueMIPS(v *Value) bool {
|
||||
case OpSubPtr:
|
||||
v.Op = OpMIPSSUB
|
||||
return true
|
||||
case OpTailCall:
|
||||
v.Op = OpMIPSCALLtail
|
||||
return true
|
||||
case OpTrunc16to8:
|
||||
v.Op = OpCopy
|
||||
return true
|
||||
|
@ -625,6 +625,9 @@ func rewriteValueMIPS64(v *Value) bool {
|
||||
case OpSubPtr:
|
||||
v.Op = OpMIPS64SUBV
|
||||
return true
|
||||
case OpTailCall:
|
||||
v.Op = OpMIPS64CALLtail
|
||||
return true
|
||||
case OpTrunc16to8:
|
||||
v.Op = OpCopy
|
||||
return true
|
||||
|
@ -772,6 +772,9 @@ func rewriteValuePPC64(v *Value) bool {
|
||||
case OpSubPtr:
|
||||
v.Op = OpPPC64SUB
|
||||
return true
|
||||
case OpTailCall:
|
||||
v.Op = OpPPC64CALLtail
|
||||
return true
|
||||
case OpTrunc:
|
||||
v.Op = OpPPC64FTRUNC
|
||||
return true
|
||||
|
@ -8,6 +8,9 @@ import "cmd/compile/internal/types"
|
||||
|
||||
func rewriteValueRISCV64(v *Value) bool {
|
||||
switch v.Op {
|
||||
case OpAbs:
|
||||
v.Op = OpRISCV64FABSD
|
||||
return true
|
||||
case OpAdd16:
|
||||
v.Op = OpRISCV64ADD
|
||||
return true
|
||||
@ -134,6 +137,9 @@ func rewriteValueRISCV64(v *Value) bool {
|
||||
case OpConvert:
|
||||
v.Op = OpRISCV64MOVconvert
|
||||
return true
|
||||
case OpCopysign:
|
||||
v.Op = OpRISCV64FSGNJD
|
||||
return true
|
||||
case OpCvt32Fto32:
|
||||
v.Op = OpRISCV64FCVTWS
|
||||
return true
|
||||
@ -633,6 +639,9 @@ func rewriteValueRISCV64(v *Value) bool {
|
||||
case OpSubPtr:
|
||||
v.Op = OpRISCV64SUB
|
||||
return true
|
||||
case OpTailCall:
|
||||
v.Op = OpRISCV64CALLtail
|
||||
return true
|
||||
case OpTrunc16to8:
|
||||
v.Op = OpCopy
|
||||
return true
|
||||
|
@ -819,6 +819,9 @@ func rewriteValueS390X(v *Value) bool {
|
||||
case OpSubPtr:
|
||||
v.Op = OpS390XSUB
|
||||
return true
|
||||
case OpTailCall:
|
||||
v.Op = OpS390XCALLtail
|
||||
return true
|
||||
case OpTrunc:
|
||||
return rewriteValueS390X_OpTrunc(v)
|
||||
case OpTrunc16to8:
|
||||
|
@ -556,6 +556,9 @@ func rewriteValueWasm(v *Value) bool {
|
||||
case OpSubPtr:
|
||||
v.Op = OpWasmI64Sub
|
||||
return true
|
||||
case OpTailCall:
|
||||
v.Op = OpWasmLoweredTailCall
|
||||
return true
|
||||
case OpTrunc:
|
||||
v.Op = OpWasmF64Trunc
|
||||
return true
|
||||
|
src/cmd/compile/internal/ssa/testdata/inline-dump.go (vendored, new file, 17 lines)
@ -0,0 +1,17 @@
|
||||
package foo
|
||||
|
||||
func f(m, n int) int {
|
||||
a := g(n)
|
||||
b := g(m)
|
||||
return a + b
|
||||
}
|
||||
|
||||
func g(x int) int {
|
||||
y := h(x + 1)
|
||||
z := h(x - 1)
|
||||
return y + z
|
||||
}
|
||||
|
||||
func h(x int) int {
|
||||
return x * x
|
||||
}
|
src/cmd/compile/internal/ssa/testdata/sayhi.go (vendored, new file, 12 lines)
@ -0,0 +1,12 @@
|
||||
package foo
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sync"
|
||||
)
|
||||
|
||||
func sayhi(n int, wg *sync.WaitGroup) {
|
||||
fmt.Println("hi", n)
|
||||
fmt.Println("hi", n)
|
||||
wg.Done()
|
||||
}
|
@ -351,11 +351,13 @@ func (v *Value) reset(op Op) {
|
||||
// invalidateRecursively marks a value as invalid (unused)
|
||||
// and after decrementing reference counts on its Args,
|
||||
// also recursively invalidates any of those whose use
|
||||
// count goes to zero.
|
||||
// count goes to zero. It returns whether any of the
|
||||
// invalidated values was marked with IsStmt.
|
||||
//
|
||||
// BEWARE of doing this *before* you've applied intended
|
||||
// updates to SSA.
|
||||
func (v *Value) invalidateRecursively() {
|
||||
func (v *Value) invalidateRecursively() bool {
|
||||
lostStmt := v.Pos.IsStmt() == src.PosIsStmt
|
||||
if v.InCache {
|
||||
v.Block.Func.unCache(v)
|
||||
}
|
||||
@ -364,7 +366,8 @@ func (v *Value) invalidateRecursively() {
|
||||
for _, a := range v.Args {
|
||||
a.Uses--
|
||||
if a.Uses == 0 {
|
||||
a.invalidateRecursively()
|
||||
lost := a.invalidateRecursively()
|
||||
lostStmt = lost || lostStmt
|
||||
}
|
||||
}
|
||||
|
||||
@ -375,6 +378,7 @@ func (v *Value) invalidateRecursively() {
|
||||
|
||||
v.AuxInt = 0
|
||||
v.Aux = nil
|
||||
return lostStmt
|
||||
}
|
||||
|
||||
// copyOf is called from rewrite rules.
|
||||
|
@ -544,7 +544,7 @@ func IsStackAddr(v *Value) bool {
|
||||
v = v.Args[0]
|
||||
}
|
||||
switch v.Op {
|
||||
case OpSP, OpLocalAddr, OpSelectNAddr:
|
||||
case OpSP, OpLocalAddr, OpSelectNAddr, OpGetCallerSP:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
|
@ -382,18 +382,16 @@ func makeABIWrapper(f *ir.Func, wrapperABI obj.ABI) {
|
||||
}
|
||||
|
||||
var tail ir.Node
|
||||
call := ir.NewCallExpr(base.Pos, ir.OCALL, f.Nname, nil)
|
||||
call.Args = ir.ParamNames(tfn.Type())
|
||||
call.IsDDD = tfn.Type().IsVariadic()
|
||||
tail = call
|
||||
if tailcall {
|
||||
tail = ir.NewTailCallStmt(base.Pos, f.Nname)
|
||||
} else {
|
||||
call := ir.NewCallExpr(base.Pos, ir.OCALL, f.Nname, nil)
|
||||
call.Args = ir.ParamNames(tfn.Type())
|
||||
call.IsDDD = tfn.Type().IsVariadic()
|
||||
tail = call
|
||||
if tfn.Type().NumResults() > 0 {
|
||||
n := ir.NewReturnStmt(base.Pos, nil)
|
||||
n.Results = []ir.Node{call}
|
||||
tail = n
|
||||
}
|
||||
tail = ir.NewTailCallStmt(base.Pos, call)
|
||||
} else if tfn.Type().NumResults() > 0 {
|
||||
n := ir.NewReturnStmt(base.Pos, nil)
|
||||
n.Results = []ir.Node{call}
|
||||
tail = n
|
||||
}
|
||||
fn.Body.Append(tail)
|
||||
|
||||
|
Some files were not shown because too many files have changed in this diff.