From 0e07b82b3b3ec5af236400610939724e137f8e90 Mon Sep 17 00:00:00 2001 From: Daniel Baumann Date: Tue, 16 Apr 2024 21:27:12 +0200 Subject: Merging upstream version 1.22.2. Signed-off-by: Daniel Baumann --- src/cmd/compile/internal/noder/writer.go | 15 +- src/cmd/compile/internal/ssa/_gen/AMD64.rules | 4 - .../compile/internal/ssa/_gen/AMD64latelower.rules | 5 + src/cmd/compile/internal/ssa/_gen/ARM64.rules | 62 -- .../compile/internal/ssa/_gen/ARM64latelower.rules | 66 ++ src/cmd/compile/internal/ssa/rewrite.go | 14 +- src/cmd/compile/internal/ssa/rewriteAMD64.go | 33 - .../compile/internal/ssa/rewriteAMD64latelower.go | 51 ++ src/cmd/compile/internal/ssa/rewriteARM64.go | 726 -------------------- .../compile/internal/ssa/rewriteARM64latelower.go | 748 +++++++++++++++++++++ src/cmd/compile/internal/types2/subst.go | 12 + src/cmd/compile/internal/walk/order.go | 7 +- src/cmd/go/internal/modfetch/codehost/git.go | 2 +- src/cmd/go/internal/test/test.go | 2 +- .../go/testdata/script/cover_coverpkg_partial.txt | 8 + src/cmd/go/testdata/script/reuse_git.txt | 44 +- .../go/testdata/script/test_ppc64_linker_funcs.txt | 4 + src/cmd/internal/moddeps/moddeps_test.go | 2 + src/cmd/internal/obj/ppc64/obj9.go | 19 +- src/cmd/link/internal/ppc64/asm.go | 17 +- src/cmd/link/internal/riscv64/asm.go | 4 +- 21 files changed, 966 insertions(+), 879 deletions(-) (limited to 'src/cmd') diff --git a/src/cmd/compile/internal/noder/writer.go b/src/cmd/compile/internal/noder/writer.go index e5894c9..c317f39 100644 --- a/src/cmd/compile/internal/noder/writer.go +++ b/src/cmd/compile/internal/noder/writer.go @@ -1209,10 +1209,17 @@ func (w *writer) stmt(stmt syntax.Stmt) { func (w *writer) stmts(stmts []syntax.Stmt) { dead := false w.Sync(pkgbits.SyncStmts) - for _, stmt := range stmts { - if dead { - // Any statements after a terminating statement are safe to - // omit, at least until the next labeled statement. + var lastLabel = -1 + for i, stmt := range stmts { + if _, ok := stmt.(*syntax.LabeledStmt); ok { + lastLabel = i + } + } + for i, stmt := range stmts { + if dead && i > lastLabel { + // Any statements after a terminating and last label statement are safe to omit. + // Otherwise, code after label statement may refer to dead stmts between terminating + // and label statement, see issue #65593. 
if _, ok := stmt.(*syntax.LabeledStmt); !ok { continue } diff --git a/src/cmd/compile/internal/ssa/_gen/AMD64.rules b/src/cmd/compile/internal/ssa/_gen/AMD64.rules index aac6873..2a4c59e 100644 --- a/src/cmd/compile/internal/ssa/_gen/AMD64.rules +++ b/src/cmd/compile/internal/ssa/_gen/AMD64.rules @@ -1020,10 +1020,6 @@ (MOVLQZX x:(MOVLload [off] {sym} ptr mem)) && x.Uses == 1 && clobber(x) => @x.Block (MOVLload [off] {sym} ptr mem) (MOVLQZX x:(MOVQload [off] {sym} ptr mem)) && x.Uses == 1 && clobber(x) => @x.Block (MOVLload [off] {sym} ptr mem) -(MOVLQZX x) && zeroUpper32Bits(x,3) => x -(MOVWQZX x) && zeroUpper48Bits(x,3) => x -(MOVBQZX x) && zeroUpper56Bits(x,3) => x - // replace load from same location as preceding store with zero/sign extension (or copy in case of full width) (MOVBload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _)) && sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) => (MOVBQZX x) (MOVWload [off] {sym} ptr (MOVWstore [off2] {sym2} ptr2 x _)) && sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) => (MOVWQZX x) diff --git a/src/cmd/compile/internal/ssa/_gen/AMD64latelower.rules b/src/cmd/compile/internal/ssa/_gen/AMD64latelower.rules index a1e63d6..1dd8045 100644 --- a/src/cmd/compile/internal/ssa/_gen/AMD64latelower.rules +++ b/src/cmd/compile/internal/ssa/_gen/AMD64latelower.rules @@ -6,3 +6,8 @@ (SAR(Q|L) x y) && buildcfg.GOAMD64 >= 3 => (SARX(Q|L) x y) (SHL(Q|L) x y) && buildcfg.GOAMD64 >= 3 => (SHLX(Q|L) x y) (SHR(Q|L) x y) && buildcfg.GOAMD64 >= 3 => (SHRX(Q|L) x y) + +// See comments in ARM64latelower.rules for why these are here. +(MOVLQZX x) && zeroUpper32Bits(x,3) => x +(MOVWQZX x) && zeroUpper48Bits(x,3) => x +(MOVBQZX x) && zeroUpper56Bits(x,3) => x diff --git a/src/cmd/compile/internal/ssa/_gen/ARM64.rules b/src/cmd/compile/internal/ssa/_gen/ARM64.rules index c5ee028..18a6586 100644 --- a/src/cmd/compile/internal/ssa/_gen/ARM64.rules +++ b/src/cmd/compile/internal/ssa/_gen/ARM64.rules @@ -1054,61 +1054,6 @@ (MOVWUloadidx4 ptr idx (MOVWstorezeroidx4 ptr2 idx2 _)) && isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) => (MOVDconst [0]) (MOVDloadidx8 ptr idx (MOVDstorezeroidx8 ptr2 idx2 _)) && isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) => (MOVDconst [0]) -// don't extend after proper load -(MOVBreg x:(MOVBload _ _)) => (MOVDreg x) -(MOVBUreg x:(MOVBUload _ _)) => (MOVDreg x) -(MOVHreg x:(MOVBload _ _)) => (MOVDreg x) -(MOVHreg x:(MOVBUload _ _)) => (MOVDreg x) -(MOVHreg x:(MOVHload _ _)) => (MOVDreg x) -(MOVHUreg x:(MOVBUload _ _)) => (MOVDreg x) -(MOVHUreg x:(MOVHUload _ _)) => (MOVDreg x) -(MOVWreg x:(MOVBload _ _)) => (MOVDreg x) -(MOVWreg x:(MOVBUload _ _)) => (MOVDreg x) -(MOVWreg x:(MOVHload _ _)) => (MOVDreg x) -(MOVWreg x:(MOVHUload _ _)) => (MOVDreg x) -(MOVWreg x:(MOVWload _ _)) => (MOVDreg x) -(MOVWUreg x:(MOVBUload _ _)) => (MOVDreg x) -(MOVWUreg x:(MOVHUload _ _)) => (MOVDreg x) -(MOVWUreg x:(MOVWUload _ _)) => (MOVDreg x) -(MOVBreg x:(MOVBloadidx _ _ _)) => (MOVDreg x) -(MOVBUreg x:(MOVBUloadidx _ _ _)) => (MOVDreg x) -(MOVHreg x:(MOVBloadidx _ _ _)) => (MOVDreg x) -(MOVHreg x:(MOVBUloadidx _ _ _)) => (MOVDreg x) -(MOVHreg x:(MOVHloadidx _ _ _)) => (MOVDreg x) -(MOVHUreg x:(MOVBUloadidx _ _ _)) => (MOVDreg x) -(MOVHUreg x:(MOVHUloadidx _ _ _)) => (MOVDreg x) -(MOVWreg x:(MOVBloadidx _ _ _)) => (MOVDreg x) -(MOVWreg x:(MOVBUloadidx _ _ _)) => (MOVDreg x) -(MOVWreg x:(MOVHloadidx _ _ _)) => (MOVDreg x) -(MOVWreg x:(MOVHUloadidx _ _ _)) => (MOVDreg x) -(MOVWreg x:(MOVWloadidx _ _ _)) => (MOVDreg x) -(MOVWUreg x:(MOVBUloadidx _ _ _)) => (MOVDreg x) 
-(MOVWUreg x:(MOVHUloadidx _ _ _)) => (MOVDreg x) -(MOVWUreg x:(MOVWUloadidx _ _ _)) => (MOVDreg x) -(MOVHreg x:(MOVHloadidx2 _ _ _)) => (MOVDreg x) -(MOVHUreg x:(MOVHUloadidx2 _ _ _)) => (MOVDreg x) -(MOVWreg x:(MOVHloadidx2 _ _ _)) => (MOVDreg x) -(MOVWreg x:(MOVHUloadidx2 _ _ _)) => (MOVDreg x) -(MOVWreg x:(MOVWloadidx4 _ _ _)) => (MOVDreg x) -(MOVWUreg x:(MOVHUloadidx2 _ _ _)) => (MOVDreg x) -(MOVWUreg x:(MOVWUloadidx4 _ _ _)) => (MOVDreg x) - -// fold double extensions -(MOVBreg x:(MOVBreg _)) => (MOVDreg x) -(MOVBUreg x:(MOVBUreg _)) => (MOVDreg x) -(MOVHreg x:(MOVBreg _)) => (MOVDreg x) -(MOVHreg x:(MOVBUreg _)) => (MOVDreg x) -(MOVHreg x:(MOVHreg _)) => (MOVDreg x) -(MOVHUreg x:(MOVBUreg _)) => (MOVDreg x) -(MOVHUreg x:(MOVHUreg _)) => (MOVDreg x) -(MOVWreg x:(MOVBreg _)) => (MOVDreg x) -(MOVWreg x:(MOVBUreg _)) => (MOVDreg x) -(MOVWreg x:(MOVHreg _)) => (MOVDreg x) -(MOVWreg x:(MOVWreg _)) => (MOVDreg x) -(MOVWUreg x:(MOVBUreg _)) => (MOVDreg x) -(MOVWUreg x:(MOVHUreg _)) => (MOVDreg x) -(MOVWUreg x:(MOVWUreg _)) => (MOVDreg x) - // don't extend before store (MOVBstore [off] {sym} ptr (MOVBreg x) mem) => (MOVBstore [off] {sym} ptr x mem) (MOVBstore [off] {sym} ptr (MOVBUreg x) mem) => (MOVBstore [off] {sym} ptr x mem) @@ -1572,18 +1517,11 @@ (LessThanNoov (InvertFlags x)) => (CSEL0 [OpARM64NotEqual] (GreaterEqualNoov x) x) (GreaterEqualNoov (InvertFlags x)) => (CSINC [OpARM64NotEqual] (LessThanNoov x) (MOVDconst [0]) x) -// Boolean-generating instructions (NOTE: NOT all boolean Values) always -// zero upper bit of the register; no need to zero-extend -(MOVBUreg x:((Equal|NotEqual|LessThan|LessThanU|LessThanF|LessEqual|LessEqualU|LessEqualF|GreaterThan|GreaterThanU|GreaterThanF|GreaterEqual|GreaterEqualU|GreaterEqualF) _)) => (MOVDreg x) - // Don't bother extending if we're not using the higher bits. (MOV(B|BU)reg x) && v.Type.Size() <= 1 => x (MOV(H|HU)reg x) && v.Type.Size() <= 2 => x (MOV(W|WU)reg x) && v.Type.Size() <= 4 => x -// omit unsign extension -(MOVWUreg x) && zeroUpper32Bits(x, 3) => x - // omit sign extension (MOVWreg (ANDconst x [c])) && uint64(c) & uint64(0xffffffff80000000) == 0 => (ANDconst x [c]) (MOVHreg (ANDconst x [c])) && uint64(c) & uint64(0xffffffffffff8000) == 0 => (ANDconst x [c]) diff --git a/src/cmd/compile/internal/ssa/_gen/ARM64latelower.rules b/src/cmd/compile/internal/ssa/_gen/ARM64latelower.rules index d0c2099..e50d985 100644 --- a/src/cmd/compile/internal/ssa/_gen/ARM64latelower.rules +++ b/src/cmd/compile/internal/ssa/_gen/ARM64latelower.rules @@ -19,3 +19,69 @@ (CMNWconst [c] x) && !isARM64addcon(int64(c)) => (CMNW x (MOVDconst [int64(c)])) (ADDSconstflags [c] x) && !isARM64addcon(c) => (ADDSflags x (MOVDconst [c])) + +// These rules remove unneeded sign/zero extensions. +// They occur in late lower because they rely on the fact +// that their arguments don't get rewritten to a non-extended opcode instead. 
+ +// Boolean-generating instructions (NOTE: NOT all boolean Values) always +// zero upper bit of the register; no need to zero-extend +(MOVBUreg x:((Equal|NotEqual|LessThan|LessThanU|LessThanF|LessEqual|LessEqualU|LessEqualF|GreaterThan|GreaterThanU|GreaterThanF|GreaterEqual|GreaterEqualU|GreaterEqualF) _)) => x + +// omit unsigned extension +(MOVWUreg x) && zeroUpper32Bits(x, 3) => x + +// don't extend after proper load +(MOVBreg x:(MOVBload _ _)) => (MOVDreg x) +(MOVBUreg x:(MOVBUload _ _)) => (MOVDreg x) +(MOVHreg x:(MOVBload _ _)) => (MOVDreg x) +(MOVHreg x:(MOVBUload _ _)) => (MOVDreg x) +(MOVHreg x:(MOVHload _ _)) => (MOVDreg x) +(MOVHUreg x:(MOVBUload _ _)) => (MOVDreg x) +(MOVHUreg x:(MOVHUload _ _)) => (MOVDreg x) +(MOVWreg x:(MOVBload _ _)) => (MOVDreg x) +(MOVWreg x:(MOVBUload _ _)) => (MOVDreg x) +(MOVWreg x:(MOVHload _ _)) => (MOVDreg x) +(MOVWreg x:(MOVHUload _ _)) => (MOVDreg x) +(MOVWreg x:(MOVWload _ _)) => (MOVDreg x) +(MOVWUreg x:(MOVBUload _ _)) => (MOVDreg x) +(MOVWUreg x:(MOVHUload _ _)) => (MOVDreg x) +(MOVWUreg x:(MOVWUload _ _)) => (MOVDreg x) +(MOVBreg x:(MOVBloadidx _ _ _)) => (MOVDreg x) +(MOVBUreg x:(MOVBUloadidx _ _ _)) => (MOVDreg x) +(MOVHreg x:(MOVBloadidx _ _ _)) => (MOVDreg x) +(MOVHreg x:(MOVBUloadidx _ _ _)) => (MOVDreg x) +(MOVHreg x:(MOVHloadidx _ _ _)) => (MOVDreg x) +(MOVHUreg x:(MOVBUloadidx _ _ _)) => (MOVDreg x) +(MOVHUreg x:(MOVHUloadidx _ _ _)) => (MOVDreg x) +(MOVWreg x:(MOVBloadidx _ _ _)) => (MOVDreg x) +(MOVWreg x:(MOVBUloadidx _ _ _)) => (MOVDreg x) +(MOVWreg x:(MOVHloadidx _ _ _)) => (MOVDreg x) +(MOVWreg x:(MOVHUloadidx _ _ _)) => (MOVDreg x) +(MOVWreg x:(MOVWloadidx _ _ _)) => (MOVDreg x) +(MOVWUreg x:(MOVBUloadidx _ _ _)) => (MOVDreg x) +(MOVWUreg x:(MOVHUloadidx _ _ _)) => (MOVDreg x) +(MOVWUreg x:(MOVWUloadidx _ _ _)) => (MOVDreg x) +(MOVHreg x:(MOVHloadidx2 _ _ _)) => (MOVDreg x) +(MOVHUreg x:(MOVHUloadidx2 _ _ _)) => (MOVDreg x) +(MOVWreg x:(MOVHloadidx2 _ _ _)) => (MOVDreg x) +(MOVWreg x:(MOVHUloadidx2 _ _ _)) => (MOVDreg x) +(MOVWreg x:(MOVWloadidx4 _ _ _)) => (MOVDreg x) +(MOVWUreg x:(MOVHUloadidx2 _ _ _)) => (MOVDreg x) +(MOVWUreg x:(MOVWUloadidx4 _ _ _)) => (MOVDreg x) + +// fold double extensions +(MOVBreg x:(MOVBreg _)) => (MOVDreg x) +(MOVBUreg x:(MOVBUreg _)) => (MOVDreg x) +(MOVHreg x:(MOVBreg _)) => (MOVDreg x) +(MOVHreg x:(MOVBUreg _)) => (MOVDreg x) +(MOVHreg x:(MOVHreg _)) => (MOVDreg x) +(MOVHUreg x:(MOVBUreg _)) => (MOVDreg x) +(MOVHUreg x:(MOVHUreg _)) => (MOVDreg x) +(MOVWreg x:(MOVBreg _)) => (MOVDreg x) +(MOVWreg x:(MOVBUreg _)) => (MOVDreg x) +(MOVWreg x:(MOVHreg _)) => (MOVDreg x) +(MOVWreg x:(MOVWreg _)) => (MOVDreg x) +(MOVWUreg x:(MOVBUreg _)) => (MOVDreg x) +(MOVWUreg x:(MOVHUreg _)) => (MOVDreg x) +(MOVWUreg x:(MOVWUreg _)) => (MOVDreg x) diff --git a/src/cmd/compile/internal/ssa/rewrite.go b/src/cmd/compile/internal/ssa/rewrite.go index bb09c6c..5ed7331 100644 --- a/src/cmd/compile/internal/ssa/rewrite.go +++ b/src/cmd/compile/internal/ssa/rewrite.go @@ -1294,8 +1294,10 @@ func zeroUpper32Bits(x *Value, depth int) bool { OpARM64MULW, OpARM64MNEGW, OpARM64UDIVW, OpARM64DIVW, OpARM64UMODW, OpARM64MADDW, OpARM64MSUBW, OpARM64RORW, OpARM64RORWconst: return true - case OpArg: - return x.Type.Size() == 4 + case OpArg: // note: but not ArgIntReg + // amd64 always loads args from the stack unsigned. + // most other architectures load them sign/zero extended based on the type. 
+ return x.Type.Size() == 4 && (x.Type.IsUnsigned() || x.Block.Func.Config.arch == "amd64") case OpPhi, OpSelect0, OpSelect1: // Phis can use each-other as an arguments, instead of tracking visited values, // just limit recursion depth. @@ -1318,8 +1320,8 @@ func zeroUpper48Bits(x *Value, depth int) bool { switch x.Op { case OpAMD64MOVWQZX, OpAMD64MOVWload, OpAMD64MOVWloadidx1, OpAMD64MOVWloadidx2: return true - case OpArg: - return x.Type.Size() == 2 + case OpArg: // note: but not ArgIntReg + return x.Type.Size() == 2 && (x.Type.IsUnsigned() || x.Block.Func.Config.arch == "amd64") case OpPhi, OpSelect0, OpSelect1: // Phis can use each-other as an arguments, instead of tracking visited values, // just limit recursion depth. @@ -1342,8 +1344,8 @@ func zeroUpper56Bits(x *Value, depth int) bool { switch x.Op { case OpAMD64MOVBQZX, OpAMD64MOVBload, OpAMD64MOVBloadidx1: return true - case OpArg: - return x.Type.Size() == 1 + case OpArg: // note: but not ArgIntReg + return x.Type.Size() == 1 && (x.Type.IsUnsigned() || x.Block.Func.Config.arch == "amd64") case OpPhi, OpSelect0, OpSelect1: // Phis can use each-other as an arguments, instead of tracking visited values, // just limit recursion depth. diff --git a/src/cmd/compile/internal/ssa/rewriteAMD64.go b/src/cmd/compile/internal/ssa/rewriteAMD64.go index 5332512..ba71189 100644 --- a/src/cmd/compile/internal/ssa/rewriteAMD64.go +++ b/src/cmd/compile/internal/ssa/rewriteAMD64.go @@ -9640,17 +9640,6 @@ func rewriteValueAMD64_OpAMD64MOVBQZX(v *Value) bool { v0.AddArg2(ptr, mem) return true } - // match: (MOVBQZX x) - // cond: zeroUpper56Bits(x,3) - // result: x - for { - x := v_0 - if !(zeroUpper56Bits(x, 3)) { - break - } - v.copyOf(x) - return true - } // match: (MOVBQZX (ANDLconst [c] x)) // result: (ANDLconst [c & 0xff] x) for { @@ -10392,17 +10381,6 @@ func rewriteValueAMD64_OpAMD64MOVLQZX(v *Value) bool { v0.AddArg2(ptr, mem) return true } - // match: (MOVLQZX x) - // cond: zeroUpper32Bits(x,3) - // result: x - for { - x := v_0 - if !(zeroUpper32Bits(x, 3)) { - break - } - v.copyOf(x) - return true - } // match: (MOVLQZX (ANDLconst [c] x)) // result: (ANDLconst [c] x) for { @@ -12756,17 +12734,6 @@ func rewriteValueAMD64_OpAMD64MOVWQZX(v *Value) bool { v0.AddArg2(ptr, mem) return true } - // match: (MOVWQZX x) - // cond: zeroUpper48Bits(x,3) - // result: x - for { - x := v_0 - if !(zeroUpper48Bits(x, 3)) { - break - } - v.copyOf(x) - return true - } // match: (MOVWQZX (ANDLconst [c] x)) // result: (ANDLconst [c & 0xffff] x) for { diff --git a/src/cmd/compile/internal/ssa/rewriteAMD64latelower.go b/src/cmd/compile/internal/ssa/rewriteAMD64latelower.go index d3dd263..11ecb0b 100644 --- a/src/cmd/compile/internal/ssa/rewriteAMD64latelower.go +++ b/src/cmd/compile/internal/ssa/rewriteAMD64latelower.go @@ -6,6 +6,12 @@ import "internal/buildcfg" func rewriteValueAMD64latelower(v *Value) bool { switch v.Op { + case OpAMD64MOVBQZX: + return rewriteValueAMD64latelower_OpAMD64MOVBQZX(v) + case OpAMD64MOVLQZX: + return rewriteValueAMD64latelower_OpAMD64MOVLQZX(v) + case OpAMD64MOVWQZX: + return rewriteValueAMD64latelower_OpAMD64MOVWQZX(v) case OpAMD64SARL: return rewriteValueAMD64latelower_OpAMD64SARL(v) case OpAMD64SARQ: @@ -21,6 +27,51 @@ func rewriteValueAMD64latelower(v *Value) bool { } return false } +func rewriteValueAMD64latelower_OpAMD64MOVBQZX(v *Value) bool { + v_0 := v.Args[0] + // match: (MOVBQZX x) + // cond: zeroUpper56Bits(x,3) + // result: x + for { + x := v_0 + if !(zeroUpper56Bits(x, 3)) { + break + } + v.copyOf(x) + return true + } 
+ return false +} +func rewriteValueAMD64latelower_OpAMD64MOVLQZX(v *Value) bool { + v_0 := v.Args[0] + // match: (MOVLQZX x) + // cond: zeroUpper32Bits(x,3) + // result: x + for { + x := v_0 + if !(zeroUpper32Bits(x, 3)) { + break + } + v.copyOf(x) + return true + } + return false +} +func rewriteValueAMD64latelower_OpAMD64MOVWQZX(v *Value) bool { + v_0 := v.Args[0] + // match: (MOVWQZX x) + // cond: zeroUpper48Bits(x,3) + // result: x + for { + x := v_0 + if !(zeroUpper48Bits(x, 3)) { + break + } + v.copyOf(x) + return true + } + return false +} func rewriteValueAMD64latelower_OpAMD64SARL(v *Value) bool { v_1 := v.Args[1] v_0 := v.Args[0] diff --git a/src/cmd/compile/internal/ssa/rewriteARM64.go b/src/cmd/compile/internal/ssa/rewriteARM64.go index f0a4425..8f60f02 100644 --- a/src/cmd/compile/internal/ssa/rewriteARM64.go +++ b/src/cmd/compile/internal/ssa/rewriteARM64.go @@ -8307,39 +8307,6 @@ func rewriteValueARM64_OpARM64MOVBUloadidx(v *Value) bool { } func rewriteValueARM64_OpARM64MOVBUreg(v *Value) bool { v_0 := v.Args[0] - // match: (MOVBUreg x:(MOVBUload _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBUload { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVBUreg x:(MOVBUloadidx _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBUloadidx { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVBUreg x:(MOVBUreg _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBUreg { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } // match: (MOVBUreg (ANDconst [c] x)) // result: (ANDconst [c&(1<<8-1)] x) for { @@ -8364,160 +8331,6 @@ func rewriteValueARM64_OpARM64MOVBUreg(v *Value) bool { v.AuxInt = int64ToAuxInt(int64(uint8(c))) return true } - // match: (MOVBUreg x:(Equal _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64Equal { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVBUreg x:(NotEqual _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64NotEqual { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVBUreg x:(LessThan _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64LessThan { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVBUreg x:(LessThanU _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64LessThanU { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVBUreg x:(LessThanF _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64LessThanF { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVBUreg x:(LessEqual _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64LessEqual { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVBUreg x:(LessEqualU _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64LessEqualU { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVBUreg x:(LessEqualF _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64LessEqualF { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVBUreg x:(GreaterThan _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64GreaterThan { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVBUreg x:(GreaterThanU _)) - // result: (MOVDreg x) 
- for { - x := v_0 - if x.Op != OpARM64GreaterThanU { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVBUreg x:(GreaterThanF _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64GreaterThanF { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVBUreg x:(GreaterEqual _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64GreaterEqual { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVBUreg x:(GreaterEqualU _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64GreaterEqualU { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVBUreg x:(GreaterEqualF _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64GreaterEqualF { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } // match: (MOVBUreg x) // cond: v.Type.Size() <= 1 // result: x @@ -8748,39 +8561,6 @@ func rewriteValueARM64_OpARM64MOVBloadidx(v *Value) bool { } func rewriteValueARM64_OpARM64MOVBreg(v *Value) bool { v_0 := v.Args[0] - // match: (MOVBreg x:(MOVBload _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBload { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVBreg x:(MOVBloadidx _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBloadidx { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVBreg x:(MOVBreg _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBreg { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } // match: (MOVBreg (MOVDconst [c])) // result: (MOVDconst [int64(int8(c))]) for { @@ -10353,83 +10133,6 @@ func rewriteValueARM64_OpARM64MOVHUloadidx2(v *Value) bool { } func rewriteValueARM64_OpARM64MOVHUreg(v *Value) bool { v_0 := v.Args[0] - // match: (MOVHUreg x:(MOVBUload _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBUload { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVHUreg x:(MOVHUload _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVHUload { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVHUreg x:(MOVBUloadidx _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBUloadidx { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVHUreg x:(MOVHUloadidx _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVHUloadidx { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVHUreg x:(MOVHUloadidx2 _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVHUloadidx2 { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVHUreg x:(MOVBUreg _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBUreg { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVHUreg x:(MOVHUreg _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVHUreg { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } // match: (MOVHUreg (ANDconst [c] x)) // result: (ANDconst [c&(1<<16-1)] x) for { @@ -10790,116 +10493,6 @@ func rewriteValueARM64_OpARM64MOVHloadidx2(v *Value) bool { } func rewriteValueARM64_OpARM64MOVHreg(v *Value) bool { v_0 := v.Args[0] - // match: (MOVHreg x:(MOVBload _ _)) - // result: (MOVDreg x) - for { - 
x := v_0 - if x.Op != OpARM64MOVBload { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVHreg x:(MOVBUload _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBUload { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVHreg x:(MOVHload _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVHload { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVHreg x:(MOVBloadidx _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBloadidx { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVHreg x:(MOVBUloadidx _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBUloadidx { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVHreg x:(MOVHloadidx _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVHloadidx { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVHreg x:(MOVHloadidx2 _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVHloadidx2 { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVHreg x:(MOVBreg _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBreg { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVHreg x:(MOVBUreg _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBUreg { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVHreg x:(MOVHreg _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVHreg { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } // match: (MOVHreg (MOVDconst [c])) // result: (MOVDconst [int64(int16(c))]) for { @@ -11955,127 +11548,6 @@ func rewriteValueARM64_OpARM64MOVWUloadidx4(v *Value) bool { } func rewriteValueARM64_OpARM64MOVWUreg(v *Value) bool { v_0 := v.Args[0] - // match: (MOVWUreg x:(MOVBUload _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBUload { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWUreg x:(MOVHUload _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVHUload { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWUreg x:(MOVWUload _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVWUload { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWUreg x:(MOVBUloadidx _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBUloadidx { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWUreg x:(MOVHUloadidx _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVHUloadidx { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWUreg x:(MOVWUloadidx _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVWUloadidx { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWUreg x:(MOVHUloadidx2 _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVHUloadidx2 { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWUreg x:(MOVWUloadidx4 _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVWUloadidx4 { - break - } - v.reset(OpARM64MOVDreg) - 
v.AddArg(x) - return true - } - // match: (MOVWUreg x:(MOVBUreg _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBUreg { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWUreg x:(MOVHUreg _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVHUreg { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWUreg x:(MOVWUreg _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVWUreg { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } // match: (MOVWUreg (ANDconst [c] x)) // result: (ANDconst [c&(1<<32-1)] x) for { @@ -12111,17 +11583,6 @@ func rewriteValueARM64_OpARM64MOVWUreg(v *Value) bool { v.copyOf(x) return true } - // match: (MOVWUreg x) - // cond: zeroUpper32Bits(x, 3) - // result: x - for { - x := v_0 - if !(zeroUpper32Bits(x, 3)) { - break - } - v.copyOf(x) - return true - } // match: (MOVWUreg (SLLconst [lc] x)) // cond: lc >= 32 // result: (MOVDconst [0]) @@ -12428,193 +11889,6 @@ func rewriteValueARM64_OpARM64MOVWloadidx4(v *Value) bool { } func rewriteValueARM64_OpARM64MOVWreg(v *Value) bool { v_0 := v.Args[0] - // match: (MOVWreg x:(MOVBload _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBload { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWreg x:(MOVBUload _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBUload { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWreg x:(MOVHload _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVHload { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWreg x:(MOVHUload _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVHUload { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWreg x:(MOVWload _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVWload { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWreg x:(MOVBloadidx _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBloadidx { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWreg x:(MOVBUloadidx _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBUloadidx { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWreg x:(MOVHloadidx _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVHloadidx { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWreg x:(MOVHUloadidx _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVHUloadidx { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWreg x:(MOVWloadidx _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVWloadidx { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWreg x:(MOVHloadidx2 _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVHloadidx2 { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWreg x:(MOVHUloadidx2 _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVHUloadidx2 { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWreg x:(MOVWloadidx4 _ _ _)) - // result: (MOVDreg x) - for { - x := v_0 - if 
x.Op != OpARM64MOVWloadidx4 { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWreg x:(MOVBreg _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBreg { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWreg x:(MOVBUreg _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVBUreg { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWreg x:(MOVHreg _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVHreg { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } - // match: (MOVWreg x:(MOVWreg _)) - // result: (MOVDreg x) - for { - x := v_0 - if x.Op != OpARM64MOVWreg { - break - } - v.reset(OpARM64MOVDreg) - v.AddArg(x) - return true - } // match: (MOVWreg (MOVDconst [c])) // result: (MOVDconst [int64(int32(c))]) for { diff --git a/src/cmd/compile/internal/ssa/rewriteARM64latelower.go b/src/cmd/compile/internal/ssa/rewriteARM64latelower.go index 0998757..6873fd7 100644 --- a/src/cmd/compile/internal/ssa/rewriteARM64latelower.go +++ b/src/cmd/compile/internal/ssa/rewriteARM64latelower.go @@ -18,6 +18,18 @@ func rewriteValueARM64latelower(v *Value) bool { return rewriteValueARM64latelower_OpARM64CMPWconst(v) case OpARM64CMPconst: return rewriteValueARM64latelower_OpARM64CMPconst(v) + case OpARM64MOVBUreg: + return rewriteValueARM64latelower_OpARM64MOVBUreg(v) + case OpARM64MOVBreg: + return rewriteValueARM64latelower_OpARM64MOVBreg(v) + case OpARM64MOVHUreg: + return rewriteValueARM64latelower_OpARM64MOVHUreg(v) + case OpARM64MOVHreg: + return rewriteValueARM64latelower_OpARM64MOVHreg(v) + case OpARM64MOVWUreg: + return rewriteValueARM64latelower_OpARM64MOVWUreg(v) + case OpARM64MOVWreg: + return rewriteValueARM64latelower_OpARM64MOVWreg(v) case OpARM64ORconst: return rewriteValueARM64latelower_OpARM64ORconst(v) case OpARM64SUBconst: @@ -178,6 +190,742 @@ func rewriteValueARM64latelower_OpARM64CMPconst(v *Value) bool { } return false } +func rewriteValueARM64latelower_OpARM64MOVBUreg(v *Value) bool { + v_0 := v.Args[0] + // match: (MOVBUreg x:(Equal _)) + // result: x + for { + x := v_0 + if x.Op != OpARM64Equal { + break + } + v.copyOf(x) + return true + } + // match: (MOVBUreg x:(NotEqual _)) + // result: x + for { + x := v_0 + if x.Op != OpARM64NotEqual { + break + } + v.copyOf(x) + return true + } + // match: (MOVBUreg x:(LessThan _)) + // result: x + for { + x := v_0 + if x.Op != OpARM64LessThan { + break + } + v.copyOf(x) + return true + } + // match: (MOVBUreg x:(LessThanU _)) + // result: x + for { + x := v_0 + if x.Op != OpARM64LessThanU { + break + } + v.copyOf(x) + return true + } + // match: (MOVBUreg x:(LessThanF _)) + // result: x + for { + x := v_0 + if x.Op != OpARM64LessThanF { + break + } + v.copyOf(x) + return true + } + // match: (MOVBUreg x:(LessEqual _)) + // result: x + for { + x := v_0 + if x.Op != OpARM64LessEqual { + break + } + v.copyOf(x) + return true + } + // match: (MOVBUreg x:(LessEqualU _)) + // result: x + for { + x := v_0 + if x.Op != OpARM64LessEqualU { + break + } + v.copyOf(x) + return true + } + // match: (MOVBUreg x:(LessEqualF _)) + // result: x + for { + x := v_0 + if x.Op != OpARM64LessEqualF { + break + } + v.copyOf(x) + return true + } + // match: (MOVBUreg x:(GreaterThan _)) + // result: x + for { + x := v_0 + if x.Op != OpARM64GreaterThan { + break + } + v.copyOf(x) + return true + } + // match: (MOVBUreg x:(GreaterThanU _)) + // result: x + for { + x := v_0 + if x.Op != 
OpARM64GreaterThanU { + break + } + v.copyOf(x) + return true + } + // match: (MOVBUreg x:(GreaterThanF _)) + // result: x + for { + x := v_0 + if x.Op != OpARM64GreaterThanF { + break + } + v.copyOf(x) + return true + } + // match: (MOVBUreg x:(GreaterEqual _)) + // result: x + for { + x := v_0 + if x.Op != OpARM64GreaterEqual { + break + } + v.copyOf(x) + return true + } + // match: (MOVBUreg x:(GreaterEqualU _)) + // result: x + for { + x := v_0 + if x.Op != OpARM64GreaterEqualU { + break + } + v.copyOf(x) + return true + } + // match: (MOVBUreg x:(GreaterEqualF _)) + // result: x + for { + x := v_0 + if x.Op != OpARM64GreaterEqualF { + break + } + v.copyOf(x) + return true + } + // match: (MOVBUreg x:(MOVBUload _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBUload { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVBUreg x:(MOVBUloadidx _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBUloadidx { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVBUreg x:(MOVBUreg _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBUreg { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + return false +} +func rewriteValueARM64latelower_OpARM64MOVBreg(v *Value) bool { + v_0 := v.Args[0] + // match: (MOVBreg x:(MOVBload _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBload { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVBreg x:(MOVBloadidx _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBloadidx { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVBreg x:(MOVBreg _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBreg { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + return false +} +func rewriteValueARM64latelower_OpARM64MOVHUreg(v *Value) bool { + v_0 := v.Args[0] + // match: (MOVHUreg x:(MOVBUload _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBUload { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVHUreg x:(MOVHUload _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVHUload { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVHUreg x:(MOVBUloadidx _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBUloadidx { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVHUreg x:(MOVHUloadidx _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVHUloadidx { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVHUreg x:(MOVHUloadidx2 _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVHUloadidx2 { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVHUreg x:(MOVBUreg _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBUreg { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVHUreg x:(MOVHUreg _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVHUreg { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + return false +} +func rewriteValueARM64latelower_OpARM64MOVHreg(v *Value) bool { + v_0 := v.Args[0] + // match: (MOVHreg x:(MOVBload _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != 
OpARM64MOVBload { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVHreg x:(MOVBUload _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBUload { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVHreg x:(MOVHload _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVHload { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVHreg x:(MOVBloadidx _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBloadidx { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVHreg x:(MOVBUloadidx _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBUloadidx { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVHreg x:(MOVHloadidx _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVHloadidx { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVHreg x:(MOVHloadidx2 _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVHloadidx2 { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVHreg x:(MOVBreg _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBreg { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVHreg x:(MOVBUreg _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBUreg { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVHreg x:(MOVHreg _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVHreg { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + return false +} +func rewriteValueARM64latelower_OpARM64MOVWUreg(v *Value) bool { + v_0 := v.Args[0] + // match: (MOVWUreg x) + // cond: zeroUpper32Bits(x, 3) + // result: x + for { + x := v_0 + if !(zeroUpper32Bits(x, 3)) { + break + } + v.copyOf(x) + return true + } + // match: (MOVWUreg x:(MOVBUload _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBUload { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWUreg x:(MOVHUload _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVHUload { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWUreg x:(MOVWUload _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVWUload { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWUreg x:(MOVBUloadidx _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBUloadidx { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWUreg x:(MOVHUloadidx _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVHUloadidx { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWUreg x:(MOVWUloadidx _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVWUloadidx { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWUreg x:(MOVHUloadidx2 _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVHUloadidx2 { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWUreg x:(MOVWUloadidx4 _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVWUloadidx4 { + break + } + v.reset(OpARM64MOVDreg) + 
v.AddArg(x) + return true + } + // match: (MOVWUreg x:(MOVBUreg _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBUreg { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWUreg x:(MOVHUreg _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVHUreg { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWUreg x:(MOVWUreg _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVWUreg { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + return false +} +func rewriteValueARM64latelower_OpARM64MOVWreg(v *Value) bool { + v_0 := v.Args[0] + // match: (MOVWreg x:(MOVBload _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBload { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWreg x:(MOVBUload _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBUload { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWreg x:(MOVHload _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVHload { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWreg x:(MOVHUload _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVHUload { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWreg x:(MOVWload _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVWload { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWreg x:(MOVBloadidx _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBloadidx { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWreg x:(MOVBUloadidx _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBUloadidx { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWreg x:(MOVHloadidx _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVHloadidx { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWreg x:(MOVHUloadidx _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVHUloadidx { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWreg x:(MOVWloadidx _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVWloadidx { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWreg x:(MOVHloadidx2 _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVHloadidx2 { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWreg x:(MOVHUloadidx2 _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVHUloadidx2 { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWreg x:(MOVWloadidx4 _ _ _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVWloadidx4 { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWreg x:(MOVBreg _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBreg { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWreg x:(MOVBUreg _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVBUreg { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWreg x:(MOVHreg _)) + 
// result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVHreg { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + // match: (MOVWreg x:(MOVWreg _)) + // result: (MOVDreg x) + for { + x := v_0 + if x.Op != OpARM64MOVWreg { + break + } + v.reset(OpARM64MOVDreg) + v.AddArg(x) + return true + } + return false +} func rewriteValueARM64latelower_OpARM64ORconst(v *Value) bool { v_0 := v.Args[0] b := v.Block diff --git a/src/cmd/compile/internal/types2/subst.go b/src/cmd/compile/internal/types2/subst.go index 09dc585..1ad73c4 100644 --- a/src/cmd/compile/internal/types2/subst.go +++ b/src/cmd/compile/internal/types2/subst.go @@ -95,6 +95,18 @@ func (subst *subster) typ(typ Type) Type { case *Basic: // nothing to do + case *Alias: + rhs := subst.typ(t.fromRHS) + if rhs != t.fromRHS { + // This branch cannot be reached because the RHS of an alias + // may only contain type parameters of an enclosing function. + // Such function bodies are never "instantiated" and thus + // substitution is not called on locally declared alias types. + // TODO(gri) adjust once parameterized aliases are supported + panic("unreachable for unparameterized aliases") + // return subst.check.newAlias(t.obj, rhs) + } + case *Array: elem := subst.typOrNil(t.elem) if elem != t.elem { diff --git a/src/cmd/compile/internal/walk/order.go b/src/cmd/compile/internal/walk/order.go index 179fbdb..de180a4 100644 --- a/src/cmd/compile/internal/walk/order.go +++ b/src/cmd/compile/internal/walk/order.go @@ -643,7 +643,12 @@ func (o *orderState) stmt(n ir.Node) { indexLHS.Index = o.cheapExpr(indexLHS.Index) call := n.Y.(*ir.CallExpr) - indexRHS := call.Args[0].(*ir.IndexExpr) + arg0 := call.Args[0] + // ir.SameSafeExpr skips OCONVNOPs, so we must do the same here (#66096). + for arg0.Op() == ir.OCONVNOP { + arg0 = arg0.(*ir.ConvExpr).X + } + indexRHS := arg0.(*ir.IndexExpr) indexRHS.X = indexLHS.X indexRHS.Index = indexLHS.Index diff --git a/src/cmd/go/internal/modfetch/codehost/git.go b/src/cmd/go/internal/modfetch/codehost/git.go index 7d9e5d8..bab4c5e 100644 --- a/src/cmd/go/internal/modfetch/codehost/git.go +++ b/src/cmd/go/internal/modfetch/codehost/git.go @@ -554,7 +554,7 @@ func (r *gitRepo) stat(ctx context.Context, rev string) (info *RevInfo, err erro // an apparent Git bug introduced in Git 2.21 (commit 61c771), // which causes the handler for protocol version 1 to sometimes miss // tags that point to the requested commit (see https://go.dev/issue/56881). 
- _, err = Run(ctx, r.dir, "git", "fetch", "-f", "-c", "protocol.version=2", "--depth=1", r.remote, refspec) + _, err = Run(ctx, r.dir, "git", "-c", "protocol.version=2", "fetch", "-f", "--depth=1", r.remote, refspec) release() if err == nil { diff --git a/src/cmd/go/internal/test/test.go b/src/cmd/go/internal/test/test.go index 8a40547..13818b7 100644 --- a/src/cmd/go/internal/test/test.go +++ b/src/cmd/go/internal/test/test.go @@ -1396,7 +1396,7 @@ func (r *runTestActor) Act(b *work.Builder, ctx context.Context, a *work.Action) if p := a.Package; len(p.TestGoFiles)+len(p.XTestGoFiles) == 0 { reportNoTestFiles := true - if cfg.BuildCover && cfg.Experiment.CoverageRedesign { + if cfg.BuildCover && cfg.Experiment.CoverageRedesign && p.Internal.Cover.GenMeta { if err := sh.Mkdir(a.Objdir); err != nil { return err } diff --git a/src/cmd/go/testdata/script/cover_coverpkg_partial.txt b/src/cmd/go/testdata/script/cover_coverpkg_partial.txt index 5240241..ef7a4dd 100644 --- a/src/cmd/go/testdata/script/cover_coverpkg_partial.txt +++ b/src/cmd/go/testdata/script/cover_coverpkg_partial.txt @@ -39,6 +39,14 @@ go test -coverprofile=baz.p -coverpkg=./a,./d,./f ./b ./f stdout '^ok\s+M/b\s+\S+\s+coverage: 83.3% of statements in ./a, ./d, ./f' stdout '^\s*M/f\s+coverage: 0.0% of statements' +# This sub-test inspired by issue 65653: if package P is is matched +# via the package pattern supplied as the argument to "go test -cover" +# but P is not part of "-coverpkg", then we don't want coverage for P +# (including the specific case where P has no test files). +go test -coverpkg=./a ./... +stdout '^ok\s+M/a\s+\S+\s+coverage: 100.0% of statements in ./a' +stdout '^\s*\?\s+M/f\s+\[no test files\]' + -- a/a.go -- package a diff --git a/src/cmd/go/testdata/script/reuse_git.txt b/src/cmd/go/testdata/script/reuse_git.txt index 432f5a9..3c1b38b 100644 --- a/src/cmd/go/testdata/script/reuse_git.txt +++ b/src/cmd/go/testdata/script/reuse_git.txt @@ -7,7 +7,7 @@ env GOSUMDB=off # go mod download with the pseudo-version should invoke git but not have a TagSum or Ref. go mod download -x -json vcs-test.golang.org/git/hello.git@v0.0.0-20170922010558-fc3a09f3dc5c -stderr 'git fetch' +stderr 'git( .*)* fetch' cp stdout hellopseudo.json ! stdout '"(Query|TagPrefix|TagSum|Ref)"' stdout '"Version": "v0.0.0-20170922010558-fc3a09f3dc5c"' @@ -18,7 +18,7 @@ go clean -modcache # go mod download vcstest/hello should invoke git, print origin info go mod download -x -json vcs-test.golang.org/git/hello.git@latest -stderr 'git fetch' +stderr 'git( .*)* fetch' cp stdout hello.json stdout '"Version": "v0.0.0-20170922010558-fc3a09f3dc5c"' stdout '"VCS": "git"' @@ -33,13 +33,13 @@ stdout '"Hash": "fc3a09f3dc5cfe0d7a743ea18f1f5226e68b3777"' # but still be careful not to include a TagSum or a Ref, especially not Ref set to HEAD, # which is easy to do when reusing the cached version from the @latest query. go mod download -x -json vcs-test.golang.org/git/hello.git@v0.0.0-20170922010558-fc3a09f3dc5c -! stderr 'git fetch' +! stderr 'git( .*)* fetch' cp stdout hellopseudo2.json cmpenv hellopseudo.json hellopseudo2.json # go mod download vcstest/hello@hash needs to check TagSum to find pseudoversion base. go mod download -x -json vcs-test.golang.org/git/hello.git@fc3a09f3dc5c -! stderr 'git fetch' +! 
stderr 'git( .*)* fetch' cp stdout hellohash.json stdout '"Version": "v0.0.0-20170922010558-fc3a09f3dc5c"' stdout '"Query": "fc3a09f3dc5c"' @@ -98,7 +98,7 @@ stdout '"RepoSum": "r1:c0/9JCZ25lxoBiK3[+]3BhACU4giH49flcJmBynJ[+]Jvmc="' # go mod download vcstest/tagtests should invoke git, print origin info go mod download -x -json vcs-test.golang.org/git/tagtests.git@latest -stderr 'git fetch' +stderr 'git( .*)* fetch' cp stdout tagtests.json stdout '"Version": "v0.2.2"' stdout '"Query": "latest"' @@ -135,7 +135,7 @@ stdout '"Hash": "c7818c24fa2f3f714c67d0a6d3e411c85a518d1f"' # go mod download vcstest/prefixtagtests should invoke git, print origin info go mod download -x -json vcs-test.golang.org/git/prefixtagtests.git/sub@latest -stderr 'git fetch' +stderr 'git( .*)* fetch' cp stdout prefixtagtests.json stdout '"Version": "v0.0.10"' stdout '"Query": "latest"' @@ -154,12 +154,12 @@ cp stdout all.json # clean the module cache, make sure that makes go mod download re-run git fetch, clean again go clean -modcache go mod download -x -json vcs-test.golang.org/git/hello.git@latest -stderr 'git fetch' +stderr 'git( .*)* fetch' go clean -modcache # reuse go mod download vcstest/hello result go mod download -reuse=hello.json -x -json vcs-test.golang.org/git/hello.git@latest -! stderr 'git fetch' +! stderr 'git( .*)* fetch' stdout '"Reuse": true' stdout '"Version": "v0.0.0-20170922010558-fc3a09f3dc5c"' stdout '"VCS": "git"' @@ -175,7 +175,7 @@ stdout '"Hash": "fc3a09f3dc5cfe0d7a743ea18f1f5226e68b3777"' # reuse go mod download vcstest/hello pseudoversion result go mod download -reuse=hellopseudo.json -x -json vcs-test.golang.org/git/hello.git@v0.0.0-20170922010558-fc3a09f3dc5c -! stderr 'git fetch' +! stderr 'git( .*)* fetch' stdout '"Reuse": true' stdout '"Version": "v0.0.0-20170922010558-fc3a09f3dc5c"' stdout '"VCS": "git"' @@ -186,7 +186,7 @@ stdout '"Hash": "fc3a09f3dc5cfe0d7a743ea18f1f5226e68b3777"' # reuse go mod download vcstest/hello@hash go mod download -reuse=hellohash.json -x -json vcs-test.golang.org/git/hello.git@fc3a09f3dc5c -! stderr 'git fetch' +! stderr 'git( .*)* fetch' stdout '"Reuse": true' stdout '"Query": "fc3a09f3dc5c"' stdout '"Version": "v0.0.0-20170922010558-fc3a09f3dc5c"' @@ -199,7 +199,7 @@ stdout '"Hash": "fc3a09f3dc5cfe0d7a743ea18f1f5226e68b3777"' # reuse go mod download vcstest/hello/v9 error result ! go mod download -reuse=hellov9.json -x -json vcs-test.golang.org/git/hello.git/v9@latest -! stderr 'git fetch' +! stderr 'git( .*)* fetch' stdout '"Reuse": true' stdout '"Error":.*no matching versions' ! stdout '"TagPrefix"' @@ -210,7 +210,7 @@ stdout '"Hash": "fc3a09f3dc5cfe0d7a743ea18f1f5226e68b3777"' # reuse go mod download vcstest/hello/sub/v9 error result ! go mod download -reuse=hellosubv9.json -x -json vcs-test.golang.org/git/hello.git/sub/v9@latest -! stderr 'git fetch' +! stderr 'git( .*)* fetch' stdout '"Reuse": true' stdout '"Error":.*no matching versions' stdout '"TagPrefix": "sub/"' @@ -221,7 +221,7 @@ stdout '"Hash": "fc3a09f3dc5cfe0d7a743ea18f1f5226e68b3777"' # reuse go mod download vcstest/hello@nonexist ! go mod download -reuse=hellononexist.json -x -json vcs-test.golang.org/git/hello.git@nonexist -! stderr 'git fetch' +! stderr 'git( .*)* fetch' stdout '"Reuse": true' stdout '"Version": "nonexist"' stdout '"Error":.*unknown revision nonexist' @@ -231,7 +231,7 @@ stdout '"RepoSum": "r1:c0/9JCZ25lxoBiK3[+]3BhACU4giH49flcJmBynJ[+]Jvmc="' # reuse go mod download vcstest/hello@1234567890123456789012345678901234567890 ! 
go mod download -reuse=hellononhash.json -x -json vcs-test.golang.org/git/hello.git@1234567890123456789012345678901234567890 -! stderr 'git fetch' +! stderr 'git( .*)* fetch' stdout '"Reuse": true' stdout '"Version": "1234567890123456789012345678901234567890"' stdout '"Error":.*unknown revision 1234567890123456789012345678901234567890' @@ -241,7 +241,7 @@ stdout '"RepoSum": "r1:c0/9JCZ25lxoBiK3[+]3BhACU4giH49flcJmBynJ[+]Jvmc="' # reuse go mod download vcstest/hello@v0.0.0-20220101120101-123456789abc ! go mod download -reuse=hellononpseudo.json -x -json vcs-test.golang.org/git/hello.git@v0.0.0-20220101120101-123456789abc -! stderr 'git fetch' +! stderr 'git( .*)* fetch' stdout '"Reuse": true' stdout '"Version": "v0.0.0-20220101120101-123456789abc"' stdout '"Error":.*unknown revision 123456789abc' @@ -251,7 +251,7 @@ stdout '"RepoSum": "r1:c0/9JCZ25lxoBiK3[+]3BhACU4giH49flcJmBynJ[+]Jvmc="' # reuse go mod download vcstest/tagtests result go mod download -reuse=tagtests.json -x -json vcs-test.golang.org/git/tagtests.git@latest -! stderr 'git fetch' +! stderr 'git( .*)* fetch' stdout '"Reuse": true' stdout '"Version": "v0.2.2"' stdout '"Query": "latest"' @@ -265,7 +265,7 @@ stdout '"Hash": "59356c8cd18c5fe9a598167d98a6843e52d57952"' # reuse go mod download vcstest/tagtests@v0.2.2 result go mod download -reuse=tagtestsv022.json -x -json vcs-test.golang.org/git/tagtests.git@v0.2.2 -! stderr 'git fetch' +! stderr 'git( .*)* fetch' stdout '"Reuse": true' stdout '"Version": "v0.2.2"' ! stdout '"Query":' @@ -279,7 +279,7 @@ stdout '"Hash": "59356c8cd18c5fe9a598167d98a6843e52d57952"' # reuse go mod download vcstest/tagtests@master result go mod download -reuse=tagtestsmaster.json -x -json vcs-test.golang.org/git/tagtests.git@master -! stderr 'git fetch' +! stderr 'git( .*)* fetch' stdout '"Reuse": true' stdout '"Version": "v0.2.3-0.20190509225625-c7818c24fa2f"' stdout '"Query": "master"' @@ -293,7 +293,7 @@ stdout '"Hash": "c7818c24fa2f3f714c67d0a6d3e411c85a518d1f"' # reuse go mod download vcstest/tagtests@master result again with all.json go mod download -reuse=all.json -x -json vcs-test.golang.org/git/tagtests.git@master -! stderr 'git fetch' +! stderr 'git( .*)* fetch' stdout '"Reuse": true' stdout '"Version": "v0.2.3-0.20190509225625-c7818c24fa2f"' stdout '"Query": "master"' @@ -307,7 +307,7 @@ stdout '"Hash": "c7818c24fa2f3f714c67d0a6d3e411c85a518d1f"' # go mod download vcstest/prefixtagtests result with json go mod download -reuse=prefixtagtests.json -x -json vcs-test.golang.org/git/prefixtagtests.git/sub@latest -! stderr 'git fetch' +! stderr 'git( .*)* fetch' stdout '"Version": "v0.0.10"' stdout '"Query": "latest"' stdout '"VCS": "git"' @@ -321,7 +321,7 @@ stdout '"Hash": "2b7c4692e12c109263cab51b416fcc835ddd7eae"' # reuse the bulk results with all.json ! go mod download -reuse=all.json -json vcs-test.golang.org/git/hello.git@latest vcs-test.golang.org/git/hello.git/v9@latest vcs-test.golang.org/git/hello.git/sub/v9@latest vcs-test.golang.org/git/tagtests.git@latest vcs-test.golang.org/git/tagtests.git@v0.2.2 vcs-test.golang.org/git/tagtests.git@master -! stderr 'git fetch' +! stderr 'git( .*)* fetch' stdout '"Reuse": true' ! stdout '"(Dir|Info|GoMod|Zip)"' @@ -329,7 +329,7 @@ stdout '"Reuse": true' cp tagtestsv022.json tagtestsv022badhash.json replace '57952' '56952XXX' tagtestsv022badhash.json go mod download -reuse=tagtestsv022badhash.json -x -json vcs-test.golang.org/git/tagtests.git@v0.2.2 -stderr 'git fetch' +stderr 'git( .*)* fetch' ! 
stdout '"Reuse": true' stdout '"Version": "v0.2.2"' ! stdout '"Query"' diff --git a/src/cmd/go/testdata/script/test_ppc64_linker_funcs.txt b/src/cmd/go/testdata/script/test_ppc64_linker_funcs.txt index 735b5dc..d789f89 100644 --- a/src/cmd/go/testdata/script/test_ppc64_linker_funcs.txt +++ b/src/cmd/go/testdata/script/test_ppc64_linker_funcs.txt @@ -14,6 +14,10 @@ go build -ldflags='-linkmode=internal' exec ./abitest stdout success +go build -buildmode=pie -o abitest.pie -ldflags='-linkmode=internal' +exec ./abitest.pie +stdout success + -- go.mod -- module abitest diff --git a/src/cmd/internal/moddeps/moddeps_test.go b/src/cmd/internal/moddeps/moddeps_test.go index 3d4c99e..8adc653 100644 --- a/src/cmd/internal/moddeps/moddeps_test.go +++ b/src/cmd/internal/moddeps/moddeps_test.go @@ -33,6 +33,8 @@ import ( // See issues 36852, 41409, and 43687. // (Also see golang.org/issue/27348.) func TestAllDependencies(t *testing.T) { + t.Skip("TODO(#65051): 1.22.2 contains unreleased changes from vendored modules") + goBin := testenv.GoToolPath(t) // Ensure that all packages imported within GOROOT diff --git a/src/cmd/internal/obj/ppc64/obj9.go b/src/cmd/internal/obj/ppc64/obj9.go index a3d392d..7e26118 100644 --- a/src/cmd/internal/obj/ppc64/obj9.go +++ b/src/cmd/internal/obj/ppc64/obj9.go @@ -37,6 +37,7 @@ import ( "internal/abi" "log" "math/bits" + "strings" ) // Test if this value can encoded as a mask for @@ -72,6 +73,22 @@ func encodePPC64RLDCMask(mask int64) (mb, me int) { return mb, me - 1 } +// Is this a symbol which should never have a TOC prologue generated? +// These are special functions which should not have a TOC regeneration +// prologue. +func isNOTOCfunc(name string) bool { + switch { + case name == "runtime.duffzero": + return true + case name == "runtime.duffcopy": + return true + case strings.HasPrefix(name, "runtime.elf_"): + return true + default: + return false + } +} + func progedit(ctxt *obj.Link, p *obj.Prog, newprog obj.ProgAlloc) { p.From.Class = 0 p.To.Class = 0 @@ -762,7 +779,7 @@ func preprocess(ctxt *obj.Link, cursym *obj.LSym, newprog obj.ProgAlloc) { q = p - if NeedTOCpointer(c.ctxt) && c.cursym.Name != "runtime.duffzero" && c.cursym.Name != "runtime.duffcopy" { + if NeedTOCpointer(c.ctxt) && !isNOTOCfunc(c.cursym.Name) { // When compiling Go into PIC, without PCrel support, all functions must start // with instructions to load the TOC pointer into r2: // diff --git a/src/cmd/link/internal/ppc64/asm.go b/src/cmd/link/internal/ppc64/asm.go index 09647d8..de5614e 100644 --- a/src/cmd/link/internal/ppc64/asm.go +++ b/src/cmd/link/internal/ppc64/asm.go @@ -474,24 +474,9 @@ func rewriteABIFuncReloc(ctxt *ld.Link, ldr *loader.Loader, tname string, r load r.SetAdd(int64((n - minReg) * offMul)) firstUse = !ldr.AttrReachable(ts) if firstUse { - ldr.SetAttrReachable(ts, true) // This function only becomes reachable now. It has been dropped from // the text section (it was unreachable until now), it needs included. - // - // Similarly, TOC regeneration should not happen for these functions, - // remove it from this save/restore function. - if ldr.AttrShared(ts) { - sb := ldr.MakeSymbolUpdater(ts) - sb.SetData(sb.Data()[8:]) - sb.SetSize(sb.Size() - 8) - relocs := sb.Relocs() - // Only one PCREL reloc to .TOC. should be present. 
- if relocs.Count() != 1 { - log.Fatalf("Unexpected number of relocs in %s\n", ldr.SymName(ts)) - } - sb.ResetRelocs() - - } + ldr.SetAttrReachable(ts, true) } return ts, firstUse } diff --git a/src/cmd/link/internal/riscv64/asm.go b/src/cmd/link/internal/riscv64/asm.go index d95de6c..6a4dd01 100644 --- a/src/cmd/link/internal/riscv64/asm.go +++ b/src/cmd/link/internal/riscv64/asm.go @@ -170,8 +170,8 @@ func genSymsLate(ctxt *ld.Link, ldr *loader.Loader) { relocs := ldr.Relocs(s) for ri := 0; ri < relocs.Count(); ri++ { r := relocs.At(ri) - if r.Type() != objabi.R_RISCV_PCREL_ITYPE && r.Type() != objabi.R_RISCV_PCREL_STYPE && - r.Type() != objabi.R_RISCV_TLS_IE { + if r.Type() != objabi.R_RISCV_CALL && r.Type() != objabi.R_RISCV_PCREL_ITYPE && + r.Type() != objabi.R_RISCV_PCREL_STYPE && r.Type() != objabi.R_RISCV_TLS_IE { continue } if r.Off() == 0 && ldr.SymType(s) == sym.STEXT { -- cgit v1.2.3
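
[Editor's note, appended after the patch; not part of the commit.] The writer.go hunk near the top of this patch carries the fix for go.dev/issue/65593, which its comment describes: the unified IR writer used to omit every non-labeled statement that follows a terminating statement, but a statement under a later label may still refer to declarations sitting in that "dead" region, so the fixed code only omits dead statements that come after the last label in the list. Below is a minimal sketch of the kind of program involved; the names f, v and dead are ours, chosen purely for illustration, and this is not the exact reproducer from the issue.

	package main

	// Everything after the return is unreachable, yet it must still be
	// written out intact: the statement under the label reads v, which is
	// declared between the terminating return and the label. Dropping the
	// declaration while keeping the labeled statement is exactly what the
	// patched writer avoids.
	func f() {
		return

		var v int // dead, but referenced under the label below
	dead:
		v++
		if v < 3 {
			goto dead
		}
	}

	func main() { f() }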