// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Lowering arithmetic
// Wasm registers are 64-bit; every narrow integer op (32/16/8) is performed
// on the full 64-bit value, as these rules show.
(Add(64|32|16|8|Ptr) ...) => (I64Add ...)
(Add(64|32)F ...) => (F(64|32)Add ...)

(Sub(64|32|16|8|Ptr) ...) => (I64Sub ...)
(Sub(64|32)F ...) => (F(64|32)Sub ...)

(Mul(64|32|16|8) ...) => (I64Mul ...)
(Mul(64|32)F ...) => (F(64|32)Mul ...)

// Division/modulus must see correctly extended operands, so narrow signed
// operands are sign-extended and narrow unsigned operands zero-extended first.
// The [false] aux on Div64/32/16 and Mod64/32/16 matches only the
// non-"divide-by-minus-one-needs-fixup" form.
(Div64 [false] x y) => (I64DivS x y)
(Div32 [false] x y) => (I64DivS (SignExt32to64 x) (SignExt32to64 y))
(Div16 [false] x y) => (I64DivS (SignExt16to64 x) (SignExt16to64 y))
(Div8 x y) => (I64DivS (SignExt8to64 x) (SignExt8to64 y))
(Div64u ...) => (I64DivU ...)
(Div32u x y) => (I64DivU (ZeroExt32to64 x) (ZeroExt32to64 y))
(Div16u x y) => (I64DivU (ZeroExt16to64 x) (ZeroExt16to64 y))
(Div8u x y) => (I64DivU (ZeroExt8to64 x) (ZeroExt8to64 y))
(Div(64|32)F ...) => (F(64|32)Div ...)

(Mod64 [false] x y) => (I64RemS x y)
(Mod32 [false] x y) => (I64RemS (SignExt32to64 x) (SignExt32to64 y))
(Mod16 [false] x y) => (I64RemS (SignExt16to64 x) (SignExt16to64 y))
(Mod8 x y) => (I64RemS (SignExt8to64 x) (SignExt8to64 y))
(Mod64u ...) => (I64RemU ...)
(Mod32u x y) => (I64RemU (ZeroExt32to64 x) (ZeroExt32to64 y))
(Mod16u x y) => (I64RemU (ZeroExt16to64 x) (ZeroExt16to64 y))
(Mod8u x y) => (I64RemU (ZeroExt8to64 x) (ZeroExt8to64 y))

(And(64|32|16|8|B) ...) => (I64And ...)

(Or(64|32|16|8|B) ...) => (I64Or ...)

(Xor(64|32|16|8) ...) => (I64Xor ...)

// Wasm has no integer negate/complement instructions:
// negate is 0 - x, complement is x ^ -1.
(Neg(64|32|16|8) x) => (I64Sub (I64Const [0]) x)
(Neg(64|32)F ...) => (F(64|32)Neg ...)

(Com(64|32|16|8) x) => (I64Xor x (I64Const [-1]))

(Not ...) => (I64Eqz ...)

// Lowering pointer arithmetic
(OffPtr ...) => (I64AddConst ...)
// Lowering extension
// It is unnecessary to extend loads: the narrow load ops already produce
// a correctly extended 64-bit value.
(SignExt32to64 x:(I64Load32S _ _)) => x
(SignExt16to(64|32) x:(I64Load16S _ _)) => x
(SignExt8to(64|32|16) x:(I64Load8S _ _)) => x
(ZeroExt32to64 x:(I64Load32U _ _)) => x
(ZeroExt16to(64|32) x:(I64Load16U _ _)) => x
(ZeroExt8to(64|32|16) x:(I64Load8U _ _)) => x
// Use the dedicated sign-extension instructions when the target supports
// the sign-ext feature (buildcfg.GOWASM.SignExt) ...
(SignExt32to64 x) && buildcfg.GOWASM.SignExt => (I64Extend32S x)
(SignExt8to(64|32|16) x) && buildcfg.GOWASM.SignExt => (I64Extend8S x)
(SignExt16to(64|32) x) && buildcfg.GOWASM.SignExt => (I64Extend16S x)
// ... otherwise sign-extend with a shift-left/arithmetic-shift-right pair.
(SignExt32to64 x) => (I64ShrS (I64Shl x (I64Const [32])) (I64Const [32]))
(SignExt16to(64|32) x) => (I64ShrS (I64Shl x (I64Const [48])) (I64Const [48]))
(SignExt8to(64|32|16) x) => (I64ShrS (I64Shl x (I64Const [56])) (I64Const [56]))
// Zero-extension is just a mask.
(ZeroExt32to64 x) => (I64And x (I64Const [0xffffffff]))
(ZeroExt16to(64|32) x) => (I64And x (I64Const [0xffff]))
(ZeroExt8to(64|32|16) x) => (I64And x (I64Const [0xff]))

// 0 if x == 0, -1 otherwise: arithmetic shift of (0 - x) by 63.
(Slicemask x) => (I64ShrS (I64Sub (I64Const [0]) x) (I64Const [63]))

// Lowering truncation
// Because we ignore the high parts, truncates are just copies.
(Trunc64to(32|16|8) ...) => (Copy ...)
(Trunc32to(16|8) ...) => (Copy ...)
(Trunc16to8 ...) => (Copy ...)

// Lowering float <=> int
(Cvt32to(64|32)F x) => (F(64|32)ConvertI64S (SignExt32to64 x))
(Cvt64to(64|32)F ...) => (F(64|32)ConvertI64S ...)
(Cvt32Uto(64|32)F x) => (F(64|32)ConvertI64U (ZeroExt32to64 x))
(Cvt64Uto(64|32)F ...) => (F(64|32)ConvertI64U ...)

// Float-to-int uses the non-trapping (saturating) truncation instructions.
(Cvt32Fto32 ...) => (I64TruncSatF32S ...)
(Cvt32Fto64 ...) => (I64TruncSatF32S ...)
(Cvt64Fto32 ...) => (I64TruncSatF64S ...)
(Cvt64Fto64 ...) => (I64TruncSatF64S ...)
(Cvt32Fto32U ...) => (I64TruncSatF32U ...)
(Cvt32Fto64U ...) => (I64TruncSatF32U ...)
(Cvt64Fto32U ...) => (I64TruncSatF64U ...)
(Cvt64Fto64U ...) => (I64TruncSatF64U ...)

(Cvt32Fto64F ...) => (F64PromoteF32 ...)
(Cvt64Fto32F ...) => (F32DemoteF64 ...)

(CvtBoolToUint8 ...) => (Copy ...)

(Round32F ...) => (Copy ...)
(Round64F ...) => (Copy ...)

// Lowering shifts
// Unsigned shifts need to return 0 if shift amount is >= width of shifted value.

(Lsh64x64 x y) && shiftIsBounded(v) => (I64Shl x y)
(Lsh64x64 x (I64Const [c])) && uint64(c) < 64 => (I64Shl x (I64Const [c]))
(Lsh64x64 x (I64Const [c])) && uint64(c) >= 64 => (I64Const [0])
(Lsh64x64 x y) => (Select (I64Shl x y) (I64Const [0]) (I64LtU y (I64Const [64])))
(Lsh64x(32|16|8) [c] x y) => (Lsh64x64 [c] x (ZeroExt(32|16|8)to64 y))

// Narrow left shifts reuse the 64-bit rules; high bits are ignored anyway.
(Lsh32x64 ...) => (Lsh64x64 ...)
(Lsh32x(32|16|8) [c] x y) => (Lsh64x64 [c] x (ZeroExt(32|16|8)to64 y))

(Lsh16x64 ...) => (Lsh64x64 ...)
(Lsh16x(32|16|8) [c] x y) => (Lsh64x64 [c] x (ZeroExt(32|16|8)to64 y))

(Lsh8x64 ...) => (Lsh64x64 ...)
(Lsh8x(32|16|8) [c] x y) => (Lsh64x64 [c] x (ZeroExt(32|16|8)to64 y))

(Rsh64Ux64 x y) && shiftIsBounded(v) => (I64ShrU x y)
(Rsh64Ux64 x (I64Const [c])) && uint64(c) < 64 => (I64ShrU x (I64Const [c]))
(Rsh64Ux64 x (I64Const [c])) && uint64(c) >= 64 => (I64Const [0])
(Rsh64Ux64 x y) => (Select (I64ShrU x y) (I64Const [0]) (I64LtU y (I64Const [64])))
(Rsh64Ux(32|16|8) [c] x y) => (Rsh64Ux64 [c] x (ZeroExt(32|16|8)to64 y))

// Narrow unsigned right shifts must zero-extend the value first so the
// vacated high bits don't leak into the result.
(Rsh32Ux64 [c] x y) => (Rsh64Ux64 [c] (ZeroExt32to64 x) y)
(Rsh32Ux(32|16|8) [c] x y) => (Rsh64Ux64 [c] (ZeroExt32to64 x) (ZeroExt(32|16|8)to64 y))

(Rsh16Ux64 [c] x y) => (Rsh64Ux64 [c] (ZeroExt16to64 x) y)
(Rsh16Ux(32|16|8) [c] x y) => (Rsh64Ux64 [c] (ZeroExt16to64 x) (ZeroExt(32|16|8)to64 y))

(Rsh8Ux64 [c] x y) => (Rsh64Ux64 [c] (ZeroExt8to64 x) y)
(Rsh8Ux(32|16|8) [c] x y) => (Rsh64Ux64 [c] (ZeroExt8to64 x) (ZeroExt(32|16|8)to64 y))

// Signed right shift needs to return 0/-1 if shift amount is >= width of shifted value.
// We implement this by setting the shift value to (width - 1) if the shift value is >= width.

(Rsh64x64 x y) && shiftIsBounded(v) => (I64ShrS x y)
(Rsh64x64 x (I64Const [c])) && uint64(c) < 64 => (I64ShrS x (I64Const [c]))
(Rsh64x64 x (I64Const [c])) && uint64(c) >= 64 => (I64ShrS x (I64Const [63]))
// Clamp the shift amount to 63 when it is out of range (see note above).
(Rsh64x64 x y) => (I64ShrS x (Select <typ.Int64> y (I64Const [63]) (I64LtU y (I64Const [64]))))
(Rsh64x(32|16|8) [c] x y) => (Rsh64x64 [c] x (ZeroExt(32|16|8)to64 y))

// Narrow signed right shifts sign-extend the value first so the sign bit
// is propagated from the correct position.
(Rsh32x64 [c] x y) => (Rsh64x64 [c] (SignExt32to64 x) y)
(Rsh32x(32|16|8) [c] x y) => (Rsh64x64 [c] (SignExt32to64 x) (ZeroExt(32|16|8)to64 y))

(Rsh16x64 [c] x y) => (Rsh64x64 [c] (SignExt16to64 x) y)
(Rsh16x(32|16|8) [c] x y) => (Rsh64x64 [c] (SignExt16to64 x) (ZeroExt(32|16|8)to64 y))

(Rsh8x64 [c] x y) => (Rsh64x64 [c] (SignExt8to64 x) y)
(Rsh8x(32|16|8) [c] x y) => (Rsh64x64 [c] (SignExt8to64 x) (ZeroExt(32|16|8)to64 y))

// Lowering rotates
// 8- and 16-bit rotates have no Wasm instruction; build them from a
// masked left shift OR'd with the complementary right shift.
(RotateLeft8 <t> x (I64Const [c])) => (Or8 (Lsh8x64 <t> x (I64Const [c&7])) (Rsh8Ux64 <t> x (I64Const [-c&7])))
(RotateLeft16 <t> x (I64Const [c])) => (Or16 (Lsh16x64 <t> x (I64Const [c&15])) (Rsh16Ux64 <t> x (I64Const [-c&15])))
(RotateLeft32 ...) => (I32Rotl ...)
(RotateLeft64 ...) => (I64Rotl ...)

// Lowering comparisons
// Narrow operands are extended (sign for signed compares, zero for
// unsigned and equality compares) before the 64-bit comparison.
(Less64 ...) => (I64LtS ...)
(Less32 x y) => (I64LtS (SignExt32to64 x) (SignExt32to64 y))
(Less16 x y) => (I64LtS (SignExt16to64 x) (SignExt16to64 y))
(Less8 x y) => (I64LtS (SignExt8to64 x) (SignExt8to64 y))
(Less64U ...) => (I64LtU ...)
(Less32U x y) => (I64LtU (ZeroExt32to64 x) (ZeroExt32to64 y))
(Less16U x y) => (I64LtU (ZeroExt16to64 x) (ZeroExt16to64 y))
(Less8U x y) => (I64LtU (ZeroExt8to64 x) (ZeroExt8to64 y))
(Less(64|32)F ...) => (F(64|32)Lt ...)

(Leq64 ...) => (I64LeS ...)
(Leq32 x y) => (I64LeS (SignExt32to64 x) (SignExt32to64 y))
(Leq16 x y) => (I64LeS (SignExt16to64 x) (SignExt16to64 y))
(Leq8 x y) => (I64LeS (SignExt8to64 x) (SignExt8to64 y))
(Leq64U ...) => (I64LeU ...)
(Leq32U x y) => (I64LeU (ZeroExt32to64 x) (ZeroExt32to64 y))
(Leq16U x y) => (I64LeU (ZeroExt16to64 x) (ZeroExt16to64 y))
(Leq8U x y) => (I64LeU (ZeroExt8to64 x) (ZeroExt8to64 y))
(Leq(64|32)F ...) => (F(64|32)Le ...)

(Eq64 ...) => (I64Eq ...)
(Eq32 x y) => (I64Eq (ZeroExt32to64 x) (ZeroExt32to64 y))
(Eq16 x y) => (I64Eq (ZeroExt16to64 x) (ZeroExt16to64 y))
(Eq8 x y) => (I64Eq (ZeroExt8to64 x) (ZeroExt8to64 y))
(EqB ...) => (I64Eq ...)
(EqPtr ...) => (I64Eq ...)
(Eq(64|32)F ...) => (F(64|32)Eq ...)

(Neq64 ...) => (I64Ne ...)
(Neq32 x y) => (I64Ne (ZeroExt32to64 x) (ZeroExt32to64 y))
(Neq16 x y) => (I64Ne (ZeroExt16to64 x) (ZeroExt16to64 y))
(Neq8 x y) => (I64Ne (ZeroExt8to64 x) (ZeroExt8to64 y))
(NeqB ...) => (I64Ne ...)
(NeqPtr ...) => (I64Ne ...)
(Neq(64|32)F ...) => (F(64|32)Ne ...)

// Lowering loads
// Select the load op by float-ness, then size, then signedness.
(Load <t> ptr mem) && is32BitFloat(t) => (F32Load ptr mem)
(Load <t> ptr mem) && is64BitFloat(t) => (F64Load ptr mem)
(Load <t> ptr mem) && t.Size() == 8 => (I64Load ptr mem)
(Load <t> ptr mem) && t.Size() == 4 && !t.IsSigned() => (I64Load32U ptr mem)
(Load <t> ptr mem) && t.Size() == 4 && t.IsSigned() => (I64Load32S ptr mem)
(Load <t> ptr mem) && t.Size() == 2 && !t.IsSigned() => (I64Load16U ptr mem)
(Load <t> ptr mem) && t.Size() == 2 && t.IsSigned() => (I64Load16S ptr mem)
(Load <t> ptr mem) && t.Size() == 1 && !t.IsSigned() => (I64Load8U ptr mem)
(Load <t> ptr mem) && t.Size() == 1 && t.IsSigned() => (I64Load8S ptr mem)

// Lowering stores
// Stores only need the size (and float-ness), not the signedness.
(Store {t} ptr val mem) && is64BitFloat(t) => (F64Store ptr val mem)
(Store {t} ptr val mem) && is32BitFloat(t) => (F32Store ptr val mem)
(Store {t} ptr val mem) && t.Size() == 8 => (I64Store ptr val mem)
(Store {t} ptr val mem) && t.Size() == 4 => (I64Store32 ptr val mem)
(Store {t} ptr val mem) && t.Size() == 2 => (I64Store16 ptr val mem)
(Store {t} ptr val mem) && t.Size() == 1 => (I64Store8 ptr val mem)

// Lowering moves
(Move [0] _ _ mem) => mem
(Move [1] dst src mem) => (I64Store8 dst (I64Load8U src mem) mem)
(Move [2] dst src mem) => (I64Store16 dst (I64Load16U src mem) mem)
(Move [4] dst src mem) => (I64Store32 dst (I64Load32U src mem) mem)
(Move [8] dst src mem) => (I64Store dst (I64Load src mem) mem)
(Move [16] dst src mem) =>
	(I64Store [8] dst (I64Load [8] src mem)
		(I64Store dst (I64Load src mem) mem))
// Odd sizes are handled with two (possibly overlapping) stores, e.g. a
// 7-byte move is a 4-byte store at offset 3 over a 4-byte store at 0.
(Move [3] dst src mem) =>
	(I64Store8 [2] dst (I64Load8U [2] src mem)
		(I64Store16 dst (I64Load16U src mem) mem))
(Move [5] dst src mem) =>
	(I64Store8 [4] dst (I64Load8U [4] src mem)
		(I64Store32 dst (I64Load32U src mem) mem))
(Move [6] dst src mem) =>
	(I64Store16 [4] dst (I64Load16U [4] src mem)
		(I64Store32 dst (I64Load32U src mem) mem))
(Move [7] dst src mem) =>
	(I64Store32 [3] dst (I64Load32U [3] src mem)
		(I64Store32 dst (I64Load32U src mem) mem))
(Move [s] dst src mem) && s > 8 && s < 16 =>
	(I64Store [s-8] dst (I64Load [s-8] src mem)
		(I64Store dst (I64Load src mem) mem))

// Large copying uses helper.
(Move [s] dst src mem) && logLargeCopy(v, s) =>
	(LoweredMove [s] dst src mem)

// Lowering Zero instructions
(Zero [0] _ mem) => mem
(Zero [1] destptr mem) => (I64Store8 destptr (I64Const [0]) mem)
(Zero [2] destptr mem) => (I64Store16 destptr (I64Const [0]) mem)
(Zero [4] destptr mem) => (I64Store32 destptr (I64Const [0]) mem)
(Zero [8] destptr mem) => (I64Store destptr (I64Const [0]) mem)

// Odd sizes: two (possibly overlapping) zero stores, as for Move.
(Zero [3] destptr mem) =>
	(I64Store8 [2] destptr (I64Const [0])
		(I64Store16 destptr (I64Const [0]) mem))
(Zero [5] destptr mem) =>
	(I64Store8 [4] destptr (I64Const [0])
		(I64Store32 destptr (I64Const [0]) mem))
(Zero [6] destptr mem) =>
	(I64Store16 [4] destptr (I64Const [0])
		(I64Store32 destptr (I64Const [0]) mem))
(Zero [7] destptr mem) =>
	(I64Store32 [3] destptr (I64Const [0])
		(I64Store32 destptr (I64Const [0]) mem))

// Strip off any fractional word zeroing.
(Zero [s] destptr mem) && s%8 != 0 && s > 8 && s < 32 =>
	(Zero [s-s%8] (OffPtr <destptr.Type> destptr [s%8])
		(I64Store destptr (I64Const [0]) mem))

// Zero small numbers of words directly.
(Zero [16] destptr mem) =>
	(I64Store [8] destptr (I64Const [0])
		(I64Store destptr (I64Const [0]) mem))
(Zero [24] destptr mem) =>
	(I64Store [16] destptr (I64Const [0])
		(I64Store [8] destptr (I64Const [0])
			(I64Store destptr (I64Const [0]) mem)))
(Zero [32] destptr mem) =>
	(I64Store [24] destptr (I64Const [0])
		(I64Store [16] destptr (I64Const [0])
			(I64Store [8] destptr (I64Const [0])
				(I64Store destptr (I64Const [0]) mem))))

// Large zeroing uses helper.
(Zero [s] destptr mem) =>
	(LoweredZero [s] destptr mem)

// Lowering constants
// All integer constants are materialized as 64-bit constants.
(Const64 ...) => (I64Const ...)
(Const(32|16|8) [c]) => (I64Const [int64(c)])
(Const(64|32)F ...) => (F(64|32)Const ...)
(ConstNil) => (I64Const [0])
(ConstBool [c]) => (I64Const [b2i(c)])

// Lowering calls
(StaticCall ...) => (LoweredStaticCall ...)
(ClosureCall ...) => (LoweredClosureCall ...)
(InterCall ...) => (LoweredInterCall ...)
(TailCall ...) => (LoweredTailCall ...)

// Miscellaneous
(Convert ...) => (LoweredConvert ...)
// Double-Eqz normalizes "p != 0" to a boolean 0/1.
(IsNonNil p) => (I64Eqz (I64Eqz p))
(IsInBounds ...) => (I64LtU ...)
(IsSliceInBounds ...) => (I64LeU ...)
(NilCheck ...) => (LoweredNilCheck ...)
(GetClosurePtr ...) => (LoweredGetClosurePtr ...)
(GetCallerPC ...) => (LoweredGetCallerPC ...)
(GetCallerSP ...) => (LoweredGetCallerSP ...)
(Addr {sym} base) => (LoweredAddr {sym} [0] base)
(LocalAddr <t> {sym} base mem) && t.Elem().HasPointers() => (LoweredAddr {sym} (SPanchored base mem))
(LocalAddr <t> {sym} base _) && !t.Elem().HasPointers() => (LoweredAddr {sym} base)

// Write barrier.
(WB ...) => (LoweredWB ...)

// --- Intrinsics ---
(Sqrt ...) => (F64Sqrt ...)
(Trunc ...) => (F64Trunc ...)
(Ceil ...) => (F64Ceil ...)
(Floor ...) => (F64Floor ...)
(RoundToEven ...) => (F64Nearest ...)
(Abs ...) => (F64Abs ...)
(Copysign ...) => (F64Copysign ...)

(Sqrt32 ...) => (F32Sqrt ...)

(Ctz64 ...) => (I64Ctz ...)
// Narrow Ctz: OR in a sentinel bit just above the value's width so the
// count is capped at the width when x is zero.
(Ctz32 x) => (I64Ctz (I64Or x (I64Const [0x100000000])))
(Ctz16 x) => (I64Ctz (I64Or x (I64Const [0x10000])))
(Ctz8 x) => (I64Ctz (I64Or x (I64Const [0x100])))

(Ctz(64|32|16|8)NonZero ...) => (I64Ctz ...)

(BitLen64 x) => (I64Sub (I64Const [64]) (I64Clz x))

(PopCount64 ...) => (I64Popcnt ...)
(PopCount32 x) => (I64Popcnt (ZeroExt32to64 x))
(PopCount16 x) => (I64Popcnt (ZeroExt16to64 x))
(PopCount8 x) => (I64Popcnt (ZeroExt8to64 x))

(CondSelect ...) => (Select ...)

// --- Optimizations ---
// Constant folding.
(I64Add (I64Const [x]) (I64Const [y])) => (I64Const [x + y])
(I64Mul (I64Const [x]) (I64Const [y])) => (I64Const [x * y])
(I64And (I64Const [x]) (I64Const [y])) => (I64Const [x & y])
(I64Or (I64Const [x]) (I64Const [y])) => (I64Const [x | y])
(I64Xor (I64Const [x]) (I64Const [y])) => (I64Const [x ^ y])
// NOTE(review): F64Add folds unconditionally, so +Inf + -Inf would fold to
// NaN here, while F64Mul below guards with !math.IsNaN — confirm whether
// F64Add needs the same guard.
(F64Add (F64Const [x]) (F64Const [y])) => (F64Const [x + y])
(F64Mul (F64Const [x]) (F64Const [y])) && !math.IsNaN(x * y) => (F64Const [x * y])
(I64Eq (I64Const [x]) (I64Const [y])) && x == y => (I64Const [1])
(I64Eq (I64Const [x]) (I64Const [y])) && x != y => (I64Const [0])
(I64Ne (I64Const [x]) (I64Const [y])) && x == y => (I64Const [0])
(I64Ne (I64Const [x]) (I64Const [y])) && x != y => (I64Const [1])

(I64Shl (I64Const [x]) (I64Const [y])) => (I64Const [x << uint64(y)])
(I64ShrU (I64Const [x]) (I64Const [y])) => (I64Const [int64(uint64(x) >> uint64(y))])
(I64ShrS (I64Const [x]) (I64Const [y])) => (I64Const [x >> uint64(y)])

// TODO: declare these operations as commutative and get rid of these rules?
// Canonicalize the constant operand to the right-hand side.
(I64Add (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64Add y (I64Const [x]))
(I64Mul (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64Mul y (I64Const [x]))
(I64And (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64And y (I64Const [x]))
(I64Or (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64Or y (I64Const [x]))
(I64Xor (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64Xor y (I64Const [x]))
(F64Add (F64Const [x]) y) && y.Op != OpWasmF64Const => (F64Add y (F64Const [x]))
(F64Mul (F64Const [x]) y) && y.Op != OpWasmF64Const => (F64Mul y (F64Const [x]))
(I64Eq (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64Eq y (I64Const [x]))
(I64Ne (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64Ne y (I64Const [x]))

// Comparisons against 0/1 reduce to (possibly doubled) I64Eqz.
(I64Eq x (I64Const [0])) => (I64Eqz x)
(I64LtU (I64Const [0]) x) => (I64Eqz (I64Eqz x))
(I64LeU x (I64Const [0])) => (I64Eqz x)
(I64LtU x (I64Const [1])) => (I64Eqz x)
(I64LeU (I64Const [1]) x) => (I64Eqz (I64Eqz x))
(I64Ne x (I64Const [0])) => (I64Eqz (I64Eqz x))

(I64Add x (I64Const <t> [y])) && !t.IsPtr() => (I64AddConst [y] x)
(I64AddConst [0] x) => x
// Three Eqz collapse to one (Eqz output is already a 0/1 boolean).
(I64Eqz (I64Eqz (I64Eqz x))) => (I64Eqz x)

// folding offset into load/store
((I64Load|I64Load32U|I64Load32S|I64Load16U|I64Load16S|I64Load8U|I64Load8S) [off] (I64AddConst [off2] ptr) mem)
	&& isU32Bit(off+off2) =>
	((I64Load|I64Load32U|I64Load32S|I64Load16U|I64Load16S|I64Load8U|I64Load8S) [off+off2] ptr mem)

((I64Store|I64Store32|I64Store16|I64Store8) [off] (I64AddConst [off2] ptr) val mem)
	&& isU32Bit(off+off2) =>
	((I64Store|I64Store32|I64Store16|I64Store8) [off+off2] ptr val mem)

// folding offset into address
(I64AddConst [off] (LoweredAddr {sym} [off2] base)) && isU32Bit(off+int64(off2)) =>
	(LoweredAddr {sym} [int32(off)+off2] base)
(I64AddConst [off] x:(SP)) && isU32Bit(off) => (LoweredAddr [int32(off)] x) // so it is rematerializeable

// transforming readonly globals into constants
(I64Load [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(read64(sym, off+int64(off2), config.ctxt.Arch.ByteOrder))])
(I64Load32U [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(read32(sym, off+int64(off2), config.ctxt.Arch.ByteOrder))])
(I64Load16U [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(read16(sym, off+int64(off2), config.ctxt.Arch.ByteOrder))])
(I64Load8U [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(read8(sym, off+int64(off2)))])