Linux vmi284606.contaboserver.net 4.15.0-213-generic #224-Ubuntu SMP Mon Jun 19 13:30:12 UTC 2023 x86_64
Apache/2.4.57 (Ubuntu)
: 167.86.127.34 | : 216.73.217.31
Can't Read [ /etc/named.conf ]
7.2.24-0ubuntu0.18.04.17
root
Terminal
AUTO ROOT
Adminer
Backdoor Destroyer
Linux Exploit
Lock Shell
Lock File
Create User
CREATE RDP
PHP Mailer
BACKCONNECT
UNLOCK SHELL
HASH IDENTIFIER
README
+ Create Folder
+ Create File
/
usr /
local /
go /
src /
cmd /
compile /
internal /
ssa /
gen /
[ HOME SHELL ]
Name
Size
Permission
Action
386.rules
59.58
KB
-rw-r--r--
386Ops.go
45.59
KB
-rw-r--r--
386splitload.rules
447
B
-rw-r--r--
AMD64.rules
122.33
KB
-rw-r--r--
AMD64Ops.go
70.81
KB
-rw-r--r--
AMD64splitload.rules
797
B
-rw-r--r--
ARM.rules
93.49
KB
-rw-r--r--
ARM64.rules
154.73
KB
-rw-r--r--
ARM64Ops.go
48.32
KB
-rw-r--r--
ARMOps.go
40.29
KB
-rw-r--r--
MIPS.rules
35.61
KB
-rw-r--r--
MIPS64.rules
37.25
KB
-rw-r--r--
MIPS64Ops.go
24.05
KB
-rw-r--r--
MIPSOps.go
23.03
KB
-rw-r--r--
PPC64.rules
68.23
KB
-rw-r--r--
PPC64Ops.go
36.2
KB
-rw-r--r--
README
287
B
-rw-r--r--
RISCV64.rules
22.52
KB
-rw-r--r--
RISCV64Ops.go
19.61
KB
-rw-r--r--
S390X.rules
100.46
KB
-rw-r--r--
S390XOps.go
49.69
KB
-rw-r--r--
Wasm.rules
17.34
KB
-rw-r--r--
WasmOps.go
17.45
KB
-rw-r--r--
dec.rules
2.63
KB
-rw-r--r--
dec64.rules
14.59
KB
-rw-r--r--
dec64Ops.go
383
B
-rw-r--r--
decArgs.rules
2.12
KB
-rw-r--r--
decArgsOps.go
393
B
-rw-r--r--
decOps.go
373
B
-rw-r--r--
generic.rules
104.82
KB
-rw-r--r--
genericOps.go
33.98
KB
-rw-r--r--
main.go
15.43
KB
-rw-r--r--
rulegen.go
37.36
KB
-rw-r--r--
Delete
Unzip
Zip
${this.title}
Close
Code Editor : Wasm.rules
// Copyright 2018 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // Lowering arithmetic (Add(64|32|16|8|Ptr) x y) -> (I64Add x y) (Add(64|32)F x y) -> (F(64|32)Add x y) (Sub(64|32|16|8|Ptr) x y) -> (I64Sub x y) (Sub(64|32)F x y) -> (F(64|32)Sub x y) (Mul(64|32|16|8) x y) -> (I64Mul x y) (Mul(64|32)F x y) -> (F(64|32)Mul x y) (Div64 x y) -> (I64DivS x y) (Div64u x y) -> (I64DivU x y) (Div32 x y) -> (I64DivS (SignExt32to64 x) (SignExt32to64 y)) (Div32u x y) -> (I64DivU (ZeroExt32to64 x) (ZeroExt32to64 y)) (Div16 x y) -> (I64DivS (SignExt16to64 x) (SignExt16to64 y)) (Div16u x y) -> (I64DivU (ZeroExt16to64 x) (ZeroExt16to64 y)) (Div8 x y) -> (I64DivS (SignExt8to64 x) (SignExt8to64 y)) (Div8u x y) -> (I64DivU (ZeroExt8to64 x) (ZeroExt8to64 y)) (Div(64|32)F x y) -> (F(64|32)Div x y) (Mod64 x y) -> (I64RemS x y) (Mod64u x y) -> (I64RemU x y) (Mod32 x y) -> (I64RemS (SignExt32to64 x) (SignExt32to64 y)) (Mod32u x y) -> (I64RemU (ZeroExt32to64 x) (ZeroExt32to64 y)) (Mod16 x y) -> (I64RemS (SignExt16to64 x) (SignExt16to64 y)) (Mod16u x y) -> (I64RemU (ZeroExt16to64 x) (ZeroExt16to64 y)) (Mod8 x y) -> (I64RemS (SignExt8to64 x) (SignExt8to64 y)) (Mod8u x y) -> (I64RemU (ZeroExt8to64 x) (ZeroExt8to64 y)) (And(64|32|16|8|B) x y) -> (I64And x y) (Or(64|32|16|8|B) x y) -> (I64Or x y) (Xor(64|32|16|8) x y) -> (I64Xor x y) (Neg(64|32|16|8) x) -> (I64Sub (I64Const [0]) x) (Neg(64|32)F x) -> (F(64|32)Neg x) (Com(64|32|16|8) x) -> (I64Xor x (I64Const [-1])) (Not x) -> (I64Eqz x) // Lowering pointer arithmetic (OffPtr [off] ptr) -> (I64AddConst [off] ptr) // Lowering extension // It is unnecessary to extend loads (SignExt32to64 x:(I64Load32S _ _)) -> x (SignExt16to(64|32) x:(I64Load16S _ _)) -> x (SignExt8to(64|32|16) x:(I64Load8S _ _)) -> x (ZeroExt32to64 x:(I64Load32U _ _)) -> x (ZeroExt16to(64|32) x:(I64Load16U _ _)) -> x (ZeroExt8to(64|32|16) x:(I64Load8U _ _)) -> x (SignExt32to64 x) && 
objabi.GOWASM.SignExt -> (I64Extend32S x) (SignExt8to(64|32|16) x) && objabi.GOWASM.SignExt -> (I64Extend8S x) (SignExt16to(64|32) x) && objabi.GOWASM.SignExt -> (I64Extend16S x) (SignExt32to64 x) -> (I64ShrS (I64Shl x (I64Const [32])) (I64Const [32])) (SignExt16to(64|32) x) -> (I64ShrS (I64Shl x (I64Const [48])) (I64Const [48])) (SignExt8to(64|32|16) x) -> (I64ShrS (I64Shl x (I64Const [56])) (I64Const [56])) (ZeroExt32to64 x) -> (I64And x (I64Const [0xffffffff])) (ZeroExt16to(64|32) x) -> (I64And x (I64Const [0xffff])) (ZeroExt8to(64|32|16) x) -> (I64And x (I64Const [0xff])) (Slicemask x) -> (I64ShrS (I64Sub (I64Const [0]) x) (I64Const [63])) // Lowering truncation // Because we ignore the high parts, truncates are just copies. (Trunc64to(32|16|8) x) -> x (Trunc32to(16|8) x) -> x (Trunc16to8 x) -> x // Lowering float <-> int (Cvt32to(64|32)F x) -> (F(64|32)ConvertI64S (SignExt32to64 x)) (Cvt64to(64|32)F x) -> (F(64|32)ConvertI64S x) (Cvt32Uto(64|32)F x) -> (F(64|32)ConvertI64U (ZeroExt32to64 x)) (Cvt64Uto(64|32)F x) -> (F(64|32)ConvertI64U x) (Cvt32Fto32 x) -> (I64TruncSatF32S x) (Cvt32Fto64 x) -> (I64TruncSatF32S x) (Cvt64Fto32 x) -> (I64TruncSatF64S x) (Cvt64Fto64 x) -> (I64TruncSatF64S x) (Cvt32Fto32U x) -> (I64TruncSatF32U x) (Cvt32Fto64U x) -> (I64TruncSatF32U x) (Cvt64Fto32U x) -> (I64TruncSatF64U x) (Cvt64Fto64U x) -> (I64TruncSatF64U x) (Cvt32Fto64F x) -> (F64PromoteF32 x) (Cvt64Fto32F x) -> (F32DemoteF64 x) (Round32F x) -> x (Round64F x) -> x // Lowering shifts // Unsigned shifts need to return 0 if shift amount is >= width of shifted value. 
(Lsh64x64 x y) && shiftIsBounded(v) -> (I64Shl x y) (Lsh64x64 x (I64Const [c])) && uint64(c) < 64 -> (I64Shl x (I64Const [c])) (Lsh64x64 x (I64Const [c])) && uint64(c) >= 64 -> (I64Const [0]) (Lsh64x64 x y) -> (Select (I64Shl x y) (I64Const [0]) (I64LtU y (I64Const [64]))) (Lsh64x(32|16|8) x y) -> (Lsh64x64 x (ZeroExt(32|16|8)to64 y)) (Lsh32x64 x y) -> (Lsh64x64 x y) (Lsh32x(32|16|8) x y) -> (Lsh64x64 x (ZeroExt(32|16|8)to64 y)) (Lsh16x64 x y) -> (Lsh64x64 x y) (Lsh16x(32|16|8) x y) -> (Lsh64x64 x (ZeroExt(32|16|8)to64 y)) (Lsh8x64 x y) -> (Lsh64x64 x y) (Lsh8x(32|16|8) x y) -> (Lsh64x64 x (ZeroExt(32|16|8)to64 y)) (Rsh64Ux64 x y) && shiftIsBounded(v) -> (I64ShrU x y) (Rsh64Ux64 x (I64Const [c])) && uint64(c) < 64 -> (I64ShrU x (I64Const [c])) (Rsh64Ux64 x (I64Const [c])) && uint64(c) >= 64 -> (I64Const [0]) (Rsh64Ux64 x y) -> (Select (I64ShrU x y) (I64Const [0]) (I64LtU y (I64Const [64]))) (Rsh64Ux(32|16|8) x y) -> (Rsh64Ux64 x (ZeroExt(32|16|8)to64 y)) (Rsh32Ux64 x y) -> (Rsh64Ux64 (ZeroExt32to64 x) y) (Rsh32Ux(32|16|8) x y) -> (Rsh64Ux64 (ZeroExt32to64 x) (ZeroExt(32|16|8)to64 y)) (Rsh16Ux64 x y) -> (Rsh64Ux64 (ZeroExt16to64 x) y) (Rsh16Ux(32|16|8) x y) -> (Rsh64Ux64 (ZeroExt16to64 x) (ZeroExt(32|16|8)to64 y)) (Rsh8Ux64 x y) -> (Rsh64Ux64 (ZeroExt8to64 x) y) (Rsh8Ux(32|16|8) x y) -> (Rsh64Ux64 (ZeroExt8to64 x) (ZeroExt(32|16|8)to64 y)) // Signed right shift needs to return 0/-1 if shift amount is >= width of shifted value. // We implement this by setting the shift value to (width - 1) if the shift value is >= width. 
(Rsh64x64 x y) && shiftIsBounded(v) -> (I64ShrS x y) (Rsh64x64 x (I64Const [c])) && uint64(c) < 64 -> (I64ShrS x (I64Const [c])) (Rsh64x64 x (I64Const [c])) && uint64(c) >= 64 -> (I64ShrS x (I64Const [63])) (Rsh64x64 x y) -> (I64ShrS x (Select <typ.Int64> y (I64Const [63]) (I64LtU y (I64Const [64])))) (Rsh64x(32|16|8) x y) -> (Rsh64x64 x (ZeroExt(32|16|8)to64 y)) (Rsh32x64 x y) -> (Rsh64x64 (SignExt32to64 x) y) (Rsh32x(32|16|8) x y) -> (Rsh64x64 (SignExt32to64 x) (ZeroExt(32|16|8)to64 y)) (Rsh16x64 x y) -> (Rsh64x64 (SignExt16to64 x) y) (Rsh16x(32|16|8) x y) -> (Rsh64x64 (SignExt16to64 x) (ZeroExt(32|16|8)to64 y)) (Rsh8x64 x y) -> (Rsh64x64 (SignExt8to64 x) y) (Rsh8x(32|16|8) x y) -> (Rsh64x64 (SignExt8to64 x) (ZeroExt(32|16|8)to64 y)) // Lowering rotates (RotateLeft8 <t> x (I64Const [c])) -> (Or8 (Lsh8x64 <t> x (I64Const [c&7])) (Rsh8Ux64 <t> x (I64Const [-c&7]))) (RotateLeft16 <t> x (I64Const [c])) -> (Or16 (Lsh16x64 <t> x (I64Const [c&15])) (Rsh16Ux64 <t> x (I64Const [-c&15]))) (RotateLeft32 x y) -> (I32Rotl x y) (RotateLeft64 x y) -> (I64Rotl x y) // Lowering comparisons (Less64 x y) -> (I64LtS x y) (Less32 x y) -> (I64LtS (SignExt32to64 x) (SignExt32to64 y)) (Less16 x y) -> (I64LtS (SignExt16to64 x) (SignExt16to64 y)) (Less8 x y) -> (I64LtS (SignExt8to64 x) (SignExt8to64 y)) (Less64U x y) -> (I64LtU x y) (Less32U x y) -> (I64LtU (ZeroExt32to64 x) (ZeroExt32to64 y)) (Less16U x y) -> (I64LtU (ZeroExt16to64 x) (ZeroExt16to64 y)) (Less8U x y) -> (I64LtU (ZeroExt8to64 x) (ZeroExt8to64 y)) (Less(64|32)F x y) -> (F(64|32)Lt x y) (Leq64 x y) -> (I64LeS x y) (Leq32 x y) -> (I64LeS (SignExt32to64 x) (SignExt32to64 y)) (Leq16 x y) -> (I64LeS (SignExt16to64 x) (SignExt16to64 y)) (Leq8 x y) -> (I64LeS (SignExt8to64 x) (SignExt8to64 y)) (Leq64U x y) -> (I64LeU x y) (Leq32U x y) -> (I64LeU (ZeroExt32to64 x) (ZeroExt32to64 y)) (Leq16U x y) -> (I64LeU (ZeroExt16to64 x) (ZeroExt16to64 y)) (Leq8U x y) -> (I64LeU (ZeroExt8to64 x) (ZeroExt8to64 y)) (Leq(64|32)F x y) -> (F(64|32)Le 
x y) (Greater64 x y) -> (I64GtS x y) (Greater32 x y) -> (I64GtS (SignExt32to64 x) (SignExt32to64 y)) (Greater16 x y) -> (I64GtS (SignExt16to64 x) (SignExt16to64 y)) (Greater8 x y) -> (I64GtS (SignExt8to64 x) (SignExt8to64 y)) (Greater64U x y) -> (I64GtU x y) (Greater32U x y) -> (I64GtU (ZeroExt32to64 x) (ZeroExt32to64 y)) (Greater16U x y) -> (I64GtU (ZeroExt16to64 x) (ZeroExt16to64 y)) (Greater8U x y) -> (I64GtU (ZeroExt8to64 x) (ZeroExt8to64 y)) (Greater(64|32)F x y) -> (F(64|32)Gt x y) (Geq64 x y) -> (I64GeS x y) (Geq32 x y) -> (I64GeS (SignExt32to64 x) (SignExt32to64 y)) (Geq16 x y) -> (I64GeS (SignExt16to64 x) (SignExt16to64 y)) (Geq8 x y) -> (I64GeS (SignExt8to64 x) (SignExt8to64 y)) (Geq64U x y) -> (I64GeU x y) (Geq32U x y) -> (I64GeU (ZeroExt32to64 x) (ZeroExt32to64 y)) (Geq16U x y) -> (I64GeU (ZeroExt16to64 x) (ZeroExt16to64 y)) (Geq8U x y) -> (I64GeU (ZeroExt8to64 x) (ZeroExt8to64 y)) (Geq(64|32)F x y) -> (F(64|32)Ge x y) (Eq64 x y) -> (I64Eq x y) (Eq32 x y) -> (I64Eq (ZeroExt32to64 x) (ZeroExt32to64 y)) (Eq16 x y) -> (I64Eq (ZeroExt16to64 x) (ZeroExt16to64 y)) (Eq8 x y) -> (I64Eq (ZeroExt8to64 x) (ZeroExt8to64 y)) (EqB x y) -> (I64Eq x y) (EqPtr x y) -> (I64Eq x y) (Eq(64|32)F x y) -> (F(64|32)Eq x y) (Neq64 x y) -> (I64Ne x y) (Neq32 x y) -> (I64Ne (ZeroExt32to64 x) (ZeroExt32to64 y)) (Neq16 x y) -> (I64Ne (ZeroExt16to64 x) (ZeroExt16to64 y)) (Neq8 x y) -> (I64Ne (ZeroExt8to64 x) (ZeroExt8to64 y)) (NeqB x y) -> (I64Ne x y) (NeqPtr x y) -> (I64Ne x y) (Neq(64|32)F x y) -> (F(64|32)Ne x y) // Lowering loads (Load <t> ptr mem) && is32BitFloat(t) -> (F32Load ptr mem) (Load <t> ptr mem) && is64BitFloat(t) -> (F64Load ptr mem) (Load <t> ptr mem) && t.Size() == 8 -> (I64Load ptr mem) (Load <t> ptr mem) && t.Size() == 4 && !t.IsSigned() -> (I64Load32U ptr mem) (Load <t> ptr mem) && t.Size() == 4 && t.IsSigned() -> (I64Load32S ptr mem) (Load <t> ptr mem) && t.Size() == 2 && !t.IsSigned() -> (I64Load16U ptr mem) (Load <t> ptr mem) && t.Size() == 2 && t.IsSigned() 
-> (I64Load16S ptr mem) (Load <t> ptr mem) && t.Size() == 1 && !t.IsSigned() -> (I64Load8U ptr mem) (Load <t> ptr mem) && t.Size() == 1 && t.IsSigned() -> (I64Load8S ptr mem) // Lowering stores (Store {t} ptr val mem) && is64BitFloat(t.(*types.Type)) -> (F64Store ptr val mem) (Store {t} ptr val mem) && is32BitFloat(t.(*types.Type)) -> (F32Store ptr val mem) (Store {t} ptr val mem) && t.(*types.Type).Size() == 8 -> (I64Store ptr val mem) (Store {t} ptr val mem) && t.(*types.Type).Size() == 4 -> (I64Store32 ptr val mem) (Store {t} ptr val mem) && t.(*types.Type).Size() == 2 -> (I64Store16 ptr val mem) (Store {t} ptr val mem) && t.(*types.Type).Size() == 1 -> (I64Store8 ptr val mem) // Lowering moves (Move [0] _ _ mem) -> mem (Move [1] dst src mem) -> (I64Store8 dst (I64Load8U src mem) mem) (Move [2] dst src mem) -> (I64Store16 dst (I64Load16U src mem) mem) (Move [4] dst src mem) -> (I64Store32 dst (I64Load32U src mem) mem) (Move [8] dst src mem) -> (I64Store dst (I64Load src mem) mem) (Move [16] dst src mem) -> (I64Store [8] dst (I64Load [8] src mem) (I64Store dst (I64Load src mem) mem)) (Move [3] dst src mem) -> (I64Store8 [2] dst (I64Load8U [2] src mem) (I64Store16 dst (I64Load16U src mem) mem)) (Move [5] dst src mem) -> (I64Store8 [4] dst (I64Load8U [4] src mem) (I64Store32 dst (I64Load32U src mem) mem)) (Move [6] dst src mem) -> (I64Store16 [4] dst (I64Load16U [4] src mem) (I64Store32 dst (I64Load32U src mem) mem)) (Move [7] dst src mem) -> (I64Store32 [3] dst (I64Load32U [3] src mem) (I64Store32 dst (I64Load32U src mem) mem)) (Move [s] dst src mem) && s > 8 && s < 16 -> (I64Store [s-8] dst (I64Load [s-8] src mem) (I64Store dst (I64Load src mem) mem)) // Adjust moves to be a multiple of 16 bytes. 
(Move [s] dst src mem) && s > 16 && s%16 != 0 && s%16 <= 8 -> (Move [s-s%16] (OffPtr <dst.Type> dst [s%16]) (OffPtr <src.Type> src [s%16]) (I64Store dst (I64Load src mem) mem)) (Move [s] dst src mem) && s > 16 && s%16 != 0 && s%16 > 8 -> (Move [s-s%16] (OffPtr <dst.Type> dst [s%16]) (OffPtr <src.Type> src [s%16]) (I64Store [8] dst (I64Load [8] src mem) (I64Store dst (I64Load src mem) mem))) // Large copying uses helper. (Move [s] dst src mem) && s%8 == 0 -> (LoweredMove [s/8] dst src mem) // Lowering Zero instructions (Zero [0] _ mem) -> mem (Zero [1] destptr mem) -> (I64Store8 destptr (I64Const [0]) mem) (Zero [2] destptr mem) -> (I64Store16 destptr (I64Const [0]) mem) (Zero [4] destptr mem) -> (I64Store32 destptr (I64Const [0]) mem) (Zero [8] destptr mem) -> (I64Store destptr (I64Const [0]) mem) (Zero [3] destptr mem) -> (I64Store8 [2] destptr (I64Const [0]) (I64Store16 destptr (I64Const [0]) mem)) (Zero [5] destptr mem) -> (I64Store8 [4] destptr (I64Const [0]) (I64Store32 destptr (I64Const [0]) mem)) (Zero [6] destptr mem) -> (I64Store16 [4] destptr (I64Const [0]) (I64Store32 destptr (I64Const [0]) mem)) (Zero [7] destptr mem) -> (I64Store32 [3] destptr (I64Const [0]) (I64Store32 destptr (I64Const [0]) mem)) // Strip off any fractional word zeroing. (Zero [s] destptr mem) && s%8 != 0 && s > 8 -> (Zero [s-s%8] (OffPtr <destptr.Type> destptr [s%8]) (I64Store destptr (I64Const [0]) mem)) // Zero small numbers of words directly. (Zero [16] destptr mem) -> (I64Store [8] destptr (I64Const [0]) (I64Store destptr (I64Const [0]) mem)) (Zero [24] destptr mem) -> (I64Store [16] destptr (I64Const [0]) (I64Store [8] destptr (I64Const [0]) (I64Store destptr (I64Const [0]) mem))) (Zero [32] destptr mem) -> (I64Store [24] destptr (I64Const [0]) (I64Store [16] destptr (I64Const [0]) (I64Store [8] destptr (I64Const [0]) (I64Store destptr (I64Const [0]) mem)))) // Large zeroing uses helper. 
(Zero [s] destptr mem) && s%8 == 0 && s > 32 -> (LoweredZero [s/8] destptr mem) // Lowering constants (Const(64|32|16|8) [val]) -> (I64Const [val]) (Const(64|32)F [val]) -> (F(64|32)Const [val]) (ConstNil) -> (I64Const [0]) (ConstBool [b]) -> (I64Const [b]) // Lowering calls (StaticCall [argwid] {target} mem) -> (LoweredStaticCall [argwid] {target} mem) (ClosureCall [argwid] entry closure mem) -> (LoweredClosureCall [argwid] entry closure mem) (InterCall [argwid] entry mem) -> (LoweredInterCall [argwid] entry mem) // Miscellaneous (Convert <t> x mem) -> (LoweredConvert <t> x mem) (IsNonNil p) -> (I64Eqz (I64Eqz p)) (IsInBounds idx len) -> (I64LtU idx len) (IsSliceInBounds idx len) -> (I64LeU idx len) (NilCheck ptr mem) -> (LoweredNilCheck ptr mem) (GetClosurePtr) -> (LoweredGetClosurePtr) (GetCallerPC) -> (LoweredGetCallerPC) (GetCallerSP) -> (LoweredGetCallerSP) (Addr {sym} base) -> (LoweredAddr {sym} base) (LocalAddr {sym} base _) -> (LoweredAddr {sym} base) // Write barrier. (WB {fn} destptr srcptr mem) -> (LoweredWB {fn} destptr srcptr mem) // --- Intrinsics --- (Sqrt x) -> (F64Sqrt x) (Trunc x) -> (F64Trunc x) (Ceil x) -> (F64Ceil x) (Floor x) -> (F64Floor x) (RoundToEven x) -> (F64Nearest x) (Abs x) -> (F64Abs x) (Copysign x y) -> (F64Copysign x y) (Ctz64 x) -> (I64Ctz x) (Ctz32 x) -> (I64Ctz (I64Or x (I64Const [0x100000000]))) (Ctz16 x) -> (I64Ctz (I64Or x (I64Const [0x10000]))) (Ctz8 x) -> (I64Ctz (I64Or x (I64Const [0x100]))) (Ctz(64|32|16|8)NonZero x) -> (I64Ctz x) (BitLen64 x) -> (I64Sub (I64Const [64]) (I64Clz x)) (PopCount64 x) -> (I64Popcnt x) (PopCount32 x) -> (I64Popcnt (ZeroExt32to64 x)) (PopCount16 x) -> (I64Popcnt (ZeroExt16to64 x)) (PopCount8 x) -> (I64Popcnt (ZeroExt8to64 x)) (CondSelect <t> x y cond) -> (Select <t> x y cond) // --- Optimizations --- (I64Add (I64Const [x]) (I64Const [y])) -> (I64Const [x + y]) (I64Mul (I64Const [x]) (I64Const [y])) -> (I64Const [x * y]) (I64And (I64Const [x]) (I64Const [y])) -> (I64Const [x & y]) (I64Or 
(I64Const [x]) (I64Const [y])) -> (I64Const [x | y]) (I64Xor (I64Const [x]) (I64Const [y])) -> (I64Const [x ^ y]) (F64Add (F64Const [x]) (F64Const [y])) -> (F64Const [auxFrom64F(auxTo64F(x) + auxTo64F(y))]) (F64Mul (F64Const [x]) (F64Const [y])) -> (F64Const [auxFrom64F(auxTo64F(x) * auxTo64F(y))]) (I64Eq (I64Const [x]) (I64Const [y])) && x == y -> (I64Const [1]) (I64Eq (I64Const [x]) (I64Const [y])) && x != y -> (I64Const [0]) (I64Ne (I64Const [x]) (I64Const [y])) && x == y -> (I64Const [0]) (I64Ne (I64Const [x]) (I64Const [y])) && x != y -> (I64Const [1]) (I64Shl (I64Const [x]) (I64Const [y])) -> (I64Const [x << uint64(y)]) (I64ShrU (I64Const [x]) (I64Const [y])) -> (I64Const [int64(uint64(x) >> uint64(y))]) (I64ShrS (I64Const [x]) (I64Const [y])) -> (I64Const [x >> uint64(y)]) (I64Add (I64Const [x]) y) -> (I64Add y (I64Const [x])) (I64Mul (I64Const [x]) y) -> (I64Mul y (I64Const [x])) (I64And (I64Const [x]) y) -> (I64And y (I64Const [x])) (I64Or (I64Const [x]) y) -> (I64Or y (I64Const [x])) (I64Xor (I64Const [x]) y) -> (I64Xor y (I64Const [x])) (F64Add (F64Const [x]) y) -> (F64Add y (F64Const [x])) (F64Mul (F64Const [x]) y) -> (F64Mul y (F64Const [x])) (I64Eq (I64Const [x]) y) -> (I64Eq y (I64Const [x])) (I64Ne (I64Const [x]) y) -> (I64Ne y (I64Const [x])) (I64Eq x (I64Const [0])) -> (I64Eqz x) (I64Ne x (I64Const [0])) -> (I64Eqz (I64Eqz x)) (I64Add x (I64Const [y])) -> (I64AddConst [y] x) (I64AddConst [0] x) -> x (I64Eqz (I64Eqz (I64Eqz x))) -> (I64Eqz x) // folding offset into load/store ((I64Load|I64Load32U|I64Load32S|I64Load16U|I64Load16S|I64Load8U|I64Load8S) [off] (I64AddConst [off2] ptr) mem) && isU32Bit(off+off2) -> ((I64Load|I64Load32U|I64Load32S|I64Load16U|I64Load16S|I64Load8U|I64Load8S) [off+off2] ptr mem) ((I64Store|I64Store32|I64Store16|I64Store8) [off] (I64AddConst [off2] ptr) val mem) && isU32Bit(off+off2) -> ((I64Store|I64Store32|I64Store16|I64Store8) [off+off2] ptr val mem) // folding offset into address (I64AddConst [off] (LoweredAddr {sym} 
[off2] base)) && isU32Bit(off+off2) -> (LoweredAddr {sym} [off+off2] base) // transforming readonly globals into constants (I64Load [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+off2) -> (I64Const [int64(read64(sym, off+off2, config.BigEndian))]) (I64Load32U [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+off2) -> (I64Const [int64(read32(sym, off+off2, config.BigEndian))]) (I64Load16U [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+off2) -> (I64Const [int64(read16(sym, off+off2, config.BigEndian))]) (I64Load8U [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+off2) -> (I64Const [int64(read8(sym, off+off2))])
Close