diff --git a/Cargo.lock b/Cargo.lock index 29258cbf34d5..be667f8883d8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3005,9 +3005,9 @@ dependencies = [ [[package]] name = "regalloc2" -version = "0.13.4" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "919cc500365f55230d2e2230cb813dd7c6fa5f907ad332d83ad3b862112cb69e" +checksum = "d56c86aa200e0d597422a1099f82b38577ec52c660f32d3af3a9c4fd4f2891f2" dependencies = [ "allocator-api2", "bumpalo", diff --git a/Cargo.toml b/Cargo.toml index 0607bb9f3ebe..30c846c9a860 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -322,7 +322,7 @@ component-async-tests = { path = "crates/misc/component-async-tests" } # Bytecode Alliance maintained dependencies: # --------------------------- -regalloc2 = "0.13.4" +regalloc2 = "0.14.0" wasip1 = { version = "1.0.0", default-features = false } # cap-std family: diff --git a/cranelift/codegen/src/isa/aarch64/abi.rs b/cranelift/codegen/src/isa/aarch64/abi.rs index 8f1afce63a87..6bab37ac03df 100644 --- a/cranelift/codegen/src/isa/aarch64/abi.rs +++ b/cranelift/codegen/src/isa/aarch64/abi.rs @@ -17,7 +17,6 @@ use alloc::boxed::Box; use alloc::vec::Vec; use regalloc2::{MachineEnv, PReg, PRegSet}; use smallvec::{SmallVec, smallvec}; -use std::sync::OnceLock; // We use a generic implementation that factors out AArch64 and x64 ABI commonalities, because // these ABIs are very similar. @@ -1089,11 +1088,11 @@ impl ABIMachineSpec for AArch64MachineDeps { fn get_machine_env(flags: &settings::Flags, _call_conv: isa::CallConv) -> &MachineEnv { if flags.enable_pinned_reg() { - static MACHINE_ENV: OnceLock = OnceLock::new(); - MACHINE_ENV.get_or_init(|| create_reg_env(true)) + static MACHINE_ENV: MachineEnv = create_reg_env(true); + &MACHINE_ENV } else { - static MACHINE_ENV: OnceLock = OnceLock::new(); - MACHINE_ENV.get_or_init(|| create_reg_env(false)) + static MACHINE_ENV: MachineEnv = create_reg_env(false); + &MACHINE_ENV } } @@ -1534,100 +1533,96 @@ const WINCH_CLOBBERS: PRegSet = winch_clobbers(); const ALL_CLOBBERS: PRegSet = all_clobbers(); const NO_CLOBBERS: PRegSet = PRegSet::empty(); -fn create_reg_env(enable_pinned_reg: bool) -> MachineEnv { - fn preg(r: Reg) -> PReg { - r.to_real_reg().unwrap().into() +const fn create_reg_env(enable_pinned_reg: bool) -> MachineEnv { + const fn preg(r: Reg) -> PReg { + r.to_real_reg().unwrap().preg() } let mut env = MachineEnv { preferred_regs_by_class: [ - vec![ - preg(xreg(0)), - preg(xreg(1)), - preg(xreg(2)), - preg(xreg(3)), - preg(xreg(4)), - preg(xreg(5)), - preg(xreg(6)), - preg(xreg(7)), - preg(xreg(8)), - preg(xreg(9)), - preg(xreg(10)), - preg(xreg(11)), - preg(xreg(12)), - preg(xreg(13)), - preg(xreg(14)), - preg(xreg(15)), - // x16 and x17 are spilltmp and tmp2 (see above). - // x18 could be used by the platform to carry inter-procedural state; - // conservatively assume so and make it not allocatable. - // x19-28 are callee-saved and so not preferred. - // x21 is the pinned register (if enabled) and not allocatable if so. - // x29 is FP, x30 is LR, x31 is SP/ZR. 
- ], - vec![ - preg(vreg(0)), - preg(vreg(1)), - preg(vreg(2)), - preg(vreg(3)), - preg(vreg(4)), - preg(vreg(5)), - preg(vreg(6)), - preg(vreg(7)), + PRegSet::empty() + .with(preg(xreg(0))) + .with(preg(xreg(1))) + .with(preg(xreg(2))) + .with(preg(xreg(3))) + .with(preg(xreg(4))) + .with(preg(xreg(5))) + .with(preg(xreg(6))) + .with(preg(xreg(7))) + .with(preg(xreg(8))) + .with(preg(xreg(9))) + .with(preg(xreg(10))) + .with(preg(xreg(11))) + .with(preg(xreg(12))) + .with(preg(xreg(13))) + .with(preg(xreg(14))) + .with(preg(xreg(15))), + // x16 and x17 are spilltmp and tmp2 (see above). + // x18 could be used by the platform to carry inter-procedural state; + // conservatively assume so and make it not allocatable. + // x19-28 are callee-saved and so not preferred. + // x21 is the pinned register (if enabled) and not allocatable if so. + // x29 is FP, x30 is LR, x31 is SP/ZR. + PRegSet::empty() + .with(preg(vreg(0))) + .with(preg(vreg(1))) + .with(preg(vreg(2))) + .with(preg(vreg(3))) + .with(preg(vreg(4))) + .with(preg(vreg(5))) + .with(preg(vreg(6))) + .with(preg(vreg(7))) // v8-15 are callee-saved and so not preferred. - preg(vreg(16)), - preg(vreg(17)), - preg(vreg(18)), - preg(vreg(19)), - preg(vreg(20)), - preg(vreg(21)), - preg(vreg(22)), - preg(vreg(23)), - preg(vreg(24)), - preg(vreg(25)), - preg(vreg(26)), - preg(vreg(27)), - preg(vreg(28)), - preg(vreg(29)), - preg(vreg(30)), - preg(vreg(31)), - ], + .with(preg(vreg(16))) + .with(preg(vreg(17))) + .with(preg(vreg(18))) + .with(preg(vreg(19))) + .with(preg(vreg(20))) + .with(preg(vreg(21))) + .with(preg(vreg(22))) + .with(preg(vreg(23))) + .with(preg(vreg(24))) + .with(preg(vreg(25))) + .with(preg(vreg(26))) + .with(preg(vreg(27))) + .with(preg(vreg(28))) + .with(preg(vreg(29))) + .with(preg(vreg(30))) + .with(preg(vreg(31))), // Vector Regclass is unused - vec![], + PRegSet::empty(), ], non_preferred_regs_by_class: [ - vec![ - preg(xreg(19)), - preg(xreg(20)), + PRegSet::empty() + .with(preg(xreg(19))) + .with(preg(xreg(20))) // x21 is pinned reg if enabled; we add to this list below if not. - preg(xreg(22)), - preg(xreg(23)), - preg(xreg(24)), - preg(xreg(25)), - preg(xreg(26)), - preg(xreg(27)), - preg(xreg(28)), - ], - vec![ - preg(vreg(8)), - preg(vreg(9)), - preg(vreg(10)), - preg(vreg(11)), - preg(vreg(12)), - preg(vreg(13)), - preg(vreg(14)), - preg(vreg(15)), - ], + .with(preg(xreg(22))) + .with(preg(xreg(23))) + .with(preg(xreg(24))) + .with(preg(xreg(25))) + .with(preg(xreg(26))) + .with(preg(xreg(27))) + .with(preg(xreg(28))), + PRegSet::empty() + .with(preg(vreg(8))) + .with(preg(vreg(9))) + .with(preg(vreg(10))) + .with(preg(vreg(11))) + .with(preg(vreg(12))) + .with(preg(vreg(13))) + .with(preg(vreg(14))) + .with(preg(vreg(15))), // Vector Regclass is unused - vec![], + PRegSet::empty(), ], fixed_stack_slots: vec![], scratch_by_class: [None, None, None], }; if !enable_pinned_reg { - debug_assert_eq!(PINNED_REG, 21); // We assumed this above in hardcoded reg list. - env.non_preferred_regs_by_class[0].push(preg(xreg(PINNED_REG))); + debug_assert!(PINNED_REG == 21); + env.non_preferred_regs_by_class[0].add(preg(xreg(PINNED_REG))); } env diff --git a/cranelift/codegen/src/isa/aarch64/inst/regs.rs b/cranelift/codegen/src/isa/aarch64/inst/regs.rs index 7db69df4ebe4..7dc0c91f3319 100644 --- a/cranelift/codegen/src/isa/aarch64/inst/regs.rs +++ b/cranelift/codegen/src/isa/aarch64/inst/regs.rs @@ -36,8 +36,8 @@ pub fn writable_xreg(num: u8) -> Writable { } /// Get a reference to a V-register (vector/FP register). 
-pub fn vreg(num: u8) -> Reg { - Reg::from(vreg_preg(num)) +pub const fn vreg(num: u8) -> Reg { + Reg::from_real_reg(vreg_preg(num)) } /// Get the given V-register as a PReg. diff --git a/cranelift/codegen/src/isa/pulley_shared/abi.rs b/cranelift/codegen/src/isa/pulley_shared/abi.rs index f4907beccde5..7290d6eaf01a 100644 --- a/cranelift/codegen/src/isa/pulley_shared/abi.rs +++ b/cranelift/codegen/src/isa/pulley_shared/abi.rs @@ -13,9 +13,8 @@ use alloc::borrow::ToOwned; use alloc::vec::Vec; use core::marker::PhantomData; use cranelift_bitset::ScalarBitSet; -use regalloc2::{MachineEnv, PReg, PRegSet}; +use regalloc2::{MachineEnv, PRegSet}; use smallvec::{SmallVec, smallvec}; -use std::sync::OnceLock; /// Support for the Pulley ABI from the callee side (within a function body). pub(crate) type PulleyCallee
= Callee>; @@ -481,8 +480,8 @@ where } fn get_machine_env(_flags: &settings::Flags, _call_conv: isa::CallConv) -> &MachineEnv { - static MACHINE_ENV: OnceLock = OnceLock::new(); - MACHINE_ENV.get_or_init(create_reg_environment) + static MACHINE_ENV: MachineEnv = create_reg_environment(); + &MACHINE_ENV } fn get_regs_clobbered_by_call( @@ -972,26 +971,118 @@ const ALL_CLOBBERS: PRegSet = PRegSet::empty() const NO_CLOBBERS: PRegSet = PRegSet::empty(); -fn create_reg_environment() -> MachineEnv { +const fn create_reg_environment() -> MachineEnv { // Prefer caller-saved registers over callee-saved registers, because that // way we don't need to emit code to save and restore them if we don't // mutate them. - let preferred_regs_by_class: [Vec; 3] = { - let x_registers: Vec = (0..16).map(|x| px_reg(x)).collect(); - let f_registers: Vec = (0..32).map(|x| pf_reg(x)).collect(); - let v_registers: Vec = (0..32).map(|x| pv_reg(x)).collect(); - [x_registers, f_registers, v_registers] - }; - - let non_preferred_regs_by_class: [Vec; 3] = { - let x_registers: Vec = (16..XReg::SPECIAL_START) - .map(|x| px_reg(x.into())) - .collect(); - let f_registers: Vec = vec![]; - let v_registers: Vec = vec![]; - [x_registers, f_registers, v_registers] - }; + let preferred_regs_by_class: [PRegSet; 3] = [ + PRegSet::empty() + .with(px_reg(0)) + .with(px_reg(1)) + .with(px_reg(2)) + .with(px_reg(3)) + .with(px_reg(4)) + .with(px_reg(5)) + .with(px_reg(6)) + .with(px_reg(7)) + .with(px_reg(8)) + .with(px_reg(9)) + .with(px_reg(10)) + .with(px_reg(11)) + .with(px_reg(12)) + .with(px_reg(13)) + .with(px_reg(14)) + .with(px_reg(15)), + PRegSet::empty() + .with(pf_reg(0)) + .with(pf_reg(1)) + .with(pf_reg(2)) + .with(pf_reg(3)) + .with(pf_reg(4)) + .with(pf_reg(5)) + .with(pf_reg(6)) + .with(pf_reg(7)) + .with(pf_reg(8)) + .with(pf_reg(9)) + .with(pf_reg(10)) + .with(pf_reg(11)) + .with(pf_reg(12)) + .with(pf_reg(13)) + .with(pf_reg(14)) + .with(pf_reg(15)) + .with(pf_reg(16)) + .with(pf_reg(17)) + .with(pf_reg(18)) + .with(pf_reg(19)) + .with(pf_reg(20)) + .with(pf_reg(21)) + .with(pf_reg(22)) + .with(pf_reg(23)) + .with(pf_reg(24)) + .with(pf_reg(25)) + .with(pf_reg(26)) + .with(pf_reg(27)) + .with(pf_reg(28)) + .with(pf_reg(29)) + .with(pf_reg(30)) + .with(pf_reg(31)), + PRegSet::empty() + .with(pv_reg(0)) + .with(pv_reg(1)) + .with(pv_reg(2)) + .with(pv_reg(3)) + .with(pv_reg(4)) + .with(pv_reg(5)) + .with(pv_reg(6)) + .with(pv_reg(7)) + .with(pv_reg(8)) + .with(pv_reg(9)) + .with(pv_reg(10)) + .with(pv_reg(11)) + .with(pv_reg(12)) + .with(pv_reg(13)) + .with(pv_reg(14)) + .with(pv_reg(15)) + .with(pv_reg(16)) + .with(pv_reg(17)) + .with(pv_reg(18)) + .with(pv_reg(19)) + .with(pv_reg(20)) + .with(pv_reg(21)) + .with(pv_reg(22)) + .with(pv_reg(23)) + .with(pv_reg(24)) + .with(pv_reg(25)) + .with(pv_reg(26)) + .with(pv_reg(27)) + .with(pv_reg(28)) + .with(pv_reg(29)) + .with(pv_reg(30)) + .with(pv_reg(31)), + ]; + + let non_preferred_regs_by_class: [PRegSet; 3] = [ + PRegSet::empty() + .with(px_reg(16)) + .with(px_reg(17)) + .with(px_reg(18)) + .with(px_reg(19)) + .with(px_reg(20)) + .with(px_reg(21)) + .with(px_reg(22)) + .with(px_reg(23)) + .with(px_reg(24)) + .with(px_reg(25)) + .with(px_reg(26)) + .with(px_reg(27)) + .with(px_reg(28)) + .with(px_reg(29)), + PRegSet::empty(), + PRegSet::empty(), + ]; + + debug_assert!(XReg::SPECIAL_START == 30); MachineEnv { preferred_regs_by_class, diff --git a/cranelift/codegen/src/isa/riscv64/abi.rs b/cranelift/codegen/src/isa/riscv64/abi.rs index f53eee9a53a0..d341287434a3 100644 
--- a/cranelift/codegen/src/isa/riscv64/abi.rs +++ b/cranelift/codegen/src/isa/riscv64/abi.rs @@ -17,11 +17,10 @@ use crate::isa::unwind::UnwindInst; use crate::settings; use alloc::boxed::Box; use alloc::vec::Vec; -use regalloc2::{MachineEnv, PReg, PRegSet}; +use regalloc2::{MachineEnv, PRegSet}; use alloc::borrow::ToOwned; use smallvec::{SmallVec, smallvec}; -use std::sync::OnceLock; /// Support for the Riscv64 ABI from the callee side (within a function body). pub(crate) type Riscv64Callee = Callee; @@ -613,8 +612,8 @@ impl ABIMachineSpec for Riscv64MachineDeps { } fn get_machine_env(_flags: &settings::Flags, _call_conv: isa::CallConv) -> &MachineEnv { - static MACHINE_ENV: OnceLock = OnceLock::new(); - MACHINE_ENV.get_or_init(create_reg_environment) + static MACHINE_ENV: MachineEnv = create_reg_environment(); + &MACHINE_ENV } fn get_regs_clobbered_by_call( @@ -956,7 +955,7 @@ const ALL_CLOBBERS: PRegSet = PRegSet::empty() const NO_CLOBBERS: PRegSet = PRegSet::empty(); -fn create_reg_environment() -> MachineEnv { +const fn create_reg_environment() -> MachineEnv { // Some C Extension instructions can only use a subset of the registers. // x8 - x15, f8 - f15, v8 - v15 so we should prefer to use those since // they allow us to emit C instructions more often. @@ -967,45 +966,114 @@ fn create_reg_environment() -> MachineEnv { // 3. Compressible Callee Saved registers. // 4. Non-Compressible Callee Saved registers. - let preferred_regs_by_class: [Vec; 3] = { - let x_registers: Vec = (10..=15).map(px_reg).collect(); - let f_registers: Vec = (10..=15).map(pf_reg).collect(); - let v_registers: Vec = (8..=15).map(pv_reg).collect(); - - [x_registers, f_registers, v_registers] - }; - - let non_preferred_regs_by_class: [Vec; 3] = { + let preferred_regs_by_class: [PRegSet; 3] = [ + PRegSet::empty() + .with(px_reg(10)) + .with(px_reg(11)) + .with(px_reg(12)) + .with(px_reg(13)) + .with(px_reg(14)) + .with(px_reg(15)), + PRegSet::empty() + .with(pf_reg(10)) + .with(pf_reg(11)) + .with(pf_reg(12)) + .with(pf_reg(13)) + .with(pf_reg(14)) + .with(pf_reg(15)), + PRegSet::empty() + .with(pv_reg(8)) + .with(pv_reg(9)) + .with(pv_reg(10)) + .with(pv_reg(11)) + .with(pv_reg(12)) + .with(pv_reg(13)) + .with(pv_reg(14)) + .with(pv_reg(15)), + ]; + + let non_preferred_regs_by_class: [PRegSet; 3] = [ // x0 - x4 are special registers, so we don't want to use them. // Omit x30 and x31 since they are the spilltmp registers. - - // Start with the Non-Compressible Caller Saved registers. - let x_registers: Vec = (5..=7) - .chain(16..=17) - .chain(28..=29) + PRegSet::empty() + .with(px_reg(5)) + .with(px_reg(6)) + .with(px_reg(7)) + // Start with the Non-Compressible Caller Saved registers. + .with(px_reg(16)) + .with(px_reg(17)) + .with(px_reg(28)) + .with(px_reg(29)) // The first Callee Saved register is x9 since its Compressible // Omit x8 since it's the frame pointer. - .chain(9..=9) + .with(px_reg(9)) // The rest of the Callee Saved registers are Non-Compressible - .chain(18..=27) - .map(px_reg) - .collect(); - + .with(px_reg(18)) + .with(px_reg(19)) + .with(px_reg(20)) + .with(px_reg(21)) + .with(px_reg(22)) + .with(px_reg(23)) + .with(px_reg(24)) + .with(px_reg(25)) + .with(px_reg(26)) + .with(px_reg(27)), // Prefer Caller Saved registers. 
- let f_registers: Vec = (0..=7) - .chain(16..=17) - .chain(28..=31) + PRegSet::empty() + .with(pf_reg(0)) + .with(pf_reg(1)) + .with(pf_reg(2)) + .with(pf_reg(3)) + .with(pf_reg(4)) + .with(pf_reg(5)) + .with(pf_reg(6)) + .with(pf_reg(7)) + .with(pf_reg(16)) + .with(pf_reg(17)) + .with(pf_reg(28)) + .with(pf_reg(29)) + .with(pf_reg(30)) + .with(pf_reg(31)) // Once those are exhausted, we should prefer f8 and f9 since they are // callee saved, but compressible. - .chain(8..=9) - .chain(18..=27) - .map(pf_reg) - .collect(); - - let v_registers = (0..=7).chain(16..=31).map(pv_reg).collect(); - - [x_registers, f_registers, v_registers] - }; + .with(pf_reg(8)) + .with(pf_reg(9)) + .with(pf_reg(18)) + .with(pf_reg(19)) + .with(pf_reg(20)) + .with(pf_reg(21)) + .with(pf_reg(22)) + .with(pf_reg(23)) + .with(pf_reg(24)) + .with(pf_reg(25)) + .with(pf_reg(26)) + .with(pf_reg(27)), + PRegSet::empty() + .with(pv_reg(0)) + .with(pv_reg(1)) + .with(pv_reg(2)) + .with(pv_reg(3)) + .with(pv_reg(4)) + .with(pv_reg(5)) + .with(pv_reg(6)) + .with(pv_reg(7)) + .with(pv_reg(16)) + .with(pv_reg(17)) + .with(pv_reg(18)) + .with(pv_reg(19)) + .with(pv_reg(20)) + .with(pv_reg(21)) + .with(pv_reg(22)) + .with(pv_reg(23)) + .with(pv_reg(24)) + .with(pv_reg(25)) + .with(pv_reg(26)) + .with(pv_reg(27)) + .with(pv_reg(28)) + .with(pv_reg(29)) + .with(pv_reg(30)) + .with(pv_reg(31)), + ]; MachineEnv { preferred_regs_by_class, diff --git a/cranelift/codegen/src/isa/s390x/abi.rs b/cranelift/codegen/src/isa/s390x/abi.rs index 738e037047d3..7b128e3ff1c0 100644 --- a/cranelift/codegen/src/isa/s390x/abi.rs +++ b/cranelift/codegen/src/isa/s390x/abi.rs @@ -148,7 +148,6 @@ use alloc::borrow::ToOwned; use alloc::vec::Vec; use regalloc2::{MachineEnv, PRegSet}; use smallvec::{SmallVec, smallvec}; -use std::sync::OnceLock; // We use a generic implementation that factors out ABI commonalities. @@ -907,12 +906,12 @@ impl ABIMachineSpec for S390xMachineDeps { fn get_machine_env(_flags: &settings::Flags, call_conv: isa::CallConv) -> &MachineEnv { match call_conv { isa::CallConv::Tail => { - static TAIL_MACHINE_ENV: OnceLock = OnceLock::new(); - TAIL_MACHINE_ENV.get_or_init(tail_create_machine_env) + static TAIL_MACHINE_ENV: MachineEnv = tail_create_machine_env(); + &TAIL_MACHINE_ENV } _ => { - static SYSV_MACHINE_ENV: OnceLock = OnceLock::new(); - SYSV_MACHINE_ENV.get_or_init(sysv_create_machine_env) + static SYSV_MACHINE_ENV: MachineEnv = sysv_create_machine_env(); + &SYSV_MACHINE_ENV } } } @@ -1485,143 +1484,135 @@ const ALL_CLOBBERS: PRegSet = all_clobbers(); const NO_CLOBBERS: PRegSet = PRegSet::empty(); -fn sysv_create_machine_env() -> MachineEnv { +const fn sysv_create_machine_env() -> MachineEnv { MachineEnv { preferred_regs_by_class: [ - vec![ + PRegSet::empty() // no r0; can't use for addressing? // no r1; it is our spilltmp. 
- gpr_preg(2), - gpr_preg(3), - gpr_preg(4), - gpr_preg(5), - ], - vec![ - vr_preg(0), - vr_preg(1), - vr_preg(2), - vr_preg(3), - vr_preg(4), - vr_preg(5), - vr_preg(6), - vr_preg(7), - vr_preg(16), - vr_preg(17), - vr_preg(18), - vr_preg(19), - vr_preg(20), - vr_preg(21), - vr_preg(22), - vr_preg(23), - vr_preg(24), - vr_preg(25), - vr_preg(26), - vr_preg(27), - vr_preg(28), - vr_preg(29), - vr_preg(30), - vr_preg(31), - ], + .with(gpr_preg(2)) + .with(gpr_preg(3)) + .with(gpr_preg(4)) + .with(gpr_preg(5)), + PRegSet::empty() + .with(vr_preg(0)) + .with(vr_preg(1)) + .with(vr_preg(2)) + .with(vr_preg(3)) + .with(vr_preg(4)) + .with(vr_preg(5)) + .with(vr_preg(6)) + .with(vr_preg(7)) + .with(vr_preg(16)) + .with(vr_preg(17)) + .with(vr_preg(18)) + .with(vr_preg(19)) + .with(vr_preg(20)) + .with(vr_preg(21)) + .with(vr_preg(22)) + .with(vr_preg(23)) + .with(vr_preg(24)) + .with(vr_preg(25)) + .with(vr_preg(26)) + .with(vr_preg(27)) + .with(vr_preg(28)) + .with(vr_preg(29)) + .with(vr_preg(30)) + .with(vr_preg(31)), // Vector Regclass is unused - vec![], + PRegSet::empty(), ], non_preferred_regs_by_class: [ - vec![ - gpr_preg(6), - gpr_preg(7), - gpr_preg(8), - gpr_preg(9), - gpr_preg(10), - gpr_preg(11), - gpr_preg(12), - gpr_preg(13), - gpr_preg(14), - // no r15; it is the stack pointer. - ], - vec![ - vr_preg(8), - vr_preg(9), - vr_preg(10), - vr_preg(11), - vr_preg(12), - vr_preg(13), - vr_preg(14), - vr_preg(15), - ], + PRegSet::empty() + .with(gpr_preg(6)) + .with(gpr_preg(7)) + .with(gpr_preg(8)) + .with(gpr_preg(9)) + .with(gpr_preg(10)) + .with(gpr_preg(11)) + .with(gpr_preg(12)) + .with(gpr_preg(13)) + .with(gpr_preg(14)), + // no r15; it is the stack pointer. + PRegSet::empty() + .with(vr_preg(8)) + .with(vr_preg(9)) + .with(vr_preg(10)) + .with(vr_preg(11)) + .with(vr_preg(12)) + .with(vr_preg(13)) + .with(vr_preg(14)) + .with(vr_preg(15)), // Vector Regclass is unused - vec![], + PRegSet::empty(), ], fixed_stack_slots: vec![], scratch_by_class: [None, None, None], } } -fn tail_create_machine_env() -> MachineEnv { +const fn tail_create_machine_env() -> MachineEnv { // Same as the SystemV ABI, except that %r6 and %r7 are preferred. MachineEnv { preferred_regs_by_class: [ - vec![ + PRegSet::empty() // no r0; can't use for addressing? // no r1; it is our spilltmp. 
- gpr_preg(2), - gpr_preg(3), - gpr_preg(4), - gpr_preg(5), - gpr_preg(6), - gpr_preg(7), - ], - vec![ - vr_preg(0), - vr_preg(1), - vr_preg(2), - vr_preg(3), - vr_preg(4), - vr_preg(5), - vr_preg(6), - vr_preg(7), - vr_preg(16), - vr_preg(17), - vr_preg(18), - vr_preg(19), - vr_preg(20), - vr_preg(21), - vr_preg(22), - vr_preg(23), - vr_preg(24), - vr_preg(25), - vr_preg(26), - vr_preg(27), - vr_preg(28), - vr_preg(29), - vr_preg(30), - vr_preg(31), - ], + .with(gpr_preg(2)) + .with(gpr_preg(3)) + .with(gpr_preg(4)) + .with(gpr_preg(5)) + .with(gpr_preg(6)) + .with(gpr_preg(7)), + PRegSet::empty() + .with(vr_preg(0)) + .with(vr_preg(1)) + .with(vr_preg(2)) + .with(vr_preg(3)) + .with(vr_preg(4)) + .with(vr_preg(5)) + .with(vr_preg(6)) + .with(vr_preg(7)) + .with(vr_preg(16)) + .with(vr_preg(17)) + .with(vr_preg(18)) + .with(vr_preg(19)) + .with(vr_preg(20)) + .with(vr_preg(21)) + .with(vr_preg(22)) + .with(vr_preg(23)) + .with(vr_preg(24)) + .with(vr_preg(25)) + .with(vr_preg(26)) + .with(vr_preg(27)) + .with(vr_preg(28)) + .with(vr_preg(29)) + .with(vr_preg(30)) + .with(vr_preg(31)), // Vector Regclass is unused - vec![], + PRegSet::empty(), ], non_preferred_regs_by_class: [ - vec![ - gpr_preg(8), - gpr_preg(9), - gpr_preg(10), - gpr_preg(11), - gpr_preg(12), - gpr_preg(13), - gpr_preg(14), - // no r15; it is the stack pointer. - ], - vec![ - vr_preg(8), - vr_preg(9), - vr_preg(10), - vr_preg(11), - vr_preg(12), - vr_preg(13), - vr_preg(14), - vr_preg(15), - ], + PRegSet::empty() + .with(gpr_preg(8)) + .with(gpr_preg(9)) + .with(gpr_preg(10)) + .with(gpr_preg(11)) + .with(gpr_preg(12)) + .with(gpr_preg(13)) + .with(gpr_preg(14)), + // no r15; it is the stack pointer. + PRegSet::empty() + .with(vr_preg(8)) + .with(vr_preg(9)) + .with(vr_preg(10)) + .with(vr_preg(11)) + .with(vr_preg(12)) + .with(vr_preg(13)) + .with(vr_preg(14)) + .with(vr_preg(15)), // Vector Regclass is unused - vec![], + PRegSet::empty(), ], fixed_stack_slots: vec![], scratch_by_class: [None, None, None], diff --git a/cranelift/codegen/src/isa/x64/abi.rs b/cranelift/codegen/src/isa/x64/abi.rs index b3a8dfed44a0..a752614631ec 100644 --- a/cranelift/codegen/src/isa/x64/abi.rs +++ b/cranelift/codegen/src/isa/x64/abi.rs @@ -16,7 +16,6 @@ use args::*; use cranelift_assembler_x64 as asm; use regalloc2::{MachineEnv, PReg, PRegSet}; use smallvec::{SmallVec, smallvec}; -use std::sync::OnceLock; /// Support for the x64 ABI from the callee side (within a function body). 
pub(crate) type X64Callee = Callee; @@ -875,11 +874,11 @@ impl ABIMachineSpec for X64ABIMachineSpec { fn get_machine_env(flags: &settings::Flags, _call_conv: isa::CallConv) -> &MachineEnv { if flags.enable_pinned_reg() { - static MACHINE_ENV: OnceLock = OnceLock::new(); - MACHINE_ENV.get_or_init(|| create_reg_env_systemv(true)) + static MACHINE_ENV: MachineEnv = create_reg_env_systemv(true); + &MACHINE_ENV } else { - static MACHINE_ENV: OnceLock = OnceLock::new(); - MACHINE_ENV.get_or_init(|| create_reg_env_systemv(false)) + static MACHINE_ENV: MachineEnv = create_reg_env_systemv(false); + &MACHINE_ENV } } @@ -1278,70 +1277,67 @@ const fn all_clobbers() -> PRegSet { .with(regs::fpr_preg(XMM15)) } -fn create_reg_env_systemv(enable_pinned_reg: bool) -> MachineEnv { - fn preg(r: Reg) -> PReg { - r.to_real_reg().unwrap().into() +const fn create_reg_env_systemv(enable_pinned_reg: bool) -> MachineEnv { + const fn preg(r: Reg) -> PReg { + r.to_real_reg().unwrap().preg() } let mut env = MachineEnv { preferred_regs_by_class: [ // Preferred GPRs: caller-saved in the SysV ABI. - vec![ - preg(regs::rsi()), - preg(regs::rdi()), - preg(regs::rax()), - preg(regs::rcx()), - preg(regs::rdx()), - preg(regs::r8()), - preg(regs::r9()), - preg(regs::r10()), - preg(regs::r11()), - ], + PRegSet::empty() + .with(preg(regs::rsi())) + .with(preg(regs::rdi())) + .with(preg(regs::rax())) + .with(preg(regs::rcx())) + .with(preg(regs::rdx())) + .with(preg(regs::r8())) + .with(preg(regs::r9())) + .with(preg(regs::r10())) + .with(preg(regs::r11())), // Preferred XMMs: the first 8, which can have smaller encodings // with AVX instructions. - vec![ - preg(regs::xmm0()), - preg(regs::xmm1()), - preg(regs::xmm2()), - preg(regs::xmm3()), - preg(regs::xmm4()), - preg(regs::xmm5()), - preg(regs::xmm6()), - preg(regs::xmm7()), - ], + PRegSet::empty() + .with(preg(regs::xmm0())) + .with(preg(regs::xmm1())) + .with(preg(regs::xmm2())) + .with(preg(regs::xmm3())) + .with(preg(regs::xmm4())) + .with(preg(regs::xmm5())) + .with(preg(regs::xmm6())) + .with(preg(regs::xmm7())), // The Vector Regclass is unused - vec![], + PRegSet::empty(), ], non_preferred_regs_by_class: [ // Non-preferred GPRs: callee-saved in the SysV ABI. - vec![ - preg(regs::rbx()), - preg(regs::r12()), - preg(regs::r13()), - preg(regs::r14()), - ], + PRegSet::empty() + .with(preg(regs::rbx())) + .with(preg(regs::r12())) + .with(preg(regs::r13())) + .with(preg(regs::r14())), // Non-preferred XMMs: the last 8 registers, which can have larger // encodings with AVX instructions. 
- vec![ - preg(regs::xmm8()), - preg(regs::xmm9()), - preg(regs::xmm10()), - preg(regs::xmm11()), - preg(regs::xmm12()), - preg(regs::xmm13()), - preg(regs::xmm14()), - preg(regs::xmm15()), - ], + PRegSet::empty() + .with(preg(regs::xmm8())) + .with(preg(regs::xmm9())) + .with(preg(regs::xmm10())) + .with(preg(regs::xmm11())) + .with(preg(regs::xmm12())) + .with(preg(regs::xmm13())) + .with(preg(regs::xmm14())) + .with(preg(regs::xmm15())), // The Vector Regclass is unused - vec![], + PRegSet::empty(), ], fixed_stack_slots: vec![], scratch_by_class: [None, None, None], }; - debug_assert_eq!(regs::r15(), regs::pinned_reg()); + debug_assert!(regs::PINNED_REG == cranelift_assembler_x64::gpr::enc::R15); if !enable_pinned_reg { - env.non_preferred_regs_by_class[0].push(preg(regs::r15())); + env.non_preferred_regs_by_class[0] = + env.non_preferred_regs_by_class[0].with(preg(regs::r15())); } env diff --git a/cranelift/codegen/src/isa/x64/inst/regs.rs b/cranelift/codegen/src/isa/x64/inst/regs.rs index b1416e88df5c..6a03ba9d4658 100644 --- a/cranelift/codegen/src/isa/x64/inst/regs.rs +++ b/cranelift/codegen/src/isa/x64/inst/regs.rs @@ -73,8 +73,9 @@ pub(crate) const fn r15() -> Reg { /// The pinned register on this architecture. /// It must be the same as Spidermonkey's HeapReg, as found in this file. /// https://searchfox.org/mozilla-central/source/js/src/jit/x64/Assembler-x64.h#99 +pub(crate) const PINNED_REG: u8 = gpr::enc::R15; pub(crate) const fn pinned_reg() -> Reg { - r15() + gpr(PINNED_REG) } const fn fpr(enc: u8) -> Reg { diff --git a/cranelift/codegen/src/machinst/reg.rs b/cranelift/codegen/src/machinst/reg.rs index dd28ee05f4c8..f269e24ca677 100644 --- a/cranelift/codegen/src/machinst/reg.rs +++ b/cranelift/codegen/src/machinst/reg.rs @@ -16,7 +16,7 @@ use serde_derive::{Deserialize, Serialize}; const PINNED_VREGS: usize = 192; /// Convert a `VReg` to its pinned `PReg`, if any. -pub fn pinned_vreg_to_preg(vreg: VReg) -> Option { +pub const fn pinned_vreg_to_preg(vreg: VReg) -> Option { if vreg.vreg() < PINNED_VREGS { Some(PReg::from_index(vreg.vreg())) } else { @@ -64,8 +64,12 @@ impl Reg { /// Get the physical register (`RealReg`), if this register is /// one. - pub fn to_real_reg(self) -> Option { - pinned_vreg_to_preg(self.0.into()).map(RealReg) + pub const fn to_real_reg(self) -> Option { + // We can't use `map` or `?` in a const fn. + match pinned_vreg_to_preg(VReg::from_bits(self.0)) { + Some(preg) => Some(RealReg(preg)), + None => None, + } } /// Get the virtual (non-physical) register, if this register is @@ -151,6 +155,11 @@ impl RealReg { pub fn hw_enc(self) -> u8 { self.0.hw_enc() as u8 } + + /// The underlying PReg. 
+ pub const fn preg(self) -> PReg { + self.0 + } } impl core::fmt::Debug for RealReg { diff --git a/cranelift/filetests/filetests/isa/aarch64/bitops.clif b/cranelift/filetests/filetests/isa/aarch64/bitops.clif index 4a302d01bde9..c7e22aa5a985 100644 --- a/cranelift/filetests/filetests/isa/aarch64/bitops.clif +++ b/cranelift/filetests/filetests/isa/aarch64/bitops.clif @@ -405,8 +405,8 @@ block0(v0: i128): ; fmov d4, x0 ; mov v4.d[1], v4.d[1], x1 ; cnt v7.16b, v4.16b -; addv b17, v7.16b -; umov w0, v17.b[0] +; addv b16, v7.16b +; umov w0, v16.b[0] ; movz x1, #0 ; ret ; @@ -415,8 +415,8 @@ block0(v0: i128): ; fmov d4, x0 ; mov v4.d[1], x1 ; cnt v7.16b, v4.16b -; addv b17, v7.16b -; umov w0, v17.b[0] +; addv b16, v7.16b +; umov w0, v16.b[0] ; mov x1, #0 ; ret @@ -1691,9 +1691,9 @@ block0(v0: i128, v1: i128): ; lsl x11, x1, #1 ; lsl x13, x11, x9 ; asr x15, x1, #63 -; orr x1, x5, x13 +; orr x0, x5, x13 ; ands xzr, x2, #64 -; csel x0, x7, x1, ne +; csel x0, x7, x0, ne ; csel x1, x15, x7, ne ; ret ; @@ -1705,9 +1705,9 @@ block0(v0: i128, v1: i128): ; lsl x11, x1, #1 ; lsl x13, x11, x9 ; asr x15, x1, #0x3f -; orr x1, x5, x13 +; orr x0, x5, x13 ; tst x2, #0x40 -; csel x0, x7, x1, ne +; csel x0, x7, x0, ne ; csel x1, x15, x7, ne ; ret diff --git a/cranelift/filetests/filetests/isa/aarch64/call.clif b/cranelift/filetests/filetests/isa/aarch64/call.clif index dae6137124e2..eb1eb7d5fd5a 100644 --- a/cranelift/filetests/filetests/isa/aarch64/call.clif +++ b/cranelift/filetests/filetests/isa/aarch64/call.clif @@ -836,14 +836,14 @@ block0(v0: i64): ; VCode: ; stp fp, lr, [sp, #-16]! ; mov fp, sp -; str x24, [sp, #-16]! +; str x19, [sp, #-16]! ; block0: -; mov x24, x0 +; mov x19, x0 ; load_ext_name_far x2, TestCase(%g)+0 -; mov x8, x24 +; mov x8, x19 ; blr x2 -; mov x0, x24 -; ldr x24, [sp], #16 +; mov x0, x19 +; ldr x19, [sp], #16 ; ldp fp, lr, [sp], #16 ; ret ; @@ -851,17 +851,17 @@ block0(v0: i64): ; block0: ; offset 0x0 ; stp x29, x30, [sp, #-0x10]! ; mov x29, sp -; str x24, [sp, #-0x10]! +; str x19, [sp, #-0x10]! ; block1: ; offset 0xc -; mov x24, x0 +; mov x19, x0 ; ldr x2, #0x18 ; b #0x20 ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %g 0 ; .byte 0x00, 0x00, 0x00, 0x00 -; mov x8, x24 +; mov x8, x19 ; blr x2 -; mov x0, x24 -; ldr x24, [sp], #0x10 +; mov x0, x19 +; ldr x19, [sp], #0x10 ; ldp x29, x30, [sp], #0x10 ; ret @@ -876,13 +876,13 @@ block0(v0: i64): ; VCode: ; stp fp, lr, [sp, #-16]! ; mov fp, sp -; str x24, [sp, #-16]! +; str x19, [sp, #-16]! ; block0: -; mov x24, x8 +; mov x19, x8 ; load_ext_name_far x2, TestCase(%g)+0 ; blr x2 -; mov x8, x24 -; ldr x24, [sp], #16 +; mov x8, x19 +; ldr x19, [sp], #16 ; ldp fp, lr, [sp], #16 ; ret ; @@ -890,16 +890,16 @@ block0(v0: i64): ; block0: ; offset 0x0 ; stp x29, x30, [sp, #-0x10]! ; mov x29, sp -; str x24, [sp, #-0x10]! +; str x19, [sp, #-0x10]! 
; block1: ; offset 0xc -; mov x24, x8 +; mov x19, x8 ; ldr x2, #0x18 ; b #0x20 ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %g 0 ; .byte 0x00, 0x00, 0x00, 0x00 ; blr x2 -; mov x8, x24 -; ldr x24, [sp], #0x10 +; mov x8, x19 +; ldr x19, [sp], #0x10 ; ldp x29, x30, [sp], #0x10 ; ret diff --git a/cranelift/filetests/filetests/isa/aarch64/fcvt-small.clif b/cranelift/filetests/filetests/isa/aarch64/fcvt-small.clif index f44b403ec278..ffc83cb4e852 100644 --- a/cranelift/filetests/filetests/isa/aarch64/fcvt-small.clif +++ b/cranelift/filetests/filetests/isa/aarch64/fcvt-small.clif @@ -88,8 +88,8 @@ block0(v0: f32): ; fcmp s0, s4 ; b.le #trap=int_ovf ; movz w8, #17280, LSL #16 -; fmov s18, w8 -; fcmp s0, s18 +; fmov s16, w8 +; fcmp s0, s16 ; b.ge #trap=int_ovf ; fcvtzu w0, s0 ; ret @@ -102,8 +102,8 @@ block0(v0: f32): ; fcmp s0, s4 ; b.le #0x30 ; mov w8, #0x43800000 -; fmov s18, w8 -; fcmp s0, s18 +; fmov s16, w8 +; fcmp s0, s16 ; b.ge #0x34 ; fcvtzu w0, s0 ; ret @@ -125,8 +125,8 @@ block0(v0: f64): ; fcmp d0, d4 ; b.le #trap=int_ovf ; movz x8, #16496, LSL #48 -; fmov d18, x8 -; fcmp d0, d18 +; fmov d16, x8 +; fcmp d0, d16 ; b.ge #trap=int_ovf ; fcvtzu w0, d0 ; ret @@ -139,8 +139,8 @@ block0(v0: f64): ; fcmp d0, d4 ; b.le #0x30 ; mov x8, #0x4070000000000000 -; fmov d18, x8 -; fcmp d0, d18 +; fmov d16, x8 +; fcmp d0, d16 ; b.ge #0x34 ; fcvtzu w0, d0 ; ret @@ -162,8 +162,8 @@ block0(v0: f32): ; fcmp s0, s4 ; b.le #trap=int_ovf ; movz w8, #18304, LSL #16 -; fmov s18, w8 -; fcmp s0, s18 +; fmov s16, w8 +; fcmp s0, s16 ; b.ge #trap=int_ovf ; fcvtzu w0, s0 ; ret @@ -176,8 +176,8 @@ block0(v0: f32): ; fcmp s0, s4 ; b.le #0x30 ; mov w8, #0x47800000 -; fmov s18, w8 -; fcmp s0, s18 +; fmov s16, w8 +; fcmp s0, s16 ; b.ge #0x34 ; fcvtzu w0, s0 ; ret @@ -199,8 +199,8 @@ block0(v0: f64): ; fcmp d0, d4 ; b.le #trap=int_ovf ; movz x8, #16624, LSL #48 -; fmov d18, x8 -; fcmp d0, d18 +; fmov d16, x8 +; fcmp d0, d16 ; b.ge #trap=int_ovf ; fcvtzu w0, d0 ; ret @@ -213,8 +213,8 @@ block0(v0: f64): ; fcmp d0, d4 ; b.le #0x30 ; mov x8, #0x40f0000000000000 -; fmov d18, x8 -; fcmp d0, d18 +; fmov d16, x8 +; fcmp d0, d16 ; b.ge #0x34 ; fcvtzu w0, d0 ; ret diff --git a/cranelift/filetests/filetests/isa/aarch64/fcvt.clif b/cranelift/filetests/filetests/isa/aarch64/fcvt.clif index 08da01b780db..9456f73f35ee 100644 --- a/cranelift/filetests/filetests/isa/aarch64/fcvt.clif +++ b/cranelift/filetests/filetests/isa/aarch64/fcvt.clif @@ -171,27 +171,27 @@ block0(v0: i8, v1: i16, v2: i32, v3: i64): ; VCode: ; block0: ; uxtb w12, w0 -; ucvtf s22, w12 +; ucvtf s16, w12 ; uxth w12, w1 -; ucvtf s23, w12 -; ucvtf s21, w2 -; ucvtf s24, x3 -; fadd s22, s22, s23 -; fadd s21, s22, s21 -; fadd s0, s21, s24 +; ucvtf s17, w12 +; ucvtf s18, w2 +; ucvtf s19, x3 +; fadd s16, s16, s17 +; fadd s16, s16, s18 +; fadd s0, s16, s19 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; uxtb w12, w0 -; ucvtf s22, w12 +; ucvtf s16, w12 ; uxth w12, w1 -; ucvtf s23, w12 -; ucvtf s21, w2 -; ucvtf s24, x3 -; fadd s22, s22, s23 -; fadd s21, s22, s21 -; fadd s0, s21, s24 +; ucvtf s17, w12 +; ucvtf s18, w2 +; ucvtf s19, x3 +; fadd s16, s16, s17 +; fadd s16, s16, s18 +; fadd s0, s16, s19 ; ret function %f11(i32x4) -> f64x2 { @@ -243,8 +243,8 @@ block0(v0: f32): ; fcmp s0, s4 ; b.le #trap=int_ovf ; movz w8, #20352, LSL #16 -; fmov s18, w8 -; fcmp s0, s18 +; fmov s16, w8 +; fcmp s0, s16 ; b.ge #trap=int_ovf ; fcvtzu w0, s0 ; ret @@ -257,8 +257,8 @@ block0(v0: f32): ; fcmp s0, s4 ; b.le #0x30 ; mov w8, #0x4f800000 -; fmov s18, w8 -; fcmp s0, s18 +; fmov s16, w8 +; 
fcmp s0, s16 ; b.ge #0x34 ; fcvtzu w0, s0 ; ret @@ -280,8 +280,8 @@ block0(v0: f32): ; fcmp s0, s4 ; b.le #trap=int_ovf ; movz w8, #24448, LSL #16 -; fmov s18, w8 -; fcmp s0, s18 +; fmov s16, w8 +; fcmp s0, s16 ; b.ge #trap=int_ovf ; fcvtzu x0, s0 ; ret @@ -294,8 +294,8 @@ block0(v0: f32): ; fcmp s0, s4 ; b.le #0x30 ; mov w8, #0x5f800000 -; fmov s18, w8 -; fcmp s0, s18 +; fmov s16, w8 +; fcmp s0, s16 ; b.ge #0x34 ; fcvtzu x0, s0 ; ret @@ -317,8 +317,8 @@ block0(v0: f64): ; fcmp d0, d4 ; b.le #trap=int_ovf ; movz x8, #16880, LSL #48 -; fmov d18, x8 -; fcmp d0, d18 +; fmov d16, x8 +; fcmp d0, d16 ; b.ge #trap=int_ovf ; fcvtzu w0, d0 ; ret @@ -331,8 +331,8 @@ block0(v0: f64): ; fcmp d0, d4 ; b.le #0x30 ; mov x8, #0x41f0000000000000 -; fmov d18, x8 -; fcmp d0, d18 +; fmov d16, x8 +; fcmp d0, d16 ; b.ge #0x34 ; fcvtzu w0, d0 ; ret @@ -354,8 +354,8 @@ block0(v0: f64): ; fcmp d0, d4 ; b.le #trap=int_ovf ; movz x8, #17392, LSL #48 -; fmov d18, x8 -; fcmp d0, d18 +; fmov d16, x8 +; fcmp d0, d16 ; b.ge #trap=int_ovf ; fcvtzu x0, d0 ; ret @@ -368,8 +368,8 @@ block0(v0: f64): ; fcmp d0, d4 ; b.le #0x30 ; mov x8, #0x43f0000000000000 -; fmov d18, x8 -; fcmp d0, d18 +; fmov d16, x8 +; fcmp d0, d16 ; b.ge #0x34 ; fcvtzu x0, d0 ; ret @@ -456,8 +456,8 @@ block0(v0: f32): ; fcmp s0, s6 ; b.lt #trap=int_ovf ; movz w10, #20224, LSL #16 -; fmov s20, w10 -; fcmp s0, s20 +; fmov s16, w10 +; fcmp s0, s16 ; b.ge #trap=int_ovf ; fcvtzs w0, s0 ; ret @@ -471,8 +471,8 @@ block0(v0: f32): ; fcmp s0, s6 ; b.lt #0x34 ; mov w10, #0x4f000000 -; fmov s20, w10 -; fcmp s0, s20 +; fmov s16, w10 +; fcmp s0, s16 ; b.ge #0x38 ; fcvtzs w0, s0 ; ret @@ -495,8 +495,8 @@ block0(v0: f32): ; fcmp s0, s6 ; b.lt #trap=int_ovf ; movz w10, #24320, LSL #16 -; fmov s20, w10 -; fcmp s0, s20 +; fmov s16, w10 +; fcmp s0, s16 ; b.ge #trap=int_ovf ; fcvtzs x0, s0 ; ret @@ -510,8 +510,8 @@ block0(v0: f32): ; fcmp s0, s6 ; b.lt #0x34 ; mov w10, #0x5f000000 -; fmov s20, w10 -; fcmp s0, s20 +; fmov s16, w10 +; fcmp s0, s16 ; b.ge #0x38 ; fcvtzs x0, s0 ; ret @@ -533,8 +533,8 @@ block0(v0: f64): ; fcmp d0, d4 ; b.le #trap=int_ovf ; movz x8, #16864, LSL #48 -; fmov d18, x8 -; fcmp d0, d18 +; fmov d16, x8 +; fcmp d0, d16 ; b.ge #trap=int_ovf ; fcvtzs w0, d0 ; ret @@ -547,8 +547,8 @@ block0(v0: f64): ; fcmp d0, d4 ; b.le #0x30 ; mov x8, #0x41e0000000000000 -; fmov d18, x8 -; fcmp d0, d18 +; fmov d16, x8 +; fcmp d0, d16 ; b.ge #0x34 ; fcvtzs w0, d0 ; ret @@ -573,8 +573,8 @@ block0(v0: f64): ; fcmp d0, d6 ; b.lt #trap=int_ovf ; movz x10, #17376, LSL #48 -; fmov d20, x10 -; fcmp d0, d20 +; fmov d16, x10 +; fcmp d0, d16 ; b.ge #trap=int_ovf ; fcvtzs x0, d0 ; ret @@ -588,8 +588,8 @@ block0(v0: f64): ; fcmp d0, d6 ; b.lt #0x34 ; mov x10, #0x43e0000000000000 -; fmov d20, x10 -; fcmp d0, d20 +; fmov d16, x10 +; fcmp d0, d16 ; b.ge #0x38 ; fcvtzs x0, d0 ; ret diff --git a/cranelift/filetests/filetests/isa/aarch64/floating-point.clif b/cranelift/filetests/filetests/isa/aarch64/floating-point.clif index 647ace4dc728..e563fdb955f2 100644 --- a/cranelift/filetests/filetests/isa/aarch64/floating-point.clif +++ b/cranelift/filetests/filetests/isa/aarch64/floating-point.clif @@ -532,8 +532,8 @@ block0(v0: f32): ; fcmp s0, s4 ; b.le #trap=int_ovf ; movz w8, #20352, LSL #16 -; fmov s18, w8 -; fcmp s0, s18 +; fmov s16, w8 +; fcmp s0, s16 ; b.ge #trap=int_ovf ; fcvtzu w0, s0 ; ret @@ -546,8 +546,8 @@ block0(v0: f32): ; fcmp s0, s4 ; b.le #0x30 ; mov w8, #0x4f800000 -; fmov s18, w8 -; fcmp s0, s18 +; fmov s16, w8 +; fcmp s0, s16 ; b.ge #0x34 ; fcvtzu w0, s0 ; ret @@ -570,8 
+570,8 @@ block0(v0: f32): ; fcmp s0, s6 ; b.lt #trap=int_ovf ; movz w10, #20224, LSL #16 -; fmov s20, w10 -; fcmp s0, s20 +; fmov s16, w10 +; fcmp s0, s16 ; b.ge #trap=int_ovf ; fcvtzs w0, s0 ; ret @@ -585,8 +585,8 @@ block0(v0: f32): ; fcmp s0, s6 ; b.lt #0x34 ; mov w10, #0x4f000000 -; fmov s20, w10 -; fcmp s0, s20 +; fmov s16, w10 +; fcmp s0, s16 ; b.ge #0x38 ; fcvtzs w0, s0 ; ret @@ -608,8 +608,8 @@ block0(v0: f32): ; fcmp s0, s4 ; b.le #trap=int_ovf ; movz w8, #24448, LSL #16 -; fmov s18, w8 -; fcmp s0, s18 +; fmov s16, w8 +; fcmp s0, s16 ; b.ge #trap=int_ovf ; fcvtzu x0, s0 ; ret @@ -622,8 +622,8 @@ block0(v0: f32): ; fcmp s0, s4 ; b.le #0x30 ; mov w8, #0x5f800000 -; fmov s18, w8 -; fcmp s0, s18 +; fmov s16, w8 +; fcmp s0, s16 ; b.ge #0x34 ; fcvtzu x0, s0 ; ret @@ -646,8 +646,8 @@ block0(v0: f32): ; fcmp s0, s6 ; b.lt #trap=int_ovf ; movz w10, #24320, LSL #16 -; fmov s20, w10 -; fcmp s0, s20 +; fmov s16, w10 +; fcmp s0, s16 ; b.ge #trap=int_ovf ; fcvtzs x0, s0 ; ret @@ -661,8 +661,8 @@ block0(v0: f32): ; fcmp s0, s6 ; b.lt #0x34 ; mov w10, #0x5f000000 -; fmov s20, w10 -; fcmp s0, s20 +; fmov s16, w10 +; fcmp s0, s16 ; b.ge #0x38 ; fcvtzs x0, s0 ; ret @@ -684,8 +684,8 @@ block0(v0: f64): ; fcmp d0, d4 ; b.le #trap=int_ovf ; movz x8, #16880, LSL #48 -; fmov d18, x8 -; fcmp d0, d18 +; fmov d16, x8 +; fcmp d0, d16 ; b.ge #trap=int_ovf ; fcvtzu w0, d0 ; ret @@ -698,8 +698,8 @@ block0(v0: f64): ; fcmp d0, d4 ; b.le #0x30 ; mov x8, #0x41f0000000000000 -; fmov d18, x8 -; fcmp d0, d18 +; fmov d16, x8 +; fcmp d0, d16 ; b.ge #0x34 ; fcvtzu w0, d0 ; ret @@ -721,8 +721,8 @@ block0(v0: f64): ; fcmp d0, d4 ; b.le #trap=int_ovf ; movz x8, #16864, LSL #48 -; fmov d18, x8 -; fcmp d0, d18 +; fmov d16, x8 +; fcmp d0, d16 ; b.ge #trap=int_ovf ; fcvtzs w0, d0 ; ret @@ -735,8 +735,8 @@ block0(v0: f64): ; fcmp d0, d4 ; b.le #0x30 ; mov x8, #0x41e0000000000000 -; fmov d18, x8 -; fcmp d0, d18 +; fmov d16, x8 +; fcmp d0, d16 ; b.ge #0x34 ; fcvtzs w0, d0 ; ret @@ -760,8 +760,8 @@ block0(v0: f64): ; fcmp d0, d4 ; b.le #trap=int_ovf ; movz x8, #17392, LSL #48 -; fmov d18, x8 -; fcmp d0, d18 +; fmov d16, x8 +; fcmp d0, d16 ; b.ge #trap=int_ovf ; fcvtzu x0, d0 ; ret @@ -774,8 +774,8 @@ block0(v0: f64): ; fcmp d0, d4 ; b.le #0x30 ; mov x8, #0x43f0000000000000 -; fmov d18, x8 -; fcmp d0, d18 +; fmov d16, x8 +; fcmp d0, d16 ; b.ge #0x34 ; fcvtzu x0, d0 ; ret @@ -798,8 +798,8 @@ block0(v0: f64): ; fcmp d0, d6 ; b.lt #trap=int_ovf ; movz x10, #17376, LSL #48 -; fmov d20, x10 -; fcmp d0, d20 +; fmov d16, x10 +; fcmp d0, d16 ; b.ge #trap=int_ovf ; fcvtzs x0, d0 ; ret @@ -813,8 +813,8 @@ block0(v0: f64): ; fcmp d0, d6 ; b.lt #0x34 ; mov x10, #0x43e0000000000000 -; fmov d20, x10 -; fcmp d0, d20 +; fmov d16, x10 +; fcmp d0, d16 ; b.ge #0x38 ; fcvtzs x0, d0 ; ret diff --git a/cranelift/filetests/filetests/isa/aarch64/fma.clif b/cranelift/filetests/filetests/isa/aarch64/fma.clif index 95e5e37c90a7..3f9cae79950b 100644 --- a/cranelift/filetests/filetests/isa/aarch64/fma.clif +++ b/cranelift/filetests/filetests/isa/aarch64/fma.clif @@ -360,26 +360,26 @@ block0(v0: f32x4, v1: f32x4, v2: f32x4): ; block0: ; movz w6, #7452 ; movk w6, w6, #7966, LSL #16 -; dup v17.4s, w6 +; dup v16.4s, w6 ; mov v31.16b, v1.16b ; mov v30.16b, v31.16b -; tbl v19.16b, { v30.16b, v31.16b }, v17.16b -; mov v22.16b, v0.16b +; tbl v16.16b, { v30.16b, v31.16b }, v16.16b +; mov v17.16b, v0.16b ; mov v0.16b, v2.16b -; fmla v0.4s, v0.4s, v22.4s, v19.4s +; fmla v0.4s, v0.4s, v17.4s, v16.4s ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; mov w6, #0x1d1c ; movk 
w6, #0x1f1e, lsl #16 -; dup v17.4s, w6 +; dup v16.4s, w6 ; mov v31.16b, v1.16b ; mov v30.16b, v31.16b -; tbl v19.16b, {v30.16b, v31.16b}, v17.16b -; mov v22.16b, v0.16b +; tbl v16.16b, {v30.16b, v31.16b}, v16.16b +; mov v17.16b, v0.16b ; mov v0.16b, v2.16b -; fmla v0.4s, v22.4s, v19.4s +; fmla v0.4s, v17.4s, v16.4s ; ret function %f64x2_splat0(f64x2, f64x2, f64x2) -> f64x2 { diff --git a/cranelift/filetests/filetests/isa/aarch64/nan-canonicalization.clif b/cranelift/filetests/filetests/isa/aarch64/nan-canonicalization.clif index 306d96a6e2c5..619b245e1ad3 100644 --- a/cranelift/filetests/filetests/isa/aarch64/nan-canonicalization.clif +++ b/cranelift/filetests/filetests/isa/aarch64/nan-canonicalization.clif @@ -13,8 +13,8 @@ block0(v0: f32x4, v1: f32x4): ; movq %rsp, %rbp ; block0: ; addps %xmm1, %xmm0 -; movl $0x7fc00000, %r10d -; movd %r10d, %xmm7 +; movl $0x7fc00000, %edi +; movd %edi, %xmm7 ; shufps $0x0, (%rip), %xmm7 ; movdqa %xmm0, %xmm1 ; cmpunordps %xmm0, %xmm1 @@ -33,9 +33,9 @@ block0(v0: f32x4, v1: f32x4): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; addps %xmm1, %xmm0 -; movl $0x7fc00000, %r10d -; movd %r10d, %xmm7 -; shufps $0, 0x26(%rip), %xmm7 +; movl $0x7fc00000, %edi +; movd %edi, %xmm7 +; shufps $0, 0x28(%rip), %xmm7 ; movdqa %xmm0, %xmm1 ; cmpunordps %xmm0, %xmm1 ; movdqa %xmm0, %xmm2 @@ -49,6 +49,7 @@ block0(v0: f32x4, v1: f32x4): ; addb %al, (%rax) ; addb %al, (%rax) ; addb %al, (%rax) +; addb %al, (%rax) ; sarb $0, (%rdi) ; addb %al, (%rax) ; addb %al, (%rax) @@ -67,22 +68,22 @@ block0(v0: f64, v1: f64): ; movq %rsp, %rbp ; block0: ; addsd %xmm1, %xmm0 -; movdqa %xmm0, %xmm7 -; movabsq $0x7ff8000000000000, %rcx -; movq %rcx, %xmm6 -; uninit %xmm5 -; xorpd %xmm5, %xmm5 -; movsd %xmm6, %xmm5 +; movdqa %xmm0, %xmm2 +; movabsq $0x7ff8000000000000, %rax +; movq %rax, %xmm0 +; uninit %xmm1 +; xorpd %xmm1, %xmm1 +; movsd %xmm0, %xmm1 ; uninit %xmm0 ; xorpd %xmm0, %xmm0 -; movdqa %xmm7, %xmm6 -; movsd %xmm6, %xmm0 -; movdqa %xmm0, %xmm6 -; cmpunordpd %xmm0, %xmm6 +; movsd %xmm2, %xmm0 +; movdqa %xmm0, %xmm2 +; cmpunordpd %xmm0, %xmm2 ; movdqa %xmm0, %xmm3 -; movdqa %xmm6, %xmm0 -; pblendvb %xmm0, %xmm5, %xmm3 -; movdqa %xmm3, %xmm0 +; movdqa %xmm2, %xmm0 +; movdqa %xmm3, %xmm2 +; pblendvb %xmm0, %xmm1, %xmm2 +; movdqa %xmm2, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -93,20 +94,20 @@ block0(v0: f64, v1: f64): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; addsd %xmm1, %xmm0 -; movdqa %xmm0, %xmm7 -; movabsq $0x7ff8000000000000, %rcx -; movq %rcx, %xmm6 -; xorpd %xmm5, %xmm5 -; movsd %xmm6, %xmm5 +; movdqa %xmm0, %xmm2 +; movabsq $0x7ff8000000000000, %rax +; movq %rax, %xmm0 +; xorpd %xmm1, %xmm1 +; movsd %xmm0, %xmm1 ; xorpd %xmm0, %xmm0 -; movdqa %xmm7, %xmm6 -; movsd %xmm6, %xmm0 -; movdqa %xmm0, %xmm6 -; cmpunordpd %xmm0, %xmm6 +; movsd %xmm2, %xmm0 +; movdqa %xmm0, %xmm2 +; cmpunordpd %xmm0, %xmm2 ; movdqa %xmm0, %xmm3 -; movdqa %xmm6, %xmm0 -; pblendvb %xmm0, %xmm5, %xmm3 -; movdqa %xmm3, %xmm0 +; movdqa %xmm2, %xmm0 +; movdqa %xmm3, %xmm2 +; pblendvb %xmm0, %xmm1, %xmm2 +; movdqa %xmm2, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -122,22 +123,22 @@ block0(v0: f32, v1: f32): ; movq %rsp, %rbp ; block0: ; addss %xmm1, %xmm0 -; movdqa %xmm0, %xmm7 -; movl $0x7fc00000, %ecx -; movd %ecx, %xmm6 -; uninit %xmm5 -; xorps %xmm5, %xmm5 -; movss %xmm6, %xmm5 +; movdqa %xmm0, %xmm2 +; movl $0x7fc00000, %eax +; movd %eax, %xmm0 +; uninit %xmm1 +; xorps %xmm1, %xmm1 +; movss %xmm0, %xmm1 ; uninit %xmm0 ; xorps %xmm0, %xmm0 -; movdqa %xmm7, %xmm6 -; movss %xmm6, %xmm0 -; movdqa %xmm0, 
%xmm6 -; cmpunordps %xmm0, %xmm6 +; movss %xmm2, %xmm0 +; movdqa %xmm0, %xmm2 +; cmpunordps %xmm0, %xmm2 ; movdqa %xmm0, %xmm3 -; movdqa %xmm6, %xmm0 -; pblendvb %xmm0, %xmm5, %xmm3 -; movdqa %xmm3, %xmm0 +; movdqa %xmm2, %xmm0 +; movdqa %xmm3, %xmm2 +; pblendvb %xmm0, %xmm1, %xmm2 +; movdqa %xmm2, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -148,20 +149,20 @@ block0(v0: f32, v1: f32): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; addss %xmm1, %xmm0 -; movdqa %xmm0, %xmm7 -; movl $0x7fc00000, %ecx -; movd %ecx, %xmm6 -; xorps %xmm5, %xmm5 -; movss %xmm6, %xmm5 +; movdqa %xmm0, %xmm2 +; movl $0x7fc00000, %eax +; movd %eax, %xmm0 +; xorps %xmm1, %xmm1 +; movss %xmm0, %xmm1 ; xorps %xmm0, %xmm0 -; movdqa %xmm7, %xmm6 -; movss %xmm6, %xmm0 -; movdqa %xmm0, %xmm6 -; cmpunordps %xmm0, %xmm6 +; movss %xmm2, %xmm0 +; movdqa %xmm0, %xmm2 +; cmpunordps %xmm0, %xmm2 ; movdqa %xmm0, %xmm3 -; movdqa %xmm6, %xmm0 -; pblendvb %xmm0, %xmm5, %xmm3 -; movdqa %xmm3, %xmm0 +; movdqa %xmm2, %xmm0 +; movdqa %xmm3, %xmm2 +; pblendvb %xmm0, %xmm1, %xmm2 +; movdqa %xmm2, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/aarch64/prologue.clif b/cranelift/filetests/filetests/isa/aarch64/prologue.clif index 07238158741a..d6f3d66dc93b 100644 --- a/cranelift/filetests/filetests/isa/aarch64/prologue.clif +++ b/cranelift/filetests/filetests/isa/aarch64/prologue.clif @@ -83,15 +83,6 @@ block0(v0: f64): ; stp d10, d11, [sp, #-16]! ; stp d8, d9, [sp, #-16]! ; block0: -; fadd d23, d0, d0 -; fadd d24, d0, d0 -; fadd d25, d0, d0 -; fadd d26, d0, d0 -; fadd d27, d0, d0 -; fadd d28, d0, d0 -; fadd d29, d0, d0 -; fadd d30, d0, d0 -; fadd d31, d0, d0 ; fadd d1, d0, d0 ; fadd d2, d0, d0 ; fadd d3, d0, d0 @@ -106,7 +97,15 @@ block0(v0: f64): ; fadd d20, d0, d0 ; fadd d21, d0, d0 ; fadd d22, d0, d0 -; fadd d15, d0, d0 +; fadd d23, d0, d0 +; fadd d24, d0, d0 +; fadd d25, d0, d0 +; fadd d26, d0, d0 +; fadd d27, d0, d0 +; fadd d28, d0, d0 +; fadd d29, d0, d0 +; fadd d30, d0, d0 +; fadd d31, d0, d0 ; fadd d8, d0, d0 ; fadd d9, d0, d0 ; fadd d10, d0, d0 @@ -114,37 +113,38 @@ block0(v0: f64): ; fadd d12, d0, d0 ; fadd d13, d0, d0 ; fadd d14, d0, d0 -; fadd d23, d0, d23 -; fadd d24, d24, d25 -; fadd d25, d26, d27 -; fadd d26, d28, d29 -; fadd d27, d30, d31 -; fadd d28, d1, d2 -; fadd d29, d3, d4 -; fadd d30, d5, d6 -; fadd d31, d7, d16 -; fadd d0, d17, d18 -; fadd d1, d19, d20 -; fadd d2, d21, d22 -; fadd d3, d15, d8 -; fadd d4, d9, d10 -; fadd d5, d11, d12 -; fadd d6, d13, d14 -; fadd d23, d23, d24 -; fadd d24, d25, d26 -; fadd d25, d27, d28 -; fadd d26, d29, d30 -; fadd d27, d31, d0 -; fadd d28, d1, d2 -; fadd d29, d3, d4 -; fadd d30, d5, d6 -; fadd d23, d23, d24 -; fadd d24, d25, d26 -; fadd d25, d27, d28 -; fadd d26, d29, d30 -; fadd d23, d23, d24 -; fadd d24, d25, d26 -; fadd d0, d23, d24 +; fadd d15, d0, d0 +; fadd d0, d0, d1 +; fadd d1, d2, d3 +; fadd d2, d4, d5 +; fadd d3, d6, d7 +; fadd d4, d16, d17 +; fadd d5, d18, d19 +; fadd d6, d20, d21 +; fadd d7, d22, d23 +; fadd d16, d24, d25 +; fadd d17, d26, d27 +; fadd d18, d28, d29 +; fadd d19, d30, d31 +; fadd d20, d8, d9 +; fadd d21, d10, d11 +; fadd d22, d12, d13 +; fadd d23, d14, d15 +; fadd d0, d0, d1 +; fadd d1, d2, d3 +; fadd d2, d4, d5 +; fadd d3, d6, d7 +; fadd d4, d16, d17 +; fadd d5, d18, d19 +; fadd d6, d20, d21 +; fadd d7, d22, d23 +; fadd d0, d0, d1 +; fadd d1, d2, d3 +; fadd d2, d4, d5 +; fadd d3, d6, d7 +; fadd d0, d0, d1 +; fadd d1, d2, d3 +; fadd d0, d0, d1 ; ldp d8, d9, [sp], #16 ; ldp d10, d11, [sp], #16 ; ldp d12, d13, [sp], #16 
@@ -161,15 +161,6 @@ block0(v0: f64): ; stp d10, d11, [sp, #-0x10]! ; stp d8, d9, [sp, #-0x10]! ; block1: ; offset 0x18 -; fadd d23, d0, d0 -; fadd d24, d0, d0 -; fadd d25, d0, d0 -; fadd d26, d0, d0 -; fadd d27, d0, d0 -; fadd d28, d0, d0 -; fadd d29, d0, d0 -; fadd d30, d0, d0 -; fadd d31, d0, d0 ; fadd d1, d0, d0 ; fadd d2, d0, d0 ; fadd d3, d0, d0 @@ -184,7 +175,15 @@ block0(v0: f64): ; fadd d20, d0, d0 ; fadd d21, d0, d0 ; fadd d22, d0, d0 -; fadd d15, d0, d0 +; fadd d23, d0, d0 +; fadd d24, d0, d0 +; fadd d25, d0, d0 +; fadd d26, d0, d0 +; fadd d27, d0, d0 +; fadd d28, d0, d0 +; fadd d29, d0, d0 +; fadd d30, d0, d0 +; fadd d31, d0, d0 ; fadd d8, d0, d0 ; fadd d9, d0, d0 ; fadd d10, d0, d0 @@ -192,37 +191,38 @@ block0(v0: f64): ; fadd d12, d0, d0 ; fadd d13, d0, d0 ; fadd d14, d0, d0 -; fadd d23, d0, d23 -; fadd d24, d24, d25 -; fadd d25, d26, d27 -; fadd d26, d28, d29 -; fadd d27, d30, d31 -; fadd d28, d1, d2 -; fadd d29, d3, d4 -; fadd d30, d5, d6 -; fadd d31, d7, d16 -; fadd d0, d17, d18 -; fadd d1, d19, d20 -; fadd d2, d21, d22 -; fadd d3, d15, d8 -; fadd d4, d9, d10 -; fadd d5, d11, d12 -; fadd d6, d13, d14 -; fadd d23, d23, d24 -; fadd d24, d25, d26 -; fadd d25, d27, d28 -; fadd d26, d29, d30 -; fadd d27, d31, d0 -; fadd d28, d1, d2 -; fadd d29, d3, d4 -; fadd d30, d5, d6 -; fadd d23, d23, d24 -; fadd d24, d25, d26 -; fadd d25, d27, d28 -; fadd d26, d29, d30 -; fadd d23, d23, d24 -; fadd d24, d25, d26 -; fadd d0, d23, d24 +; fadd d15, d0, d0 +; fadd d0, d0, d1 +; fadd d1, d2, d3 +; fadd d2, d4, d5 +; fadd d3, d6, d7 +; fadd d4, d16, d17 +; fadd d5, d18, d19 +; fadd d6, d20, d21 +; fadd d7, d22, d23 +; fadd d16, d24, d25 +; fadd d17, d26, d27 +; fadd d18, d28, d29 +; fadd d19, d30, d31 +; fadd d20, d8, d9 +; fadd d21, d10, d11 +; fadd d22, d12, d13 +; fadd d23, d14, d15 +; fadd d0, d0, d1 +; fadd d1, d2, d3 +; fadd d2, d4, d5 +; fadd d3, d6, d7 +; fadd d4, d16, d17 +; fadd d5, d18, d19 +; fadd d6, d20, d21 +; fadd d7, d22, d23 +; fadd d0, d0, d1 +; fadd d1, d2, d3 +; fadd d2, d4, d5 +; fadd d3, d6, d7 +; fadd d0, d0, d1 +; fadd d1, d2, d3 +; fadd d0, d0, d1 ; ldp d8, d9, [sp], #0x10 ; ldp d10, d11, [sp], #0x10 ; ldp d12, d13, [sp], #0x10 @@ -279,10 +279,14 @@ block0(v0: i64): ; VCode: ; stp fp, lr, [sp, #-16]! ; mov fp, sp -; str x28, [sp, #-16]! -; stp x21, x27, [sp, #-16]! +; str x21, [sp, #-16]! +; stp x19, x20, [sp, #-16]! 
; block0: -; add x5, x0, x0 +; add x1, x0, x0 +; add x2, x0, x1 +; add x3, x0, x2 +; add x4, x0, x3 +; add x5, x0, x4 ; add x6, x0, x5 ; add x7, x0, x6 ; add x8, x0, x7 @@ -293,33 +297,29 @@ block0(v0: i64): ; add x13, x0, x12 ; add x14, x0, x13 ; add x15, x0, x14 -; add x1, x0, x15 -; add x2, x0, x1 -; add x3, x0, x2 -; add x4, x0, x3 -; add x27, x0, x4 -; add x28, x0, x27 -; add x21, x0, x28 -; add x5, x0, x5 -; add x6, x6, x7 -; add x7, x8, x9 -; add x8, x10, x11 -; add x9, x12, x13 -; add x10, x14, x15 -; add x11, x1, x2 -; add x12, x3, x4 -; add x13, x27, x28 -; add x5, x21, x5 -; add x6, x6, x7 -; add x7, x8, x9 -; add x8, x10, x11 -; add x9, x12, x13 -; add x5, x5, x6 -; add x6, x7, x8 -; add x5, x9, x5 -; add x0, x6, x5 -; ldp x21, x27, [sp], #16 -; ldr x28, [sp], #16 +; add x19, x0, x15 +; add x20, x0, x19 +; add x21, x0, x20 +; add x0, x0, x1 +; add x1, x2, x3 +; add x2, x4, x5 +; add x3, x6, x7 +; add x4, x8, x9 +; add x5, x10, x11 +; add x6, x12, x13 +; add x7, x14, x15 +; add x8, x19, x20 +; add x0, x21, x0 +; add x1, x1, x2 +; add x2, x3, x4 +; add x3, x5, x6 +; add x4, x7, x8 +; add x0, x0, x1 +; add x1, x2, x3 +; add x0, x4, x0 +; add x0, x1, x0 +; ldp x19, x20, [sp], #16 +; ldr x21, [sp], #16 ; ldp fp, lr, [sp], #16 ; ret ; @@ -327,10 +327,14 @@ block0(v0: i64): ; block0: ; offset 0x0 ; stp x29, x30, [sp, #-0x10]! ; mov x29, sp -; str x28, [sp, #-0x10]! -; stp x21, x27, [sp, #-0x10]! +; str x21, [sp, #-0x10]! +; stp x19, x20, [sp, #-0x10]! ; block1: ; offset 0x10 -; add x5, x0, x0 +; add x1, x0, x0 +; add x2, x0, x1 +; add x3, x0, x2 +; add x4, x0, x3 +; add x5, x0, x4 ; add x6, x0, x5 ; add x7, x0, x6 ; add x8, x0, x7 @@ -341,33 +345,29 @@ block0(v0: i64): ; add x13, x0, x12 ; add x14, x0, x13 ; add x15, x0, x14 -; add x1, x0, x15 -; add x2, x0, x1 -; add x3, x0, x2 -; add x4, x0, x3 -; add x27, x0, x4 -; add x28, x0, x27 -; add x21, x0, x28 -; add x5, x0, x5 -; add x6, x6, x7 -; add x7, x8, x9 -; add x8, x10, x11 -; add x9, x12, x13 -; add x10, x14, x15 -; add x11, x1, x2 -; add x12, x3, x4 -; add x13, x27, x28 -; add x5, x21, x5 -; add x6, x6, x7 -; add x7, x8, x9 -; add x8, x10, x11 -; add x9, x12, x13 -; add x5, x5, x6 -; add x6, x7, x8 -; add x5, x9, x5 -; add x0, x6, x5 -; ldp x21, x27, [sp], #0x10 -; ldr x28, [sp], #0x10 +; add x19, x0, x15 +; add x20, x0, x19 +; add x21, x0, x20 +; add x0, x0, x1 +; add x1, x2, x3 +; add x2, x4, x5 +; add x3, x6, x7 +; add x4, x8, x9 +; add x5, x10, x11 +; add x6, x12, x13 +; add x7, x14, x15 +; add x8, x19, x20 +; add x0, x21, x0 +; add x1, x1, x2 +; add x2, x3, x4 +; add x3, x5, x6 +; add x4, x7, x8 +; add x0, x0, x1 +; add x1, x2, x3 +; add x0, x4, x0 +; add x0, x1, x0 +; ldp x19, x20, [sp], #0x10 +; ldr x21, [sp], #0x10 ; ldp x29, x30, [sp], #0x10 ; ret diff --git a/cranelift/filetests/filetests/isa/aarch64/return-call-indirect.clif b/cranelift/filetests/filetests/isa/aarch64/return-call-indirect.clif index d8ca4f4486e2..9c746fad53f8 100644 --- a/cranelift/filetests/filetests/isa/aarch64/return-call-indirect.clif +++ b/cranelift/filetests/filetests/isa/aarch64/return-call-indirect.clif @@ -233,43 +233,43 @@ block0: ; movz x5, #25 ; movz x6, #30 ; movz x7, #35 -; movz x10, #40 -; movz x11, #45 -; movz x12, #50 -; movz x13, #55 -; movz x14, #60 -; movz x15, #65 -; movz x0, #70 -; movz x1, #75 -; movz x8, #80 -; movz x9, #85 +; movz x0, #40 +; movz x1, #45 +; movz x8, #50 +; movz x9, #55 +; movz x10, #60 +; movz x11, #65 +; movz x12, #70 +; movz x13, #75 +; movz x14, #80 +; movz x15, #85 ; movz x26, #90 ; movz x27, #95 ; movz 
x28, #100 -; movz x21, #105 -; movz x19, #110 -; movz x20, #115 +; movz x19, #105 +; movz x20, #110 +; movz x21, #115 ; movz x22, #120 ; movz x23, #125 ; movz x24, #130 ; movz x25, #135 ; load_ext_name_far x2, TestCase(%tail_callee_stack_args)+0 -; str x10, [sp, #112] -; str x11, [sp, #120] -; str x12, [sp, #128] -; str x13, [sp, #136] -; str x14, [sp, #144] -; str x15, [sp, #152] -; str x0, [sp, #160] -; str x1, [sp, #168] -; str x8, [sp, #176] -; str x9, [sp, #184] +; str x0, [sp, #112] +; str x1, [sp, #120] +; str x8, [sp, #128] +; str x9, [sp, #136] +; str x10, [sp, #144] +; str x11, [sp, #152] +; str x12, [sp, #160] +; str x13, [sp, #168] +; str x14, [sp, #176] +; str x15, [sp, #184] ; str x26, [sp, #192] ; str x27, [sp, #200] ; str x28, [sp, #208] -; str x21, [sp, #216] -; str x19, [sp, #224] -; str x20, [sp, #232] +; str x19, [sp, #216] +; str x20, [sp, #224] +; str x21, [sp, #232] ; str x22, [sp, #240] ; str x23, [sp, #248] ; str x24, [sp, #256] @@ -300,22 +300,22 @@ block0: ; mov x5, #0x19 ; mov x6, #0x1e ; mov x7, #0x23 -; mov x10, #0x28 -; mov x11, #0x2d -; mov x12, #0x32 -; mov x13, #0x37 -; mov x14, #0x3c -; mov x15, #0x41 -; mov x0, #0x46 -; mov x1, #0x4b -; mov x8, #0x50 -; mov x9, #0x55 +; mov x0, #0x28 +; mov x1, #0x2d +; mov x8, #0x32 +; mov x9, #0x37 +; mov x10, #0x3c +; mov x11, #0x41 +; mov x12, #0x46 +; mov x13, #0x4b +; mov x14, #0x50 +; mov x15, #0x55 ; mov x26, #0x5a ; mov x27, #0x5f ; mov x28, #0x64 -; mov x21, #0x69 -; mov x19, #0x6e -; mov x20, #0x73 +; mov x19, #0x69 +; mov x20, #0x6e +; mov x21, #0x73 ; mov x22, #0x78 ; mov x23, #0x7d ; mov x24, #0x82 @@ -324,22 +324,22 @@ block0: ; b #0xac ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %tail_callee_stack_args 0 ; .byte 0x00, 0x00, 0x00, 0x00 -; stur x10, [sp, #0x70] -; stur x11, [sp, #0x78] -; stur x12, [sp, #0x80] -; stur x13, [sp, #0x88] -; stur x14, [sp, #0x90] -; stur x15, [sp, #0x98] -; stur x0, [sp, #0xa0] -; stur x1, [sp, #0xa8] -; stur x8, [sp, #0xb0] -; stur x9, [sp, #0xb8] +; stur x0, [sp, #0x70] +; stur x1, [sp, #0x78] +; stur x8, [sp, #0x80] +; stur x9, [sp, #0x88] +; stur x10, [sp, #0x90] +; stur x11, [sp, #0x98] +; stur x12, [sp, #0xa0] +; stur x13, [sp, #0xa8] +; stur x14, [sp, #0xb0] +; stur x15, [sp, #0xb8] ; stur x26, [sp, #0xc0] ; stur x27, [sp, #0xc8] ; stur x28, [sp, #0xd0] -; stur x21, [sp, #0xd8] -; stur x19, [sp, #0xe0] -; stur x20, [sp, #0xe8] +; stur x19, [sp, #0xd8] +; stur x20, [sp, #0xe0] +; stur x21, [sp, #0xe8] ; stur x22, [sp, #0xf0] ; stur x23, [sp, #0xf8] ; str x24, [sp, #0x100] diff --git a/cranelift/filetests/filetests/isa/aarch64/return-call.clif b/cranelift/filetests/filetests/isa/aarch64/return-call.clif index 96b36885ef9e..ffb8ee2a38db 100644 --- a/cranelift/filetests/filetests/isa/aarch64/return-call.clif +++ b/cranelift/filetests/filetests/isa/aarch64/return-call.clif @@ -243,42 +243,42 @@ block0: ; movz x5, #25 ; movz x6, #30 ; movz x7, #35 -; movz x10, #40 -; movz x11, #45 -; movz x12, #50 -; movz x13, #55 -; movz x14, #60 -; movz x15, #65 -; movz x0, #70 -; movz x1, #75 -; movz x8, #80 -; movz x9, #85 +; movz x0, #40 +; movz x1, #45 +; movz x8, #50 +; movz x9, #55 +; movz x10, #60 +; movz x11, #65 +; movz x12, #70 +; movz x13, #75 +; movz x14, #80 +; movz x15, #85 ; movz x26, #90 ; movz x27, #95 ; movz x28, #100 -; movz x21, #105 -; movz x19, #110 -; movz x20, #115 +; movz x19, #105 +; movz x20, #110 +; movz x21, #115 ; movz x22, #120 ; movz x23, #125 ; movz x24, #130 ; movz x25, #135 -; str x10, [sp, #96] -; str x11, [sp, #104] -; str x12, [sp, #112] -; str 
x13, [sp, #120] -; str x14, [sp, #128] -; str x15, [sp, #136] -; str x0, [sp, #144] -; str x1, [sp, #152] -; str x8, [sp, #160] -; str x9, [sp, #168] +; str x0, [sp, #96] +; str x1, [sp, #104] +; str x8, [sp, #112] +; str x9, [sp, #120] +; str x10, [sp, #128] +; str x11, [sp, #136] +; str x12, [sp, #144] +; str x13, [sp, #152] +; str x14, [sp, #160] +; str x15, [sp, #168] ; str x26, [sp, #176] ; str x27, [sp, #184] ; str x28, [sp, #192] -; str x21, [sp, #200] -; str x19, [sp, #208] -; str x20, [sp, #216] +; str x19, [sp, #200] +; str x20, [sp, #208] +; str x21, [sp, #216] ; str x22, [sp, #224] ; str x23, [sp, #232] ; str x24, [sp, #240] @@ -306,42 +306,42 @@ block0: ; mov x5, #0x19 ; mov x6, #0x1e ; mov x7, #0x23 -; mov x10, #0x28 -; mov x11, #0x2d -; mov x12, #0x32 -; mov x13, #0x37 -; mov x14, #0x3c -; mov x15, #0x41 -; mov x0, #0x46 -; mov x1, #0x4b -; mov x8, #0x50 -; mov x9, #0x55 +; mov x0, #0x28 +; mov x1, #0x2d +; mov x8, #0x32 +; mov x9, #0x37 +; mov x10, #0x3c +; mov x11, #0x41 +; mov x12, #0x46 +; mov x13, #0x4b +; mov x14, #0x50 +; mov x15, #0x55 ; mov x26, #0x5a ; mov x27, #0x5f ; mov x28, #0x64 -; mov x21, #0x69 -; mov x19, #0x6e -; mov x20, #0x73 +; mov x19, #0x69 +; mov x20, #0x6e +; mov x21, #0x73 ; mov x22, #0x78 ; mov x23, #0x7d ; mov x24, #0x82 ; mov x25, #0x87 -; stur x10, [sp, #0x60] -; stur x11, [sp, #0x68] -; stur x12, [sp, #0x70] -; stur x13, [sp, #0x78] -; stur x14, [sp, #0x80] -; stur x15, [sp, #0x88] -; stur x0, [sp, #0x90] -; stur x1, [sp, #0x98] -; stur x8, [sp, #0xa0] -; stur x9, [sp, #0xa8] +; stur x0, [sp, #0x60] +; stur x1, [sp, #0x68] +; stur x8, [sp, #0x70] +; stur x9, [sp, #0x78] +; stur x10, [sp, #0x80] +; stur x11, [sp, #0x88] +; stur x12, [sp, #0x90] +; stur x13, [sp, #0x98] +; stur x14, [sp, #0xa0] +; stur x15, [sp, #0xa8] ; stur x26, [sp, #0xb0] ; stur x27, [sp, #0xb8] ; stur x28, [sp, #0xc0] -; stur x21, [sp, #0xc8] -; stur x19, [sp, #0xd0] -; stur x20, [sp, #0xd8] +; stur x19, [sp, #0xc8] +; stur x20, [sp, #0xd0] +; stur x21, [sp, #0xd8] ; stur x22, [sp, #0xe0] ; stur x23, [sp, #0xe8] ; stur x24, [sp, #0xf0] @@ -463,72 +463,48 @@ block2: ; stp x19, x20, [sp, #-16]! 
; sub sp, sp, #16 ; block0: -; movz x14, #10 -; str x14, [sp] +; movz x0, #10 +; str x0, [sp] ; movz x3, #15 ; movz x4, #20 ; movz x5, #25 ; movz x6, #30 ; movz x7, #35 -; movz x21, #40 -; movz x28, #45 -; movz x27, #50 -; movz x26, #55 -; movz x25, #60 -; movz x24, #65 -; movz x23, #70 -; movz x22, #75 +; movz x28, #40 +; movz x27, #45 +; movz x26, #50 +; movz x25, #55 +; movz x24, #60 +; movz x23, #65 +; movz x22, #70 +; movz x21, #75 ; movz x20, #80 ; movz x19, #85 -; movz x13, #90 -; movz x12, #95 -; movz x11, #100 -; movz x10, #105 -; movz x9, #110 -; movz x8, #115 -; movz x1, #120 -; movz x0, #125 -; movz x15, #130 -; movz x14, #135 +; movz x15, #90 +; movz x14, #95 +; movz x13, #100 +; movz x12, #105 +; movz x11, #110 +; movz x10, #115 +; movz x9, #120 +; movz x8, #125 +; movz x1, #130 +; movz x0, #135 ; cbnz x2, label2 ; b label1 ; block1: ; movz x2, #140 -; str x21, [sp, #112] -; str x28, [sp, #120] -; str x27, [sp, #128] -; str x26, [sp, #136] -; str x25, [sp, #144] -; str x24, [sp, #152] -; str x23, [sp, #160] -; str x22, [sp, #168] +; str x28, [sp, #112] +; str x27, [sp, #120] +; str x26, [sp, #128] +; str x25, [sp, #136] +; str x24, [sp, #144] +; str x23, [sp, #152] +; str x22, [sp, #160] +; str x21, [sp, #168] ; str x20, [sp, #176] ; str x19, [sp, #184] -; str x13, [sp, #192] -; str x12, [sp, #200] -; str x11, [sp, #208] -; str x10, [sp, #216] -; str x9, [sp, #224] -; str x8, [sp, #232] -; str x1, [sp, #240] -; str x0, [sp, #248] -; str x15, [sp, #256] -; str x14, [sp, #264] -; str x2, [sp, #272] -; load_ext_name_far x1, TestCase(%different_callee2)+0 -; ldr x2, [sp] -; return_call_ind x1 new_stack_arg_size:176 x2=x2 x3=x3 x4=x4 x5=x5 x6=x6 x7=x7 -; block2: -; ldr x2, [sp] -; str x21, [sp, #128] -; str x28, [sp, #136] -; str x27, [sp, #144] -; str x26, [sp, #152] -; str x25, [sp, #160] -; str x24, [sp, #168] -; str x23, [sp, #176] -; str x22, [sp, #184] -; str x20, [sp, #192] -; str x19, [sp, #200] +; str x15, [sp, #192] +; str x14, [sp, #200] ; str x13, [sp, #208] ; str x12, [sp, #216] ; str x11, [sp, #224] @@ -537,8 +513,32 @@ block2: ; str x8, [sp, #248] ; str x1, [sp, #256] ; str x0, [sp, #264] -; str x15, [sp, #272] -; str x14, [sp, #280] +; str x2, [sp, #272] +; load_ext_name_far x1, TestCase(%different_callee2)+0 +; ldr x2, [sp] +; return_call_ind x1 new_stack_arg_size:176 x2=x2 x3=x3 x4=x4 x5=x5 x6=x6 x7=x7 +; block2: +; ldr x2, [sp] +; str x28, [sp, #128] +; str x27, [sp, #136] +; str x26, [sp, #144] +; str x25, [sp, #152] +; str x24, [sp, #160] +; str x23, [sp, #168] +; str x22, [sp, #176] +; str x21, [sp, #184] +; str x20, [sp, #192] +; str x19, [sp, #200] +; str x15, [sp, #208] +; str x14, [sp, #216] +; str x13, [sp, #224] +; str x12, [sp, #232] +; str x11, [sp, #240] +; str x10, [sp, #248] +; str x9, [sp, #256] +; str x8, [sp, #264] +; str x1, [sp, #272] +; str x0, [sp, #280] ; load_ext_name_far x1, TestCase(%different_callee1)+0 ; return_call_ind x1 new_stack_arg_size:160 x2=x2 x3=x3 x4=x4 x5=x5 x6=x6 x7=x7 ; @@ -557,56 +557,56 @@ block2: ; stp x19, x20, [sp, #-0x10]! 
; sub sp, sp, #0x10 ; block1: ; offset 0x30 -; mov x14, #0xa -; stur x14, [sp] +; mov x0, #0xa +; stur x0, [sp] ; mov x3, #0xf ; mov x4, #0x14 ; mov x5, #0x19 ; mov x6, #0x1e ; mov x7, #0x23 -; mov x21, #0x28 -; mov x28, #0x2d -; mov x27, #0x32 -; mov x26, #0x37 -; mov x25, #0x3c -; mov x24, #0x41 -; mov x23, #0x46 -; mov x22, #0x4b +; mov x28, #0x28 +; mov x27, #0x2d +; mov x26, #0x32 +; mov x25, #0x37 +; mov x24, #0x3c +; mov x23, #0x41 +; mov x22, #0x46 +; mov x21, #0x4b ; mov x20, #0x50 ; mov x19, #0x55 -; mov x13, #0x5a -; mov x12, #0x5f -; mov x11, #0x64 -; mov x10, #0x69 -; mov x9, #0x6e -; mov x8, #0x73 -; mov x1, #0x78 -; mov x0, #0x7d -; mov x15, #0x82 -; mov x14, #0x87 +; mov x15, #0x5a +; mov x14, #0x5f +; mov x13, #0x64 +; mov x12, #0x69 +; mov x11, #0x6e +; mov x10, #0x73 +; mov x9, #0x78 +; mov x8, #0x7d +; mov x1, #0x82 +; mov x0, #0x87 ; cbnz x2, #0x12c ; block2: ; offset 0xa0 ; mov x2, #0x8c -; stur x21, [sp, #0x70] -; stur x28, [sp, #0x78] -; stur x27, [sp, #0x80] -; stur x26, [sp, #0x88] -; stur x25, [sp, #0x90] -; stur x24, [sp, #0x98] -; stur x23, [sp, #0xa0] -; stur x22, [sp, #0xa8] +; stur x28, [sp, #0x70] +; stur x27, [sp, #0x78] +; stur x26, [sp, #0x80] +; stur x25, [sp, #0x88] +; stur x24, [sp, #0x90] +; stur x23, [sp, #0x98] +; stur x22, [sp, #0xa0] +; stur x21, [sp, #0xa8] ; stur x20, [sp, #0xb0] ; stur x19, [sp, #0xb8] -; stur x13, [sp, #0xc0] -; stur x12, [sp, #0xc8] -; stur x11, [sp, #0xd0] -; stur x10, [sp, #0xd8] -; stur x9, [sp, #0xe0] -; stur x8, [sp, #0xe8] -; stur x1, [sp, #0xf0] -; stur x0, [sp, #0xf8] -; str x15, [sp, #0x100] -; str x14, [sp, #0x108] +; stur x15, [sp, #0xc0] +; stur x14, [sp, #0xc8] +; stur x13, [sp, #0xd0] +; stur x12, [sp, #0xd8] +; stur x11, [sp, #0xe0] +; stur x10, [sp, #0xe8] +; stur x9, [sp, #0xf0] +; stur x8, [sp, #0xf8] +; str x1, [sp, #0x100] +; str x0, [sp, #0x108] ; str x2, [sp, #0x110] ; ldr x1, #0x100 ; b #0x108 @@ -623,26 +623,26 @@ block2: ; br x1 ; block3: ; offset 0x12c ; ldur x2, [sp] -; stur x21, [sp, #0x80] -; stur x28, [sp, #0x88] -; stur x27, [sp, #0x90] -; stur x26, [sp, #0x98] -; stur x25, [sp, #0xa0] -; stur x24, [sp, #0xa8] -; stur x23, [sp, #0xb0] -; stur x22, [sp, #0xb8] +; stur x28, [sp, #0x80] +; stur x27, [sp, #0x88] +; stur x26, [sp, #0x90] +; stur x25, [sp, #0x98] +; stur x24, [sp, #0xa0] +; stur x23, [sp, #0xa8] +; stur x22, [sp, #0xb0] +; stur x21, [sp, #0xb8] ; stur x20, [sp, #0xc0] ; stur x19, [sp, #0xc8] -; stur x13, [sp, #0xd0] -; stur x12, [sp, #0xd8] -; stur x11, [sp, #0xe0] -; stur x10, [sp, #0xe8] -; stur x9, [sp, #0xf0] -; stur x8, [sp, #0xf8] -; str x1, [sp, #0x100] -; str x0, [sp, #0x108] -; str x15, [sp, #0x110] -; str x14, [sp, #0x118] +; stur x15, [sp, #0xd0] +; stur x14, [sp, #0xd8] +; stur x13, [sp, #0xe0] +; stur x12, [sp, #0xe8] +; stur x11, [sp, #0xf0] +; stur x10, [sp, #0xf8] +; str x9, [sp, #0x100] +; str x8, [sp, #0x108] +; str x1, [sp, #0x110] +; str x0, [sp, #0x118] ; ldr x1, #0x188 ; b #0x190 ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %different_callee1 0 diff --git a/cranelift/filetests/filetests/isa/aarch64/shift-rotate.clif b/cranelift/filetests/filetests/isa/aarch64/shift-rotate.clif index 34c7ded00cf7..42d066572af0 100644 --- a/cranelift/filetests/filetests/isa/aarch64/shift-rotate.clif +++ b/cranelift/filetests/filetests/isa/aarch64/shift-rotate.clif @@ -23,19 +23,19 @@ block0(v0: i128, v1: i128): ; lsl x3, x15, x13 ; orr x3, x9, x3 ; ands xzr, x2, #64 -; csel x6, x11, x3, ne -; csel x8, xzr, x11, ne -; lsl x10, x0, x7 -; lsl x12, x1, x7 -; orn w14, wzr, w7 
+; csel x2, x11, x3, ne +; csel x3, xzr, x11, ne +; lsl x4, x0, x7 +; lsl x1, x1, x7 +; orn w5, wzr, w7 ; lsr x0, x0, #1 -; lsr x2, x0, x14 -; orr x4, x12, x2 +; lsr x0, x0, x5 +; orr x1, x1, x0 ; ands xzr, x7, #64 -; csel x7, xzr, x10, ne -; csel x9, x10, x4, ne -; orr x1, x8, x9 -; orr x0, x6, x7 +; csel x0, xzr, x4, ne +; csel x1, x4, x1, ne +; orr x1, x3, x1 +; orr x0, x2, x0 ; ret ; ; Disassembled: @@ -49,19 +49,19 @@ block0(v0: i128, v1: i128): ; lsl x3, x15, x13 ; orr x3, x9, x3 ; tst x2, #0x40 -; csel x6, x11, x3, ne -; csel x8, xzr, x11, ne -; lsl x10, x0, x7 -; lsl x12, x1, x7 -; mvn w14, w7 +; csel x2, x11, x3, ne +; csel x3, xzr, x11, ne +; lsl x4, x0, x7 +; lsl x1, x1, x7 +; mvn w5, w7 ; lsr x0, x0, #1 -; lsr x2, x0, x14 -; orr x4, x12, x2 +; lsr x0, x0, x5 +; orr x1, x1, x0 ; tst x7, #0x40 -; csel x7, xzr, x10, ne -; csel x9, x10, x4, ne -; orr x1, x8, x9 -; orr x0, x6, x7 +; csel x0, xzr, x4, ne +; csel x1, x4, x1, ne +; orr x1, x3, x1 +; orr x0, x2, x0 ; ret function %f0(i64, i64) -> i64 { @@ -167,21 +167,21 @@ block0(v0: i128, v1: i128): ; orn w13, wzr, w2 ; lsr x15, x0, #1 ; lsr x3, x15, x13 -; orr x3, x11, x3 +; orr x4, x11, x3 ; ands xzr, x2, #64 -; csel x6, xzr, x9, ne -; csel x8, x9, x3, ne -; lsr x10, x0, x7 -; lsr x12, x1, x7 -; orn w14, wzr, w7 -; lsl x0, x1, #1 -; lsl x2, x0, x14 -; orr x4, x10, x2 +; csel x3, xzr, x9, ne +; csel x2, x9, x4, ne +; lsr x4, x0, x7 +; lsr x0, x1, x7 +; orn w5, wzr, w7 +; lsl x1, x1, #1 +; lsl x1, x1, x5 +; orr x1, x4, x1 ; ands xzr, x7, #64 -; csel x7, x12, x4, ne -; csel x9, xzr, x12, ne -; orr x0, x6, x7 -; orr x1, x8, x9 +; csel x4, x0, x1, ne +; csel x1, xzr, x0, ne +; orr x0, x3, x4 +; orr x1, x2, x1 ; ret ; ; Disassembled: @@ -193,21 +193,21 @@ block0(v0: i128, v1: i128): ; mvn w13, w2 ; lsr x15, x0, #1 ; lsr x3, x15, x13 -; orr x3, x11, x3 +; orr x4, x11, x3 ; tst x2, #0x40 -; csel x6, xzr, x9, ne -; csel x8, x9, x3, ne -; lsr x10, x0, x7 -; lsr x12, x1, x7 -; mvn w14, w7 -; lsl x0, x1, #1 -; lsl x2, x0, x14 -; orr x4, x10, x2 +; csel x3, xzr, x9, ne +; csel x2, x9, x4, ne +; lsr x4, x0, x7 +; lsr x0, x1, x7 +; mvn w5, w7 +; lsl x1, x1, #1 +; lsl x1, x1, x5 +; orr x1, x4, x1 ; tst x7, #0x40 -; csel x7, x12, x4, ne -; csel x9, xzr, x12, ne -; orr x0, x6, x7 -; orr x1, x8, x9 +; csel x4, x0, x1, ne +; csel x1, xzr, x0, ne +; orr x0, x3, x4 +; orr x1, x2, x1 ; ret function %f4(i64, i64) -> i64 { diff --git a/cranelift/filetests/filetests/isa/aarch64/simd-arithmetic.clif b/cranelift/filetests/filetests/isa/aarch64/simd-arithmetic.clif index 506ef2c2b642..c440aee1ce05 100644 --- a/cranelift/filetests/filetests/isa/aarch64/simd-arithmetic.clif +++ b/cranelift/filetests/filetests/isa/aarch64/simd-arithmetic.clif @@ -109,11 +109,11 @@ block0(v0: i64x2, v1: i64x2): ; movz x3, #1 ; dup v5.2d, x3 ; orr v7.16b, v0.16b, v1.16b -; and v17.16b, v7.16b, v5.16b -; ushr v19.2d, v0.2d, #1 -; ushr v21.2d, v1.2d, #1 -; add v23.2d, v19.2d, v21.2d -; add v0.2d, v17.2d, v23.2d +; and v16.16b, v7.16b, v5.16b +; ushr v17.2d, v0.2d, #1 +; ushr v18.2d, v1.2d, #1 +; add v17.2d, v17.2d, v18.2d +; add v0.2d, v16.2d, v17.2d ; ret ; ; Disassembled: @@ -121,11 +121,11 @@ block0(v0: i64x2, v1: i64x2): ; mov x3, #1 ; dup v5.2d, x3 ; orr v7.16b, v0.16b, v1.16b -; and v17.16b, v7.16b, v5.16b -; ushr v19.2d, v0.2d, #1 -; ushr v21.2d, v1.2d, #1 -; add v23.2d, v19.2d, v21.2d -; add v0.2d, v17.2d, v23.2d +; and v16.16b, v7.16b, v5.16b +; ushr v17.2d, v0.2d, #1 +; ushr v18.2d, v1.2d, #1 +; add v17.2d, v17.2d, v18.2d +; add v0.2d, v16.2d, v17.2d ; ret function 
%ishl_i8x16_imm(i8x16) -> i8x16 { diff --git a/cranelift/filetests/filetests/isa/aarch64/stack.clif b/cranelift/filetests/filetests/isa/aarch64/stack.clif index 167f62922b73..9170ac73f0ee 100644 --- a/cranelift/filetests/filetests/isa/aarch64/stack.clif +++ b/cranelift/filetests/filetests/isa/aarch64/stack.clif @@ -362,167 +362,167 @@ block0(v0: i8): ; stp x23, x24, [sp, #-16]! ; stp x21, x22, [sp, #-16]! ; stp x19, x20, [sp, #-16]! -; sub sp, sp, #1216 +; sub sp, sp, #1232 ; block0: ; str x0, [sp, #1000] -; movz x8, #2 -; str x8, [sp, #1008] -; movz x8, #4 -; movz x9, #6 -; movz x10, #8 -; movz x11, #10 -; movz x12, #12 -; movz x13, #14 -; movz x14, #16 -; movz x15, #18 -; movz x1, #20 -; movz x2, #22 -; movz x3, #24 -; movz x4, #26 -; movz x5, #28 -; movz x6, #30 -; movz x23, #32 -; movz x24, #34 -; movz x25, #36 -; movz x26, #38 -; movz x27, #30 -; movz x28, #32 -; movz x21, #34 -; movz x19, #36 -; movz x20, #38 -; movz x22, #30 +; movz x1, #2 +; movz x2, #4 +; movz x3, #6 +; movz x4, #8 +; movz x5, #10 +; movz x6, #12 +; movz x7, #14 +; movz x8, #16 +; movz x9, #18 +; movz x10, #20 +; movz x11, #22 +; movz x12, #24 +; movz x13, #26 +; movz x14, #28 +; movz x15, #30 +; movz x19, #32 +; movz x20, #34 +; movz x21, #36 +; movz x22, #38 +; movz x23, #30 +; movz x24, #32 +; movz x25, #34 +; movz x26, #36 +; movz x27, #38 +; movz x28, #30 ; movz x0, #32 -; movz x7, #34 -; str x7, [sp, #1208] -; movz x7, #36 -; str x7, [sp, #1200] -; movz x7, #38 -; str x7, [sp, #1192] -; movz x7, #30 -; str x7, [sp, #1184] -; movz x7, #32 -; str x7, [sp, #1176] -; movz x7, #34 -; str x7, [sp, #1168] -; movz x7, #36 -; str x7, [sp, #1160] -; movz x7, #38 -; str x7, [sp, #1152] -; ldr x7, [sp, #1008] -; add x7, x7, #1 -; str x7, [sp, #1144] -; add x7, x8, #3 -; str x7, [sp, #1136] -; add x7, x9, #5 -; str x7, [sp, #1128] -; add x7, x10, #7 -; str x7, [sp, #1120] -; add x7, x11, #9 -; str x7, [sp, #1112] -; add x7, x12, #11 -; str x7, [sp, #1104] -; add x7, x13, #13 -; str x7, [sp, #1096] -; add x7, x14, #15 -; str x7, [sp, #1088] -; add x7, x15, #17 -; str x7, [sp, #1080] -; add x7, x1, #19 -; str x7, [sp, #1072] -; add x7, x2, #21 -; str x7, [sp, #1064] -; add x7, x3, #23 -; str x7, [sp, #1056] -; add x7, x4, #25 -; str x7, [sp, #1048] -; add x7, x5, #27 -; str x7, [sp, #1040] -; add x7, x6, #29 -; str x7, [sp, #1032] -; add x7, x23, #31 -; str x7, [sp, #1024] -; add x7, x24, #33 -; str x7, [sp, #1016] -; add x7, x25, #35 -; str x7, [sp, #1008] -; add x26, x26, #37 -; add x27, x27, #39 -; add x28, x28, #31 -; add x21, x21, #33 -; add x19, x19, #35 -; add x20, x20, #37 -; add x22, x22, #39 -; add x0, x0, #31 -; ldr x7, [sp, #1208] -; add x7, x7, #33 -; ldr x10, [sp, #1200] -; add x8, x10, #35 -; ldr x13, [sp, #1192] -; add x9, x13, #37 -; ldr x1, [sp, #1184] -; add x10, x1, #39 -; ldr x3, [sp, #1176] -; add x11, x3, #31 +; str x0, [sp, #1216] +; movz x0, #34 +; str x0, [sp, #1208] +; movz x0, #36 +; str x0, [sp, #1200] +; movz x0, #38 +; str x0, [sp, #1192] +; movz x0, #30 +; str x0, [sp, #1184] +; movz x0, #32 +; str x0, [sp, #1176] +; movz x0, #34 +; str x0, [sp, #1168] +; movz x0, #36 +; str x0, [sp, #1160] +; movz x0, #38 +; str x0, [sp, #1152] +; add x0, x1, #1 +; str x0, [sp, #1144] +; add x0, x2, #3 +; str x0, [sp, #1136] +; add x0, x3, #5 +; str x0, [sp, #1128] +; add x0, x4, #7 +; str x0, [sp, #1120] +; add x0, x5, #9 +; str x0, [sp, #1112] +; add x0, x6, #11 +; str x0, [sp, #1104] +; add x0, x7, #13 +; str x0, [sp, #1096] +; add x0, x8, #15 +; str x0, [sp, #1088] +; add x0, x9, #17 +; str x0, [sp, 
#1080] +; add x0, x10, #19 +; str x0, [sp, #1072] +; add x0, x11, #21 +; str x0, [sp, #1064] +; add x0, x12, #23 +; str x0, [sp, #1056] +; add x0, x13, #25 +; str x0, [sp, #1048] +; add x0, x14, #27 +; str x0, [sp, #1040] +; add x0, x15, #29 +; str x0, [sp, #1032] +; add x0, x19, #31 +; str x0, [sp, #1024] +; add x0, x20, #33 +; str x0, [sp, #1016] +; add x0, x21, #35 +; str x0, [sp, #1008] +; add x22, x22, #37 +; add x23, x23, #39 +; add x24, x24, #31 +; add x25, x25, #33 +; add x26, x26, #35 +; add x27, x27, #37 +; add x28, x28, #39 +; ldr x1, [sp, #1216] +; add x0, x1, #31 +; ldr x1, [sp, #1208] +; add x1, x1, #33 +; ldr x2, [sp, #1200] +; add x2, x2, #35 +; ldr x3, [sp, #1192] +; add x3, x3, #37 +; ldr x4, [sp, #1184] +; add x4, x4, #39 +; ldr x5, [sp, #1176] +; add x5, x5, #31 ; ldr x6, [sp, #1168] -; add x12, x6, #33 -; ldr x13, [sp, #1160] -; add x13, x13, #35 -; ldr x14, [sp, #1152] -; add x14, x14, #37 -; ldr x15, [sp, #1144] -; add x15, x15, #39 -; ldr x2, [sp, #1136] -; ldr x4, [sp, #1128] -; add x1, x2, x4 -; ldr x2, [sp, #1112] -; ldr x3, [sp, #1120] -; add x2, x3, x2 -; ldr x3, [sp, #1096] -; ldr x4, [sp, #1104] -; add x3, x4, x3 -; ldr x4, [sp, #1080] -; ldr x5, [sp, #1088] -; add x4, x5, x4 -; ldr x5, [sp, #1064] -; ldr x6, [sp, #1072] -; add x5, x6, x5 -; ldr x6, [sp, #1048] -; ldr x23, [sp, #1056] -; add x6, x23, x6 -; ldr x23, [sp, #1032] -; ldr x24, [sp, #1040] -; add x23, x24, x23 -; ldr x24, [sp, #1024] -; ldr x25, [sp, #1016] -; add x24, x24, x25 -; ldr x25, [sp, #1008] -; add x25, x25, x26 -; add x26, x27, x28 -; add x27, x21, x19 -; add x28, x20, x22 -; add x7, x0, x7 -; add x8, x8, x9 -; add x9, x10, x11 -; add x10, x12, x13 -; add x11, x14, x15 -; add x12, x1, x2 -; add x13, x3, x4 -; add x14, x5, x6 -; add x15, x23, x24 -; add x0, x25, x26 -; add x1, x27, x28 -; add x7, x7, x8 -; add x8, x9, x10 -; add x9, x11, x12 -; add x10, x13, x14 -; add x11, x15, x0 -; add x7, x1, x7 -; add x8, x8, x9 -; add x9, x10, x11 -; add x7, x7, x8 -; add x1, x9, x7 +; add x6, x6, #33 +; ldr x7, [sp, #1160] +; add x7, x7, #35 +; ldr x8, [sp, #1152] +; add x8, x8, #37 +; ldr x9, [sp, #1144] +; add x9, x9, #39 +; ldr x10, [sp, #1128] +; ldr x11, [sp, #1136] +; add x10, x11, x10 +; ldr x11, [sp, #1120] +; ldr x12, [sp, #1112] +; add x11, x11, x12 +; ldr x12, [sp, #1096] +; ldr x15, [sp, #1104] +; add x12, x15, x12 +; ldr x13, [sp, #1080] +; ldr x14, [sp, #1088] +; add x13, x14, x13 +; ldr x14, [sp, #1064] +; ldr x15, [sp, #1072] +; add x14, x15, x14 +; ldr x15, [sp, #1048] +; ldr x19, [sp, #1056] +; add x15, x19, x15 +; ldr x19, [sp, #1032] +; ldr x20, [sp, #1040] +; add x19, x20, x19 +; ldr x20, [sp, #1024] +; ldr x21, [sp, #1016] +; add x20, x20, x21 +; ldr x21, [sp, #1008] +; add x21, x21, x22 +; add x22, x23, x24 +; add x23, x25, x26 +; add x24, x27, x28 +; add x0, x0, x1 +; add x1, x2, x3 +; add x2, x4, x5 +; add x3, x6, x7 +; add x4, x8, x9 +; add x5, x10, x11 +; add x6, x12, x13 +; add x7, x14, x15 +; add x8, x19, x20 +; add x9, x21, x22 +; add x10, x23, x24 +; add x0, x0, x1 +; add x1, x2, x3 +; add x2, x4, x5 +; add x3, x6, x7 +; add x4, x8, x9 +; add x0, x10, x0 +; add x1, x1, x2 +; add x2, x3, x4 +; add x0, x0, x1 +; add x1, x2, x0 ; ldr x0, [sp, #1000] -; add sp, sp, #1216 +; add sp, sp, #1232 ; ldp x19, x20, [sp], #16 ; ldp x21, x22, [sp], #16 ; ldp x23, x24, [sp], #16 @@ -540,167 +540,167 @@ block0(v0: i8): ; stp x23, x24, [sp, #-0x10]! ; stp x21, x22, [sp, #-0x10]! ; stp x19, x20, [sp, #-0x10]! 
-; sub sp, sp, #0x4c0 +; sub sp, sp, #0x4d0 ; block1: ; offset 0x20 ; str x0, [sp, #0x3e8] -; mov x8, #2 -; str x8, [sp, #0x3f0] -; mov x8, #4 -; mov x9, #6 -; mov x10, #8 -; mov x11, #0xa -; mov x12, #0xc -; mov x13, #0xe -; mov x14, #0x10 -; mov x15, #0x12 -; mov x1, #0x14 -; mov x2, #0x16 -; mov x3, #0x18 -; mov x4, #0x1a -; mov x5, #0x1c -; mov x6, #0x1e -; mov x23, #0x20 -; mov x24, #0x22 -; mov x25, #0x24 -; mov x26, #0x26 -; mov x27, #0x1e -; mov x28, #0x20 -; mov x21, #0x22 -; mov x19, #0x24 -; mov x20, #0x26 -; mov x22, #0x1e +; mov x1, #2 +; mov x2, #4 +; mov x3, #6 +; mov x4, #8 +; mov x5, #0xa +; mov x6, #0xc +; mov x7, #0xe +; mov x8, #0x10 +; mov x9, #0x12 +; mov x10, #0x14 +; mov x11, #0x16 +; mov x12, #0x18 +; mov x13, #0x1a +; mov x14, #0x1c +; mov x15, #0x1e +; mov x19, #0x20 +; mov x20, #0x22 +; mov x21, #0x24 +; mov x22, #0x26 +; mov x23, #0x1e +; mov x24, #0x20 +; mov x25, #0x22 +; mov x26, #0x24 +; mov x27, #0x26 +; mov x28, #0x1e +; mov x0, #0x20 +; str x0, [sp, #0x4c0] +; mov x0, #0x22 +; str x0, [sp, #0x4b8] +; mov x0, #0x24 +; str x0, [sp, #0x4b0] +; mov x0, #0x26 +; str x0, [sp, #0x4a8] +; mov x0, #0x1e +; str x0, [sp, #0x4a0] ; mov x0, #0x20 -; mov x7, #0x22 -; str x7, [sp, #0x4b8] -; mov x7, #0x24 -; str x7, [sp, #0x4b0] -; mov x7, #0x26 -; str x7, [sp, #0x4a8] -; mov x7, #0x1e -; str x7, [sp, #0x4a0] -; mov x7, #0x20 -; str x7, [sp, #0x498] -; mov x7, #0x22 -; str x7, [sp, #0x490] -; mov x7, #0x24 -; str x7, [sp, #0x488] -; mov x7, #0x26 -; str x7, [sp, #0x480] -; ldr x7, [sp, #0x3f0] -; add x7, x7, #1 -; str x7, [sp, #0x478] -; add x7, x8, #3 -; str x7, [sp, #0x470] -; add x7, x9, #5 -; str x7, [sp, #0x468] -; add x7, x10, #7 -; str x7, [sp, #0x460] -; add x7, x11, #9 -; str x7, [sp, #0x458] -; add x7, x12, #0xb -; str x7, [sp, #0x450] -; add x7, x13, #0xd -; str x7, [sp, #0x448] -; add x7, x14, #0xf -; str x7, [sp, #0x440] -; add x7, x15, #0x11 -; str x7, [sp, #0x438] -; add x7, x1, #0x13 -; str x7, [sp, #0x430] -; add x7, x2, #0x15 -; str x7, [sp, #0x428] -; add x7, x3, #0x17 -; str x7, [sp, #0x420] -; add x7, x4, #0x19 -; str x7, [sp, #0x418] -; add x7, x5, #0x1b -; str x7, [sp, #0x410] -; add x7, x6, #0x1d -; str x7, [sp, #0x408] -; add x7, x23, #0x1f -; str x7, [sp, #0x400] -; add x7, x24, #0x21 -; str x7, [sp, #0x3f8] -; add x7, x25, #0x23 -; str x7, [sp, #0x3f0] -; add x26, x26, #0x25 -; add x27, x27, #0x27 -; add x28, x28, #0x1f -; add x21, x21, #0x21 -; add x19, x19, #0x23 -; add x20, x20, #0x25 -; add x22, x22, #0x27 -; add x0, x0, #0x1f -; ldr x7, [sp, #0x4b8] -; add x7, x7, #0x21 -; ldr x10, [sp, #0x4b0] -; add x8, x10, #0x23 -; ldr x13, [sp, #0x4a8] -; add x9, x13, #0x25 -; ldr x1, [sp, #0x4a0] -; add x10, x1, #0x27 -; ldr x3, [sp, #0x498] -; add x11, x3, #0x1f +; str x0, [sp, #0x498] +; mov x0, #0x22 +; str x0, [sp, #0x490] +; mov x0, #0x24 +; str x0, [sp, #0x488] +; mov x0, #0x26 +; str x0, [sp, #0x480] +; add x0, x1, #1 +; str x0, [sp, #0x478] +; add x0, x2, #3 +; str x0, [sp, #0x470] +; add x0, x3, #5 +; str x0, [sp, #0x468] +; add x0, x4, #7 +; str x0, [sp, #0x460] +; add x0, x5, #9 +; str x0, [sp, #0x458] +; add x0, x6, #0xb +; str x0, [sp, #0x450] +; add x0, x7, #0xd +; str x0, [sp, #0x448] +; add x0, x8, #0xf +; str x0, [sp, #0x440] +; add x0, x9, #0x11 +; str x0, [sp, #0x438] +; add x0, x10, #0x13 +; str x0, [sp, #0x430] +; add x0, x11, #0x15 +; str x0, [sp, #0x428] +; add x0, x12, #0x17 +; str x0, [sp, #0x420] +; add x0, x13, #0x19 +; str x0, [sp, #0x418] +; add x0, x14, #0x1b +; str x0, [sp, #0x410] +; add x0, x15, #0x1d +; str x0, 
[sp, #0x408] +; add x0, x19, #0x1f +; str x0, [sp, #0x400] +; add x0, x20, #0x21 +; str x0, [sp, #0x3f8] +; add x0, x21, #0x23 +; str x0, [sp, #0x3f0] +; add x22, x22, #0x25 +; add x23, x23, #0x27 +; add x24, x24, #0x1f +; add x25, x25, #0x21 +; add x26, x26, #0x23 +; add x27, x27, #0x25 +; add x28, x28, #0x27 +; ldr x1, [sp, #0x4c0] +; add x0, x1, #0x1f +; ldr x1, [sp, #0x4b8] +; add x1, x1, #0x21 +; ldr x2, [sp, #0x4b0] +; add x2, x2, #0x23 +; ldr x3, [sp, #0x4a8] +; add x3, x3, #0x25 +; ldr x4, [sp, #0x4a0] +; add x4, x4, #0x27 +; ldr x5, [sp, #0x498] +; add x5, x5, #0x1f ; ldr x6, [sp, #0x490] -; add x12, x6, #0x21 -; ldr x13, [sp, #0x488] -; add x13, x13, #0x23 -; ldr x14, [sp, #0x480] -; add x14, x14, #0x25 -; ldr x15, [sp, #0x478] -; add x15, x15, #0x27 -; ldr x2, [sp, #0x470] -; ldr x4, [sp, #0x468] -; add x1, x2, x4 -; ldr x2, [sp, #0x458] -; ldr x3, [sp, #0x460] -; add x2, x3, x2 -; ldr x3, [sp, #0x448] -; ldr x4, [sp, #0x450] -; add x3, x4, x3 -; ldr x4, [sp, #0x438] -; ldr x5, [sp, #0x440] -; add x4, x5, x4 -; ldr x5, [sp, #0x428] -; ldr x6, [sp, #0x430] -; add x5, x6, x5 -; ldr x6, [sp, #0x418] -; ldr x23, [sp, #0x420] -; add x6, x23, x6 -; ldr x23, [sp, #0x408] -; ldr x24, [sp, #0x410] -; add x23, x24, x23 -; ldr x24, [sp, #0x400] -; ldr x25, [sp, #0x3f8] -; add x24, x24, x25 -; ldr x25, [sp, #0x3f0] -; add x25, x25, x26 -; add x26, x27, x28 -; add x27, x21, x19 -; add x28, x20, x22 -; add x7, x0, x7 -; add x8, x8, x9 -; add x9, x10, x11 -; add x10, x12, x13 -; add x11, x14, x15 -; add x12, x1, x2 -; add x13, x3, x4 -; add x14, x5, x6 -; add x15, x23, x24 -; add x0, x25, x26 -; add x1, x27, x28 -; add x7, x7, x8 -; add x8, x9, x10 -; add x9, x11, x12 -; add x10, x13, x14 -; add x11, x15, x0 -; add x7, x1, x7 -; add x8, x8, x9 -; add x9, x10, x11 -; add x7, x7, x8 -; add x1, x9, x7 +; add x6, x6, #0x21 +; ldr x7, [sp, #0x488] +; add x7, x7, #0x23 +; ldr x8, [sp, #0x480] +; add x8, x8, #0x25 +; ldr x9, [sp, #0x478] +; add x9, x9, #0x27 +; ldr x10, [sp, #0x468] +; ldr x11, [sp, #0x470] +; add x10, x11, x10 +; ldr x11, [sp, #0x460] +; ldr x12, [sp, #0x458] +; add x11, x11, x12 +; ldr x12, [sp, #0x448] +; ldr x15, [sp, #0x450] +; add x12, x15, x12 +; ldr x13, [sp, #0x438] +; ldr x14, [sp, #0x440] +; add x13, x14, x13 +; ldr x14, [sp, #0x428] +; ldr x15, [sp, #0x430] +; add x14, x15, x14 +; ldr x15, [sp, #0x418] +; ldr x19, [sp, #0x420] +; add x15, x19, x15 +; ldr x19, [sp, #0x408] +; ldr x20, [sp, #0x410] +; add x19, x20, x19 +; ldr x20, [sp, #0x400] +; ldr x21, [sp, #0x3f8] +; add x20, x20, x21 +; ldr x21, [sp, #0x3f0] +; add x21, x21, x22 +; add x22, x23, x24 +; add x23, x25, x26 +; add x24, x27, x28 +; add x0, x0, x1 +; add x1, x2, x3 +; add x2, x4, x5 +; add x3, x6, x7 +; add x4, x8, x9 +; add x5, x10, x11 +; add x6, x12, x13 +; add x7, x14, x15 +; add x8, x19, x20 +; add x9, x21, x22 +; add x10, x23, x24 +; add x0, x0, x1 +; add x1, x2, x3 +; add x2, x4, x5 +; add x3, x6, x7 +; add x4, x8, x9 +; add x0, x10, x0 +; add x1, x1, x2 +; add x2, x3, x4 +; add x0, x0, x1 +; add x1, x2, x0 ; ldr x0, [sp, #0x3e8] -; add sp, sp, #0x4c0 +; add sp, sp, #0x4d0 ; ldp x19, x20, [sp], #0x10 ; ldp x21, x22, [sp], #0x10 ; ldp x23, x24, [sp], #0x10 diff --git a/cranelift/filetests/filetests/isa/aarch64/tail-call-conv.clif b/cranelift/filetests/filetests/isa/aarch64/tail-call-conv.clif index c456afa5532f..1a421e896257 100644 --- a/cranelift/filetests/filetests/isa/aarch64/tail-call-conv.clif +++ b/cranelift/filetests/filetests/isa/aarch64/tail-call-conv.clif @@ -78,48 +78,48 @@ block0: ; movz 
x5, #25 ; movz x6, #30 ; movz x7, #35 -; movz x11, #40 -; movz x12, #45 -; movz x13, #50 -; movz x14, #55 -; movz x15, #60 -; movz x0, #65 -; movz x1, #70 -; movz x8, #75 -; movz x9, #80 -; movz x10, #85 +; movz x0, #40 +; movz x1, #45 +; movz x8, #50 +; movz x9, #55 +; movz x10, #60 +; movz x11, #65 +; movz x12, #70 +; movz x13, #75 +; movz x14, #80 +; movz x15, #85 ; movz x27, #90 ; movz x28, #95 -; movz x21, #100 -; movz x19, #105 -; movz x20, #110 +; movz x19, #100 +; movz x20, #105 +; movz x21, #110 ; movz x22, #115 ; movz x23, #120 ; movz x24, #125 ; movz x25, #130 ; movz x26, #135 -; str x11, [sp] -; str x12, [sp, #8] -; str x13, [sp, #16] -; str x14, [sp, #24] -; str x15, [sp, #32] -; str x0, [sp, #40] -; str x1, [sp, #48] -; str x8, [sp, #56] -; str x9, [sp, #64] -; str x10, [sp, #72] +; str x0, [sp] +; str x1, [sp, #8] +; str x8, [sp, #16] +; str x9, [sp, #24] +; str x10, [sp, #32] +; str x11, [sp, #40] +; str x12, [sp, #48] +; str x13, [sp, #56] +; str x14, [sp, #64] +; str x15, [sp, #72] ; str x27, [sp, #80] ; str x28, [sp, #88] -; str x21, [sp, #96] -; str x19, [sp, #104] -; str x20, [sp, #112] +; str x19, [sp, #96] +; str x20, [sp, #104] +; str x21, [sp, #112] ; str x22, [sp, #120] ; str x23, [sp, #128] ; str x24, [sp, #136] ; str x25, [sp, #144] ; str x26, [sp, #152] -; load_ext_name_far x15, TestCase(%tail_callee_stack_args)+0 -; blr x15 +; load_ext_name_far x0, TestCase(%tail_callee_stack_args)+0 +; blr x0 ; add sp, sp, #160 ; ldp x19, x20, [sp], #16 ; ldp x21, x22, [sp], #16 @@ -146,51 +146,51 @@ block0: ; mov x5, #0x19 ; mov x6, #0x1e ; mov x7, #0x23 -; mov x11, #0x28 -; mov x12, #0x2d -; mov x13, #0x32 -; mov x14, #0x37 -; mov x15, #0x3c -; mov x0, #0x41 -; mov x1, #0x46 -; mov x8, #0x4b -; mov x9, #0x50 -; mov x10, #0x55 +; mov x0, #0x28 +; mov x1, #0x2d +; mov x8, #0x32 +; mov x9, #0x37 +; mov x10, #0x3c +; mov x11, #0x41 +; mov x12, #0x46 +; mov x13, #0x4b +; mov x14, #0x50 +; mov x15, #0x55 ; mov x27, #0x5a ; mov x28, #0x5f -; mov x21, #0x64 -; mov x19, #0x69 -; mov x20, #0x6e +; mov x19, #0x64 +; mov x20, #0x69 +; mov x21, #0x6e ; mov x22, #0x73 ; mov x23, #0x78 ; mov x24, #0x7d ; mov x25, #0x82 ; mov x26, #0x87 -; stur x11, [sp] -; stur x12, [sp, #8] -; stur x13, [sp, #0x10] -; stur x14, [sp, #0x18] -; stur x15, [sp, #0x20] -; stur x0, [sp, #0x28] -; stur x1, [sp, #0x30] -; stur x8, [sp, #0x38] -; stur x9, [sp, #0x40] -; stur x10, [sp, #0x48] +; stur x0, [sp] +; stur x1, [sp, #8] +; stur x8, [sp, #0x10] +; stur x9, [sp, #0x18] +; stur x10, [sp, #0x20] +; stur x11, [sp, #0x28] +; stur x12, [sp, #0x30] +; stur x13, [sp, #0x38] +; stur x14, [sp, #0x40] +; stur x15, [sp, #0x48] ; stur x27, [sp, #0x50] ; stur x28, [sp, #0x58] -; stur x21, [sp, #0x60] -; stur x19, [sp, #0x68] -; stur x20, [sp, #0x70] +; stur x19, [sp, #0x60] +; stur x20, [sp, #0x68] +; stur x21, [sp, #0x70] ; stur x22, [sp, #0x78] ; stur x23, [sp, #0x80] ; stur x24, [sp, #0x88] ; stur x25, [sp, #0x90] ; stur x26, [sp, #0x98] -; ldr x15, #0xe0 +; ldr x0, #0xe0 ; b #0xe8 ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %tail_callee_stack_args 0 ; .byte 0x00, 0x00, 0x00, 0x00 -; blr x15 +; blr x0 ; sub sp, sp, #0xa0 ; add sp, sp, #0xa0 ; ldp x19, x20, [sp], #0x10 @@ -251,40 +251,40 @@ block0: ; movz x5, #25 ; movz x6, #30 ; movz x7, #35 -; movz x11, #40 -; movz x12, #45 -; movz x13, #50 -; movz x14, #55 -; movz x15, #60 -; movz x0, #65 -; movz x1, #70 -; movz x9, #75 -; movz x10, #80 +; movz x0, #40 +; movz x1, #45 +; movz x9, #50 +; movz x10, #55 +; movz x11, #60 +; movz x12, #65 +; movz x13, #70 +; 
movz x14, #75 +; movz x15, #80 ; movz x27, #85 ; movz x28, #90 -; movz x21, #95 -; movz x19, #100 -; movz x20, #105 +; movz x19, #95 +; movz x20, #100 +; movz x21, #105 ; movz x22, #110 ; movz x23, #115 ; movz x24, #120 ; movz x25, #125 ; movz x26, #130 ; movz x2, #135 -; str x11, [x8] -; str x12, [x8, #8] -; str x13, [x8, #16] -; str x14, [x8, #24] -; str x15, [x8, #32] -; str x0, [x8, #40] -; str x1, [x8, #48] -; str x9, [x8, #56] -; str x10, [x8, #64] +; str x0, [x8] +; str x1, [x8, #8] +; str x9, [x8, #16] +; str x10, [x8, #24] +; str x11, [x8, #32] +; str x12, [x8, #40] +; str x13, [x8, #48] +; str x14, [x8, #56] +; str x15, [x8, #64] ; str x27, [x8, #72] ; str x28, [x8, #80] -; str x21, [x8, #88] -; str x19, [x8, #96] -; str x20, [x8, #104] +; str x19, [x8, #88] +; str x20, [x8, #96] +; str x21, [x8, #104] ; str x22, [x8, #112] ; str x23, [x8, #120] ; str x24, [x8, #128] @@ -319,40 +319,40 @@ block0: ; mov x5, #0x19 ; mov x6, #0x1e ; mov x7, #0x23 -; mov x11, #0x28 -; mov x12, #0x2d -; mov x13, #0x32 -; mov x14, #0x37 -; mov x15, #0x3c -; mov x0, #0x41 -; mov x1, #0x46 -; mov x9, #0x4b -; mov x10, #0x50 +; mov x0, #0x28 +; mov x1, #0x2d +; mov x9, #0x32 +; mov x10, #0x37 +; mov x11, #0x3c +; mov x12, #0x41 +; mov x13, #0x46 +; mov x14, #0x4b +; mov x15, #0x50 ; mov x27, #0x55 ; mov x28, #0x5a -; mov x21, #0x5f -; mov x19, #0x64 -; mov x20, #0x69 +; mov x19, #0x5f +; mov x20, #0x64 +; mov x21, #0x69 ; mov x22, #0x6e ; mov x23, #0x73 ; mov x24, #0x78 ; mov x25, #0x7d ; mov x26, #0x82 ; mov x2, #0x87 -; stur x11, [x8] -; stur x12, [x8, #8] -; stur x13, [x8, #0x10] -; stur x14, [x8, #0x18] -; stur x15, [x8, #0x20] -; stur x0, [x8, #0x28] -; stur x1, [x8, #0x30] -; stur x9, [x8, #0x38] -; stur x10, [x8, #0x40] +; stur x0, [x8] +; stur x1, [x8, #8] +; stur x9, [x8, #0x10] +; stur x10, [x8, #0x18] +; stur x11, [x8, #0x20] +; stur x12, [x8, #0x28] +; stur x13, [x8, #0x30] +; stur x14, [x8, #0x38] +; stur x15, [x8, #0x40] ; stur x27, [x8, #0x48] ; stur x28, [x8, #0x50] -; stur x21, [x8, #0x58] -; stur x19, [x8, #0x60] -; stur x20, [x8, #0x68] +; stur x19, [x8, #0x58] +; stur x20, [x8, #0x60] +; stur x21, [x8, #0x68] ; stur x22, [x8, #0x70] ; stur x23, [x8, #0x78] ; stur x24, [x8, #0x80] @@ -388,8 +388,8 @@ block0: ; sub sp, sp, #240 ; block0: ; mov x8, sp -; load_ext_name_far x12, TestCase(%tail_callee_stack_rets)+0 -; blr x12 +; load_ext_name_far x0, TestCase(%tail_callee_stack_rets)+0 +; blr x0 ; ldr x2, [sp, #232] ; add sp, sp, #240 ; ldp x19, x20, [sp], #16 @@ -412,11 +412,11 @@ block0: ; sub sp, sp, #0xf0 ; block1: ; offset 0x20 ; mov x8, sp -; ldr x12, #0x2c +; ldr x0, #0x2c ; b #0x34 ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %tail_callee_stack_rets 0 ; .byte 0x00, 0x00, 0x00, 0x00 -; blr x12 +; blr x0 ; ldur x9, [sp] ; stur x9, [sp, #0xa0] ; ldur x9, [sp, #8] @@ -435,13 +435,13 @@ block0: ; stur x9, [sp, #0xd8] ; ldur x9, [sp, #0x40] ; stur x9, [sp, #0xe0] -; ldur x25, [sp, #0x48] -; ldur x26, [sp, #0x50] -; ldur x27, [sp, #0x58] -; ldur x28, [sp, #0x60] -; ldur x21, [sp, #0x68] -; ldur x19, [sp, #0x70] -; ldur x20, [sp, #0x78] +; ldur x28, [sp, #0x48] +; ldur x27, [sp, #0x50] +; ldur x26, [sp, #0x58] +; ldur x25, [sp, #0x60] +; ldur x19, [sp, #0x68] +; ldur x20, [sp, #0x70] +; ldur x21, [sp, #0x78] ; ldur x22, [sp, #0x80] ; ldur x23, [sp, #0x88] ; ldur x24, [sp, #0x90] @@ -477,45 +477,45 @@ block0(v0: i64, v1: i64, v2: i64, v3: i64, v4: i64, v5: i64, v6: i64, v7: i64, v ; block0: ; str x7, [sp] ; ldr x7, [sp, #112] -; ldr x19, [sp, #120] -; ldr x22, [sp, #128] +; ldr x28, 
[sp, #120] +; ldr x26, [sp, #128] ; ldr x24, [sp, #136] -; ldr x26, [sp, #144] -; ldr x28, [sp, #152] -; ldr x21, [sp, #160] -; ldr x20, [sp, #168] +; ldr x22, [sp, #144] +; ldr x20, [sp, #152] +; ldr x19, [sp, #160] +; ldr x21, [sp, #168] ; ldr x23, [sp, #176] ; ldr x25, [sp, #184] ; ldr x27, [sp, #192] -; ldr x0, [sp, #200] -; ldr x15, [sp, #208] -; ldr x1, [sp, #216] -; ldr x14, [sp, #224] -; ldr x12, [sp, #232] +; ldr x15, [sp, #200] +; ldr x14, [sp, #208] +; ldr x13, [sp, #216] +; ldr x12, [sp, #224] +; ldr x11, [sp, #232] ; ldr x10, [sp, #240] ; ldr x9, [sp, #248] -; ldr x11, [sp, #256] -; ldr x13, [sp, #264] +; ldr x1, [sp, #256] +; ldr x0, [sp, #264] ; str x7, [x8] -; str x19, [x8, #8] -; str x22, [x8, #16] +; str x28, [x8, #8] +; str x26, [x8, #16] ; str x24, [x8, #24] -; str x26, [x8, #32] -; str x28, [x8, #40] -; str x21, [x8, #48] -; str x20, [x8, #56] +; str x22, [x8, #32] +; str x20, [x8, #40] +; str x19, [x8, #48] +; str x21, [x8, #56] ; str x23, [x8, #64] ; str x25, [x8, #72] ; str x27, [x8, #80] -; str x0, [x8, #88] -; str x15, [x8, #96] -; str x1, [x8, #104] -; str x14, [x8, #112] -; str x12, [x8, #120] +; str x15, [x8, #88] +; str x14, [x8, #96] +; str x13, [x8, #104] +; str x12, [x8, #112] +; str x11, [x8, #120] ; str x10, [x8, #128] ; str x9, [x8, #136] -; str x11, [x8, #144] -; str x13, [x8, #152] +; str x1, [x8, #144] +; str x0, [x8, #152] ; ldr x7, [sp] ; add sp, sp, #16 ; ldp x19, x20, [sp], #16 @@ -540,45 +540,45 @@ block0(v0: i64, v1: i64, v2: i64, v3: i64, v4: i64, v5: i64, v6: i64, v7: i64, v ; block1: ; offset 0x20 ; stur x7, [sp] ; ldur x7, [sp, #0x70] -; ldur x19, [sp, #0x78] -; ldur x22, [sp, #0x80] +; ldur x28, [sp, #0x78] +; ldur x26, [sp, #0x80] ; ldur x24, [sp, #0x88] -; ldur x26, [sp, #0x90] -; ldur x28, [sp, #0x98] -; ldur x21, [sp, #0xa0] -; ldur x20, [sp, #0xa8] +; ldur x22, [sp, #0x90] +; ldur x20, [sp, #0x98] +; ldur x19, [sp, #0xa0] +; ldur x21, [sp, #0xa8] ; ldur x23, [sp, #0xb0] ; ldur x25, [sp, #0xb8] ; ldur x27, [sp, #0xc0] -; ldur x0, [sp, #0xc8] -; ldur x15, [sp, #0xd0] -; ldur x1, [sp, #0xd8] -; ldur x14, [sp, #0xe0] -; ldur x12, [sp, #0xe8] +; ldur x15, [sp, #0xc8] +; ldur x14, [sp, #0xd0] +; ldur x13, [sp, #0xd8] +; ldur x12, [sp, #0xe0] +; ldur x11, [sp, #0xe8] ; ldur x10, [sp, #0xf0] ; ldur x9, [sp, #0xf8] -; ldr x11, [sp, #0x100] -; ldr x13, [sp, #0x108] +; ldr x1, [sp, #0x100] +; ldr x0, [sp, #0x108] ; stur x7, [x8] -; stur x19, [x8, #8] -; stur x22, [x8, #0x10] +; stur x28, [x8, #8] +; stur x26, [x8, #0x10] ; stur x24, [x8, #0x18] -; stur x26, [x8, #0x20] -; stur x28, [x8, #0x28] -; stur x21, [x8, #0x30] -; stur x20, [x8, #0x38] +; stur x22, [x8, #0x20] +; stur x20, [x8, #0x28] +; stur x19, [x8, #0x30] +; stur x21, [x8, #0x38] ; stur x23, [x8, #0x40] ; stur x25, [x8, #0x48] ; stur x27, [x8, #0x50] -; stur x0, [x8, #0x58] -; stur x15, [x8, #0x60] -; stur x1, [x8, #0x68] -; stur x14, [x8, #0x70] -; stur x12, [x8, #0x78] +; stur x15, [x8, #0x58] +; stur x14, [x8, #0x60] +; stur x13, [x8, #0x68] +; stur x12, [x8, #0x70] +; stur x11, [x8, #0x78] ; stur x10, [x8, #0x80] ; stur x9, [x8, #0x88] -; stur x11, [x8, #0x90] -; stur x13, [x8, #0x98] +; stur x1, [x8, #0x90] +; stur x0, [x8, #0x98] ; ldur x7, [sp] ; add sp, sp, #0x10 ; ldp x19, x20, [sp], #0x10 @@ -640,49 +640,49 @@ block0: ; movz x5, #25 ; movz x6, #30 ; movz x7, #35 -; movz x8, #40 -; movz x9, #45 -; movz x10, #50 -; movz x11, #55 -; movz x12, #60 -; movz x13, #65 -; movz x14, #70 -; movz x15, #75 -; movz x0, #80 -; movz x1, #85 -; movz x23, #90 -; movz x24, #95 -; movz x25, #100 
-; movz x26, #105 -; movz x27, #110 -; movz x28, #115 -; movz x21, #120 -; movz x19, #125 -; movz x20, #130 -; movz x22, #135 -; str x8, [sp] -; str x9, [sp, #8] -; str x10, [sp, #16] -; str x11, [sp, #24] -; str x12, [sp, #32] -; str x13, [sp, #40] -; str x14, [sp, #48] -; str x15, [sp, #56] -; str x0, [sp, #64] -; str x1, [sp, #72] -; str x23, [sp, #80] -; str x24, [sp, #88] -; str x25, [sp, #96] -; str x26, [sp, #104] -; str x27, [sp, #112] -; str x28, [sp, #120] -; str x21, [sp, #128] -; str x19, [sp, #136] -; str x20, [sp, #144] -; str x22, [sp, #152] +; movz x0, #40 +; movz x1, #45 +; movz x8, #50 +; movz x9, #55 +; movz x10, #60 +; movz x11, #65 +; movz x12, #70 +; movz x13, #75 +; movz x14, #80 +; movz x15, #85 +; movz x19, #90 +; movz x20, #95 +; movz x21, #100 +; movz x22, #105 +; movz x23, #110 +; movz x24, #115 +; movz x25, #120 +; movz x26, #125 +; movz x27, #130 +; movz x28, #135 +; str x0, [sp] +; str x1, [sp, #8] +; str x8, [sp, #16] +; str x9, [sp, #24] +; str x10, [sp, #32] +; str x11, [sp, #40] +; str x12, [sp, #48] +; str x13, [sp, #56] +; str x14, [sp, #64] +; str x15, [sp, #72] +; str x19, [sp, #80] +; str x20, [sp, #88] +; str x21, [sp, #96] +; str x22, [sp, #104] +; str x23, [sp, #112] +; str x24, [sp, #120] +; str x25, [sp, #128] +; str x26, [sp, #136] +; str x27, [sp, #144] +; str x28, [sp, #152] ; add x8, sp, #160 -; load_ext_name_far x10, TestCase(%tail_callee_stack_args_and_rets)+0 -; blr x10 +; load_ext_name_far x11, TestCase(%tail_callee_stack_args_and_rets)+0 +; blr x11 ; ldr x2, [sp, #392] ; add sp, sp, #400 ; ldp x19, x20, [sp], #16 @@ -710,52 +710,52 @@ block0: ; mov x5, #0x19 ; mov x6, #0x1e ; mov x7, #0x23 -; mov x8, #0x28 -; mov x9, #0x2d -; mov x10, #0x32 -; mov x11, #0x37 -; mov x12, #0x3c -; mov x13, #0x41 -; mov x14, #0x46 -; mov x15, #0x4b -; mov x0, #0x50 -; mov x1, #0x55 -; mov x23, #0x5a -; mov x24, #0x5f -; mov x25, #0x64 -; mov x26, #0x69 -; mov x27, #0x6e -; mov x28, #0x73 -; mov x21, #0x78 -; mov x19, #0x7d -; mov x20, #0x82 -; mov x22, #0x87 -; stur x8, [sp] -; stur x9, [sp, #8] -; stur x10, [sp, #0x10] -; stur x11, [sp, #0x18] -; stur x12, [sp, #0x20] -; stur x13, [sp, #0x28] -; stur x14, [sp, #0x30] -; stur x15, [sp, #0x38] -; stur x0, [sp, #0x40] -; stur x1, [sp, #0x48] -; stur x23, [sp, #0x50] -; stur x24, [sp, #0x58] -; stur x25, [sp, #0x60] -; stur x26, [sp, #0x68] -; stur x27, [sp, #0x70] -; stur x28, [sp, #0x78] -; stur x21, [sp, #0x80] -; stur x19, [sp, #0x88] -; stur x20, [sp, #0x90] -; stur x22, [sp, #0x98] +; mov x0, #0x28 +; mov x1, #0x2d +; mov x8, #0x32 +; mov x9, #0x37 +; mov x10, #0x3c +; mov x11, #0x41 +; mov x12, #0x46 +; mov x13, #0x4b +; mov x14, #0x50 +; mov x15, #0x55 +; mov x19, #0x5a +; mov x20, #0x5f +; mov x21, #0x64 +; mov x22, #0x69 +; mov x23, #0x6e +; mov x24, #0x73 +; mov x25, #0x78 +; mov x26, #0x7d +; mov x27, #0x82 +; mov x28, #0x87 +; stur x0, [sp] +; stur x1, [sp, #8] +; stur x8, [sp, #0x10] +; stur x9, [sp, #0x18] +; stur x10, [sp, #0x20] +; stur x11, [sp, #0x28] +; stur x12, [sp, #0x30] +; stur x13, [sp, #0x38] +; stur x14, [sp, #0x40] +; stur x15, [sp, #0x48] +; stur x19, [sp, #0x50] +; stur x20, [sp, #0x58] +; stur x21, [sp, #0x60] +; stur x22, [sp, #0x68] +; stur x23, [sp, #0x70] +; stur x24, [sp, #0x78] +; stur x25, [sp, #0x80] +; stur x26, [sp, #0x88] +; stur x27, [sp, #0x90] +; stur x28, [sp, #0x98] ; add x8, sp, #0xa0 -; ldr x10, #0xe4 +; ldr x11, #0xe4 ; b #0xec ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %tail_callee_stack_args_and_rets 0 ; .byte 0x00, 0x00, 0x00, 0x00 -; blr x10 +; 
blr x11 ; sub sp, sp, #0xa0 ; ldur x9, [sp, #0xa0] ; str x9, [sp, #0x140] @@ -775,13 +775,13 @@ block0: ; str x9, [sp, #0x178] ; ldur x9, [sp, #0xe0] ; str x9, [sp, #0x180] -; ldur x25, [sp, #0xe8] -; ldur x26, [sp, #0xf0] -; ldur x27, [sp, #0xf8] -; ldr x28, [sp, #0x100] -; ldr x21, [sp, #0x108] -; ldr x19, [sp, #0x110] -; ldr x20, [sp, #0x118] +; ldur x28, [sp, #0xe8] +; ldur x27, [sp, #0xf0] +; ldur x26, [sp, #0xf8] +; ldr x25, [sp, #0x100] +; ldr x19, [sp, #0x108] +; ldr x20, [sp, #0x110] +; ldr x21, [sp, #0x118] ; ldr x22, [sp, #0x120] ; ldr x23, [sp, #0x128] ; ldr x24, [sp, #0x130] diff --git a/cranelift/filetests/filetests/isa/aarch64/tls-macho.clif b/cranelift/filetests/filetests/isa/aarch64/tls-macho.clif index 23a20eb52ce1..627eca89069f 100644 --- a/cranelift/filetests/filetests/isa/aarch64/tls-macho.clif +++ b/cranelift/filetests/filetests/isa/aarch64/tls-macho.clif @@ -13,21 +13,21 @@ block0(v0: i32): ; VCode: ; stp fp, lr, [sp, #-16]! ; mov fp, sp -; str x25, [sp, #-16]! +; str x19, [sp, #-16]! ; stp d14, d15, [sp, #-16]! ; stp d12, d13, [sp, #-16]! ; stp d10, d11, [sp, #-16]! ; stp d8, d9, [sp, #-16]! ; block0: -; mov x25, x0 +; mov x19, x0 ; macho_tls_get_addr x0, userextname0 ; mov x1, x0 -; mov x0, x25 +; mov x0, x19 ; ldp d8, d9, [sp], #16 ; ldp d10, d11, [sp], #16 ; ldp d12, d13, [sp], #16 ; ldp d14, d15, [sp], #16 -; ldr x25, [sp], #16 +; ldr x19, [sp], #16 ; ldp fp, lr, [sp], #16 ; ret ; @@ -35,24 +35,24 @@ block0(v0: i32): ; block0: ; offset 0x0 ; stp x29, x30, [sp, #-0x10]! ; mov x29, sp -; str x25, [sp, #-0x10]! +; str x19, [sp, #-0x10]! ; stp d14, d15, [sp, #-0x10]! ; stp d12, d13, [sp, #-0x10]! ; stp d10, d11, [sp, #-0x10]! ; stp d8, d9, [sp, #-0x10]! ; block1: ; offset 0x1c -; mov x25, x0 +; mov x19, x0 ; adrp x0, #0 ; reloc_external MachOAarch64TlsAdrPage21 u1:0 0 ; ldr x0, [x0] ; reloc_external MachOAarch64TlsAdrPageOff12 u1:0 0 ; ldr x1, [x0] ; blr x1 ; mov x1, x0 -; mov x0, x25 +; mov x0, x19 ; ldp d8, d9, [sp], #0x10 ; ldp d10, d11, [sp], #0x10 ; ldp d12, d13, [sp], #0x10 ; ldp d14, d15, [sp], #0x10 -; ldr x25, [sp], #0x10 +; ldr x19, [sp], #0x10 ; ldp x29, x30, [sp], #0x10 ; ret diff --git a/cranelift/filetests/filetests/isa/aarch64/user_stack_maps.clif b/cranelift/filetests/filetests/isa/aarch64/user_stack_maps.clif index 469086bed314..a910521f1914 100644 --- a/cranelift/filetests/filetests/isa/aarch64/user_stack_maps.clif +++ b/cranelift/filetests/filetests/isa/aarch64/user_stack_maps.clif @@ -34,33 +34,33 @@ block0: ; VCode: ; stp fp, lr, [sp, #-16]! ; mov fp, sp -; str x24, [sp, #-16]! -; stp x22, x23, [sp, #-16]! +; str x21, [sp, #-16]! +; stp x19, x20, [sp, #-16]! 
; sub sp, sp, #16 ; block0: -; movz w24, #0 -; movz w22, #1 -; movz w23, #2 -; str w24, [sp] -; str w22, [sp, #4] -; str w23, [sp, #8] -; mov x0, x24 +; movz w21, #0 +; movz w19, #1 +; movz w20, #2 +; str w21, [sp] +; str w19, [sp, #4] +; str w20, [sp, #8] +; mov x0, x21 ; bl 0 ; ; UserStackMap { by_type: [(types::I32, CompoundBitSet {0, 4, 8})], sp_to_sized_stack_slots: None } -; str w22, [sp] -; str w23, [sp, #4] -; mov x0, x24 +; str w19, [sp] +; str w20, [sp, #4] +; mov x0, x21 ; bl 0 ; ; UserStackMap { by_type: [(types::I32, CompoundBitSet {0, 4})], sp_to_sized_stack_slots: None } -; str w23, [sp] -; mov x0, x22 +; str w20, [sp] +; mov x0, x19 ; bl 0 ; ; UserStackMap { by_type: [(types::I32, CompoundBitSet {0})], sp_to_sized_stack_slots: None } -; mov x0, x23 +; mov x0, x20 ; bl 0 ; add sp, sp, #16 -; ldp x22, x23, [sp], #16 -; ldr x24, [sp], #16 +; ldp x19, x20, [sp], #16 +; ldr x21, [sp], #16 ; ldp fp, lr, [sp], #16 ; ret ; @@ -68,30 +68,30 @@ block0: ; block0: ; offset 0x0 ; stp x29, x30, [sp, #-0x10]! ; mov x29, sp -; str x24, [sp, #-0x10]! -; stp x22, x23, [sp, #-0x10]! +; str x21, [sp, #-0x10]! +; stp x19, x20, [sp, #-0x10]! ; sub sp, sp, #0x10 ; block1: ; offset 0x14 -; mov w24, #0 -; mov w22, #1 -; mov w23, #2 -; stur w24, [sp] -; stur w22, [sp, #4] -; stur w23, [sp, #8] -; mov x0, x24 +; mov w21, #0 +; mov w19, #1 +; mov w20, #2 +; stur w21, [sp] +; stur w19, [sp, #4] +; stur w20, [sp, #8] +; mov x0, x21 ; bl #0x30 ; reloc_external Call u0:0 0 -; stur w22, [sp] -; stur w23, [sp, #4] -; mov x0, x24 +; stur w19, [sp] +; stur w20, [sp, #4] +; mov x0, x21 ; bl #0x40 ; reloc_external Call u0:0 0 -; stur w23, [sp] -; mov x0, x22 +; stur w20, [sp] +; mov x0, x19 ; bl #0x4c ; reloc_external Call u0:0 0 -; mov x0, x23 +; mov x0, x20 ; bl #0x54 ; reloc_external Call u0:0 0 ; add sp, sp, #0x10 -; ldp x22, x23, [sp], #0x10 -; ldr x24, [sp], #0x10 +; ldp x19, x20, [sp], #0x10 +; ldr x21, [sp], #0x10 ; ldp x29, x30, [sp], #0x10 ; ret @@ -118,33 +118,33 @@ block0(v0: i8, v1: i16, v2: i32, v3: i64, v4: f32, v5: f64): ; VCode: ; stp fp, lr, [sp, #-16]! ; mov fp, sp -; stp x24, x27, [sp, #-16]! -; stp x19, x23, [sp, #-16]! +; stp x21, x23, [sp, #-16]! +; stp x19, x20, [sp, #-16]! ; sub sp, sp, #128 ; block0: ; strb w0, [sp] ; mov x23, x0 ; strh w1, [sp, #8] -; mov x19, x1 +; mov x21, x1 ; str w2, [sp, #16] -; mov x27, x2 +; mov x20, x2 ; str s0, [sp, #20] ; str q0, [sp, #96] ; str x3, [sp, #24] -; mov x24, x3 +; mov x19, x3 ; str d1, [sp, #32] ; str q1, [sp, #112] ; bl 0 ; ; UserStackMap { by_type: [(types::I8, CompoundBitSet {0}), (types::I16, CompoundBitSet {8}), (types::I32, CompoundBitSet {16}), (types::F32, CompoundBitSet {20}), (types::I64, CompoundBitSet {24}), (types::F64, CompoundBitSet {32})], sp_to_sized_stack_slots: None } ; mov x0, x23 -; mov x1, x19 -; mov x2, x27 -; mov x3, x24 +; mov x1, x21 +; mov x2, x20 +; mov x3, x19 ; ldr q0, [sp, #96] ; ldr q1, [sp, #112] ; add sp, sp, #128 -; ldp x19, x23, [sp], #16 -; ldp x24, x27, [sp], #16 +; ldp x19, x20, [sp], #16 +; ldp x21, x23, [sp], #16 ; ldp fp, lr, [sp], #16 ; ret ; @@ -152,32 +152,32 @@ block0(v0: i8, v1: i16, v2: i32, v3: i64, v4: f32, v5: f64): ; block0: ; offset 0x0 ; stp x29, x30, [sp, #-0x10]! ; mov x29, sp -; stp x24, x27, [sp, #-0x10]! -; stp x19, x23, [sp, #-0x10]! +; stp x21, x23, [sp, #-0x10]! +; stp x19, x20, [sp, #-0x10]! 
; sub sp, sp, #0x80 ; block1: ; offset 0x14 ; sturb w0, [sp] ; mov x23, x0 ; sturh w1, [sp, #8] -; mov x19, x1 +; mov x21, x1 ; stur w2, [sp, #0x10] -; mov x27, x2 +; mov x20, x2 ; stur s0, [sp, #0x14] ; stur q0, [sp, #0x60] ; stur x3, [sp, #0x18] -; mov x24, x3 +; mov x19, x3 ; stur d1, [sp, #0x20] ; stur q1, [sp, #0x70] ; bl #0x44 ; reloc_external Call u0:0 0 ; mov x0, x23 -; mov x1, x19 -; mov x2, x27 -; mov x3, x24 +; mov x1, x21 +; mov x2, x20 +; mov x3, x19 ; ldur q0, [sp, #0x60] ; ldur q1, [sp, #0x70] ; add sp, sp, #0x80 -; ldp x19, x23, [sp], #0x10 -; ldp x24, x27, [sp], #0x10 +; ldp x19, x20, [sp], #0x10 +; ldp x21, x23, [sp], #0x10 ; ldp x29, x30, [sp], #0x10 ; ret diff --git a/cranelift/filetests/filetests/isa/aarch64/vhigh_bits.clif b/cranelift/filetests/filetests/isa/aarch64/vhigh_bits.clif index afb64c9443ac..d26dab8662aa 100644 --- a/cranelift/filetests/filetests/isa/aarch64/vhigh_bits.clif +++ b/cranelift/filetests/filetests/isa/aarch64/vhigh_bits.clif @@ -14,12 +14,12 @@ block0(v0: i8x16): ; movk x7, x7, #2052, LSL #16 ; movk x7, x7, #8208, LSL #32 ; movk x7, x7, #32832, LSL #48 -; dup v20.2d, x7 -; and v22.16b, v2.16b, v20.16b -; ext v24.16b, v22.16b, v22.16b, #8 -; zip1 v26.16b, v22.16b, v24.16b -; addv h28, v26.8h -; umov w0, v28.h[0] +; dup v16.2d, x7 +; and v16.16b, v2.16b, v16.16b +; ext v17.16b, v16.16b, v16.16b, #8 +; zip1 v18.16b, v16.16b, v17.16b +; addv h20, v18.8h +; umov w0, v20.h[0] ; ret ; ; Disassembled: @@ -29,12 +29,12 @@ block0(v0: i8x16): ; movk x7, #0x804, lsl #16 ; movk x7, #0x2010, lsl #32 ; movk x7, #0x8040, lsl #48 -; dup v20.2d, x7 -; and v22.16b, v2.16b, v20.16b -; ext v24.16b, v22.16b, v22.16b, #8 -; zip1 v26.16b, v22.16b, v24.16b -; addv h28, v26.8h -; umov w0, v28.h[0] +; dup v16.2d, x7 +; and v16.16b, v2.16b, v16.16b +; ext v17.16b, v16.16b, v16.16b, #8 +; zip1 v18.16b, v16.16b, v17.16b +; addv h20, v18.8h +; umov w0, v20.h[0] ; ret function %f2(i8x16) -> i16 { @@ -50,12 +50,12 @@ block0(v0: i8x16): ; movk x7, x7, #2052, LSL #16 ; movk x7, x7, #8208, LSL #32 ; movk x7, x7, #32832, LSL #48 -; dup v20.2d, x7 -; and v22.16b, v2.16b, v20.16b -; ext v24.16b, v22.16b, v22.16b, #8 -; zip1 v26.16b, v22.16b, v24.16b -; addv h28, v26.8h -; umov w0, v28.h[0] +; dup v16.2d, x7 +; and v16.16b, v2.16b, v16.16b +; ext v17.16b, v16.16b, v16.16b, #8 +; zip1 v18.16b, v16.16b, v17.16b +; addv h20, v18.8h +; umov w0, v20.h[0] ; ret ; ; Disassembled: @@ -65,12 +65,12 @@ block0(v0: i8x16): ; movk x7, #0x804, lsl #16 ; movk x7, #0x2010, lsl #32 ; movk x7, #0x8040, lsl #48 -; dup v20.2d, x7 -; and v22.16b, v2.16b, v20.16b -; ext v24.16b, v22.16b, v22.16b, #8 -; zip1 v26.16b, v22.16b, v24.16b -; addv h28, v26.8h -; umov w0, v28.h[0] +; dup v16.2d, x7 +; and v16.16b, v2.16b, v16.16b +; ext v17.16b, v16.16b, v16.16b, #8 +; zip1 v18.16b, v16.16b, v17.16b +; addv h20, v18.8h +; umov w0, v20.h[0] ; ret function %f3(i16x8) -> i8 { diff --git a/cranelift/filetests/filetests/isa/pulley32/call.clif b/cranelift/filetests/filetests/isa/pulley32/call.clif index d18b20e2c04e..c2dc9a09f6c9 100644 --- a/cranelift/filetests/filetests/isa/pulley32/call.clif +++ b/cranelift/filetests/filetests/isa/pulley32/call.clif @@ -214,70 +214,70 @@ block0: } ; VCode: -; push_frame_save 112, {x16, x17, x18, x19, x26, x27, x28, x29} +; push_frame_save 128, {x16, x17, x18, x19, x20, x21, x24, x25, x26} ; block0: -; x12 = load_addr OutgoingArg(0) -; call CallInfo { dest: PulleyCall { name: TestCase(%g), args: [XReg(p12i)] }, uses: [], defs: [CallRetPair { vreg: Writable { reg: p0i }, location: 
Reg(p0i, types::I64) }, CallRetPair { vreg: Writable { reg: p1i }, location: Reg(p1i, types::I64) }, CallRetPair { vreg: Writable { reg: p2i }, location: Reg(p2i, types::I64) }, CallRetPair { vreg: Writable { reg: p3i }, location: Reg(p3i, types::I64) }, CallRetPair { vreg: Writable { reg: p4i }, location: Reg(p4i, types::I64) }, CallRetPair { vreg: Writable { reg: p5i }, location: Reg(p5i, types::I64) }, CallRetPair { vreg: Writable { reg: p6i }, location: Reg(p6i, types::I64) }, CallRetPair { vreg: Writable { reg: p7i }, location: Reg(p7i, types::I64) }, CallRetPair { vreg: Writable { reg: p8i }, location: Reg(p8i, types::I64) }, CallRetPair { vreg: Writable { reg: p9i }, location: Reg(p9i, types::I64) }, CallRetPair { vreg: Writable { reg: p10i }, location: Reg(p10i, types::I64) }, CallRetPair { vreg: Writable { reg: p11i }, location: Reg(p11i, types::I64) }, CallRetPair { vreg: Writable { reg: p12i }, location: Reg(p12i, types::I64) }, CallRetPair { vreg: Writable { reg: p13i }, location: Reg(p13i, types::I64) }, CallRetPair { vreg: Writable { reg: p14i }, location: Reg(p14i, types::I64) }, CallRetPair { vreg: Writable { reg: p27i }, location: Stack(OutgoingArg(0), types::I64) }, CallRetPair { vreg: Writable { reg: p19i }, location: Stack(OutgoingArg(8), types::I64) }, CallRetPair { vreg: Writable { reg: p29i }, location: Stack(OutgoingArg(16), types::I64) }, CallRetPair { vreg: Writable { reg: p16i }, location: Stack(OutgoingArg(24), types::I64) }, CallRetPair { vreg: Writable { reg: p17i }, location: Stack(OutgoingArg(32), types::I64) }, CallRetPair { vreg: Writable { reg: p18i }, location: Stack(OutgoingArg(40), types::I64) }], clobbers: PRegSet { bits: [32768, 4294967295, 4294967295, 0] }, callee_conv: Fast, caller_conv: Fast, callee_pop_size: 0, try_call_info: None, patchable: false } +; x0 = load_addr OutgoingArg(0) +; call CallInfo { dest: PulleyCall { name: TestCase(%g), args: [XReg(p0i)] }, uses: [], defs: [CallRetPair { vreg: Writable { reg: p0i }, location: Reg(p0i, types::I64) }, CallRetPair { vreg: Writable { reg: p1i }, location: Reg(p1i, types::I64) }, CallRetPair { vreg: Writable { reg: p2i }, location: Reg(p2i, types::I64) }, CallRetPair { vreg: Writable { reg: p3i }, location: Reg(p3i, types::I64) }, CallRetPair { vreg: Writable { reg: p4i }, location: Reg(p4i, types::I64) }, CallRetPair { vreg: Writable { reg: p5i }, location: Reg(p5i, types::I64) }, CallRetPair { vreg: Writable { reg: p6i }, location: Reg(p6i, types::I64) }, CallRetPair { vreg: Writable { reg: p7i }, location: Reg(p7i, types::I64) }, CallRetPair { vreg: Writable { reg: p8i }, location: Reg(p8i, types::I64) }, CallRetPair { vreg: Writable { reg: p9i }, location: Reg(p9i, types::I64) }, CallRetPair { vreg: Writable { reg: p10i }, location: Reg(p10i, types::I64) }, CallRetPair { vreg: Writable { reg: p11i }, location: Reg(p11i, types::I64) }, CallRetPair { vreg: Writable { reg: p12i }, location: Reg(p12i, types::I64) }, CallRetPair { vreg: Writable { reg: p13i }, location: Reg(p13i, types::I64) }, CallRetPair { vreg: Writable { reg: p14i }, location: Reg(p14i, types::I64) }, CallRetPair { vreg: Writable { reg: p16i }, location: Stack(OutgoingArg(0), types::I64) }, CallRetPair { vreg: Writable { reg: p17i }, location: Stack(OutgoingArg(8), types::I64) }, CallRetPair { vreg: Writable { reg: p18i }, location: Stack(OutgoingArg(16), types::I64) }, CallRetPair { vreg: Writable { reg: p19i }, location: Stack(OutgoingArg(24), types::I64) }, CallRetPair { vreg: Writable { reg: p20i }, location: 
Stack(OutgoingArg(32), types::I64) }, CallRetPair { vreg: Writable { reg: p21i }, location: Stack(OutgoingArg(40), types::I64) }], clobbers: PRegSet { bits: [32768, 4294967295, 4294967295, 0] }, callee_conv: Fast, caller_conv: Fast, callee_pop_size: 0, try_call_info: None, patchable: false } ; xadd64 x26, x0, x1 -; xadd64 x28, x2, x3 -; xadd64 x2, x4, x5 +; xadd64 x25, x2, x3 +; xadd64 x24, x4, x5 ; xadd64 x15, x6, x7 -; xadd64 x1, x8, x9 -; xadd64 x3, x10, x11 -; xadd64 x4, x12, x13 -; xadd64 x12, x14, x27 -; xadd64 x13, x19, x29 -; xadd64 x9, x29, x16 -; xadd64 x10, x17, x18 -; xadd64 x8, x26, x28 -; xadd64 x11, x2, x15 -; xadd64 x14, x1, x3 -; xadd64 x12, x4, x12 -; xadd64 x9, x13, x9 -; xadd64 x10, x10, x10 -; xadd64 x8, x8, x11 -; xadd64 x11, x14, x12 -; xadd64 x9, x9, x10 -; xadd64 x8, x8, x11 -; xadd64 x9, x9, x9 -; xadd64 x0, x8, x9 -; pop_frame_restore 112, {x16, x17, x18, x19, x26, x27, x28, x29} +; xadd64 x9, x8, x9 +; xadd64 x11, x10, x11 +; xadd64 x12, x12, x13 +; xadd64 x10, x14, x16 +; xadd64 x7, x17, x18 +; xadd64 x3, x18, x19 +; xadd64 x0, x20, x21 +; xadd64 x1, x26, x25 +; xadd64 x2, x24, x15 +; xadd64 x4, x9, x11 +; xadd64 x5, x12, x10 +; xadd64 x3, x7, x3 +; xadd64 x0, x0, x0 +; xadd64 x1, x1, x2 +; xadd64 x2, x4, x5 +; xadd64 x0, x3, x0 +; xadd64 x1, x1, x2 +; xadd64 x0, x0, x0 +; xadd64 x0, x1, x0 +; pop_frame_restore 128, {x16, x17, x18, x19, x20, x21, x24, x25, x26} ; ret ; ; Disassembled: -; push_frame_save 112, x16, x17, x18, x19, x26, x27, x28, x29 -; xmov x12, sp -; call1 x12, 0x2 // target = 0xa -; xload64le_o32 x27, sp, 0 -; xload64le_o32 x19, sp, 8 -; xload64le_o32 x29, sp, 16 -; xload64le_o32 x16, sp, 24 -; xload64le_o32 x17, sp, 32 -; xload64le_o32 x18, sp, 40 +; push_frame_save 128, x16, x17, x18, x19, x20, x21, x24, x25, x26 +; xmov x0, sp +; call 0x1 // target = 0x9 +; xload64le_o32 x16, sp, 0 +; xload64le_o32 x17, sp, 8 +; xload64le_o32 x18, sp, 16 +; xload64le_o32 x19, sp, 24 +; xload64le_o32 x20, sp, 32 +; xload64le_o32 x21, sp, 40 ; xadd64 x26, x0, x1 -; xadd64 x28, x2, x3 -; xadd64 x2, x4, x5 +; xadd64 x25, x2, x3 +; xadd64 x24, x4, x5 ; xadd64 x15, x6, x7 -; xadd64 x1, x8, x9 -; xadd64 x3, x10, x11 -; xadd64 x4, x12, x13 -; xadd64 x12, x14, x27 -; xadd64 x13, x19, x29 -; xadd64 x9, x29, x16 -; xadd64 x10, x17, x18 -; xadd64 x8, x26, x28 -; xadd64 x11, x2, x15 -; xadd64 x14, x1, x3 -; xadd64 x12, x4, x12 -; xadd64 x9, x13, x9 -; xadd64 x10, x10, x10 -; xadd64 x8, x8, x11 -; xadd64 x11, x14, x12 -; xadd64 x9, x9, x10 -; xadd64 x8, x8, x11 -; xadd64 x9, x9, x9 -; xadd64 x0, x8, x9 -; pop_frame_restore 112, x16, x17, x18, x19, x26, x27, x28, x29 +; xadd64 x9, x8, x9 +; xadd64 x11, x10, x11 +; xadd64 x12, x12, x13 +; xadd64 x10, x14, x16 +; xadd64 x7, x17, x18 +; xadd64 x3, x18, x19 +; xadd64 x0, x20, x21 +; xadd64 x1, x26, x25 +; xadd64 x2, x24, x15 +; xadd64 x4, x9, x11 +; xadd64 x5, x12, x10 +; xadd64 x3, x7, x3 +; xadd64 x0, x0, x0 +; xadd64 x1, x1, x2 +; xadd64 x2, x4, x5 +; xadd64 x0, x3, x0 +; xadd64 x1, x1, x2 +; xadd64 x0, x0, x0 +; xadd64 x0, x1, x0 +; pop_frame_restore 128, x16, x17, x18, x19, x20, x21, x24, x25, x26 ; ret function %call_indirect(i32) -> i64 { diff --git a/cranelift/filetests/filetests/isa/pulley32/exceptions.clif b/cranelift/filetests/filetests/isa/pulley32/exceptions.clif index a1f45a8d7965..1967b5bd9f37 100644 --- a/cranelift/filetests/filetests/isa/pulley32/exceptions.clif +++ b/cranelift/filetests/filetests/isa/pulley32/exceptions.clif @@ -134,11 +134,11 @@ function %f4(i32, i32) -> i32, f32, f64 { ; fstore64 Slot(16), 
f1 // flags = notrap aligned ; call CallInfo { dest: PulleyCall { name: TestCase(%g), args: [XReg(p2i)] }, uses: [], defs: [CallRetPair { vreg: Writable { reg: p0f }, location: Reg(p0f, types::F32) }, CallRetPair { vreg: Writable { reg: p0i }, location: Reg(p0i, types::I32) }, CallRetPair { vreg: Writable { reg: p1i }, location: Reg(p1i, types::I32) }], clobbers: PRegSet { bits: [4294967292, 4294967294, 4294967295, 0] }, callee_conv: Tail, caller_conv: Fast, callee_pop_size: 0, try_call_info: Some(TryCallInfo { continuation: MachLabel(3), exception_handlers: [Context(stack2), Tag(tag0, MachLabel(1)), Tag(tag1, MachLabel(2)), Context(stack0), Tag(tag0, MachLabel(4))] }), patchable: false }; jump MachLabel(3); catch [context stack2, tag0: MachLabel(1), tag1: MachLabel(2), context stack0, tag0: MachLabel(4)] ; block1: -; xmov x3, x0 +; xmov x2, x0 ; f1 = fload64 Slot(16) // flags = notrap aligned ; jump label5 ; block2: -; xmov x3, x0 +; xmov x2, x0 ; f1 = fload64 Slot(16) // flags = notrap aligned ; jump label5 ; block3: @@ -148,10 +148,10 @@ function %f4(i32, i32) -> i32, f32, f64 { ; ret ; block4: ; f1 = fload64 Slot(16) // flags = notrap aligned -; x3 = xload64 Slot(8) // flags = notrap aligned +; x2 = xload64 Slot(8) // flags = notrap aligned ; jump label5 ; block5: -; xadd32_u8 x0, x3, 1 +; xadd32_u8 x0, x2, 1 ; fconst32 f0, 0 ; pop_frame_restore 160, {x16, x17, x18, x19, x20, x21, x22, x23, x24, x25, x26, x27, x28, x29, sp, spilltmp0} ; ret @@ -165,10 +165,10 @@ function %f4(i32, i32) -> i32, f32, f64 { ; fstore64le_o32 sp, 16, f1 ; call1 x2, 0x2 // target = 0x31 ; jump 0x27 // target = 0x5c -; xmov x3, x0 +; xmov x2, x0 ; fload64le_o32 f1, sp, 16 ; jump 0x37 // target = 0x7d -; xmov x3, x0 +; xmov x2, x0 ; fload64le_o32 f1, sp, 16 ; jump 0x26 // target = 0x7d ; xone x0 @@ -176,8 +176,8 @@ function %f4(i32, i32) -> i32, f32, f64 { ; pop_frame_restore 160, x16, x17, x18, x19, x20, x21, x22, x23, x24, x25, x26, x27, x28, x29, sp, spilltmp0 ; ret ; fload64le_o32 f1, sp, 16 -; xload64le_o32 x3, sp, 8 -; xadd32_u8 x0, x3, 1 +; xload64le_o32 x2, sp, 8 +; xadd32_u8 x0, x2, 1 ; fconst32 f0, 0 ; pop_frame_restore 160, x16, x17, x18, x19, x20, x21, x22, x23, x24, x25, x26, x27, x28, x29, sp, spilltmp0 ; ret diff --git a/cranelift/filetests/filetests/isa/pulley64/call.clif b/cranelift/filetests/filetests/isa/pulley64/call.clif index cd11d2a64eca..16b271835620 100644 --- a/cranelift/filetests/filetests/isa/pulley64/call.clif +++ b/cranelift/filetests/filetests/isa/pulley64/call.clif @@ -214,70 +214,70 @@ block0: } ; VCode: -; push_frame_save 112, {x16, x17, x18, x19, x26, x27, x28, x29} +; push_frame_save 128, {x16, x17, x18, x19, x20, x21, x24, x25, x26} ; block0: -; x12 = load_addr OutgoingArg(0) -; call CallInfo { dest: PulleyCall { name: TestCase(%g), args: [XReg(p12i)] }, uses: [], defs: [CallRetPair { vreg: Writable { reg: p0i }, location: Reg(p0i, types::I64) }, CallRetPair { vreg: Writable { reg: p1i }, location: Reg(p1i, types::I64) }, CallRetPair { vreg: Writable { reg: p2i }, location: Reg(p2i, types::I64) }, CallRetPair { vreg: Writable { reg: p3i }, location: Reg(p3i, types::I64) }, CallRetPair { vreg: Writable { reg: p4i }, location: Reg(p4i, types::I64) }, CallRetPair { vreg: Writable { reg: p5i }, location: Reg(p5i, types::I64) }, CallRetPair { vreg: Writable { reg: p6i }, location: Reg(p6i, types::I64) }, CallRetPair { vreg: Writable { reg: p7i }, location: Reg(p7i, types::I64) }, CallRetPair { vreg: Writable { reg: p8i }, location: Reg(p8i, types::I64) }, CallRetPair { vreg: 
Writable { reg: p9i }, location: Reg(p9i, types::I64) }, CallRetPair { vreg: Writable { reg: p10i }, location: Reg(p10i, types::I64) }, CallRetPair { vreg: Writable { reg: p11i }, location: Reg(p11i, types::I64) }, CallRetPair { vreg: Writable { reg: p12i }, location: Reg(p12i, types::I64) }, CallRetPair { vreg: Writable { reg: p13i }, location: Reg(p13i, types::I64) }, CallRetPair { vreg: Writable { reg: p14i }, location: Reg(p14i, types::I64) }, CallRetPair { vreg: Writable { reg: p27i }, location: Stack(OutgoingArg(0), types::I64) }, CallRetPair { vreg: Writable { reg: p19i }, location: Stack(OutgoingArg(8), types::I64) }, CallRetPair { vreg: Writable { reg: p29i }, location: Stack(OutgoingArg(16), types::I64) }, CallRetPair { vreg: Writable { reg: p16i }, location: Stack(OutgoingArg(24), types::I64) }, CallRetPair { vreg: Writable { reg: p17i }, location: Stack(OutgoingArg(32), types::I64) }, CallRetPair { vreg: Writable { reg: p18i }, location: Stack(OutgoingArg(40), types::I64) }], clobbers: PRegSet { bits: [32768, 4294967295, 4294967295, 0] }, callee_conv: Fast, caller_conv: Fast, callee_pop_size: 0, try_call_info: None, patchable: false } +; x0 = load_addr OutgoingArg(0) +; call CallInfo { dest: PulleyCall { name: TestCase(%g), args: [XReg(p0i)] }, uses: [], defs: [CallRetPair { vreg: Writable { reg: p0i }, location: Reg(p0i, types::I64) }, CallRetPair { vreg: Writable { reg: p1i }, location: Reg(p1i, types::I64) }, CallRetPair { vreg: Writable { reg: p2i }, location: Reg(p2i, types::I64) }, CallRetPair { vreg: Writable { reg: p3i }, location: Reg(p3i, types::I64) }, CallRetPair { vreg: Writable { reg: p4i }, location: Reg(p4i, types::I64) }, CallRetPair { vreg: Writable { reg: p5i }, location: Reg(p5i, types::I64) }, CallRetPair { vreg: Writable { reg: p6i }, location: Reg(p6i, types::I64) }, CallRetPair { vreg: Writable { reg: p7i }, location: Reg(p7i, types::I64) }, CallRetPair { vreg: Writable { reg: p8i }, location: Reg(p8i, types::I64) }, CallRetPair { vreg: Writable { reg: p9i }, location: Reg(p9i, types::I64) }, CallRetPair { vreg: Writable { reg: p10i }, location: Reg(p10i, types::I64) }, CallRetPair { vreg: Writable { reg: p11i }, location: Reg(p11i, types::I64) }, CallRetPair { vreg: Writable { reg: p12i }, location: Reg(p12i, types::I64) }, CallRetPair { vreg: Writable { reg: p13i }, location: Reg(p13i, types::I64) }, CallRetPair { vreg: Writable { reg: p14i }, location: Reg(p14i, types::I64) }, CallRetPair { vreg: Writable { reg: p16i }, location: Stack(OutgoingArg(0), types::I64) }, CallRetPair { vreg: Writable { reg: p17i }, location: Stack(OutgoingArg(8), types::I64) }, CallRetPair { vreg: Writable { reg: p18i }, location: Stack(OutgoingArg(16), types::I64) }, CallRetPair { vreg: Writable { reg: p19i }, location: Stack(OutgoingArg(24), types::I64) }, CallRetPair { vreg: Writable { reg: p20i }, location: Stack(OutgoingArg(32), types::I64) }, CallRetPair { vreg: Writable { reg: p21i }, location: Stack(OutgoingArg(40), types::I64) }], clobbers: PRegSet { bits: [32768, 4294967295, 4294967295, 0] }, callee_conv: Fast, caller_conv: Fast, callee_pop_size: 0, try_call_info: None, patchable: false } ; xadd64 x26, x0, x1 -; xadd64 x28, x2, x3 -; xadd64 x2, x4, x5 +; xadd64 x25, x2, x3 +; xadd64 x24, x4, x5 ; xadd64 x15, x6, x7 -; xadd64 x1, x8, x9 -; xadd64 x3, x10, x11 -; xadd64 x4, x12, x13 -; xadd64 x12, x14, x27 -; xadd64 x13, x19, x29 -; xadd64 x9, x29, x16 -; xadd64 x10, x17, x18 -; xadd64 x8, x26, x28 -; xadd64 x11, x2, x15 -; xadd64 x14, x1, x3 -; xadd64 x12, x4, x12 
-; xadd64 x9, x13, x9 -; xadd64 x10, x10, x10 -; xadd64 x8, x8, x11 -; xadd64 x11, x14, x12 -; xadd64 x9, x9, x10 -; xadd64 x8, x8, x11 -; xadd64 x9, x9, x9 -; xadd64 x0, x8, x9 -; pop_frame_restore 112, {x16, x17, x18, x19, x26, x27, x28, x29} +; xadd64 x9, x8, x9 +; xadd64 x11, x10, x11 +; xadd64 x12, x12, x13 +; xadd64 x10, x14, x16 +; xadd64 x7, x17, x18 +; xadd64 x3, x18, x19 +; xadd64 x0, x20, x21 +; xadd64 x1, x26, x25 +; xadd64 x2, x24, x15 +; xadd64 x4, x9, x11 +; xadd64 x5, x12, x10 +; xadd64 x3, x7, x3 +; xadd64 x0, x0, x0 +; xadd64 x1, x1, x2 +; xadd64 x2, x4, x5 +; xadd64 x0, x3, x0 +; xadd64 x1, x1, x2 +; xadd64 x0, x0, x0 +; xadd64 x0, x1, x0 +; pop_frame_restore 128, {x16, x17, x18, x19, x20, x21, x24, x25, x26} ; ret ; ; Disassembled: -; push_frame_save 112, x16, x17, x18, x19, x26, x27, x28, x29 -; xmov x12, sp -; call1 x12, 0x2 // target = 0xa -; xload64le_o32 x27, sp, 0 -; xload64le_o32 x19, sp, 8 -; xload64le_o32 x29, sp, 16 -; xload64le_o32 x16, sp, 24 -; xload64le_o32 x17, sp, 32 -; xload64le_o32 x18, sp, 40 +; push_frame_save 128, x16, x17, x18, x19, x20, x21, x24, x25, x26 +; xmov x0, sp +; call 0x1 // target = 0x9 +; xload64le_o32 x16, sp, 0 +; xload64le_o32 x17, sp, 8 +; xload64le_o32 x18, sp, 16 +; xload64le_o32 x19, sp, 24 +; xload64le_o32 x20, sp, 32 +; xload64le_o32 x21, sp, 40 ; xadd64 x26, x0, x1 -; xadd64 x28, x2, x3 -; xadd64 x2, x4, x5 +; xadd64 x25, x2, x3 +; xadd64 x24, x4, x5 ; xadd64 x15, x6, x7 -; xadd64 x1, x8, x9 -; xadd64 x3, x10, x11 -; xadd64 x4, x12, x13 -; xadd64 x12, x14, x27 -; xadd64 x13, x19, x29 -; xadd64 x9, x29, x16 -; xadd64 x10, x17, x18 -; xadd64 x8, x26, x28 -; xadd64 x11, x2, x15 -; xadd64 x14, x1, x3 -; xadd64 x12, x4, x12 -; xadd64 x9, x13, x9 -; xadd64 x10, x10, x10 -; xadd64 x8, x8, x11 -; xadd64 x11, x14, x12 -; xadd64 x9, x9, x10 -; xadd64 x8, x8, x11 -; xadd64 x9, x9, x9 -; xadd64 x0, x8, x9 -; pop_frame_restore 112, x16, x17, x18, x19, x26, x27, x28, x29 +; xadd64 x9, x8, x9 +; xadd64 x11, x10, x11 +; xadd64 x12, x12, x13 +; xadd64 x10, x14, x16 +; xadd64 x7, x17, x18 +; xadd64 x3, x18, x19 +; xadd64 x0, x20, x21 +; xadd64 x1, x26, x25 +; xadd64 x2, x24, x15 +; xadd64 x4, x9, x11 +; xadd64 x5, x12, x10 +; xadd64 x3, x7, x3 +; xadd64 x0, x0, x0 +; xadd64 x1, x1, x2 +; xadd64 x2, x4, x5 +; xadd64 x0, x3, x0 +; xadd64 x1, x1, x2 +; xadd64 x0, x0, x0 +; xadd64 x0, x1, x0 +; pop_frame_restore 128, x16, x17, x18, x19, x20, x21, x24, x25, x26 ; ret function %call_indirect(i64) -> i64 { @@ -384,13 +384,13 @@ block0(v0: i32): ; VCode: ; push_frame ; stack_alloc32 1000016 -; xstore64 sp+1000008, x20 // flags = notrap aligned +; xstore64 sp+1000008, x16 // flags = notrap aligned ; block0: -; xmov x20, x0 +; xmov x16, x0 ; call CallInfo { dest: PulleyCall { name: TestCase(%g), args: [] }, uses: [], defs: [CallRetPair { vreg: Writable { reg: p0i }, location: Reg(p0i, types::I32) }], clobbers: PRegSet { bits: [65534, 4294967295, 4294967295, 0] }, callee_conv: Fast, caller_conv: Fast, callee_pop_size: 0, try_call_info: None, patchable: false } -; xmov x5, x20 +; xmov x5, x16 ; xadd32 x0, x5, x0 -; x20 = xload64 sp+1000008 // flags = notrap aligned +; x16 = xload64 sp+1000008 // flags = notrap aligned ; stack_free32 1000016 ; pop_frame ; ret @@ -398,12 +398,12 @@ block0(v0: i32): ; Disassembled: ; push_frame ; stack_alloc32 1000016 -; xstore64le_o32 sp, 1000008, x20 -; xmov x20, x0 +; xstore64le_o32 sp, 1000008, x16 +; xmov x16, x0 ; call 0x1 // target = 0x11 -; xmov x5, x20 +; xmov x5, x16 ; xadd32 x0, x5, x0 -; xload64le_o32 x20, sp, 
1000008 +; xload64le_o32 x16, sp, 1000008 ; stack_free32 1000016 ; pop_frame ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/amodes.clif b/cranelift/filetests/filetests/isa/riscv64/amodes.clif index 678b86f30200..cc70ce763bd1 100644 --- a/cranelift/filetests/filetests/isa/riscv64/amodes.clif +++ b/cranelift/filetests/filetests/isa/riscv64/amodes.clif @@ -12,16 +12,16 @@ block0(v0: i64, v1: i32): ; VCode: ; block0: -; sext.w a5,a1 -; add a5,a0,a5 -; lw a0,0(a5) +; sext.w a1,a1 +; add a0,a0,a1 +; lw a0,0(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a5, a1 -; add a5, a0, a5 -; lw a0, 0(a5) ; trap: heap_oob +; sext.w a1, a1 +; add a0, a0, a1 +; lw a0, 0(a0) ; trap: heap_oob ; ret function %f6(i64, i32) -> i32 { @@ -34,16 +34,16 @@ block0(v0: i64, v1: i32): ; VCode: ; block0: -; sext.w a5,a1 -; add a5,a5,a0 -; lw a0,0(a5) +; sext.w a1,a1 +; add a0,a1,a0 +; lw a0,0(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a5, a1 -; add a5, a5, a0 -; lw a0, 0(a5) ; trap: heap_oob +; sext.w a1, a1 +; add a0, a1, a0 +; lw a0, 0(a0) ; trap: heap_oob ; ret function %f7(i32, i32) -> i32 { @@ -57,22 +57,22 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; slli a2,a0,32 -; srli a3,a2,32 +; slli a0,a0,32 +; srli a0,a0,32 ; slli a1,a1,32 -; srli a4,a1,32 -; add a2,a3,a4 -; lw a0,0(a2) +; srli a1,a1,32 +; add a0,a0,a1 +; lw a0,0(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x20 -; srli a3, a2, 0x20 +; slli a0, a0, 0x20 +; srli a0, a0, 0x20 ; slli a1, a1, 0x20 -; srli a4, a1, 0x20 -; add a2, a3, a4 -; lw a0, 0(a2) ; trap: heap_oob +; srli a1, a1, 0x20 +; add a0, a0, a1 +; lw a0, 0(a0) ; trap: heap_oob ; ret function %f8(i64, i32) -> i32 { @@ -90,18 +90,18 @@ block0(v0: i64, v1: i32): ; block0: ; sext.w a1,a1 ; addi a1,a1,32 -; add a1,a1,a0 -; add a1,a1,a1 -; lw a0,4(a1) +; add a0,a1,a0 +; add a0,a0,a0 +; lw a0,4(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; sext.w a1, a1 ; addi a1, a1, 0x20 -; add a1, a1, a0 -; add a1, a1, a1 -; lw a0, 4(a1) ; trap: heap_oob +; add a0, a1, a0 +; add a0, a0, a0 +; lw a0, 4(a0) ; trap: heap_oob ; ret function %f9(i64, i64, i64) -> i32 { @@ -164,14 +164,14 @@ block0: ; VCode: ; block0: -; li a1,1234 -; lw a0,0(a1) +; li a0,1234 +; lw a0,0(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a1, zero, 0x4d2 -; lw a0, 0(a1) ; trap: heap_oob +; addi a0, zero, 0x4d2 +; lw a0, 0(a0) ; trap: heap_oob ; ret function %f11(i64) -> i32 { @@ -242,14 +242,14 @@ block0(v0: i32): ; VCode: ; block0: -; sext.w a3,a0 -; lw a0,0(a3) +; sext.w a0,a0 +; lw a0,0(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a3, a0 -; lw a0, 0(a3) ; trap: heap_oob +; sext.w a0, a0 +; lw a0, 0(a0) ; trap: heap_oob ; ret function %f15(i32, i32) -> i32 { @@ -287,20 +287,20 @@ block0(v0: i64, v1: i64, v2: i64): ; VCode: ; block0: -; lui a3,-1 -; addi a5,a3,-2 -; slli a3,a5,32 -; srli a5,a3,32 -; lh a0,0(a5) +; lui a0,-1 +; addi a0,a0,-2 +; slli a0,a0,32 +; srli a0,a0,32 +; lh a0,0(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lui a3, 0xfffff -; addi a5, a3, -2 -; slli a3, a5, 0x20 -; srli a5, a3, 0x20 -; lh a0, 0(a5) ; trap: heap_oob +; lui a0, 0xfffff +; addi a0, a0, -2 +; slli a0, a0, 0x20 +; srli a0, a0, 0x20 +; lh a0, 0(a0) ; trap: heap_oob ; ret function %f19(i64, i64, i64) -> i32 { @@ -313,20 +313,20 @@ block0(v0: i64, v1: i64, v2: i64): ; VCode: ; block0: -; lui a3,1 -; addi a5,a3,2 -; slli a3,a5,32 -; srli a5,a3,32 -; lh a0,0(a5) +; lui a0,1 +; addi a0,a0,2 +; slli a0,a0,32 +; srli a0,a0,32 +; lh a0,0(a0) ; ret ; ; 
Disassembled: ; block0: ; offset 0x0 -; lui a3, 1 -; addi a5, a3, 2 -; slli a3, a5, 0x20 -; srli a5, a3, 0x20 -; lh a0, 0(a5) ; trap: heap_oob +; lui a0, 1 +; addi a0, a0, 2 +; slli a0, a0, 0x20 +; srli a0, a0, 0x20 +; lh a0, 0(a0) ; trap: heap_oob ; ret function %f20(i64, i64, i64) -> i32 { @@ -339,18 +339,18 @@ block0(v0: i64, v1: i64, v2: i64): ; VCode: ; block0: -; lui a2,-1 -; addi a4,a2,-2 -; sext.w a3,a4 -; lh a0,0(a3) +; lui a0,-1 +; addi a0,a0,-2 +; sext.w a0,a0 +; lh a0,0(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lui a2, 0xfffff -; addi a4, a2, -2 -; sext.w a3, a4 -; lh a0, 0(a3) ; trap: heap_oob +; lui a0, 0xfffff +; addi a0, a0, -2 +; sext.w a0, a0 +; lh a0, 0(a0) ; trap: heap_oob ; ret function %f21(i64, i64, i64) -> i32 { @@ -363,18 +363,18 @@ block0(v0: i64, v1: i64, v2: i64): ; VCode: ; block0: -; lui a2,1 -; addi a4,a2,2 -; sext.w a3,a4 -; lh a0,0(a3) +; lui a0,1 +; addi a0,a0,2 +; sext.w a0,a0 +; lh a0,0(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lui a2, 1 -; addi a4, a2, 2 -; sext.w a3, a4 -; lh a0, 0(a3) ; trap: heap_oob +; lui a0, 1 +; addi a0, a0, 2 +; sext.w a0, a0 +; lh a0, 0(a0) ; trap: heap_oob ; ret function %i128(i64) -> i128 { @@ -513,20 +513,22 @@ block0(v0: i32): ; VCode: ; block0: -; sext.w a4,a0 -; ld a0,0(a4) -; ld a1,8(a4) -; sd a0,0(a4) -; sd a1,8(a4) +; sext.w a0,a0 +; ld a2,0(a0) +; ld a1,8(a0) +; sd a2,0(a0) +; sd a1,8(a0) +; mv a0,a2 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a4, a0 -; ld a0, 0(a4) ; trap: heap_oob -; ld a1, 8(a4) ; trap: heap_oob -; sd a0, 0(a4) ; trap: heap_oob -; sd a1, 8(a4) ; trap: heap_oob +; sext.w a0, a0 +; ld a2, 0(a0) ; trap: heap_oob +; ld a1, 8(a0) ; trap: heap_oob +; sd a2, 0(a0) ; trap: heap_oob +; sd a1, 8(a0) ; trap: heap_oob +; mv a0, a2 ; ret function %i128_32bit_sextend(i64, i32) -> i128 { @@ -543,22 +545,22 @@ block0(v0: i64, v1: i32): ; block0: ; sext.w a1,a1 ; add a0,a0,a1 -; ld a5,24(a0) +; ld a2,24(a0) ; ld a1,32(a0) -; sd a5,24(a0) +; sd a2,24(a0) ; sd a1,32(a0) -; mv a0,a5 +; mv a0,a2 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; sext.w a1, a1 ; add a0, a0, a1 -; ld a5, 0x18(a0) ; trap: heap_oob +; ld a2, 0x18(a0) ; trap: heap_oob ; ld a1, 0x20(a0) ; trap: heap_oob -; sd a5, 0x18(a0) ; trap: heap_oob +; sd a2, 0x18(a0) ; trap: heap_oob ; sd a1, 0x20(a0) ; trap: heap_oob -; mv a0, a5 +; mv a0, a2 ; ret function %load_from_get_stack_pointer() -> i64 { diff --git a/cranelift/filetests/filetests/isa/riscv64/arithmetic.clif b/cranelift/filetests/filetests/isa/riscv64/arithmetic.clif index bb44ba6a25ba..17063c3397de 100644 --- a/cranelift/filetests/filetests/isa/riscv64/arithmetic.clif +++ b/cranelift/filetests/filetests/isa/riscv64/arithmetic.clif @@ -91,11 +91,11 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: ; trap_if int_divz##(a1 eq zero) -; lui a4,1 -; slli a2,a4,51 +; lui a2,1 +; slli a2,a2,51 ; xor a2,a0,a2 -; not a4,a1 -; or a2,a2,a4 +; not a3,a1 +; or a2,a2,a3 ; trap_if int_ovf##(a2 eq zero) ; div a0,a0,a1 ; ret @@ -104,11 +104,11 @@ block0(v0: i64, v1: i64): ; block0: ; offset 0x0 ; bnez a1, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_divz -; lui a4, 1 -; slli a2, a4, 0x33 +; lui a2, 1 +; slli a2, a2, 0x33 ; xor a2, a0, a2 -; not a4, a1 -; or a2, a2, a4 +; not a3, a1 +; or a2, a2, a3 ; bnez a2, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf ; div a0, a0, a1 @@ -123,14 +123,14 @@ block0(v0: i64): ; VCode: ; block0: -; li a3,2 -; div a0,a0,a3 +; li a1,2 +; div a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 2 -; div a0, a0, a3 +; addi 
a1, zero, 2 +; div a0, a0, a1 ; ret function %f8(i64, i64) -> i64 { @@ -161,14 +161,14 @@ block0(v0: i64): ; VCode: ; block0: -; li a3,2 -; divu a0,a0,a3 +; li a1,2 +; divu a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 2 -; divu a0, a0, a3 +; addi a1, zero, 2 +; divu a0, a0, a1 ; ret function %f10(i64, i64) -> i64 { @@ -217,30 +217,30 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; sext.w a3,a0 -; sext.w a5,a1 -; trap_if int_divz##(a5 eq zero) +; sext.w a0,a0 +; sext.w a1,a1 +; trap_if int_divz##(a1 eq zero) ; lui a2,-524288 -; xor a4,a3,a2 -; not a0,a5 -; or a2,a4,a0 -; trap_if int_ovf##(a2 eq zero) -; divw a0,a3,a5 +; xor a2,a0,a2 +; not a3,a1 +; or a4,a2,a3 +; trap_if int_ovf##(a4 eq zero) +; divw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a3, a0 -; sext.w a5, a1 -; bnez a5, 8 +; sext.w a0, a0 +; sext.w a1, a1 +; bnez a1, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_divz ; lui a2, 0x80000 -; xor a4, a3, a2 -; not a0, a5 -; or a2, a4, a0 -; bnez a2, 8 +; xor a2, a0, a2 +; not a3, a1 +; or a4, a2, a3 +; bnez a4, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf -; divw a0, a3, a5 +; divw a0, a0, a1 ; ret function %f13(i32) -> i32 { @@ -252,14 +252,14 @@ block0(v0: i32): ; VCode: ; block0: -; li a3,2 -; divw a0,a0,a3 +; li a1,2 +; divw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 2 -; divw a0, a0, a3 +; addi a1, zero, 2 +; divw a0, a0, a1 ; ret function %f14(i32, i32) -> i32 { @@ -270,19 +270,19 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; slli a3,a1,32 -; srli a5,a3,32 -; trap_if int_divz##(a5 eq zero) -; divuw a0,a0,a5 +; slli a1,a1,32 +; srli a1,a1,32 +; trap_if int_divz##(a1 eq zero) +; divuw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a1, 0x20 -; srli a5, a3, 0x20 -; bnez a5, 8 +; slli a1, a1, 0x20 +; srli a1, a1, 0x20 +; bnez a1, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_divz -; divuw a0, a0, a5 +; divuw a0, a0, a1 ; ret function %f15(i32) -> i32 { @@ -294,14 +294,14 @@ block0(v0: i32): ; VCode: ; block0: -; li a3,2 -; divuw a0,a0,a3 +; li a1,2 +; divuw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 2 -; divuw a0, a0, a3 +; addi a1, zero, 2 +; divuw a0, a0, a1 ; ret function %f16(i32, i32) -> i32 { @@ -312,17 +312,17 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; sext.w a3,a1 -; trap_if int_divz##(a3 eq zero) -; remw a0,a0,a3 +; sext.w a1,a1 +; trap_if int_divz##(a1 eq zero) +; remw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a3, a1 -; bnez a3, 8 +; sext.w a1, a1 +; bnez a1, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_divz -; remw a0, a0, a3 +; remw a0, a0, a1 ; ret function %f17(i32, i32) -> i32 { @@ -333,19 +333,19 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; slli a3,a1,32 -; srli a5,a3,32 -; trap_if int_divz##(a5 eq zero) -; remuw a0,a0,a5 +; slli a1,a1,32 +; srli a1,a1,32 +; trap_if int_divz##(a1 eq zero) +; remuw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a1, 0x20 -; srli a5, a3, 0x20 -; bnez a5, 8 +; slli a1, a1, 0x20 +; srli a1, a1, 0x20 +; bnez a1, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_divz -; remuw a0, a0, a5 +; remuw a0, a0, a1 ; ret function %f18(i64, i64) -> i64 { @@ -404,14 +404,14 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; not a4,a1 -; and a0,a0,a4 +; not a1,a1 +; and a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; not a4, a1 -; and a0, a0, a4 +; not a1, a1 +; and a0, a0, a1 ; ret function %f22(i64, i64) -> i64 { @@ -422,14 +422,14 @@ block0(v0: 
i64, v1: i64): ; VCode: ; block0: -; not a4,a1 -; or a0,a0,a4 +; not a1,a1 +; or a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; not a4, a1 -; or a0, a0, a4 +; not a1, a1 +; or a0, a0, a1 ; ret function %f23(i64, i64) -> i64 { @@ -440,14 +440,14 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; not a4,a1 -; xor a0,a0,a4 +; not a1,a1 +; xor a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; not a4, a1 -; xor a0, a0, a4 +; not a1, a1 +; xor a0, a0, a1 ; ret function %f24(i64, i64) -> i64 { @@ -476,14 +476,14 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; slliw a4,a0,21 -; subw a0,a1,a4 +; slliw a0,a0,21 +; subw a0,a1,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slliw a4, a0, 0x15 -; subw a0, a1, a4 +; slliw a0, a0, 0x15 +; subw a0, a1, a0 ; ret function %f26(i32) -> i32 { @@ -546,14 +546,14 @@ block0(v0: i64): ; VCode: ; block0: -; li a1,1 -; sub a0,zero,a1 +; li a0,1 +; sub a0,zero,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a1, zero, 1 -; neg a0, a1 +; addi a0, zero, 1 +; neg a0, a0 ; ret function %add_i128(i128, i128) -> i128 { @@ -566,16 +566,16 @@ block0(v0: i128, v1: i128): ; block0: ; add a0,a0,a2 ; sltu a2,a0,a2 -; add a3,a1,a3 -; add a1,a3,a2 +; add a1,a1,a3 +; add a1,a1,a2 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; add a0, a0, a2 ; sltu a2, a0, a2 -; add a3, a1, a3 -; add a1, a3, a2 +; add a1, a1, a3 +; add a1, a1, a2 ; ret function %sub_i128(i128, i128) -> i128 { @@ -586,20 +586,20 @@ block0(v0: i128, v1: i128): ; VCode: ; block0: -; sub a5,a0,a2 -; sltu a2,a0,a5 -; sub a3,a1,a3 -; sub a1,a3,a2 -; mv a0,a5 +; sub a4,a0,a2 +; sltu a2,a0,a4 +; sub a0,a1,a3 +; sub a1,a0,a2 +; mv a0,a4 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sub a5, a0, a2 -; sltu a2, a0, a5 -; sub a3, a1, a3 -; sub a1, a3, a2 -; mv a0, a5 +; sub a4, a0, a2 +; sltu a2, a0, a4 +; sub a0, a1, a3 +; sub a1, a0, a2 +; mv a0, a4 ; ret function %add_mul_2(i32, i32, i32) -> i32 { @@ -611,14 +611,14 @@ block0(v0: i32, v1: i32, v2: i32): ; VCode: ; block0: -; mulw a5,a1,a2 -; addw a0,a5,a0 +; mulw a1,a1,a2 +; addw a0,a1,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mulw a5, a1, a2 -; addw a0, a5, a0 +; mulw a1, a1, a2 +; addw a0, a1, a0 ; ret function %msub_i32(i32, i32, i32) -> i32 { @@ -630,14 +630,14 @@ block0(v0: i32, v1: i32, v2: i32): ; VCode: ; block0: -; mulw a5,a1,a2 -; subw a0,a0,a5 +; mulw a1,a1,a2 +; subw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mulw a5, a1, a2 -; subw a0, a0, a5 +; mulw a1, a1, a2 +; subw a0, a0, a1 ; ret function %msub_i64(i64, i64, i64) -> i64 { @@ -649,14 +649,14 @@ block0(v0: i64, v1: i64, v2: i64): ; VCode: ; block0: -; mul a5,a1,a2 -; sub a0,a0,a5 +; mul a1,a1,a2 +; sub a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mul a5, a1, a2 -; sub a0, a0, a5 +; mul a1, a1, a2 +; sub a0, a0, a1 ; ret function %imul_sub_i32(i32, i32, i32) -> i32 { @@ -668,14 +668,14 @@ block0(v0: i32, v1: i32, v2: i32): ; VCode: ; block0: -; mulw a5,a1,a2 -; subw a0,a5,a0 +; mulw a1,a1,a2 +; subw a0,a1,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mulw a5, a1, a2 -; subw a0, a5, a0 +; mulw a1, a1, a2 +; subw a0, a1, a0 ; ret function %imul_sub_i64(i64, i64, i64) -> i64 { @@ -687,14 +687,14 @@ block0(v0: i64, v1: i64, v2: i64): ; VCode: ; block0: -; mul a5,a1,a2 -; sub a0,a5,a0 +; mul a1,a1,a2 +; sub a0,a1,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mul a5, a1, a2 -; sub a0, a5, a0 +; mul a1, a1, a2 +; sub a0, a1, a0 ; ret function %srem_const (i64) -> i64 { @@ -706,14 +706,14 @@ block0(v0: i64): ; VCode: ; 
block0: -; li a3,2 -; rem a0,a0,a3 +; li a1,2 +; rem a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 2 -; rem a0, a0, a3 +; addi a1, zero, 2 +; rem a0, a0, a1 ; ret function %urem_const (i64) -> i64 { @@ -725,14 +725,14 @@ block0(v0: i64): ; VCode: ; block0: -; li a3,2 -; remu a0,a0,a3 +; li a1,2 +; remu a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 2 -; remu a0, a0, a3 +; addi a1, zero, 2 +; remu a0, a0, a1 ; ret function %sdiv_minus_one(i64) -> i64 { @@ -744,30 +744,30 @@ block0(v0: i64): ; VCode: ; block0: -; li a2,-1 -; trap_if int_divz##(a2 eq zero) -; lui a4,1 -; slli a1,a4,51 -; xor a3,a0,a1 -; not a4,a2 -; or a1,a3,a4 -; trap_if int_ovf##(a1 eq zero) -; div a0,a0,a2 +; li a1,-1 +; trap_if int_divz##(a1 eq zero) +; lui a2,1 +; slli a2,a2,51 +; xor a2,a0,a2 +; not a3,a1 +; or a2,a2,a3 +; trap_if int_ovf##(a2 eq zero) +; div a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a2, zero, -1 -; bnez a2, 8 -; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_divz -; lui a4, 1 -; slli a1, a4, 0x33 -; xor a3, a0, a1 -; not a4, a2 -; or a1, a3, a4 +; addi a1, zero, -1 ; bnez a1, 8 +; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_divz +; lui a2, 1 +; slli a2, a2, 0x33 +; xor a2, a0, a2 +; not a3, a1 +; or a2, a2, a3 +; bnez a2, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf -; div a0, a0, a2 +; div a0, a0, a1 ; ret function %i8_iadd_const_neg1(i8) -> i8 { @@ -812,14 +812,14 @@ block0(v0: i8): ; VCode: ; block0: -; li a3,97 -; mulw a0,a0,a3 +; li a1,97 +; mulw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 0x61 -; mulw a0, a0, a3 +; addi a1, zero, 0x61 +; mulw a0, a0, a1 ; ret function %imul_i16_const(i16) -> i16 { @@ -830,14 +830,14 @@ block0(v0: i16): ; VCode: ; block0: -; li a3,97 -; mulw a0,a0,a3 +; li a1,97 +; mulw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 0x61 -; mulw a0, a0, a3 +; addi a1, zero, 0x61 +; mulw a0, a0, a1 ; ret function %imul_i32_const(i32) -> i32 { @@ -848,14 +848,14 @@ block0(v0: i32): ; VCode: ; block0: -; li a3,97 -; mulw a0,a0,a3 +; li a1,97 +; mulw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 0x61 -; mulw a0, a0, a3 +; addi a1, zero, 0x61 +; mulw a0, a0, a1 ; ret function %imul_i64_const(i64) -> i64 { @@ -866,13 +866,13 @@ block0(v0: i64): ; VCode: ; block0: -; li a3,97 -; mul a0,a0,a3 +; li a1,97 +; mul a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 0x61 -; mul a0, a0, a3 +; addi a1, zero, 0x61 +; mul a0, a0, a1 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/atomic-rmw.clif b/cranelift/filetests/filetests/isa/riscv64/atomic-rmw.clif index 59053bd2cc8f..f963a7b9a2b8 100644 --- a/cranelift/filetests/filetests/isa/riscv64/atomic-rmw.clif +++ b/cranelift/filetests/filetests/isa/riscv64/atomic-rmw.clif @@ -10,12 +10,12 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; amoadd.d.aqrl a3,a1,(a0) +; amoadd.d.aqrl a0,a1,(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; amoadd.d.aqrl a3, a1, (a0) ; trap: heap_oob +; amoadd.d.aqrl a0, a1, (a0) ; trap: heap_oob ; ret function %atomic_rmw_add_i32(i64, i32) { @@ -26,12 +26,12 @@ block0(v0: i64, v1: i32): ; VCode: ; block0: -; amoadd.w.aqrl a3,a1,(a0) +; amoadd.w.aqrl a0,a1,(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; amoadd.w.aqrl a3, a1, (a0) ; trap: heap_oob +; amoadd.w.aqrl a0, a1, (a0) ; trap: heap_oob ; ret function %atomic_rmw_sub_i64(i64, i64) { @@ -42,14 +42,14 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; sub a3,zero,a1 
-; amoadd.d.aqrl a5,a3,(a0) +; sub a1,zero,a1 +; amoadd.d.aqrl a0,a1,(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; neg a3, a1 -; amoadd.d.aqrl a5, a3, (a0) ; trap: heap_oob +; neg a1, a1 +; amoadd.d.aqrl a0, a1, (a0) ; trap: heap_oob ; ret function %atomic_rmw_sub_i32(i64, i32) { @@ -60,14 +60,14 @@ block0(v0: i64, v1: i32): ; VCode: ; block0: -; sub a3,zero,a1 -; amoadd.w.aqrl a5,a3,(a0) +; sub a1,zero,a1 +; amoadd.w.aqrl a0,a1,(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; neg a3, a1 -; amoadd.w.aqrl a5, a3, (a0) ; trap: heap_oob +; neg a1, a1 +; amoadd.w.aqrl a0, a1, (a0) ; trap: heap_oob ; ret function %atomic_rmw_and_i64(i64, i64) { @@ -78,12 +78,12 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; amoand.d.aqrl a3,a1,(a0) +; amoand.d.aqrl a0,a1,(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; amoand.d.aqrl a3, a1, (a0) ; trap: heap_oob +; amoand.d.aqrl a0, a1, (a0) ; trap: heap_oob ; ret function %atomic_rmw_and_i32(i64, i32) { @@ -94,12 +94,12 @@ block0(v0: i64, v1: i32): ; VCode: ; block0: -; amoand.w.aqrl a3,a1,(a0) +; amoand.w.aqrl a0,a1,(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; amoand.w.aqrl a3, a1, (a0) ; trap: heap_oob +; amoand.w.aqrl a0, a1, (a0) ; trap: heap_oob ; ret function %atomic_rmw_nand_i64(i64, i64) { @@ -110,16 +110,20 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; atomic_rmw.i64 nand a3,a1,(a0)##t0=a4 offset=zero +; mv a2,a0 +; mv a3,a1 +; atomic_rmw.i64 nand a1,a3,(a2)##t0=a0 offset=zero ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lr.d.aqrl a3, (a0) ; trap: heap_oob -; and a4, a1, a3 -; not a4, a4 -; sc.d.aqrl a4, a4, (a0) ; trap: heap_oob -; bnez a4, -0x10 +; mv a2, a0 +; mv a3, a1 +; lr.d.aqrl a1, (a2) ; trap: heap_oob +; and a0, a3, a1 +; not a0, a0 +; sc.d.aqrl a0, a0, (a2) ; trap: heap_oob +; bnez a0, -0x10 ; ret function %atomic_rmw_nand_i32(i64, i32) { @@ -130,16 +134,20 @@ block0(v0: i64, v1: i32): ; VCode: ; block0: -; atomic_rmw.i32 nand a3,a1,(a0)##t0=a4 offset=zero +; mv a2,a0 +; mv a3,a1 +; atomic_rmw.i32 nand a1,a3,(a2)##t0=a0 offset=zero ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lr.w.aqrl a3, (a0) ; trap: heap_oob -; and a4, a1, a3 -; not a4, a4 -; sc.w.aqrl a4, a4, (a0) ; trap: heap_oob -; bnez a4, -0x10 +; mv a2, a0 +; mv a3, a1 +; lr.w.aqrl a1, (a2) ; trap: heap_oob +; and a0, a3, a1 +; not a0, a0 +; sc.w.aqrl a0, a0, (a2) ; trap: heap_oob +; bnez a0, -0x10 ; ret function %atomic_rmw_or_i64(i64, i64) { @@ -150,12 +158,12 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; amoor.d.aqrl a3,a1,(a0) +; amoor.d.aqrl a0,a1,(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; amoor.d.aqrl a3, a1, (a0) ; trap: heap_oob +; amoor.d.aqrl a0, a1, (a0) ; trap: heap_oob ; ret function %atomic_rmw_or_i32(i64, i32) { @@ -166,12 +174,12 @@ block0(v0: i64, v1: i32): ; VCode: ; block0: -; amoor.w.aqrl a3,a1,(a0) +; amoor.w.aqrl a0,a1,(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; amoor.w.aqrl a3, a1, (a0) ; trap: heap_oob +; amoor.w.aqrl a0, a1, (a0) ; trap: heap_oob ; ret function %atomic_rmw_xor_i64(i64, i64) { @@ -182,12 +190,12 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; amoxor.d.aqrl a3,a1,(a0) +; amoxor.d.aqrl a0,a1,(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; amoxor.d.aqrl a3, a1, (a0) ; trap: heap_oob +; amoxor.d.aqrl a0, a1, (a0) ; trap: heap_oob ; ret function %atomic_rmw_xor_i32(i64, i32) { @@ -198,12 +206,12 @@ block0(v0: i64, v1: i32): ; VCode: ; block0: -; amoxor.w.aqrl a3,a1,(a0) +; amoxor.w.aqrl a0,a1,(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; amoxor.w.aqrl 
a3, a1, (a0) ; trap: heap_oob +; amoxor.w.aqrl a0, a1, (a0) ; trap: heap_oob ; ret function %atomic_rmw_smax_i64(i64, i64) { @@ -214,12 +222,12 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; amomax.d.aqrl a3,a1,(a0) +; amomax.d.aqrl a0,a1,(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; amomax.d.aqrl a3, a1, (a0) ; trap: heap_oob +; amomax.d.aqrl a0, a1, (a0) ; trap: heap_oob ; ret function %atomic_rmw_smax_i32(i64, i32) { @@ -230,12 +238,12 @@ block0(v0: i64, v1: i32): ; VCode: ; block0: -; amomax.w.aqrl a3,a1,(a0) +; amomax.w.aqrl a0,a1,(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; amomax.w.aqrl a3, a1, (a0) ; trap: heap_oob +; amomax.w.aqrl a0, a1, (a0) ; trap: heap_oob ; ret function %atomic_rmw_umax_i64(i64, i64) { @@ -246,12 +254,12 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; amomaxu.d.aqrl a3,a1,(a0) +; amomaxu.d.aqrl a0,a1,(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; amomaxu.d.aqrl a3, a1, (a0) ; trap: heap_oob +; amomaxu.d.aqrl a0, a1, (a0) ; trap: heap_oob ; ret function %atomic_rmw_umax_i32(i64, i32) { @@ -262,12 +270,12 @@ block0(v0: i64, v1: i32): ; VCode: ; block0: -; amomaxu.w.aqrl a3,a1,(a0) +; amomaxu.w.aqrl a0,a1,(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; amomaxu.w.aqrl a3, a1, (a0) ; trap: heap_oob +; amomaxu.w.aqrl a0, a1, (a0) ; trap: heap_oob ; ret function %atomic_rmw_smin_i64(i64, i64) { @@ -278,12 +286,12 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; amomin.d.aqrl a3,a1,(a0) +; amomin.d.aqrl a0,a1,(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; amomin.d.aqrl a3, a1, (a0) ; trap: heap_oob +; amomin.d.aqrl a0, a1, (a0) ; trap: heap_oob ; ret function %atomic_rmw_smin_i32(i64, i32) { @@ -294,12 +302,12 @@ block0(v0: i64, v1: i32): ; VCode: ; block0: -; amomin.w.aqrl a3,a1,(a0) +; amomin.w.aqrl a0,a1,(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; amomin.w.aqrl a3, a1, (a0) ; trap: heap_oob +; amomin.w.aqrl a0, a1, (a0) ; trap: heap_oob ; ret function %atomic_rmw_umin_i64(i64, i64) { @@ -310,12 +318,12 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; amominu.d.aqrl a3,a1,(a0) +; amominu.d.aqrl a0,a1,(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; amominu.d.aqrl a3, a1, (a0) ; trap: heap_oob +; amominu.d.aqrl a0, a1, (a0) ; trap: heap_oob ; ret function %atomic_rmw_umin_i32(i64, i32) { @@ -326,11 +334,11 @@ block0(v0: i64, v1: i32): ; VCode: ; block0: -; amominu.w.aqrl a3,a1,(a0) +; amominu.w.aqrl a0,a1,(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; amominu.w.aqrl a3, a1, (a0) ; trap: heap_oob +; amominu.w.aqrl a0, a1, (a0) ; trap: heap_oob ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/atomic_load.clif b/cranelift/filetests/filetests/isa/riscv64/atomic_load.clif index a81104594652..1a7b91d6a8c2 100644 --- a/cranelift/filetests/filetests/isa/riscv64/atomic_load.clif +++ b/cranelift/filetests/filetests/isa/riscv64/atomic_load.clif @@ -47,17 +47,17 @@ block0(v0: i64): ; VCode: ; block0: -; atomic_load.i32 a4,(a0) -; slli a3,a4,32 -; srli a0,a3,32 +; atomic_load.i32 a0,(a0) +; slli a0,a0,32 +; srli a0,a0,32 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; fence rw, rw -; lw a4, 0(a0) ; trap: heap_oob +; lw a0, 0(a0) ; trap: heap_oob ; fence r, rw -; slli a3, a4, 0x20 -; srli a0, a3, 0x20 +; slli a0, a0, 0x20 +; srli a0, a0, 0x20 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/atomic_store.clif b/cranelift/filetests/filetests/isa/riscv64/atomic_store.clif index 3f69cb1a9dbc..cee91ef108ba 100644 --- a/cranelift/filetests/filetests/isa/riscv64/atomic_store.clif +++ 
b/cranelift/filetests/filetests/isa/riscv64/atomic_store.clif @@ -29,16 +29,16 @@ block0(v0: i64): ; VCode: ; block0: -; load_ext_name_near a2,%sym+0 -; atomic_store.i64 a0,(a2) +; load_ext_name_near a1,%sym+0 +; atomic_store.i64 a0,(a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; auipc a2, 0 ; reloc_external RiscvPCRelHi20 %sym 0 -; mv a2, a2 ; reloc_external RiscvPCRelLo12I func+0 0 +; auipc a1, 0 ; reloc_external RiscvPCRelHi20 %sym 0 +; mv a1, a1 ; reloc_external RiscvPCRelLo12I func+0 0 ; fence rw, w -; sd a0, 0(a2) ; trap: heap_oob +; sd a0, 0(a1) ; trap: heap_oob ; ret function %atomic_store_imm_i64(i64) { @@ -50,17 +50,17 @@ block0(v0: i64): ; VCode: ; block0: -; lui a2,3 -; addi a4,a2,57 -; atomic_store.i64 a4,(a0) +; lui a1,3 +; addi a1,a1,57 +; atomic_store.i64 a1,(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lui a2, 3 -; addi a4, a2, 0x39 +; lui a1, 3 +; addi a1, a1, 0x39 ; fence rw, w -; sd a4, 0(a0) ; trap: heap_oob +; sd a1, 0(a0) ; trap: heap_oob ; ret function %atomic_store_i32(i32, i64) { @@ -90,16 +90,16 @@ block0(v0: i32): ; VCode: ; block0: -; load_ext_name_near a2,%sym+0 -; atomic_store.i32 a0,(a2) +; load_ext_name_near a1,%sym+0 +; atomic_store.i32 a0,(a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; auipc a2, 0 ; reloc_external RiscvPCRelHi20 %sym 0 -; mv a2, a2 ; reloc_external RiscvPCRelLo12I func+0 0 +; auipc a1, 0 ; reloc_external RiscvPCRelHi20 %sym 0 +; mv a1, a1 ; reloc_external RiscvPCRelLo12I func+0 0 ; fence rw, w -; sw a0, 0(a2) ; trap: heap_oob +; sw a0, 0(a1) ; trap: heap_oob ; ret function %atomic_store_imm_i32(i64) { @@ -111,16 +111,16 @@ block0(v0: i64): ; VCode: ; block0: -; lui a2,3 -; addi a4,a2,57 -; atomic_store.i32 a4,(a0) +; lui a1,3 +; addi a1,a1,57 +; atomic_store.i32 a1,(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lui a2, 3 -; addi a4, a2, 0x39 +; lui a1, 3 +; addi a1, a1, 0x39 ; fence rw, w -; sw a4, 0(a0) ; trap: heap_oob +; sw a1, 0(a0) ; trap: heap_oob ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/bitcast-float.clif b/cranelift/filetests/filetests/isa/riscv64/bitcast-float.clif index d86acbd026bd..f60d3f77c0ab 100644 --- a/cranelift/filetests/filetests/isa/riscv64/bitcast-float.clif +++ b/cranelift/filetests/filetests/isa/riscv64/bitcast-float.clif @@ -26,16 +26,16 @@ block0(v0: i16): ; VCode: ; block0: -; lui a2,-16 -; or a4,a0,a2 -; fmv.w.x fa0,a4 +; lui a1,-16 +; or a0,a0,a1 +; fmv.w.x fa0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lui a2, 0xffff0 -; or a4, a0, a2 -; fmv.w.x fa0, a4 +; lui a1, 0xffff0 +; or a0, a0, a1 +; fmv.w.x fa0, a0 ; ret function %bitcast_f32_to_i32(f32) -> i32 { diff --git a/cranelift/filetests/filetests/isa/riscv64/bitcast-scalar-vector.clif b/cranelift/filetests/filetests/isa/riscv64/bitcast-scalar-vector.clif index bffc806eb3cc..c6605f74fa90 100644 --- a/cranelift/filetests/filetests/isa/riscv64/bitcast-scalar-vector.clif +++ b/cranelift/filetests/filetests/isa/riscv64/bitcast-scalar-vector.clif @@ -17,8 +17,8 @@ block0(v0: i64x2): ; block0: ; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.x.s a0,v8 #avl=2, #vtype=(e64, m1, ta, ma) -; vslidedown.vi v12,v8,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.x.s a1,v12 #avl=2, #vtype=(e64, m1, ta, ma) +; vslidedown.vi v8,v8,1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.x.s a1,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -36,8 +36,8 @@ block0(v0: i64x2): ; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0x25, 0x80, 0x42 -; .byte 0x57, 
0xb6, 0x80, 0x3e -; .byte 0xd7, 0x25, 0xc0, 0x42 +; .byte 0x57, 0xb4, 0x80, 0x3e +; .byte 0xd7, 0x25, 0x80, 0x42 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -51,20 +51,20 @@ block0(v0: i128): ; VCode: ; block0: -; vmv.s.x v12,a2 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv1r.v v14,v12 -; vslide1up.vx v14,v12,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.s.x v8,a2 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv1r.v v9,v8 +; vslide1up.vx v9,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x66, 0x06, 0x42 -; .byte 0x57, 0x37, 0xc0, 0x9e -; .byte 0x57, 0xe7, 0xc5, 0x3a +; .byte 0x57, 0x64, 0x06, 0x42 +; .byte 0xd7, 0x34, 0x80, 0x9e +; .byte 0xd7, 0xe4, 0x85, 0x3a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ret function %bitcast_vec_to_i64(i32x2) -> i64 { @@ -111,16 +111,16 @@ block0(v0: i64): ; VCode: ; block0: -; vmv.s.x v11,a1 #avl=1, #vtype=(e64, m1, ta, ma) -; vse8.v v11,0(a0) #avl=8, #vtype=(e8, m1, ta, ma) +; vmv.s.x v8,a1 #avl=1, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=8, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0xf0, 0x80, 0xcd -; .byte 0xd7, 0xe5, 0x05, 0x42 +; .byte 0x57, 0xe4, 0x05, 0x42 ; .byte 0x57, 0x70, 0x04, 0xcc -; .byte 0xa7, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %bitcast_vec_to_f128(i64x2) -> f128 { @@ -137,8 +137,8 @@ block0(v0: i64x2): ; block0: ; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.x.s a0,v8 #avl=2, #vtype=(e64, m1, ta, ma) -; vslidedown.vi v12,v8,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.x.s a1,v12 #avl=2, #vtype=(e64, m1, ta, ma) +; vslidedown.vi v8,v8,1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.x.s a1,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -156,8 +156,8 @@ block0(v0: i64x2): ; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0x25, 0x80, 0x42 -; .byte 0x57, 0xb6, 0x80, 0x3e -; .byte 0xd7, 0x25, 0xc0, 0x42 +; .byte 0x57, 0xb4, 0x80, 0x3e +; .byte 0xd7, 0x25, 0x80, 0x42 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -171,20 +171,20 @@ block0(v0: f128): ; VCode: ; block0: -; vmv.s.x v12,a2 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv1r.v v14,v12 -; vslide1up.vx v14,v12,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.s.x v8,a2 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv1r.v v9,v8 +; vslide1up.vx v9,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x66, 0x06, 0x42 -; .byte 0x57, 0x37, 0xc0, 0x9e -; .byte 0x57, 0xe7, 0xc5, 0x3a +; .byte 0x57, 0x64, 0x06, 0x42 +; .byte 0xd7, 0x34, 0x80, 0x9e +; .byte 0xd7, 0xe4, 0x85, 0x3a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ret function %bitcast_vec_to_f64(i32x2) -> f64 { @@ -231,16 +231,16 @@ block0(v0: f64): ; VCode: ; block0: -; vfmv.s.f v11,fa0 #avl=1, #vtype=(e64, m1, ta, ma) -; vse8.v v11,0(a0) #avl=8, #vtype=(e8, m1, ta, ma) +; vfmv.s.f v8,fa0 #avl=1, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=8, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0xf0, 0x80, 0xcd -; .byte 0xd7, 0x55, 0x05, 0x42 +; .byte 0x57, 0x54, 0x05, 0x42 ; .byte 0x57, 0x70, 0x04, 0xcc 
-; .byte 0xa7, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %bitcast_i16x2_to_f32(i16x2) -> f32 { @@ -287,16 +287,16 @@ block0(v0: f32): ; VCode: ; block0: -; vfmv.s.f v11,fa0 #avl=1, #vtype=(e32, m1, ta, ma) -; vse8.v v11,0(a0) #avl=4, #vtype=(e8, m1, ta, ma) +; vfmv.s.f v8,fa0 #avl=1, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=4, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0xf0, 0x00, 0xcd -; .byte 0xd7, 0x55, 0x05, 0x42 +; .byte 0x57, 0x54, 0x05, 0x42 ; .byte 0x57, 0x70, 0x02, 0xcc -; .byte 0xa7, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %bitcast_i8x2_to_f16(i8x2) -> f16 { @@ -312,9 +312,9 @@ block0(v0: i8x2): ; mv fp,sp ; block0: ; vle8.v v8,-16(incoming_arg) #avl=2, #vtype=(e8, m1, ta, ma) -; vmv.x.s a2,v8 #avl=1, #vtype=(e16, m1, ta, ma) -; lui a4,-16 -; or a0,a2,a4 +; vmv.x.s a0,v8 #avl=1, #vtype=(e16, m1, ta, ma) +; lui a1,-16 +; or a0,a0,a1 ; fmv.w.x fa0,a0 ; ld ra,8(sp) ; ld fp,0(sp) @@ -332,9 +332,9 @@ block0(v0: i8x2): ; addi t6, sp, 0x10 ; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0xf0, 0x80, 0xcc -; .byte 0x57, 0x26, 0x80, 0x42 -; lui a4, 0xffff0 -; or a0, a2, a4 +; .byte 0x57, 0x25, 0x80, 0x42 +; lui a1, 0xffff0 +; or a0, a0, a1 ; fmv.w.x fa0, a0 ; ld ra, 8(sp) ; ld s0, 0(sp) @@ -349,15 +349,15 @@ block0(v0: f16): ; VCode: ; block0: -; vfmv.s.f v11,fa0 #avl=1, #vtype=(e32, m1, ta, ma) -; vse8.v v11,0(a0) #avl=2, #vtype=(e8, m1, ta, ma) +; vfmv.s.f v8,fa0 #avl=1, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=2, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0xf0, 0x00, 0xcd -; .byte 0xd7, 0x55, 0x05, 0x42 +; .byte 0x57, 0x54, 0x05, 0x42 ; .byte 0x57, 0x70, 0x01, 0xcc -; .byte 0xa7, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/bitops-float.clif b/cranelift/filetests/filetests/isa/riscv64/bitops-float.clif index 649dbc6c5964..39c2c0a03ec2 100644 --- a/cranelift/filetests/filetests/isa/riscv64/bitops-float.clif +++ b/cranelift/filetests/filetests/isa/riscv64/bitops-float.clif @@ -22,19 +22,19 @@ block1(v4: f32): ; VCode: ; block0: ; li a0,0 -; fmv.w.x fa5,zero -; fmv.x.w a3,fa5 -; not a5,a3 -; fmv.w.x fa1,a5 -; fmv.x.w a2,fa1 -; fmv.x.w a4,fa1 -; or a1,a2,a4 -; fmv.w.x fa2,a1 -; br_table a0,[MachLabel(1),MachLabel(2)]##tmp1=a4,tmp2=a5 +; fmv.w.x fa1,zero +; fmv.x.w a1,fa1 +; not a1,a1 +; fmv.w.x fa3,a1 +; fmv.x.w a1,fa3 +; fmv.x.w a2,fa3 +; or a2,a1,a2 +; fmv.w.x fa0,a2 +; br_table a0,[MachLabel(1),MachLabel(2)]##tmp1=a2,tmp2=a1 ; block1: ; j label3 ; block2: -; fmv.d fa2,fa5 +; fmv.d fa0,fa1 ; j label3 ; block3: ; ret @@ -42,30 +42,30 @@ block1(v4: f32): ; Disassembled: ; block0: ; offset 0x0 ; mv a0, zero -; fmv.w.x fa5, zero -; fmv.x.w a3, fa5 -; not a5, a3 -; fmv.w.x fa1, a5 -; fmv.x.w a2, fa1 -; fmv.x.w a4, fa1 -; or a1, a2, a4 -; fmv.w.x fa2, a1 +; fmv.w.x fa1, zero +; fmv.x.w a1, fa1 +; not a1, a1 +; fmv.w.x fa3, a1 +; fmv.x.w a1, fa3 +; fmv.x.w a2, fa3 +; or a2, a1, a2 +; fmv.w.x fa0, a2 ; slli t6, a0, 0x20 ; srli t6, t6, 0x20 -; addi a5, zero, 1 -; bltu t6, a5, 0xc -; auipc a5, 0 -; jalr zero, a5, 0x28 -; auipc a4, 0 -; slli a5, t6, 3 -; add a4, a4, a5 -; jalr zero, a4, 0x10 -; auipc a5, 0 -; jalr zero, a5, 0xc +; addi a1, zero, 1 +; bltu t6, a1, 0xc +; auipc a1, 0 +; jalr zero, a1, 0x28 +; auipc a2, 0 +; slli a1, t6, 3 +; add a2, a2, a1 +; jalr zero, a2, 0x10 +; auipc a1, 0 +; jalr zero, a1, 0xc ; block1: ; offset 0x54 ; j 8 ; block2: ; offset 0x58 -; fmv.d fa2, fa5 +; fmv.d 
fa0, fa1 ; block3: ; offset 0x5c ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/bitops.clif b/cranelift/filetests/filetests/isa/riscv64/bitops.clif index 704d5329bd1d..f0f8b307f7e5 100644 --- a/cranelift/filetests/filetests/isa/riscv64/bitops.clif +++ b/cranelift/filetests/filetests/isa/riscv64/bitops.clif @@ -11,22 +11,22 @@ block0(v0: i8): ; VCode: ; block0: -; mv a5,a0 -; clz a0,a5##ty=i8 tmp=a2 step=a3 +; mv a3,a0 +; clz a0,a3##ty=i8 tmp=a2 step=a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a5, a0 +; mv a3, a0 ; mv a0, zero -; addi a3, zero, 8 +; addi a1, zero, 8 ; addi a2, zero, 1 ; slli a2, a2, 7 -; blez a3, 0x1c -; and t5, a2, a5 +; blez a1, 0x1c +; and t5, a2, a3 ; bne zero, t5, 0x14 ; addi a0, a0, 1 -; addi a3, a3, -1 +; addi a1, a1, -1 ; srli a2, a2, 1 ; j -0x18 ; ret @@ -39,22 +39,22 @@ block0(v0: i16): ; VCode: ; block0: -; mv a5,a0 -; clz a0,a5##ty=i16 tmp=a2 step=a3 +; mv a3,a0 +; clz a0,a3##ty=i16 tmp=a2 step=a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a5, a0 +; mv a3, a0 ; mv a0, zero -; addi a3, zero, 0x10 +; addi a1, zero, 0x10 ; addi a2, zero, 1 ; slli a2, a2, 0xf -; blez a3, 0x1c -; and t5, a2, a5 +; blez a1, 0x1c +; and t5, a2, a3 ; bne zero, t5, 0x14 ; addi a0, a0, 1 -; addi a3, a3, -1 +; addi a1, a1, -1 ; srli a2, a2, 1 ; j -0x18 ; ret @@ -67,22 +67,22 @@ block0(v0: i32): ; VCode: ; block0: -; mv a5,a0 -; clz a0,a5##ty=i32 tmp=a2 step=a3 +; mv a3,a0 +; clz a0,a3##ty=i32 tmp=a2 step=a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a5, a0 +; mv a3, a0 ; mv a0, zero -; addi a3, zero, 0x20 +; addi a1, zero, 0x20 ; addi a2, zero, 1 ; slli a2, a2, 0x1f -; blez a3, 0x1c -; and t5, a2, a5 +; blez a1, 0x1c +; and t5, a2, a3 ; bne zero, t5, 0x14 ; addi a0, a0, 1 -; addi a3, a3, -1 +; addi a1, a1, -1 ; srli a2, a2, 1 ; j -0x18 ; ret @@ -95,22 +95,22 @@ block0(v0: i64): ; VCode: ; block0: -; mv a5,a0 -; clz a0,a5##ty=i64 tmp=a2 step=a3 +; mv a3,a0 +; clz a0,a3##ty=i64 tmp=a2 step=a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a5, a0 +; mv a3, a0 ; mv a0, zero -; addi a3, zero, 0x40 +; addi a1, zero, 0x40 ; addi a2, zero, 1 ; slli a2, a2, 0x3f -; blez a3, 0x1c -; and t5, a2, a5 +; blez a1, 0x1c +; and t5, a2, a3 ; bne zero, t5, 0x14 ; addi a0, a0, 1 -; addi a3, a3, -1 +; addi a1, a1, -1 ; srli a2, a2, 1 ; j -0x18 ; ret @@ -123,41 +123,40 @@ block0(v0: i128): ; VCode: ; block0: -; clz a5,a1##ty=i64 tmp=a3 step=a4 -; clz a3,a0##ty=i64 tmp=a4 step=a2 -; select a0,a3,zero##condition=(a1 eq zero) -; add a0,a5,a0 +; clz a2,a1##ty=i64 tmp=a4 step=a3 +; clz a3,a0##ty=i64 tmp=a5 step=a4 +; select a3,a3,zero##condition=(a1 eq zero) +; add a0,a2,a3 ; li a1,0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a5, zero -; addi a4, zero, 0x40 -; addi a3, zero, 1 -; slli a3, a3, 0x3f -; blez a4, 0x1c -; and t5, a3, a1 +; mv a2, zero +; addi a3, zero, 0x40 +; addi a4, zero, 1 +; slli a4, a4, 0x3f +; blez a3, 0x1c +; and t5, a4, a1 ; bne zero, t5, 0x14 -; addi a5, a5, 1 -; addi a4, a4, -1 -; srli a3, a3, 1 +; addi a2, a2, 1 +; addi a3, a3, -1 +; srli a4, a4, 1 ; j -0x18 ; mv a3, zero -; addi a2, zero, 0x40 -; addi a4, zero, 1 -; slli a4, a4, 0x3f -; blez a2, 0x1c -; and t5, a4, a0 +; addi a4, zero, 0x40 +; addi a5, zero, 1 +; slli a5, a5, 0x3f +; blez a4, 0x1c +; and t5, a5, a0 ; bne zero, t5, 0x14 ; addi a3, a3, 1 -; addi a2, a2, -1 -; srli a4, a4, 1 +; addi a4, a4, -1 +; srli a5, a5, 1 ; j -0x18 -; mv a0, a3 ; beqz a1, 8 -; mv a0, zero -; add a0, a5, a0 +; mv a3, zero +; add a0, a2, a3 ; mv a1, zero ; ret @@ -169,34 +168,34 @@ block0(v0: 
i8): ; VCode: ; block0: -; slli a2,a0,56 -; srai a4,a2,56 -; not a0,a4 -; select a2,a0,a4##condition=(a4 slt zero) -; clz a0,a2##ty=i64 tmp=a4 step=a5 -; addi a0,a0,-57 +; slli a0,a0,56 +; srai a0,a0,56 +; not a1,a0 +; select a3,a1,a0##condition=(a0 slt zero) +; clz a2,a3##ty=i64 tmp=a0 step=a1 +; addi a0,a2,-57 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x38 -; srai a4, a2, 0x38 -; not a0, a4 -; mv a2, a0 -; bltz a4, 8 -; mv a2, a4 -; mv a0, zero -; addi a5, zero, 0x40 -; addi a4, zero, 1 -; slli a4, a4, 0x3f -; blez a5, 0x1c -; and t5, a4, a2 +; slli a0, a0, 0x38 +; srai a0, a0, 0x38 +; not a1, a0 +; mv a3, a1 +; bltz a0, 8 +; mv a3, a0 +; mv a2, zero +; addi a1, zero, 0x40 +; addi a0, zero, 1 +; slli a0, a0, 0x3f +; blez a1, 0x1c +; and t5, a0, a3 ; bne zero, t5, 0x14 -; addi a0, a0, 1 -; addi a5, a5, -1 -; srli a4, a4, 1 +; addi a2, a2, 1 +; addi a1, a1, -1 +; srli a0, a0, 1 ; j -0x18 -; addi a0, a0, -0x39 +; addi a0, a2, -0x39 ; ret function %c(i16) -> i16 { @@ -207,34 +206,34 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srai a4,a2,48 -; not a0,a4 -; select a2,a0,a4##condition=(a4 slt zero) -; clz a0,a2##ty=i64 tmp=a4 step=a5 -; addi a0,a0,-49 +; slli a0,a0,48 +; srai a0,a0,48 +; not a1,a0 +; select a3,a1,a0##condition=(a0 slt zero) +; clz a2,a3##ty=i64 tmp=a0 step=a1 +; addi a0,a2,-49 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srai a4, a2, 0x30 -; not a0, a4 -; mv a2, a0 -; bltz a4, 8 -; mv a2, a4 -; mv a0, zero -; addi a5, zero, 0x40 -; addi a4, zero, 1 -; slli a4, a4, 0x3f -; blez a5, 0x1c -; and t5, a4, a2 +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 +; not a1, a0 +; mv a3, a1 +; bltz a0, 8 +; mv a3, a0 +; mv a2, zero +; addi a1, zero, 0x40 +; addi a0, zero, 1 +; slli a0, a0, 0x3f +; blez a1, 0x1c +; and t5, a0, a3 ; bne zero, t5, 0x14 -; addi a0, a0, 1 -; addi a5, a5, -1 -; srli a4, a4, 1 +; addi a2, a2, 1 +; addi a1, a1, -1 +; srli a0, a0, 1 ; j -0x18 -; addi a0, a0, -0x31 +; addi a0, a2, -0x31 ; ret function %c(i32) -> i32 { @@ -245,32 +244,32 @@ block0(v0: i32): ; VCode: ; block0: -; sext.w a2,a0 -; not a4,a2 -; select a0,a4,a2##condition=(a2 slt zero) -; clz a4,a0##ty=i64 tmp=a2 step=a3 -; addi a0,a4,-33 +; sext.w a0,a0 +; not a1,a0 +; select a3,a1,a0##condition=(a0 slt zero) +; clz a0,a3##ty=i64 tmp=a2 step=a1 +; addi a0,a0,-33 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a2, a0 -; not a4, a2 -; mv a0, a4 -; bltz a2, 8 -; mv a0, a2 -; mv a4, zero -; addi a3, zero, 0x40 +; sext.w a0, a0 +; not a1, a0 +; mv a3, a1 +; bltz a0, 8 +; mv a3, a0 +; mv a0, zero +; addi a1, zero, 0x40 ; addi a2, zero, 1 ; slli a2, a2, 0x3f -; blez a3, 0x1c -; and t5, a2, a0 +; blez a1, 0x1c +; and t5, a2, a3 ; bne zero, t5, 0x14 -; addi a4, a4, 1 -; addi a3, a3, -1 +; addi a0, a0, 1 +; addi a1, a1, -1 ; srli a2, a2, 1 ; j -0x18 -; addi a0, a4, -0x21 +; addi a0, a0, -0x21 ; ret function %c(i64) -> i64 { @@ -281,30 +280,30 @@ block0(v0: i64): ; VCode: ; block0: -; not a2,a0 -; select a4,a2,a0##condition=(a0 slt zero) -; clz a2,a4##ty=i64 tmp=a0 step=a1 -; addi a0,a2,-1 +; not a1,a0 +; select a3,a1,a0##condition=(a0 slt zero) +; clz a0,a3##ty=i64 tmp=a2 step=a1 +; addi a0,a0,-1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; not a2, a0 -; mv a4, a2 +; not a1, a0 +; mv a3, a1 ; bltz a0, 8 -; mv a4, a0 -; mv a2, zero +; mv a3, a0 +; mv a0, zero ; addi a1, zero, 0x40 -; addi a0, zero, 1 -; slli a0, a0, 0x3f +; addi a2, zero, 1 +; slli a2, a2, 0x3f ; blez a1, 0x1c -; and t5, a0, a4 +; and t5, a2, a3 ; bne zero, t5, 0x14 -; addi a2, a2, 
1 +; addi a0, a0, 1 ; addi a1, a1, -1 -; srli a0, a0, 1 +; srli a2, a2, 1 ; j -0x18 -; addi a0, a2, -1 +; addi a0, a0, -1 ; ret function %c(i128) -> i128 { @@ -315,55 +314,52 @@ block0(v0: i128): ; VCode: ; block0: -; not a3,a0 -; select a5,a3,a0##condition=(a1 slt zero) -; not a2,a1 -; select a3,a2,a1##condition=(a1 slt zero) -; clz a1,a3##ty=i64 tmp=a2 step=a0 -; clz a0,a5##ty=i64 tmp=a2 step=a4 -; select a2,a0,zero##condition=(a3 eq zero) -; add a3,a1,a2 -; addi a0,a3,-1 +; not a2,a0 +; select a2,a2,a0##condition=(a1 slt zero) +; not a0,a1 +; select a0,a0,a1##condition=(a1 slt zero) +; clz a3,a0##ty=i64 tmp=a1 step=a4 +; clz a1,a2##ty=i64 tmp=a5 step=a4 +; select a1,a1,zero##condition=(a0 eq zero) +; add a0,a3,a1 +; addi a0,a0,-1 ; li a1,0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; not a3, a0 -; mv a5, a3 +; not a2, a0 ; bltz a1, 8 -; mv a5, a0 -; not a2, a1 -; mv a3, a2 +; mv a2, a0 +; not a0, a1 ; bltz a1, 8 -; mv a3, a1 -; mv a1, zero -; addi a0, zero, 0x40 -; addi a2, zero, 1 -; slli a2, a2, 0x3f -; blez a0, 0x1c -; and t5, a2, a3 +; mv a0, a1 +; mv a3, zero +; addi a4, zero, 0x40 +; addi a1, zero, 1 +; slli a1, a1, 0x3f +; blez a4, 0x1c +; and t5, a1, a0 ; bne zero, t5, 0x14 -; addi a1, a1, 1 -; addi a0, a0, -1 -; srli a2, a2, 1 +; addi a3, a3, 1 +; addi a4, a4, -1 +; srli a1, a1, 1 ; j -0x18 -; mv a0, zero +; mv a1, zero ; addi a4, zero, 0x40 -; addi a2, zero, 1 -; slli a2, a2, 0x3f +; addi a5, zero, 1 +; slli a5, a5, 0x3f ; blez a4, 0x1c -; and t5, a2, a5 +; and t5, a5, a2 ; bne zero, t5, 0x14 -; addi a0, a0, 1 +; addi a1, a1, 1 ; addi a4, a4, -1 -; srli a2, a2, 1 +; srli a5, a5, 1 ; j -0x18 -; mv a2, a0 -; beqz a3, 8 -; mv a2, zero -; add a3, a1, a2 -; addi a0, a3, -1 +; beqz a0, 8 +; mv a1, zero +; add a0, a3, a1 +; addi a0, a0, -1 ; mv a1, zero ; ret @@ -375,21 +371,21 @@ block0(v0: i8): ; VCode: ; block0: -; mv a5,a0 -; ctz a0,a5##ty=i8 tmp=a2 step=a3 +; mv a3,a0 +; ctz a0,a3##ty=i8 tmp=a2 step=a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a5, a0 +; mv a3, a0 ; mv a0, zero -; addi a3, zero, 8 +; addi a1, zero, 8 ; addi a2, zero, 1 -; blez a3, 0x1c -; and t5, a2, a5 +; blez a1, 0x1c +; and t5, a2, a3 ; bne zero, t5, 0x14 ; addi a0, a0, 1 -; addi a3, a3, -1 +; addi a1, a1, -1 ; slli a2, a2, 1 ; j -0x18 ; ret @@ -402,21 +398,21 @@ block0(v0: i16): ; VCode: ; block0: -; mv a5,a0 -; ctz a0,a5##ty=i16 tmp=a2 step=a3 +; mv a3,a0 +; ctz a0,a3##ty=i16 tmp=a2 step=a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a5, a0 +; mv a3, a0 ; mv a0, zero -; addi a3, zero, 0x10 +; addi a1, zero, 0x10 ; addi a2, zero, 1 -; blez a3, 0x1c -; and t5, a2, a5 +; blez a1, 0x1c +; and t5, a2, a3 ; bne zero, t5, 0x14 ; addi a0, a0, 1 -; addi a3, a3, -1 +; addi a1, a1, -1 ; slli a2, a2, 1 ; j -0x18 ; ret @@ -429,21 +425,21 @@ block0(v0: i32): ; VCode: ; block0: -; mv a5,a0 -; ctz a0,a5##ty=i32 tmp=a2 step=a3 +; mv a3,a0 +; ctz a0,a3##ty=i32 tmp=a2 step=a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a5, a0 +; mv a3, a0 ; mv a0, zero -; addi a3, zero, 0x20 +; addi a1, zero, 0x20 ; addi a2, zero, 1 -; blez a3, 0x1c -; and t5, a2, a5 +; blez a1, 0x1c +; and t5, a2, a3 ; bne zero, t5, 0x14 ; addi a0, a0, 1 -; addi a3, a3, -1 +; addi a1, a1, -1 ; slli a2, a2, 1 ; j -0x18 ; ret @@ -456,21 +452,21 @@ block0(v0: i64): ; VCode: ; block0: -; mv a5,a0 -; ctz a0,a5##ty=i64 tmp=a2 step=a3 +; mv a3,a0 +; ctz a0,a3##ty=i64 tmp=a2 step=a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a5, a0 +; mv a3, a0 ; mv a0, zero -; addi a3, zero, 0x40 +; addi a1, zero, 0x40 ; addi a2, zero, 1 
-; blez a3, 0x1c -; and t5, a2, a5 +; blez a1, 0x1c +; and t5, a2, a3 ; bne zero, t5, 0x14 ; addi a0, a0, 1 -; addi a3, a3, -1 +; addi a1, a1, -1 ; slli a2, a2, 1 ; j -0x18 ; ret @@ -483,38 +479,40 @@ block0(v0: i128): ; VCode: ; block0: -; ctz a5,a1##ty=i64 tmp=a3 step=a4 -; ctz a3,a0##ty=i64 tmp=a1 step=a2 -; select a5,a5,zero##condition=(a0 eq zero) -; add a0,a3,a5 +; mv a4,a1 +; ctz a2,a4##ty=i64 tmp=a3 step=a1 +; ctz a1,a0##ty=i64 tmp=a4 step=a3 +; select a2,a2,zero##condition=(a0 eq zero) +; add a0,a1,a2 ; li a1,0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a5, zero -; addi a4, zero, 0x40 +; mv a4, a1 +; mv a2, zero +; addi a1, zero, 0x40 ; addi a3, zero, 1 -; blez a4, 0x1c -; and t5, a3, a1 +; blez a1, 0x1c +; and t5, a3, a4 ; bne zero, t5, 0x14 -; addi a5, a5, 1 -; addi a4, a4, -1 +; addi a2, a2, 1 +; addi a1, a1, -1 ; slli a3, a3, 1 ; j -0x18 -; mv a3, zero -; addi a2, zero, 0x40 -; addi a1, zero, 1 -; blez a2, 0x1c -; and t5, a1, a0 +; mv a1, zero +; addi a3, zero, 0x40 +; addi a4, zero, 1 +; blez a3, 0x1c +; and t5, a4, a0 ; bne zero, t5, 0x14 -; addi a3, a3, 1 -; addi a2, a2, -1 -; slli a1, a1, 1 +; addi a1, a1, 1 +; addi a3, a3, -1 +; slli a4, a4, 1 ; j -0x18 ; beqz a0, 8 -; mv a5, zero -; add a0, a3, a5 +; mv a2, zero +; add a0, a1, a2 ; mv a1, zero ; ret @@ -526,37 +524,37 @@ block0(v0: i128): ; VCode: ; block0: -; popcnt a5,a0##ty=i64 tmp=a3 step=a4 -; popcnt a3,a1##ty=i64 tmp=a4 step=a2 -; add a0,a5,a3 +; popcnt a5,a0##ty=i64 tmp=a3 step=a2 +; popcnt a2,a1##ty=i64 tmp=a4 step=a3 +; add a0,a5,a2 ; li a1,0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; mv a5, zero -; addi a4, zero, 0x40 +; addi a2, zero, 0x40 ; addi a3, zero, 1 ; slli a3, a3, 0x3f -; blez a4, 0x1c +; blez a2, 0x1c ; and t5, a3, a0 ; beq zero, t5, 8 ; addi a5, a5, 1 -; addi a4, a4, -1 +; addi a2, a2, -1 ; srli a3, a3, 1 ; j -0x18 -; mv a3, zero -; addi a2, zero, 0x40 +; mv a2, zero +; addi a3, zero, 0x40 ; addi a4, zero, 1 ; slli a4, a4, 0x3f -; blez a2, 0x1c +; blez a3, 0x1c ; and t5, a4, a1 ; beq zero, t5, 8 -; addi a3, a3, 1 -; addi a2, a2, -1 +; addi a2, a2, 1 +; addi a3, a3, -1 ; srli a4, a4, 1 ; j -0x18 -; add a0, a5, a3 +; add a0, a5, a2 ; mv a1, zero ; ret @@ -568,22 +566,22 @@ block0(v0: i64): ; VCode: ; block0: -; mv a5,a0 -; popcnt a0,a5##ty=i64 tmp=a2 step=a3 +; mv a3,a0 +; popcnt a0,a3##ty=i64 tmp=a2 step=a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a5, a0 +; mv a3, a0 ; mv a0, zero -; addi a3, zero, 0x40 +; addi a1, zero, 0x40 ; addi a2, zero, 1 ; slli a2, a2, 0x3f -; blez a3, 0x1c -; and t5, a2, a5 +; blez a1, 0x1c +; and t5, a2, a3 ; beq zero, t5, 8 ; addi a0, a0, 1 -; addi a3, a3, -1 +; addi a1, a1, -1 ; srli a2, a2, 1 ; j -0x18 ; ret @@ -596,21 +594,21 @@ block0(v0: i32): ; VCode: ; block0: -; slli a2,a0,32 -; srli a4,a2,32 -; popcnt a0,a4##ty=i64 tmp=a2 step=a1 +; slli a0,a0,32 +; srli a3,a0,32 +; popcnt a0,a3##ty=i64 tmp=a2 step=a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x20 -; srli a4, a2, 0x20 +; slli a0, a0, 0x20 +; srli a3, a0, 0x20 ; mv a0, zero ; addi a1, zero, 0x40 ; addi a2, zero, 1 ; slli a2, a2, 0x3f ; blez a1, 0x1c -; and t5, a2, a4 +; and t5, a2, a3 ; beq zero, t5, 8 ; addi a0, a0, 1 ; addi a1, a1, -1 @@ -626,21 +624,21 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srli a4,a2,48 -; popcnt a0,a4##ty=i64 tmp=a2 step=a1 +; slli a0,a0,48 +; srli a3,a0,48 +; popcnt a0,a3##ty=i64 tmp=a2 step=a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srli a4, a2, 0x30 +; slli a0, a0, 0x30 +; srli a3, a0, 0x30 ; 
mv a0, zero ; addi a1, zero, 0x40 ; addi a2, zero, 1 ; slli a2, a2, 0x3f ; blez a1, 0x1c -; and t5, a2, a4 +; and t5, a2, a3 ; beq zero, t5, 8 ; addi a0, a0, 1 ; addi a1, a1, -1 @@ -656,23 +654,23 @@ block0(v0: i8): ; VCode: ; block0: -; andi a2,a0,255 -; popcnt a0,a2##ty=i64 tmp=a4 step=a5 +; andi a3,a0,255 +; popcnt a0,a3##ty=i64 tmp=a2 step=a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a2, a0, 0xff +; andi a3, a0, 0xff ; mv a0, zero -; addi a5, zero, 0x40 -; addi a4, zero, 1 -; slli a4, a4, 0x3f -; blez a5, 0x1c -; and t5, a4, a2 +; addi a1, zero, 0x40 +; addi a2, zero, 1 +; slli a2, a2, 0x3f +; blez a1, 0x1c +; and t5, a2, a3 ; beq zero, t5, 8 ; addi a0, a0, 1 -; addi a5, a5, -1 -; srli a4, a4, 1 +; addi a1, a1, -1 +; srli a2, a2, 1 ; j -0x18 ; ret @@ -718,14 +716,14 @@ block0(v0: i64): ; VCode: ; block0: -; slli a3,a0,3 -; not a0,a3 +; slli a0,a0,3 +; not a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 3 -; not a0, a3 +; slli a0, a0, 3 +; not a0, a0 ; ret function %bnot_i128(i128) -> i128 { @@ -754,20 +752,20 @@ block0(v0: f16): ; VCode: ; block0: -; fmv.x.w a2,fa0 -; not a4,a2 -; lui a0,-16 -; or a2,a4,a0 -; fmv.w.x fa0,a2 +; fmv.x.w a0,fa0 +; not a0,a0 +; lui a1,-16 +; or a0,a0,a1 +; fmv.w.x fa0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fmv.x.w a2, fa0 -; not a4, a2 -; lui a0, 0xffff0 -; or a2, a4, a0 -; fmv.w.x fa0, a2 +; fmv.x.w a0, fa0 +; not a0, a0 +; lui a1, 0xffff0 +; or a0, a0, a1 +; fmv.w.x fa0, a0 ; ret function %bnot_f32(f32) -> f32 { @@ -778,16 +776,16 @@ block0(v0: f32): ; VCode: ; block0: -; fmv.x.w a2,fa0 -; not a4,a2 -; fmv.w.x fa0,a4 +; fmv.x.w a0,fa0 +; not a0,a0 +; fmv.w.x fa0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fmv.x.w a2, fa0 -; not a4, a2 -; fmv.w.x fa0, a4 +; fmv.x.w a0, fa0 +; not a0, a0 +; fmv.w.x fa0, a0 ; ret function %bnot_f64(f64) -> f64 { @@ -798,16 +796,16 @@ block0(v0: f64): ; VCode: ; block0: -; fmv.x.d a2,fa0 -; not a4,a2 -; fmv.d.x fa0,a4 +; fmv.x.d a0,fa0 +; not a0,a0 +; fmv.d.x fa0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fmv.x.d a2, fa0 -; not a4, a2 -; fmv.d.x fa0, a4 +; fmv.x.d a0, fa0 +; not a0, a0 +; fmv.d.x fa0, a0 ; ret function %bnot_f128(f128) -> f128 { @@ -886,18 +884,18 @@ block0(v0: f16, v1: f16): ; VCode: ; block0: -; fmv.x.w a3,fa0 -; fmv.x.w a5,fa1 -; and a1,a3,a5 -; fmv.w.x fa0,a1 +; fmv.x.w a0,fa0 +; fmv.x.w a1,fa1 +; and a0,a0,a1 +; fmv.w.x fa0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fmv.x.w a3, fa0 -; fmv.x.w a5, fa1 -; and a1, a3, a5 -; fmv.w.x fa0, a1 +; fmv.x.w a0, fa0 +; fmv.x.w a1, fa1 +; and a0, a0, a1 +; fmv.w.x fa0, a0 ; ret function %band_f32(f32, f32) -> f32 { @@ -908,18 +906,18 @@ block0(v0: f32, v1: f32): ; VCode: ; block0: -; fmv.x.w a3,fa0 -; fmv.x.w a5,fa1 -; and a1,a3,a5 -; fmv.w.x fa0,a1 +; fmv.x.w a0,fa0 +; fmv.x.w a1,fa1 +; and a0,a0,a1 +; fmv.w.x fa0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fmv.x.w a3, fa0 -; fmv.x.w a5, fa1 -; and a1, a3, a5 -; fmv.w.x fa0, a1 +; fmv.x.w a0, fa0 +; fmv.x.w a1, fa1 +; and a0, a0, a1 +; fmv.w.x fa0, a0 ; ret function %band_f64(f64, f64) -> f64 { @@ -930,18 +928,18 @@ block0(v0: f64, v1: f64): ; VCode: ; block0: -; fmv.x.d a3,fa0 -; fmv.x.d a5,fa1 -; and a1,a3,a5 -; fmv.d.x fa0,a1 +; fmv.x.d a0,fa0 +; fmv.x.d a1,fa1 +; and a0,a0,a1 +; fmv.d.x fa0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fmv.x.d a3, fa0 -; fmv.x.d a5, fa1 -; and a1, a3, a5 -; fmv.d.x fa0, a1 +; fmv.x.d a0, fa0 +; fmv.x.d a1, fa1 +; and a0, a0, a1 +; fmv.d.x fa0, a0 ; ret function %band_f128(f128, f128) 
-> f128 { @@ -1006,14 +1004,14 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; slli a4,a1,3 -; and a0,a0,a4 +; slli a1,a1,3 +; and a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a4, a1, 3 -; and a0, a0, a4 +; slli a1, a1, 3 +; and a0, a0, a1 ; ret function %band_i64_constant_shift2(i64, i64) -> i64 { @@ -1026,14 +1024,14 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; slli a4,a1,3 -; and a0,a4,a0 +; slli a1,a1,3 +; and a0,a1,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a4, a1, 3 -; and a0, a4, a0 +; slli a1, a1, 3 +; and a0, a1, a0 ; ret function %bor_i32(i32, i32) -> i32 { @@ -1094,18 +1092,18 @@ block0(v0: f16, v1: f16): ; VCode: ; block0: -; fmv.x.w a3,fa0 -; fmv.x.w a5,fa1 -; or a1,a3,a5 -; fmv.w.x fa0,a1 +; fmv.x.w a0,fa0 +; fmv.x.w a1,fa1 +; or a0,a0,a1 +; fmv.w.x fa0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fmv.x.w a3, fa0 -; fmv.x.w a5, fa1 -; or a1, a3, a5 -; fmv.w.x fa0, a1 +; fmv.x.w a0, fa0 +; fmv.x.w a1, fa1 +; or a0, a0, a1 +; fmv.w.x fa0, a0 ; ret function %bor_f32(f32, f32) -> f32 { @@ -1116,18 +1114,18 @@ block0(v0: f32, v1: f32): ; VCode: ; block0: -; fmv.x.w a3,fa0 -; fmv.x.w a5,fa1 -; or a1,a3,a5 -; fmv.w.x fa0,a1 +; fmv.x.w a0,fa0 +; fmv.x.w a1,fa1 +; or a0,a0,a1 +; fmv.w.x fa0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fmv.x.w a3, fa0 -; fmv.x.w a5, fa1 -; or a1, a3, a5 -; fmv.w.x fa0, a1 +; fmv.x.w a0, fa0 +; fmv.x.w a1, fa1 +; or a0, a0, a1 +; fmv.w.x fa0, a0 ; ret function %bor_f64(f64, f64) -> f64 { @@ -1138,18 +1136,18 @@ block0(v0: f64, v1: f64): ; VCode: ; block0: -; fmv.x.d a3,fa0 -; fmv.x.d a5,fa1 -; or a1,a3,a5 -; fmv.d.x fa0,a1 +; fmv.x.d a0,fa0 +; fmv.x.d a1,fa1 +; or a0,a0,a1 +; fmv.d.x fa0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fmv.x.d a3, fa0 -; fmv.x.d a5, fa1 -; or a1, a3, a5 -; fmv.d.x fa0, a1 +; fmv.x.d a0, fa0 +; fmv.x.d a1, fa1 +; or a0, a0, a1 +; fmv.d.x fa0, a0 ; ret function %bor_f128(f128, f128) -> f128 { @@ -1214,14 +1212,14 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; slli a4,a1,3 -; or a0,a0,a4 +; slli a1,a1,3 +; or a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a4, a1, 3 -; or a0, a0, a4 +; slli a1, a1, 3 +; or a0, a0, a1 ; ret function %bor_i64_constant_shift2(i64, i64) -> i64 { @@ -1234,14 +1232,14 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; slli a4,a1,3 -; or a0,a4,a0 +; slli a1,a1,3 +; or a0,a1,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a4, a1, 3 -; or a0, a4, a0 +; slli a1, a1, 3 +; or a0, a1, a0 ; ret function %bxor_i32(i32, i32) -> i32 { @@ -1302,22 +1300,22 @@ block0(v0: f16, v1: f16): ; VCode: ; block0: -; fmv.x.w a3,fa0 -; fmv.x.w a5,fa1 -; xor a1,a3,a5 -; lui a3,-16 -; or a5,a1,a3 -; fmv.w.x fa0,a5 +; fmv.x.w a0,fa0 +; fmv.x.w a1,fa1 +; xor a0,a0,a1 +; lui a1,-16 +; or a1,a0,a1 +; fmv.w.x fa0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fmv.x.w a3, fa0 -; fmv.x.w a5, fa1 -; xor a1, a3, a5 -; lui a3, 0xffff0 -; or a5, a1, a3 -; fmv.w.x fa0, a5 +; fmv.x.w a0, fa0 +; fmv.x.w a1, fa1 +; xor a0, a0, a1 +; lui a1, 0xffff0 +; or a1, a0, a1 +; fmv.w.x fa0, a1 ; ret function %bxor_f32(f32, f32) -> f32 { @@ -1328,18 +1326,18 @@ block0(v0: f32, v1: f32): ; VCode: ; block0: -; fmv.x.w a3,fa0 -; fmv.x.w a5,fa1 -; xor a1,a3,a5 -; fmv.w.x fa0,a1 +; fmv.x.w a0,fa0 +; fmv.x.w a1,fa1 +; xor a0,a0,a1 +; fmv.w.x fa0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fmv.x.w a3, fa0 -; fmv.x.w a5, fa1 -; xor a1, a3, a5 -; fmv.w.x fa0, a1 +; fmv.x.w a0, fa0 +; fmv.x.w a1, fa1 +; xor a0, a0, a1 +; fmv.w.x fa0, a0 ; ret 
function %bxor_f64(f64, f64) -> f64 { @@ -1350,18 +1348,18 @@ block0(v0: f64, v1: f64): ; VCode: ; block0: -; fmv.x.d a3,fa0 -; fmv.x.d a5,fa1 -; xor a1,a3,a5 -; fmv.d.x fa0,a1 +; fmv.x.d a0,fa0 +; fmv.x.d a1,fa1 +; xor a0,a0,a1 +; fmv.d.x fa0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fmv.x.d a3, fa0 -; fmv.x.d a5, fa1 -; xor a1, a3, a5 -; fmv.d.x fa0, a1 +; fmv.x.d a0, fa0 +; fmv.x.d a1, fa1 +; xor a0, a0, a1 +; fmv.d.x fa0, a0 ; ret function %bxor_f128(f128, f128) -> f128 { @@ -1426,14 +1424,14 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; slli a4,a1,3 -; xor a0,a0,a4 +; slli a1,a1,3 +; xor a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a4, a1, 3 -; xor a0, a0, a4 +; slli a1, a1, 3 +; xor a0, a0, a1 ; ret function %bxor_i64_constant_shift2(i64, i64) -> i64 { @@ -1446,14 +1444,14 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; slli a4,a1,3 -; xor a0,a4,a0 +; slli a1,a1,3 +; xor a0,a1,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a4, a1, 3 -; xor a0, a4, a0 +; slli a1, a1, 3 +; xor a0, a1, a0 ; ret function %band_not_i32(i32, i32) -> i32 { @@ -1464,14 +1462,14 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; not a4,a1 -; and a0,a0,a4 +; not a1,a1 +; and a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; not a4, a1 -; and a0, a0, a4 +; not a1, a1 +; and a0, a0, a1 ; ret function %band_not_i64(i64, i64) -> i64 { @@ -1482,14 +1480,14 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; not a4,a1 -; and a0,a0,a4 +; not a1,a1 +; and a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; not a4, a1 -; and a0, a0, a4 +; not a1, a1 +; and a0, a0, a1 ; ret function %band_not_i128(i128, i128) -> i128 { @@ -1500,18 +1498,22 @@ block0(v0: i128, v1: i128): ; VCode: ; block0: +; mv a4,a3 +; not a3,a2 +; mv a2,a4 ; not a2,a2 -; not a3,a3 -; and a0,a0,a2 -; and a1,a1,a3 +; and a0,a0,a3 +; and a1,a1,a2 ; ret ; ; Disassembled: ; block0: ; offset 0x0 +; mv a4, a3 +; not a3, a2 +; mv a2, a4 ; not a2, a2 -; not a3, a3 -; and a0, a0, a2 -; and a1, a1, a3 +; and a0, a0, a3 +; and a1, a1, a2 ; ret function %band_not_i64_constant(i64) -> i64 { @@ -1523,16 +1525,16 @@ block0(v0: i64): ; VCode: ; block0: -; li a4,4 -; not a4,a4 -; and a0,a0,a4 +; li a1,4 +; not a1,a1 +; and a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a4, zero, 4 -; not a4, a4 -; and a0, a0, a4 +; addi a1, zero, 4 +; not a1, a1 +; and a0, a0, a1 ; ret function %band_not_i64_constant_shift(i64, i64) -> i64 { @@ -1545,16 +1547,16 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; slli a5,a1,4 -; not a5,a5 -; and a0,a0,a5 +; slli a1,a1,4 +; not a1,a1 +; and a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a5, a1, 4 -; not a5, a5 -; and a0, a0, a5 +; slli a1, a1, 4 +; not a1, a1 +; and a0, a0, a1 ; ret function %bor_not_i32(i32, i32) -> i32 { @@ -1565,14 +1567,14 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; not a4,a1 -; or a0,a0,a4 +; not a1,a1 +; or a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; not a4, a1 -; or a0, a0, a4 +; not a1, a1 +; or a0, a0, a1 ; ret function %bor_not_i64(i64, i64) -> i64 { @@ -1583,14 +1585,14 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; not a4,a1 -; or a0,a0,a4 +; not a1,a1 +; or a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; not a4, a1 -; or a0, a0, a4 +; not a1, a1 +; or a0, a0, a1 ; ret function %bor_not_i128(i128, i128) -> i128 { @@ -1601,18 +1603,22 @@ block0(v0: i128, v1: i128): ; VCode: ; block0: +; mv a4,a3 +; not a3,a2 +; mv a2,a4 ; not a2,a2 -; not a3,a3 -; or a0,a0,a2 -; or a1,a1,a3 +; 
or a0,a0,a3 +; or a1,a1,a2 ; ret ; ; Disassembled: ; block0: ; offset 0x0 +; mv a4, a3 +; not a3, a2 +; mv a2, a4 ; not a2, a2 -; not a3, a3 -; or a0, a0, a2 -; or a1, a1, a3 +; or a0, a0, a3 +; or a1, a1, a2 ; ret function %bor_not_i64_constant(i64) -> i64 { @@ -1624,16 +1630,16 @@ block0(v0: i64): ; VCode: ; block0: -; li a4,4 -; not a4,a4 -; or a0,a0,a4 +; li a1,4 +; not a1,a1 +; or a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a4, zero, 4 -; not a4, a4 -; or a0, a0, a4 +; addi a1, zero, 4 +; not a1, a1 +; or a0, a0, a1 ; ret function %bor_not_i64_constant_shift(i64, i64) -> i64 { @@ -1646,16 +1652,16 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; slli a5,a1,4 -; not a5,a5 -; or a0,a0,a5 +; slli a1,a1,4 +; not a1,a1 +; or a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a5, a1, 4 -; not a5, a5 -; or a0, a0, a5 +; slli a1, a1, 4 +; not a1, a1 +; or a0, a0, a1 ; ret function %bxor_not_i32(i32, i32) -> i32 { @@ -1666,14 +1672,14 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; not a4,a1 -; xor a0,a0,a4 +; not a1,a1 +; xor a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; not a4, a1 -; xor a0, a0, a4 +; not a1, a1 +; xor a0, a0, a1 ; ret function %bxor_not_i64(i64, i64) -> i64 { @@ -1684,14 +1690,14 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; not a4,a1 -; xor a0,a0,a4 +; not a1,a1 +; xor a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; not a4, a1 -; xor a0, a0, a4 +; not a1, a1 +; xor a0, a0, a1 ; ret function %bxor_not_i128(i128, i128) -> i128 { @@ -1702,18 +1708,22 @@ block0(v0: i128, v1: i128): ; VCode: ; block0: +; mv a4,a3 +; not a3,a2 +; mv a2,a4 ; not a2,a2 -; not a3,a3 -; xor a0,a0,a2 -; xor a1,a1,a3 +; xor a0,a0,a3 +; xor a1,a1,a2 ; ret ; ; Disassembled: ; block0: ; offset 0x0 +; mv a4, a3 +; not a3, a2 +; mv a2, a4 ; not a2, a2 -; not a3, a3 -; xor a0, a0, a2 -; xor a1, a1, a3 +; xor a0, a0, a3 +; xor a1, a1, a2 ; ret function %bxor_not_i64_constant(i64) -> i64 { @@ -1725,16 +1735,16 @@ block0(v0: i64): ; VCode: ; block0: -; li a4,4 -; not a4,a4 -; xor a0,a0,a4 +; li a1,4 +; not a1,a1 +; xor a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a4, zero, 4 -; not a4, a4 -; xor a0, a0, a4 +; addi a1, zero, 4 +; not a1, a1 +; xor a0, a0, a1 ; ret function %bxor_not_i64_constant_shift(i64, i64) -> i64 { @@ -1747,16 +1757,16 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; slli a5,a1,4 -; not a5,a5 -; xor a0,a0,a5 +; slli a1,a1,4 +; not a1,a1 +; xor a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a5, a1, 4 -; not a5, a5 -; xor a0, a0, a5 +; slli a1, a1, 4 +; not a1, a1 +; xor a0, a0, a1 ; ret function %ishl_i128_i8(i128, i8) -> i128 { @@ -1767,38 +1777,37 @@ block0(v0: i128, v1: i8): ; VCode: ; block0: -; andi a5,a2,63 -; li a3,64 -; sub a3,a3,a5 -; sll a4,a0,a5 -; srl a0,a0,a3 -; select a3,zero,a0##condition=(a5 eq zero) -; sll a5,a1,a5 -; or t0,a3,a5 -; li a3,64 -; andi a5,a2,127 -; select [a0,a1],[zero,a4],[a4,t0]##condition=(a5 uge a3) +; andi a3,a2,63 +; li a4,64 +; sub a5,a4,a3 +; sll a4,a0,a3 +; srl a5,a0,a5 +; select a5,zero,a5##condition=(a3 eq zero) +; sll a0,a1,a3 +; or a3,a5,a0 +; li a5,64 +; andi a2,a2,127 +; select [a0,a1],[zero,a4],[a4,a3]##condition=(a2 uge a5) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a5, a2, 0x3f -; addi a3, zero, 0x40 -; sub a3, a3, a5 -; sll a4, a0, a5 -; srl a0, a0, a3 -; mv a3, zero -; beqz a5, 8 -; mv a3, a0 -; sll a5, a1, a5 -; or t0, a3, a5 -; addi a3, zero, 0x40 -; andi a5, a2, 0x7f +; andi a3, a2, 0x3f +; addi a4, zero, 0x40 +; sub a5, 
a4, a3 +; sll a4, a0, a3 +; srl a5, a0, a5 +; bnez a3, 8 +; mv a5, zero +; sll a0, a1, a3 +; or a3, a5, a0 +; addi a5, zero, 0x40 +; andi a2, a2, 0x7f ; mv a0, zero ; mv a1, a4 -; bgeu a5, a3, 0xc +; bgeu a2, a5, 0xc ; mv a0, a4 -; mv a1, t0 +; mv a1, a3 ; ret function %ishl_i128_i128(i128, i128) -> i128 { @@ -1809,41 +1818,44 @@ block0(v0: i128, v1: i128): ; VCode: ; block0: -; mv a5,a0 +; mv a4,a0 +; mv a7,a1 ; andi a0,a2,63 -; li a3,64 -; sub a3,a3,a0 -; mv a4,a5 -; sll a5,a4,a0 +; li a1,64 +; sub a3,a1,a0 +; sll a6,a4,a0 ; srl a3,a4,a3 -; select a3,zero,a3##condition=(a0 eq zero) +; select a5,zero,a3##condition=(a0 eq zero) +; mv a1,a7 ; sll a0,a1,a0 -; or a4,a3,a0 -; li a3,64 +; or a3,a5,a0 +; li a4,64 ; andi a2,a2,127 -; select [a0,a1],[zero,a5],[a5,a4]##condition=(a2 uge a3) +; select [a0,a1],[zero,a6],[a6,a3]##condition=(a2 uge a4) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a5, a0 +; mv a4, a0 +; mv a7, a1 ; andi a0, a2, 0x3f -; addi a3, zero, 0x40 -; sub a3, a3, a0 -; mv a4, a5 -; sll a5, a4, a0 +; addi a1, zero, 0x40 +; sub a3, a1, a0 +; sll a6, a4, a0 ; srl a3, a4, a3 -; bnez a0, 8 -; mv a3, zero +; mv a5, zero +; beqz a0, 8 +; mv a5, a3 +; mv a1, a7 ; sll a0, a1, a0 -; or a4, a3, a0 -; addi a3, zero, 0x40 +; or a3, a5, a0 +; addi a4, zero, 0x40 ; andi a2, a2, 0x7f ; mv a0, zero -; mv a1, a5 -; bgeu a2, a3, 0xc -; mv a0, a5 -; mv a1, a4 +; mv a1, a6 +; bgeu a2, a4, 0xc +; mv a0, a6 +; mv a1, a3 ; ret function %ushr_i128_i8(i128, i8) -> i128 { @@ -1854,38 +1866,39 @@ block0(v0: i128, v1: i8): ; VCode: ; block0: -; andi a4,a2,63 +; mv a4,a0 +; andi a0,a2,63 ; li a3,64 -; sub a3,a3,a4 -; sll a5,a1,a3 -; select a3,zero,a5##condition=(a4 eq zero) -; srl a5,a0,a4 -; or a5,a3,a5 -; li t0,64 -; srl a3,a1,a4 -; andi a4,a2,127 -; select [a0,a1],[a3,zero],[a5,a3]##condition=(a4 uge t0) +; sub a3,a3,a0 +; sll a3,a1,a3 +; select a3,zero,a3##condition=(a0 eq zero) +; srl a4,a4,a0 +; or a4,a3,a4 +; li a3,64 +; srl a5,a1,a0 +; andi a2,a2,127 +; select [a0,a1],[a5,zero],[a4,a5]##condition=(a2 uge a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a4, a2, 0x3f +; mv a4, a0 +; andi a0, a2, 0x3f ; addi a3, zero, 0x40 -; sub a3, a3, a4 -; sll a5, a1, a3 +; sub a3, a3, a0 +; sll a3, a1, a3 +; bnez a0, 8 ; mv a3, zero -; beqz a4, 8 -; mv a3, a5 -; srl a5, a0, a4 -; or a5, a3, a5 -; addi t0, zero, 0x40 -; srl a3, a1, a4 -; andi a4, a2, 0x7f -; mv a0, a3 -; mv a1, zero -; bgeu a4, t0, 0xc +; srl a4, a4, a0 +; or a4, a3, a4 +; addi a3, zero, 0x40 +; srl a5, a1, a0 +; andi a2, a2, 0x7f ; mv a0, a5 -; mv a1, a3 +; mv a1, zero +; bgeu a2, a3, 0xc +; mv a0, a4 +; mv a1, a5 ; ret function %ushr_i128_i128(i128, i128) -> i128 { @@ -1895,61 +1908,40 @@ block0(v0: i128, v1: i128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-16 -; sd s11,8(sp) ; block0: -; andi a5,a2,63 +; mv a4,a0 +; andi a0,a2,63 ; li a3,64 -; sub a3,a3,a5 +; sub a3,a3,a0 ; sll a3,a1,a3 -; select a3,zero,a3##condition=(a5 eq zero) -; srl a4,a0,a5 -; or s11,a3,a4 +; select a3,zero,a3##condition=(a0 eq zero) +; srl a5,a4,a0 +; or a4,a3,a5 ; li a3,64 -; srl a4,a1,a5 -; andi a5,a2,127 -; select [a0,a1],[a4,zero],[s11,a4]##condition=(a5 uge a3) -; ld s11,8(sp) -; addi sp,sp,16 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; srl a5,a1,a0 +; andi a2,a2,127 +; select [a0,a1],[a5,zero],[a4,a5]##condition=(a2 uge a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x10 -; sd s11, 8(sp) -; block1: ; offset 0x18 -; 
andi a5, a2, 0x3f +; mv a4, a0 +; andi a0, a2, 0x3f ; addi a3, zero, 0x40 -; sub a3, a3, a5 +; sub a3, a3, a0 ; sll a3, a1, a3 -; bnez a5, 8 +; bnez a0, 8 ; mv a3, zero -; srl a4, a0, a5 -; or s11, a3, a4 +; srl a5, a4, a0 +; or a4, a3, a5 ; addi a3, zero, 0x40 -; srl a4, a1, a5 -; andi a5, a2, 0x7f -; mv a0, a4 +; srl a5, a1, a0 +; andi a2, a2, 0x7f +; mv a0, a5 ; mv a1, zero -; bgeu a5, a3, 0xc -; mv a0, s11 -; mv a1, a4 -; ld s11, 8(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 +; bgeu a2, a3, 0xc +; mv a0, a4 +; mv a1, a5 ; ret function %sshr_i128_i8(i128, i8) -> i128 { @@ -1959,47 +1951,71 @@ block0(v0: i128, v1: i8): } ; VCode: +; addi sp,sp,-16 +; sd ra,8(sp) +; sd fp,0(sp) +; mv fp,sp +; addi sp,sp,-16 +; sd s4,8(sp) ; block0: -; andi a4,a2,63 +; mv a4,a0 +; andi a0,a2,63 ; li a3,64 -; sub a3,a3,a4 -; sll a5,a1,a3 -; select a3,zero,a5##condition=(a4 eq zero) -; srl a5,a0,a4 -; or a5,a3,a5 -; li a0,64 -; sra a3,a1,a4 -; li a4,-1 -; select t4,a4,zero##condition=(a1 slt zero) +; sub a3,a3,a0 +; sll a3,a1,a3 +; select a3,zero,a3##condition=(a0 eq zero) +; srl a4,a4,a0 +; or a3,a3,a4 ; li a4,64 +; sra s4,a1,a0 +; li a0,-1 +; select a4,a0,zero##condition=(a1 slt zero) +; li a5,64 ; andi a2,a2,127 -; select [a0,a1],[a3,t4],[a5,a3]##condition=(a2 uge a4) +; select [a0,a1],[s4,a4],[a3,s4]##condition=(a2 uge a5) +; ld s4,8(sp) +; addi sp,sp,16 +; ld ra,8(sp) +; ld fp,0(sp) +; addi sp,sp,16 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a4, a2, 0x3f +; addi sp, sp, -0x10 +; sd ra, 8(sp) +; sd s0, 0(sp) +; mv s0, sp +; addi sp, sp, -0x10 +; sd s4, 8(sp) +; block1: ; offset 0x18 +; mv a4, a0 +; andi a0, a2, 0x3f ; addi a3, zero, 0x40 -; sub a3, a3, a4 -; sll a5, a1, a3 +; sub a3, a3, a0 +; sll a3, a1, a3 +; bnez a0, 8 ; mv a3, zero -; beqz a4, 8 -; mv a3, a5 -; srl a5, a0, a4 -; or a5, a3, a5 -; addi a0, zero, 0x40 -; sra a3, a1, a4 -; addi a4, zero, -1 -; mv t4, a4 -; bltz a1, 8 -; mv t4, zero +; srl a4, a4, a0 +; or a3, a3, a4 ; addi a4, zero, 0x40 +; sra s4, a1, a0 +; addi a0, zero, -1 +; mv a4, a0 +; bltz a1, 8 +; mv a4, zero +; addi a5, zero, 0x40 ; andi a2, a2, 0x7f +; mv a0, s4 +; mv a1, a4 +; bgeu a2, a5, 0xc ; mv a0, a3 -; mv a1, t4 -; bgeu a2, a4, 0xc -; mv a0, a5 -; mv a1, a3 +; mv a1, s4 +; ld s4, 8(sp) +; addi sp, sp, 0x10 +; ld ra, 8(sp) +; ld s0, 0(sp) +; addi sp, sp, 0x10 ; ret function %sshr_i128_i128(i128, i128) -> i128 { @@ -2014,23 +2030,24 @@ block0(v0: i128, v1: i128): ; sd fp,0(sp) ; mv fp,sp ; addi sp,sp,-16 -; sd s11,8(sp) +; sd s5,8(sp) ; block0: -; andi a5,a2,63 +; mv a4,a0 +; andi a0,a2,63 ; li a3,64 -; sub a3,a3,a5 +; sub a3,a3,a0 ; sll a3,a1,a3 -; select a3,zero,a3##condition=(a5 eq zero) -; srl a4,a0,a5 -; or s11,a3,a4 -; li a3,64 -; sra a3,a1,a5 -; li a5,-1 -; select a5,a5,zero##condition=(a1 slt zero) +; select a3,zero,a3##condition=(a0 eq zero) +; srl a5,a4,a0 +; or a3,a3,a5 ; li a4,64 +; sra s5,a1,a0 +; li a0,-1 +; select a4,a0,zero##condition=(a1 slt zero) +; li a5,64 ; andi a2,a2,127 -; select [a0,a1],[a3,a5],[s11,a3]##condition=(a2 uge a4) -; ld s11,8(sp) +; select [a0,a1],[s5,a4],[a3,s5]##condition=(a2 uge a5) +; ld s5,8(sp) ; addi sp,sp,16 ; ld ra,8(sp) ; ld fp,0(sp) @@ -2044,29 +2061,31 @@ block0(v0: i128, v1: i128): ; sd s0, 0(sp) ; mv s0, sp ; addi sp, sp, -0x10 -; sd s11, 8(sp) +; sd s5, 8(sp) ; block1: ; offset 0x18 -; andi a5, a2, 0x3f +; mv a4, a0 +; andi a0, a2, 0x3f ; addi a3, zero, 0x40 -; sub a3, a3, a5 +; sub a3, a3, a0 ; sll a3, a1, a3 -; bnez a5, 8 +; bnez a0, 8 ; mv a3, zero -; srl a4, a0, a5 -; or 
s11, a3, a4 -; addi a3, zero, 0x40 -; sra a3, a1, a5 -; addi a5, zero, -1 -; bltz a1, 8 -; mv a5, zero +; srl a5, a4, a0 +; or a3, a3, a5 ; addi a4, zero, 0x40 +; sra s5, a1, a0 +; addi a0, zero, -1 +; mv a4, a0 +; bltz a1, 8 +; mv a4, zero +; addi a5, zero, 0x40 ; andi a2, a2, 0x7f +; mv a0, s5 +; mv a1, a4 +; bgeu a2, a5, 0xc ; mv a0, a3 -; mv a1, a5 -; bgeu a2, a4, 0xc -; mv a0, s11 -; mv a1, a3 -; ld s11, 8(sp) +; mv a1, s5 +; ld s5, 8(sp) ; addi sp, sp, 0x10 ; ld ra, 8(sp) ; ld s0, 0(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/bitrev-zbb-zbkb.clif b/cranelift/filetests/filetests/isa/riscv64/bitrev-zbb-zbkb.clif index 84d16bfea213..eb8edde06b78 100644 --- a/cranelift/filetests/filetests/isa/riscv64/bitrev-zbb-zbkb.clif +++ b/cranelift/filetests/filetests/isa/riscv64/bitrev-zbb-zbkb.clif @@ -26,16 +26,16 @@ block0(v0: i16): ; VCode: ; block0: -; rev8 a2,a0 -; brev8 a4,a2 -; srli a0,a4,48 +; rev8 a0,a0 +; brev8 a0,a0 +; srli a0,a0,48 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x13, 0x56, 0x85, 0x6b -; .byte 0x13, 0x57, 0x76, 0x68 -; srli a0, a4, 0x30 +; .byte 0x13, 0x55, 0x85, 0x6b +; .byte 0x13, 0x55, 0x75, 0x68 +; srli a0, a0, 0x30 ; ret function %bitrev_i32(i32) -> i32 { @@ -46,16 +46,16 @@ block0(v0: i32): ; VCode: ; block0: -; rev8 a2,a0 -; brev8 a4,a2 -; srli a0,a4,32 +; rev8 a0,a0 +; brev8 a0,a0 +; srli a0,a0,32 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x13, 0x56, 0x85, 0x6b -; .byte 0x13, 0x57, 0x76, 0x68 -; srli a0, a4, 0x20 +; .byte 0x13, 0x55, 0x85, 0x6b +; .byte 0x13, 0x55, 0x75, 0x68 +; srli a0, a0, 0x20 ; ret function %bitrev_i64(i64) -> i64 { @@ -66,14 +66,14 @@ block0(v0: i64): ; VCode: ; block0: -; rev8 a2,a0 -; brev8 a0,a2 +; rev8 a0,a0 +; brev8 a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x13, 0x56, 0x85, 0x6b -; .byte 0x13, 0x55, 0x76, 0x68 +; .byte 0x13, 0x55, 0x85, 0x6b +; .byte 0x13, 0x55, 0x75, 0x68 ; ret function %bitrev_i128(i128) -> i128 { @@ -85,20 +85,20 @@ block0(v0: i128): ; VCode: ; block0: ; mv a2,a0 -; rev8 a3,a1 -; brev8 a0,a3 -; mv a3,a2 -; rev8 a1,a3 +; rev8 a0,a1 +; brev8 a0,a0 +; mv a1,a2 +; rev8 a1,a1 ; brev8 a1,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; mv a2, a0 -; .byte 0x93, 0xd6, 0x85, 0x6b -; .byte 0x13, 0xd5, 0x76, 0x68 -; mv a3, a2 -; .byte 0x93, 0xd5, 0x86, 0x6b +; .byte 0x13, 0xd5, 0x85, 0x6b +; .byte 0x13, 0x55, 0x75, 0x68 +; mv a1, a2 +; .byte 0x93, 0xd5, 0x85, 0x6b ; .byte 0x93, 0xd5, 0x75, 0x68 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/bitrev.clif b/cranelift/filetests/filetests/isa/riscv64/bitrev.clif index 2d03516441cb..674ba482b844 100644 --- a/cranelift/filetests/filetests/isa/riscv64/bitrev.clif +++ b/cranelift/filetests/filetests/isa/riscv64/bitrev.clif @@ -10,31 +10,31 @@ block0(v0: i8): ; VCode: ; block0: -; mv a1,a0 -; brev8 a0,a1##tmp=a2 tmp2=a3 step=a4 ty=i8 +; mv a4,a0 +; brev8 a0,a4##tmp=a3 tmp2=a2 step=a1 ty=i8 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a1, a0 +; mv a4, a0 ; mv a0, zero -; addi a4, zero, 8 -; addi a2, zero, 1 -; slli a2, a2, 7 +; addi a1, zero, 8 ; addi a3, zero, 1 -; slli a3, a3, 0 -; blez a4, 0x34 -; and t5, a2, a1 +; slli a3, a3, 7 +; addi a2, zero, 1 +; slli a2, a2, 0 +; blez a1, 0x34 +; and t5, a3, a4 ; beq zero, t5, 8 -; or a0, a0, a3 -; addi a4, a4, -1 -; srli a2, a2, 1 +; or a0, a0, a2 +; addi a1, a1, -1 +; srli a3, a3, 1 ; addi t5, zero, 8 -; rem t5, a4, t5 +; rem t5, a1, t5 ; bnez t5, 0xc -; srli a3, a3, 0xf +; srli a2, a2, 0xf ; j -0x28 -; slli a3, a3, 1 +; slli a2, a2, 1 ; j -0x30 ; ret @@ 
-46,102 +46,102 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,8 -; srli a4,a0,8 -; slli a1,a4,56 -; srli a3,a1,56 -; or a4,a2,a3 -; slli a1,a4,16 -; srli a2,a0,16 -; slli a4,a2,8 -; srli a2,a2,8 +; slli a1,a0,8 +; srli a2,a0,8 ; slli a2,a2,56 -; srli a5,a2,56 -; or a2,a4,a5 +; srli a2,a2,56 +; or a1,a1,a2 +; slli a2,a1,16 +; srli a4,a0,16 +; slli a1,a4,8 +; srli a3,a4,8 +; slli a3,a3,56 +; srli a3,a3,56 +; or a1,a1,a3 +; slli a1,a1,48 +; srli a1,a1,48 +; or a1,a2,a1 +; slli a1,a1,32 +; srli a2,a0,32 +; slli a0,a2,8 +; srli a3,a2,8 +; slli a3,a3,56 +; srli a3,a3,56 +; or a0,a0,a3 +; slli a0,a0,16 +; srli a3,a2,16 +; slli a2,a3,8 +; srli a3,a3,8 +; slli a3,a3,56 +; srli a3,a3,56 +; or a2,a2,a3 ; slli a2,a2,48 -; srli a4,a2,48 -; or a1,a1,a4 -; slli a2,a1,32 -; srli a5,a0,32 -; slli a0,a5,8 -; srli a3,a5,8 -; slli a4,a3,56 -; srli a1,a4,56 -; or a3,a0,a1 -; slli a4,a3,16 -; srli a0,a5,16 -; slli a3,a0,8 -; srli a5,a0,8 -; slli a0,a5,56 -; srli a5,a0,56 -; or a5,a3,a5 -; slli a0,a5,48 -; srli a3,a0,48 -; or a4,a4,a3 -; slli a0,a4,32 -; srli a3,a0,32 -; or a4,a2,a3 -; brev8 a3,a4##tmp=a0 tmp2=a1 step=a2 ty=i64 -; srli a0,a3,48 +; srli a2,a2,48 +; or a0,a0,a2 +; slli a0,a0,32 +; srli a0,a0,32 +; or a4,a1,a0 +; brev8 a2,a4##tmp=a3 tmp2=a0 step=a1 ty=i64 +; srli a0,a2,48 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 8 -; srli a4, a0, 8 -; slli a1, a4, 0x38 -; srli a3, a1, 0x38 -; or a4, a2, a3 -; slli a1, a4, 0x10 -; srli a2, a0, 0x10 -; slli a4, a2, 8 -; srli a2, a2, 8 +; slli a1, a0, 8 +; srli a2, a0, 8 ; slli a2, a2, 0x38 -; srli a5, a2, 0x38 -; or a2, a4, a5 +; srli a2, a2, 0x38 +; or a1, a1, a2 +; slli a2, a1, 0x10 +; srli a4, a0, 0x10 +; slli a1, a4, 8 +; srli a3, a4, 8 +; slli a3, a3, 0x38 +; srli a3, a3, 0x38 +; or a1, a1, a3 +; slli a1, a1, 0x30 +; srli a1, a1, 0x30 +; or a1, a2, a1 +; slli a1, a1, 0x20 +; srli a2, a0, 0x20 +; slli a0, a2, 8 +; srli a3, a2, 8 +; slli a3, a3, 0x38 +; srli a3, a3, 0x38 +; or a0, a0, a3 +; slli a0, a0, 0x10 +; srli a3, a2, 0x10 +; slli a2, a3, 8 +; srli a3, a3, 8 +; slli a3, a3, 0x38 +; srli a3, a3, 0x38 +; or a2, a2, a3 ; slli a2, a2, 0x30 -; srli a4, a2, 0x30 -; or a1, a1, a4 -; slli a2, a1, 0x20 -; srli a5, a0, 0x20 -; slli a0, a5, 8 -; srli a3, a5, 8 -; slli a4, a3, 0x38 -; srli a1, a4, 0x38 -; or a3, a0, a1 -; slli a4, a3, 0x10 -; srli a0, a5, 0x10 -; slli a3, a0, 8 -; srli a5, a0, 8 -; slli a0, a5, 0x38 -; srli a5, a0, 0x38 -; or a5, a3, a5 -; slli a0, a5, 0x30 -; srli a3, a0, 0x30 -; or a4, a4, a3 -; slli a0, a4, 0x20 -; srli a3, a0, 0x20 -; or a4, a2, a3 -; mv a3, zero -; addi a2, zero, 0x40 +; srli a2, a2, 0x30 +; or a0, a0, a2 +; slli a0, a0, 0x20 +; srli a0, a0, 0x20 +; or a4, a1, a0 +; mv a2, zero +; addi a1, zero, 0x40 +; addi a3, zero, 1 +; slli a3, a3, 0x3f ; addi a0, zero, 1 -; slli a0, a0, 0x3f -; addi a1, zero, 1 -; slli a1, a1, 0x38 -; blez a2, 0x34 -; and t5, a0, a4 +; slli a0, a0, 0x38 +; blez a1, 0x34 +; and t5, a3, a4 ; beq zero, t5, 8 -; or a3, a3, a1 -; addi a2, a2, -1 -; srli a0, a0, 1 +; or a2, a2, a0 +; addi a1, a1, -1 +; srli a3, a3, 1 ; addi t5, zero, 8 -; rem t5, a2, t5 +; rem t5, a1, t5 ; bnez t5, 0xc -; srli a1, a1, 0xf +; srli a0, a0, 0xf ; j -0x28 -; slli a1, a1, 1 +; slli a0, a0, 1 ; j -0x30 -; srli a0, a3, 0x30 +; srli a0, a2, 0x30 ; ret function %bitrev_i32(i32) -> i32 { @@ -152,102 +152,102 @@ block0(v0: i32): ; VCode: ; block0: -; slli a2,a0,8 -; srli a4,a0,8 -; slli a1,a4,56 -; srli a3,a1,56 -; or a4,a2,a3 -; slli a1,a4,16 -; srli a2,a0,16 -; slli a4,a2,8 -; srli a2,a2,8 +; slli a1,a0,8 +; srli 
a2,a0,8 ; slli a2,a2,56 -; srli a5,a2,56 -; or a2,a4,a5 +; srli a2,a2,56 +; or a1,a1,a2 +; slli a2,a1,16 +; srli a4,a0,16 +; slli a1,a4,8 +; srli a3,a4,8 +; slli a3,a3,56 +; srli a3,a3,56 +; or a1,a1,a3 +; slli a1,a1,48 +; srli a1,a1,48 +; or a1,a2,a1 +; slli a1,a1,32 +; srli a2,a0,32 +; slli a0,a2,8 +; srli a3,a2,8 +; slli a3,a3,56 +; srli a3,a3,56 +; or a0,a0,a3 +; slli a0,a0,16 +; srli a3,a2,16 +; slli a2,a3,8 +; srli a3,a3,8 +; slli a3,a3,56 +; srli a3,a3,56 +; or a2,a2,a3 ; slli a2,a2,48 -; srli a4,a2,48 -; or a1,a1,a4 -; slli a2,a1,32 -; srli a5,a0,32 -; slli a0,a5,8 -; srli a3,a5,8 -; slli a4,a3,56 -; srli a1,a4,56 -; or a3,a0,a1 -; slli a4,a3,16 -; srli a0,a5,16 -; slli a3,a0,8 -; srli a5,a0,8 -; slli a0,a5,56 -; srli a5,a0,56 -; or a5,a3,a5 -; slli a0,a5,48 -; srli a3,a0,48 -; or a4,a4,a3 -; slli a0,a4,32 -; srli a3,a0,32 -; or a4,a2,a3 -; brev8 a3,a4##tmp=a0 tmp2=a1 step=a2 ty=i64 -; srli a0,a3,32 +; srli a2,a2,48 +; or a0,a0,a2 +; slli a0,a0,32 +; srli a0,a0,32 +; or a4,a1,a0 +; brev8 a2,a4##tmp=a3 tmp2=a0 step=a1 ty=i64 +; srli a0,a2,32 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 8 -; srli a4, a0, 8 -; slli a1, a4, 0x38 -; srli a3, a1, 0x38 -; or a4, a2, a3 -; slli a1, a4, 0x10 -; srli a2, a0, 0x10 -; slli a4, a2, 8 -; srli a2, a2, 8 +; slli a1, a0, 8 +; srli a2, a0, 8 ; slli a2, a2, 0x38 -; srli a5, a2, 0x38 -; or a2, a4, a5 +; srli a2, a2, 0x38 +; or a1, a1, a2 +; slli a2, a1, 0x10 +; srli a4, a0, 0x10 +; slli a1, a4, 8 +; srli a3, a4, 8 +; slli a3, a3, 0x38 +; srli a3, a3, 0x38 +; or a1, a1, a3 +; slli a1, a1, 0x30 +; srli a1, a1, 0x30 +; or a1, a2, a1 +; slli a1, a1, 0x20 +; srli a2, a0, 0x20 +; slli a0, a2, 8 +; srli a3, a2, 8 +; slli a3, a3, 0x38 +; srli a3, a3, 0x38 +; or a0, a0, a3 +; slli a0, a0, 0x10 +; srli a3, a2, 0x10 +; slli a2, a3, 8 +; srli a3, a3, 8 +; slli a3, a3, 0x38 +; srli a3, a3, 0x38 +; or a2, a2, a3 ; slli a2, a2, 0x30 -; srli a4, a2, 0x30 -; or a1, a1, a4 -; slli a2, a1, 0x20 -; srli a5, a0, 0x20 -; slli a0, a5, 8 -; srli a3, a5, 8 -; slli a4, a3, 0x38 -; srli a1, a4, 0x38 -; or a3, a0, a1 -; slli a4, a3, 0x10 -; srli a0, a5, 0x10 -; slli a3, a0, 8 -; srli a5, a0, 8 -; slli a0, a5, 0x38 -; srli a5, a0, 0x38 -; or a5, a3, a5 -; slli a0, a5, 0x30 -; srli a3, a0, 0x30 -; or a4, a4, a3 -; slli a0, a4, 0x20 -; srli a3, a0, 0x20 -; or a4, a2, a3 -; mv a3, zero -; addi a2, zero, 0x40 +; srli a2, a2, 0x30 +; or a0, a0, a2 +; slli a0, a0, 0x20 +; srli a0, a0, 0x20 +; or a4, a1, a0 +; mv a2, zero +; addi a1, zero, 0x40 +; addi a3, zero, 1 +; slli a3, a3, 0x3f ; addi a0, zero, 1 -; slli a0, a0, 0x3f -; addi a1, zero, 1 -; slli a1, a1, 0x38 -; blez a2, 0x34 -; and t5, a0, a4 +; slli a0, a0, 0x38 +; blez a1, 0x34 +; and t5, a3, a4 ; beq zero, t5, 8 -; or a3, a3, a1 -; addi a2, a2, -1 -; srli a0, a0, 1 +; or a2, a2, a0 +; addi a1, a1, -1 +; srli a3, a3, 1 ; addi t5, zero, 8 -; rem t5, a2, t5 +; rem t5, a1, t5 ; bnez t5, 0xc -; srli a1, a1, 0xf +; srli a0, a0, 0xf ; j -0x28 -; slli a1, a1, 1 +; slli a0, a0, 1 ; j -0x30 -; srli a0, a3, 0x20 +; srli a0, a2, 0x20 ; ret function %bitrev_i64(i64) -> i64 { @@ -258,99 +258,99 @@ block0(v0: i64): ; VCode: ; block0: -; slli a2,a0,8 -; srli a4,a0,8 -; slli a1,a4,56 -; srli a3,a1,56 -; or a4,a2,a3 -; slli a1,a4,16 -; srli a2,a0,16 -; slli a4,a2,8 -; srli a2,a2,8 +; slli a1,a0,8 +; srli a2,a0,8 ; slli a2,a2,56 -; srli a5,a2,56 -; or a2,a4,a5 +; srli a2,a2,56 +; or a1,a1,a2 +; slli a2,a1,16 +; srli a4,a0,16 +; slli a1,a4,8 +; srli a3,a4,8 +; slli a3,a3,56 +; srli a3,a3,56 +; or a1,a1,a3 +; slli a1,a1,48 +; srli 
a1,a1,48 +; or a1,a2,a1 +; slli a1,a1,32 +; srli a2,a0,32 +; slli a0,a2,8 +; srli a3,a2,8 +; slli a3,a3,56 +; srli a3,a3,56 +; or a0,a0,a3 +; slli a0,a0,16 +; srli a3,a2,16 +; slli a2,a3,8 +; srli a3,a3,8 +; slli a3,a3,56 +; srli a3,a3,56 +; or a2,a2,a3 ; slli a2,a2,48 -; srli a4,a2,48 -; or a1,a1,a4 -; slli a2,a1,32 -; srli a5,a0,32 -; slli a0,a5,8 -; srli a3,a5,8 -; slli a4,a3,56 -; srli a1,a4,56 -; or a3,a0,a1 -; slli a4,a3,16 -; srli a0,a5,16 -; slli a3,a0,8 -; srli a5,a0,8 -; slli a0,a5,56 -; srli a5,a0,56 -; or a5,a3,a5 -; slli a0,a5,48 -; srli a3,a0,48 -; or a4,a4,a3 -; slli a0,a4,32 -; srli a3,a0,32 -; or a4,a2,a3 -; brev8 a0,a4##tmp=a3 tmp2=a1 step=a2 ty=i64 +; srli a2,a2,48 +; or a0,a0,a2 +; slli a0,a0,32 +; srli a0,a0,32 +; or a4,a1,a0 +; brev8 a0,a4##tmp=a3 tmp2=a2 step=a1 ty=i64 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 8 -; srli a4, a0, 8 -; slli a1, a4, 0x38 -; srli a3, a1, 0x38 -; or a4, a2, a3 -; slli a1, a4, 0x10 -; srli a2, a0, 0x10 -; slli a4, a2, 8 -; srli a2, a2, 8 +; slli a1, a0, 8 +; srli a2, a0, 8 ; slli a2, a2, 0x38 -; srli a5, a2, 0x38 -; or a2, a4, a5 +; srli a2, a2, 0x38 +; or a1, a1, a2 +; slli a2, a1, 0x10 +; srli a4, a0, 0x10 +; slli a1, a4, 8 +; srli a3, a4, 8 +; slli a3, a3, 0x38 +; srli a3, a3, 0x38 +; or a1, a1, a3 +; slli a1, a1, 0x30 +; srli a1, a1, 0x30 +; or a1, a2, a1 +; slli a1, a1, 0x20 +; srli a2, a0, 0x20 +; slli a0, a2, 8 +; srli a3, a2, 8 +; slli a3, a3, 0x38 +; srli a3, a3, 0x38 +; or a0, a0, a3 +; slli a0, a0, 0x10 +; srli a3, a2, 0x10 +; slli a2, a3, 8 +; srli a3, a3, 8 +; slli a3, a3, 0x38 +; srli a3, a3, 0x38 +; or a2, a2, a3 ; slli a2, a2, 0x30 -; srli a4, a2, 0x30 -; or a1, a1, a4 -; slli a2, a1, 0x20 -; srli a5, a0, 0x20 -; slli a0, a5, 8 -; srli a3, a5, 8 -; slli a4, a3, 0x38 -; srli a1, a4, 0x38 -; or a3, a0, a1 -; slli a4, a3, 0x10 -; srli a0, a5, 0x10 -; slli a3, a0, 8 -; srli a5, a0, 8 -; slli a0, a5, 0x38 -; srli a5, a0, 0x38 -; or a5, a3, a5 -; slli a0, a5, 0x30 -; srli a3, a0, 0x30 -; or a4, a4, a3 -; slli a0, a4, 0x20 -; srli a3, a0, 0x20 -; or a4, a2, a3 +; srli a2, a2, 0x30 +; or a0, a0, a2 +; slli a0, a0, 0x20 +; srli a0, a0, 0x20 +; or a4, a1, a0 ; mv a0, zero -; addi a2, zero, 0x40 +; addi a1, zero, 0x40 ; addi a3, zero, 1 ; slli a3, a3, 0x3f -; addi a1, zero, 1 -; slli a1, a1, 0x38 -; blez a2, 0x34 +; addi a2, zero, 1 +; slli a2, a2, 0x38 +; blez a1, 0x34 ; and t5, a3, a4 ; beq zero, t5, 8 -; or a0, a0, a1 -; addi a2, a2, -1 +; or a0, a0, a2 +; addi a1, a1, -1 ; srli a3, a3, 1 ; addi t5, zero, 8 -; rem t5, a2, t5 +; rem t5, a1, t5 ; bnez t5, 0xc -; srli a1, a1, 0xf +; srli a2, a2, 0xf ; j -0x28 -; slli a1, a1, 1 +; slli a2, a2, 1 ; j -0x30 ; ret @@ -362,191 +362,191 @@ block0(v0: i128): ; VCode: ; block0: -; slli a3,a1,8 -; srli a5,a1,8 -; slli a2,a5,56 -; srli a4,a2,56 -; or a5,a3,a4 -; slli a2,a5,16 -; srli a3,a1,16 -; slli a5,a3,8 -; srli a3,a3,8 +; slli a2,a1,8 +; srli a3,a1,8 ; slli a3,a3,56 ; srli a3,a3,56 -; or a3,a5,a3 +; or a2,a2,a3 +; slli a3,a2,16 +; srli a5,a1,16 +; slli a2,a5,8 +; srli a4,a5,8 +; slli a4,a4,56 +; srli a4,a4,56 +; or a2,a2,a4 +; slli a2,a2,48 +; srli a2,a2,48 +; or a2,a3,a2 +; slli a2,a2,32 +; srli a3,a1,32 +; slli a1,a3,8 +; srli a4,a3,8 +; slli a4,a4,56 +; srli a4,a4,56 +; or a1,a1,a4 +; slli a1,a1,16 +; srli a4,a3,16 +; slli a3,a4,8 +; srli a4,a4,8 +; slli a4,a4,56 +; srli a4,a4,56 +; or a3,a3,a4 ; slli a3,a3,48 -; srli a5,a3,48 -; or a2,a2,a5 -; slli a3,a2,32 +; srli a3,a3,48 +; or a1,a1,a3 +; slli a1,a1,32 ; srli a1,a1,32 -; slli a2,a1,8 -; srli a4,a1,8 -; slli 
a5,a4,56 -; srli a4,a5,56 -; or a4,a2,a4 -; slli a5,a4,16 -; srli a1,a1,16 -; slli a4,a1,8 -; srli a1,a1,8 +; or a5,a2,a1 +; brev8 a3,a5##tmp=a4 tmp2=a1 step=a2 ty=i64 +; slli a5,a0,8 +; srli a1,a0,8 ; slli a1,a1,56 ; srli a1,a1,56 -; or a1,a4,a1 -; slli a1,a1,48 -; srli a4,a1,48 -; or a5,a5,a4 -; slli a1,a5,32 -; srli a4,a1,32 -; or a5,a3,a4 -; brev8 a4,a5##tmp=a1 tmp2=a2 step=a3 ty=i64 -; slli a1,a0,8 -; srli a2,a0,8 -; slli a5,a2,56 -; srli a2,a5,56 -; or a2,a1,a2 -; slli a5,a2,16 -; srli a1,a0,16 -; slli a2,a1,8 -; srli a1,a1,8 -; slli a1,a1,56 -; srli a3,a1,56 -; or a1,a2,a3 -; slli a1,a1,48 -; srli a2,a1,48 -; or a5,a5,a2 -; slli a1,a5,32 -; srli a3,a0,32 -; slli a5,a3,8 -; srli a0,a3,8 -; slli a2,a0,56 -; srli a0,a2,56 -; or a0,a5,a0 -; slli a2,a0,16 -; srli a5,a3,16 -; slli a0,a5,8 -; srli a3,a5,8 -; slli a5,a3,56 -; srli a3,a5,56 -; or a3,a0,a3 -; slli a5,a3,48 -; srli a0,a5,48 -; or a2,a2,a0 -; slli a5,a2,32 -; srli a0,a5,32 -; or a3,a1,a0 -; brev8 a1,a3##tmp=a2 tmp2=a5 step=a0 ty=i64 -; mv a0,a4 +; or a1,a5,a1 +; slli a1,a1,16 +; srli a4,a0,16 +; slli a2,a4,8 +; srli a4,a4,8 +; slli a4,a4,56 +; srli a4,a4,56 +; or a2,a2,a4 +; slli a2,a2,48 +; srli a2,a2,48 +; or a1,a1,a2 +; slli a1,a1,32 +; srli a2,a0,32 +; slli a0,a2,8 +; srli a4,a2,8 +; slli a4,a4,56 +; srli a4,a4,56 +; or a0,a0,a4 +; slli a0,a0,16 +; srli a4,a2,16 +; slli a2,a4,8 +; srli a4,a4,8 +; slli a4,a4,56 +; srli a4,a4,56 +; or a2,a2,a4 +; slli a2,a2,48 +; srli a2,a2,48 +; or a4,a0,a2 +; slli a0,a4,32 +; srli a0,a0,32 +; or a5,a1,a0 +; brev8 a1,a5##tmp=a4 tmp2=a2 step=a0 ty=i64 +; mv a0,a3 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a1, 8 -; srli a5, a1, 8 -; slli a2, a5, 0x38 -; srli a4, a2, 0x38 -; or a5, a3, a4 -; slli a2, a5, 0x10 -; srli a3, a1, 0x10 -; slli a5, a3, 8 -; srli a3, a3, 8 +; slli a2, a1, 8 +; srli a3, a1, 8 ; slli a3, a3, 0x38 ; srli a3, a3, 0x38 -; or a3, a5, a3 +; or a2, a2, a3 +; slli a3, a2, 0x10 +; srli a5, a1, 0x10 +; slli a2, a5, 8 +; srli a4, a5, 8 +; slli a4, a4, 0x38 +; srli a4, a4, 0x38 +; or a2, a2, a4 +; slli a2, a2, 0x30 +; srli a2, a2, 0x30 +; or a2, a3, a2 +; slli a2, a2, 0x20 +; srli a3, a1, 0x20 +; slli a1, a3, 8 +; srli a4, a3, 8 +; slli a4, a4, 0x38 +; srli a4, a4, 0x38 +; or a1, a1, a4 +; slli a1, a1, 0x10 +; srli a4, a3, 0x10 +; slli a3, a4, 8 +; srli a4, a4, 8 +; slli a4, a4, 0x38 +; srli a4, a4, 0x38 +; or a3, a3, a4 ; slli a3, a3, 0x30 -; srli a5, a3, 0x30 -; or a2, a2, a5 -; slli a3, a2, 0x20 +; srli a3, a3, 0x30 +; or a1, a1, a3 +; slli a1, a1, 0x20 ; srli a1, a1, 0x20 -; slli a2, a1, 8 -; srli a4, a1, 8 -; slli a5, a4, 0x38 -; srli a4, a5, 0x38 -; or a4, a2, a4 -; slli a5, a4, 0x10 -; srli a1, a1, 0x10 -; slli a4, a1, 8 -; srli a1, a1, 8 -; slli a1, a1, 0x38 -; srli a1, a1, 0x38 -; or a1, a4, a1 -; slli a1, a1, 0x30 -; srli a4, a1, 0x30 -; or a5, a5, a4 -; slli a1, a5, 0x20 -; srli a4, a1, 0x20 -; or a5, a3, a4 -; mv a4, zero -; addi a3, zero, 0x40 +; or a5, a2, a1 +; mv a3, zero +; addi a2, zero, 0x40 +; addi a4, zero, 1 +; slli a4, a4, 0x3f ; addi a1, zero, 1 -; slli a1, a1, 0x3f -; addi a2, zero, 1 -; slli a2, a2, 0x38 -; blez a3, 0x34 -; and t5, a1, a5 +; slli a1, a1, 0x38 +; blez a2, 0x34 +; and t5, a4, a5 ; beq zero, t5, 8 -; or a4, a4, a2 -; addi a3, a3, -1 -; srli a1, a1, 1 +; or a3, a3, a1 +; addi a2, a2, -1 +; srli a4, a4, 1 ; addi t5, zero, 8 -; rem t5, a3, t5 +; rem t5, a2, t5 ; bnez t5, 0xc -; srli a2, a2, 0xf +; srli a1, a1, 0xf ; j -0x28 -; slli a2, a2, 1 +; slli a1, a1, 1 ; j -0x30 -; slli a1, a0, 8 -; srli a2, a0, 8 -; slli a5, a2, 0x38 -; 
srli a2, a5, 0x38 -; or a2, a1, a2 -; slli a5, a2, 0x10 -; srli a1, a0, 0x10 -; slli a2, a1, 8 -; srli a1, a1, 8 +; slli a5, a0, 8 +; srli a1, a0, 8 ; slli a1, a1, 0x38 -; srli a3, a1, 0x38 -; or a1, a2, a3 -; slli a1, a1, 0x30 -; srli a2, a1, 0x30 -; or a5, a5, a2 -; slli a1, a5, 0x20 -; srli a3, a0, 0x20 -; slli a5, a3, 8 -; srli a0, a3, 8 -; slli a2, a0, 0x38 -; srli a0, a2, 0x38 -; or a0, a5, a0 -; slli a2, a0, 0x10 -; srli a5, a3, 0x10 -; slli a0, a5, 8 -; srli a3, a5, 8 -; slli a5, a3, 0x38 -; srli a3, a5, 0x38 -; or a3, a0, a3 -; slli a5, a3, 0x30 -; srli a0, a5, 0x30 -; or a2, a2, a0 -; slli a5, a2, 0x20 -; srli a0, a5, 0x20 -; or a3, a1, a0 +; srli a1, a1, 0x38 +; or a1, a5, a1 +; slli a1, a1, 0x10 +; srli a4, a0, 0x10 +; slli a2, a4, 8 +; srli a4, a4, 8 +; slli a4, a4, 0x38 +; srli a4, a4, 0x38 +; or a2, a2, a4 +; slli a2, a2, 0x30 +; srli a2, a2, 0x30 +; or a1, a1, a2 +; slli a1, a1, 0x20 +; srli a2, a0, 0x20 +; slli a0, a2, 8 +; srli a4, a2, 8 +; slli a4, a4, 0x38 +; srli a4, a4, 0x38 +; or a0, a0, a4 +; slli a0, a0, 0x10 +; srli a4, a2, 0x10 +; slli a2, a4, 8 +; srli a4, a4, 8 +; slli a4, a4, 0x38 +; srli a4, a4, 0x38 +; or a2, a2, a4 +; slli a2, a2, 0x30 +; srli a2, a2, 0x30 +; or a4, a0, a2 +; slli a0, a4, 0x20 +; srli a0, a0, 0x20 +; or a5, a1, a0 ; mv a1, zero ; addi a0, zero, 0x40 +; addi a4, zero, 1 +; slli a4, a4, 0x3f ; addi a2, zero, 1 -; slli a2, a2, 0x3f -; addi a5, zero, 1 -; slli a5, a5, 0x38 +; slli a2, a2, 0x38 ; blez a0, 0x34 -; and t5, a2, a3 +; and t5, a4, a5 ; beq zero, t5, 8 -; or a1, a1, a5 +; or a1, a1, a2 ; addi a0, a0, -1 -; srli a2, a2, 1 +; srli a4, a4, 1 ; addi t5, zero, 8 ; rem t5, a0, t5 ; bnez t5, 0xc -; srli a5, a5, 0xf +; srli a2, a2, 0xf ; j -0x28 -; slli a5, a5, 1 +; slli a2, a2, 1 ; j -0x30 -; mv a0, a4 +; mv a0, a3 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/br_table.clif b/cranelift/filetests/filetests/isa/riscv64/br_table.clif index 38bf96b51498..2979fb615353 100644 --- a/cranelift/filetests/filetests/isa/riscv64/br_table.clif +++ b/cranelift/filetests/filetests/isa/riscv64/br_table.clif @@ -29,61 +29,61 @@ block5(v5: i32): ; VCode: ; block0: -; br_table a0,[MachLabel(6),MachLabel(5),MachLabel(1),MachLabel(2),MachLabel(3)]##tmp1=a2,tmp2=a3 +; br_table a0,[MachLabel(6),MachLabel(5),MachLabel(1),MachLabel(2),MachLabel(3)]##tmp1=a2,tmp2=a1 ; block1: ; j label4 ; block2: ; j label4 ; block3: -; li a5,3 +; li a1,3 ; j label7 ; block4: -; li a5,2 +; li a1,2 ; j label7 ; block5: -; li a5,1 +; li a1,1 ; j label7 ; block6: -; li a5,4 +; li a1,4 ; j label7 ; block7: -; addw a0,a0,a5 +; addw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; slli t6, a0, 0x20 ; srli t6, t6, 0x20 -; addi a3, zero, 4 -; bltu t6, a3, 0xc -; auipc a3, 0 -; jalr zero, a3, 0x54 +; addi a1, zero, 4 +; bltu t6, a1, 0xc +; auipc a1, 0 +; jalr zero, a1, 0x54 ; auipc a2, 0 -; slli a3, t6, 3 -; add a2, a2, a3 +; slli a1, t6, 3 +; add a2, a2, a1 ; jalr zero, a2, 0x10 -; auipc a3, 0 -; jalr zero, a3, 0x34 -; auipc a3, 0 -; jalr zero, a3, 0x24 -; auipc a3, 0 -; jalr zero, a3, 0x1c -; auipc a3, 0 -; jalr zero, a3, 0xc +; auipc a1, 0 +; jalr zero, a1, 0x34 +; auipc a1, 0 +; jalr zero, a1, 0x24 +; auipc a1, 0 +; jalr zero, a1, 0x1c +; auipc a1, 0 +; jalr zero, a1, 0xc ; block1: ; offset 0x48 ; j 0xc ; block2: ; offset 0x4c -; addi a5, zero, 3 +; addi a1, zero, 3 ; j 0x18 ; block3: ; offset 0x54 -; addi a5, zero, 2 +; addi a1, zero, 2 ; j 0x10 ; block4: ; offset 0x5c -; addi a5, zero, 1 +; addi a1, zero, 1 ; j 8 ; block5: ; offset 0x64 -; addi a5, zero, 4 +; 
addi a1, zero, 4 ; block6: ; offset 0x68 -; addw a0, a0, a5 +; addw a0, a0, a1 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/brif.clif b/cranelift/filetests/filetests/isa/riscv64/brif.clif index f998b0bf5da8..622e02aa5aef 100644 --- a/cranelift/filetests/filetests/isa/riscv64/brif.clif +++ b/cranelift/filetests/filetests/isa/riscv64/brif.clif @@ -18,8 +18,8 @@ block2: ; VCode: ; block0: -; andi a4,a0,255 -; bne a4,zero,taken(label2),not_taken(label1) +; andi a0,a0,255 +; bne a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -29,8 +29,8 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; andi a4, a0, 0xff -; bnez a4, 0xc +; andi a0, a0, 0xff +; bnez a0, 0xc ; block1: ; offset 0x8 ; mv a0, zero ; ret @@ -53,8 +53,8 @@ block2: ; VCode: ; block0: -; slli a4,a0,48 -; srai a0,a4,48 +; slli a0,a0,48 +; srai a0,a0,48 ; bne a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 @@ -65,8 +65,8 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; slli a4, a0, 0x30 -; srai a0, a4, 0x30 +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 ; bnez a0, 0xc ; block1: ; offset 0xc ; mv a0, zero @@ -90,8 +90,8 @@ block2: ; VCode: ; block0: -; sext.w a4,a0 -; bne a4,zero,taken(label2),not_taken(label1) +; sext.w a0,a0 +; bne a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -101,8 +101,8 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a4, a0 -; bnez a4, 0xc +; sext.w a0, a0 +; bnez a0, 0xc ; block1: ; offset 0x8 ; mv a0, zero ; ret @@ -158,8 +158,8 @@ block2: ; VCode: ; block0: -; or a5,a0,a1 -; bne a5,zero,taken(label2),not_taken(label1) +; or a0,a0,a1 +; bne a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -169,8 +169,8 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; or a5, a0, a1 -; bnez a5, 0xc +; or a0, a0, a1 +; bnez a0, 0xc ; block1: ; offset 0x8 ; mv a0, zero ; ret @@ -194,9 +194,9 @@ block2: ; VCode: ; block0: -; andi a5,a0,255 +; andi a0,a0,255 ; andi a1,a1,255 -; beq a5,a1,taken(label2),not_taken(label1) +; beq a0,a1,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -206,9 +206,9 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; andi a5, a0, 0xff +; andi a0, a0, 0xff ; andi a1, a1, 0xff -; beq a5, a1, 0xc +; beq a0, a1, 0xc ; block1: ; offset 0xc ; mv a0, zero ; ret @@ -232,13 +232,11 @@ block2: ; VCode: ; block0: -; mv a3,a1 -; slli a5,a0,48 -; srai a1,a5,48 -; mv a5,a3 -; slli a3,a5,48 -; srai a5,a3,48 -; bne a1,a5,taken(label2),not_taken(label1) +; slli a0,a0,48 +; srai a0,a0,48 +; slli a1,a1,48 +; srai a1,a1,48 +; bne a0,a1,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -248,17 +246,15 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; mv a3, a1 -; slli a5, a0, 0x30 -; srai a1, a5, 0x30 -; mv a5, a3 -; slli a3, a5, 0x30 -; srai a5, a3, 0x30 -; bne a1, a5, 0xc -; block1: ; offset 0x1c +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 +; slli a1, a1, 0x30 +; srai a1, a1, 0x30 +; bne a0, a1, 0xc +; block1: ; offset 0x14 ; mv a0, zero ; ret -; block2: ; offset 0x24 +; block2: ; offset 0x1c ; addi a0, zero, 1 ; ret @@ -278,9 +274,9 @@ block2: ; VCode: ; block0: -; sext.w a5,a0 +; sext.w a0,a0 ; sext.w a1,a1 -; blt a5,a1,taken(label2),not_taken(label1) +; blt a0,a1,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -290,9 +286,9 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a5, a0 +; sext.w a0, a0 ; sext.w a1, a1 -; blt a5, a1, 0xc +; blt a0, a1, 0xc ; block1: ; offset 0xc ; mv a0, zero ; ret @@ -349,66 +345,33 @@ block2: } ; VCode: -; addi sp,sp,-16 -; sd 
ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-16 -; sd s1,8(sp) ; block0: -; slt s1,a3,a1 -; sltu a4,a2,a0 -; xor a5,a3,a1 -; select a1,a4,s1##condition=(a5 eq zero) -; bne a1,zero,taken(label2),not_taken(label1) +; slt a4,a3,a1 +; sltu a2,a2,a0 +; xor a1,a3,a1 +; select a3,a2,a4##condition=(a1 eq zero) +; bne a3,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 -; ld s1,8(sp) -; addi sp,sp,16 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 ; ret ; block2: ; li a0,1 -; ld s1,8(sp) -; addi sp,sp,16 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x10 -; sd s1, 8(sp) -; block1: ; offset 0x18 -; slt s1, a3, a1 -; sltu a4, a2, a0 -; xor a5, a3, a1 -; mv a1, a4 -; beqz a5, 8 -; mv a1, s1 -; bnez a1, 0x20 -; block2: ; offset 0x34 +; slt a4, a3, a1 +; sltu a2, a2, a0 +; xor a1, a3, a1 +; mv a3, a2 +; beqz a1, 8 +; mv a3, a4 +; bnez a3, 0xc +; block1: ; offset 0x1c ; mv a0, zero -; ld s1, 8(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 ; ret -; block3: ; offset 0x50 +; block2: ; offset 0x24 ; addi a0, zero, 1 -; ld s1, 8(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 ; ret function %brif_fcmp_f32(f32, f32) -> i8 { @@ -427,8 +390,8 @@ block2: ; VCode: ; block0: -; flt.s a5,fa0,fa1 -; bne a5,zero,taken(label2),not_taken(label1) +; flt.s a0,fa0,fa1 +; bne a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -438,8 +401,8 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; flt.s a5, fa0, fa1 -; bnez a5, 0xc +; flt.s a0, fa0, fa1 +; bnez a0, 0xc ; block1: ; offset 0x8 ; mv a0, zero ; ret @@ -463,8 +426,8 @@ block2: ; VCode: ; block0: -; flt.d a5,fa0,fa1 -; beq a5,zero,taken(label2),not_taken(label1) +; flt.d a0,fa0,fa1 +; beq a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -474,8 +437,8 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; flt.d a5, fa0, fa1 -; beqz a5, 0xc +; flt.d a0, fa0, fa1 +; beqz a0, 0xc ; block1: ; offset 0x8 ; mv a0, zero ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/bswap-zbb.clif b/cranelift/filetests/filetests/isa/riscv64/bswap-zbb.clif index 1a1e7497a052..eff2b3c09111 100644 --- a/cranelift/filetests/filetests/isa/riscv64/bswap-zbb.clif +++ b/cranelift/filetests/filetests/isa/riscv64/bswap-zbb.clif @@ -10,14 +10,14 @@ block0(v0: i16): ; VCode: ; block0: -; rev8 a2,a0 -; srli a0,a2,48 +; rev8 a0,a0 +; srli a0,a0,48 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x13, 0x56, 0x85, 0x6b -; srli a0, a2, 0x30 +; .byte 0x13, 0x55, 0x85, 0x6b +; srli a0, a0, 0x30 ; ret function %bswap_i32(i32) -> i32 { @@ -28,14 +28,14 @@ block0(v0: i32): ; VCode: ; block0: -; rev8 a2,a0 -; srli a0,a2,32 +; rev8 a0,a0 +; srli a0,a0,32 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x13, 0x56, 0x85, 0x6b -; srli a0, a2, 0x20 +; .byte 0x13, 0x55, 0x85, 0x6b +; srli a0, a0, 0x20 ; ret function %bswap_i64(i64) -> i64 { @@ -62,17 +62,17 @@ block0(v0: i128): ; VCode: ; block0: -; mv a5,a0 +; mv a2,a0 ; rev8 a0,a1 -; mv a1,a5 +; mv a1,a2 ; rev8 a1,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a5, a0 +; mv a2, a0 ; .byte 0x13, 0xd5, 0x85, 0x6b -; mv a1, a5 +; mv a1, a2 ; .byte 0x93, 0xd5, 0x85, 0x6b ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/bswap.clif b/cranelift/filetests/filetests/isa/riscv64/bswap.clif index 4007284550c5..76d614c786e9 100644 --- a/cranelift/filetests/filetests/isa/riscv64/bswap.clif 
+++ b/cranelift/filetests/filetests/isa/riscv64/bswap.clif @@ -10,20 +10,20 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,8 -; srli a4,a0,8 -; slli a0,a4,56 -; srli a3,a0,56 -; or a0,a2,a3 +; slli a1,a0,8 +; srli a0,a0,8 +; slli a0,a0,56 +; srli a0,a0,56 +; or a0,a1,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 8 -; srli a4, a0, 8 -; slli a0, a4, 0x38 -; srli a3, a0, 0x38 -; or a0, a2, a3 +; slli a1, a0, 8 +; srli a0, a0, 8 +; slli a0, a0, 0x38 +; srli a0, a0, 0x38 +; or a0, a1, a0 ; ret function %bswap_i32(i32) -> i32 { @@ -34,40 +34,40 @@ block0(v0: i32): ; VCode: ; block0: -; slli a2,a0,8 -; srli a4,a0,8 -; slli a1,a4,56 -; srli a3,a1,56 -; or a4,a2,a3 -; slli a1,a4,16 -; srli a2,a0,16 -; slli a4,a2,8 -; srli a0,a2,8 -; slli a2,a0,56 -; srli a5,a2,56 -; or a0,a4,a5 -; slli a2,a0,48 -; srli a4,a2,48 -; or a0,a1,a4 +; slli a1,a0,8 +; srli a2,a0,8 +; slli a2,a2,56 +; srli a2,a2,56 +; or a1,a1,a2 +; slli a2,a1,16 +; srli a4,a0,16 +; slli a0,a4,8 +; srli a1,a4,8 +; slli a1,a1,56 +; srli a1,a1,56 +; or a0,a0,a1 +; slli a0,a0,48 +; srli a0,a0,48 +; or a0,a2,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 8 -; srli a4, a0, 8 -; slli a1, a4, 0x38 -; srli a3, a1, 0x38 -; or a4, a2, a3 -; slli a1, a4, 0x10 -; srli a2, a0, 0x10 -; slli a4, a2, 8 -; srli a0, a2, 8 -; slli a2, a0, 0x38 -; srli a5, a2, 0x38 -; or a0, a4, a5 -; slli a2, a0, 0x30 -; srli a4, a2, 0x30 -; or a0, a1, a4 +; slli a1, a0, 8 +; srli a2, a0, 8 +; slli a2, a2, 0x38 +; srli a2, a2, 0x38 +; or a1, a1, a2 +; slli a2, a1, 0x10 +; srli a4, a0, 0x10 +; slli a0, a4, 8 +; srli a1, a4, 8 +; slli a1, a1, 0x38 +; srli a1, a1, 0x38 +; or a0, a0, a1 +; slli a0, a0, 0x30 +; srli a0, a0, 0x30 +; or a0, a2, a0 ; ret function %bswap_i64(i64) -> i64 { @@ -78,80 +78,80 @@ block0(v0: i64): ; VCode: ; block0: -; slli a2,a0,8 -; srli a4,a0,8 -; slli a1,a4,56 -; srli a3,a1,56 -; or a4,a2,a3 -; slli a1,a4,16 -; srli a2,a0,16 -; slli a4,a2,8 -; srli a2,a2,8 +; slli a1,a0,8 +; srli a2,a0,8 ; slli a2,a2,56 -; srli a5,a2,56 -; or a2,a4,a5 +; srli a2,a2,56 +; or a1,a1,a2 +; slli a2,a1,16 +; srli a4,a0,16 +; slli a1,a4,8 +; srli a3,a4,8 +; slli a3,a3,56 +; srli a3,a3,56 +; or a1,a1,a3 +; slli a1,a1,48 +; srli a1,a1,48 +; or a1,a2,a1 +; slli a1,a1,32 +; srli a2,a0,32 +; slli a0,a2,8 +; srli a3,a2,8 +; slli a3,a3,56 +; srli a3,a3,56 +; or a0,a0,a3 +; slli a0,a0,16 +; srli a3,a2,16 +; slli a2,a3,8 +; srli a3,a3,8 +; slli a3,a3,56 +; srli a3,a3,56 +; or a2,a2,a3 ; slli a2,a2,48 -; srli a4,a2,48 -; or a1,a1,a4 -; slli a2,a1,32 -; srli a5,a0,32 -; slli a0,a5,8 -; srli a3,a5,8 -; slli a4,a3,56 -; srli a1,a4,56 -; or a3,a0,a1 -; slli a4,a3,16 -; srli a0,a5,16 -; slli a3,a0,8 -; srli a5,a0,8 -; slli a0,a5,56 -; srli a5,a0,56 -; or a5,a3,a5 -; slli a0,a5,48 -; srli a3,a0,48 -; or a4,a4,a3 -; slli a0,a4,32 -; srli a3,a0,32 -; or a0,a2,a3 +; srli a2,a2,48 +; or a0,a0,a2 +; slli a0,a0,32 +; srli a0,a0,32 +; or a0,a1,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 8 -; srli a4, a0, 8 -; slli a1, a4, 0x38 -; srli a3, a1, 0x38 -; or a4, a2, a3 -; slli a1, a4, 0x10 -; srli a2, a0, 0x10 -; slli a4, a2, 8 -; srli a2, a2, 8 +; slli a1, a0, 8 +; srli a2, a0, 8 ; slli a2, a2, 0x38 -; srli a5, a2, 0x38 -; or a2, a4, a5 +; srli a2, a2, 0x38 +; or a1, a1, a2 +; slli a2, a1, 0x10 +; srli a4, a0, 0x10 +; slli a1, a4, 8 +; srli a3, a4, 8 +; slli a3, a3, 0x38 +; srli a3, a3, 0x38 +; or a1, a1, a3 +; slli a1, a1, 0x30 +; srli a1, a1, 0x30 +; or a1, a2, a1 +; slli a1, a1, 0x20 +; srli a2, a0, 0x20 +; slli a0, a2, 8 +; srli a3, 
a2, 8 +; slli a3, a3, 0x38 +; srli a3, a3, 0x38 +; or a0, a0, a3 +; slli a0, a0, 0x10 +; srli a3, a2, 0x10 +; slli a2, a3, 8 +; srli a3, a3, 8 +; slli a3, a3, 0x38 +; srli a3, a3, 0x38 +; or a2, a2, a3 ; slli a2, a2, 0x30 -; srli a4, a2, 0x30 -; or a1, a1, a4 -; slli a2, a1, 0x20 -; srli a5, a0, 0x20 -; slli a0, a5, 8 -; srli a3, a5, 8 -; slli a4, a3, 0x38 -; srli a1, a4, 0x38 -; or a3, a0, a1 -; slli a4, a3, 0x10 -; srli a0, a5, 0x10 -; slli a3, a0, 8 -; srli a5, a0, 8 -; slli a0, a5, 0x38 -; srli a5, a0, 0x38 -; or a5, a3, a5 -; slli a0, a5, 0x30 -; srli a3, a0, 0x30 -; or a4, a4, a3 -; slli a0, a4, 0x20 -; srli a3, a0, 0x20 -; or a0, a2, a3 +; srli a2, a2, 0x30 +; or a0, a0, a2 +; slli a0, a0, 0x20 +; srli a0, a0, 0x20 +; or a0, a1, a0 ; ret function %bswap_i128(i128) -> i128 { @@ -162,151 +162,151 @@ block0(v0: i128): ; VCode: ; block0: -; slli a3,a1,8 -; srli a5,a1,8 -; slli a2,a5,56 -; srli a4,a2,56 -; or a5,a3,a4 -; slli a2,a5,16 -; srli a3,a1,16 -; slli a5,a3,8 -; srli a3,a3,8 +; slli a2,a1,8 +; srli a3,a1,8 ; slli a3,a3,56 ; srli a3,a3,56 -; or a3,a5,a3 +; or a2,a2,a3 +; slli a3,a2,16 +; srli a5,a1,16 +; slli a2,a5,8 +; srli a4,a5,8 +; slli a4,a4,56 +; srli a4,a4,56 +; or a2,a2,a4 +; slli a2,a2,48 +; srli a2,a2,48 +; or a2,a3,a2 +; slli a2,a2,32 +; srli a3,a1,32 +; slli a1,a3,8 +; srli a4,a3,8 +; slli a4,a4,56 +; srli a4,a4,56 +; or a1,a1,a4 +; slli a1,a1,16 +; srli a4,a3,16 +; slli a3,a4,8 +; srli a4,a4,8 +; slli a4,a4,56 +; srli a4,a4,56 +; or a3,a3,a4 ; slli a3,a3,48 -; srli a5,a3,48 -; or a2,a2,a5 -; slli a3,a2,32 +; srli a3,a3,48 +; or a1,a1,a3 +; slli a1,a1,32 ; srli a1,a1,32 -; slli a2,a1,8 -; srli a4,a1,8 -; slli a5,a4,56 -; srli a4,a5,56 -; or a4,a2,a4 -; slli a5,a4,16 -; srli a1,a1,16 -; slli a4,a1,8 -; srli a1,a1,8 +; or a5,a2,a1 +; slli a2,a0,8 +; srli a3,a0,8 +; slli a4,a3,56 +; srli a1,a4,56 +; or a1,a2,a1 +; slli a2,a1,16 +; srli a4,a0,16 +; slli a3,a4,8 +; srli a1,a4,8 ; slli a1,a1,56 ; srli a1,a1,56 -; or a1,a4,a1 +; or a1,a3,a1 ; slli a1,a1,48 -; srli a4,a1,48 -; or a5,a5,a4 -; slli a1,a5,32 -; srli a4,a1,32 -; or a5,a3,a4 -; slli a1,a0,8 -; srli a3,a0,8 -; slli a2,a3,56 -; srli a2,a2,56 -; or a3,a1,a2 -; slli a1,a3,16 -; srli a2,a0,16 -; slli a3,a2,8 -; srli a2,a2,8 -; slli a2,a2,56 -; srli a4,a2,56 -; or a2,a3,a4 -; slli a2,a2,48 -; srli a3,a2,48 -; or a1,a1,a3 -; slli a1,a1,32 -; srli a4,a0,32 -; slli a0,a4,8 -; srli a2,a4,8 -; slli a3,a2,56 -; srli a2,a3,56 -; or a2,a0,a2 -; slli a3,a2,16 -; srli a0,a4,16 -; slli a2,a0,8 -; srli a4,a0,8 -; slli a0,a4,56 -; srli a4,a0,56 -; or a4,a2,a4 -; slli a0,a4,48 -; srli a2,a0,48 -; or a3,a3,a2 -; slli a0,a3,32 -; srli a2,a0,32 -; or a1,a1,a2 +; srli a1,a1,48 +; or a1,a2,a1 +; slli a2,a1,32 +; srli a3,a0,32 +; slli a0,a3,8 +; srli a1,a3,8 +; slli a1,a1,56 +; srli a1,a1,56 +; or a0,a0,a1 +; slli a0,a0,16 +; srli a4,a3,16 +; slli a3,a4,8 +; srli a1,a4,8 +; slli a1,a1,56 +; srli a1,a1,56 +; or a1,a3,a1 +; slli a1,a1,48 +; srli a1,a1,48 +; or a0,a0,a1 +; slli a1,a0,32 +; srli a3,a1,32 +; or a1,a2,a3 ; mv a0,a5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a1, 8 -; srli a5, a1, 8 -; slli a2, a5, 0x38 -; srli a4, a2, 0x38 -; or a5, a3, a4 -; slli a2, a5, 0x10 -; srli a3, a1, 0x10 -; slli a5, a3, 8 -; srli a3, a3, 8 +; slli a2, a1, 8 +; srli a3, a1, 8 ; slli a3, a3, 0x38 ; srli a3, a3, 0x38 -; or a3, a5, a3 +; or a2, a2, a3 +; slli a3, a2, 0x10 +; srli a5, a1, 0x10 +; slli a2, a5, 8 +; srli a4, a5, 8 +; slli a4, a4, 0x38 +; srli a4, a4, 0x38 +; or a2, a2, a4 +; slli a2, a2, 0x30 +; srli a2, a2, 0x30 +; or a2, a3, 
a2 +; slli a2, a2, 0x20 +; srli a3, a1, 0x20 +; slli a1, a3, 8 +; srli a4, a3, 8 +; slli a4, a4, 0x38 +; srli a4, a4, 0x38 +; or a1, a1, a4 +; slli a1, a1, 0x10 +; srli a4, a3, 0x10 +; slli a3, a4, 8 +; srli a4, a4, 8 +; slli a4, a4, 0x38 +; srli a4, a4, 0x38 +; or a3, a3, a4 ; slli a3, a3, 0x30 -; srli a5, a3, 0x30 -; or a2, a2, a5 -; slli a3, a2, 0x20 +; srli a3, a3, 0x30 +; or a1, a1, a3 +; slli a1, a1, 0x20 ; srli a1, a1, 0x20 -; slli a2, a1, 8 -; srli a4, a1, 8 -; slli a5, a4, 0x38 -; srli a4, a5, 0x38 -; or a4, a2, a4 -; slli a5, a4, 0x10 -; srli a1, a1, 0x10 -; slli a4, a1, 8 -; srli a1, a1, 8 +; or a5, a2, a1 +; slli a2, a0, 8 +; srli a3, a0, 8 +; slli a4, a3, 0x38 +; srli a1, a4, 0x38 +; or a1, a2, a1 +; slli a2, a1, 0x10 +; srli a4, a0, 0x10 +; slli a3, a4, 8 +; srli a1, a4, 8 ; slli a1, a1, 0x38 ; srli a1, a1, 0x38 -; or a1, a4, a1 +; or a1, a3, a1 ; slli a1, a1, 0x30 -; srli a4, a1, 0x30 -; or a5, a5, a4 -; slli a1, a5, 0x20 -; srli a4, a1, 0x20 -; or a5, a3, a4 -; slli a1, a0, 8 -; srli a3, a0, 8 -; slli a2, a3, 0x38 -; srli a2, a2, 0x38 -; or a3, a1, a2 -; slli a1, a3, 0x10 -; srli a2, a0, 0x10 -; slli a3, a2, 8 -; srli a2, a2, 8 -; slli a2, a2, 0x38 -; srli a4, a2, 0x38 -; or a2, a3, a4 -; slli a2, a2, 0x30 -; srli a3, a2, 0x30 -; or a1, a1, a3 -; slli a1, a1, 0x20 -; srli a4, a0, 0x20 -; slli a0, a4, 8 -; srli a2, a4, 8 -; slli a3, a2, 0x38 -; srli a2, a3, 0x38 -; or a2, a0, a2 -; slli a3, a2, 0x10 -; srli a0, a4, 0x10 -; slli a2, a0, 8 -; srli a4, a0, 8 -; slli a0, a4, 0x38 -; srli a4, a0, 0x38 -; or a4, a2, a4 -; slli a0, a4, 0x30 -; srli a2, a0, 0x30 -; or a3, a3, a2 -; slli a0, a3, 0x20 -; srli a2, a0, 0x20 -; or a1, a1, a2 +; srli a1, a1, 0x30 +; or a1, a2, a1 +; slli a2, a1, 0x20 +; srli a3, a0, 0x20 +; slli a0, a3, 8 +; srli a1, a3, 8 +; slli a1, a1, 0x38 +; srli a1, a1, 0x38 +; or a0, a0, a1 +; slli a0, a0, 0x10 +; srli a4, a3, 0x10 +; slli a3, a4, 8 +; srli a1, a4, 8 +; slli a1, a1, 0x38 +; srli a1, a1, 0x38 +; or a1, a3, a1 +; slli a1, a1, 0x30 +; srli a1, a1, 0x30 +; or a0, a0, a1 +; slli a1, a0, 0x20 +; srli a3, a1, 0x20 +; or a1, a2, a3 ; mv a0, a5 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/call.clif b/cranelift/filetests/filetests/isa/riscv64/call.clif index 2acf93320b92..7e2f7403f0d8 100644 --- a/cranelift/filetests/filetests/isa/riscv64/call.clif +++ b/cranelift/filetests/filetests/isa/riscv64/call.clif @@ -17,8 +17,8 @@ block0(v0: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; load_ext_name_far a3,%g+0 -; callind a3 +; load_ext_name_far a1,%g+0 +; callind a1 ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -31,12 +31,12 @@ block0(v0: i64): ; sd s0, 0(sp) ; mv s0, sp ; block1: ; offset 0x10 -; auipc a3, 0 -; ld a3, 0xc(a3) +; auipc a1, 0 +; ld a1, 0xc(a1) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %g 0 ; .byte 0x00, 0x00, 0x00, 0x00 -; jalr a3 +; jalr a1 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -57,8 +57,8 @@ block0(v0: i32): ; mv fp,sp ; block0: ; slli a0,a0,32; srli a0,a0,32 -; load_ext_name_far a5,%g+0 -; callind a5 +; load_ext_name_far a1,%g+0 +; callind a1 ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -73,12 +73,12 @@ block0(v0: i32): ; block1: ; offset 0x10 ; slli a0, a0, 0x20 ; srli a0, a0, 0x20 -; auipc a5, 0 -; ld a5, 0xc(a5) +; auipc a1, 0 +; ld a1, 0xc(a1) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %g 0 ; .byte 0x00, 0x00, 0x00, 0x00 -; jalr a5 +; jalr a1 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -115,8 +115,8 @@ block0(v0: i32): ; mv fp,sp ; block0: ; slli a0,a0,32; srai 
a0,a0,32 -; load_ext_name_far a5,%g+0 -; callind a5 +; load_ext_name_far a1,%g+0 +; callind a1 ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -131,12 +131,12 @@ block0(v0: i32): ; block1: ; offset 0x10 ; slli a0, a0, 0x20 ; srai a0, a0, 0x20 -; auipc a5, 0 -; ld a5, 0xc(a5) +; auipc a1, 0 +; ld a1, 0xc(a1) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %g 0 ; .byte 0x00, 0x00, 0x00, 0x00 -; jalr a5 +; jalr a1 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -172,13 +172,12 @@ block0(v0: i8): ; sd ra,8(sp) ; sd fp,0(sp) ; mv fp,sp -; addi sp,sp,-32 -; sd s1,24(sp) +; addi sp,sp,-16 ; block0: ; li a7,42 -; slli a4,a0,56; srai a4,a4,56 -; sd a4,0(sp) -; load_ext_name_far s1,%g+0 +; slli a0,a0,56; srai a0,a0,56 +; sd a0,0(sp) +; load_ext_name_far t2,%g+0 ; mv a0,a7 ; mv a1,a7 ; mv a2,a7 @@ -186,9 +185,8 @@ block0(v0: i8): ; mv a4,a7 ; mv a5,a7 ; mv a6,a7 -; callind s1 -; ld s1,24(sp) -; addi sp,sp,32 +; callind t2 +; addi sp,sp,16 ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -200,15 +198,14 @@ block0(v0: i8): ; sd ra, 8(sp) ; sd s0, 0(sp) ; mv s0, sp -; addi sp, sp, -0x20 -; sd s1, 0x18(sp) -; block1: ; offset 0x18 +; addi sp, sp, -0x10 +; block1: ; offset 0x14 ; addi a7, zero, 0x2a -; slli a4, a0, 0x38 -; srai a4, a4, 0x38 -; sd a4, 0(sp) -; auipc s1, 0 -; ld s1, 0xc(s1) +; slli a0, a0, 0x38 +; srai a0, a0, 0x38 +; sd a0, 0(sp) +; auipc t2, 0 +; ld t2, 0xc(t2) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %g 0 ; .byte 0x00, 0x00, 0x00, 0x00 @@ -219,9 +216,8 @@ block0(v0: i8): ; mv a4, a7 ; mv a5, a7 ; mv a6, a7 -; jalr s1 -; ld s1, 0x18(sp) -; addi sp, sp, 0x20 +; jalr t2 +; addi sp, sp, 0x10 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -235,7 +231,7 @@ block0(v0: i8): ; VCode: ; block0: -; mv a5,a1 +; mv a2,a1 ; li a1,42 ; sw a1,0(a0) ; sw a1,8(a0) @@ -243,15 +239,14 @@ block0(v0: i8): ; sw a1,24(a0) ; sw a1,32(a0) ; sw a1,40(a0) -; mv a2,a5 -; slli a4,a2,56; srai a4,a4,56 +; slli a3,a2,56; srai a3,a3,56 ; sd a2,48(a0) ; mv a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a5, a1 +; mv a2, a1 ; addi a1, zero, 0x2a ; sw a1, 0(a0) ; sw a1, 8(a0) @@ -259,9 +254,8 @@ block0(v0: i8): ; sw a1, 0x18(a0) ; sw a1, 0x20(a0) ; sw a1, 0x28(a0) -; mv a2, a5 -; slli a4, a2, 0x38 -; srai a4, a4, 0x38 +; slli a3, a2, 0x38 +; srai a3, a3, 0x38 ; sd a2, 0x30(a0) ; mv a0, a1 ; ret @@ -290,32 +284,32 @@ block0: ; sd fp,0(sp) ; mv fp,sp ; addi sp,sp,-32 -; fsd fs0,24(sp) -; fsd fs2,16(sp) +; fsd fs2,24(sp) +; fsd fs3,16(sp) ; fsd fs4,8(sp) ; block0: -; load_ext_name_far a3,%g0+0 -; callind a3 -; fmv.d fs0,fa0 -; load_ext_name_far a3,%g1+0 -; callind a3 +; load_ext_name_far a0,%g0+0 +; callind a0 ; fmv.d fs2,fa0 -; load_ext_name_far a3,%g1+0 -; callind a3 +; load_ext_name_far a0,%g1+0 +; callind a0 +; fmv.d fs3,fa0 +; load_ext_name_far a0,%g1+0 +; callind a0 ; fmv.d fs4,fa0 -; load_ext_name_far a3,%g2+0 -; callind a3 -; load_ext_name_far a4,%g3+0 -; fmv.d fa0,fs0 -; callind a4 -; load_ext_name_far a5,%g4+0 +; load_ext_name_far a0,%g2+0 +; callind a0 +; load_ext_name_far a0,%g3+0 ; fmv.d fa0,fs2 -; callind a5 -; load_ext_name_far a0,%g4+0 -; fmv.d fa0,fs4 ; callind a0 -; fld fs0,24(sp) -; fld fs2,16(sp) +; load_ext_name_far a1,%g4+0 +; fmv.d fa0,fs3 +; callind a1 +; load_ext_name_far a2,%g4+0 +; fmv.d fa0,fs4 +; callind a2 +; fld fs2,24(sp) +; fld fs3,16(sp) ; fld fs4,8(sp) ; addi sp,sp,32 ; ld ra,8(sp) @@ -330,60 +324,60 @@ block0: ; sd s0, 0(sp) ; mv s0, sp ; addi sp, sp, -0x20 -; fsd fs0, 0x18(sp) -; fsd fs2, 0x10(sp) +; fsd fs2, 0x18(sp) +; fsd fs3, 0x10(sp) 
; fsd fs4, 8(sp) ; block1: ; offset 0x20 -; auipc a3, 0 -; ld a3, 0xc(a3) +; auipc a0, 0 +; ld a0, 0xc(a0) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %g0 0 ; .byte 0x00, 0x00, 0x00, 0x00 -; jalr a3 -; fmv.d fs0, fa0 -; auipc a3, 0 -; ld a3, 0xc(a3) +; jalr a0 +; fmv.d fs2, fa0 +; auipc a0, 0 +; ld a0, 0xc(a0) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %g1 0 ; .byte 0x00, 0x00, 0x00, 0x00 -; jalr a3 -; fmv.d fs2, fa0 -; auipc a3, 0 -; ld a3, 0xc(a3) +; jalr a0 +; fmv.d fs3, fa0 +; auipc a0, 0 +; ld a0, 0xc(a0) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %g1 0 ; .byte 0x00, 0x00, 0x00, 0x00 -; jalr a3 +; jalr a0 ; fmv.d fs4, fa0 -; auipc a3, 0 -; ld a3, 0xc(a3) +; auipc a0, 0 +; ld a0, 0xc(a0) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %g2 0 ; .byte 0x00, 0x00, 0x00, 0x00 -; jalr a3 -; auipc a4, 0 -; ld a4, 0xc(a4) +; jalr a0 +; auipc a0, 0 +; ld a0, 0xc(a0) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %g3 0 ; .byte 0x00, 0x00, 0x00, 0x00 -; fmv.d fa0, fs0 -; jalr a4 -; auipc a5, 0 -; ld a5, 0xc(a5) +; fmv.d fa0, fs2 +; jalr a0 +; auipc a1, 0 +; ld a1, 0xc(a1) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %g4 0 ; .byte 0x00, 0x00, 0x00, 0x00 -; fmv.d fa0, fs2 -; jalr a5 -; auipc a0, 0 -; ld a0, 0xc(a0) +; fmv.d fa0, fs3 +; jalr a1 +; auipc a2, 0 +; ld a2, 0xc(a2) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %g4 0 ; .byte 0x00, 0x00, 0x00, 0x00 ; fmv.d fa0, fs4 -; jalr a0 -; fld fs0, 0x18(sp) -; fld fs2, 0x10(sp) +; jalr a2 +; fld fs2, 0x18(sp) +; fld fs3, 0x10(sp) ; fld fs4, 8(sp) ; addi sp, sp, 0x20 ; ld ra, 8(sp) @@ -424,10 +418,10 @@ block0(v0: i64): ; mv fp,sp ; block0: ; li a2,42 -; load_ext_name_far a4,%f11+0 +; load_ext_name_far a3,%f11+0 ; mv a1,a0 ; mv a0,a2 -; callind a4 +; callind a3 ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -441,14 +435,14 @@ block0(v0: i64): ; mv s0, sp ; block1: ; offset 0x10 ; addi a2, zero, 0x2a -; auipc a4, 0 -; ld a4, 0xc(a4) +; auipc a3, 0 +; ld a3, 0xc(a3) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %f11 0 ; .byte 0x00, 0x00, 0x00, 0x00 ; mv a1, a0 ; mv a0, a2 -; jalr a4 +; jalr a3 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -487,10 +481,10 @@ block0(v0: i64): ; mv fp,sp ; block0: ; li a2,42 -; load_ext_name_far a4,%f12+0 +; load_ext_name_far a3,%f12+0 ; mv a1,a0 ; mv a0,a2 -; callind a4 +; callind a3 ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -504,14 +498,14 @@ block0(v0: i64): ; mv s0, sp ; block1: ; offset 0x10 ; addi a2, zero, 0x2a -; auipc a4, 0 -; ld a4, 0xc(a4) +; auipc a3, 0 +; ld a3, 0xc(a3) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %f12 0 ; .byte 0x00, 0x00, 0x00, 0x00 ; mv a1, a0 ; mv a0, a2 -; jalr a4 +; jalr a3 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -550,10 +544,10 @@ block0(v0: i64): ; mv fp,sp ; block0: ; li a2,42 -; load_ext_name_far a4,%f13+0 +; load_ext_name_far a3,%f13+0 ; mv a1,a0 ; mv a0,a2 -; callind a4 +; callind a3 ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -567,14 +561,14 @@ block0(v0: i64): ; mv s0, sp ; block1: ; offset 0x10 ; addi a2, zero, 0x2a -; auipc a4, 0 -; ld a4, 0xc(a4) +; auipc a3, 0 +; ld a3, 0xc(a3) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %f13 0 ; .byte 0x00, 0x00, 0x00, 0x00 ; mv a1, a0 ; mv a0, a2 -; jalr a4 +; jalr a3 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -627,20 +621,18 @@ block0(v0: i128, v1: i64): ; mv fp,sp ; addi sp,sp,-32 ; sd s1,24(sp) -; sd s3,16(sp) ; block0: ; sd a1,0(sp) -; 
load_ext_name_far s1,%f14+0 +; load_ext_name_far t2,%f14+0 +; mv s1,a1 ; mv a5,a1 ; mv a6,a2 ; mv a7,a0 -; mv s3,a1 ; mv a2,a7 ; mv a3,a5 ; mv a4,a7 -; callind s1 +; callind t2 ; ld s1,24(sp) -; ld s3,16(sp) ; addi sp,sp,32 ; ld ra,8(sp) ; ld fp,0(sp) @@ -655,24 +647,22 @@ block0(v0: i128, v1: i64): ; mv s0, sp ; addi sp, sp, -0x20 ; sd s1, 0x18(sp) -; sd s3, 0x10(sp) -; block1: ; offset 0x1c +; block1: ; offset 0x18 ; sd a1, 0(sp) -; auipc s1, 0 -; ld s1, 0xc(s1) +; auipc t2, 0 +; ld t2, 0xc(t2) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %f14 0 ; .byte 0x00, 0x00, 0x00, 0x00 +; mv s1, a1 ; mv a5, a1 ; mv a6, a2 ; mv a7, a0 -; mv s3, a1 ; mv a2, a7 ; mv a3, a5 ; mv a4, a7 -; jalr s1 +; jalr t2 ; ld s1, 0x18(sp) -; ld s3, 0x10(sp) ; addi sp, sp, 0x20 ; ld ra, 8(sp) ; ld s0, 0(sp) @@ -726,20 +716,18 @@ block0(v0: i128, v1: i64): ; mv fp,sp ; addi sp,sp,-32 ; sd s1,24(sp) -; sd s3,16(sp) ; block0: ; sd a1,0(sp) -; load_ext_name_far s1,%f15+0 +; load_ext_name_far t2,%f15+0 +; mv s1,a1 ; mv a5,a1 ; mv a6,a2 ; mv a7,a0 -; mv s3,a1 ; mv a2,a7 ; mv a3,a5 ; mv a4,a7 -; callind s1 +; callind t2 ; ld s1,24(sp) -; ld s3,16(sp) ; addi sp,sp,32 ; ld ra,8(sp) ; ld fp,0(sp) @@ -754,24 +742,22 @@ block0(v0: i128, v1: i64): ; mv s0, sp ; addi sp, sp, -0x20 ; sd s1, 0x18(sp) -; sd s3, 0x10(sp) -; block1: ; offset 0x1c +; block1: ; offset 0x18 ; sd a1, 0(sp) -; auipc s1, 0 -; ld s1, 0xc(s1) +; auipc t2, 0 +; ld t2, 0xc(t2) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %f15 0 ; .byte 0x00, 0x00, 0x00, 0x00 +; mv s1, a1 ; mv a5, a1 ; mv a6, a2 ; mv a7, a0 -; mv s3, a1 ; mv a2, a7 ; mv a3, a5 ; mv a4, a7 -; jalr s1 +; jalr t2 ; ld s1, 0x18(sp) -; ld s3, 0x10(sp) ; addi sp, sp, 0x20 ; ld ra, 8(sp) ; ld s0, 0(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/ceil.clif b/cranelift/filetests/filetests/isa/riscv64/ceil.clif index 8958ea25e6e9..7d774be686f1 100644 --- a/cranelift/filetests/filetests/isa/riscv64/ceil.clif +++ b/cranelift/filetests/filetests/isa/riscv64/ceil.clif @@ -11,31 +11,31 @@ block0(v0: f32): ; VCode: ; block0: -; lui a2,307200 -; fmv.w.x fa4,a2 -; fabs.s fa1,fa0 -; flt.s a2,fa1,fa4 -; fcvt.w.s a4,fa0,rup -; fcvt.s.w fa1,a4,rup -; fsgnj.s fa2,fa1,fa0 -; fmv.w.x fa4,zero -; fadd.s fa0,fa0,fa4,rne -; select fa0,fa0,fa2##condition=(a2 eq zero) +; lui a0,307200 +; fmv.w.x fa1,a0 +; fabs.s fa2,fa0 +; flt.s a0,fa2,fa1 +; fcvt.w.s a1,fa0,rup +; fcvt.s.w fa2,a1,rup +; fsgnj.s fa4,fa2,fa0 +; fmv.w.x fa1,zero +; fadd.s fa0,fa0,fa1,rne +; select fa0,fa0,fa4##condition=(a0 eq zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lui a2, 0x4b000 -; fmv.w.x fa4, a2 -; fabs.s fa1, fa0 -; flt.s a2, fa1, fa4 -; fcvt.w.s a4, fa0, rup ; trap: bad_toint -; fcvt.s.w fa1, a4, rup -; fsgnj.s fa2, fa1, fa0 -; fmv.w.x fa4, zero -; fadd.s fa0, fa0, fa4, rne -; beqz a2, 8 -; fmv.d fa0, fa2 +; lui a0, 0x4b000 +; fmv.w.x fa1, a0 +; fabs.s fa2, fa0 +; flt.s a0, fa2, fa1 +; fcvt.w.s a1, fa0, rup ; trap: bad_toint +; fcvt.s.w fa2, a1, rup +; fsgnj.s fa4, fa2, fa0 +; fmv.w.x fa1, zero +; fadd.s fa0, fa0, fa1, rne +; beqz a0, 8 +; fmv.d fa0, fa4 ; ret function %ceil_f64(f64) -> f64 { @@ -46,33 +46,32 @@ block0(v0: f64): ; VCode: ; block0: -; lui a2,1075 -; slli a4,a2,40 -; fmv.d.x fa1,a4 +; lui a0,1075 +; slli a0,a0,40 +; fmv.d.x fa1,a0 ; fabs.d fa2,fa0 -; flt.d a4,fa2,fa1 -; fcvt.l.d a0,fa0,rup -; fcvt.d.l fa2,a0,rup -; fsgnj.d fa4,fa2,fa0 -; fmv.d.x fa1,zero -; fadd.d fa2,fa0,fa1,rne -; select fa0,fa2,fa4##condition=(a4 eq zero) +; flt.d a0,fa2,fa1 +; fcvt.l.d a2,fa0,rup +; fcvt.d.l 
fa4,a2,rup +; fsgnj.d fa1,fa4,fa0 +; fmv.d.x fa2,zero +; fadd.d fa0,fa0,fa2,rne +; select fa0,fa0,fa1##condition=(a0 eq zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lui a2, 0x433 -; slli a4, a2, 0x28 -; fmv.d.x fa1, a4 +; lui a0, 0x433 +; slli a0, a0, 0x28 +; fmv.d.x fa1, a0 ; fabs.d fa2, fa0 -; flt.d a4, fa2, fa1 -; fcvt.l.d a0, fa0, rup ; trap: bad_toint -; fcvt.d.l fa2, a0, rup -; fsgnj.d fa4, fa2, fa0 -; fmv.d.x fa1, zero -; fadd.d fa2, fa0, fa1, rne -; fmv.d fa0, fa2 -; beqz a4, 8 -; fmv.d fa0, fa4 +; flt.d a0, fa2, fa1 +; fcvt.l.d a2, fa0, rup ; trap: bad_toint +; fcvt.d.l fa4, a2, rup +; fsgnj.d fa1, fa4, fa0 +; fmv.d.x fa2, zero +; fadd.d fa0, fa0, fa2, rne +; beqz a0, 8 +; fmv.d fa0, fa1 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/cls-zbb.clif b/cranelift/filetests/filetests/isa/riscv64/cls-zbb.clif index 229b157beedf..ea2523eeced4 100644 --- a/cranelift/filetests/filetests/isa/riscv64/cls-zbb.clif +++ b/cranelift/filetests/filetests/isa/riscv64/cls-zbb.clif @@ -11,22 +11,21 @@ block0(v0: i8): ; VCode: ; block0: -; sext.b a2,a0 -; not a4,a2 -; select a0,a4,a2##condition=(a2 slt zero) -; clz a2,a0 -; addi a0,a2,-57 +; sext.b a0,a0 +; not a1,a0 +; select a1,a1,a0##condition=(a0 slt zero) +; clz a0,a1 +; addi a0,a0,-57 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x13, 0x16, 0x45, 0x60 -; not a4, a2 -; mv a0, a4 -; bltz a2, 8 -; mv a0, a2 -; .byte 0x13, 0x16, 0x05, 0x60 -; addi a0, a2, -0x39 +; .byte 0x13, 0x15, 0x45, 0x60 +; not a1, a0 +; bltz a0, 8 +; mv a1, a0 +; .byte 0x13, 0x95, 0x05, 0x60 +; addi a0, a0, -0x39 ; ret function %cls_i16(i16) -> i16 { @@ -37,22 +36,21 @@ block0(v0: i16): ; VCode: ; block0: -; sext.h a2,a0 -; not a4,a2 -; select a0,a4,a2##condition=(a2 slt zero) -; clz a2,a0 -; addi a0,a2,-49 +; sext.h a0,a0 +; not a1,a0 +; select a1,a1,a0##condition=(a0 slt zero) +; clz a0,a1 +; addi a0,a0,-49 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x13, 0x16, 0x55, 0x60 -; not a4, a2 -; mv a0, a4 -; bltz a2, 8 -; mv a0, a2 -; .byte 0x13, 0x16, 0x05, 0x60 -; addi a0, a2, -0x31 +; .byte 0x13, 0x15, 0x55, 0x60 +; not a1, a0 +; bltz a0, 8 +; mv a1, a0 +; .byte 0x13, 0x95, 0x05, 0x60 +; addi a0, a0, -0x31 ; ret function %cls_i32(i32) -> i32 { @@ -63,22 +61,21 @@ block0(v0: i32): ; VCode: ; block0: -; sext.w a2,a0 -; not a4,a2 -; select a0,a4,a2##condition=(a2 slt zero) -; clz a2,a0 -; addi a0,a2,-33 +; sext.w a0,a0 +; not a1,a0 +; select a1,a1,a0##condition=(a0 slt zero) +; clz a0,a1 +; addi a0,a0,-33 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a2, a0 -; not a4, a2 -; mv a0, a4 -; bltz a2, 8 -; mv a0, a2 -; .byte 0x13, 0x16, 0x05, 0x60 -; addi a0, a2, -0x21 +; sext.w a0, a0 +; not a1, a0 +; bltz a0, 8 +; mv a1, a0 +; .byte 0x13, 0x95, 0x05, 0x60 +; addi a0, a0, -0x21 ; ret function %cls_i64(i64) -> i64 { @@ -89,19 +86,18 @@ block0(v0: i64): ; VCode: ; block0: -; not a2,a0 -; select a4,a2,a0##condition=(a0 slt zero) -; clz a0,a4 +; not a1,a0 +; select a1,a1,a0##condition=(a0 slt zero) +; clz a0,a1 ; addi a0,a0,-1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; not a2, a0 -; mv a4, a2 +; not a1, a0 ; bltz a0, 8 -; mv a4, a0 -; .byte 0x13, 0x15, 0x07, 0x60 +; mv a1, a0 +; .byte 0x13, 0x95, 0x05, 0x60 ; addi a0, a0, -1 ; ret @@ -113,35 +109,33 @@ block0(v0: i128): ; VCode: ; block0: -; not a3,a0 -; select a5,a3,a0##condition=(a1 slt zero) -; not a2,a1 -; select a3,a2,a1##condition=(a1 slt zero) -; clz a0,a3 -; clz a1,a5 -; select a4,a1,zero##condition=(a3 eq zero) -; add a5,a0,a4 -; addi a0,a5,-1 +; not a2,a0 +; select 
a2,a2,a0##condition=(a1 slt zero) +; not a0,a1 +; select a0,a0,a1##condition=(a1 slt zero) +; clz a1,a0 +; clz a3,a2 +; select a5,a3,zero##condition=(a0 eq zero) +; add a0,a1,a5 +; addi a0,a0,-1 ; li a1,0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; not a3, a0 -; mv a5, a3 +; not a2, a0 ; bltz a1, 8 -; mv a5, a0 -; not a2, a1 -; mv a3, a2 +; mv a2, a0 +; not a0, a1 ; bltz a1, 8 -; mv a3, a1 -; .byte 0x13, 0x95, 0x06, 0x60 -; .byte 0x93, 0x95, 0x07, 0x60 -; mv a4, a1 -; beqz a3, 8 -; mv a4, zero -; add a5, a0, a4 -; addi a0, a5, -1 +; mv a0, a1 +; .byte 0x93, 0x15, 0x05, 0x60 +; .byte 0x93, 0x16, 0x06, 0x60 +; mv a5, a3 +; beqz a0, 8 +; mv a5, zero +; add a0, a1, a5 +; addi a0, a0, -1 ; mv a1, zero ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/clz-zbb.clif b/cranelift/filetests/filetests/isa/riscv64/clz-zbb.clif index 41789817995e..ae3843d1c945 100644 --- a/cranelift/filetests/filetests/isa/riscv64/clz-zbb.clif +++ b/cranelift/filetests/filetests/isa/riscv64/clz-zbb.clif @@ -10,16 +10,16 @@ block0(v0: i8): ; VCode: ; block0: -; andi a2,a0,255 -; clz a4,a2 -; addi a0,a4,-56 +; andi a0,a0,255 +; clz a0,a0 +; addi a0,a0,-56 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a2, a0, 0xff -; .byte 0x13, 0x17, 0x06, 0x60 -; addi a0, a4, -0x38 +; andi a0, a0, 0xff +; .byte 0x13, 0x15, 0x05, 0x60 +; addi a0, a0, -0x38 ; ret function %clz_i16(i16) -> i16 { @@ -30,16 +30,16 @@ block0(v0: i16): ; VCode: ; block0: -; zext.h a2,a0 -; clz a4,a2 -; addi a0,a4,-48 +; zext.h a0,a0 +; clz a0,a0 +; addi a0,a0,-48 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x3b, 0x46, 0x05, 0x08 -; .byte 0x13, 0x17, 0x06, 0x60 -; addi a0, a4, -0x30 +; .byte 0x3b, 0x45, 0x05, 0x08 +; .byte 0x13, 0x15, 0x05, 0x60 +; addi a0, a0, -0x30 ; ret function %clz_i32(i32) -> i32 { @@ -82,21 +82,20 @@ block0(v0: i128): ; VCode: ; block0: -; clz a3,a1 -; clz a5,a0 -; select a2,a5,zero##condition=(a1 eq zero) -; add a0,a3,a2 +; clz a2,a1 +; clz a0,a0 +; select a0,a0,zero##condition=(a1 eq zero) +; add a0,a2,a0 ; li a1,0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x93, 0x96, 0x05, 0x60 -; .byte 0x93, 0x17, 0x05, 0x60 -; mv a2, a5 +; .byte 0x13, 0x96, 0x05, 0x60 +; .byte 0x13, 0x15, 0x05, 0x60 ; beqz a1, 8 -; mv a2, zero -; add a0, a3, a2 +; mv a0, zero +; add a0, a2, a0 ; mv a1, zero ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/cold-blocks.clif b/cranelift/filetests/filetests/isa/riscv64/cold-blocks.clif index 7b6fa896f8fa..422e07720755 100644 --- a/cranelift/filetests/filetests/isa/riscv64/cold-blocks.clif +++ b/cranelift/filetests/filetests/isa/riscv64/cold-blocks.clif @@ -16,8 +16,8 @@ block2: ; VCode: ; block0: -; sext.w a4,a0 -; bne a4,zero,taken(label1),not_taken(label2) +; sext.w a1,a0 +; bne a1,zero,taken(label1),not_taken(label2) ; block1: ; j label3 ; block2: @@ -28,8 +28,8 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a4, a0 -; bnez a4, 8 +; sext.w a1, a0 +; bnez a1, 8 ; block1: ; offset 0x8 ; addi a0, zero, 0x61 ; block2: ; offset 0xc @@ -49,8 +49,8 @@ block2 cold: ; VCode: ; block0: -; sext.w a4,a0 -; bne a4,zero,taken(label1),not_taken(label2) +; sext.w a1,a0 +; bne a1,zero,taken(label1),not_taken(label2) ; block1: ; j label3 ; block3: @@ -61,8 +61,8 @@ block2 cold: ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a4, a0 -; beqz a4, 8 +; sext.w a1, a0 +; beqz a1, 8 ; block1: ; offset 0x8 ; ret ; block2: ; offset 0xc diff --git a/cranelift/filetests/filetests/isa/riscv64/condbr.clif b/cranelift/filetests/filetests/isa/riscv64/condbr.clif index 
6912c0b5502e..e6b9a74ef163 100644 --- a/cranelift/filetests/filetests/isa/riscv64/condbr.clif +++ b/cranelift/filetests/filetests/isa/riscv64/condbr.clif @@ -10,14 +10,14 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; xor a3,a0,a1 -; seqz a0,a3 +; xor a0,a0,a1 +; seqz a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; xor a3, a0, a1 -; seqz a0, a3 +; xor a0, a0, a1 +; seqz a0, a0 ; ret function %icmp_eq_i128(i128, i128) -> i8 { @@ -28,18 +28,18 @@ block0(v0: i128, v1: i128): ; VCode: ; block0: -; xor a5,a0,a2 +; xor a0,a0,a2 ; xor a1,a1,a3 -; or a3,a5,a1 -; seqz a0,a3 +; or a0,a0,a1 +; seqz a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; xor a5, a0, a2 +; xor a0, a0, a2 ; xor a1, a1, a3 -; or a3, a5, a1 -; seqz a0, a3 +; or a0, a0, a1 +; seqz a0, a0 ; ret function %icmp_ne_i128(i128, i128) -> i8 { @@ -50,18 +50,18 @@ block0(v0: i128, v1: i128): ; VCode: ; block0: -; xor a5,a0,a2 +; xor a0,a0,a2 ; xor a1,a1,a3 -; or a3,a5,a1 -; sltu a0,zero,a3 +; or a0,a0,a1 +; sltu a0,zero,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; xor a5, a0, a2 +; xor a0, a0, a2 ; xor a1, a1, a3 -; or a3, a5, a1 -; snez a0, a3 +; or a0, a0, a1 +; snez a0, a0 ; ret function %icmp_slt_i128(i128, i128) -> i8 { @@ -72,20 +72,20 @@ block0(v0: i128, v1: i128): ; VCode: ; block0: -; slt a5,a1,a3 +; slt a4,a1,a3 ; sltu a2,a0,a2 -; xor a3,a1,a3 -; select a0,a2,a5##condition=(a3 eq zero) +; xor a1,a1,a3 +; select a0,a2,a4##condition=(a1 eq zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slt a5, a1, a3 +; slt a4, a1, a3 ; sltu a2, a0, a2 -; xor a3, a1, a3 +; xor a1, a1, a3 ; mv a0, a2 -; beqz a3, 8 -; mv a0, a5 +; beqz a1, 8 +; mv a0, a4 ; ret function %icmp_ult_i128(i128, i128) -> i8 { @@ -96,20 +96,20 @@ block0(v0: i128, v1: i128): ; VCode: ; block0: -; sltu a5,a1,a3 +; sltu a4,a1,a3 ; sltu a2,a0,a2 -; xor a3,a1,a3 -; select a0,a2,a5##condition=(a3 eq zero) +; xor a1,a1,a3 +; select a0,a2,a4##condition=(a1 eq zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sltu a5, a1, a3 +; sltu a4, a1, a3 ; sltu a2, a0, a2 -; xor a3, a1, a3 +; xor a1, a1, a3 ; mv a0, a2 -; beqz a3, 8 -; mv a0, a5 +; beqz a1, 8 +; mv a0, a4 ; ret function %icmp_sle_i128(i128, i128) -> i8 { @@ -120,21 +120,21 @@ block0(v0: i128, v1: i128): ; VCode: ; block0: -; slt a5,a3,a1 +; slt a4,a3,a1 ; sltu a2,a2,a0 -; xor a3,a3,a1 -; select a5,a2,a5##condition=(a3 eq zero) -; xori a0,a5,1 +; xor a1,a3,a1 +; select a2,a2,a4##condition=(a1 eq zero) +; xori a0,a2,1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slt a5, a3, a1 +; slt a4, a3, a1 ; sltu a2, a2, a0 -; xor a3, a3, a1 -; bnez a3, 8 -; mv a5, a2 -; xori a0, a5, 1 +; xor a1, a3, a1 +; beqz a1, 8 +; mv a2, a4 +; xori a0, a2, 1 ; ret function %icmp_ule_i128(i128, i128) -> i8 { @@ -145,21 +145,21 @@ block0(v0: i128, v1: i128): ; VCode: ; block0: -; sltu a5,a3,a1 +; sltu a4,a3,a1 ; sltu a2,a2,a0 -; xor a3,a3,a1 -; select a5,a2,a5##condition=(a3 eq zero) -; xori a0,a5,1 +; xor a1,a3,a1 +; select a2,a2,a4##condition=(a1 eq zero) +; xori a0,a2,1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sltu a5, a3, a1 +; sltu a4, a3, a1 ; sltu a2, a2, a0 -; xor a3, a3, a1 -; bnez a3, 8 -; mv a5, a2 -; xori a0, a5, 1 +; xor a1, a3, a1 +; beqz a1, 8 +; mv a2, a4 +; xori a0, a2, 1 ; ret function %icmp_sgt_i128(i128, i128) -> i8 { @@ -170,20 +170,20 @@ block0(v0: i128, v1: i128): ; VCode: ; block0: -; slt a5,a3,a1 +; slt a4,a3,a1 ; sltu a2,a2,a0 -; xor a3,a3,a1 -; select a0,a2,a5##condition=(a3 eq zero) +; xor a1,a3,a1 +; select a0,a2,a4##condition=(a1 eq zero) ; ret ; ; Disassembled: ; 
block0: ; offset 0x0 -; slt a5, a3, a1 +; slt a4, a3, a1 ; sltu a2, a2, a0 -; xor a3, a3, a1 +; xor a1, a3, a1 ; mv a0, a2 -; beqz a3, 8 -; mv a0, a5 +; beqz a1, 8 +; mv a0, a4 ; ret function %icmp_ugt_i128(i128, i128) -> i8 { @@ -194,20 +194,20 @@ block0(v0: i128, v1: i128): ; VCode: ; block0: -; sltu a5,a3,a1 +; sltu a4,a3,a1 ; sltu a2,a2,a0 -; xor a3,a3,a1 -; select a0,a2,a5##condition=(a3 eq zero) +; xor a1,a3,a1 +; select a0,a2,a4##condition=(a1 eq zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sltu a5, a3, a1 +; sltu a4, a3, a1 ; sltu a2, a2, a0 -; xor a3, a3, a1 +; xor a1, a3, a1 ; mv a0, a2 -; beqz a3, 8 -; mv a0, a5 +; beqz a1, 8 +; mv a0, a4 ; ret function %icmp_sge_i128(i128, i128) -> i8 { @@ -218,21 +218,21 @@ block0(v0: i128, v1: i128): ; VCode: ; block0: -; slt a5,a1,a3 +; slt a4,a1,a3 ; sltu a2,a0,a2 -; xor a3,a1,a3 -; select a5,a2,a5##condition=(a3 eq zero) -; xori a0,a5,1 +; xor a1,a1,a3 +; select a2,a2,a4##condition=(a1 eq zero) +; xori a0,a2,1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slt a5, a1, a3 +; slt a4, a1, a3 ; sltu a2, a0, a2 -; xor a3, a1, a3 -; bnez a3, 8 -; mv a5, a2 -; xori a0, a5, 1 +; xor a1, a1, a3 +; beqz a1, 8 +; mv a2, a4 +; xori a0, a2, 1 ; ret function %icmp_uge_i128(i128, i128) -> i8 { @@ -243,21 +243,21 @@ block0(v0: i128, v1: i128): ; VCode: ; block0: -; sltu a5,a1,a3 +; sltu a4,a1,a3 ; sltu a2,a0,a2 -; xor a3,a1,a3 -; select a5,a2,a5##condition=(a3 eq zero) -; xori a0,a5,1 +; xor a1,a1,a3 +; select a2,a2,a4##condition=(a1 eq zero) +; xori a0,a2,1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sltu a5, a1, a3 +; sltu a4, a1, a3 ; sltu a2, a0, a2 -; xor a3, a1, a3 -; bnez a3, 8 -; mv a5, a2 -; xori a0, a5, 1 +; xor a1, a1, a3 +; beqz a1, 8 +; mv a2, a4 +; xori a0, a2, 1 ; ret function %f(i64, i64) -> i64 { @@ -331,8 +331,8 @@ block1: ; VCode: ; block0: -; or a3,a0,a1 -; bne a3,zero,taken(label1),not_taken(label2) +; or a0,a0,a1 +; bne a0,zero,taken(label1),not_taken(label2) ; block1: ; j label3 ; block2: @@ -342,7 +342,7 @@ block1: ; ; Disassembled: ; block0: ; offset 0x0 -; or a3, a0, a1 +; or a0, a0, a1 ; block1: ; offset 0x4 ; ret @@ -357,11 +357,11 @@ block1: ; VCode: ; block0: -; xor a5,a0,a2 +; xor a0,a0,a2 ; xor a1,a1,a3 -; or a3,a5,a1 -; seqz a5,a3 -; bne a5,zero,taken(label1),not_taken(label2) +; or a0,a0,a1 +; seqz a1,a0 +; bne a1,zero,taken(label1),not_taken(label2) ; block1: ; j label3 ; block2: @@ -371,10 +371,10 @@ block1: ; ; Disassembled: ; block0: ; offset 0x0 -; xor a5, a0, a2 +; xor a0, a0, a2 ; xor a1, a1, a3 -; or a3, a5, a1 -; seqz a5, a3 +; or a0, a0, a1 +; seqz a1, a0 ; block1: ; offset 0x10 ; ret @@ -389,11 +389,11 @@ block1: ; VCode: ; block0: -; xor a5,a0,a2 +; xor a0,a0,a2 ; xor a1,a1,a3 -; or a3,a5,a1 -; sltu a5,zero,a3 -; bne a5,zero,taken(label1),not_taken(label2) +; or a0,a0,a1 +; sltu a1,zero,a0 +; bne a1,zero,taken(label1),not_taken(label2) ; block1: ; j label3 ; block2: @@ -403,10 +403,10 @@ block1: ; ; Disassembled: ; block0: ; offset 0x0 -; xor a5, a0, a2 +; xor a0, a0, a2 ; xor a1, a1, a3 -; or a3, a5, a1 -; snez a5, a3 +; or a0, a0, a1 +; snez a1, a0 ; block1: ; offset 0x10 ; ret @@ -421,11 +421,11 @@ block1: ; VCode: ; block0: -; slt a5,a1,a3 +; slt a4,a1,a3 ; sltu a2,a0,a2 -; xor a3,a1,a3 -; select a5,a2,a5##condition=(a3 eq zero) -; bne a5,zero,taken(label1),not_taken(label2) +; xor a1,a1,a3 +; select a2,a2,a4##condition=(a1 eq zero) +; bne a2,zero,taken(label1),not_taken(label2) ; block1: ; j label3 ; block2: @@ -435,11 +435,11 @@ block1: ; ; Disassembled: ; block0: ; offset 0x0 -; slt 
a5, a1, a3 +; slt a4, a1, a3 ; sltu a2, a0, a2 -; xor a3, a1, a3 -; bnez a3, 8 -; mv a5, a2 +; xor a1, a1, a3 +; beqz a1, 8 +; mv a2, a4 ; block1: ; offset 0x14 ; ret @@ -454,11 +454,11 @@ block1: ; VCode: ; block0: -; sltu a5,a1,a3 +; sltu a4,a1,a3 ; sltu a2,a0,a2 -; xor a3,a1,a3 -; select a5,a2,a5##condition=(a3 eq zero) -; bne a5,zero,taken(label1),not_taken(label2) +; xor a1,a1,a3 +; select a2,a2,a4##condition=(a1 eq zero) +; bne a2,zero,taken(label1),not_taken(label2) ; block1: ; j label3 ; block2: @@ -468,11 +468,11 @@ block1: ; ; Disassembled: ; block0: ; offset 0x0 -; sltu a5, a1, a3 +; sltu a4, a1, a3 ; sltu a2, a0, a2 -; xor a3, a1, a3 -; bnez a3, 8 -; mv a5, a2 +; xor a1, a1, a3 +; beqz a1, 8 +; mv a2, a4 ; block1: ; offset 0x14 ; ret @@ -487,12 +487,12 @@ block1: ; VCode: ; block0: -; slt a5,a3,a1 +; slt a4,a3,a1 ; sltu a2,a2,a0 -; xor a3,a3,a1 -; select a5,a2,a5##condition=(a3 eq zero) -; xori a1,a5,1 -; bne a1,zero,taken(label1),not_taken(label2) +; xor a1,a3,a1 +; select a2,a2,a4##condition=(a1 eq zero) +; xori a3,a2,1 +; bne a3,zero,taken(label1),not_taken(label2) ; block1: ; j label3 ; block2: @@ -502,12 +502,12 @@ block1: ; ; Disassembled: ; block0: ; offset 0x0 -; slt a5, a3, a1 +; slt a4, a3, a1 ; sltu a2, a2, a0 -; xor a3, a3, a1 -; bnez a3, 8 -; mv a5, a2 -; xori a1, a5, 1 +; xor a1, a3, a1 +; beqz a1, 8 +; mv a2, a4 +; xori a3, a2, 1 ; block1: ; offset 0x18 ; ret @@ -522,12 +522,12 @@ block1: ; VCode: ; block0: -; sltu a5,a3,a1 +; sltu a4,a3,a1 ; sltu a2,a2,a0 -; xor a3,a3,a1 -; select a5,a2,a5##condition=(a3 eq zero) -; xori a1,a5,1 -; bne a1,zero,taken(label1),not_taken(label2) +; xor a1,a3,a1 +; select a2,a2,a4##condition=(a1 eq zero) +; xori a3,a2,1 +; bne a3,zero,taken(label1),not_taken(label2) ; block1: ; j label3 ; block2: @@ -537,12 +537,12 @@ block1: ; ; Disassembled: ; block0: ; offset 0x0 -; sltu a5, a3, a1 +; sltu a4, a3, a1 ; sltu a2, a2, a0 -; xor a3, a3, a1 -; bnez a3, 8 -; mv a5, a2 -; xori a1, a5, 1 +; xor a1, a3, a1 +; beqz a1, 8 +; mv a2, a4 +; xori a3, a2, 1 ; block1: ; offset 0x18 ; ret @@ -557,11 +557,11 @@ block1: ; VCode: ; block0: -; slt a5,a3,a1 +; slt a4,a3,a1 ; sltu a2,a2,a0 -; xor a3,a3,a1 -; select a5,a2,a5##condition=(a3 eq zero) -; bne a5,zero,taken(label1),not_taken(label2) +; xor a1,a3,a1 +; select a2,a2,a4##condition=(a1 eq zero) +; bne a2,zero,taken(label1),not_taken(label2) ; block1: ; j label3 ; block2: @@ -571,11 +571,11 @@ block1: ; ; Disassembled: ; block0: ; offset 0x0 -; slt a5, a3, a1 +; slt a4, a3, a1 ; sltu a2, a2, a0 -; xor a3, a3, a1 -; bnez a3, 8 -; mv a5, a2 +; xor a1, a3, a1 +; beqz a1, 8 +; mv a2, a4 ; block1: ; offset 0x14 ; ret @@ -590,11 +590,11 @@ block1: ; VCode: ; block0: -; sltu a5,a3,a1 +; sltu a4,a3,a1 ; sltu a2,a2,a0 -; xor a3,a3,a1 -; select a5,a2,a5##condition=(a3 eq zero) -; bne a5,zero,taken(label1),not_taken(label2) +; xor a1,a3,a1 +; select a2,a2,a4##condition=(a1 eq zero) +; bne a2,zero,taken(label1),not_taken(label2) ; block1: ; j label3 ; block2: @@ -604,11 +604,11 @@ block1: ; ; Disassembled: ; block0: ; offset 0x0 -; sltu a5, a3, a1 +; sltu a4, a3, a1 ; sltu a2, a2, a0 -; xor a3, a3, a1 -; bnez a3, 8 -; mv a5, a2 +; xor a1, a3, a1 +; beqz a1, 8 +; mv a2, a4 ; block1: ; offset 0x14 ; ret @@ -623,12 +623,12 @@ block1: ; VCode: ; block0: -; slt a5,a1,a3 +; slt a4,a1,a3 ; sltu a2,a0,a2 -; xor a3,a1,a3 -; select a5,a2,a5##condition=(a3 eq zero) -; xori a1,a5,1 -; bne a1,zero,taken(label1),not_taken(label2) +; xor a1,a1,a3 +; select a2,a2,a4##condition=(a1 eq zero) +; xori a3,a2,1 +; bne 
a3,zero,taken(label1),not_taken(label2) ; block1: ; j label3 ; block2: @@ -638,12 +638,12 @@ block1: ; ; Disassembled: ; block0: ; offset 0x0 -; slt a5, a1, a3 +; slt a4, a1, a3 ; sltu a2, a0, a2 -; xor a3, a1, a3 -; bnez a3, 8 -; mv a5, a2 -; xori a1, a5, 1 +; xor a1, a1, a3 +; beqz a1, 8 +; mv a2, a4 +; xori a3, a2, 1 ; block1: ; offset 0x18 ; ret @@ -658,12 +658,12 @@ block1: ; VCode: ; block0: -; sltu a5,a1,a3 +; sltu a4,a1,a3 ; sltu a2,a0,a2 -; xor a3,a1,a3 -; select a5,a2,a5##condition=(a3 eq zero) -; xori a1,a5,1 -; bne a1,zero,taken(label1),not_taken(label2) +; xor a1,a1,a3 +; select a2,a2,a4##condition=(a1 eq zero) +; xori a3,a2,1 +; bne a3,zero,taken(label1),not_taken(label2) ; block1: ; j label3 ; block2: @@ -673,12 +673,12 @@ block1: ; ; Disassembled: ; block0: ; offset 0x0 -; sltu a5, a1, a3 +; sltu a4, a1, a3 ; sltu a2, a0, a2 -; xor a3, a1, a3 -; bnez a3, 8 -; mv a5, a2 -; xori a1, a5, 1 +; xor a1, a1, a3 +; beqz a1, 8 +; mv a2, a4 +; xori a3, a2, 1 ; block1: ; offset 0x18 ; ret @@ -693,8 +693,8 @@ block1: ; VCode: ; block0: -; andi a2,a0,255 -; bne a2,zero,taken(label1),not_taken(label2) +; andi a0,a0,255 +; bne a0,zero,taken(label1),not_taken(label2) ; block1: ; j label3 ; block2: @@ -704,7 +704,7 @@ block1: ; ; Disassembled: ; block0: ; offset 0x0 -; andi a2, a0, 0xff +; andi a0, a0, 0xff ; block1: ; offset 0x4 ; ret @@ -719,9 +719,9 @@ block1: ; VCode: ; block0: -; slli a2,a0,48 -; srai a4,a2,48 -; bne a4,zero,taken(label1),not_taken(label2) +; slli a0,a0,48 +; srai a0,a0,48 +; bne a0,zero,taken(label1),not_taken(label2) ; block1: ; j label3 ; block2: @@ -731,8 +731,8 @@ block1: ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srai a4, a2, 0x30 +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 ; block1: ; offset 0x8 ; ret @@ -747,8 +747,8 @@ block1: ; VCode: ; block0: -; sext.w a2,a0 -; bne a2,zero,taken(label1),not_taken(label2) +; sext.w a0,a0 +; bne a0,zero,taken(label1),not_taken(label2) ; block1: ; j label3 ; block2: @@ -758,7 +758,7 @@ block1: ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a2, a0 +; sext.w a0, a0 ; block1: ; offset 0x4 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/condops.clif b/cranelift/filetests/filetests/isa/riscv64/condops.clif index f134e0dde614..92c2d55096ab 100644 --- a/cranelift/filetests/filetests/isa/riscv64/condops.clif +++ b/cranelift/filetests/filetests/isa/riscv64/condops.clif @@ -13,18 +13,18 @@ block0(v0: i8, v1: i64, v2: i64): ; VCode: ; block0: ; li a3,42 -; andi a5,a0,255 +; andi a4,a0,255 ; andi a3,a3,255 -; select a0,a1,a2##condition=(a5 eq a3) +; select a0,a1,a2##condition=(a4 eq a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; addi a3, zero, 0x2a -; andi a5, a0, 0xff +; andi a4, a0, 0xff ; andi a3, a3, 0xff ; mv a0, a1 -; beq a5, a3, 8 +; beq a4, a3, 8 ; mv a0, a2 ; ret @@ -37,17 +37,17 @@ block0(v0: i8): ; VCode: ; block0: -; slli a2,a0,56 -; srai a4,a2,56 -; xori a0,a4,42 +; slli a0,a0,56 +; srai a0,a0,56 +; xori a0,a0,42 ; seqz a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x38 -; srai a4, a2, 0x38 -; xori a0, a4, 0x2a +; slli a0, a0, 0x38 +; srai a0, a0, 0x38 +; xori a0, a0, 0x2a ; seqz a0, a0 ; ret @@ -59,18 +59,18 @@ block0(v0: i8, v1: i8, v2: i8): ; VCode: ; block0: -; and a4,a0,a1 +; and a1,a0,a1 ; not a0,a0 -; and a2,a0,a2 -; or a0,a4,a2 +; and a0,a0,a2 +; or a0,a1,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; and a4, a0, a1 +; and a1, a0, a1 ; not a0, a0 -; and a2, a0, a2 -; or a0, a4, a2 +; and a0, a0, a2 +; or a0, a1, a0 ; ret function %i(i8, i8, i8) -> 
i8 { @@ -81,15 +81,15 @@ block0(v0: i8, v1: i8, v2: i8): ; VCode: ; block0: -; andi a4,a0,255 -; select a0,a1,a2##condition=(a4 ne zero) +; andi a3,a0,255 +; select a0,a1,a2##condition=(a3 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a4, a0, 0xff +; andi a3, a0, 0xff ; mv a0, a1 -; bnez a4, 8 +; bnez a3, 8 ; mv a0, a2 ; ret @@ -104,18 +104,18 @@ block0(v0: i32, v1: i8, v2: i8): ; VCode: ; block0: ; li a3,42 -; sext.w a5,a0 +; sext.w a4,a0 ; sext.w a3,a3 -; select a0,a1,a2##condition=(a5 eq a3) +; select a0,a1,a2##condition=(a4 eq a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; addi a3, zero, 0x2a -; sext.w a5, a0 +; sext.w a4, a0 ; sext.w a3, a3 ; mv a0, a1 -; beq a5, a3, 8 +; beq a4, a3, 8 ; mv a0, a2 ; ret @@ -126,43 +126,20 @@ block0(v0: i8, v1: i128, v2: i128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-16 -; sd s4,8(sp) ; block0: -; mv s4,a1 +; mv a6,a1 ; andi a5,a0,255 -; select [a0,a1],[s4,a2],[a3,a4]##condition=(a5 ne zero) -; ld s4,8(sp) -; addi sp,sp,16 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; select [a0,a1],[a6,a2],[a3,a4]##condition=(a5 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x10 -; sd s4, 8(sp) -; block1: ; offset 0x18 -; mv s4, a1 +; mv a6, a1 ; andi a5, a0, 0xff -; mv a0, s4 +; mv a0, a6 ; mv a1, a2 ; bnez a5, 0xc ; mv a0, a3 ; mv a1, a4 -; ld s4, 8(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/constants.clif b/cranelift/filetests/filetests/isa/riscv64/constants.clif index 9949cf7c4ca6..23901a73d755 100644 --- a/cranelift/filetests/filetests/isa/riscv64/constants.clif +++ b/cranelift/filetests/filetests/isa/riscv64/constants.clif @@ -351,15 +351,15 @@ block0: ; VCode: ; block0: ; li a0,0 -; lui a2,16383 -; slli a1,a2,36 +; lui a1,16383 +; slli a1,a1,36 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; mv a0, zero -; lui a2, 0x3fff -; slli a1, a2, 0x24 +; lui a1, 0x3fff +; slli a1, a1, 0x24 ; ret function %f() -> f64 { @@ -371,15 +371,15 @@ block0: ; VCode: ; block0: ; lui a0,1023 -; slli a2,a0,40 -; fmv.d.x fa0,a2 +; slli a0,a0,40 +; fmv.d.x fa0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; lui a0, 0x3ff -; slli a2, a0, 0x28 -; fmv.d.x fa0, a2 +; slli a0, a0, 0x28 +; fmv.d.x fa0, a0 ; ret function %f() -> f32 { @@ -409,15 +409,15 @@ block0: ; VCode: ; block0: ; lui a0,-12 -; addi a2,a0,-1024 -; fmv.w.x fa0,a2 +; addi a0,a0,-1024 +; fmv.w.x fa0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; lui a0, 0xffff4 -; addi a2, a0, -0x400 -; fmv.w.x fa0, a2 +; addi a0, a0, -0x400 +; fmv.w.x fa0, a0 ; ret function %f() -> f128 { @@ -429,15 +429,15 @@ block0: ; VCode: ; block0: ; li a0,0 -; lui a2,262217 -; slli a1,a2,32 +; lui a1,262217 +; slli a1,a1,32 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; mv a0, zero -; lui a2, 0x40049 -; slli a1, a2, 0x20 +; lui a1, 0x40049 +; slli a1, a1, 0x20 ; ret function %f() -> f64 { @@ -449,15 +449,15 @@ block0: ; VCode: ; block0: ; lui a0,16457 -; slli a2,a0,36 -; fmv.d.x fa0,a2 +; slli a0,a0,36 +; fmv.d.x fa0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; lui a0, 0x4049 -; slli a2, a0, 0x24 -; fmv.d.x fa0, a2 +; slli a0, a0, 0x24 +; fmv.d.x fa0, a0 ; ret function %f() -> f32 { @@ -487,15 +487,15 @@ block0: ; VCode: ; block0: ; lui a0,-11 -; addi a2,a0,576 -; fmv.w.x fa0,a2 +; addi a0,a0,576 +; fmv.w.x fa0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; lui a0, 
0xffff5 -; addi a2, a0, 0x240 -; fmv.w.x fa0, a2 +; addi a0, a0, 0x240 +; fmv.w.x fa0, a0 ; ret function %f() -> f128 { @@ -575,15 +575,15 @@ block0: ; VCode: ; block0: ; li a0,0 -; lui a2,49155 -; slli a1,a2,36 +; lui a1,49155 +; slli a1,a1,36 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; mv a0, zero -; lui a2, 0xc003 -; slli a1, a2, 0x24 +; lui a1, 0xc003 +; slli a1, a1, 0x24 ; ret function %f() -> f64 { @@ -595,15 +595,15 @@ block0: ; VCode: ; block0: ; lui a0,3075 -; slli a2,a0,40 -; fmv.d.x fa0,a2 +; slli a0,a0,40 +; fmv.d.x fa0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; lui a0, 0xc03 -; slli a2, a0, 0x28 -; fmv.d.x fa0, a2 +; slli a0, a0, 0x28 +; fmv.d.x fa0, a0 ; ret function %f() -> f32 { @@ -633,15 +633,15 @@ block0: ; VCode: ; block0: ; lui a0,-3 -; addi a2,a0,-1024 -; fmv.w.x fa0,a2 +; addi a0,a0,-1024 +; fmv.w.x fa0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; lui a0, 0xffffd -; addi a2, a0, -0x400 -; fmv.w.x fa0, a2 +; addi a0, a0, -0x400 +; fmv.w.x fa0, a0 ; ret function %f() -> f128 { diff --git a/cranelift/filetests/filetests/isa/riscv64/ctz-zbb-zbs.clif b/cranelift/filetests/filetests/isa/riscv64/ctz-zbb-zbs.clif index 295658005b88..8662e78aee28 100644 --- a/cranelift/filetests/filetests/isa/riscv64/ctz-zbb-zbs.clif +++ b/cranelift/filetests/filetests/isa/riscv64/ctz-zbb-zbs.clif @@ -10,14 +10,14 @@ block0(v0: i8): ; VCode: ; block0: -; bseti a2,a0,8 -; ctzw a0,a2 +; bseti a0,a0,8 +; ctzw a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x13, 0x16, 0x85, 0x28 -; .byte 0x1b, 0x15, 0x16, 0x60 +; .byte 0x13, 0x15, 0x85, 0x28 +; .byte 0x1b, 0x15, 0x15, 0x60 ; ret function %ctz_i16(i16) -> i16 { @@ -28,14 +28,14 @@ block0(v0: i16): ; VCode: ; block0: -; bseti a2,a0,16 -; ctzw a0,a2 +; bseti a0,a0,16 +; ctzw a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x13, 0x16, 0x05, 0x29 -; .byte 0x1b, 0x15, 0x16, 0x60 +; .byte 0x13, 0x15, 0x05, 0x29 +; .byte 0x1b, 0x15, 0x15, 0x60 ; ret function %ctz_i32(i32) -> i32 { diff --git a/cranelift/filetests/filetests/isa/riscv64/ctz-zbb.clif b/cranelift/filetests/filetests/isa/riscv64/ctz-zbb.clif index 97c6ba309dfa..a3fc22521ffc 100644 --- a/cranelift/filetests/filetests/isa/riscv64/ctz-zbb.clif +++ b/cranelift/filetests/filetests/isa/riscv64/ctz-zbb.clif @@ -11,14 +11,14 @@ block0(v0: i8): ; VCode: ; block0: -; ori a2,a0,256 -; ctzw a0,a2 +; ori a0,a0,256 +; ctzw a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; ori a2, a0, 0x100 -; .byte 0x1b, 0x15, 0x16, 0x60 +; ori a0, a0, 0x100 +; .byte 0x1b, 0x15, 0x15, 0x60 ; ret function %ctz_i16(i16) -> i16 { @@ -29,16 +29,16 @@ block0(v0: i16): ; VCode: ; block0: -; lui a2,16 -; or a4,a0,a2 -; ctzw a0,a4 +; lui a1,16 +; or a0,a0,a1 +; ctzw a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lui a2, 0x10 -; or a4, a0, a2 -; .byte 0x1b, 0x15, 0x17, 0x60 +; lui a1, 0x10 +; or a0, a0, a1 +; .byte 0x1b, 0x15, 0x15, 0x60 ; ret function %ctz_i32(i32) -> i32 { @@ -81,21 +81,20 @@ block0(v0: i128): ; VCode: ; block0: -; ctz a3,a1 -; ctz a5,a0 -; select a1,a3,zero##condition=(a0 eq zero) -; add a0,a5,a1 +; ctz a2,a1 +; ctz a1,a0 +; select a2,a2,zero##condition=(a0 eq zero) +; add a0,a1,a2 ; li a1,0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x93, 0x96, 0x15, 0x60 -; .byte 0x93, 0x17, 0x15, 0x60 -; mv a1, a3 +; .byte 0x13, 0x96, 0x15, 0x60 +; .byte 0x93, 0x15, 0x15, 0x60 ; beqz a0, 8 -; mv a1, zero -; add a0, a5, a1 +; mv a2, zero +; add a0, a1, a2 ; mv a1, zero ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/exceptions.clif 
b/cranelift/filetests/filetests/isa/riscv64/exceptions.clif index 859b01d39aac..ddef4a3efbcb 100644 --- a/cranelift/filetests/filetests/isa/riscv64/exceptions.clif +++ b/cranelift/filetests/filetests/isa/riscv64/exceptions.clif @@ -50,9 +50,9 @@ function %f0(i32) -> i32, f32, f64 { ; fsd fs10,32(sp) ; fsd fs11,24(sp) ; block0: -; lui a4,1023 -; slli a1,a4,40 -; fmv.d.x fa1,a1 +; lui a1,1023 +; slli a2,a1,40 +; fmv.d.x fa1,a2 ; fsd fa1,0(slot) ; call %g; j MachLabel(1); catch [default: MachLabel(2)] ; block1: @@ -150,9 +150,9 @@ function %f0(i32) -> i32, f32, f64 { ; fsd fs10, 0x20(sp) ; fsd fs11, 0x18(sp) ; block1: ; offset 0x70 -; lui a4, 0x3ff -; slli a1, a4, 0x28 -; fmv.d.x fa1, a1 +; lui a1, 0x3ff +; slli a2, a1, 0x28 +; fmv.d.x fa1, a2 ; fsd fa1, 0(sp) ; auipc ra, 0 ; reloc_external RiscvCallPlt %g 0 ; jalr ra @@ -270,12 +270,12 @@ function %f2(i32) -> i32, f32, f64 { ; fsd fs10,32(sp) ; fsd fs11,24(sp) ; block0: -; lui a5,1023 -; slli a1,a5,40 -; fmv.d.x fa1,a1 +; lui a1,1023 +; slli a3,a1,40 +; fmv.d.x fa1,a3 ; fsd fa1,0(slot) -; load_ext_name_far a1,%g+0 -; callind a1; j MachLabel(1); catch [default: MachLabel(2)] +; load_ext_name_far a3,%g+0 +; callind a3; j MachLabel(1); catch [default: MachLabel(2)] ; block1: ; li a0,1 ; fld fa1,0(slot) @@ -371,16 +371,16 @@ function %f2(i32) -> i32, f32, f64 { ; fsd fs10, 0x20(sp) ; fsd fs11, 0x18(sp) ; block1: ; offset 0x70 -; lui a5, 0x3ff -; slli a1, a5, 0x28 -; fmv.d.x fa1, a1 +; lui a1, 0x3ff +; slli a3, a1, 0x28 +; fmv.d.x fa1, a3 ; fsd fa1, 0(sp) -; auipc a1, 0 -; ld a1, 0xc(a1) +; auipc a3, 0 +; ld a3, 0xc(a3) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %g 0 ; .byte 0x00, 0x00, 0x00, 0x00 -; jalr a1 +; jalr a3 ; block2: ; offset 0x98 ; addi a0, zero, 1 ; fld fa1, 0(sp) @@ -498,14 +498,14 @@ function %f4(i64, i32) -> i32, f32, f64 { ; fsd fs11,40(sp) ; block0: ; sd a0,8(slot) -; lui a0,1023 -; slli a2,a0,40 -; fmv.d.x fa1,a2 +; lui a2,1023 +; slli a4,a2,40 +; fmv.d.x fa1,a4 ; fsd fa1,16(slot) -; load_ext_name_far a2,%g+0 +; load_ext_name_far a4,%g+0 ; mv a0,a1 ; sd a1,0(slot) -; callind a2; j MachLabel(3); catch [context stack1, tag0: MachLabel(1), tag1: MachLabel(2), context stack0, tag0: MachLabel(4)] +; callind a4; j MachLabel(3); catch [context stack1, tag0: MachLabel(1), tag1: MachLabel(2), context stack0, tag0: MachLabel(4)] ; block1: ; fld fa1,16(slot) ; j label5 @@ -611,18 +611,18 @@ function %f4(i64, i32) -> i32, f32, f64 { ; fsd fs11, 0x28(sp) ; block1: ; offset 0x70 ; sd a0, 8(sp) -; lui a0, 0x3ff -; slli a2, a0, 0x28 -; fmv.d.x fa1, a2 +; lui a2, 0x3ff +; slli a4, a2, 0x28 +; fmv.d.x fa1, a4 ; fsd fa1, 0x10(sp) -; auipc a2, 0 -; ld a2, 0xc(a2) +; auipc a4, 0 +; ld a4, 0xc(a4) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %g 0 ; .byte 0x00, 0x00, 0x00, 0x00 ; mv a0, a1 ; sd a1, 0(sp) -; jalr a2 +; jalr a4 ; j 0x14 ; block2: ; offset 0xa8 ; fld fa1, 0x10(sp) @@ -748,8 +748,8 @@ function %f5() -> i64 { ; sd a0,0(slot) ; j label1 ; block1: -; load_ext_name_far a4,%g+0 -; callind a4; j MachLabel(2); catch [default: MachLabel(3)] +; load_ext_name_far a1,%g+0 +; callind a1; j MachLabel(2); catch [default: MachLabel(3)] ; block2: ; ld a0,0(slot) ; ld fp,200(sp) @@ -846,12 +846,12 @@ function %f5() -> i64 { ; addi a0, a0, 0x98 ; sd a0, 0(sp) ; block2: ; offset 0x7c -; auipc a4, 0 -; ld a4, 0xc(a4) +; auipc a1, 0 +; ld a1, 0xc(a1) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %g 0 ; .byte 0x00, 0x00, 0x00, 0x00 -; jalr a4 +; jalr a1 ; block3: ; offset 0x94 ; ld a0, 0(sp) ; ld s0, 0xc8(sp) 
diff --git a/cranelift/filetests/filetests/isa/riscv64/extend-i128.clif b/cranelift/filetests/filetests/isa/riscv64/extend-i128.clif index 96ca30750cce..d07601ce7b40 100644 --- a/cranelift/filetests/filetests/isa/riscv64/extend-i128.clif +++ b/cranelift/filetests/filetests/isa/riscv64/extend-i128.clif @@ -26,15 +26,15 @@ block0(v0: i32): ; VCode: ; block0: -; slli a2,a0,32 -; srli a0,a2,32 +; slli a0,a0,32 +; srli a0,a0,32 ; li a1,0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x20 -; srli a0, a2, 0x20 +; slli a0, a0, 0x20 +; srli a0, a0, 0x20 ; mv a1, zero ; ret @@ -46,15 +46,15 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srli a0,a2,48 +; slli a0,a0,48 +; srli a0,a0,48 ; li a1,0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srli a0, a2, 0x30 +; slli a0, a0, 0x30 +; srli a0, a0, 0x30 ; mv a1, zero ; ret @@ -118,15 +118,15 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srai a0,a2,48 +; slli a0,a0,48 +; srai a0,a0,48 ; srai a1,a0,63 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srai a0, a2, 0x30 +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 ; srai a1, a0, 0x3f ; ret @@ -138,15 +138,15 @@ block0(v0: i8): ; VCode: ; block0: -; slli a2,a0,56 -; srai a0,a2,56 +; slli a0,a0,56 +; srai a0,a0,56 ; srai a1,a0,63 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x38 -; srai a0, a2, 0x38 +; slli a0, a0, 0x38 +; srai a0, a0, 0x38 ; srai a1, a0, 0x3f ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/extend.clif b/cranelift/filetests/filetests/isa/riscv64/extend.clif index 445671f2c8af..1d10d48202db 100644 --- a/cranelift/filetests/filetests/isa/riscv64/extend.clif +++ b/cranelift/filetests/filetests/isa/riscv64/extend.clif @@ -45,14 +45,14 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srli a0,a2,48 +; slli a0,a0,48 +; srli a0,a0,48 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srli a0, a2, 0x30 +; slli a0, a0, 0x30 +; srli a0, a0, 0x30 ; ret function %uextend8_64(i8) -> i64 { @@ -79,14 +79,14 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srli a0,a2,48 +; slli a0,a0,48 +; srli a0,a0,48 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srli a0, a2, 0x30 +; slli a0, a0, 0x30 +; srli a0, a0, 0x30 ; ret function %uextend32_64(i32) -> i64 { @@ -97,14 +97,14 @@ block0(v0: i32): ; VCode: ; block0: -; slli a2,a0,32 -; srli a0,a2,32 +; slli a0,a0,32 +; srli a0,a0,32 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x20 -; srli a0, a2, 0x20 +; slli a0, a0, 0x20 +; srli a0, a0, 0x20 ; ret ;;;; Sextend Rules @@ -117,14 +117,14 @@ block0(v0: i8): ; VCode: ; block0: -; slli a2,a0,56 -; srai a0,a2,56 +; slli a0,a0,56 +; srai a0,a0,56 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x38 -; srai a0, a2, 0x38 +; slli a0, a0, 0x38 +; srai a0, a0, 0x38 ; ret function %sextend8_32(i8) -> i32 { @@ -135,14 +135,14 @@ block0(v0: i8): ; VCode: ; block0: -; slli a2,a0,56 -; srai a0,a2,56 +; slli a0,a0,56 +; srai a0,a0,56 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x38 -; srai a0, a2, 0x38 +; slli a0, a0, 0x38 +; srai a0, a0, 0x38 ; ret function %sextend16_32(i16) -> i32 { @@ -153,14 +153,14 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srai a0,a2,48 +; slli a0,a0,48 +; srai a0,a0,48 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srai a0, a2, 0x30 +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 ; ret function %sextend8_64(i8) -> i64 { @@ -171,14 +171,14 @@ block0(v0: 
i8): ; VCode: ; block0: -; slli a2,a0,56 -; srai a0,a2,56 +; slli a0,a0,56 +; srai a0,a0,56 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x38 -; srai a0, a2, 0x38 +; slli a0, a0, 0x38 +; srai a0, a0, 0x38 ; ret function %sextend16_64(i16) -> i64 { @@ -189,14 +189,14 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srai a0,a2,48 +; slli a0,a0,48 +; srai a0,a0,48 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srai a0, a2, 0x30 +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 ; ret function %sextend32_64(i32) -> i64 { @@ -225,16 +225,16 @@ block0(v0: i8): ; VCode: ; block0: -; slli a3,a0,56 -; srai a5,a3,56 -; addi a0,a5,42 +; slli a0,a0,56 +; srai a0,a0,56 +; addi a0,a0,42 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x38 -; srai a5, a3, 0x38 -; addi a0, a5, 0x2a +; slli a0, a0, 0x38 +; srai a0, a0, 0x38 +; addi a0, a0, 0x2a ; ret function %f2(i8, i64) -> i64 { @@ -246,19 +246,18 @@ block0(v0: i8, v1: i64): ; VCode: ; block0: -; slli a4,a0,56 -; srai a0,a4,56 +; slli a0,a0,56 +; srai a0,a0,56 ; add a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a4, a0, 0x38 -; srai a0, a4, 0x38 +; slli a0, a0, 0x38 +; srai a0, a0, 0x38 ; add a0, a0, a1 ; ret - function %extend_fcmp(f64, f64) -> i64 { block0(v0: f64, v1: f64): v3 = fcmp.f64 lt v0, v1 diff --git a/cranelift/filetests/filetests/isa/riscv64/f16-bitcast-zfhmin.clif b/cranelift/filetests/filetests/isa/riscv64/f16-bitcast-zfhmin.clif index f56d480d442f..34567e6bdcba 100644 --- a/cranelift/filetests/filetests/isa/riscv64/f16-bitcast-zfhmin.clif +++ b/cranelift/filetests/filetests/isa/riscv64/f16-bitcast-zfhmin.clif @@ -47,8 +47,8 @@ block0(v0: i8x2): ; mv fp,sp ; block0: ; vle8.v v8,-16(incoming_arg) #avl=2, #vtype=(e8, m1, ta, ma) -; vmv.x.s a2,v8 #avl=1, #vtype=(e16, m1, ta, ma) -; fmv.h.x fa0,a2 +; vmv.x.s a0,v8 #avl=1, #vtype=(e16, m1, ta, ma) +; fmv.h.x fa0,a0 ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -65,8 +65,8 @@ block0(v0: i8x2): ; addi t6, sp, 0x10 ; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0xf0, 0x80, 0xcc -; .byte 0x57, 0x26, 0x80, 0x42 -; .byte 0x53, 0x05, 0x06, 0xf4 +; .byte 0x57, 0x25, 0x80, 0x42 +; .byte 0x53, 0x05, 0x05, 0xf4 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -80,15 +80,15 @@ block0(v0: f16): ; VCode: ; block0: -; vfmv.s.f v11,fa0 #avl=1, #vtype=(e32, m1, ta, ma) -; vse8.v v11,0(a0) #avl=2, #vtype=(e8, m1, ta, ma) +; vfmv.s.f v8,fa0 #avl=1, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=2, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0xf0, 0x00, 0xcd -; .byte 0xd7, 0x55, 0x05, 0x42 +; .byte 0x57, 0x54, 0x05, 0x42 ; .byte 0x57, 0x70, 0x01, 0xcc -; .byte 0xa7, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/f16-bitcast-zvfh.clif b/cranelift/filetests/filetests/isa/riscv64/f16-bitcast-zvfh.clif index 225fa4f59da4..788bb2060ec9 100644 --- a/cranelift/filetests/filetests/isa/riscv64/f16-bitcast-zvfh.clif +++ b/cranelift/filetests/filetests/isa/riscv64/f16-bitcast-zvfh.clif @@ -78,15 +78,15 @@ block0(v0: f16): ; VCode: ; block0: -; vfmv.s.f v11,fa0 #avl=1, #vtype=(e16, m1, ta, ma) -; vse8.v v11,0(a0) #avl=2, #vtype=(e8, m1, ta, ma) +; vfmv.s.f v8,fa0 #avl=1, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=2, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0xf0, 0x80, 0xcc -; .byte 0xd7, 0x55, 0x05, 0x42 +; .byte 0x57, 0x54, 0x05, 0x42 ; .byte 0x57, 0x70, 0x01, 0xcc -; .byte 0xa7, 0x05, 0x05, 
0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/f16-memory.clif b/cranelift/filetests/filetests/isa/riscv64/f16-memory.clif index 313dbd624e2a..89472f4b99aa 100644 --- a/cranelift/filetests/filetests/isa/riscv64/f16-memory.clif +++ b/cranelift/filetests/filetests/isa/riscv64/f16-memory.clif @@ -132,13 +132,13 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; flh fa3,0(a0) -; fsh fa3,0(a1) +; flh fa0,0(a0) +; fsh fa0,0(a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x87, 0x16, 0x05, 0x00 ; trap: heap_oob -; .byte 0x27, 0x90, 0xd5, 0x00 ; trap: heap_oob +; .byte 0x07, 0x15, 0x05, 0x00 ; trap: heap_oob +; .byte 0x27, 0x90, 0xa5, 0x00 ; trap: heap_oob ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/f16const-zfhmin.clif b/cranelift/filetests/filetests/isa/riscv64/f16const-zfhmin.clif index 5dbf6db7a5bb..906cb3ad08c2 100644 --- a/cranelift/filetests/filetests/isa/riscv64/f16const-zfhmin.clif +++ b/cranelift/filetests/filetests/isa/riscv64/f16const-zfhmin.clif @@ -26,14 +26,14 @@ block0(): ; VCode: ; block0: ; lui a0,4 -; addi a2,a0,-1024 -; fmv.h.x fa0,a2 +; addi a0,a0,-1024 +; fmv.h.x fa0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; lui a0, 4 -; addi a2, a0, -0x400 -; .byte 0x53, 0x05, 0x06, 0xf4 +; addi a0, a0, -0x400 +; .byte 0x53, 0x05, 0x05, 0xf4 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/fcmp.clif b/cranelift/filetests/filetests/isa/riscv64/fcmp.clif index 8e2ee2f91b3c..910d7cb20325 100644 --- a/cranelift/filetests/filetests/isa/riscv64/fcmp.clif +++ b/cranelift/filetests/filetests/isa/riscv64/fcmp.clif @@ -14,9 +14,9 @@ block1: ; VCode: ; block0: -; fmv.d.x fa1,zero -; fle.d a1,fa1,fa1 -; beq a1,zero,taken(label1),not_taken(label2) +; fmv.d.x fa0,zero +; fle.d a0,fa0,fa0 +; beq a0,zero,taken(label1),not_taken(label2) ; block1: ; j label3 ; block2: @@ -26,8 +26,8 @@ block1: ; ; Disassembled: ; block0: ; offset 0x0 -; fmv.d.x fa1, zero -; fle.d a1, fa1, fa1 +; fmv.d.x fa0, zero +; fle.d a0, fa0, fa0 ; block1: ; offset 0x8 ; ret @@ -43,9 +43,9 @@ block1: ; VCode: ; block0: -; fmv.d.x fa1,zero -; fle.d a1,fa1,fa1 -; beq a1,zero,taken(label1),not_taken(label2) +; fmv.d.x fa0,zero +; fle.d a0,fa0,fa0 +; beq a0,zero,taken(label1),not_taken(label2) ; block1: ; j label3 ; block2: @@ -55,8 +55,8 @@ block1: ; ; Disassembled: ; block0: ; offset 0x0 -; fmv.d.x fa1, zero -; fle.d a1, fa1, fa1 +; fmv.d.x fa0, zero +; fle.d a0, fa0, fa0 ; block1: ; offset 0x8 ; ret @@ -68,16 +68,16 @@ block0(v0: f32, v1: f32): ; VCode: ; block0: -; feq.s a3,fa0,fa0 -; feq.s a5,fa1,fa1 -; and a0,a3,a5 +; feq.s a0,fa0,fa0 +; feq.s a1,fa1,fa1 +; and a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; feq.s a3, fa0, fa0 -; feq.s a5, fa1, fa1 -; and a0, a3, a5 +; feq.s a0, fa0, fa0 +; feq.s a1, fa1, fa1 +; and a0, a0, a1 ; ret function %uno(f32, f32) -> i8 { @@ -88,18 +88,18 @@ block0(v0: f32, v1: f32): ; VCode: ; block0: -; feq.s a3,fa0,fa0 -; feq.s a5,fa1,fa1 -; and a1,a3,a5 -; seqz a0,a1 +; feq.s a0,fa0,fa0 +; feq.s a1,fa1,fa1 +; and a0,a0,a1 +; seqz a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; feq.s a3, fa0, fa0 -; feq.s a5, fa1, fa1 -; and a1, a3, a5 -; seqz a0, a1 +; feq.s a0, fa0, fa0 +; feq.s a1, fa1, fa1 +; and a0, a0, a1 +; seqz a0, a0 ; ret function %eq(f32, f32) -> i8 { @@ -126,14 +126,14 @@ block0(v0: f32, v1: f32): ; VCode: ; block0: -; feq.s a3,fa0,fa1 -; seqz a0,a3 +; feq.s a0,fa0,fa1 +; seqz a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; feq.s a3, fa0, fa1 -; seqz a0, a3 +; feq.s 
a0, fa0, fa1 +; seqz a0, a0 ; ret function %one(f32, f32) -> i8 { @@ -144,16 +144,16 @@ block0(v0: f32, v1: f32): ; VCode: ; block0: -; flt.s a3,fa0,fa1 -; flt.s a5,fa1,fa0 -; or a0,a3,a5 +; flt.s a0,fa0,fa1 +; flt.s a1,fa1,fa0 +; or a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; flt.s a3, fa0, fa1 -; flt.s a5, fa1, fa0 -; or a0, a3, a5 +; flt.s a0, fa0, fa1 +; flt.s a1, fa1, fa0 +; or a0, a0, a1 ; ret function %ueq(f32, f32) -> i8 { @@ -164,18 +164,18 @@ block0(v0: f32, v1: f32): ; VCode: ; block0: -; flt.s a3,fa0,fa1 -; flt.s a5,fa1,fa0 -; or a1,a3,a5 -; seqz a0,a1 +; flt.s a0,fa0,fa1 +; flt.s a1,fa1,fa0 +; or a0,a0,a1 +; seqz a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; flt.s a3, fa0, fa1 -; flt.s a5, fa1, fa0 -; or a1, a3, a5 -; seqz a0, a1 +; flt.s a0, fa0, fa1 +; flt.s a1, fa1, fa0 +; or a0, a0, a1 +; seqz a0, a0 ; ret function %lt(f64, f64) -> i8 { @@ -250,14 +250,14 @@ block0(v0: f64, v1: f64): ; VCode: ; block0: -; fle.d a3,fa1,fa0 -; seqz a0,a3 +; fle.d a0,fa1,fa0 +; seqz a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fle.d a3, fa1, fa0 -; seqz a0, a3 +; fle.d a0, fa1, fa0 +; seqz a0, a0 ; ret function %ule(f64, f64) -> i8 { @@ -268,14 +268,14 @@ block0(v0: f64, v1: f64): ; VCode: ; block0: -; flt.d a3,fa1,fa0 -; seqz a0,a3 +; flt.d a0,fa1,fa0 +; seqz a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; flt.d a3, fa1, fa0 -; seqz a0, a3 +; flt.d a0, fa1, fa0 +; seqz a0, a0 ; ret function %ugt(f64, f64) -> i8 { @@ -286,14 +286,14 @@ block0(v0: f64, v1: f64): ; VCode: ; block0: -; fle.d a3,fa0,fa1 -; seqz a0,a3 +; fle.d a0,fa0,fa1 +; seqz a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fle.d a3, fa0, fa1 -; seqz a0, a3 +; fle.d a0, fa0, fa1 +; seqz a0, a0 ; ret function %uge(f64, f64) -> i8 { @@ -304,14 +304,14 @@ block0(v0: f64, v1: f64): ; VCode: ; block0: -; flt.d a3,fa0,fa1 -; seqz a0,a3 +; flt.d a0,fa0,fa1 +; seqz a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; flt.d a3, fa0, fa1 -; seqz a0, a3 +; flt.d a0, fa0, fa1 +; seqz a0, a0 ; ret function %br_ord(f32, f32) -> i8 { @@ -328,10 +328,10 @@ block2: ; VCode: ; block0: -; feq.s a5,fa0,fa0 +; feq.s a0,fa0,fa0 ; feq.s a1,fa1,fa1 -; and a3,a5,a1 -; bne a3,zero,taken(label2),not_taken(label1) +; and a0,a0,a1 +; bne a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -341,10 +341,10 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; feq.s a5, fa0, fa0 +; feq.s a0, fa0, fa0 ; feq.s a1, fa1, fa1 -; and a3, a5, a1 -; bnez a3, 0xc +; and a0, a0, a1 +; bnez a0, 0xc ; block1: ; offset 0x10 ; mv a0, zero ; ret @@ -366,10 +366,10 @@ block2: ; VCode: ; block0: -; feq.s a5,fa0,fa0 +; feq.s a0,fa0,fa0 ; feq.s a1,fa1,fa1 -; and a3,a5,a1 -; beq a3,zero,taken(label2),not_taken(label1) +; and a0,a0,a1 +; beq a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -379,10 +379,10 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; feq.s a5, fa0, fa0 +; feq.s a0, fa0, fa0 ; feq.s a1, fa1, fa1 -; and a3, a5, a1 -; beqz a3, 0xc +; and a0, a0, a1 +; beqz a0, 0xc ; block1: ; offset 0x10 ; mv a0, zero ; ret @@ -404,8 +404,8 @@ block2: ; VCode: ; block0: -; feq.s a5,fa0,fa1 -; bne a5,zero,taken(label2),not_taken(label1) +; feq.s a0,fa0,fa1 +; bne a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -415,8 +415,8 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; feq.s a5, fa0, fa1 -; bnez a5, 0xc +; feq.s a0, fa0, fa1 +; bnez a0, 0xc ; block1: ; offset 0x8 ; mv a0, zero ; ret @@ -438,8 +438,8 @@ block2: ; VCode: ; block0: -; feq.s a5,fa0,fa1 -; beq 
a5,zero,taken(label2),not_taken(label1) +; feq.s a0,fa0,fa1 +; beq a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -449,8 +449,8 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; feq.s a5, fa0, fa1 -; beqz a5, 0xc +; feq.s a0, fa0, fa1 +; beqz a0, 0xc ; block1: ; offset 0x8 ; mv a0, zero ; ret @@ -472,10 +472,10 @@ block2: ; VCode: ; block0: -; flt.s a5,fa0,fa1 +; flt.s a0,fa0,fa1 ; flt.s a1,fa1,fa0 -; or a3,a5,a1 -; bne a3,zero,taken(label2),not_taken(label1) +; or a0,a0,a1 +; bne a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -485,10 +485,10 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; flt.s a5, fa0, fa1 +; flt.s a0, fa0, fa1 ; flt.s a1, fa1, fa0 -; or a3, a5, a1 -; bnez a3, 0xc +; or a0, a0, a1 +; bnez a0, 0xc ; block1: ; offset 0x10 ; mv a0, zero ; ret @@ -510,10 +510,10 @@ block2: ; VCode: ; block0: -; flt.s a5,fa0,fa1 +; flt.s a0,fa0,fa1 ; flt.s a1,fa1,fa0 -; or a3,a5,a1 -; beq a3,zero,taken(label2),not_taken(label1) +; or a0,a0,a1 +; beq a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -523,10 +523,10 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; flt.s a5, fa0, fa1 +; flt.s a0, fa0, fa1 ; flt.s a1, fa1, fa0 -; or a3, a5, a1 -; beqz a3, 0xc +; or a0, a0, a1 +; beqz a0, 0xc ; block1: ; offset 0x10 ; mv a0, zero ; ret @@ -548,8 +548,8 @@ block2: ; VCode: ; block0: -; flt.s a5,fa0,fa1 -; bne a5,zero,taken(label2),not_taken(label1) +; flt.s a0,fa0,fa1 +; bne a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -559,8 +559,8 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; flt.s a5, fa0, fa1 -; bnez a5, 0xc +; flt.s a0, fa0, fa1 +; bnez a0, 0xc ; block1: ; offset 0x8 ; mv a0, zero ; ret @@ -582,8 +582,8 @@ block2: ; VCode: ; block0: -; flt.s a5,fa1,fa0 -; bne a5,zero,taken(label2),not_taken(label1) +; flt.s a0,fa1,fa0 +; bne a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -593,8 +593,8 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; flt.s a5, fa1, fa0 -; bnez a5, 0xc +; flt.s a0, fa1, fa0 +; bnez a0, 0xc ; block1: ; offset 0x8 ; mv a0, zero ; ret @@ -616,8 +616,8 @@ block2: ; VCode: ; block0: -; fle.s a5,fa0,fa1 -; bne a5,zero,taken(label2),not_taken(label1) +; fle.s a0,fa0,fa1 +; bne a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -627,8 +627,8 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; fle.s a5, fa0, fa1 -; bnez a5, 0xc +; fle.s a0, fa0, fa1 +; bnez a0, 0xc ; block1: ; offset 0x8 ; mv a0, zero ; ret @@ -650,8 +650,8 @@ block2: ; VCode: ; block0: -; fle.s a5,fa1,fa0 -; bne a5,zero,taken(label2),not_taken(label1) +; fle.s a0,fa1,fa0 +; bne a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -661,8 +661,8 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; fle.s a5, fa1, fa0 -; bnez a5, 0xc +; fle.s a0, fa1, fa0 +; bnez a0, 0xc ; block1: ; offset 0x8 ; mv a0, zero ; ret @@ -684,8 +684,8 @@ block2: ; VCode: ; block0: -; fle.s a5,fa1,fa0 -; beq a5,zero,taken(label2),not_taken(label1) +; fle.s a0,fa1,fa0 +; beq a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -695,8 +695,8 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; fle.s a5, fa1, fa0 -; beqz a5, 0xc +; fle.s a0, fa1, fa0 +; beqz a0, 0xc ; block1: ; offset 0x8 ; mv a0, zero ; ret @@ -718,8 +718,8 @@ block2: ; VCode: ; block0: -; fle.s a5,fa0,fa1 -; beq a5,zero,taken(label2),not_taken(label1) +; fle.s a0,fa0,fa1 +; beq a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -729,8 +729,8 @@ block2: ; ; 
Disassembled: ; block0: ; offset 0x0 -; fle.s a5, fa0, fa1 -; beqz a5, 0xc +; fle.s a0, fa0, fa1 +; beqz a0, 0xc ; block1: ; offset 0x8 ; mv a0, zero ; ret @@ -752,8 +752,8 @@ block2: ; VCode: ; block0: -; flt.s a5,fa1,fa0 -; beq a5,zero,taken(label2),not_taken(label1) +; flt.s a0,fa1,fa0 +; beq a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -763,8 +763,8 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; flt.s a5, fa1, fa0 -; beqz a5, 0xc +; flt.s a0, fa1, fa0 +; beqz a0, 0xc ; block1: ; offset 0x8 ; mv a0, zero ; ret @@ -786,8 +786,8 @@ block2: ; VCode: ; block0: -; flt.s a5,fa0,fa1 -; beq a5,zero,taken(label2),not_taken(label1) +; flt.s a0,fa0,fa1 +; beq a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,0 ; ret @@ -797,8 +797,8 @@ block2: ; ; Disassembled: ; block0: ; offset 0x0 -; flt.s a5, fa0, fa1 -; beqz a5, 0xc +; flt.s a0, fa0, fa1 +; beqz a0, 0xc ; block1: ; offset 0x8 ; mv a0, zero ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/fcvt-small.clif b/cranelift/filetests/filetests/isa/riscv64/fcvt-small.clif index f9bf123db02b..c2a7954145d4 100644 --- a/cranelift/filetests/filetests/isa/riscv64/fcvt-small.clif +++ b/cranelift/filetests/filetests/isa/riscv64/fcvt-small.clif @@ -10,14 +10,14 @@ block0(v0: i8): ; VCode: ; block0: -; andi a2,a0,255 -; fcvt.s.lu fa0,a2,rne +; andi a0,a0,255 +; fcvt.s.lu fa0,a0,rne ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a2, a0, 0xff -; fcvt.s.lu fa0, a2, rne +; andi a0, a0, 0xff +; fcvt.s.lu fa0, a0, rne ; ret function u0:0(i8) -> f64 { @@ -28,14 +28,14 @@ block0(v0: i8): ; VCode: ; block0: -; andi a2,a0,255 -; fcvt.d.lu fa0,a2,rne +; andi a0,a0,255 +; fcvt.d.lu fa0,a0,rne ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a2, a0, 0xff -; fcvt.d.lu fa0, a2, rne +; andi a0, a0, 0xff +; fcvt.d.lu fa0, a0, rne ; ret function u0:0(i16) -> f32 { @@ -46,16 +46,16 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srli a4,a2,48 -; fcvt.s.lu fa0,a4,rne +; slli a0,a0,48 +; srli a0,a0,48 +; fcvt.s.lu fa0,a0,rne ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srli a4, a2, 0x30 -; fcvt.s.lu fa0, a4, rne +; slli a0, a0, 0x30 +; srli a0, a0, 0x30 +; fcvt.s.lu fa0, a0, rne ; ret function u0:0(i16) -> f64 { @@ -66,16 +66,16 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srli a4,a2,48 -; fcvt.d.lu fa0,a4,rne +; slli a0,a0,48 +; srli a0,a0,48 +; fcvt.d.lu fa0,a0,rne ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srli a4, a2, 0x30 -; fcvt.d.lu fa0, a4, rne +; slli a0, a0, 0x30 +; srli a0, a0, 0x30 +; fcvt.d.lu fa0, a0, rne ; ret function u0:0(f32) -> i8 { @@ -86,33 +86,33 @@ block0(v0: f32): ; VCode: ; block0: -; feq.s a2,fa0,fa0 -; trap_if bad_toint##(a2 eq zero) -; lui a5,-264192 -; fmv.w.x fa1,a5 -; fle.s a3,fa0,fa1 -; trap_if int_ovf##(a3 ne zero) -; lui a0,276480 -; fmv.w.x fa2,a0 -; fle.s a4,fa2,fa0 -; trap_if int_ovf##(a4 ne zero) +; feq.s a0,fa0,fa0 +; trap_if bad_toint##(a0 eq zero) +; lui a0,-264192 +; fmv.w.x fa1,a0 +; fle.s a0,fa0,fa1 +; trap_if int_ovf##(a0 ne zero) +; lui a2,276480 +; fmv.w.x fa4,a2 +; fle.s a0,fa4,fa0 +; trap_if int_ovf##(a0 ne zero) ; fcvt.wu.s a0,fa0,rtz ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; feq.s a2, fa0, fa0 -; bnez a2, 8 +; feq.s a0, fa0, fa0 +; bnez a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: bad_toint -; lui a5, 0xbf800 -; fmv.w.x fa1, a5 -; fle.s a3, fa0, fa1 -; beqz a3, 8 +; lui a0, 0xbf800 +; fmv.w.x fa1, a0 +; fle.s a0, fa0, fa1 +; beqz a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf 
-; lui a0, 0x43800 -; fmv.w.x fa2, a0 -; fle.s a4, fa2, fa0 -; beqz a4, 8 +; lui a2, 0x43800 +; fmv.w.x fa4, a2 +; fle.s a0, fa4, fa0 +; beqz a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf ; fcvt.wu.s a0, fa0, rtz ; trap: bad_toint ; ret @@ -125,37 +125,37 @@ block0(v0: f64): ; VCode: ; block0: -; feq.d a2,fa0,fa0 -; trap_if bad_toint##(a2 eq zero) -; lui a5,3071 -; slli a1,a5,40 -; fmv.d.x fa3,a1 -; fle.d a5,fa0,fa3 -; trap_if int_ovf##(a5 ne zero) -; lui a2,1031 -; slli a4,a2,40 -; fmv.d.x fa1,a4 -; fle.d a2,fa1,fa0 -; trap_if int_ovf##(a2 ne zero) +; feq.d a0,fa0,fa0 +; trap_if bad_toint##(a0 eq zero) +; lui a0,3071 +; slli a0,a0,40 +; fmv.d.x fa1,a0 +; fle.d a1,fa0,fa1 +; trap_if int_ovf##(a1 ne zero) +; lui a4,1031 +; slli a0,a4,40 +; fmv.d.x fa1,a0 +; fle.d a0,fa1,fa0 +; trap_if int_ovf##(a0 ne zero) ; fcvt.wu.d a0,fa0,rtz ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; feq.d a2, fa0, fa0 -; bnez a2, 8 +; feq.d a0, fa0, fa0 +; bnez a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: bad_toint -; lui a5, 0xbff -; slli a1, a5, 0x28 -; fmv.d.x fa3, a1 -; fle.d a5, fa0, fa3 -; beqz a5, 8 +; lui a0, 0xbff +; slli a0, a0, 0x28 +; fmv.d.x fa1, a0 +; fle.d a1, fa0, fa1 +; beqz a1, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf -; lui a2, 0x407 -; slli a4, a2, 0x28 -; fmv.d.x fa1, a4 -; fle.d a2, fa1, fa0 -; beqz a2, 8 +; lui a4, 0x407 +; slli a0, a4, 0x28 +; fmv.d.x fa1, a0 +; fle.d a0, fa1, fa0 +; beqz a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf ; fcvt.wu.d a0, fa0, rtz ; trap: bad_toint ; ret @@ -168,33 +168,33 @@ block0(v0: f32): ; VCode: ; block0: -; feq.s a2,fa0,fa0 -; trap_if bad_toint##(a2 eq zero) -; lui a5,-264192 -; fmv.w.x fa1,a5 -; fle.s a3,fa0,fa1 -; trap_if int_ovf##(a3 ne zero) -; lui a0,292864 -; fmv.w.x fa2,a0 -; fle.s a4,fa2,fa0 -; trap_if int_ovf##(a4 ne zero) +; feq.s a0,fa0,fa0 +; trap_if bad_toint##(a0 eq zero) +; lui a0,-264192 +; fmv.w.x fa1,a0 +; fle.s a0,fa0,fa1 +; trap_if int_ovf##(a0 ne zero) +; lui a2,292864 +; fmv.w.x fa4,a2 +; fle.s a0,fa4,fa0 +; trap_if int_ovf##(a0 ne zero) ; fcvt.wu.s a0,fa0,rtz ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; feq.s a2, fa0, fa0 -; bnez a2, 8 +; feq.s a0, fa0, fa0 +; bnez a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: bad_toint -; lui a5, 0xbf800 -; fmv.w.x fa1, a5 -; fle.s a3, fa0, fa1 -; beqz a3, 8 +; lui a0, 0xbf800 +; fmv.w.x fa1, a0 +; fle.s a0, fa0, fa1 +; beqz a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf -; lui a0, 0x47800 -; fmv.w.x fa2, a0 -; fle.s a4, fa2, fa0 -; beqz a4, 8 +; lui a2, 0x47800 +; fmv.w.x fa4, a2 +; fle.s a0, fa4, fa0 +; beqz a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf ; fcvt.wu.s a0, fa0, rtz ; trap: bad_toint ; ret @@ -207,37 +207,37 @@ block0(v0: f64): ; VCode: ; block0: -; feq.d a2,fa0,fa0 -; trap_if bad_toint##(a2 eq zero) -; lui a5,3071 -; slli a1,a5,40 -; fmv.d.x fa3,a1 -; fle.d a5,fa0,fa3 -; trap_if int_ovf##(a5 ne zero) -; lui a2,1039 -; slli a4,a2,40 -; fmv.d.x fa1,a4 -; fle.d a2,fa1,fa0 -; trap_if int_ovf##(a2 ne zero) +; feq.d a0,fa0,fa0 +; trap_if bad_toint##(a0 eq zero) +; lui a0,3071 +; slli a0,a0,40 +; fmv.d.x fa1,a0 +; fle.d a1,fa0,fa1 +; trap_if int_ovf##(a1 ne zero) +; lui a4,1039 +; slli a0,a4,40 +; fmv.d.x fa1,a0 +; fle.d a0,fa1,fa0 +; trap_if int_ovf##(a0 ne zero) ; fcvt.wu.d a0,fa0,rtz ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; feq.d a2, fa0, fa0 -; bnez a2, 8 +; feq.d a0, fa0, fa0 +; bnez a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: bad_toint -; lui a5, 0xbff -; slli a1, a5, 0x28 -; fmv.d.x fa3, a1 -; fle.d a5, fa0, fa3 -; beqz a5, 8 +; lui 
a0, 0xbff +; slli a0, a0, 0x28 +; fmv.d.x fa1, a0 +; fle.d a1, fa0, fa1 +; beqz a1, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf -; lui a2, 0x40f -; slli a4, a2, 0x28 -; fmv.d.x fa1, a4 -; fle.d a2, fa1, fa0 -; beqz a2, 8 +; lui a4, 0x40f +; slli a0, a4, 0x28 +; fmv.d.x fa1, a0 +; fle.d a0, fa1, fa0 +; beqz a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf ; fcvt.wu.d a0, fa0, rtz ; trap: bad_toint ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/float.clif b/cranelift/filetests/filetests/isa/riscv64/float.clif index 718959709d04..03e5680f3cd9 100644 --- a/cranelift/filetests/filetests/isa/riscv64/float.clif +++ b/cranelift/filetests/filetests/isa/riscv64/float.clif @@ -299,33 +299,33 @@ block0(v0: f32): ; VCode: ; block0: -; feq.s a2,fa0,fa0 -; trap_if bad_toint##(a2 eq zero) -; lui a5,-264192 -; fmv.w.x fa1,a5 -; fle.s a3,fa0,fa1 -; trap_if int_ovf##(a3 ne zero) -; lui a0,325632 -; fmv.w.x fa2,a0 -; fle.s a4,fa2,fa0 -; trap_if int_ovf##(a4 ne zero) +; feq.s a0,fa0,fa0 +; trap_if bad_toint##(a0 eq zero) +; lui a0,-264192 +; fmv.w.x fa1,a0 +; fle.s a0,fa0,fa1 +; trap_if int_ovf##(a0 ne zero) +; lui a2,325632 +; fmv.w.x fa4,a2 +; fle.s a0,fa4,fa0 +; trap_if int_ovf##(a0 ne zero) ; fcvt.wu.s a0,fa0,rtz ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; feq.s a2, fa0, fa0 -; bnez a2, 8 +; feq.s a0, fa0, fa0 +; bnez a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: bad_toint -; lui a5, 0xbf800 -; fmv.w.x fa1, a5 -; fle.s a3, fa0, fa1 -; beqz a3, 8 +; lui a0, 0xbf800 +; fmv.w.x fa1, a0 +; fle.s a0, fa0, fa1 +; beqz a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf -; lui a0, 0x4f800 -; fmv.w.x fa2, a0 -; fle.s a4, fa2, fa0 -; beqz a4, 8 +; lui a2, 0x4f800 +; fmv.w.x fa4, a2 +; fle.s a0, fa4, fa0 +; beqz a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf ; fcvt.wu.s a0, fa0, rtz ; trap: bad_toint ; ret @@ -338,34 +338,34 @@ block0(v0: f32): ; VCode: ; block0: -; feq.s a2,fa0,fa0 -; trap_if bad_toint##(a2 eq zero) -; lui a5,-200704 -; addi a1,a5,1 -; fmv.w.x fa3,a1 -; fle.s a5,fa0,fa3 -; trap_if int_ovf##(a5 ne zero) -; lui a2,323584 -; fmv.w.x fa4,a2 -; fle.s a0,fa4,fa0 +; feq.s a0,fa0,fa0 +; trap_if bad_toint##(a0 eq zero) +; lui a0,-200704 +; addi a0,a0,1 +; fmv.w.x fa1,a0 +; fle.s a1,fa0,fa1 +; trap_if int_ovf##(a1 ne zero) +; lui a4,323584 +; fmv.w.x fa1,a4 +; fle.s a0,fa1,fa0 ; trap_if int_ovf##(a0 ne zero) ; fcvt.w.s a0,fa0,rtz ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; feq.s a2, fa0, fa0 -; bnez a2, 8 +; feq.s a0, fa0, fa0 +; bnez a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: bad_toint -; lui a5, 0xcf000 -; addi a1, a5, 1 -; fmv.w.x fa3, a1 -; fle.s a5, fa0, fa3 -; beqz a5, 8 +; lui a0, 0xcf000 +; addi a0, a0, 1 +; fmv.w.x fa1, a0 +; fle.s a1, fa0, fa1 +; beqz a1, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf -; lui a2, 0x4f000 -; fmv.w.x fa4, a2 -; fle.s a0, fa4, fa0 +; lui a4, 0x4f000 +; fmv.w.x fa1, a4 +; fle.s a0, fa1, fa0 ; beqz a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf ; fcvt.w.s a0, fa0, rtz ; trap: bad_toint @@ -379,33 +379,33 @@ block0(v0: f32): ; VCode: ; block0: -; feq.s a2,fa0,fa0 -; trap_if bad_toint##(a2 eq zero) -; lui a5,-264192 -; fmv.w.x fa1,a5 -; fle.s a3,fa0,fa1 -; trap_if int_ovf##(a3 ne zero) -; lui a0,391168 -; fmv.w.x fa2,a0 -; fle.s a4,fa2,fa0 -; trap_if int_ovf##(a4 ne zero) +; feq.s a0,fa0,fa0 +; trap_if bad_toint##(a0 eq zero) +; lui a0,-264192 +; fmv.w.x fa1,a0 +; fle.s a0,fa0,fa1 +; trap_if int_ovf##(a0 ne zero) +; lui a2,391168 +; fmv.w.x fa4,a2 +; fle.s a0,fa4,fa0 +; trap_if int_ovf##(a0 ne zero) ; fcvt.lu.s a0,fa0,rtz 
; ret ; ; Disassembled: ; block0: ; offset 0x0 -; feq.s a2, fa0, fa0 -; bnez a2, 8 +; feq.s a0, fa0, fa0 +; bnez a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: bad_toint -; lui a5, 0xbf800 -; fmv.w.x fa1, a5 -; fle.s a3, fa0, fa1 -; beqz a3, 8 +; lui a0, 0xbf800 +; fmv.w.x fa1, a0 +; fle.s a0, fa0, fa1 +; beqz a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf -; lui a0, 0x5f800 -; fmv.w.x fa2, a0 -; fle.s a4, fa2, fa0 -; beqz a4, 8 +; lui a2, 0x5f800 +; fmv.w.x fa4, a2 +; fle.s a0, fa4, fa0 +; beqz a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf ; fcvt.lu.s a0, fa0, rtz ; trap: bad_toint ; ret @@ -418,34 +418,34 @@ block0(v0: f32): ; VCode: ; block0: -; feq.s a2,fa0,fa0 -; trap_if bad_toint##(a2 eq zero) -; lui a5,-135168 -; addi a1,a5,1 -; fmv.w.x fa3,a1 -; fle.s a5,fa0,fa3 -; trap_if int_ovf##(a5 ne zero) -; lui a2,389120 -; fmv.w.x fa4,a2 -; fle.s a0,fa4,fa0 +; feq.s a0,fa0,fa0 +; trap_if bad_toint##(a0 eq zero) +; lui a0,-135168 +; addi a0,a0,1 +; fmv.w.x fa1,a0 +; fle.s a1,fa0,fa1 +; trap_if int_ovf##(a1 ne zero) +; lui a4,389120 +; fmv.w.x fa1,a4 +; fle.s a0,fa1,fa0 ; trap_if int_ovf##(a0 ne zero) ; fcvt.l.s a0,fa0,rtz ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; feq.s a2, fa0, fa0 -; bnez a2, 8 +; feq.s a0, fa0, fa0 +; bnez a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: bad_toint -; lui a5, 0xdf000 -; addi a1, a5, 1 -; fmv.w.x fa3, a1 -; fle.s a5, fa0, fa3 -; beqz a5, 8 +; lui a0, 0xdf000 +; addi a0, a0, 1 +; fmv.w.x fa1, a0 +; fle.s a1, fa0, fa1 +; beqz a1, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf -; lui a2, 0x5f000 -; fmv.w.x fa4, a2 -; fle.s a0, fa4, fa0 +; lui a4, 0x5f000 +; fmv.w.x fa1, a4 +; fle.s a0, fa1, fa0 ; beqz a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf ; fcvt.l.s a0, fa0, rtz ; trap: bad_toint @@ -459,37 +459,37 @@ block0(v0: f64): ; VCode: ; block0: -; feq.d a2,fa0,fa0 -; trap_if bad_toint##(a2 eq zero) -; lui a5,3071 -; slli a1,a5,40 -; fmv.d.x fa3,a1 -; fle.d a5,fa0,fa3 -; trap_if int_ovf##(a5 ne zero) -; lui a2,1055 -; slli a4,a2,40 -; fmv.d.x fa1,a4 -; fle.d a2,fa1,fa0 -; trap_if int_ovf##(a2 ne zero) +; feq.d a0,fa0,fa0 +; trap_if bad_toint##(a0 eq zero) +; lui a0,3071 +; slli a0,a0,40 +; fmv.d.x fa1,a0 +; fle.d a1,fa0,fa1 +; trap_if int_ovf##(a1 ne zero) +; lui a4,1055 +; slli a0,a4,40 +; fmv.d.x fa1,a0 +; fle.d a0,fa1,fa0 +; trap_if int_ovf##(a0 ne zero) ; fcvt.wu.d a0,fa0,rtz ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; feq.d a2, fa0, fa0 -; bnez a2, 8 +; feq.d a0, fa0, fa0 +; bnez a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: bad_toint -; lui a5, 0xbff -; slli a1, a5, 0x28 -; fmv.d.x fa3, a1 -; fle.d a5, fa0, fa3 -; beqz a5, 8 +; lui a0, 0xbff +; slli a0, a0, 0x28 +; fmv.d.x fa1, a0 +; fle.d a1, fa0, fa1 +; beqz a1, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf -; lui a2, 0x41f -; slli a4, a2, 0x28 -; fmv.d.x fa1, a4 -; fle.d a2, fa1, fa0 -; beqz a2, 8 +; lui a4, 0x41f +; slli a0, a4, 0x28 +; fmv.d.x fa1, a0 +; fle.d a0, fa1, fa0 +; beqz a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf ; fcvt.wu.d a0, fa0, rtz ; trap: bad_toint ; ret @@ -502,35 +502,35 @@ block0(v0: f64): ; VCode: ; block0: -; feq.d a2,fa0,fa0 -; trap_if bad_toint##(a2 eq zero) -; ld a5,[const(0)] -; fmv.d.x fa1,a5 -; fle.d a3,fa0,fa1 -; trap_if int_ovf##(a3 ne zero) -; lui a0,527 -; slli a2,a0,41 -; fmv.d.x fa4,a2 -; fle.d a0,fa4,fa0 +; feq.d a0,fa0,fa0 +; trap_if bad_toint##(a0 eq zero) +; ld a0,[const(0)] +; fmv.d.x fa1,a0 +; fle.d a0,fa0,fa1 +; trap_if int_ovf##(a0 ne zero) +; lui a2,527 +; slli a4,a2,41 +; fmv.d.x fa1,a4 +; fle.d a0,fa1,fa0 ; trap_if 
int_ovf##(a0 ne zero) ; fcvt.w.d a0,fa0,rtz ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; feq.d a2, fa0, fa0 -; bnez a2, 8 +; feq.d a0, fa0, fa0 +; bnez a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: bad_toint -; auipc a5, 0 -; ld a5, 0x3c(a5) -; fmv.d.x fa1, a5 -; fle.d a3, fa0, fa1 -; beqz a3, 8 +; auipc a0, 0 +; ld a0, 0x3c(a0) +; fmv.d.x fa1, a0 +; fle.d a0, fa0, fa1 +; beqz a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf -; lui a0, 0x20f -; slli a2, a0, 0x29 -; fmv.d.x fa4, a2 -; fle.d a0, fa4, fa0 +; lui a2, 0x20f +; slli a4, a2, 0x29 +; fmv.d.x fa1, a4 +; fle.d a0, fa1, fa0 ; beqz a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf ; fcvt.w.d a0, fa0, rtz ; trap: bad_toint @@ -547,37 +547,37 @@ block0(v0: f64): ; VCode: ; block0: -; feq.d a2,fa0,fa0 -; trap_if bad_toint##(a2 eq zero) -; lui a5,3071 -; slli a1,a5,40 -; fmv.d.x fa3,a1 -; fle.d a5,fa0,fa3 -; trap_if int_ovf##(a5 ne zero) -; lui a2,1087 -; slli a4,a2,40 -; fmv.d.x fa1,a4 -; fle.d a2,fa1,fa0 -; trap_if int_ovf##(a2 ne zero) +; feq.d a0,fa0,fa0 +; trap_if bad_toint##(a0 eq zero) +; lui a0,3071 +; slli a0,a0,40 +; fmv.d.x fa1,a0 +; fle.d a1,fa0,fa1 +; trap_if int_ovf##(a1 ne zero) +; lui a4,1087 +; slli a0,a4,40 +; fmv.d.x fa1,a0 +; fle.d a0,fa1,fa0 +; trap_if int_ovf##(a0 ne zero) ; fcvt.lu.d a0,fa0,rtz ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; feq.d a2, fa0, fa0 -; bnez a2, 8 +; feq.d a0, fa0, fa0 +; bnez a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: bad_toint -; lui a5, 0xbff -; slli a1, a5, 0x28 -; fmv.d.x fa3, a1 -; fle.d a5, fa0, fa3 -; beqz a5, 8 +; lui a0, 0xbff +; slli a0, a0, 0x28 +; fmv.d.x fa1, a0 +; fle.d a1, fa0, fa1 +; beqz a1, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf -; lui a2, 0x43f -; slli a4, a2, 0x28 -; fmv.d.x fa1, a4 -; fle.d a2, fa1, fa0 -; beqz a2, 8 +; lui a4, 0x43f +; slli a0, a4, 0x28 +; fmv.d.x fa1, a0 +; fle.d a0, fa1, fa0 +; beqz a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf ; fcvt.lu.d a0, fa0, rtz ; trap: bad_toint ; ret @@ -590,35 +590,35 @@ block0(v0: f64): ; VCode: ; block0: -; feq.d a2,fa0,fa0 -; trap_if bad_toint##(a2 eq zero) -; ld a5,[const(0)] -; fmv.d.x fa1,a5 -; fle.d a3,fa0,fa1 -; trap_if int_ovf##(a3 ne zero) -; lui a0,543 -; slli a2,a0,41 -; fmv.d.x fa4,a2 -; fle.d a0,fa4,fa0 +; feq.d a0,fa0,fa0 +; trap_if bad_toint##(a0 eq zero) +; ld a0,[const(0)] +; fmv.d.x fa1,a0 +; fle.d a0,fa0,fa1 +; trap_if int_ovf##(a0 ne zero) +; lui a2,543 +; slli a4,a2,41 +; fmv.d.x fa1,a4 +; fle.d a0,fa1,fa0 ; trap_if int_ovf##(a0 ne zero) ; fcvt.l.d a0,fa0,rtz ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; feq.d a2, fa0, fa0 -; bnez a2, 8 +; feq.d a0, fa0, fa0 +; bnez a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: bad_toint -; auipc a5, 0 -; ld a5, 0x3c(a5) -; fmv.d.x fa1, a5 -; fle.d a3, fa0, fa1 -; beqz a3, 8 +; auipc a0, 0 +; ld a0, 0x3c(a0) +; fmv.d.x fa1, a0 +; fle.d a0, fa0, fa1 +; beqz a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf -; lui a0, 0x21f -; slli a2, a0, 0x29 -; fmv.d.x fa4, a2 -; fle.d a0, fa4, fa0 +; lui a2, 0x21f +; slli a4, a2, 0x29 +; fmv.d.x fa1, a4 +; fle.d a0, fa1, fa0 ; beqz a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: int_ovf ; fcvt.l.d a0, fa0, rtz ; trap: bad_toint @@ -763,18 +763,18 @@ block0(v0: f32): ; VCode: ; block0: -; fcvt.wu.s a2,fa0,rtz -; feq.s a4,fa0,fa0 -; sub a0,zero,a4 -; and a0,a2,a0 +; fcvt.wu.s a0,fa0,rtz +; feq.s a1,fa0,fa0 +; sub a1,zero,a1 +; and a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fcvt.wu.s a2, fa0, rtz ; trap: bad_toint -; feq.s a4, fa0, fa0 -; neg a0, a4 -; and a0, a2, a0 +; fcvt.wu.s a0, 
fa0, rtz ; trap: bad_toint +; feq.s a1, fa0, fa0 +; neg a1, a1 +; and a0, a0, a1 ; ret function %f50(f32) -> i32 { @@ -785,18 +785,18 @@ block0(v0: f32): ; VCode: ; block0: -; fcvt.w.s a2,fa0,rtz -; feq.s a4,fa0,fa0 -; sub a0,zero,a4 -; and a0,a2,a0 +; fcvt.w.s a0,fa0,rtz +; feq.s a1,fa0,fa0 +; sub a1,zero,a1 +; and a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fcvt.w.s a2, fa0, rtz ; trap: bad_toint -; feq.s a4, fa0, fa0 -; neg a0, a4 -; and a0, a2, a0 +; fcvt.w.s a0, fa0, rtz ; trap: bad_toint +; feq.s a1, fa0, fa0 +; neg a1, a1 +; and a0, a0, a1 ; ret function %f51(f32) -> i64 { @@ -807,18 +807,18 @@ block0(v0: f32): ; VCode: ; block0: -; fcvt.lu.s a2,fa0,rtz -; feq.s a4,fa0,fa0 -; sub a0,zero,a4 -; and a0,a2,a0 +; fcvt.lu.s a0,fa0,rtz +; feq.s a1,fa0,fa0 +; sub a1,zero,a1 +; and a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fcvt.lu.s a2, fa0, rtz ; trap: bad_toint -; feq.s a4, fa0, fa0 -; neg a0, a4 -; and a0, a2, a0 +; fcvt.lu.s a0, fa0, rtz ; trap: bad_toint +; feq.s a1, fa0, fa0 +; neg a1, a1 +; and a0, a0, a1 ; ret function %f52(f32) -> i64 { @@ -829,18 +829,18 @@ block0(v0: f32): ; VCode: ; block0: -; fcvt.l.s a2,fa0,rtz -; feq.s a4,fa0,fa0 -; sub a0,zero,a4 -; and a0,a2,a0 +; fcvt.l.s a0,fa0,rtz +; feq.s a1,fa0,fa0 +; sub a1,zero,a1 +; and a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fcvt.l.s a2, fa0, rtz ; trap: bad_toint -; feq.s a4, fa0, fa0 -; neg a0, a4 -; and a0, a2, a0 +; fcvt.l.s a0, fa0, rtz ; trap: bad_toint +; feq.s a1, fa0, fa0 +; neg a1, a1 +; and a0, a0, a1 ; ret function %f53(f64) -> i32 { @@ -851,18 +851,18 @@ block0(v0: f64): ; VCode: ; block0: -; fcvt.wu.d a2,fa0,rtz -; feq.d a4,fa0,fa0 -; sub a0,zero,a4 -; and a0,a2,a0 +; fcvt.wu.d a0,fa0,rtz +; feq.d a1,fa0,fa0 +; sub a1,zero,a1 +; and a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fcvt.wu.d a2, fa0, rtz ; trap: bad_toint -; feq.d a4, fa0, fa0 -; neg a0, a4 -; and a0, a2, a0 +; fcvt.wu.d a0, fa0, rtz ; trap: bad_toint +; feq.d a1, fa0, fa0 +; neg a1, a1 +; and a0, a0, a1 ; ret function %f54(f64) -> i32 { @@ -873,18 +873,18 @@ block0(v0: f64): ; VCode: ; block0: -; fcvt.w.d a2,fa0,rtz -; feq.d a4,fa0,fa0 -; sub a0,zero,a4 -; and a0,a2,a0 +; fcvt.w.d a0,fa0,rtz +; feq.d a1,fa0,fa0 +; sub a1,zero,a1 +; and a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fcvt.w.d a2, fa0, rtz ; trap: bad_toint -; feq.d a4, fa0, fa0 -; neg a0, a4 -; and a0, a2, a0 +; fcvt.w.d a0, fa0, rtz ; trap: bad_toint +; feq.d a1, fa0, fa0 +; neg a1, a1 +; and a0, a0, a1 ; ret function %f55(f64) -> i64 { @@ -895,18 +895,18 @@ block0(v0: f64): ; VCode: ; block0: -; fcvt.lu.d a2,fa0,rtz -; feq.d a4,fa0,fa0 -; sub a0,zero,a4 -; and a0,a2,a0 +; fcvt.lu.d a0,fa0,rtz +; feq.d a1,fa0,fa0 +; sub a1,zero,a1 +; and a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fcvt.lu.d a2, fa0, rtz ; trap: bad_toint -; feq.d a4, fa0, fa0 -; neg a0, a4 -; and a0, a2, a0 +; fcvt.lu.d a0, fa0, rtz ; trap: bad_toint +; feq.d a1, fa0, fa0 +; neg a1, a1 +; and a0, a0, a1 ; ret function %f56(f64) -> i64 { @@ -917,17 +917,17 @@ block0(v0: f64): ; VCode: ; block0: -; fcvt.l.d a2,fa0,rtz -; feq.d a4,fa0,fa0 -; sub a0,zero,a4 -; and a0,a2,a0 +; fcvt.l.d a0,fa0,rtz +; feq.d a1,fa0,fa0 +; sub a1,zero,a1 +; and a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fcvt.l.d a2, fa0, rtz ; trap: bad_toint -; feq.d a4, fa0, fa0 -; neg a0, a4 -; and a0, a2, a0 +; fcvt.l.d a0, fa0, rtz ; trap: bad_toint +; feq.d a1, fa0, fa0 +; neg a1, a1 +; and a0, a0, a1 ; ret diff --git 
a/cranelift/filetests/filetests/isa/riscv64/floor.clif b/cranelift/filetests/filetests/isa/riscv64/floor.clif index 87d37139de96..547fdbdc9e30 100644 --- a/cranelift/filetests/filetests/isa/riscv64/floor.clif +++ b/cranelift/filetests/filetests/isa/riscv64/floor.clif @@ -11,31 +11,31 @@ block0(v0: f32): ; VCode: ; block0: -; lui a2,307200 -; fmv.w.x fa4,a2 -; fabs.s fa1,fa0 -; flt.s a2,fa1,fa4 -; fcvt.w.s a4,fa0,rdn -; fcvt.s.w fa1,a4,rdn -; fsgnj.s fa2,fa1,fa0 -; fmv.w.x fa4,zero -; fadd.s fa0,fa0,fa4,rne -; select fa0,fa0,fa2##condition=(a2 eq zero) +; lui a0,307200 +; fmv.w.x fa1,a0 +; fabs.s fa2,fa0 +; flt.s a0,fa2,fa1 +; fcvt.w.s a1,fa0,rdn +; fcvt.s.w fa2,a1,rdn +; fsgnj.s fa4,fa2,fa0 +; fmv.w.x fa1,zero +; fadd.s fa0,fa0,fa1,rne +; select fa0,fa0,fa4##condition=(a0 eq zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lui a2, 0x4b000 -; fmv.w.x fa4, a2 -; fabs.s fa1, fa0 -; flt.s a2, fa1, fa4 -; fcvt.w.s a4, fa0, rdn ; trap: bad_toint -; fcvt.s.w fa1, a4, rdn -; fsgnj.s fa2, fa1, fa0 -; fmv.w.x fa4, zero -; fadd.s fa0, fa0, fa4, rne -; beqz a2, 8 -; fmv.d fa0, fa2 +; lui a0, 0x4b000 +; fmv.w.x fa1, a0 +; fabs.s fa2, fa0 +; flt.s a0, fa2, fa1 +; fcvt.w.s a1, fa0, rdn ; trap: bad_toint +; fcvt.s.w fa2, a1, rdn +; fsgnj.s fa4, fa2, fa0 +; fmv.w.x fa1, zero +; fadd.s fa0, fa0, fa1, rne +; beqz a0, 8 +; fmv.d fa0, fa4 ; ret function %floor_f64(f64) -> f64 { @@ -46,33 +46,32 @@ block0(v0: f64): ; VCode: ; block0: -; lui a2,1075 -; slli a4,a2,40 -; fmv.d.x fa1,a4 +; lui a0,1075 +; slli a0,a0,40 +; fmv.d.x fa1,a0 ; fabs.d fa2,fa0 -; flt.d a4,fa2,fa1 -; fcvt.l.d a0,fa0,rdn -; fcvt.d.l fa2,a0,rdn -; fsgnj.d fa4,fa2,fa0 -; fmv.d.x fa1,zero -; fadd.d fa2,fa0,fa1,rne -; select fa0,fa2,fa4##condition=(a4 eq zero) +; flt.d a0,fa2,fa1 +; fcvt.l.d a2,fa0,rdn +; fcvt.d.l fa4,a2,rdn +; fsgnj.d fa1,fa4,fa0 +; fmv.d.x fa2,zero +; fadd.d fa0,fa0,fa2,rne +; select fa0,fa0,fa1##condition=(a0 eq zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lui a2, 0x433 -; slli a4, a2, 0x28 -; fmv.d.x fa1, a4 +; lui a0, 0x433 +; slli a0, a0, 0x28 +; fmv.d.x fa1, a0 ; fabs.d fa2, fa0 -; flt.d a4, fa2, fa1 -; fcvt.l.d a0, fa0, rdn ; trap: bad_toint -; fcvt.d.l fa2, a0, rdn -; fsgnj.d fa4, fa2, fa0 -; fmv.d.x fa1, zero -; fadd.d fa2, fa0, fa1, rne -; fmv.d fa0, fa2 -; beqz a4, 8 -; fmv.d fa0, fa4 +; flt.d a0, fa2, fa1 +; fcvt.l.d a2, fa0, rdn ; trap: bad_toint +; fcvt.d.l fa4, a2, rdn +; fsgnj.d fa1, fa4, fa0 +; fmv.d.x fa2, zero +; fadd.d fa0, fa0, fa2, rne +; beqz a0, 8 +; fmv.d fa0, fa1 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/fmax.clif b/cranelift/filetests/filetests/isa/riscv64/fmax.clif index 456ad8ca0823..9157115b7431 100644 --- a/cranelift/filetests/filetests/isa/riscv64/fmax.clif +++ b/cranelift/filetests/filetests/isa/riscv64/fmax.clif @@ -10,24 +10,24 @@ block0(v0: f32, v1: f32): ; VCode: ; block0: -; feq.s a3,fa0,fa0 -; feq.s a5,fa1,fa1 -; and a1,a3,a5 -; fadd.s fa3,fa0,fa1,rne -; fmax.s fa5,fa0,fa1 -; select fa0,fa5,fa3##condition=(a1 ne zero) +; feq.s a0,fa0,fa0 +; feq.s a1,fa1,fa1 +; and a0,a0,a1 +; fadd.s fa2,fa0,fa1,rne +; fmax.s fa1,fa0,fa1 +; select fa0,fa1,fa2##condition=(a0 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; feq.s a3, fa0, fa0 -; feq.s a5, fa1, fa1 -; and a1, a3, a5 -; fadd.s fa3, fa0, fa1, rne -; fmax.s fa5, fa0, fa1 -; fmv.d fa0, fa5 -; bnez a1, 8 -; fmv.d fa0, fa3 +; feq.s a0, fa0, fa0 +; feq.s a1, fa1, fa1 +; and a0, a0, a1 +; fadd.s fa2, fa0, fa1, rne +; fmax.s fa1, fa0, fa1 +; fmv.d fa0, fa1 +; bnez a0, 8 +; fmv.d fa0, fa2 ; ret function 
%fmax_f64(f64, f64) -> f64 { @@ -38,23 +38,23 @@ block0(v0: f64, v1: f64): ; VCode: ; block0: -; feq.d a3,fa0,fa0 -; feq.d a5,fa1,fa1 -; and a1,a3,a5 -; fadd.d fa3,fa0,fa1,rne -; fmax.d fa5,fa0,fa1 -; select fa0,fa5,fa3##condition=(a1 ne zero) +; feq.d a0,fa0,fa0 +; feq.d a1,fa1,fa1 +; and a0,a0,a1 +; fadd.d fa2,fa0,fa1,rne +; fmax.d fa1,fa0,fa1 +; select fa0,fa1,fa2##condition=(a0 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; feq.d a3, fa0, fa0 -; feq.d a5, fa1, fa1 -; and a1, a3, a5 -; fadd.d fa3, fa0, fa1, rne -; fmax.d fa5, fa0, fa1 -; fmv.d fa0, fa5 -; bnez a1, 8 -; fmv.d fa0, fa3 +; feq.d a0, fa0, fa0 +; feq.d a1, fa1, fa1 +; and a0, a0, a1 +; fadd.d fa2, fa0, fa1, rne +; fmax.d fa1, fa0, fa1 +; fmv.d fa0, fa1 +; bnez a0, 8 +; fmv.d fa0, fa2 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/fmin.clif b/cranelift/filetests/filetests/isa/riscv64/fmin.clif index dd208cb24a60..aaaf95204203 100644 --- a/cranelift/filetests/filetests/isa/riscv64/fmin.clif +++ b/cranelift/filetests/filetests/isa/riscv64/fmin.clif @@ -10,24 +10,24 @@ block0(v0: f32, v1: f32): ; VCode: ; block0: -; feq.s a3,fa0,fa0 -; feq.s a5,fa1,fa1 -; and a1,a3,a5 -; fadd.s fa3,fa0,fa1,rne -; fmin.s fa5,fa0,fa1 -; select fa0,fa5,fa3##condition=(a1 ne zero) +; feq.s a0,fa0,fa0 +; feq.s a1,fa1,fa1 +; and a0,a0,a1 +; fadd.s fa2,fa0,fa1,rne +; fmin.s fa1,fa0,fa1 +; select fa0,fa1,fa2##condition=(a0 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; feq.s a3, fa0, fa0 -; feq.s a5, fa1, fa1 -; and a1, a3, a5 -; fadd.s fa3, fa0, fa1, rne -; fmin.s fa5, fa0, fa1 -; fmv.d fa0, fa5 -; bnez a1, 8 -; fmv.d fa0, fa3 +; feq.s a0, fa0, fa0 +; feq.s a1, fa1, fa1 +; and a0, a0, a1 +; fadd.s fa2, fa0, fa1, rne +; fmin.s fa1, fa0, fa1 +; fmv.d fa0, fa1 +; bnez a0, 8 +; fmv.d fa0, fa2 ; ret function %fmin_f64(f64, f64) -> f64 { @@ -38,23 +38,23 @@ block0(v0: f64, v1: f64): ; VCode: ; block0: -; feq.d a3,fa0,fa0 -; feq.d a5,fa1,fa1 -; and a1,a3,a5 -; fadd.d fa3,fa0,fa1,rne -; fmin.d fa5,fa0,fa1 -; select fa0,fa5,fa3##condition=(a1 ne zero) +; feq.d a0,fa0,fa0 +; feq.d a1,fa1,fa1 +; and a0,a0,a1 +; fadd.d fa2,fa0,fa1,rne +; fmin.d fa1,fa0,fa1 +; select fa0,fa1,fa2##condition=(a0 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; feq.d a3, fa0, fa0 -; feq.d a5, fa1, fa1 -; and a1, a3, a5 -; fadd.d fa3, fa0, fa1, rne -; fmin.d fa5, fa0, fa1 -; fmv.d fa0, fa5 -; bnez a1, 8 -; fmv.d fa0, fa3 +; feq.d a0, fa0, fa0 +; feq.d a1, fa1, fa1 +; and a0, a0, a1 +; fadd.d fa2, fa0, fa1, rne +; fmin.d fa1, fa0, fa1 +; fmv.d fa0, fa1 +; bnez a0, 8 +; fmv.d fa0, fa2 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/i128-bmask.clif b/cranelift/filetests/filetests/isa/riscv64/i128-bmask.clif index a258225f5217..424900c17b73 100644 --- a/cranelift/filetests/filetests/isa/riscv64/i128-bmask.clif +++ b/cranelift/filetests/filetests/isa/riscv64/i128-bmask.clif @@ -10,17 +10,17 @@ block0(v0: i128): ; VCode: ; block0: -; or a3,a0,a1 -; sltu a5,zero,a3 -; sub a1,zero,a5 +; or a0,a0,a1 +; sltu a0,zero,a0 +; sub a1,zero,a0 ; mv a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; or a3, a0, a1 -; snez a5, a3 -; neg a1, a5 +; or a0, a0, a1 +; snez a0, a0 +; neg a1, a0 ; mv a0, a1 ; ret @@ -32,16 +32,16 @@ block0(v0: i128): ; VCode: ; block0: -; or a3,a0,a1 -; sltu a5,zero,a3 -; sub a0,zero,a5 +; or a0,a0,a1 +; sltu a0,zero,a0 +; sub a0,zero,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; or a3, a0, a1 -; snez a5, a3 -; neg a0, a5 +; or a0, a0, a1 +; snez a0, a0 +; neg a0, a0 ; ret function %bmask_i128_i32(i128) -> 
i32 { @@ -52,16 +52,16 @@ block0(v0: i128): ; VCode: ; block0: -; or a3,a0,a1 -; sltu a5,zero,a3 -; sub a0,zero,a5 +; or a0,a0,a1 +; sltu a0,zero,a0 +; sub a0,zero,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; or a3, a0, a1 -; snez a5, a3 -; neg a0, a5 +; or a0, a0, a1 +; snez a0, a0 +; neg a0, a0 ; ret function %bmask_i128_i16(i128) -> i16 { @@ -72,16 +72,16 @@ block0(v0: i128): ; VCode: ; block0: -; or a3,a0,a1 -; sltu a5,zero,a3 -; sub a0,zero,a5 +; or a0,a0,a1 +; sltu a0,zero,a0 +; sub a0,zero,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; or a3, a0, a1 -; snez a5, a3 -; neg a0, a5 +; or a0, a0, a1 +; snez a0, a0 +; neg a0, a0 ; ret function %bmask_i128_i8(i128) -> i8 { @@ -92,16 +92,16 @@ block0(v0: i128): ; VCode: ; block0: -; or a3,a0,a1 -; sltu a5,zero,a3 -; sub a0,zero,a5 +; or a0,a0,a1 +; sltu a0,zero,a0 +; sub a0,zero,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; or a3, a0, a1 -; snez a5, a3 -; neg a0, a5 +; or a0, a0, a1 +; snez a0, a0 +; neg a0, a0 ; ret function %bmask_i64_i128(i64) -> i128 { @@ -112,15 +112,15 @@ block0(v0: i64): ; VCode: ; block0: -; sltu a2,zero,a0 -; sub a1,zero,a2 +; sltu a0,zero,a0 +; sub a1,zero,a0 ; mv a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; snez a2, a0 -; neg a1, a2 +; snez a0, a0 +; neg a1, a0 ; mv a0, a1 ; ret @@ -132,17 +132,17 @@ block0(v0: i32): ; VCode: ; block0: -; sext.w a2,a0 -; sltu a4,zero,a2 -; sub a1,zero,a4 +; sext.w a0,a0 +; sltu a0,zero,a0 +; sub a1,zero,a0 ; mv a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a2, a0 -; snez a4, a2 -; neg a1, a4 +; sext.w a0, a0 +; snez a0, a0 +; neg a1, a0 ; mv a0, a1 ; ret @@ -154,18 +154,18 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srai a4,a2,48 -; sltu a0,zero,a4 +; slli a0,a0,48 +; srai a0,a0,48 +; sltu a0,zero,a0 ; sub a1,zero,a0 ; mv a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srai a4, a2, 0x30 -; snez a0, a4 +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 +; snez a0, a0 ; neg a1, a0 ; mv a0, a1 ; ret @@ -178,18 +178,18 @@ block0(v0: i8): ; VCode: ; block0: -; slli a2,a0,56 -; srai a4,a2,56 -; sltu a0,zero,a4 +; slli a0,a0,56 +; srai a0,a0,56 +; sltu a0,zero,a0 ; sub a1,zero,a0 ; mv a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x38 -; srai a4, a2, 0x38 -; snez a0, a4 +; slli a0, a0, 0x38 +; srai a0, a0, 0x38 +; snez a0, a0 ; neg a1, a0 ; mv a0, a1 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/i128.clif b/cranelift/filetests/filetests/isa/riscv64/i128.clif index 22b924792869..777a8183fac5 100644 --- a/cranelift/filetests/filetests/isa/riscv64/i128.clif +++ b/cranelift/filetests/filetests/isa/riscv64/i128.clif @@ -12,16 +12,16 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; mul a3,a0,a1 +; mul a2,a0,a1 ; mulhu a1,a0,a1 -; mv a0,a3 +; mv a0,a2 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mul a3, a0, a1 +; mul a2, a0, a1 ; mulhu a1, a0, a1 -; mv a0, a3 +; mv a0, a2 ; ret function %mul_sextend_i64(i64, i64) -> i128 { @@ -34,16 +34,16 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; mul a3,a0,a1 +; mul a2,a0,a1 ; mulh a1,a0,a1 -; mv a0,a3 +; mv a0,a2 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mul a3, a0, a1 +; mul a2, a0, a1 ; mulh a1, a0, a1 -; mv a0, a3 +; mv a0, a2 ; ret function %smul_high_i64_pattern(i64, i64) -> i64 { diff --git a/cranelift/filetests/filetests/isa/riscv64/iabs-zbb.clif b/cranelift/filetests/filetests/isa/riscv64/iabs-zbb.clif index 63e4cd795732..bc1c87f09051 100644 --- a/cranelift/filetests/filetests/isa/riscv64/iabs-zbb.clif +++ 
b/cranelift/filetests/filetests/isa/riscv64/iabs-zbb.clif @@ -9,16 +9,16 @@ block0(v0: i8): ; VCode: ; block0: -; sext.b a2,a0 -; sub a4,zero,a2 -; max a0,a2,a4 +; sext.b a0,a0 +; sub a1,zero,a0 +; max a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x13, 0x16, 0x45, 0x60 -; neg a4, a2 -; .byte 0x33, 0x65, 0xe6, 0x0a +; .byte 0x13, 0x15, 0x45, 0x60 +; neg a1, a0 +; .byte 0x33, 0x65, 0xb5, 0x0a ; ret function %iabs_i16(i16) -> i16 { @@ -29,16 +29,16 @@ block0(v0: i16): ; VCode: ; block0: -; sext.h a2,a0 -; sub a4,zero,a2 -; max a0,a2,a4 +; sext.h a0,a0 +; sub a1,zero,a0 +; max a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x13, 0x16, 0x55, 0x60 -; neg a4, a2 -; .byte 0x33, 0x65, 0xe6, 0x0a +; .byte 0x13, 0x15, 0x55, 0x60 +; neg a1, a0 +; .byte 0x33, 0x65, 0xb5, 0x0a ; ret function %iabs_i32(i32) -> i32 { @@ -49,16 +49,16 @@ block0(v0: i32): ; VCode: ; block0: -; sext.w a2,a0 -; sub a4,zero,a2 -; max a0,a2,a4 +; sext.w a0,a0 +; sub a1,zero,a0 +; max a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a2, a0 -; neg a4, a2 -; .byte 0x33, 0x65, 0xe6, 0x0a +; sext.w a0, a0 +; neg a1, a0 +; .byte 0x33, 0x65, 0xb5, 0x0a ; ret function %iabs_i64(i64) -> i64 { @@ -69,13 +69,13 @@ block0(v0: i64): ; VCode: ; block0: -; sub a2,zero,a0 -; max a0,a0,a2 +; sub a1,zero,a0 +; max a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; neg a2, a0 -; .byte 0x33, 0x65, 0xc5, 0x0a +; neg a1, a0 +; .byte 0x33, 0x65, 0xb5, 0x0a ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/iabs.clif b/cranelift/filetests/filetests/isa/riscv64/iabs.clif index 8c8290a8966e..34f5d7e9f2e2 100644 --- a/cranelift/filetests/filetests/isa/riscv64/iabs.clif +++ b/cranelift/filetests/filetests/isa/riscv64/iabs.clif @@ -9,19 +9,19 @@ block0(v0: i8): ; VCode: ; block0: -; slli a2,a0,56 -; srai a4,a2,56 -; sub a1,zero,a4 -; select a0,a4,a1##condition=(a4 sgt a1) +; slli a0,a0,56 +; srai a2,a0,56 +; sub a1,zero,a2 +; select a0,a2,a1##condition=(a2 sgt a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x38 -; srai a4, a2, 0x38 -; neg a1, a4 -; mv a0, a4 -; blt a1, a4, 8 +; slli a0, a0, 0x38 +; srai a2, a0, 0x38 +; neg a1, a2 +; mv a0, a2 +; blt a1, a2, 8 ; mv a0, a1 ; ret @@ -33,19 +33,19 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srai a4,a2,48 -; sub a1,zero,a4 -; select a0,a4,a1##condition=(a4 sgt a1) +; slli a0,a0,48 +; srai a2,a0,48 +; sub a1,zero,a2 +; select a0,a2,a1##condition=(a2 sgt a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srai a4, a2, 0x30 -; neg a1, a4 -; mv a0, a4 -; blt a1, a4, 8 +; slli a0, a0, 0x30 +; srai a2, a0, 0x30 +; neg a1, a2 +; mv a0, a2 +; blt a1, a2, 8 ; mv a0, a1 ; ret @@ -58,17 +58,17 @@ block0(v0: i32): ; VCode: ; block0: ; sext.w a2,a0 -; sub a4,zero,a2 -; select a0,a2,a4##condition=(a2 sgt a4) +; sub a1,zero,a2 +; select a0,a2,a1##condition=(a2 sgt a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; sext.w a2, a0 -; neg a4, a2 +; neg a1, a2 ; mv a0, a2 -; blt a4, a2, 8 -; mv a0, a4 +; blt a1, a2, 8 +; mv a0, a1 ; ret function %iabs_i64(i64) -> i64 { @@ -79,17 +79,17 @@ block0(v0: i64): ; VCode: ; block0: -; sub a2,zero,a0 -; mv a5,a0 -; select a0,a5,a2##condition=(a5 sgt a2) +; sub a1,zero,a0 +; mv a2,a0 +; select a0,a2,a1##condition=(a2 sgt a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; neg a2, a0 -; mv a5, a0 -; mv a0, a5 -; blt a2, a5, 8 +; neg a1, a0 +; mv a2, a0 ; mv a0, a2 +; blt a1, a2, 8 +; mv a0, a1 ; ret diff --git 
a/cranelift/filetests/filetests/isa/riscv64/iconst-icmp-small.clif b/cranelift/filetests/filetests/isa/riscv64/iconst-icmp-small.clif index 49ef59eb2640..9c1601714ac0 100644 --- a/cranelift/filetests/filetests/isa/riscv64/iconst-icmp-small.clif +++ b/cranelift/filetests/filetests/isa/riscv64/iconst-icmp-small.clif @@ -13,25 +13,25 @@ block0: ; VCode: ; block0: ; lui a0,-2 -; addi a2,a0,-564 -; slli a3,a2,48 -; srai a4,a3,48 -; slli a0,a2,48 -; srai a2,a0,48 -; xor a4,a4,a2 -; sltu a0,zero,a4 +; addi a0,a0,-564 +; slli a1,a0,48 +; srai a1,a1,48 +; slli a0,a0,48 +; srai a0,a0,48 +; xor a0,a1,a0 +; sltu a0,zero,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; lui a0, 0xffffe -; addi a2, a0, -0x234 -; slli a3, a2, 0x30 -; srai a4, a3, 0x30 -; slli a0, a2, 0x30 -; srai a2, a0, 0x30 -; xor a4, a4, a2 -; snez a0, a4 +; addi a0, a0, -0x234 +; slli a1, a0, 0x30 +; srai a1, a1, 0x30 +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 +; xor a0, a1, a0 +; snez a0, a0 ; ret function %seq_const0(i64) -> i8 system_v { @@ -138,14 +138,14 @@ block0(v0: i64): ; VCode: ; block0: -; xori a2,a0,1 -; seqz a0,a2 +; xori a0,a0,1 +; seqz a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; xori a2, a0, 1 -; seqz a0, a2 +; xori a0, a0, 1 +; seqz a0, a0 ; ret function %sne_const1(i64) -> i8 system_v { @@ -156,14 +156,14 @@ block0(v0: i64): ; VCode: ; block0: -; xori a2,a0,1 -; sltu a0,zero,a2 +; xori a0,a0,1 +; sltu a0,zero,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; xori a2, a0, 1 -; snez a0, a2 +; xori a0, a0, 1 +; snez a0, a0 ; ret function %slt_const1(i64) -> i8 system_v { @@ -190,14 +190,14 @@ block0(v0: i64): ; VCode: ; block0: -; li a3,1 -; slt a0,a3,a0 +; li a1,1 +; slt a0,a1,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 1 -; slt a0, a3, a0 +; addi a1, zero, 1 +; slt a0, a1, a0 ; ret function %ult_const1(i64) -> i8 system_v { @@ -224,14 +224,14 @@ block0(v0: i64): ; VCode: ; block0: -; li a3,1 -; sltu a0,a3,a0 +; li a1,1 +; sltu a0,a1,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 1 -; sltu a0, a3, a0 +; addi a1, zero, 1 +; sltu a0, a1, a0 ; ret function %seq_const2(i64) -> i8 system_v { @@ -242,14 +242,14 @@ block0(v0: i64): ; VCode: ; block0: -; xori a2,a0,2 -; seqz a0,a2 +; xori a0,a0,2 +; seqz a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; xori a2, a0, 2 -; seqz a0, a2 +; xori a0, a0, 2 +; seqz a0, a0 ; ret function %sne_const2(i64) -> i8 system_v { @@ -260,14 +260,14 @@ block0(v0: i64): ; VCode: ; block0: -; xori a2,a0,2 -; sltu a0,zero,a2 +; xori a0,a0,2 +; sltu a0,zero,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; xori a2, a0, 2 -; snez a0, a2 +; xori a0, a0, 2 +; snez a0, a0 ; ret function %slt_const2(i64) -> i8 system_v { @@ -294,14 +294,14 @@ block0(v0: i64): ; VCode: ; block0: -; li a3,2 -; slt a0,a3,a0 +; li a1,2 +; slt a0,a1,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 2 -; slt a0, a3, a0 +; addi a1, zero, 2 +; slt a0, a1, a0 ; ret function %ult_const2(i64) -> i8 system_v { @@ -328,14 +328,14 @@ block0(v0: i64): ; VCode: ; block0: -; li a3,2 -; sltu a0,a3,a0 +; li a1,2 +; sltu a0,a1,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 2 -; sltu a0, a3, a0 +; addi a1, zero, 2 +; sltu a0, a1, a0 ; ret function %sle_const2(i64) -> i8 system_v { @@ -346,16 +346,16 @@ block0(v0: i64): ; VCode: ; block0: -; li a4,2 -; slt a3,a4,a0 -; xori a0,a3,1 +; li a1,2 +; slt a0,a1,a0 +; xori a0,a0,1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a4, zero, 2 -; slt a3, a4, a0 -; xori a0, a3, 1 +; addi 
a1, zero, 2 +; slt a0, a1, a0 +; xori a0, a0, 1 ; ret function %sle_const_2046(i64) -> i8 system_v { @@ -366,16 +366,16 @@ block0(v0: i64): ; VCode: ; block0: -; li a4,2046 -; slt a3,a4,a0 -; xori a0,a3,1 +; li a1,2046 +; slt a0,a1,a0 +; xori a0,a0,1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a4, zero, 0x7fe -; slt a3, a4, a0 -; xori a0, a3, 1 +; addi a1, zero, 0x7fe +; slt a0, a1, a0 +; xori a0, a0, 1 ; ret function %sle_const_2047(i64) -> i8 system_v { @@ -386,16 +386,16 @@ block0(v0: i64): ; VCode: ; block0: -; li a4,2047 -; slt a3,a4,a0 -; xori a0,a3,1 +; li a1,2047 +; slt a0,a1,a0 +; xori a0,a0,1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a4, zero, 0x7ff -; slt a3, a4, a0 -; xori a0, a3, 1 +; addi a1, zero, 0x7ff +; slt a0, a1, a0 +; xori a0, a0, 1 ; ret function %sge_const2(i64) -> i8 system_v { @@ -406,14 +406,14 @@ block0(v0: i64): ; VCode: ; block0: -; slti a2,a0,2 -; xori a0,a2,1 +; slti a0,a0,2 +; xori a0,a0,1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slti a2, a0, 2 -; xori a0, a2, 1 +; slti a0, a0, 2 +; xori a0, a0, 1 ; ret function %ule_const2(i64) -> i8 system_v { @@ -424,16 +424,16 @@ block0(v0: i64): ; VCode: ; block0: -; li a4,2 -; sltu a3,a4,a0 -; xori a0,a3,1 +; li a1,2 +; sltu a0,a1,a0 +; xori a0,a0,1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a4, zero, 2 -; sltu a3, a4, a0 -; xori a0, a3, 1 +; addi a1, zero, 2 +; sltu a0, a1, a0 +; xori a0, a0, 1 ; ret function %uge_const2(i64) -> i8 system_v { @@ -444,13 +444,13 @@ block0(v0: i64): ; VCode: ; block0: -; sltiu a2,a0,2 -; xori a0,a2,1 +; sltiu a0,a0,2 +; xori a0,a0,1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sltiu a2, a0, 2 -; xori a0, a2, 1 +; sltiu a0, a0, 2 +; xori a0, a0, 1 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/ishl-const.clif b/cranelift/filetests/filetests/isa/riscv64/ishl-const.clif index d50f69938565..41c1c2392a52 100644 --- a/cranelift/filetests/filetests/isa/riscv64/ishl-const.clif +++ b/cranelift/filetests/filetests/isa/riscv64/ishl-const.clif @@ -355,41 +355,63 @@ block0(v0: i128): } ; VCode: +; addi sp,sp,-16 +; sd ra,8(sp) +; sd fp,0(sp) +; mv fp,sp +; addi sp,sp,-16 +; sd s4,8(sp) ; block0: -; li a3,5 -; andi a5,a3,63 -; li a2,64 -; sub a2,a2,a5 -; sll a4,a0,a5 -; srl a0,a0,a2 -; select a2,zero,a0##condition=(a5 eq zero) -; sll a5,a1,a5 -; or t0,a2,a5 -; li a2,64 -; andi a5,a3,127 -; select [a0,a1],[zero,a4],[a4,t0]##condition=(a5 uge a2) +; li a5,5 +; andi a2,a5,63 +; li a3,64 +; sub a4,a3,a2 +; sll a3,a0,a2 +; srl a4,a0,a4 +; select a4,zero,a4##condition=(a2 eq zero) +; sll a0,a1,a2 +; or a4,a4,a0 +; li s4,64 +; andi a2,a5,127 +; select [a0,a1],[zero,a3],[a3,a4]##condition=(a2 uge s4) +; ld s4,8(sp) +; addi sp,sp,16 +; ld ra,8(sp) +; ld fp,0(sp) +; addi sp,sp,16 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 5 -; andi a5, a3, 0x3f -; addi a2, zero, 0x40 -; sub a2, a2, a5 -; sll a4, a0, a5 -; srl a0, a0, a2 -; mv a2, zero -; beqz a5, 8 -; mv a2, a0 -; sll a5, a1, a5 -; or t0, a2, a5 -; addi a2, zero, 0x40 -; andi a5, a3, 0x7f +; addi sp, sp, -0x10 +; sd ra, 8(sp) +; sd s0, 0(sp) +; mv s0, sp +; addi sp, sp, -0x10 +; sd s4, 8(sp) +; block1: ; offset 0x18 +; addi a5, zero, 5 +; andi a2, a5, 0x3f +; addi a3, zero, 0x40 +; sub a4, a3, a2 +; sll a3, a0, a2 +; srl a4, a0, a4 +; bnez a2, 8 +; mv a4, zero +; sll a0, a1, a2 +; or a4, a4, a0 +; addi s4, zero, 0x40 +; andi a2, a5, 0x7f ; mv a0, zero +; mv a1, a3 +; bgeu a2, s4, 0xc +; mv a0, a3 ; mv a1, a4 -; bgeu a5, a2, 0xc -; mv a0, a4 -; mv a1, t0 +; ld s4, 8(sp) +; 
addi sp, sp, 0x10 +; ld ra, 8(sp) +; ld s0, 0(sp) +; addi sp, sp, 0x10 ; ret function %ishl_i128_const_i16(i128) -> i128 { @@ -400,41 +422,63 @@ block0(v0: i128): } ; VCode: +; addi sp,sp,-16 +; sd ra,8(sp) +; sd fp,0(sp) +; mv fp,sp +; addi sp,sp,-16 +; sd s4,8(sp) ; block0: -; li a3,5 -; andi a5,a3,63 -; li a2,64 -; sub a2,a2,a5 -; sll a4,a0,a5 -; srl a0,a0,a2 -; select a2,zero,a0##condition=(a5 eq zero) -; sll a5,a1,a5 -; or t0,a2,a5 -; li a2,64 -; andi a5,a3,127 -; select [a0,a1],[zero,a4],[a4,t0]##condition=(a5 uge a2) +; li a5,5 +; andi a2,a5,63 +; li a3,64 +; sub a4,a3,a2 +; sll a3,a0,a2 +; srl a4,a0,a4 +; select a4,zero,a4##condition=(a2 eq zero) +; sll a0,a1,a2 +; or a4,a4,a0 +; li s4,64 +; andi a2,a5,127 +; select [a0,a1],[zero,a3],[a3,a4]##condition=(a2 uge s4) +; ld s4,8(sp) +; addi sp,sp,16 +; ld ra,8(sp) +; ld fp,0(sp) +; addi sp,sp,16 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 5 -; andi a5, a3, 0x3f -; addi a2, zero, 0x40 -; sub a2, a2, a5 -; sll a4, a0, a5 -; srl a0, a0, a2 -; mv a2, zero -; beqz a5, 8 -; mv a2, a0 -; sll a5, a1, a5 -; or t0, a2, a5 -; addi a2, zero, 0x40 -; andi a5, a3, 0x7f +; addi sp, sp, -0x10 +; sd ra, 8(sp) +; sd s0, 0(sp) +; mv s0, sp +; addi sp, sp, -0x10 +; sd s4, 8(sp) +; block1: ; offset 0x18 +; addi a5, zero, 5 +; andi a2, a5, 0x3f +; addi a3, zero, 0x40 +; sub a4, a3, a2 +; sll a3, a0, a2 +; srl a4, a0, a4 +; bnez a2, 8 +; mv a4, zero +; sll a0, a1, a2 +; or a4, a4, a0 +; addi s4, zero, 0x40 +; andi a2, a5, 0x7f ; mv a0, zero +; mv a1, a3 +; bgeu a2, s4, 0xc +; mv a0, a3 ; mv a1, a4 -; bgeu a5, a2, 0xc -; mv a0, a4 -; mv a1, t0 +; ld s4, 8(sp) +; addi sp, sp, 0x10 +; ld ra, 8(sp) +; ld s0, 0(sp) +; addi sp, sp, 0x10 ; ret function %ishl_i128_const_i32(i128) -> i128 { @@ -445,41 +489,63 @@ block0(v0: i128): } ; VCode: +; addi sp,sp,-16 +; sd ra,8(sp) +; sd fp,0(sp) +; mv fp,sp +; addi sp,sp,-16 +; sd s4,8(sp) ; block0: -; li a3,5 -; andi a5,a3,63 -; li a2,64 -; sub a2,a2,a5 -; sll a4,a0,a5 -; srl a0,a0,a2 -; select a2,zero,a0##condition=(a5 eq zero) -; sll a5,a1,a5 -; or t0,a2,a5 -; li a2,64 -; andi a5,a3,127 -; select [a0,a1],[zero,a4],[a4,t0]##condition=(a5 uge a2) +; li a5,5 +; andi a2,a5,63 +; li a3,64 +; sub a4,a3,a2 +; sll a3,a0,a2 +; srl a4,a0,a4 +; select a4,zero,a4##condition=(a2 eq zero) +; sll a0,a1,a2 +; or a4,a4,a0 +; li s4,64 +; andi a2,a5,127 +; select [a0,a1],[zero,a3],[a3,a4]##condition=(a2 uge s4) +; ld s4,8(sp) +; addi sp,sp,16 +; ld ra,8(sp) +; ld fp,0(sp) +; addi sp,sp,16 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 5 -; andi a5, a3, 0x3f -; addi a2, zero, 0x40 -; sub a2, a2, a5 -; sll a4, a0, a5 -; srl a0, a0, a2 -; mv a2, zero -; beqz a5, 8 -; mv a2, a0 -; sll a5, a1, a5 -; or t0, a2, a5 -; addi a2, zero, 0x40 -; andi a5, a3, 0x7f +; addi sp, sp, -0x10 +; sd ra, 8(sp) +; sd s0, 0(sp) +; mv s0, sp +; addi sp, sp, -0x10 +; sd s4, 8(sp) +; block1: ; offset 0x18 +; addi a5, zero, 5 +; andi a2, a5, 0x3f +; addi a3, zero, 0x40 +; sub a4, a3, a2 +; sll a3, a0, a2 +; srl a4, a0, a4 +; bnez a2, 8 +; mv a4, zero +; sll a0, a1, a2 +; or a4, a4, a0 +; addi s4, zero, 0x40 +; andi a2, a5, 0x7f ; mv a0, zero +; mv a1, a3 +; bgeu a2, s4, 0xc +; mv a0, a3 ; mv a1, a4 -; bgeu a5, a2, 0xc -; mv a0, a4 -; mv a1, t0 +; ld s4, 8(sp) +; addi sp, sp, 0x10 +; ld ra, 8(sp) +; ld s0, 0(sp) +; addi sp, sp, 0x10 ; ret function %ishl_i128_const_i64(i128) -> i128 { @@ -490,41 +556,63 @@ block0(v0: i128): } ; VCode: +; addi sp,sp,-16 +; sd ra,8(sp) +; sd fp,0(sp) +; mv fp,sp +; addi sp,sp,-16 +; sd s4,8(sp) ; 
block0: -; li a3,5 -; andi a5,a3,63 -; li a2,64 -; sub a2,a2,a5 -; sll a4,a0,a5 -; srl a0,a0,a2 -; select a2,zero,a0##condition=(a5 eq zero) -; sll a5,a1,a5 -; or t0,a2,a5 -; li a2,64 -; andi a5,a3,127 -; select [a0,a1],[zero,a4],[a4,t0]##condition=(a5 uge a2) +; li a5,5 +; andi a2,a5,63 +; li a3,64 +; sub a4,a3,a2 +; sll a3,a0,a2 +; srl a4,a0,a4 +; select a4,zero,a4##condition=(a2 eq zero) +; sll a0,a1,a2 +; or a4,a4,a0 +; li s4,64 +; andi a2,a5,127 +; select [a0,a1],[zero,a3],[a3,a4]##condition=(a2 uge s4) +; ld s4,8(sp) +; addi sp,sp,16 +; ld ra,8(sp) +; ld fp,0(sp) +; addi sp,sp,16 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 5 -; andi a5, a3, 0x3f -; addi a2, zero, 0x40 -; sub a2, a2, a5 -; sll a4, a0, a5 -; srl a0, a0, a2 -; mv a2, zero -; beqz a5, 8 -; mv a2, a0 -; sll a5, a1, a5 -; or t0, a2, a5 -; addi a2, zero, 0x40 -; andi a5, a3, 0x7f +; addi sp, sp, -0x10 +; sd ra, 8(sp) +; sd s0, 0(sp) +; mv s0, sp +; addi sp, sp, -0x10 +; sd s4, 8(sp) +; block1: ; offset 0x18 +; addi a5, zero, 5 +; andi a2, a5, 0x3f +; addi a3, zero, 0x40 +; sub a4, a3, a2 +; sll a3, a0, a2 +; srl a4, a0, a4 +; bnez a2, 8 +; mv a4, zero +; sll a0, a1, a2 +; or a4, a4, a0 +; addi s4, zero, 0x40 +; andi a2, a5, 0x7f ; mv a0, zero +; mv a1, a3 +; bgeu a2, s4, 0xc +; mv a0, a3 ; mv a1, a4 -; bgeu a5, a2, 0xc -; mv a0, a4 -; mv a1, t0 +; ld s4, 8(sp) +; addi sp, sp, 0x10 +; ld ra, 8(sp) +; ld s0, 0(sp) +; addi sp, sp, 0x10 ; ret function %ishl_i128_const_i128(i128) -> i128 { @@ -537,40 +625,40 @@ block0(v0: i128): ; VCode: ; block0: -; li a4,5 -; li a5,0 -; andi a2,a4,63 -; li a3,64 -; sub a3,a3,a2 -; sll a5,a0,a2 -; srl a3,a0,a3 -; select a3,zero,a3##condition=(a2 eq zero) -; sll a0,a1,a2 -; or a2,a3,a0 -; li a3,64 -; andi a4,a4,127 -; select [a0,a1],[zero,a5],[a5,a2]##condition=(a4 uge a3) +; li a3,5 +; li a2,0 +; andi a4,a3,63 +; li a2,64 +; sub a5,a2,a4 +; sll a2,a0,a4 +; srl a5,a0,a5 +; select a5,zero,a5##condition=(a4 eq zero) +; sll a0,a1,a4 +; or a4,a5,a0 +; li a5,64 +; andi a3,a3,127 +; select [a0,a1],[zero,a2],[a2,a4]##condition=(a3 uge a5) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a4, zero, 5 +; addi a3, zero, 5 +; mv a2, zero +; andi a4, a3, 0x3f +; addi a2, zero, 0x40 +; sub a5, a2, a4 +; sll a2, a0, a4 +; srl a5, a0, a5 +; bnez a4, 8 ; mv a5, zero -; andi a2, a4, 0x3f -; addi a3, zero, 0x40 -; sub a3, a3, a2 -; sll a5, a0, a2 -; srl a3, a0, a3 -; bnez a2, 8 -; mv a3, zero -; sll a0, a1, a2 -; or a2, a3, a0 -; addi a3, zero, 0x40 -; andi a4, a4, 0x7f +; sll a0, a1, a4 +; or a4, a5, a0 +; addi a5, zero, 0x40 +; andi a3, a3, 0x7f ; mv a0, zero -; mv a1, a5 -; bgeu a4, a3, 0xc -; mv a0, a5 ; mv a1, a2 +; bgeu a3, a5, 0xc +; mv a0, a2 +; mv a1, a4 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/ishl.clif b/cranelift/filetests/filetests/isa/riscv64/ishl.clif index bff7536722b8..22bc6f7c8ed5 100644 --- a/cranelift/filetests/filetests/isa/riscv64/ishl.clif +++ b/cranelift/filetests/filetests/isa/riscv64/ishl.clif @@ -11,14 +11,14 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; andi a3,a1,7 -; sllw a0,a0,a3 +; andi a1,a1,7 +; sllw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a3, a1, 7 -; sllw a0, a0, a3 +; andi a1, a1, 7 +; sllw a0, a0, a1 ; ret function %ishl_i8_i16(i8, i16) -> i8 { @@ -29,14 +29,14 @@ block0(v0: i8, v1: i16): ; VCode: ; block0: -; andi a3,a1,7 -; sllw a0,a0,a3 +; andi a1,a1,7 +; sllw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a3, a1, 7 -; sllw a0, a0, a3 +; andi a1, a1, 7 +; sllw a0, a0, a1 ; ret 
function %ishl_i8_i32(i8, i32) -> i8 { @@ -47,14 +47,14 @@ block0(v0: i8, v1: i32): ; VCode: ; block0: -; andi a3,a1,7 -; sllw a0,a0,a3 +; andi a1,a1,7 +; sllw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a3, a1, 7 -; sllw a0, a0, a3 +; andi a1, a1, 7 +; sllw a0, a0, a1 ; ret function %ishl_i8_i64(i8, i64) -> i8 { @@ -65,14 +65,14 @@ block0(v0: i8, v1: i64): ; VCode: ; block0: -; andi a3,a1,7 -; sllw a0,a0,a3 +; andi a1,a1,7 +; sllw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a3, a1, 7 -; sllw a0, a0, a3 +; andi a1, a1, 7 +; sllw a0, a0, a1 ; ret function %ishl_i8_i128(i8, i128) -> i8 { @@ -83,14 +83,14 @@ block0(v0: i8, v1: i128): ; VCode: ; block0: -; andi a4,a1,7 -; sllw a0,a0,a4 +; andi a1,a1,7 +; sllw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a4, a1, 7 -; sllw a0, a0, a4 +; andi a1, a1, 7 +; sllw a0, a0, a1 ; ret function %ishl_i16_i8(i16, i8) -> i16 { @@ -101,14 +101,14 @@ block0(v0: i16, v1: i8): ; VCode: ; block0: -; andi a3,a1,15 -; sllw a0,a0,a3 +; andi a1,a1,15 +; sllw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a3, a1, 0xf -; sllw a0, a0, a3 +; andi a1, a1, 0xf +; sllw a0, a0, a1 ; ret function %ishl_i16_i16(i16, i16) -> i16 { @@ -119,14 +119,14 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; andi a3,a1,15 -; sllw a0,a0,a3 +; andi a1,a1,15 +; sllw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a3, a1, 0xf -; sllw a0, a0, a3 +; andi a1, a1, 0xf +; sllw a0, a0, a1 ; ret function %ishl_i16_i32(i16, i32) -> i16 { @@ -137,14 +137,14 @@ block0(v0: i16, v1: i32): ; VCode: ; block0: -; andi a3,a1,15 -; sllw a0,a0,a3 +; andi a1,a1,15 +; sllw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a3, a1, 0xf -; sllw a0, a0, a3 +; andi a1, a1, 0xf +; sllw a0, a0, a1 ; ret function %ishl_i16_i64(i16, i64) -> i16 { @@ -155,14 +155,14 @@ block0(v0: i16, v1: i64): ; VCode: ; block0: -; andi a3,a1,15 -; sllw a0,a0,a3 +; andi a1,a1,15 +; sllw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a3, a1, 0xf -; sllw a0, a0, a3 +; andi a1, a1, 0xf +; sllw a0, a0, a1 ; ret function %ishl_i16_i128(i16, i128) -> i16 { @@ -173,14 +173,14 @@ block0(v0: i16, v1: i128): ; VCode: ; block0: -; andi a4,a1,15 -; sllw a0,a0,a4 +; andi a1,a1,15 +; sllw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a4, a1, 0xf -; sllw a0, a0, a4 +; andi a1, a1, 0xf +; sllw a0, a0, a1 ; ret function %ishl_i32_i8(i32, i8) -> i32 { @@ -351,38 +351,37 @@ block0(v0: i128, v1: i8): ; VCode: ; block0: -; andi a5,a2,63 -; li a3,64 -; sub a3,a3,a5 -; sll a4,a0,a5 -; srl a0,a0,a3 -; select a3,zero,a0##condition=(a5 eq zero) -; sll a5,a1,a5 -; or t0,a3,a5 -; li a3,64 -; andi a5,a2,127 -; select [a0,a1],[zero,a4],[a4,t0]##condition=(a5 uge a3) +; andi a3,a2,63 +; li a4,64 +; sub a5,a4,a3 +; sll a4,a0,a3 +; srl a5,a0,a5 +; select a5,zero,a5##condition=(a3 eq zero) +; sll a0,a1,a3 +; or a3,a5,a0 +; li a5,64 +; andi a2,a2,127 +; select [a0,a1],[zero,a4],[a4,a3]##condition=(a2 uge a5) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a5, a2, 0x3f -; addi a3, zero, 0x40 -; sub a3, a3, a5 -; sll a4, a0, a5 -; srl a0, a0, a3 -; mv a3, zero -; beqz a5, 8 -; mv a3, a0 -; sll a5, a1, a5 -; or t0, a3, a5 -; addi a3, zero, 0x40 -; andi a5, a2, 0x7f +; andi a3, a2, 0x3f +; addi a4, zero, 0x40 +; sub a5, a4, a3 +; sll a4, a0, a3 +; srl a5, a0, a5 +; bnez a3, 8 +; mv a5, zero +; sll a0, a1, a3 +; or a3, a5, a0 +; addi a5, zero, 0x40 +; andi a2, a2, 0x7f ; mv a0, zero ; mv a1, a4 -; bgeu a5, a3, 0xc +; bgeu a2, 
a5, 0xc ; mv a0, a4 -; mv a1, t0 +; mv a1, a3 ; ret function %ishl_i128_i16(i128, i16) -> i128 { @@ -393,38 +392,37 @@ block0(v0: i128, v1: i16): ; VCode: ; block0: -; andi a5,a2,63 -; li a3,64 -; sub a3,a3,a5 -; sll a4,a0,a5 -; srl a0,a0,a3 -; select a3,zero,a0##condition=(a5 eq zero) -; sll a5,a1,a5 -; or t0,a3,a5 -; li a3,64 -; andi a5,a2,127 -; select [a0,a1],[zero,a4],[a4,t0]##condition=(a5 uge a3) +; andi a3,a2,63 +; li a4,64 +; sub a5,a4,a3 +; sll a4,a0,a3 +; srl a5,a0,a5 +; select a5,zero,a5##condition=(a3 eq zero) +; sll a0,a1,a3 +; or a3,a5,a0 +; li a5,64 +; andi a2,a2,127 +; select [a0,a1],[zero,a4],[a4,a3]##condition=(a2 uge a5) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a5, a2, 0x3f -; addi a3, zero, 0x40 -; sub a3, a3, a5 -; sll a4, a0, a5 -; srl a0, a0, a3 -; mv a3, zero -; beqz a5, 8 -; mv a3, a0 -; sll a5, a1, a5 -; or t0, a3, a5 -; addi a3, zero, 0x40 -; andi a5, a2, 0x7f +; andi a3, a2, 0x3f +; addi a4, zero, 0x40 +; sub a5, a4, a3 +; sll a4, a0, a3 +; srl a5, a0, a5 +; bnez a3, 8 +; mv a5, zero +; sll a0, a1, a3 +; or a3, a5, a0 +; addi a5, zero, 0x40 +; andi a2, a2, 0x7f ; mv a0, zero ; mv a1, a4 -; bgeu a5, a3, 0xc +; bgeu a2, a5, 0xc ; mv a0, a4 -; mv a1, t0 +; mv a1, a3 ; ret function %ishl_i128_i32(i128, i32) -> i128 { @@ -435,38 +433,37 @@ block0(v0: i128, v1: i32): ; VCode: ; block0: -; andi a5,a2,63 -; li a3,64 -; sub a3,a3,a5 -; sll a4,a0,a5 -; srl a0,a0,a3 -; select a3,zero,a0##condition=(a5 eq zero) -; sll a5,a1,a5 -; or t0,a3,a5 -; li a3,64 -; andi a5,a2,127 -; select [a0,a1],[zero,a4],[a4,t0]##condition=(a5 uge a3) +; andi a3,a2,63 +; li a4,64 +; sub a5,a4,a3 +; sll a4,a0,a3 +; srl a5,a0,a5 +; select a5,zero,a5##condition=(a3 eq zero) +; sll a0,a1,a3 +; or a3,a5,a0 +; li a5,64 +; andi a2,a2,127 +; select [a0,a1],[zero,a4],[a4,a3]##condition=(a2 uge a5) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a5, a2, 0x3f -; addi a3, zero, 0x40 -; sub a3, a3, a5 -; sll a4, a0, a5 -; srl a0, a0, a3 -; mv a3, zero -; beqz a5, 8 -; mv a3, a0 -; sll a5, a1, a5 -; or t0, a3, a5 -; addi a3, zero, 0x40 -; andi a5, a2, 0x7f +; andi a3, a2, 0x3f +; addi a4, zero, 0x40 +; sub a5, a4, a3 +; sll a4, a0, a3 +; srl a5, a0, a5 +; bnez a3, 8 +; mv a5, zero +; sll a0, a1, a3 +; or a3, a5, a0 +; addi a5, zero, 0x40 +; andi a2, a2, 0x7f ; mv a0, zero ; mv a1, a4 -; bgeu a5, a3, 0xc +; bgeu a2, a5, 0xc ; mv a0, a4 -; mv a1, t0 +; mv a1, a3 ; ret function %ishl_i128_i64(i128, i64) -> i128 { @@ -477,38 +474,37 @@ block0(v0: i128, v1: i64): ; VCode: ; block0: -; andi a5,a2,63 -; li a3,64 -; sub a3,a3,a5 -; sll a4,a0,a5 -; srl a0,a0,a3 -; select a3,zero,a0##condition=(a5 eq zero) -; sll a5,a1,a5 -; or t0,a3,a5 -; li a3,64 -; andi a5,a2,127 -; select [a0,a1],[zero,a4],[a4,t0]##condition=(a5 uge a3) +; andi a3,a2,63 +; li a4,64 +; sub a5,a4,a3 +; sll a4,a0,a3 +; srl a5,a0,a5 +; select a5,zero,a5##condition=(a3 eq zero) +; sll a0,a1,a3 +; or a3,a5,a0 +; li a5,64 +; andi a2,a2,127 +; select [a0,a1],[zero,a4],[a4,a3]##condition=(a2 uge a5) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a5, a2, 0x3f -; addi a3, zero, 0x40 -; sub a3, a3, a5 -; sll a4, a0, a5 -; srl a0, a0, a3 -; mv a3, zero -; beqz a5, 8 -; mv a3, a0 -; sll a5, a1, a5 -; or t0, a3, a5 -; addi a3, zero, 0x40 -; andi a5, a2, 0x7f +; andi a3, a2, 0x3f +; addi a4, zero, 0x40 +; sub a5, a4, a3 +; sll a4, a0, a3 +; srl a5, a0, a5 +; bnez a3, 8 +; mv a5, zero +; sll a0, a1, a3 +; or a3, a5, a0 +; addi a5, zero, 0x40 +; andi a2, a2, 0x7f ; mv a0, zero ; mv a1, a4 -; bgeu a5, a3, 0xc +; bgeu a2, a5, 0xc ; mv a0, 
a4 -; mv a1, t0 +; mv a1, a3 ; ret function %ishl_i128_i128(i128, i128) -> i128 { @@ -519,40 +515,43 @@ block0(v0: i128, v1: i128): ; VCode: ; block0: -; mv a5,a0 +; mv a4,a0 +; mv a7,a1 ; andi a0,a2,63 -; li a3,64 -; sub a3,a3,a0 -; mv a4,a5 -; sll a5,a4,a0 +; li a1,64 +; sub a3,a1,a0 +; sll a6,a4,a0 ; srl a3,a4,a3 -; select a3,zero,a3##condition=(a0 eq zero) +; select a5,zero,a3##condition=(a0 eq zero) +; mv a1,a7 ; sll a0,a1,a0 -; or a4,a3,a0 -; li a3,64 +; or a3,a5,a0 +; li a4,64 ; andi a2,a2,127 -; select [a0,a1],[zero,a5],[a5,a4]##condition=(a2 uge a3) +; select [a0,a1],[zero,a6],[a6,a3]##condition=(a2 uge a4) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a5, a0 +; mv a4, a0 +; mv a7, a1 ; andi a0, a2, 0x3f -; addi a3, zero, 0x40 -; sub a3, a3, a0 -; mv a4, a5 -; sll a5, a4, a0 +; addi a1, zero, 0x40 +; sub a3, a1, a0 +; sll a6, a4, a0 ; srl a3, a4, a3 -; bnez a0, 8 -; mv a3, zero +; mv a5, zero +; beqz a0, 8 +; mv a5, a3 +; mv a1, a7 ; sll a0, a1, a0 -; or a4, a3, a0 -; addi a3, zero, 0x40 +; or a3, a5, a0 +; addi a4, zero, 0x40 ; andi a2, a2, 0x7f ; mv a0, zero -; mv a1, a5 -; bgeu a2, a3, 0xc -; mv a0, a5 -; mv a1, a4 +; mv a1, a6 +; bgeu a2, a4, 0xc +; mv a0, a6 +; mv a1, a3 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/issue-6954.clif b/cranelift/filetests/filetests/isa/riscv64/issue-6954.clif index 6aabe97f99d3..902bf9fe8dd5 100644 --- a/cranelift/filetests/filetests/isa/riscv64/issue-6954.clif +++ b/cranelift/filetests/filetests/isa/riscv64/issue-6954.clif @@ -126,10 +126,10 @@ block0(v0: i16, v1: f32, v2: f64x2, v3: i32, v4: i8, v5: i64x2, v6: i8, v7: f32x ; mv fp,sp ; addi sp,sp,-384 ; block0: -; mv a7,a0 -; vle8.v v10,-64(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v13,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v15,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; mv a5,a0 +; vle8.v v9,-64(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; li a1,0 ; li a3,0 ; sd a1,0(slot) @@ -183,98 +183,98 @@ block0(v0: i16, v1: f32, v2: f64x2, v3: i32, v4: i8, v5: i64x2, v6: i8, v7: f32x ; sd zero,368(slot) ; sw zero,376(slot) ; sh zero,380(slot) -; sext.w a3,a2 -; select v12,v15,v15##condition=(a3 ne zero) -; sext.w a3,a2 -; select v12,v12,v12##condition=(a3 ne zero) -; sext.w a3,a2 -; select v14,v12,v12##condition=(a3 ne zero) -; vfsqrt.v v11,v10 #avl=2, #vtype=(e64, m1, ta, ma) -; lui a1,4095 -; slli a3,a1,39 -; fmv.d.x fa5,a3 -; vfmv.v.f v12,fa5 #avl=2, #vtype=(e64, m1, ta, ma) +; sext.w a0,a2 +; select v10,v10,v10##condition=(a0 ne zero) +; sext.w a0,a2 +; select v10,v10,v10##condition=(a0 ne zero) +; sext.w a0,a2 +; select v10,v10,v10##condition=(a0 ne zero) +; vfsqrt.v v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; lui a0,4095 +; slli a0,a0,39 +; fmv.d.x fa0,a0 +; vfmv.v.f v11,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmfne.vv v0,v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vvm v12,v9,v11,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vfsqrt.v v11,v12 #avl=2, #vtype=(e64, m1, ta, ma) +; lui a0,4095 +; slli a0,a0,39 +; fmv.d.x fa0,a0 +; vfmv.v.f v12,fa0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmfne.vv v0,v11,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vvm v15,v11,v12,v0.t #avl=2, #vtype=(e64, m1, ta, ma) -; vfsqrt.v v11,v15 #avl=2, #vtype=(e64, m1, ta, ma) -; lui a1,4095 -; slli a3,a1,39 -; fmv.d.x fa5,a3 -; vfmv.v.f v15,fa5 #avl=2, #vtype=(e64, m1, ta, ma) -; vmfne.vv v0,v11,v11 #avl=2, #vtype=(e64, m1, ta, ma) 
-; vmerge.vvm v12,v11,v15,v0.t #avl=2, #vtype=(e64, m1, ta, ma) -; sext.w a3,a2 -; select v14,v14,v14##condition=(a3 ne zero) -; sext.w a3,a2 -; select v14,v14,v14##condition=(a3 ne zero) -; sext.w a3,a2 -; select v14,v14,v14##condition=(a3 ne zero) -; sext.w a3,a2 -; select v14,v14,v14##condition=(a3 ne zero) -; sext.w a3,a2 -; select v14,v14,v14##condition=(a3 ne zero) -; sext.w a3,a2 -; select v14,v14,v14##condition=(a3 ne zero) -; sext.w a3,a2 -; select v14,v14,v14##condition=(a3 ne zero) -; sext.w a3,a2 -; select v14,v14,v14##condition=(a3 ne zero) -; addw a3,a2,a2 -; select v11,v14,v14##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v14,v11,v11##condition=(a3 ne zero) -; vmax.vv v11,v13,v13 #avl=2, #vtype=(e64, m1, ta, ma) -; select v13,v14,v14##condition=(a3 ne zero) -; load_addr a4,3(slot) -; addi a4,a4,0 -; andi a1,a4,3 -; slli a5,a1,3 -; andi a1,a4,-4 -; atomic_rmw.i8 and a0,a6,(a1)##t0=a4 offset=a5 -; select v10,v13,v13##condition=(a3 ne zero) -; select v10,v10,v10##condition=(a3 ne zero) -; select v10,v10,v10##condition=(a3 ne zero) -; select v10,v10,v10##condition=(a3 ne zero) -; select v10,v10,v10##condition=(a3 ne zero) -; select v10,v10,v10##condition=(a3 ne zero) -; select v10,v10,v10##condition=(a3 ne zero) -; vse64.v v11,33(slot) #avl=2, #vtype=(e64, m1, ta, ma) -; select v11,v10,v10##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; select v11,v11,v11##condition=(a3 ne zero) -; mv a1,a7 -; vse8.v v12,0(a1) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v11,16(a1) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,32(a1) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v11,48(a1) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v11,64(a1) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v11,80(a1) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v11,96(a1) #avl=16, #vtype=(e8, m1, ta, ma) +; vmerge.vvm v9,v11,v12,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; sext.w a0,a2 +; select v10,v10,v10##condition=(a0 ne zero) +; sext.w a0,a2 +; select v10,v10,v10##condition=(a0 ne zero) +; sext.w a0,a2 +; select v10,v10,v10##condition=(a0 ne zero) +; sext.w a0,a2 +; select v10,v10,v10##condition=(a0 ne zero) +; sext.w a0,a2 +; select v10,v10,v10##condition=(a0 ne zero) +; sext.w a0,a2 +; select v10,v10,v10##condition=(a0 ne zero) +; sext.w a0,a2 +; select v10,v10,v10##condition=(a0 ne zero) +; sext.w a0,a2 +; select 
v10,v10,v10##condition=(a0 ne zero) +; addw a1,a2,a2 +; select v10,v10,v10##condition=(a1 ne zero) +; select v10,v10,v10##condition=(a1 ne zero) +; select v10,v10,v10##condition=(a1 ne zero) +; select v10,v10,v10##condition=(a1 ne zero) +; select v10,v10,v10##condition=(a1 ne zero) +; vmax.vv v8,v8,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; select v10,v10,v10##condition=(a1 ne zero) +; load_addr a0,3(slot) +; addi a0,a0,0 +; andi a2,a0,3 +; slli a2,a2,3 +; andi a3,a0,-4 +; atomic_rmw.i8 and a0,a6,(a3)##t0=a4 offset=a2 +; select v10,v10,v10##condition=(a1 ne zero) +; select v10,v10,v10##condition=(a1 ne zero) +; select v10,v10,v10##condition=(a1 ne zero) +; select v10,v10,v10##condition=(a1 ne zero) +; select v10,v10,v10##condition=(a1 ne zero) +; select v10,v10,v10##condition=(a1 ne zero) +; select v10,v10,v10##condition=(a1 ne zero) +; vse64.v v8,33(slot) #avl=2, #vtype=(e64, m1, ta, ma) +; select v8,v10,v10##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; select v8,v8,v8##condition=(a1 ne zero) +; mv a1,a5 +; vse8.v v9,0(a1) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,16(a1) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v9,32(a1) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,48(a1) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,64(a1) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,80(a1) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,96(a1) #avl=16, #vtype=(e8, m1, ta, ma) ; addi sp,sp,384 ; ld ra,8(sp) ; ld fp,0(sp) @@ -289,14 +289,14 @@ block0(v0: i16, v1: f32, v2: f64x2, v3: i32, v4: i8, v5: i64x2, v6: i8, v7: f32x ; mv s0, sp ; addi sp, sp, -0x180 ; block1: ; offset 0x14 -; mv a7, a0 +; mv a5, a0 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x190 -; .byte 0x07, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x1a0 -; .byte 0x87, 0x86, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x1c0 -; .byte 0x87, 0x87, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; mv a1, zero ; mv a3, zero ; sd a1, 0(sp) @@ -350,91 +350,73 @@ block0(v0: i16, v1: f32, v2: f64x2, v3: i32, v4: i8, v5: i64x2, v6: i8, v7: f32x ; sd zero, 0x170(sp) ; sw zero, 0x178(sp) ; sh zero, 0x17c(sp) -; sext.w a3, a2 -; .byte 0x57, 0x36, 0xf0, 0x9e -; bnez a3, 8 -; .byte 0x57, 0x36, 0xf0, 0x9e -; sext.w a3, a2 -; sext.w a3, a2 -; .byte 0x57, 0x37, 0xc0, 0x9e -; bnez a3, 8 -; .byte 0x57, 0x37, 0xc0, 0x9e +; sext.w a0, a2 +; sext.w a0, a2 +; sext.w a0, a2 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x15, 0xa0, 0x4e -; lui a1, 0xfff -; slli a3, a1, 0x27 -; fmv.d.x fa5, a3 -; .byte 0x57, 0xd6, 0x07, 0x5e -; .byte 0x57, 
0x90, 0xb5, 0x72 -; .byte 0xd7, 0x07, 0xb6, 0x5c -; .byte 0xd7, 0x15, 0xf0, 0x4e -; lui a1, 0xfff -; slli a3, a1, 0x27 -; fmv.d.x fa5, a3 -; .byte 0xd7, 0xd7, 0x07, 0x5e +; .byte 0xd7, 0x14, 0x90, 0x4e +; lui a0, 0xfff +; slli a0, a0, 0x27 +; fmv.d.x fa0, a0 +; .byte 0xd7, 0x55, 0x05, 0x5e +; .byte 0x57, 0x90, 0x94, 0x72 +; .byte 0x57, 0x86, 0x95, 0x5c +; .byte 0xd7, 0x15, 0xc0, 0x4e +; lui a0, 0xfff +; slli a0, a0, 0x27 +; fmv.d.x fa0, a0 +; .byte 0x57, 0x56, 0x05, 0x5e ; .byte 0x57, 0x90, 0xb5, 0x72 -; .byte 0x57, 0x86, 0xb7, 0x5c -; sext.w a3, a2 -; sext.w a3, a2 -; sext.w a3, a2 -; sext.w a3, a2 -; sext.w a3, a2 -; sext.w a3, a2 -; sext.w a3, a2 -; sext.w a3, a2 -; addw a3, a2, a2 -; .byte 0xd7, 0x35, 0xe0, 0x9e -; bnez a3, 8 -; .byte 0xd7, 0x35, 0xe0, 0x9e -; .byte 0x57, 0x37, 0xb0, 0x9e -; bnez a3, 8 -; .byte 0x57, 0x37, 0xb0, 0x9e -; .byte 0xd7, 0x85, 0xd6, 0x1e -; .byte 0xd7, 0x36, 0xe0, 0x9e -; bnez a3, 8 -; .byte 0xd7, 0x36, 0xe0, 0x9e -; addi a4, sp, 3 -; mv a4, a4 -; andi a1, a4, 3 -; slli a5, a1, 3 -; andi a1, a4, -4 -; lr.w.aqrl a0, (a1) ; trap: heap_oob -; srl a0, a0, a5 +; .byte 0xd7, 0x04, 0xb6, 0x5c +; sext.w a0, a2 +; sext.w a0, a2 +; sext.w a0, a2 +; sext.w a0, a2 +; sext.w a0, a2 +; sext.w a0, a2 +; sext.w a0, a2 +; sext.w a0, a2 +; addw a1, a2, a2 +; .byte 0x57, 0x04, 0x84, 0x1e +; addi a0, sp, 3 +; mv a0, a0 +; andi a2, a0, 3 +; slli a2, a2, 3 +; andi a3, a0, -4 +; lr.w.aqrl a0, (a3) ; trap: heap_oob +; srl a0, a0, a2 ; andi a0, a0, 0xff ; and a4, a0, a6 -; lr.w.aqrl t5, (a1) ; trap: heap_oob +; lr.w.aqrl t5, (a3) ; trap: heap_oob ; addi t6, zero, 0xff -; sll t6, t6, a5 +; sll t6, t6, a2 ; not t6, t6 ; and t5, t5, t6 ; andi t6, a4, 0xff -; sll t6, t6, a5 +; sll t6, t6, a2 ; or t5, t5, t6 -; sc.w.aqrl a4, t5, (a1) ; trap: heap_oob +; sc.w.aqrl a4, t5, (a3) ; trap: heap_oob ; bnez a4, -0x34 -; .byte 0x57, 0x35, 0xd0, 0x9e -; bnez a3, 8 -; .byte 0x57, 0x35, 0xd0, 0x9e ; addi t6, sp, 0x21 -; .byte 0xa7, 0xf5, 0x0f, 0x02 -; .byte 0xd7, 0x35, 0xa0, 0x9e -; bnez a3, 8 -; .byte 0xd7, 0x35, 0xa0, 0x9e -; mv a1, a7 +; .byte 0x27, 0xf4, 0x0f, 0x02 +; .byte 0x57, 0x34, 0xa0, 0x9e +; bnez a1, 8 +; .byte 0x57, 0x34, 0xa0, 0x9e +; mv a1, a5 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x86, 0x05, 0x02 +; .byte 0xa7, 0x84, 0x05, 0x02 ; addi t6, a1, 0x10 -; .byte 0xa7, 0x85, 0x0f, 0x02 +; .byte 0x27, 0x84, 0x0f, 0x02 ; addi t6, a1, 0x20 -; .byte 0x27, 0x86, 0x0f, 0x02 +; .byte 0xa7, 0x84, 0x0f, 0x02 ; addi t6, a1, 0x30 -; .byte 0xa7, 0x85, 0x0f, 0x02 +; .byte 0x27, 0x84, 0x0f, 0x02 ; addi t6, a1, 0x40 -; .byte 0xa7, 0x85, 0x0f, 0x02 +; .byte 0x27, 0x84, 0x0f, 0x02 ; addi t6, a1, 0x50 -; .byte 0xa7, 0x85, 0x0f, 0x02 +; .byte 0x27, 0x84, 0x0f, 0x02 ; addi t6, a1, 0x60 -; .byte 0xa7, 0x85, 0x0f, 0x02 +; .byte 0x27, 0x84, 0x0f, 0x02 ; addi sp, sp, 0x180 ; ld ra, 8(sp) ; ld s0, 0(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/leaf_function_detection.clif b/cranelift/filetests/filetests/isa/riscv64/leaf_function_detection.clif index 77182b343266..8d93bc34bd54 100644 --- a/cranelift/filetests/filetests/isa/riscv64/leaf_function_detection.clif +++ b/cranelift/filetests/filetests/isa/riscv64/leaf_function_detection.clif @@ -18,14 +18,14 @@ block0(v0: i32): ; VCode: ; block0: -; li a3,10 -; mulw a0,a0,a3 +; li a1,10 +; mulw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 0xa -; mulw a0, a0, a3 +; addi a1, zero, 0xa +; mulw a0, a0, a1 ; ret ;; Test 2: Leaf function with multiple basic blocks and control flow @@ -57,8 +57,8 @@ block3(v6: i32): ; subw 
a0,a1,a0 ; j label3 ; block2: -; li a3,2 -; mulw a0,a0,a3 +; li a1,2 +; mulw a0,a0,a1 ; j label3 ; block3: ; ret @@ -72,8 +72,8 @@ block3(v6: i32): ; subw a0, a1, a0 ; j 0xc ; block2: ; offset 0x14 -; addi a3, zero, 2 -; mulw a0, a0, a3 +; addi a1, zero, 2 +; mulw a0, a0, a1 ; block3: ; offset 0x1c ; ret @@ -160,15 +160,15 @@ block0(v0: i32, v1: i64): ; VCode: ; block0: ; sw a0,0(a1) -; lw a5,0(a1) -; addiw a0,a5,1 +; lw a0,0(a1) +; addiw a0,a0,1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; sw a0, 0(a1) ; trap: heap_oob -; lw a5, 0(a1) ; trap: heap_oob -; addiw a0, a5, 1 +; lw a0, 0(a1) ; trap: heap_oob +; addiw a0, a0, 1 ; ret ;; Test 6: Leaf function that looks like it might call but doesn't diff --git a/cranelift/filetests/filetests/isa/riscv64/load-f16-f128.clif b/cranelift/filetests/filetests/isa/riscv64/load-f16-f128.clif index bad30f841f1e..12ac2d428b37 100644 --- a/cranelift/filetests/filetests/isa/riscv64/load-f16-f128.clif +++ b/cranelift/filetests/filetests/isa/riscv64/load-f16-f128.clif @@ -10,17 +10,17 @@ block0(v0: i64): ; VCode: ; block0: -; lh a2,0(a0) -; lui a4,-16 -; or a0,a2,a4 +; lh a0,0(a0) +; lui a1,-16 +; or a0,a0,a1 ; fmv.w.x fa0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lh a2, 0(a0) ; trap: heap_oob -; lui a4, 0xffff0 -; or a0, a2, a4 +; lh a0, 0(a0) ; trap: heap_oob +; lui a1, 0xffff0 +; or a0, a0, a1 ; fmv.w.x fa0, a0 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/nan-canonicalization-has_v.clif b/cranelift/filetests/filetests/isa/riscv64/nan-canonicalization-has_v.clif index eb2b2cd0c35f..8cb6c8311532 100644 --- a/cranelift/filetests/filetests/isa/riscv64/nan-canonicalization-has_v.clif +++ b/cranelift/filetests/filetests/isa/riscv64/nan-canonicalization-has_v.clif @@ -15,12 +15,12 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfadd.vv v13,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; lui a1,523264 -; fmv.w.x fa2,a1 -; vfmv.v.f v14,fa2 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfadd.vv v13,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; lui a2,523264 +; fmv.w.x fa4,a2 +; vfmv.v.f v14,fa4 #avl=4, #vtype=(e32, m1, ta, ma) ; vmfne.vv v10,v13,v13 #avl=4, #vtype=(e32, m1, ta, ma) ; vmfne.vv v12,v13,v13 #avl=4, #vtype=(e32, m1, ta, ma) ; vmor.mm v0,v10,v12 #avl=4, #vtype=(e32, m1, ta, ma) @@ -40,14 +40,14 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x96, 0x95, 0x02 -; lui a1, 0x7fc00 -; fmv.w.x fa2, a1 -; .byte 0x57, 0x57, 0x06, 0x5e +; .byte 0xd7, 0x96, 0x84, 0x02 +; lui a2, 0x7fc00 +; fmv.w.x fa4, a2 +; .byte 0x57, 0x57, 0x07, 0x5e ; .byte 0x57, 0x95, 0xd6, 0x72 ; .byte 0x57, 0x96, 0xd6, 0x72 ; .byte 0x57, 0x20, 0xa6, 0x6a @@ -67,45 +67,45 @@ block0(v0: f64, v1: f64): ; VCode: ; block0: -; fadd.d fa1,fa0,fa1,rne -; lui a5,4095 -; slli a1,a5,39 -; fmv.d.x fa3,a1 -; vmv.v.x v8,zero #avl=2, #vtype=(e64, m1, ta, ma) -; vfmv.s.f v10,fa3 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v0,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vvm v14,v8,v10,v0.t #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.x v8,zero #avl=2, #vtype=(e64, m1, ta, 
ma) +; fadd.d fa0,fa0,fa1,rne +; lui a0,4095 +; slli a0,a0,39 +; fmv.d.x fa1,a0 +; vmv.v.x v9,zero #avl=2, #vtype=(e64, m1, ta, ma) ; vfmv.s.f v10,fa1 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v0,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vvm v15,v8,v10,v0.t #avl=2, #vtype=(e64, m1, ta, ma) -; vmfne.vv v8,v15,v15 #avl=2, #vtype=(e64, m1, ta, ma) -; vmfne.vv v10,v15,v15 #avl=2, #vtype=(e64, m1, ta, ma) -; vmor.mm v0,v8,v10 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vvm v8,v15,v14,v0.t #avl=2, #vtype=(e64, m1, ta, ma) -; vfmv.f.s fa0,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vvm v8,v9,v10,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.x v10,zero #avl=2, #vtype=(e64, m1, ta, ma) +; vfmv.s.f v11,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v0,1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vvm v9,v10,v11,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vmfne.vv v10,v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vmfne.vv v11,v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vmor.mm v0,v10,v11 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vvm v10,v9,v8,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vfmv.f.s fa0,v10 #avl=2, #vtype=(e64, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fadd.d fa1, fa0, fa1, rne -; lui a5, 0xfff -; slli a1, a5, 0x27 -; fmv.d.x fa3, a1 +; fadd.d fa0, fa0, fa1, rne +; lui a0, 0xfff +; slli a0, a0, 0x27 +; fmv.d.x fa1, a0 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x44, 0x00, 0x5e -; .byte 0x57, 0xd5, 0x06, 0x42 -; .byte 0x57, 0xb0, 0x00, 0x5e -; .byte 0x57, 0x07, 0x85, 0x5c -; .byte 0x57, 0x44, 0x00, 0x5e +; .byte 0xd7, 0x44, 0x00, 0x5e ; .byte 0x57, 0xd5, 0x05, 0x42 ; .byte 0x57, 0xb0, 0x00, 0x5e -; .byte 0xd7, 0x07, 0x85, 0x5c -; .byte 0x57, 0x94, 0xf7, 0x72 -; .byte 0x57, 0x95, 0xf7, 0x72 -; .byte 0x57, 0x20, 0x85, 0x6a -; .byte 0x57, 0x04, 0xf7, 0x5c -; .byte 0x57, 0x15, 0x80, 0x42 +; .byte 0x57, 0x04, 0x95, 0x5c +; .byte 0x57, 0x45, 0x00, 0x5e +; .byte 0xd7, 0x55, 0x05, 0x42 +; .byte 0x57, 0xb0, 0x00, 0x5e +; .byte 0xd7, 0x84, 0xa5, 0x5c +; .byte 0x57, 0x95, 0x94, 0x72 +; .byte 0xd7, 0x95, 0x94, 0x72 +; .byte 0x57, 0xa0, 0xa5, 0x6a +; .byte 0x57, 0x05, 0x94, 0x5c +; .byte 0x57, 0x15, 0xa0, 0x42 ; ret function %f1(f32, f32) -> f32 { @@ -117,45 +117,45 @@ block0(v0: f32, v1: f32): ; VCode: ; block0: ; fadd.s fa0,fa0,fa1,rne -; lui a5,523264 -; fmv.w.x fa1,a5 +; lui a0,523264 +; fmv.w.x fa1,a0 ; vmv.v.x v15,zero #avl=4, #vtype=(e32, m1, ta, ma) ; vfmv.s.f v9,fa1 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v0,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vvm v13,v15,v9,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vvm v8,v15,v9,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.x v15,zero #avl=4, #vtype=(e32, m1, ta, ma) -; vfmv.s.f v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vfmv.s.f v10,fa0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v0,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vvm v14,v15,v9,v0.t #avl=4, #vtype=(e32, m1, ta, ma) -; vmfne.vv v15,v14,v14 #avl=4, #vtype=(e32, m1, ta, ma) -; vmfne.vv v9,v14,v14 #avl=4, #vtype=(e32, m1, ta, ma) -; vmor.mm v0,v15,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vvm v15,v14,v13,v0.t #avl=4, #vtype=(e32, m1, ta, ma) -; vfmv.f.s fa0,v15 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vvm v9,v15,v10,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vmfne.vv v15,v9,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vmfne.vv v10,v9,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vmor.mm v0,v15,v10 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vvm v10,v9,v8,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vfmv.f.s fa0,v10 #avl=4, #vtype=(e32, m1, ta, ma) ; ret ; ; Disassembled: ; 
block0: ; offset 0x0 ; fadd.s fa0, fa0, fa1, rne -; lui a5, 0x7fc00 -; fmv.w.x fa1, a5 +; lui a0, 0x7fc00 +; fmv.w.x fa1, a0 ; .byte 0x57, 0x70, 0x02, 0xcd ; .byte 0xd7, 0x47, 0x00, 0x5e ; .byte 0xd7, 0xd4, 0x05, 0x42 ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0xb0, 0x00, 0x5e ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x86, 0xf4, 0x5c +; .byte 0x57, 0x84, 0xf4, 0x5c ; .byte 0xd7, 0x47, 0x00, 0x5e -; .byte 0xd7, 0x54, 0x05, 0x42 +; .byte 0x57, 0x55, 0x05, 0x42 ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0xb0, 0x00, 0x5e ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x87, 0xf4, 0x5c -; .byte 0xd7, 0x17, 0xe7, 0x72 -; .byte 0xd7, 0x14, 0xe7, 0x72 -; .byte 0x57, 0xa0, 0xf4, 0x6a -; .byte 0xd7, 0x87, 0xe6, 0x5c -; .byte 0x57, 0x15, 0xf0, 0x42 +; .byte 0xd7, 0x04, 0xf5, 0x5c +; .byte 0xd7, 0x97, 0x94, 0x72 +; .byte 0x57, 0x95, 0x94, 0x72 +; .byte 0x57, 0x20, 0xf5, 0x6a +; .byte 0x57, 0x05, 0x94, 0x5c +; .byte 0x57, 0x15, 0xa0, 0x42 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/nan-canonicalization.clif b/cranelift/filetests/filetests/isa/riscv64/nan-canonicalization.clif index 830753a19ef6..e06f250c6f2e 100644 --- a/cranelift/filetests/filetests/isa/riscv64/nan-canonicalization.clif +++ b/cranelift/filetests/filetests/isa/riscv64/nan-canonicalization.clif @@ -11,27 +11,27 @@ block0(v0: f64, v1: f64): ; VCode: ; block0: -; fadd.d fa4,fa0,fa1,rne -; lui a2,4095 -; slli a4,a2,39 -; fmv.d.x fa0,a4 -; feq.d a1,fa4,fa4 -; feq.d a3,fa4,fa4 -; and a5,a1,a3 -; select fa0,fa0,fa4##condition=(a5 eq zero) +; fadd.d fa0,fa0,fa1,rne +; lui a0,4095 +; slli a0,a0,39 +; fmv.d.x fa2,a0 +; feq.d a0,fa0,fa0 +; feq.d a1,fa0,fa0 +; and a1,a0,a1 +; select fa0,fa2,fa0##condition=(a1 eq zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fadd.d fa4, fa0, fa1, rne -; lui a2, 0xfff -; slli a4, a2, 0x27 -; fmv.d.x fa0, a4 -; feq.d a1, fa4, fa4 -; feq.d a3, fa4, fa4 -; and a5, a1, a3 -; beqz a5, 8 -; fmv.d fa0, fa4 +; fadd.d fa0, fa0, fa1, rne +; lui a0, 0xfff +; slli a0, a0, 0x27 +; fmv.d.x fa2, a0 +; feq.d a0, fa0, fa0 +; feq.d a1, fa0, fa0 +; and a1, a0, a1 +; bnez a1, 8 +; fmv.d fa0, fa2 ; ret function %f1(f32, f32) -> f32 { @@ -42,25 +42,24 @@ block0(v0: f32, v1: f32): ; VCode: ; block0: -; fadd.s fa3,fa0,fa1,rne -; lui a2,523264 -; fmv.w.x fa4,a2 -; feq.s a0,fa3,fa3 -; feq.s a2,fa3,fa3 -; and a4,a0,a2 -; select fa0,fa4,fa3##condition=(a4 eq zero) +; fadd.s fa0,fa0,fa1,rne +; lui a0,523264 +; fmv.w.x fa1,a0 +; feq.s a0,fa0,fa0 +; feq.s a1,fa0,fa0 +; and a0,a0,a1 +; select fa0,fa1,fa0##condition=(a0 eq zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fadd.s fa3, fa0, fa1, rne -; lui a2, 0x7fc00 -; fmv.w.x fa4, a2 -; feq.s a0, fa3, fa3 -; feq.s a2, fa3, fa3 -; and a4, a0, a2 -; fmv.d fa0, fa4 -; beqz a4, 8 -; fmv.d fa0, fa3 +; fadd.s fa0, fa0, fa1, rne +; lui a0, 0x7fc00 +; fmv.w.x fa1, a0 +; feq.s a0, fa0, fa0 +; feq.s a1, fa0, fa0 +; and a0, a0, a1 +; bnez a0, 8 +; fmv.d fa0, fa1 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/narrow-arithmetic.clif b/cranelift/filetests/filetests/isa/riscv64/narrow-arithmetic.clif index 05b39aeea943..101ff6e79d61 100644 --- a/cranelift/filetests/filetests/isa/riscv64/narrow-arithmetic.clif +++ b/cranelift/filetests/filetests/isa/riscv64/narrow-arithmetic.clif @@ -59,15 +59,15 @@ block0(v0: i32, v1: i8): ; VCode: ; block0: -; slli a4,a1,56 -; srai a1,a4,56 +; slli a1,a1,56 +; srai a1,a1,56 ; addw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a4, a1, 0x38 -; srai a1, a4, 0x38 +; slli a1, a1, 0x38 +; srai a1, a1, 0x38 ; addw 
a0, a0, a1 ; ret @@ -80,13 +80,13 @@ block0(v0: i64, v1: i32): ; VCode: ; block0: -; sext.w a4,a1 -; add a0,a0,a4 +; sext.w a1,a1 +; add a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a4, a1 -; add a0, a0, a4 +; sext.w a1, a1 +; add a0, a0, a1 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/nearest.clif b/cranelift/filetests/filetests/isa/riscv64/nearest.clif index d1d6a284323f..411cfb2649a5 100644 --- a/cranelift/filetests/filetests/isa/riscv64/nearest.clif +++ b/cranelift/filetests/filetests/isa/riscv64/nearest.clif @@ -10,31 +10,31 @@ block0(v0: f32): ; VCode: ; block0: -; lui a2,307200 -; fmv.w.x fa4,a2 -; fabs.s fa1,fa0 -; flt.s a2,fa1,fa4 -; fcvt.w.s a4,fa0,rne -; fcvt.s.w fa1,a4,rne -; fsgnj.s fa2,fa1,fa0 -; fmv.w.x fa4,zero -; fadd.s fa0,fa0,fa4,rne -; select fa0,fa0,fa2##condition=(a2 eq zero) +; lui a0,307200 +; fmv.w.x fa1,a0 +; fabs.s fa2,fa0 +; flt.s a0,fa2,fa1 +; fcvt.w.s a1,fa0,rne +; fcvt.s.w fa2,a1,rne +; fsgnj.s fa4,fa2,fa0 +; fmv.w.x fa1,zero +; fadd.s fa0,fa0,fa1,rne +; select fa0,fa0,fa4##condition=(a0 eq zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lui a2, 0x4b000 -; fmv.w.x fa4, a2 -; fabs.s fa1, fa0 -; flt.s a2, fa1, fa4 -; fcvt.w.s a4, fa0, rne ; trap: bad_toint -; fcvt.s.w fa1, a4, rne -; fsgnj.s fa2, fa1, fa0 -; fmv.w.x fa4, zero -; fadd.s fa0, fa0, fa4, rne -; beqz a2, 8 -; fmv.d fa0, fa2 +; lui a0, 0x4b000 +; fmv.w.x fa1, a0 +; fabs.s fa2, fa0 +; flt.s a0, fa2, fa1 +; fcvt.w.s a1, fa0, rne ; trap: bad_toint +; fcvt.s.w fa2, a1, rne +; fsgnj.s fa4, fa2, fa0 +; fmv.w.x fa1, zero +; fadd.s fa0, fa0, fa1, rne +; beqz a0, 8 +; fmv.d fa0, fa4 ; ret function %nearest_f64(f64) -> f64 { @@ -45,33 +45,32 @@ block0(v0: f64): ; VCode: ; block0: -; lui a2,1075 -; slli a4,a2,40 -; fmv.d.x fa1,a4 +; lui a0,1075 +; slli a0,a0,40 +; fmv.d.x fa1,a0 ; fabs.d fa2,fa0 -; flt.d a4,fa2,fa1 -; fcvt.l.d a0,fa0,rne -; fcvt.d.l fa2,a0,rne -; fsgnj.d fa4,fa2,fa0 -; fmv.d.x fa1,zero -; fadd.d fa2,fa0,fa1,rne -; select fa0,fa2,fa4##condition=(a4 eq zero) +; flt.d a0,fa2,fa1 +; fcvt.l.d a2,fa0,rne +; fcvt.d.l fa4,a2,rne +; fsgnj.d fa1,fa4,fa0 +; fmv.d.x fa2,zero +; fadd.d fa0,fa0,fa2,rne +; select fa0,fa0,fa1##condition=(a0 eq zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lui a2, 0x433 -; slli a4, a2, 0x28 -; fmv.d.x fa1, a4 +; lui a0, 0x433 +; slli a0, a0, 0x28 +; fmv.d.x fa1, a0 ; fabs.d fa2, fa0 -; flt.d a4, fa2, fa1 -; fcvt.l.d a0, fa0, rne ; trap: bad_toint -; fcvt.d.l fa2, a0, rne -; fsgnj.d fa4, fa2, fa0 -; fmv.d.x fa1, zero -; fadd.d fa2, fa0, fa1, rne -; fmv.d fa0, fa2 -; beqz a4, 8 -; fmv.d fa0, fa4 +; flt.d a0, fa2, fa1 +; fcvt.l.d a2, fa0, rne ; trap: bad_toint +; fcvt.d.l fa4, a2, rne +; fsgnj.d fa1, fa4, fa0 +; fmv.d.x fa2, zero +; fadd.d fa0, fa0, fa2, rne +; beqz a0, 8 +; fmv.d fa0, fa1 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/popcnt.clif b/cranelift/filetests/filetests/isa/riscv64/popcnt.clif index cb4b2847d861..2183768b073b 100644 --- a/cranelift/filetests/filetests/isa/riscv64/popcnt.clif +++ b/cranelift/filetests/filetests/isa/riscv64/popcnt.clif @@ -10,14 +10,14 @@ block0(v0: i8): ; VCode: ; block0: -; andi a2,a0,255 -; cpop a0,a2 +; andi a0,a0,255 +; cpop a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a2, a0, 0xff -; .byte 0x13, 0x15, 0x26, 0x60 +; andi a0, a0, 0xff +; .byte 0x13, 0x15, 0x25, 0x60 ; ret function %popcnt16(i16) -> i16 { @@ -28,14 +28,14 @@ block0(v0: i16): ; VCode: ; block0: -; zext.h a2,a0 -; cpop a0,a2 +; zext.h a0,a0 +; cpop a0,a0 ; ret ; ; Disassembled: ; block0: ; 
offset 0x0 -; .byte 0x3b, 0x46, 0x05, 0x08 -; .byte 0x13, 0x15, 0x26, 0x60 +; .byte 0x3b, 0x45, 0x05, 0x08 +; .byte 0x13, 0x15, 0x25, 0x60 ; ret function %popcnt32(i32) -> i32 { @@ -78,17 +78,17 @@ block0(v0: i128): ; VCode: ; block0: -; cpop a3,a0 -; cpop a5,a1 -; add a0,a3,a5 +; cpop a0,a0 +; cpop a1,a1 +; add a0,a0,a1 ; li a1,0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x93, 0x16, 0x25, 0x60 -; .byte 0x93, 0x97, 0x25, 0x60 -; add a0, a3, a5 +; .byte 0x13, 0x15, 0x25, 0x60 +; .byte 0x93, 0x95, 0x25, 0x60 +; add a0, a0, a1 ; mv a1, zero ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/preserve-all.clif b/cranelift/filetests/filetests/isa/riscv64/preserve-all.clif index 0a2858214cca..2195928b1bc7 100644 --- a/cranelift/filetests/filetests/isa/riscv64/preserve-all.clif +++ b/cranelift/filetests/filetests/isa/riscv64/preserve-all.clif @@ -125,8 +125,8 @@ block0(v0: i64): ; vse8.v v30,16(sp) #avl=16, #vtype=(e8, m1, ta, ma) ; vse8.v v31,0(sp) #avl=16, #vtype=(e8, m1, ta, ma) ; block0: -; load_ext_name_far a2,%libcall+0 -; callind a2 +; load_ext_name_far a1,%libcall+0 +; callind a1 ; ld ra,808(sp) ; ld t0,800(sp) ; ld t1,792(sp) @@ -311,12 +311,12 @@ block0(v0: i64): ; .byte 0x27, 0x8f, 0x0f, 0x02 ; .byte 0xa7, 0x0f, 0x01, 0x02 ; block1: ; offset 0x1a8 -; auipc a2, 0 -; ld a2, 0xc(a2) +; auipc a1, 0 +; ld a1, 0xc(a1) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %libcall 0 ; .byte 0x00, 0x00, 0x00, 0x00 -; jalr a2 +; jalr a1 ; ld ra, 0x328(sp) ; ld t0, 0x320(sp) ; ld t1, 0x318(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/prologue.clif b/cranelift/filetests/filetests/isa/riscv64/prologue.clif index c7abde812fbf..328862e5836b 100644 --- a/cranelift/filetests/filetests/isa/riscv64/prologue.clif +++ b/cranelift/filetests/filetests/isa/riscv64/prologue.clif @@ -93,16 +93,23 @@ block0(v0: f64): ; fsd fs10,16(sp) ; fsd fs11,8(sp) ; block0: +; fadd.d fa1,fa0,fa0,rne +; fadd.d fa2,fa0,fa0,rne ; fadd.d fa3,fa0,fa0,rne ; fadd.d fa4,fa0,fa0,rne ; fadd.d fa5,fa0,fa0,rne -; fadd.d fa1,fa0,fa0,rne -; fadd.d fa2,fa0,fa0,rne -; fadd.d ft9,fa0,fa0,rne -; fadd.d ft10,fa0,fa0,rne -; fadd.d ft11,fa0,fa0,rne +; fadd.d ft0,fa0,fa0,rne +; fadd.d ft1,fa0,fa0,rne +; fadd.d ft2,fa0,fa0,rne +; fadd.d ft3,fa0,fa0,rne +; fadd.d ft4,fa0,fa0,rne +; fadd.d ft5,fa0,fa0,rne +; fadd.d ft6,fa0,fa0,rne +; fadd.d ft7,fa0,fa0,rne ; fadd.d fs0,fa0,fa0,rne ; fadd.d fs1,fa0,fa0,rne +; fadd.d fa6,fa0,fa0,rne +; fadd.d fa7,fa0,fa0,rne ; fadd.d fs2,fa0,fa0,rne ; fadd.d fs3,fa0,fa0,rne ; fadd.d fs4,fa0,fa0,rne @@ -113,48 +120,41 @@ block0(v0: f64): ; fadd.d fs9,fa0,fa0,rne ; fadd.d fs10,fa0,fa0,rne ; fadd.d fs11,fa0,fa0,rne -; fadd.d ft0,fa0,fa0,rne -; fadd.d ft1,fa0,fa0,rne -; fadd.d ft2,fa0,fa0,rne -; fadd.d ft3,fa0,fa0,rne -; fadd.d ft4,fa0,fa0,rne -; fadd.d ft5,fa0,fa0,rne -; fadd.d ft6,fa0,fa0,rne -; fadd.d ft7,fa0,fa0,rne -; fadd.d fa6,fa0,fa0,rne -; fadd.d fa7,fa0,fa0,rne ; fadd.d ft8,fa0,fa0,rne -; fadd.d fa3,fa0,fa3,rne -; fadd.d fa4,fa4,fa5,rne -; fadd.d fa5,fa1,fa2,rne -; fadd.d fa0,ft9,ft10,rne -; fadd.d fa1,ft11,fs0,rne -; fadd.d fa2,fs1,fs2,rne -; fadd.d ft9,fs3,fs4,rne -; fadd.d ft10,fs5,fs6,rne -; fadd.d ft11,fs7,fs8,rne -; fadd.d fs0,fs9,fs10,rne -; fadd.d fs1,fs11,ft0,rne -; fadd.d fs2,ft1,ft2,rne -; fadd.d fs3,ft3,ft4,rne -; fadd.d fs4,ft5,ft6,rne -; fadd.d fs5,ft7,fa6,rne -; fadd.d fs6,fa7,ft8,rne -; fadd.d fa3,fa3,fa4,rne -; fadd.d fa4,fa5,fa0,rne -; fadd.d fa5,fa1,fa2,rne -; fadd.d fa0,ft9,ft10,rne -; fadd.d fa1,ft11,fs0,rne -; fadd.d fa2,fs1,fs2,rne -; fadd.d 
ft9,fs3,fs4,rne -; fadd.d ft10,fs5,fs6,rne -; fadd.d fa3,fa3,fa4,rne -; fadd.d fa4,fa5,fa0,rne -; fadd.d fa5,fa1,fa2,rne -; fadd.d fa0,ft9,ft10,rne -; fadd.d fa3,fa3,fa4,rne -; fadd.d fa4,fa5,fa0,rne -; fadd.d fa0,fa3,fa4,rne +; fadd.d ft9,fa0,fa0,rne +; fadd.d ft10,fa0,fa0,rne +; fadd.d ft11,fa0,fa0,rne +; fadd.d fa0,fa0,fa1,rne +; fadd.d fa1,fa2,fa3,rne +; fadd.d fa2,fa4,fa5,rne +; fadd.d fa3,ft0,ft1,rne +; fadd.d fa4,ft2,ft3,rne +; fadd.d fa5,ft4,ft5,rne +; fadd.d ft0,ft6,ft7,rne +; fadd.d ft1,fs0,fs1,rne +; fadd.d ft2,fa6,fa7,rne +; fadd.d ft3,fs2,fs3,rne +; fadd.d ft4,fs4,fs5,rne +; fadd.d ft5,fs6,fs7,rne +; fadd.d ft6,fs8,fs9,rne +; fadd.d ft7,fs10,fs11,rne +; fadd.d fs0,ft8,ft9,rne +; fadd.d fs1,ft10,ft11,rne +; fadd.d fa0,fa0,fa1,rne +; fadd.d fa1,fa2,fa3,rne +; fadd.d fa2,fa4,fa5,rne +; fadd.d fa3,ft0,ft1,rne +; fadd.d fa4,ft2,ft3,rne +; fadd.d fa5,ft4,ft5,rne +; fadd.d ft0,ft6,ft7,rne +; fadd.d ft1,fs0,fs1,rne +; fadd.d fa0,fa0,fa1,rne +; fadd.d fa1,fa2,fa3,rne +; fadd.d fa2,fa4,fa5,rne +; fadd.d fa3,ft0,ft1,rne +; fadd.d fa0,fa0,fa1,rne +; fadd.d fa1,fa2,fa3,rne +; fadd.d fa0,fa0,fa1,rne ; fld fs0,88(sp) ; fld fs2,80(sp) ; fld fs3,72(sp) @@ -191,16 +191,23 @@ block0(v0: f64): ; fsd fs10, 0x10(sp) ; fsd fs11, 8(sp) ; block1: ; offset 0x40 +; fadd.d fa1, fa0, fa0, rne +; fadd.d fa2, fa0, fa0, rne ; fadd.d fa3, fa0, fa0, rne ; fadd.d fa4, fa0, fa0, rne ; fadd.d fa5, fa0, fa0, rne -; fadd.d fa1, fa0, fa0, rne -; fadd.d fa2, fa0, fa0, rne -; fadd.d ft9, fa0, fa0, rne -; fadd.d ft10, fa0, fa0, rne -; fadd.d ft11, fa0, fa0, rne +; fadd.d ft0, fa0, fa0, rne +; fadd.d ft1, fa0, fa0, rne +; fadd.d ft2, fa0, fa0, rne +; fadd.d ft3, fa0, fa0, rne +; fadd.d ft4, fa0, fa0, rne +; fadd.d ft5, fa0, fa0, rne +; fadd.d ft6, fa0, fa0, rne +; fadd.d ft7, fa0, fa0, rne ; fadd.d fs0, fa0, fa0, rne ; fadd.d fs1, fa0, fa0, rne +; fadd.d fa6, fa0, fa0, rne +; fadd.d fa7, fa0, fa0, rne ; fadd.d fs2, fa0, fa0, rne ; fadd.d fs3, fa0, fa0, rne ; fadd.d fs4, fa0, fa0, rne @@ -211,48 +218,41 @@ block0(v0: f64): ; fadd.d fs9, fa0, fa0, rne ; fadd.d fs10, fa0, fa0, rne ; fadd.d fs11, fa0, fa0, rne -; fadd.d ft0, fa0, fa0, rne -; fadd.d ft1, fa0, fa0, rne -; fadd.d ft2, fa0, fa0, rne -; fadd.d ft3, fa0, fa0, rne -; fadd.d ft4, fa0, fa0, rne -; fadd.d ft5, fa0, fa0, rne -; fadd.d ft6, fa0, fa0, rne -; fadd.d ft7, fa0, fa0, rne -; fadd.d fa6, fa0, fa0, rne -; fadd.d fa7, fa0, fa0, rne ; fadd.d ft8, fa0, fa0, rne -; fadd.d fa3, fa0, fa3, rne -; fadd.d fa4, fa4, fa5, rne -; fadd.d fa5, fa1, fa2, rne -; fadd.d fa0, ft9, ft10, rne -; fadd.d fa1, ft11, fs0, rne -; fadd.d fa2, fs1, fs2, rne -; fadd.d ft9, fs3, fs4, rne -; fadd.d ft10, fs5, fs6, rne -; fadd.d ft11, fs7, fs8, rne -; fadd.d fs0, fs9, fs10, rne -; fadd.d fs1, fs11, ft0, rne -; fadd.d fs2, ft1, ft2, rne -; fadd.d fs3, ft3, ft4, rne -; fadd.d fs4, ft5, ft6, rne -; fadd.d fs5, ft7, fa6, rne -; fadd.d fs6, fa7, ft8, rne -; fadd.d fa3, fa3, fa4, rne -; fadd.d fa4, fa5, fa0, rne -; fadd.d fa5, fa1, fa2, rne -; fadd.d fa0, ft9, ft10, rne -; fadd.d fa1, ft11, fs0, rne -; fadd.d fa2, fs1, fs2, rne -; fadd.d ft9, fs3, fs4, rne -; fadd.d ft10, fs5, fs6, rne -; fadd.d fa3, fa3, fa4, rne -; fadd.d fa4, fa5, fa0, rne -; fadd.d fa5, fa1, fa2, rne -; fadd.d fa0, ft9, ft10, rne -; fadd.d fa3, fa3, fa4, rne -; fadd.d fa4, fa5, fa0, rne -; fadd.d fa0, fa3, fa4, rne +; fadd.d ft9, fa0, fa0, rne +; fadd.d ft10, fa0, fa0, rne +; fadd.d ft11, fa0, fa0, rne +; fadd.d fa0, fa0, fa1, rne +; fadd.d fa1, fa2, fa3, rne +; fadd.d fa2, fa4, fa5, rne +; fadd.d fa3, ft0, ft1, rne +; fadd.d 
fa4, ft2, ft3, rne +; fadd.d fa5, ft4, ft5, rne +; fadd.d ft0, ft6, ft7, rne +; fadd.d ft1, fs0, fs1, rne +; fadd.d ft2, fa6, fa7, rne +; fadd.d ft3, fs2, fs3, rne +; fadd.d ft4, fs4, fs5, rne +; fadd.d ft5, fs6, fs7, rne +; fadd.d ft6, fs8, fs9, rne +; fadd.d ft7, fs10, fs11, rne +; fadd.d fs0, ft8, ft9, rne +; fadd.d fs1, ft10, ft11, rne +; fadd.d fa0, fa0, fa1, rne +; fadd.d fa1, fa2, fa3, rne +; fadd.d fa2, fa4, fa5, rne +; fadd.d fa3, ft0, ft1, rne +; fadd.d fa4, ft2, ft3, rne +; fadd.d fa5, ft4, ft5, rne +; fadd.d ft0, ft6, ft7, rne +; fadd.d ft1, fs0, fs1, rne +; fadd.d fa0, fa0, fa1, rne +; fadd.d fa1, fa2, fa3, rne +; fadd.d fa2, fa4, fa5, rne +; fadd.d fa3, ft0, ft1, rne +; fadd.d fa0, fa0, fa1, rne +; fadd.d fa1, fa2, fa3, rne +; fadd.d fa0, fa0, fa1, rne ; fld fs0, 0x58(sp) ; fld fs2, 0x50(sp) ; fld fs3, 0x48(sp) @@ -329,43 +329,44 @@ block0(v0: i64): ; sd s5,24(sp) ; sd s6,16(sp) ; sd s7,8(sp) +; sd s8,0(sp) ; block0: ; add a1,a0,a0 ; add a2,a0,a1 ; add a3,a0,a2 ; add a4,a0,a3 ; add a5,a0,a4 -; add t1,a0,a5 +; add t0,a0,a5 +; add t1,a0,t0 ; add t2,a0,t1 -; add a6,a0,t2 +; add s1,a0,t2 +; add a6,a0,s1 ; add a7,a0,a6 -; add t3,a0,a7 -; add t4,a0,t3 -; add s1,a0,t4 -; add s2,a0,s1 +; add s2,a0,a7 ; add s3,a0,s2 ; add s4,a0,s3 ; add s5,a0,s4 ; add s6,a0,s5 ; add s7,a0,s6 -; add a1,a0,a1 -; add a2,a2,a3 -; add a3,a4,a5 -; add a4,t1,t2 +; add s8,a0,s7 +; add a0,a0,a1 +; add a1,a2,a3 +; add a2,a4,a5 +; add a3,t0,t1 +; add a4,t2,s1 ; add a5,a6,a7 -; add a0,t3,t4 -; add t1,s1,s2 -; add t2,s3,s4 -; add a6,s5,s6 -; add a1,s7,a1 -; add a2,a2,a3 -; add a3,a4,a5 -; add a4,a0,t1 -; add a5,t2,a6 +; add t0,s2,s3 +; add t1,s4,s5 +; add t2,s6,s7 +; add a0,s8,a0 ; add a1,a1,a2 ; add a2,a3,a4 -; add a1,a5,a1 -; add a0,a2,a1 +; add a3,a5,t0 +; add a4,t1,t2 +; add a0,a0,a1 +; add a1,a2,a3 +; add a0,a4,a0 +; add a0,a1,a0 ; ld s1,56(sp) ; ld s2,48(sp) ; ld s3,40(sp) @@ -373,6 +374,7 @@ block0(v0: i64): ; ld s5,24(sp) ; ld s6,16(sp) ; ld s7,8(sp) +; ld s8,0(sp) ; addi sp,sp,64 ; ld ra,8(sp) ; ld fp,0(sp) @@ -393,43 +395,44 @@ block0(v0: i64): ; sd s5, 0x18(sp) ; sd s6, 0x10(sp) ; sd s7, 8(sp) -; block1: ; offset 0x30 +; sd s8, 0(sp) +; block1: ; offset 0x34 ; add a1, a0, a0 ; add a2, a0, a1 ; add a3, a0, a2 ; add a4, a0, a3 ; add a5, a0, a4 -; add t1, a0, a5 +; add t0, a0, a5 +; add t1, a0, t0 ; add t2, a0, t1 -; add a6, a0, t2 +; add s1, a0, t2 +; add a6, a0, s1 ; add a7, a0, a6 -; add t3, a0, a7 -; add t4, a0, t3 -; add s1, a0, t4 -; add s2, a0, s1 +; add s2, a0, a7 ; add s3, a0, s2 ; add s4, a0, s3 ; add s5, a0, s4 ; add s6, a0, s5 ; add s7, a0, s6 -; add a1, a0, a1 -; add a2, a2, a3 -; add a3, a4, a5 -; add a4, t1, t2 +; add s8, a0, s7 +; add a0, a0, a1 +; add a1, a2, a3 +; add a2, a4, a5 +; add a3, t0, t1 +; add a4, t2, s1 ; add a5, a6, a7 -; add a0, t3, t4 -; add t1, s1, s2 -; add t2, s3, s4 -; add a6, s5, s6 -; add a1, s7, a1 -; add a2, a2, a3 -; add a3, a4, a5 -; add a4, a0, t1 -; add a5, t2, a6 +; add t0, s2, s3 +; add t1, s4, s5 +; add t2, s6, s7 +; add a0, s8, a0 ; add a1, a1, a2 ; add a2, a3, a4 -; add a1, a5, a1 -; add a0, a2, a1 +; add a3, a5, t0 +; add a4, t1, t2 +; add a0, a0, a1 +; add a1, a2, a3 +; add a0, a4, a0 +; add a0, a1, a0 ; ld s1, 0x38(sp) ; ld s2, 0x30(sp) ; ld s3, 0x28(sp) @@ -437,6 +440,7 @@ block0(v0: i64): ; ld s5, 0x18(sp) ; ld s6, 0x10(sp) ; ld s7, 8(sp) +; ld s8, 0(sp) ; addi sp, sp, 0x40 ; ld ra, 8(sp) ; ld s0, 0(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/return-call-indirect.clif b/cranelift/filetests/filetests/isa/riscv64/return-call-indirect.clif 
index 6bbd6bdf8233..b3656edc9d53 100644 --- a/cranelift/filetests/filetests/isa/riscv64/return-call-indirect.clif +++ b/cranelift/filetests/filetests/isa/riscv64/return-call-indirect.clif @@ -100,17 +100,17 @@ block0(v0: f64): ; VCode: ; block0: -; lui a3,1027 -; slli a5,a3,40 -; fmv.d.x fa1,a5 +; lui a0,1027 +; slli a0,a0,40 +; fmv.d.x fa1,a0 ; fadd.d fa0,fa0,fa1,rne ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lui a3, 0x403 -; slli a5, a3, 0x28 -; fmv.d.x fa1, a5 +; lui a0, 0x403 +; slli a0, a0, 0x28 +; fmv.d.x fa1, a0 ; fadd.d fa0, fa0, fa1, rne ; ret @@ -160,14 +160,14 @@ block0(v0: i8): ; VCode: ; block0: -; andi a2,a0,255 -; seqz a0,a2 +; andi a0,a0,255 +; seqz a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a2, a0, 0xff -; seqz a0, a2 +; andi a0, a0, 0xff +; seqz a0, a0 ; ret function %call_i8(i8) -> i8 tail { @@ -275,41 +275,41 @@ block0: ; li a5,35 ; li a6,40 ; li a7,45 -; li s2,50 -; li s3,55 -; li s4,60 -; li s5,65 -; li s6,70 -; li s7,75 -; li s8,80 -; li s9,85 -; li s10,90 -; li s11,95 -; li t0,100 -; li t1,105 -; li t2,110 -; li t3,115 -; li t4,120 -; li s1,125 +; li s10,50 +; li s11,55 +; li t3,60 +; li t4,65 +; li t0,70 +; li t1,75 +; li t2,80 +; li s1,85 +; li s2,90 +; li s3,95 +; li s4,100 +; li s5,105 +; li s6,110 +; li s7,115 +; li s8,120 +; li s9,125 ; li a0,130 ; li a1,135 ; load_ext_name_far a2,%tail_callee_stack_args+0 -; sd s2,-144(incoming_arg) -; sd s3,-136(incoming_arg) -; sd s4,-128(incoming_arg) -; sd s5,-120(incoming_arg) -; sd s6,-112(incoming_arg) -; sd s7,-104(incoming_arg) -; sd s8,-96(incoming_arg) -; sd s9,-88(incoming_arg) -; sd s10,-80(incoming_arg) -; sd s11,-72(incoming_arg) -; sd t0,-64(incoming_arg) -; sd t1,-56(incoming_arg) -; sd t2,-48(incoming_arg) -; sd t3,-40(incoming_arg) -; sd t4,-32(incoming_arg) -; sd s1,-24(incoming_arg) +; sd s10,-144(incoming_arg) +; sd s11,-136(incoming_arg) +; sd t3,-128(incoming_arg) +; sd t4,-120(incoming_arg) +; sd t0,-112(incoming_arg) +; sd t1,-104(incoming_arg) +; sd t2,-96(incoming_arg) +; sd s1,-88(incoming_arg) +; sd s2,-80(incoming_arg) +; sd s3,-72(incoming_arg) +; sd s4,-64(incoming_arg) +; sd s5,-56(incoming_arg) +; sd s6,-48(incoming_arg) +; sd s7,-40(incoming_arg) +; sd s8,-32(incoming_arg) +; sd s9,-24(incoming_arg) ; sd a0,-16(incoming_arg) ; sd a1,-8(incoming_arg) ; ld a1,8(slot) @@ -353,22 +353,22 @@ block0: ; addi a5, zero, 0x23 ; addi a6, zero, 0x28 ; addi a7, zero, 0x2d -; addi s2, zero, 0x32 -; addi s3, zero, 0x37 -; addi s4, zero, 0x3c -; addi s5, zero, 0x41 -; addi s6, zero, 0x46 -; addi s7, zero, 0x4b -; addi s8, zero, 0x50 -; addi s9, zero, 0x55 -; addi s10, zero, 0x5a -; addi s11, zero, 0x5f -; addi t0, zero, 0x64 -; addi t1, zero, 0x69 -; addi t2, zero, 0x6e -; addi t3, zero, 0x73 -; addi t4, zero, 0x78 -; addi s1, zero, 0x7d +; addi s10, zero, 0x32 +; addi s11, zero, 0x37 +; addi t3, zero, 0x3c +; addi t4, zero, 0x41 +; addi t0, zero, 0x46 +; addi t1, zero, 0x4b +; addi t2, zero, 0x50 +; addi s1, zero, 0x55 +; addi s2, zero, 0x5a +; addi s3, zero, 0x5f +; addi s4, zero, 0x64 +; addi s5, zero, 0x69 +; addi s6, zero, 0x6e +; addi s7, zero, 0x73 +; addi s8, zero, 0x78 +; addi s9, zero, 0x7d ; addi a0, zero, 0x82 ; addi a1, zero, 0x87 ; auipc a2, 0 @@ -376,22 +376,22 @@ block0: ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %tail_callee_stack_args 0 ; .byte 0x00, 0x00, 0x00, 0x00 -; sd s2, 0x90(sp) -; sd s3, 0x98(sp) -; sd s4, 0xa0(sp) -; sd s5, 0xa8(sp) -; sd s6, 0xb0(sp) -; sd s7, 0xb8(sp) -; sd s8, 0xc0(sp) -; sd s9, 0xc8(sp) -; sd s10, 0xd0(sp) -; sd s11, 
0xd8(sp) -; sd t0, 0xe0(sp) -; sd t1, 0xe8(sp) -; sd t2, 0xf0(sp) -; sd t3, 0xf8(sp) -; sd t4, 0x100(sp) -; sd s1, 0x108(sp) +; sd s10, 0x90(sp) +; sd s11, 0x98(sp) +; sd t3, 0xa0(sp) +; sd t4, 0xa8(sp) +; sd t0, 0xb0(sp) +; sd t1, 0xb8(sp) +; sd t2, 0xc0(sp) +; sd s1, 0xc8(sp) +; sd s2, 0xd0(sp) +; sd s3, 0xd8(sp) +; sd s4, 0xe0(sp) +; sd s5, 0xe8(sp) +; sd s6, 0xf0(sp) +; sd s7, 0xf8(sp) +; sd s8, 0x100(sp) +; sd s9, 0x108(sp) ; sd a0, 0x110(sp) ; sd a1, 0x118(sp) ; ld a1, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/return-call.clif b/cranelift/filetests/filetests/isa/riscv64/return-call.clif index 3252d609732c..16b37586d099 100644 --- a/cranelift/filetests/filetests/isa/riscv64/return-call.clif +++ b/cranelift/filetests/filetests/isa/riscv64/return-call.clif @@ -94,17 +94,17 @@ block0(v0: f64): ; VCode: ; block0: -; lui a3,1027 -; slli a5,a3,40 -; fmv.d.x fa1,a5 +; lui a0,1027 +; slli a0,a0,40 +; fmv.d.x fa1,a0 ; fadd.d fa0,fa0,fa1,rne ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lui a3, 0x403 -; slli a5, a3, 0x28 -; fmv.d.x fa1, a5 +; lui a0, 0x403 +; slli a0, a0, 0x28 +; fmv.d.x fa1, a0 ; fadd.d fa0, fa0, fa1, rne ; ret @@ -152,14 +152,14 @@ block0(v0: i8): ; VCode: ; block0: -; andi a2,a0,255 -; seqz a0,a2 +; andi a0,a0,255 +; seqz a0,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a2, a0, 0xff -; seqz a0, a2 +; andi a0, a0, 0xff +; seqz a0, a0 ; ret function %call_i8(i8) -> i8 tail { @@ -295,40 +295,40 @@ block0: ; li a5,35 ; li a6,40 ; li a7,45 -; li s2,50 -; li s3,55 -; li s4,60 -; li s5,65 -; li s6,70 -; li s7,75 -; li s8,80 -; li s9,85 -; li s10,90 -; li s11,95 -; li t0,100 -; li t1,105 -; li t2,110 -; li t3,115 -; li t4,120 -; li s1,125 +; li s10,50 +; li s11,55 +; li t3,60 +; li t4,65 +; li t0,70 +; li t1,75 +; li t2,80 +; li s1,85 +; li s2,90 +; li s3,95 +; li s4,100 +; li s5,105 +; li s6,110 +; li s7,115 +; li s8,120 +; li s9,125 ; li a0,130 ; li a1,135 -; sd s2,-144(incoming_arg) -; sd s3,-136(incoming_arg) -; sd s4,-128(incoming_arg) -; sd s5,-120(incoming_arg) -; sd s6,-112(incoming_arg) -; sd s7,-104(incoming_arg) -; sd s8,-96(incoming_arg) -; sd s9,-88(incoming_arg) -; sd s10,-80(incoming_arg) -; sd s11,-72(incoming_arg) -; sd t0,-64(incoming_arg) -; sd t1,-56(incoming_arg) -; sd t2,-48(incoming_arg) -; sd t3,-40(incoming_arg) -; sd t4,-32(incoming_arg) -; sd s1,-24(incoming_arg) +; sd s10,-144(incoming_arg) +; sd s11,-136(incoming_arg) +; sd t3,-128(incoming_arg) +; sd t4,-120(incoming_arg) +; sd t0,-112(incoming_arg) +; sd t1,-104(incoming_arg) +; sd t2,-96(incoming_arg) +; sd s1,-88(incoming_arg) +; sd s2,-80(incoming_arg) +; sd s3,-72(incoming_arg) +; sd s4,-64(incoming_arg) +; sd s5,-56(incoming_arg) +; sd s6,-48(incoming_arg) +; sd s7,-40(incoming_arg) +; sd s8,-32(incoming_arg) +; sd s9,-24(incoming_arg) ; sd a0,-16(incoming_arg) ; sd a1,-8(incoming_arg) ; load_ext_name_far t0,%tail_callee_stack_args+0 @@ -370,40 +370,40 @@ block0: ; addi a5, zero, 0x23 ; addi a6, zero, 0x28 ; addi a7, zero, 0x2d -; addi s2, zero, 0x32 -; addi s3, zero, 0x37 -; addi s4, zero, 0x3c -; addi s5, zero, 0x41 -; addi s6, zero, 0x46 -; addi s7, zero, 0x4b -; addi s8, zero, 0x50 -; addi s9, zero, 0x55 -; addi s10, zero, 0x5a -; addi s11, zero, 0x5f -; addi t0, zero, 0x64 -; addi t1, zero, 0x69 -; addi t2, zero, 0x6e -; addi t3, zero, 0x73 -; addi t4, zero, 0x78 -; addi s1, zero, 0x7d +; addi s10, zero, 0x32 +; addi s11, zero, 0x37 +; addi t3, zero, 0x3c +; addi t4, zero, 0x41 +; addi t0, zero, 0x46 +; addi t1, zero, 0x4b +; addi t2, zero, 0x50 +; addi s1, 
zero, 0x55 +; addi s2, zero, 0x5a +; addi s3, zero, 0x5f +; addi s4, zero, 0x64 +; addi s5, zero, 0x69 +; addi s6, zero, 0x6e +; addi s7, zero, 0x73 +; addi s8, zero, 0x78 +; addi s9, zero, 0x7d ; addi a0, zero, 0x82 ; addi a1, zero, 0x87 -; sd s2, 0x80(sp) -; sd s3, 0x88(sp) -; sd s4, 0x90(sp) -; sd s5, 0x98(sp) -; sd s6, 0xa0(sp) -; sd s7, 0xa8(sp) -; sd s8, 0xb0(sp) -; sd s9, 0xb8(sp) -; sd s10, 0xc0(sp) -; sd s11, 0xc8(sp) -; sd t0, 0xd0(sp) -; sd t1, 0xd8(sp) -; sd t2, 0xe0(sp) -; sd t3, 0xe8(sp) -; sd t4, 0xf0(sp) -; sd s1, 0xf8(sp) +; sd s10, 0x80(sp) +; sd s11, 0x88(sp) +; sd t3, 0x90(sp) +; sd t4, 0x98(sp) +; sd t0, 0xa0(sp) +; sd t1, 0xa8(sp) +; sd t2, 0xb0(sp) +; sd s1, 0xb8(sp) +; sd s2, 0xc0(sp) +; sd s3, 0xc8(sp) +; sd s4, 0xd0(sp) +; sd s5, 0xd8(sp) +; sd s6, 0xe0(sp) +; sd s7, 0xe8(sp) +; sd s8, 0xf0(sp) +; sd s9, 0xf8(sp) ; sd a0, 0x100(sp) ; sd a1, 0x108(sp) ; auipc t0, 0 @@ -572,43 +572,43 @@ block2: ; li a7,45 ; li a2,50 ; li a1,55 -; li s5,60 -; li s4,65 -; li s3,70 -; li s2,75 -; li s1,80 -; li t4,85 -; li t3,90 -; li t2,95 -; li t1,100 -; li t0,105 -; li s11,110 -; li s10,115 -; li s9,120 -; li s8,125 -; li s7,130 -; li s6,135 +; li t4,60 +; li t3,65 +; li s11,70 +; li s10,75 +; li s9,80 +; li s8,85 +; li s7,90 +; li s6,95 +; li s5,100 +; li s4,105 +; li s3,110 +; li s2,115 +; li s1,120 +; li t2,125 +; li t1,130 +; li t0,135 ; bne a0,zero,taken(label2),not_taken(label1) ; block1: ; li a0,140 ; sd a2,-160(incoming_arg) ; sd a1,-152(incoming_arg) -; sd s5,-144(incoming_arg) -; sd s4,-136(incoming_arg) -; sd s3,-128(incoming_arg) -; sd s2,-120(incoming_arg) -; sd s1,-112(incoming_arg) -; sd t4,-104(incoming_arg) -; sd t3,-96(incoming_arg) -; sd t2,-88(incoming_arg) -; sd t1,-80(incoming_arg) -; sd t0,-72(incoming_arg) -; sd s11,-64(incoming_arg) -; sd s10,-56(incoming_arg) -; sd s9,-48(incoming_arg) -; sd s8,-40(incoming_arg) -; sd s7,-32(incoming_arg) -; sd s6,-24(incoming_arg) +; sd t4,-144(incoming_arg) +; sd t3,-136(incoming_arg) +; sd s11,-128(incoming_arg) +; sd s10,-120(incoming_arg) +; sd s9,-112(incoming_arg) +; sd s8,-104(incoming_arg) +; sd s7,-96(incoming_arg) +; sd s6,-88(incoming_arg) +; sd s5,-80(incoming_arg) +; sd s4,-72(incoming_arg) +; sd s3,-64(incoming_arg) +; sd s2,-56(incoming_arg) +; sd s1,-48(incoming_arg) +; sd t2,-40(incoming_arg) +; sd t1,-32(incoming_arg) +; sd t0,-24(incoming_arg) ; sd a0,-16(incoming_arg) ; load_ext_name_far t0,%different_callee2+0 ; ld a0,16(slot) @@ -619,22 +619,22 @@ block2: ; ld a0,16(slot) ; sd a2,-144(incoming_arg) ; sd a1,-136(incoming_arg) -; sd s5,-128(incoming_arg) -; sd s4,-120(incoming_arg) -; sd s3,-112(incoming_arg) -; sd s2,-104(incoming_arg) -; sd s1,-96(incoming_arg) -; sd t4,-88(incoming_arg) -; sd t3,-80(incoming_arg) -; sd t2,-72(incoming_arg) -; sd t1,-64(incoming_arg) -; sd t0,-56(incoming_arg) -; sd s11,-48(incoming_arg) -; sd s10,-40(incoming_arg) -; sd s9,-32(incoming_arg) -; sd s8,-24(incoming_arg) -; sd s7,-16(incoming_arg) -; sd s6,-8(incoming_arg) +; sd t4,-128(incoming_arg) +; sd t3,-120(incoming_arg) +; sd s11,-112(incoming_arg) +; sd s10,-104(incoming_arg) +; sd s9,-96(incoming_arg) +; sd s8,-88(incoming_arg) +; sd s7,-80(incoming_arg) +; sd s6,-72(incoming_arg) +; sd s5,-64(incoming_arg) +; sd s4,-56(incoming_arg) +; sd s3,-48(incoming_arg) +; sd s2,-40(incoming_arg) +; sd s1,-32(incoming_arg) +; sd t2,-24(incoming_arg) +; sd t1,-16(incoming_arg) +; sd t0,-8(incoming_arg) ; load_ext_name_far t0,%different_callee1+0 ; ld a1,8(slot) ; ld a2,0(slot) @@ -677,43 +677,43 @@ block2: ; addi a7, 
zero, 0x2d ; addi a2, zero, 0x32 ; addi a1, zero, 0x37 -; addi s5, zero, 0x3c -; addi s4, zero, 0x41 -; addi s3, zero, 0x46 -; addi s2, zero, 0x4b -; addi s1, zero, 0x50 -; addi t4, zero, 0x55 -; addi t3, zero, 0x5a -; addi t2, zero, 0x5f -; addi t1, zero, 0x64 -; addi t0, zero, 0x69 -; addi s11, zero, 0x6e -; addi s10, zero, 0x73 -; addi s9, zero, 0x78 -; addi s8, zero, 0x7d -; addi s7, zero, 0x82 -; addi s6, zero, 0x87 +; addi t4, zero, 0x3c +; addi t3, zero, 0x41 +; addi s11, zero, 0x46 +; addi s10, zero, 0x4b +; addi s9, zero, 0x50 +; addi s8, zero, 0x55 +; addi s7, zero, 0x5a +; addi s6, zero, 0x5f +; addi s5, zero, 0x64 +; addi s4, zero, 0x69 +; addi s3, zero, 0x6e +; addi s2, zero, 0x73 +; addi s1, zero, 0x78 +; addi t2, zero, 0x7d +; addi t1, zero, 0x82 +; addi t0, zero, 0x87 ; bnez a0, 0xb0 ; block2: ; offset 0xcc ; addi a0, zero, 0x8c ; sd a2, 0x90(sp) ; sd a1, 0x98(sp) -; sd s5, 0xa0(sp) -; sd s4, 0xa8(sp) -; sd s3, 0xb0(sp) -; sd s2, 0xb8(sp) -; sd s1, 0xc0(sp) -; sd t4, 0xc8(sp) -; sd t3, 0xd0(sp) -; sd t2, 0xd8(sp) -; sd t1, 0xe0(sp) -; sd t0, 0xe8(sp) -; sd s11, 0xf0(sp) -; sd s10, 0xf8(sp) -; sd s9, 0x100(sp) -; sd s8, 0x108(sp) -; sd s7, 0x110(sp) -; sd s6, 0x118(sp) +; sd t4, 0xa0(sp) +; sd t3, 0xa8(sp) +; sd s11, 0xb0(sp) +; sd s10, 0xb8(sp) +; sd s9, 0xc0(sp) +; sd s8, 0xc8(sp) +; sd s7, 0xd0(sp) +; sd s6, 0xd8(sp) +; sd s5, 0xe0(sp) +; sd s4, 0xe8(sp) +; sd s3, 0xf0(sp) +; sd s2, 0xf8(sp) +; sd s1, 0x100(sp) +; sd t2, 0x108(sp) +; sd t1, 0x110(sp) +; sd t0, 0x118(sp) ; sd a0, 0x120(sp) ; auipc t0, 0 ; ld t0, 0xc(t0) @@ -742,22 +742,22 @@ block2: ; ld a0, 0x10(sp) ; sd a2, 0xa0(sp) ; sd a1, 0xa8(sp) -; sd s5, 0xb0(sp) -; sd s4, 0xb8(sp) -; sd s3, 0xc0(sp) -; sd s2, 0xc8(sp) -; sd s1, 0xd0(sp) -; sd t4, 0xd8(sp) -; sd t3, 0xe0(sp) -; sd t2, 0xe8(sp) -; sd t1, 0xf0(sp) -; sd t0, 0xf8(sp) -; sd s11, 0x100(sp) -; sd s10, 0x108(sp) -; sd s9, 0x110(sp) -; sd s8, 0x118(sp) -; sd s7, 0x120(sp) -; sd s6, 0x128(sp) +; sd t4, 0xb0(sp) +; sd t3, 0xb8(sp) +; sd s11, 0xc0(sp) +; sd s10, 0xc8(sp) +; sd s9, 0xd0(sp) +; sd s8, 0xd8(sp) +; sd s7, 0xe0(sp) +; sd s6, 0xe8(sp) +; sd s5, 0xf0(sp) +; sd s4, 0xf8(sp) +; sd s3, 0x100(sp) +; sd s2, 0x108(sp) +; sd s1, 0x110(sp) +; sd t2, 0x118(sp) +; sd t1, 0x120(sp) +; sd t0, 0x128(sp) ; auipc t0, 0 ; ld t0, 0xc(t0) ; j 0xc diff --git a/cranelift/filetests/filetests/isa/riscv64/rotl.clif b/cranelift/filetests/filetests/isa/riscv64/rotl.clif index baa526eae016..03cb0c1afdcc 100644 --- a/cranelift/filetests/filetests/isa/riscv64/rotl.clif +++ b/cranelift/filetests/filetests/isa/riscv64/rotl.clif @@ -9,78 +9,47 @@ block0(v0: i128, v1: i128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-32 -; sd s1,24(sp) -; sd s7,16(sp) -; sd s9,8(sp) ; block0: -; andi a5,a2,63 +; mv a5,a0 +; andi a0,a2,63 ; li a3,64 -; sub a4,a3,a5 -; sll a3,a0,a5 -; srl s7,a1,a4 -; select s9,zero,s7##condition=(a5 eq zero) -; or a3,a3,s9 -; sll a1,a1,a5 -; srl a4,a0,a4 -; select a0,zero,a4##condition=(a5 eq zero) -; or s1,a1,a0 -; li a4,64 -; andi a5,a2,127 -; select [a0,a1],[s1,a3],[a3,s1]##condition=(a5 uge a4) -; ld s1,24(sp) -; ld s7,16(sp) -; ld s9,8(sp) -; addi sp,sp,32 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; sub a4,a3,a0 +; sll a3,a5,a0 +; srl a6,a1,a4 +; select a6,zero,a6##condition=(a0 eq zero) +; or a3,a3,a6 +; sll a1,a1,a0 +; srl a4,a5,a4 +; select a4,zero,a4##condition=(a0 eq zero) +; or a4,a1,a4 +; li a5,64 +; andi a2,a2,127 +; select [a0,a1],[a4,a3],[a3,a4]##condition=(a2 uge a5) ; ret ; ; Disassembled: ; block0: ; 
offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x20 -; sd s1, 0x18(sp) -; sd s7, 0x10(sp) -; sd s9, 8(sp) -; block1: ; offset 0x20 -; andi a5, a2, 0x3f +; mv a5, a0 +; andi a0, a2, 0x3f ; addi a3, zero, 0x40 -; sub a4, a3, a5 -; sll a3, a0, a5 -; srl s7, a1, a4 -; mv s9, zero -; beqz a5, 8 -; mv s9, s7 -; or a3, a3, s9 -; sll a1, a1, a5 -; srl a4, a0, a4 -; mv a0, zero -; beqz a5, 8 +; sub a4, a3, a0 +; sll a3, a5, a0 +; srl a6, a1, a4 +; bnez a0, 8 +; mv a6, zero +; or a3, a3, a6 +; sll a1, a1, a0 +; srl a4, a5, a4 +; bnez a0, 8 +; mv a4, zero +; or a4, a1, a4 +; addi a5, zero, 0x40 +; andi a2, a2, 0x7f ; mv a0, a4 -; or s1, a1, a0 -; addi a4, zero, 0x40 -; andi a5, a2, 0x7f -; mv a0, s1 ; mv a1, a3 -; bgeu a5, a4, 0xc +; bgeu a2, a5, 0xc ; mv a0, a3 -; mv a1, s1 -; ld s1, 0x18(sp) -; ld s7, 0x10(sp) -; ld s9, 8(sp) -; addi sp, sp, 0x20 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 +; mv a1, a4 ; ret function %f4(i64, i64) -> i64 { @@ -91,26 +60,28 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; andi a3,a1,63 -; li a5,64 -; sub a1,a5,a3 -; sll a4,a0,a3 -; srl a5,a0,a1 -; select a1,zero,a5##condition=(a3 eq zero) -; or a0,a4,a1 +; mv a3,a0 +; andi a0,a1,63 +; li a1,64 +; sub a2,a1,a0 +; sll a1,a3,a0 +; srl a2,a3,a2 +; select a3,zero,a2##condition=(a0 eq zero) +; or a0,a1,a3 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a3, a1, 0x3f -; addi a5, zero, 0x40 -; sub a1, a5, a3 -; sll a4, a0, a3 -; srl a5, a0, a1 -; mv a1, zero -; beqz a3, 8 -; mv a1, a5 -; or a0, a4, a1 +; mv a3, a0 +; andi a0, a1, 0x3f +; addi a1, zero, 0x40 +; sub a2, a1, a0 +; sll a1, a3, a0 +; srl a2, a3, a2 +; mv a3, zero +; beqz a0, 8 +; mv a3, a2 +; or a0, a1, a3 ; ret function %f5(i32, i32) -> i32 { @@ -121,30 +92,30 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; slli a3,a0,32 -; srli a5,a3,32 -; andi a1,a1,31 -; li a3,32 -; sub a0,a3,a1 -; sll a2,a5,a1 -; srl a3,a5,a0 -; select a5,zero,a3##condition=(a1 eq zero) -; or a0,a2,a5 +; slli a0,a0,32 +; srli a2,a0,32 +; andi a0,a1,31 +; li a1,32 +; sub a1,a1,a0 +; sll a3,a2,a0 +; srl a5,a2,a1 +; select a1,zero,a5##condition=(a0 eq zero) +; or a0,a3,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x20 -; srli a5, a3, 0x20 -; andi a1, a1, 0x1f -; addi a3, zero, 0x20 -; sub a0, a3, a1 -; sll a2, a5, a1 -; srl a3, a5, a0 -; mv a5, zero -; beqz a1, 8 -; mv a5, a3 -; or a0, a2, a5 +; slli a0, a0, 0x20 +; srli a2, a0, 0x20 +; andi a0, a1, 0x1f +; addi a1, zero, 0x20 +; sub a1, a1, a0 +; sll a3, a2, a0 +; srl a5, a2, a1 +; mv a1, zero +; beqz a0, 8 +; mv a1, a5 +; or a0, a3, a1 ; ret function %f6(i16, i16) -> i16 { @@ -155,30 +126,30 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; slli a3,a0,48 -; srli a5,a3,48 -; andi a1,a1,15 -; li a3,16 -; sub a0,a3,a1 -; sll a2,a5,a1 -; srl a3,a5,a0 -; select a5,zero,a3##condition=(a1 eq zero) -; or a0,a2,a5 +; slli a0,a0,48 +; srli a2,a0,48 +; andi a0,a1,15 +; li a1,16 +; sub a1,a1,a0 +; sll a3,a2,a0 +; srl a5,a2,a1 +; select a1,zero,a5##condition=(a0 eq zero) +; or a0,a3,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x30 -; srli a5, a3, 0x30 -; andi a1, a1, 0xf -; addi a3, zero, 0x10 -; sub a0, a3, a1 -; sll a2, a5, a1 -; srl a3, a5, a0 -; mv a5, zero -; beqz a1, 8 -; mv a5, a3 -; or a0, a2, a5 +; slli a0, a0, 0x30 +; srli a2, a0, 0x30 +; andi a0, a1, 0xf +; addi a1, zero, 0x10 +; sub a1, a1, a0 +; sll a3, a2, a0 +; srl a5, a2, a1 +; mv a1, zero +; beqz a0, 8 +; mv a1, a5 +; or a0, a3, a1 ; ret function %f7(i8, i8) -> i8 { @@ -189,28 
+160,28 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; andi a3,a0,255 -; andi a5,a1,7 +; andi a4,a0,255 +; andi a0,a1,7 ; li a1,8 -; sub a4,a1,a5 -; sll a0,a3,a5 -; srl a1,a3,a4 -; select a3,zero,a1##condition=(a5 eq zero) -; or a0,a0,a3 +; sub a3,a1,a0 +; sll a2,a4,a0 +; srl a3,a4,a3 +; select a5,zero,a3##condition=(a0 eq zero) +; or a0,a2,a5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a3, a0, 0xff -; andi a5, a1, 7 +; andi a4, a0, 0xff +; andi a0, a1, 7 ; addi a1, zero, 8 -; sub a4, a1, a5 -; sll a0, a3, a5 -; srl a1, a3, a4 -; mv a3, zero -; beqz a5, 8 -; mv a3, a1 -; or a0, a0, a3 +; sub a3, a1, a0 +; sll a2, a4, a0 +; srl a3, a4, a3 +; mv a5, zero +; beqz a0, 8 +; mv a5, a3 +; or a0, a2, a5 ; ret function %f21(i64) -> i64 { @@ -222,28 +193,32 @@ block0(v0: i64): ; VCode: ; block0: -; li a3,17 -; andi a3,a3,63 -; li a5,64 -; sub a1,a5,a3 -; sll a4,a0,a3 -; srl a5,a0,a1 -; select a1,zero,a5##condition=(a3 eq zero) -; or a0,a4,a1 +; mv a3,a0 +; li a0,17 +; andi a0,a0,63 +; li a1,64 +; sub a2,a1,a0 +; mv a4,a3 +; sll a1,a4,a0 +; srl a2,a4,a2 +; select a3,zero,a2##condition=(a0 eq zero) +; or a0,a1,a3 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 0x11 -; andi a3, a3, 0x3f -; addi a5, zero, 0x40 -; sub a1, a5, a3 -; sll a4, a0, a3 -; srl a5, a0, a1 -; mv a1, zero -; beqz a3, 8 -; mv a1, a5 -; or a0, a4, a1 +; mv a3, a0 +; addi a0, zero, 0x11 +; andi a0, a0, 0x3f +; addi a1, zero, 0x40 +; sub a2, a1, a0 +; mv a4, a3 +; sll a1, a4, a0 +; srl a2, a4, a2 +; mv a3, zero +; beqz a0, 8 +; mv a3, a2 +; or a0, a1, a3 ; ret function %f22(i32) -> i32 { @@ -255,32 +230,32 @@ block0(v0: i32): ; VCode: ; block0: -; li a1,17 -; slli a3,a0,32 -; srli a5,a3,32 -; andi a1,a1,31 -; li a3,32 -; sub a0,a3,a1 -; sll a2,a5,a1 -; srl a3,a5,a0 -; select a5,zero,a3##condition=(a1 eq zero) -; or a0,a2,a5 +; li a2,17 +; slli a0,a0,32 +; srli a1,a0,32 +; andi a0,a2,31 +; li a2,32 +; sub a2,a2,a0 +; sll a3,a1,a0 +; srl a5,a1,a2 +; select a1,zero,a5##condition=(a0 eq zero) +; or a0,a3,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a1, zero, 0x11 -; slli a3, a0, 0x20 -; srli a5, a3, 0x20 -; andi a1, a1, 0x1f -; addi a3, zero, 0x20 -; sub a0, a3, a1 -; sll a2, a5, a1 -; srl a3, a5, a0 -; mv a5, zero -; beqz a1, 8 -; mv a5, a3 -; or a0, a2, a5 +; addi a2, zero, 0x11 +; slli a0, a0, 0x20 +; srli a1, a0, 0x20 +; andi a0, a2, 0x1f +; addi a2, zero, 0x20 +; sub a2, a2, a0 +; sll a3, a1, a0 +; srl a5, a1, a2 +; mv a1, zero +; beqz a0, 8 +; mv a1, a5 +; or a0, a3, a1 ; ret function %f23(i16) -> i16 { @@ -292,32 +267,32 @@ block0(v0: i16): ; VCode: ; block0: -; li a1,10 -; slli a3,a0,48 -; srli a5,a3,48 -; andi a1,a1,15 -; li a3,16 -; sub a0,a3,a1 -; sll a2,a5,a1 -; srl a3,a5,a0 -; select a5,zero,a3##condition=(a1 eq zero) -; or a0,a2,a5 +; li a2,10 +; slli a0,a0,48 +; srli a1,a0,48 +; andi a0,a2,15 +; li a2,16 +; sub a2,a2,a0 +; sll a3,a1,a0 +; srl a5,a1,a2 +; select a1,zero,a5##condition=(a0 eq zero) +; or a0,a3,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a1, zero, 0xa -; slli a3, a0, 0x30 -; srli a5, a3, 0x30 -; andi a1, a1, 0xf -; addi a3, zero, 0x10 -; sub a0, a3, a1 -; sll a2, a5, a1 -; srl a3, a5, a0 -; mv a5, zero -; beqz a1, 8 -; mv a5, a3 -; or a0, a2, a5 +; addi a2, zero, 0xa +; slli a0, a0, 0x30 +; srli a1, a0, 0x30 +; andi a0, a2, 0xf +; addi a2, zero, 0x10 +; sub a2, a2, a0 +; sll a3, a1, a0 +; srl a5, a1, a2 +; mv a1, zero +; beqz a0, 8 +; mv a1, a5 +; or a0, a3, a1 ; ret function %f24(i8) -> i8 { @@ -329,29 +304,31 @@ block0(v0: i8): ; VCode: ; block0: -; li 
a4,3 -; andi a3,a0,255 -; andi a5,a4,7 -; li a1,8 -; sub a4,a1,a5 -; sll a0,a3,a5 -; srl a1,a3,a4 -; select a3,zero,a1##condition=(a5 eq zero) -; or a0,a0,a3 +; mv a2,a0 +; li a0,3 +; andi a1,a2,255 +; andi a0,a0,7 +; li a2,8 +; sub a3,a2,a0 +; sll a2,a1,a0 +; srl a3,a1,a3 +; select a5,zero,a3##condition=(a0 eq zero) +; or a0,a2,a5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a4, zero, 3 -; andi a3, a0, 0xff -; andi a5, a4, 7 -; addi a1, zero, 8 -; sub a4, a1, a5 -; sll a0, a3, a5 -; srl a1, a3, a4 -; mv a3, zero -; beqz a5, 8 -; mv a3, a1 -; or a0, a0, a3 +; mv a2, a0 +; addi a0, zero, 3 +; andi a1, a2, 0xff +; andi a0, a0, 7 +; addi a2, zero, 8 +; sub a3, a2, a0 +; sll a2, a1, a0 +; srl a3, a1, a3 +; mv a5, zero +; beqz a0, 8 +; mv a5, a3 +; or a0, a2, a5 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/rotr.clif b/cranelift/filetests/filetests/isa/riscv64/rotr.clif index 3528c4e07b29..f0e19b8319e7 100644 --- a/cranelift/filetests/filetests/isa/riscv64/rotr.clif +++ b/cranelift/filetests/filetests/isa/riscv64/rotr.clif @@ -9,78 +9,47 @@ block0(v0: i128, v1: i128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-32 -; sd s1,24(sp) -; sd s7,16(sp) -; sd s9,8(sp) ; block0: -; andi a5,a2,63 +; mv a5,a0 +; andi a0,a2,63 ; li a3,64 -; sub a4,a3,a5 -; srl a3,a0,a5 -; sll s7,a1,a4 -; select s9,zero,s7##condition=(a5 eq zero) -; or a3,a3,s9 -; srl a1,a1,a5 -; sll a4,a0,a4 -; select a0,zero,a4##condition=(a5 eq zero) -; or s1,a1,a0 -; li a4,64 -; andi a5,a2,127 -; select [a0,a1],[s1,a3],[a3,s1]##condition=(a5 uge a4) -; ld s1,24(sp) -; ld s7,16(sp) -; ld s9,8(sp) -; addi sp,sp,32 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; sub a4,a3,a0 +; srl a3,a5,a0 +; sll a6,a1,a4 +; select a6,zero,a6##condition=(a0 eq zero) +; or a3,a3,a6 +; srl a1,a1,a0 +; sll a4,a5,a4 +; select a4,zero,a4##condition=(a0 eq zero) +; or a4,a1,a4 +; li a5,64 +; andi a2,a2,127 +; select [a0,a1],[a4,a3],[a3,a4]##condition=(a2 uge a5) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x20 -; sd s1, 0x18(sp) -; sd s7, 0x10(sp) -; sd s9, 8(sp) -; block1: ; offset 0x20 -; andi a5, a2, 0x3f +; mv a5, a0 +; andi a0, a2, 0x3f ; addi a3, zero, 0x40 -; sub a4, a3, a5 -; srl a3, a0, a5 -; sll s7, a1, a4 -; mv s9, zero -; beqz a5, 8 -; mv s9, s7 -; or a3, a3, s9 -; srl a1, a1, a5 -; sll a4, a0, a4 -; mv a0, zero -; beqz a5, 8 +; sub a4, a3, a0 +; srl a3, a5, a0 +; sll a6, a1, a4 +; bnez a0, 8 +; mv a6, zero +; or a3, a3, a6 +; srl a1, a1, a0 +; sll a4, a5, a4 +; bnez a0, 8 +; mv a4, zero +; or a4, a1, a4 +; addi a5, zero, 0x40 +; andi a2, a2, 0x7f ; mv a0, a4 -; or s1, a1, a0 -; addi a4, zero, 0x40 -; andi a5, a2, 0x7f -; mv a0, s1 ; mv a1, a3 -; bgeu a5, a4, 0xc +; bgeu a2, a5, 0xc ; mv a0, a3 -; mv a1, s1 -; ld s1, 0x18(sp) -; ld s7, 0x10(sp) -; ld s9, 8(sp) -; addi sp, sp, 0x20 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 +; mv a1, a4 ; ret function %f0(i64, i64) -> i64 { @@ -91,26 +60,28 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; andi a3,a1,63 -; li a5,64 -; sub a1,a5,a3 -; srl a4,a0,a3 -; sll a5,a0,a1 -; select a1,zero,a5##condition=(a3 eq zero) -; or a0,a4,a1 +; mv a3,a0 +; andi a0,a1,63 +; li a1,64 +; sub a2,a1,a0 +; srl a1,a3,a0 +; sll a2,a3,a2 +; select a3,zero,a2##condition=(a0 eq zero) +; or a0,a1,a3 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a3, a1, 0x3f -; addi a5, zero, 0x40 -; sub a1, a5, a3 -; srl a4, a0, a3 -; sll a5, a0, a1 -; mv a1, zero -; beqz a3, 
8 -; mv a1, a5 -; or a0, a4, a1 +; mv a3, a0 +; andi a0, a1, 0x3f +; addi a1, zero, 0x40 +; sub a2, a1, a0 +; srl a1, a3, a0 +; sll a2, a3, a2 +; mv a3, zero +; beqz a0, 8 +; mv a3, a2 +; or a0, a1, a3 ; ret function %f1(i32, i32) -> i32 { @@ -121,30 +92,30 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; slli a3,a0,32 -; srli a5,a3,32 -; andi a1,a1,31 -; li a3,32 -; sub a0,a3,a1 -; srl a2,a5,a1 -; sll a3,a5,a0 -; select a5,zero,a3##condition=(a1 eq zero) -; or a0,a2,a5 +; slli a0,a0,32 +; srli a2,a0,32 +; andi a0,a1,31 +; li a1,32 +; sub a1,a1,a0 +; srl a3,a2,a0 +; sll a5,a2,a1 +; select a1,zero,a5##condition=(a0 eq zero) +; or a0,a3,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x20 -; srli a5, a3, 0x20 -; andi a1, a1, 0x1f -; addi a3, zero, 0x20 -; sub a0, a3, a1 -; srl a2, a5, a1 -; sll a3, a5, a0 -; mv a5, zero -; beqz a1, 8 -; mv a5, a3 -; or a0, a2, a5 +; slli a0, a0, 0x20 +; srli a2, a0, 0x20 +; andi a0, a1, 0x1f +; addi a1, zero, 0x20 +; sub a1, a1, a0 +; srl a3, a2, a0 +; sll a5, a2, a1 +; mv a1, zero +; beqz a0, 8 +; mv a1, a5 +; or a0, a3, a1 ; ret function %f2(i16, i16) -> i16 { @@ -155,30 +126,30 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; slli a3,a0,48 -; srli a5,a3,48 -; andi a1,a1,15 -; li a3,16 -; sub a0,a3,a1 -; srl a2,a5,a1 -; sll a3,a5,a0 -; select a5,zero,a3##condition=(a1 eq zero) -; or a0,a2,a5 +; slli a0,a0,48 +; srli a2,a0,48 +; andi a0,a1,15 +; li a1,16 +; sub a1,a1,a0 +; srl a3,a2,a0 +; sll a5,a2,a1 +; select a1,zero,a5##condition=(a0 eq zero) +; or a0,a3,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x30 -; srli a5, a3, 0x30 -; andi a1, a1, 0xf -; addi a3, zero, 0x10 -; sub a0, a3, a1 -; srl a2, a5, a1 -; sll a3, a5, a0 -; mv a5, zero -; beqz a1, 8 -; mv a5, a3 -; or a0, a2, a5 +; slli a0, a0, 0x30 +; srli a2, a0, 0x30 +; andi a0, a1, 0xf +; addi a1, zero, 0x10 +; sub a1, a1, a0 +; srl a3, a2, a0 +; sll a5, a2, a1 +; mv a1, zero +; beqz a0, 8 +; mv a1, a5 +; or a0, a3, a1 ; ret function %f3(i8, i8) -> i8 { @@ -189,28 +160,28 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; andi a3,a0,255 -; andi a5,a1,7 +; andi a4,a0,255 +; andi a0,a1,7 ; li a1,8 -; sub a4,a1,a5 -; srl a0,a3,a5 -; sll a1,a3,a4 -; select a3,zero,a1##condition=(a5 eq zero) -; or a0,a0,a3 +; sub a3,a1,a0 +; srl a2,a4,a0 +; sll a3,a4,a3 +; select a5,zero,a3##condition=(a0 eq zero) +; or a0,a2,a5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a3, a0, 0xff -; andi a5, a1, 7 +; andi a4, a0, 0xff +; andi a0, a1, 7 ; addi a1, zero, 8 -; sub a4, a1, a5 -; srl a0, a3, a5 -; sll a1, a3, a4 -; mv a3, zero -; beqz a5, 8 -; mv a3, a1 -; or a0, a0, a3 +; sub a3, a1, a0 +; srl a2, a4, a0 +; sll a3, a4, a3 +; mv a5, zero +; beqz a0, 8 +; mv a5, a3 +; or a0, a2, a5 ; ret function %rotr_i64_const_i32(i64) -> i64 { @@ -222,27 +193,31 @@ block0(v0: i64): ; VCode: ; block0: -; li a3,17 -; andi a3,a3,63 -; li a5,64 -; sub a1,a5,a3 -; srl a4,a0,a3 -; sll a5,a0,a1 -; select a1,zero,a5##condition=(a3 eq zero) -; or a0,a4,a1 +; mv a3,a0 +; li a0,17 +; andi a0,a0,63 +; li a1,64 +; sub a2,a1,a0 +; mv a4,a3 +; srl a1,a4,a0 +; sll a2,a4,a2 +; select a3,zero,a2##condition=(a0 eq zero) +; or a0,a1,a3 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 0x11 -; andi a3, a3, 0x3f -; addi a5, zero, 0x40 -; sub a1, a5, a3 -; srl a4, a0, a3 -; sll a5, a0, a1 -; mv a1, zero -; beqz a3, 8 -; mv a1, a5 -; or a0, a4, a1 +; mv a3, a0 +; addi a0, zero, 0x11 +; andi a0, a0, 0x3f +; addi a1, zero, 0x40 +; sub a2, a1, a0 +; mv a4, a3 +; srl a1, a4, a0 +; sll a2, a4, a2 +; 
mv a3, zero +; beqz a0, 8 +; mv a3, a2 +; or a0, a1, a3 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/select-float.clif b/cranelift/filetests/filetests/isa/riscv64/select-float.clif index 7eeccf2693c6..6616b62d9d41 100644 --- a/cranelift/filetests/filetests/isa/riscv64/select-float.clif +++ b/cranelift/filetests/filetests/isa/riscv64/select-float.clif @@ -14,17 +14,17 @@ block0(v0: i8, v1: f16, v2: f16): ; VCode: ; block0: ; li a1,42 -; andi a5,a0,255 +; andi a0,a0,255 ; andi a1,a1,255 -; select fa0,fa0,fa1##condition=(a5 eq a1) +; select fa0,fa0,fa1##condition=(a0 eq a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; addi a1, zero, 0x2a -; andi a5, a0, 0xff +; andi a0, a0, 0xff ; andi a1, a1, 0xff -; beq a5, a1, 8 +; beq a0, a1, 8 ; fmv.d fa0, fa1 ; ret @@ -39,17 +39,17 @@ block0(v0: i8, v1: f32, v2: f32): ; VCode: ; block0: ; li a1,42 -; andi a5,a0,255 +; andi a0,a0,255 ; andi a1,a1,255 -; select fa0,fa0,fa1##condition=(a5 eq a1) +; select fa0,fa0,fa1##condition=(a0 eq a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; addi a1, zero, 0x2a -; andi a5, a0, 0xff +; andi a0, a0, 0xff ; andi a1, a1, 0xff -; beq a5, a1, 8 +; beq a0, a1, 8 ; fmv.d fa0, fa1 ; ret @@ -64,17 +64,17 @@ block0(v0: i8, v1: f64, v2: f64): ; VCode: ; block0: ; li a1,42 -; andi a5,a0,255 +; andi a0,a0,255 ; andi a1,a1,255 -; select fa0,fa0,fa1##condition=(a5 eq a1) +; select fa0,fa0,fa1##condition=(a0 eq a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; addi a1, zero, 0x2a -; andi a5, a0, 0xff +; andi a0, a0, 0xff ; andi a1, a1, 0xff -; beq a5, a1, 8 +; beq a0, a1, 8 ; fmv.d fa0, fa1 ; ret @@ -87,54 +87,25 @@ block0(v0: i8, v1: f128, v2: f128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-16 -; sd s3,8(sp) -; sd s10,0(sp) ; block0: -; mv s10,a1 -; mv a1,a0 -; li a0,42 -; andi a5,a1,255 -; andi s3,a0,255 -; select [a0,a1],[s10,a2],[a3,a4]##condition=(a5 eq s3) -; ld s3,8(sp) -; ld s10,0(sp) -; addi sp,sp,16 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; mv a6,a1 +; li a5,42 +; andi t2,a0,255 +; andi a5,a5,255 +; select [a0,a1],[a6,a2],[a3,a4]##condition=(t2 eq a5) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x10 -; sd s3, 8(sp) -; sd s10, 0(sp) -; block1: ; offset 0x1c -; mv s10, a1 -; mv a1, a0 -; addi a0, zero, 0x2a -; andi a5, a1, 0xff -; andi s3, a0, 0xff -; mv a0, s10 +; mv a6, a1 +; addi a5, zero, 0x2a +; andi t2, a0, 0xff +; andi a5, a5, 0xff +; mv a0, a6 ; mv a1, a2 -; beq a5, s3, 0xc +; beq t2, a5, 0xc ; mv a0, a3 ; mv a1, a4 -; ld s3, 8(sp) -; ld s10, 0(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 ; ret function %select_icmp_i16_f16(i16, f16, f16) -> f16 { @@ -147,22 +118,22 @@ block0(v0: i16, v1: f16, v2: f16): ; VCode: ; block0: -; li a3,42 -; slli a5,a0,48 -; srai a1,a5,48 -; slli a3,a3,48 -; srai a5,a3,48 -; select fa0,fa0,fa1##condition=(a1 eq a5) +; li a1,42 +; slli a0,a0,48 +; srai a0,a0,48 +; slli a1,a1,48 +; srai a1,a1,48 +; select fa0,fa0,fa1##condition=(a0 eq a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 0x2a -; slli a5, a0, 0x30 -; srai a1, a5, 0x30 -; slli a3, a3, 0x30 -; srai a5, a3, 0x30 -; beq a1, a5, 8 +; addi a1, zero, 0x2a +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 +; slli a1, a1, 0x30 +; srai a1, a1, 0x30 +; beq a0, a1, 8 ; fmv.d fa0, fa1 ; ret @@ -176,22 +147,22 @@ block0(v0: i16, v1: f32, v2: f32): ; VCode: ; block0: -; li a3,42 -; slli a5,a0,48 -; srai a1,a5,48 -; slli a3,a3,48 -; 
srai a5,a3,48 -; select fa0,fa0,fa1##condition=(a1 eq a5) +; li a1,42 +; slli a0,a0,48 +; srai a0,a0,48 +; slli a1,a1,48 +; srai a1,a1,48 +; select fa0,fa0,fa1##condition=(a0 eq a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 0x2a -; slli a5, a0, 0x30 -; srai a1, a5, 0x30 -; slli a3, a3, 0x30 -; srai a5, a3, 0x30 -; beq a1, a5, 8 +; addi a1, zero, 0x2a +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 +; slli a1, a1, 0x30 +; srai a1, a1, 0x30 +; beq a0, a1, 8 ; fmv.d fa0, fa1 ; ret @@ -205,22 +176,22 @@ block0(v0: i16, v1: f64, v2: f64): ; VCode: ; block0: -; li a3,42 -; slli a5,a0,48 -; srai a1,a5,48 -; slli a3,a3,48 -; srai a5,a3,48 -; select fa0,fa0,fa1##condition=(a1 eq a5) +; li a1,42 +; slli a0,a0,48 +; srai a0,a0,48 +; slli a1,a1,48 +; srai a1,a1,48 +; select fa0,fa0,fa1##condition=(a0 eq a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 0x2a -; slli a5, a0, 0x30 -; srai a1, a5, 0x30 -; slli a3, a3, 0x30 -; srai a5, a3, 0x30 -; beq a1, a5, 8 +; addi a1, zero, 0x2a +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 +; slli a1, a1, 0x30 +; srai a1, a1, 0x30 +; beq a0, a1, 8 ; fmv.d fa0, fa1 ; ret @@ -238,18 +209,18 @@ block0(v0: i16, v1: f128, v2: f128): ; sd fp,0(sp) ; mv fp,sp ; addi sp,sp,-16 -; sd s6,8(sp) -; sd s7,0(sp) +; sd s1,8(sp) +; sd s2,0(sp) ; block0: -; mv t0,a1 -; li s6,42 -; slli a1,a0,48 +; mv s2,a1 +; li a5,42 +; slli a0,a0,48 +; srai s1,a0,48 +; slli a1,a5,48 ; srai a5,a1,48 -; slli a0,s6,48 -; srai s7,a0,48 -; select [a0,a1],[t0,a2],[a3,a4]##condition=(a5 eq s7) -; ld s6,8(sp) -; ld s7,0(sp) +; select [a0,a1],[s2,a2],[a3,a4]##condition=(s1 eq a5) +; ld s1,8(sp) +; ld s2,0(sp) ; addi sp,sp,16 ; ld ra,8(sp) ; ld fp,0(sp) @@ -263,22 +234,22 @@ block0(v0: i16, v1: f128, v2: f128): ; sd s0, 0(sp) ; mv s0, sp ; addi sp, sp, -0x10 -; sd s6, 8(sp) -; sd s7, 0(sp) +; sd s1, 8(sp) +; sd s2, 0(sp) ; block1: ; offset 0x1c -; mv t0, a1 -; addi s6, zero, 0x2a -; slli a1, a0, 0x30 +; mv s2, a1 +; addi a5, zero, 0x2a +; slli a0, a0, 0x30 +; srai s1, a0, 0x30 +; slli a1, a5, 0x30 ; srai a5, a1, 0x30 -; slli a0, s6, 0x30 -; srai s7, a0, 0x30 -; mv a0, t0 +; mv a0, s2 ; mv a1, a2 -; beq a5, s7, 0xc +; beq s1, a5, 0xc ; mv a0, a3 ; mv a1, a4 -; ld s6, 8(sp) -; ld s7, 0(sp) +; ld s1, 8(sp) +; ld s2, 0(sp) ; addi sp, sp, 0x10 ; ld ra, 8(sp) ; ld s0, 0(sp) @@ -296,17 +267,17 @@ block0(v0: i32, v1: f16, v2: f16): ; VCode: ; block0: ; li a1,42 -; sext.w a5,a0 +; sext.w a0,a0 ; sext.w a1,a1 -; select fa0,fa0,fa1##condition=(a5 eq a1) +; select fa0,fa0,fa1##condition=(a0 eq a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; addi a1, zero, 0x2a -; sext.w a5, a0 +; sext.w a0, a0 ; sext.w a1, a1 -; beq a5, a1, 8 +; beq a0, a1, 8 ; fmv.d fa0, fa1 ; ret @@ -321,17 +292,17 @@ block0(v0: i32, v1: f32, v2: f32): ; VCode: ; block0: ; li a1,42 -; sext.w a5,a0 +; sext.w a0,a0 ; sext.w a1,a1 -; select fa0,fa0,fa1##condition=(a5 eq a1) +; select fa0,fa0,fa1##condition=(a0 eq a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; addi a1, zero, 0x2a -; sext.w a5, a0 +; sext.w a0, a0 ; sext.w a1, a1 -; beq a5, a1, 8 +; beq a0, a1, 8 ; fmv.d fa0, fa1 ; ret @@ -346,17 +317,17 @@ block0(v0: i32, v1: f64, v2: f64): ; VCode: ; block0: ; li a1,42 -; sext.w a5,a0 +; sext.w a0,a0 ; sext.w a1,a1 -; select fa0,fa0,fa1##condition=(a5 eq a1) +; select fa0,fa0,fa1##condition=(a0 eq a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; addi a1, zero, 0x2a -; sext.w a5, a0 +; sext.w a0, a0 ; sext.w a1, a1 -; beq a5, a1, 8 +; beq a0, a1, 8 ; fmv.d fa0, fa1 ; ret @@ -369,54 +340,25 @@ block0(v0: i32, 
v1: f128, v2: f128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-16 -; sd s3,8(sp) -; sd s10,0(sp) ; block0: -; mv s10,a1 -; mv a1,a0 -; li a0,42 -; sext.w a5,a1 -; sext.w s3,a0 -; select [a0,a1],[s10,a2],[a3,a4]##condition=(a5 eq s3) -; ld s3,8(sp) -; ld s10,0(sp) -; addi sp,sp,16 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; mv a6,a1 +; li a5,42 +; sext.w t2,a0 +; sext.w a5,a5 +; select [a0,a1],[a6,a2],[a3,a4]##condition=(t2 eq a5) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x10 -; sd s3, 8(sp) -; sd s10, 0(sp) -; block1: ; offset 0x1c -; mv s10, a1 -; mv a1, a0 -; addi a0, zero, 0x2a -; sext.w a5, a1 -; sext.w s3, a0 -; mv a0, s10 +; mv a6, a1 +; addi a5, zero, 0x2a +; sext.w t2, a0 +; sext.w a5, a5 +; mv a0, a6 ; mv a1, a2 -; beq a5, s3, 0xc +; beq t2, a5, 0xc ; mv a0, a3 ; mv a1, a4 -; ld s3, 8(sp) -; ld s10, 0(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 ; ret function %select_icmp_i64_f16(i64, f16, f16) -> f16 { @@ -429,14 +371,14 @@ block0(v0: i64, v1: f16, v2: f16): ; VCode: ; block0: -; li a5,42 -; select fa0,fa0,fa1##condition=(a0 eq a5) +; li a1,42 +; select fa0,fa0,fa1##condition=(a0 eq a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a5, zero, 0x2a -; beq a0, a5, 8 +; addi a1, zero, 0x2a +; beq a0, a1, 8 ; fmv.d fa0, fa1 ; ret @@ -450,14 +392,14 @@ block0(v0: i64, v1: f32, v2: f32): ; VCode: ; block0: -; li a5,42 -; select fa0,fa0,fa1##condition=(a0 eq a5) +; li a1,42 +; select fa0,fa0,fa1##condition=(a0 eq a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a5, zero, 0x2a -; beq a0, a5, 8 +; addi a1, zero, 0x2a +; beq a0, a1, 8 ; fmv.d fa0, fa1 ; ret @@ -471,14 +413,14 @@ block0(v0: i64, v1: f64, v2: f64): ; VCode: ; block0: -; li a5,42 -; select fa0,fa0,fa1##condition=(a0 eq a5) +; li a1,42 +; select fa0,fa0,fa1##condition=(a0 eq a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a5, zero, 0x2a -; beq a0, a5, 8 +; addi a1, zero, 0x2a +; beq a0, a1, 8 ; fmv.d fa0, fa1 ; ret @@ -491,52 +433,23 @@ block0(v0: i64, v1: f128, v2: f128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-16 -; sd s4,8(sp) -; sd s6,0(sp) ; block0: -; mv s4,a2 -; mv s6,a1 -; li a2,42 -; mv a5,a0 -; select [a0,a1],[s6,s4],[a3,a4]##condition=(a5 eq a2) -; ld s4,8(sp) -; ld s6,0(sp) -; addi sp,sp,16 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; mv a7,a1 +; li a5,42 +; mv a6,a0 +; select [a0,a1],[a7,a2],[a3,a4]##condition=(a6 eq a5) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x10 -; sd s4, 8(sp) -; sd s6, 0(sp) -; block1: ; offset 0x1c -; mv s4, a2 -; mv s6, a1 -; addi a2, zero, 0x2a -; mv a5, a0 -; mv a0, s6 -; mv a1, s4 -; beq a5, a2, 0xc +; mv a7, a1 +; addi a5, zero, 0x2a +; mv a6, a0 +; mv a0, a7 +; mv a1, a2 +; beq a6, a5, 0xc ; mv a0, a3 ; mv a1, a4 -; ld s4, 8(sp) -; ld s6, 0(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 ; ret function %select_icmp_i128_f16(i128, f16, f16) -> f16 { @@ -550,24 +463,24 @@ block0(v0: i128, v1: f16, v2: f16): ; VCode: ; block0: -; li a5,42 -; li a2,0 -; xor a3,a0,a5 -; xor a4,a1,a2 -; or a0,a3,a4 -; seqz a2,a0 -; select fa0,fa0,fa1##condition=(a2 ne zero) +; li a2,42 +; li a3,0 +; xor a0,a0,a2 +; xor a1,a1,a3 +; or a2,a0,a1 +; seqz a4,a2 +; select fa0,fa0,fa1##condition=(a4 ne zero) ; ret ; ; Disassembled: ; block0: ; 
offset 0x0 -; addi a5, zero, 0x2a -; mv a2, zero -; xor a3, a0, a5 -; xor a4, a1, a2 -; or a0, a3, a4 -; seqz a2, a0 -; bnez a2, 8 +; addi a2, zero, 0x2a +; mv a3, zero +; xor a0, a0, a2 +; xor a1, a1, a3 +; or a2, a0, a1 +; seqz a4, a2 +; bnez a4, 8 ; fmv.d fa0, fa1 ; ret @@ -582,24 +495,24 @@ block0(v0: i128, v1: f32, v2: f32): ; VCode: ; block0: -; li a5,42 -; li a2,0 -; xor a3,a0,a5 -; xor a4,a1,a2 -; or a0,a3,a4 -; seqz a2,a0 -; select fa0,fa0,fa1##condition=(a2 ne zero) +; li a2,42 +; li a3,0 +; xor a0,a0,a2 +; xor a1,a1,a3 +; or a2,a0,a1 +; seqz a4,a2 +; select fa0,fa0,fa1##condition=(a4 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a5, zero, 0x2a -; mv a2, zero -; xor a3, a0, a5 -; xor a4, a1, a2 -; or a0, a3, a4 -; seqz a2, a0 -; bnez a2, 8 +; addi a2, zero, 0x2a +; mv a3, zero +; xor a0, a0, a2 +; xor a1, a1, a3 +; or a2, a0, a1 +; seqz a4, a2 +; bnez a4, 8 ; fmv.d fa0, fa1 ; ret @@ -614,24 +527,24 @@ block0(v0: i128, v1: f64, v2: f64): ; VCode: ; block0: -; li a5,42 -; li a2,0 -; xor a3,a0,a5 -; xor a4,a1,a2 -; or a0,a3,a4 -; seqz a2,a0 -; select fa0,fa0,fa1##condition=(a2 ne zero) +; li a2,42 +; li a3,0 +; xor a0,a0,a2 +; xor a1,a1,a3 +; or a2,a0,a1 +; seqz a4,a2 +; select fa0,fa0,fa1##condition=(a4 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a5, zero, 0x2a -; mv a2, zero -; xor a3, a0, a5 -; xor a4, a1, a2 -; or a0, a3, a4 -; seqz a2, a0 -; bnez a2, 8 +; addi a2, zero, 0x2a +; mv a3, zero +; xor a0, a0, a2 +; xor a1, a1, a3 +; or a2, a0, a1 +; seqz a4, a2 +; bnez a4, 8 ; fmv.d fa0, fa1 ; ret @@ -645,59 +558,28 @@ block0(v0: i128, v1: f128, v2: f128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-32 -; sd s8,24(sp) -; sd s9,16(sp) -; sd s11,8(sp) ; block0: -; li s8,42 -; li s9,0 -; xor a0,a0,s8 -; xor a1,a1,s9 +; li a6,42 +; li a7,0 +; xor a0,a0,a6 +; xor a1,a1,a7 ; or a0,a0,a1 -; seqz s11,a0 -; select [a0,a1],[a2,a3],[a4,a5]##condition=(s11 ne zero) -; ld s8,24(sp) -; ld s9,16(sp) -; ld s11,8(sp) -; addi sp,sp,32 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; seqz a7,a0 +; select [a0,a1],[a2,a3],[a4,a5]##condition=(a7 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x20 -; sd s8, 0x18(sp) -; sd s9, 0x10(sp) -; sd s11, 8(sp) -; block1: ; offset 0x20 -; addi s8, zero, 0x2a -; mv s9, zero -; xor a0, a0, s8 -; xor a1, a1, s9 +; addi a6, zero, 0x2a +; mv a7, zero +; xor a0, a0, a6 +; xor a1, a1, a7 ; or a0, a0, a1 -; seqz s11, a0 +; seqz a7, a0 ; mv a0, a2 ; mv a1, a3 -; bnez s11, 0xc +; bnez a7, 0xc ; mv a0, a4 ; mv a1, a5 -; ld s8, 0x18(sp) -; ld s9, 0x10(sp) -; ld s11, 8(sp) -; addi sp, sp, 0x20 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/select.clif b/cranelift/filetests/filetests/isa/riscv64/select.clif index 3b27d8eb704e..b22df283d72b 100644 --- a/cranelift/filetests/filetests/isa/riscv64/select.clif +++ b/cranelift/filetests/filetests/isa/riscv64/select.clif @@ -13,18 +13,18 @@ block0(v0: i8, v1: i8, v2: i8): ; VCode: ; block0: ; li a3,42 -; andi a5,a0,255 +; andi a4,a0,255 ; andi a3,a3,255 -; select a0,a1,a2##condition=(a5 eq a3) +; select a0,a1,a2##condition=(a4 eq a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; addi a3, zero, 0x2a -; andi a5, a0, 0xff +; andi a4, a0, 0xff ; andi a3, a3, 0xff ; mv a0, a1 -; beq a5, a3, 8 +; beq a4, a3, 8 ; mv a0, a2 ; ret @@ -39,18 +39,18 @@ block0(v0: i8, v1: i16, v2: i16): ; VCode: ; block0: ; 
li a3,42 -; andi a5,a0,255 +; andi a4,a0,255 ; andi a3,a3,255 -; select a0,a1,a2##condition=(a5 eq a3) +; select a0,a1,a2##condition=(a4 eq a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; addi a3, zero, 0x2a -; andi a5, a0, 0xff +; andi a4, a0, 0xff ; andi a3, a3, 0xff ; mv a0, a1 -; beq a5, a3, 8 +; beq a4, a3, 8 ; mv a0, a2 ; ret @@ -65,18 +65,18 @@ block0(v0: i8, v1: i32, v2: i32): ; VCode: ; block0: ; li a3,42 -; andi a5,a0,255 +; andi a4,a0,255 ; andi a3,a3,255 -; select a0,a1,a2##condition=(a5 eq a3) +; select a0,a1,a2##condition=(a4 eq a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; addi a3, zero, 0x2a -; andi a5, a0, 0xff +; andi a4, a0, 0xff ; andi a3, a3, 0xff ; mv a0, a1 -; beq a5, a3, 8 +; beq a4, a3, 8 ; mv a0, a2 ; ret @@ -91,18 +91,18 @@ block0(v0: i8, v1: i64, v2: i64): ; VCode: ; block0: ; li a3,42 -; andi a5,a0,255 +; andi a4,a0,255 ; andi a3,a3,255 -; select a0,a1,a2##condition=(a5 eq a3) +; select a0,a1,a2##condition=(a4 eq a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; addi a3, zero, 0x2a -; andi a5, a0, 0xff +; andi a4, a0, 0xff ; andi a3, a3, 0xff ; mv a0, a1 -; beq a5, a3, 8 +; beq a4, a3, 8 ; mv a0, a2 ; ret @@ -115,54 +115,25 @@ block0(v0: i8, v1: i128, v2: i128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-16 -; sd s3,8(sp) -; sd s10,0(sp) ; block0: -; mv s10,a1 -; mv a1,a0 -; li a0,42 -; andi a5,a1,255 -; andi s3,a0,255 -; select [a0,a1],[s10,a2],[a3,a4]##condition=(a5 eq s3) -; ld s3,8(sp) -; ld s10,0(sp) -; addi sp,sp,16 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; mv a6,a1 +; li a5,42 +; andi t2,a0,255 +; andi a5,a5,255 +; select [a0,a1],[a6,a2],[a3,a4]##condition=(t2 eq a5) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x10 -; sd s3, 8(sp) -; sd s10, 0(sp) -; block1: ; offset 0x1c -; mv s10, a1 -; mv a1, a0 -; addi a0, zero, 0x2a -; andi a5, a1, 0xff -; andi s3, a0, 0xff -; mv a0, s10 +; mv a6, a1 +; addi a5, zero, 0x2a +; andi t2, a0, 0xff +; andi a5, a5, 0xff +; mv a0, a6 ; mv a1, a2 -; beq a5, s3, 0xc +; beq t2, a5, 0xc ; mv a0, a3 ; mv a1, a4 -; ld s3, 8(sp) -; ld s10, 0(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 ; ret function %select_icmp_i16_i8(i16, i8, i8) -> i8 { @@ -175,23 +146,23 @@ block0(v0: i16, v1: i8, v2: i8): ; VCode: ; block0: -; li a4,42 -; slli a5,a0,48 -; srai a3,a5,48 -; slli a4,a4,48 -; srai a5,a4,48 -; select a0,a1,a2##condition=(a3 eq a5) +; li a3,42 +; slli a0,a0,48 +; srai a4,a0,48 +; slli a0,a3,48 +; srai a3,a0,48 +; select a0,a1,a2##condition=(a4 eq a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a4, zero, 0x2a -; slli a5, a0, 0x30 -; srai a3, a5, 0x30 -; slli a4, a4, 0x30 -; srai a5, a4, 0x30 +; addi a3, zero, 0x2a +; slli a0, a0, 0x30 +; srai a4, a0, 0x30 +; slli a0, a3, 0x30 +; srai a3, a0, 0x30 ; mv a0, a1 -; beq a3, a5, 8 +; beq a4, a3, 8 ; mv a0, a2 ; ret @@ -205,23 +176,23 @@ block0(v0: i16, v1: i16, v2: i16): ; VCode: ; block0: -; li a4,42 -; slli a5,a0,48 -; srai a3,a5,48 -; slli a4,a4,48 -; srai a5,a4,48 -; select a0,a1,a2##condition=(a3 eq a5) +; li a3,42 +; slli a0,a0,48 +; srai a4,a0,48 +; slli a0,a3,48 +; srai a3,a0,48 +; select a0,a1,a2##condition=(a4 eq a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a4, zero, 0x2a -; slli a5, a0, 0x30 -; srai a3, a5, 0x30 -; slli a4, a4, 0x30 -; srai a5, a4, 0x30 +; addi a3, zero, 0x2a +; slli a0, a0, 0x30 +; srai a4, a0, 0x30 +; slli a0, a3, 0x30 +; srai a3, a0, 0x30 ; mv a0, 
a1 -; beq a3, a5, 8 +; beq a4, a3, 8 ; mv a0, a2 ; ret @@ -235,23 +206,23 @@ block0(v0: i16, v1: i32, v2: i32): ; VCode: ; block0: -; li a4,42 -; slli a5,a0,48 -; srai a3,a5,48 -; slli a4,a4,48 -; srai a5,a4,48 -; select a0,a1,a2##condition=(a3 eq a5) +; li a3,42 +; slli a0,a0,48 +; srai a4,a0,48 +; slli a0,a3,48 +; srai a3,a0,48 +; select a0,a1,a2##condition=(a4 eq a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a4, zero, 0x2a -; slli a5, a0, 0x30 -; srai a3, a5, 0x30 -; slli a4, a4, 0x30 -; srai a5, a4, 0x30 +; addi a3, zero, 0x2a +; slli a0, a0, 0x30 +; srai a4, a0, 0x30 +; slli a0, a3, 0x30 +; srai a3, a0, 0x30 ; mv a0, a1 -; beq a3, a5, 8 +; beq a4, a3, 8 ; mv a0, a2 ; ret @@ -265,23 +236,23 @@ block0(v0: i16, v1: i64, v2: i64): ; VCode: ; block0: -; li a4,42 -; slli a5,a0,48 -; srai a3,a5,48 -; slli a4,a4,48 -; srai a5,a4,48 -; select a0,a1,a2##condition=(a3 eq a5) +; li a3,42 +; slli a0,a0,48 +; srai a4,a0,48 +; slli a0,a3,48 +; srai a3,a0,48 +; select a0,a1,a2##condition=(a4 eq a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a4, zero, 0x2a -; slli a5, a0, 0x30 -; srai a3, a5, 0x30 -; slli a4, a4, 0x30 -; srai a5, a4, 0x30 +; addi a3, zero, 0x2a +; slli a0, a0, 0x30 +; srai a4, a0, 0x30 +; slli a0, a3, 0x30 +; srai a3, a0, 0x30 ; mv a0, a1 -; beq a3, a5, 8 +; beq a4, a3, 8 ; mv a0, a2 ; ret @@ -299,18 +270,18 @@ block0(v0: i16, v1: i128, v2: i128): ; sd fp,0(sp) ; mv fp,sp ; addi sp,sp,-16 -; sd s6,8(sp) -; sd s7,0(sp) +; sd s1,8(sp) +; sd s2,0(sp) ; block0: -; mv t0,a1 -; li s6,42 -; slli a1,a0,48 +; mv s2,a1 +; li a5,42 +; slli a0,a0,48 +; srai s1,a0,48 +; slli a1,a5,48 ; srai a5,a1,48 -; slli a0,s6,48 -; srai s7,a0,48 -; select [a0,a1],[t0,a2],[a3,a4]##condition=(a5 eq s7) -; ld s6,8(sp) -; ld s7,0(sp) +; select [a0,a1],[s2,a2],[a3,a4]##condition=(s1 eq a5) +; ld s1,8(sp) +; ld s2,0(sp) ; addi sp,sp,16 ; ld ra,8(sp) ; ld fp,0(sp) @@ -324,22 +295,22 @@ block0(v0: i16, v1: i128, v2: i128): ; sd s0, 0(sp) ; mv s0, sp ; addi sp, sp, -0x10 -; sd s6, 8(sp) -; sd s7, 0(sp) +; sd s1, 8(sp) +; sd s2, 0(sp) ; block1: ; offset 0x1c -; mv t0, a1 -; addi s6, zero, 0x2a -; slli a1, a0, 0x30 +; mv s2, a1 +; addi a5, zero, 0x2a +; slli a0, a0, 0x30 +; srai s1, a0, 0x30 +; slli a1, a5, 0x30 ; srai a5, a1, 0x30 -; slli a0, s6, 0x30 -; srai s7, a0, 0x30 -; mv a0, t0 +; mv a0, s2 ; mv a1, a2 -; beq a5, s7, 0xc +; beq s1, a5, 0xc ; mv a0, a3 ; mv a1, a4 -; ld s6, 8(sp) -; ld s7, 0(sp) +; ld s1, 8(sp) +; ld s2, 0(sp) ; addi sp, sp, 0x10 ; ld ra, 8(sp) ; ld s0, 0(sp) @@ -357,18 +328,18 @@ block0(v0: i32, v1: i8, v2: i8): ; VCode: ; block0: ; li a3,42 -; sext.w a5,a0 +; sext.w a4,a0 ; sext.w a3,a3 -; select a0,a1,a2##condition=(a5 eq a3) +; select a0,a1,a2##condition=(a4 eq a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; addi a3, zero, 0x2a -; sext.w a5, a0 +; sext.w a4, a0 ; sext.w a3, a3 ; mv a0, a1 -; beq a5, a3, 8 +; beq a4, a3, 8 ; mv a0, a2 ; ret @@ -383,18 +354,18 @@ block0(v0: i32, v1: i16, v2: i16): ; VCode: ; block0: ; li a3,42 -; sext.w a5,a0 +; sext.w a4,a0 ; sext.w a3,a3 -; select a0,a1,a2##condition=(a5 eq a3) +; select a0,a1,a2##condition=(a4 eq a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; addi a3, zero, 0x2a -; sext.w a5, a0 +; sext.w a4, a0 ; sext.w a3, a3 ; mv a0, a1 -; beq a5, a3, 8 +; beq a4, a3, 8 ; mv a0, a2 ; ret @@ -409,18 +380,18 @@ block0(v0: i32, v1: i32, v2: i32): ; VCode: ; block0: ; li a3,42 -; sext.w a5,a0 +; sext.w a4,a0 ; sext.w a3,a3 -; select a0,a1,a2##condition=(a5 eq a3) +; select a0,a1,a2##condition=(a4 eq a3) ; ret ; ; 
Disassembled: ; block0: ; offset 0x0 ; addi a3, zero, 0x2a -; sext.w a5, a0 +; sext.w a4, a0 ; sext.w a3, a3 ; mv a0, a1 -; beq a5, a3, 8 +; beq a4, a3, 8 ; mv a0, a2 ; ret @@ -435,18 +406,18 @@ block0(v0: i32, v1: i64, v2: i64): ; VCode: ; block0: ; li a3,42 -; sext.w a5,a0 +; sext.w a4,a0 ; sext.w a3,a3 -; select a0,a1,a2##condition=(a5 eq a3) +; select a0,a1,a2##condition=(a4 eq a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; addi a3, zero, 0x2a -; sext.w a5, a0 +; sext.w a4, a0 ; sext.w a3, a3 ; mv a0, a1 -; beq a5, a3, 8 +; beq a4, a3, 8 ; mv a0, a2 ; ret @@ -459,54 +430,25 @@ block0(v0: i32, v1: i128, v2: i128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-16 -; sd s3,8(sp) -; sd s10,0(sp) ; block0: -; mv s10,a1 -; mv a1,a0 -; li a0,42 -; sext.w a5,a1 -; sext.w s3,a0 -; select [a0,a1],[s10,a2],[a3,a4]##condition=(a5 eq s3) -; ld s3,8(sp) -; ld s10,0(sp) -; addi sp,sp,16 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; mv a6,a1 +; li a5,42 +; sext.w t2,a0 +; sext.w a5,a5 +; select [a0,a1],[a6,a2],[a3,a4]##condition=(t2 eq a5) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x10 -; sd s3, 8(sp) -; sd s10, 0(sp) -; block1: ; offset 0x1c -; mv s10, a1 -; mv a1, a0 -; addi a0, zero, 0x2a -; sext.w a5, a1 -; sext.w s3, a0 -; mv a0, s10 +; mv a6, a1 +; addi a5, zero, 0x2a +; sext.w t2, a0 +; sext.w a5, a5 +; mv a0, a6 ; mv a1, a2 -; beq a5, s3, 0xc +; beq t2, a5, 0xc ; mv a0, a3 ; mv a1, a4 -; ld s3, 8(sp) -; ld s10, 0(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 ; ret function %select_icmp_i64_i8(i64, i8, i8) -> i8 { @@ -519,17 +461,18 @@ block0(v0: i64, v1: i8, v2: i8): ; VCode: ; block0: -; li a5,42 ; mv a3,a0 -; select a0,a1,a2##condition=(a3 eq a5) +; mv a0,a1 +; li a1,42 +; select a0,a0,a2##condition=(a3 eq a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a5, zero, 0x2a ; mv a3, a0 ; mv a0, a1 -; beq a3, a5, 8 +; addi a1, zero, 0x2a +; beq a3, a1, 8 ; mv a0, a2 ; ret @@ -543,17 +486,18 @@ block0(v0: i64, v1: i16, v2: i16): ; VCode: ; block0: -; li a5,42 ; mv a3,a0 -; select a0,a1,a2##condition=(a3 eq a5) +; mv a0,a1 +; li a1,42 +; select a0,a0,a2##condition=(a3 eq a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a5, zero, 0x2a ; mv a3, a0 ; mv a0, a1 -; beq a3, a5, 8 +; addi a1, zero, 0x2a +; beq a3, a1, 8 ; mv a0, a2 ; ret @@ -567,17 +511,18 @@ block0(v0: i64, v1: i32, v2: i32): ; VCode: ; block0: -; li a5,42 ; mv a3,a0 -; select a0,a1,a2##condition=(a3 eq a5) +; mv a0,a1 +; li a1,42 +; select a0,a0,a2##condition=(a3 eq a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a5, zero, 0x2a ; mv a3, a0 ; mv a0, a1 -; beq a3, a5, 8 +; addi a1, zero, 0x2a +; beq a3, a1, 8 ; mv a0, a2 ; ret @@ -591,17 +536,18 @@ block0(v0: i64, v1: i64, v2: i64): ; VCode: ; block0: -; li a5,42 ; mv a3,a0 -; select a0,a1,a2##condition=(a3 eq a5) +; mv a0,a1 +; li a1,42 +; select a0,a0,a2##condition=(a3 eq a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a5, zero, 0x2a ; mv a3, a0 ; mv a0, a1 -; beq a3, a5, 8 +; addi a1, zero, 0x2a +; beq a3, a1, 8 ; mv a0, a2 ; ret @@ -614,52 +560,23 @@ block0(v0: i64, v1: i128, v2: i128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-16 -; sd s4,8(sp) -; sd s6,0(sp) ; block0: -; mv s4,a2 -; mv s6,a1 -; li a2,42 -; mv a5,a0 -; select [a0,a1],[s6,s4],[a3,a4]##condition=(a5 eq a2) -; ld s4,8(sp) -; ld s6,0(sp) -; addi sp,sp,16 -; ld ra,8(sp) 
-; ld fp,0(sp) -; addi sp,sp,16 +; mv a7,a1 +; li a5,42 +; mv a6,a0 +; select [a0,a1],[a7,a2],[a3,a4]##condition=(a6 eq a5) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x10 -; sd s4, 8(sp) -; sd s6, 0(sp) -; block1: ; offset 0x1c -; mv s4, a2 -; mv s6, a1 -; addi a2, zero, 0x2a -; mv a5, a0 -; mv a0, s6 -; mv a1, s4 -; beq a5, a2, 0xc +; mv a7, a1 +; addi a5, zero, 0x2a +; mv a6, a0 +; mv a0, a7 +; mv a1, a2 +; beq a6, a5, 0xc ; mv a0, a3 ; mv a1, a4 -; ld s4, 8(sp) -; ld s6, 0(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 ; ret function %select_icmp_i128_i8(i128, i8, i8) -> i8 { @@ -673,23 +590,23 @@ block0(v0: i128, v1: i8, v2: i8): ; VCode: ; block0: -; li a5,42 -; li a4,0 -; xor a5,a0,a5 -; xor a4,a1,a4 -; or a0,a5,a4 -; seqz a4,a0 +; li a4,42 +; li a5,0 +; xor a0,a0,a4 +; xor a1,a1,a5 +; or a4,a0,a1 +; seqz a4,a4 ; select a0,a2,a3##condition=(a4 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a5, zero, 0x2a -; mv a4, zero -; xor a5, a0, a5 -; xor a4, a1, a4 -; or a0, a5, a4 -; seqz a4, a0 +; addi a4, zero, 0x2a +; mv a5, zero +; xor a0, a0, a4 +; xor a1, a1, a5 +; or a4, a0, a1 +; seqz a4, a4 ; mv a0, a2 ; bnez a4, 8 ; mv a0, a3 @@ -706,23 +623,23 @@ block0(v0: i128, v1: i16, v2: i16): ; VCode: ; block0: -; li a5,42 -; li a4,0 -; xor a5,a0,a5 -; xor a4,a1,a4 -; or a0,a5,a4 -; seqz a4,a0 +; li a4,42 +; li a5,0 +; xor a0,a0,a4 +; xor a1,a1,a5 +; or a4,a0,a1 +; seqz a4,a4 ; select a0,a2,a3##condition=(a4 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a5, zero, 0x2a -; mv a4, zero -; xor a5, a0, a5 -; xor a4, a1, a4 -; or a0, a5, a4 -; seqz a4, a0 +; addi a4, zero, 0x2a +; mv a5, zero +; xor a0, a0, a4 +; xor a1, a1, a5 +; or a4, a0, a1 +; seqz a4, a4 ; mv a0, a2 ; bnez a4, 8 ; mv a0, a3 @@ -739,23 +656,23 @@ block0(v0: i128, v1: i32, v2: i32): ; VCode: ; block0: -; li a5,42 -; li a4,0 -; xor a5,a0,a5 -; xor a4,a1,a4 -; or a0,a5,a4 -; seqz a4,a0 +; li a4,42 +; li a5,0 +; xor a0,a0,a4 +; xor a1,a1,a5 +; or a4,a0,a1 +; seqz a4,a4 ; select a0,a2,a3##condition=(a4 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a5, zero, 0x2a -; mv a4, zero -; xor a5, a0, a5 -; xor a4, a1, a4 -; or a0, a5, a4 -; seqz a4, a0 +; addi a4, zero, 0x2a +; mv a5, zero +; xor a0, a0, a4 +; xor a1, a1, a5 +; or a4, a0, a1 +; seqz a4, a4 ; mv a0, a2 ; bnez a4, 8 ; mv a0, a3 @@ -772,23 +689,23 @@ block0(v0: i128, v1: i64, v2: i64): ; VCode: ; block0: -; li a5,42 -; li a4,0 -; xor a5,a0,a5 -; xor a4,a1,a4 -; or a0,a5,a4 -; seqz a4,a0 +; li a4,42 +; li a5,0 +; xor a0,a0,a4 +; xor a1,a1,a5 +; or a4,a0,a1 +; seqz a4,a4 ; select a0,a2,a3##condition=(a4 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a5, zero, 0x2a -; mv a4, zero -; xor a5, a0, a5 -; xor a4, a1, a4 -; or a0, a5, a4 -; seqz a4, a0 +; addi a4, zero, 0x2a +; mv a5, zero +; xor a0, a0, a4 +; xor a1, a1, a5 +; or a4, a0, a1 +; seqz a4, a4 ; mv a0, a2 ; bnez a4, 8 ; mv a0, a3 @@ -804,59 +721,28 @@ block0(v0: i128, v1: i128, v2: i128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-32 -; sd s8,24(sp) -; sd s9,16(sp) -; sd s11,8(sp) ; block0: -; li s8,42 -; li s9,0 -; xor a0,a0,s8 -; xor a1,a1,s9 +; li a6,42 +; li a7,0 +; xor a0,a0,a6 +; xor a1,a1,a7 ; or a0,a0,a1 -; seqz s11,a0 -; select [a0,a1],[a2,a3],[a4,a5]##condition=(s11 ne zero) -; ld s8,24(sp) -; ld s9,16(sp) -; ld s11,8(sp) -; addi sp,sp,32 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 
+; seqz a7,a0 +; select [a0,a1],[a2,a3],[a4,a5]##condition=(a7 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x20 -; sd s8, 0x18(sp) -; sd s9, 0x10(sp) -; sd s11, 8(sp) -; block1: ; offset 0x20 -; addi s8, zero, 0x2a -; mv s9, zero -; xor a0, a0, s8 -; xor a1, a1, s9 +; addi a6, zero, 0x2a +; mv a7, zero +; xor a0, a0, a6 +; xor a1, a1, a7 ; or a0, a0, a1 -; seqz s11, a0 +; seqz a7, a0 ; mv a0, a2 ; mv a1, a3 -; bnez s11, 0xc +; bnez a7, 0xc ; mv a0, a4 ; mv a1, a5 -; ld s8, 0x18(sp) -; ld s9, 0x10(sp) -; ld s11, 8(sp) -; addi sp, sp, 0x20 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/select_spectre_guard.clif b/cranelift/filetests/filetests/isa/riscv64/select_spectre_guard.clif index 8466d02feff2..d15c4074f8e9 100644 --- a/cranelift/filetests/filetests/isa/riscv64/select_spectre_guard.clif +++ b/cranelift/filetests/filetests/isa/riscv64/select_spectre_guard.clif @@ -12,28 +12,28 @@ block0(v0: i8, v1: i8, v2: i8): ; VCode: ; block0: -; slli a3,a0,56 -; srai a5,a3,56 -; xori a3,a5,42 -; seqz a3,a3 -; sub a3,zero,a3 -; and a4,a3,a1 -; not a0,a3 -; and a2,a2,a0 -; or a0,a4,a2 +; slli a0,a0,56 +; srai a3,a0,56 +; xori a3,a3,42 +; seqz a5,a3 +; sub a3,zero,a5 +; and a0,a3,a1 +; not a3,a3 +; and a4,a2,a3 +; or a0,a0,a4 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x38 -; srai a5, a3, 0x38 -; xori a3, a5, 0x2a -; seqz a3, a3 -; neg a3, a3 -; and a4, a3, a1 -; not a0, a3 -; and a2, a2, a0 -; or a0, a4, a2 +; slli a0, a0, 0x38 +; srai a3, a0, 0x38 +; xori a3, a3, 0x2a +; seqz a5, a3 +; neg a3, a5 +; and a0, a3, a1 +; not a3, a3 +; and a4, a2, a3 +; or a0, a0, a4 ; ret function %f(i8, i16, i16) -> i16 { @@ -46,28 +46,28 @@ block0(v0: i8, v1: i16, v2: i16): ; VCode: ; block0: -; slli a3,a0,56 -; srai a5,a3,56 -; xori a3,a5,42 -; seqz a3,a3 -; sub a3,zero,a3 -; and a4,a3,a1 -; not a0,a3 -; and a2,a2,a0 -; or a0,a4,a2 +; slli a0,a0,56 +; srai a3,a0,56 +; xori a3,a3,42 +; seqz a5,a3 +; sub a3,zero,a5 +; and a0,a3,a1 +; not a3,a3 +; and a4,a2,a3 +; or a0,a0,a4 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x38 -; srai a5, a3, 0x38 -; xori a3, a5, 0x2a -; seqz a3, a3 -; neg a3, a3 -; and a4, a3, a1 -; not a0, a3 -; and a2, a2, a0 -; or a0, a4, a2 +; slli a0, a0, 0x38 +; srai a3, a0, 0x38 +; xori a3, a3, 0x2a +; seqz a5, a3 +; neg a3, a5 +; and a0, a3, a1 +; not a3, a3 +; and a4, a2, a3 +; or a0, a0, a4 ; ret function %f(i8, i32, i32) -> i32 { @@ -80,28 +80,28 @@ block0(v0: i8, v1: i32, v2: i32): ; VCode: ; block0: -; slli a3,a0,56 -; srai a5,a3,56 -; xori a3,a5,42 -; seqz a3,a3 -; sub a3,zero,a3 -; and a4,a3,a1 -; not a0,a3 -; and a2,a2,a0 -; or a0,a4,a2 +; slli a0,a0,56 +; srai a3,a0,56 +; xori a3,a3,42 +; seqz a5,a3 +; sub a3,zero,a5 +; and a0,a3,a1 +; not a3,a3 +; and a4,a2,a3 +; or a0,a0,a4 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x38 -; srai a5, a3, 0x38 -; xori a3, a5, 0x2a -; seqz a3, a3 -; neg a3, a3 -; and a4, a3, a1 -; not a0, a3 -; and a2, a2, a0 -; or a0, a4, a2 +; slli a0, a0, 0x38 +; srai a3, a0, 0x38 +; xori a3, a3, 0x2a +; seqz a5, a3 +; neg a3, a5 +; and a0, a3, a1 +; not a3, a3 +; and a4, a2, a3 +; or a0, a0, a4 ; ret function %f(i8, i64, i64) -> i64 { @@ -114,28 +114,28 @@ block0(v0: i8, v1: i64, v2: i64): ; VCode: ; block0: -; slli a3,a0,56 -; srai a5,a3,56 -; xori a3,a5,42 -; seqz a3,a3 -; sub a3,zero,a3 -; and a4,a3,a1 -; not a0,a3 -; and a2,a2,a0 -; or a0,a4,a2 +; 
slli a0,a0,56 +; srai a3,a0,56 +; xori a3,a3,42 +; seqz a5,a3 +; sub a3,zero,a5 +; and a0,a3,a1 +; not a3,a3 +; and a4,a2,a3 +; or a0,a0,a4 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x38 -; srai a5, a3, 0x38 -; xori a3, a5, 0x2a -; seqz a3, a3 -; neg a3, a3 -; and a4, a3, a1 -; not a0, a3 -; and a2, a2, a0 -; or a0, a4, a2 +; slli a0, a0, 0x38 +; srai a3, a0, 0x38 +; xori a3, a3, 0x2a +; seqz a5, a3 +; neg a3, a5 +; and a0, a3, a1 +; not a3, a3 +; and a4, a2, a3 +; or a0, a0, a4 ; ret function %f(i8, i128, i128) -> i128 { @@ -149,35 +149,37 @@ block0(v0: i8, v1: i128, v2: i128): ; VCode: ; block0: ; slli a5,a0,56 -; srai a5,a5,56 -; xori a5,a5,42 -; seqz a5,a5 -; sub a5,zero,a5 -; and a0,a5,a1 -; not a1,a5 +; srai a0,a5,56 +; xori a0,a0,42 +; seqz a0,a0 +; sub a0,zero,a0 +; and a5,a0,a1 +; not a1,a0 ; and a1,a3,a1 -; or a0,a0,a1 -; and a2,a5,a2 -; not a5,a5 -; and a1,a4,a5 -; or a1,a2,a1 +; or a3,a5,a1 +; and a2,a0,a2 +; not a0,a0 +; and a0,a4,a0 +; or a1,a2,a0 +; mv a0,a3 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; slli a5, a0, 0x38 -; srai a5, a5, 0x38 -; xori a5, a5, 0x2a -; seqz a5, a5 -; neg a5, a5 -; and a0, a5, a1 -; not a1, a5 +; srai a0, a5, 0x38 +; xori a0, a0, 0x2a +; seqz a0, a0 +; neg a0, a0 +; and a5, a0, a1 +; not a1, a0 ; and a1, a3, a1 -; or a0, a0, a1 -; and a2, a5, a2 -; not a5, a5 -; and a1, a4, a5 -; or a1, a2, a1 +; or a3, a5, a1 +; and a2, a0, a2 +; not a0, a0 +; and a0, a4, a0 +; or a1, a2, a0 +; mv a0, a3 ; ret function %f(i16, i8, i8) -> i8 { @@ -190,28 +192,28 @@ block0(v0: i16, v1: i8, v2: i8): ; VCode: ; block0: -; slli a3,a0,48 -; srai a5,a3,48 -; xori a3,a5,42 -; seqz a3,a3 -; sub a3,zero,a3 -; and a4,a3,a1 -; not a0,a3 -; and a2,a2,a0 -; or a0,a4,a2 +; slli a0,a0,48 +; srai a3,a0,48 +; xori a3,a3,42 +; seqz a5,a3 +; sub a3,zero,a5 +; and a0,a3,a1 +; not a3,a3 +; and a4,a2,a3 +; or a0,a0,a4 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x30 -; srai a5, a3, 0x30 -; xori a3, a5, 0x2a -; seqz a3, a3 -; neg a3, a3 -; and a4, a3, a1 -; not a0, a3 -; and a2, a2, a0 -; or a0, a4, a2 +; slli a0, a0, 0x30 +; srai a3, a0, 0x30 +; xori a3, a3, 0x2a +; seqz a5, a3 +; neg a3, a5 +; and a0, a3, a1 +; not a3, a3 +; and a4, a2, a3 +; or a0, a0, a4 ; ret function %f(i16, i16, i16) -> i16 { @@ -224,28 +226,28 @@ block0(v0: i16, v1: i16, v2: i16): ; VCode: ; block0: -; slli a3,a0,48 -; srai a5,a3,48 -; xori a3,a5,42 -; seqz a3,a3 -; sub a3,zero,a3 -; and a4,a3,a1 -; not a0,a3 -; and a2,a2,a0 -; or a0,a4,a2 +; slli a0,a0,48 +; srai a3,a0,48 +; xori a3,a3,42 +; seqz a5,a3 +; sub a3,zero,a5 +; and a0,a3,a1 +; not a3,a3 +; and a4,a2,a3 +; or a0,a0,a4 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x30 -; srai a5, a3, 0x30 -; xori a3, a5, 0x2a -; seqz a3, a3 -; neg a3, a3 -; and a4, a3, a1 -; not a0, a3 -; and a2, a2, a0 -; or a0, a4, a2 +; slli a0, a0, 0x30 +; srai a3, a0, 0x30 +; xori a3, a3, 0x2a +; seqz a5, a3 +; neg a3, a5 +; and a0, a3, a1 +; not a3, a3 +; and a4, a2, a3 +; or a0, a0, a4 ; ret function %f(i16, i32, i32) -> i32 { @@ -258,28 +260,28 @@ block0(v0: i16, v1: i32, v2: i32): ; VCode: ; block0: -; slli a3,a0,48 -; srai a5,a3,48 -; xori a3,a5,42 -; seqz a3,a3 -; sub a3,zero,a3 -; and a4,a3,a1 -; not a0,a3 -; and a2,a2,a0 -; or a0,a4,a2 +; slli a0,a0,48 +; srai a3,a0,48 +; xori a3,a3,42 +; seqz a5,a3 +; sub a3,zero,a5 +; and a0,a3,a1 +; not a3,a3 +; and a4,a2,a3 +; or a0,a0,a4 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x30 -; srai a5, a3, 0x30 -; xori a3, a5, 0x2a -; seqz a3, a3 -; neg a3, 
a3 -; and a4, a3, a1 -; not a0, a3 -; and a2, a2, a0 -; or a0, a4, a2 +; slli a0, a0, 0x30 +; srai a3, a0, 0x30 +; xori a3, a3, 0x2a +; seqz a5, a3 +; neg a3, a5 +; and a0, a3, a1 +; not a3, a3 +; and a4, a2, a3 +; or a0, a0, a4 ; ret function %f(i16, i64, i64) -> i64 { @@ -292,28 +294,28 @@ block0(v0: i16, v1: i64, v2: i64): ; VCode: ; block0: -; slli a3,a0,48 -; srai a5,a3,48 -; xori a3,a5,42 -; seqz a3,a3 -; sub a3,zero,a3 -; and a4,a3,a1 -; not a0,a3 -; and a2,a2,a0 -; or a0,a4,a2 +; slli a0,a0,48 +; srai a3,a0,48 +; xori a3,a3,42 +; seqz a5,a3 +; sub a3,zero,a5 +; and a0,a3,a1 +; not a3,a3 +; and a4,a2,a3 +; or a0,a0,a4 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x30 -; srai a5, a3, 0x30 -; xori a3, a5, 0x2a -; seqz a3, a3 -; neg a3, a3 -; and a4, a3, a1 -; not a0, a3 -; and a2, a2, a0 -; or a0, a4, a2 +; slli a0, a0, 0x30 +; srai a3, a0, 0x30 +; xori a3, a3, 0x2a +; seqz a5, a3 +; neg a3, a5 +; and a0, a3, a1 +; not a3, a3 +; and a4, a2, a3 +; or a0, a0, a4 ; ret function %f(i16, i128, i128) -> i128 { @@ -327,35 +329,37 @@ block0(v0: i16, v1: i128, v2: i128): ; VCode: ; block0: ; slli a5,a0,48 -; srai a5,a5,48 -; xori a5,a5,42 -; seqz a5,a5 -; sub a5,zero,a5 -; and a0,a5,a1 -; not a1,a5 +; srai a0,a5,48 +; xori a0,a0,42 +; seqz a0,a0 +; sub a0,zero,a0 +; and a5,a0,a1 +; not a1,a0 ; and a1,a3,a1 -; or a0,a0,a1 -; and a2,a5,a2 -; not a5,a5 -; and a1,a4,a5 -; or a1,a2,a1 +; or a3,a5,a1 +; and a2,a0,a2 +; not a0,a0 +; and a0,a4,a0 +; or a1,a2,a0 +; mv a0,a3 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; slli a5, a0, 0x30 -; srai a5, a5, 0x30 -; xori a5, a5, 0x2a -; seqz a5, a5 -; neg a5, a5 -; and a0, a5, a1 -; not a1, a5 +; srai a0, a5, 0x30 +; xori a0, a0, 0x2a +; seqz a0, a0 +; neg a0, a0 +; and a5, a0, a1 +; not a1, a0 ; and a1, a3, a1 -; or a0, a0, a1 -; and a2, a5, a2 -; not a5, a5 -; and a1, a4, a5 -; or a1, a2, a1 +; or a3, a5, a1 +; and a2, a0, a2 +; not a0, a0 +; and a0, a4, a0 +; or a1, a2, a0 +; mv a0, a3 ; ret function %f(i32, i8, i8) -> i8 { @@ -368,26 +372,26 @@ block0(v0: i32, v1: i8, v2: i8): ; VCode: ; block0: -; sext.w a3,a0 -; xori a5,a3,42 -; seqz a3,a5 -; sub a4,zero,a3 -; and a3,a4,a1 -; not a5,a4 -; and a1,a2,a5 -; or a0,a3,a1 +; sext.w a0,a0 +; xori a3,a0,42 +; seqz a3,a3 +; sub a3,zero,a3 +; and a0,a3,a1 +; not a1,a3 +; and a3,a2,a1 +; or a0,a0,a3 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a3, a0 -; xori a5, a3, 0x2a -; seqz a3, a5 -; neg a4, a3 -; and a3, a4, a1 -; not a5, a4 -; and a1, a2, a5 -; or a0, a3, a1 +; sext.w a0, a0 +; xori a3, a0, 0x2a +; seqz a3, a3 +; neg a3, a3 +; and a0, a3, a1 +; not a1, a3 +; and a3, a2, a1 +; or a0, a0, a3 ; ret function %f(i32, i16, i16) -> i16 { @@ -400,26 +404,26 @@ block0(v0: i32, v1: i16, v2: i16): ; VCode: ; block0: -; sext.w a3,a0 -; xori a5,a3,42 -; seqz a3,a5 -; sub a4,zero,a3 -; and a3,a4,a1 -; not a5,a4 -; and a1,a2,a5 -; or a0,a3,a1 +; sext.w a0,a0 +; xori a3,a0,42 +; seqz a3,a3 +; sub a3,zero,a3 +; and a0,a3,a1 +; not a1,a3 +; and a3,a2,a1 +; or a0,a0,a3 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a3, a0 -; xori a5, a3, 0x2a -; seqz a3, a5 -; neg a4, a3 -; and a3, a4, a1 -; not a5, a4 -; and a1, a2, a5 -; or a0, a3, a1 +; sext.w a0, a0 +; xori a3, a0, 0x2a +; seqz a3, a3 +; neg a3, a3 +; and a0, a3, a1 +; not a1, a3 +; and a3, a2, a1 +; or a0, a0, a3 ; ret function %f(i32, i32, i32) -> i32 { @@ -432,26 +436,26 @@ block0(v0: i32, v1: i32, v2: i32): ; VCode: ; block0: -; sext.w a3,a0 -; xori a5,a3,42 -; seqz a3,a5 -; sub a4,zero,a3 -; and a3,a4,a1 -; not a5,a4 -; and 
a1,a2,a5 -; or a0,a3,a1 +; sext.w a0,a0 +; xori a3,a0,42 +; seqz a3,a3 +; sub a3,zero,a3 +; and a0,a3,a1 +; not a1,a3 +; and a3,a2,a1 +; or a0,a0,a3 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a3, a0 -; xori a5, a3, 0x2a -; seqz a3, a5 -; neg a4, a3 -; and a3, a4, a1 -; not a5, a4 -; and a1, a2, a5 -; or a0, a3, a1 +; sext.w a0, a0 +; xori a3, a0, 0x2a +; seqz a3, a3 +; neg a3, a3 +; and a0, a3, a1 +; not a1, a3 +; and a3, a2, a1 +; or a0, a0, a3 ; ret function %f(i32, i64, i64) -> i64 { @@ -464,26 +468,26 @@ block0(v0: i32, v1: i64, v2: i64): ; VCode: ; block0: -; sext.w a3,a0 -; xori a5,a3,42 -; seqz a3,a5 -; sub a4,zero,a3 -; and a3,a4,a1 -; not a5,a4 -; and a1,a2,a5 -; or a0,a3,a1 +; sext.w a0,a0 +; xori a3,a0,42 +; seqz a3,a3 +; sub a3,zero,a3 +; and a0,a3,a1 +; not a1,a3 +; and a3,a2,a1 +; or a0,a0,a3 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a3, a0 -; xori a5, a3, 0x2a -; seqz a3, a5 -; neg a4, a3 -; and a3, a4, a1 -; not a5, a4 -; and a1, a2, a5 -; or a0, a3, a1 +; sext.w a0, a0 +; xori a3, a0, 0x2a +; seqz a3, a3 +; neg a3, a3 +; and a0, a3, a1 +; not a1, a3 +; and a3, a2, a1 +; or a0, a0, a3 ; ret function %f(i32, i128, i128) -> i128 { @@ -497,33 +501,35 @@ block0(v0: i32, v1: i128, v2: i128): ; VCode: ; block0: ; sext.w a5,a0 -; xori a5,a5,42 -; seqz a5,a5 -; sub a5,zero,a5 -; and a0,a5,a1 -; not a1,a5 -; and a3,a3,a1 -; or a0,a0,a3 -; and a1,a5,a2 -; not a3,a5 -; and a5,a4,a3 -; or a1,a1,a5 +; xori a0,a5,42 +; seqz a0,a0 +; sub a0,zero,a0 +; and a1,a0,a1 +; not a5,a0 +; and a5,a3,a5 +; or a3,a1,a5 +; and a2,a0,a2 +; not a0,a0 +; and a0,a4,a0 +; or a1,a2,a0 +; mv a0,a3 ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; sext.w a5, a0 -; xori a5, a5, 0x2a -; seqz a5, a5 -; neg a5, a5 -; and a0, a5, a1 -; not a1, a5 -; and a3, a3, a1 -; or a0, a0, a3 -; and a1, a5, a2 -; not a3, a5 -; and a5, a4, a3 -; or a1, a1, a5 +; xori a0, a5, 0x2a +; seqz a0, a0 +; neg a0, a0 +; and a1, a0, a1 +; not a5, a0 +; and a5, a3, a5 +; or a3, a1, a5 +; and a2, a0, a2 +; not a0, a0 +; and a0, a4, a0 +; or a1, a2, a0 +; mv a0, a3 ; ret function %f(i64, i8, i8) -> i8 { @@ -536,24 +542,24 @@ block0(v0: i64, v1: i8, v2: i8): ; VCode: ; block0: -; xori a3,a0,42 -; seqz a5,a3 -; sub a0,zero,a5 -; and a3,a0,a1 -; not a4,a0 -; and a0,a2,a4 -; or a0,a3,a0 +; xori a0,a0,42 +; seqz a3,a0 +; sub a3,zero,a3 +; and a0,a3,a1 +; not a1,a3 +; and a2,a2,a1 +; or a0,a0,a2 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; xori a3, a0, 0x2a -; seqz a5, a3 -; neg a0, a5 -; and a3, a0, a1 -; not a4, a0 -; and a0, a2, a4 -; or a0, a3, a0 +; xori a0, a0, 0x2a +; seqz a3, a0 +; neg a3, a3 +; and a0, a3, a1 +; not a1, a3 +; and a2, a2, a1 +; or a0, a0, a2 ; ret function %f(i64, i16, i16) -> i16 { @@ -566,24 +572,24 @@ block0(v0: i64, v1: i16, v2: i16): ; VCode: ; block0: -; xori a3,a0,42 -; seqz a5,a3 -; sub a0,zero,a5 -; and a3,a0,a1 -; not a4,a0 -; and a0,a2,a4 -; or a0,a3,a0 +; xori a0,a0,42 +; seqz a3,a0 +; sub a3,zero,a3 +; and a0,a3,a1 +; not a1,a3 +; and a2,a2,a1 +; or a0,a0,a2 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; xori a3, a0, 0x2a -; seqz a5, a3 -; neg a0, a5 -; and a3, a0, a1 -; not a4, a0 -; and a0, a2, a4 -; or a0, a3, a0 +; xori a0, a0, 0x2a +; seqz a3, a0 +; neg a3, a3 +; and a0, a3, a1 +; not a1, a3 +; and a2, a2, a1 +; or a0, a0, a2 ; ret function %f(i64, i32, i32) -> i32 { @@ -596,24 +602,24 @@ block0(v0: i64, v1: i32, v2: i32): ; VCode: ; block0: -; xori a3,a0,42 -; seqz a5,a3 -; sub a0,zero,a5 -; and a3,a0,a1 -; not a4,a0 -; and a0,a2,a4 -; or a0,a3,a0 +; xori a0,a0,42 +; 
seqz a3,a0 +; sub a3,zero,a3 +; and a0,a3,a1 +; not a1,a3 +; and a2,a2,a1 +; or a0,a0,a2 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; xori a3, a0, 0x2a -; seqz a5, a3 -; neg a0, a5 -; and a3, a0, a1 -; not a4, a0 -; and a0, a2, a4 -; or a0, a3, a0 +; xori a0, a0, 0x2a +; seqz a3, a0 +; neg a3, a3 +; and a0, a3, a1 +; not a1, a3 +; and a2, a2, a1 +; or a0, a0, a2 ; ret function %f(i64, i64, i64) -> i64 { @@ -626,24 +632,24 @@ block0(v0: i64, v1: i64, v2: i64): ; VCode: ; block0: -; xori a3,a0,42 -; seqz a5,a3 -; sub a0,zero,a5 -; and a3,a0,a1 -; not a4,a0 -; and a0,a2,a4 -; or a0,a3,a0 +; xori a0,a0,42 +; seqz a3,a0 +; sub a3,zero,a3 +; and a0,a3,a1 +; not a1,a3 +; and a2,a2,a1 +; or a0,a0,a2 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; xori a3, a0, 0x2a -; seqz a5, a3 -; neg a0, a5 -; and a3, a0, a1 -; not a4, a0 -; and a0, a2, a4 -; or a0, a3, a0 +; xori a0, a0, 0x2a +; seqz a3, a0 +; neg a3, a3 +; and a0, a3, a1 +; not a1, a3 +; and a2, a2, a1 +; or a0, a0, a2 ; ret function %f(i64, i128, i128) -> i128 { @@ -655,60 +661,35 @@ block0(v0: i64, v1: i128, v2: i128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-16 -; sd s11,8(sp) ; block0: -; mv s11,a3 -; xori a3,a0,42 -; seqz a5,a3 -; sub a3,zero,a5 -; and a5,a3,a1 -; not a0,a3 -; mv a1,s11 -; and a0,a1,a0 -; or a0,a5,a0 -; and a1,a3,a2 -; not a2,a3 -; and a4,a4,a2 -; or a1,a1,a4 -; ld s11,8(sp) -; addi sp,sp,16 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; xori a5,a0,42 +; seqz a0,a5 +; sub a0,zero,a0 +; and a1,a0,a1 +; not a5,a0 +; and a5,a3,a5 +; or a3,a1,a5 +; and a2,a0,a2 +; not a0,a0 +; and a0,a4,a0 +; or a1,a2,a0 +; mv a0,a3 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x10 -; sd s11, 8(sp) -; block1: ; offset 0x18 -; mv s11, a3 -; xori a3, a0, 0x2a -; seqz a5, a3 -; neg a3, a5 -; and a5, a3, a1 -; not a0, a3 -; mv a1, s11 -; and a0, a1, a0 -; or a0, a5, a0 -; and a1, a3, a2 -; not a2, a3 -; and a4, a4, a2 -; or a1, a1, a4 -; ld s11, 8(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 +; xori a5, a0, 0x2a +; seqz a0, a5 +; neg a0, a0 +; and a1, a0, a1 +; not a5, a0 +; and a5, a3, a5 +; or a3, a1, a5 +; and a2, a0, a2 +; not a0, a0 +; and a0, a4, a0 +; or a1, a2, a0 +; mv a0, a3 ; ret function %f(i128, i8, i8) -> i8 { @@ -722,32 +703,32 @@ block0(v0: i128, v1: i8, v2: i8): ; VCode: ; block0: -; li a4,42 -; li a5,0 -; xor a0,a0,a4 -; xor a4,a1,a5 -; or a4,a0,a4 -; seqz a0,a4 -; sub a5,zero,a0 -; and a1,a5,a2 -; not a4,a5 -; and a5,a3,a4 -; or a0,a1,a5 +; li a5,42 +; li a4,0 +; xor a5,a0,a5 +; xor a4,a1,a4 +; or a0,a5,a4 +; seqz a0,a0 +; sub a1,zero,a0 +; and a4,a1,a2 +; not a5,a1 +; and a0,a3,a5 +; or a0,a4,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a4, zero, 0x2a -; mv a5, zero -; xor a0, a0, a4 -; xor a4, a1, a5 -; or a4, a0, a4 -; seqz a0, a4 -; neg a5, a0 -; and a1, a5, a2 -; not a4, a5 -; and a5, a3, a4 -; or a0, a1, a5 +; addi a5, zero, 0x2a +; mv a4, zero +; xor a5, a0, a5 +; xor a4, a1, a4 +; or a0, a5, a4 +; seqz a0, a0 +; neg a1, a0 +; and a4, a1, a2 +; not a5, a1 +; and a0, a3, a5 +; or a0, a4, a0 ; ret function %f(i128, i16, i16) -> i16 { @@ -761,32 +742,32 @@ block0(v0: i128, v1: i16, v2: i16): ; VCode: ; block0: -; li a4,42 -; li a5,0 -; xor a0,a0,a4 -; xor a4,a1,a5 -; or a4,a0,a4 -; seqz a0,a4 -; sub a5,zero,a0 -; and a1,a5,a2 -; not a4,a5 -; and a5,a3,a4 -; or a0,a1,a5 +; li a5,42 +; li a4,0 +; xor a5,a0,a5 +; xor a4,a1,a4 +; or a0,a5,a4 +; 
seqz a0,a0 +; sub a1,zero,a0 +; and a4,a1,a2 +; not a5,a1 +; and a0,a3,a5 +; or a0,a4,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a4, zero, 0x2a -; mv a5, zero -; xor a0, a0, a4 -; xor a4, a1, a5 -; or a4, a0, a4 -; seqz a0, a4 -; neg a5, a0 -; and a1, a5, a2 -; not a4, a5 -; and a5, a3, a4 -; or a0, a1, a5 +; addi a5, zero, 0x2a +; mv a4, zero +; xor a5, a0, a5 +; xor a4, a1, a4 +; or a0, a5, a4 +; seqz a0, a0 +; neg a1, a0 +; and a4, a1, a2 +; not a5, a1 +; and a0, a3, a5 +; or a0, a4, a0 ; ret function %f(i128, i32, i32) -> i32 { @@ -800,32 +781,32 @@ block0(v0: i128, v1: i32, v2: i32): ; VCode: ; block0: -; li a4,42 -; li a5,0 -; xor a0,a0,a4 -; xor a4,a1,a5 -; or a4,a0,a4 -; seqz a0,a4 -; sub a5,zero,a0 -; and a1,a5,a2 -; not a4,a5 -; and a5,a3,a4 -; or a0,a1,a5 +; li a5,42 +; li a4,0 +; xor a5,a0,a5 +; xor a4,a1,a4 +; or a0,a5,a4 +; seqz a0,a0 +; sub a1,zero,a0 +; and a4,a1,a2 +; not a5,a1 +; and a0,a3,a5 +; or a0,a4,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a4, zero, 0x2a -; mv a5, zero -; xor a0, a0, a4 -; xor a4, a1, a5 -; or a4, a0, a4 -; seqz a0, a4 -; neg a5, a0 -; and a1, a5, a2 -; not a4, a5 -; and a5, a3, a4 -; or a0, a1, a5 +; addi a5, zero, 0x2a +; mv a4, zero +; xor a5, a0, a5 +; xor a4, a1, a4 +; or a0, a5, a4 +; seqz a0, a0 +; neg a1, a0 +; and a4, a1, a2 +; not a5, a1 +; and a0, a3, a5 +; or a0, a4, a0 ; ret function %f(i128, i64, i64) -> i64 { @@ -839,32 +820,32 @@ block0(v0: i128, v1: i64, v2: i64): ; VCode: ; block0: -; li a4,42 -; li a5,0 -; xor a0,a0,a4 -; xor a4,a1,a5 -; or a4,a0,a4 -; seqz a0,a4 -; sub a5,zero,a0 -; and a1,a5,a2 -; not a4,a5 -; and a5,a3,a4 -; or a0,a1,a5 +; li a5,42 +; li a4,0 +; xor a5,a0,a5 +; xor a4,a1,a4 +; or a0,a5,a4 +; seqz a0,a0 +; sub a1,zero,a0 +; and a4,a1,a2 +; not a5,a1 +; and a0,a3,a5 +; or a0,a4,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a4, zero, 0x2a -; mv a5, zero -; xor a0, a0, a4 -; xor a4, a1, a5 -; or a4, a0, a4 -; seqz a0, a4 -; neg a5, a0 -; and a1, a5, a2 -; not a4, a5 -; and a5, a3, a4 -; or a0, a1, a5 +; addi a5, zero, 0x2a +; mv a4, zero +; xor a5, a0, a5 +; xor a4, a1, a4 +; or a0, a5, a4 +; seqz a0, a0 +; neg a1, a0 +; and a4, a1, a2 +; not a5, a1 +; and a0, a3, a5 +; or a0, a4, a0 ; ret function %f(i128, i128, i128) -> i128 { @@ -877,40 +858,69 @@ block0(v0: i128, v1: i128, v2: i128): } ; VCode: +; addi sp,sp,-16 +; sd ra,8(sp) +; sd fp,0(sp) +; mv fp,sp +; addi sp,sp,-16 +; sd s5,8(sp) +; sd s6,0(sp) ; block0: -; li a6,42 -; li a7,0 -; xor a0,a0,a6 -; xor a1,a1,a7 +; li s5,42 +; li s6,0 +; xor a0,a0,s5 +; xor a1,a1,s6 ; or a0,a0,a1 ; seqz a0,a0 -; sub a1,zero,a0 -; and a0,a1,a2 -; not a2,a1 +; sub a0,zero,a0 +; and a1,a0,a2 +; not a2,a0 ; and a2,a4,a2 -; or a0,a0,a2 -; and a2,a1,a3 -; not a1,a1 -; and a3,a5,a1 -; or a1,a2,a3 +; or a4,a1,a2 +; and a2,a0,a3 +; not a0,a0 +; and a0,a5,a0 +; or a1,a2,a0 +; mv a0,a4 +; ld s5,8(sp) +; ld s6,0(sp) +; addi sp,sp,16 +; ld ra,8(sp) +; ld fp,0(sp) +; addi sp,sp,16 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a6, zero, 0x2a -; mv a7, zero -; xor a0, a0, a6 -; xor a1, a1, a7 +; addi sp, sp, -0x10 +; sd ra, 8(sp) +; sd s0, 0(sp) +; mv s0, sp +; addi sp, sp, -0x10 +; sd s5, 8(sp) +; sd s6, 0(sp) +; block1: ; offset 0x1c +; addi s5, zero, 0x2a +; mv s6, zero +; xor a0, a0, s5 +; xor a1, a1, s6 ; or a0, a0, a1 ; seqz a0, a0 -; neg a1, a0 -; and a0, a1, a2 -; not a2, a1 +; neg a0, a0 +; and a1, a0, a2 +; not a2, a0 ; and a2, a4, a2 -; or a0, a0, a2 -; and a2, a1, a3 -; not a1, a1 -; and a3, a5, a1 -; or a1, a2, a3 +; or a4, a1, 
a2 +; and a2, a0, a3 +; not a0, a0 +; and a0, a5, a0 +; or a1, a2, a0 +; mv a0, a4 +; ld s5, 8(sp) +; ld s6, 0(sp) +; addi sp, sp, 0x10 +; ld ra, 8(sp) +; ld s0, 0(sp) +; addi sp, sp, 0x10 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-abi-large-spill.clif b/cranelift/filetests/filetests/isa/riscv64/simd-abi-large-spill.clif index d4278ce21732..1ea00b4e6715 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-abi-large-spill.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-abi-large-spill.clif @@ -25,12 +25,12 @@ block0: ; sd s1,1032(sp) ; block0: ; mv s1,a0 -; vle16.v v11,[const(0)] #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v11,0(slot) #avl=16, #vtype=(e8, m1, ta, ma) +; vle16.v v8,[const(0)] #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(slot) #avl=16, #vtype=(e8, m1, ta, ma) ; call userextname0 ; mv a0,s1 -; vle8.v v11,0(slot) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,0(slot) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld s1,1032(sp) ; addi sp,sp,1040 ; ld ra,8(sp) @@ -51,15 +51,15 @@ block0: ; .byte 0x57, 0x70, 0x84, 0xcc ; auipc t6, 0 ; addi t6, t6, 0x50 -; .byte 0x87, 0xd5, 0x0f, 0x02 +; .byte 0x07, 0xd4, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x05, 0x01, 0x02 +; .byte 0x27, 0x04, 0x01, 0x02 ; auipc ra, 0 ; reloc_external RiscvCallPlt u2:0 0 ; jalr ra ; mv a0, s1 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x87, 0x05, 0x01, 0x02 -; .byte 0xa7, 0x05, 0x05, 0x02 +; .byte 0x07, 0x04, 0x01, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld s1, 0x408(sp) ; addi sp, sp, 0x410 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-abi.clif b/cranelift/filetests/filetests/isa/riscv64/simd-abi.clif index 9562a90160a5..c91f2c585986 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-abi.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-abi.clif @@ -40,78 +40,78 @@ block0( ; mv fp,sp ; addi sp,sp,-256 ; block0: -; vle8.v v11,-544(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v11,0(slot) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v13,-528(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,128(slot) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v13,-512(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v15,-496(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v10,-480(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v12,-464(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-544(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v9,0(slot) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-528(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v10,128(slot) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-512(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v11,-496(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v12,-480(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v13,-464(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; vle8.v v14,-448(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v8,-432(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v25,-416(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v27,-400(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v29,-384(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v31,-368(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v1,-352(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v3,-336(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) 
-; vle8.v v5,-320(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v7,-304(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v17,-288(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v19,-272(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v21,-256(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v23,-240(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v26,-224(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v28,-208(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v30,-192(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v0,-176(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v2,-160(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v4,-144(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v6,-128(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v16,-112(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v18,-96(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v20,-80(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v22,-64(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v24,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v9,16(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v24,32(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v22,48(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v20,64(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v18,80(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v16,96(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v6,112(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v4,128(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v2,144(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v0,160(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v30,176(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v28,192(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v26,208(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v23,224(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v21,240(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v19,256(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v17,272(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v7,288(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v5,304(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v3,320(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v1,336(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v31,352(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v29,368(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v27,384(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v25,400(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v8,416(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v15,-432(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v17,-416(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v19,-400(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v21,-384(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v23,-368(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v25,-352(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v27,-336(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v29,-320(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v31,-304(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v0,-288(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v1,-272(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v2,-256(incoming_arg) #avl=16, #vtype=(e8, m1, 
ta, ma) +; vle8.v v3,-240(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v4,-224(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v5,-208(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v6,-192(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v7,-176(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v16,-160(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v18,-144(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v20,-128(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v22,-112(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v24,-96(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v26,-80(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v28,-64(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v30,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,16(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v30,32(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v28,48(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v26,64(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v24,80(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v22,96(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v20,112(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v18,128(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v16,144(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v7,160(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v6,176(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v5,192(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v4,208(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v3,224(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v2,240(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v1,256(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v0,272(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v31,288(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v29,304(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v27,320(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v25,336(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v23,352(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v21,368(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v19,384(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v17,400(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v15,416(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; vse8.v v14,432(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,448(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v10,464(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v15,480(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,496(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v13,128(slot) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,512(a0) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,0(slot) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v11,528(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v13,448(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v12,464(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v11,480(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v10,496(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,128(slot) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v10,512(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,0(slot) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v9,528(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; addi sp,sp,256 ; ld ra,8(sp) ; ld fp,0(sp) @@ -128,146 +128,146 @@ block0( ; block1: ; offset 0x14 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x110 -; .byte 
0x87, 0x85, 0x0f, 0x02 -; .byte 0xa7, 0x05, 0x01, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0xa7, 0x04, 0x01, 0x02 ; addi t6, sp, 0x120 -; .byte 0x87, 0x86, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; addi t6, sp, 0x80 -; .byte 0xa7, 0x86, 0x0f, 0x02 +; .byte 0x27, 0x85, 0x0f, 0x02 ; addi t6, sp, 0x130 -; .byte 0x87, 0x86, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; addi t6, sp, 0x140 -; .byte 0x87, 0x87, 0x0f, 0x02 +; .byte 0x87, 0x85, 0x0f, 0x02 ; addi t6, sp, 0x150 -; .byte 0x07, 0x85, 0x0f, 0x02 -; addi t6, sp, 0x160 ; .byte 0x07, 0x86, 0x0f, 0x02 +; addi t6, sp, 0x160 +; .byte 0x87, 0x86, 0x0f, 0x02 ; addi t6, sp, 0x170 ; .byte 0x07, 0x87, 0x0f, 0x02 ; addi t6, sp, 0x180 -; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x87, 0x87, 0x0f, 0x02 ; addi t6, sp, 0x190 -; .byte 0x87, 0x8c, 0x0f, 0x02 +; .byte 0x87, 0x88, 0x0f, 0x02 ; addi t6, sp, 0x1a0 -; .byte 0x87, 0x8d, 0x0f, 0x02 +; .byte 0x87, 0x89, 0x0f, 0x02 ; addi t6, sp, 0x1b0 -; .byte 0x87, 0x8e, 0x0f, 0x02 +; .byte 0x87, 0x8a, 0x0f, 0x02 ; addi t6, sp, 0x1c0 -; .byte 0x87, 0x8f, 0x0f, 0x02 +; .byte 0x87, 0x8b, 0x0f, 0x02 ; addi t6, sp, 0x1d0 -; .byte 0x87, 0x80, 0x0f, 0x02 +; .byte 0x87, 0x8c, 0x0f, 0x02 ; addi t6, sp, 0x1e0 -; .byte 0x87, 0x81, 0x0f, 0x02 +; .byte 0x87, 0x8d, 0x0f, 0x02 ; addi t6, sp, 0x1f0 -; .byte 0x87, 0x82, 0x0f, 0x02 +; .byte 0x87, 0x8e, 0x0f, 0x02 ; addi t6, sp, 0x200 -; .byte 0x87, 0x83, 0x0f, 0x02 +; .byte 0x87, 0x8f, 0x0f, 0x02 ; addi t6, sp, 0x210 -; .byte 0x87, 0x88, 0x0f, 0x02 +; .byte 0x07, 0x80, 0x0f, 0x02 ; addi t6, sp, 0x220 -; .byte 0x87, 0x89, 0x0f, 0x02 +; .byte 0x87, 0x80, 0x0f, 0x02 ; addi t6, sp, 0x230 -; .byte 0x87, 0x8a, 0x0f, 0x02 +; .byte 0x07, 0x81, 0x0f, 0x02 ; addi t6, sp, 0x240 -; .byte 0x87, 0x8b, 0x0f, 0x02 +; .byte 0x87, 0x81, 0x0f, 0x02 ; addi t6, sp, 0x250 -; .byte 0x07, 0x8d, 0x0f, 0x02 +; .byte 0x07, 0x82, 0x0f, 0x02 ; addi t6, sp, 0x260 -; .byte 0x07, 0x8e, 0x0f, 0x02 +; .byte 0x87, 0x82, 0x0f, 0x02 ; addi t6, sp, 0x270 -; .byte 0x07, 0x8f, 0x0f, 0x02 +; .byte 0x07, 0x83, 0x0f, 0x02 ; addi t6, sp, 0x280 -; .byte 0x07, 0x80, 0x0f, 0x02 +; .byte 0x87, 0x83, 0x0f, 0x02 ; addi t6, sp, 0x290 -; .byte 0x07, 0x81, 0x0f, 0x02 +; .byte 0x07, 0x88, 0x0f, 0x02 ; addi t6, sp, 0x2a0 -; .byte 0x07, 0x82, 0x0f, 0x02 +; .byte 0x07, 0x89, 0x0f, 0x02 ; addi t6, sp, 0x2b0 -; .byte 0x07, 0x83, 0x0f, 0x02 +; .byte 0x07, 0x8a, 0x0f, 0x02 ; addi t6, sp, 0x2c0 -; .byte 0x07, 0x88, 0x0f, 0x02 +; .byte 0x07, 0x8b, 0x0f, 0x02 ; addi t6, sp, 0x2d0 -; .byte 0x07, 0x89, 0x0f, 0x02 +; .byte 0x07, 0x8c, 0x0f, 0x02 ; addi t6, sp, 0x2e0 -; .byte 0x07, 0x8a, 0x0f, 0x02 +; .byte 0x07, 0x8d, 0x0f, 0x02 ; addi t6, sp, 0x2f0 -; .byte 0x07, 0x8b, 0x0f, 0x02 +; .byte 0x07, 0x8e, 0x0f, 0x02 ; addi t6, sp, 0x300 -; .byte 0x07, 0x8c, 0x0f, 0x02 +; .byte 0x07, 0x8f, 0x0f, 0x02 ; addi t6, sp, 0x310 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x320 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0xa7, 0x05, 0x05, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; addi t6, a0, 0x10 -; .byte 0xa7, 0x84, 0x0f, 0x02 +; .byte 0x27, 0x84, 0x0f, 0x02 ; addi t6, a0, 0x20 -; .byte 0x27, 0x8c, 0x0f, 0x02 +; .byte 0x27, 0x8f, 0x0f, 0x02 ; addi t6, a0, 0x30 -; .byte 0x27, 0x8b, 0x0f, 0x02 +; .byte 0x27, 0x8e, 0x0f, 0x02 ; addi t6, a0, 0x40 -; .byte 0x27, 0x8a, 0x0f, 0x02 +; .byte 0x27, 0x8d, 0x0f, 0x02 ; addi t6, a0, 0x50 -; .byte 0x27, 0x89, 0x0f, 0x02 +; .byte 0x27, 0x8c, 0x0f, 0x02 ; addi t6, a0, 0x60 -; .byte 0x27, 0x88, 0x0f, 0x02 +; .byte 0x27, 0x8b, 0x0f, 0x02 ; addi t6, a0, 0x70 
-; .byte 0x27, 0x83, 0x0f, 0x02 +; .byte 0x27, 0x8a, 0x0f, 0x02 ; addi t6, a0, 0x80 -; .byte 0x27, 0x82, 0x0f, 0x02 +; .byte 0x27, 0x89, 0x0f, 0x02 ; addi t6, a0, 0x90 -; .byte 0x27, 0x81, 0x0f, 0x02 +; .byte 0x27, 0x88, 0x0f, 0x02 ; addi t6, a0, 0xa0 -; .byte 0x27, 0x80, 0x0f, 0x02 +; .byte 0xa7, 0x83, 0x0f, 0x02 ; addi t6, a0, 0xb0 -; .byte 0x27, 0x8f, 0x0f, 0x02 +; .byte 0x27, 0x83, 0x0f, 0x02 ; addi t6, a0, 0xc0 -; .byte 0x27, 0x8e, 0x0f, 0x02 +; .byte 0xa7, 0x82, 0x0f, 0x02 ; addi t6, a0, 0xd0 -; .byte 0x27, 0x8d, 0x0f, 0x02 +; .byte 0x27, 0x82, 0x0f, 0x02 ; addi t6, a0, 0xe0 -; .byte 0xa7, 0x8b, 0x0f, 0x02 +; .byte 0xa7, 0x81, 0x0f, 0x02 ; addi t6, a0, 0xf0 -; .byte 0xa7, 0x8a, 0x0f, 0x02 +; .byte 0x27, 0x81, 0x0f, 0x02 ; addi t6, a0, 0x100 -; .byte 0xa7, 0x89, 0x0f, 0x02 +; .byte 0xa7, 0x80, 0x0f, 0x02 ; addi t6, a0, 0x110 -; .byte 0xa7, 0x88, 0x0f, 0x02 +; .byte 0x27, 0x80, 0x0f, 0x02 ; addi t6, a0, 0x120 -; .byte 0xa7, 0x83, 0x0f, 0x02 +; .byte 0xa7, 0x8f, 0x0f, 0x02 ; addi t6, a0, 0x130 -; .byte 0xa7, 0x82, 0x0f, 0x02 +; .byte 0xa7, 0x8e, 0x0f, 0x02 ; addi t6, a0, 0x140 -; .byte 0xa7, 0x81, 0x0f, 0x02 +; .byte 0xa7, 0x8d, 0x0f, 0x02 ; addi t6, a0, 0x150 -; .byte 0xa7, 0x80, 0x0f, 0x02 +; .byte 0xa7, 0x8c, 0x0f, 0x02 ; addi t6, a0, 0x160 -; .byte 0xa7, 0x8f, 0x0f, 0x02 +; .byte 0xa7, 0x8b, 0x0f, 0x02 ; addi t6, a0, 0x170 -; .byte 0xa7, 0x8e, 0x0f, 0x02 +; .byte 0xa7, 0x8a, 0x0f, 0x02 ; addi t6, a0, 0x180 -; .byte 0xa7, 0x8d, 0x0f, 0x02 +; .byte 0xa7, 0x89, 0x0f, 0x02 ; addi t6, a0, 0x190 -; .byte 0xa7, 0x8c, 0x0f, 0x02 +; .byte 0xa7, 0x88, 0x0f, 0x02 ; addi t6, a0, 0x1a0 -; .byte 0x27, 0x84, 0x0f, 0x02 +; .byte 0xa7, 0x87, 0x0f, 0x02 ; addi t6, a0, 0x1b0 ; .byte 0x27, 0x87, 0x0f, 0x02 ; addi t6, a0, 0x1c0 -; .byte 0x27, 0x86, 0x0f, 0x02 +; .byte 0xa7, 0x86, 0x0f, 0x02 ; addi t6, a0, 0x1d0 -; .byte 0x27, 0x85, 0x0f, 0x02 +; .byte 0x27, 0x86, 0x0f, 0x02 ; addi t6, a0, 0x1e0 -; .byte 0xa7, 0x87, 0x0f, 0x02 +; .byte 0xa7, 0x85, 0x0f, 0x02 ; addi t6, a0, 0x1f0 -; .byte 0xa7, 0x86, 0x0f, 0x02 +; .byte 0x27, 0x85, 0x0f, 0x02 ; addi t6, sp, 0x80 -; .byte 0x87, 0x86, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; addi t6, a0, 0x200 -; .byte 0xa7, 0x86, 0x0f, 0x02 -; .byte 0x87, 0x05, 0x01, 0x02 +; .byte 0x27, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x04, 0x01, 0x02 ; addi t6, a0, 0x210 -; .byte 0xa7, 0x85, 0x0f, 0x02 +; .byte 0xa7, 0x84, 0x0f, 0x02 ; addi sp, sp, 0x100 ; ld ra, 8(sp) ; ld s0, 0(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-avg_round.clif b/cranelift/filetests/filetests/isa/riscv64/simd-avg_round.clif index 1aab9b38c20f..10b74f95eb78 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-avg_round.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-avg_round.clif @@ -16,11 +16,11 @@ block0(v0: i8x16, v1: i8x16): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vv v14,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) -; vxor.vv v8,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) -; vssrl.vi v10,v8,1 #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vv v12,v14,v10 #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vv v8,v9,v10 #avl=16, #vtype=(e8, m1, ta, ma) +; vxor.vv v9,v9,v10 #avl=16, #vtype=(e8, m1, ta, ma) +; vssrl.vi v10,v9,1 #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vv v12,v8,v10 #avl=16, #vtype=(e8, m1, ta, ma) ; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -38,11 +38,11 @@ 
block0(v0: i8x16, v1: i8x16): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x87, 0x95, 0x26 -; .byte 0x57, 0x84, 0x95, 0x2e -; .byte 0x57, 0xb5, 0x80, 0xaa -; .byte 0x57, 0x06, 0xe5, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 +; .byte 0x57, 0x04, 0x95, 0x26 +; .byte 0xd7, 0x04, 0x95, 0x2e +; .byte 0x57, 0xb5, 0x90, 0xaa +; .byte 0x57, 0x06, 0x85, 0x02 ; .byte 0x27, 0x06, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) @@ -62,11 +62,11 @@ block0(v0: i16x8, v1: i16x8): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vv v14,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vxor.vv v8,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vssrl.vi v10,v8,1 #avl=8, #vtype=(e16, m1, ta, ma) -; vadd.vv v12,v14,v10 #avl=8, #vtype=(e16, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vv v8,v9,v10 #avl=8, #vtype=(e16, m1, ta, ma) +; vxor.vv v9,v9,v10 #avl=8, #vtype=(e16, m1, ta, ma) +; vssrl.vi v10,v9,1 #avl=8, #vtype=(e16, m1, ta, ma) +; vadd.vv v12,v8,v10 #avl=8, #vtype=(e16, m1, ta, ma) ; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -84,12 +84,12 @@ block0(v0: i16x8, v1: i16x8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x87, 0x95, 0x26 -; .byte 0x57, 0x84, 0x95, 0x2e -; .byte 0x57, 0xb5, 0x80, 0xaa -; .byte 0x57, 0x06, 0xe5, 0x02 +; .byte 0x57, 0x04, 0x95, 0x26 +; .byte 0xd7, 0x04, 0x95, 0x2e +; .byte 0x57, 0xb5, 0x90, 0xaa +; .byte 0x57, 0x06, 0x85, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x06, 0x05, 0x02 ; ld ra, 8(sp) @@ -110,11 +110,11 @@ block0(v0: i32x4, v1: i32x4): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vxor.vv v8,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vssrl.vi v10,v8,1 #avl=4, #vtype=(e32, m1, ta, ma) -; vadd.vv v12,v14,v10 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vv v8,v9,v10 #avl=4, #vtype=(e32, m1, ta, ma) +; vxor.vv v9,v9,v10 #avl=4, #vtype=(e32, m1, ta, ma) +; vssrl.vi v10,v9,1 #avl=4, #vtype=(e32, m1, ta, ma) +; vadd.vv v12,v8,v10 #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -132,12 +132,12 @@ block0(v0: i32x4, v1: i32x4): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x87, 0x95, 0x26 -; .byte 0x57, 0x84, 0x95, 0x2e -; .byte 0x57, 0xb5, 0x80, 0xaa -; .byte 0x57, 0x06, 0xe5, 0x02 +; .byte 0x57, 0x04, 0x95, 0x26 +; .byte 0xd7, 0x04, 0x95, 0x2e +; .byte 0x57, 0xb5, 0x90, 0xaa +; .byte 0x57, 0x06, 0x85, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x06, 0x05, 0x02 ; ld ra, 8(sp) @@ -158,11 +158,11 @@ block0(v0: i64x2, v1: i64x2): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vxor.vv v8,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vssrl.vi v10,v8,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vadd.vv v12,v14,v10 #avl=2, #vtype=(e64, 
m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vv v8,v9,v10 #avl=2, #vtype=(e64, m1, ta, ma) +; vxor.vv v9,v9,v10 #avl=2, #vtype=(e64, m1, ta, ma) +; vssrl.vi v10,v9,1 #avl=2, #vtype=(e64, m1, ta, ma) +; vadd.vv v12,v8,v10 #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -180,12 +180,12 @@ block0(v0: i64x2, v1: i64x2): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x87, 0x95, 0x26 -; .byte 0x57, 0x84, 0x95, 0x2e -; .byte 0x57, 0xb5, 0x80, 0xaa -; .byte 0x57, 0x06, 0xe5, 0x02 +; .byte 0x57, 0x04, 0x95, 0x26 +; .byte 0xd7, 0x04, 0x95, 0x2e +; .byte 0x57, 0xb5, 0x90, 0xaa +; .byte 0x57, 0x06, 0x85, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x06, 0x05, 0x02 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-band.clif b/cranelift/filetests/filetests/isa/riscv64/simd-band.clif index 0433cb22647c..8fe6998dead8 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-band.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-band.clif @@ -16,10 +16,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vv v14,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vv v8,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,11 +34,11 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x87, 0x95, 0x26 -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x84, 0x84, 0x26 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -56,10 +56,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vv v14,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vv v8,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -74,13 +74,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x87, 0x95, 0x26 +; .byte 0x57, 0x84, 0x84, 0x26 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -98,10 +98,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v 
v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,13 +116,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x87, 0x95, 0x26 +; .byte 0x57, 0x84, 0x84, 0x26 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -140,10 +140,10 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -158,13 +158,13 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x87, 0x95, 0x26 +; .byte 0x57, 0x84, 0x84, 0x26 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -184,9 +184,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -201,9 +201,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0x26 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0x26 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -223,9 +223,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vi v12,v9,-16 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vi v8,v8,-16 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -240,11 +240,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc 
-; .byte 0x57, 0x36, 0x98, 0x26 +; .byte 0x57, 0x34, 0x88, 0x26 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -264,9 +264,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vi v12,v9,15 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vi v8,v8,15 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -281,11 +281,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x97, 0x26 +; .byte 0x57, 0xb4, 0x87, 0x26 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -305,9 +305,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vi v12,v9,-5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vi v8,v8,-5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -322,11 +322,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x9d, 0x26 +; .byte 0x57, 0xb4, 0x8d, 0x26 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -345,9 +345,9 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -362,9 +362,9 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0x26 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x26 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -383,9 +383,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -400,11 +400,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0x26 +; .byte 0x57, 0xc4, 0x85, 0x26 ; .byte 
0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -423,9 +423,9 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -440,11 +440,11 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x26 +; .byte 0x57, 0xc4, 0x85, 0x26 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -463,9 +463,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -480,11 +480,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x26 +; .byte 0x57, 0xc4, 0x85, 0x26 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -504,11 +504,11 @@ block0(v0: f32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; fmv.w.x fa1,a1 -; vfmv.v.f v15,fa1 #avl=4, #vtype=(e32, m1, ta, ma) -; vand.vv v15,v9,v15 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; fmv.w.x fa0,a1 +; vfmv.v.f v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vand.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -523,13 +523,13 @@ block0(v0: f32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; fmv.w.x fa1, a1 +; .byte 0x07, 0x84, 0x0f, 0x02 +; fmv.w.x fa0, a1 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xd7, 0x05, 0x5e -; .byte 0xd7, 0x87, 0x97, 0x26 +; .byte 0xd7, 0x54, 0x05, 0x5e +; .byte 0x57, 0x84, 0x84, 0x26 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -549,11 +549,11 @@ block0(v0: f64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; fmv.d.x fa1,a1 -; vfmv.v.f v15,fa1 #avl=2, #vtype=(e64, m1, ta, ma) -; vand.vv v15,v9,v15 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; fmv.d.x fa0,a1 +; vfmv.v.f v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vand.vv v8,v8,v9 
#avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -568,13 +568,13 @@ block0(v0: f64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; fmv.d.x fa1, a1 +; .byte 0x07, 0x84, 0x0f, 0x02 +; fmv.d.x fa0, a1 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xd7, 0x05, 0x5e -; .byte 0xd7, 0x87, 0x97, 0x26 +; .byte 0xd7, 0x54, 0x05, 0x5e +; .byte 0x57, 0x84, 0x84, 0x26 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-bitselect.clif b/cranelift/filetests/filetests/isa/riscv64/simd-bitselect.clif index 640a71d0fc1a..e15a0ec17819 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-bitselect.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-bitselect.clif @@ -15,13 +15,13 @@ block0(v0: i64x2, v1: i64x2, v2: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v13,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vv v8,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vnot.v v10,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vand.vv v12,v10,v13 #avl=2, #vtype=(e64, m1, ta, ma) -; vor.vv v14,v8,v12 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vv v9,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vnot.v v11,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vand.vv v12,v11,v10 #avl=2, #vtype=(e64, m1, ta, ma) +; vor.vv v14,v9,v12 #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -37,16 +37,16 @@ block0(v0: i64x2, v1: i64x2, v2: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x30 -; .byte 0x87, 0x86, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x84, 0x95, 0x26 -; .byte 0x57, 0xb5, 0x9f, 0x2e -; .byte 0x57, 0x86, 0xa6, 0x26 -; .byte 0x57, 0x07, 0x86, 0x2a +; .byte 0xd7, 0x84, 0x84, 0x26 +; .byte 0xd7, 0xb5, 0x8f, 0x2e +; .byte 0x57, 0x06, 0xb5, 0x26 +; .byte 0x57, 0x07, 0x96, 0x2a ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x07, 0x05, 0x02 ; ld ra, 8(sp) @@ -66,13 +66,13 @@ block0(v0: i32x4, v1: i32x4, v2: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v13,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vv v8,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vnot.v v10,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vand.vv v12,v10,v13 #avl=4, #vtype=(e32, m1, ta, ma) -; vor.vv v14,v8,v12 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vv v9,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vnot.v v11,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vand.vv v12,v11,v10 #avl=4, #vtype=(e32, m1, ta, ma) 
+; vor.vv v14,v9,v12 #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -88,16 +88,16 @@ block0(v0: i32x4, v1: i32x4, v2: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x30 -; .byte 0x87, 0x86, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x84, 0x95, 0x26 -; .byte 0x57, 0xb5, 0x9f, 0x2e -; .byte 0x57, 0x86, 0xa6, 0x26 -; .byte 0x57, 0x07, 0x86, 0x2a +; .byte 0xd7, 0x84, 0x84, 0x26 +; .byte 0xd7, 0xb5, 0x8f, 0x2e +; .byte 0x57, 0x06, 0xb5, 0x26 +; .byte 0x57, 0x07, 0x96, 0x2a ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x07, 0x05, 0x02 ; ld ra, 8(sp) @@ -117,13 +117,13 @@ block0(v0: i16x8, v1: i16x8, v2: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v13,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vv v8,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vnot.v v10,v9 #avl=8, #vtype=(e16, m1, ta, ma) -; vand.vv v12,v10,v13 #avl=8, #vtype=(e16, m1, ta, ma) -; vor.vv v14,v8,v12 #avl=8, #vtype=(e16, m1, ta, ma) +; vle8.v v8,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vv v9,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vnot.v v11,v8 #avl=8, #vtype=(e16, m1, ta, ma) +; vand.vv v12,v11,v10 #avl=8, #vtype=(e16, m1, ta, ma) +; vor.vv v14,v9,v12 #avl=8, #vtype=(e16, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -139,16 +139,16 @@ block0(v0: i16x8, v1: i16x8, v2: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x30 -; .byte 0x87, 0x86, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x84, 0x95, 0x26 -; .byte 0x57, 0xb5, 0x9f, 0x2e -; .byte 0x57, 0x86, 0xa6, 0x26 -; .byte 0x57, 0x07, 0x86, 0x2a +; .byte 0xd7, 0x84, 0x84, 0x26 +; .byte 0xd7, 0xb5, 0x8f, 0x2e +; .byte 0x57, 0x06, 0xb5, 0x26 +; .byte 0x57, 0x07, 0x96, 0x2a ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x07, 0x05, 0x02 ; ld ra, 8(sp) @@ -168,13 +168,13 @@ block0(v0: i8x16, v1: i8x16, v2: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v13,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vv v8,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) -; vnot.v v10,v9 #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vv v12,v10,v13 #avl=16, #vtype=(e8, m1, ta, ma) -; vor.vv v14,v8,v12 #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vv v9,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vnot.v v11,v8 #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vv v12,v11,v10 #avl=16, #vtype=(e8, m1, ta, ma) +; vor.vv v14,v9,v12 #avl=16, #vtype=(e8, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, 
ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -190,15 +190,15 @@ block0(v0: i8x16, v1: i8x16, v2: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x30 -; .byte 0x87, 0x86, 0x0f, 0x02 -; .byte 0x57, 0x84, 0x95, 0x26 -; .byte 0x57, 0xb5, 0x9f, 0x2e -; .byte 0x57, 0x86, 0xa6, 0x26 -; .byte 0x57, 0x07, 0x86, 0x2a +; .byte 0x07, 0x85, 0x0f, 0x02 +; .byte 0xd7, 0x84, 0x84, 0x26 +; .byte 0xd7, 0xb5, 0x8f, 0x2e +; .byte 0x57, 0x06, 0xb5, 0x26 +; .byte 0x57, 0x07, 0x96, 0x2a ; .byte 0x27, 0x07, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) @@ -218,12 +218,12 @@ block0(v0: i64x2, v1: i64x2, v2: i64x2, v3: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-64(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v13,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v15,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmseq.vv v0,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vvm v12,v15,v13,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-64(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmseq.vv v0,v8,v10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vvm v12,v11,v9,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -239,16 +239,16 @@ block0(v0: i64x2, v1: i64x2, v2: i64x2, v3: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; addi t6, sp, 0x30 -; .byte 0x87, 0x86, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x40 -; .byte 0x87, 0x87, 0x0f, 0x02 +; .byte 0x87, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x80, 0x95, 0x62 -; .byte 0x57, 0x86, 0xf6, 0x5c +; .byte 0x57, 0x00, 0x85, 0x62 +; .byte 0x57, 0x86, 0xb4, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x06, 0x05, 0x02 ; ld ra, 8(sp) @@ -269,12 +269,12 @@ block0(v0: f64x2, v1: f64x2, v2: i64x2, v3: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-64(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v13,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v15,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfle.vv v0,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vvm v12,v15,v13,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-64(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfle.vv v0,v8,v10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vvm v12,v11,v9,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -290,16 +290,16 @@ block0(v0: f64x2, v1: f64x2, v2: i64x2, v3: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 
0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; addi t6, sp, 0x30 -; .byte 0x87, 0x86, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x40 -; .byte 0x87, 0x87, 0x0f, 0x02 +; .byte 0x87, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x90, 0x95, 0x66 -; .byte 0x57, 0x86, 0xf6, 0x5c +; .byte 0x57, 0x10, 0x85, 0x66 +; .byte 0x57, 0x86, 0xb4, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x06, 0x05, 0x02 ; ld ra, 8(sp) @@ -321,12 +321,12 @@ block0(v0: f64x2, v1: f64x2, v2: f64x2, v3: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-64(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v13,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v15,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfle.vv v0,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vvm v12,v15,v13,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-64(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfle.vv v0,v8,v10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vvm v12,v11,v9,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -342,16 +342,16 @@ block0(v0: f64x2, v1: f64x2, v2: f64x2, v3: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; addi t6, sp, 0x30 -; .byte 0x87, 0x86, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x40 -; .byte 0x87, 0x87, 0x0f, 0x02 +; .byte 0x87, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x90, 0x95, 0x66 -; .byte 0x57, 0x86, 0xf6, 0x5c +; .byte 0x57, 0x10, 0x85, 0x66 +; .byte 0x57, 0x86, 0xb4, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x06, 0x05, 0x02 ; ld ra, 8(sp) @@ -373,12 +373,12 @@ block0(v0: i64x2, v1: i64x2, v2: f64x2, v3: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-64(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v13,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v15,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmseq.vv v0,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vvm v12,v15,v13,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-64(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmseq.vv v0,v8,v10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vvm v12,v11,v9,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -394,16 +394,16 @@ block0(v0: i64x2, v1: i64x2, v2: f64x2, v3: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; addi t6, sp, 0x30 -; .byte 0x87, 0x86, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x40 -; .byte 0x87, 0x87, 0x0f, 0x02 +; .byte 0x87, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 
0x80, 0x95, 0x62 -; .byte 0x57, 0x86, 0xf6, 0x5c +; .byte 0x57, 0x00, 0x85, 0x62 +; .byte 0x57, 0x86, 0xb4, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x06, 0x05, 0x02 ; ld ra, 8(sp) @@ -429,11 +429,11 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v14,[const(0)] #avl=16, #vtype=(e8, m1, ta, ma) -; vmfeq.vv v0,v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vvm v15,v14,v9,v0.t #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,[const(0)] #avl=16, #vtype=(e8, m1, ta, ma) +; vmfeq.vv v0,v8,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vvm v10,v9,v8,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -448,15 +448,15 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; auipc t6, 0 ; addi t6, t6, 0x34 -; .byte 0x07, 0x87, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x90, 0x94, 0x62 -; .byte 0xd7, 0x87, 0xe4, 0x5c +; .byte 0x57, 0x10, 0x84, 0x62 +; .byte 0x57, 0x05, 0x94, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -485,11 +485,11 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v14,[const(0)] #avl=16, #vtype=(e8, m1, ta, ma) -; vmseq.vv v0,v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vvm v15,v14,v9,v0.t #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,[const(0)] #avl=16, #vtype=(e8, m1, ta, ma) +; vmseq.vv v0,v8,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vvm v10,v9,v8,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -504,15 +504,15 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; auipc t6, 0 ; addi t6, t6, 0x34 -; .byte 0x07, 0x87, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x80, 0x94, 0x62 -; .byte 0xd7, 0x87, 0xe4, 0x5c +; .byte 0x57, 0x00, 0x84, 0x62 +; .byte 0x57, 0x05, 0x94, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-bnot.clif b/cranelift/filetests/filetests/isa/riscv64/simd-bnot.clif index 2f3c5a61c7cf..df7ff4533c68 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-bnot.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-bnot.clif @@ -16,9 +16,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vnot.v v12,v9 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vnot.v v8,v8 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -33,9 +33,9 @@ block0(v0: i8x16): ; block1: ; 
offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x9f, 0x2e -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x8f, 0x2e +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -53,9 +53,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vnot.v v12,v9 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vnot.v v8,v8 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -70,11 +70,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xb6, 0x9f, 0x2e +; .byte 0x57, 0xb4, 0x8f, 0x2e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -92,9 +92,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vnot.v v12,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vnot.v v8,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -109,11 +109,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x9f, 0x2e +; .byte 0x57, 0xb4, 0x8f, 0x2e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -131,9 +131,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vnot.v v12,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vnot.v v8,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -148,11 +148,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x9f, 0x2e +; .byte 0x57, 0xb4, 0x8f, 0x2e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-bor.clif b/cranelift/filetests/filetests/isa/riscv64/simd-bor.clif index 052cbe682ee8..46cec44074e2 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-bor.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-bor.clif @@ -16,10 +16,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vor.vv v14,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) 
#avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vor.vv v8,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,11 +34,11 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x87, 0x95, 0x2a -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x84, 0x84, 0x2a +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -56,10 +56,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vor.vv v14,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vor.vv v8,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -74,13 +74,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x87, 0x95, 0x2a +; .byte 0x57, 0x84, 0x84, 0x2a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -98,10 +98,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vor.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vor.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,13 +116,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x87, 0x95, 0x2a +; .byte 0x57, 0x84, 0x84, 0x2a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -140,10 +140,10 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vor.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vor.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -158,13 +158,13 @@ block0(v0: 
i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x87, 0x95, 0x2a +; .byte 0x57, 0x84, 0x84, 0x2a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -184,9 +184,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vor.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vor.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -201,9 +201,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0x2a -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0x2a +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -223,9 +223,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vor.vi v12,v9,-16 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vor.vi v8,v8,-16 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -240,11 +240,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x36, 0x98, 0x2a +; .byte 0x57, 0x34, 0x88, 0x2a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -264,9 +264,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vor.vi v12,v9,15 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vor.vi v8,v8,15 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -281,11 +281,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x97, 0x2a +; .byte 0x57, 0xb4, 0x87, 0x2a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -305,9 +305,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vor.vi v12,v9,-5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vor.vi v8,v8,-5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -322,11 +322,11 @@ block0(v0: i64x2): ; block1: ; 
offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x9d, 0x2a +; .byte 0x57, 0xb4, 0x8d, 0x2a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -345,9 +345,9 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vor.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vor.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -362,9 +362,9 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0x2a -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x2a +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -383,9 +383,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vor.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vor.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -400,11 +400,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0x2a +; .byte 0x57, 0xc4, 0x85, 0x2a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -423,9 +423,9 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vor.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vor.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -440,11 +440,11 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x2a +; .byte 0x57, 0xc4, 0x85, 0x2a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -463,9 +463,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vor.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vor.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -480,11 +480,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; 
addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x2a +; .byte 0x57, 0xc4, 0x85, 0x2a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -504,11 +504,11 @@ block0(v0: f32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; fmv.w.x fa1,a1 -; vfmv.v.f v15,fa1 #avl=4, #vtype=(e32, m1, ta, ma) -; vor.vv v15,v9,v15 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; fmv.w.x fa0,a1 +; vfmv.v.f v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vor.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -523,13 +523,13 @@ block0(v0: f32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; fmv.w.x fa1, a1 +; .byte 0x07, 0x84, 0x0f, 0x02 +; fmv.w.x fa0, a1 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xd7, 0x05, 0x5e -; .byte 0xd7, 0x87, 0x97, 0x2a +; .byte 0xd7, 0x54, 0x05, 0x5e +; .byte 0x57, 0x84, 0x84, 0x2a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -549,11 +549,11 @@ block0(v0: f64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; fmv.d.x fa1,a1 -; vfmv.v.f v15,fa1 #avl=2, #vtype=(e64, m1, ta, ma) -; vor.vv v15,v9,v15 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; fmv.d.x fa0,a1 +; vfmv.v.f v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vor.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -568,13 +568,13 @@ block0(v0: f64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; fmv.d.x fa1, a1 +; .byte 0x07, 0x84, 0x0f, 0x02 +; fmv.d.x fa0, a1 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xd7, 0x05, 0x5e -; .byte 0xd7, 0x87, 0x97, 0x2a +; .byte 0xd7, 0x54, 0x05, 0x5e +; .byte 0x57, 0x84, 0x84, 0x2a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -593,9 +593,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vor.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vor.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -610,9 +610,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0x2a -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0x2a +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-bxor.clif b/cranelift/filetests/filetests/isa/riscv64/simd-bxor.clif index 
def8a36b71ab..258f4b5c4cd0 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-bxor.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-bxor.clif @@ -16,10 +16,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vxor.vv v14,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vxor.vv v8,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,11 +34,11 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x87, 0x95, 0x2e -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x84, 0x84, 0x2e +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -56,10 +56,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vxor.vv v14,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vxor.vv v8,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -74,13 +74,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x87, 0x95, 0x2e +; .byte 0x57, 0x84, 0x84, 0x2e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -98,10 +98,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vxor.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vxor.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,13 +116,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x87, 0x95, 0x2e +; .byte 0x57, 0x84, 0x84, 0x2e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -140,10 +140,10 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) 
#avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vxor.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vxor.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -158,13 +158,13 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x87, 0x95, 0x2e +; .byte 0x57, 0x84, 0x84, 0x2e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -184,9 +184,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vxor.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vxor.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -201,9 +201,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0x2e -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0x2e +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -223,9 +223,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vxor.vi v12,v9,-16 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vxor.vi v8,v8,-16 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -240,11 +240,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x36, 0x98, 0x2e +; .byte 0x57, 0x34, 0x88, 0x2e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -264,9 +264,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vxor.vi v12,v9,15 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vxor.vi v8,v8,15 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -281,11 +281,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x97, 0x2e +; .byte 0x57, 0xb4, 0x87, 0x2e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi 
sp, sp, 0x10 @@ -305,9 +305,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vxor.vi v12,v9,-5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vxor.vi v8,v8,-5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -322,11 +322,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x9d, 0x2e +; .byte 0x57, 0xb4, 0x8d, 0x2e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -345,9 +345,9 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vxor.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vxor.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -362,9 +362,9 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0x2e -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x2e +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -383,9 +383,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vxor.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vxor.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -400,11 +400,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0x2e +; .byte 0x57, 0xc4, 0x85, 0x2e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -423,9 +423,9 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vxor.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vxor.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -440,11 +440,11 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x2e +; .byte 0x57, 0xc4, 0x85, 0x2e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -463,9 +463,9 @@ block0(v0: i64x2, v1: 
i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vxor.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vxor.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -480,11 +480,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x2e +; .byte 0x57, 0xc4, 0x85, 0x2e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -504,11 +504,11 @@ block0(v0: f32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; fmv.w.x fa1,a1 -; vfmv.v.f v15,fa1 #avl=4, #vtype=(e32, m1, ta, ma) -; vxor.vv v15,v9,v15 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; fmv.w.x fa0,a1 +; vfmv.v.f v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vxor.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -523,13 +523,13 @@ block0(v0: f32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; fmv.w.x fa1, a1 +; .byte 0x07, 0x84, 0x0f, 0x02 +; fmv.w.x fa0, a1 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xd7, 0x05, 0x5e -; .byte 0xd7, 0x87, 0x97, 0x2e +; .byte 0xd7, 0x54, 0x05, 0x5e +; .byte 0x57, 0x84, 0x84, 0x2e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -549,11 +549,11 @@ block0(v0: f64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; fmv.d.x fa1,a1 -; vfmv.v.f v15,fa1 #avl=2, #vtype=(e64, m1, ta, ma) -; vxor.vv v15,v9,v15 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; fmv.d.x fa0,a1 +; vfmv.v.f v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vxor.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -568,13 +568,13 @@ block0(v0: f64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; fmv.d.x fa1, a1 +; .byte 0x07, 0x84, 0x0f, 0x02 +; fmv.d.x fa0, a1 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xd7, 0x05, 0x5e -; .byte 0xd7, 0x87, 0x97, 0x2e +; .byte 0xd7, 0x54, 0x05, 0x5e +; .byte 0x57, 0x84, 0x84, 0x2e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-ceil.clif b/cranelift/filetests/filetests/isa/riscv64/simd-ceil.clif index 423bd41c625f..d3ea699750b4 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-ceil.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-ceil.clif @@ -16,20 +16,20 @@ block0(v0: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, 
#vtype=(e8, m1, ta, ma) -; vfabs.v v12,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfabs.v v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; lui a1,307200 -; fmv.w.x fa2,a1 -; vmflt.vf v0,v12,fa2 #avl=4, #vtype=(e32, m1, ta, ma) -; fsrmi a1,3 -; vfcvt.x.f.v v14,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; fsrm a1 -; vfcvt.f.x.v v10,v14 #avl=4, #vtype=(e32, m1, ta, ma) -; vfsgnj.vv v11,v10,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; fmv.w.x fa3,zero -; vfadd.vf v15,v9,fa3 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vvm v9,v15,v11,v0.t #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; fmv.w.x fa0,a1 +; vmflt.vf v0,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; fsrmi a2,3 +; vfcvt.x.f.v v14,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; fsrm a2 +; vfcvt.f.x.v v9,v14 #avl=4, #vtype=(e32, m1, ta, ma) +; vfsgnj.vv v9,v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; fmv.w.x fa0,zero +; vfadd.vf v10,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vvm v8,v10,v9,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -44,22 +44,22 @@ block0(v0: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x96, 0x94, 0x2a +; .byte 0xd7, 0x14, 0x84, 0x2a ; lui a1, 0x4b000 -; fmv.w.x fa2, a1 -; .byte 0x57, 0x50, 0xc6, 0x6e -; fsrmi a1, 3 -; .byte 0x57, 0x97, 0x90, 0x4a -; fsrm a1 -; .byte 0x57, 0x95, 0xe1, 0x4a -; .byte 0xd7, 0x95, 0xa4, 0x22 -; fmv.w.x fa3, zero -; .byte 0xd7, 0xd7, 0x96, 0x02 -; .byte 0xd7, 0x84, 0xf5, 0x5c +; fmv.w.x fa0, a1 +; .byte 0x57, 0x50, 0x95, 0x6e +; fsrmi a2, 3 +; .byte 0x57, 0x97, 0x80, 0x4a +; fsrm a2 +; .byte 0xd7, 0x94, 0xe1, 0x4a +; .byte 0xd7, 0x14, 0x94, 0x22 +; fmv.w.x fa0, zero +; .byte 0x57, 0x55, 0x85, 0x02 +; .byte 0x57, 0x84, 0xa4, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x04, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -77,21 +77,21 @@ block0(v0: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfabs.v v12,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfabs.v v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; lui a1,1075 -; slli a2,a1,40 -; fmv.d.x fa4,a2 -; vmflt.vf v0,v12,fa4 #avl=2, #vtype=(e64, m1, ta, ma) -; fsrmi a2,3 -; vfcvt.x.f.v v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; fsrm a2 -; vfcvt.f.x.v v11,v8 #avl=2, #vtype=(e64, m1, ta, ma) -; vfsgnj.vv v13,v11,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; fmv.d.x fa5,zero -; vfadd.vf v9,v9,fa5 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vvm v11,v9,v13,v0.t #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; slli a1,a1,40 +; fmv.d.x fa0,a1 +; vmflt.vf v0,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; fsrmi a4,3 +; vfcvt.x.f.v v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; fsrm a4 +; vfcvt.f.x.v v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vfsgnj.vv v9,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; fmv.d.x fa0,zero +; vfadd.vf v10,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vvm v8,v10,v9,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -106,23 +106,23 @@ block0(v0: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 
0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x96, 0x94, 0x2a +; .byte 0xd7, 0x14, 0x84, 0x2a ; lui a1, 0x433 -; slli a2, a1, 0x28 -; fmv.d.x fa4, a2 -; .byte 0x57, 0x50, 0xc7, 0x6e -; fsrmi a2, 3 -; .byte 0x57, 0x94, 0x90, 0x4a -; fsrm a2 -; .byte 0xd7, 0x95, 0x81, 0x4a -; .byte 0xd7, 0x96, 0xb4, 0x22 -; fmv.d.x fa5, zero -; .byte 0xd7, 0xd4, 0x97, 0x02 -; .byte 0xd7, 0x85, 0x96, 0x5c +; slli a1, a1, 0x28 +; fmv.d.x fa0, a1 +; .byte 0x57, 0x50, 0x95, 0x6e +; fsrmi a4, 3 +; .byte 0xd7, 0x94, 0x80, 0x4a +; fsrm a4 +; .byte 0xd7, 0x94, 0x91, 0x4a +; .byte 0xd7, 0x14, 0x94, 0x22 +; fmv.d.x fa0, zero +; .byte 0x57, 0x55, 0x85, 0x02 +; .byte 0x57, 0x84, 0xa4, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-extractlane.clif b/cranelift/filetests/filetests/isa/riscv64/simd-extractlane.clif index 00149623c600..75a20c7671e1 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-extractlane.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-extractlane.clif @@ -231,8 +231,8 @@ block0(v0: i8x16): ; mv fp,sp ; block0: ; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v10,v8,1 #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.x.s a0,v10 #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v8,v8,1 #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.x.s a0,v8 #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -248,8 +248,8 @@ block0(v0: i8x16): ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 ; .byte 0x07, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb5, 0x80, 0x3e -; .byte 0x57, 0x25, 0xa0, 0x42 +; .byte 0x57, 0xb4, 0x80, 0x3e +; .byte 0x57, 0x25, 0x80, 0x42 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -268,8 +268,8 @@ block0(v0: i16x8): ; mv fp,sp ; block0: ; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v10,v8,1 #avl=8, #vtype=(e16, m1, ta, ma) -; vmv.x.s a0,v10 #avl=8, #vtype=(e16, m1, ta, ma) +; vslidedown.vi v8,v8,1 #avl=8, #vtype=(e16, m1, ta, ma) +; vmv.x.s a0,v8 #avl=8, #vtype=(e16, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -286,8 +286,8 @@ block0(v0: i16x8): ; addi t6, sp, 0x10 ; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xb5, 0x80, 0x3e -; .byte 0x57, 0x25, 0xa0, 0x42 +; .byte 0x57, 0xb4, 0x80, 0x3e +; .byte 0x57, 0x25, 0x80, 0x42 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -306,8 +306,8 @@ block0(v0: i32x4): ; mv fp,sp ; block0: ; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v10,v8,1 #avl=4, #vtype=(e32, m1, ta, ma) -; vmv.x.s a0,v10 #avl=4, #vtype=(e32, m1, ta, ma) +; vslidedown.vi v8,v8,1 #avl=4, #vtype=(e32, m1, ta, ma) +; vmv.x.s a0,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -324,8 +324,8 @@ block0(v0: i32x4): ; addi t6, sp, 0x10 ; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb5, 0x80, 0x3e -; .byte 0x57, 0x25, 0xa0, 0x42 +; .byte 0x57, 0xb4, 0x80, 0x3e +; .byte 0x57, 0x25, 0x80, 0x42 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -344,8 +344,8 @@ block0(v0: i64x2): ; mv fp,sp ; block0: ; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v10,v8,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.x.s a0,v10 #avl=2, #vtype=(e64, m1, ta, ma) +; vslidedown.vi v8,v8,1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.x.s a0,v8 #avl=2, 
#vtype=(e64, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -362,8 +362,8 @@ block0(v0: i64x2): ; addi t6, sp, 0x10 ; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb5, 0x80, 0x3e -; .byte 0x57, 0x25, 0xa0, 0x42 +; .byte 0x57, 0xb4, 0x80, 0x3e +; .byte 0x57, 0x25, 0x80, 0x42 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -382,8 +382,8 @@ block0(v0: f32x4): ; mv fp,sp ; block0: ; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v10,v8,1 #avl=4, #vtype=(e32, m1, ta, ma) -; vfmv.f.s fa0,v10 #avl=4, #vtype=(e32, m1, ta, ma) +; vslidedown.vi v8,v8,1 #avl=4, #vtype=(e32, m1, ta, ma) +; vfmv.f.s fa0,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -400,8 +400,8 @@ block0(v0: f32x4): ; addi t6, sp, 0x10 ; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb5, 0x80, 0x3e -; .byte 0x57, 0x15, 0xa0, 0x42 +; .byte 0x57, 0xb4, 0x80, 0x3e +; .byte 0x57, 0x15, 0x80, 0x42 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -420,8 +420,8 @@ block0(v0: f64x2): ; mv fp,sp ; block0: ; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v10,v8,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vfmv.f.s fa0,v10 #avl=2, #vtype=(e64, m1, ta, ma) +; vslidedown.vi v8,v8,1 #avl=2, #vtype=(e64, m1, ta, ma) +; vfmv.f.s fa0,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -438,8 +438,8 @@ block0(v0: f64x2): ; addi t6, sp, 0x10 ; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb5, 0x80, 0x3e -; .byte 0x57, 0x15, 0xa0, 0x42 +; .byte 0x57, 0xb4, 0x80, 0x3e +; .byte 0x57, 0x15, 0x80, 0x42 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fabs.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fabs.clif index 0b599c9fd9df..5abe3b66ae3f 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fabs.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fabs.clif @@ -16,9 +16,9 @@ block0(v0: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfabs.v v12,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfabs.v v8,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -33,11 +33,11 @@ block0(v0: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x96, 0x94, 0x2a +; .byte 0x57, 0x14, 0x84, 0x2a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -55,9 +55,9 @@ block0(v0: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfabs.v v12,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfabs.v v8,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -72,11 +72,11 @@ block0(v0: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 
0x81, 0xcd -; .byte 0x57, 0x96, 0x94, 0x2a +; .byte 0x57, 0x14, 0x84, 0x2a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fadd.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fadd.clif index 736ac97bcc73..80e44c33d024 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fadd.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fadd.clif @@ -16,10 +16,10 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfadd.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfadd.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,13 +34,13 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x97, 0x95, 0x02 +; .byte 0x57, 0x94, 0x84, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -59,9 +59,9 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfadd.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfadd.vf v8,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -76,11 +76,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x02 +; .byte 0x57, 0x54, 0x85, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -99,9 +99,9 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfadd.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfadd.vf v8,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,11 +116,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x02 +; .byte 0x57, 0x54, 0x85, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -138,10 +138,10 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, 
#vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfadd.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfadd.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -156,13 +156,13 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x97, 0x95, 0x02 +; .byte 0x57, 0x94, 0x84, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -181,9 +181,9 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfadd.vf v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfadd.vf v8,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -198,11 +198,11 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x02 +; .byte 0x57, 0x54, 0x85, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -221,9 +221,9 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfadd.vf v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfadd.vf v8,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -238,11 +238,11 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x02 +; .byte 0x57, 0x54, 0x85, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-eq.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-eq.clif index 3a0dcdfc29a3..3e5a755c5b1c 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-eq.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-eq.clif @@ -15,9 +15,9 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfeq.vv v0,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfeq.vv v0,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) ; 
vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -35,11 +35,11 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x90, 0x95, 0x62 +; .byte 0x57, 0x90, 0x84, 0x62 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -62,10 +62,10 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfeq.vf v0,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmv.v.i v15,0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfeq.vf v0,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -81,11 +81,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x50, 0x95, 0x62 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x62 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -106,10 +106,10 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfeq.vf v0,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmv.v.i v15,0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfeq.vf v0,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -125,11 +125,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x50, 0x95, 0x62 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x62 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -149,9 +149,9 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfeq.vv v0,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfeq.vv v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -169,11 +169,11 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 
0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x90, 0x95, 0x62 +; .byte 0x57, 0x90, 0x84, 0x62 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -196,10 +196,10 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfeq.vf v0,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfeq.vf v0,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -215,11 +215,11 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x50, 0x95, 0x62 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x62 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -240,10 +240,10 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfeq.vf v0,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfeq.vf v0,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -259,11 +259,11 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x50, 0x95, 0x62 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x62 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ge.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ge.clif index 1f179fb279b4..6fe646a17bda 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ge.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ge.clif @@ -15,9 +15,9 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfle.vv v0,v11,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfle.vv v0,v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -35,11 +35,11 @@ block0(v0: f32x4, v1: 
f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x90, 0xb4, 0x66 +; .byte 0x57, 0x10, 0x94, 0x66 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -62,10 +62,10 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfge.vf v0,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmv.v.i v15,0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfge.vf v0,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -81,11 +81,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x50, 0x95, 0x7e -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x7e +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -106,10 +106,10 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfle.vf v0,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmv.v.i v15,0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfle.vf v0,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -125,11 +125,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x50, 0x95, 0x66 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x66 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -149,9 +149,9 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfle.vv v0,v11,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfle.vv v0,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -169,11 +169,11 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 
0xcd -; .byte 0x57, 0x90, 0xb4, 0x66 +; .byte 0x57, 0x10, 0x94, 0x66 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -196,10 +196,10 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfge.vf v0,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfge.vf v0,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -215,11 +215,11 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x50, 0x95, 0x7e -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x7e +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -240,10 +240,10 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfle.vf v0,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfle.vf v0,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -259,11 +259,11 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x50, 0x95, 0x66 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x66 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-gt.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-gt.clif index bbc17fa01839..34c80692d50f 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-gt.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-gt.clif @@ -15,9 +15,9 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vv v0,v11,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmflt.vv v0,v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -35,11 +35,11 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 
0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x90, 0xb4, 0x6e +; .byte 0x57, 0x10, 0x94, 0x6e ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -62,10 +62,10 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfgt.vf v0,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmv.v.i v15,0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfgt.vf v0,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -81,11 +81,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x50, 0x95, 0x76 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x76 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -106,10 +106,10 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vf v0,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmv.v.i v15,0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmflt.vf v0,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -125,11 +125,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x50, 0x95, 0x6e -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x6e +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -149,9 +149,9 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vv v0,v11,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmflt.vv v0,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -169,11 +169,11 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x90, 0xb4, 0x6e +; .byte 0x57, 0x10, 0x94, 0x6e ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -196,10 +196,10 @@ block0(v0: f64x2, v1: 
f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfgt.vf v0,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfgt.vf v0,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -215,11 +215,11 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x50, 0x95, 0x76 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x76 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -240,10 +240,10 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vf v0,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmflt.vf v0,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -259,11 +259,11 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x50, 0x95, 0x6e -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x6e +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-le.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-le.clif index f80a2b4afc8a..0114fc7ad91d 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-le.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-le.clif @@ -15,9 +15,9 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfle.vv v0,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfle.vv v0,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -35,11 +35,11 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x90, 0x95, 0x66 +; .byte 0x57, 0x90, 0x84, 0x66 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 
0xcc @@ -62,10 +62,10 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfle.vf v0,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmv.v.i v15,0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfle.vf v0,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -81,11 +81,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x50, 0x95, 0x66 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x66 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -106,10 +106,10 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfge.vf v0,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmv.v.i v15,0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfge.vf v0,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -125,11 +125,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x50, 0x95, 0x7e -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x7e +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -149,9 +149,9 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfle.vv v0,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfle.vv v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -169,11 +169,11 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x90, 0x95, 0x66 +; .byte 0x57, 0x90, 0x84, 0x66 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -196,10 +196,10 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfle.vf v0,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, 
ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfle.vf v0,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -215,11 +215,11 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x50, 0x95, 0x66 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x66 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -240,10 +240,10 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfge.vf v0,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfge.vf v0,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -259,11 +259,11 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x50, 0x95, 0x7e -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x7e +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-lt.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-lt.clif index dada4b2130da..3a4cf19f6bea 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-lt.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-lt.clif @@ -15,9 +15,9 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vv v0,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmflt.vv v0,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -35,11 +35,11 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x90, 0x95, 0x6e +; .byte 0x57, 0x90, 0x84, 0x6e ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -62,10 +62,10 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vf v0,v9,fa0 #avl=4, #vtype=(e32, m1, ta, 
ma) -; vmv.v.i v15,0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmflt.vf v0,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -81,11 +81,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x50, 0x95, 0x6e -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x6e +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -106,10 +106,10 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfgt.vf v0,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmv.v.i v15,0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfgt.vf v0,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -125,11 +125,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x50, 0x95, 0x76 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x76 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -149,9 +149,9 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vv v0,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmflt.vv v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -169,11 +169,11 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x90, 0x95, 0x6e +; .byte 0x57, 0x90, 0x84, 0x6e ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -196,10 +196,10 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vf v0,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmflt.vf v0,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 
#avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -215,11 +215,11 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x50, 0x95, 0x6e -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x6e +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -240,10 +240,10 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfgt.vf v0,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfgt.vf v0,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -259,11 +259,11 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x50, 0x95, 0x76 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x76 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ne.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ne.clif index 03a34bb27f3e..5428a5a98b41 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ne.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ne.clif @@ -15,9 +15,9 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfne.vv v0,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfne.vv v0,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -35,11 +35,11 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x90, 0x95, 0x72 +; .byte 0x57, 0x90, 0x84, 0x72 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -62,10 +62,10 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfne.vf v0,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmv.v.i v15,0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfne.vf v0,v8,fa0 
#avl=4, #vtype=(e32, m1, ta, ma) +; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -81,11 +81,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x50, 0x95, 0x72 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x72 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -106,10 +106,10 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfne.vf v0,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmv.v.i v15,0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfne.vf v0,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -125,11 +125,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x50, 0x95, 0x72 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x72 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -149,9 +149,9 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfne.vv v0,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfne.vv v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -169,11 +169,11 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x90, 0x95, 0x72 +; .byte 0x57, 0x90, 0x84, 0x72 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -196,10 +196,10 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfne.vf v0,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfne.vf v0,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -215,11 +215,11 @@ block0(v0: 
f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x50, 0x95, 0x72 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x72 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -240,10 +240,10 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfne.vf v0,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfne.vf v0,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -259,11 +259,11 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x50, 0x95, 0x72 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0x50, 0x85, 0x72 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-one.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-one.clif index d53d958deae6..a10a30ee6b4e 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-one.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-one.clif @@ -15,11 +15,11 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vmflt.vv v8,v11,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vmor.mm v0,v14,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmflt.vv v9,v8,v10 #avl=4, #vtype=(e32, m1, ta, ma) +; vmflt.vv v8,v10,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmor.mm v0,v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v12,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v14,v12,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -37,13 +37,13 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x97, 0x95, 0x6e -; .byte 0x57, 0x94, 0xb4, 0x6e -; .byte 0x57, 0x20, 0xe4, 0x6a +; .byte 0xd7, 0x14, 0x85, 0x6e +; .byte 0x57, 0x14, 0xa4, 0x6e +; .byte 0x57, 0x20, 0x94, 0x6a ; .byte 0x57, 0x36, 0x00, 0x5e ; .byte 0x57, 0xb7, 0xcf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -67,9 +67,9 @@ block0(v0: f32x4, v1: f32): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmfgt.vf v15,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; 
vmor.mm v0,v13,v15 #avl=4, #vtype=(e32, m1, ta, ma) +; vmflt.vf v8,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmfgt.vf v9,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmor.mm v0,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v11,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v13,v11,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -89,9 +89,9 @@ block0(v0: f32x4, v1: f32): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x6e -; .byte 0xd7, 0x57, 0x95, 0x76 -; .byte 0x57, 0xa0, 0xd7, 0x6a +; .byte 0x57, 0x54, 0x95, 0x6e +; .byte 0xd7, 0x54, 0x95, 0x76 +; .byte 0x57, 0xa0, 0x84, 0x6a ; .byte 0xd7, 0x35, 0x00, 0x5e ; .byte 0xd7, 0xb6, 0xbf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -115,9 +115,9 @@ block0(v0: f32x4, v1: f32): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfgt.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmflt.vf v15,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmor.mm v0,v13,v15 #avl=4, #vtype=(e32, m1, ta, ma) +; vmfgt.vf v8,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmflt.vf v9,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmor.mm v0,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v11,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v13,v11,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -137,9 +137,9 @@ block0(v0: f32x4, v1: f32): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x76 -; .byte 0xd7, 0x57, 0x95, 0x6e -; .byte 0x57, 0xa0, 0xd7, 0x6a +; .byte 0x57, 0x54, 0x95, 0x76 +; .byte 0xd7, 0x54, 0x95, 0x6e +; .byte 0x57, 0xa0, 0x84, 0x6a ; .byte 0xd7, 0x35, 0x00, 0x5e ; .byte 0xd7, 0xb6, 0xbf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -161,11 +161,11 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vmflt.vv v8,v11,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vmor.mm v0,v14,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmflt.vv v9,v8,v10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmflt.vv v8,v10,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmor.mm v0,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v12,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v14,v12,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -183,13 +183,13 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x97, 0x95, 0x6e -; .byte 0x57, 0x94, 0xb4, 0x6e -; .byte 0x57, 0x20, 0xe4, 0x6a +; .byte 0xd7, 0x14, 0x85, 0x6e +; .byte 0x57, 0x14, 0xa4, 0x6e +; .byte 0x57, 0x20, 0x94, 0x6a ; .byte 0x57, 0x36, 0x00, 0x5e ; .byte 0x57, 0xb7, 0xcf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -213,9 +213,9 @@ block0(v0: f64x2, v1: f64): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vf v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmfgt.vf v15,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmor.mm v0,v13,v15 #avl=2, #vtype=(e64, m1, ta, 
ma) +; vmflt.vf v8,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmfgt.vf v9,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmor.mm v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v11,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v13,v11,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -235,9 +235,9 @@ block0(v0: f64x2, v1: f64): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x6e -; .byte 0xd7, 0x57, 0x95, 0x76 -; .byte 0x57, 0xa0, 0xd7, 0x6a +; .byte 0x57, 0x54, 0x95, 0x6e +; .byte 0xd7, 0x54, 0x95, 0x76 +; .byte 0x57, 0xa0, 0x84, 0x6a ; .byte 0xd7, 0x35, 0x00, 0x5e ; .byte 0xd7, 0xb6, 0xbf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -261,9 +261,9 @@ block0(v0: f64x2, v1: f64): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfgt.vf v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmflt.vf v15,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmor.mm v0,v13,v15 #avl=2, #vtype=(e64, m1, ta, ma) +; vmfgt.vf v8,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmflt.vf v9,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmor.mm v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v11,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v13,v11,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -283,9 +283,9 @@ block0(v0: f64x2, v1: f64): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x76 -; .byte 0xd7, 0x57, 0x95, 0x6e -; .byte 0x57, 0xa0, 0xd7, 0x6a +; .byte 0x57, 0x54, 0x95, 0x76 +; .byte 0xd7, 0x54, 0x95, 0x6e +; .byte 0x57, 0xa0, 0x84, 0x6a ; .byte 0xd7, 0x35, 0x00, 0x5e ; .byte 0xd7, 0xb6, 0xbf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ord.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ord.clif index 932b32afcfeb..04f85210de40 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ord.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ord.clif @@ -15,11 +15,11 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfeq.vv v14,v9,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vmfeq.vv v8,v11,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vmand.mm v0,v14,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfeq.vv v8,v8,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmfeq.vv v9,v9,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vmand.mm v0,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v12,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v14,v12,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -37,13 +37,13 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x97, 0x94, 0x62 -; .byte 0x57, 0x94, 0xb5, 0x62 -; .byte 0x57, 0x20, 0xe4, 0x66 +; .byte 0x57, 0x14, 0x84, 0x62 +; .byte 0xd7, 0x94, 0x94, 0x62 +; .byte 0x57, 0xa0, 0x84, 0x66 ; .byte 0x57, 0x36, 0x00, 0x5e ; .byte 0x57, 0xb7, 0xcf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -66,11 +66,11 @@ block0(v0: 
f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; vfmv.v.f v10,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmfeq.vv v14,v9,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vmfeq.vf v8,v10,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmand.mm v0,v14,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmfeq.vv v8,v8,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmfeq.vf v9,v10,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmand.mm v0,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v12,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v14,v12,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -88,12 +88,12 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd ; .byte 0x57, 0x55, 0x05, 0x5e -; .byte 0x57, 0x97, 0x94, 0x62 -; .byte 0x57, 0x54, 0xa5, 0x62 -; .byte 0x57, 0x20, 0xe4, 0x66 +; .byte 0x57, 0x14, 0x84, 0x62 +; .byte 0xd7, 0x54, 0xa5, 0x62 +; .byte 0x57, 0xa0, 0x84, 0x66 ; .byte 0x57, 0x36, 0x00, 0x5e ; .byte 0x57, 0xb7, 0xcf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -116,11 +116,11 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; vfmv.v.f v10,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmfeq.vf v14,v10,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmfeq.vv v8,v9,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vmand.mm v0,v14,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmfeq.vf v9,v10,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmfeq.vv v8,v8,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmand.mm v0,v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v12,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v14,v12,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -138,12 +138,12 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd ; .byte 0x57, 0x55, 0x05, 0x5e -; .byte 0x57, 0x57, 0xa5, 0x62 -; .byte 0x57, 0x94, 0x94, 0x62 -; .byte 0x57, 0x20, 0xe4, 0x66 +; .byte 0xd7, 0x54, 0xa5, 0x62 +; .byte 0x57, 0x14, 0x84, 0x62 +; .byte 0x57, 0x20, 0x94, 0x66 ; .byte 0x57, 0x36, 0x00, 0x5e ; .byte 0x57, 0xb7, 0xcf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -165,11 +165,11 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfeq.vv v14,v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vmfeq.vv v8,v11,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vmand.mm v0,v14,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfeq.vv v8,v8,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmfeq.vv v9,v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vmand.mm v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v12,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v14,v12,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -187,13 +187,13 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; 
.byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x97, 0x94, 0x62 -; .byte 0x57, 0x94, 0xb5, 0x62 -; .byte 0x57, 0x20, 0xe4, 0x66 +; .byte 0x57, 0x14, 0x84, 0x62 +; .byte 0xd7, 0x94, 0x94, 0x62 +; .byte 0x57, 0xa0, 0x84, 0x66 ; .byte 0x57, 0x36, 0x00, 0x5e ; .byte 0x57, 0xb7, 0xcf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -216,11 +216,11 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; vfmv.v.f v10,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmfeq.vv v14,v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vmfeq.vf v8,v10,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmand.mm v0,v14,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmfeq.vv v8,v8,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmfeq.vf v9,v10,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmand.mm v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v12,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v14,v12,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -238,12 +238,12 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0x55, 0x05, 0x5e -; .byte 0x57, 0x97, 0x94, 0x62 -; .byte 0x57, 0x54, 0xa5, 0x62 -; .byte 0x57, 0x20, 0xe4, 0x66 +; .byte 0x57, 0x14, 0x84, 0x62 +; .byte 0xd7, 0x54, 0xa5, 0x62 +; .byte 0x57, 0xa0, 0x84, 0x66 ; .byte 0x57, 0x36, 0x00, 0x5e ; .byte 0x57, 0xb7, 0xcf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -266,11 +266,11 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; vfmv.v.f v10,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmfeq.vf v14,v10,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmfeq.vv v8,v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vmand.mm v0,v14,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmfeq.vf v9,v10,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmfeq.vv v8,v8,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmand.mm v0,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v12,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v14,v12,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -288,12 +288,12 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0x55, 0x05, 0x5e -; .byte 0x57, 0x57, 0xa5, 0x62 -; .byte 0x57, 0x94, 0x94, 0x62 -; .byte 0x57, 0x20, 0xe4, 0x66 +; .byte 0xd7, 0x54, 0xa5, 0x62 +; .byte 0x57, 0x14, 0x84, 0x62 +; .byte 0x57, 0x20, 0x94, 0x66 ; .byte 0x57, 0x36, 0x00, 0x5e ; .byte 0x57, 0xb7, 0xcf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ueq.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ueq.clif index 576edcc52869..d9c9642134bc 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ueq.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ueq.clif @@ -15,11 +15,11 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vv 
v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vmflt.vv v8,v11,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vmnor.mm v0,v14,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmflt.vv v9,v8,v10 #avl=4, #vtype=(e32, m1, ta, ma) +; vmflt.vv v8,v10,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmnor.mm v0,v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v12,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v14,v12,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -37,13 +37,13 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x97, 0x95, 0x6e -; .byte 0x57, 0x94, 0xb4, 0x6e -; .byte 0x57, 0x20, 0xe4, 0x7a +; .byte 0xd7, 0x14, 0x85, 0x6e +; .byte 0x57, 0x14, 0xa4, 0x6e +; .byte 0x57, 0x20, 0x94, 0x7a ; .byte 0x57, 0x36, 0x00, 0x5e ; .byte 0x57, 0xb7, 0xcf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -67,9 +67,9 @@ block0(v0: f32x4, v1: f32): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmfgt.vf v15,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmnor.mm v0,v13,v15 #avl=4, #vtype=(e32, m1, ta, ma) +; vmflt.vf v8,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmfgt.vf v9,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmnor.mm v0,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v11,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v13,v11,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -89,9 +89,9 @@ block0(v0: f32x4, v1: f32): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x6e -; .byte 0xd7, 0x57, 0x95, 0x76 -; .byte 0x57, 0xa0, 0xd7, 0x7a +; .byte 0x57, 0x54, 0x95, 0x6e +; .byte 0xd7, 0x54, 0x95, 0x76 +; .byte 0x57, 0xa0, 0x84, 0x7a ; .byte 0xd7, 0x35, 0x00, 0x5e ; .byte 0xd7, 0xb6, 0xbf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -115,9 +115,9 @@ block0(v0: f32x4, v1: f32): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfgt.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmflt.vf v15,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmnor.mm v0,v13,v15 #avl=4, #vtype=(e32, m1, ta, ma) +; vmfgt.vf v8,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmflt.vf v9,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmnor.mm v0,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v11,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v13,v11,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -137,9 +137,9 @@ block0(v0: f32x4, v1: f32): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x76 -; .byte 0xd7, 0x57, 0x95, 0x6e -; .byte 0x57, 0xa0, 0xd7, 0x7a +; .byte 0x57, 0x54, 0x95, 0x76 +; .byte 0xd7, 0x54, 0x95, 0x6e +; .byte 0x57, 0xa0, 0x84, 0x7a ; .byte 0xd7, 0x35, 0x00, 0x5e ; .byte 0xd7, 0xb6, 0xbf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -161,11 +161,11 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; 
vmflt.vv v8,v11,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vmnor.mm v0,v14,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmflt.vv v9,v8,v10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmflt.vv v8,v10,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmnor.mm v0,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v12,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v14,v12,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -183,13 +183,13 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x97, 0x95, 0x6e -; .byte 0x57, 0x94, 0xb4, 0x6e -; .byte 0x57, 0x20, 0xe4, 0x7a +; .byte 0xd7, 0x14, 0x85, 0x6e +; .byte 0x57, 0x14, 0xa4, 0x6e +; .byte 0x57, 0x20, 0x94, 0x7a ; .byte 0x57, 0x36, 0x00, 0x5e ; .byte 0x57, 0xb7, 0xcf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -213,9 +213,9 @@ block0(v0: f64x2, v1: f64): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vf v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmfgt.vf v15,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmnor.mm v0,v13,v15 #avl=2, #vtype=(e64, m1, ta, ma) +; vmflt.vf v8,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmfgt.vf v9,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmnor.mm v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v11,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v13,v11,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -235,9 +235,9 @@ block0(v0: f64x2, v1: f64): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x6e -; .byte 0xd7, 0x57, 0x95, 0x76 -; .byte 0x57, 0xa0, 0xd7, 0x7a +; .byte 0x57, 0x54, 0x95, 0x6e +; .byte 0xd7, 0x54, 0x95, 0x76 +; .byte 0x57, 0xa0, 0x84, 0x7a ; .byte 0xd7, 0x35, 0x00, 0x5e ; .byte 0xd7, 0xb6, 0xbf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -261,9 +261,9 @@ block0(v0: f64x2, v1: f64): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfgt.vf v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmflt.vf v15,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmnor.mm v0,v13,v15 #avl=2, #vtype=(e64, m1, ta, ma) +; vmfgt.vf v8,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmflt.vf v9,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmnor.mm v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v11,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v13,v11,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -283,9 +283,9 @@ block0(v0: f64x2, v1: f64): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x76 -; .byte 0xd7, 0x57, 0x95, 0x6e -; .byte 0x57, 0xa0, 0xd7, 0x7a +; .byte 0x57, 0x54, 0x95, 0x76 +; .byte 0xd7, 0x54, 0x95, 0x6e +; .byte 0x57, 0xa0, 0x84, 0x7a ; .byte 0xd7, 0x35, 0x00, 0x5e ; .byte 0xd7, 0xb6, 0xbf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-uge.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-uge.clif index e0cc47561a55..2629eb23e093 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-uge.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-uge.clif @@ 
-15,10 +15,10 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vmnot.m v0,v14 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmflt.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vmnot.m v0,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v10,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v12,v10,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -36,12 +36,12 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x97, 0x95, 0x6e -; .byte 0x57, 0x20, 0xe7, 0x76 +; .byte 0x57, 0x94, 0x84, 0x6e +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0x57, 0x35, 0x00, 0x5e ; .byte 0x57, 0xb6, 0xaf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -64,9 +64,9 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmnot.m v0,v13 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmflt.vf v8,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmnot.m v0,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v9,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v11,v9,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -84,10 +84,10 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x6e -; .byte 0x57, 0xa0, 0xd6, 0x76 +; .byte 0x57, 0x54, 0x85, 0x6e +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0xd7, 0x34, 0x00, 0x5e ; .byte 0xd7, 0xb5, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -110,9 +110,9 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfgt.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmnot.m v0,v13 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfgt.vf v8,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmnot.m v0,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v9,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v11,v9,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -130,10 +130,10 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x76 -; .byte 0x57, 0xa0, 0xd6, 0x76 +; .byte 0x57, 0x54, 0x85, 0x76 +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0xd7, 0x34, 0x00, 0x5e ; .byte 0xd7, 0xb5, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -155,10 +155,10 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, 
ma) -; vmnot.m v0,v14 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmflt.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vmnot.m v0,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v10,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v12,v10,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -176,12 +176,12 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x97, 0x95, 0x6e -; .byte 0x57, 0x20, 0xe7, 0x76 +; .byte 0x57, 0x94, 0x84, 0x6e +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0x57, 0x35, 0x00, 0x5e ; .byte 0x57, 0xb6, 0xaf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -204,9 +204,9 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vf v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmnot.m v0,v13 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmflt.vf v8,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmnot.m v0,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v9,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v11,v9,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -224,10 +224,10 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x6e -; .byte 0x57, 0xa0, 0xd6, 0x76 +; .byte 0x57, 0x54, 0x85, 0x6e +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0xd7, 0x34, 0x00, 0x5e ; .byte 0xd7, 0xb5, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -250,9 +250,9 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfgt.vf v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmnot.m v0,v13 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfgt.vf v8,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmnot.m v0,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v9,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v11,v9,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -270,10 +270,10 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x76 -; .byte 0x57, 0xa0, 0xd6, 0x76 +; .byte 0x57, 0x54, 0x85, 0x76 +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0xd7, 0x34, 0x00, 0x5e ; .byte 0xd7, 0xb5, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ugt.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ugt.clif index 743938b913cd..5452f7f45fa2 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ugt.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ugt.clif @@ -15,10 +15,10 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; 
vmfle.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vmnot.m v0,v14 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfle.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vmnot.m v0,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v10,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v12,v10,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -36,12 +36,12 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x97, 0x95, 0x66 -; .byte 0x57, 0x20, 0xe7, 0x76 +; .byte 0x57, 0x94, 0x84, 0x66 +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0x57, 0x35, 0x00, 0x5e ; .byte 0x57, 0xb6, 0xaf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -64,9 +64,9 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfle.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmnot.m v0,v13 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfle.vf v8,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmnot.m v0,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v9,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v11,v9,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -84,10 +84,10 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x66 -; .byte 0x57, 0xa0, 0xd6, 0x76 +; .byte 0x57, 0x54, 0x85, 0x66 +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0xd7, 0x34, 0x00, 0x5e ; .byte 0xd7, 0xb5, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -110,9 +110,9 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfge.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmnot.m v0,v13 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfge.vf v8,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmnot.m v0,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v9,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v11,v9,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -130,10 +130,10 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x7e -; .byte 0x57, 0xa0, 0xd6, 0x76 +; .byte 0x57, 0x54, 0x85, 0x7e +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0xd7, 0x34, 0x00, 0x5e ; .byte 0xd7, 0xb5, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -155,10 +155,10 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfle.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vmnot.m v0,v14 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfle.vv v8,v8,v9 #avl=2, 
#vtype=(e64, m1, ta, ma) +; vmnot.m v0,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v10,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v12,v10,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -176,12 +176,12 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x97, 0x95, 0x66 -; .byte 0x57, 0x20, 0xe7, 0x76 +; .byte 0x57, 0x94, 0x84, 0x66 +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0x57, 0x35, 0x00, 0x5e ; .byte 0x57, 0xb6, 0xaf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -204,9 +204,9 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfle.vf v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmnot.m v0,v13 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfle.vf v8,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmnot.m v0,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v9,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v11,v9,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -224,10 +224,10 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x66 -; .byte 0x57, 0xa0, 0xd6, 0x76 +; .byte 0x57, 0x54, 0x85, 0x66 +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0xd7, 0x34, 0x00, 0x5e ; .byte 0xd7, 0xb5, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -250,9 +250,9 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfge.vf v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmnot.m v0,v13 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfge.vf v8,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmnot.m v0,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v9,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v11,v9,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -270,10 +270,10 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x7e -; .byte 0x57, 0xa0, 0xd6, 0x76 +; .byte 0x57, 0x54, 0x85, 0x7e +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0xd7, 0x34, 0x00, 0x5e ; .byte 0xd7, 0xb5, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ule.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ule.clif index ceb4c5f1b6b9..337765e14209 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ule.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ule.clif @@ -15,10 +15,10 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vv v14,v11,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vmnot.m v0,v14 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, 
#vtype=(e8, m1, ta, ma) +; vmflt.vv v8,v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmnot.m v0,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v10,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v12,v10,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -36,12 +36,12 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x97, 0xb4, 0x6e -; .byte 0x57, 0x20, 0xe7, 0x76 +; .byte 0x57, 0x14, 0x94, 0x6e +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0x57, 0x35, 0x00, 0x5e ; .byte 0x57, 0xb6, 0xaf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -64,9 +64,9 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfgt.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmnot.m v0,v13 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfgt.vf v8,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmnot.m v0,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v9,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v11,v9,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -84,10 +84,10 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x76 -; .byte 0x57, 0xa0, 0xd6, 0x76 +; .byte 0x57, 0x54, 0x85, 0x76 +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0xd7, 0x34, 0x00, 0x5e ; .byte 0xd7, 0xb5, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -110,9 +110,9 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmnot.m v0,v13 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmflt.vf v8,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmnot.m v0,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v9,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v11,v9,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -130,10 +130,10 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x6e -; .byte 0x57, 0xa0, 0xd6, 0x76 +; .byte 0x57, 0x54, 0x85, 0x6e +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0xd7, 0x34, 0x00, 0x5e ; .byte 0xd7, 0xb5, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -155,10 +155,10 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vv v14,v11,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vmnot.m v0,v14 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmflt.vv v8,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmnot.m v0,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v10,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v12,v10,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v12,0(a0) 
#avl=16, #vtype=(e8, m1, ta, ma) @@ -176,12 +176,12 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x97, 0xb4, 0x6e -; .byte 0x57, 0x20, 0xe7, 0x76 +; .byte 0x57, 0x14, 0x94, 0x6e +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0x57, 0x35, 0x00, 0x5e ; .byte 0x57, 0xb6, 0xaf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -204,9 +204,9 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfgt.vf v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmnot.m v0,v13 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfgt.vf v8,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmnot.m v0,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v9,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v11,v9,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -224,10 +224,10 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x76 -; .byte 0x57, 0xa0, 0xd6, 0x76 +; .byte 0x57, 0x54, 0x85, 0x76 +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0xd7, 0x34, 0x00, 0x5e ; .byte 0xd7, 0xb5, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -250,9 +250,9 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmflt.vf v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmnot.m v0,v13 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmflt.vf v8,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmnot.m v0,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v9,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v11,v9,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -270,10 +270,10 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x6e -; .byte 0x57, 0xa0, 0xd6, 0x76 +; .byte 0x57, 0x54, 0x85, 0x6e +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0xd7, 0x34, 0x00, 0x5e ; .byte 0xd7, 0xb5, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ult.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ult.clif index 8afe69c8a644..2ab480fd8adf 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ult.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-ult.clif @@ -15,10 +15,10 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfle.vv v14,v11,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vmnot.m v0,v14 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfle.vv v8,v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmnot.m v0,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v10,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v12,v10,-1,v0.t 
#avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -36,12 +36,12 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x97, 0xb4, 0x66 -; .byte 0x57, 0x20, 0xe7, 0x76 +; .byte 0x57, 0x14, 0x94, 0x66 +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0x57, 0x35, 0x00, 0x5e ; .byte 0x57, 0xb6, 0xaf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -64,9 +64,9 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfge.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmnot.m v0,v13 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfge.vf v8,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmnot.m v0,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v9,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v11,v9,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -84,10 +84,10 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x7e -; .byte 0x57, 0xa0, 0xd6, 0x76 +; .byte 0x57, 0x54, 0x85, 0x7e +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0xd7, 0x34, 0x00, 0x5e ; .byte 0xd7, 0xb5, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -110,9 +110,9 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfle.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmnot.m v0,v13 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfle.vf v8,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmnot.m v0,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v9,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v11,v9,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -130,10 +130,10 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x66 -; .byte 0x57, 0xa0, 0xd6, 0x76 +; .byte 0x57, 0x54, 0x85, 0x66 +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0xd7, 0x34, 0x00, 0x5e ; .byte 0xd7, 0xb5, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -155,10 +155,10 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfle.vv v14,v11,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vmnot.m v0,v14 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfle.vv v8,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmnot.m v0,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v10,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v12,v10,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -176,12 +176,12 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 
0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x97, 0xb4, 0x66 -; .byte 0x57, 0x20, 0xe7, 0x76 +; .byte 0x57, 0x14, 0x94, 0x66 +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0x57, 0x35, 0x00, 0x5e ; .byte 0x57, 0xb6, 0xaf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -204,9 +204,9 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfge.vf v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmnot.m v0,v13 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfge.vf v8,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmnot.m v0,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v9,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v11,v9,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -224,10 +224,10 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x7e -; .byte 0x57, 0xa0, 0xd6, 0x76 +; .byte 0x57, 0x54, 0x85, 0x7e +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0xd7, 0x34, 0x00, 0x5e ; .byte 0xd7, 0xb5, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -250,9 +250,9 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfle.vf v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmnot.m v0,v13 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfle.vf v8,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmnot.m v0,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v9,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v11,v9,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -270,10 +270,10 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x66 -; .byte 0x57, 0xa0, 0xd6, 0x76 +; .byte 0x57, 0x54, 0x85, 0x66 +; .byte 0x57, 0x20, 0x84, 0x76 ; .byte 0xd7, 0x34, 0x00, 0x5e ; .byte 0xd7, 0xb5, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-uno.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-uno.clif index 940d73c06412..4623348a90a4 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-uno.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fcmp-uno.clif @@ -15,11 +15,11 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfne.vv v14,v9,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vmfne.vv v8,v11,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vmor.mm v0,v14,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfne.vv v8,v8,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmfne.vv v9,v9,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vmor.mm v0,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v12,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v14,v12,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -37,13 
+37,13 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x97, 0x94, 0x72 -; .byte 0x57, 0x94, 0xb5, 0x72 -; .byte 0x57, 0x20, 0xe4, 0x6a +; .byte 0x57, 0x14, 0x84, 0x72 +; .byte 0xd7, 0x94, 0x94, 0x72 +; .byte 0x57, 0xa0, 0x84, 0x6a ; .byte 0x57, 0x36, 0x00, 0x5e ; .byte 0x57, 0xb7, 0xcf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -66,11 +66,11 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; vfmv.v.f v10,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmfne.vv v14,v9,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vmfne.vf v8,v10,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmor.mm v0,v14,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmfne.vv v8,v8,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmfne.vf v9,v10,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmor.mm v0,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v12,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v14,v12,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -88,12 +88,12 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd ; .byte 0x57, 0x55, 0x05, 0x5e -; .byte 0x57, 0x97, 0x94, 0x72 -; .byte 0x57, 0x54, 0xa5, 0x72 -; .byte 0x57, 0x20, 0xe4, 0x6a +; .byte 0x57, 0x14, 0x84, 0x72 +; .byte 0xd7, 0x54, 0xa5, 0x72 +; .byte 0x57, 0xa0, 0x84, 0x6a ; .byte 0x57, 0x36, 0x00, 0x5e ; .byte 0x57, 0xb7, 0xcf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -116,11 +116,11 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; vfmv.v.f v10,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmfne.vf v14,v10,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmfne.vv v8,v9,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vmor.mm v0,v14,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmfne.vf v9,v10,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmfne.vv v8,v8,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmor.mm v0,v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v12,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v14,v12,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -138,12 +138,12 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd ; .byte 0x57, 0x55, 0x05, 0x5e -; .byte 0x57, 0x57, 0xa5, 0x72 -; .byte 0x57, 0x94, 0x94, 0x72 -; .byte 0x57, 0x20, 0xe4, 0x6a +; .byte 0xd7, 0x54, 0xa5, 0x72 +; .byte 0x57, 0x14, 0x84, 0x72 +; .byte 0x57, 0x20, 0x94, 0x6a ; .byte 0x57, 0x36, 0x00, 0x5e ; .byte 0x57, 0xb7, 0xcf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -165,11 +165,11 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfne.vv v14,v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vmfne.vv v8,v11,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vmor.mm v0,v14,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v 
v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfne.vv v8,v8,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmfne.vv v9,v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vmor.mm v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v12,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v14,v12,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -187,13 +187,13 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x97, 0x94, 0x72 -; .byte 0x57, 0x94, 0xb5, 0x72 -; .byte 0x57, 0x20, 0xe4, 0x6a +; .byte 0x57, 0x14, 0x84, 0x72 +; .byte 0xd7, 0x94, 0x94, 0x72 +; .byte 0x57, 0xa0, 0x84, 0x6a ; .byte 0x57, 0x36, 0x00, 0x5e ; .byte 0x57, 0xb7, 0xcf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -216,11 +216,11 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; vfmv.v.f v10,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmfne.vv v14,v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vmfne.vf v8,v10,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmor.mm v0,v14,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmfne.vv v8,v8,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmfne.vf v9,v10,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmor.mm v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v12,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v14,v12,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -238,12 +238,12 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0x55, 0x05, 0x5e -; .byte 0x57, 0x97, 0x94, 0x72 -; .byte 0x57, 0x54, 0xa5, 0x72 -; .byte 0x57, 0x20, 0xe4, 0x6a +; .byte 0x57, 0x14, 0x84, 0x72 +; .byte 0xd7, 0x54, 0xa5, 0x72 +; .byte 0x57, 0xa0, 0x84, 0x6a ; .byte 0x57, 0x36, 0x00, 0x5e ; .byte 0x57, 0xb7, 0xcf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -266,11 +266,11 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; vfmv.v.f v10,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmfne.vf v14,v10,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmfne.vv v8,v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vmor.mm v0,v14,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmfne.vf v9,v10,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmfne.vv v8,v8,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmor.mm v0,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v12,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v14,v12,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -288,12 +288,12 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0x55, 0x05, 0x5e -; .byte 0x57, 0x57, 0xa5, 0x72 -; .byte 0x57, 0x94, 0x94, 0x72 -; .byte 0x57, 0x20, 0xe4, 0x6a +; .byte 0xd7, 0x54, 0xa5, 0x72 +; .byte 0x57, 0x14, 0x84, 0x72 +; .byte 0x57, 0x20, 0x94, 0x6a ; .byte 0x57, 0x36, 0x00, 0x5e 
; .byte 0x57, 0xb7, 0xcf, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fcopysign.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fcopysign.clif index b113cda46d4f..43e7633fd041 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fcopysign.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fcopysign.clif @@ -16,10 +16,10 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfsgnj.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfsgnj.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,13 +34,13 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x97, 0x95, 0x22 +; .byte 0x57, 0x94, 0x84, 0x22 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -59,9 +59,9 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfsgnj.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfsgnj.vf v8,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -76,11 +76,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x22 +; .byte 0x57, 0x54, 0x85, 0x22 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -98,10 +98,10 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfsgnj.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfsgnj.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,13 +116,13 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x97, 0x95, 0x22 +; .byte 0x57, 0x94, 0x84, 0x22 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -141,9 +141,9 @@ block0(v0: f64x2, v1: 
f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfsgnj.vf v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfsgnj.vf v8,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -158,11 +158,11 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x22 +; .byte 0x57, 0x54, 0x85, 0x22 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fcvt-from-sint.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fcvt-from-sint.clif index 7c592050e42e..5364bd69ca72 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fcvt-from-sint.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fcvt-from-sint.clif @@ -15,9 +15,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfcvt.f.x.v v12,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfcvt.f.x.v v8,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -32,11 +32,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x96, 0x91, 0x4a +; .byte 0x57, 0x94, 0x81, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fcvt-from-uint.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fcvt-from-uint.clif index ee31cc1dd9ab..35c5da6a55af 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fcvt-from-uint.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fcvt-from-uint.clif @@ -15,9 +15,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfcvt.f.xu.v v12,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfcvt.f.xu.v v8,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -32,11 +32,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x16, 0x91, 0x4a +; .byte 0x57, 0x14, 0x81, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fcvt-to-sint-sat.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fcvt-to-sint-sat.clif index 20be2fbd498b..7f5b414a2766 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fcvt-to-sint-sat.clif 
+++ b/cranelift/filetests/filetests/isa/riscv64/simd-fcvt-to-sint-sat.clif @@ -15,10 +15,10 @@ block0(v0:f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfne.vv v0,v9,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vfcvt.rtz.x.f.v v14,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vim v8,v14,0,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfne.vv v0,v8,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vfcvt.rtz.x.f.v v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vim v8,v9,0,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -34,11 +34,11 @@ block0(v0:f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x90, 0x94, 0x72 -; .byte 0x57, 0x97, 0x93, 0x4a -; .byte 0x57, 0x34, 0xe0, 0x5c +; .byte 0x57, 0x10, 0x84, 0x72 +; .byte 0xd7, 0x94, 0x83, 0x4a +; .byte 0x57, 0x34, 0x90, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fcvt-to-uint-sat.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fcvt-to-uint-sat.clif index 39b6ca963ddf..8c2ed7ff0b4c 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fcvt-to-uint-sat.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fcvt-to-uint-sat.clif @@ -15,10 +15,10 @@ block0(v0:f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfne.vv v0,v9,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vfcvt.rtz.xu.f.v v14,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vim v8,v14,0,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfne.vv v0,v8,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vfcvt.rtz.xu.f.v v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vim v8,v9,0,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -34,11 +34,11 @@ block0(v0:f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x90, 0x94, 0x72 -; .byte 0x57, 0x17, 0x93, 0x4a -; .byte 0x57, 0x34, 0xe0, 0x5c +; .byte 0x57, 0x10, 0x84, 0x72 +; .byte 0xd7, 0x14, 0x83, 0x4a +; .byte 0x57, 0x34, 0x90, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fdiv.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fdiv.clif index 22813b236f76..bb3fd8a54195 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fdiv.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fdiv.clif @@ -16,10 +16,10 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfdiv.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfdiv.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,13 +34,13 @@ block0(v0: 
f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x97, 0x95, 0x82 +; .byte 0x57, 0x94, 0x84, 0x82 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -59,9 +59,9 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfdiv.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfdiv.vf v8,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -76,11 +76,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x82 +; .byte 0x57, 0x54, 0x85, 0x82 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -99,9 +99,9 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfrdiv.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfrdiv.vf v8,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,11 +116,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x86 +; .byte 0x57, 0x54, 0x85, 0x86 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -138,10 +138,10 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfdiv.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfdiv.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -156,13 +156,13 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x97, 0x95, 0x82 +; .byte 0x57, 0x94, 0x84, 0x82 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -181,9 +181,9 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfdiv.vf 
v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfdiv.vf v8,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -198,11 +198,11 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x82 +; .byte 0x57, 0x54, 0x85, 0x82 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -221,9 +221,9 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfrdiv.vf v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfrdiv.vf v8,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -238,11 +238,11 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x86 +; .byte 0x57, 0x54, 0x85, 0x86 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-floor.clif b/cranelift/filetests/filetests/isa/riscv64/simd-floor.clif index d79a3e605fb2..e194379d8341 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-floor.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-floor.clif @@ -16,20 +16,20 @@ block0(v0: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfabs.v v12,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfabs.v v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; lui a1,307200 -; fmv.w.x fa2,a1 -; vmflt.vf v0,v12,fa2 #avl=4, #vtype=(e32, m1, ta, ma) -; fsrmi a1,2 -; vfcvt.x.f.v v14,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; fsrm a1 -; vfcvt.f.x.v v10,v14 #avl=4, #vtype=(e32, m1, ta, ma) -; vfsgnj.vv v11,v10,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; fmv.w.x fa3,zero -; vfadd.vf v15,v9,fa3 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vvm v9,v15,v11,v0.t #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; fmv.w.x fa0,a1 +; vmflt.vf v0,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; fsrmi a2,2 +; vfcvt.x.f.v v14,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; fsrm a2 +; vfcvt.f.x.v v9,v14 #avl=4, #vtype=(e32, m1, ta, ma) +; vfsgnj.vv v9,v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; fmv.w.x fa0,zero +; vfadd.vf v10,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vvm v8,v10,v9,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -44,22 +44,22 @@ block0(v0: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x96, 0x94, 0x2a +; .byte 0xd7, 0x14, 0x84, 0x2a ; lui a1, 0x4b000 -; fmv.w.x fa2, a1 -; .byte 0x57, 0x50, 0xc6, 
0x6e -; fsrmi a1, 2 -; .byte 0x57, 0x97, 0x90, 0x4a -; fsrm a1 -; .byte 0x57, 0x95, 0xe1, 0x4a -; .byte 0xd7, 0x95, 0xa4, 0x22 -; fmv.w.x fa3, zero -; .byte 0xd7, 0xd7, 0x96, 0x02 -; .byte 0xd7, 0x84, 0xf5, 0x5c +; fmv.w.x fa0, a1 +; .byte 0x57, 0x50, 0x95, 0x6e +; fsrmi a2, 2 +; .byte 0x57, 0x97, 0x80, 0x4a +; fsrm a2 +; .byte 0xd7, 0x94, 0xe1, 0x4a +; .byte 0xd7, 0x14, 0x94, 0x22 +; fmv.w.x fa0, zero +; .byte 0x57, 0x55, 0x85, 0x02 +; .byte 0x57, 0x84, 0xa4, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x04, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -77,21 +77,21 @@ block0(v0: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfabs.v v12,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfabs.v v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; lui a1,1075 -; slli a2,a1,40 -; fmv.d.x fa4,a2 -; vmflt.vf v0,v12,fa4 #avl=2, #vtype=(e64, m1, ta, ma) -; fsrmi a2,2 -; vfcvt.x.f.v v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; fsrm a2 -; vfcvt.f.x.v v11,v8 #avl=2, #vtype=(e64, m1, ta, ma) -; vfsgnj.vv v13,v11,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; fmv.d.x fa5,zero -; vfadd.vf v9,v9,fa5 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vvm v11,v9,v13,v0.t #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; slli a1,a1,40 +; fmv.d.x fa0,a1 +; vmflt.vf v0,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; fsrmi a4,2 +; vfcvt.x.f.v v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; fsrm a4 +; vfcvt.f.x.v v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vfsgnj.vv v9,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; fmv.d.x fa0,zero +; vfadd.vf v10,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vvm v8,v10,v9,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -106,23 +106,23 @@ block0(v0: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x96, 0x94, 0x2a +; .byte 0xd7, 0x14, 0x84, 0x2a ; lui a1, 0x433 -; slli a2, a1, 0x28 -; fmv.d.x fa4, a2 -; .byte 0x57, 0x50, 0xc7, 0x6e -; fsrmi a2, 2 -; .byte 0x57, 0x94, 0x90, 0x4a -; fsrm a2 -; .byte 0xd7, 0x95, 0x81, 0x4a -; .byte 0xd7, 0x96, 0xb4, 0x22 -; fmv.d.x fa5, zero -; .byte 0xd7, 0xd4, 0x97, 0x02 -; .byte 0xd7, 0x85, 0x96, 0x5c +; slli a1, a1, 0x28 +; fmv.d.x fa0, a1 +; .byte 0x57, 0x50, 0x95, 0x6e +; fsrmi a4, 2 +; .byte 0xd7, 0x94, 0x80, 0x4a +; fsrm a4 +; .byte 0xd7, 0x94, 0x91, 0x4a +; .byte 0xd7, 0x14, 0x94, 0x22 +; fmv.d.x fa0, zero +; .byte 0x57, 0x55, 0x85, 0x02 +; .byte 0x57, 0x84, 0xa4, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fma.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fma.clif index 8c06f4169cd6..9b443a305236 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fma.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fma.clif @@ -15,11 +15,11 @@ block0(v0: f64x2, v1: f64x2, v2: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v15,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfmacc.vv v15,v11,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; 
vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfmacc.vv v10,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,15 +34,15 @@ block0(v0: f64x2, v1: f64x2, v2: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x30 -; .byte 0x87, 0x87, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x97, 0xb4, 0xb2 +; .byte 0x57, 0x15, 0x94, 0xb2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -61,10 +61,10 @@ block0(v0: f64, v1: f64x2, v2: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v10,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v14,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfmacc.vf v14,v10,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfmacc.vf v9,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -79,13 +79,13 @@ block0(v0: f64, v1: f64x2, v2: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x07, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x07, 0x87, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x57, 0xa5, 0xb2 +; .byte 0xd7, 0x54, 0x85, 0xb2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -104,11 +104,11 @@ block0(v0: f64x2, v1: f64x2, v2: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v15,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfmsac.vv v15,v11,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfmsac.vv v10,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -123,15 +123,15 @@ block0(v0: f64x2, v1: f64x2, v2: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x30 -; .byte 0x87, 0x87, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x97, 0xb4, 0xba +; .byte 0x57, 0x15, 0x94, 0xba ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -151,10 +151,10 @@ block0(v0: f64, v1: f64x2, v2: 
f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v10,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v14,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfmsac.vf v14,v10,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfmsac.vf v9,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -169,13 +169,13 @@ block0(v0: f64, v1: f64x2, v2: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x07, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x07, 0x87, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x57, 0xa5, 0xba +; .byte 0xd7, 0x54, 0x85, 0xba ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -195,11 +195,11 @@ block0(v0: f64x2, v1: f64x2, v2: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v15,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfnmacc.vv v15,v11,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfnmacc.vv v10,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -214,15 +214,15 @@ block0(v0: f64x2, v1: f64x2, v2: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x30 -; .byte 0x87, 0x87, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x97, 0xb4, 0xb6 +; .byte 0x57, 0x15, 0x94, 0xb6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -243,10 +243,10 @@ block0(v0: f64, v1: f64x2, v2: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v10,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v14,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfnmacc.vf v14,v10,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfnmacc.vf v9,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -261,13 +261,13 @@ block0(v0: f64, v1: f64x2, v2: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x07, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x07, 0x87, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x57, 0xa5, 0xb6 +; .byte 0xd7, 0x54, 0x85, 0xb6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 
8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -286,11 +286,11 @@ block0(v0: f64x2, v1: f64x2, v2: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v15,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfnmsac.vv v15,v11,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfnmsac.vv v10,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -305,15 +305,15 @@ block0(v0: f64x2, v1: f64x2, v2: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x30 -; .byte 0x87, 0x87, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x97, 0xb4, 0xbe +; .byte 0x57, 0x15, 0x94, 0xbe ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -333,10 +333,10 @@ block0(v0: f64, v1: f64x2, v2: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v10,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v14,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfnmsac.vf v14,v10,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfnmsac.vf v9,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -351,19 +351,18 @@ block0(v0: f64, v1: f64x2, v2: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x07, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x07, 0x87, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x57, 0xa5, 0xbe +; .byte 0xd7, 0x54, 0x85, 0xbe ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 ; ret - function %fma_splat_y_f32x4(f32x4, f32, f32x4) -> f32x4 { block0(v0: f32x4, v1: f32, v2: f32x4): v3 = splat.f32x4 v1 @@ -377,10 +376,10 @@ block0(v0: f32x4, v1: f32, v2: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v14,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfmacc.vf v14,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfmacc.vf v9,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -395,13 +394,13 @@ block0(v0: f32x4, v1: f32, v2: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x07, 0x87, 0x0f, 0x02 +; .byte 0x87, 
0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x57, 0x95, 0xb2 +; .byte 0xd7, 0x54, 0x85, 0xb2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -420,10 +419,10 @@ block0(v0: f64x2, v1: f64, v2: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v14,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfmacc.vf v14,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfmacc.vf v9,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -438,13 +437,13 @@ block0(v0: f64x2, v1: f64, v2: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x07, 0x87, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x57, 0x95, 0xb2 +; .byte 0xd7, 0x54, 0x85, 0xb2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fmax.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fmax.clif index b5191c0cda85..d2cfe9e08fa3 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fmax.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fmax.clif @@ -15,17 +15,17 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfeq.vv v14,v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vmfeq.vv v8,v11,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vmand.mm v0,v14,v8 #avl=2, #vtype=(e64, m1, ta, ma) -; lui a1,4095 -; slli a2,a1,39 -; vmv.v.x v8,a2 #avl=2, #vtype=(e64, m1, ta, ma) -; vfmax.vv v10,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vvm v12,v8,v10,v0.t #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfeq.vv v10,v8,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmfeq.vv v11,v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vmand.mm v0,v10,v11 #avl=2, #vtype=(e64, m1, ta, ma) +; lui a2,4095 +; slli a4,a2,39 +; vmv.v.x v10,a4 #avl=2, #vtype=(e64, m1, ta, ma) +; vfmax.vv v9,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vvm v8,v10,v9,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -40,20 +40,20 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x97, 0x94, 0x62 -; .byte 0x57, 0x94, 0xb5, 0x62 -; .byte 0x57, 0x20, 0xe4, 0x66 -; lui a1, 0xfff -; slli a2, a1, 0x27 -; .byte 0x57, 0x44, 0x06, 0x5e -; .byte 0x57, 0x95, 0x95, 0x1a -; .byte 0x57, 0x06, 0x85, 0x5c +; .byte 0x57, 0x15, 0x84, 0x62 +; .byte 0xd7, 0x95, 0x94, 0x62 +; .byte 0x57, 0xa0, 0xa5, 0x66 +; lui a2, 
0xfff +; slli a4, a2, 0x27 +; .byte 0x57, 0x45, 0x07, 0x5e +; .byte 0xd7, 0x94, 0x84, 0x1a +; .byte 0x57, 0x84, 0xa4, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -71,16 +71,16 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfeq.vv v14,v9,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vmfeq.vv v8,v11,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vmand.mm v0,v14,v8 #avl=4, #vtype=(e32, m1, ta, ma) -; lui a1,523264 -; vmv.v.x v14,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vfmax.vv v8,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vvm v10,v14,v8,v0.t #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfeq.vv v10,v8,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmfeq.vv v11,v9,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vmand.mm v0,v10,v11 #avl=4, #vtype=(e32, m1, ta, ma) +; lui a2,523264 +; vmv.v.x v14,a2 #avl=4, #vtype=(e32, m1, ta, ma) +; vfmax.vv v9,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vvm v8,v14,v9,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -95,19 +95,19 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x97, 0x94, 0x62 -; .byte 0x57, 0x94, 0xb5, 0x62 -; .byte 0x57, 0x20, 0xe4, 0x66 -; lui a1, 0x7fc00 -; .byte 0x57, 0xc7, 0x05, 0x5e -; .byte 0x57, 0x94, 0x95, 0x1a -; .byte 0x57, 0x05, 0xe4, 0x5c +; .byte 0x57, 0x15, 0x84, 0x62 +; .byte 0xd7, 0x95, 0x94, 0x62 +; .byte 0x57, 0xa0, 0xa5, 0x66 +; lui a2, 0x7fc00 +; .byte 0x57, 0x47, 0x06, 0x5e +; .byte 0xd7, 0x94, 0x84, 0x1a +; .byte 0x57, 0x84, 0xe4, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fmin.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fmin.clif index b76addaa2a47..d21dbeea2fe4 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fmin.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fmin.clif @@ -15,17 +15,17 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfeq.vv v14,v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vmfeq.vv v8,v11,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vmand.mm v0,v14,v8 #avl=2, #vtype=(e64, m1, ta, ma) -; lui a1,4095 -; slli a2,a1,39 -; vmv.v.x v8,a2 #avl=2, #vtype=(e64, m1, ta, ma) -; vfmin.vv v10,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vvm v12,v8,v10,v0.t #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfeq.vv v10,v8,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmfeq.vv v11,v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vmand.mm v0,v10,v11 #avl=2, #vtype=(e64, m1, ta, ma) 
+; lui a2,4095 +; slli a4,a2,39 +; vmv.v.x v10,a4 #avl=2, #vtype=(e64, m1, ta, ma) +; vfmin.vv v9,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vvm v8,v10,v9,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -40,20 +40,20 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x97, 0x94, 0x62 -; .byte 0x57, 0x94, 0xb5, 0x62 -; .byte 0x57, 0x20, 0xe4, 0x66 -; lui a1, 0xfff -; slli a2, a1, 0x27 -; .byte 0x57, 0x44, 0x06, 0x5e -; .byte 0x57, 0x95, 0x95, 0x12 -; .byte 0x57, 0x06, 0x85, 0x5c +; .byte 0x57, 0x15, 0x84, 0x62 +; .byte 0xd7, 0x95, 0x94, 0x62 +; .byte 0x57, 0xa0, 0xa5, 0x66 +; lui a2, 0xfff +; slli a4, a2, 0x27 +; .byte 0x57, 0x45, 0x07, 0x5e +; .byte 0xd7, 0x94, 0x84, 0x12 +; .byte 0x57, 0x84, 0xa4, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -71,16 +71,16 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmfeq.vv v14,v9,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vmfeq.vv v8,v11,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vmand.mm v0,v14,v8 #avl=4, #vtype=(e32, m1, ta, ma) -; lui a1,523264 -; vmv.v.x v14,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vfmin.vv v8,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vvm v10,v14,v8,v0.t #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmfeq.vv v10,v8,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmfeq.vv v11,v9,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vmand.mm v0,v10,v11 #avl=4, #vtype=(e32, m1, ta, ma) +; lui a2,523264 +; vmv.v.x v14,a2 #avl=4, #vtype=(e32, m1, ta, ma) +; vfmin.vv v9,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vvm v8,v14,v9,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -95,19 +95,19 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x97, 0x94, 0x62 -; .byte 0x57, 0x94, 0xb5, 0x62 -; .byte 0x57, 0x20, 0xe4, 0x66 -; lui a1, 0x7fc00 -; .byte 0x57, 0xc7, 0x05, 0x5e -; .byte 0x57, 0x94, 0x95, 0x12 -; .byte 0x57, 0x05, 0xe4, 0x5c +; .byte 0x57, 0x15, 0x84, 0x62 +; .byte 0xd7, 0x95, 0x94, 0x62 +; .byte 0x57, 0xa0, 0xa5, 0x66 +; lui a2, 0x7fc00 +; .byte 0x57, 0x47, 0x06, 0x5e +; .byte 0xd7, 0x94, 0x84, 0x12 +; .byte 0x57, 0x84, 0xe4, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fmul.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fmul.clif index f802bc66c729..1e344cf80428 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fmul.clif +++ 
b/cranelift/filetests/filetests/isa/riscv64/simd-fmul.clif @@ -16,10 +16,10 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfmul.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfmul.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,13 +34,13 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x97, 0x95, 0x92 +; .byte 0x57, 0x94, 0x84, 0x92 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -59,9 +59,9 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfmul.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfmul.vf v8,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -76,11 +76,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x92 +; .byte 0x57, 0x54, 0x85, 0x92 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -99,9 +99,9 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfmul.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfmul.vf v8,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,11 +116,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x92 +; .byte 0x57, 0x54, 0x85, 0x92 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -138,10 +138,10 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfmul.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfmul.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) 
; addi sp,sp,16 @@ -156,13 +156,13 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x97, 0x95, 0x92 +; .byte 0x57, 0x94, 0x84, 0x92 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -181,9 +181,9 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfmul.vf v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfmul.vf v8,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -198,11 +198,11 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x92 +; .byte 0x57, 0x54, 0x85, 0x92 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -221,9 +221,9 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfmul.vf v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfmul.vf v8,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -238,11 +238,11 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x92 +; .byte 0x57, 0x54, 0x85, 0x92 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fneg.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fneg.clif index cfa96bcfa9ba..b0f78665b6db 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fneg.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fneg.clif @@ -16,9 +16,9 @@ block0(v0: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfneg.v v12,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfneg.v v8,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -33,11 +33,11 @@ block0(v0: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x96, 0x94, 0x26 +; .byte 0x57, 0x14, 0x84, 0x26 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -55,9 +55,9 @@ block0(v0: f64x2): ; sd fp,0(sp) ; mv 
fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfneg.v v12,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfneg.v v8,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -72,11 +72,11 @@ block0(v0: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x96, 0x94, 0x26 +; .byte 0x57, 0x14, 0x84, 0x26 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fsub.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fsub.clif index 308faa980ba8..a5db5d8a6269 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fsub.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fsub.clif @@ -16,10 +16,10 @@ block0(v0: f32x4, v1: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfsub.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfsub.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,13 +34,13 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x97, 0x95, 0x0a +; .byte 0x57, 0x94, 0x84, 0x0a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -59,9 +59,9 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfsub.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfsub.vf v8,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -76,11 +76,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x0a +; .byte 0x57, 0x54, 0x85, 0x0a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -99,9 +99,9 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfrsub.vf v13,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfrsub.vf v8,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld 
ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,11 +116,11 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x9e +; .byte 0x57, 0x54, 0x85, 0x9e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -138,10 +138,10 @@ block0(v0: f64x2, v1: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfsub.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfsub.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -156,13 +156,13 @@ block0(v0: f64x2, v1: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x97, 0x95, 0x0a +; .byte 0x57, 0x94, 0x84, 0x0a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -181,9 +181,9 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfsub.vf v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfsub.vf v8,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -198,11 +198,11 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x0a +; .byte 0x57, 0x54, 0x85, 0x0a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -221,9 +221,9 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfrsub.vf v13,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfrsub.vf v8,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -238,11 +238,11 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x56, 0x95, 0x9e +; .byte 0x57, 0x54, 0x85, 0x9e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fvdemote.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fvdemote.clif index 
24daaf860d05..dfd9499dbcc3 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fvdemote.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fvdemote.clif @@ -16,10 +16,10 @@ block0(v0: f64x2): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfncvt.f.f.w v12,v9 #avl=4, #vtype=(e32, mf2, ta, ma) +; vfncvt.f.f.w v8,v9 #avl=4, #vtype=(e32, mf2, ta, ma) ; vmv.v.i v0,12 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v8,v12,0,v0.t #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmerge.vim v9,v8,0,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -36,13 +36,13 @@ block0(v0: f64x2): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x72, 0xcd -; .byte 0x57, 0x16, 0x9a, 0x4a +; .byte 0x57, 0x14, 0x9a, 0x4a ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0x30, 0x06, 0x5e ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x34, 0xc0, 0x5c +; .byte 0xd7, 0x34, 0x80, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-fvpromote-low.clif b/cranelift/filetests/filetests/isa/riscv64/simd-fvpromote-low.clif index cc23456ae37f..519ba8da625f 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-fvpromote-low.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-fvpromote-low.clif @@ -16,8 +16,8 @@ block0(v0: f32x4): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfwcvt.f.f.v v12,v9 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vfwcvt.f.f.v v8,v9 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,9 +34,9 @@ block0(v0: f32x4): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0x16, 0x96, 0x4a +; .byte 0x57, 0x14, 0x96, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-iabs.clif b/cranelift/filetests/filetests/isa/riscv64/simd-iabs.clif index d031275aa59a..5fd58b0c8a47 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-iabs.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-iabs.clif @@ -15,10 +15,10 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vneg.v v12,v9 #avl=16, #vtype=(e8, m1, ta, ma) -; vmax.vv v14,v9,v12 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vneg.v v9,v8 #avl=16, #vtype=(e8, m1, ta, ma) +; vmax.vv v8,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -33,10 +33,10 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0x46, 0x90, 0x0e -; .byte 0x57, 0x07, 0x96, 0x1e -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0xd7, 0x44, 0x80, 0x0e +; .byte 0x57, 0x84, 0x84, 0x1e +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -54,10 +54,10 @@ block0(v0: 
i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vneg.v v12,v9 #avl=8, #vtype=(e16, m1, ta, ma) -; vmax.vv v14,v9,v12 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vneg.v v9,v8 #avl=8, #vtype=(e16, m1, ta, ma) +; vmax.vv v8,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -72,12 +72,12 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x46, 0x90, 0x0e -; .byte 0x57, 0x07, 0x96, 0x1e +; .byte 0xd7, 0x44, 0x80, 0x0e +; .byte 0x57, 0x84, 0x84, 0x1e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -95,10 +95,10 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vneg.v v12,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vmax.vv v14,v9,v12 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vneg.v v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmax.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -113,12 +113,12 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x46, 0x90, 0x0e -; .byte 0x57, 0x07, 0x96, 0x1e +; .byte 0xd7, 0x44, 0x80, 0x0e +; .byte 0x57, 0x84, 0x84, 0x1e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -136,10 +136,10 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vneg.v v12,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vmax.vv v14,v9,v12 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vneg.v v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmax.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -154,12 +154,12 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x46, 0x90, 0x0e -; .byte 0x57, 0x07, 0x96, 0x1e +; .byte 0xd7, 0x44, 0x80, 0x0e +; .byte 0x57, 0x84, 0x84, 0x1e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-iadd-big.clif b/cranelift/filetests/filetests/isa/riscv64/simd-iadd-big.clif index 7489feb4f1a8..d349fada7a86 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-iadd-big.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-iadd-big.clif @@ -17,10 +17,10 @@ block0(v0:i64x4, v1:i64x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle16.v v9,-64(incoming_arg) #avl=16, #vtype=(e16, m1, ta, ma) 
-; vle16.v v11,-32(incoming_arg) #avl=16, #vtype=(e16, m1, ta, ma) -; vadd.vv v14,v9,v11 #avl=4, #vtype=(e64, m1, ta, ma) -; vse16.v v14,0(a0) #avl=16, #vtype=(e16, m1, ta, ma) +; vle16.v v8,-64(incoming_arg) #avl=16, #vtype=(e16, m1, ta, ma) +; vle16.v v9,-32(incoming_arg) #avl=16, #vtype=(e16, m1, ta, ma) +; vadd.vv v8,v8,v9 #avl=4, #vtype=(e64, m1, ta, ma) +; vse16.v v8,0(a0) #avl=16, #vtype=(e16, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -35,13 +35,13 @@ block0(v0:i64x4, v1:i64x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x88, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0xd4, 0x0f, 0x02 +; .byte 0x07, 0xd4, 0x0f, 0x02 ; addi t6, sp, 0x30 -; .byte 0x87, 0xd5, 0x0f, 0x02 +; .byte 0x87, 0xd4, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x82, 0xcd -; .byte 0x57, 0x87, 0x95, 0x02 +; .byte 0x57, 0x84, 0x84, 0x02 ; .byte 0x57, 0x70, 0x88, 0xcc -; .byte 0x27, 0x57, 0x05, 0x02 +; .byte 0x27, 0x54, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -59,10 +59,10 @@ block0(v0:i64x8, v1:i64x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle32.v v9,-128(incoming_arg) #avl=16, #vtype=(e32, m1, ta, ma) -; vle32.v v11,-64(incoming_arg) #avl=16, #vtype=(e32, m1, ta, ma) -; vadd.vv v14,v9,v11 #avl=8, #vtype=(e64, m1, ta, ma) -; vse32.v v14,0(a0) #avl=16, #vtype=(e32, m1, ta, ma) +; vle32.v v8,-128(incoming_arg) #avl=16, #vtype=(e32, m1, ta, ma) +; vle32.v v9,-64(incoming_arg) #avl=16, #vtype=(e32, m1, ta, ma) +; vadd.vv v8,v8,v9 #avl=8, #vtype=(e64, m1, ta, ma) +; vse32.v v8,0(a0) #avl=16, #vtype=(e32, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -77,13 +77,13 @@ block0(v0:i64x8, v1:i64x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcd ; addi t6, sp, 0x10 -; .byte 0x87, 0xe4, 0x0f, 0x02 +; .byte 0x07, 0xe4, 0x0f, 0x02 ; addi t6, sp, 0x50 -; .byte 0x87, 0xe5, 0x0f, 0x02 +; .byte 0x87, 0xe4, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcd -; .byte 0x57, 0x87, 0x95, 0x02 +; .byte 0x57, 0x84, 0x84, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcd -; .byte 0x27, 0x67, 0x05, 0x02 +; .byte 0x27, 0x64, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-iadd-small.clif b/cranelift/filetests/filetests/isa/riscv64/simd-iadd-small.clif index cdee35e9459c..09e3aed3c726 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-iadd-small.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-iadd-small.clif @@ -16,10 +16,10 @@ block0(v0:i8x8, v1:i8x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=8, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-8(incoming_arg) #avl=8, #vtype=(e8, m1, ta, ma) -; vadd.vv v14,v9,v11 #avl=8, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=8, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=8, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-8(incoming_arg) #avl=8, #vtype=(e8, m1, ta, ma) +; vadd.vv v8,v8,v9 #avl=8, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=8, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,11 +34,11 @@ block0(v0:i8x8, v1:i8x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x04, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x18 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x87, 0x95, 0x02 -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x84, 0x84, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -56,10 +56,10 @@ block0(v0:i16x4, v1:i16x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v 
v9,-16(incoming_arg) #avl=8, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-8(incoming_arg) #avl=8, #vtype=(e8, m1, ta, ma) -; vadd.vv v14,v9,v11 #avl=4, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=8, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=8, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-8(incoming_arg) #avl=8, #vtype=(e8, m1, ta, ma) +; vadd.vv v8,v8,v9 #avl=4, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=8, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -74,13 +74,13 @@ block0(v0:i16x4, v1:i16x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x04, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x18 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x82, 0xcc -; .byte 0x57, 0x87, 0x95, 0x02 +; .byte 0x57, 0x84, 0x84, 0x02 ; .byte 0x57, 0x70, 0x04, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -98,10 +98,10 @@ block0(v0:i32x2, v1:i32x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=8, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-8(incoming_arg) #avl=8, #vtype=(e8, m1, ta, ma) -; vadd.vv v14,v9,v11 #avl=2, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=8, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=8, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-8(incoming_arg) #avl=8, #vtype=(e8, m1, ta, ma) +; vadd.vv v8,v8,v9 #avl=2, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=8, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,13 +116,13 @@ block0(v0:i32x2, v1:i32x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x04, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x18 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x01, 0xcd -; .byte 0x57, 0x87, 0x95, 0x02 +; .byte 0x57, 0x84, 0x84, 0x02 ; .byte 0x57, 0x70, 0x04, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-iadd-splat-extend.clif b/cranelift/filetests/filetests/isa/riscv64/simd-iadd-splat-extend.clif index 8b426a06aa0a..61a6b4d8df92 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-iadd-splat-extend.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-iadd-splat-extend.clif @@ -18,8 +18,8 @@ block0(v0: i16x8, v1: i8): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwadd.wx v13,v9,a1 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwadd.wx v8,v9,a1 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -36,9 +36,9 @@ block0(v0: i16x8, v1: i8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0xd7, 0xe6, 0x95, 0xd6 +; .byte 0x57, 0xe4, 0x95, 0xd6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -59,8 +59,8 @@ block0(v0: i32x4, v1: i16): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwadd.wx v13,v9,a1 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwadd.wx v8,v9,a1 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; 
addi sp,sp,16 @@ -77,9 +77,9 @@ block0(v0: i32x4, v1: i16): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0xd7, 0xe6, 0x95, 0xd6 +; .byte 0x57, 0xe4, 0x95, 0xd6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -100,8 +100,8 @@ block0(v0: i64x2, v1: i32): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwadd.wx v13,v9,a1 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwadd.wx v8,v9,a1 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -118,9 +118,9 @@ block0(v0: i64x2, v1: i32): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0xd7, 0xe6, 0x95, 0xd6 +; .byte 0x57, 0xe4, 0x95, 0xd6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -141,8 +141,8 @@ block0(v0: i16x8, v1: i8): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwaddu.wx v13,v9,a1 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwaddu.wx v8,v9,a1 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -159,9 +159,9 @@ block0(v0: i16x8, v1: i8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0xd7, 0xe6, 0x95, 0xd2 +; .byte 0x57, 0xe4, 0x95, 0xd2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -182,8 +182,8 @@ block0(v0: i32x4, v1: i16): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwaddu.wx v13,v9,a1 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwaddu.wx v8,v9,a1 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -200,9 +200,9 @@ block0(v0: i32x4, v1: i16): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0xd7, 0xe6, 0x95, 0xd2 +; .byte 0x57, 0xe4, 0x95, 0xd2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -223,8 +223,8 @@ block0(v0: i64x2, v1: i32): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwaddu.wx v13,v9,a1 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwaddu.wx v8,v9,a1 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -241,9 +241,9 @@ block0(v0: i64x2, v1: i32): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0xd7, 0xe6, 0x95, 0xd2 +; .byte 0x57, 0xe4, 0x95, 0xd2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-iadd-swiden-high.clif b/cranelift/filetests/filetests/isa/riscv64/simd-iadd-swiden-high.clif index cd4a08742eba..8ef94eb04b5b 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-iadd-swiden-high.clif +++ 
b/cranelift/filetests/filetests/isa/riscv64/simd-iadd-swiden-high.clif @@ -17,11 +17,11 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vslidedown.vi v8,v11,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vwadd.vv v10,v14,v8 #avl=2, #vtype=(e32, mf2, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v8,v8,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vslidedown.vi v9,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vwadd.vv v10,v8,v9 #avl=2, #vtype=(e32, mf2, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -37,14 +37,14 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x37, 0x91, 0x3e -; .byte 0x57, 0x34, 0xb1, 0x3e +; .byte 0x57, 0x34, 0x81, 0x3e +; .byte 0xd7, 0x34, 0x91, 0x3e ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0x25, 0xe4, 0xc6 +; .byte 0x57, 0xa5, 0x84, 0xc6 ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) @@ -66,11 +66,11 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vslidedown.vi v8,v11,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vwadd.vv v10,v14,v8 #avl=4, #vtype=(e16, mf2, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v8,v8,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vslidedown.vi v9,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vwadd.vv v10,v8,v9 #avl=4, #vtype=(e16, mf2, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -86,14 +86,14 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x37, 0x92, 0x3e -; .byte 0x57, 0x34, 0xb2, 0x3e +; .byte 0x57, 0x34, 0x82, 0x3e +; .byte 0xd7, 0x34, 0x92, 0x3e ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0x25, 0xe4, 0xc6 +; .byte 0x57, 0xa5, 0x84, 0xc6 ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) @@ -115,11 +115,11 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v8,v11,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vwadd.vv v10,v14,v8 #avl=8, #vtype=(e8, mf2, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v8,v8,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vwadd.vv v10,v8,v9 #avl=8, #vtype=(e8, mf2, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, 
ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -135,13 +135,13 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x37, 0x94, 0x3e -; .byte 0x57, 0x34, 0xb4, 0x3e +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x34, 0x84, 0x3e +; .byte 0xd7, 0x34, 0x94, 0x3e ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0x25, 0xe4, 0xc6 +; .byte 0x57, 0xa5, 0x84, 0xc6 ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) @@ -164,10 +164,10 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v13,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vwadd.vx v15,v13,a1 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vwadd.vx v8,v9,a1 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -182,13 +182,13 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x36, 0x91, 0x3e +; .byte 0xd7, 0x34, 0x81, 0x3e ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0xd7, 0xe7, 0xd5, 0xc6 +; .byte 0x57, 0xe4, 0x95, 0xc6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -209,10 +209,10 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v13,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vwadd.vx v15,v13,a1 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vwadd.vx v8,v9,a1 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -227,13 +227,13 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0x36, 0x92, 0x3e +; .byte 0xd7, 0x34, 0x82, 0x3e ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0xd7, 0xe7, 0xd5, 0xc6 +; .byte 0x57, 0xe4, 0x95, 0xc6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -254,10 +254,10 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v13,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vwadd.vx v15,v13,a1 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vwadd.vx v8,v9,a1 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -272,12 +272,12 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; 
.byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0x36, 0x94, 0x3e +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0xd7, 0x34, 0x84, 0x3e ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0xd7, 0xe7, 0xd5, 0xc6 +; .byte 0x57, 0xe4, 0x95, 0xc6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -297,10 +297,10 @@ block0(v0: i32x4, v1: i64x2): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vwadd.wv v8,v11,v14 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vwadd.wv v9,v8,v10 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -317,13 +317,13 @@ block0(v0: i32x4, v1: i64x2): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x37, 0x91, 0x3e +; .byte 0x57, 0x35, 0x91, 0x3e ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0x24, 0xb7, 0xd6 +; .byte 0xd7, 0x24, 0x85, 0xd6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -343,10 +343,10 @@ block0(v0: i16x8, v1: i32x4): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vwadd.wv v8,v11,v14 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vwadd.wv v9,v8,v10 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -363,13 +363,13 @@ block0(v0: i16x8, v1: i32x4): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x37, 0x92, 0x3e +; .byte 0x57, 0x35, 0x92, 0x3e ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0x24, 0xb7, 0xd6 +; .byte 0xd7, 0x24, 0x85, 0xd6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -389,10 +389,10 @@ block0(v0: i8x16, v1: i16x8): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vwadd.wv v8,v11,v14 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vwadd.wv v9,v8,v10 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -409,12 +409,12 @@ block0(v0: i8x16, v1: i16x8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x37, 0x94, 0x3e +; .byte 0x07, 0x84, 0x0f, 
0x02 +; .byte 0x57, 0x35, 0x94, 0x3e ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0x24, 0xb7, 0xd6 +; .byte 0xd7, 0x24, 0x85, 0xd6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-iadd-swiden-low.clif b/cranelift/filetests/filetests/isa/riscv64/simd-iadd-swiden-low.clif index 9098ba608629..d9ee55230363 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-iadd-swiden-low.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-iadd-swiden-low.clif @@ -17,10 +17,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwadd.vv v14,v9,v11 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwadd.vv v9,v8,v10 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -35,13 +35,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0xa7, 0x95, 0xc6 +; .byte 0xd7, 0x24, 0x85, 0xc6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -61,10 +61,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwadd.vv v14,v9,v11 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwadd.vv v9,v8,v10 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -79,13 +79,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0xa7, 0x95, 0xc6 +; .byte 0xd7, 0x24, 0x85, 0xc6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -105,10 +105,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwadd.vv v14,v9,v11 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwadd.vv v9,v8,v10 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -123,13 +123,13 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 
0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0xa7, 0x95, 0xc6 +; .byte 0xd7, 0x24, 0x85, 0xc6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -151,8 +151,8 @@ block0(v0: i32x4, v1: i32): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwadd.vx v13,v9,a1 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwadd.vx v8,v9,a1 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -169,9 +169,9 @@ block0(v0: i32x4, v1: i32): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0xd7, 0xe6, 0x95, 0xc6 +; .byte 0x57, 0xe4, 0x95, 0xc6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -193,8 +193,8 @@ block0(v0: i16x8, v1: i16): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwadd.vx v13,v9,a1 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwadd.vx v8,v9,a1 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -211,9 +211,9 @@ block0(v0: i16x8, v1: i16): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0xd7, 0xe6, 0x95, 0xc6 +; .byte 0x57, 0xe4, 0x95, 0xc6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -235,8 +235,8 @@ block0(v0: i8x16, v1: i8): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwadd.vx v13,v9,a1 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwadd.vx v8,v9,a1 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -253,9 +253,9 @@ block0(v0: i8x16, v1: i8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0xd7, 0xe6, 0x95, 0xc6 +; .byte 0x57, 0xe4, 0x95, 0xc6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -274,10 +274,10 @@ block0(v0: i32x4, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwadd.wv v14,v11,v9 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwadd.wv v9,v10,v8 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -292,13 +292,13 @@ block0(v0: i32x4, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0xa7, 0xb4, 0xd6 
+; .byte 0xd7, 0x24, 0xa4, 0xd6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -317,10 +317,10 @@ block0(v0: i16x8, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwadd.wv v14,v11,v9 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwadd.wv v9,v10,v8 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -335,13 +335,13 @@ block0(v0: i16x8, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0xa7, 0xb4, 0xd6 +; .byte 0xd7, 0x24, 0xa4, 0xd6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -360,10 +360,10 @@ block0(v0: i8x16, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwadd.wv v14,v11,v9 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwadd.wv v9,v10,v8 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -378,13 +378,13 @@ block0(v0: i8x16, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0xa7, 0xb4, 0xd6 +; .byte 0xd7, 0x24, 0xa4, 0xd6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -403,10 +403,10 @@ block0(v0: i8x16, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwadd.wv v14,v11,v9 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwadd.wv v9,v10,v8 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -421,13 +421,13 @@ block0(v0: i8x16, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0xa7, 0xb4, 0xd6 +; .byte 0xd7, 0x24, 0xa4, 0xd6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git 
a/cranelift/filetests/filetests/isa/riscv64/simd-iadd-swiden-mix.clif b/cranelift/filetests/filetests/isa/riscv64/simd-iadd-swiden-mix.clif index 8e521ebdd742..5646291dec80 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-iadd-swiden-mix.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-iadd-swiden-mix.clif @@ -18,10 +18,10 @@ block0(v0: i32x4, v1: i32x4): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vwadd.vv v8,v14,v11 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vwadd.vv v9,v10,v8 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -38,13 +38,13 @@ block0(v0: i32x4, v1: i32x4): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x37, 0x91, 0x3e +; .byte 0x57, 0x35, 0x91, 0x3e ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0xa4, 0xe5, 0xc6 +; .byte 0xd7, 0x24, 0xa4, 0xc6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -65,10 +65,10 @@ block0(v0: i16x8, v1: i16x8): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vwadd.vv v8,v14,v11 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vwadd.vv v9,v10,v8 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -85,13 +85,13 @@ block0(v0: i16x8, v1: i16x8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x37, 0x92, 0x3e +; .byte 0x57, 0x35, 0x92, 0x3e ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0xa4, 0xe5, 0xc6 +; .byte 0xd7, 0x24, 0xa4, 0xc6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -112,10 +112,10 @@ block0(v0: i8x16, v1: i8x16): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vwadd.vv v8,v14,v11 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vwadd.vv v9,v10,v8 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -132,12 +132,12 @@ block0(v0: i8x16, v1: i8x16): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x37, 0x94, 0x3e +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x35, 0x94, 0x3e ; .byte 
0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0xa4, 0xe5, 0xc6 +; .byte 0xd7, 0x24, 0xa4, 0xc6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -157,11 +157,11 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v11,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vwadd.vv v8,v9,v14 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vwadd.vv v9,v8,v10 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -176,15 +176,15 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x37, 0xb1, 0x3e +; .byte 0x57, 0x35, 0x91, 0x3e ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0x24, 0x97, 0xc6 +; .byte 0xd7, 0x24, 0x85, 0xc6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -204,11 +204,11 @@ block0(v0: i16x8, v1:i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v11,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vwadd.vv v8,v9,v14 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vwadd.vv v9,v8,v10 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -223,15 +223,15 @@ block0(v0: i16x8, v1:i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x37, 0xb2, 0x3e +; .byte 0x57, 0x35, 0x92, 0x3e ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0x24, 0x97, 0xc6 +; .byte 0xd7, 0x24, 0x85, 0xc6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -251,11 +251,11 @@ block0(v0: i8x16, v1:i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v11,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vwadd.vv v8,v9,v14 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vwadd.vv v9,v8,v10 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, 
#vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -270,14 +270,14 @@ block0(v0: i8x16, v1:i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x37, 0xb4, 0x3e +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x35, 0x94, 0x3e ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0x24, 0x97, 0xc6 +; .byte 0xd7, 0x24, 0x85, 0xc6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-iadd-uwiden-high.clif b/cranelift/filetests/filetests/isa/riscv64/simd-iadd-uwiden-high.clif index 2a33121b1166..9955857f7453 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-iadd-uwiden-high.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-iadd-uwiden-high.clif @@ -18,11 +18,11 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vslidedown.vi v8,v11,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vwaddu.vv v10,v14,v8 #avl=2, #vtype=(e32, mf2, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v8,v8,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vslidedown.vi v9,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vwaddu.vv v10,v8,v9 #avl=2, #vtype=(e32, mf2, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -38,14 +38,14 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x37, 0x91, 0x3e -; .byte 0x57, 0x34, 0xb1, 0x3e +; .byte 0x57, 0x34, 0x81, 0x3e +; .byte 0xd7, 0x34, 0x91, 0x3e ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0x25, 0xe4, 0xc2 +; .byte 0x57, 0xa5, 0x84, 0xc2 ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) @@ -67,11 +67,11 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vslidedown.vi v8,v11,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vwaddu.vv v10,v14,v8 #avl=4, #vtype=(e16, mf2, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v8,v8,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vslidedown.vi v9,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vwaddu.vv v10,v8,v9 #avl=4, #vtype=(e16, mf2, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -87,14 +87,14 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x37, 0x92, 0x3e -; .byte 0x57, 0x34, 0xb2, 0x3e +; .byte 0x57, 0x34, 0x82, 0x3e +; .byte 0xd7, 
0x34, 0x92, 0x3e ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0x25, 0xe4, 0xc2 +; .byte 0x57, 0xa5, 0x84, 0xc2 ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) @@ -116,11 +116,11 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v8,v11,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vwaddu.vv v10,v14,v8 #avl=8, #vtype=(e8, mf2, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v8,v8,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vwaddu.vv v10,v8,v9 #avl=8, #vtype=(e8, mf2, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -136,13 +136,13 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x37, 0x94, 0x3e -; .byte 0x57, 0x34, 0xb4, 0x3e +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x34, 0x84, 0x3e +; .byte 0xd7, 0x34, 0x94, 0x3e ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0x25, 0xe4, 0xc2 +; .byte 0x57, 0xa5, 0x84, 0xc2 ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) @@ -165,10 +165,10 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v13,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vwaddu.vx v15,v13,a1 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vwaddu.vx v8,v9,a1 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -183,13 +183,13 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x36, 0x91, 0x3e +; .byte 0xd7, 0x34, 0x81, 0x3e ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0xd7, 0xe7, 0xd5, 0xc2 +; .byte 0x57, 0xe4, 0x95, 0xc2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -210,10 +210,10 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v13,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vwaddu.vx v15,v13,a1 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vwaddu.vx v8,v9,a1 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -228,13 +228,13 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0x36, 0x92, 0x3e +; .byte 0xd7, 0x34, 0x82, 0x3e ; .byte 0x57, 0x70, 0xf2, 
0xcc -; .byte 0xd7, 0xe7, 0xd5, 0xc2 +; .byte 0x57, 0xe4, 0x95, 0xc2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -255,10 +255,10 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v13,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vwaddu.vx v15,v13,a1 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vwaddu.vx v8,v9,a1 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -273,12 +273,12 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0x36, 0x94, 0x3e +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0xd7, 0x34, 0x84, 0x3e ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0xd7, 0xe7, 0xd5, 0xc2 +; .byte 0x57, 0xe4, 0x95, 0xc2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -298,10 +298,10 @@ block0(v0: i32x4, v1: i64x2): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vwaddu.wv v8,v11,v14 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vwaddu.wv v9,v8,v10 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -318,13 +318,13 @@ block0(v0: i32x4, v1: i64x2): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x37, 0x91, 0x3e +; .byte 0x57, 0x35, 0x91, 0x3e ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0x24, 0xb7, 0xd2 +; .byte 0xd7, 0x24, 0x85, 0xd2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -344,10 +344,10 @@ block0(v0: i16x8, v1: i32x4): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vwaddu.wv v8,v11,v14 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vwaddu.wv v9,v8,v10 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -364,13 +364,13 @@ block0(v0: i16x8, v1: i32x4): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x37, 0x92, 0x3e +; .byte 0x57, 0x35, 0x92, 0x3e ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0x24, 0xb7, 0xd2 +; .byte 0xd7, 0x24, 0x85, 0xd2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; 
.byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -390,10 +390,10 @@ block0(v0: i8x16, v1: i16x8): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vwaddu.wv v8,v11,v14 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vwaddu.wv v9,v8,v10 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -410,12 +410,12 @@ block0(v0: i8x16, v1: i16x8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x37, 0x94, 0x3e +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x35, 0x94, 0x3e ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0x24, 0xb7, 0xd2 +; .byte 0xd7, 0x24, 0x85, 0xd2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-iadd-uwiden-low.clif b/cranelift/filetests/filetests/isa/riscv64/simd-iadd-uwiden-low.clif index b3a429fd4fe1..fab33b6448ba 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-iadd-uwiden-low.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-iadd-uwiden-low.clif @@ -18,10 +18,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwaddu.vv v14,v9,v11 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwaddu.vv v9,v8,v10 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -36,13 +36,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0xa7, 0x95, 0xc2 +; .byte 0xd7, 0x24, 0x85, 0xc2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -62,10 +62,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwaddu.vv v14,v9,v11 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwaddu.vv v9,v8,v10 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -80,13 +80,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; 
.byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0xa7, 0x95, 0xc2 +; .byte 0xd7, 0x24, 0x85, 0xc2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -106,10 +106,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwaddu.vv v14,v9,v11 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwaddu.vv v9,v8,v10 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -124,13 +124,13 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0xa7, 0x95, 0xc2 +; .byte 0xd7, 0x24, 0x85, 0xc2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -152,8 +152,8 @@ block0(v0: i32x4, v1: i32): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwaddu.vx v13,v9,a1 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwaddu.vx v8,v9,a1 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -170,9 +170,9 @@ block0(v0: i32x4, v1: i32): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0xd7, 0xe6, 0x95, 0xc2 +; .byte 0x57, 0xe4, 0x95, 0xc2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -194,8 +194,8 @@ block0(v0: i16x8, v1: i16): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwaddu.vx v13,v9,a1 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwaddu.vx v8,v9,a1 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -212,9 +212,9 @@ block0(v0: i16x8, v1: i16): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0xd7, 0xe6, 0x95, 0xc2 +; .byte 0x57, 0xe4, 0x95, 0xc2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -236,8 +236,8 @@ block0(v0: i8x16, v1: i8): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwaddu.vx v13,v9,a1 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwaddu.vx v8,v9,a1 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -254,9 +254,9 @@ block0(v0: i8x16, v1: i8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0xd7, 0xe6, 0x95, 0xc2 +; .byte 0x57, 0xe4, 0x95, 0xc2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 
@@ -275,10 +275,10 @@ block0(v0: i32x4, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwaddu.wv v14,v11,v9 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwaddu.wv v9,v10,v8 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -293,13 +293,13 @@ block0(v0: i32x4, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0xa7, 0xb4, 0xd2 +; .byte 0xd7, 0x24, 0xa4, 0xd2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -318,10 +318,10 @@ block0(v0: i16x8, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwaddu.wv v14,v11,v9 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwaddu.wv v9,v10,v8 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -336,13 +336,13 @@ block0(v0: i16x8, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0xa7, 0xb4, 0xd2 +; .byte 0xd7, 0x24, 0xa4, 0xd2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -361,10 +361,10 @@ block0(v0: i8x16, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwaddu.wv v14,v11,v9 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwaddu.wv v9,v10,v8 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -379,13 +379,13 @@ block0(v0: i8x16, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0xa7, 0xb4, 0xd2 +; .byte 0xd7, 0x24, 0xa4, 0xd2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-iadd-uwiden-mix.clif b/cranelift/filetests/filetests/isa/riscv64/simd-iadd-uwiden-mix.clif index db295831f1f1..a09cd3229f66 
100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-iadd-uwiden-mix.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-iadd-uwiden-mix.clif @@ -20,10 +20,10 @@ block0(v0: i32x4, v1: i32x4): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vwaddu.vv v8,v14,v11 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vwaddu.vv v9,v10,v8 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -40,13 +40,13 @@ block0(v0: i32x4, v1: i32x4): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x37, 0x91, 0x3e +; .byte 0x57, 0x35, 0x91, 0x3e ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0xa4, 0xe5, 0xc2 +; .byte 0xd7, 0x24, 0xa4, 0xc2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -67,10 +67,10 @@ block0(v0: i16x8, v1: i16x8): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vwaddu.vv v8,v14,v11 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vwaddu.vv v9,v10,v8 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -87,13 +87,13 @@ block0(v0: i16x8, v1: i16x8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x37, 0x92, 0x3e +; .byte 0x57, 0x35, 0x92, 0x3e ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0xa4, 0xe5, 0xc2 +; .byte 0xd7, 0x24, 0xa4, 0xc2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -114,10 +114,10 @@ block0(v0: i8x16, v1: i8x16): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vwaddu.vv v8,v14,v11 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vwaddu.vv v9,v10,v8 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -134,12 +134,12 @@ block0(v0: i8x16, v1: i8x16): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x37, 0x94, 0x3e +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x35, 0x94, 0x3e ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0xa4, 0xe5, 0xc2 +; .byte 0xd7, 0x24, 0xa4, 0xc2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 
0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -159,11 +159,11 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v11,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vwaddu.vv v8,v9,v14 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vwaddu.vv v9,v8,v10 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -178,15 +178,15 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x37, 0xb1, 0x3e +; .byte 0x57, 0x35, 0x91, 0x3e ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0x24, 0x97, 0xc2 +; .byte 0xd7, 0x24, 0x85, 0xc2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -206,11 +206,11 @@ block0(v0: i16x8, v1:i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v11,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vwaddu.vv v8,v9,v14 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vwaddu.vv v9,v8,v10 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -225,15 +225,15 @@ block0(v0: i16x8, v1:i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x37, 0xb2, 0x3e +; .byte 0x57, 0x35, 0x92, 0x3e ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0x24, 0x97, 0xc2 +; .byte 0xd7, 0x24, 0x85, 0xc2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -253,11 +253,11 @@ block0(v0: i8x16, v1:i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v11,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vwaddu.vv v8,v9,v14 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vwaddu.vv v9,v8,v10 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -272,14 +272,14 @@ block0(v0: i8x16, v1:i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 
0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x37, 0xb4, 0x3e +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x35, 0x94, 0x3e ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0x24, 0x97, 0xc2 +; .byte 0xd7, 0x24, 0x85, 0xc2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-iadd.clif b/cranelift/filetests/filetests/isa/riscv64/simd-iadd.clif index 5a13227267c1..7c5667ebe88c 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-iadd.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-iadd.clif @@ -16,10 +16,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vv v14,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vv v8,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,11 +34,11 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x87, 0x95, 0x02 -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x84, 0x84, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -56,10 +56,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vv v14,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vv v8,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -74,13 +74,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x87, 0x95, 0x02 +; .byte 0x57, 0x84, 0x84, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -98,10 +98,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,13 
+116,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x87, 0x95, 0x02 +; .byte 0x57, 0x84, 0x84, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -140,10 +140,10 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -158,13 +158,13 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x87, 0x95, 0x02 +; .byte 0x57, 0x84, 0x84, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -184,9 +184,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -201,9 +201,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0x02 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -223,9 +223,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vi v12,v9,-16 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vi v8,v8,-16 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -240,11 +240,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x36, 0x98, 0x02 +; .byte 0x57, 0x34, 0x88, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -264,9 +264,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vi v12,v9,15 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, 
ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vi v8,v8,15 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -281,11 +281,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x97, 0x02 +; .byte 0x57, 0xb4, 0x87, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -305,9 +305,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vi v12,v9,-5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vi v8,v8,-5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -322,11 +322,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x9d, 0x02 +; .byte 0x57, 0xb4, 0x8d, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -345,9 +345,9 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -362,9 +362,9 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0x02 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -383,9 +383,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -400,11 +400,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -423,9 +423,9 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, 
ma) +; vadd.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -440,11 +440,11 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -463,9 +463,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -480,11 +480,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-iadd_pairwise.clif b/cranelift/filetests/filetests/isa/riscv64/simd-iadd_pairwise.clif index 660de8797a9f..93d9ebd2ac22 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-iadd_pairwise.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-iadd_pairwise.clif @@ -16,19 +16,19 @@ block0(v0: i8x16, v1: i8x16): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; ld a1,[const(0)] -; vmv.s.x v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vcompress.vm v12,v9,v8 #avl=16, #vtype=(e8, m1, ta, ma) -; vcompress.vm v13,v11,v8 #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.s.x v10,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vcompress.vm v12,v9,v10 #avl=16, #vtype=(e8, m1, ta, ma) +; vcompress.vm v13,v8,v10 #avl=16, #vtype=(e8, m1, ta, ma) ; vslideup.vi v12,v13,8 #avl=16, #vtype=(e8, m1, ta, ma) -; ld a4,[const(1)] -; vmv.s.x v10,a4 #avl=2, #vtype=(e64, m1, ta, ma) -; vcompress.vm v14,v9,v10 #avl=16, #vtype=(e8, m1, ta, ma) -; vcompress.vm v15,v11,v10 #avl=16, #vtype=(e8, m1, ta, ma) -; vslideup.vi v14,v15,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vv v10,v12,v14 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; ld a1,[const(1)] +; vmv.s.x v11,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vcompress.vm v10,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) +; vcompress.vm v9,v8,v11 #avl=16, #vtype=(e8, m1, ta, ma) +; vslideup.vi v10,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vv v8,v12,v10 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -45,25 +45,25 @@ block0(v0: i8x16, v1: i8x16): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; auipc a1, 0 ; ld a1, 0x5c(a1) ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xe4, 0x05, 0x42 +; .byte 0x57, 0xe5, 0x05, 
0x42 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x57, 0x26, 0x94, 0x5e -; .byte 0xd7, 0x26, 0xb4, 0x5e +; .byte 0x57, 0x26, 0x95, 0x5e +; .byte 0xd7, 0x26, 0x85, 0x5e ; .byte 0x57, 0x36, 0xd4, 0x3a -; auipc a4, 0 -; ld a4, 0x44(a4) +; auipc a1, 0 +; ld a1, 0x44(a1) ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x65, 0x07, 0x42 +; .byte 0xd7, 0xe5, 0x05, 0x42 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x57, 0x27, 0x95, 0x5e -; .byte 0xd7, 0x27, 0xb5, 0x5e -; .byte 0x57, 0x37, 0xf4, 0x3a -; .byte 0x57, 0x05, 0xc7, 0x02 -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x57, 0xa5, 0x95, 0x5e +; .byte 0xd7, 0xa4, 0x85, 0x5e +; .byte 0x57, 0x35, 0x94, 0x3a +; .byte 0x57, 0x04, 0xc5, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -87,19 +87,19 @@ block0(v0: i16x8, v1: i16x8): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; ld a1,[const(0)] -; vmv.s.x v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vcompress.vm v12,v9,v8 #avl=8, #vtype=(e16, m1, ta, ma) -; vcompress.vm v13,v11,v8 #avl=8, #vtype=(e16, m1, ta, ma) +; vmv.s.x v10,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vcompress.vm v12,v9,v10 #avl=8, #vtype=(e16, m1, ta, ma) +; vcompress.vm v13,v8,v10 #avl=8, #vtype=(e16, m1, ta, ma) ; vslideup.vi v12,v13,4 #avl=8, #vtype=(e16, m1, ta, ma) -; ld a4,[const(1)] -; vmv.s.x v10,a4 #avl=2, #vtype=(e64, m1, ta, ma) -; vcompress.vm v14,v9,v10 #avl=8, #vtype=(e16, m1, ta, ma) -; vcompress.vm v15,v11,v10 #avl=8, #vtype=(e16, m1, ta, ma) -; vslideup.vi v14,v15,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vadd.vv v10,v12,v14 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; ld a1,[const(1)] +; vmv.s.x v11,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vcompress.vm v10,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) +; vcompress.vm v9,v8,v11 #avl=8, #vtype=(e16, m1, ta, ma) +; vslideup.vi v10,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vadd.vv v8,v12,v10 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,26 +116,26 @@ block0(v0: i16x8, v1: i16x8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; auipc a1, 0 ; ld a1, 0x5c(a1) ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xe4, 0x05, 0x42 +; .byte 0x57, 0xe5, 0x05, 0x42 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x26, 0x94, 0x5e -; .byte 0xd7, 0x26, 0xb4, 0x5e +; .byte 0x57, 0x26, 0x95, 0x5e +; .byte 0xd7, 0x26, 0x85, 0x5e ; .byte 0x57, 0x36, 0xd2, 0x3a -; auipc a4, 0 -; ld a4, 0x44(a4) +; auipc a1, 0 +; ld a1, 0x44(a1) ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x65, 0x07, 0x42 +; .byte 0xd7, 0xe5, 0x05, 0x42 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x27, 0x95, 0x5e -; .byte 0xd7, 0x27, 0xb5, 0x5e -; .byte 0x57, 0x37, 0xf2, 0x3a -; .byte 0x57, 0x05, 0xc7, 0x02 +; .byte 0x57, 0xa5, 0x95, 0x5e +; .byte 0xd7, 0xa4, 0x85, 0x5e +; .byte 0x57, 0x35, 0x92, 0x3a +; .byte 0x57, 0x04, 0xc5, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -158,19 +158,19 @@ block0(v0: i32x4, v1: i32x4): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, 
ma) ; ld a1,[const(0)] -; vmv.s.x v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vcompress.vm v12,v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) -; vcompress.vm v13,v11,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmv.s.x v10,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vcompress.vm v12,v9,v10 #avl=4, #vtype=(e32, m1, ta, ma) +; vcompress.vm v13,v8,v10 #avl=4, #vtype=(e32, m1, ta, ma) ; vslideup.vi v12,v13,2 #avl=4, #vtype=(e32, m1, ta, ma) -; ld a4,[const(1)] -; vmv.s.x v10,a4 #avl=2, #vtype=(e64, m1, ta, ma) -; vcompress.vm v14,v9,v10 #avl=4, #vtype=(e32, m1, ta, ma) -; vcompress.vm v15,v11,v10 #avl=4, #vtype=(e32, m1, ta, ma) -; vslideup.vi v14,v15,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vadd.vv v10,v12,v14 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; ld a1,[const(1)] +; vmv.s.x v11,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vcompress.vm v10,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) +; vcompress.vm v9,v8,v11 #avl=4, #vtype=(e32, m1, ta, ma) +; vslideup.vi v10,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vadd.vv v8,v12,v10 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -187,26 +187,26 @@ block0(v0: i32x4, v1: i32x4): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; auipc a1, 0 ; ld a1, 0x5c(a1) ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xe4, 0x05, 0x42 +; .byte 0x57, 0xe5, 0x05, 0x42 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x26, 0x94, 0x5e -; .byte 0xd7, 0x26, 0xb4, 0x5e +; .byte 0x57, 0x26, 0x95, 0x5e +; .byte 0xd7, 0x26, 0x85, 0x5e ; .byte 0x57, 0x36, 0xd1, 0x3a -; auipc a4, 0 -; ld a4, 0x44(a4) +; auipc a1, 0 +; ld a1, 0x44(a1) ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x65, 0x07, 0x42 +; .byte 0xd7, 0xe5, 0x05, 0x42 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x27, 0x95, 0x5e -; .byte 0xd7, 0x27, 0xb5, 0x5e -; .byte 0x57, 0x37, 0xf1, 0x3a -; .byte 0x57, 0x05, 0xc7, 0x02 +; .byte 0x57, 0xa5, 0x95, 0x5e +; .byte 0xd7, 0xa4, 0x85, 0x5e +; .byte 0x57, 0x35, 0x91, 0x3a +; .byte 0x57, 0x04, 0xc5, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-icmp-eq.clif b/cranelift/filetests/filetests/isa/riscv64/simd-icmp-eq.clif index 182dc3d398c4..cc76b2bf9205 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-icmp-eq.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-icmp-eq.clif @@ -15,9 +15,9 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmseq.vv v0,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmseq.vv v0,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.v.i v8,0 #avl=16, #vtype=(e8, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=16, #vtype=(e8, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -35,10 +35,10 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x80, 0x95, 0x62 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x80, 0x84, 0x62 ; .byte 0x57, 0x34, 
0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x27, 0x05, 0x05, 0x02 @@ -59,9 +59,9 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmseq.vv v0,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmseq.vv v0,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) ; vmv.v.i v8,0 #avl=8, #vtype=(e16, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=8, #vtype=(e16, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -79,11 +79,11 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x80, 0x95, 0x62 +; .byte 0x57, 0x80, 0x84, 0x62 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -105,9 +105,9 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmseq.vv v0,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmseq.vv v0,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -125,11 +125,11 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x80, 0x95, 0x62 +; .byte 0x57, 0x80, 0x84, 0x62 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -151,9 +151,9 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmseq.vv v0,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmseq.vv v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -171,11 +171,11 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x80, 0x95, 0x62 +; .byte 0x57, 0x80, 0x84, 0x62 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -198,10 +198,10 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmseq.vx v0,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; 
vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmseq.vx v0,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -217,11 +217,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc0, 0x95, 0x62 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0xc0, 0x85, 0x62 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -242,10 +242,10 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmseq.vx v0,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmseq.vx v0,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -261,11 +261,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc0, 0x95, 0x62 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0xc0, 0x85, 0x62 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -287,10 +287,10 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmseq.vi v0,v9,10 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v14,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v8,v14,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmseq.vi v0,v8,10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v9,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v8,v9,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -306,11 +306,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x30, 0x95, 0x62 -; .byte 0x57, 0x37, 0x00, 0x5e -; .byte 0x57, 0xb4, 0xef, 0x5c +; .byte 0x57, 0x30, 0x85, 0x62 +; .byte 0xd7, 0x34, 0x00, 0x5e +; .byte 0x57, 0xb4, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -332,10 +332,10 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmseq.vi v0,v9,10 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v14,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v8,v14,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmseq.vi v0,v8,10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v9,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim 
v8,v9,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -351,11 +351,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x30, 0x95, 0x62 -; .byte 0x57, 0x37, 0x00, 0x5e -; .byte 0x57, 0xb4, 0xef, 0x5c +; .byte 0x57, 0x30, 0x85, 0x62 +; .byte 0xd7, 0x34, 0x00, 0x5e +; .byte 0x57, 0xb4, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-icmp-ne.clif b/cranelift/filetests/filetests/isa/riscv64/simd-icmp-ne.clif index 081798f9428c..cae2cb52d7b2 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-icmp-ne.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-icmp-ne.clif @@ -15,9 +15,9 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsne.vv v0,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsne.vv v0,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.v.i v8,0 #avl=16, #vtype=(e8, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=16, #vtype=(e8, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -35,10 +35,10 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x80, 0x95, 0x66 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x80, 0x84, 0x66 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x27, 0x05, 0x05, 0x02 @@ -59,9 +59,9 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsne.vv v0,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsne.vv v0,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) ; vmv.v.i v8,0 #avl=8, #vtype=(e16, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=8, #vtype=(e16, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -79,11 +79,11 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x80, 0x95, 0x66 +; .byte 0x57, 0x80, 0x84, 0x66 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -105,9 +105,9 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsne.vv v0,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsne.vv v0,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t 
#avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -125,11 +125,11 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x80, 0x95, 0x66 +; .byte 0x57, 0x80, 0x84, 0x66 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -151,9 +151,9 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsne.vv v0,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsne.vv v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -171,11 +171,11 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x80, 0x95, 0x66 +; .byte 0x57, 0x80, 0x84, 0x66 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -198,10 +198,10 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsne.vx v0,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsne.vx v0,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -217,11 +217,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc0, 0x95, 0x66 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0xc0, 0x85, 0x66 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -242,10 +242,10 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsne.vx v0,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsne.vx v0,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -261,11 +261,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 
0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc0, 0x95, 0x66 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0xc0, 0x85, 0x66 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -287,10 +287,10 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsne.vi v0,v9,10 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v14,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v8,v14,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsne.vi v0,v8,10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v9,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v8,v9,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -306,11 +306,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x30, 0x95, 0x66 -; .byte 0x57, 0x37, 0x00, 0x5e -; .byte 0x57, 0xb4, 0xef, 0x5c +; .byte 0x57, 0x30, 0x85, 0x66 +; .byte 0xd7, 0x34, 0x00, 0x5e +; .byte 0x57, 0xb4, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -332,10 +332,10 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsne.vi v0,v9,10 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v14,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v8,v14,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsne.vi v0,v8,10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v9,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v8,v9,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -351,11 +351,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x30, 0x95, 0x66 -; .byte 0x57, 0x37, 0x00, 0x5e -; .byte 0x57, 0xb4, 0xef, 0x5c +; .byte 0x57, 0x30, 0x85, 0x66 +; .byte 0xd7, 0x34, 0x00, 0x5e +; .byte 0x57, 0xb4, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-icmp-sge.clif b/cranelift/filetests/filetests/isa/riscv64/simd-icmp-sge.clif index c6afd130e63e..e11a646b1598 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-icmp-sge.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-icmp-sge.clif @@ -15,9 +15,9 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsle.vv v0,v11,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsle.vv v0,v9,v8 #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.v.i v8,0 #avl=16, #vtype=(e8, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=16, #vtype=(e8, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -35,10 +35,10 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 
0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x80, 0xb4, 0x76 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x00, 0x94, 0x76 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x27, 0x05, 0x05, 0x02 @@ -59,9 +59,9 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsle.vv v0,v11,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsle.vv v0,v9,v8 #avl=8, #vtype=(e16, m1, ta, ma) ; vmv.v.i v8,0 #avl=8, #vtype=(e16, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=8, #vtype=(e16, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -79,11 +79,11 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x80, 0xb4, 0x76 +; .byte 0x57, 0x00, 0x94, 0x76 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -105,9 +105,9 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsle.vv v0,v11,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsle.vv v0,v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -125,11 +125,11 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x80, 0xb4, 0x76 +; .byte 0x57, 0x00, 0x94, 0x76 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -151,9 +151,9 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsle.vv v0,v11,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsle.vv v0,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -171,11 +171,11 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x80, 0xb4, 0x76 +; .byte 0x57, 0x00, 0x94, 0x76 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -198,9 +198,9 @@ block0(v0: i64x2, v1: i64): ; sd 
fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.v.x v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmsle.vv v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.x v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmsle.vv v0,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -218,10 +218,10 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc4, 0x05, 0x5e -; .byte 0x57, 0x80, 0x84, 0x76 +; .byte 0xd7, 0xc4, 0x05, 0x5e +; .byte 0x57, 0x00, 0x94, 0x76 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -244,10 +244,10 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsle.vx v0,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsle.vx v0,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -263,11 +263,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc0, 0x95, 0x76 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0xc0, 0x85, 0x76 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -289,11 +289,11 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.v.i v15,10 #avl=2, #vtype=(e64, m1, ta, ma) -; vmsle.vv v0,v15,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.i v9,10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmsle.vv v0,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -309,12 +309,12 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x37, 0x05, 0x5e -; .byte 0x57, 0x80, 0xf4, 0x76 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0xd7, 0x34, 0x05, 0x5e +; .byte 0x57, 0x00, 0x94, 0x76 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -336,10 +336,10 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsle.vi v0,v9,10 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v14,0 
#avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v8,v14,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsle.vi v0,v8,10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v9,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v8,v9,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -355,11 +355,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x30, 0x95, 0x76 -; .byte 0x57, 0x37, 0x00, 0x5e -; .byte 0x57, 0xb4, 0xef, 0x5c +; .byte 0x57, 0x30, 0x85, 0x76 +; .byte 0xd7, 0x34, 0x00, 0x5e +; .byte 0x57, 0xb4, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-icmp-sgt.clif b/cranelift/filetests/filetests/isa/riscv64/simd-icmp-sgt.clif index e0c585579202..e9c0f787c204 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-icmp-sgt.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-icmp-sgt.clif @@ -15,9 +15,9 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmslt.vv v0,v11,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmslt.vv v0,v9,v8 #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.v.i v8,0 #avl=16, #vtype=(e8, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=16, #vtype=(e8, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -35,10 +35,10 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x80, 0xb4, 0x6e +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x00, 0x94, 0x6e ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x27, 0x05, 0x05, 0x02 @@ -59,9 +59,9 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmslt.vv v0,v11,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmslt.vv v0,v9,v8 #avl=8, #vtype=(e16, m1, ta, ma) ; vmv.v.i v8,0 #avl=8, #vtype=(e16, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=8, #vtype=(e16, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -79,11 +79,11 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x80, 0xb4, 0x6e +; .byte 0x57, 0x00, 0x94, 0x6e ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -105,9 +105,9 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmslt.vv v0,v11,v9 #avl=4, 
#vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmslt.vv v0,v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -125,11 +125,11 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x80, 0xb4, 0x6e +; .byte 0x57, 0x00, 0x94, 0x6e ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -151,9 +151,9 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmslt.vv v0,v11,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmslt.vv v0,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -171,11 +171,11 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x80, 0xb4, 0x6e +; .byte 0x57, 0x00, 0x94, 0x6e ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -198,10 +198,10 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsgt.vx v0,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsgt.vx v0,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -217,11 +217,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc0, 0x95, 0x7e -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0xc0, 0x85, 0x7e +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -242,10 +242,10 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmslt.vx v0,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmslt.vx v0,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, 
ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -261,11 +261,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc0, 0x95, 0x6e -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0xc0, 0x85, 0x6e +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -287,10 +287,10 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsgt.vi v0,v9,10 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v14,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v8,v14,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsgt.vi v0,v8,10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v9,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v8,v9,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -306,11 +306,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x30, 0x95, 0x7e -; .byte 0x57, 0x37, 0x00, 0x5e -; .byte 0x57, 0xb4, 0xef, 0x5c +; .byte 0x57, 0x30, 0x85, 0x7e +; .byte 0xd7, 0x34, 0x00, 0x5e +; .byte 0x57, 0xb4, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -332,11 +332,11 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; li a1,10 -; vmslt.vx v0,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vmslt.vx v0,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -352,12 +352,12 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi a1, zero, 0xa ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc0, 0x95, 0x6e -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0xc0, 0x85, 0x6e +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-icmp-sle.clif b/cranelift/filetests/filetests/isa/riscv64/simd-icmp-sle.clif index 2288a045ad8f..410eae5a95f6 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-icmp-sle.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-icmp-sle.clif @@ -15,9 +15,9 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsle.vv v0,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsle.vv v0,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) 
; vmv.v.i v8,0 #avl=16, #vtype=(e8, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=16, #vtype=(e8, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -35,10 +35,10 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x80, 0x95, 0x76 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x80, 0x84, 0x76 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x27, 0x05, 0x05, 0x02 @@ -59,9 +59,9 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsle.vv v0,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsle.vv v0,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) ; vmv.v.i v8,0 #avl=8, #vtype=(e16, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=8, #vtype=(e16, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -79,11 +79,11 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x80, 0x95, 0x76 +; .byte 0x57, 0x80, 0x84, 0x76 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -105,9 +105,9 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsle.vv v0,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsle.vv v0,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -125,11 +125,11 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x80, 0x95, 0x76 +; .byte 0x57, 0x80, 0x84, 0x76 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -151,9 +151,9 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsle.vv v0,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsle.vv v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -171,11 +171,11 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 
0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x80, 0x95, 0x76 +; .byte 0x57, 0x80, 0x84, 0x76 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -198,10 +198,10 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsle.vx v0,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsle.vx v0,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -217,11 +217,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc0, 0x95, 0x76 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0xc0, 0x85, 0x76 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -242,9 +242,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.v.x v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmsle.vv v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.x v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmsle.vv v0,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -262,10 +262,10 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc4, 0x05, 0x5e -; .byte 0x57, 0x80, 0x84, 0x76 +; .byte 0xd7, 0xc4, 0x05, 0x5e +; .byte 0x57, 0x00, 0x94, 0x76 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -289,10 +289,10 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsle.vi v0,v9,10 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v14,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v8,v14,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsle.vi v0,v8,10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v9,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v8,v9,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -308,11 +308,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x30, 0x95, 0x76 -; .byte 0x57, 0x37, 0x00, 0x5e -; .byte 0x57, 0xb4, 0xef, 0x5c +; .byte 0x57, 0x30, 0x85, 0x76 +; .byte 0xd7, 0x34, 0x00, 0x5e +; .byte 0x57, 0xb4, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -334,11 +334,11 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; 
block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.v.i v15,10 #avl=2, #vtype=(e64, m1, ta, ma) -; vmsle.vv v0,v15,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.i v9,10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmsle.vv v0,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -354,12 +354,12 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x37, 0x05, 0x5e -; .byte 0x57, 0x80, 0xf4, 0x76 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0xd7, 0x34, 0x05, 0x5e +; .byte 0x57, 0x00, 0x94, 0x76 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-icmp-slt.clif b/cranelift/filetests/filetests/isa/riscv64/simd-icmp-slt.clif index c6479fe82513..fbb911bc1abe 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-icmp-slt.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-icmp-slt.clif @@ -15,9 +15,9 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmslt.vv v0,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmslt.vv v0,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.v.i v8,0 #avl=16, #vtype=(e8, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=16, #vtype=(e8, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -35,10 +35,10 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x80, 0x95, 0x6e +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x80, 0x84, 0x6e ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x27, 0x05, 0x05, 0x02 @@ -59,9 +59,9 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmslt.vv v0,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmslt.vv v0,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) ; vmv.v.i v8,0 #avl=8, #vtype=(e16, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=8, #vtype=(e16, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -79,11 +79,11 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x80, 0x95, 0x6e +; .byte 0x57, 0x80, 0x84, 0x6e ; .byte 0x57, 0x34, 0x00, 0x5e ; 
.byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -105,9 +105,9 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmslt.vv v0,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmslt.vv v0,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -125,11 +125,11 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x80, 0x95, 0x6e +; .byte 0x57, 0x80, 0x84, 0x6e ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -151,9 +151,9 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmslt.vv v0,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmslt.vv v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -171,11 +171,11 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x80, 0x95, 0x6e +; .byte 0x57, 0x80, 0x84, 0x6e ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -198,10 +198,10 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmslt.vx v0,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmslt.vx v0,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -217,11 +217,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc0, 0x95, 0x6e -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0xc0, 0x85, 0x6e +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -242,10 +242,10 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsgt.vx v0,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, 
#vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsgt.vx v0,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -261,11 +261,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc0, 0x95, 0x7e -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0xc0, 0x85, 0x7e +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -287,11 +287,11 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; li a1,10 -; vmslt.vx v0,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vmslt.vx v0,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -307,12 +307,12 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi a1, zero, 0xa ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc0, 0x95, 0x6e -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0xc0, 0x85, 0x6e +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -334,10 +334,10 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsgt.vi v0,v9,10 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v14,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v8,v14,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsgt.vi v0,v8,10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v9,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v8,v9,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -353,11 +353,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x30, 0x95, 0x7e -; .byte 0x57, 0x37, 0x00, 0x5e -; .byte 0x57, 0xb4, 0xef, 0x5c +; .byte 0x57, 0x30, 0x85, 0x7e +; .byte 0xd7, 0x34, 0x00, 0x5e +; .byte 0x57, 0xb4, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-icmp-uge.clif b/cranelift/filetests/filetests/isa/riscv64/simd-icmp-uge.clif index 26b1620070a0..573e78a6a20e 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-icmp-uge.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-icmp-uge.clif @@ -15,9 +15,9 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, 
ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsleu.vv v0,v11,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsleu.vv v0,v9,v8 #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.v.i v8,0 #avl=16, #vtype=(e8, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=16, #vtype=(e8, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -35,10 +35,10 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x80, 0xb4, 0x72 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x00, 0x94, 0x72 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x27, 0x05, 0x05, 0x02 @@ -59,9 +59,9 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsleu.vv v0,v11,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsleu.vv v0,v9,v8 #avl=8, #vtype=(e16, m1, ta, ma) ; vmv.v.i v8,0 #avl=8, #vtype=(e16, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=8, #vtype=(e16, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -79,11 +79,11 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x80, 0xb4, 0x72 +; .byte 0x57, 0x00, 0x94, 0x72 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -105,9 +105,9 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsleu.vv v0,v11,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsleu.vv v0,v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -125,11 +125,11 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x80, 0xb4, 0x72 +; .byte 0x57, 0x00, 0x94, 0x72 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -151,9 +151,9 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsleu.vv v0,v11,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsleu.vv v0,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) ; 
vmerge.vim v10,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -171,11 +171,11 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x80, 0xb4, 0x72 +; .byte 0x57, 0x00, 0x94, 0x72 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -198,9 +198,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.v.x v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmsleu.vv v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.x v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmsleu.vv v0,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -218,10 +218,10 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc4, 0x05, 0x5e -; .byte 0x57, 0x80, 0x84, 0x72 +; .byte 0xd7, 0xc4, 0x05, 0x5e +; .byte 0x57, 0x00, 0x94, 0x72 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -244,10 +244,10 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsleu.vx v0,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsleu.vx v0,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -263,11 +263,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc0, 0x95, 0x72 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0xc0, 0x85, 0x72 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -289,11 +289,11 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.v.i v15,10 #avl=2, #vtype=(e64, m1, ta, ma) -; vmsleu.vv v0,v15,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.i v9,10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmsleu.vv v0,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -309,12 +309,12 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 
0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x37, 0x05, 0x5e -; .byte 0x57, 0x80, 0xf4, 0x72 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0xd7, 0x34, 0x05, 0x5e +; .byte 0x57, 0x00, 0x94, 0x72 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -336,10 +336,10 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsleu.vi v0,v9,10 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v14,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v8,v14,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsleu.vi v0,v8,10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v9,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v8,v9,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -355,11 +355,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x30, 0x95, 0x72 -; .byte 0x57, 0x37, 0x00, 0x5e -; .byte 0x57, 0xb4, 0xef, 0x5c +; .byte 0x57, 0x30, 0x85, 0x72 +; .byte 0xd7, 0x34, 0x00, 0x5e +; .byte 0x57, 0xb4, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-icmp-ugt.clif b/cranelift/filetests/filetests/isa/riscv64/simd-icmp-ugt.clif index e512f993c438..dcd56365c897 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-icmp-ugt.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-icmp-ugt.clif @@ -15,9 +15,9 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsltu.vv v0,v11,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsltu.vv v0,v9,v8 #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.v.i v8,0 #avl=16, #vtype=(e8, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=16, #vtype=(e8, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -35,10 +35,10 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x80, 0xb4, 0x6a +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x00, 0x94, 0x6a ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x27, 0x05, 0x05, 0x02 @@ -59,9 +59,9 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsltu.vv v0,v11,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsltu.vv v0,v9,v8 #avl=8, #vtype=(e16, m1, ta, ma) ; vmv.v.i v8,0 #avl=8, #vtype=(e16, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=8, #vtype=(e16, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -79,11 +79,11 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; 
.byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x80, 0xb4, 0x6a +; .byte 0x57, 0x00, 0x94, 0x6a ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -105,9 +105,9 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsltu.vv v0,v11,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsltu.vv v0,v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -125,11 +125,11 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x80, 0xb4, 0x6a +; .byte 0x57, 0x00, 0x94, 0x6a ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -151,9 +151,9 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsltu.vv v0,v11,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsltu.vv v0,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -171,11 +171,11 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x80, 0xb4, 0x6a +; .byte 0x57, 0x00, 0x94, 0x6a ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -198,10 +198,10 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsgtu.vx v0,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsgtu.vx v0,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -217,11 +217,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc0, 0x95, 0x7a -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0xc0, 0x85, 0x7a +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 
0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -242,10 +242,10 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsltu.vx v0,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsltu.vx v0,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -261,11 +261,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc0, 0x95, 0x6a -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0xc0, 0x85, 0x6a +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -287,10 +287,10 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsgtu.vi v0,v9,10 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v14,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v8,v14,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsgtu.vi v0,v8,10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v9,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v8,v9,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -306,11 +306,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x30, 0x95, 0x7a -; .byte 0x57, 0x37, 0x00, 0x5e -; .byte 0x57, 0xb4, 0xef, 0x5c +; .byte 0x57, 0x30, 0x85, 0x7a +; .byte 0xd7, 0x34, 0x00, 0x5e +; .byte 0x57, 0xb4, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -332,11 +332,11 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; li a1,10 -; vmsltu.vx v0,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vmsltu.vx v0,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -352,12 +352,12 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi a1, zero, 0xa ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc0, 0x95, 0x6a -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0xc0, 0x85, 0x6a +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-icmp-ule.clif 
b/cranelift/filetests/filetests/isa/riscv64/simd-icmp-ule.clif index 10ee6bef3c23..e58284e88b19 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-icmp-ule.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-icmp-ule.clif @@ -15,9 +15,9 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsleu.vv v0,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsleu.vv v0,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.v.i v8,0 #avl=16, #vtype=(e8, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=16, #vtype=(e8, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -35,10 +35,10 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x80, 0x95, 0x72 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x80, 0x84, 0x72 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x27, 0x05, 0x05, 0x02 @@ -59,9 +59,9 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsleu.vv v0,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsleu.vv v0,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) ; vmv.v.i v8,0 #avl=8, #vtype=(e16, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=8, #vtype=(e16, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -79,11 +79,11 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x80, 0x95, 0x72 +; .byte 0x57, 0x80, 0x84, 0x72 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -105,9 +105,9 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsleu.vv v0,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsleu.vv v0,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -125,11 +125,11 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x80, 0x95, 0x72 +; .byte 0x57, 0x80, 0x84, 0x72 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -151,9 +151,9 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, 
#vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsleu.vv v0,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsleu.vv v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -171,11 +171,11 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x80, 0x95, 0x72 +; .byte 0x57, 0x80, 0x84, 0x72 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -198,10 +198,10 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsleu.vx v0,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsleu.vx v0,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -217,11 +217,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc0, 0x95, 0x72 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0xc0, 0x85, 0x72 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -242,9 +242,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.v.x v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmsleu.vv v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.x v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmsleu.vv v0,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -262,10 +262,10 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc4, 0x05, 0x5e -; .byte 0x57, 0x80, 0x84, 0x72 +; .byte 0xd7, 0xc4, 0x05, 0x5e +; .byte 0x57, 0x00, 0x94, 0x72 ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -289,10 +289,10 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsleu.vi v0,v9,10 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v14,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v8,v14,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsleu.vi v0,v8,10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v9,0 #avl=2, #vtype=(e64, m1, 
ta, ma) +; vmerge.vim v8,v9,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -308,11 +308,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x30, 0x95, 0x72 -; .byte 0x57, 0x37, 0x00, 0x5e -; .byte 0x57, 0xb4, 0xef, 0x5c +; .byte 0x57, 0x30, 0x85, 0x72 +; .byte 0xd7, 0x34, 0x00, 0x5e +; .byte 0x57, 0xb4, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -334,11 +334,11 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.v.i v15,10 #avl=2, #vtype=(e64, m1, ta, ma) -; vmsleu.vv v0,v15,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.i v9,10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmsleu.vv v0,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -354,12 +354,12 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x37, 0x05, 0x5e -; .byte 0x57, 0x80, 0xf4, 0x72 -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0xd7, 0x34, 0x05, 0x5e +; .byte 0x57, 0x00, 0x94, 0x72 +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-icmp-ult.clif b/cranelift/filetests/filetests/isa/riscv64/simd-icmp-ult.clif index b5707bef8e42..caef97fd85e8 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-icmp-ult.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-icmp-ult.clif @@ -15,9 +15,9 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsltu.vv v0,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsltu.vv v0,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.v.i v8,0 #avl=16, #vtype=(e8, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=16, #vtype=(e8, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -35,10 +35,10 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x80, 0x95, 0x6a +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x80, 0x84, 0x6a ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x27, 0x05, 0x05, 0x02 @@ -59,9 +59,9 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsltu.vv v0,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, 
#vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsltu.vv v0,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) ; vmv.v.i v8,0 #avl=8, #vtype=(e16, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=8, #vtype=(e16, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -79,11 +79,11 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x80, 0x95, 0x6a +; .byte 0x57, 0x80, 0x84, 0x6a ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -105,9 +105,9 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsltu.vv v0,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsltu.vv v0,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v8,0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -125,11 +125,11 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x80, 0x95, 0x6a +; .byte 0x57, 0x80, 0x84, 0x6a ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -151,9 +151,9 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsltu.vv v0,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsltu.vv v0,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmerge.vim v10,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -171,11 +171,11 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x80, 0x95, 0x6a +; .byte 0x57, 0x80, 0x84, 0x6a ; .byte 0x57, 0x34, 0x00, 0x5e ; .byte 0x57, 0xb5, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc @@ -198,10 +198,10 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsltu.vx v0,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsltu.vx v0,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -217,11 
+217,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc0, 0x95, 0x6a -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0xc0, 0x85, 0x6a +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -242,10 +242,10 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsgtu.vx v0,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsgtu.vx v0,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -261,11 +261,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc0, 0x95, 0x7a -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0xc0, 0x85, 0x7a +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -287,11 +287,11 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; li a1,10 -; vmsltu.vx v0,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v15,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v9,v15,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vmsltu.vx v0,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v8,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -307,12 +307,12 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi a1, zero, 0xa ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc0, 0x95, 0x6a -; .byte 0xd7, 0x37, 0x00, 0x5e -; .byte 0xd7, 0xb4, 0xff, 0x5c +; .byte 0x57, 0xc0, 0x85, 0x6a +; .byte 0x57, 0x34, 0x00, 0x5e +; .byte 0xd7, 0xb4, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -334,10 +334,10 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmsgtu.vi v0,v9,10 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.v.i v14,0 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v8,v14,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmsgtu.vi v0,v8,10 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v9,0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v8,v9,-1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -353,11 +353,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 
0x57, 0x30, 0x95, 0x7a -; .byte 0x57, 0x37, 0x00, 0x5e -; .byte 0x57, 0xb4, 0xef, 0x5c +; .byte 0x57, 0x30, 0x85, 0x7a +; .byte 0xd7, 0x34, 0x00, 0x5e +; .byte 0x57, 0xb4, 0x9f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-ifma.clif b/cranelift/filetests/filetests/isa/riscv64/simd-ifma.clif index 044a3565eb68..c685a26b5a8d 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-ifma.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-ifma.clif @@ -17,11 +17,11 @@ block0(v0: i64x2, v1: i64x2, v2: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v15,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmacc.vv v15,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmacc.vv v10,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -36,15 +36,15 @@ block0(v0: i64x2, v1: i64x2, v2: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x30 -; .byte 0x87, 0x87, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xa7, 0x95, 0xb6 +; .byte 0x57, 0xa5, 0x84, 0xb6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -64,11 +64,11 @@ block0(v0: i64x2, v1: i64x2, v2: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.v.x v15,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmacc.vv v15,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.x v10,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmacc.vv v10,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -83,14 +83,14 @@ block0(v0: i64x2, v1: i64x2, v2: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc7, 0x05, 0x5e -; .byte 0xd7, 0xa7, 0x95, 0xb6 +; .byte 0x57, 0xc5, 0x05, 0x5e +; .byte 0x57, 0xa5, 0x84, 0xb6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -110,11 +110,11 @@ block0(v0: i64x2, v1: i64x2, v2: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v15,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vnmsac.vv v15,v9,v11 #avl=2, #vtype=(e64, m1, ta, 
ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vnmsac.vv v10,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -129,15 +129,15 @@ block0(v0: i64x2, v1: i64x2, v2: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x30 -; .byte 0x87, 0x87, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xa7, 0x95, 0xbe +; .byte 0x57, 0xa5, 0x84, 0xbe ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -158,11 +158,11 @@ block0(v0: i64x2, v1: i64x2, v2: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.v.x v15,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vnmsac.vv v15,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.x v10,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vnmsac.vv v10,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -177,14 +177,14 @@ block0(v0: i64x2, v1: i64x2, v2: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc7, 0x05, 0x5e -; .byte 0xd7, 0xa7, 0x95, 0xbe +; .byte 0x57, 0xc5, 0x05, 0x5e +; .byte 0x57, 0xa5, 0x84, 0xbe ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -203,11 +203,11 @@ block0(v0: i64x2, v1: i64x2, v2: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v13,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmul.vv v9,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vsub.vv v9,v13,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-48(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmul.vv v10,v8,v10 #avl=2, #vtype=(e64, m1, ta, ma) +; vsub.vv v9,v9,v10 #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -223,14 +223,14 @@ block0(v0: i64x2, v1: i64x2, v2: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; addi t6, sp, 0x30 -; .byte 0x87, 0x86, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xa4, 0x95, 0x96 -; .byte 0xd7, 0x84, 
0xd4, 0x0a +; .byte 0x57, 0x25, 0x85, 0x96 +; .byte 0xd7, 0x04, 0x95, 0x0a ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) @@ -252,9 +252,9 @@ block0(v0: i64x2, v1: i64x2, v2: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmul.vv v8,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmul.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) ; vsub.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) @@ -271,11 +271,11 @@ block0(v0: i64x2, v1: i64x2, v2: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xa4, 0x95, 0x96 +; .byte 0x57, 0xa4, 0x84, 0x96 ; .byte 0x57, 0xc4, 0x85, 0x0a ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x04, 0x05, 0x02 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-imul.clif b/cranelift/filetests/filetests/isa/riscv64/simd-imul.clif index dda2c0130590..b1763a2e194f 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-imul.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-imul.clif @@ -16,10 +16,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmul.vv v14,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmul.vv v8,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,11 +34,11 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0xa7, 0x95, 0x96 -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xa4, 0x84, 0x96 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -56,10 +56,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmul.vv v14,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmul.vv v8,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -74,13 +74,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xa7, 0x95, 0x96 +; .byte 0x57, 0xa4, 0x84, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 
0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -98,10 +98,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmul.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmul.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,13 +116,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xa7, 0x95, 0x96 +; .byte 0x57, 0xa4, 0x84, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -140,10 +140,10 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmul.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmul.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -158,13 +158,13 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xa7, 0x95, 0x96 +; .byte 0x57, 0xa4, 0x84, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -183,9 +183,9 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmul.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmul.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -200,9 +200,9 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xe6, 0x95, 0x96 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xe4, 0x85, 0x96 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -221,9 +221,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmul.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmul.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) 
#avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -238,11 +238,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xe6, 0x95, 0x96 +; .byte 0x57, 0xe4, 0x85, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -261,9 +261,9 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmul.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmul.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -278,11 +278,11 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xe6, 0x95, 0x96 +; .byte 0x57, 0xe4, 0x85, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -301,9 +301,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmul.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmul.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -318,11 +318,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xe6, 0x95, 0x96 +; .byte 0x57, 0xe4, 0x85, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-ineg.clif b/cranelift/filetests/filetests/isa/riscv64/simd-ineg.clif index d7f92a097e1f..00420bff6ef2 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-ineg.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-ineg.clif @@ -16,9 +16,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vneg.v v12,v9 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vneg.v v8,v8 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -33,9 +33,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0x46, 0x90, 0x0e -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x44, 0x80, 0x0e +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -53,9 +53,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vneg.v v12,v9 
#avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vneg.v v8,v8 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -70,11 +70,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x46, 0x90, 0x0e +; .byte 0x57, 0x44, 0x80, 0x0e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -92,9 +92,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vneg.v v12,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vneg.v v8,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -109,11 +109,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x46, 0x90, 0x0e +; .byte 0x57, 0x44, 0x80, 0x0e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -131,9 +131,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vneg.v v12,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vneg.v v8,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -148,11 +148,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x46, 0x90, 0x0e +; .byte 0x57, 0x44, 0x80, 0x0e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-insertlane.clif b/cranelift/filetests/filetests/isa/riscv64/simd-insertlane.clif index f7e2601041ca..e47e9af6b63e 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-insertlane.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-insertlane.clif @@ -15,11 +15,11 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; lui a5,8 -; vmv.s.x v0,a5 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vxm v10,v9,a1,v0.t #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; lui a2,8 +; vmv.s.x v0,a2 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vxm v9,v8,a1,v0.t #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,13 +34,13 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; lui a5, 8 +; .byte 
0x07, 0x84, 0x0f, 0x02 +; lui a2, 8 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xe0, 0x07, 0x42 +; .byte 0x57, 0x60, 0x06, 0x42 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x57, 0xc5, 0x95, 0x5c -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0xd7, 0xc4, 0x85, 0x5c +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -58,11 +58,11 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,32 -; vmv.s.x v0,a5 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vxm v10,v9,a1,v0.t #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a2,32 +; vmv.s.x v0,a2 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vxm v9,v8,a1,v0.t #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -77,14 +77,14 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, 0x20 +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a2, zero, 0x20 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xe0, 0x07, 0x42 +; .byte 0x57, 0x60, 0x06, 0x42 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xc5, 0x95, 0x5c +; .byte 0xd7, 0xc4, 0x85, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -102,10 +102,10 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.v.i v0,4 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vxm v15,v9,a1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmerge.vxm v9,v8,a1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -120,13 +120,13 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0x30, 0x02, 0x5e ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc7, 0x95, 0x5c +; .byte 0xd7, 0xc4, 0x85, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -144,10 +144,10 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.v.i v0,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vxm v15,v9,a1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmerge.vxm v9,v8,a1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -162,12 +162,12 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0xb0, 0x00, 0x5e -; .byte 0xd7, 0xc7, 0x95, 0x5c +; .byte 0xd7, 0xc4, 0x85, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -185,10 +185,10 @@ 
block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.v.i v0,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vfmerge.vfm v15,v9,fa0,v0.t #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vfmerge.vfm v9,v8,fa0,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -203,12 +203,12 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0xb0, 0x00, 0x5e -; .byte 0xd7, 0x57, 0x95, 0x5c +; .byte 0xd7, 0x54, 0x85, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -226,10 +226,10 @@ block0(v0: f64x2, v1: f64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.v.i v0,2 #avl=2, #vtype=(e64, m1, ta, ma) -; vfmerge.vfm v15,v9,fa0,v0.t #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vfmerge.vfm v9,v8,fa0,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -244,12 +244,12 @@ block0(v0: f64x2, v1: f64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0x30, 0x01, 0x5e -; .byte 0xd7, 0x57, 0x95, 0x5c +; .byte 0xd7, 0x54, 0x85, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -267,10 +267,10 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.v.i v0,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vfmerge.vfm v15,v9,fa0,v0.t #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vfmerge.vfm v9,v8,fa0,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -285,13 +285,13 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0xb0, 0x00, 0x5e ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x57, 0x95, 0x5c +; .byte 0xd7, 0x54, 0x85, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -309,10 +309,10 @@ block0(v0: f32x4, v1: f32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.v.i v0,2 #avl=2, #vtype=(e64, m1, ta, ma) -; vfmerge.vfm v15,v9,fa0,v0.t #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vfmerge.vfm v9,v8,fa0,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -327,13 
+327,13 @@ block0(v0: f32x4, v1: f32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0x30, 0x01, 0x5e ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x57, 0x95, 0x5c +; .byte 0xd7, 0x54, 0x85, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -352,11 +352,11 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; lui a4,8 -; vmv.s.x v0,a4 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v8,v9,10,v0.t #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; lui a1,8 +; vmv.s.x v0,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,10,v0.t #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -371,13 +371,13 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; lui a4, 8 +; .byte 0x07, 0x84, 0x0f, 0x02 +; lui a1, 8 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x60, 0x07, 0x42 +; .byte 0x57, 0xe0, 0x05, 0x42 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x57, 0x34, 0x95, 0x5c -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xd7, 0x34, 0x85, 0x5c +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -396,11 +396,11 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a4,32 -; vmv.s.x v0,a4 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v8,v9,-2,v0.t #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,32 +; vmv.s.x v0,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vim v9,v8,-2,v0.t #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -415,14 +415,14 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a4, zero, 0x20 +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, 0x20 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x60, 0x07, 0x42 +; .byte 0x57, 0xe0, 0x05, 0x42 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x34, 0x9f, 0x5c +; .byte 0xd7, 0x34, 0x8f, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -441,10 +441,10 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.v.i v0,4 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v14,v9,15,v0.t #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmerge.vim v9,v8,15,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -459,13 +459,13 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0x30, 0x02, 0x5e ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 
0x57, 0xb7, 0x97, 0x5c +; .byte 0xd7, 0xb4, 0x87, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -484,10 +484,10 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.v.i v0,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v14,v9,-9,v0.t #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmerge.vim v9,v8,-9,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -502,12 +502,12 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0xb0, 0x00, 0x5e -; .byte 0x57, 0xb7, 0x9b, 0x5c +; .byte 0xd7, 0xb4, 0x8b, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-ishl-const.clif b/cranelift/filetests/filetests/isa/riscv64/simd-ishl-const.clif index 6b40be67e0f3..a37569615eb8 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-ishl-const.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-ishl-const.clif @@ -17,9 +17,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,9 +34,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0x96 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0x96 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -55,9 +55,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -72,9 +72,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0x96 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0x96 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -93,9 +93,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi 
sp,sp,16 @@ -110,9 +110,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0x96 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0x96 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -131,9 +131,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -148,9 +148,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0x96 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0x96 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -170,9 +170,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -187,9 +187,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0x96 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0x96 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -208,9 +208,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vi v12,v9,5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vi v8,v8,5 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -225,11 +225,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xb6, 0x92, 0x96 +; .byte 0x57, 0xb4, 0x82, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -248,9 +248,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vi v12,v9,5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vi v8,v8,5 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -265,11 +265,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xb6, 
0x92, 0x96 +; .byte 0x57, 0xb4, 0x82, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -288,9 +288,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vi v12,v9,5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vi v8,v8,5 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -305,11 +305,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xb6, 0x92, 0x96 +; .byte 0x57, 0xb4, 0x82, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -328,9 +328,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vi v12,v9,5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vi v8,v8,5 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -345,11 +345,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xb6, 0x92, 0x96 +; .byte 0x57, 0xb4, 0x82, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -369,9 +369,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vi v12,v9,5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vi v8,v8,5 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -386,11 +386,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xb6, 0x92, 0x96 +; .byte 0x57, 0xb4, 0x82, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -409,9 +409,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vi v12,v9,5 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vi v8,v8,5 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -426,11 +426,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x92, 0x96 +; .byte 0x57, 0xb4, 0x82, 0x96 ; .byte 
0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -449,9 +449,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vi v12,v9,5 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vi v8,v8,5 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -466,11 +466,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x92, 0x96 +; .byte 0x57, 0xb4, 0x82, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -489,9 +489,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vi v12,v9,5 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vi v8,v8,5 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -506,11 +506,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x92, 0x96 +; .byte 0x57, 0xb4, 0x82, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -529,9 +529,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vi v12,v9,5 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vi v8,v8,5 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -546,11 +546,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x92, 0x96 +; .byte 0x57, 0xb4, 0x82, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -570,9 +570,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vi v12,v9,5 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vi v8,v8,5 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -587,11 +587,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x92, 0x96 +; .byte 0x57, 0xb4, 0x82, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 
0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -610,9 +610,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vi v12,v9,5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vi v8,v8,5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -627,11 +627,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x92, 0x96 +; .byte 0x57, 0xb4, 0x82, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -650,9 +650,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vi v12,v9,5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vi v8,v8,5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -667,11 +667,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x92, 0x96 +; .byte 0x57, 0xb4, 0x82, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -690,9 +690,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vi v12,v9,5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vi v8,v8,5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -707,11 +707,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x92, 0x96 +; .byte 0x57, 0xb4, 0x82, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -730,9 +730,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vi v12,v9,5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vi v8,v8,5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -747,11 +747,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x92, 0x96 +; .byte 0x57, 0xb4, 0x82, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 
8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -771,9 +771,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vi v12,v9,5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vi v8,v8,5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -788,11 +788,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x92, 0x96 +; .byte 0x57, 0xb4, 0x82, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-ishl.clif b/cranelift/filetests/filetests/isa/riscv64/simd-ishl.clif index 742d6699f016..3cdecb929c40 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-ishl.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-ishl.clif @@ -15,9 +15,9 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -32,9 +32,9 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0x96 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x96 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -52,9 +52,9 @@ block0(v0: i8x16, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -69,9 +69,9 @@ block0(v0: i8x16, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0x96 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x96 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -89,9 +89,9 @@ block0(v0: i8x16, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -106,9 +106,9 @@ block0(v0: i8x16, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0x96 -; .byte 0xa7, 0x06, 0x05, 0x02 +; 
.byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x96 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -126,9 +126,9 @@ block0(v0: i8x16, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -143,9 +143,9 @@ block0(v0: i8x16, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0x96 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x96 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -163,9 +163,9 @@ block0(v0: i8x16, v1: i128): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vx v14,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -180,9 +180,9 @@ block0(v0: i8x16, v1: i128): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xc7, 0x95, 0x96 -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x96 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -200,9 +200,9 @@ block0(v0: i16x8, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -217,11 +217,11 @@ block0(v0: i16x8, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0x96 +; .byte 0x57, 0xc4, 0x85, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -239,9 +239,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -256,11 +256,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0x96 +; .byte 0x57, 0xc4, 0x85, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld 
ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -278,9 +278,9 @@ block0(v0: i16x8, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -295,11 +295,11 @@ block0(v0: i16x8, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0x96 +; .byte 0x57, 0xc4, 0x85, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -317,9 +317,9 @@ block0(v0: i16x8, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -334,11 +334,11 @@ block0(v0: i16x8, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0x96 +; .byte 0x57, 0xc4, 0x85, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -356,9 +356,9 @@ block0(v0: i16x8, v1: i128): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vx v14,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -373,11 +373,11 @@ block0(v0: i16x8, v1: i128): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xc7, 0x95, 0x96 +; .byte 0x57, 0xc4, 0x85, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -395,9 +395,9 @@ block0(v0: i32x4, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -412,11 +412,11 @@ block0(v0: i32x4, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x96 +; .byte 0x57, 0xc4, 0x85, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; 
.byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -434,9 +434,9 @@ block0(v0: i32x4, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -451,11 +451,11 @@ block0(v0: i32x4, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x96 +; .byte 0x57, 0xc4, 0x85, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -473,9 +473,9 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -490,11 +490,11 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x96 +; .byte 0x57, 0xc4, 0x85, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -512,9 +512,9 @@ block0(v0: i32x4, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -529,11 +529,11 @@ block0(v0: i32x4, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x96 +; .byte 0x57, 0xc4, 0x85, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -551,9 +551,9 @@ block0(v0: i32x4, v1: i128): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vx v14,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -568,11 +568,11 @@ block0(v0: i32x4, v1: i128): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xc7, 0x95, 0x96 +; .byte 0x57, 0xc4, 0x85, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; 
.byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -590,9 +590,9 @@ block0(v0: i64x2, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -607,11 +607,11 @@ block0(v0: i64x2, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x96 +; .byte 0x57, 0xc4, 0x85, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -629,9 +629,9 @@ block0(v0: i64x2, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -646,11 +646,11 @@ block0(v0: i64x2, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x96 +; .byte 0x57, 0xc4, 0x85, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -668,9 +668,9 @@ block0(v0: i64x2, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -685,11 +685,11 @@ block0(v0: i64x2, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x96 +; .byte 0x57, 0xc4, 0x85, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -707,9 +707,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -724,11 +724,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x96 +; .byte 0x57, 0xc4, 0x85, 0x96 ; 
.byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -746,9 +746,9 @@ block0(v0: i64x2, v1: i128): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsll.vx v14,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsll.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -763,11 +763,11 @@ block0(v0: i64x2, v1: i128): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc7, 0x95, 0x96 +; .byte 0x57, 0xc4, 0x85, 0x96 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-isub-splat.clif b/cranelift/filetests/filetests/isa/riscv64/simd-isub-splat.clif index 3aaebfd1907d..2f98d5c7d1f6 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-isub-splat.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-isub-splat.clif @@ -17,9 +17,9 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vrsub.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vrsub.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,9 +34,9 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0x0e -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x0e +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -55,9 +55,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vrsub.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vrsub.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -72,11 +72,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0x0e +; .byte 0x57, 0xc4, 0x85, 0x0e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -95,9 +95,9 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vrsub.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vrsub.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ 
-112,11 +112,11 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x0e +; .byte 0x57, 0xc4, 0x85, 0x0e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -135,9 +135,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vrsub.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vrsub.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -152,11 +152,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x0e +; .byte 0x57, 0xc4, 0x85, 0x0e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -175,9 +175,9 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsub.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsub.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -192,9 +192,9 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0x0a -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x0a +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -213,9 +213,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsub.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsub.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -230,11 +230,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0x0a +; .byte 0x57, 0xc4, 0x85, 0x0a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -253,9 +253,9 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsub.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsub.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -270,11 +270,11 @@ block0(v0: 
i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x0a +; .byte 0x57, 0xc4, 0x85, 0x0a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -293,9 +293,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsub.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsub.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -310,11 +310,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x0a +; .byte 0x57, 0xc4, 0x85, 0x0a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -334,9 +334,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vi v12,v9,-5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vi v8,v8,-5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -351,9 +351,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x9d, 0x02 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x8d, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -373,10 +373,10 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,-16 -; vsub.vx v13,v9,a5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,-16 +; vsub.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -391,12 +391,12 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, -0x10 +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, -0x10 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x97, 0x0a +; .byte 0x57, 0xc4, 0x85, 0x0a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -416,9 +416,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vi v12,v9,-15 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vi v8,v8,-15 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -433,11 +433,11 @@ block0(v0: 
i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x98, 0x02 +; .byte 0x57, 0xb4, 0x88, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -457,9 +457,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vi v12,v9,5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vi v8,v8,5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -474,11 +474,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x92, 0x02 +; .byte 0x57, 0xb4, 0x82, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -498,9 +498,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vrsub.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vrsub.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -515,9 +515,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0x0e -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0x0e +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -537,9 +537,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vrsub.vi v12,v9,-16 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vrsub.vi v8,v8,-16 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -554,11 +554,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x36, 0x98, 0x0e +; .byte 0x57, 0x34, 0x88, 0x0e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -578,9 +578,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vrsub.vi v12,v9,15 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vrsub.vi v8,v8,15 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -595,11 +595,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 
0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x97, 0x0e +; .byte 0x57, 0xb4, 0x87, 0x0e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -619,9 +619,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vrsub.vi v12,v9,-5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vrsub.vi v8,v8,-5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -636,11 +636,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x9d, 0x0e +; .byte 0x57, 0xb4, 0x8d, 0x0e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -661,8 +661,8 @@ block0(v0: i16x8, v1: i8): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsub.wx v13,v9,a1 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsub.wx v8,v9,a1 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -679,9 +679,9 @@ block0(v0: i16x8, v1: i8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0xd7, 0xe6, 0x95, 0xde +; .byte 0x57, 0xe4, 0x95, 0xde ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -702,8 +702,8 @@ block0(v0: i32x4, v1: i16): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsub.wx v13,v9,a1 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsub.wx v8,v9,a1 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -720,9 +720,9 @@ block0(v0: i32x4, v1: i16): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0xd7, 0xe6, 0x95, 0xde +; .byte 0x57, 0xe4, 0x95, 0xde ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -743,8 +743,8 @@ block0(v0: i64x2, v1: i32): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsub.wx v13,v9,a1 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsub.wx v8,v9,a1 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -761,9 +761,9 @@ block0(v0: i64x2, v1: i32): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0xd7, 0xe6, 0x95, 0xde +; .byte 0x57, 0xe4, 0x95, 0xde ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -784,8 +784,8 @@ block0(v0: i16x8, v1: i8): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsubu.wx v13,v9,a1 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v 
v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsubu.wx v8,v9,a1 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -802,9 +802,9 @@ block0(v0: i16x8, v1: i8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0xd7, 0xe6, 0x95, 0xda +; .byte 0x57, 0xe4, 0x95, 0xda ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -825,8 +825,8 @@ block0(v0: i32x4, v1: i16): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsubu.wx v13,v9,a1 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsubu.wx v8,v9,a1 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -843,9 +843,9 @@ block0(v0: i32x4, v1: i16): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0xd7, 0xe6, 0x95, 0xda +; .byte 0x57, 0xe4, 0x95, 0xda ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -866,8 +866,8 @@ block0(v0: i64x2, v1: i32): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsubu.wx v13,v9,a1 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsubu.wx v8,v9,a1 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -884,9 +884,9 @@ block0(v0: i64x2, v1: i32): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0xd7, 0xe6, 0x95, 0xda +; .byte 0x57, 0xe4, 0x95, 0xda ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-isub-swiden-high.clif b/cranelift/filetests/filetests/isa/riscv64/simd-isub-swiden-high.clif index b63360f1acf8..9b25aa4ebb65 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-isub-swiden-high.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-isub-swiden-high.clif @@ -17,11 +17,11 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vslidedown.vi v8,v11,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vwsub.vv v10,v14,v8 #avl=2, #vtype=(e32, mf2, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v8,v8,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vslidedown.vi v9,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vwsub.vv v10,v8,v9 #avl=2, #vtype=(e32, mf2, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -37,14 +37,14 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x37, 0x91, 0x3e -; .byte 0x57, 0x34, 0xb1, 0x3e +; .byte 0x57, 0x34, 0x81, 0x3e +; .byte 0xd7, 0x34, 0x91, 0x3e ; .byte 
0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0x25, 0xe4, 0xce +; .byte 0x57, 0xa5, 0x84, 0xce ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) @@ -66,11 +66,11 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vslidedown.vi v8,v11,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vwsub.vv v10,v14,v8 #avl=4, #vtype=(e16, mf2, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v8,v8,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vslidedown.vi v9,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vwsub.vv v10,v8,v9 #avl=4, #vtype=(e16, mf2, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -86,14 +86,14 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x37, 0x92, 0x3e -; .byte 0x57, 0x34, 0xb2, 0x3e +; .byte 0x57, 0x34, 0x82, 0x3e +; .byte 0xd7, 0x34, 0x92, 0x3e ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0x25, 0xe4, 0xce +; .byte 0x57, 0xa5, 0x84, 0xce ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) @@ -115,11 +115,11 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v8,v11,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vwsub.vv v10,v14,v8 #avl=8, #vtype=(e8, mf2, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v8,v8,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vwsub.vv v10,v8,v9 #avl=8, #vtype=(e8, mf2, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -135,13 +135,13 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x37, 0x94, 0x3e -; .byte 0x57, 0x34, 0xb4, 0x3e +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x34, 0x84, 0x3e +; .byte 0xd7, 0x34, 0x94, 0x3e ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0x25, 0xe4, 0xce +; .byte 0x57, 0xa5, 0x84, 0xce ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) @@ -164,10 +164,10 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v13,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vwsub.vx v15,v13,a1 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vwsub.vx v8,v9,a1 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -182,13 +182,13 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 
0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x36, 0x91, 0x3e +; .byte 0xd7, 0x34, 0x81, 0x3e ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0xd7, 0xe7, 0xd5, 0xce +; .byte 0x57, 0xe4, 0x95, 0xce ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -209,10 +209,10 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v13,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vwsub.vx v15,v13,a1 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vwsub.vx v8,v9,a1 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -227,13 +227,13 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0x36, 0x92, 0x3e +; .byte 0xd7, 0x34, 0x82, 0x3e ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0xd7, 0xe7, 0xd5, 0xce +; .byte 0x57, 0xe4, 0x95, 0xce ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -254,10 +254,10 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v13,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vwsub.vx v15,v13,a1 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vwsub.vx v8,v9,a1 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -272,12 +272,12 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0x36, 0x94, 0x3e +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0xd7, 0x34, 0x84, 0x3e ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0xd7, 0xe7, 0xd5, 0xce +; .byte 0x57, 0xe4, 0x95, 0xce ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -297,10 +297,10 @@ block0(v0: i32x4, v1: i64x2): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vwsub.wv v8,v11,v14 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vwsub.wv v9,v8,v10 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -317,13 +317,13 @@ block0(v0: i32x4, v1: i64x2): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x37, 0x91, 0x3e +; .byte 0x57, 0x35, 
0x91, 0x3e ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0x24, 0xb7, 0xde +; .byte 0xd7, 0x24, 0x85, 0xde ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -343,10 +343,10 @@ block0(v0: i16x8, v1: i32x4): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vwsub.wv v8,v11,v14 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vwsub.wv v9,v8,v10 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -363,13 +363,13 @@ block0(v0: i16x8, v1: i32x4): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x37, 0x92, 0x3e +; .byte 0x57, 0x35, 0x92, 0x3e ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0x24, 0xb7, 0xde +; .byte 0xd7, 0x24, 0x85, 0xde ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -389,10 +389,10 @@ block0(v0: i8x16, v1: i16x8): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vwsub.wv v8,v11,v14 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vwsub.wv v9,v8,v10 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -409,12 +409,12 @@ block0(v0: i8x16, v1: i16x8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x37, 0x94, 0x3e +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x35, 0x94, 0x3e ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0x24, 0xb7, 0xde +; .byte 0xd7, 0x24, 0x85, 0xde ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-isub-swiden-low.clif b/cranelift/filetests/filetests/isa/riscv64/simd-isub-swiden-low.clif index 7359dacbcfc1..9cf54dfcb853 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-isub-swiden-low.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-isub-swiden-low.clif @@ -17,10 +17,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsub.vv v14,v9,v11 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsub.vv v9,v8,v10 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -35,13 +35,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: 
; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0xa7, 0x95, 0xce +; .byte 0xd7, 0x24, 0x85, 0xce ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -61,10 +61,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsub.vv v14,v9,v11 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsub.vv v9,v8,v10 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -79,13 +79,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0xa7, 0x95, 0xce +; .byte 0xd7, 0x24, 0x85, 0xce ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -105,10 +105,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsub.vv v14,v9,v11 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsub.vv v9,v8,v10 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -123,13 +123,13 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0xa7, 0x95, 0xce +; .byte 0xd7, 0x24, 0x85, 0xce ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -151,8 +151,8 @@ block0(v0: i32x4, v1: i32): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsub.vx v13,v9,a1 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsub.vx v8,v9,a1 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -169,9 +169,9 @@ block0(v0: i32x4, v1: i32): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0xd7, 0xe6, 0x95, 0xce +; .byte 0x57, 0xe4, 0x95, 0xce ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -193,8 +193,8 @@ block0(v0: i16x8, v1: i16): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) 
-; vwsub.vx v13,v9,a1 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsub.vx v8,v9,a1 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -211,9 +211,9 @@ block0(v0: i16x8, v1: i16): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0xd7, 0xe6, 0x95, 0xce +; .byte 0x57, 0xe4, 0x95, 0xce ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -235,8 +235,8 @@ block0(v0: i8x16, v1: i8): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsub.vx v13,v9,a1 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsub.vx v8,v9,a1 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -253,9 +253,9 @@ block0(v0: i8x16, v1: i8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0xd7, 0xe6, 0x95, 0xce +; .byte 0x57, 0xe4, 0x95, 0xce ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -274,10 +274,10 @@ block0(v0: i32x4, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsub.wv v14,v11,v9 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsub.wv v9,v10,v8 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -292,13 +292,13 @@ block0(v0: i32x4, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0xa7, 0xb4, 0xde +; .byte 0xd7, 0x24, 0xa4, 0xde ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -317,10 +317,10 @@ block0(v0: i16x8, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsub.wv v14,v11,v9 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsub.wv v9,v10,v8 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -335,13 +335,13 @@ block0(v0: i16x8, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0xa7, 0xb4, 0xde +; .byte 0xd7, 0x24, 0xa4, 0xde ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; 
addi sp, sp, 0x10 @@ -360,10 +360,10 @@ block0(v0: i8x16, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsub.wv v14,v11,v9 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsub.wv v9,v10,v8 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -378,13 +378,13 @@ block0(v0: i8x16, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0xa7, 0xb4, 0xde +; .byte 0xd7, 0x24, 0xa4, 0xde ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-isub-uwiden-high.clif b/cranelift/filetests/filetests/isa/riscv64/simd-isub-uwiden-high.clif index 3908da375ecf..87e4d43ae160 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-isub-uwiden-high.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-isub-uwiden-high.clif @@ -17,11 +17,11 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vslidedown.vi v8,v11,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vwsubu.vv v10,v14,v8 #avl=2, #vtype=(e32, mf2, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v8,v8,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vslidedown.vi v9,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vwsubu.vv v10,v8,v9 #avl=2, #vtype=(e32, mf2, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -37,14 +37,14 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x37, 0x91, 0x3e -; .byte 0x57, 0x34, 0xb1, 0x3e +; .byte 0x57, 0x34, 0x81, 0x3e +; .byte 0xd7, 0x34, 0x91, 0x3e ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0x25, 0xe4, 0xca +; .byte 0x57, 0xa5, 0x84, 0xca ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) @@ -66,11 +66,11 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vslidedown.vi v8,v11,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vwsubu.vv v10,v14,v8 #avl=4, #vtype=(e16, mf2, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v8,v8,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vslidedown.vi v9,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vwsubu.vv v10,v8,v9 #avl=4, #vtype=(e16, mf2, ta, ma) ; 
vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -86,14 +86,14 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x37, 0x92, 0x3e -; .byte 0x57, 0x34, 0xb2, 0x3e +; .byte 0x57, 0x34, 0x82, 0x3e +; .byte 0xd7, 0x34, 0x92, 0x3e ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0x25, 0xe4, 0xca +; .byte 0x57, 0xa5, 0x84, 0xca ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) @@ -115,11 +115,11 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v8,v11,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vwsubu.vv v10,v14,v8 #avl=8, #vtype=(e8, mf2, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v8,v8,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vwsubu.vv v10,v8,v9 #avl=8, #vtype=(e8, mf2, ta, ma) ; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -135,13 +135,13 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x37, 0x94, 0x3e -; .byte 0x57, 0x34, 0xb4, 0x3e +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x34, 0x84, 0x3e +; .byte 0xd7, 0x34, 0x94, 0x3e ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0x25, 0xe4, 0xca +; .byte 0x57, 0xa5, 0x84, 0xca ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x05, 0x05, 0x02 ; ld ra, 8(sp) @@ -164,10 +164,10 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v13,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vwsubu.vx v15,v13,a1 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vwsubu.vx v8,v9,a1 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -182,13 +182,13 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x36, 0x91, 0x3e +; .byte 0xd7, 0x34, 0x81, 0x3e ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0xd7, 0xe7, 0xd5, 0xca +; .byte 0x57, 0xe4, 0x95, 0xca ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -209,10 +209,10 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v13,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vwsubu.vx v15,v13,a1 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,4 
#avl=8, #vtype=(e16, m1, ta, ma) +; vwsubu.vx v8,v9,a1 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -227,13 +227,13 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0x36, 0x92, 0x3e +; .byte 0xd7, 0x34, 0x82, 0x3e ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0xd7, 0xe7, 0xd5, 0xca +; .byte 0x57, 0xe4, 0x95, 0xca ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -254,10 +254,10 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v13,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vwsubu.vx v15,v13,a1 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vwsubu.vx v8,v9,a1 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -272,12 +272,12 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0x36, 0x94, 0x3e +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0xd7, 0x34, 0x84, 0x3e ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0xd7, 0xe7, 0xd5, 0xca +; .byte 0x57, 0xe4, 0x95, 0xca ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -297,10 +297,10 @@ block0(v0: i32x4, v1: i64x2): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vwsubu.wv v8,v11,v14 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vwsubu.wv v9,v8,v10 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -317,13 +317,13 @@ block0(v0: i32x4, v1: i64x2): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x37, 0x91, 0x3e +; .byte 0x57, 0x35, 0x91, 0x3e ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0x24, 0xb7, 0xda +; .byte 0xd7, 0x24, 0x85, 0xda ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -343,10 +343,10 @@ block0(v0: i16x8, v1: i32x4): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vwsubu.wv v8,v11,v14 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vwsubu.wv v9,v8,v10 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, 
ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -363,13 +363,13 @@ block0(v0: i16x8, v1: i32x4): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x37, 0x92, 0x3e +; .byte 0x57, 0x35, 0x92, 0x3e ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0x24, 0xb7, 0xda +; .byte 0xd7, 0x24, 0x85, 0xda ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -389,10 +389,10 @@ block0(v0: i8x16, v1: i16x8): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v14,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vwsubu.wv v8,v11,v14 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v10,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vwsubu.wv v9,v8,v10 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -409,12 +409,12 @@ block0(v0: i8x16, v1: i16x8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x37, 0x94, 0x3e +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x35, 0x94, 0x3e ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0x24, 0xb7, 0xda +; .byte 0xd7, 0x24, 0x85, 0xda ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x04, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-isub-uwiden-low.clif b/cranelift/filetests/filetests/isa/riscv64/simd-isub-uwiden-low.clif index bf08d9c2421d..898c1a7e6f5f 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-isub-uwiden-low.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-isub-uwiden-low.clif @@ -17,10 +17,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsubu.vv v14,v9,v11 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsubu.vv v9,v8,v10 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -35,13 +35,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0xa7, 0x95, 0xca +; .byte 0xd7, 0x24, 0x85, 0xca ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -61,10 +61,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsubu.vv v14,v9,v11 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, 
ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsubu.vv v9,v8,v10 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -79,13 +79,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0xa7, 0x95, 0xca +; .byte 0xd7, 0x24, 0x85, 0xca ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -105,10 +105,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsubu.vv v14,v9,v11 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsubu.vv v9,v8,v10 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -123,13 +123,13 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0xa7, 0x95, 0xca +; .byte 0xd7, 0x24, 0x85, 0xca ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -151,8 +151,8 @@ block0(v0: i32x4, v1: i32): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsubu.vx v13,v9,a1 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsubu.vx v8,v9,a1 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -169,9 +169,9 @@ block0(v0: i32x4, v1: i32): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0xd7, 0xe6, 0x95, 0xca +; .byte 0x57, 0xe4, 0x95, 0xca ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -193,8 +193,8 @@ block0(v0: i16x8, v1: i16): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsubu.vx v13,v9,a1 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsubu.vx v8,v9,a1 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -211,9 +211,9 @@ block0(v0: i16x8, v1: i16): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0xd7, 0xe6, 0x95, 0xca +; .byte 0x57, 0xe4, 0x95, 0xca ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -235,8 +235,8 @@ block0(v0: i8x16, v1: i8): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsubu.vx v13,v9,a1 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, 
ta, ma) +; vwsubu.vx v8,v9,a1 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -253,9 +253,9 @@ block0(v0: i8x16, v1: i8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0xd7, 0xe6, 0x95, 0xca +; .byte 0x57, 0xe4, 0x95, 0xca ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -274,10 +274,10 @@ block0(v0: i32x4, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsubu.wv v14,v11,v9 #avl=2, #vtype=(e32, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsubu.wv v9,v10,v8 #avl=2, #vtype=(e32, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -292,13 +292,13 @@ block0(v0: i32x4, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0xa7, 0xb4, 0xda +; .byte 0xd7, 0x24, 0xa4, 0xda ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -317,10 +317,10 @@ block0(v0: i16x8, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsubu.wv v14,v11,v9 #avl=4, #vtype=(e16, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsubu.wv v9,v10,v8 #avl=4, #vtype=(e16, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -335,13 +335,13 @@ block0(v0: i16x8, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0xa7, 0xb4, 0xda +; .byte 0xd7, 0x24, 0xa4, 0xda ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -360,10 +360,10 @@ block0(v0: i8x16, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vwsubu.wv v14,v11,v9 #avl=8, #vtype=(e8, mf2, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vwsubu.wv v9,v10,v8 #avl=8, #vtype=(e8, mf2, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -378,13 +378,13 @@ block0(v0: i8x16, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, 
sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0xa7, 0xb4, 0xda +; .byte 0xd7, 0x24, 0xa4, 0xda ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-isub.clif b/cranelift/filetests/filetests/isa/riscv64/simd-isub.clif index 9ddfa1769249..3446b796ba08 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-isub.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-isub.clif @@ -16,10 +16,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsub.vv v14,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsub.vv v8,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,11 +34,11 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x87, 0x95, 0x0a -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x84, 0x84, 0x0a +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -56,10 +56,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsub.vv v14,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsub.vv v8,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -74,13 +74,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x87, 0x95, 0x0a +; .byte 0x57, 0x84, 0x84, 0x0a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -98,10 +98,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsub.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsub.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,13 +116,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 
0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x87, 0x95, 0x0a +; .byte 0x57, 0x84, 0x84, 0x0a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -140,10 +140,10 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsub.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsub.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -158,13 +158,13 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x87, 0x95, 0x0a +; .byte 0x57, 0x84, 0x84, 0x0a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -183,9 +183,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsub.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsub.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -200,11 +200,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x0a +; .byte 0x57, 0xc4, 0x85, 0x0a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -223,9 +223,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vrsub.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vrsub.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -240,11 +240,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x0e +; .byte 0x57, 0xc4, 0x85, 0x0e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-load-extend.clif b/cranelift/filetests/filetests/isa/riscv64/simd-load-extend.clif index fe4fb297681c..cd2cb13ade24 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-load-extend.clif +++ 
b/cranelift/filetests/filetests/isa/riscv64/simd-load-extend.clif @@ -11,19 +11,19 @@ block0(v0: i64): ; VCode: ; block0: -; vle64.v v11,0(a1) #avl=1, #vtype=(e64, m1, ta, ma) -; vzext.vf2 v13,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle64.v v9,0(a1) #avl=1, #vtype=(e64, m1, ta, ma) +; vzext.vf2 v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0xf0, 0x80, 0xcd -; .byte 0x87, 0xf5, 0x05, 0x02 ; trap: heap_oob +; .byte 0x87, 0xf4, 0x05, 0x02 ; trap: heap_oob ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0x26, 0xb3, 0x4a +; .byte 0x57, 0x24, 0x93, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %sload8x8(i64) -> i16x8 { @@ -34,19 +34,19 @@ block0(v0: i64): ; VCode: ; block0: -; vle64.v v11,0(a1) #avl=1, #vtype=(e64, m1, ta, ma) -; vsext.vf2 v13,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle64.v v9,0(a1) #avl=1, #vtype=(e64, m1, ta, ma) +; vsext.vf2 v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0xf0, 0x80, 0xcd -; .byte 0x87, 0xf5, 0x05, 0x02 ; trap: heap_oob +; .byte 0x87, 0xf4, 0x05, 0x02 ; trap: heap_oob ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xa6, 0xb3, 0x4a +; .byte 0x57, 0xa4, 0x93, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %uload16x4(i64) -> i32x4 { @@ -57,19 +57,19 @@ block0(v0: i64): ; VCode: ; block0: -; vle64.v v11,0(a1) #avl=1, #vtype=(e64, m1, ta, ma) -; vzext.vf2 v13,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle64.v v9,0(a1) #avl=1, #vtype=(e64, m1, ta, ma) +; vzext.vf2 v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0xf0, 0x80, 0xcd -; .byte 0x87, 0xf5, 0x05, 0x02 ; trap: heap_oob +; .byte 0x87, 0xf4, 0x05, 0x02 ; trap: heap_oob ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x26, 0xb3, 0x4a +; .byte 0x57, 0x24, 0x93, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %sload16x4(i64) -> i32x4 { @@ -80,19 +80,19 @@ block0(v0: i64): ; VCode: ; block0: -; vle64.v v11,0(a1) #avl=1, #vtype=(e64, m1, ta, ma) -; vsext.vf2 v13,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle64.v v9,0(a1) #avl=1, #vtype=(e64, m1, ta, ma) +; vsext.vf2 v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0xf0, 0x80, 0xcd -; .byte 0x87, 0xf5, 0x05, 0x02 ; trap: heap_oob +; .byte 0x87, 0xf4, 0x05, 0x02 ; trap: heap_oob ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xa6, 0xb3, 0x4a +; .byte 0x57, 0xa4, 0x93, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %uload32x2(i64) -> i64x2 { @@ -103,19 +103,19 @@ block0(v0: i64): ; VCode: ; block0: -; vle64.v v11,0(a1) #avl=1, #vtype=(e64, m1, ta, ma) -; vzext.vf2 v13,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle64.v v9,0(a1) #avl=1, #vtype=(e64, m1, ta, ma) +; vzext.vf2 v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, 
#vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0xf0, 0x80, 0xcd -; .byte 0x87, 0xf5, 0x05, 0x02 ; trap: heap_oob +; .byte 0x87, 0xf4, 0x05, 0x02 ; trap: heap_oob ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x26, 0xb3, 0x4a +; .byte 0x57, 0x24, 0x93, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %sload32x2(i64) -> i64x2 { @@ -126,18 +126,18 @@ block0(v0: i64): ; VCode: ; block0: -; vle64.v v11,0(a1) #avl=1, #vtype=(e64, m1, ta, ma) -; vsext.vf2 v13,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle64.v v9,0(a1) #avl=1, #vtype=(e64, m1, ta, ma) +; vsext.vf2 v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0xf0, 0x80, 0xcd -; .byte 0x87, 0xf5, 0x05, 0x02 ; trap: heap_oob +; .byte 0x87, 0xf4, 0x05, 0x02 ; trap: heap_oob ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xa6, 0xb3, 0x4a +; .byte 0x57, 0xa4, 0x93, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-load-splat.clif b/cranelift/filetests/filetests/isa/riscv64/simd-load-splat.clif index eb6741e78668..079a90ad7669 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-load-splat.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-load-splat.clif @@ -12,17 +12,17 @@ block0(v0: i64): ; VCode: ; block0: -; lb a4,0(a1) -; vmv.v.x v12,a4 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; lb a1,0(a1) +; vmv.v.x v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lb a4, 0(a1) ; trap: heap_oob +; lb a1, 0(a1) ; trap: heap_oob ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x57, 0x46, 0x07, 0x5e -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x57, 0xc4, 0x05, 0x5e +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %load_splat_i16x8(i64) -> i16x8 { @@ -34,18 +34,18 @@ block0(v0: i64): ; VCode: ; block0: -; lh a4,0(a1) -; vmv.v.x v12,a4 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; lh a1,0(a1) +; vmv.v.x v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lh a4, 0(a1) ; trap: heap_oob +; lh a1, 0(a1) ; trap: heap_oob ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x46, 0x07, 0x5e +; .byte 0x57, 0xc4, 0x05, 0x5e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %load_splat_i32x4(i64) -> i32x4 { @@ -57,18 +57,18 @@ block0(v0: i64): ; VCode: ; block0: -; lw a4,0(a1) -; vmv.v.x v12,a4 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; lw a1,0(a1) +; vmv.v.x v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lw a4, 0(a1) ; trap: heap_oob +; lw a1, 0(a1) ; trap: heap_oob ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x46, 0x07, 0x5e +; .byte 0x57, 0xc4, 0x05, 0x5e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %load_splat_i64x2(i64) -> i64x2 { @@ -80,18 +80,18 @@ block0(v0: i64): ; VCode: ; block0: -; ld a4,0(a1) -; vmv.v.x v12,a4 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, 
#vtype=(e8, m1, ta, ma) +; ld a1,0(a1) +; vmv.v.x v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; ld a4, 0(a1) ; trap: heap_oob +; ld a1, 0(a1) ; trap: heap_oob ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x46, 0x07, 0x5e +; .byte 0x57, 0xc4, 0x05, 0x5e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %load_splat_f32x4(i64) -> f32x4 { @@ -103,18 +103,18 @@ block0(v0: i64): ; VCode: ; block0: -; flw fa4,0(a1) -; vfmv.v.f v12,fa4 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; flw fa0,0(a1) +; vfmv.v.f v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; flw fa4, 0(a1) ; trap: heap_oob +; flw fa0, 0(a1) ; trap: heap_oob ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x56, 0x07, 0x5e +; .byte 0x57, 0x54, 0x05, 0x5e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %load_splat_f64x2(i64) -> f64x2 { @@ -126,17 +126,17 @@ block0(v0: i64): ; VCode: ; block0: -; fld fa4,0(a1) -; vfmv.v.f v12,fa4 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; fld fa0,0(a1) +; vfmv.v.f v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fld fa4, 0(a1) ; trap: heap_oob +; fld fa0, 0(a1) ; trap: heap_oob ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x56, 0x07, 0x5e +; .byte 0x57, 0x54, 0x05, 0x5e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-loads.clif b/cranelift/filetests/filetests/isa/riscv64/simd-loads.clif index 62c37edc0f5c..c3f9cfccddc3 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-loads.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-loads.clif @@ -12,15 +12,15 @@ block0(v0: i64): ; VCode: ; block0: -; vle8.v v11,0(a1) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,0(a1) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x87, 0x85, 0x05, 0x02 ; trap: heap_oob -; .byte 0xa7, 0x05, 0x05, 0x02 +; .byte 0x07, 0x84, 0x05, 0x02 ; trap: heap_oob +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %load_i16x8(i64) -> i16x8 { @@ -31,16 +31,16 @@ block0(v0: i64): ; VCode: ; block0: -; vle16.v v11,0(a1) #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle16.v v8,0(a1) #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x87, 0xd5, 0x05, 0x02 ; trap: heap_oob +; .byte 0x07, 0xd4, 0x05, 0x02 ; trap: heap_oob ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %load_i32x4(i64) -> i32x4 { @@ -51,16 +51,16 @@ block0(v0: i64): ; VCode: ; block0: -; vle32.v v11,0(a1) #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle32.v v8,0(a1) #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 
0x87, 0xe5, 0x05, 0x02 ; trap: heap_oob +; .byte 0x07, 0xe4, 0x05, 0x02 ; trap: heap_oob ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %load_i64x2(i64) -> i64x2 { @@ -71,15 +71,15 @@ block0(v0: i64): ; VCode: ; block0: -; vle64.v v11,0(a1) #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle64.v v8,0(a1) #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x87, 0xf5, 0x05, 0x02 ; trap: heap_oob +; .byte 0x07, 0xf4, 0x05, 0x02 ; trap: heap_oob ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-nearest.clif b/cranelift/filetests/filetests/isa/riscv64/simd-nearest.clif index 3626de730a1f..dd88e191fba4 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-nearest.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-nearest.clif @@ -16,20 +16,20 @@ block0(v0: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfabs.v v12,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfabs.v v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; lui a1,307200 -; fmv.w.x fa2,a1 -; vmflt.vf v0,v12,fa2 #avl=4, #vtype=(e32, m1, ta, ma) -; fsrmi a1,0 -; vfcvt.x.f.v v14,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; fsrm a1 -; vfcvt.f.x.v v10,v14 #avl=4, #vtype=(e32, m1, ta, ma) -; vfsgnj.vv v11,v10,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; fmv.w.x fa3,zero -; vfadd.vf v15,v9,fa3 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vvm v9,v15,v11,v0.t #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; fmv.w.x fa0,a1 +; vmflt.vf v0,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; fsrmi a2,0 +; vfcvt.x.f.v v14,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; fsrm a2 +; vfcvt.f.x.v v9,v14 #avl=4, #vtype=(e32, m1, ta, ma) +; vfsgnj.vv v9,v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; fmv.w.x fa0,zero +; vfadd.vf v10,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vvm v8,v10,v9,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -44,22 +44,22 @@ block0(v0: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x96, 0x94, 0x2a +; .byte 0xd7, 0x14, 0x84, 0x2a ; lui a1, 0x4b000 -; fmv.w.x fa2, a1 -; .byte 0x57, 0x50, 0xc6, 0x6e -; fsrmi a1, 0 -; .byte 0x57, 0x97, 0x90, 0x4a -; fsrm a1 -; .byte 0x57, 0x95, 0xe1, 0x4a -; .byte 0xd7, 0x95, 0xa4, 0x22 -; fmv.w.x fa3, zero -; .byte 0xd7, 0xd7, 0x96, 0x02 -; .byte 0xd7, 0x84, 0xf5, 0x5c +; fmv.w.x fa0, a1 +; .byte 0x57, 0x50, 0x95, 0x6e +; fsrmi a2, 0 +; .byte 0x57, 0x97, 0x80, 0x4a +; fsrm a2 +; .byte 0xd7, 0x94, 0xe1, 0x4a +; .byte 0xd7, 0x14, 0x94, 0x22 +; fmv.w.x fa0, zero +; .byte 0x57, 0x55, 0x85, 0x02 +; .byte 0x57, 0x84, 0xa4, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x04, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -77,21 +77,21 @@ block0(v0: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfabs.v v12,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, 
ma) +; vfabs.v v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; lui a1,1075 -; slli a2,a1,40 -; fmv.d.x fa4,a2 -; vmflt.vf v0,v12,fa4 #avl=2, #vtype=(e64, m1, ta, ma) -; fsrmi a2,0 -; vfcvt.x.f.v v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; fsrm a2 -; vfcvt.f.x.v v11,v8 #avl=2, #vtype=(e64, m1, ta, ma) -; vfsgnj.vv v13,v11,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; fmv.d.x fa5,zero -; vfadd.vf v9,v9,fa5 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vvm v11,v9,v13,v0.t #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; slli a1,a1,40 +; fmv.d.x fa0,a1 +; vmflt.vf v0,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; fsrmi a4,0 +; vfcvt.x.f.v v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; fsrm a4 +; vfcvt.f.x.v v9,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vfsgnj.vv v9,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; fmv.d.x fa0,zero +; vfadd.vf v10,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vvm v8,v10,v9,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -106,23 +106,23 @@ block0(v0: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x96, 0x94, 0x2a +; .byte 0xd7, 0x14, 0x84, 0x2a ; lui a1, 0x433 -; slli a2, a1, 0x28 -; fmv.d.x fa4, a2 -; .byte 0x57, 0x50, 0xc7, 0x6e -; fsrmi a2, 0 -; .byte 0x57, 0x94, 0x90, 0x4a -; fsrm a2 -; .byte 0xd7, 0x95, 0x81, 0x4a -; .byte 0xd7, 0x96, 0xb4, 0x22 -; fmv.d.x fa5, zero -; .byte 0xd7, 0xd4, 0x97, 0x02 -; .byte 0xd7, 0x85, 0x96, 0x5c +; slli a1, a1, 0x28 +; fmv.d.x fa0, a1 +; .byte 0x57, 0x50, 0x95, 0x6e +; fsrmi a4, 0 +; .byte 0xd7, 0x94, 0x80, 0x4a +; fsrm a4 +; .byte 0xd7, 0x94, 0x91, 0x4a +; .byte 0xd7, 0x14, 0x94, 0x22 +; fmv.d.x fa0, zero +; .byte 0x57, 0x55, 0x85, 0x02 +; .byte 0x57, 0x84, 0xa4, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-popcnt.clif b/cranelift/filetests/filetests/isa/riscv64/simd-popcnt.clif index df9ca74065da..360743380178 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-popcnt.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-popcnt.clif @@ -15,25 +15,25 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a4,85 -; vsrl.vi v14,v9,1 #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vx v8,v14,a4 #avl=16, #vtype=(e8, m1, ta, ma) -; vsub.vv v10,v9,v8 #avl=16, #vtype=(e8, m1, ta, ma) -; li a1,51 +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,85 +; vsrl.vi v9,v8,1 #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vx v9,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vsub.vv v10,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; li a2,51 ; vsrl.vi v14,v10,2 #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vx v8,v14,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vx v10,v10,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vv v12,v10,v8 #avl=16, #vtype=(e8, m1, ta, ma) -; li a4,15 -; vsrl.vi v8,v12,4 #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vv v10,v12,v8 #avl=16, #vtype=(e8, m1, ta, ma) -; vand.vx v12,v10,a4 #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vx v8,v14,a2 #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vx v9,v10,a2 #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vv v8,v9,v8 #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,15 +; vsrl.vi v9,v8,4 #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vv 
v8,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vand.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) ; li a1,1 -; vmul.vx v8,v12,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; li a4,0 -; vsrl.vx v12,v8,a4 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmul.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,0 +; vsrl.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -48,25 +48,25 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a4, zero, 0x55 -; .byte 0x57, 0xb7, 0x90, 0xa2 -; .byte 0x57, 0x44, 0xe7, 0x26 -; .byte 0x57, 0x05, 0x94, 0x0a -; addi a1, zero, 0x33 +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, 0x55 +; .byte 0xd7, 0xb4, 0x80, 0xa2 +; .byte 0xd7, 0xc4, 0x95, 0x26 +; .byte 0x57, 0x85, 0x84, 0x0a +; addi a2, zero, 0x33 ; .byte 0x57, 0x37, 0xa1, 0xa2 -; .byte 0x57, 0xc4, 0xe5, 0x26 -; .byte 0x57, 0xc5, 0xa5, 0x26 -; .byte 0x57, 0x06, 0xa4, 0x02 -; addi a4, zero, 0xf -; .byte 0x57, 0x34, 0xc2, 0xa2 -; .byte 0x57, 0x05, 0xc4, 0x02 -; .byte 0x57, 0x46, 0xa7, 0x26 +; .byte 0x57, 0x44, 0xe6, 0x26 +; .byte 0xd7, 0x44, 0xa6, 0x26 +; .byte 0x57, 0x04, 0x94, 0x02 +; addi a1, zero, 0xf +; .byte 0xd7, 0x34, 0x82, 0xa2 +; .byte 0x57, 0x84, 0x84, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x26 ; addi a1, zero, 1 -; .byte 0x57, 0xe4, 0xc5, 0x96 -; mv a4, zero -; .byte 0x57, 0x46, 0x87, 0xa2 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x57, 0xe4, 0x85, 0x96 +; mv a1, zero +; .byte 0x57, 0xc4, 0x85, 0xa2 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -84,28 +84,28 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; lui a4,5 -; addi a1,a4,1365 -; vsrl.vi v8,v9,1 #avl=8, #vtype=(e16, m1, ta, ma) -; vand.vx v10,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vsub.vv v12,v9,v10 #avl=8, #vtype=(e16, m1, ta, ma) -; lui a2,3 -; addi a4,a2,819 -; vsrl.vi v10,v12,2 #avl=8, #vtype=(e16, m1, ta, ma) -; vand.vx v13,v10,a4 #avl=8, #vtype=(e16, m1, ta, ma) -; vand.vx v14,v12,a4 #avl=8, #vtype=(e16, m1, ta, ma) -; vadd.vv v8,v14,v13 #avl=8, #vtype=(e16, m1, ta, ma) -; lui a2,1 -; addi a4,a2,-241 -; vsrl.vi v14,v8,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vadd.vv v8,v8,v14 #avl=8, #vtype=(e16, m1, ta, ma) -; vand.vx v10,v8,a4 #avl=8, #vtype=(e16, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; lui a1,5 +; addi a1,a1,1365 +; vsrl.vi v9,v8,1 #avl=8, #vtype=(e16, m1, ta, ma) +; vand.vx v10,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vsub.vv v12,v8,v10 #avl=8, #vtype=(e16, m1, ta, ma) +; lui a4,3 +; addi a1,a4,819 +; vsrl.vi v8,v12,2 #avl=8, #vtype=(e16, m1, ta, ma) +; vand.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vand.vx v9,v12,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vadd.vv v8,v9,v8 #avl=8, #vtype=(e16, m1, ta, ma) +; lui a1,1 +; addi a1,a1,-241 +; vsrl.vi v9,v8,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vadd.vv v8,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vand.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) ; li a1,257 -; vmul.vx v14,v10,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; li a4,8 -; vsrl.vx v10,v14,a4 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmul.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; li a1,8 +; vsrl.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi 
sp,sp,16 @@ -120,30 +120,30 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; lui a4, 5 -; addi a1, a4, 0x555 +; .byte 0x07, 0x84, 0x0f, 0x02 +; lui a1, 5 +; addi a1, a1, 0x555 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xb4, 0x90, 0xa2 -; .byte 0x57, 0xc5, 0x85, 0x26 -; .byte 0x57, 0x06, 0x95, 0x0a -; lui a2, 3 -; addi a4, a2, 0x333 -; .byte 0x57, 0x35, 0xc1, 0xa2 -; .byte 0xd7, 0x46, 0xa7, 0x26 -; .byte 0x57, 0x47, 0xc7, 0x26 -; .byte 0x57, 0x84, 0xe6, 0x02 -; lui a2, 1 -; addi a4, a2, -0xf1 -; .byte 0x57, 0x37, 0x82, 0xa2 -; .byte 0x57, 0x04, 0x87, 0x02 -; .byte 0x57, 0x45, 0x87, 0x26 +; .byte 0xd7, 0xb4, 0x80, 0xa2 +; .byte 0x57, 0xc5, 0x95, 0x26 +; .byte 0x57, 0x06, 0x85, 0x0a +; lui a4, 3 +; addi a1, a4, 0x333 +; .byte 0x57, 0x34, 0xc1, 0xa2 +; .byte 0x57, 0xc4, 0x85, 0x26 +; .byte 0xd7, 0xc4, 0xc5, 0x26 +; .byte 0x57, 0x04, 0x94, 0x02 +; lui a1, 1 +; addi a1, a1, -0xf1 +; .byte 0xd7, 0x34, 0x82, 0xa2 +; .byte 0x57, 0x84, 0x84, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x26 ; addi a1, zero, 0x101 -; .byte 0x57, 0xe7, 0xa5, 0x96 -; addi a4, zero, 8 -; .byte 0x57, 0x45, 0xe7, 0xa2 +; .byte 0x57, 0xe4, 0x85, 0x96 +; addi a1, zero, 8 +; .byte 0x57, 0xc4, 0x85, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -161,29 +161,29 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; lui a4,349525 -; addi a1,a4,1365 -; vsrl.vi v8,v9,1 #avl=4, #vtype=(e32, m1, ta, ma) -; vand.vx v10,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vsub.vv v12,v9,v10 #avl=4, #vtype=(e32, m1, ta, ma) -; lui a2,209715 -; addi a4,a2,819 -; vsrl.vi v10,v12,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vand.vx v13,v10,a4 #avl=4, #vtype=(e32, m1, ta, ma) -; vand.vx v14,v12,a4 #avl=4, #vtype=(e32, m1, ta, ma) -; vadd.vv v8,v14,v13 #avl=4, #vtype=(e32, m1, ta, ma) -; lui a2,61681 -; addi a4,a2,-241 -; vsrl.vi v14,v8,4 #avl=4, #vtype=(e32, m1, ta, ma) -; vadd.vv v8,v8,v14 #avl=4, #vtype=(e32, m1, ta, ma) -; vand.vx v10,v8,a4 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; lui a1,349525 +; addi a1,a1,1365 +; vsrl.vi v9,v8,1 #avl=4, #vtype=(e32, m1, ta, ma) +; vand.vx v10,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vsub.vv v12,v8,v10 #avl=4, #vtype=(e32, m1, ta, ma) +; lui a4,209715 +; addi a1,a4,819 +; vsrl.vi v8,v12,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vand.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vand.vx v9,v12,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vadd.vv v8,v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; lui a1,61681 +; addi a1,a1,-241 +; vsrl.vi v9,v8,4 #avl=4, #vtype=(e32, m1, ta, ma) +; vadd.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vand.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) ; lui a1,4112 -; addi a2,a1,257 -; vmul.vx v8,v10,a2 #avl=4, #vtype=(e32, m1, ta, ma) +; addi a1,a1,257 +; vmul.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) ; li a1,24 -; vsrl.vx v12,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -198,31 +198,31 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; lui a4, 0x55555 -; addi a1, a4, 0x555 +; .byte 0x07, 0x84, 0x0f, 0x02 +; lui a1, 
0x55555 +; addi a1, a1, 0x555 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb4, 0x90, 0xa2 -; .byte 0x57, 0xc5, 0x85, 0x26 -; .byte 0x57, 0x06, 0x95, 0x0a -; lui a2, 0x33333 -; addi a4, a2, 0x333 -; .byte 0x57, 0x35, 0xc1, 0xa2 -; .byte 0xd7, 0x46, 0xa7, 0x26 -; .byte 0x57, 0x47, 0xc7, 0x26 -; .byte 0x57, 0x84, 0xe6, 0x02 -; lui a2, 0xf0f1 -; addi a4, a2, -0xf1 -; .byte 0x57, 0x37, 0x82, 0xa2 -; .byte 0x57, 0x04, 0x87, 0x02 -; .byte 0x57, 0x45, 0x87, 0x26 +; .byte 0xd7, 0xb4, 0x80, 0xa2 +; .byte 0x57, 0xc5, 0x95, 0x26 +; .byte 0x57, 0x06, 0x85, 0x0a +; lui a4, 0x33333 +; addi a1, a4, 0x333 +; .byte 0x57, 0x34, 0xc1, 0xa2 +; .byte 0x57, 0xc4, 0x85, 0x26 +; .byte 0xd7, 0xc4, 0xc5, 0x26 +; .byte 0x57, 0x04, 0x94, 0x02 +; lui a1, 0xf0f1 +; addi a1, a1, -0xf1 +; .byte 0xd7, 0x34, 0x82, 0xa2 +; .byte 0x57, 0x84, 0x84, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x26 ; lui a1, 0x1010 -; addi a2, a1, 0x101 -; .byte 0x57, 0x64, 0xa6, 0x96 +; addi a1, a1, 0x101 +; .byte 0x57, 0xe4, 0x85, 0x96 ; addi a1, zero, 0x18 -; .byte 0x57, 0xc6, 0x85, 0xa2 +; .byte 0x57, 0xc4, 0x85, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -240,25 +240,25 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; ld a4,[const(0)] -; vsrl.vi v14,v9,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vand.vx v8,v14,a4 #avl=2, #vtype=(e64, m1, ta, ma) -; vsub.vv v10,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) -; ld a1,[const(1)] +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; ld a1,[const(0)] +; vsrl.vi v9,v8,1 #avl=2, #vtype=(e64, m1, ta, ma) +; vand.vx v9,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vsub.vv v10,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; ld a2,[const(1)] ; vsrl.vi v14,v10,2 #avl=2, #vtype=(e64, m1, ta, ma) -; vand.vx v8,v14,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vand.vx v10,v10,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vadd.vv v12,v10,v8 #avl=2, #vtype=(e64, m1, ta, ma) -; ld a4,[const(2)] -; vsrl.vi v8,v12,4 #avl=2, #vtype=(e64, m1, ta, ma) -; vadd.vv v10,v12,v8 #avl=2, #vtype=(e64, m1, ta, ma) -; vand.vx v12,v10,a4 #avl=2, #vtype=(e64, m1, ta, ma) +; vand.vx v8,v14,a2 #avl=2, #vtype=(e64, m1, ta, ma) +; vand.vx v9,v10,a2 #avl=2, #vtype=(e64, m1, ta, ma) +; vadd.vv v8,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; ld a1,[const(2)] +; vsrl.vi v9,v8,4 #avl=2, #vtype=(e64, m1, ta, ma) +; vadd.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vand.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) ; ld a1,[const(3)] -; vmul.vx v8,v12,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; li a4,56 -; vsrl.vx v12,v8,a4 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmul.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; li a1,56 +; vsrl.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -273,31 +273,31 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; auipc a4, 0 -; ld a4, 0x74(a4) -; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb7, 0x90, 0xa2 -; .byte 0x57, 0x44, 0xe7, 0x26 -; .byte 0x57, 0x05, 0x94, 0x0a +; .byte 0x07, 0x84, 0x0f, 0x02 ; auipc a1, 0 -; ld a1, 0x64(a1) +; ld a1, 0x74(a1) +; .byte 0x57, 0x70, 0x81, 0xcd +; .byte 0xd7, 0xb4, 0x80, 0xa2 +; .byte 0xd7, 0xc4, 0x95, 0x26 +; .byte 0x57, 0x85, 0x84, 0x0a +; auipc a2, 0 +; ld a2, 0x64(a2) ; .byte 0x57, 0x37, 0xa1, 0xa2 
-; .byte 0x57, 0xc4, 0xe5, 0x26 -; .byte 0x57, 0xc5, 0xa5, 0x26 -; .byte 0x57, 0x06, 0xa4, 0x02 -; auipc a4, 0 -; ld a4, 0x54(a4) -; .byte 0x57, 0x34, 0xc2, 0xa2 -; .byte 0x57, 0x05, 0xc4, 0x02 -; .byte 0x57, 0x46, 0xa7, 0x26 +; .byte 0x57, 0x44, 0xe6, 0x26 +; .byte 0xd7, 0x44, 0xa6, 0x26 +; .byte 0x57, 0x04, 0x94, 0x02 +; auipc a1, 0 +; ld a1, 0x54(a1) +; .byte 0xd7, 0x34, 0x82, 0xa2 +; .byte 0x57, 0x84, 0x84, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x26 ; auipc a1, 0 ; ld a1, 0x48(a1) -; .byte 0x57, 0xe4, 0xc5, 0x96 -; addi a4, zero, 0x38 -; .byte 0x57, 0x46, 0x87, 0xa2 +; .byte 0x57, 0xe4, 0x85, 0x96 +; addi a1, zero, 0x38 +; .byte 0x57, 0xc4, 0x85, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-saddsat.clif b/cranelift/filetests/filetests/isa/riscv64/simd-saddsat.clif index f7f186b345b8..8c62fa2dc985 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-saddsat.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-saddsat.clif @@ -16,10 +16,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsadd.vv v14,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsadd.vv v8,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,11 +34,11 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x87, 0x95, 0x86 -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x84, 0x84, 0x86 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -56,10 +56,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsadd.vv v14,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsadd.vv v8,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -74,13 +74,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x87, 0x95, 0x86 +; .byte 0x57, 0x84, 0x84, 0x86 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -98,10 +98,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsadd.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; 
vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsadd.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,13 +116,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x87, 0x95, 0x86 +; .byte 0x57, 0x84, 0x84, 0x86 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -140,10 +140,10 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsadd.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsadd.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -158,13 +158,13 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x87, 0x95, 0x86 +; .byte 0x57, 0x84, 0x84, 0x86 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -184,9 +184,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsadd.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsadd.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -201,9 +201,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0x86 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0x86 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -223,9 +223,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsadd.vi v12,v9,-16 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsadd.vi v8,v8,-16 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -240,11 +240,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x36, 0x98, 0x86 +; .byte 0x57, 0x34, 0x88, 0x86 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 
0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -264,9 +264,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsadd.vi v12,v9,15 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsadd.vi v8,v8,15 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -281,11 +281,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x97, 0x86 +; .byte 0x57, 0xb4, 0x87, 0x86 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -305,9 +305,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsadd.vi v12,v9,-5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsadd.vi v8,v8,-5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -322,11 +322,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x9d, 0x86 +; .byte 0x57, 0xb4, 0x8d, 0x86 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -345,9 +345,9 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsadd.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsadd.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -362,9 +362,9 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0x86 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x86 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -383,9 +383,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsadd.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsadd.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -400,11 +400,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0x86 +; .byte 0x57, 0xc4, 0x85, 0x86 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 
8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -423,9 +423,9 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsadd.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsadd.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -440,11 +440,11 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x86 +; .byte 0x57, 0xc4, 0x85, 0x86 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -463,9 +463,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsadd.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsadd.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -480,11 +480,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x86 +; .byte 0x57, 0xc4, 0x85, 0x86 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-scalartovector.clif b/cranelift/filetests/filetests/isa/riscv64/simd-scalartovector.clif index 550ff84895dd..ae381963acb6 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-scalartovector.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-scalartovector.clif @@ -11,21 +11,21 @@ block0(v0: i8): ; VCode: ; block0: -; vmv.v.x v11,zero #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.x v8,zero #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.v.i v0,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vxm v15,v11,a1,v0.t #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmerge.vxm v9,v8,a1,v0.t #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xd7, 0x45, 0x00, 0x5e +; .byte 0x57, 0x44, 0x00, 0x5e ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0xb0, 0x00, 0x5e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xd7, 0xc7, 0xb5, 0x5c -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0xd7, 0xc4, 0x85, 0x5c +; .byte 0xa7, 0x04, 0x05, 0x02 ; ret function %scalartovector_i16(i16) -> i16x8 { @@ -36,22 +36,22 @@ block0(v0: i16): ; VCode: ; block0: -; vmv.v.x v11,zero #avl=8, #vtype=(e16, m1, ta, ma) +; vmv.v.x v8,zero #avl=8, #vtype=(e16, m1, ta, ma) ; vmv.v.i v0,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vxm v15,v11,a1,v0.t #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmerge.vxm v9,v8,a1,v0.t #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 
0x45, 0x00, 0x5e +; .byte 0x57, 0x44, 0x00, 0x5e ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0xb0, 0x00, 0x5e ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc7, 0xb5, 0x5c +; .byte 0xd7, 0xc4, 0x85, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ret function %scalartovector_i32(i32) -> i32x4 { @@ -62,22 +62,22 @@ block0(v0: i32): ; VCode: ; block0: -; vmv.v.x v11,zero #avl=4, #vtype=(e32, m1, ta, ma) +; vmv.v.x v8,zero #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v0,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vxm v15,v11,a1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmerge.vxm v9,v8,a1,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x45, 0x00, 0x5e +; .byte 0x57, 0x44, 0x00, 0x5e ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0xb0, 0x00, 0x5e ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc7, 0xb5, 0x5c +; .byte 0xd7, 0xc4, 0x85, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ret function %scalartovector_i64(i64) -> i64x2 { @@ -88,20 +88,20 @@ block0(v0: i64): ; VCode: ; block0: -; vmv.v.x v11,zero #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.x v8,zero #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v0,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vxm v15,v11,a1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmerge.vxm v9,v8,a1,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x45, 0x00, 0x5e +; .byte 0x57, 0x44, 0x00, 0x5e ; .byte 0x57, 0xb0, 0x00, 0x5e -; .byte 0xd7, 0xc7, 0xb5, 0x5c +; .byte 0xd7, 0xc4, 0x85, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ret function %scalartovector_f32(f32) -> f32x4 { @@ -112,24 +112,24 @@ block0(v0: f32): ; VCode: ; block0: -; vmv.v.x v11,zero #avl=4, #vtype=(e32, m1, ta, ma) -; vfmv.s.f v13,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmv.v.x v8,zero #avl=4, #vtype=(e32, m1, ta, ma) +; vfmv.s.f v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v0,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vvm v9,v11,v13,v0.t #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmerge.vvm v10,v8,v9,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x45, 0x00, 0x5e -; .byte 0xd7, 0x56, 0x05, 0x42 +; .byte 0x57, 0x44, 0x00, 0x5e +; .byte 0xd7, 0x54, 0x05, 0x42 ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0xb0, 0x00, 0x5e ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x84, 0xb6, 0x5c +; .byte 0x57, 0x85, 0x84, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x04, 0x05, 0x02 +; .byte 0x27, 0x05, 0x05, 0x02 ; ret function %scalartovector_f64(f64) -> f64x2 { @@ -140,22 +140,22 @@ block0(v0: f64): ; VCode: ; block0: -; vmv.v.x v11,zero #avl=2, #vtype=(e64, m1, ta, ma) -; vfmv.s.f v13,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.x v8,zero #avl=2, #vtype=(e64, m1, ta, ma) +; vfmv.s.f v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v0,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vvm v9,v11,v13,v0.t #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; 
vmerge.vvm v10,v8,v9,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x45, 0x00, 0x5e -; .byte 0xd7, 0x56, 0x05, 0x42 +; .byte 0x57, 0x44, 0x00, 0x5e +; .byte 0xd7, 0x54, 0x05, 0x42 ; .byte 0x57, 0xb0, 0x00, 0x5e -; .byte 0xd7, 0x84, 0xb6, 0x5c +; .byte 0x57, 0x85, 0x84, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x04, 0x05, 0x02 +; .byte 0x27, 0x05, 0x05, 0x02 ; ret function %scalartovector_i8_imm(i8) -> i8x16 { @@ -167,21 +167,21 @@ block0(v0: i8): ; VCode: ; block0: -; vmv.v.x v10,zero #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.x v8,zero #avl=16, #vtype=(e8, m1, ta, ma) ; vmv.v.i v0,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v14,v10,7,v0.t #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmerge.vim v9,v8,7,v0.t #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x57, 0x45, 0x00, 0x5e +; .byte 0x57, 0x44, 0x00, 0x5e ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0xb0, 0x00, 0x5e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x57, 0xb7, 0xa3, 0x5c -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xd7, 0xb4, 0x83, 0x5c +; .byte 0xa7, 0x04, 0x05, 0x02 ; ret function %scalartovector_i16_imm(i16) -> i16x8 { @@ -193,22 +193,22 @@ block0(v0: i16): ; VCode: ; block0: -; vmv.v.x v10,zero #avl=8, #vtype=(e16, m1, ta, ma) +; vmv.v.x v8,zero #avl=8, #vtype=(e16, m1, ta, ma) ; vmv.v.i v0,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v14,v10,7,v0.t #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmerge.vim v9,v8,7,v0.t #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x45, 0x00, 0x5e +; .byte 0x57, 0x44, 0x00, 0x5e ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0xb0, 0x00, 0x5e ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xb7, 0xa3, 0x5c +; .byte 0xd7, 0xb4, 0x83, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ret function %scalartovector_i32_imm(i32) -> i32x4 { @@ -220,22 +220,22 @@ block0(v0: i32): ; VCode: ; block0: -; vmv.v.x v10,zero #avl=4, #vtype=(e32, m1, ta, ma) +; vmv.v.x v8,zero #avl=4, #vtype=(e32, m1, ta, ma) ; vmv.v.i v0,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v14,v10,7,v0.t #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmerge.vim v9,v8,7,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x45, 0x00, 0x5e +; .byte 0x57, 0x44, 0x00, 0x5e ; .byte 0x57, 0x70, 0x81, 0xcd ; .byte 0x57, 0xb0, 0x00, 0x5e ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb7, 0xa3, 0x5c +; .byte 0xd7, 0xb4, 0x83, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ret function %scalartovector_i64_imm(i64) -> i64x2 { @@ -247,19 +247,19 @@ block0(v0: i64): ; VCode: ; block0: -; vmv.v.x v10,zero #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.x v8,zero #avl=2, #vtype=(e64, m1, ta, ma) ; vmv.v.i v0,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vim v14,v10,7,v0.t #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmerge.vim v9,v8,7,v0.t #avl=2, 
#vtype=(e64, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x45, 0x00, 0x5e +; .byte 0x57, 0x44, 0x00, 0x5e ; .byte 0x57, 0xb0, 0x00, 0x5e -; .byte 0x57, 0xb7, 0xa3, 0x5c +; .byte 0xd7, 0xb4, 0x83, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-select.clif b/cranelift/filetests/filetests/isa/riscv64/simd-select.clif index 6dfef422de72..df4d7b2b349e 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-select.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-select.clif @@ -15,10 +15,10 @@ block0(v0: i64, v1: i64x2, v2: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v10,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v12,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; select v15,v10,v12##condition=(a1 ne zero) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; select v8,v8,v9##condition=(a1 ne zero) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -33,13 +33,12 @@ block0(v0: i64, v1: i64x2, v2: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x07, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x07, 0x86, 0x0f, 0x02 -; .byte 0xd7, 0x37, 0xa0, 0x9e +; .byte 0x87, 0x84, 0x0f, 0x02 ; bnez a1, 8 -; .byte 0xd7, 0x37, 0xc0, 0x9e -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x57, 0x34, 0x90, 0x9e +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -57,10 +56,10 @@ block0(v0: i32, v1: i32x4, v2: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v10,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v12,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; sext.w a1,a1 -; select v9,v10,v12##condition=(a1 ne zero) +; select v9,v8,v9##condition=(a1 ne zero) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -76,13 +75,12 @@ block0(v0: i32, v1: i32x4, v2: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x07, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x07, 0x86, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; sext.w a1, a1 -; .byte 0xd7, 0x34, 0xa0, 0x9e -; bnez a1, 8 -; .byte 0xd7, 0x34, 0xc0, 0x9e +; beqz a1, 8 +; .byte 0xd7, 0x34, 0x80, 0x9e ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) @@ -101,11 +99,11 @@ block0(v0: i16, v1: i16x8, v2: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v10,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v12,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; slli a1,a1,48 -; srai a3,a1,48 -; select v11,v10,v12##condition=(a3 ne zero) +; srai a1,a1,48 +; select v11,v8,v9##condition=(a1 ne zero) ; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -121,14 +119,14 @@ block0(v0: i16, v1: i16x8, v2: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x07, 
0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x07, 0x86, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; slli a1, a1, 0x30 -; srai a3, a1, 0x30 -; .byte 0xd7, 0x35, 0xa0, 0x9e -; bnez a3, 8 -; .byte 0xd7, 0x35, 0xc0, 0x9e +; srai a1, a1, 0x30 +; .byte 0xd7, 0x35, 0x80, 0x9e +; bnez a1, 8 +; .byte 0xd7, 0x35, 0x90, 0x9e ; .byte 0xa7, 0x05, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) @@ -147,10 +145,10 @@ block0(v0: i8, v1: i8x16, v2: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v10,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v12,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) ; andi a1,a1,255 -; select v9,v10,v12##condition=(a1 ne zero) +; select v9,v8,v9##condition=(a1 ne zero) ; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -166,13 +164,12 @@ block0(v0: i8, v1: i8x16, v2: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x07, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x07, 0x86, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; andi a1, a1, 0xff -; .byte 0xd7, 0x34, 0xa0, 0x9e -; bnez a1, 8 -; .byte 0xd7, 0x34, 0xc0, 0x9e +; beqz a1, 8 +; .byte 0xd7, 0x34, 0x80, 0x9e ; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) @@ -191,10 +188,10 @@ block0(v0: i64, v1: f64x2, v2: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v10,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v12,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; select v15,v10,v12##condition=(a1 ne zero) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; select v8,v8,v9##condition=(a1 ne zero) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -209,13 +206,12 @@ block0(v0: i64, v1: f64x2, v2: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x07, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x07, 0x86, 0x0f, 0x02 -; .byte 0xd7, 0x37, 0xa0, 0x9e +; .byte 0x87, 0x84, 0x0f, 0x02 ; bnez a1, 8 -; .byte 0xd7, 0x37, 0xc0, 0x9e -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x57, 0x34, 0x90, 0x9e +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -233,10 +229,10 @@ block0(v0: i64, v1: f32x4, v2: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v10,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v12,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; select v15,v10,v12##condition=(a1 ne zero) -; vse8.v v15,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; select v8,v8,v9##condition=(a1 ne zero) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -251,13 +247,12 @@ block0(v0: i64, v1: f32x4, v2: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x07, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x07, 0x86, 0x0f, 0x02 -; .byte 0xd7, 0x37, 0xa0, 0x9e +; .byte 0x87, 0x84, 0x0f, 0x02 ; bnez a1, 8 -; .byte 0xd7, 0x37, 0xc0, 0x9e -; .byte 0xa7, 0x07, 0x05, 0x02 +; .byte 0x57, 0x34, 0x90, 0x9e 
+; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-shuffle.clif b/cranelift/filetests/filetests/isa/riscv64/simd-shuffle.clif index 1bd2d0fa2094..0dd85ea259c8 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-shuffle.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-shuffle.clif @@ -15,13 +15,13 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v14,[const(0)] #avl=16, #vtype=(e8, m1, ta, ma) -; vrgather.vv v8,v9,v14 #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vi v10,v14,-16 #avl=16, #vtype=(e8, m1, ta, ma) -; vrgather.vv v12,v11,v10 #avl=16, #vtype=(e8, m1, ta, ma) -; vor.vv v14,v8,v12 #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v11,[const(0)] #avl=16, #vtype=(e8, m1, ta, ma) +; vrgather.vv v9,v10,v11 #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vi v10,v11,-16 #avl=16, #vtype=(e8, m1, ta, ma) +; vrgather.vv v12,v8,v10 #avl=16, #vtype=(e8, m1, ta, ma) +; vor.vv v14,v9,v12 #avl=16, #vtype=(e8, m1, ta, ma) ; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -37,16 +37,16 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; auipc t6, 0 ; addi t6, t6, 0x3c -; .byte 0x07, 0x87, 0x0f, 0x02 -; .byte 0x57, 0x04, 0x97, 0x32 -; .byte 0x57, 0x35, 0xe8, 0x02 -; .byte 0x57, 0x06, 0xb5, 0x32 -; .byte 0x57, 0x07, 0x86, 0x2a +; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0xd7, 0x84, 0xa5, 0x32 +; .byte 0x57, 0x35, 0xb8, 0x02 +; .byte 0x57, 0x06, 0x85, 0x32 +; .byte 0x57, 0x07, 0x96, 0x2a ; .byte 0x27, 0x07, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-smax.clif b/cranelift/filetests/filetests/isa/riscv64/simd-smax.clif index 5122727cddf4..7c936a092e46 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-smax.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-smax.clif @@ -16,10 +16,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmax.vv v14,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmax.vv v8,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,11 +34,11 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x87, 0x95, 0x1e -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x84, 0x84, 0x1e +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -56,10 +56,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, 
m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmax.vv v14,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmax.vv v8,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -74,13 +74,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x87, 0x95, 0x1e +; .byte 0x57, 0x84, 0x84, 0x1e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -98,10 +98,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmax.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmax.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,13 +116,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x87, 0x95, 0x1e +; .byte 0x57, 0x84, 0x84, 0x1e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -140,10 +140,10 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmax.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmax.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -158,13 +158,13 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x87, 0x95, 0x1e +; .byte 0x57, 0x84, 0x84, 0x1e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -184,10 +184,10 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,5 -; vmax.vx v13,v9,a5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,5 +; vmax.vx 
v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -202,10 +202,10 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, 5 -; .byte 0xd7, 0xc6, 0x97, 0x1e -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, 5 +; .byte 0x57, 0xc4, 0x85, 0x1e +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -225,10 +225,10 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,-16 -; vmax.vx v13,v9,a5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,-16 +; vmax.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -243,12 +243,12 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, -0x10 +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, -0x10 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x97, 0x1e +; .byte 0x57, 0xc4, 0x85, 0x1e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -268,10 +268,10 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,15 -; vmax.vx v13,v9,a5 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,15 +; vmax.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -286,12 +286,12 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, 0xf +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, 0xf ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x97, 0x1e +; .byte 0x57, 0xc4, 0x85, 0x1e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -311,10 +311,10 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,-5 -; vmax.vx v13,v9,a5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,-5 +; vmax.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -329,12 +329,12 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, -5 +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, -5 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x97, 0x1e +; .byte 0x57, 0xc4, 0x85, 0x1e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -353,9 +353,9 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmax.vx v13,v9,a1 
#avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmax.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -370,9 +370,9 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0x1e -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x1e +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -391,9 +391,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmax.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmax.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -408,11 +408,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0x1e +; .byte 0x57, 0xc4, 0x85, 0x1e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -431,9 +431,9 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmax.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmax.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -448,11 +448,11 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x1e +; .byte 0x57, 0xc4, 0x85, 0x1e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -471,9 +471,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmax.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmax.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -488,11 +488,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x1e +; .byte 0x57, 0xc4, 0x85, 0x1e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-smin.clif b/cranelift/filetests/filetests/isa/riscv64/simd-smin.clif index f43a3bf306d9..e5fd66d02282 100644 --- 
a/cranelift/filetests/filetests/isa/riscv64/simd-smin.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-smin.clif @@ -16,10 +16,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmin.vv v14,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmin.vv v8,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,11 +34,11 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x87, 0x95, 0x16 -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x84, 0x84, 0x16 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -56,10 +56,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmin.vv v14,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmin.vv v8,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -74,13 +74,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x87, 0x95, 0x16 +; .byte 0x57, 0x84, 0x84, 0x16 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -98,10 +98,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmin.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmin.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,13 +116,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x87, 0x95, 0x16 +; .byte 0x57, 0x84, 0x84, 0x16 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -140,10 +140,10 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; 
vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmin.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmin.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -158,13 +158,13 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x87, 0x95, 0x16 +; .byte 0x57, 0x84, 0x84, 0x16 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -184,10 +184,10 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,5 -; vmin.vx v13,v9,a5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,5 +; vmin.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -202,10 +202,10 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, 5 -; .byte 0xd7, 0xc6, 0x97, 0x16 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, 5 +; .byte 0x57, 0xc4, 0x85, 0x16 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -225,10 +225,10 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,-16 -; vmin.vx v13,v9,a5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,-16 +; vmin.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -243,12 +243,12 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, -0x10 +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, -0x10 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x97, 0x16 +; .byte 0x57, 0xc4, 0x85, 0x16 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -268,10 +268,10 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,15 -; vmin.vx v13,v9,a5 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,15 +; vmin.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -286,12 +286,12 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, 0xf +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, 0xf ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 
0xc6, 0x97, 0x16 +; .byte 0x57, 0xc4, 0x85, 0x16 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -311,10 +311,10 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,-5 -; vmin.vx v13,v9,a5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,-5 +; vmin.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -329,12 +329,12 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, -5 +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, -5 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x97, 0x16 +; .byte 0x57, 0xc4, 0x85, 0x16 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -353,9 +353,9 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmin.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmin.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -370,9 +370,9 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0x16 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x16 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -391,9 +391,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmin.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmin.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -408,11 +408,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0x16 +; .byte 0x57, 0xc4, 0x85, 0x16 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -431,9 +431,9 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmin.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmin.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -448,11 +448,11 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd 
-; .byte 0xd7, 0xc6, 0x95, 0x16 +; .byte 0x57, 0xc4, 0x85, 0x16 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -471,9 +471,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmin.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmin.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -488,11 +488,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x16 +; .byte 0x57, 0xc4, 0x85, 0x16 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-smulhi.clif b/cranelift/filetests/filetests/isa/riscv64/simd-smulhi.clif index bf4ad330c611..59399f6430f2 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-smulhi.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-smulhi.clif @@ -16,10 +16,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmulh.vv v14,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmulh.vv v8,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,11 +34,11 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0xa7, 0x95, 0x9e -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xa4, 0x84, 0x9e +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -56,10 +56,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmulh.vv v14,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmulh.vv v8,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -74,13 +74,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xa7, 0x95, 0x9e +; .byte 0x57, 0xa4, 0x84, 0x9e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) 
; addi sp, sp, 0x10 @@ -98,10 +98,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmulh.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmulh.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,13 +116,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xa7, 0x95, 0x9e +; .byte 0x57, 0xa4, 0x84, 0x9e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -140,10 +140,10 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmulh.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmulh.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -158,13 +158,13 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xa7, 0x95, 0x9e +; .byte 0x57, 0xa4, 0x84, 0x9e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -183,9 +183,9 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmulh.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmulh.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -200,9 +200,9 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xe6, 0x95, 0x9e -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xe4, 0x85, 0x9e +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -221,9 +221,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmulh.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmulh.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 
@@ -238,11 +238,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xe6, 0x95, 0x9e +; .byte 0x57, 0xe4, 0x85, 0x9e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -261,9 +261,9 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmulh.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmulh.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -278,11 +278,11 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xe6, 0x95, 0x9e +; .byte 0x57, 0xe4, 0x85, 0x9e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -301,9 +301,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmulh.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmulh.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -318,11 +318,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xe6, 0x95, 0x9e +; .byte 0x57, 0xe4, 0x85, 0x9e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-snarrow.clif b/cranelift/filetests/filetests/isa/riscv64/simd-snarrow.clif index 8176bc671521..20f0eb463e21 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-snarrow.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-snarrow.clif @@ -15,10 +15,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vnclip.wi v8,v9,0 #avl=8, #vtype=(e8, mf2, ta, ma) -; vnclip.wi v9,v11,0 #avl=8, #vtype=(e8, mf2, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vnclip.wi v8,v8,0 #avl=8, #vtype=(e8, mf2, ta, ma) +; vnclip.wi v9,v9,0 #avl=8, #vtype=(e8, mf2, ta, ma) ; vslideup.vi v8,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) @@ -35,12 +35,12 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0x34, 0x90, 0xbe -; 
.byte 0xd7, 0x34, 0xb0, 0xbe +; .byte 0x57, 0x34, 0x80, 0xbe +; .byte 0xd7, 0x34, 0x90, 0xbe ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x57, 0x34, 0x94, 0x3a ; .byte 0x27, 0x04, 0x05, 0x02 @@ -61,10 +61,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vnclip.wi v8,v9,0 #avl=4, #vtype=(e16, mf2, ta, ma) -; vnclip.wi v9,v11,0 #avl=4, #vtype=(e16, mf2, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vnclip.wi v8,v8,0 #avl=4, #vtype=(e16, mf2, ta, ma) +; vnclip.wi v9,v9,0 #avl=4, #vtype=(e16, mf2, ta, ma) ; vslideup.vi v8,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) @@ -81,12 +81,12 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0x34, 0x90, 0xbe -; .byte 0xd7, 0x34, 0xb0, 0xbe +; .byte 0x57, 0x34, 0x80, 0xbe +; .byte 0xd7, 0x34, 0x90, 0xbe ; .byte 0x57, 0x70, 0x84, 0xcc ; .byte 0x57, 0x34, 0x92, 0x3a ; .byte 0x57, 0x70, 0x08, 0xcc @@ -108,10 +108,10 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vnclip.wi v8,v9,0 #avl=2, #vtype=(e32, mf2, ta, ma) -; vnclip.wi v9,v11,0 #avl=2, #vtype=(e32, mf2, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vnclip.wi v8,v8,0 #avl=2, #vtype=(e32, mf2, ta, ma) +; vnclip.wi v9,v9,0 #avl=2, #vtype=(e32, mf2, ta, ma) ; vslideup.vi v8,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) @@ -128,12 +128,12 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0x34, 0x90, 0xbe -; .byte 0xd7, 0x34, 0xb0, 0xbe +; .byte 0x57, 0x34, 0x80, 0xbe +; .byte 0xd7, 0x34, 0x90, 0xbe ; .byte 0x57, 0x70, 0x02, 0xcd ; .byte 0x57, 0x34, 0x91, 0x3a ; .byte 0x57, 0x70, 0x08, 0xcc diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-splat.clif b/cranelift/filetests/filetests/isa/riscv64/simd-splat.clif index 19928a85f6aa..22b89097661e 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-splat.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-splat.clif @@ -11,15 +11,15 @@ block0(v0: i8): ; VCode: ; block0: -; vmv.v.x v11,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.x v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xd7, 0xc5, 0x05, 0x5e -; .byte 0xa7, 0x05, 0x05, 0x02 +; .byte 0x57, 0xc4, 0x05, 0x5e +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %splat_i16x8(i16) -> i16x8 { @@ -30,16 +30,16 @@ block0(v0: i16): ; VCode: ; block0: -; vmv.v.x v11,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v11,0(a0) #avl=16, 
#vtype=(e8, m1, ta, ma) +; vmv.v.x v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc5, 0x05, 0x5e +; .byte 0x57, 0xc4, 0x05, 0x5e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %splat_i32x4(i32) -> i32x4 { @@ -50,16 +50,16 @@ block0(v0: i32): ; VCode: ; block0: -; vmv.v.x v11,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.x v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc5, 0x05, 0x5e +; .byte 0x57, 0xc4, 0x05, 0x5e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %splat_i64x2(i64) -> i64x2 { @@ -70,16 +70,16 @@ block0(v0: i64): ; VCode: ; block0: -; vmv.v.x v11,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.x v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc5, 0x05, 0x5e +; .byte 0x57, 0xc4, 0x05, 0x5e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %splat_const_i8x16() -> i8x16 { @@ -91,15 +91,15 @@ block0: ; VCode: ; block0: -; vmv.v.i v10,2 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.i v8,2 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x57, 0x35, 0x01, 0x5e -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x57, 0x34, 0x01, 0x5e +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %splat_const_i16x8() -> i16x8 { @@ -111,16 +111,16 @@ block0: ; VCode: ; block0: -; vmv.v.i v10,2 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.i v8,2 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x35, 0x01, 0x5e +; .byte 0x57, 0x34, 0x01, 0x5e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %splat_const_i32x4() -> i32x4 { @@ -132,16 +132,16 @@ block0: ; VCode: ; block0: -; vmv.v.i v10,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.i v8,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x35, 0x01, 0x5e +; .byte 0x57, 0x34, 0x01, 0x5e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %splat_const_i64x2() -> i64x2 { @@ -153,16 +153,16 @@ block0: ; VCode: ; block0: -; vmv.v.i v10,2 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.i v8,2 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x35, 0x01, 0x5e +; .byte 0x57, 0x34, 0x01, 0x5e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 
0x05, 0x02 ; ret function %splat_f32x4(f32) -> f32x4 { @@ -173,16 +173,16 @@ block0(v0: f32): ; VCode: ; block0: -; vfmv.v.f v11,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vfmv.v.f v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0x55, 0x05, 0x5e +; .byte 0x57, 0x54, 0x05, 0x5e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret function %splat_f64x2(f64) -> f64x2 { @@ -193,15 +193,15 @@ block0(v0: f64): ; VCode: ; block0: -; vfmv.v.f v11,fa0 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v11,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vfmv.v.f v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0x55, 0x05, 0x5e +; .byte 0x57, 0x54, 0x05, 0x5e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-sqmulroundsat.clif b/cranelift/filetests/filetests/isa/riscv64/simd-sqmulroundsat.clif index 863caec2c23d..adea6f2dd0b0 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-sqmulroundsat.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-sqmulroundsat.clif @@ -15,10 +15,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsmul.vv v14,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsmul.vv v8,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -33,13 +33,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x87, 0x95, 0x9e +; .byte 0x57, 0x84, 0x84, 0x9e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -57,10 +57,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsmul.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsmul.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -75,13 +75,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x87, 0x95, 0x9e +; .byte 0x57, 0x84, 0x84, 0x9e ; .byte 0x57, 0x70, 0x08, 
0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -100,9 +100,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsmul.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsmul.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -117,11 +117,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0x9e +; .byte 0x57, 0xc4, 0x85, 0x9e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -140,9 +140,9 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsmul.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsmul.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -157,11 +157,11 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x9e +; .byte 0x57, 0xc4, 0x85, 0x9e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-sqrt.clif b/cranelift/filetests/filetests/isa/riscv64/simd-sqrt.clif index 90df5f92b79b..049b75e42d03 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-sqrt.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-sqrt.clif @@ -16,9 +16,9 @@ block0(v0: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfsqrt.v v12,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfsqrt.v v8,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -33,11 +33,11 @@ block0(v0: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x16, 0x90, 0x4e +; .byte 0x57, 0x14, 0x80, 0x4e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -55,9 +55,9 @@ block0(v0: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfsqrt.v v12,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfsqrt.v v8,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -72,11 +72,11 @@ 
block0(v0: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x16, 0x90, 0x4e +; .byte 0x57, 0x14, 0x80, 0x4e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-sshr-const.clif b/cranelift/filetests/filetests/isa/riscv64/simd-sshr-const.clif index 2d59b4670496..50058bd23c5d 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-sshr-const.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-sshr-const.clif @@ -17,9 +17,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,9 +34,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0xa6 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0xa6 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -55,9 +55,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -72,9 +72,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0xa6 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0xa6 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -93,9 +93,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -110,9 +110,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0xa6 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0xa6 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -131,9 +131,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) 
; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -148,9 +148,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0xa6 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0xa6 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -170,9 +170,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -187,9 +187,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0xa6 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0xa6 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -208,9 +208,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vi v12,v9,5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vi v8,v8,5 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -225,11 +225,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xb6, 0x92, 0xa6 +; .byte 0x57, 0xb4, 0x82, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -248,9 +248,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vi v12,v9,5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vi v8,v8,5 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -265,11 +265,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xb6, 0x92, 0xa6 +; .byte 0x57, 0xb4, 0x82, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -288,9 +288,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vi v12,v9,5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vi v8,v8,5 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -305,11 +305,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 
0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xb6, 0x92, 0xa6 +; .byte 0x57, 0xb4, 0x82, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -328,9 +328,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vi v12,v9,5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vi v8,v8,5 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -345,11 +345,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xb6, 0x92, 0xa6 +; .byte 0x57, 0xb4, 0x82, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -369,9 +369,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vi v12,v9,5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vi v8,v8,5 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -386,11 +386,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xb6, 0x92, 0xa6 +; .byte 0x57, 0xb4, 0x82, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -409,9 +409,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vi v12,v9,5 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vi v8,v8,5 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -426,11 +426,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x92, 0xa6 +; .byte 0x57, 0xb4, 0x82, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -449,9 +449,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vi v12,v9,5 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vi v8,v8,5 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -466,11 +466,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; 
.byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x92, 0xa6 +; .byte 0x57, 0xb4, 0x82, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -489,9 +489,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vi v12,v9,5 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vi v8,v8,5 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -506,11 +506,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x92, 0xa6 +; .byte 0x57, 0xb4, 0x82, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -529,9 +529,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vi v12,v9,5 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vi v8,v8,5 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -546,11 +546,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x92, 0xa6 +; .byte 0x57, 0xb4, 0x82, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -570,9 +570,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vi v12,v9,5 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vi v8,v8,5 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -587,11 +587,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x92, 0xa6 +; .byte 0x57, 0xb4, 0x82, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -610,9 +610,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vi v12,v9,5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vi v8,v8,5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -627,11 +627,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 
0x92, 0xa6 +; .byte 0x57, 0xb4, 0x82, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -650,9 +650,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vi v12,v9,5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vi v8,v8,5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -667,11 +667,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x92, 0xa6 +; .byte 0x57, 0xb4, 0x82, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -690,9 +690,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vi v12,v9,5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vi v8,v8,5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -707,11 +707,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x92, 0xa6 +; .byte 0x57, 0xb4, 0x82, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -730,9 +730,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vi v12,v9,5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vi v8,v8,5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -747,11 +747,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x92, 0xa6 +; .byte 0x57, 0xb4, 0x82, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -771,9 +771,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vi v12,v9,5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vi v8,v8,5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -788,11 +788,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x92, 0xa6 +; .byte 0x57, 0xb4, 0x82, 0xa6 ; .byte 
0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-sshr.clif b/cranelift/filetests/filetests/isa/riscv64/simd-sshr.clif index 451621e12db1..221e066747cd 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-sshr.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-sshr.clif @@ -15,9 +15,9 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -32,9 +32,9 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0xa6 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0xa6 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -52,9 +52,9 @@ block0(v0: i8x16, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -69,9 +69,9 @@ block0(v0: i8x16, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0xa6 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0xa6 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -89,9 +89,9 @@ block0(v0: i8x16, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -106,9 +106,9 @@ block0(v0: i8x16, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0xa6 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0xa6 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -126,9 +126,9 @@ block0(v0: i8x16, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -143,9 +143,9 @@ block0(v0: i8x16, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 
0xc6, 0x95, 0xa6 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0xa6 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -163,9 +163,9 @@ block0(v0: i8x16, v1: i128): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vx v14,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -180,9 +180,9 @@ block0(v0: i8x16, v1: i128): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xc7, 0x95, 0xa6 -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0xa6 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -200,9 +200,9 @@ block0(v0: i16x8, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -217,11 +217,11 @@ block0(v0: i16x8, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0xa6 +; .byte 0x57, 0xc4, 0x85, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -239,9 +239,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -256,11 +256,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0xa6 +; .byte 0x57, 0xc4, 0x85, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -278,9 +278,9 @@ block0(v0: i16x8, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -295,11 +295,11 @@ block0(v0: i16x8, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0xa6 +; .byte 0x57, 
0xc4, 0x85, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -317,9 +317,9 @@ block0(v0: i16x8, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -334,11 +334,11 @@ block0(v0: i16x8, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0xa6 +; .byte 0x57, 0xc4, 0x85, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -356,9 +356,9 @@ block0(v0: i16x8, v1: i128): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vx v14,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -373,11 +373,11 @@ block0(v0: i16x8, v1: i128): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xc7, 0x95, 0xa6 +; .byte 0x57, 0xc4, 0x85, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -395,9 +395,9 @@ block0(v0: i32x4, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -412,11 +412,11 @@ block0(v0: i32x4, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0xa6 +; .byte 0x57, 0xc4, 0x85, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -434,9 +434,9 @@ block0(v0: i32x4, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -451,11 +451,11 @@ block0(v0: i32x4, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 
0xc6, 0x95, 0xa6 +; .byte 0x57, 0xc4, 0x85, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -473,9 +473,9 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -490,11 +490,11 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0xa6 +; .byte 0x57, 0xc4, 0x85, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -512,9 +512,9 @@ block0(v0: i32x4, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -529,11 +529,11 @@ block0(v0: i32x4, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0xa6 +; .byte 0x57, 0xc4, 0x85, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -551,9 +551,9 @@ block0(v0: i32x4, v1: i128): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vx v14,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -568,11 +568,11 @@ block0(v0: i32x4, v1: i128): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xc7, 0x95, 0xa6 +; .byte 0x57, 0xc4, 0x85, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -590,9 +590,9 @@ block0(v0: i64x2, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -607,11 +607,11 @@ block0(v0: i64x2, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 
0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0xa6 +; .byte 0x57, 0xc4, 0x85, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -629,9 +629,9 @@ block0(v0: i64x2, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -646,11 +646,11 @@ block0(v0: i64x2, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0xa6 +; .byte 0x57, 0xc4, 0x85, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -668,9 +668,9 @@ block0(v0: i64x2, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -685,11 +685,11 @@ block0(v0: i64x2, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0xa6 +; .byte 0x57, 0xc4, 0x85, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -707,9 +707,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -724,11 +724,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0xa6 +; .byte 0x57, 0xc4, 0x85, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -746,9 +746,9 @@ block0(v0: i64x2, v1: i128): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsra.vx v14,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsra.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -763,11 +763,11 @@ block0(v0: i64x2, v1: i128): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 
0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc7, 0x95, 0xa6 +; .byte 0x57, 0xc4, 0x85, 0xa6 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-ssubsat.clif b/cranelift/filetests/filetests/isa/riscv64/simd-ssubsat.clif index ea30d334e013..39ff1f078b56 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-ssubsat.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-ssubsat.clif @@ -16,10 +16,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vssub.vv v14,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vssub.vv v8,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,11 +34,11 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x87, 0x95, 0x8e -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x84, 0x84, 0x8e +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -56,10 +56,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vssub.vv v14,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vssub.vv v8,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -74,13 +74,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x87, 0x95, 0x8e +; .byte 0x57, 0x84, 0x84, 0x8e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -98,10 +98,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vssub.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vssub.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,13 +116,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 
0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x87, 0x95, 0x8e +; .byte 0x57, 0x84, 0x84, 0x8e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -140,10 +140,10 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vssub.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vssub.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -158,13 +158,13 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x87, 0x95, 0x8e +; .byte 0x57, 0x84, 0x84, 0x8e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -184,10 +184,10 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,5 -; vssub.vx v13,v9,a5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,5 +; vssub.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -202,10 +202,10 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, 5 -; .byte 0xd7, 0xc6, 0x97, 0x8e -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, 5 +; .byte 0x57, 0xc4, 0x85, 0x8e +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -225,10 +225,10 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,-16 -; vssub.vx v13,v9,a5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,-16 +; vssub.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -243,12 +243,12 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, -0x10 +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, -0x10 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x97, 0x8e +; .byte 0x57, 0xc4, 0x85, 0x8e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -268,10 +268,10 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,15 -; vssub.vx v13,v9,a5 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, 
#vtype=(e8, m1, ta, ma) +; li a1,15 +; vssub.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -286,12 +286,12 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, 0xf +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, 0xf ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x97, 0x8e +; .byte 0x57, 0xc4, 0x85, 0x8e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -311,10 +311,10 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.v.i v13,-5 #avl=2, #vtype=(e64, m1, ta, ma) -; vssub.vv v13,v13,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.i v9,-5 #avl=2, #vtype=(e64, m1, ta, ma) +; vssub.vv v8,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -329,12 +329,12 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xb6, 0x0d, 0x5e -; .byte 0xd7, 0x86, 0xd4, 0x8e +; .byte 0xd7, 0xb4, 0x0d, 0x5e +; .byte 0x57, 0x04, 0x94, 0x8e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -353,9 +353,9 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vssub.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vssub.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -370,9 +370,9 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0x8e -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x8e +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -391,9 +391,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vssub.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vssub.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -408,11 +408,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0x8e +; .byte 0x57, 0xc4, 0x85, 0x8e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -431,9 +431,9 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, 
#vtype=(e8, m1, ta, ma) -; vssub.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vssub.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -448,11 +448,11 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x8e +; .byte 0x57, 0xc4, 0x85, 0x8e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -471,10 +471,10 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.v.x v14,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vssub.vv v14,v14,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.x v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vssub.vv v8,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -489,12 +489,12 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc7, 0x05, 0x5e -; .byte 0x57, 0x87, 0xe4, 0x8e +; .byte 0xd7, 0xc4, 0x05, 0x5e +; .byte 0x57, 0x04, 0x94, 0x8e ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-stores.clif b/cranelift/filetests/filetests/isa/riscv64/simd-stores.clif index c51740ba2387..8b4522918e71 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-stores.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-stores.clif @@ -16,8 +16,8 @@ block0(v0: i64, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -32,8 +32,8 @@ block0(v0: i64, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x07, 0x85, 0x0f, 0x02 -; .byte 0x27, 0x05, 0x05, 0x02 ; trap: heap_oob +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; trap: heap_oob ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -51,8 +51,8 @@ block0(v0: i64, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vse16.v v10,0(a0) #avl=8, #vtype=(e16, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vse16.v v8,0(a0) #avl=8, #vtype=(e16, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -67,9 +67,9 @@ block0(v0: i64, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x07, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x27, 0x55, 0x05, 0x02 ; trap: heap_oob +; .byte 0x27, 0x54, 0x05, 0x02 ; trap: heap_oob ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, 
sp, 0x10 @@ -87,8 +87,8 @@ block0(v0: i64, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vse32.v v10,0(a0) #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vse32.v v8,0(a0) #avl=4, #vtype=(e32, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -103,9 +103,9 @@ block0(v0: i64, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x07, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x27, 0x65, 0x05, 0x02 ; trap: heap_oob +; .byte 0x27, 0x64, 0x05, 0x02 ; trap: heap_oob ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -123,8 +123,8 @@ block0(v0: i64, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vse64.v v10,0(a0) #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vse64.v v8,0(a0) #avl=2, #vtype=(e64, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -139,9 +139,9 @@ block0(v0: i64, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x07, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x27, 0x75, 0x05, 0x02 ; trap: heap_oob +; .byte 0x27, 0x74, 0x05, 0x02 ; trap: heap_oob ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-swiden_high.clif b/cranelift/filetests/filetests/isa/riscv64/simd-swiden_high.clif index d8e647c7024d..4072bcd19150 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-swiden_high.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-swiden_high.clif @@ -15,10 +15,10 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v12,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vsext.vf2 v14,v12 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vsext.vf2 v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -33,12 +33,12 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0x36, 0x94, 0x3e +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0xd7, 0x34, 0x84, 0x3e ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xa7, 0xc3, 0x4a +; .byte 0x57, 0xa4, 0x93, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -56,10 +56,10 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v12,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vsext.vf2 v14,v12 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vsext.vf2 v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -74,13 +74,13 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 
0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x36, 0x92, 0x3e +; .byte 0xd7, 0x34, 0x82, 0x3e ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xa7, 0xc3, 0x4a +; .byte 0x57, 0xa4, 0x93, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -98,10 +98,10 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v12,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vsext.vf2 v14,v12 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vsext.vf2 v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,13 +116,13 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x36, 0x91, 0x3e +; .byte 0xd7, 0x34, 0x81, 0x3e ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xa7, 0xc3, 0x4a +; .byte 0x57, 0xa4, 0x93, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -141,10 +141,10 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v12,v9,12 #avl=16, #vtype=(e8, m1, ta, ma) -; vsext.vf4 v14,v12 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,12 #avl=16, #vtype=(e8, m1, ta, ma) +; vsext.vf4 v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -159,12 +159,12 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0x36, 0x96, 0x3e +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0xd7, 0x34, 0x86, 0x3e ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xa7, 0xc2, 0x4a +; .byte 0x57, 0xa4, 0x92, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -183,10 +183,10 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v12,v9,6 #avl=8, #vtype=(e16, m1, ta, ma) -; vsext.vf4 v14,v12 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,6 #avl=8, #vtype=(e16, m1, ta, ma) +; vsext.vf4 v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -201,13 +201,13 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x36, 0x93, 0x3e +; .byte 0xd7, 0x34, 0x83, 0x3e ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xa7, 0xc2, 0x4a +; .byte 0x57, 0xa4, 0x92, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 
0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -227,10 +227,10 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v12,v9,14 #avl=16, #vtype=(e8, m1, ta, ma) -; vsext.vf8 v14,v12 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,14 #avl=16, #vtype=(e8, m1, ta, ma) +; vsext.vf8 v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -245,12 +245,12 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0x36, 0x97, 0x3e +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0xd7, 0x34, 0x87, 0x3e ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xa7, 0xc1, 0x4a +; .byte 0x57, 0xa4, 0x91, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-swiden_low.clif b/cranelift/filetests/filetests/isa/riscv64/simd-swiden_low.clif index 271076591763..2b72b7a299bb 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-swiden_low.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-swiden_low.clif @@ -16,8 +16,8 @@ block0(v0: i8x16): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsext.vf2 v12,v9 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vsext.vf2 v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,9 +34,9 @@ block0(v0: i8x16): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xa6, 0x93, 0x4a +; .byte 0x57, 0xa4, 0x93, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -55,8 +55,8 @@ block0(v0: i16x8): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsext.vf2 v12,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vsext.vf2 v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -73,9 +73,9 @@ block0(v0: i16x8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xa6, 0x93, 0x4a +; .byte 0x57, 0xa4, 0x93, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -94,8 +94,8 @@ block0(v0: i32x4): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsext.vf2 v12,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vsext.vf2 v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -112,9 +112,9 @@ block0(v0: i32x4): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xa6, 0x93, 0x4a +; .byte 0x57, 0xa4, 0x93, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 
0x10 @@ -134,8 +134,8 @@ block0(v0: i8x16): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsext.vf4 v12,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vsext.vf4 v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -152,9 +152,9 @@ block0(v0: i8x16): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xa6, 0x92, 0x4a +; .byte 0x57, 0xa4, 0x92, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -174,8 +174,8 @@ block0(v0: i16x8): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsext.vf4 v12,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vsext.vf4 v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -192,9 +192,9 @@ block0(v0: i16x8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xa6, 0x92, 0x4a +; .byte 0x57, 0xa4, 0x92, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -215,8 +215,8 @@ block0(v0: i8x16): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsext.vf8 v12,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vsext.vf8 v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -233,9 +233,9 @@ block0(v0: i8x16): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xa6, 0x91, 0x4a +; .byte 0x57, 0xa4, 0x91, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-swizzle.clif b/cranelift/filetests/filetests/isa/riscv64/simd-swizzle.clif index 7d11818ac9ab..04a93b690b4e 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-swizzle.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-swizzle.clif @@ -15,10 +15,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vrgather.vv v14,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v10,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vrgather.vv v9,v8,v10 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v9,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -33,11 +33,11 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x87, 0x95, 0x32 -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x07, 0x85, 0x0f, 0x02 +; .byte 0xd7, 0x04, 0x85, 0x32 +; .byte 0xa7, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -57,8 +57,8 @@ block0(v0: i8x16, v1: i8): ; mv fp,sp ; 
block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vrgather.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vrgather.vx v8,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -74,8 +74,8 @@ block0(v0: i8x16, v1: i8): ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0x32 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x57, 0xc4, 0x95, 0x32 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -96,8 +96,8 @@ block0(v0: i8x16): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vrgather.vi v12,v9,2 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vrgather.vi v8,v9,2 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -113,8 +113,8 @@ block0(v0: i8x16): ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0x36, 0x91, 0x32 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x57, 0x34, 0x91, 0x32 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-trunc.clif b/cranelift/filetests/filetests/isa/riscv64/simd-trunc.clif index c73bbe0b8cad..e98df8e5d272 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-trunc.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-trunc.clif @@ -16,18 +16,18 @@ block0(v0: f32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfabs.v v12,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfabs.v v9,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; lui a1,307200 -; fmv.w.x fa2,a1 -; vmflt.vf v0,v12,fa2 #avl=4, #vtype=(e32, m1, ta, ma) -; vfcvt.rtz.x.f.v v12,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; fmv.w.x fa0,a1 +; vmflt.vf v0,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vfcvt.rtz.x.f.v v12,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; vfcvt.f.x.v v14,v12 #avl=4, #vtype=(e32, m1, ta, ma) -; vfsgnj.vv v8,v14,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vfsgnj.vv v9,v14,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; fmv.w.x fa0,zero -; vfadd.vf v12,v9,fa0 #avl=4, #vtype=(e32, m1, ta, ma) -; vmerge.vvm v14,v12,v8,v0.t #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vfadd.vf v10,v8,fa0 #avl=4, #vtype=(e32, m1, ta, ma) +; vmerge.vvm v8,v10,v9,v0.t #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -42,20 +42,20 @@ block0(v0: f32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x96, 0x94, 0x2a +; .byte 0xd7, 0x14, 0x84, 0x2a ; lui a1, 0x4b000 -; fmv.w.x fa2, a1 -; .byte 0x57, 0x50, 0xc6, 0x6e -; .byte 0x57, 0x96, 0x93, 0x4a +; fmv.w.x fa0, a1 +; .byte 0x57, 0x50, 0x95, 0x6e +; .byte 0x57, 0x96, 0x83, 0x4a ; .byte 0x57, 0x97, 0xc1, 0x4a -; .byte 0x57, 0x94, 0xe4, 0x22 +; .byte 0xd7, 0x14, 0xe4, 0x22 ; fmv.w.x fa0, zero -; .byte 0x57, 0x56, 0x95, 0x02 -; .byte 0x57, 0x07, 0xc4, 0x5c +; .byte 0x57, 0x55, 0x85, 0x02 +; .byte 0x57, 0x84, 0xa4, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 
0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -73,18 +73,18 @@ block0(v0: f64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vfabs.v v12,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vfabs.v v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; lui a1,1075 -; slli a2,a1,40 -; fmv.d.x fa4,a2 -; vmflt.vf v0,v12,fa4 #avl=2, #vtype=(e64, m1, ta, ma) -; vfcvt.rtz.x.f.v v14,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vfcvt.f.x.v v8,v14 #avl=2, #vtype=(e64, m1, ta, ma) -; vfsgnj.vv v10,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; fmv.d.x fa2,zero -; vfadd.vf v14,v9,fa2 #avl=2, #vtype=(e64, m1, ta, ma) -; vmerge.vvm v8,v14,v10,v0.t #avl=2, #vtype=(e64, m1, ta, ma) +; slli a1,a1,40 +; fmv.d.x fa0,a1 +; vmflt.vf v0,v9,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vfcvt.rtz.x.f.v v14,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vfcvt.f.x.v v9,v14 #avl=2, #vtype=(e64, m1, ta, ma) +; vfsgnj.vv v9,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; fmv.d.x fa0,zero +; vfadd.vf v10,v8,fa0 #avl=2, #vtype=(e64, m1, ta, ma) +; vmerge.vvm v8,v10,v9,v0.t #avl=2, #vtype=(e64, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) @@ -100,19 +100,19 @@ block0(v0: f64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x96, 0x94, 0x2a +; .byte 0xd7, 0x14, 0x84, 0x2a ; lui a1, 0x433 -; slli a2, a1, 0x28 -; fmv.d.x fa4, a2 -; .byte 0x57, 0x50, 0xc7, 0x6e -; .byte 0x57, 0x97, 0x93, 0x4a -; .byte 0x57, 0x94, 0xe1, 0x4a -; .byte 0x57, 0x95, 0x84, 0x22 -; fmv.d.x fa2, zero -; .byte 0x57, 0x57, 0x96, 0x02 -; .byte 0x57, 0x04, 0xe5, 0x5c +; slli a1, a1, 0x28 +; fmv.d.x fa0, a1 +; .byte 0x57, 0x50, 0x95, 0x6e +; .byte 0x57, 0x97, 0x83, 0x4a +; .byte 0xd7, 0x94, 0xe1, 0x4a +; .byte 0xd7, 0x14, 0x94, 0x22 +; fmv.d.x fa0, zero +; .byte 0x57, 0x55, 0x85, 0x02 +; .byte 0x57, 0x84, 0xa4, 0x5c ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-uaddsat.clif b/cranelift/filetests/filetests/isa/riscv64/simd-uaddsat.clif index 02efba71d27c..10380e3ec7de 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-uaddsat.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-uaddsat.clif @@ -16,10 +16,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsaddu.vv v14,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsaddu.vv v8,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,11 +34,11 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x87, 0x95, 0x82 -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x84, 0x84, 0x82 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -56,10 +56,10 @@ block0(v0: i16x8, v1: i16x8): ; sd 
fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsaddu.vv v14,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsaddu.vv v8,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -74,13 +74,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x87, 0x95, 0x82 +; .byte 0x57, 0x84, 0x84, 0x82 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -98,10 +98,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsaddu.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsaddu.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,13 +116,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x87, 0x95, 0x82 +; .byte 0x57, 0x84, 0x84, 0x82 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -140,10 +140,10 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsaddu.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsaddu.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -158,13 +158,13 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x87, 0x95, 0x82 +; .byte 0x57, 0x84, 0x84, 0x82 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -184,9 +184,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsaddu.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v 
v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsaddu.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -201,9 +201,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0x82 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0x82 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -223,9 +223,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsaddu.vi v12,v9,-16 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsaddu.vi v8,v8,-16 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -240,11 +240,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x36, 0x98, 0x82 +; .byte 0x57, 0x34, 0x88, 0x82 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -264,9 +264,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsaddu.vi v12,v9,15 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsaddu.vi v8,v8,15 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -281,11 +281,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x97, 0x82 +; .byte 0x57, 0xb4, 0x87, 0x82 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -305,9 +305,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsaddu.vi v12,v9,-5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsaddu.vi v8,v8,-5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -322,11 +322,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x9d, 0x82 +; .byte 0x57, 0xb4, 0x8d, 0x82 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -345,9 +345,9 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsaddu.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsaddu.vx v8,v8,a1 
#avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -362,9 +362,9 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0x82 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x82 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -383,9 +383,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsaddu.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsaddu.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -400,11 +400,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0x82 +; .byte 0x57, 0xc4, 0x85, 0x82 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -423,9 +423,9 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsaddu.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsaddu.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -440,11 +440,11 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x82 +; .byte 0x57, 0xc4, 0x85, 0x82 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -463,9 +463,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsaddu.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsaddu.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -480,11 +480,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x82 +; .byte 0x57, 0xc4, 0x85, 0x82 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-umax.clif b/cranelift/filetests/filetests/isa/riscv64/simd-umax.clif index c5f1c5c813e7..3f6715f415c1 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-umax.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-umax.clif @@ -15,10 +15,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; 
mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmaxu.vv v14,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmaxu.vv v8,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -33,11 +33,11 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x87, 0x95, 0x1a -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x84, 0x84, 0x1a +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -55,10 +55,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmaxu.vv v14,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmaxu.vv v8,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -73,13 +73,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x87, 0x95, 0x1a +; .byte 0x57, 0x84, 0x84, 0x1a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -97,10 +97,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmaxu.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmaxu.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -115,13 +115,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x87, 0x95, 0x1a +; .byte 0x57, 0x84, 0x84, 0x1a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -139,10 +139,10 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmaxu.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v 
v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmaxu.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -157,13 +157,13 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x87, 0x95, 0x1a +; .byte 0x57, 0x84, 0x84, 0x1a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -183,10 +183,10 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,5 -; vmaxu.vx v13,v9,a5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,5 +; vmaxu.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -201,10 +201,10 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, 5 -; .byte 0xd7, 0xc6, 0x97, 0x1a -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, 5 +; .byte 0x57, 0xc4, 0x85, 0x1a +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -224,10 +224,10 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,-16 -; vmaxu.vx v13,v9,a5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,-16 +; vmaxu.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -242,12 +242,12 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, -0x10 +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, -0x10 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x97, 0x1a +; .byte 0x57, 0xc4, 0x85, 0x1a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -267,10 +267,10 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,15 -; vmaxu.vx v13,v9,a5 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,15 +; vmaxu.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -285,12 +285,12 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, 0xf +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, 0xf ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x97, 0x1a +; .byte 0x57, 0xc4, 0x85, 0x1a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; 
addi sp, sp, 0x10 @@ -310,10 +310,10 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,-5 -; vmaxu.vx v13,v9,a5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,-5 +; vmaxu.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -328,12 +328,12 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, -5 +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, -5 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x97, 0x1a +; .byte 0x57, 0xc4, 0x85, 0x1a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -352,9 +352,9 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmaxu.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmaxu.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -369,9 +369,9 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0x1a -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x1a +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -390,9 +390,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmaxu.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmaxu.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -407,11 +407,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0x1a +; .byte 0x57, 0xc4, 0x85, 0x1a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -430,9 +430,9 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmaxu.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmaxu.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -447,11 +447,11 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x1a +; .byte 0x57, 0xc4, 0x85, 0x1a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 
8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -470,9 +470,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmaxu.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmaxu.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -487,11 +487,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x1a +; .byte 0x57, 0xc4, 0x85, 0x1a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-umin.clif b/cranelift/filetests/filetests/isa/riscv64/simd-umin.clif index a0129ee64a83..e4aa62632031 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-umin.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-umin.clif @@ -16,10 +16,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vminu.vv v14,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vminu.vv v8,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,11 +34,11 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x87, 0x95, 0x12 -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x84, 0x84, 0x12 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -56,10 +56,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vminu.vv v14,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vminu.vv v8,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -74,13 +74,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x87, 0x95, 0x12 +; .byte 0x57, 0x84, 0x84, 0x12 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -98,10 +98,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; 
vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vminu.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vminu.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,13 +116,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x87, 0x95, 0x12 +; .byte 0x57, 0x84, 0x84, 0x12 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -140,10 +140,10 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vminu.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vminu.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -158,13 +158,13 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x87, 0x95, 0x12 +; .byte 0x57, 0x84, 0x84, 0x12 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -184,10 +184,10 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,5 -; vminu.vx v13,v9,a5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,5 +; vminu.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -202,10 +202,10 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, 5 -; .byte 0xd7, 0xc6, 0x97, 0x12 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, 5 +; .byte 0x57, 0xc4, 0x85, 0x12 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -225,10 +225,10 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,-16 -; vminu.vx v13,v9,a5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,-16 +; vminu.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -243,12 +243,12 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi 
t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, -0x10 +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, -0x10 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x97, 0x12 +; .byte 0x57, 0xc4, 0x85, 0x12 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -268,10 +268,10 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,15 -; vminu.vx v13,v9,a5 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,15 +; vminu.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -286,12 +286,12 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, 0xf +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, 0xf ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x97, 0x12 +; .byte 0x57, 0xc4, 0x85, 0x12 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -311,10 +311,10 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,-5 -; vminu.vx v13,v9,a5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,-5 +; vminu.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -329,12 +329,12 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, -5 +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, -5 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x97, 0x12 +; .byte 0x57, 0xc4, 0x85, 0x12 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -353,9 +353,9 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vminu.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vminu.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -370,9 +370,9 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0x12 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x12 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -391,9 +391,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vminu.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vminu.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi 
sp,sp,16 @@ -408,11 +408,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0x12 +; .byte 0x57, 0xc4, 0x85, 0x12 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -431,9 +431,9 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vminu.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vminu.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -448,11 +448,11 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x12 +; .byte 0x57, 0xc4, 0x85, 0x12 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -471,9 +471,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vminu.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vminu.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -488,11 +488,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x12 +; .byte 0x57, 0xc4, 0x85, 0x12 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-umulhi.clif b/cranelift/filetests/filetests/isa/riscv64/simd-umulhi.clif index 32638c067ecf..c20cba93111e 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-umulhi.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-umulhi.clif @@ -16,10 +16,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmulhu.vv v14,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmulhu.vv v8,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,11 +34,11 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0xa7, 0x95, 0x92 -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xa4, 0x84, 0x92 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 
8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -56,10 +56,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmulhu.vv v14,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmulhu.vv v8,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -74,13 +74,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xa7, 0x95, 0x92 +; .byte 0x57, 0xa4, 0x84, 0x92 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -98,10 +98,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmulhu.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmulhu.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,13 +116,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xa7, 0x95, 0x92 +; .byte 0x57, 0xa4, 0x84, 0x92 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -140,10 +140,10 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmulhu.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmulhu.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -158,13 +158,13 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xa7, 0x95, 0x92 +; .byte 0x57, 0xa4, 0x84, 0x92 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -183,9 +183,9 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmulhu.vx 
v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmulhu.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -200,9 +200,9 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xe6, 0x95, 0x92 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xe4, 0x85, 0x92 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -221,9 +221,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmulhu.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmulhu.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -238,11 +238,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xe6, 0x95, 0x92 +; .byte 0x57, 0xe4, 0x85, 0x92 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -261,9 +261,9 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmulhu.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmulhu.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -278,11 +278,11 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xe6, 0x95, 0x92 +; .byte 0x57, 0xe4, 0x85, 0x92 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -301,9 +301,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmulhu.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmulhu.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -318,11 +318,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xe6, 0x95, 0x92 +; .byte 0x57, 0xe4, 0x85, 0x92 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-unarrow.clif b/cranelift/filetests/filetests/isa/riscv64/simd-unarrow.clif index 41fa2fdb72d8..03d90ec3a093 100644 --- 
a/cranelift/filetests/filetests/isa/riscv64/simd-unarrow.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-unarrow.clif @@ -16,10 +16,10 @@ block0(v0: i16x8, v1: i16x8): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmax.vx v14,v9,zero #avl=8, #vtype=(e16, m1, ta, ma) -; vmax.vx v8,v11,zero #avl=8, #vtype=(e16, m1, ta, ma) -; vnclipu.wi v12,v14,0 #avl=8, #vtype=(e8, mf2, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmax.vx v9,v9,zero #avl=8, #vtype=(e16, m1, ta, ma) +; vmax.vx v8,v8,zero #avl=8, #vtype=(e16, m1, ta, ma) +; vnclipu.wi v12,v9,0 #avl=8, #vtype=(e8, mf2, ta, ma) ; vnclipu.wi v13,v8,0 #avl=8, #vtype=(e8, mf2, ta, ma) ; vslideup.vi v12,v13,8 #avl=16, #vtype=(e8, m1, ta, ma) ; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -39,12 +39,12 @@ block0(v0: i16x8, v1: i16x8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x47, 0x90, 0x1e -; .byte 0x57, 0x44, 0xb0, 0x1e +; .byte 0xd7, 0x44, 0x90, 0x1e +; .byte 0x57, 0x44, 0x80, 0x1e ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0x36, 0xe0, 0xba +; .byte 0x57, 0x36, 0x90, 0xba ; .byte 0xd7, 0x36, 0x80, 0xba ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x57, 0x36, 0xd4, 0x3a @@ -67,10 +67,10 @@ block0(v0: i32x4, v1: i32x4): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmax.vx v14,v9,zero #avl=4, #vtype=(e32, m1, ta, ma) -; vmax.vx v8,v11,zero #avl=4, #vtype=(e32, m1, ta, ma) -; vnclipu.wi v12,v14,0 #avl=4, #vtype=(e16, mf2, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmax.vx v9,v9,zero #avl=4, #vtype=(e32, m1, ta, ma) +; vmax.vx v8,v8,zero #avl=4, #vtype=(e32, m1, ta, ma) +; vnclipu.wi v12,v9,0 #avl=4, #vtype=(e16, mf2, ta, ma) ; vnclipu.wi v13,v8,0 #avl=4, #vtype=(e16, mf2, ta, ma) ; vslideup.vi v12,v13,4 #avl=8, #vtype=(e16, m1, ta, ma) ; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -90,12 +90,12 @@ block0(v0: i32x4, v1: i32x4): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x47, 0x90, 0x1e -; .byte 0x57, 0x44, 0xb0, 0x1e +; .byte 0xd7, 0x44, 0x90, 0x1e +; .byte 0x57, 0x44, 0x80, 0x1e ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0x36, 0xe0, 0xba +; .byte 0x57, 0x36, 0x90, 0xba ; .byte 0xd7, 0x36, 0x80, 0xba ; .byte 0x57, 0x70, 0x84, 0xcc ; .byte 0x57, 0x36, 0xd2, 0x3a @@ -119,10 +119,10 @@ block0(v0: i64x2, v1: i64x2): ; mv fp,sp ; block0: ; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmax.vx v14,v9,zero #avl=2, #vtype=(e64, m1, ta, ma) -; vmax.vx v8,v11,zero #avl=2, #vtype=(e64, m1, ta, ma) -; vnclipu.wi v12,v14,0 #avl=2, #vtype=(e32, mf2, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmax.vx v9,v9,zero #avl=2, #vtype=(e64, m1, ta, ma) +; vmax.vx v8,v8,zero #avl=2, #vtype=(e64, m1, ta, ma) +; vnclipu.wi v12,v9,0 #avl=2, #vtype=(e32, mf2, ta, ma) ; vnclipu.wi v13,v8,0 #avl=2, #vtype=(e32, mf2, ta, ma) ; vslideup.vi v12,v13,2 #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -142,12 +142,12 @@ block0(v0: i64x2, v1: i64x2): ; addi t6, sp, 
0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x47, 0x90, 0x1e -; .byte 0x57, 0x44, 0xb0, 0x1e +; .byte 0xd7, 0x44, 0x90, 0x1e +; .byte 0x57, 0x44, 0x80, 0x1e ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0x36, 0xe0, 0xba +; .byte 0x57, 0x36, 0x90, 0xba ; .byte 0xd7, 0x36, 0x80, 0xba ; .byte 0x57, 0x70, 0x02, 0xcd ; .byte 0x57, 0x36, 0xd1, 0x3a diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-ushr-const.clif b/cranelift/filetests/filetests/isa/riscv64/simd-ushr-const.clif index a22ce2d4a53e..8029090d7553 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-ushr-const.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-ushr-const.clif @@ -17,9 +17,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,9 +34,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0xa2 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0xa2 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -55,9 +55,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -72,9 +72,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0xa2 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0xa2 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -93,9 +93,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -110,9 +110,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0xa2 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0xa2 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -131,9 +131,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vi v8,v8,5 #avl=16, 
#vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -148,9 +148,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0xa2 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0xa2 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -170,9 +170,9 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vi v12,v9,5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vi v8,v8,5 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -187,9 +187,9 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb6, 0x92, 0xa2 -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xb4, 0x82, 0xa2 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -208,9 +208,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vi v12,v9,5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vi v8,v8,5 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -225,11 +225,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xb6, 0x92, 0xa2 +; .byte 0x57, 0xb4, 0x82, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -248,9 +248,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vi v12,v9,5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vi v8,v8,5 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -265,11 +265,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xb6, 0x92, 0xa2 +; .byte 0x57, 0xb4, 0x82, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -288,9 +288,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vi v12,v9,5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vi v8,v8,5 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -305,11 +305,11 @@ block0(v0: i16x8): ; block1: ; 
offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xb6, 0x92, 0xa2 +; .byte 0x57, 0xb4, 0x82, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -328,9 +328,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vi v12,v9,5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vi v8,v8,5 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -345,11 +345,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xb6, 0x92, 0xa2 +; .byte 0x57, 0xb4, 0x82, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -369,9 +369,9 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vi v12,v9,5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vi v8,v8,5 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -386,11 +386,11 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xb6, 0x92, 0xa2 +; .byte 0x57, 0xb4, 0x82, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -409,9 +409,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vi v12,v9,5 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vi v8,v8,5 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -426,11 +426,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x92, 0xa2 +; .byte 0x57, 0xb4, 0x82, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -449,9 +449,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vi v12,v9,5 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vi v8,v8,5 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -466,11 +466,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi 
t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x92, 0xa2 +; .byte 0x57, 0xb4, 0x82, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -489,9 +489,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vi v12,v9,5 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vi v8,v8,5 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -506,11 +506,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x92, 0xa2 +; .byte 0x57, 0xb4, 0x82, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -529,9 +529,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vi v12,v9,5 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vi v8,v8,5 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -546,11 +546,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x92, 0xa2 +; .byte 0x57, 0xb4, 0x82, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -570,9 +570,9 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vi v12,v9,5 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vi v8,v8,5 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -587,11 +587,11 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb6, 0x92, 0xa2 +; .byte 0x57, 0xb4, 0x82, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -610,9 +610,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vi v12,v9,5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vi v8,v8,5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -627,11 +627,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; 
.byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x92, 0xa2 +; .byte 0x57, 0xb4, 0x82, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -650,9 +650,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vi v12,v9,5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vi v8,v8,5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -667,11 +667,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x92, 0xa2 +; .byte 0x57, 0xb4, 0x82, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -690,9 +690,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vi v12,v9,5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vi v8,v8,5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -707,11 +707,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x92, 0xa2 +; .byte 0x57, 0xb4, 0x82, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -730,9 +730,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vi v12,v9,5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vi v8,v8,5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -747,11 +747,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb6, 0x92, 0xa2 +; .byte 0x57, 0xb4, 0x82, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -771,9 +771,9 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vi v12,v9,5 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vi v8,v8,5 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -788,11 +788,11 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 
0x81, 0xcd -; .byte 0x57, 0xb6, 0x92, 0xa2 +; .byte 0x57, 0xb4, 0x82, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-ushr.clif b/cranelift/filetests/filetests/isa/riscv64/simd-ushr.clif index aab93f4e0797..456e7571af60 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-ushr.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-ushr.clif @@ -15,9 +15,9 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -32,9 +32,9 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0xa2 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0xa2 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -52,9 +52,9 @@ block0(v0: i8x16, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -69,9 +69,9 @@ block0(v0: i8x16, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0xa2 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0xa2 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -89,9 +89,9 @@ block0(v0: i8x16, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -106,9 +106,9 @@ block0(v0: i8x16, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0xa2 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0xa2 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -126,9 +126,9 @@ block0(v0: i8x16, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -143,9 +143,9 @@ block0(v0: i8x16, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 
0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0xa2 -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0xa2 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -163,9 +163,9 @@ block0(v0: i8x16, v1: i128): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vx v14,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -180,9 +180,9 @@ block0(v0: i8x16, v1: i128): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xc7, 0x95, 0xa2 -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0xa2 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -200,9 +200,9 @@ block0(v0: i16x8, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -217,11 +217,11 @@ block0(v0: i16x8, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0xa2 +; .byte 0x57, 0xc4, 0x85, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -239,9 +239,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -256,11 +256,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0xa2 +; .byte 0x57, 0xc4, 0x85, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -278,9 +278,9 @@ block0(v0: i16x8, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -295,11 +295,11 @@ block0(v0: i16x8, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 
0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0xa2 +; .byte 0x57, 0xc4, 0x85, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -317,9 +317,9 @@ block0(v0: i16x8, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -334,11 +334,11 @@ block0(v0: i16x8, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0xa2 +; .byte 0x57, 0xc4, 0x85, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -356,9 +356,9 @@ block0(v0: i16x8, v1: i128): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vx v14,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -373,11 +373,11 @@ block0(v0: i16x8, v1: i128): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xc7, 0x95, 0xa2 +; .byte 0x57, 0xc4, 0x85, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -395,9 +395,9 @@ block0(v0: i32x4, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -412,11 +412,11 @@ block0(v0: i32x4, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0xa2 +; .byte 0x57, 0xc4, 0x85, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -434,9 +434,9 @@ block0(v0: i32x4, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -451,11 +451,11 @@ block0(v0: i32x4, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 
0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0xa2 +; .byte 0x57, 0xc4, 0x85, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -473,9 +473,9 @@ block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -490,11 +490,11 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0xa2 +; .byte 0x57, 0xc4, 0x85, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -512,9 +512,9 @@ block0(v0: i32x4, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -529,11 +529,11 @@ block0(v0: i32x4, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0xa2 +; .byte 0x57, 0xc4, 0x85, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -551,9 +551,9 @@ block0(v0: i32x4, v1: i128): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vx v14,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -568,11 +568,11 @@ block0(v0: i32x4, v1: i128): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xc7, 0x95, 0xa2 +; .byte 0x57, 0xc4, 0x85, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -590,9 +590,9 @@ block0(v0: i64x2, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -607,11 +607,11 @@ block0(v0: i64x2, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 
0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0xa2 +; .byte 0x57, 0xc4, 0x85, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -629,9 +629,9 @@ block0(v0: i64x2, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -646,11 +646,11 @@ block0(v0: i64x2, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0xa2 +; .byte 0x57, 0xc4, 0x85, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -668,9 +668,9 @@ block0(v0: i64x2, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -685,11 +685,11 @@ block0(v0: i64x2, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0xa2 +; .byte 0x57, 0xc4, 0x85, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -707,9 +707,9 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vx v13,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -724,11 +724,11 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0xa2 +; .byte 0x57, 0xc4, 0x85, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -746,9 +746,9 @@ block0(v0: i64x2, v1: i128): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vsrl.vx v14,v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vsrl.vx v8,v8,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -763,11 +763,11 @@ block0(v0: i64x2, v1: i128): ; block1: ; offset 0x10 ; .byte 0x57, 
0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc7, 0x95, 0xa2 +; .byte 0x57, 0xc4, 0x85, 0xa2 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-usubsat.clif b/cranelift/filetests/filetests/isa/riscv64/simd-usubsat.clif index 942f50f6669a..8b3fba1e2ddc 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-usubsat.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-usubsat.clif @@ -16,10 +16,10 @@ block0(v0: i8x16, v1: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vssubu.vv v14,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vssubu.vv v8,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,11 +34,11 @@ block0(v0: i8x16, v1: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x87, 0x95, 0x8a -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x84, 0x84, 0x8a +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -56,10 +56,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vssubu.vv v14,v9,v11 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vssubu.vv v8,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -74,13 +74,13 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x87, 0x95, 0x8a +; .byte 0x57, 0x84, 0x84, 0x8a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -98,10 +98,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vssubu.vv v14,v9,v11 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vssubu.vv v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -116,13 +116,13 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; 
.byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x87, 0x95, 0x8a +; .byte 0x57, 0x84, 0x84, 0x8a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -140,10 +140,10 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vssubu.vv v14,v9,v11 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vssubu.vv v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -158,13 +158,13 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x87, 0x95, 0x8a +; .byte 0x57, 0x84, 0x84, 0x8a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -184,10 +184,10 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,5 -; vssubu.vx v13,v9,a5 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,5 +; vssubu.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -202,10 +202,10 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, 5 -; .byte 0xd7, 0xc6, 0x97, 0x8a -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, 5 +; .byte 0x57, 0xc4, 0x85, 0x8a +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -225,10 +225,10 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,-16 -; vssubu.vx v13,v9,a5 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,-16 +; vssubu.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -243,12 +243,12 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, -0x10 +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, -0x10 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x97, 0x8a +; .byte 0x57, 0xc4, 0x85, 0x8a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -268,10 +268,10 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; li a5,15 -; vssubu.vx v13,v9,a5 #avl=4, #vtype=(e32, m1, ta, ma) 
-; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; li a1,15 +; vssubu.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -286,12 +286,12 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; addi a5, zero, 0xf +; .byte 0x07, 0x84, 0x0f, 0x02 +; addi a1, zero, 0xf ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x97, 0x8a +; .byte 0x57, 0xc4, 0x85, 0x8a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -311,10 +311,10 @@ block0(v0: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.v.i v13,-5 #avl=2, #vtype=(e64, m1, ta, ma) -; vssubu.vv v13,v13,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.i v9,-5 #avl=2, #vtype=(e64, m1, ta, ma) +; vssubu.vv v8,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -329,12 +329,12 @@ block0(v0: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0xd7, 0xb6, 0x0d, 0x5e -; .byte 0xd7, 0x86, 0xd4, 0x8a +; .byte 0xd7, 0xb4, 0x0d, 0x5e +; .byte 0x57, 0x04, 0x94, 0x8a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -353,9 +353,9 @@ block0(v0: i8x16, v1: i8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vssubu.vx v13,v9,a1 #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vssubu.vx v8,v8,a1 #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -370,9 +370,9 @@ block0(v0: i8x16, v1: i8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0xd7, 0xc6, 0x95, 0x8a -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x57, 0xc4, 0x85, 0x8a +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -391,9 +391,9 @@ block0(v0: i16x8, v1: i16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vssubu.vx v13,v9,a1 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vssubu.vx v8,v8,a1 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -408,11 +408,11 @@ block0(v0: i16x8, v1: i16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0xc6, 0x95, 0x8a +; .byte 0x57, 0xc4, 0x85, 0x8a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -431,9 +431,9 @@ 
block0(v0: i32x4, v1: i32): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vssubu.vx v13,v9,a1 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v13,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vssubu.vx v8,v8,a1 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -448,11 +448,11 @@ block0(v0: i32x4, v1: i32): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0xd7, 0xc6, 0x95, 0x8a +; .byte 0x57, 0xc4, 0x85, 0x8a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xa7, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -471,10 +471,10 @@ block0(v0: i64x2, v1: i64): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.v.x v14,a1 #avl=2, #vtype=(e64, m1, ta, ma) -; vssubu.vv v14,v14,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.x v9,a1 #avl=2, #vtype=(e64, m1, ta, ma) +; vssubu.vv v8,v9,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -489,12 +489,12 @@ block0(v0: i64x2, v1: i64): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xc7, 0x05, 0x5e -; .byte 0x57, 0x87, 0xe4, 0x8a +; .byte 0xd7, 0xc4, 0x05, 0x5e +; .byte 0x57, 0x04, 0x94, 0x8a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-uunarrow.clif b/cranelift/filetests/filetests/isa/riscv64/simd-uunarrow.clif index a31ddeeb2aa0..0ffb542b905b 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-uunarrow.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-uunarrow.clif @@ -15,10 +15,10 @@ block0(v0: i16x8, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vnclipu.wi v8,v9,0 #avl=8, #vtype=(e8, mf2, ta, ma) -; vnclipu.wi v9,v11,0 #avl=8, #vtype=(e8, mf2, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vnclipu.wi v8,v8,0 #avl=8, #vtype=(e8, mf2, ta, ma) +; vnclipu.wi v9,v9,0 #avl=8, #vtype=(e8, mf2, ta, ma) ; vslideup.vi v8,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) @@ -35,12 +35,12 @@ block0(v0: i16x8, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x74, 0xcc -; .byte 0x57, 0x34, 0x90, 0xba -; .byte 0xd7, 0x34, 0xb0, 0xba +; .byte 0x57, 0x34, 0x80, 0xba +; .byte 0xd7, 0x34, 0x90, 0xba ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x57, 0x34, 0x94, 0x3a ; .byte 0x27, 0x04, 0x05, 0x02 @@ -61,10 +61,10 @@ block0(v0: i32x4, v1: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; 
vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vnclipu.wi v8,v9,0 #avl=4, #vtype=(e16, mf2, ta, ma) -; vnclipu.wi v9,v11,0 #avl=4, #vtype=(e16, mf2, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vnclipu.wi v8,v8,0 #avl=4, #vtype=(e16, mf2, ta, ma) +; vnclipu.wi v9,v9,0 #avl=4, #vtype=(e16, mf2, ta, ma) ; vslideup.vi v8,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) @@ -81,12 +81,12 @@ block0(v0: i32x4, v1: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0xf2, 0xcc -; .byte 0x57, 0x34, 0x90, 0xba -; .byte 0xd7, 0x34, 0xb0, 0xba +; .byte 0x57, 0x34, 0x80, 0xba +; .byte 0xd7, 0x34, 0x90, 0xba ; .byte 0x57, 0x70, 0x84, 0xcc ; .byte 0x57, 0x34, 0x92, 0x3a ; .byte 0x57, 0x70, 0x08, 0xcc @@ -108,10 +108,10 @@ block0(v0: i64x2, v1: i64x2): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vnclipu.wi v8,v9,0 #avl=2, #vtype=(e32, mf2, ta, ma) -; vnclipu.wi v9,v11,0 #avl=2, #vtype=(e32, mf2, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vnclipu.wi v8,v8,0 #avl=2, #vtype=(e32, mf2, ta, ma) +; vnclipu.wi v9,v9,0 #avl=2, #vtype=(e32, mf2, ta, ma) ; vslideup.vi v8,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) @@ -128,12 +128,12 @@ block0(v0: i64x2, v1: i64x2): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x71, 0xcd -; .byte 0x57, 0x34, 0x90, 0xba -; .byte 0xd7, 0x34, 0xb0, 0xba +; .byte 0x57, 0x34, 0x80, 0xba +; .byte 0xd7, 0x34, 0x90, 0xba ; .byte 0x57, 0x70, 0x02, 0xcd ; .byte 0x57, 0x34, 0x91, 0x3a ; .byte 0x57, 0x70, 0x08, 0xcc diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-uwiden_high.clif b/cranelift/filetests/filetests/isa/riscv64/simd-uwiden_high.clif index 20abbbab5065..80051f9cddee 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-uwiden_high.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-uwiden_high.clif @@ -16,10 +16,10 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v12,v9,8 #avl=16, #vtype=(e8, m1, ta, ma) -; vzext.vf2 v14,v12 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,8 #avl=16, #vtype=(e8, m1, ta, ma) +; vzext.vf2 v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,12 +34,12 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0x36, 0x94, 0x3e +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0xd7, 0x34, 0x84, 0x3e ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x27, 0xc3, 0x4a +; .byte 
0x57, 0x24, 0x93, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -57,10 +57,10 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v12,v9,4 #avl=8, #vtype=(e16, m1, ta, ma) -; vzext.vf2 v14,v12 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,4 #avl=8, #vtype=(e16, m1, ta, ma) +; vzext.vf2 v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -75,13 +75,13 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x36, 0x92, 0x3e +; .byte 0xd7, 0x34, 0x82, 0x3e ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x27, 0xc3, 0x4a +; .byte 0x57, 0x24, 0x93, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -99,10 +99,10 @@ block0(v0: i32x4): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v12,v9,2 #avl=4, #vtype=(e32, m1, ta, ma) -; vzext.vf2 v14,v12 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,2 #avl=4, #vtype=(e32, m1, ta, ma) +; vzext.vf2 v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -117,13 +117,13 @@ block0(v0: i32x4): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x36, 0x91, 0x3e +; .byte 0xd7, 0x34, 0x81, 0x3e ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x27, 0xc3, 0x4a +; .byte 0x57, 0x24, 0x93, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -142,10 +142,10 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v12,v9,12 #avl=16, #vtype=(e8, m1, ta, ma) -; vzext.vf4 v14,v12 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,12 #avl=16, #vtype=(e8, m1, ta, ma) +; vzext.vf4 v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -160,12 +160,12 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0x36, 0x96, 0x3e +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0xd7, 0x34, 0x86, 0x3e ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x27, 0xc2, 0x4a +; .byte 0x57, 0x24, 0x92, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -184,10 +184,10 @@ block0(v0: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; 
vslidedown.vi v12,v9,6 #avl=8, #vtype=(e16, m1, ta, ma) -; vzext.vf4 v14,v12 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,6 #avl=8, #vtype=(e16, m1, ta, ma) +; vzext.vf4 v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -202,13 +202,13 @@ block0(v0: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x36, 0x93, 0x3e +; .byte 0xd7, 0x34, 0x83, 0x3e ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x27, 0xc2, 0x4a +; .byte 0x57, 0x24, 0x92, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -228,10 +228,10 @@ block0(v0: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vslidedown.vi v12,v9,14 #avl=16, #vtype=(e8, m1, ta, ma) -; vzext.vf8 v14,v12 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v14,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vslidedown.vi v9,v8,14 #avl=16, #vtype=(e8, m1, ta, ma) +; vzext.vf8 v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -246,12 +246,12 @@ block0(v0: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 -; .byte 0x57, 0x36, 0x97, 0x3e +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0xd7, 0x34, 0x87, 0x3e ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x27, 0xc1, 0x4a +; .byte 0x57, 0x24, 0x91, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x07, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-uwiden_low.clif b/cranelift/filetests/filetests/isa/riscv64/simd-uwiden_low.clif index 26ad1154cab7..d029ef688d84 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-uwiden_low.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-uwiden_low.clif @@ -17,8 +17,8 @@ block0(v0: i8x16): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vzext.vf2 v12,v9 #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vzext.vf2 v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -35,9 +35,9 @@ block0(v0: i8x16): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x26, 0x93, 0x4a +; .byte 0x57, 0x24, 0x93, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -56,8 +56,8 @@ block0(v0: i16x8): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vzext.vf2 v12,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vzext.vf2 v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -74,9 +74,9 @@ block0(v0: i16x8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x26, 0x93, 
0x4a +; .byte 0x57, 0x24, 0x93, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -95,8 +95,8 @@ block0(v0: i32x4): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vzext.vf2 v12,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vzext.vf2 v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -113,9 +113,9 @@ block0(v0: i32x4): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x26, 0x93, 0x4a +; .byte 0x57, 0x24, 0x93, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -135,8 +135,8 @@ block0(v0: i8x16): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vzext.vf4 v12,v9 #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vzext.vf4 v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -153,9 +153,9 @@ block0(v0: i8x16): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x26, 0x92, 0x4a +; .byte 0x57, 0x24, 0x92, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -175,8 +175,8 @@ block0(v0: i16x8): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vzext.vf4 v12,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vzext.vf4 v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -193,9 +193,9 @@ block0(v0: i16x8): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x26, 0x92, 0x4a +; .byte 0x57, 0x24, 0x92, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -216,8 +216,8 @@ block0(v0: i8x16): ; mv fp,sp ; block0: ; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vzext.vf8 v12,v9 #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v12,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vzext.vf8 v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -234,9 +234,9 @@ block0(v0: i8x16): ; addi t6, sp, 0x10 ; .byte 0x87, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x26, 0x91, 0x4a +; .byte 0x57, 0x24, 0x91, 0x4a ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x06, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-valltrue.clif b/cranelift/filetests/filetests/isa/riscv64/simd-valltrue.clif index 9df58a9000d0..b20428b6221b 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-valltrue.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-valltrue.clif @@ -16,9 +16,9 @@ block0(v0: i8x16): ; mv fp,sp ; block0: ; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.v.i v10,1 #avl=16, #vtype=(e8, m1, ta, ma) -; vredminu.vs v12,v8,v10 #avl=16, #vtype=(e8, m1, ta, ma) -; 
vmv.x.s a0,v12 #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.v.i v9,1 #avl=16, #vtype=(e8, m1, ta, ma) +; vredminu.vs v8,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.x.s a0,v8 #avl=16, #vtype=(e8, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -34,9 +34,9 @@ block0(v0: i8x16): ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 ; .byte 0x07, 0x84, 0x0f, 0x02 -; .byte 0x57, 0xb5, 0x00, 0x5e -; .byte 0x57, 0x26, 0x85, 0x12 -; .byte 0x57, 0x25, 0xc0, 0x42 +; .byte 0xd7, 0xb4, 0x00, 0x5e +; .byte 0x57, 0xa4, 0x84, 0x12 +; .byte 0x57, 0x25, 0x80, 0x42 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -55,9 +55,9 @@ block0(v0: i16x8): ; mv fp,sp ; block0: ; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.v.i v10,1 #avl=8, #vtype=(e16, m1, ta, ma) -; vredminu.vs v12,v8,v10 #avl=8, #vtype=(e16, m1, ta, ma) -; vmv.x.s a0,v12 #avl=8, #vtype=(e16, m1, ta, ma) +; vmv.v.i v9,1 #avl=8, #vtype=(e16, m1, ta, ma) +; vredminu.vs v8,v8,v9 #avl=8, #vtype=(e16, m1, ta, ma) +; vmv.x.s a0,v8 #avl=8, #vtype=(e16, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -74,9 +74,9 @@ block0(v0: i16x8): ; addi t6, sp, 0x10 ; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0xb5, 0x00, 0x5e -; .byte 0x57, 0x26, 0x85, 0x12 -; .byte 0x57, 0x25, 0xc0, 0x42 +; .byte 0xd7, 0xb4, 0x00, 0x5e +; .byte 0x57, 0xa4, 0x84, 0x12 +; .byte 0x57, 0x25, 0x80, 0x42 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -95,9 +95,9 @@ block0(v0: i32x4): ; mv fp,sp ; block0: ; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.v.i v10,1 #avl=4, #vtype=(e32, m1, ta, ma) -; vredminu.vs v12,v8,v10 #avl=4, #vtype=(e32, m1, ta, ma) -; vmv.x.s a0,v12 #avl=4, #vtype=(e32, m1, ta, ma) +; vmv.v.i v9,1 #avl=4, #vtype=(e32, m1, ta, ma) +; vredminu.vs v8,v8,v9 #avl=4, #vtype=(e32, m1, ta, ma) +; vmv.x.s a0,v8 #avl=4, #vtype=(e32, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -114,9 +114,9 @@ block0(v0: i32x4): ; addi t6, sp, 0x10 ; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0xb5, 0x00, 0x5e -; .byte 0x57, 0x26, 0x85, 0x12 -; .byte 0x57, 0x25, 0xc0, 0x42 +; .byte 0xd7, 0xb4, 0x00, 0x5e +; .byte 0x57, 0xa4, 0x84, 0x12 +; .byte 0x57, 0x25, 0x80, 0x42 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -135,9 +135,9 @@ block0(v0: i64x2): ; mv fp,sp ; block0: ; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.v.i v10,1 #avl=2, #vtype=(e64, m1, ta, ma) -; vredminu.vs v12,v8,v10 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.x.s a0,v12 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.v.i v9,1 #avl=2, #vtype=(e64, m1, ta, ma) +; vredminu.vs v8,v8,v9 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.x.s a0,v8 #avl=2, #vtype=(e64, m1, ta, ma) ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -154,9 +154,9 @@ block0(v0: i64x2): ; addi t6, sp, 0x10 ; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0xb5, 0x00, 0x5e -; .byte 0x57, 0x26, 0x85, 0x12 -; .byte 0x57, 0x25, 0xc0, 0x42 +; .byte 0xd7, 0xb4, 0x00, 0x5e +; .byte 0x57, 0xa4, 0x84, 0x12 +; .byte 0x57, 0x25, 0x80, 0x42 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-vanytrue.clif b/cranelift/filetests/filetests/isa/riscv64/simd-vanytrue.clif index bfba6e3daee1..f717c46c4874 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-vanytrue.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-vanytrue.clif @@ -17,9 +17,9 @@ block0(v0: i8x16): ; mv fp,sp ; block0: ; vle8.v v8,-16(incoming_arg) #avl=16, 
#vtype=(e8, m1, ta, ma) -; vredmaxu.vs v10,v8,v8 #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.x.s a4,v10 #avl=16, #vtype=(e8, m1, ta, ma) -; sltu a0,zero,a4 +; vredmaxu.vs v8,v8,v8 #avl=16, #vtype=(e8, m1, ta, ma) +; vmv.x.s a0,v8 #avl=16, #vtype=(e8, m1, ta, ma) +; sltu a0,zero,a0 ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -35,9 +35,9 @@ block0(v0: i8x16): ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 ; .byte 0x07, 0x84, 0x0f, 0x02 -; .byte 0x57, 0x25, 0x84, 0x1a -; .byte 0x57, 0x27, 0xa0, 0x42 -; snez a0, a4 +; .byte 0x57, 0x24, 0x84, 0x1a +; .byte 0x57, 0x25, 0x80, 0x42 +; snez a0, a0 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -56,9 +56,9 @@ block0(v0: i16x8): ; mv fp,sp ; block0: ; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vredmaxu.vs v10,v8,v8 #avl=8, #vtype=(e16, m1, ta, ma) -; vmv.x.s a4,v10 #avl=8, #vtype=(e16, m1, ta, ma) -; sltu a0,zero,a4 +; vredmaxu.vs v8,v8,v8 #avl=8, #vtype=(e16, m1, ta, ma) +; vmv.x.s a0,v8 #avl=8, #vtype=(e16, m1, ta, ma) +; sltu a0,zero,a0 ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -75,9 +75,9 @@ block0(v0: i16x8): ; addi t6, sp, 0x10 ; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x25, 0x84, 0x1a -; .byte 0x57, 0x27, 0xa0, 0x42 -; snez a0, a4 +; .byte 0x57, 0x24, 0x84, 0x1a +; .byte 0x57, 0x25, 0x80, 0x42 +; snez a0, a0 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -96,9 +96,9 @@ block0(v0: i32x4): ; mv fp,sp ; block0: ; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vredmaxu.vs v10,v8,v8 #avl=4, #vtype=(e32, m1, ta, ma) -; vmv.x.s a4,v10 #avl=4, #vtype=(e32, m1, ta, ma) -; sltu a0,zero,a4 +; vredmaxu.vs v8,v8,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; vmv.x.s a0,v8 #avl=4, #vtype=(e32, m1, ta, ma) +; sltu a0,zero,a0 ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -115,9 +115,9 @@ block0(v0: i32x4): ; addi t6, sp, 0x10 ; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x25, 0x84, 0x1a -; .byte 0x57, 0x27, 0xa0, 0x42 -; snez a0, a4 +; .byte 0x57, 0x24, 0x84, 0x1a +; .byte 0x57, 0x25, 0x80, 0x42 +; snez a0, a0 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -136,9 +136,9 @@ block0(v0: i64x2): ; mv fp,sp ; block0: ; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vredmaxu.vs v10,v8,v8 #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.x.s a4,v10 #avl=2, #vtype=(e64, m1, ta, ma) -; sltu a0,zero,a4 +; vredmaxu.vs v8,v8,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.x.s a0,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; sltu a0,zero,a0 ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -155,9 +155,9 @@ block0(v0: i64x2): ; addi t6, sp, 0x10 ; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x25, 0x84, 0x1a -; .byte 0x57, 0x27, 0xa0, 0x42 -; snez a0, a4 +; .byte 0x57, 0x24, 0x84, 0x1a +; .byte 0x57, 0x25, 0x80, 0x42 +; snez a0, a0 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-vconst-64bit.clif b/cranelift/filetests/filetests/isa/riscv64/simd-vconst-64bit.clif index f74a863c0da0..2665b688e9c6 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-vconst-64bit.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-vconst-64bit.clif @@ -11,8 +11,8 @@ block0: ; VCode: ; block0: -; vle8.v v10,[const(0)] #avl=8, #vtype=(e8, m1, ta, ma) -; vse8.v v10,0(a0) #avl=8, #vtype=(e8, m1, ta, ma) +; vle8.v v8,[const(0)] #avl=8, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=8, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: @@ -20,8 +20,8 @@ block0: ; .byte 0x57, 0x70, 
0x04, 0xcc ; auipc t6, 0 ; addi t6, t6, 0x14 -; .byte 0x07, 0x85, 0x0f, 0x02 -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret ; .byte 0x00, 0x00, 0x00, 0x00 ; .byte 0x00, 0x00, 0x00, 0x00 @@ -34,8 +34,8 @@ block0: ; VCode: ; block0: -; vle8.v v10,[const(0)] #avl=8, #vtype=(e8, m1, ta, ma) -; vse8.v v10,0(a0) #avl=8, #vtype=(e8, m1, ta, ma) +; vle8.v v8,[const(0)] #avl=8, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=8, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: @@ -43,8 +43,8 @@ block0: ; .byte 0x57, 0x70, 0x04, 0xcc ; auipc t6, 0 ; addi t6, t6, 0x14 -; .byte 0x07, 0x85, 0x0f, 0x02 -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret ; .byte 0xff, 0xff, 0xff, 0xff ; .byte 0xff, 0xff, 0xff, 0xff @@ -57,8 +57,8 @@ block0: ; VCode: ; block0: -; vle8.v v10,[const(0)] #avl=8, #vtype=(e8, m1, ta, ma) -; vse8.v v10,0(a0) #avl=8, #vtype=(e8, m1, ta, ma) +; vle8.v v8,[const(0)] #avl=8, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=8, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: @@ -66,8 +66,8 @@ block0: ; .byte 0x57, 0x70, 0x04, 0xcc ; auipc t6, 0 ; addi t6, t6, 0x14 -; .byte 0x07, 0x85, 0x0f, 0x02 -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret ; .byte 0x00, 0x1f, 0x3f, 0x5f ; .byte 0x7f, 0x9f, 0xbf, 0xff @@ -80,8 +80,8 @@ block0: ; VCode: ; block0: -; vle16.v v10,[const(0)] #avl=4, #vtype=(e16, m1, ta, ma) -; vse8.v v10,0(a0) #avl=8, #vtype=(e8, m1, ta, ma) +; vle16.v v8,[const(0)] #avl=4, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=8, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: @@ -89,9 +89,9 @@ block0: ; .byte 0x57, 0x70, 0x82, 0xcc ; auipc t6, 0 ; addi t6, t6, 0x1c -; .byte 0x07, 0xd5, 0x0f, 0x02 +; .byte 0x07, 0xd4, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x04, 0xcc -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret ; .byte 0x00, 0x00, 0x00, 0x00 ; .byte 0x00, 0x00, 0xff, 0x00 @@ -105,8 +105,8 @@ block0: ; VCode: ; block0: -; vle32.v v10,[const(0)] #avl=2, #vtype=(e32, m1, ta, ma) -; vse8.v v10,0(a0) #avl=8, #vtype=(e8, m1, ta, ma) +; vle32.v v8,[const(0)] #avl=2, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=8, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: @@ -114,9 +114,9 @@ block0: ; .byte 0x57, 0x70, 0x01, 0xcd ; auipc t6, 0 ; addi t6, t6, 0x1c -; .byte 0x07, 0xe5, 0x0f, 0x02 +; .byte 0x07, 0xe4, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x04, 0xcc -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret ; .byte 0x00, 0x00, 0x00, 0x00 ; .byte 0x00, 0x00, 0x00, 0x00 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-vconst.clif b/cranelift/filetests/filetests/isa/riscv64/simd-vconst.clif index 67714c56a5dd..1674844dfa77 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-vconst.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-vconst.clif @@ -11,8 +11,8 @@ block0: ; VCode: ; block0: -; vle8.v v10,[const(0)] #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,[const(0)] #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: @@ -20,8 +20,8 @@ block0: ; .byte 0x57, 0x70, 0x08, 0xcc ; auipc t6, 0 ; addi t6, t6, 0x1c -; .byte 0x07, 0x85, 0x0f, 0x02 -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret ; .byte 0x00, 0x00, 0x00, 0x00 ; .byte 0x00, 0x00, 0x00, 0x00 @@ -38,8 +38,8 @@ block0: ; VCode: ; block0: -; vle8.v v10,[const(0)] 
#avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,[const(0)] #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: @@ -47,8 +47,8 @@ block0: ; .byte 0x57, 0x70, 0x08, 0xcc ; auipc t6, 0 ; addi t6, t6, 0x1c -; .byte 0x07, 0x85, 0x0f, 0x02 -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret ; .byte 0x00, 0x00, 0x00, 0x00 ; .byte 0x00, 0x00, 0x00, 0x00 @@ -65,8 +65,8 @@ block0: ; VCode: ; block0: -; vle8.v v10,[const(0)] #avl=16, #vtype=(e8, m1, ta, ma) -; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,[const(0)] #avl=16, #vtype=(e8, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: @@ -74,8 +74,8 @@ block0: ; .byte 0x57, 0x70, 0x08, 0xcc ; auipc t6, 0 ; addi t6, t6, 0x1c -; .byte 0x07, 0x85, 0x0f, 0x02 -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret ; .byte 0x00, 0x00, 0x00, 0x00 ; .byte 0x00, 0x00, 0x00, 0x00 @@ -92,8 +92,8 @@ block0: ; VCode: ; block0: -; vle16.v v10,[const(0)] #avl=8, #vtype=(e16, m1, ta, ma) -; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle16.v v8,[const(0)] #avl=8, #vtype=(e16, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: @@ -101,9 +101,9 @@ block0: ; .byte 0x57, 0x70, 0x84, 0xcc ; auipc t6, 0 ; addi t6, t6, 0x1c -; .byte 0x07, 0xd5, 0x0f, 0x02 +; .byte 0x07, 0xd4, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret ; .byte 0x00, 0x00, 0x00, 0x00 ; .byte 0x00, 0x00, 0xff, 0x00 @@ -119,8 +119,8 @@ block0: ; VCode: ; block0: -; vle32.v v10,[const(0)] #avl=4, #vtype=(e32, m1, ta, ma) -; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle32.v v8,[const(0)] #avl=4, #vtype=(e32, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: @@ -128,9 +128,9 @@ block0: ; .byte 0x57, 0x70, 0x02, 0xcd ; auipc t6, 0 ; addi t6, t6, 0x1c -; .byte 0x07, 0xe5, 0x0f, 0x02 +; .byte 0x07, 0xe4, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret ; .byte 0x00, 0x00, 0x00, 0x00 ; .byte 0x00, 0x00, 0x00, 0x00 @@ -146,8 +146,8 @@ block0: ; VCode: ; block0: -; vle64.v v10,[const(0)] #avl=2, #vtype=(e64, m1, ta, ma) -; vse8.v v10,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) +; vle64.v v8,[const(0)] #avl=2, #vtype=(e64, m1, ta, ma) +; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; ret ; ; Disassembled: @@ -155,9 +155,9 @@ block0: ; .byte 0x57, 0x70, 0x81, 0xcd ; auipc t6, 0 ; addi t6, t6, 0x1c -; .byte 0x07, 0xf5, 0x0f, 0x02 +; .byte 0x07, 0xf4, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0x27, 0x05, 0x05, 0x02 +; .byte 0x27, 0x04, 0x05, 0x02 ; ret ; .byte 0x00, 0x00, 0x00, 0x00 ; .byte 0x00, 0x00, 0x00, 0x00 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-vhighbits.clif b/cranelift/filetests/filetests/isa/riscv64/simd-vhighbits.clif index c0d70e706570..5427dda05a28 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-vhighbits.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-vhighbits.clif @@ -17,11 +17,11 @@ block0(v0: i8x16): ; mv fp,sp ; block0: ; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmslt.vx v10,v8,zero #avl=16, #vtype=(e8, m1, ta, ma) -; vmv.x.s a4,v10 #avl=2, #vtype=(e64, m1, ta, ma) -; lui a0,16 -; addi a2,a0,-1 -; and a0,a4,a2 +; vmslt.vx v8,v8,zero #avl=16, #vtype=(e8, 
m1, ta, ma) +; vmv.x.s a0,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; lui a1,16 +; addi a1,a1,-1 +; and a0,a0,a1 ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -37,12 +37,12 @@ block0(v0: i8x16): ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 ; .byte 0x07, 0x84, 0x0f, 0x02 -; .byte 0x57, 0x45, 0x80, 0x6e +; .byte 0x57, 0x44, 0x80, 0x6e ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x27, 0xa0, 0x42 -; lui a0, 0x10 -; addi a2, a0, -1 -; and a0, a4, a2 +; .byte 0x57, 0x25, 0x80, 0x42 +; lui a1, 0x10 +; addi a1, a1, -1 +; and a0, a0, a1 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -61,9 +61,9 @@ block0(v0: i16x8): ; mv fp,sp ; block0: ; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmslt.vx v10,v8,zero #avl=8, #vtype=(e16, m1, ta, ma) -; vmv.x.s a4,v10 #avl=2, #vtype=(e64, m1, ta, ma) -; andi a0,a4,255 +; vmslt.vx v8,v8,zero #avl=8, #vtype=(e16, m1, ta, ma) +; vmv.x.s a0,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; andi a0,a0,255 ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -80,10 +80,10 @@ block0(v0: i16x8): ; addi t6, sp, 0x10 ; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0x57, 0x45, 0x80, 0x6e +; .byte 0x57, 0x44, 0x80, 0x6e ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x27, 0xa0, 0x42 -; andi a0, a4, 0xff +; .byte 0x57, 0x25, 0x80, 0x42 +; andi a0, a0, 0xff ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -102,9 +102,9 @@ block0(v0: i32x4): ; mv fp,sp ; block0: ; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmslt.vx v10,v8,zero #avl=4, #vtype=(e32, m1, ta, ma) -; vmv.x.s a4,v10 #avl=2, #vtype=(e64, m1, ta, ma) -; andi a0,a4,15 +; vmslt.vx v8,v8,zero #avl=4, #vtype=(e32, m1, ta, ma) +; vmv.x.s a0,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; andi a0,a0,15 ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -121,10 +121,10 @@ block0(v0: i32x4): ; addi t6, sp, 0x10 ; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x02, 0xcd -; .byte 0x57, 0x45, 0x80, 0x6e +; .byte 0x57, 0x44, 0x80, 0x6e ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x27, 0xa0, 0x42 -; andi a0, a4, 0xf +; .byte 0x57, 0x25, 0x80, 0x42 +; andi a0, a0, 0xf ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 @@ -143,9 +143,9 @@ block0(v0: i64x2): ; mv fp,sp ; block0: ; vle8.v v8,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vmslt.vx v10,v8,zero #avl=2, #vtype=(e64, m1, ta, ma) -; vmv.x.s a4,v10 #avl=2, #vtype=(e64, m1, ta, ma) -; andi a0,a4,3 +; vmslt.vx v8,v8,zero #avl=2, #vtype=(e64, m1, ta, ma) +; vmv.x.s a0,v8 #avl=2, #vtype=(e64, m1, ta, ma) +; andi a0,a0,3 ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -162,9 +162,9 @@ block0(v0: i64x2): ; addi t6, sp, 0x10 ; .byte 0x07, 0x84, 0x0f, 0x02 ; .byte 0x57, 0x70, 0x81, 0xcd -; .byte 0x57, 0x45, 0x80, 0x6e -; .byte 0x57, 0x27, 0xa0, 0x42 -; andi a0, a4, 3 +; .byte 0x57, 0x44, 0x80, 0x6e +; .byte 0x57, 0x25, 0x80, 0x42 +; andi a0, a0, 3 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/simd-vstate.clif b/cranelift/filetests/filetests/isa/riscv64/simd-vstate.clif index 68748634cdd6..8ce55bfee3d2 100644 --- a/cranelift/filetests/filetests/isa/riscv64/simd-vstate.clif +++ b/cranelift/filetests/filetests/isa/riscv64/simd-vstate.clif @@ -18,10 +18,10 @@ block0(v0: i8x16, v1: i16x8): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vv v8,v9,v9 #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vv v9,v11,v11 #avl=8, #vtype=(e16, m1, ta, ma) +; vle8.v 
v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vv v8,v8,v8 #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vv v9,v9,v9 #avl=8, #vtype=(e16, m1, ta, ma) ; vadd.vv v9,v9,v9 #avl=8, #vtype=(e16, m1, ta, ma) ; vse8.v v8,0(a0) #avl=16, #vtype=(e8, m1, ta, ma) ; vse8.v v9,16(a0) #avl=16, #vtype=(e8, m1, ta, ma) @@ -39,12 +39,12 @@ block0(v0: i8x16, v1: i16x8): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x84, 0x94, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x04, 0x84, 0x02 ; .byte 0x57, 0x70, 0x84, 0xcc -; .byte 0xd7, 0x84, 0xb5, 0x02 +; .byte 0xd7, 0x84, 0x94, 0x02 ; .byte 0xd7, 0x84, 0x94, 0x02 ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x27, 0x04, 0x05, 0x02 @@ -77,12 +77,12 @@ block2(v6: i8x16, v7: i8x16): ; sd fp,0(sp) ; mv fp,sp ; block0: -; vle8.v v9,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vle8.v v11,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) -; vadd.vv v8,v9,v11 #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v8,-32(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vle8.v v9,-16(incoming_arg) #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vv v8,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) ; j label1 ; block1: -; vadd.vv v9,v11,v8 #avl=16, #vtype=(e8, m1, ta, ma) +; vadd.vv v9,v9,v8 #avl=16, #vtype=(e8, m1, ta, ma) ; j label2 ; block2: ; vadd.vv v10,v8,v9 #avl=16, #vtype=(e8, m1, ta, ma) @@ -101,13 +101,13 @@ block2(v6: i8x16, v7: i8x16): ; block1: ; offset 0x10 ; .byte 0x57, 0x70, 0x08, 0xcc ; addi t6, sp, 0x10 -; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x07, 0x84, 0x0f, 0x02 ; addi t6, sp, 0x20 -; .byte 0x87, 0x85, 0x0f, 0x02 -; .byte 0x57, 0x84, 0x95, 0x02 +; .byte 0x87, 0x84, 0x0f, 0x02 +; .byte 0x57, 0x84, 0x84, 0x02 ; block2: ; offset 0x28 ; .byte 0x57, 0x70, 0x08, 0xcc -; .byte 0xd7, 0x04, 0xb4, 0x02 +; .byte 0xd7, 0x04, 0x94, 0x02 ; block3: ; offset 0x30 ; .byte 0x57, 0x70, 0x08, 0xcc ; .byte 0x57, 0x85, 0x84, 0x02 diff --git a/cranelift/filetests/filetests/isa/riscv64/smax-zbb.clif b/cranelift/filetests/filetests/isa/riscv64/smax-zbb.clif index c14a89689e97..9b765b0bb402 100644 --- a/cranelift/filetests/filetests/isa/riscv64/smax-zbb.clif +++ b/cranelift/filetests/filetests/isa/riscv64/smax-zbb.clif @@ -10,16 +10,16 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; sext.b a3,a0 -; sext.b a5,a1 -; max a0,a3,a5 +; sext.b a0,a0 +; sext.b a1,a1 +; max a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x93, 0x16, 0x45, 0x60 -; .byte 0x93, 0x97, 0x45, 0x60 -; .byte 0x33, 0xe5, 0xf6, 0x0a +; .byte 0x13, 0x15, 0x45, 0x60 +; .byte 0x93, 0x95, 0x45, 0x60 +; .byte 0x33, 0x65, 0xb5, 0x0a ; ret function %smax_i16(i16, i16) -> i16{ @@ -30,16 +30,16 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; sext.h a3,a0 -; sext.h a5,a1 -; max a0,a3,a5 +; sext.h a0,a0 +; sext.h a1,a1 +; max a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x93, 0x16, 0x55, 0x60 -; .byte 0x93, 0x97, 0x55, 0x60 -; .byte 0x33, 0xe5, 0xf6, 0x0a +; .byte 0x13, 0x15, 0x55, 0x60 +; .byte 0x93, 0x95, 0x55, 0x60 +; .byte 0x33, 0x65, 0xb5, 0x0a ; ret function %smax_i32(i32, i32) -> i32{ @@ -50,16 +50,16 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; sext.w a3,a0 -; sext.w a5,a1 -; max a0,a3,a5 +; sext.w a0,a0 +; sext.w a1,a1 +; max a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a3, a0 -; sext.w a5, a1 -; .byte 0x33, 0xe5, 0xf6, 0x0a +; 
sext.w a0, a0 +; sext.w a1, a1 +; .byte 0x33, 0x65, 0xb5, 0x0a ; ret function %smax_i64(i64, i64) -> i64{ @@ -85,56 +85,29 @@ block0(v0: i128, v1: i128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-16 -; sd s1,8(sp) -; sd s9,0(sp) ; block0: -; slt a5,a3,a1 -; sltu s1,a2,a0 -; xor a4,a3,a1 -; mv s9,a1 -; select a5,s1,a5##condition=(a4 eq zero) -; mv a4,a0 -; select [a0,a1],[a4,s9],[a2,a3]##condition=(a5 ne zero) -; ld s1,8(sp) -; ld s9,0(sp) -; addi sp,sp,16 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; slt a4,a3,a1 +; sltu t2,a2,a0 +; xor a5,a3,a1 +; mv a6,a1 +; select a4,t2,a4##condition=(a5 eq zero) +; mv a5,a0 +; select [a0,a1],[a5,a6],[a2,a3]##condition=(a4 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x10 -; sd s1, 8(sp) -; sd s9, 0(sp) -; block1: ; offset 0x1c -; slt a5, a3, a1 -; sltu s1, a2, a0 -; xor a4, a3, a1 -; mv s9, a1 -; bnez a4, 8 -; mv a5, s1 -; mv a4, a0 -; mv a0, a4 -; mv a1, s9 -; bnez a5, 0xc +; slt a4, a3, a1 +; sltu t2, a2, a0 +; xor a5, a3, a1 +; mv a6, a1 +; bnez a5, 8 +; mv a4, t2 +; mv a5, a0 +; mv a0, a5 +; mv a1, a6 +; bnez a4, 0xc ; mv a0, a2 ; mv a1, a3 -; ld s1, 8(sp) -; ld s9, 0(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/smax.clif b/cranelift/filetests/filetests/isa/riscv64/smax.clif index 72fe68009665..7729d17f4727 100644 --- a/cranelift/filetests/filetests/isa/riscv64/smax.clif +++ b/cranelift/filetests/filetests/isa/riscv64/smax.clif @@ -10,22 +10,22 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; slli a3,a0,56 -; srai a5,a3,56 -; slli a1,a1,56 -; srai a3,a1,56 -; select a0,a5,a3##condition=(a5 sgt a3) +; slli a0,a0,56 +; srai a2,a0,56 +; slli a0,a1,56 +; srai a1,a0,56 +; select a0,a2,a1##condition=(a2 sgt a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x38 -; srai a5, a3, 0x38 -; slli a1, a1, 0x38 -; srai a3, a1, 0x38 -; mv a0, a5 -; blt a3, a5, 8 -; mv a0, a3 +; slli a0, a0, 0x38 +; srai a2, a0, 0x38 +; slli a0, a1, 0x38 +; srai a1, a0, 0x38 +; mv a0, a2 +; blt a1, a2, 8 +; mv a0, a1 ; ret function %smax_i16(i16, i16) -> i16{ @@ -36,22 +36,22 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; slli a3,a0,48 -; srai a5,a3,48 -; slli a1,a1,48 -; srai a3,a1,48 -; select a0,a5,a3##condition=(a5 sgt a3) +; slli a0,a0,48 +; srai a2,a0,48 +; slli a0,a1,48 +; srai a1,a0,48 +; select a0,a2,a1##condition=(a2 sgt a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x30 -; srai a5, a3, 0x30 -; slli a1, a1, 0x30 -; srai a3, a1, 0x30 -; mv a0, a5 -; blt a3, a5, 8 -; mv a0, a3 +; slli a0, a0, 0x30 +; srai a2, a0, 0x30 +; slli a0, a1, 0x30 +; srai a1, a0, 0x30 +; mv a0, a2 +; blt a1, a2, 8 +; mv a0, a1 ; ret function %smax_i32(i32, i32) -> i32{ @@ -62,18 +62,18 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; sext.w a3,a0 -; sext.w a5,a1 -; select a0,a3,a5##condition=(a3 sgt a5) +; sext.w a2,a0 +; sext.w a1,a1 +; select a0,a2,a1##condition=(a2 sgt a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a3, a0 -; sext.w a5, a1 -; mv a0, a3 -; blt a5, a3, 8 -; mv a0, a5 +; sext.w a2, a0 +; sext.w a1, a1 +; mv a0, a2 +; blt a1, a2, 8 +; mv a0, a1 ; ret function %smax_i64(i64, i64) -> i64{ @@ -84,15 +84,15 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; mv a4,a0 -; select a0,a4,a1##condition=(a4 sgt a1) +; mv a2,a0 +; select a0,a2,a1##condition=(a2 sgt a1) ; ret ; ; Disassembled: ; block0: ; 
offset 0x0 -; mv a4, a0 -; mv a0, a4 -; blt a1, a4, 8 +; mv a2, a0 +; mv a0, a2 +; blt a1, a2, 8 ; mv a0, a1 ; ret @@ -103,56 +103,29 @@ block0(v0: i128, v1: i128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-16 -; sd s1,8(sp) -; sd s9,0(sp) ; block0: -; slt a5,a3,a1 -; sltu s1,a2,a0 -; xor a4,a3,a1 -; mv s9,a1 -; select a5,s1,a5##condition=(a4 eq zero) -; mv a4,a0 -; select [a0,a1],[a4,s9],[a2,a3]##condition=(a5 ne zero) -; ld s1,8(sp) -; ld s9,0(sp) -; addi sp,sp,16 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; slt a4,a3,a1 +; sltu t2,a2,a0 +; xor a5,a3,a1 +; mv a6,a1 +; select a4,t2,a4##condition=(a5 eq zero) +; mv a5,a0 +; select [a0,a1],[a5,a6],[a2,a3]##condition=(a4 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x10 -; sd s1, 8(sp) -; sd s9, 0(sp) -; block1: ; offset 0x1c -; slt a5, a3, a1 -; sltu s1, a2, a0 -; xor a4, a3, a1 -; mv s9, a1 -; bnez a4, 8 -; mv a5, s1 -; mv a4, a0 -; mv a0, a4 -; mv a1, s9 -; bnez a5, 0xc +; slt a4, a3, a1 +; sltu t2, a2, a0 +; xor a5, a3, a1 +; mv a6, a1 +; bnez a5, 8 +; mv a4, t2 +; mv a5, a0 +; mv a0, a5 +; mv a1, a6 +; bnez a4, 0xc ; mv a0, a2 ; mv a1, a3 -; ld s1, 8(sp) -; ld s9, 0(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/smin-zbb.clif b/cranelift/filetests/filetests/isa/riscv64/smin-zbb.clif index 5af7c25ff86f..1bbac68c3c5d 100644 --- a/cranelift/filetests/filetests/isa/riscv64/smin-zbb.clif +++ b/cranelift/filetests/filetests/isa/riscv64/smin-zbb.clif @@ -10,16 +10,16 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; sext.b a3,a0 -; sext.b a5,a1 -; min a0,a3,a5 +; sext.b a0,a0 +; sext.b a1,a1 +; min a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x93, 0x16, 0x45, 0x60 -; .byte 0x93, 0x97, 0x45, 0x60 -; .byte 0x33, 0xc5, 0xf6, 0x0a +; .byte 0x13, 0x15, 0x45, 0x60 +; .byte 0x93, 0x95, 0x45, 0x60 +; .byte 0x33, 0x45, 0xb5, 0x0a ; ret function %smin_i16(i16, i16) -> i16{ @@ -30,16 +30,16 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; sext.h a3,a0 -; sext.h a5,a1 -; min a0,a3,a5 +; sext.h a0,a0 +; sext.h a1,a1 +; min a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x93, 0x16, 0x55, 0x60 -; .byte 0x93, 0x97, 0x55, 0x60 -; .byte 0x33, 0xc5, 0xf6, 0x0a +; .byte 0x13, 0x15, 0x55, 0x60 +; .byte 0x93, 0x95, 0x55, 0x60 +; .byte 0x33, 0x45, 0xb5, 0x0a ; ret function %smin_i32(i32, i32) -> i32{ @@ -50,16 +50,16 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; sext.w a3,a0 -; sext.w a5,a1 -; min a0,a3,a5 +; sext.w a0,a0 +; sext.w a1,a1 +; min a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a3, a0 -; sext.w a5, a1 -; .byte 0x33, 0xc5, 0xf6, 0x0a +; sext.w a0, a0 +; sext.w a1, a1 +; .byte 0x33, 0x45, 0xb5, 0x0a ; ret function %smin_i64(i64, i64) -> i64{ @@ -85,56 +85,29 @@ block0(v0: i128, v1: i128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-16 -; sd s1,8(sp) -; sd s9,0(sp) ; block0: -; slt a5,a1,a3 -; sltu s1,a0,a2 -; xor a4,a1,a3 -; mv s9,a1 -; select a5,s1,a5##condition=(a4 eq zero) -; mv a4,a0 -; select [a0,a1],[a4,s9],[a2,a3]##condition=(a5 ne zero) -; ld s1,8(sp) -; ld s9,0(sp) -; addi sp,sp,16 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; slt a4,a1,a3 +; sltu t2,a0,a2 +; xor a5,a1,a3 +; mv a6,a1 +; select a4,t2,a4##condition=(a5 eq zero) +; mv a5,a0 +; select [a0,a1],[a5,a6],[a2,a3]##condition=(a4 ne zero) ; ret ; ; 
Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x10 -; sd s1, 8(sp) -; sd s9, 0(sp) -; block1: ; offset 0x1c -; slt a5, a1, a3 -; sltu s1, a0, a2 -; xor a4, a1, a3 -; mv s9, a1 -; bnez a4, 8 -; mv a5, s1 -; mv a4, a0 -; mv a0, a4 -; mv a1, s9 -; bnez a5, 0xc +; slt a4, a1, a3 +; sltu t2, a0, a2 +; xor a5, a1, a3 +; mv a6, a1 +; bnez a5, 8 +; mv a4, t2 +; mv a5, a0 +; mv a0, a5 +; mv a1, a6 +; bnez a4, 0xc ; mv a0, a2 ; mv a1, a3 -; ld s1, 8(sp) -; ld s9, 0(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/smin.clif b/cranelift/filetests/filetests/isa/riscv64/smin.clif index 8528fdc3a090..0c789ef94277 100644 --- a/cranelift/filetests/filetests/isa/riscv64/smin.clif +++ b/cranelift/filetests/filetests/isa/riscv64/smin.clif @@ -10,22 +10,22 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; slli a3,a0,56 -; srai a5,a3,56 -; slli a1,a1,56 -; srai a3,a1,56 -; select a0,a5,a3##condition=(a5 slt a3) +; slli a0,a0,56 +; srai a2,a0,56 +; slli a0,a1,56 +; srai a1,a0,56 +; select a0,a2,a1##condition=(a2 slt a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x38 -; srai a5, a3, 0x38 -; slli a1, a1, 0x38 -; srai a3, a1, 0x38 -; mv a0, a5 -; blt a5, a3, 8 -; mv a0, a3 +; slli a0, a0, 0x38 +; srai a2, a0, 0x38 +; slli a0, a1, 0x38 +; srai a1, a0, 0x38 +; mv a0, a2 +; blt a2, a1, 8 +; mv a0, a1 ; ret function %smin_i16(i16, i16) -> i16{ @@ -36,22 +36,22 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; slli a3,a0,48 -; srai a5,a3,48 -; slli a1,a1,48 -; srai a3,a1,48 -; select a0,a5,a3##condition=(a5 slt a3) +; slli a0,a0,48 +; srai a2,a0,48 +; slli a0,a1,48 +; srai a1,a0,48 +; select a0,a2,a1##condition=(a2 slt a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x30 -; srai a5, a3, 0x30 -; slli a1, a1, 0x30 -; srai a3, a1, 0x30 -; mv a0, a5 -; blt a5, a3, 8 -; mv a0, a3 +; slli a0, a0, 0x30 +; srai a2, a0, 0x30 +; slli a0, a1, 0x30 +; srai a1, a0, 0x30 +; mv a0, a2 +; blt a2, a1, 8 +; mv a0, a1 ; ret function %smin_i32(i32, i32) -> i32{ @@ -62,18 +62,18 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; sext.w a3,a0 -; sext.w a5,a1 -; select a0,a3,a5##condition=(a3 slt a5) +; sext.w a2,a0 +; sext.w a1,a1 +; select a0,a2,a1##condition=(a2 slt a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; sext.w a3, a0 -; sext.w a5, a1 -; mv a0, a3 -; blt a3, a5, 8 -; mv a0, a5 +; sext.w a2, a0 +; sext.w a1, a1 +; mv a0, a2 +; blt a2, a1, 8 +; mv a0, a1 ; ret function %smin_i64(i64, i64) -> i64{ @@ -84,15 +84,15 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; mv a4,a0 -; select a0,a4,a1##condition=(a4 slt a1) +; mv a2,a0 +; select a0,a2,a1##condition=(a2 slt a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a4, a0 -; mv a0, a4 -; blt a4, a1, 8 +; mv a2, a0 +; mv a0, a2 +; blt a2, a1, 8 ; mv a0, a1 ; ret @@ -103,56 +103,29 @@ block0(v0: i128, v1: i128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-16 -; sd s1,8(sp) -; sd s9,0(sp) ; block0: -; slt a5,a1,a3 -; sltu s1,a0,a2 -; xor a4,a1,a3 -; mv s9,a1 -; select a5,s1,a5##condition=(a4 eq zero) -; mv a4,a0 -; select [a0,a1],[a4,s9],[a2,a3]##condition=(a5 ne zero) -; ld s1,8(sp) -; ld s9,0(sp) -; addi sp,sp,16 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; slt a4,a1,a3 +; sltu t2,a0,a2 +; xor a5,a1,a3 +; mv a6,a1 +; select a4,t2,a4##condition=(a5 eq zero) +; mv a5,a0 +; select [a0,a1],[a5,a6],[a2,a3]##condition=(a4 ne zero) ; 
ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x10 -; sd s1, 8(sp) -; sd s9, 0(sp) -; block1: ; offset 0x1c -; slt a5, a1, a3 -; sltu s1, a0, a2 -; xor a4, a1, a3 -; mv s9, a1 -; bnez a4, 8 -; mv a5, s1 -; mv a4, a0 -; mv a0, a4 -; mv a1, s9 -; bnez a5, 0xc +; slt a4, a1, a3 +; sltu t2, a0, a2 +; xor a5, a1, a3 +; mv a6, a1 +; bnez a5, 8 +; mv a4, t2 +; mv a5, a0 +; mv a0, a5 +; mv a1, a6 +; bnez a4, 0xc ; mv a0, a2 ; mv a1, a3 -; ld s1, 8(sp) -; ld s9, 0(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/sshr-const.clif b/cranelift/filetests/filetests/isa/riscv64/sshr-const.clif index dbb3ed234850..e34ff2177d3a 100644 --- a/cranelift/filetests/filetests/isa/riscv64/sshr-const.clif +++ b/cranelift/filetests/filetests/isa/riscv64/sshr-const.clif @@ -12,16 +12,16 @@ block0(v0: i8): ; VCode: ; block0: -; slli a2,a0,56 -; srai a4,a2,56 -; sraiw a0,a4,5 +; slli a0,a0,56 +; srai a0,a0,56 +; sraiw a0,a0,5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x38 -; srai a4, a2, 0x38 -; sraiw a0, a4, 5 +; slli a0, a0, 0x38 +; srai a0, a0, 0x38 +; sraiw a0, a0, 5 ; ret function %sshr_i8_const_i16(i8) -> i8 { @@ -33,16 +33,16 @@ block0(v0: i8): ; VCode: ; block0: -; slli a2,a0,56 -; srai a4,a2,56 -; sraiw a0,a4,5 +; slli a0,a0,56 +; srai a0,a0,56 +; sraiw a0,a0,5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x38 -; srai a4, a2, 0x38 -; sraiw a0, a4, 5 +; slli a0, a0, 0x38 +; srai a0, a0, 0x38 +; sraiw a0, a0, 5 ; ret function %sshr_i8_const_i32(i8) -> i8 { @@ -54,16 +54,16 @@ block0(v0: i8): ; VCode: ; block0: -; slli a2,a0,56 -; srai a4,a2,56 -; sraiw a0,a4,5 +; slli a0,a0,56 +; srai a0,a0,56 +; sraiw a0,a0,5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x38 -; srai a4, a2, 0x38 -; sraiw a0, a4, 5 +; slli a0, a0, 0x38 +; srai a0, a0, 0x38 +; sraiw a0, a0, 5 ; ret function %sshr_i8_const_i64(i8) -> i8 { @@ -75,16 +75,16 @@ block0(v0: i8): ; VCode: ; block0: -; slli a2,a0,56 -; srai a4,a2,56 -; sraiw a0,a4,5 +; slli a0,a0,56 +; srai a0,a0,56 +; sraiw a0,a0,5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x38 -; srai a4, a2, 0x38 -; sraiw a0, a4, 5 +; slli a0, a0, 0x38 +; srai a0, a0, 0x38 +; sraiw a0, a0, 5 ; ret function %sshr_i8_const_i128(i8) -> i8 { @@ -97,16 +97,16 @@ block0(v0: i8): ; VCode: ; block0: -; slli a2,a0,56 -; srai a4,a2,56 -; sraiw a0,a4,5 +; slli a0,a0,56 +; srai a0,a0,56 +; sraiw a0,a0,5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x38 -; srai a4, a2, 0x38 -; sraiw a0, a4, 5 +; slli a0, a0, 0x38 +; srai a0, a0, 0x38 +; sraiw a0, a0, 5 ; ret function %sshr_i16_const_i8(i16) -> i16 { @@ -118,16 +118,16 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srai a4,a2,48 -; sraiw a0,a4,5 +; slli a0,a0,48 +; srai a0,a0,48 +; sraiw a0,a0,5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srai a4, a2, 0x30 -; sraiw a0, a4, 5 +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 +; sraiw a0, a0, 5 ; ret function %sshr_i16_const_i16(i16) -> i16 { @@ -139,16 +139,16 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srai a4,a2,48 -; sraiw a0,a4,5 +; slli a0,a0,48 +; srai a0,a0,48 +; sraiw a0,a0,5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srai a4, a2, 0x30 -; sraiw a0, a4, 5 +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 +; sraiw a0, a0, 5 ; ret function %sshr_i16_const_i32(i16) -> i16 { @@ 
-160,16 +160,16 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srai a4,a2,48 -; sraiw a0,a4,5 +; slli a0,a0,48 +; srai a0,a0,48 +; sraiw a0,a0,5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srai a4, a2, 0x30 -; sraiw a0, a4, 5 +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 +; sraiw a0, a0, 5 ; ret function %sshr_i16_const_i64(i16) -> i16 { @@ -181,16 +181,16 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srai a4,a2,48 -; sraiw a0,a4,5 +; slli a0,a0,48 +; srai a0,a0,48 +; sraiw a0,a0,5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srai a4, a2, 0x30 -; sraiw a0, a4, 5 +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 +; sraiw a0, a0, 5 ; ret function %sshr_i16_const_i128(i16) -> i16 { @@ -203,16 +203,16 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srai a4,a2,48 -; sraiw a0,a4,5 +; slli a0,a0,48 +; srai a0,a0,48 +; sraiw a0,a0,5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srai a4, a2, 0x30 -; sraiw a0, a4, 5 +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 +; sraiw a0, a0, 5 ; ret function %sshr_i32_const_i8(i32) -> i32 { @@ -395,49 +395,71 @@ block0(v0: i128): } ; VCode: +; addi sp,sp,-16 +; sd ra,8(sp) +; sd fp,0(sp) +; mv fp,sp +; addi sp,sp,-16 +; sd s4,8(sp) ; block0: -; li a2,5 -; andi a4,a2,63 +; li a4,5 +; andi a2,a4,63 ; li a3,64 -; sub a3,a3,a4 -; sll a5,a1,a3 -; select a3,zero,a5##condition=(a4 eq zero) -; srl a5,a0,a4 -; or a5,a3,a5 +; sub a3,a3,a2 +; sll a3,a1,a3 +; select a3,zero,a3##condition=(a2 eq zero) +; srl a5,a0,a2 +; or a3,a3,a5 ; li a0,64 -; sra a3,a1,a4 -; li a4,-1 -; select t4,a4,zero##condition=(a1 slt zero) -; li a4,64 -; andi a2,a2,127 -; select [a0,a1],[a3,t4],[a5,a3]##condition=(a2 uge a4) +; sra s4,a1,a2 +; li a0,-1 +; select a2,a0,zero##condition=(a1 slt zero) +; li a5,64 +; andi a4,a4,127 +; select [a0,a1],[s4,a2],[a3,s4]##condition=(a4 uge a5) +; ld s4,8(sp) +; addi sp,sp,16 +; ld ra,8(sp) +; ld fp,0(sp) +; addi sp,sp,16 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a2, zero, 5 -; andi a4, a2, 0x3f +; addi sp, sp, -0x10 +; sd ra, 8(sp) +; sd s0, 0(sp) +; mv s0, sp +; addi sp, sp, -0x10 +; sd s4, 8(sp) +; block1: ; offset 0x18 +; addi a4, zero, 5 +; andi a2, a4, 0x3f ; addi a3, zero, 0x40 -; sub a3, a3, a4 -; sll a5, a1, a3 +; sub a3, a3, a2 +; sll a3, a1, a3 +; bnez a2, 8 ; mv a3, zero -; beqz a4, 8 -; mv a3, a5 -; srl a5, a0, a4 -; or a5, a3, a5 +; srl a5, a0, a2 +; or a3, a3, a5 ; addi a0, zero, 0x40 -; sra a3, a1, a4 -; addi a4, zero, -1 -; mv t4, a4 +; sra s4, a1, a2 +; addi a0, zero, -1 +; mv a2, a0 ; bltz a1, 8 -; mv t4, zero -; addi a4, zero, 0x40 -; andi a2, a2, 0x7f +; mv a2, zero +; addi a5, zero, 0x40 +; andi a4, a4, 0x7f +; mv a0, s4 +; mv a1, a2 +; bgeu a4, a5, 0xc ; mv a0, a3 -; mv a1, t4 -; bgeu a2, a4, 0xc -; mv a0, a5 -; mv a1, a3 +; mv a1, s4 +; ld s4, 8(sp) +; addi sp, sp, 0x10 +; ld ra, 8(sp) +; ld s0, 0(sp) +; addi sp, sp, 0x10 ; ret function %sshr_i128_const_i16(i128) -> i128 { @@ -448,49 +470,71 @@ block0(v0: i128): } ; VCode: +; addi sp,sp,-16 +; sd ra,8(sp) +; sd fp,0(sp) +; mv fp,sp +; addi sp,sp,-16 +; sd s4,8(sp) ; block0: -; li a2,5 -; andi a4,a2,63 +; li a4,5 +; andi a2,a4,63 ; li a3,64 -; sub a3,a3,a4 -; sll a5,a1,a3 -; select a3,zero,a5##condition=(a4 eq zero) -; srl a5,a0,a4 -; or a5,a3,a5 +; sub a3,a3,a2 +; sll a3,a1,a3 +; select a3,zero,a3##condition=(a2 eq zero) +; srl a5,a0,a2 +; or a3,a3,a5 ; li a0,64 -; sra a3,a1,a4 -; li a4,-1 -; select t4,a4,zero##condition=(a1 slt zero) -; li a4,64 -; andi a2,a2,127 -; select 
[a0,a1],[a3,t4],[a5,a3]##condition=(a2 uge a4) +; sra s4,a1,a2 +; li a0,-1 +; select a2,a0,zero##condition=(a1 slt zero) +; li a5,64 +; andi a4,a4,127 +; select [a0,a1],[s4,a2],[a3,s4]##condition=(a4 uge a5) +; ld s4,8(sp) +; addi sp,sp,16 +; ld ra,8(sp) +; ld fp,0(sp) +; addi sp,sp,16 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a2, zero, 5 -; andi a4, a2, 0x3f +; addi sp, sp, -0x10 +; sd ra, 8(sp) +; sd s0, 0(sp) +; mv s0, sp +; addi sp, sp, -0x10 +; sd s4, 8(sp) +; block1: ; offset 0x18 +; addi a4, zero, 5 +; andi a2, a4, 0x3f ; addi a3, zero, 0x40 -; sub a3, a3, a4 -; sll a5, a1, a3 +; sub a3, a3, a2 +; sll a3, a1, a3 +; bnez a2, 8 ; mv a3, zero -; beqz a4, 8 -; mv a3, a5 -; srl a5, a0, a4 -; or a5, a3, a5 +; srl a5, a0, a2 +; or a3, a3, a5 ; addi a0, zero, 0x40 -; sra a3, a1, a4 -; addi a4, zero, -1 -; mv t4, a4 +; sra s4, a1, a2 +; addi a0, zero, -1 +; mv a2, a0 ; bltz a1, 8 -; mv t4, zero -; addi a4, zero, 0x40 -; andi a2, a2, 0x7f +; mv a2, zero +; addi a5, zero, 0x40 +; andi a4, a4, 0x7f +; mv a0, s4 +; mv a1, a2 +; bgeu a4, a5, 0xc ; mv a0, a3 -; mv a1, t4 -; bgeu a2, a4, 0xc -; mv a0, a5 -; mv a1, a3 +; mv a1, s4 +; ld s4, 8(sp) +; addi sp, sp, 0x10 +; ld ra, 8(sp) +; ld s0, 0(sp) +; addi sp, sp, 0x10 ; ret function %sshr_i128_const_i32(i128) -> i128 { @@ -501,49 +545,71 @@ block0(v0: i128): } ; VCode: +; addi sp,sp,-16 +; sd ra,8(sp) +; sd fp,0(sp) +; mv fp,sp +; addi sp,sp,-16 +; sd s4,8(sp) ; block0: -; li a2,5 -; andi a4,a2,63 +; li a4,5 +; andi a2,a4,63 ; li a3,64 -; sub a3,a3,a4 -; sll a5,a1,a3 -; select a3,zero,a5##condition=(a4 eq zero) -; srl a5,a0,a4 -; or a5,a3,a5 +; sub a3,a3,a2 +; sll a3,a1,a3 +; select a3,zero,a3##condition=(a2 eq zero) +; srl a5,a0,a2 +; or a3,a3,a5 ; li a0,64 -; sra a3,a1,a4 -; li a4,-1 -; select t4,a4,zero##condition=(a1 slt zero) -; li a4,64 -; andi a2,a2,127 -; select [a0,a1],[a3,t4],[a5,a3]##condition=(a2 uge a4) +; sra s4,a1,a2 +; li a0,-1 +; select a2,a0,zero##condition=(a1 slt zero) +; li a5,64 +; andi a4,a4,127 +; select [a0,a1],[s4,a2],[a3,s4]##condition=(a4 uge a5) +; ld s4,8(sp) +; addi sp,sp,16 +; ld ra,8(sp) +; ld fp,0(sp) +; addi sp,sp,16 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a2, zero, 5 -; andi a4, a2, 0x3f +; addi sp, sp, -0x10 +; sd ra, 8(sp) +; sd s0, 0(sp) +; mv s0, sp +; addi sp, sp, -0x10 +; sd s4, 8(sp) +; block1: ; offset 0x18 +; addi a4, zero, 5 +; andi a2, a4, 0x3f ; addi a3, zero, 0x40 -; sub a3, a3, a4 -; sll a5, a1, a3 +; sub a3, a3, a2 +; sll a3, a1, a3 +; bnez a2, 8 ; mv a3, zero -; beqz a4, 8 -; mv a3, a5 -; srl a5, a0, a4 -; or a5, a3, a5 +; srl a5, a0, a2 +; or a3, a3, a5 ; addi a0, zero, 0x40 -; sra a3, a1, a4 -; addi a4, zero, -1 -; mv t4, a4 +; sra s4, a1, a2 +; addi a0, zero, -1 +; mv a2, a0 ; bltz a1, 8 -; mv t4, zero -; addi a4, zero, 0x40 -; andi a2, a2, 0x7f +; mv a2, zero +; addi a5, zero, 0x40 +; andi a4, a4, 0x7f +; mv a0, s4 +; mv a1, a2 +; bgeu a4, a5, 0xc ; mv a0, a3 -; mv a1, t4 -; bgeu a2, a4, 0xc -; mv a0, a5 -; mv a1, a3 +; mv a1, s4 +; ld s4, 8(sp) +; addi sp, sp, 0x10 +; ld ra, 8(sp) +; ld s0, 0(sp) +; addi sp, sp, 0x10 ; ret function %sshr_i128_const_i64(i128) -> i128 { @@ -553,85 +619,30 @@ block0(v0: i128): return v2 } -; VCode: -; block0: -; li a2,5 -; andi a4,a2,63 -; li a3,64 -; sub a3,a3,a4 -; sll a5,a1,a3 -; select a3,zero,a5##condition=(a4 eq zero) -; srl a5,a0,a4 -; or a5,a3,a5 -; li a0,64 -; sra a3,a1,a4 -; li a4,-1 -; select t4,a4,zero##condition=(a1 slt zero) -; li a4,64 -; andi a2,a2,127 -; select [a0,a1],[a3,t4],[a5,a3]##condition=(a2 uge a4) -; ret -; 
-; Disassembled: -; block0: ; offset 0x0 -; addi a2, zero, 5 -; andi a4, a2, 0x3f -; addi a3, zero, 0x40 -; sub a3, a3, a4 -; sll a5, a1, a3 -; mv a3, zero -; beqz a4, 8 -; mv a3, a5 -; srl a5, a0, a4 -; or a5, a3, a5 -; addi a0, zero, 0x40 -; sra a3, a1, a4 -; addi a4, zero, -1 -; mv t4, a4 -; bltz a1, 8 -; mv t4, zero -; addi a4, zero, 0x40 -; andi a2, a2, 0x7f -; mv a0, a3 -; mv a1, t4 -; bgeu a2, a4, 0xc -; mv a0, a5 -; mv a1, a3 -; ret - -function %sshr_i128_const_i128(i128) -> i128 { -block0(v0: i128): - v1 = iconst.i64 5 - v2 = uextend.i128 v1 - v3 = sshr v0, v2 - return v3 -} - ; VCode: ; addi sp,sp,-16 ; sd ra,8(sp) ; sd fp,0(sp) ; mv fp,sp ; addi sp,sp,-16 -; sd s11,8(sp) +; sd s4,8(sp) ; block0: -; li a2,5 -; li a3,0 -; andi a5,a2,63 +; li a4,5 +; andi a2,a4,63 ; li a3,64 -; sub a3,a3,a5 +; sub a3,a3,a2 ; sll a3,a1,a3 -; select a3,zero,a3##condition=(a5 eq zero) -; srl a4,a0,a5 -; or s11,a3,a4 -; li a3,64 -; sra a3,a1,a5 -; li a5,-1 -; select a5,a5,zero##condition=(a1 slt zero) -; li a4,64 -; andi a2,a2,127 -; select [a0,a1],[a3,a5],[s11,a3]##condition=(a2 uge a4) -; ld s11,8(sp) +; select a3,zero,a3##condition=(a2 eq zero) +; srl a5,a0,a2 +; or a3,a3,a5 +; li a0,64 +; sra s4,a1,a2 +; li a0,-1 +; select a2,a0,zero##condition=(a1 slt zero) +; li a5,64 +; andi a4,a4,127 +; select [a0,a1],[s4,a2],[a3,s4]##condition=(a4 uge a5) +; ld s4,8(sp) ; addi sp,sp,16 ; ld ra,8(sp) ; ld fp,0(sp) @@ -645,34 +656,89 @@ block0(v0: i128): ; sd s0, 0(sp) ; mv s0, sp ; addi sp, sp, -0x10 -; sd s11, 8(sp) +; sd s4, 8(sp) ; block1: ; offset 0x18 -; addi a2, zero, 5 -; mv a3, zero -; andi a5, a2, 0x3f +; addi a4, zero, 5 +; andi a2, a4, 0x3f ; addi a3, zero, 0x40 -; sub a3, a3, a5 +; sub a3, a3, a2 ; sll a3, a1, a3 -; bnez a5, 8 +; bnez a2, 8 ; mv a3, zero -; srl a4, a0, a5 -; or s11, a3, a4 -; addi a3, zero, 0x40 -; sra a3, a1, a5 -; addi a5, zero, -1 +; srl a5, a0, a2 +; or a3, a3, a5 +; addi a0, zero, 0x40 +; sra s4, a1, a2 +; addi a0, zero, -1 +; mv a2, a0 ; bltz a1, 8 -; mv a5, zero -; addi a4, zero, 0x40 -; andi a2, a2, 0x7f +; mv a2, zero +; addi a5, zero, 0x40 +; andi a4, a4, 0x7f +; mv a0, s4 +; mv a1, a2 +; bgeu a4, a5, 0xc ; mv a0, a3 -; mv a1, a5 -; bgeu a2, a4, 0xc -; mv a0, s11 -; mv a1, a3 -; ld s11, 8(sp) +; mv a1, s4 +; ld s4, 8(sp) ; addi sp, sp, 0x10 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 ; ret +function %sshr_i128_const_i128(i128) -> i128 { +block0(v0: i128): + v1 = iconst.i64 5 + v2 = uextend.i128 v1 + v3 = sshr v0, v2 + return v3 +} + +; VCode: +; block0: +; li a3,5 +; li a2,0 +; andi a2,a3,63 +; li a4,64 +; sub a4,a4,a2 +; sll a4,a1,a4 +; select a4,zero,a4##condition=(a2 eq zero) +; srl a5,a0,a2 +; or a7,a4,a5 +; li a0,64 +; sra a2,a1,a2 +; li a0,-1 +; select a4,a0,zero##condition=(a1 slt zero) +; li a5,64 +; andi a3,a3,127 +; select [a0,a1],[a2,a4],[a7,a2]##condition=(a3 uge a5) +; ret +; +; Disassembled: +; block0: ; offset 0x0 +; addi a3, zero, 5 +; mv a2, zero +; andi a2, a3, 0x3f +; addi a4, zero, 0x40 +; sub a4, a4, a2 +; sll a4, a1, a4 +; bnez a2, 8 +; mv a4, zero +; srl a5, a0, a2 +; or a7, a4, a5 +; addi a0, zero, 0x40 +; sra a2, a1, a2 +; addi a0, zero, -1 +; mv a4, a0 +; bltz a1, 8 +; mv a4, zero +; addi a5, zero, 0x40 +; andi a3, a3, 0x7f +; mv a0, a2 +; mv a1, a4 +; bgeu a3, a5, 0xc +; mv a0, a7 +; mv a1, a2 +; ret + diff --git a/cranelift/filetests/filetests/isa/riscv64/sshr.clif b/cranelift/filetests/filetests/isa/riscv64/sshr.clif index 2901c74d805d..7b68338c1e49 100644 --- a/cranelift/filetests/filetests/isa/riscv64/sshr.clif +++ 
b/cranelift/filetests/filetests/isa/riscv64/sshr.clif @@ -11,18 +11,18 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; slli a3,a0,56 -; srai a5,a3,56 +; slli a0,a0,56 +; srai a0,a0,56 ; andi a1,a1,7 -; sraw a0,a5,a1 +; sraw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x38 -; srai a5, a3, 0x38 +; slli a0, a0, 0x38 +; srai a0, a0, 0x38 ; andi a1, a1, 7 -; sraw a0, a5, a1 +; sraw a0, a0, a1 ; ret function %sshr_i8_i16(i8, i16) -> i8 { @@ -33,18 +33,18 @@ block0(v0: i8, v1: i16): ; VCode: ; block0: -; slli a3,a0,56 -; srai a5,a3,56 +; slli a0,a0,56 +; srai a0,a0,56 ; andi a1,a1,7 -; sraw a0,a5,a1 +; sraw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x38 -; srai a5, a3, 0x38 +; slli a0, a0, 0x38 +; srai a0, a0, 0x38 ; andi a1, a1, 7 -; sraw a0, a5, a1 +; sraw a0, a0, a1 ; ret function %sshr_i8_i32(i8, i32) -> i8 { @@ -55,18 +55,18 @@ block0(v0: i8, v1: i32): ; VCode: ; block0: -; slli a3,a0,56 -; srai a5,a3,56 +; slli a0,a0,56 +; srai a0,a0,56 ; andi a1,a1,7 -; sraw a0,a5,a1 +; sraw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x38 -; srai a5, a3, 0x38 +; slli a0, a0, 0x38 +; srai a0, a0, 0x38 ; andi a1, a1, 7 -; sraw a0, a5, a1 +; sraw a0, a0, a1 ; ret function %sshr_i8_i64(i8, i64) -> i8 { @@ -77,18 +77,18 @@ block0(v0: i8, v1: i64): ; VCode: ; block0: -; slli a3,a0,56 -; srai a5,a3,56 +; slli a0,a0,56 +; srai a0,a0,56 ; andi a1,a1,7 -; sraw a0,a5,a1 +; sraw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x38 -; srai a5, a3, 0x38 +; slli a0, a0, 0x38 +; srai a0, a0, 0x38 ; andi a1, a1, 7 -; sraw a0, a5, a1 +; sraw a0, a0, a1 ; ret function %sshr_i8_i128(i8, i128) -> i8 { @@ -99,18 +99,18 @@ block0(v0: i8, v1: i128): ; VCode: ; block0: -; slli a4,a0,56 -; srai a0,a4,56 -; andi a2,a1,7 -; sraw a0,a0,a2 +; slli a0,a0,56 +; srai a0,a0,56 +; andi a1,a1,7 +; sraw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a4, a0, 0x38 -; srai a0, a4, 0x38 -; andi a2, a1, 7 -; sraw a0, a0, a2 +; slli a0, a0, 0x38 +; srai a0, a0, 0x38 +; andi a1, a1, 7 +; sraw a0, a0, a1 ; ret function %sshr_i16_i8(i16, i8) -> i16 { @@ -121,18 +121,18 @@ block0(v0: i16, v1: i8): ; VCode: ; block0: -; slli a3,a0,48 -; srai a5,a3,48 +; slli a0,a0,48 +; srai a0,a0,48 ; andi a1,a1,15 -; sraw a0,a5,a1 +; sraw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x30 -; srai a5, a3, 0x30 +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 ; andi a1, a1, 0xf -; sraw a0, a5, a1 +; sraw a0, a0, a1 ; ret function %sshr_i16_i16(i16, i16) -> i16 { @@ -143,18 +143,18 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; slli a3,a0,48 -; srai a5,a3,48 +; slli a0,a0,48 +; srai a0,a0,48 ; andi a1,a1,15 -; sraw a0,a5,a1 +; sraw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x30 -; srai a5, a3, 0x30 +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 ; andi a1, a1, 0xf -; sraw a0, a5, a1 +; sraw a0, a0, a1 ; ret function %sshr_i16_i32(i16, i32) -> i16 { @@ -165,18 +165,18 @@ block0(v0: i16, v1: i32): ; VCode: ; block0: -; slli a3,a0,48 -; srai a5,a3,48 +; slli a0,a0,48 +; srai a0,a0,48 ; andi a1,a1,15 -; sraw a0,a5,a1 +; sraw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x30 -; srai a5, a3, 0x30 +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 ; andi a1, a1, 0xf -; sraw a0, a5, a1 +; sraw a0, a0, a1 ; ret function %sshr_i16_i64(i16, i64) -> i16 { @@ -187,18 +187,18 @@ block0(v0: i16, v1: i64): ; VCode: ; block0: -; slli a3,a0,48 -; srai a5,a3,48 +; slli a0,a0,48 +; srai a0,a0,48 ; 
andi a1,a1,15 -; sraw a0,a5,a1 +; sraw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x30 -; srai a5, a3, 0x30 +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 ; andi a1, a1, 0xf -; sraw a0, a5, a1 +; sraw a0, a0, a1 ; ret function %sshr_i16_i128(i16, i128) -> i16 { @@ -209,18 +209,18 @@ block0(v0: i16, v1: i128): ; VCode: ; block0: -; slli a4,a0,48 -; srai a0,a4,48 -; andi a2,a1,15 -; sraw a0,a0,a2 +; slli a0,a0,48 +; srai a0,a0,48 +; andi a1,a1,15 +; sraw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a4, a0, 0x30 -; srai a0, a4, 0x30 -; andi a2, a1, 0xf -; sraw a0, a0, a2 +; slli a0, a0, 0x30 +; srai a0, a0, 0x30 +; andi a1, a1, 0xf +; sraw a0, a0, a1 ; ret function %sshr_i32_i8(i32, i8) -> i32 { @@ -390,47 +390,71 @@ block0(v0: i128, v1: i8): } ; VCode: +; addi sp,sp,-16 +; sd ra,8(sp) +; sd fp,0(sp) +; mv fp,sp +; addi sp,sp,-16 +; sd s4,8(sp) ; block0: -; andi a4,a2,63 +; mv a4,a0 +; andi a0,a2,63 ; li a3,64 -; sub a3,a3,a4 -; sll a5,a1,a3 -; select a3,zero,a5##condition=(a4 eq zero) -; srl a5,a0,a4 -; or a5,a3,a5 -; li a0,64 -; sra a3,a1,a4 -; li a4,-1 -; select t4,a4,zero##condition=(a1 slt zero) +; sub a3,a3,a0 +; sll a3,a1,a3 +; select a3,zero,a3##condition=(a0 eq zero) +; srl a4,a4,a0 +; or a3,a3,a4 ; li a4,64 +; sra s4,a1,a0 +; li a0,-1 +; select a4,a0,zero##condition=(a1 slt zero) +; li a5,64 ; andi a2,a2,127 -; select [a0,a1],[a3,t4],[a5,a3]##condition=(a2 uge a4) +; select [a0,a1],[s4,a4],[a3,s4]##condition=(a2 uge a5) +; ld s4,8(sp) +; addi sp,sp,16 +; ld ra,8(sp) +; ld fp,0(sp) +; addi sp,sp,16 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a4, a2, 0x3f +; addi sp, sp, -0x10 +; sd ra, 8(sp) +; sd s0, 0(sp) +; mv s0, sp +; addi sp, sp, -0x10 +; sd s4, 8(sp) +; block1: ; offset 0x18 +; mv a4, a0 +; andi a0, a2, 0x3f ; addi a3, zero, 0x40 -; sub a3, a3, a4 -; sll a5, a1, a3 +; sub a3, a3, a0 +; sll a3, a1, a3 +; bnez a0, 8 ; mv a3, zero -; beqz a4, 8 -; mv a3, a5 -; srl a5, a0, a4 -; or a5, a3, a5 -; addi a0, zero, 0x40 -; sra a3, a1, a4 -; addi a4, zero, -1 -; mv t4, a4 -; bltz a1, 8 -; mv t4, zero +; srl a4, a4, a0 +; or a3, a3, a4 ; addi a4, zero, 0x40 +; sra s4, a1, a0 +; addi a0, zero, -1 +; mv a4, a0 +; bltz a1, 8 +; mv a4, zero +; addi a5, zero, 0x40 ; andi a2, a2, 0x7f +; mv a0, s4 +; mv a1, a4 +; bgeu a2, a5, 0xc ; mv a0, a3 -; mv a1, t4 -; bgeu a2, a4, 0xc -; mv a0, a5 -; mv a1, a3 +; mv a1, s4 +; ld s4, 8(sp) +; addi sp, sp, 0x10 +; ld ra, 8(sp) +; ld s0, 0(sp) +; addi sp, sp, 0x10 ; ret function %sshr_i128_i16(i128, i16) -> i128 { @@ -440,47 +464,71 @@ block0(v0: i128, v1: i16): } ; VCode: +; addi sp,sp,-16 +; sd ra,8(sp) +; sd fp,0(sp) +; mv fp,sp +; addi sp,sp,-16 +; sd s4,8(sp) ; block0: -; andi a4,a2,63 +; mv a4,a0 +; andi a0,a2,63 ; li a3,64 -; sub a3,a3,a4 -; sll a5,a1,a3 -; select a3,zero,a5##condition=(a4 eq zero) -; srl a5,a0,a4 -; or a5,a3,a5 -; li a0,64 -; sra a3,a1,a4 -; li a4,-1 -; select t4,a4,zero##condition=(a1 slt zero) +; sub a3,a3,a0 +; sll a3,a1,a3 +; select a3,zero,a3##condition=(a0 eq zero) +; srl a4,a4,a0 +; or a3,a3,a4 ; li a4,64 +; sra s4,a1,a0 +; li a0,-1 +; select a4,a0,zero##condition=(a1 slt zero) +; li a5,64 ; andi a2,a2,127 -; select [a0,a1],[a3,t4],[a5,a3]##condition=(a2 uge a4) +; select [a0,a1],[s4,a4],[a3,s4]##condition=(a2 uge a5) +; ld s4,8(sp) +; addi sp,sp,16 +; ld ra,8(sp) +; ld fp,0(sp) +; addi sp,sp,16 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a4, a2, 0x3f +; addi sp, sp, -0x10 +; sd ra, 8(sp) +; sd s0, 0(sp) +; mv s0, sp +; addi sp, sp, -0x10 +; sd s4, 8(sp) +; 
block1: ; offset 0x18 +; mv a4, a0 +; andi a0, a2, 0x3f ; addi a3, zero, 0x40 -; sub a3, a3, a4 -; sll a5, a1, a3 +; sub a3, a3, a0 +; sll a3, a1, a3 +; bnez a0, 8 ; mv a3, zero -; beqz a4, 8 -; mv a3, a5 -; srl a5, a0, a4 -; or a5, a3, a5 -; addi a0, zero, 0x40 -; sra a3, a1, a4 -; addi a4, zero, -1 -; mv t4, a4 -; bltz a1, 8 -; mv t4, zero +; srl a4, a4, a0 +; or a3, a3, a4 ; addi a4, zero, 0x40 +; sra s4, a1, a0 +; addi a0, zero, -1 +; mv a4, a0 +; bltz a1, 8 +; mv a4, zero +; addi a5, zero, 0x40 ; andi a2, a2, 0x7f +; mv a0, s4 +; mv a1, a4 +; bgeu a2, a5, 0xc ; mv a0, a3 -; mv a1, t4 -; bgeu a2, a4, 0xc -; mv a0, a5 -; mv a1, a3 +; mv a1, s4 +; ld s4, 8(sp) +; addi sp, sp, 0x10 +; ld ra, 8(sp) +; ld s0, 0(sp) +; addi sp, sp, 0x10 ; ret function %sshr_i128_i32(i128, i32) -> i128 { @@ -490,47 +538,71 @@ block0(v0: i128, v1: i32): } ; VCode: +; addi sp,sp,-16 +; sd ra,8(sp) +; sd fp,0(sp) +; mv fp,sp +; addi sp,sp,-16 +; sd s4,8(sp) ; block0: -; andi a4,a2,63 +; mv a4,a0 +; andi a0,a2,63 ; li a3,64 -; sub a3,a3,a4 -; sll a5,a1,a3 -; select a3,zero,a5##condition=(a4 eq zero) -; srl a5,a0,a4 -; or a5,a3,a5 -; li a0,64 -; sra a3,a1,a4 -; li a4,-1 -; select t4,a4,zero##condition=(a1 slt zero) +; sub a3,a3,a0 +; sll a3,a1,a3 +; select a3,zero,a3##condition=(a0 eq zero) +; srl a4,a4,a0 +; or a3,a3,a4 ; li a4,64 +; sra s4,a1,a0 +; li a0,-1 +; select a4,a0,zero##condition=(a1 slt zero) +; li a5,64 ; andi a2,a2,127 -; select [a0,a1],[a3,t4],[a5,a3]##condition=(a2 uge a4) +; select [a0,a1],[s4,a4],[a3,s4]##condition=(a2 uge a5) +; ld s4,8(sp) +; addi sp,sp,16 +; ld ra,8(sp) +; ld fp,0(sp) +; addi sp,sp,16 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a4, a2, 0x3f +; addi sp, sp, -0x10 +; sd ra, 8(sp) +; sd s0, 0(sp) +; mv s0, sp +; addi sp, sp, -0x10 +; sd s4, 8(sp) +; block1: ; offset 0x18 +; mv a4, a0 +; andi a0, a2, 0x3f ; addi a3, zero, 0x40 -; sub a3, a3, a4 -; sll a5, a1, a3 +; sub a3, a3, a0 +; sll a3, a1, a3 +; bnez a0, 8 ; mv a3, zero -; beqz a4, 8 -; mv a3, a5 -; srl a5, a0, a4 -; or a5, a3, a5 -; addi a0, zero, 0x40 -; sra a3, a1, a4 -; addi a4, zero, -1 -; mv t4, a4 -; bltz a1, 8 -; mv t4, zero +; srl a4, a4, a0 +; or a3, a3, a4 ; addi a4, zero, 0x40 +; sra s4, a1, a0 +; addi a0, zero, -1 +; mv a4, a0 +; bltz a1, 8 +; mv a4, zero +; addi a5, zero, 0x40 ; andi a2, a2, 0x7f +; mv a0, s4 +; mv a1, a4 +; bgeu a2, a5, 0xc ; mv a0, a3 -; mv a1, t4 -; bgeu a2, a4, 0xc -; mv a0, a5 -; mv a1, a3 +; mv a1, s4 +; ld s4, 8(sp) +; addi sp, sp, 0x10 +; ld ra, 8(sp) +; ld s0, 0(sp) +; addi sp, sp, 0x10 ; ret function %sshr_i128_i64(i128, i64) -> i128 { @@ -540,47 +612,71 @@ block0(v0: i128, v1: i64): } ; VCode: +; addi sp,sp,-16 +; sd ra,8(sp) +; sd fp,0(sp) +; mv fp,sp +; addi sp,sp,-16 +; sd s4,8(sp) ; block0: -; andi a4,a2,63 +; mv a4,a0 +; andi a0,a2,63 ; li a3,64 -; sub a3,a3,a4 -; sll a5,a1,a3 -; select a3,zero,a5##condition=(a4 eq zero) -; srl a5,a0,a4 -; or a5,a3,a5 -; li a0,64 -; sra a3,a1,a4 -; li a4,-1 -; select t4,a4,zero##condition=(a1 slt zero) +; sub a3,a3,a0 +; sll a3,a1,a3 +; select a3,zero,a3##condition=(a0 eq zero) +; srl a4,a4,a0 +; or a3,a3,a4 ; li a4,64 +; sra s4,a1,a0 +; li a0,-1 +; select a4,a0,zero##condition=(a1 slt zero) +; li a5,64 ; andi a2,a2,127 -; select [a0,a1],[a3,t4],[a5,a3]##condition=(a2 uge a4) +; select [a0,a1],[s4,a4],[a3,s4]##condition=(a2 uge a5) +; ld s4,8(sp) +; addi sp,sp,16 +; ld ra,8(sp) +; ld fp,0(sp) +; addi sp,sp,16 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a4, a2, 0x3f +; addi sp, sp, -0x10 +; sd ra, 8(sp) +; sd s0, 0(sp) +; mv 
s0, sp +; addi sp, sp, -0x10 +; sd s4, 8(sp) +; block1: ; offset 0x18 +; mv a4, a0 +; andi a0, a2, 0x3f ; addi a3, zero, 0x40 -; sub a3, a3, a4 -; sll a5, a1, a3 +; sub a3, a3, a0 +; sll a3, a1, a3 +; bnez a0, 8 ; mv a3, zero -; beqz a4, 8 -; mv a3, a5 -; srl a5, a0, a4 -; or a5, a3, a5 -; addi a0, zero, 0x40 -; sra a3, a1, a4 -; addi a4, zero, -1 -; mv t4, a4 -; bltz a1, 8 -; mv t4, zero +; srl a4, a4, a0 +; or a3, a3, a4 ; addi a4, zero, 0x40 +; sra s4, a1, a0 +; addi a0, zero, -1 +; mv a4, a0 +; bltz a1, 8 +; mv a4, zero +; addi a5, zero, 0x40 ; andi a2, a2, 0x7f +; mv a0, s4 +; mv a1, a4 +; bgeu a2, a5, 0xc ; mv a0, a3 -; mv a1, t4 -; bgeu a2, a4, 0xc -; mv a0, a5 -; mv a1, a3 +; mv a1, s4 +; ld s4, 8(sp) +; addi sp, sp, 0x10 +; ld ra, 8(sp) +; ld s0, 0(sp) +; addi sp, sp, 0x10 ; ret function %sshr_i128_i128(i128, i128) -> i128 { @@ -595,23 +691,24 @@ block0(v0: i128, v1: i128): ; sd fp,0(sp) ; mv fp,sp ; addi sp,sp,-16 -; sd s11,8(sp) +; sd s5,8(sp) ; block0: -; andi a5,a2,63 +; mv a4,a0 +; andi a0,a2,63 ; li a3,64 -; sub a3,a3,a5 +; sub a3,a3,a0 ; sll a3,a1,a3 -; select a3,zero,a3##condition=(a5 eq zero) -; srl a4,a0,a5 -; or s11,a3,a4 -; li a3,64 -; sra a3,a1,a5 -; li a5,-1 -; select a5,a5,zero##condition=(a1 slt zero) +; select a3,zero,a3##condition=(a0 eq zero) +; srl a5,a4,a0 +; or a3,a3,a5 ; li a4,64 +; sra s5,a1,a0 +; li a0,-1 +; select a4,a0,zero##condition=(a1 slt zero) +; li a5,64 ; andi a2,a2,127 -; select [a0,a1],[a3,a5],[s11,a3]##condition=(a2 uge a4) -; ld s11,8(sp) +; select [a0,a1],[s5,a4],[a3,s5]##condition=(a2 uge a5) +; ld s5,8(sp) ; addi sp,sp,16 ; ld ra,8(sp) ; ld fp,0(sp) @@ -625,29 +722,31 @@ block0(v0: i128, v1: i128): ; sd s0, 0(sp) ; mv s0, sp ; addi sp, sp, -0x10 -; sd s11, 8(sp) +; sd s5, 8(sp) ; block1: ; offset 0x18 -; andi a5, a2, 0x3f +; mv a4, a0 +; andi a0, a2, 0x3f ; addi a3, zero, 0x40 -; sub a3, a3, a5 +; sub a3, a3, a0 ; sll a3, a1, a3 -; bnez a5, 8 +; bnez a0, 8 ; mv a3, zero -; srl a4, a0, a5 -; or s11, a3, a4 -; addi a3, zero, 0x40 -; sra a3, a1, a5 -; addi a5, zero, -1 -; bltz a1, 8 -; mv a5, zero +; srl a5, a4, a0 +; or a3, a3, a5 ; addi a4, zero, 0x40 +; sra s5, a1, a0 +; addi a0, zero, -1 +; mv a4, a0 +; bltz a1, 8 +; mv a4, zero +; addi a5, zero, 0x40 ; andi a2, a2, 0x7f +; mv a0, s5 +; mv a1, a4 +; bgeu a2, a5, 0xc ; mv a0, a3 -; mv a1, a5 -; bgeu a2, a4, 0xc -; mv a0, s11 -; mv a1, a3 -; ld s11, 8(sp) +; mv a1, s5 +; ld s5, 8(sp) ; addi sp, sp, 0x10 ; ld ra, 8(sp) ; ld s0, 0(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/stack.clif b/cranelift/filetests/filetests/isa/riscv64/stack.clif index 88df5938e976..2abd0c06b9fb 100644 --- a/cranelift/filetests/filetests/isa/riscv64/stack.clif +++ b/cranelift/filetests/filetests/isa/riscv64/stack.clif @@ -429,112 +429,112 @@ block0(v0: i8): ; sd ra,8(sp) ; sd fp,0(sp) ; mv fp,sp -; addi sp,sp,-1360 -; sd s1,1352(sp) -; sd s2,1344(sp) -; sd s3,1336(sp) -; sd s4,1328(sp) -; sd s5,1320(sp) -; sd s6,1312(sp) -; sd s7,1304(sp) -; sd s8,1296(sp) -; sd s9,1288(sp) -; sd s10,1280(sp) -; sd s11,1272(sp) +; addi sp,sp,-1376 +; sd s1,1368(sp) +; sd s2,1360(sp) +; sd s3,1352(sp) +; sd s4,1344(sp) +; sd s5,1336(sp) +; sd s6,1328(sp) +; sd s7,1320(sp) +; sd s8,1312(sp) +; sd s9,1304(sp) +; sd s10,1296(sp) +; sd s11,1288(sp) ; block0: ; sd a0,1000(slot) -; li a2,2 -; sd a2,1008(slot) +; li a1,2 ; li a2,4 ; li a3,6 ; li a4,8 ; li a5,10 -; li s7,12 -; li s8,14 -; li s9,16 -; li s10,18 -; li s11,20 -; li t0,22 -; li t1,24 -; li t2,26 -; li a6,28 -; li a7,30 -; li t3,32 -; li t4,34 -; li s1,36 -; li s2,38 
-; li s3,30 -; li s4,32 -; li s5,34 -; li s6,36 +; li t0,12 +; li t1,14 +; li t2,16 +; li s1,18 +; li a6,20 +; li a7,22 +; li s2,24 +; li s3,26 +; li s4,28 +; li s5,30 +; li s6,32 +; li s7,34 +; li s8,36 +; li s9,38 +; li s10,30 +; li s11,32 +; li t3,34 +; li t4,36 ; li a0,38 -; li a1,30 -; sd a1,1256(slot) -; li a1,32 -; sd a1,1248(slot) -; li a1,34 -; sd a1,1240(slot) -; li a1,36 -; sd a1,1232(slot) -; li a1,38 -; sd a1,1224(slot) -; li a1,30 -; sd a1,1216(slot) -; li a1,32 -; sd a1,1208(slot) -; li a1,34 -; sd a1,1200(slot) -; li a1,36 -; sd a1,1192(slot) -; li a1,38 -; sd a1,1184(slot) -; ld a1,1008(slot) -; addi a1,a1,1 -; sd a1,1176(slot) -; addi a1,a2,3 -; sd a1,1168(slot) -; addi a1,a3,5 -; sd a1,1160(slot) -; addi a1,a4,7 -; sd a1,1152(slot) -; addi a1,a5,9 -; sd a1,1144(slot) -; addi a1,s7,11 -; sd a1,1136(slot) -; addi a1,s8,13 -; sd a1,1128(slot) -; addi a1,s9,15 -; sd a1,1120(slot) -; addi a1,s10,17 -; sd a1,1112(slot) -; addi a1,s11,19 -; sd a1,1104(slot) -; addi a1,t0,21 -; sd a1,1096(slot) -; addi a1,t1,23 -; sd a1,1088(slot) -; addi a1,t2,25 -; sd a1,1080(slot) -; addi a1,a6,27 -; sd a1,1072(slot) -; addi a1,a7,29 -; sd a1,1064(slot) -; addi a1,t3,31 -; sd a1,1056(slot) -; addi a1,t4,33 -; sd a1,1048(slot) -; addi a1,s1,35 -; sd a1,1040(slot) -; addi a1,s2,37 -; sd a1,1032(slot) -; addi a1,s3,39 -; sd a1,1024(slot) -; addi a1,s4,31 -; sd a1,1016(slot) -; addi a1,s5,33 -; sd a1,1008(slot) -; addi s6,s6,35 -; addi a0,a0,37 +; sd a0,1264(slot) +; li a0,30 +; sd a0,1256(slot) +; li a0,32 +; sd a0,1248(slot) +; li a0,34 +; sd a0,1240(slot) +; li a0,36 +; sd a0,1232(slot) +; li a0,38 +; sd a0,1224(slot) +; li a0,30 +; sd a0,1216(slot) +; li a0,32 +; sd a0,1208(slot) +; li a0,34 +; sd a0,1200(slot) +; li a0,36 +; sd a0,1192(slot) +; li a0,38 +; sd a0,1184(slot) +; addi a0,a1,1 +; sd a0,1176(slot) +; addi a0,a2,3 +; sd a0,1168(slot) +; addi a0,a3,5 +; sd a0,1160(slot) +; addi a0,a4,7 +; sd a0,1152(slot) +; addi a0,a5,9 +; sd a0,1144(slot) +; addi a0,t0,11 +; sd a0,1136(slot) +; addi a0,t1,13 +; sd a0,1128(slot) +; addi a0,t2,15 +; sd a0,1120(slot) +; addi a0,s1,17 +; sd a0,1112(slot) +; addi a0,a6,19 +; sd a0,1104(slot) +; addi a0,a7,21 +; sd a0,1096(slot) +; addi a0,s2,23 +; sd a0,1088(slot) +; addi a0,s3,25 +; sd a0,1080(slot) +; addi a0,s4,27 +; sd a0,1072(slot) +; addi a0,s5,29 +; sd a0,1064(slot) +; addi a0,s6,31 +; sd a0,1056(slot) +; addi a0,s7,33 +; sd a0,1048(slot) +; addi a0,s8,35 +; sd a0,1040(slot) +; addi a0,s9,37 +; sd a0,1032(slot) +; addi a0,s10,39 +; sd a0,1024(slot) +; addi a0,s11,31 +; sd a0,1016(slot) +; addi a0,t3,33 +; sd a0,1008(slot) +; addi t4,t4,35 +; ld a1,1264(slot) +; addi a0,a1,37 ; ld a1,1256(slot) ; addi a1,a1,39 ; ld a2,1248(slot) @@ -546,84 +546,84 @@ block0(v0: i8): ; ld a5,1224(slot) ; addi a5,a5,37 ; ld t0,1216(slot) -; addi s7,t0,39 -; ld a6,1208(slot) -; addi s8,a6,31 -; ld t4,1200(slot) -; addi s9,t4,33 -; ld s3,1192(slot) -; addi s10,s3,35 -; ld s11,1184(slot) -; addi s11,s11,37 -; ld t0,1176(slot) ; addi t0,t0,39 -; ld t1,1168(slot) -; ld t2,1160(slot) -; add t1,t1,t2 -; ld s1,1144(slot) -; ld t3,1152(slot) -; add t2,t3,s1 -; ld a6,1128(slot) -; ld s4,1136(slot) -; add a6,s4,a6 -; ld a7,1112(slot) -; ld t3,1120(slot) -; add a7,t3,a7 -; ld t3,1096(slot) -; ld t4,1104(slot) -; add t3,t4,t3 -; ld s1,1088(slot) -; ld s3,1080(slot) -; add t4,s1,s3 -; ld s1,1064(slot) -; ld s2,1072(slot) -; add s1,s2,s1 -; ld s2,1048(slot) -; ld s3,1056(slot) +; ld t1,1208(slot) +; addi t1,t1,31 +; ld t2,1200(slot) +; addi t2,t2,33 +; ld s1,1192(slot) +; addi 
s1,s1,35 +; ld a6,1184(slot) +; addi a6,a6,37 +; ld a7,1176(slot) +; addi a7,a7,39 +; ld s2,1160(slot) +; ld s3,1168(slot) ; add s2,s3,s2 -; ld s3,1032(slot) -; ld s4,1040(slot) +; ld s3,1144(slot) +; ld s4,1152(slot) ; add s3,s4,s3 -; ld s4,1024(slot) -; ld s5,1016(slot) +; ld s4,1136(slot) +; ld s5,1128(slot) ; add s4,s4,s5 -; ld s5,1008(slot) -; add s5,s5,s6 -; add a1,a0,a1 -; add a2,a2,a3 -; add a3,a4,a5 -; add a4,s7,s8 -; add a5,s9,s10 -; add a0,s11,t0 -; add s7,t1,t2 -; add s8,a6,a7 -; add s9,t3,t4 -; add s10,s1,s2 -; add s11,s3,s4 -; add a1,s5,a1 -; add a2,a2,a3 -; add a3,a4,a5 -; add a4,a0,s7 -; add a5,s8,s9 -; add a0,s10,s11 +; ld s8,1120(slot) +; ld s10,1112(slot) +; add s5,s8,s10 +; ld s6,1096(slot) +; ld s7,1104(slot) +; add s6,s7,s6 +; ld s7,1080(slot) +; ld s8,1088(slot) +; add s7,s8,s7 +; ld s8,1064(slot) +; ld s9,1072(slot) +; add s8,s9,s8 +; ld s9,1048(slot) +; ld s10,1056(slot) +; add s9,s10,s9 +; ld s10,1032(slot) +; ld s11,1040(slot) +; add s10,s11,s10 +; ld s11,1024(slot) +; ld t3,1016(slot) +; add s11,s11,t3 +; ld t3,1008(slot) +; add t3,t3,t4 +; add a0,a0,a1 +; add a1,a2,a3 +; add a2,a4,a5 +; add a3,t0,t1 +; add a4,t2,s1 +; add a5,a6,a7 +; add t0,s2,s3 +; add t1,s4,s5 +; add t2,s6,s7 +; add s1,s8,s9 +; add a6,s10,s11 +; add a0,t3,a0 ; add a1,a1,a2 ; add a2,a3,a4 -; add a3,a5,a0 -; add a1,a1,a2 -; add a1,a3,a1 +; add a3,a5,t0 +; add a4,t1,t2 +; add a5,s1,a6 +; add a0,a0,a1 +; add a1,a2,a3 +; add a2,a4,a5 +; add a0,a0,a1 +; add a1,a2,a0 ; ld a0,1000(slot) -; ld s1,1352(sp) -; ld s2,1344(sp) -; ld s3,1336(sp) -; ld s4,1328(sp) -; ld s5,1320(sp) -; ld s6,1312(sp) -; ld s7,1304(sp) -; ld s8,1296(sp) -; ld s9,1288(sp) -; ld s10,1280(sp) -; ld s11,1272(sp) -; addi sp,sp,1360 +; ld s1,1368(sp) +; ld s2,1360(sp) +; ld s3,1352(sp) +; ld s4,1344(sp) +; ld s5,1336(sp) +; ld s6,1328(sp) +; ld s7,1320(sp) +; ld s8,1312(sp) +; ld s9,1304(sp) +; ld s10,1296(sp) +; ld s11,1288(sp) +; addi sp,sp,1376 ; ld ra,8(sp) ; ld fp,0(sp) ; addi sp,sp,16 @@ -635,112 +635,112 @@ block0(v0: i8): ; sd ra, 8(sp) ; sd s0, 0(sp) ; mv s0, sp -; addi sp, sp, -0x550 -; sd s1, 0x548(sp) -; sd s2, 0x540(sp) -; sd s3, 0x538(sp) -; sd s4, 0x530(sp) -; sd s5, 0x528(sp) -; sd s6, 0x520(sp) -; sd s7, 0x518(sp) -; sd s8, 0x510(sp) -; sd s9, 0x508(sp) -; sd s10, 0x500(sp) -; sd s11, 0x4f8(sp) +; addi sp, sp, -0x560 +; sd s1, 0x558(sp) +; sd s2, 0x550(sp) +; sd s3, 0x548(sp) +; sd s4, 0x540(sp) +; sd s5, 0x538(sp) +; sd s6, 0x530(sp) +; sd s7, 0x528(sp) +; sd s8, 0x520(sp) +; sd s9, 0x518(sp) +; sd s10, 0x510(sp) +; sd s11, 0x508(sp) ; block1: ; offset 0x40 ; sd a0, 0x3e8(sp) -; addi a2, zero, 2 -; sd a2, 0x3f0(sp) +; addi a1, zero, 2 ; addi a2, zero, 4 ; addi a3, zero, 6 ; addi a4, zero, 8 ; addi a5, zero, 0xa -; addi s7, zero, 0xc -; addi s8, zero, 0xe -; addi s9, zero, 0x10 -; addi s10, zero, 0x12 -; addi s11, zero, 0x14 -; addi t0, zero, 0x16 -; addi t1, zero, 0x18 -; addi t2, zero, 0x1a -; addi a6, zero, 0x1c -; addi a7, zero, 0x1e -; addi t3, zero, 0x20 -; addi t4, zero, 0x22 -; addi s1, zero, 0x24 -; addi s2, zero, 0x26 -; addi s3, zero, 0x1e -; addi s4, zero, 0x20 -; addi s5, zero, 0x22 -; addi s6, zero, 0x24 +; addi t0, zero, 0xc +; addi t1, zero, 0xe +; addi t2, zero, 0x10 +; addi s1, zero, 0x12 +; addi a6, zero, 0x14 +; addi a7, zero, 0x16 +; addi s2, zero, 0x18 +; addi s3, zero, 0x1a +; addi s4, zero, 0x1c +; addi s5, zero, 0x1e +; addi s6, zero, 0x20 +; addi s7, zero, 0x22 +; addi s8, zero, 0x24 +; addi s9, zero, 0x26 +; addi s10, zero, 0x1e +; addi s11, zero, 0x20 +; addi t3, zero, 0x22 +; addi t4, 
zero, 0x24 ; addi a0, zero, 0x26 -; addi a1, zero, 0x1e -; sd a1, 0x4e8(sp) -; addi a1, zero, 0x20 -; sd a1, 0x4e0(sp) -; addi a1, zero, 0x22 -; sd a1, 0x4d8(sp) -; addi a1, zero, 0x24 -; sd a1, 0x4d0(sp) -; addi a1, zero, 0x26 -; sd a1, 0x4c8(sp) -; addi a1, zero, 0x1e -; sd a1, 0x4c0(sp) -; addi a1, zero, 0x20 -; sd a1, 0x4b8(sp) -; addi a1, zero, 0x22 -; sd a1, 0x4b0(sp) -; addi a1, zero, 0x24 -; sd a1, 0x4a8(sp) -; addi a1, zero, 0x26 -; sd a1, 0x4a0(sp) -; ld a1, 0x3f0(sp) -; addi a1, a1, 1 -; sd a1, 0x498(sp) -; addi a1, a2, 3 -; sd a1, 0x490(sp) -; addi a1, a3, 5 -; sd a1, 0x488(sp) -; addi a1, a4, 7 -; sd a1, 0x480(sp) -; addi a1, a5, 9 -; sd a1, 0x478(sp) -; addi a1, s7, 0xb -; sd a1, 0x470(sp) -; addi a1, s8, 0xd -; sd a1, 0x468(sp) -; addi a1, s9, 0xf -; sd a1, 0x460(sp) -; addi a1, s10, 0x11 -; sd a1, 0x458(sp) -; addi a1, s11, 0x13 -; sd a1, 0x450(sp) -; addi a1, t0, 0x15 -; sd a1, 0x448(sp) -; addi a1, t1, 0x17 -; sd a1, 0x440(sp) -; addi a1, t2, 0x19 -; sd a1, 0x438(sp) -; addi a1, a6, 0x1b -; sd a1, 0x430(sp) -; addi a1, a7, 0x1d -; sd a1, 0x428(sp) -; addi a1, t3, 0x1f -; sd a1, 0x420(sp) -; addi a1, t4, 0x21 -; sd a1, 0x418(sp) -; addi a1, s1, 0x23 -; sd a1, 0x410(sp) -; addi a1, s2, 0x25 -; sd a1, 0x408(sp) -; addi a1, s3, 0x27 -; sd a1, 0x400(sp) -; addi a1, s4, 0x1f -; sd a1, 0x3f8(sp) -; addi a1, s5, 0x21 -; sd a1, 0x3f0(sp) -; addi s6, s6, 0x23 -; addi a0, a0, 0x25 +; sd a0, 0x4f0(sp) +; addi a0, zero, 0x1e +; sd a0, 0x4e8(sp) +; addi a0, zero, 0x20 +; sd a0, 0x4e0(sp) +; addi a0, zero, 0x22 +; sd a0, 0x4d8(sp) +; addi a0, zero, 0x24 +; sd a0, 0x4d0(sp) +; addi a0, zero, 0x26 +; sd a0, 0x4c8(sp) +; addi a0, zero, 0x1e +; sd a0, 0x4c0(sp) +; addi a0, zero, 0x20 +; sd a0, 0x4b8(sp) +; addi a0, zero, 0x22 +; sd a0, 0x4b0(sp) +; addi a0, zero, 0x24 +; sd a0, 0x4a8(sp) +; addi a0, zero, 0x26 +; sd a0, 0x4a0(sp) +; addi a0, a1, 1 +; sd a0, 0x498(sp) +; addi a0, a2, 3 +; sd a0, 0x490(sp) +; addi a0, a3, 5 +; sd a0, 0x488(sp) +; addi a0, a4, 7 +; sd a0, 0x480(sp) +; addi a0, a5, 9 +; sd a0, 0x478(sp) +; addi a0, t0, 0xb +; sd a0, 0x470(sp) +; addi a0, t1, 0xd +; sd a0, 0x468(sp) +; addi a0, t2, 0xf +; sd a0, 0x460(sp) +; addi a0, s1, 0x11 +; sd a0, 0x458(sp) +; addi a0, a6, 0x13 +; sd a0, 0x450(sp) +; addi a0, a7, 0x15 +; sd a0, 0x448(sp) +; addi a0, s2, 0x17 +; sd a0, 0x440(sp) +; addi a0, s3, 0x19 +; sd a0, 0x438(sp) +; addi a0, s4, 0x1b +; sd a0, 0x430(sp) +; addi a0, s5, 0x1d +; sd a0, 0x428(sp) +; addi a0, s6, 0x1f +; sd a0, 0x420(sp) +; addi a0, s7, 0x21 +; sd a0, 0x418(sp) +; addi a0, s8, 0x23 +; sd a0, 0x410(sp) +; addi a0, s9, 0x25 +; sd a0, 0x408(sp) +; addi a0, s10, 0x27 +; sd a0, 0x400(sp) +; addi a0, s11, 0x1f +; sd a0, 0x3f8(sp) +; addi a0, t3, 0x21 +; sd a0, 0x3f0(sp) +; addi t4, t4, 0x23 +; ld a1, 0x4f0(sp) +; addi a0, a1, 0x25 ; ld a1, 0x4e8(sp) ; addi a1, a1, 0x27 ; ld a2, 0x4e0(sp) @@ -752,84 +752,84 @@ block0(v0: i8): ; ld a5, 0x4c8(sp) ; addi a5, a5, 0x25 ; ld t0, 0x4c0(sp) -; addi s7, t0, 0x27 -; ld a6, 0x4b8(sp) -; addi s8, a6, 0x1f -; ld t4, 0x4b0(sp) -; addi s9, t4, 0x21 -; ld s3, 0x4a8(sp) -; addi s10, s3, 0x23 -; ld s11, 0x4a0(sp) -; addi s11, s11, 0x25 -; ld t0, 0x498(sp) ; addi t0, t0, 0x27 -; ld t1, 0x490(sp) -; ld t2, 0x488(sp) -; add t1, t1, t2 -; ld s1, 0x478(sp) -; ld t3, 0x480(sp) -; add t2, t3, s1 -; ld a6, 0x468(sp) -; ld s4, 0x470(sp) -; add a6, s4, a6 -; ld a7, 0x458(sp) -; ld t3, 0x460(sp) -; add a7, t3, a7 -; ld t3, 0x448(sp) -; ld t4, 0x450(sp) -; add t3, t4, t3 -; ld s1, 0x440(sp) -; ld s3, 0x438(sp) -; add t4, s1, s3 -; ld s1, 
0x428(sp) -; ld s2, 0x430(sp) -; add s1, s2, s1 -; ld s2, 0x418(sp) -; ld s3, 0x420(sp) +; ld t1, 0x4b8(sp) +; addi t1, t1, 0x1f +; ld t2, 0x4b0(sp) +; addi t2, t2, 0x21 +; ld s1, 0x4a8(sp) +; addi s1, s1, 0x23 +; ld a6, 0x4a0(sp) +; addi a6, a6, 0x25 +; ld a7, 0x498(sp) +; addi a7, a7, 0x27 +; ld s2, 0x488(sp) +; ld s3, 0x490(sp) ; add s2, s3, s2 -; ld s3, 0x408(sp) -; ld s4, 0x410(sp) +; ld s3, 0x478(sp) +; ld s4, 0x480(sp) ; add s3, s4, s3 -; ld s4, 0x400(sp) -; ld s5, 0x3f8(sp) +; ld s4, 0x470(sp) +; ld s5, 0x468(sp) ; add s4, s4, s5 -; ld s5, 0x3f0(sp) -; add s5, s5, s6 -; add a1, a0, a1 -; add a2, a2, a3 -; add a3, a4, a5 -; add a4, s7, s8 -; add a5, s9, s10 -; add a0, s11, t0 -; add s7, t1, t2 -; add s8, a6, a7 -; add s9, t3, t4 -; add s10, s1, s2 -; add s11, s3, s4 -; add a1, s5, a1 -; add a2, a2, a3 -; add a3, a4, a5 -; add a4, a0, s7 -; add a5, s8, s9 -; add a0, s10, s11 +; ld s8, 0x460(sp) +; ld s10, 0x458(sp) +; add s5, s8, s10 +; ld s6, 0x448(sp) +; ld s7, 0x450(sp) +; add s6, s7, s6 +; ld s7, 0x438(sp) +; ld s8, 0x440(sp) +; add s7, s8, s7 +; ld s8, 0x428(sp) +; ld s9, 0x430(sp) +; add s8, s9, s8 +; ld s9, 0x418(sp) +; ld s10, 0x420(sp) +; add s9, s10, s9 +; ld s10, 0x408(sp) +; ld s11, 0x410(sp) +; add s10, s11, s10 +; ld s11, 0x400(sp) +; ld t3, 0x3f8(sp) +; add s11, s11, t3 +; ld t3, 0x3f0(sp) +; add t3, t3, t4 +; add a0, a0, a1 +; add a1, a2, a3 +; add a2, a4, a5 +; add a3, t0, t1 +; add a4, t2, s1 +; add a5, a6, a7 +; add t0, s2, s3 +; add t1, s4, s5 +; add t2, s6, s7 +; add s1, s8, s9 +; add a6, s10, s11 +; add a0, t3, a0 ; add a1, a1, a2 ; add a2, a3, a4 -; add a3, a5, a0 -; add a1, a1, a2 -; add a1, a3, a1 +; add a3, a5, t0 +; add a4, t1, t2 +; add a5, s1, a6 +; add a0, a0, a1 +; add a1, a2, a3 +; add a2, a4, a5 +; add a0, a0, a1 +; add a1, a2, a0 ; ld a0, 0x3e8(sp) -; ld s1, 0x548(sp) -; ld s2, 0x540(sp) -; ld s3, 0x538(sp) -; ld s4, 0x530(sp) -; ld s5, 0x528(sp) -; ld s6, 0x520(sp) -; ld s7, 0x518(sp) -; ld s8, 0x510(sp) -; ld s9, 0x508(sp) -; ld s10, 0x500(sp) -; ld s11, 0x4f8(sp) -; addi sp, sp, 0x550 +; ld s1, 0x558(sp) +; ld s2, 0x550(sp) +; ld s3, 0x548(sp) +; ld s4, 0x540(sp) +; ld s5, 0x538(sp) +; ld s6, 0x530(sp) +; ld s7, 0x528(sp) +; ld s8, 0x520(sp) +; ld s9, 0x518(sp) +; ld s10, 0x510(sp) +; ld s11, 0x508(sp) +; addi sp, sp, 0x560 ; ld ra, 8(sp) ; ld s0, 0(sp) ; addi sp, sp, 0x10 diff --git a/cranelift/filetests/filetests/isa/riscv64/store-f16-f128.clif b/cranelift/filetests/filetests/isa/riscv64/store-f16-f128.clif index 70ffdfa81763..0852ee0baa93 100644 --- a/cranelift/filetests/filetests/isa/riscv64/store-f16-f128.clif +++ b/cranelift/filetests/filetests/isa/riscv64/store-f16-f128.clif @@ -10,14 +10,14 @@ block0(v0: f16, v1: i64): ; VCode: ; block0: -; fmv.x.w a3,fa0 -; sh a3,0(a0) +; fmv.x.w a1,fa0 +; sh a1,0(a0) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; fmv.x.w a3, fa0 -; sh a3, 0(a0) ; trap: heap_oob +; fmv.x.w a1, fa0 +; sh a1, 0(a0) ; trap: heap_oob ; ret function %store_f128(f128, i64) { @@ -47,13 +47,13 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; lh a3,0(a0) -; sh a3,0(a1) +; lh a0,0(a0) +; sh a0,0(a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lh a3, 0(a0) ; trap: heap_oob -; sh a3, 0(a1) ; trap: heap_oob +; lh a0, 0(a0) ; trap: heap_oob +; sh a0, 0(a1) ; trap: heap_oob ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/tail-call-conv.clif b/cranelift/filetests/filetests/isa/riscv64/tail-call-conv.clif index 0e67ebbfc4c5..46ae63be52d9 100644 --- a/cranelift/filetests/filetests/isa/riscv64/tail-call-conv.clif +++ 
b/cranelift/filetests/filetests/isa/riscv64/tail-call-conv.clif @@ -98,46 +98,46 @@ block0: ; li a5,35 ; li a6,40 ; li a7,45 -; li s3,50 -; li s4,55 -; li s5,60 -; li s6,65 -; li s7,70 -; li s8,75 -; li s9,80 -; li s10,85 -; li s11,90 -; li t0,95 -; li t1,100 -; li t2,105 -; li t3,110 -; li t4,115 -; li s1,120 -; li s2,125 +; li s11,50 +; li t3,55 +; li t4,60 +; li t0,65 +; li t1,70 +; li t2,75 +; li s1,80 +; li s2,85 +; li s3,90 +; li s4,95 +; li s5,100 +; li s6,105 +; li s7,110 +; li s8,115 +; li s9,120 +; li s10,125 ; li a0,130 ; li a1,135 -; sd s3,0(sp) -; sd s4,8(sp) -; sd s5,16(sp) -; sd s6,24(sp) -; sd s7,32(sp) -; sd s8,40(sp) -; sd s9,48(sp) -; sd s10,56(sp) -; sd s11,64(sp) -; sd t0,72(sp) -; sd t1,80(sp) -; sd t2,88(sp) -; sd t3,96(sp) -; sd t4,104(sp) -; sd s1,112(sp) -; sd s2,120(sp) +; sd s11,0(sp) +; sd t3,8(sp) +; sd t4,16(sp) +; sd t0,24(sp) +; sd t1,32(sp) +; sd t2,40(sp) +; sd s1,48(sp) +; sd s2,56(sp) +; sd s3,64(sp) +; sd s4,72(sp) +; sd s5,80(sp) +; sd s6,88(sp) +; sd s7,96(sp) +; sd s8,104(sp) +; sd s9,112(sp) +; sd s10,120(sp) ; sd a0,128(sp) ; sd a1,136(sp) -; load_ext_name_far s3,%tail_callee_stack_args+0 +; load_ext_name_far t0,%tail_callee_stack_args+0 ; ld a0,8(slot) ; ld a1,0(slot) -; callind s3 +; callind t0 ; ld s1,248(sp) ; ld s2,240(sp) ; ld s3,232(sp) @@ -184,50 +184,50 @@ block0: ; addi a5, zero, 0x23 ; addi a6, zero, 0x28 ; addi a7, zero, 0x2d -; addi s3, zero, 0x32 -; addi s4, zero, 0x37 -; addi s5, zero, 0x3c -; addi s6, zero, 0x41 -; addi s7, zero, 0x46 -; addi s8, zero, 0x4b -; addi s9, zero, 0x50 -; addi s10, zero, 0x55 -; addi s11, zero, 0x5a -; addi t0, zero, 0x5f -; addi t1, zero, 0x64 -; addi t2, zero, 0x69 -; addi t3, zero, 0x6e -; addi t4, zero, 0x73 -; addi s1, zero, 0x78 -; addi s2, zero, 0x7d +; addi s11, zero, 0x32 +; addi t3, zero, 0x37 +; addi t4, zero, 0x3c +; addi t0, zero, 0x41 +; addi t1, zero, 0x46 +; addi t2, zero, 0x4b +; addi s1, zero, 0x50 +; addi s2, zero, 0x55 +; addi s3, zero, 0x5a +; addi s4, zero, 0x5f +; addi s5, zero, 0x64 +; addi s6, zero, 0x69 +; addi s7, zero, 0x6e +; addi s8, zero, 0x73 +; addi s9, zero, 0x78 +; addi s10, zero, 0x7d ; addi a0, zero, 0x82 ; addi a1, zero, 0x87 -; sd s3, 0(sp) -; sd s4, 8(sp) -; sd s5, 0x10(sp) -; sd s6, 0x18(sp) -; sd s7, 0x20(sp) -; sd s8, 0x28(sp) -; sd s9, 0x30(sp) -; sd s10, 0x38(sp) -; sd s11, 0x40(sp) -; sd t0, 0x48(sp) -; sd t1, 0x50(sp) -; sd t2, 0x58(sp) -; sd t3, 0x60(sp) -; sd t4, 0x68(sp) -; sd s1, 0x70(sp) -; sd s2, 0x78(sp) +; sd s11, 0(sp) +; sd t3, 8(sp) +; sd t4, 0x10(sp) +; sd t0, 0x18(sp) +; sd t1, 0x20(sp) +; sd t2, 0x28(sp) +; sd s1, 0x30(sp) +; sd s2, 0x38(sp) +; sd s3, 0x40(sp) +; sd s4, 0x48(sp) +; sd s5, 0x50(sp) +; sd s6, 0x58(sp) +; sd s7, 0x60(sp) +; sd s8, 0x68(sp) +; sd s9, 0x70(sp) +; sd s10, 0x78(sp) ; sd a0, 0x80(sp) ; sd a1, 0x88(sp) -; auipc s3, 0 -; ld s3, 0xc(s3) +; auipc t0, 0 +; ld t0, 0xc(t0) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %tail_callee_stack_args 0 ; .byte 0x00, 0x00, 0x00, 0x00 ; ld a0, 0x98(sp) ; ld a1, 0x90(sp) -; jalr s3 +; jalr t0 ; addi sp, sp, -0x90 ; ld s1, 0xf8(sp) ; ld s2, 0xf0(sp) @@ -301,56 +301,56 @@ block0: ; sd a1,16(slot) ; li a1,15 ; sd a1,8(slot) -; li a4,20 -; li a5,25 -; li a2,30 -; li s3,35 -; li s4,40 -; li s5,45 -; li s6,50 -; li s7,55 -; li s8,60 -; li s9,65 -; li s10,70 -; li s11,75 -; li t0,80 -; li t1,85 -; li t2,90 -; li a6,95 -; li a7,100 -; li t3,105 -; li t4,110 -; li s1,115 -; li s2,120 -; li a3,125 -; sd a3,0(slot) +; li a3,20 +; li a4,25 +; li a5,30 +; li s11,35 +; li t3,40 +; li 
t4,45 +; li t0,50 +; li t1,55 +; li t2,60 +; li s1,65 +; li a6,70 +; li a7,75 +; li s2,80 +; li s3,85 +; li s4,90 +; li s5,95 +; li s6,100 +; li s7,105 +; li s8,110 +; li s9,115 +; li s10,120 +; li a2,125 ; li a1,130 -; li a3,135 -; sd a4,0(a0) -; sd a5,8(a0) -; sd a2,16(a0) -; sd s3,24(a0) -; sd s4,32(a0) -; sd s5,40(a0) -; sd s6,48(a0) -; sd s7,56(a0) -; sd s8,64(a0) -; sd s9,72(a0) -; sd s10,80(a0) -; sd s11,88(a0) -; sd t0,96(a0) -; sd t1,104(a0) -; sd t2,112(a0) -; sd a6,120(a0) -; sd a7,128(a0) -; sd t3,136(a0) -; sd t4,144(a0) -; sd s1,152(a0) -; sd s2,160(a0) -; ld a4,0(slot) -; sd a4,168(a0) -; sd a1,176(a0) -; sd a3,184(a0) +; sd a1,0(slot) +; li a1,135 +; sd a3,0(a0) +; sd a4,8(a0) +; sd a5,16(a0) +; sd s11,24(a0) +; sd t3,32(a0) +; sd t4,40(a0) +; sd t0,48(a0) +; sd t1,56(a0) +; sd t2,64(a0) +; sd s1,72(a0) +; sd a6,80(a0) +; sd a7,88(a0) +; sd s2,96(a0) +; sd s3,104(a0) +; sd s4,112(a0) +; sd s5,120(a0) +; sd s6,128(a0) +; sd s7,136(a0) +; sd s8,144(a0) +; sd s9,152(a0) +; sd s10,160(a0) +; sd a2,168(a0) +; ld a2,0(slot) +; sd a2,176(a0) +; sd a1,184(a0) ; ld a0,16(slot) ; ld a1,8(slot) ; ld s1,120(sp) @@ -393,56 +393,56 @@ block0: ; sd a1, 0x10(sp) ; addi a1, zero, 0xf ; sd a1, 8(sp) -; addi a4, zero, 0x14 -; addi a5, zero, 0x19 -; addi a2, zero, 0x1e -; addi s3, zero, 0x23 -; addi s4, zero, 0x28 -; addi s5, zero, 0x2d -; addi s6, zero, 0x32 -; addi s7, zero, 0x37 -; addi s8, zero, 0x3c -; addi s9, zero, 0x41 -; addi s10, zero, 0x46 -; addi s11, zero, 0x4b -; addi t0, zero, 0x50 -; addi t1, zero, 0x55 -; addi t2, zero, 0x5a -; addi a6, zero, 0x5f -; addi a7, zero, 0x64 -; addi t3, zero, 0x69 -; addi t4, zero, 0x6e -; addi s1, zero, 0x73 -; addi s2, zero, 0x78 -; addi a3, zero, 0x7d -; sd a3, 0(sp) +; addi a3, zero, 0x14 +; addi a4, zero, 0x19 +; addi a5, zero, 0x1e +; addi s11, zero, 0x23 +; addi t3, zero, 0x28 +; addi t4, zero, 0x2d +; addi t0, zero, 0x32 +; addi t1, zero, 0x37 +; addi t2, zero, 0x3c +; addi s1, zero, 0x41 +; addi a6, zero, 0x46 +; addi a7, zero, 0x4b +; addi s2, zero, 0x50 +; addi s3, zero, 0x55 +; addi s4, zero, 0x5a +; addi s5, zero, 0x5f +; addi s6, zero, 0x64 +; addi s7, zero, 0x69 +; addi s8, zero, 0x6e +; addi s9, zero, 0x73 +; addi s10, zero, 0x78 +; addi a2, zero, 0x7d ; addi a1, zero, 0x82 -; addi a3, zero, 0x87 -; sd a4, 0(a0) -; sd a5, 8(a0) -; sd a2, 0x10(a0) -; sd s3, 0x18(a0) -; sd s4, 0x20(a0) -; sd s5, 0x28(a0) -; sd s6, 0x30(a0) -; sd s7, 0x38(a0) -; sd s8, 0x40(a0) -; sd s9, 0x48(a0) -; sd s10, 0x50(a0) -; sd s11, 0x58(a0) -; sd t0, 0x60(a0) -; sd t1, 0x68(a0) -; sd t2, 0x70(a0) -; sd a6, 0x78(a0) -; sd a7, 0x80(a0) -; sd t3, 0x88(a0) -; sd t4, 0x90(a0) -; sd s1, 0x98(a0) -; sd s2, 0xa0(a0) -; ld a4, 0(sp) -; sd a4, 0xa8(a0) -; sd a1, 0xb0(a0) -; sd a3, 0xb8(a0) +; sd a1, 0(sp) +; addi a1, zero, 0x87 +; sd a3, 0(a0) +; sd a4, 8(a0) +; sd a5, 0x10(a0) +; sd s11, 0x18(a0) +; sd t3, 0x20(a0) +; sd t4, 0x28(a0) +; sd t0, 0x30(a0) +; sd t1, 0x38(a0) +; sd t2, 0x40(a0) +; sd s1, 0x48(a0) +; sd a6, 0x50(a0) +; sd a7, 0x58(a0) +; sd s2, 0x60(a0) +; sd s3, 0x68(a0) +; sd s4, 0x70(a0) +; sd s5, 0x78(a0) +; sd s6, 0x80(a0) +; sd s7, 0x88(a0) +; sd s8, 0x90(a0) +; sd s9, 0x98(a0) +; sd s10, 0xa0(a0) +; sd a2, 0xa8(a0) +; ld a2, 0(sp) +; sd a2, 0xb0(a0) +; sd a1, 0xb8(a0) ; ld a0, 0x10(sp) ; ld a1, 8(sp) ; ld s1, 0x78(sp) @@ -489,8 +489,8 @@ block0: ; sd s11,312(sp) ; block0: ; load_addr a0,0(sp) -; load_ext_name_far a4,%tail_callee_stack_rets+0 -; callind a4 +; load_ext_name_far a1,%tail_callee_stack_rets+0 +; callind a1 ; ld a0,96(slot) ; ld 
s1,392(sp) ; ld s2,384(sp) @@ -529,12 +529,12 @@ block0: ; sd s11, 0x138(sp) ; block1: ; offset 0x40 ; mv a0, sp -; auipc a4, 0 -; ld a4, 0xc(a4) +; auipc a1, 0 +; ld a1, 0xc(a1) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %tail_callee_stack_rets 0 ; .byte 0x00, 0x00, 0x00, 0x00 -; jalr a4 +; jalr a1 ; ld a2, 0(sp) ; sd a2, 0xc0(sp) ; ld a2, 8(sp) @@ -559,17 +559,17 @@ block0: ; sd a2, 0x110(sp) ; ld a2, 0x58(sp) ; sd a2, 0x118(sp) -; ld s8, 0x60(sp) -; ld s9, 0x68(sp) +; ld s1, 0x60(sp) +; ld s11, 0x68(sp) ; ld s10, 0x70(sp) -; ld s11, 0x78(sp) -; ld s7, 0x80(sp) -; ld s6, 0x88(sp) -; ld s5, 0x90(sp) -; ld s4, 0x98(sp) -; ld s3, 0xa0(sp) -; ld s2, 0xa8(sp) -; ld s1, 0xb0(sp) +; ld s9, 0x78(sp) +; ld s2, 0x80(sp) +; ld s3, 0x88(sp) +; ld s4, 0x90(sp) +; ld s5, 0x98(sp) +; ld s6, 0xa0(sp) +; ld s7, 0xa8(sp) +; ld s8, 0xb0(sp) ; ld a2, 0xb8(sp) ; sd a2, 0x120(sp) ; ld a0, 0x120(sp) @@ -619,48 +619,48 @@ block0(v0: i64, v1: i64, v2: i64, v3: i64, v4: i64, v5: i64, v6: i64, v7: i64, v ; sd a1,0(slot) ; sd a2,8(slot) ; sd a7,16(slot) -; ld s3,-160(incoming_arg) -; ld s5,-152(incoming_arg) -; ld s7,-144(incoming_arg) -; ld s9,-136(incoming_arg) -; ld s11,-128(incoming_arg) -; ld t1,-120(incoming_arg) -; ld t3,-112(incoming_arg) -; ld a7,-104(incoming_arg) -; ld t4,-96(incoming_arg) -; ld s2,-88(incoming_arg) -; ld s4,-80(incoming_arg) -; ld s6,-72(incoming_arg) -; ld s8,-64(incoming_arg) -; ld s10,-56(incoming_arg) -; ld t0,-48(incoming_arg) -; ld t2,-40(incoming_arg) -; ld s1,-32(incoming_arg) +; ld s9,-160(incoming_arg) +; ld s7,-152(incoming_arg) +; ld s5,-144(incoming_arg) +; ld s3,-136(incoming_arg) +; ld a7,-128(incoming_arg) +; ld s2,-120(incoming_arg) +; ld s4,-112(incoming_arg) +; ld s6,-104(incoming_arg) +; ld s8,-96(incoming_arg) +; ld s10,-88(incoming_arg) +; ld t3,-80(incoming_arg) +; ld s11,-72(incoming_arg) +; ld s1,-64(incoming_arg) +; ld t2,-56(incoming_arg) +; ld t1,-48(incoming_arg) +; ld t0,-40(incoming_arg) +; ld t4,-32(incoming_arg) ; ld a1,-24(incoming_arg) ; ld a2,-16(incoming_arg) ; sd a3,0(a0) ; sd a4,8(a0) ; sd a5,16(a0) ; sd a6,24(a0) -; ld a5,16(slot) -; sd a5,32(a0) -; sd s3,40(a0) -; sd s5,48(a0) -; sd s7,56(a0) -; sd s9,64(a0) -; sd s11,72(a0) -; sd t1,80(a0) -; sd t3,88(a0) -; sd a7,96(a0) -; sd t4,104(a0) -; sd s2,112(a0) -; sd s4,120(a0) -; sd s6,128(a0) -; sd s8,136(a0) -; sd s10,144(a0) -; sd t0,152(a0) -; sd t2,160(a0) -; sd s1,168(a0) +; ld a3,16(slot) +; sd a3,32(a0) +; sd s9,40(a0) +; sd s7,48(a0) +; sd s5,56(a0) +; sd s3,64(a0) +; sd a7,72(a0) +; sd s2,80(a0) +; sd s4,88(a0) +; sd s6,96(a0) +; sd s8,104(a0) +; sd s10,112(a0) +; sd t3,120(a0) +; sd s11,128(a0) +; sd s1,136(a0) +; sd t2,144(a0) +; sd t1,152(a0) +; sd t0,160(a0) +; sd t4,168(a0) ; sd a1,176(a0) ; sd a2,184(a0) ; ld a0,0(slot) @@ -705,48 +705,48 @@ block0(v0: i64, v1: i64, v2: i64, v3: i64, v4: i64, v5: i64, v6: i64, v7: i64, v ; sd a1, 0(sp) ; sd a2, 8(sp) ; sd a7, 0x10(sp) -; ld s3, 0x90(sp) -; ld s5, 0x98(sp) -; ld s7, 0xa0(sp) -; ld s9, 0xa8(sp) -; ld s11, 0xb0(sp) -; ld t1, 0xb8(sp) -; ld t3, 0xc0(sp) -; ld a7, 0xc8(sp) -; ld t4, 0xd0(sp) -; ld s2, 0xd8(sp) -; ld s4, 0xe0(sp) -; ld s6, 0xe8(sp) -; ld s8, 0xf0(sp) -; ld s10, 0xf8(sp) -; ld t0, 0x100(sp) -; ld t2, 0x108(sp) -; ld s1, 0x110(sp) +; ld s9, 0x90(sp) +; ld s7, 0x98(sp) +; ld s5, 0xa0(sp) +; ld s3, 0xa8(sp) +; ld a7, 0xb0(sp) +; ld s2, 0xb8(sp) +; ld s4, 0xc0(sp) +; ld s6, 0xc8(sp) +; ld s8, 0xd0(sp) +; ld s10, 0xd8(sp) +; ld t3, 0xe0(sp) +; ld s11, 0xe8(sp) +; ld s1, 0xf0(sp) +; ld t2, 0xf8(sp) +; ld t1, 
0x100(sp) +; ld t0, 0x108(sp) +; ld t4, 0x110(sp) ; ld a1, 0x118(sp) ; ld a2, 0x120(sp) ; sd a3, 0(a0) ; sd a4, 8(a0) ; sd a5, 0x10(a0) ; sd a6, 0x18(a0) -; ld a5, 0x10(sp) -; sd a5, 0x20(a0) -; sd s3, 0x28(a0) -; sd s5, 0x30(a0) -; sd s7, 0x38(a0) -; sd s9, 0x40(a0) -; sd s11, 0x48(a0) -; sd t1, 0x50(a0) -; sd t3, 0x58(a0) -; sd a7, 0x60(a0) -; sd t4, 0x68(a0) -; sd s2, 0x70(a0) -; sd s4, 0x78(a0) -; sd s6, 0x80(a0) -; sd s8, 0x88(a0) -; sd s10, 0x90(a0) -; sd t0, 0x98(a0) -; sd t2, 0xa0(a0) -; sd s1, 0xa8(a0) +; ld a3, 0x10(sp) +; sd a3, 0x20(a0) +; sd s9, 0x28(a0) +; sd s7, 0x30(a0) +; sd s5, 0x38(a0) +; sd s3, 0x40(a0) +; sd a7, 0x48(a0) +; sd s2, 0x50(a0) +; sd s4, 0x58(a0) +; sd s6, 0x60(a0) +; sd s8, 0x68(a0) +; sd s10, 0x70(a0) +; sd t3, 0x78(a0) +; sd s11, 0x80(a0) +; sd s1, 0x88(a0) +; sd t2, 0x90(a0) +; sd t1, 0x98(a0) +; sd t0, 0xa0(a0) +; sd t4, 0xa8(a0) ; sd a1, 0xb0(a0) ; sd a2, 0xb8(a0) ; ld a0, 0(sp) @@ -831,48 +831,48 @@ block0: ; li a6,35 ; li a7,40 ; li a0,45 -; li s11,50 -; li t0,55 -; li t1,60 -; li t2,65 -; li t3,70 -; li t4,75 -; li s1,80 -; li s2,85 -; li s3,90 -; li s4,95 -; li s5,100 -; li s6,105 -; li s7,110 -; li s8,115 -; li s9,120 -; li s10,125 +; li t0,50 +; li t1,55 +; li t2,60 +; li s1,65 +; li s2,70 +; li s3,75 +; li s4,80 +; li s5,85 +; li s6,90 +; li s7,95 +; li s8,100 +; li s9,105 +; li s10,110 +; li s11,115 +; li t3,120 +; li t4,125 ; li a1,130 ; li a2,135 ; sd a0,0(sp) -; sd s11,8(sp) -; sd t0,16(sp) -; sd t1,24(sp) -; sd t2,32(sp) -; sd t3,40(sp) -; sd t4,48(sp) -; sd s1,56(sp) -; sd s2,64(sp) -; sd s3,72(sp) -; sd s4,80(sp) -; sd s5,88(sp) -; sd s6,96(sp) -; sd s7,104(sp) -; sd s8,112(sp) -; sd s9,120(sp) -; sd s10,128(sp) +; sd t0,8(sp) +; sd t1,16(sp) +; sd t2,24(sp) +; sd s1,32(sp) +; sd s2,40(sp) +; sd s3,48(sp) +; sd s4,56(sp) +; sd s5,64(sp) +; sd s6,72(sp) +; sd s7,80(sp) +; sd s8,88(sp) +; sd s9,96(sp) +; sd s10,104(sp) +; sd s11,112(sp) +; sd t3,120(sp) +; sd t4,128(sp) ; sd a1,136(sp) ; sd a2,144(sp) ; load_addr a0,160(sp) -; load_ext_name_far t1,%tail_callee_stack_args_and_rets+0 +; load_ext_name_far s2,%tail_callee_stack_args_and_rets+0 ; ld a1,0(slot) ; ld a2,96(slot) -; callind t1 +; callind s2 ; ld a0,96(slot) ; ld s1,552(sp) ; ld s2,544(sp) @@ -920,52 +920,52 @@ block0: ; addi a6, zero, 0x23 ; addi a7, zero, 0x28 ; addi a0, zero, 0x2d -; addi s11, zero, 0x32 -; addi t0, zero, 0x37 -; addi t1, zero, 0x3c -; addi t2, zero, 0x41 -; addi t3, zero, 0x46 -; addi t4, zero, 0x4b -; addi s1, zero, 0x50 -; addi s2, zero, 0x55 -; addi s3, zero, 0x5a -; addi s4, zero, 0x5f -; addi s5, zero, 0x64 -; addi s6, zero, 0x69 -; addi s7, zero, 0x6e -; addi s8, zero, 0x73 -; addi s9, zero, 0x78 -; addi s10, zero, 0x7d +; addi t0, zero, 0x32 +; addi t1, zero, 0x37 +; addi t2, zero, 0x3c +; addi s1, zero, 0x41 +; addi s2, zero, 0x46 +; addi s3, zero, 0x4b +; addi s4, zero, 0x50 +; addi s5, zero, 0x55 +; addi s6, zero, 0x5a +; addi s7, zero, 0x5f +; addi s8, zero, 0x64 +; addi s9, zero, 0x69 +; addi s10, zero, 0x6e +; addi s11, zero, 0x73 +; addi t3, zero, 0x78 +; addi t4, zero, 0x7d ; addi a1, zero, 0x82 ; addi a2, zero, 0x87 ; sd a0, 0(sp) -; sd s11, 8(sp) -; sd t0, 0x10(sp) -; sd t1, 0x18(sp) -; sd t2, 0x20(sp) -; sd t3, 0x28(sp) -; sd t4, 0x30(sp) -; sd s1, 0x38(sp) -; sd s2, 0x40(sp) -; sd s3, 0x48(sp) -; sd s4, 0x50(sp) -; sd s5, 0x58(sp) -; sd s6, 0x60(sp) -; sd s7, 0x68(sp) -; sd s8, 0x70(sp) -; sd s9, 0x78(sp) -; sd s10, 0x80(sp) +; sd t0, 8(sp) +; sd t1, 0x10(sp) +; sd t2, 0x18(sp) +; sd s1, 0x20(sp) +; sd s2, 0x28(sp) +; sd s3, 0x30(sp) +; sd 
s4, 0x38(sp) +; sd s5, 0x40(sp) +; sd s6, 0x48(sp) +; sd s7, 0x50(sp) +; sd s8, 0x58(sp) +; sd s9, 0x60(sp) +; sd s10, 0x68(sp) +; sd s11, 0x70(sp) +; sd t3, 0x78(sp) +; sd t4, 0x80(sp) ; sd a1, 0x88(sp) ; sd a2, 0x90(sp) ; addi a0, sp, 0xa0 -; auipc t1, 0 -; ld t1, 0xc(t1) +; auipc s2, 0 +; ld s2, 0xc(s2) ; j 0xc ; .byte 0x00, 0x00, 0x00, 0x00 ; reloc_external Abs8 %tail_callee_stack_args_and_rets 0 ; .byte 0x00, 0x00, 0x00, 0x00 ; ld a1, 0x160(sp) ; ld a2, 0x1c0(sp) -; jalr t1 +; jalr s2 ; addi sp, sp, -0xa0 ; ld a2, 0xa0(sp) ; sd a2, 0x160(sp) @@ -991,17 +991,17 @@ block0: ; sd a2, 0x1b0(sp) ; ld a2, 0xf8(sp) ; sd a2, 0x1b8(sp) -; ld s8, 0x100(sp) -; ld s9, 0x108(sp) +; ld s1, 0x100(sp) +; ld s11, 0x108(sp) ; ld s10, 0x110(sp) -; ld s11, 0x118(sp) -; ld s7, 0x120(sp) -; ld s6, 0x128(sp) -; ld s5, 0x130(sp) -; ld s4, 0x138(sp) -; ld s3, 0x140(sp) -; ld s2, 0x148(sp) -; ld s1, 0x150(sp) +; ld s9, 0x118(sp) +; ld s2, 0x120(sp) +; ld s3, 0x128(sp) +; ld s4, 0x130(sp) +; ld s5, 0x138(sp) +; ld s6, 0x140(sp) +; ld s7, 0x148(sp) +; ld s8, 0x150(sp) ; ld a2, 0x158(sp) ; sd a2, 0x1c0(sp) ; ld a0, 0x1c0(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/traps.clif b/cranelift/filetests/filetests/isa/riscv64/traps.clif index 9b5d08e1aa9a..5b0208f786c4 100644 --- a/cranelift/filetests/filetests/isa/riscv64/traps.clif +++ b/cranelift/filetests/filetests/isa/riscv64/traps.clif @@ -40,14 +40,14 @@ block0(v0: i128): ; VCode: ; block0: -; or a3,a1,a0 -; trap_if user1##(a3 ne zero) +; or a0,a1,a0 +; trap_if user1##(a0 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; or a3, a1, a0 -; beqz a3, 8 +; or a0, a1, a0 +; beqz a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: user1 ; ret @@ -61,14 +61,14 @@ block0(v0: i64): ; VCode: ; block0: -; li a2,42 -; trap_if user1##(a0 eq a2) +; li a1,42 +; trap_if user1##(a0 eq a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a2, zero, 0x2a -; bne a0, a2, 8 +; addi a1, zero, 0x2a +; bne a0, a1, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: user1 ; ret @@ -97,14 +97,14 @@ block0(v0: i128): ; VCode: ; block0: -; or a3,a1,a0 -; trap_if user1##(a3 eq zero) +; or a0,a1,a0 +; trap_if user1##(a0 eq zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; or a3, a1, a0 -; bnez a3, 8 +; or a0, a1, a0 +; bnez a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: user1 ; ret @@ -118,14 +118,14 @@ block0(v0: i64): ; VCode: ; block0: -; li a2,42 -; trap_if user1##(a0 ne a2) +; li a1,42 +; trap_if user1##(a0 ne a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a2, zero, 0x2a -; beq a0, a2, 8 +; addi a1, zero, 0x2a +; beq a0, a1, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: user1 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/trunc.clif b/cranelift/filetests/filetests/isa/riscv64/trunc.clif index e554ccf18381..4092cdce1e6b 100644 --- a/cranelift/filetests/filetests/isa/riscv64/trunc.clif +++ b/cranelift/filetests/filetests/isa/riscv64/trunc.clif @@ -10,31 +10,31 @@ block0(v0: f32): ; VCode: ; block0: -; lui a2,307200 -; fmv.w.x fa4,a2 -; fabs.s fa1,fa0 -; flt.s a2,fa1,fa4 -; fcvt.w.s a4,fa0,rtz -; fcvt.s.w fa1,a4,rtz -; fsgnj.s fa2,fa1,fa0 -; fmv.w.x fa4,zero -; fadd.s fa0,fa0,fa4,rne -; select fa0,fa0,fa2##condition=(a2 eq zero) +; lui a0,307200 +; fmv.w.x fa1,a0 +; fabs.s fa2,fa0 +; flt.s a0,fa2,fa1 +; fcvt.w.s a1,fa0,rtz +; fcvt.s.w fa2,a1,rtz +; fsgnj.s fa4,fa2,fa0 +; fmv.w.x fa1,zero +; fadd.s fa0,fa0,fa1,rne +; select fa0,fa0,fa4##condition=(a0 eq zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lui a2, 0x4b000 -; fmv.w.x fa4, a2 -; fabs.s fa1, 
fa0 -; flt.s a2, fa1, fa4 -; fcvt.w.s a4, fa0, rtz ; trap: bad_toint -; fcvt.s.w fa1, a4, rtz -; fsgnj.s fa2, fa1, fa0 -; fmv.w.x fa4, zero -; fadd.s fa0, fa0, fa4, rne -; beqz a2, 8 -; fmv.d fa0, fa2 +; lui a0, 0x4b000 +; fmv.w.x fa1, a0 +; fabs.s fa2, fa0 +; flt.s a0, fa2, fa1 +; fcvt.w.s a1, fa0, rtz ; trap: bad_toint +; fcvt.s.w fa2, a1, rtz +; fsgnj.s fa4, fa2, fa0 +; fmv.w.x fa1, zero +; fadd.s fa0, fa0, fa1, rne +; beqz a0, 8 +; fmv.d fa0, fa4 ; ret function %f26(f64) -> f64 { @@ -45,33 +45,32 @@ block0(v0: f64): ; VCode: ; block0: -; lui a2,1075 -; slli a4,a2,40 -; fmv.d.x fa1,a4 +; lui a0,1075 +; slli a0,a0,40 +; fmv.d.x fa1,a0 ; fabs.d fa2,fa0 -; flt.d a4,fa2,fa1 -; fcvt.l.d a0,fa0,rtz -; fcvt.d.l fa2,a0,rtz -; fsgnj.d fa4,fa2,fa0 -; fmv.d.x fa1,zero -; fadd.d fa2,fa0,fa1,rne -; select fa0,fa2,fa4##condition=(a4 eq zero) +; flt.d a0,fa2,fa1 +; fcvt.l.d a2,fa0,rtz +; fcvt.d.l fa4,a2,rtz +; fsgnj.d fa1,fa4,fa0 +; fmv.d.x fa2,zero +; fadd.d fa0,fa0,fa2,rne +; select fa0,fa0,fa1##condition=(a0 eq zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; lui a2, 0x433 -; slli a4, a2, 0x28 -; fmv.d.x fa1, a4 +; lui a0, 0x433 +; slli a0, a0, 0x28 +; fmv.d.x fa1, a0 ; fabs.d fa2, fa0 -; flt.d a4, fa2, fa1 -; fcvt.l.d a0, fa0, rtz ; trap: bad_toint -; fcvt.d.l fa2, a0, rtz -; fsgnj.d fa4, fa2, fa0 -; fmv.d.x fa1, zero -; fadd.d fa2, fa0, fa1, rne -; fmv.d fa0, fa2 -; beqz a4, 8 -; fmv.d fa0, fa4 +; flt.d a0, fa2, fa1 +; fcvt.l.d a2, fa0, rtz ; trap: bad_toint +; fcvt.d.l fa4, a2, rtz +; fsgnj.d fa1, fa4, fa0 +; fmv.d.x fa2, zero +; fadd.d fa0, fa0, fa2, rne +; beqz a0, 8 +; fmv.d fa0, fa1 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/uadd_overflow_trap.clif b/cranelift/filetests/filetests/isa/riscv64/uadd_overflow_trap.clif index 4c6a973dcf09..2b7dddcc6103 100644 --- a/cranelift/filetests/filetests/isa/riscv64/uadd_overflow_trap.clif +++ b/cranelift/filetests/filetests/isa/riscv64/uadd_overflow_trap.clif @@ -11,25 +11,25 @@ block0(v0: i32): ; VCode: ; block0: ; li a2,127 -; slli a3,a0,32 -; srli a5,a3,32 -; slli a1,a2,32 -; srli a3,a1,32 -; add a0,a5,a3 +; slli a0,a0,32 ; srli a1,a0,32 -; trap_if user1##(a1 ne zero) +; slli a0,a2,32 +; srli a0,a0,32 +; add a0,a1,a0 +; srli a3,a0,32 +; trap_if user1##(a3 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; addi a2, zero, 0x7f -; slli a3, a0, 0x20 -; srli a5, a3, 0x20 -; slli a1, a2, 0x20 -; srli a3, a1, 0x20 -; add a0, a5, a3 +; slli a0, a0, 0x20 ; srli a1, a0, 0x20 -; beqz a1, 8 +; slli a0, a2, 0x20 +; srli a0, a0, 0x20 +; add a0, a1, a0 +; srli a3, a0, 0x20 +; beqz a3, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: user1 ; ret @@ -42,26 +42,26 @@ block0(v0: i32): ; VCode: ; block0: -; li a2,127 -; slli a3,a2,32 -; srli a5,a3,32 -; slli a1,a0,32 -; srli a3,a1,32 -; add a0,a5,a3 -; srli a1,a0,32 -; trap_if user1##(a1 ne zero) +; li a1,127 +; slli a1,a1,32 +; srli a1,a1,32 +; slli a0,a0,32 +; srli a0,a0,32 +; add a0,a1,a0 +; srli a3,a0,32 +; trap_if user1##(a3 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a2, zero, 0x7f -; slli a3, a2, 0x20 -; srli a5, a3, 0x20 -; slli a1, a0, 0x20 -; srli a3, a1, 0x20 -; add a0, a5, a3 -; srli a1, a0, 0x20 -; beqz a1, 8 +; addi a1, zero, 0x7f +; slli a1, a1, 0x20 +; srli a1, a1, 0x20 +; slli a0, a0, 0x20 +; srli a0, a0, 0x20 +; add a0, a1, a0 +; srli a3, a0, 0x20 +; beqz a3, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: user1 ; ret @@ -73,24 +73,24 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; slli a3,a0,32 -; srli a5,a3,32 +; slli a0,a0,32 +; srli a0,a0,32 ; slli a1,a1,32 -; 
srli a3,a1,32 -; add a0,a5,a3 -; srli a1,a0,32 -; trap_if user1##(a1 ne zero) +; srli a1,a1,32 +; add a0,a0,a1 +; srli a3,a0,32 +; trap_if user1##(a3 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x20 -; srli a5, a3, 0x20 +; slli a0, a0, 0x20 +; srli a0, a0, 0x20 ; slli a1, a1, 0x20 -; srli a3, a1, 0x20 -; add a0, a5, a3 -; srli a1, a0, 0x20 -; beqz a1, 8 +; srli a1, a1, 0x20 +; add a0, a0, a1 +; srli a3, a0, 0x20 +; beqz a3, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: user1 ; ret @@ -104,16 +104,16 @@ block0(v0: i64): ; VCode: ; block0: ; mv a1,a0 -; li a3,127 -; add a0,a1,a3 +; li a0,127 +; add a0,a1,a0 ; trap_if user1##(a0 ult a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 ; mv a1, a0 -; addi a3, zero, 0x7f -; add a0, a1, a3 +; addi a0, zero, 0x7f +; add a0, a1, a0 ; bgeu a0, a1, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: user1 ; ret @@ -127,17 +127,21 @@ block0(v0: i64): ; VCode: ; block0: -; li a3,127 -; add a0,a3,a0 -; trap_if user1##(a0 ult a3) +; mv a1,a0 +; li a0,127 +; add a1,a0,a1 +; trap_if user1##(a1 ult a0) +; mv a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 0x7f -; add a0, a3, a0 -; bgeu a0, a3, 8 +; mv a1, a0 +; addi a0, zero, 0x7f +; add a1, a0, a1 +; bgeu a1, a0, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: user1 +; mv a0, a1 ; ret function %f4(i64, i64) -> i64 { @@ -148,16 +152,18 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; mv a5,a0 -; add a0,a5,a1 -; trap_if user1##(a0 ult a5) +; mv a2,a0 +; add a0,a2,a1 +; mv a1,a2 +; trap_if user1##(a0 ult a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a5, a0 -; add a0, a5, a1 -; bgeu a0, a5, 8 +; mv a2, a0 +; add a0, a2, a1 +; mv a1, a2 +; bgeu a0, a1, 8 ; .byte 0x00, 0x00, 0x00, 0x00 ; trap: user1 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/umax-zbb.clif b/cranelift/filetests/filetests/isa/riscv64/umax-zbb.clif index 583b7f14efef..b08a16e3081d 100644 --- a/cranelift/filetests/filetests/isa/riscv64/umax-zbb.clif +++ b/cranelift/filetests/filetests/isa/riscv64/umax-zbb.clif @@ -10,16 +10,16 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; andi a3,a0,255 -; andi a5,a1,255 -; maxu a0,a3,a5 +; andi a0,a0,255 +; andi a1,a1,255 +; maxu a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a3, a0, 0xff -; andi a5, a1, 0xff -; .byte 0x33, 0xf5, 0xf6, 0x0a +; andi a0, a0, 0xff +; andi a1, a1, 0xff +; .byte 0x33, 0x75, 0xb5, 0x0a ; ret function %umax_i16(i16, i16) -> i16{ @@ -30,16 +30,16 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; zext.h a3,a0 -; zext.h a5,a1 -; maxu a0,a3,a5 +; zext.h a0,a0 +; zext.h a1,a1 +; maxu a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0xbb, 0x46, 0x05, 0x08 -; .byte 0xbb, 0xc7, 0x05, 0x08 -; .byte 0x33, 0xf5, 0xf6, 0x0a +; .byte 0x3b, 0x45, 0x05, 0x08 +; .byte 0xbb, 0xc5, 0x05, 0x08 +; .byte 0x33, 0x75, 0xb5, 0x0a ; ret function %umax_i32(i32, i32) -> i32{ @@ -50,20 +50,20 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; slli a3,a0,32 -; srli a5,a3,32 +; slli a0,a0,32 +; srli a0,a0,32 ; slli a1,a1,32 -; srli a3,a1,32 -; maxu a0,a5,a3 +; srli a1,a1,32 +; maxu a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x20 -; srli a5, a3, 0x20 +; slli a0, a0, 0x20 +; srli a0, a0, 0x20 ; slli a1, a1, 0x20 -; srli a3, a1, 0x20 -; .byte 0x33, 0xf5, 0xd7, 0x0a +; srli a1, a1, 0x20 +; .byte 0x33, 0x75, 0xb5, 0x0a ; ret function %umax_i64(i64, i64) -> i64{ @@ -89,56 +89,29 @@ block0(v0: i128, v1: i128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-16 -; sd 
s1,8(sp) -; sd s9,0(sp) ; block0: -; sltu a5,a3,a1 -; sltu s1,a2,a0 -; xor a4,a3,a1 -; mv s9,a1 -; select a5,s1,a5##condition=(a4 eq zero) -; mv a4,a0 -; select [a0,a1],[a4,s9],[a2,a3]##condition=(a5 ne zero) -; ld s1,8(sp) -; ld s9,0(sp) -; addi sp,sp,16 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; sltu a4,a3,a1 +; sltu t2,a2,a0 +; xor a5,a3,a1 +; mv a6,a1 +; select a4,t2,a4##condition=(a5 eq zero) +; mv a5,a0 +; select [a0,a1],[a5,a6],[a2,a3]##condition=(a4 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x10 -; sd s1, 8(sp) -; sd s9, 0(sp) -; block1: ; offset 0x1c -; sltu a5, a3, a1 -; sltu s1, a2, a0 -; xor a4, a3, a1 -; mv s9, a1 -; bnez a4, 8 -; mv a5, s1 -; mv a4, a0 -; mv a0, a4 -; mv a1, s9 -; bnez a5, 0xc +; sltu a4, a3, a1 +; sltu t2, a2, a0 +; xor a5, a3, a1 +; mv a6, a1 +; bnez a5, 8 +; mv a4, t2 +; mv a5, a0 +; mv a0, a5 +; mv a1, a6 +; bnez a4, 0xc ; mv a0, a2 ; mv a1, a3 -; ld s1, 8(sp) -; ld s9, 0(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/umax.clif b/cranelift/filetests/filetests/isa/riscv64/umax.clif index 4895758b78ef..dcc2a536aed5 100644 --- a/cranelift/filetests/filetests/isa/riscv64/umax.clif +++ b/cranelift/filetests/filetests/isa/riscv64/umax.clif @@ -10,18 +10,18 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; andi a3,a0,255 -; andi a5,a1,255 -; select a0,a3,a5##condition=(a3 ugt a5) +; andi a2,a0,255 +; andi a1,a1,255 +; select a0,a2,a1##condition=(a2 ugt a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a3, a0, 0xff -; andi a5, a1, 0xff -; mv a0, a3 -; bltu a5, a3, 8 -; mv a0, a5 +; andi a2, a0, 0xff +; andi a1, a1, 0xff +; mv a0, a2 +; bltu a1, a2, 8 +; mv a0, a1 ; ret function %umax_i16(i16, i16) -> i16{ @@ -32,22 +32,22 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; slli a3,a0,48 -; srli a5,a3,48 -; slli a1,a1,48 -; srli a3,a1,48 -; select a0,a5,a3##condition=(a5 ugt a3) +; slli a0,a0,48 +; srli a2,a0,48 +; slli a0,a1,48 +; srli a1,a0,48 +; select a0,a2,a1##condition=(a2 ugt a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x30 -; srli a5, a3, 0x30 -; slli a1, a1, 0x30 -; srli a3, a1, 0x30 -; mv a0, a5 -; bltu a3, a5, 8 -; mv a0, a3 +; slli a0, a0, 0x30 +; srli a2, a0, 0x30 +; slli a0, a1, 0x30 +; srli a1, a0, 0x30 +; mv a0, a2 +; bltu a1, a2, 8 +; mv a0, a1 ; ret function %umax_i32(i32, i32) -> i32{ @@ -58,22 +58,22 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; slli a3,a0,32 -; srli a5,a3,32 -; slli a1,a1,32 -; srli a3,a1,32 -; select a0,a5,a3##condition=(a5 ugt a3) +; slli a0,a0,32 +; srli a2,a0,32 +; slli a0,a1,32 +; srli a1,a0,32 +; select a0,a2,a1##condition=(a2 ugt a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x20 -; srli a5, a3, 0x20 -; slli a1, a1, 0x20 -; srli a3, a1, 0x20 -; mv a0, a5 -; bltu a3, a5, 8 -; mv a0, a3 +; slli a0, a0, 0x20 +; srli a2, a0, 0x20 +; slli a0, a1, 0x20 +; srli a1, a0, 0x20 +; mv a0, a2 +; bltu a1, a2, 8 +; mv a0, a1 ; ret function %umax_i64(i64, i64) -> i64{ @@ -84,15 +84,15 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; mv a4,a0 -; select a0,a4,a1##condition=(a4 ugt a1) +; mv a2,a0 +; select a0,a2,a1##condition=(a2 ugt a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a4, a0 -; mv a0, a4 -; bltu a1, a4, 8 +; mv a2, a0 +; mv a0, a2 +; bltu a1, a2, 8 ; mv a0, a1 ; ret @@ -103,56 +103,29 @@ block0(v0: i128, v1: i128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd 
fp,0(sp) -; mv fp,sp -; addi sp,sp,-16 -; sd s1,8(sp) -; sd s9,0(sp) ; block0: -; sltu a5,a3,a1 -; sltu s1,a2,a0 -; xor a4,a3,a1 -; mv s9,a1 -; select a5,s1,a5##condition=(a4 eq zero) -; mv a4,a0 -; select [a0,a1],[a4,s9],[a2,a3]##condition=(a5 ne zero) -; ld s1,8(sp) -; ld s9,0(sp) -; addi sp,sp,16 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; sltu a4,a3,a1 +; sltu t2,a2,a0 +; xor a5,a3,a1 +; mv a6,a1 +; select a4,t2,a4##condition=(a5 eq zero) +; mv a5,a0 +; select [a0,a1],[a5,a6],[a2,a3]##condition=(a4 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x10 -; sd s1, 8(sp) -; sd s9, 0(sp) -; block1: ; offset 0x1c -; sltu a5, a3, a1 -; sltu s1, a2, a0 -; xor a4, a3, a1 -; mv s9, a1 -; bnez a4, 8 -; mv a5, s1 -; mv a4, a0 -; mv a0, a4 -; mv a1, s9 -; bnez a5, 0xc +; sltu a4, a3, a1 +; sltu t2, a2, a0 +; xor a5, a3, a1 +; mv a6, a1 +; bnez a5, 8 +; mv a4, t2 +; mv a5, a0 +; mv a0, a5 +; mv a1, a6 +; bnez a4, 0xc ; mv a0, a2 ; mv a1, a3 -; ld s1, 8(sp) -; ld s9, 0(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/umin-zbb.clif b/cranelift/filetests/filetests/isa/riscv64/umin-zbb.clif index 1aa46c528611..28edcb0a3e1f 100644 --- a/cranelift/filetests/filetests/isa/riscv64/umin-zbb.clif +++ b/cranelift/filetests/filetests/isa/riscv64/umin-zbb.clif @@ -10,16 +10,16 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; andi a3,a0,255 -; andi a5,a1,255 -; minu a0,a3,a5 +; andi a0,a0,255 +; andi a1,a1,255 +; minu a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a3, a0, 0xff -; andi a5, a1, 0xff -; .byte 0x33, 0xd5, 0xf6, 0x0a +; andi a0, a0, 0xff +; andi a1, a1, 0xff +; .byte 0x33, 0x55, 0xb5, 0x0a ; ret function %umin_i16(i16, i16) -> i16{ @@ -30,16 +30,16 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; zext.h a3,a0 -; zext.h a5,a1 -; minu a0,a3,a5 +; zext.h a0,a0 +; zext.h a1,a1 +; minu a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0xbb, 0x46, 0x05, 0x08 -; .byte 0xbb, 0xc7, 0x05, 0x08 -; .byte 0x33, 0xd5, 0xf6, 0x0a +; .byte 0x3b, 0x45, 0x05, 0x08 +; .byte 0xbb, 0xc5, 0x05, 0x08 +; .byte 0x33, 0x55, 0xb5, 0x0a ; ret function %umin_i32(i32, i32) -> i32{ @@ -50,20 +50,20 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; slli a3,a0,32 -; srli a5,a3,32 +; slli a0,a0,32 +; srli a0,a0,32 ; slli a1,a1,32 -; srli a3,a1,32 -; minu a0,a5,a3 +; srli a1,a1,32 +; minu a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x20 -; srli a5, a3, 0x20 +; slli a0, a0, 0x20 +; srli a0, a0, 0x20 ; slli a1, a1, 0x20 -; srli a3, a1, 0x20 -; .byte 0x33, 0xd5, 0xd7, 0x0a +; srli a1, a1, 0x20 +; .byte 0x33, 0x55, 0xb5, 0x0a ; ret function %umin_i64(i64, i64) -> i64{ @@ -89,56 +89,29 @@ block0(v0: i128, v1: i128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-16 -; sd s1,8(sp) -; sd s9,0(sp) ; block0: -; sltu a5,a1,a3 -; sltu s1,a0,a2 -; xor a4,a1,a3 -; mv s9,a1 -; select a5,s1,a5##condition=(a4 eq zero) -; mv a4,a0 -; select [a0,a1],[a4,s9],[a2,a3]##condition=(a5 ne zero) -; ld s1,8(sp) -; ld s9,0(sp) -; addi sp,sp,16 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; sltu a4,a1,a3 +; sltu t2,a0,a2 +; xor a5,a1,a3 +; mv a6,a1 +; select a4,t2,a4##condition=(a5 eq zero) +; mv a5,a0 +; select [a0,a1],[a5,a6],[a2,a3]##condition=(a4 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; 
addi sp, sp, -0x10 -; sd s1, 8(sp) -; sd s9, 0(sp) -; block1: ; offset 0x1c -; sltu a5, a1, a3 -; sltu s1, a0, a2 -; xor a4, a1, a3 -; mv s9, a1 -; bnez a4, 8 -; mv a5, s1 -; mv a4, a0 -; mv a0, a4 -; mv a1, s9 -; bnez a5, 0xc +; sltu a4, a1, a3 +; sltu t2, a0, a2 +; xor a5, a1, a3 +; mv a6, a1 +; bnez a5, 8 +; mv a4, t2 +; mv a5, a0 +; mv a0, a5 +; mv a1, a6 +; bnez a4, 0xc ; mv a0, a2 ; mv a1, a3 -; ld s1, 8(sp) -; ld s9, 0(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/umin.clif b/cranelift/filetests/filetests/isa/riscv64/umin.clif index fb99352d36bb..7f0176baa6b9 100644 --- a/cranelift/filetests/filetests/isa/riscv64/umin.clif +++ b/cranelift/filetests/filetests/isa/riscv64/umin.clif @@ -10,18 +10,18 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; andi a3,a0,255 -; andi a5,a1,255 -; select a0,a3,a5##condition=(a3 ult a5) +; andi a2,a0,255 +; andi a1,a1,255 +; select a0,a2,a1##condition=(a2 ult a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a3, a0, 0xff -; andi a5, a1, 0xff -; mv a0, a3 -; bltu a3, a5, 8 -; mv a0, a5 +; andi a2, a0, 0xff +; andi a1, a1, 0xff +; mv a0, a2 +; bltu a2, a1, 8 +; mv a0, a1 ; ret function %umin_i16(i16, i16) -> i16{ @@ -32,22 +32,22 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; slli a3,a0,48 -; srli a5,a3,48 -; slli a1,a1,48 -; srli a3,a1,48 -; select a0,a5,a3##condition=(a5 ult a3) +; slli a0,a0,48 +; srli a2,a0,48 +; slli a0,a1,48 +; srli a1,a0,48 +; select a0,a2,a1##condition=(a2 ult a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x30 -; srli a5, a3, 0x30 -; slli a1, a1, 0x30 -; srli a3, a1, 0x30 -; mv a0, a5 -; bltu a5, a3, 8 -; mv a0, a3 +; slli a0, a0, 0x30 +; srli a2, a0, 0x30 +; slli a0, a1, 0x30 +; srli a1, a0, 0x30 +; mv a0, a2 +; bltu a2, a1, 8 +; mv a0, a1 ; ret function %umin_i32(i32, i32) -> i32{ @@ -58,22 +58,22 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; slli a3,a0,32 -; srli a5,a3,32 -; slli a1,a1,32 -; srli a3,a1,32 -; select a0,a5,a3##condition=(a5 ult a3) +; slli a0,a0,32 +; srli a2,a0,32 +; slli a0,a1,32 +; srli a1,a0,32 +; select a0,a2,a1##condition=(a2 ult a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x20 -; srli a5, a3, 0x20 -; slli a1, a1, 0x20 -; srli a3, a1, 0x20 -; mv a0, a5 -; bltu a5, a3, 8 -; mv a0, a3 +; slli a0, a0, 0x20 +; srli a2, a0, 0x20 +; slli a0, a1, 0x20 +; srli a1, a0, 0x20 +; mv a0, a2 +; bltu a2, a1, 8 +; mv a0, a1 ; ret function %umin_i64(i64, i64) -> i64{ @@ -84,15 +84,15 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; mv a4,a0 -; select a0,a4,a1##condition=(a4 ult a1) +; mv a2,a0 +; select a0,a2,a1##condition=(a2 ult a1) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a4, a0 -; mv a0, a4 -; bltu a4, a1, 8 +; mv a2, a0 +; mv a0, a2 +; bltu a2, a1, 8 ; mv a0, a1 ; ret @@ -103,56 +103,29 @@ block0(v0: i128, v1: i128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-16 -; sd s1,8(sp) -; sd s9,0(sp) ; block0: -; sltu a5,a1,a3 -; sltu s1,a0,a2 -; xor a4,a1,a3 -; mv s9,a1 -; select a5,s1,a5##condition=(a4 eq zero) -; mv a4,a0 -; select [a0,a1],[a4,s9],[a2,a3]##condition=(a5 ne zero) -; ld s1,8(sp) -; ld s9,0(sp) -; addi sp,sp,16 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; sltu a4,a1,a3 +; sltu t2,a0,a2 +; xor a5,a1,a3 +; mv a6,a1 +; select a4,t2,a4##condition=(a5 eq zero) +; mv a5,a0 +; select [a0,a1],[a5,a6],[a2,a3]##condition=(a4 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; 
sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x10 -; sd s1, 8(sp) -; sd s9, 0(sp) -; block1: ; offset 0x1c -; sltu a5, a1, a3 -; sltu s1, a0, a2 -; xor a4, a1, a3 -; mv s9, a1 -; bnez a4, 8 -; mv a5, s1 -; mv a4, a0 -; mv a0, a4 -; mv a1, s9 -; bnez a5, 0xc +; sltu a4, a1, a3 +; sltu t2, a0, a2 +; xor a5, a1, a3 +; mv a6, a1 +; bnez a5, 8 +; mv a4, t2 +; mv a5, a0 +; mv a0, a5 +; mv a1, a6 +; bnez a4, 0xc ; mv a0, a2 ; mv a1, a3 -; ld s1, 8(sp) -; ld s9, 0(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/user_stack_maps.clif b/cranelift/filetests/filetests/isa/riscv64/user_stack_maps.clif index 30e78f588c8d..b3eb45027d8f 100644 --- a/cranelift/filetests/filetests/isa/riscv64/user_stack_maps.clif +++ b/cranelift/filetests/filetests/isa/riscv64/user_stack_maps.clif @@ -138,45 +138,45 @@ block0(v0: i8, v1: i16, v2: i32, v3: i64, v4: f32, v5: f64): ; sd fp,0(sp) ; mv fp,sp ; addi sp,sp,-160 -; sd s1,152(sp) -; sd s2,144(sp) -; sd s3,136(sp) -; sd s4,128(sp) -; sd s9,120(sp) -; fsd fs0,112(sp) -; fsd fs2,104(sp) +; sd s2,152(sp) +; sd s3,144(sp) +; sd s4,136(sp) +; sd s6,128(sp) +; sd s8,120(sp) +; fsd fs2,112(sp) +; fsd fs3,104(sp) ; block0: -; mv s2,a0 +; mv s4,a0 ; sb a1,0(slot) -; mv s1,a1 +; mv s3,a1 ; sh a2,8(slot) -; mv s9,a2 +; mv s2,a2 ; sw a3,16(slot) -; mv s4,a3 +; mv s8,a3 ; fsw fa0,20(slot) -; fmv.d fs2,fa0 +; fmv.d fs3,fa0 ; sd a4,24(slot) -; mv s3,a4 +; mv s6,a4 ; fsd fa1,32(slot) -; fmv.d fs0,fa1 +; fmv.d fs2,fa1 ; call userextname0 ; ; UserStackMap { by_type: [(types::I8, CompoundBitSet {0}), (types::I16, CompoundBitSet {8}), (types::I32, CompoundBitSet {16}), (types::F32, CompoundBitSet {20}), (types::I64, CompoundBitSet {24}), (types::F64, CompoundBitSet {32})], sp_to_sized_stack_slots: None } -; mv a0,s2 -; mv a3,s4 +; mv a0,s4 +; mv a3,s8 ; sw a3,0(a0) -; mv a4,s3 +; mv a4,s6 ; sd a4,8(a0) -; mv a0,s1 -; mv a1,s9 -; fmv.d fa0,fs2 -; fmv.d fa1,fs0 -; ld s1,152(sp) -; ld s2,144(sp) -; ld s3,136(sp) -; ld s4,128(sp) -; ld s9,120(sp) -; fld fs0,112(sp) -; fld fs2,104(sp) +; mv a0,s3 +; mv a1,s2 +; fmv.d fa0,fs3 +; fmv.d fa1,fs2 +; ld s2,152(sp) +; ld s3,144(sp) +; ld s4,136(sp) +; ld s6,128(sp) +; ld s8,120(sp) +; fld fs2,112(sp) +; fld fs3,104(sp) ; addi sp,sp,160 ; ld ra,8(sp) ; ld fp,0(sp) @@ -190,45 +190,45 @@ block0(v0: i8, v1: i16, v2: i32, v3: i64, v4: f32, v5: f64): ; sd s0, 0(sp) ; mv s0, sp ; addi sp, sp, -0xa0 -; sd s1, 0x98(sp) -; sd s2, 0x90(sp) -; sd s3, 0x88(sp) -; sd s4, 0x80(sp) -; sd s9, 0x78(sp) -; fsd fs0, 0x70(sp) -; fsd fs2, 0x68(sp) +; sd s2, 0x98(sp) +; sd s3, 0x90(sp) +; sd s4, 0x88(sp) +; sd s6, 0x80(sp) +; sd s8, 0x78(sp) +; fsd fs2, 0x70(sp) +; fsd fs3, 0x68(sp) ; block1: ; offset 0x30 -; mv s2, a0 +; mv s4, a0 ; sb a1, 0(sp) -; mv s1, a1 +; mv s3, a1 ; sh a2, 8(sp) -; mv s9, a2 +; mv s2, a2 ; sw a3, 0x10(sp) -; mv s4, a3 +; mv s8, a3 ; fsw fa0, 0x14(sp) -; fmv.d fs2, fa0 +; fmv.d fs3, fa0 ; sd a4, 0x18(sp) -; mv s3, a4 +; mv s6, a4 ; fsd fa1, 0x20(sp) -; fmv.d fs0, fa1 +; fmv.d fs2, fa1 ; auipc ra, 0 ; reloc_external RiscvCallPlt u0:0 0 ; jalr ra -; mv a0, s2 -; mv a3, s4 +; mv a0, s4 +; mv a3, s8 ; sw a3, 0(a0) -; mv a4, s3 +; mv a4, s6 ; sd a4, 8(a0) -; mv a0, s1 -; mv a1, s9 -; fmv.d fa0, fs2 -; fmv.d fa1, fs0 -; ld s1, 0x98(sp) -; ld s2, 0x90(sp) -; ld s3, 0x88(sp) -; ld s4, 0x80(sp) -; ld s9, 0x78(sp) -; fld fs0, 0x70(sp) -; fld fs2, 0x68(sp) +; mv a0, s3 +; mv a1, s2 +; fmv.d fa0, fs3 +; fmv.d fa1, fs2 +; ld s2, 0x98(sp) +; ld s3, 0x90(sp) 
+; ld s4, 0x88(sp) +; ld s6, 0x80(sp) +; ld s8, 0x78(sp) +; fld fs2, 0x70(sp) +; fld fs3, 0x68(sp) ; addi sp, sp, 0xa0 ; ld ra, 8(sp) ; ld s0, 0(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/ushr-const.clif b/cranelift/filetests/filetests/isa/riscv64/ushr-const.clif index 309df79c26fb..d7a3a7881073 100644 --- a/cranelift/filetests/filetests/isa/riscv64/ushr-const.clif +++ b/cranelift/filetests/filetests/isa/riscv64/ushr-const.clif @@ -12,14 +12,14 @@ block0(v0: i8): ; VCode: ; block0: -; andi a2,a0,255 -; srliw a0,a2,5 +; andi a0,a0,255 +; srliw a0,a0,5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a2, a0, 0xff -; srliw a0, a2, 5 +; andi a0, a0, 0xff +; srliw a0, a0, 5 ; ret function %ushr_i8_const_i16(i8) -> i8 { @@ -31,14 +31,14 @@ block0(v0: i8): ; VCode: ; block0: -; andi a2,a0,255 -; srliw a0,a2,5 +; andi a0,a0,255 +; srliw a0,a0,5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a2, a0, 0xff -; srliw a0, a2, 5 +; andi a0, a0, 0xff +; srliw a0, a0, 5 ; ret function %ushr_i8_const_i32(i8) -> i8 { @@ -50,14 +50,14 @@ block0(v0: i8): ; VCode: ; block0: -; andi a2,a0,255 -; srliw a0,a2,5 +; andi a0,a0,255 +; srliw a0,a0,5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a2, a0, 0xff -; srliw a0, a2, 5 +; andi a0, a0, 0xff +; srliw a0, a0, 5 ; ret function %ushr_i8_const_i64(i8) -> i8 { @@ -69,14 +69,14 @@ block0(v0: i8): ; VCode: ; block0: -; andi a2,a0,255 -; srliw a0,a2,5 +; andi a0,a0,255 +; srliw a0,a0,5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a2, a0, 0xff -; srliw a0, a2, 5 +; andi a0, a0, 0xff +; srliw a0, a0, 5 ; ret function %ushr_i8_const_i128(i8) -> i8 { @@ -89,14 +89,14 @@ block0(v0: i8): ; VCode: ; block0: -; andi a2,a0,255 -; srliw a0,a2,5 +; andi a0,a0,255 +; srliw a0,a0,5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a2, a0, 0xff -; srliw a0, a2, 5 +; andi a0, a0, 0xff +; srliw a0, a0, 5 ; ret function %ushr_i16_const_i8(i16) -> i16 { @@ -108,16 +108,16 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srli a4,a2,48 -; srliw a0,a4,5 +; slli a0,a0,48 +; srli a0,a0,48 +; srliw a0,a0,5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srli a4, a2, 0x30 -; srliw a0, a4, 5 +; slli a0, a0, 0x30 +; srli a0, a0, 0x30 +; srliw a0, a0, 5 ; ret function %ushr_i16_const_i16(i16) -> i16 { @@ -129,16 +129,16 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srli a4,a2,48 -; srliw a0,a4,5 +; slli a0,a0,48 +; srli a0,a0,48 +; srliw a0,a0,5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srli a4, a2, 0x30 -; srliw a0, a4, 5 +; slli a0, a0, 0x30 +; srli a0, a0, 0x30 +; srliw a0, a0, 5 ; ret function %ushr_i16_const_i32(i16) -> i16 { @@ -150,16 +150,16 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srli a4,a2,48 -; srliw a0,a4,5 +; slli a0,a0,48 +; srli a0,a0,48 +; srliw a0,a0,5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srli a4, a2, 0x30 -; srliw a0, a4, 5 +; slli a0, a0, 0x30 +; srli a0, a0, 0x30 +; srliw a0, a0, 5 ; ret function %ushr_i16_const_i64(i16) -> i16 { @@ -171,16 +171,16 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srli a4,a2,48 -; srliw a0,a4,5 +; slli a0,a0,48 +; srli a0,a0,48 +; srliw a0,a0,5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srli a4, a2, 0x30 -; srliw a0, a4, 5 +; slli a0, a0, 0x30 +; srli a0, a0, 0x30 +; srliw a0, a0, 5 ; ret function %ushr_i16_const_i128(i16) -> i16 { @@ -193,16 +193,16 @@ block0(v0: i16): ; VCode: ; block0: -; slli a2,a0,48 -; srli a4,a2,48 -; srliw 
a0,a4,5 +; slli a0,a0,48 +; srli a0,a0,48 +; srliw a0,a0,5 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a2, a0, 0x30 -; srli a4, a2, 0x30 -; srliw a0, a4, 5 +; slli a0, a0, 0x30 +; srli a0, a0, 0x30 +; srliw a0, a0, 5 ; ret function %ushr_i32_const_i8(i32) -> i32 { @@ -386,40 +386,41 @@ block0(v0: i128): ; VCode: ; block0: -; li a3,5 -; andi a4,a3,63 +; mv a3,a0 +; li a5,5 +; andi a0,a5,63 ; li a2,64 -; sub a2,a2,a4 -; sll a5,a1,a2 -; select a2,zero,a5##condition=(a4 eq zero) -; srl a5,a0,a4 -; or a5,a2,a5 -; li t0,64 -; srl a2,a1,a4 -; andi a4,a3,127 -; select [a0,a1],[a2,zero],[a5,a2]##condition=(a4 uge t0) +; sub a2,a2,a0 +; sll a2,a1,a2 +; select a2,zero,a2##condition=(a0 eq zero) +; srl a4,a3,a0 +; or a3,a2,a4 +; li a2,64 +; srl a4,a1,a0 +; andi a5,a5,127 +; select [a0,a1],[a4,zero],[a3,a4]##condition=(a5 uge a2) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 5 -; andi a4, a3, 0x3f +; mv a3, a0 +; addi a5, zero, 5 +; andi a0, a5, 0x3f ; addi a2, zero, 0x40 -; sub a2, a2, a4 -; sll a5, a1, a2 +; sub a2, a2, a0 +; sll a2, a1, a2 +; bnez a0, 8 ; mv a2, zero -; beqz a4, 8 -; mv a2, a5 -; srl a5, a0, a4 -; or a5, a2, a5 -; addi t0, zero, 0x40 -; srl a2, a1, a4 -; andi a4, a3, 0x7f -; mv a0, a2 +; srl a4, a3, a0 +; or a3, a2, a4 +; addi a2, zero, 0x40 +; srl a4, a1, a0 +; andi a5, a5, 0x7f +; mv a0, a4 ; mv a1, zero -; bgeu a4, t0, 0xc -; mv a0, a5 -; mv a1, a2 +; bgeu a5, a2, 0xc +; mv a0, a3 +; mv a1, a4 ; ret function %ushr_i128_const_i16(i128) -> i128 { @@ -431,40 +432,41 @@ block0(v0: i128): ; VCode: ; block0: -; li a3,5 -; andi a4,a3,63 +; mv a3,a0 +; li a5,5 +; andi a0,a5,63 ; li a2,64 -; sub a2,a2,a4 -; sll a5,a1,a2 -; select a2,zero,a5##condition=(a4 eq zero) -; srl a5,a0,a4 -; or a5,a2,a5 -; li t0,64 -; srl a2,a1,a4 -; andi a4,a3,127 -; select [a0,a1],[a2,zero],[a5,a2]##condition=(a4 uge t0) +; sub a2,a2,a0 +; sll a2,a1,a2 +; select a2,zero,a2##condition=(a0 eq zero) +; srl a4,a3,a0 +; or a3,a2,a4 +; li a2,64 +; srl a4,a1,a0 +; andi a5,a5,127 +; select [a0,a1],[a4,zero],[a3,a4]##condition=(a5 uge a2) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 5 -; andi a4, a3, 0x3f +; mv a3, a0 +; addi a5, zero, 5 +; andi a0, a5, 0x3f ; addi a2, zero, 0x40 -; sub a2, a2, a4 -; sll a5, a1, a2 +; sub a2, a2, a0 +; sll a2, a1, a2 +; bnez a0, 8 ; mv a2, zero -; beqz a4, 8 -; mv a2, a5 -; srl a5, a0, a4 -; or a5, a2, a5 -; addi t0, zero, 0x40 -; srl a2, a1, a4 -; andi a4, a3, 0x7f -; mv a0, a2 +; srl a4, a3, a0 +; or a3, a2, a4 +; addi a2, zero, 0x40 +; srl a4, a1, a0 +; andi a5, a5, 0x7f +; mv a0, a4 ; mv a1, zero -; bgeu a4, t0, 0xc -; mv a0, a5 -; mv a1, a2 +; bgeu a5, a2, 0xc +; mv a0, a3 +; mv a1, a4 ; ret function %ushr_i128_const_i32(i128) -> i128 { @@ -476,40 +478,41 @@ block0(v0: i128): ; VCode: ; block0: -; li a3,5 -; andi a4,a3,63 +; mv a3,a0 +; li a5,5 +; andi a0,a5,63 +; li a2,64 +; sub a2,a2,a0 +; sll a2,a1,a2 +; select a2,zero,a2##condition=(a0 eq zero) +; srl a4,a3,a0 +; or a3,a2,a4 ; li a2,64 -; sub a2,a2,a4 -; sll a5,a1,a2 -; select a2,zero,a5##condition=(a4 eq zero) -; srl a5,a0,a4 -; or a5,a2,a5 -; li t0,64 -; srl a2,a1,a4 -; andi a4,a3,127 -; select [a0,a1],[a2,zero],[a5,a2]##condition=(a4 uge t0) +; srl a4,a1,a0 +; andi a5,a5,127 +; select [a0,a1],[a4,zero],[a3,a4]##condition=(a5 uge a2) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 5 -; andi a4, a3, 0x3f +; mv a3, a0 +; addi a5, zero, 5 +; andi a0, a5, 0x3f ; addi a2, zero, 0x40 -; sub a2, a2, a4 -; sll a5, a1, a2 +; sub a2, a2, a0 +; sll a2, a1, a2 +; bnez a0, 
8 ; mv a2, zero -; beqz a4, 8 -; mv a2, a5 -; srl a5, a0, a4 -; or a5, a2, a5 -; addi t0, zero, 0x40 -; srl a2, a1, a4 -; andi a4, a3, 0x7f -; mv a0, a2 +; srl a4, a3, a0 +; or a3, a2, a4 +; addi a2, zero, 0x40 +; srl a4, a1, a0 +; andi a5, a5, 0x7f +; mv a0, a4 ; mv a1, zero -; bgeu a4, t0, 0xc -; mv a0, a5 -; mv a1, a2 +; bgeu a5, a2, 0xc +; mv a0, a3 +; mv a1, a4 ; ret function %ushr_i128_const_i64(i128) -> i128 { @@ -521,40 +524,41 @@ block0(v0: i128): ; VCode: ; block0: -; li a3,5 -; andi a4,a3,63 +; mv a3,a0 +; li a5,5 +; andi a0,a5,63 +; li a2,64 +; sub a2,a2,a0 +; sll a2,a1,a2 +; select a2,zero,a2##condition=(a0 eq zero) +; srl a4,a3,a0 +; or a3,a2,a4 ; li a2,64 -; sub a2,a2,a4 -; sll a5,a1,a2 -; select a2,zero,a5##condition=(a4 eq zero) -; srl a5,a0,a4 -; or a5,a2,a5 -; li t0,64 -; srl a2,a1,a4 -; andi a4,a3,127 -; select [a0,a1],[a2,zero],[a5,a2]##condition=(a4 uge t0) +; srl a4,a1,a0 +; andi a5,a5,127 +; select [a0,a1],[a4,zero],[a3,a4]##condition=(a5 uge a2) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi a3, zero, 5 -; andi a4, a3, 0x3f +; mv a3, a0 +; addi a5, zero, 5 +; andi a0, a5, 0x3f ; addi a2, zero, 0x40 -; sub a2, a2, a4 -; sll a5, a1, a2 +; sub a2, a2, a0 +; sll a2, a1, a2 +; bnez a0, 8 ; mv a2, zero -; beqz a4, 8 -; mv a2, a5 -; srl a5, a0, a4 -; or a5, a2, a5 -; addi t0, zero, 0x40 -; srl a2, a1, a4 -; andi a4, a3, 0x7f -; mv a0, a2 +; srl a4, a3, a0 +; or a3, a2, a4 +; addi a2, zero, 0x40 +; srl a4, a1, a0 +; andi a5, a5, 0x7f +; mv a0, a4 ; mv a1, zero -; bgeu a4, t0, 0xc -; mv a0, a5 -; mv a1, a2 +; bgeu a5, a2, 0xc +; mv a0, a3 +; mv a1, a4 ; ret function %ushr_i128_const_i128(i128) -> i128 { @@ -566,64 +570,43 @@ block0(v0: i128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-16 -; sd s11,8(sp) ; block0: -; li a4,5 -; li a5,0 -; andi a5,a4,63 -; li a2,64 -; sub a3,a2,a5 -; sll a2,a1,a3 -; select a2,zero,a2##condition=(a5 eq zero) -; srl a3,a0,a5 -; or s11,a2,a3 -; li a2,64 -; srl a3,a1,a5 -; andi a5,a4,127 -; select [a0,a1],[a3,zero],[s11,a3]##condition=(a5 uge a2) -; ld s11,8(sp) -; addi sp,sp,16 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; mv a4,a0 +; li a0,5 +; li a2,0 +; andi a2,a0,63 +; li a3,64 +; sub a3,a3,a2 +; sll a3,a1,a3 +; select a3,zero,a3##condition=(a2 eq zero) +; srl a5,a4,a2 +; or a4,a3,a5 +; li a3,64 +; srl a2,a1,a2 +; andi a5,a0,127 +; select [a0,a1],[a2,zero],[a4,a2]##condition=(a5 uge a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x10 -; sd s11, 8(sp) -; block1: ; offset 0x18 -; addi a4, zero, 5 -; mv a5, zero -; andi a5, a4, 0x3f -; addi a2, zero, 0x40 -; sub a3, a2, a5 -; sll a2, a1, a3 -; bnez a5, 8 +; mv a4, a0 +; addi a0, zero, 5 ; mv a2, zero -; srl a3, a0, a5 -; or s11, a2, a3 -; addi a2, zero, 0x40 -; srl a3, a1, a5 -; andi a5, a4, 0x7f -; mv a0, a3 +; andi a2, a0, 0x3f +; addi a3, zero, 0x40 +; sub a3, a3, a2 +; sll a3, a1, a3 +; bnez a2, 8 +; mv a3, zero +; srl a5, a4, a2 +; or a4, a3, a5 +; addi a3, zero, 0x40 +; srl a2, a1, a2 +; andi a5, a0, 0x7f +; mv a0, a2 ; mv a1, zero -; bgeu a5, a2, 0xc -; mv a0, s11 -; mv a1, a3 -; ld s11, 8(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 +; bgeu a5, a3, 0xc +; mv a0, a4 +; mv a1, a2 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/ushr.clif b/cranelift/filetests/filetests/isa/riscv64/ushr.clif index 2332389c33ef..86bfcdc7b33f 100644 --- a/cranelift/filetests/filetests/isa/riscv64/ushr.clif +++ 
b/cranelift/filetests/filetests/isa/riscv64/ushr.clif @@ -11,16 +11,16 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; andi a3,a0,255 -; andi a5,a1,7 -; srlw a0,a3,a5 +; andi a0,a0,255 +; andi a1,a1,7 +; srlw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a3, a0, 0xff -; andi a5, a1, 7 -; srlw a0, a3, a5 +; andi a0, a0, 0xff +; andi a1, a1, 7 +; srlw a0, a0, a1 ; ret function %ushr_i8_i16(i8, i16) -> i8 { @@ -31,16 +31,16 @@ block0(v0: i8, v1: i16): ; VCode: ; block0: -; andi a3,a0,255 -; andi a5,a1,7 -; srlw a0,a3,a5 +; andi a0,a0,255 +; andi a1,a1,7 +; srlw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a3, a0, 0xff -; andi a5, a1, 7 -; srlw a0, a3, a5 +; andi a0, a0, 0xff +; andi a1, a1, 7 +; srlw a0, a0, a1 ; ret function %ushr_i8_i32(i8, i32) -> i8 { @@ -51,16 +51,16 @@ block0(v0: i8, v1: i32): ; VCode: ; block0: -; andi a3,a0,255 -; andi a5,a1,7 -; srlw a0,a3,a5 +; andi a0,a0,255 +; andi a1,a1,7 +; srlw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a3, a0, 0xff -; andi a5, a1, 7 -; srlw a0, a3, a5 +; andi a0, a0, 0xff +; andi a1, a1, 7 +; srlw a0, a0, a1 ; ret function %ushr_i8_i64(i8, i64) -> i8 { @@ -71,16 +71,16 @@ block0(v0: i8, v1: i64): ; VCode: ; block0: -; andi a3,a0,255 -; andi a5,a1,7 -; srlw a0,a3,a5 +; andi a0,a0,255 +; andi a1,a1,7 +; srlw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a3, a0, 0xff -; andi a5, a1, 7 -; srlw a0, a3, a5 +; andi a0, a0, 0xff +; andi a1, a1, 7 +; srlw a0, a0, a1 ; ret function %ushr_i8_i128(i8, i128) -> i8 { @@ -91,16 +91,16 @@ block0(v0: i8, v1: i128): ; VCode: ; block0: -; andi a4,a0,255 -; andi a0,a1,7 -; srlw a0,a4,a0 +; andi a0,a0,255 +; andi a1,a1,7 +; srlw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a4, a0, 0xff -; andi a0, a1, 7 -; srlw a0, a4, a0 +; andi a0, a0, 0xff +; andi a1, a1, 7 +; srlw a0, a0, a1 ; ret function %ushr_i16_i8(i16, i8) -> i16 { @@ -111,18 +111,18 @@ block0(v0: i16, v1: i8): ; VCode: ; block0: -; slli a3,a0,48 -; srli a5,a3,48 +; slli a0,a0,48 +; srli a0,a0,48 ; andi a1,a1,15 -; srlw a0,a5,a1 +; srlw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x30 -; srli a5, a3, 0x30 +; slli a0, a0, 0x30 +; srli a0, a0, 0x30 ; andi a1, a1, 0xf -; srlw a0, a5, a1 +; srlw a0, a0, a1 ; ret function %ushr_i16_i16(i16, i16) -> i16 { @@ -133,18 +133,18 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; slli a3,a0,48 -; srli a5,a3,48 +; slli a0,a0,48 +; srli a0,a0,48 ; andi a1,a1,15 -; srlw a0,a5,a1 +; srlw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x30 -; srli a5, a3, 0x30 +; slli a0, a0, 0x30 +; srli a0, a0, 0x30 ; andi a1, a1, 0xf -; srlw a0, a5, a1 +; srlw a0, a0, a1 ; ret function %ushr_i16_i32(i16, i32) -> i16 { @@ -155,18 +155,18 @@ block0(v0: i16, v1: i32): ; VCode: ; block0: -; slli a3,a0,48 -; srli a5,a3,48 +; slli a0,a0,48 +; srli a0,a0,48 ; andi a1,a1,15 -; srlw a0,a5,a1 +; srlw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x30 -; srli a5, a3, 0x30 +; slli a0, a0, 0x30 +; srli a0, a0, 0x30 ; andi a1, a1, 0xf -; srlw a0, a5, a1 +; srlw a0, a0, a1 ; ret function %ushr_i16_i64(i16, i64) -> i16 { @@ -177,18 +177,18 @@ block0(v0: i16, v1: i64): ; VCode: ; block0: -; slli a3,a0,48 -; srli a5,a3,48 +; slli a0,a0,48 +; srli a0,a0,48 ; andi a1,a1,15 -; srlw a0,a5,a1 +; srlw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a3, a0, 0x30 -; srli a5, a3, 0x30 +; slli a0, a0, 0x30 +; srli a0, a0, 0x30 ; andi a1, a1, 0xf -; srlw a0, a5, a1 +; srlw a0, 
a0, a1 ; ret function %ushr_i16_i128(i16, i128) -> i16 { @@ -199,18 +199,18 @@ block0(v0: i16, v1: i128): ; VCode: ; block0: -; slli a4,a0,48 -; srli a0,a4,48 -; andi a2,a1,15 -; srlw a0,a0,a2 +; slli a0,a0,48 +; srli a0,a0,48 +; andi a1,a1,15 +; srlw a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slli a4, a0, 0x30 -; srli a0, a4, 0x30 -; andi a2, a1, 0xf -; srlw a0, a0, a2 +; slli a0, a0, 0x30 +; srli a0, a0, 0x30 +; andi a1, a1, 0xf +; srlw a0, a0, a1 ; ret function %ushr_i32_i8(i32, i8) -> i32 { @@ -381,38 +381,39 @@ block0(v0: i128, v1: i8): ; VCode: ; block0: -; andi a4,a2,63 +; mv a4,a0 +; andi a0,a2,63 +; li a3,64 +; sub a3,a3,a0 +; sll a3,a1,a3 +; select a3,zero,a3##condition=(a0 eq zero) +; srl a4,a4,a0 +; or a4,a3,a4 ; li a3,64 -; sub a3,a3,a4 -; sll a5,a1,a3 -; select a3,zero,a5##condition=(a4 eq zero) -; srl a5,a0,a4 -; or a5,a3,a5 -; li t0,64 -; srl a3,a1,a4 -; andi a4,a2,127 -; select [a0,a1],[a3,zero],[a5,a3]##condition=(a4 uge t0) +; srl a5,a1,a0 +; andi a2,a2,127 +; select [a0,a1],[a5,zero],[a4,a5]##condition=(a2 uge a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a4, a2, 0x3f +; mv a4, a0 +; andi a0, a2, 0x3f ; addi a3, zero, 0x40 -; sub a3, a3, a4 -; sll a5, a1, a3 +; sub a3, a3, a0 +; sll a3, a1, a3 +; bnez a0, 8 ; mv a3, zero -; beqz a4, 8 -; mv a3, a5 -; srl a5, a0, a4 -; or a5, a3, a5 -; addi t0, zero, 0x40 -; srl a3, a1, a4 -; andi a4, a2, 0x7f -; mv a0, a3 -; mv a1, zero -; bgeu a4, t0, 0xc +; srl a4, a4, a0 +; or a4, a3, a4 +; addi a3, zero, 0x40 +; srl a5, a1, a0 +; andi a2, a2, 0x7f ; mv a0, a5 -; mv a1, a3 +; mv a1, zero +; bgeu a2, a3, 0xc +; mv a0, a4 +; mv a1, a5 ; ret function %ushr_i128_i16(i128, i16) -> i128 { @@ -423,38 +424,39 @@ block0(v0: i128, v1: i16): ; VCode: ; block0: -; andi a4,a2,63 +; mv a4,a0 +; andi a0,a2,63 +; li a3,64 +; sub a3,a3,a0 +; sll a3,a1,a3 +; select a3,zero,a3##condition=(a0 eq zero) +; srl a4,a4,a0 +; or a4,a3,a4 ; li a3,64 -; sub a3,a3,a4 -; sll a5,a1,a3 -; select a3,zero,a5##condition=(a4 eq zero) -; srl a5,a0,a4 -; or a5,a3,a5 -; li t0,64 -; srl a3,a1,a4 -; andi a4,a2,127 -; select [a0,a1],[a3,zero],[a5,a3]##condition=(a4 uge t0) +; srl a5,a1,a0 +; andi a2,a2,127 +; select [a0,a1],[a5,zero],[a4,a5]##condition=(a2 uge a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a4, a2, 0x3f +; mv a4, a0 +; andi a0, a2, 0x3f ; addi a3, zero, 0x40 -; sub a3, a3, a4 -; sll a5, a1, a3 +; sub a3, a3, a0 +; sll a3, a1, a3 +; bnez a0, 8 ; mv a3, zero -; beqz a4, 8 -; mv a3, a5 -; srl a5, a0, a4 -; or a5, a3, a5 -; addi t0, zero, 0x40 -; srl a3, a1, a4 -; andi a4, a2, 0x7f -; mv a0, a3 -; mv a1, zero -; bgeu a4, t0, 0xc +; srl a4, a4, a0 +; or a4, a3, a4 +; addi a3, zero, 0x40 +; srl a5, a1, a0 +; andi a2, a2, 0x7f ; mv a0, a5 -; mv a1, a3 +; mv a1, zero +; bgeu a2, a3, 0xc +; mv a0, a4 +; mv a1, a5 ; ret function %ushr_i128_i32(i128, i32) -> i128 { @@ -465,38 +467,39 @@ block0(v0: i128, v1: i32): ; VCode: ; block0: -; andi a4,a2,63 +; mv a4,a0 +; andi a0,a2,63 +; li a3,64 +; sub a3,a3,a0 +; sll a3,a1,a3 +; select a3,zero,a3##condition=(a0 eq zero) +; srl a4,a4,a0 +; or a4,a3,a4 ; li a3,64 -; sub a3,a3,a4 -; sll a5,a1,a3 -; select a3,zero,a5##condition=(a4 eq zero) -; srl a5,a0,a4 -; or a5,a3,a5 -; li t0,64 -; srl a3,a1,a4 -; andi a4,a2,127 -; select [a0,a1],[a3,zero],[a5,a3]##condition=(a4 uge t0) +; srl a5,a1,a0 +; andi a2,a2,127 +; select [a0,a1],[a5,zero],[a4,a5]##condition=(a2 uge a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a4, a2, 0x3f +; mv a4, a0 +; andi a0, a2, 0x3f ; addi a3, zero, 0x40 -; 
sub a3, a3, a4 -; sll a5, a1, a3 +; sub a3, a3, a0 +; sll a3, a1, a3 +; bnez a0, 8 ; mv a3, zero -; beqz a4, 8 -; mv a3, a5 -; srl a5, a0, a4 -; or a5, a3, a5 -; addi t0, zero, 0x40 -; srl a3, a1, a4 -; andi a4, a2, 0x7f -; mv a0, a3 -; mv a1, zero -; bgeu a4, t0, 0xc +; srl a4, a4, a0 +; or a4, a3, a4 +; addi a3, zero, 0x40 +; srl a5, a1, a0 +; andi a2, a2, 0x7f ; mv a0, a5 -; mv a1, a3 +; mv a1, zero +; bgeu a2, a3, 0xc +; mv a0, a4 +; mv a1, a5 ; ret function %ushr_i128_i64(i128, i64) -> i128 { @@ -507,38 +510,39 @@ block0(v0: i128, v1: i64): ; VCode: ; block0: -; andi a4,a2,63 +; mv a4,a0 +; andi a0,a2,63 ; li a3,64 -; sub a3,a3,a4 -; sll a5,a1,a3 -; select a3,zero,a5##condition=(a4 eq zero) -; srl a5,a0,a4 -; or a5,a3,a5 -; li t0,64 -; srl a3,a1,a4 -; andi a4,a2,127 -; select [a0,a1],[a3,zero],[a5,a3]##condition=(a4 uge t0) +; sub a3,a3,a0 +; sll a3,a1,a3 +; select a3,zero,a3##condition=(a0 eq zero) +; srl a4,a4,a0 +; or a4,a3,a4 +; li a3,64 +; srl a5,a1,a0 +; andi a2,a2,127 +; select [a0,a1],[a5,zero],[a4,a5]##condition=(a2 uge a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a4, a2, 0x3f +; mv a4, a0 +; andi a0, a2, 0x3f ; addi a3, zero, 0x40 -; sub a3, a3, a4 -; sll a5, a1, a3 +; sub a3, a3, a0 +; sll a3, a1, a3 +; bnez a0, 8 ; mv a3, zero -; beqz a4, 8 -; mv a3, a5 -; srl a5, a0, a4 -; or a5, a3, a5 -; addi t0, zero, 0x40 -; srl a3, a1, a4 -; andi a4, a2, 0x7f -; mv a0, a3 -; mv a1, zero -; bgeu a4, t0, 0xc +; srl a4, a4, a0 +; or a4, a3, a4 +; addi a3, zero, 0x40 +; srl a5, a1, a0 +; andi a2, a2, 0x7f ; mv a0, a5 -; mv a1, a3 +; mv a1, zero +; bgeu a2, a3, 0xc +; mv a0, a4 +; mv a1, a5 ; ret function %ushr_i128_i128(i128, i128) -> i128 { @@ -548,60 +552,39 @@ block0(v0: i128, v1: i128): } ; VCode: -; addi sp,sp,-16 -; sd ra,8(sp) -; sd fp,0(sp) -; mv fp,sp -; addi sp,sp,-16 -; sd s11,8(sp) ; block0: -; andi a5,a2,63 +; mv a4,a0 +; andi a0,a2,63 ; li a3,64 -; sub a3,a3,a5 +; sub a3,a3,a0 ; sll a3,a1,a3 -; select a3,zero,a3##condition=(a5 eq zero) -; srl a4,a0,a5 -; or s11,a3,a4 +; select a3,zero,a3##condition=(a0 eq zero) +; srl a5,a4,a0 +; or a4,a3,a5 ; li a3,64 -; srl a4,a1,a5 -; andi a5,a2,127 -; select [a0,a1],[a4,zero],[s11,a4]##condition=(a5 uge a3) -; ld s11,8(sp) -; addi sp,sp,16 -; ld ra,8(sp) -; ld fp,0(sp) -; addi sp,sp,16 +; srl a5,a1,a0 +; andi a2,a2,127 +; select [a0,a1],[a5,zero],[a4,a5]##condition=(a2 uge a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; addi sp, sp, -0x10 -; sd ra, 8(sp) -; sd s0, 0(sp) -; mv s0, sp -; addi sp, sp, -0x10 -; sd s11, 8(sp) -; block1: ; offset 0x18 -; andi a5, a2, 0x3f +; mv a4, a0 +; andi a0, a2, 0x3f ; addi a3, zero, 0x40 -; sub a3, a3, a5 +; sub a3, a3, a0 ; sll a3, a1, a3 -; bnez a5, 8 +; bnez a0, 8 ; mv a3, zero -; srl a4, a0, a5 -; or s11, a3, a4 +; srl a5, a4, a0 +; or a4, a3, a5 ; addi a3, zero, 0x40 -; srl a4, a1, a5 -; andi a5, a2, 0x7f -; mv a0, a4 +; srl a5, a1, a0 +; andi a2, a2, 0x7f +; mv a0, a5 ; mv a1, zero -; bgeu a5, a3, 0xc -; mv a0, s11 -; mv a1, a4 -; ld s11, 8(sp) -; addi sp, sp, 0x10 -; ld ra, 8(sp) -; ld s0, 0(sp) -; addi sp, sp, 0x10 +; bgeu a2, a3, 0xc +; mv a0, a4 +; mv a1, a5 ; ret diff --git a/cranelift/filetests/filetests/isa/riscv64/zca.clif b/cranelift/filetests/filetests/isa/riscv64/zca.clif index e61b377432a4..68213c196b1c 100644 --- a/cranelift/filetests/filetests/isa/riscv64/zca.clif +++ b/cranelift/filetests/filetests/isa/riscv64/zca.clif @@ -157,15 +157,15 @@ block0(v0: i8, v1: i8, v2: i8): ; VCode: ; block0: -; andi a4,a0,255 -; select a0,a1,a2##condition=(a4 ne zero) +; andi 
a3,a0,255 +; select a0,a1,a2##condition=(a3 ne zero) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; andi a4, a0, 0xff +; andi a3, a0, 0xff ; c.mv a0, a1 -; bnez a4, 6 +; bnez a3, 6 ; c.mv a0, a2 ; c.jr ra @@ -619,20 +619,20 @@ block0: ; VCode: ; block0: -; mv a2,a0 +; mv a3,a0 ; lui a0,4 ; lui a1,-1 -; lui a4,-32 -; sd a4,0(a2) +; lui a2,-32 +; sd a2,0(a3) ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; c.mv a2, a0 +; c.mv a3, a0 ; c.lui a0, 4 ; c.lui a1, 0xfffff -; c.lui a4, 0xfffe0 -; c.sd a4, 0(a2) +; c.lui a2, 0xfffe0 +; c.sd a2, 0(a3) ; c.jr ra function %c_andi_f(i64) -> i64 { diff --git a/cranelift/filetests/filetests/isa/riscv64/zcb.clif b/cranelift/filetests/filetests/isa/riscv64/zcb.clif index c5f73c1f725a..7e021c2572b3 100644 --- a/cranelift/filetests/filetests/isa/riscv64/zcb.clif +++ b/cranelift/filetests/filetests/isa/riscv64/zcb.clif @@ -126,16 +126,16 @@ block0(v0: i64): ; VCode: ; block0: -; lbu a3,0(a0) +; lbu a2,0(a0) ; lbu a1,3(a0) -; mv a0,a3 +; mv a0,a2 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x14, 0x81 ; trap: heap_oob +; .byte 0x10, 0x81 ; trap: heap_oob ; .byte 0x6c, 0x81 ; trap: heap_oob -; c.mv a0, a3 +; c.mv a0, a2 ; c.jr ra function %c_lhu(i64) -> i32, i64 { @@ -147,16 +147,16 @@ block0(v0: i64): ; VCode: ; block0: -; lhu a3,0(a0) +; lhu a2,0(a0) ; lhu a1,2(a0) -; mv a0,a3 +; mv a0,a2 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x14, 0x85 ; trap: heap_oob +; .byte 0x10, 0x85 ; trap: heap_oob ; .byte 0x2c, 0x85 ; trap: heap_oob -; c.mv a0, a3 +; c.mv a0, a2 ; c.jr ra function %c_lh(i64) -> i16, i16 { @@ -168,16 +168,16 @@ block0(v0: i64): ; VCode: ; block0: -; lh a3,0(a0) +; lh a2,0(a0) ; lh a1,2(a0) -; mv a0,a3 +; mv a0,a2 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x54, 0x85 ; trap: heap_oob +; .byte 0x50, 0x85 ; trap: heap_oob ; .byte 0x6c, 0x85 ; trap: heap_oob -; c.mv a0, a3 +; c.mv a0, a2 ; c.jr ra function %c_sb(i64, i8) { @@ -278,12 +278,12 @@ block0(v0: i64): ; sw zero,16(slot) ; sd zero,0(a0) ; sd zero,24(slot) -; fmv.w.x fa5,zero -; fsw fa5,0(a0) -; fsw fa5,16(slot) -; fmv.d.x fa1,zero -; fsd fa1,0(a0) -; fsd fa1,24(slot) +; fmv.w.x fa1,zero +; fsw fa1,0(a0) +; fsw fa1,16(slot) +; fmv.d.x fa3,zero +; fsd fa3,0(a0) +; fsd fa3,24(slot) ; addi sp,sp,32 ; ld ra,8(sp) ; ld fp,0(sp) @@ -306,12 +306,12 @@ block0(v0: i64): ; c.swsp zero, 0x10(sp) ; sd zero, 0(a0) ; c.sdsp zero, 0x18(sp) -; fmv.w.x fa5, zero -; fsw fa5, 0(a0) -; fsw fa5, 0x10(sp) -; fmv.d.x fa1, zero -; fsd fa1, 0(a0) -; fsd fa1, 0x18(sp) +; fmv.w.x fa1, zero +; fsw fa1, 0(a0) +; fsw fa1, 0x10(sp) +; fmv.d.x fa3, zero +; fsd fa3, 0(a0) +; fsd fa3, 0x18(sp) ; c.addi16sp sp, 0x20 ; c.ldsp ra, 8(sp) ; c.ldsp s0, 0(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/zfa.clif b/cranelift/filetests/filetests/isa/riscv64/zfa.clif index 5d372244a512..b9cf36cf2d68 100644 --- a/cranelift/filetests/filetests/isa/riscv64/zfa.clif +++ b/cranelift/filetests/filetests/isa/riscv64/zfa.clif @@ -268,60 +268,60 @@ block0: ; fli.h fa3,2^-15 ; fli.h fa4,2^-8 ; fli.h fa5,2^-7 -; fli.h ft6,0.0625 -; fli.h ft7,0.125 -; fli.h fa6,0.25 -; fli.h fa7,0.3125 -; fli.h ft8,0.375 -; fli.h ft9,0.4375 -; fli.h ft10,0.5 -; fli.h ft11,0.625 +; fli.h ft0,0.0625 +; fli.h ft1,0.125 +; fli.h ft2,0.25 +; fli.h ft3,0.3125 +; fli.h ft4,0.375 +; fli.h ft5,0.4375 +; fli.h ft6,0.5 +; fli.h ft7,0.625 ; fli.h fs0,0.75 ; fli.h fs1,0.875 -; fli.h fs2,1.0 -; fli.h fs3,1.25 -; fli.h fs4,1.5 -; fli.h fs5,1.75 -; fli.h fs6,2.0 -; fli.h fs7,2.5 -; fli.h fs8,3.0 -; fli.h fs9,4.0 -; fli.h fs10,8.0 
-; fli.h fs11,16.0 -; fli.h ft0,128.0 -; fli.h ft1,256.0 -; fli.h ft2,32768.0 -; fli.h ft3,inf -; fli.h ft4,nan +; fli.h fa6,1.0 +; fli.h fa7,1.25 +; fli.h fs2,1.5 +; fli.h fs3,1.75 +; fli.h fs4,2.0 +; fli.h fs5,2.5 +; fli.h fs6,3.0 +; fli.h fs7,4.0 +; fli.h fs8,8.0 +; fli.h fs9,16.0 +; fli.h fs10,128.0 +; fli.h fs11,256.0 +; fli.h ft8,32768.0 +; fli.h ft9,inf +; fli.h ft10,nan ; fsh fa2,0(a0) ; fsh fa3,8(a0) ; fsh fa4,16(a0) ; fsh fa5,24(a0) -; fsh ft6,32(a0) -; fsh ft7,40(a0) -; fsh fa6,48(a0) -; fsh fa7,56(a0) -; fsh ft8,64(a0) -; fsh ft9,72(a0) -; fsh ft10,80(a0) -; fsh ft11,88(a0) +; fsh ft0,32(a0) +; fsh ft1,40(a0) +; fsh ft2,48(a0) +; fsh ft3,56(a0) +; fsh ft4,64(a0) +; fsh ft5,72(a0) +; fsh ft6,80(a0) +; fsh ft7,88(a0) ; fsh fs0,96(a0) ; fsh fs1,104(a0) -; fsh fs2,112(a0) -; fsh fs3,120(a0) -; fsh fs4,128(a0) -; fsh fs5,136(a0) -; fsh fs6,144(a0) -; fsh fs7,152(a0) -; fsh fs8,160(a0) -; fsh fs9,168(a0) -; fsh fs10,176(a0) -; fsh fs11,184(a0) -; fsh ft0,192(a0) -; fsh ft1,200(a0) -; fsh ft2,208(a0) -; fsh ft3,216(a0) -; fsh ft4,224(a0) +; fsh fa6,112(a0) +; fsh fa7,120(a0) +; fsh fs2,128(a0) +; fsh fs3,136(a0) +; fsh fs4,144(a0) +; fsh fs5,152(a0) +; fsh fs6,160(a0) +; fsh fs7,168(a0) +; fsh fs8,176(a0) +; fsh fs9,184(a0) +; fsh fs10,192(a0) +; fsh fs11,200(a0) +; fsh ft8,208(a0) +; fsh ft9,216(a0) +; fsh ft10,224(a0) ; fld fs0,88(sp) ; fld fs2,80(sp) ; fld fs3,72(sp) @@ -364,60 +364,60 @@ block0: ; .byte 0xd3, 0x86, 0x11, 0xf4 ; .byte 0x53, 0x07, 0x12, 0xf4 ; .byte 0xd3, 0x87, 0x12, 0xf4 -; .byte 0x53, 0x03, 0x13, 0xf4 -; .byte 0xd3, 0x83, 0x13, 0xf4 -; .byte 0x53, 0x08, 0x14, 0xf4 -; .byte 0xd3, 0x88, 0x14, 0xf4 -; .byte 0x53, 0x0e, 0x15, 0xf4 -; .byte 0xd3, 0x8e, 0x15, 0xf4 -; .byte 0x53, 0x0f, 0x16, 0xf4 -; .byte 0xd3, 0x8f, 0x16, 0xf4 +; .byte 0x53, 0x00, 0x13, 0xf4 +; .byte 0xd3, 0x80, 0x13, 0xf4 +; .byte 0x53, 0x01, 0x14, 0xf4 +; .byte 0xd3, 0x81, 0x14, 0xf4 +; .byte 0x53, 0x02, 0x15, 0xf4 +; .byte 0xd3, 0x82, 0x15, 0xf4 +; .byte 0x53, 0x03, 0x16, 0xf4 +; .byte 0xd3, 0x83, 0x16, 0xf4 ; .byte 0x53, 0x04, 0x17, 0xf4 ; .byte 0xd3, 0x84, 0x17, 0xf4 -; .byte 0x53, 0x09, 0x18, 0xf4 -; .byte 0xd3, 0x89, 0x18, 0xf4 -; .byte 0x53, 0x0a, 0x19, 0xf4 -; .byte 0xd3, 0x8a, 0x19, 0xf4 -; .byte 0x53, 0x0b, 0x1a, 0xf4 -; .byte 0xd3, 0x8b, 0x1a, 0xf4 -; .byte 0x53, 0x0c, 0x1b, 0xf4 -; .byte 0xd3, 0x8c, 0x1b, 0xf4 -; .byte 0x53, 0x0d, 0x1c, 0xf4 -; .byte 0xd3, 0x8d, 0x1c, 0xf4 -; .byte 0x53, 0x00, 0x1d, 0xf4 -; .byte 0xd3, 0x80, 0x1d, 0xf4 -; .byte 0x53, 0x01, 0x1e, 0xf4 -; .byte 0xd3, 0x01, 0x1f, 0xf4 -; .byte 0x53, 0x82, 0x1f, 0xf4 +; .byte 0x53, 0x08, 0x18, 0xf4 +; .byte 0xd3, 0x88, 0x18, 0xf4 +; .byte 0x53, 0x09, 0x19, 0xf4 +; .byte 0xd3, 0x89, 0x19, 0xf4 +; .byte 0x53, 0x0a, 0x1a, 0xf4 +; .byte 0xd3, 0x8a, 0x1a, 0xf4 +; .byte 0x53, 0x0b, 0x1b, 0xf4 +; .byte 0xd3, 0x8b, 0x1b, 0xf4 +; .byte 0x53, 0x0c, 0x1c, 0xf4 +; .byte 0xd3, 0x8c, 0x1c, 0xf4 +; .byte 0x53, 0x0d, 0x1d, 0xf4 +; .byte 0xd3, 0x8d, 0x1d, 0xf4 +; .byte 0x53, 0x0e, 0x1e, 0xf4 +; .byte 0xd3, 0x0e, 0x1f, 0xf4 +; .byte 0x53, 0x8f, 0x1f, 0xf4 ; .byte 0x27, 0x10, 0xc5, 0x00 ; .byte 0x27, 0x14, 0xd5, 0x00 ; .byte 0x27, 0x18, 0xe5, 0x00 ; .byte 0x27, 0x1c, 0xf5, 0x00 -; .byte 0x27, 0x10, 0x65, 0x02 -; .byte 0x27, 0x14, 0x75, 0x02 -; .byte 0x27, 0x18, 0x05, 0x03 -; .byte 0x27, 0x1c, 0x15, 0x03 -; .byte 0x27, 0x10, 0xc5, 0x05 -; .byte 0x27, 0x14, 0xd5, 0x05 -; .byte 0x27, 0x18, 0xe5, 0x05 -; .byte 0x27, 0x1c, 0xf5, 0x05 +; .byte 0x27, 0x10, 0x05, 0x02 +; .byte 0x27, 0x14, 0x15, 0x02 +; .byte 0x27, 0x18, 0x25, 0x02 +; .byte 0x27, 0x1c, 
0x35, 0x02 +; .byte 0x27, 0x10, 0x45, 0x04 +; .byte 0x27, 0x14, 0x55, 0x04 +; .byte 0x27, 0x18, 0x65, 0x04 +; .byte 0x27, 0x1c, 0x75, 0x04 ; .byte 0x27, 0x10, 0x85, 0x06 ; .byte 0x27, 0x14, 0x95, 0x06 -; .byte 0x27, 0x18, 0x25, 0x07 -; .byte 0x27, 0x1c, 0x35, 0x07 -; .byte 0x27, 0x10, 0x45, 0x09 -; .byte 0x27, 0x14, 0x55, 0x09 -; .byte 0x27, 0x18, 0x65, 0x09 -; .byte 0x27, 0x1c, 0x75, 0x09 -; .byte 0x27, 0x10, 0x85, 0x0b -; .byte 0x27, 0x14, 0x95, 0x0b -; .byte 0x27, 0x18, 0xa5, 0x0b -; .byte 0x27, 0x1c, 0xb5, 0x0b -; .byte 0x27, 0x10, 0x05, 0x0c -; .byte 0x27, 0x14, 0x15, 0x0c -; .byte 0x27, 0x18, 0x25, 0x0c -; .byte 0x27, 0x1c, 0x35, 0x0c -; .byte 0x27, 0x10, 0x45, 0x0e +; .byte 0x27, 0x18, 0x05, 0x07 +; .byte 0x27, 0x1c, 0x15, 0x07 +; .byte 0x27, 0x10, 0x25, 0x09 +; .byte 0x27, 0x14, 0x35, 0x09 +; .byte 0x27, 0x18, 0x45, 0x09 +; .byte 0x27, 0x1c, 0x55, 0x09 +; .byte 0x27, 0x10, 0x65, 0x0b +; .byte 0x27, 0x14, 0x75, 0x0b +; .byte 0x27, 0x18, 0x85, 0x0b +; .byte 0x27, 0x1c, 0x95, 0x0b +; .byte 0x27, 0x10, 0xa5, 0x0d +; .byte 0x27, 0x14, 0xb5, 0x0d +; .byte 0x27, 0x18, 0xc5, 0x0d +; .byte 0x27, 0x1c, 0xd5, 0x0d +; .byte 0x27, 0x10, 0xe5, 0x0f ; fld fs0, 0x58(sp) ; fld fs2, 0x50(sp) ; fld fs3, 0x48(sp) @@ -493,66 +493,66 @@ block0: ; block0: ; fli.s fa0,-1.0 ; fli.s fa1,min -; fli.s fa3,2^-16 -; fli.s fa4,2^-15 -; fli.s fa5,2^-8 -; fli.s fa2,2^-7 -; fli.s ft7,0.0625 -; fli.s fa6,0.125 -; fli.s fa7,0.25 -; fli.s ft8,0.3125 -; fli.s ft9,0.375 -; fli.s ft10,0.4375 -; fli.s ft11,0.5 -; fli.s fs0,0.625 -; fli.s fs1,0.75 -; fli.s fs2,0.875 -; fli.s fs3,1.0 -; fli.s fs4,1.25 -; fli.s fs5,1.5 -; fli.s fs6,1.75 -; fli.s fs7,2.0 -; fli.s fs8,2.5 -; fli.s fs9,3.0 -; fli.s fs10,4.0 -; fli.s fs11,8.0 -; fli.s ft0,16.0 -; fli.s ft1,128.0 -; fli.s ft2,256.0 -; fli.s ft3,32768.0 -; fli.s ft4,65536.0 -; fli.s ft5,inf -; fli.s ft6,nan -; fsw fa3,0(a0) -; fsw fa4,8(a0) -; fsw fa5,16(a0) -; fsw fa2,24(a0) -; fsw ft7,32(a0) -; fsw fa6,40(a0) -; fsw fa7,48(a0) -; fsw ft8,56(a0) -; fsw ft9,64(a0) -; fsw ft10,72(a0) -; fsw ft11,80(a0) -; fsw fs0,88(a0) -; fsw fs1,96(a0) -; fsw fs2,104(a0) -; fsw fs3,112(a0) -; fsw fs4,120(a0) -; fsw fs5,128(a0) -; fsw fs6,136(a0) -; fsw fs7,144(a0) -; fsw fs8,152(a0) -; fsw fs9,160(a0) -; fsw fs10,168(a0) -; fsw fs11,176(a0) -; fsw ft0,184(a0) -; fsw ft1,192(a0) -; fsw ft2,200(a0) -; fsw ft3,208(a0) -; fsw ft4,216(a0) -; fsw ft5,224(a0) -; fsw ft6,232(a0) +; fli.s fa2,2^-16 +; fli.s fa3,2^-15 +; fli.s fa4,2^-8 +; fli.s fa5,2^-7 +; fli.s ft0,0.0625 +; fli.s ft1,0.125 +; fli.s ft2,0.25 +; fli.s ft3,0.3125 +; fli.s ft4,0.375 +; fli.s ft5,0.4375 +; fli.s ft6,0.5 +; fli.s ft7,0.625 +; fli.s fs0,0.75 +; fli.s fs1,0.875 +; fli.s fa6,1.0 +; fli.s fa7,1.25 +; fli.s fs2,1.5 +; fli.s fs3,1.75 +; fli.s fs4,2.0 +; fli.s fs5,2.5 +; fli.s fs6,3.0 +; fli.s fs7,4.0 +; fli.s fs8,8.0 +; fli.s fs9,16.0 +; fli.s fs10,128.0 +; fli.s fs11,256.0 +; fli.s ft8,32768.0 +; fli.s ft9,65536.0 +; fli.s ft10,inf +; fli.s ft11,nan +; fsw fa2,0(a0) +; fsw fa3,8(a0) +; fsw fa4,16(a0) +; fsw fa5,24(a0) +; fsw ft0,32(a0) +; fsw ft1,40(a0) +; fsw ft2,48(a0) +; fsw ft3,56(a0) +; fsw ft4,64(a0) +; fsw ft5,72(a0) +; fsw ft6,80(a0) +; fsw ft7,88(a0) +; fsw fs0,96(a0) +; fsw fs1,104(a0) +; fsw fa6,112(a0) +; fsw fa7,120(a0) +; fsw fs2,128(a0) +; fsw fs3,136(a0) +; fsw fs4,144(a0) +; fsw fs5,152(a0) +; fsw fs6,160(a0) +; fsw fs7,168(a0) +; fsw fs8,176(a0) +; fsw fs9,184(a0) +; fsw fs10,192(a0) +; fsw fs11,200(a0) +; fsw ft8,208(a0) +; fsw ft9,216(a0) +; fsw ft10,224(a0) +; fsw ft11,232(a0) ; fld fs0,88(sp) ; fld fs2,80(sp) 
; fld fs3,72(sp) @@ -591,66 +591,66 @@ block0: ; block1: ; offset 0x40 ; .byte 0x53, 0x05, 0x10, 0xf0 ; .byte 0xd3, 0x85, 0x10, 0xf0 -; .byte 0xd3, 0x06, 0x11, 0xf0 -; .byte 0x53, 0x87, 0x11, 0xf0 -; .byte 0xd3, 0x07, 0x12, 0xf0 -; .byte 0x53, 0x86, 0x12, 0xf0 -; .byte 0xd3, 0x03, 0x13, 0xf0 -; .byte 0x53, 0x88, 0x13, 0xf0 -; .byte 0xd3, 0x08, 0x14, 0xf0 -; .byte 0x53, 0x8e, 0x14, 0xf0 -; .byte 0xd3, 0x0e, 0x15, 0xf0 -; .byte 0x53, 0x8f, 0x15, 0xf0 -; .byte 0xd3, 0x0f, 0x16, 0xf0 -; .byte 0x53, 0x84, 0x16, 0xf0 -; .byte 0xd3, 0x04, 0x17, 0xf0 -; .byte 0x53, 0x89, 0x17, 0xf0 -; .byte 0xd3, 0x09, 0x18, 0xf0 -; .byte 0x53, 0x8a, 0x18, 0xf0 -; .byte 0xd3, 0x0a, 0x19, 0xf0 -; .byte 0x53, 0x8b, 0x19, 0xf0 -; .byte 0xd3, 0x0b, 0x1a, 0xf0 -; .byte 0x53, 0x8c, 0x1a, 0xf0 -; .byte 0xd3, 0x0c, 0x1b, 0xf0 -; .byte 0x53, 0x8d, 0x1b, 0xf0 -; .byte 0xd3, 0x0d, 0x1c, 0xf0 -; .byte 0x53, 0x80, 0x1c, 0xf0 -; .byte 0xd3, 0x00, 0x1d, 0xf0 -; .byte 0x53, 0x81, 0x1d, 0xf0 -; .byte 0xd3, 0x01, 0x1e, 0xf0 -; .byte 0x53, 0x82, 0x1e, 0xf0 -; .byte 0xd3, 0x02, 0x1f, 0xf0 -; .byte 0x53, 0x83, 0x1f, 0xf0 -; fsw fa3, 0(a0) -; fsw fa4, 8(a0) -; fsw fa5, 0x10(a0) -; fsw fa2, 0x18(a0) -; fsw ft7, 0x20(a0) -; fsw fa6, 0x28(a0) -; fsw fa7, 0x30(a0) -; fsw ft8, 0x38(a0) -; fsw ft9, 0x40(a0) -; fsw ft10, 0x48(a0) -; fsw ft11, 0x50(a0) -; fsw fs0, 0x58(a0) -; fsw fs1, 0x60(a0) -; fsw fs2, 0x68(a0) -; fsw fs3, 0x70(a0) -; fsw fs4, 0x78(a0) -; fsw fs5, 0x80(a0) -; fsw fs6, 0x88(a0) -; fsw fs7, 0x90(a0) -; fsw fs8, 0x98(a0) -; fsw fs9, 0xa0(a0) -; fsw fs10, 0xa8(a0) -; fsw fs11, 0xb0(a0) -; fsw ft0, 0xb8(a0) -; fsw ft1, 0xc0(a0) -; fsw ft2, 0xc8(a0) -; fsw ft3, 0xd0(a0) -; fsw ft4, 0xd8(a0) -; fsw ft5, 0xe0(a0) -; fsw ft6, 0xe8(a0) +; .byte 0x53, 0x06, 0x11, 0xf0 +; .byte 0xd3, 0x86, 0x11, 0xf0 +; .byte 0x53, 0x07, 0x12, 0xf0 +; .byte 0xd3, 0x87, 0x12, 0xf0 +; .byte 0x53, 0x00, 0x13, 0xf0 +; .byte 0xd3, 0x80, 0x13, 0xf0 +; .byte 0x53, 0x01, 0x14, 0xf0 +; .byte 0xd3, 0x81, 0x14, 0xf0 +; .byte 0x53, 0x02, 0x15, 0xf0 +; .byte 0xd3, 0x82, 0x15, 0xf0 +; .byte 0x53, 0x03, 0x16, 0xf0 +; .byte 0xd3, 0x83, 0x16, 0xf0 +; .byte 0x53, 0x04, 0x17, 0xf0 +; .byte 0xd3, 0x84, 0x17, 0xf0 +; .byte 0x53, 0x08, 0x18, 0xf0 +; .byte 0xd3, 0x88, 0x18, 0xf0 +; .byte 0x53, 0x09, 0x19, 0xf0 +; .byte 0xd3, 0x89, 0x19, 0xf0 +; .byte 0x53, 0x0a, 0x1a, 0xf0 +; .byte 0xd3, 0x8a, 0x1a, 0xf0 +; .byte 0x53, 0x0b, 0x1b, 0xf0 +; .byte 0xd3, 0x8b, 0x1b, 0xf0 +; .byte 0x53, 0x0c, 0x1c, 0xf0 +; .byte 0xd3, 0x8c, 0x1c, 0xf0 +; .byte 0x53, 0x0d, 0x1d, 0xf0 +; .byte 0xd3, 0x8d, 0x1d, 0xf0 +; .byte 0x53, 0x0e, 0x1e, 0xf0 +; .byte 0xd3, 0x8e, 0x1e, 0xf0 +; .byte 0x53, 0x0f, 0x1f, 0xf0 +; .byte 0xd3, 0x8f, 0x1f, 0xf0 +; fsw fa2, 0(a0) +; fsw fa3, 8(a0) +; fsw fa4, 0x10(a0) +; fsw fa5, 0x18(a0) +; fsw ft0, 0x20(a0) +; fsw ft1, 0x28(a0) +; fsw ft2, 0x30(a0) +; fsw ft3, 0x38(a0) +; fsw ft4, 0x40(a0) +; fsw ft5, 0x48(a0) +; fsw ft6, 0x50(a0) +; fsw ft7, 0x58(a0) +; fsw fs0, 0x60(a0) +; fsw fs1, 0x68(a0) +; fsw fa6, 0x70(a0) +; fsw fa7, 0x78(a0) +; fsw fs2, 0x80(a0) +; fsw fs3, 0x88(a0) +; fsw fs4, 0x90(a0) +; fsw fs5, 0x98(a0) +; fsw fs6, 0xa0(a0) +; fsw fs7, 0xa8(a0) +; fsw fs8, 0xb0(a0) +; fsw fs9, 0xb8(a0) +; fsw fs10, 0xc0(a0) +; fsw fs11, 0xc8(a0) +; fsw ft8, 0xd0(a0) +; fsw ft9, 0xd8(a0) +; fsw ft10, 0xe0(a0) +; fsw ft11, 0xe8(a0) ; fld fs0, 0x58(sp) ; fld fs2, 0x50(sp) ; fld fs3, 0x48(sp) @@ -725,66 +725,66 @@ block0: ; block0: ; fli.d fa0,-1.0 ; fli.d fa1,min -; fli.d fa3,2^-16 -; fli.d fa4,2^-15 -; fli.d fa5,2^-8 -; fli.d fa2,2^-7 -; fli.d ft7,0.0625 -; fli.d fa6,0.125 
-; fli.d fa7,0.25 -; fli.d ft8,0.3125 -; fli.d ft9,0.375 -; fli.d ft10,0.4375 -; fli.d ft11,0.5 -; fli.d fs0,0.625 -; fli.d fs1,0.75 -; fli.d fs2,0.875 -; fli.d fs3,1.0 -; fli.d fs4,1.25 -; fli.d fs5,1.5 -; fli.d fs6,1.75 -; fli.d fs7,2.0 -; fli.d fs8,2.5 -; fli.d fs9,3.0 -; fli.d fs10,4.0 -; fli.d fs11,8.0 -; fli.d ft0,16.0 -; fli.d ft1,128.0 -; fli.d ft2,256.0 -; fli.d ft3,32768.0 -; fli.d ft4,65536.0 -; fli.d ft5,inf -; fli.d ft6,nan -; fsd fa3,0(a0) -; fsd fa4,8(a0) -; fsd fa5,16(a0) -; fsd fa2,24(a0) -; fsd ft7,32(a0) -; fsd fa6,40(a0) -; fsd fa7,48(a0) -; fsd ft8,56(a0) -; fsd ft9,64(a0) -; fsd ft10,72(a0) -; fsd ft11,80(a0) -; fsd fs0,88(a0) -; fsd fs1,96(a0) -; fsd fs2,104(a0) -; fsd fs3,112(a0) -; fsd fs4,120(a0) -; fsd fs5,128(a0) -; fsd fs6,136(a0) -; fsd fs7,144(a0) -; fsd fs8,152(a0) -; fsd fs9,160(a0) -; fsd fs10,168(a0) -; fsd fs11,176(a0) -; fsd ft0,184(a0) -; fsd ft1,192(a0) -; fsd ft2,200(a0) -; fsd ft3,208(a0) -; fsd ft4,216(a0) -; fsd ft5,224(a0) -; fsd ft6,232(a0) +; fli.d fa2,2^-16 +; fli.d fa3,2^-15 +; fli.d fa4,2^-8 +; fli.d fa5,2^-7 +; fli.d ft0,0.0625 +; fli.d ft1,0.125 +; fli.d ft2,0.25 +; fli.d ft3,0.3125 +; fli.d ft4,0.375 +; fli.d ft5,0.4375 +; fli.d ft6,0.5 +; fli.d ft7,0.625 +; fli.d fs0,0.75 +; fli.d fs1,0.875 +; fli.d fa6,1.0 +; fli.d fa7,1.25 +; fli.d fs2,1.5 +; fli.d fs3,1.75 +; fli.d fs4,2.0 +; fli.d fs5,2.5 +; fli.d fs6,3.0 +; fli.d fs7,4.0 +; fli.d fs8,8.0 +; fli.d fs9,16.0 +; fli.d fs10,128.0 +; fli.d fs11,256.0 +; fli.d ft8,32768.0 +; fli.d ft9,65536.0 +; fli.d ft10,inf +; fli.d ft11,nan +; fsd fa2,0(a0) +; fsd fa3,8(a0) +; fsd fa4,16(a0) +; fsd fa5,24(a0) +; fsd ft0,32(a0) +; fsd ft1,40(a0) +; fsd ft2,48(a0) +; fsd ft3,56(a0) +; fsd ft4,64(a0) +; fsd ft5,72(a0) +; fsd ft6,80(a0) +; fsd ft7,88(a0) +; fsd fs0,96(a0) +; fsd fs1,104(a0) +; fsd fa6,112(a0) +; fsd fa7,120(a0) +; fsd fs2,128(a0) +; fsd fs3,136(a0) +; fsd fs4,144(a0) +; fsd fs5,152(a0) +; fsd fs6,160(a0) +; fsd fs7,168(a0) +; fsd fs8,176(a0) +; fsd fs9,184(a0) +; fsd fs10,192(a0) +; fsd fs11,200(a0) +; fsd ft8,208(a0) +; fsd ft9,216(a0) +; fsd ft10,224(a0) +; fsd ft11,232(a0) ; fld fs0,88(sp) ; fld fs2,80(sp) ; fld fs3,72(sp) @@ -823,66 +823,66 @@ block0: ; block1: ; offset 0x40 ; .byte 0x53, 0x05, 0x10, 0xf2 ; .byte 0xd3, 0x85, 0x10, 0xf2 -; .byte 0xd3, 0x06, 0x11, 0xf2 -; .byte 0x53, 0x87, 0x11, 0xf2 -; .byte 0xd3, 0x07, 0x12, 0xf2 -; .byte 0x53, 0x86, 0x12, 0xf2 -; .byte 0xd3, 0x03, 0x13, 0xf2 -; .byte 0x53, 0x88, 0x13, 0xf2 -; .byte 0xd3, 0x08, 0x14, 0xf2 -; .byte 0x53, 0x8e, 0x14, 0xf2 -; .byte 0xd3, 0x0e, 0x15, 0xf2 -; .byte 0x53, 0x8f, 0x15, 0xf2 -; .byte 0xd3, 0x0f, 0x16, 0xf2 -; .byte 0x53, 0x84, 0x16, 0xf2 -; .byte 0xd3, 0x04, 0x17, 0xf2 -; .byte 0x53, 0x89, 0x17, 0xf2 -; .byte 0xd3, 0x09, 0x18, 0xf2 -; .byte 0x53, 0x8a, 0x18, 0xf2 -; .byte 0xd3, 0x0a, 0x19, 0xf2 -; .byte 0x53, 0x8b, 0x19, 0xf2 -; .byte 0xd3, 0x0b, 0x1a, 0xf2 -; .byte 0x53, 0x8c, 0x1a, 0xf2 -; .byte 0xd3, 0x0c, 0x1b, 0xf2 -; .byte 0x53, 0x8d, 0x1b, 0xf2 -; .byte 0xd3, 0x0d, 0x1c, 0xf2 -; .byte 0x53, 0x80, 0x1c, 0xf2 -; .byte 0xd3, 0x00, 0x1d, 0xf2 -; .byte 0x53, 0x81, 0x1d, 0xf2 -; .byte 0xd3, 0x01, 0x1e, 0xf2 -; .byte 0x53, 0x82, 0x1e, 0xf2 -; .byte 0xd3, 0x02, 0x1f, 0xf2 -; .byte 0x53, 0x83, 0x1f, 0xf2 -; fsd fa3, 0(a0) -; fsd fa4, 8(a0) -; fsd fa5, 0x10(a0) -; fsd fa2, 0x18(a0) -; fsd ft7, 0x20(a0) -; fsd fa6, 0x28(a0) -; fsd fa7, 0x30(a0) -; fsd ft8, 0x38(a0) -; fsd ft9, 0x40(a0) -; fsd ft10, 0x48(a0) -; fsd ft11, 0x50(a0) -; fsd fs0, 0x58(a0) -; fsd fs1, 0x60(a0) -; fsd fs2, 0x68(a0) -; fsd fs3, 0x70(a0) -; 
fsd fs4, 0x78(a0) -; fsd fs5, 0x80(a0) -; fsd fs6, 0x88(a0) -; fsd fs7, 0x90(a0) -; fsd fs8, 0x98(a0) -; fsd fs9, 0xa0(a0) -; fsd fs10, 0xa8(a0) -; fsd fs11, 0xb0(a0) -; fsd ft0, 0xb8(a0) -; fsd ft1, 0xc0(a0) -; fsd ft2, 0xc8(a0) -; fsd ft3, 0xd0(a0) -; fsd ft4, 0xd8(a0) -; fsd ft5, 0xe0(a0) -; fsd ft6, 0xe8(a0) +; .byte 0x53, 0x06, 0x11, 0xf2 +; .byte 0xd3, 0x86, 0x11, 0xf2 +; .byte 0x53, 0x07, 0x12, 0xf2 +; .byte 0xd3, 0x87, 0x12, 0xf2 +; .byte 0x53, 0x00, 0x13, 0xf2 +; .byte 0xd3, 0x80, 0x13, 0xf2 +; .byte 0x53, 0x01, 0x14, 0xf2 +; .byte 0xd3, 0x81, 0x14, 0xf2 +; .byte 0x53, 0x02, 0x15, 0xf2 +; .byte 0xd3, 0x82, 0x15, 0xf2 +; .byte 0x53, 0x03, 0x16, 0xf2 +; .byte 0xd3, 0x83, 0x16, 0xf2 +; .byte 0x53, 0x04, 0x17, 0xf2 +; .byte 0xd3, 0x84, 0x17, 0xf2 +; .byte 0x53, 0x08, 0x18, 0xf2 +; .byte 0xd3, 0x88, 0x18, 0xf2 +; .byte 0x53, 0x09, 0x19, 0xf2 +; .byte 0xd3, 0x89, 0x19, 0xf2 +; .byte 0x53, 0x0a, 0x1a, 0xf2 +; .byte 0xd3, 0x8a, 0x1a, 0xf2 +; .byte 0x53, 0x0b, 0x1b, 0xf2 +; .byte 0xd3, 0x8b, 0x1b, 0xf2 +; .byte 0x53, 0x0c, 0x1c, 0xf2 +; .byte 0xd3, 0x8c, 0x1c, 0xf2 +; .byte 0x53, 0x0d, 0x1d, 0xf2 +; .byte 0xd3, 0x8d, 0x1d, 0xf2 +; .byte 0x53, 0x0e, 0x1e, 0xf2 +; .byte 0xd3, 0x8e, 0x1e, 0xf2 +; .byte 0x53, 0x0f, 0x1f, 0xf2 +; .byte 0xd3, 0x8f, 0x1f, 0xf2 +; fsd fa2, 0(a0) +; fsd fa3, 8(a0) +; fsd fa4, 0x10(a0) +; fsd fa5, 0x18(a0) +; fsd ft0, 0x20(a0) +; fsd ft1, 0x28(a0) +; fsd ft2, 0x30(a0) +; fsd ft3, 0x38(a0) +; fsd ft4, 0x40(a0) +; fsd ft5, 0x48(a0) +; fsd ft6, 0x50(a0) +; fsd ft7, 0x58(a0) +; fsd fs0, 0x60(a0) +; fsd fs1, 0x68(a0) +; fsd fa6, 0x70(a0) +; fsd fa7, 0x78(a0) +; fsd fs2, 0x80(a0) +; fsd fs3, 0x88(a0) +; fsd fs4, 0x90(a0) +; fsd fs5, 0x98(a0) +; fsd fs6, 0xa0(a0) +; fsd fs7, 0xa8(a0) +; fsd fs8, 0xb0(a0) +; fsd fs9, 0xb8(a0) +; fsd fs10, 0xc0(a0) +; fsd fs11, 0xc8(a0) +; fsd ft8, 0xd0(a0) +; fsd ft9, 0xd8(a0) +; fsd ft10, 0xe0(a0) +; fsd ft11, 0xe8(a0) ; fld fs0, 0x58(sp) ; fld fs2, 0x50(sp) ; fld fs3, 0x48(sp) @@ -900,7 +900,6 @@ block0: ; addi sp, sp, 0x10 ; ret - function %fli_h_neg() -> f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16, f16 { block0: v0 = f16const 0x1.p0 @@ -967,84 +966,84 @@ block0: ; fneg.h fa4,fa4 ; fli.h fa5,2^-7 ; fneg.h fa5,fa5 -; fli.h fa6,0.0625 -; fneg.h ft8,fa6 -; fli.h fa6,0.125 -; fneg.h ft9,fa6 -; fli.h fa6,0.25 -; fneg.h ft10,fa6 -; fli.h fa6,0.3125 -; fneg.h ft11,fa6 -; fli.h fa6,0.375 -; fneg.h fs0,fa6 -; fli.h fa6,0.4375 -; fneg.h fs1,fa6 -; fli.h fa6,0.5 -; fneg.h fs2,fa6 -; fli.h fa6,0.625 -; fneg.h fs3,fa6 -; fli.h fa6,0.75 -; fneg.h fs4,fa6 -; fli.h fa6,0.875 -; fneg.h fs5,fa6 -; fli.h fa7,-1.0 -; fli.h fa6,1.25 -; fneg.h fs6,fa6 -; fli.h fa6,1.5 -; fneg.h fs7,fa6 -; fli.h fa6,1.75 -; fneg.h fs8,fa6 -; fli.h fa6,2.0 -; fneg.h fs9,fa6 -; fli.h fa6,2.5 -; fneg.h fs10,fa6 -; fli.h fa6,3.0 -; fneg.h fs11,fa6 -; fli.h fa6,4.0 -; fneg.h ft0,fa6 -; fli.h fa6,8.0 -; fneg.h ft1,fa6 -; fli.h fa6,16.0 -; fneg.h ft2,fa6 -; fli.h fa6,128.0 -; fneg.h ft3,fa6 -; fli.h fa6,256.0 -; fneg.h ft4,fa6 -; fli.h fa6,32768.0 -; fneg.h ft5,fa6 -; fli.h fa6,inf -; fneg.h ft6,fa6 -; fli.h fa6,nan -; fneg.h ft7,fa6 +; fli.h ft0,0.0625 +; fneg.h ft0,ft0 +; fli.h ft1,0.125 +; fneg.h ft1,ft1 +; fli.h ft2,0.25 +; fneg.h ft2,ft2 +; fli.h ft3,0.3125 +; fneg.h ft3,ft3 +; fli.h ft4,0.375 +; fneg.h ft4,ft4 +; fli.h ft5,0.4375 +; fneg.h ft5,ft5 +; fli.h ft6,0.5 +; fneg.h ft6,ft6 +; fli.h ft7,0.625 +; fneg.h ft7,ft7 +; fli.h fs0,0.75 +; fneg.h fs0,fs0 +; fli.h fs1,0.875 +; 
fneg.h fs1,fs1 +; fli.h fa6,-1.0 +; fli.h fa7,1.25 +; fneg.h fa7,fa7 +; fli.h fs2,1.5 +; fneg.h fs2,fs2 +; fli.h fs3,1.75 +; fneg.h fs3,fs3 +; fli.h fs4,2.0 +; fneg.h fs4,fs4 +; fli.h fs5,2.5 +; fneg.h fs5,fs5 +; fli.h fs6,3.0 +; fneg.h fs6,fs6 +; fli.h fs7,4.0 +; fneg.h fs7,fs7 +; fli.h fs8,8.0 +; fneg.h fs8,fs8 +; fli.h fs9,16.0 +; fneg.h fs9,fs9 +; fli.h fs10,128.0 +; fneg.h fs10,fs10 +; fli.h fs11,256.0 +; fneg.h fs11,fs11 +; fli.h ft8,32768.0 +; fneg.h ft8,ft8 +; fli.h ft9,inf +; fneg.h ft9,ft9 +; fli.h ft10,nan +; fneg.h ft10,ft10 ; fsh fa2,0(a0) ; fsh fa3,8(a0) ; fsh fa4,16(a0) ; fsh fa5,24(a0) -; fsh ft8,32(a0) -; fsh ft9,40(a0) -; fsh ft10,48(a0) -; fsh ft11,56(a0) -; fsh fs0,64(a0) -; fsh fs1,72(a0) -; fsh fs2,80(a0) -; fsh fs3,88(a0) -; fsh fs4,96(a0) -; fsh fs5,104(a0) -; fsh fa7,112(a0) -; fsh fs6,120(a0) -; fsh fs7,128(a0) -; fsh fs8,136(a0) -; fsh fs9,144(a0) -; fsh fs10,152(a0) -; fsh fs11,160(a0) -; fsh ft0,168(a0) -; fsh ft1,176(a0) -; fsh ft2,184(a0) -; fsh ft3,192(a0) -; fsh ft4,200(a0) -; fsh ft5,208(a0) -; fsh ft6,216(a0) -; fsh ft7,224(a0) +; fsh ft0,32(a0) +; fsh ft1,40(a0) +; fsh ft2,48(a0) +; fsh ft3,56(a0) +; fsh ft4,64(a0) +; fsh ft5,72(a0) +; fsh ft6,80(a0) +; fsh ft7,88(a0) +; fsh fs0,96(a0) +; fsh fs1,104(a0) +; fsh fa6,112(a0) +; fsh fa7,120(a0) +; fsh fs2,128(a0) +; fsh fs3,136(a0) +; fsh fs4,144(a0) +; fsh fs5,152(a0) +; fsh fs6,160(a0) +; fsh fs7,168(a0) +; fsh fs8,176(a0) +; fsh fs9,184(a0) +; fsh fs10,192(a0) +; fsh fs11,200(a0) +; fsh ft8,208(a0) +; fsh ft9,216(a0) +; fsh ft10,224(a0) ; fld fs0,88(sp) ; fld fs2,80(sp) ; fld fs3,72(sp) @@ -1092,84 +1091,84 @@ block0: ; .byte 0x53, 0x17, 0xe7, 0x24 ; .byte 0xd3, 0x87, 0x12, 0xf4 ; .byte 0xd3, 0x97, 0xf7, 0x24 -; .byte 0x53, 0x08, 0x13, 0xf4 -; .byte 0x53, 0x1e, 0x08, 0x25 -; .byte 0x53, 0x88, 0x13, 0xf4 -; .byte 0xd3, 0x1e, 0x08, 0x25 -; .byte 0x53, 0x08, 0x14, 0xf4 -; .byte 0x53, 0x1f, 0x08, 0x25 -; .byte 0x53, 0x88, 0x14, 0xf4 -; .byte 0xd3, 0x1f, 0x08, 0x25 -; .byte 0x53, 0x08, 0x15, 0xf4 -; .byte 0x53, 0x14, 0x08, 0x25 -; .byte 0x53, 0x88, 0x15, 0xf4 -; .byte 0xd3, 0x14, 0x08, 0x25 -; .byte 0x53, 0x08, 0x16, 0xf4 -; .byte 0x53, 0x19, 0x08, 0x25 -; .byte 0x53, 0x88, 0x16, 0xf4 -; .byte 0xd3, 0x19, 0x08, 0x25 -; .byte 0x53, 0x08, 0x17, 0xf4 -; .byte 0x53, 0x1a, 0x08, 0x25 -; .byte 0x53, 0x88, 0x17, 0xf4 -; .byte 0xd3, 0x1a, 0x08, 0x25 -; .byte 0xd3, 0x08, 0x10, 0xf4 -; .byte 0x53, 0x88, 0x18, 0xf4 -; .byte 0x53, 0x1b, 0x08, 0x25 -; .byte 0x53, 0x08, 0x19, 0xf4 -; .byte 0xd3, 0x1b, 0x08, 0x25 -; .byte 0x53, 0x88, 0x19, 0xf4 -; .byte 0x53, 0x1c, 0x08, 0x25 -; .byte 0x53, 0x08, 0x1a, 0xf4 -; .byte 0xd3, 0x1c, 0x08, 0x25 -; .byte 0x53, 0x88, 0x1a, 0xf4 -; .byte 0x53, 0x1d, 0x08, 0x25 -; .byte 0x53, 0x08, 0x1b, 0xf4 -; .byte 0xd3, 0x1d, 0x08, 0x25 -; .byte 0x53, 0x88, 0x1b, 0xf4 -; .byte 0x53, 0x10, 0x08, 0x25 -; .byte 0x53, 0x08, 0x1c, 0xf4 -; .byte 0xd3, 0x10, 0x08, 0x25 -; .byte 0x53, 0x88, 0x1c, 0xf4 -; .byte 0x53, 0x11, 0x08, 0x25 -; .byte 0x53, 0x08, 0x1d, 0xf4 -; .byte 0xd3, 0x11, 0x08, 0x25 -; .byte 0x53, 0x88, 0x1d, 0xf4 -; .byte 0x53, 0x12, 0x08, 0x25 -; .byte 0x53, 0x08, 0x1e, 0xf4 -; .byte 0xd3, 0x12, 0x08, 0x25 -; .byte 0x53, 0x08, 0x1f, 0xf4 -; .byte 0x53, 0x13, 0x08, 0x25 -; .byte 0x53, 0x88, 0x1f, 0xf4 -; .byte 0xd3, 0x13, 0x08, 0x25 +; .byte 0x53, 0x00, 0x13, 0xf4 +; .byte 0x53, 0x10, 0x00, 0x24 +; .byte 0xd3, 0x80, 0x13, 0xf4 +; .byte 0xd3, 0x90, 0x10, 0x24 +; .byte 0x53, 0x01, 0x14, 0xf4 +; .byte 0x53, 0x11, 0x21, 0x24 +; .byte 0xd3, 0x81, 0x14, 0xf4 +; .byte 0xd3, 0x91, 0x31, 0x24 +; 
.byte 0x53, 0x02, 0x15, 0xf4 +; .byte 0x53, 0x12, 0x42, 0x24 +; .byte 0xd3, 0x82, 0x15, 0xf4 +; .byte 0xd3, 0x92, 0x52, 0x24 +; .byte 0x53, 0x03, 0x16, 0xf4 +; .byte 0x53, 0x13, 0x63, 0x24 +; .byte 0xd3, 0x83, 0x16, 0xf4 +; .byte 0xd3, 0x93, 0x73, 0x24 +; .byte 0x53, 0x04, 0x17, 0xf4 +; .byte 0x53, 0x14, 0x84, 0x24 +; .byte 0xd3, 0x84, 0x17, 0xf4 +; .byte 0xd3, 0x94, 0x94, 0x24 +; .byte 0x53, 0x08, 0x10, 0xf4 +; .byte 0xd3, 0x88, 0x18, 0xf4 +; .byte 0xd3, 0x98, 0x18, 0x25 +; .byte 0x53, 0x09, 0x19, 0xf4 +; .byte 0x53, 0x19, 0x29, 0x25 +; .byte 0xd3, 0x89, 0x19, 0xf4 +; .byte 0xd3, 0x99, 0x39, 0x25 +; .byte 0x53, 0x0a, 0x1a, 0xf4 +; .byte 0x53, 0x1a, 0x4a, 0x25 +; .byte 0xd3, 0x8a, 0x1a, 0xf4 +; .byte 0xd3, 0x9a, 0x5a, 0x25 +; .byte 0x53, 0x0b, 0x1b, 0xf4 +; .byte 0x53, 0x1b, 0x6b, 0x25 +; .byte 0xd3, 0x8b, 0x1b, 0xf4 +; .byte 0xd3, 0x9b, 0x7b, 0x25 +; .byte 0x53, 0x0c, 0x1c, 0xf4 +; .byte 0x53, 0x1c, 0x8c, 0x25 +; .byte 0xd3, 0x8c, 0x1c, 0xf4 +; .byte 0xd3, 0x9c, 0x9c, 0x25 +; .byte 0x53, 0x0d, 0x1d, 0xf4 +; .byte 0x53, 0x1d, 0xad, 0x25 +; .byte 0xd3, 0x8d, 0x1d, 0xf4 +; .byte 0xd3, 0x9d, 0xbd, 0x25 +; .byte 0x53, 0x0e, 0x1e, 0xf4 +; .byte 0x53, 0x1e, 0xce, 0x25 +; .byte 0xd3, 0x0e, 0x1f, 0xf4 +; .byte 0xd3, 0x9e, 0xde, 0x25 +; .byte 0x53, 0x8f, 0x1f, 0xf4 +; .byte 0x53, 0x1f, 0xef, 0x25 ; .byte 0x27, 0x10, 0xc5, 0x00 ; .byte 0x27, 0x14, 0xd5, 0x00 ; .byte 0x27, 0x18, 0xe5, 0x00 ; .byte 0x27, 0x1c, 0xf5, 0x00 -; .byte 0x27, 0x10, 0xc5, 0x03 -; .byte 0x27, 0x14, 0xd5, 0x03 -; .byte 0x27, 0x18, 0xe5, 0x03 -; .byte 0x27, 0x1c, 0xf5, 0x03 -; .byte 0x27, 0x10, 0x85, 0x04 -; .byte 0x27, 0x14, 0x95, 0x04 -; .byte 0x27, 0x18, 0x25, 0x05 -; .byte 0x27, 0x1c, 0x35, 0x05 -; .byte 0x27, 0x10, 0x45, 0x07 -; .byte 0x27, 0x14, 0x55, 0x07 -; .byte 0x27, 0x18, 0x15, 0x07 -; .byte 0x27, 0x1c, 0x65, 0x07 -; .byte 0x27, 0x10, 0x75, 0x09 -; .byte 0x27, 0x14, 0x85, 0x09 -; .byte 0x27, 0x18, 0x95, 0x09 -; .byte 0x27, 0x1c, 0xa5, 0x09 -; .byte 0x27, 0x10, 0xb5, 0x0b -; .byte 0x27, 0x14, 0x05, 0x0a -; .byte 0x27, 0x18, 0x15, 0x0a -; .byte 0x27, 0x1c, 0x25, 0x0a -; .byte 0x27, 0x10, 0x35, 0x0c -; .byte 0x27, 0x14, 0x45, 0x0c -; .byte 0x27, 0x18, 0x55, 0x0c -; .byte 0x27, 0x1c, 0x65, 0x0c -; .byte 0x27, 0x10, 0x75, 0x0e +; .byte 0x27, 0x10, 0x05, 0x02 +; .byte 0x27, 0x14, 0x15, 0x02 +; .byte 0x27, 0x18, 0x25, 0x02 +; .byte 0x27, 0x1c, 0x35, 0x02 +; .byte 0x27, 0x10, 0x45, 0x04 +; .byte 0x27, 0x14, 0x55, 0x04 +; .byte 0x27, 0x18, 0x65, 0x04 +; .byte 0x27, 0x1c, 0x75, 0x04 +; .byte 0x27, 0x10, 0x85, 0x06 +; .byte 0x27, 0x14, 0x95, 0x06 +; .byte 0x27, 0x18, 0x05, 0x07 +; .byte 0x27, 0x1c, 0x15, 0x07 +; .byte 0x27, 0x10, 0x25, 0x09 +; .byte 0x27, 0x14, 0x35, 0x09 +; .byte 0x27, 0x18, 0x45, 0x09 +; .byte 0x27, 0x1c, 0x55, 0x09 +; .byte 0x27, 0x10, 0x65, 0x0b +; .byte 0x27, 0x14, 0x75, 0x0b +; .byte 0x27, 0x18, 0x85, 0x0b +; .byte 0x27, 0x1c, 0x95, 0x0b +; .byte 0x27, 0x10, 0xa5, 0x0d +; .byte 0x27, 0x14, 0xb5, 0x0d +; .byte 0x27, 0x18, 0xc5, 0x0d +; .byte 0x27, 0x1c, 0xd5, 0x0d +; .byte 0x27, 0x10, 0xe5, 0x0f ; fld fs0, 0x58(sp) ; fld fs2, 0x50(sp) ; fld fs3, 0x48(sp) @@ -1244,97 +1243,97 @@ block0: ; fsd fs11,8(sp) ; block0: ; fli.s fa0,1.0 -; fli.s fa2,min -; fneg.s fa1,fa2 +; fli.s fa1,min +; fneg.s fa1,fa1 ; fli.s fa2,2^-16 -; fneg.s fa4,fa2 -; fli.s fa2,2^-15 -; fneg.s fa5,fa2 -; fli.s fa2,2^-8 ; fneg.s fa2,fa2 -; fli.s fa3,2^-7 +; fli.s fa3,2^-15 ; fneg.s fa3,fa3 -; fli.s ft8,0.0625 -; fneg.s ft10,ft8 -; fli.s ft8,0.125 -; fneg.s ft11,ft8 -; fli.s ft8,0.25 -; fneg.s fs0,ft8 -; fli.s ft8,0.3125 -; fneg.s fs1,ft8 -; 
fli.s ft8,0.375 -; fneg.s fs2,ft8 -; fli.s ft8,0.4375 -; fneg.s fs3,ft8 -; fli.s ft8,0.5 -; fneg.s fs4,ft8 -; fli.s ft8,0.625 -; fneg.s fs5,ft8 -; fli.s ft8,0.75 -; fneg.s fs6,ft8 -; fli.s ft8,0.875 -; fneg.s fs7,ft8 -; fli.s ft9,-1.0 -; fli.s ft8,1.25 -; fneg.s fs8,ft8 -; fli.s ft8,1.5 -; fneg.s fs9,ft8 -; fli.s ft8,1.75 -; fneg.s fs10,ft8 -; fli.s ft8,2.0 -; fneg.s fs11,ft8 -; fli.s ft8,2.5 -; fneg.s ft0,ft8 -; fli.s ft8,3.0 -; fneg.s ft1,ft8 -; fli.s ft8,4.0 -; fneg.s ft2,ft8 -; fli.s ft8,8.0 -; fneg.s ft3,ft8 -; fli.s ft8,16.0 -; fneg.s ft4,ft8 -; fli.s ft8,128.0 -; fneg.s ft5,ft8 -; fli.s ft8,256.0 -; fneg.s ft6,ft8 +; fli.s fa4,2^-8 +; fneg.s fa4,fa4 +; fli.s fa5,2^-7 +; fneg.s fa5,fa5 +; fli.s ft0,0.0625 +; fneg.s ft1,ft0 +; fli.s ft0,0.125 +; fneg.s ft2,ft0 +; fli.s ft0,0.25 +; fneg.s ft3,ft0 +; fli.s ft0,0.3125 +; fneg.s ft4,ft0 +; fli.s ft0,0.375 +; fneg.s ft5,ft0 +; fli.s ft0,0.4375 +; fneg.s ft6,ft0 +; fli.s ft0,0.5 +; fneg.s ft7,ft0 +; fli.s ft0,0.625 +; fneg.s fs0,ft0 +; fli.s ft0,0.75 +; fneg.s fs1,ft0 +; fli.s ft0,0.875 +; fneg.s fa6,ft0 +; fli.s ft0,-1.0 +; fli.s fa7,1.25 +; fneg.s fa7,fa7 +; fli.s fs2,1.5 +; fneg.s fs2,fs2 +; fli.s fs3,1.75 +; fneg.s fs3,fs3 +; fli.s fs4,2.0 +; fneg.s fs4,fs4 +; fli.s fs5,2.5 +; fneg.s fs5,fs5 +; fli.s fs6,3.0 +; fneg.s fs6,fs6 +; fli.s fs7,4.0 +; fneg.s fs7,fs7 +; fli.s fs8,8.0 +; fneg.s fs8,fs8 +; fli.s fs9,16.0 +; fneg.s fs9,fs9 +; fli.s fs10,128.0 +; fneg.s fs10,fs10 +; fli.s fs11,256.0 +; fneg.s fs11,fs11 ; fli.s ft8,32768.0 -; fneg.s ft7,ft8 -; fli.s ft8,65536.0 -; fneg.s fa6,ft8 -; fli.s ft8,inf -; fneg.s fa7,ft8 -; fli.s ft8,nan ; fneg.s ft8,ft8 -; fsw fa4,0(a0) -; fsw fa5,8(a0) -; fsw fa2,16(a0) -; fsw fa3,24(a0) -; fsw ft10,32(a0) -; fsw ft11,40(a0) -; fsw fs0,48(a0) -; fsw fs1,56(a0) -; fsw fs2,64(a0) -; fsw fs3,72(a0) -; fsw fs4,80(a0) -; fsw fs5,88(a0) -; fsw fs6,96(a0) -; fsw fs7,104(a0) -; fsw ft9,112(a0) -; fsw fs8,120(a0) -; fsw fs9,128(a0) -; fsw fs10,136(a0) -; fsw fs11,144(a0) -; fsw ft0,152(a0) -; fsw ft1,160(a0) -; fsw ft2,168(a0) -; fsw ft3,176(a0) -; fsw ft4,184(a0) -; fsw ft5,192(a0) -; fsw ft6,200(a0) -; fsw ft7,208(a0) -; fsw fa6,216(a0) -; fsw fa7,224(a0) -; fsw ft8,232(a0) +; fli.s ft9,65536.0 +; fneg.s ft9,ft9 +; fli.s ft10,inf +; fneg.s ft10,ft10 +; fli.s ft11,nan +; fneg.s ft11,ft11 +; fsw fa2,0(a0) +; fsw fa3,8(a0) +; fsw fa4,16(a0) +; fsw fa5,24(a0) +; fsw ft1,32(a0) +; fsw ft2,40(a0) +; fsw ft3,48(a0) +; fsw ft4,56(a0) +; fsw ft5,64(a0) +; fsw ft6,72(a0) +; fsw ft7,80(a0) +; fsw fs0,88(a0) +; fsw fs1,96(a0) +; fsw fa6,104(a0) +; fsw ft0,112(a0) +; fsw fa7,120(a0) +; fsw fs2,128(a0) +; fsw fs3,136(a0) +; fsw fs4,144(a0) +; fsw fs5,152(a0) +; fsw fs6,160(a0) +; fsw fs7,168(a0) +; fsw fs8,176(a0) +; fsw fs9,184(a0) +; fsw fs10,192(a0) +; fsw fs11,200(a0) +; fsw ft8,208(a0) +; fsw ft9,216(a0) +; fsw ft10,224(a0) +; fsw ft11,232(a0) ; fld fs0,88(sp) ; fld fs2,80(sp) ; fld fs3,72(sp) @@ -1372,97 +1371,97 @@ block0: ; fsd fs11, 8(sp) ; block1: ; offset 0x40 ; .byte 0x53, 0x05, 0x18, 0xf0 -; .byte 0x53, 0x86, 0x10, 0xf0 -; fneg.s fa1, fa2 +; .byte 0xd3, 0x85, 0x10, 0xf0 +; fneg.s fa1, fa1 ; .byte 0x53, 0x06, 0x11, 0xf0 -; fneg.s fa4, fa2 -; .byte 0x53, 0x86, 0x11, 0xf0 -; fneg.s fa5, fa2 -; .byte 0x53, 0x06, 0x12, 0xf0 ; fneg.s fa2, fa2 -; .byte 0xd3, 0x86, 0x12, 0xf0 +; .byte 0xd3, 0x86, 0x11, 0xf0 ; fneg.s fa3, fa3 -; .byte 0x53, 0x0e, 0x13, 0xf0 -; fneg.s ft10, ft8 -; .byte 0x53, 0x8e, 0x13, 0xf0 -; fneg.s ft11, ft8 -; .byte 0x53, 0x0e, 0x14, 0xf0 -; fneg.s fs0, ft8 -; .byte 0x53, 0x8e, 0x14, 0xf0 -; fneg.s 
fs1, ft8 -; .byte 0x53, 0x0e, 0x15, 0xf0 -; fneg.s fs2, ft8 -; .byte 0x53, 0x8e, 0x15, 0xf0 -; fneg.s fs3, ft8 -; .byte 0x53, 0x0e, 0x16, 0xf0 -; fneg.s fs4, ft8 -; .byte 0x53, 0x8e, 0x16, 0xf0 -; fneg.s fs5, ft8 -; .byte 0x53, 0x0e, 0x17, 0xf0 -; fneg.s fs6, ft8 -; .byte 0x53, 0x8e, 0x17, 0xf0 -; fneg.s fs7, ft8 -; .byte 0xd3, 0x0e, 0x10, 0xf0 -; .byte 0x53, 0x8e, 0x18, 0xf0 -; fneg.s fs8, ft8 -; .byte 0x53, 0x0e, 0x19, 0xf0 -; fneg.s fs9, ft8 -; .byte 0x53, 0x8e, 0x19, 0xf0 -; fneg.s fs10, ft8 -; .byte 0x53, 0x0e, 0x1a, 0xf0 -; fneg.s fs11, ft8 -; .byte 0x53, 0x8e, 0x1a, 0xf0 -; fneg.s ft0, ft8 -; .byte 0x53, 0x0e, 0x1b, 0xf0 -; fneg.s ft1, ft8 -; .byte 0x53, 0x8e, 0x1b, 0xf0 -; fneg.s ft2, ft8 -; .byte 0x53, 0x0e, 0x1c, 0xf0 -; fneg.s ft3, ft8 -; .byte 0x53, 0x8e, 0x1c, 0xf0 -; fneg.s ft4, ft8 -; .byte 0x53, 0x0e, 0x1d, 0xf0 -; fneg.s ft5, ft8 -; .byte 0x53, 0x8e, 0x1d, 0xf0 -; fneg.s ft6, ft8 +; .byte 0x53, 0x07, 0x12, 0xf0 +; fneg.s fa4, fa4 +; .byte 0xd3, 0x87, 0x12, 0xf0 +; fneg.s fa5, fa5 +; .byte 0x53, 0x00, 0x13, 0xf0 +; fneg.s ft1, ft0 +; .byte 0x53, 0x80, 0x13, 0xf0 +; fneg.s ft2, ft0 +; .byte 0x53, 0x00, 0x14, 0xf0 +; fneg.s ft3, ft0 +; .byte 0x53, 0x80, 0x14, 0xf0 +; fneg.s ft4, ft0 +; .byte 0x53, 0x00, 0x15, 0xf0 +; fneg.s ft5, ft0 +; .byte 0x53, 0x80, 0x15, 0xf0 +; fneg.s ft6, ft0 +; .byte 0x53, 0x00, 0x16, 0xf0 +; fneg.s ft7, ft0 +; .byte 0x53, 0x80, 0x16, 0xf0 +; fneg.s fs0, ft0 +; .byte 0x53, 0x00, 0x17, 0xf0 +; fneg.s fs1, ft0 +; .byte 0x53, 0x80, 0x17, 0xf0 +; fneg.s fa6, ft0 +; .byte 0x53, 0x00, 0x10, 0xf0 +; .byte 0xd3, 0x88, 0x18, 0xf0 +; fneg.s fa7, fa7 +; .byte 0x53, 0x09, 0x19, 0xf0 +; fneg.s fs2, fs2 +; .byte 0xd3, 0x89, 0x19, 0xf0 +; fneg.s fs3, fs3 +; .byte 0x53, 0x0a, 0x1a, 0xf0 +; fneg.s fs4, fs4 +; .byte 0xd3, 0x8a, 0x1a, 0xf0 +; fneg.s fs5, fs5 +; .byte 0x53, 0x0b, 0x1b, 0xf0 +; fneg.s fs6, fs6 +; .byte 0xd3, 0x8b, 0x1b, 0xf0 +; fneg.s fs7, fs7 +; .byte 0x53, 0x0c, 0x1c, 0xf0 +; fneg.s fs8, fs8 +; .byte 0xd3, 0x8c, 0x1c, 0xf0 +; fneg.s fs9, fs9 +; .byte 0x53, 0x0d, 0x1d, 0xf0 +; fneg.s fs10, fs10 +; .byte 0xd3, 0x8d, 0x1d, 0xf0 +; fneg.s fs11, fs11 ; .byte 0x53, 0x0e, 0x1e, 0xf0 -; fneg.s ft7, ft8 -; .byte 0x53, 0x8e, 0x1e, 0xf0 -; fneg.s fa6, ft8 -; .byte 0x53, 0x0e, 0x1f, 0xf0 -; fneg.s fa7, ft8 -; .byte 0x53, 0x8e, 0x1f, 0xf0 ; fneg.s ft8, ft8 -; fsw fa4, 0(a0) -; fsw fa5, 8(a0) -; fsw fa2, 0x10(a0) -; fsw fa3, 0x18(a0) -; fsw ft10, 0x20(a0) -; fsw ft11, 0x28(a0) -; fsw fs0, 0x30(a0) -; fsw fs1, 0x38(a0) -; fsw fs2, 0x40(a0) -; fsw fs3, 0x48(a0) -; fsw fs4, 0x50(a0) -; fsw fs5, 0x58(a0) -; fsw fs6, 0x60(a0) -; fsw fs7, 0x68(a0) -; fsw ft9, 0x70(a0) -; fsw fs8, 0x78(a0) -; fsw fs9, 0x80(a0) -; fsw fs10, 0x88(a0) -; fsw fs11, 0x90(a0) -; fsw ft0, 0x98(a0) -; fsw ft1, 0xa0(a0) -; fsw ft2, 0xa8(a0) -; fsw ft3, 0xb0(a0) -; fsw ft4, 0xb8(a0) -; fsw ft5, 0xc0(a0) -; fsw ft6, 0xc8(a0) -; fsw ft7, 0xd0(a0) -; fsw fa6, 0xd8(a0) -; fsw fa7, 0xe0(a0) -; fsw ft8, 0xe8(a0) +; .byte 0xd3, 0x8e, 0x1e, 0xf0 +; fneg.s ft9, ft9 +; .byte 0x53, 0x0f, 0x1f, 0xf0 +; fneg.s ft10, ft10 +; .byte 0xd3, 0x8f, 0x1f, 0xf0 +; fneg.s ft11, ft11 +; fsw fa2, 0(a0) +; fsw fa3, 8(a0) +; fsw fa4, 0x10(a0) +; fsw fa5, 0x18(a0) +; fsw ft1, 0x20(a0) +; fsw ft2, 0x28(a0) +; fsw ft3, 0x30(a0) +; fsw ft4, 0x38(a0) +; fsw ft5, 0x40(a0) +; fsw ft6, 0x48(a0) +; fsw ft7, 0x50(a0) +; fsw fs0, 0x58(a0) +; fsw fs1, 0x60(a0) +; fsw fa6, 0x68(a0) +; fsw ft0, 0x70(a0) +; fsw fa7, 0x78(a0) +; fsw fs2, 0x80(a0) +; fsw fs3, 0x88(a0) +; fsw fs4, 0x90(a0) +; fsw fs5, 0x98(a0) +; fsw fs6, 0xa0(a0) +; fsw fs7, 
0xa8(a0) +; fsw fs8, 0xb0(a0) +; fsw fs9, 0xb8(a0) +; fsw fs10, 0xc0(a0) +; fsw fs11, 0xc8(a0) +; fsw ft8, 0xd0(a0) +; fsw ft9, 0xd8(a0) +; fsw ft10, 0xe0(a0) +; fsw ft11, 0xe8(a0) ; fld fs0, 0x58(sp) ; fld fs2, 0x50(sp) ; fld fs3, 0x48(sp) @@ -1537,97 +1536,97 @@ block0: ; fsd fs11,8(sp) ; block0: ; fli.d fa0,1.0 -; fli.d fa2,min -; fneg.d fa1,fa2 +; fli.d fa1,min +; fneg.d fa1,fa1 ; fli.d fa2,2^-16 -; fneg.d fa4,fa2 -; fli.d fa2,2^-15 -; fneg.d fa5,fa2 -; fli.d fa2,2^-8 ; fneg.d fa2,fa2 -; fli.d fa3,2^-7 +; fli.d fa3,2^-15 ; fneg.d fa3,fa3 -; fli.d ft8,0.0625 -; fneg.d ft10,ft8 -; fli.d ft8,0.125 -; fneg.d ft11,ft8 -; fli.d ft8,0.25 -; fneg.d fs0,ft8 -; fli.d ft8,0.3125 -; fneg.d fs1,ft8 -; fli.d ft8,0.375 -; fneg.d fs2,ft8 -; fli.d ft8,0.4375 -; fneg.d fs3,ft8 -; fli.d ft8,0.5 -; fneg.d fs4,ft8 -; fli.d ft8,0.625 -; fneg.d fs5,ft8 -; fli.d ft8,0.75 -; fneg.d fs6,ft8 -; fli.d ft8,0.875 -; fneg.d fs7,ft8 -; fli.d ft9,-1.0 -; fli.d ft8,1.25 -; fneg.d fs8,ft8 -; fli.d ft8,1.5 -; fneg.d fs9,ft8 -; fli.d ft8,1.75 -; fneg.d fs10,ft8 -; fli.d ft8,2.0 -; fneg.d fs11,ft8 -; fli.d ft8,2.5 -; fneg.d ft0,ft8 -; fli.d ft8,3.0 -; fneg.d ft1,ft8 -; fli.d ft8,4.0 -; fneg.d ft2,ft8 -; fli.d ft8,8.0 -; fneg.d ft3,ft8 -; fli.d ft8,16.0 -; fneg.d ft4,ft8 -; fli.d ft8,128.0 -; fneg.d ft5,ft8 -; fli.d ft8,256.0 -; fneg.d ft6,ft8 +; fli.d fa4,2^-8 +; fneg.d fa4,fa4 +; fli.d fa5,2^-7 +; fneg.d fa5,fa5 +; fli.d ft0,0.0625 +; fneg.d ft1,ft0 +; fli.d ft0,0.125 +; fneg.d ft2,ft0 +; fli.d ft0,0.25 +; fneg.d ft3,ft0 +; fli.d ft0,0.3125 +; fneg.d ft4,ft0 +; fli.d ft0,0.375 +; fneg.d ft5,ft0 +; fli.d ft0,0.4375 +; fneg.d ft6,ft0 +; fli.d ft0,0.5 +; fneg.d ft7,ft0 +; fli.d ft0,0.625 +; fneg.d fs0,ft0 +; fli.d ft0,0.75 +; fneg.d fs1,ft0 +; fli.d ft0,0.875 +; fneg.d fa6,ft0 +; fli.d ft0,-1.0 +; fli.d fa7,1.25 +; fneg.d fa7,fa7 +; fli.d fs2,1.5 +; fneg.d fs2,fs2 +; fli.d fs3,1.75 +; fneg.d fs3,fs3 +; fli.d fs4,2.0 +; fneg.d fs4,fs4 +; fli.d fs5,2.5 +; fneg.d fs5,fs5 +; fli.d fs6,3.0 +; fneg.d fs6,fs6 +; fli.d fs7,4.0 +; fneg.d fs7,fs7 +; fli.d fs8,8.0 +; fneg.d fs8,fs8 +; fli.d fs9,16.0 +; fneg.d fs9,fs9 +; fli.d fs10,128.0 +; fneg.d fs10,fs10 +; fli.d fs11,256.0 +; fneg.d fs11,fs11 ; fli.d ft8,32768.0 -; fneg.d ft7,ft8 -; fli.d ft8,65536.0 -; fneg.d fa6,ft8 -; fli.d ft8,inf -; fneg.d fa7,ft8 -; fli.d ft8,nan ; fneg.d ft8,ft8 -; fsd fa4,0(a0) -; fsd fa5,8(a0) -; fsd fa2,16(a0) -; fsd fa3,24(a0) -; fsd ft10,32(a0) -; fsd ft11,40(a0) -; fsd fs0,48(a0) -; fsd fs1,56(a0) -; fsd fs2,64(a0) -; fsd fs3,72(a0) -; fsd fs4,80(a0) -; fsd fs5,88(a0) -; fsd fs6,96(a0) -; fsd fs7,104(a0) -; fsd ft9,112(a0) -; fsd fs8,120(a0) -; fsd fs9,128(a0) -; fsd fs10,136(a0) -; fsd fs11,144(a0) -; fsd ft0,152(a0) -; fsd ft1,160(a0) -; fsd ft2,168(a0) -; fsd ft3,176(a0) -; fsd ft4,184(a0) -; fsd ft5,192(a0) -; fsd ft6,200(a0) -; fsd ft7,208(a0) -; fsd fa6,216(a0) -; fsd fa7,224(a0) -; fsd ft8,232(a0) +; fli.d ft9,65536.0 +; fneg.d ft9,ft9 +; fli.d ft10,inf +; fneg.d ft10,ft10 +; fli.d ft11,nan +; fneg.d ft11,ft11 +; fsd fa2,0(a0) +; fsd fa3,8(a0) +; fsd fa4,16(a0) +; fsd fa5,24(a0) +; fsd ft1,32(a0) +; fsd ft2,40(a0) +; fsd ft3,48(a0) +; fsd ft4,56(a0) +; fsd ft5,64(a0) +; fsd ft6,72(a0) +; fsd ft7,80(a0) +; fsd fs0,88(a0) +; fsd fs1,96(a0) +; fsd fa6,104(a0) +; fsd ft0,112(a0) +; fsd fa7,120(a0) +; fsd fs2,128(a0) +; fsd fs3,136(a0) +; fsd fs4,144(a0) +; fsd fs5,152(a0) +; fsd fs6,160(a0) +; fsd fs7,168(a0) +; fsd fs8,176(a0) +; fsd fs9,184(a0) +; fsd fs10,192(a0) +; fsd fs11,200(a0) +; fsd ft8,208(a0) +; fsd ft9,216(a0) +; fsd ft10,224(a0) +; fsd 
ft11,232(a0) ; fld fs0,88(sp) ; fld fs2,80(sp) ; fld fs3,72(sp) @@ -1665,97 +1664,97 @@ block0: ; fsd fs11, 8(sp) ; block1: ; offset 0x40 ; .byte 0x53, 0x05, 0x18, 0xf2 -; .byte 0x53, 0x86, 0x10, 0xf2 -; fneg.d fa1, fa2 +; .byte 0xd3, 0x85, 0x10, 0xf2 +; fneg.d fa1, fa1 ; .byte 0x53, 0x06, 0x11, 0xf2 -; fneg.d fa4, fa2 -; .byte 0x53, 0x86, 0x11, 0xf2 -; fneg.d fa5, fa2 -; .byte 0x53, 0x06, 0x12, 0xf2 ; fneg.d fa2, fa2 -; .byte 0xd3, 0x86, 0x12, 0xf2 +; .byte 0xd3, 0x86, 0x11, 0xf2 ; fneg.d fa3, fa3 -; .byte 0x53, 0x0e, 0x13, 0xf2 -; fneg.d ft10, ft8 -; .byte 0x53, 0x8e, 0x13, 0xf2 -; fneg.d ft11, ft8 -; .byte 0x53, 0x0e, 0x14, 0xf2 -; fneg.d fs0, ft8 -; .byte 0x53, 0x8e, 0x14, 0xf2 -; fneg.d fs1, ft8 -; .byte 0x53, 0x0e, 0x15, 0xf2 -; fneg.d fs2, ft8 -; .byte 0x53, 0x8e, 0x15, 0xf2 -; fneg.d fs3, ft8 -; .byte 0x53, 0x0e, 0x16, 0xf2 -; fneg.d fs4, ft8 -; .byte 0x53, 0x8e, 0x16, 0xf2 -; fneg.d fs5, ft8 -; .byte 0x53, 0x0e, 0x17, 0xf2 -; fneg.d fs6, ft8 -; .byte 0x53, 0x8e, 0x17, 0xf2 -; fneg.d fs7, ft8 -; .byte 0xd3, 0x0e, 0x10, 0xf2 -; .byte 0x53, 0x8e, 0x18, 0xf2 -; fneg.d fs8, ft8 -; .byte 0x53, 0x0e, 0x19, 0xf2 -; fneg.d fs9, ft8 -; .byte 0x53, 0x8e, 0x19, 0xf2 -; fneg.d fs10, ft8 -; .byte 0x53, 0x0e, 0x1a, 0xf2 -; fneg.d fs11, ft8 -; .byte 0x53, 0x8e, 0x1a, 0xf2 -; fneg.d ft0, ft8 -; .byte 0x53, 0x0e, 0x1b, 0xf2 -; fneg.d ft1, ft8 -; .byte 0x53, 0x8e, 0x1b, 0xf2 -; fneg.d ft2, ft8 -; .byte 0x53, 0x0e, 0x1c, 0xf2 -; fneg.d ft3, ft8 -; .byte 0x53, 0x8e, 0x1c, 0xf2 -; fneg.d ft4, ft8 -; .byte 0x53, 0x0e, 0x1d, 0xf2 -; fneg.d ft5, ft8 -; .byte 0x53, 0x8e, 0x1d, 0xf2 -; fneg.d ft6, ft8 +; .byte 0x53, 0x07, 0x12, 0xf2 +; fneg.d fa4, fa4 +; .byte 0xd3, 0x87, 0x12, 0xf2 +; fneg.d fa5, fa5 +; .byte 0x53, 0x00, 0x13, 0xf2 +; fneg.d ft1, ft0 +; .byte 0x53, 0x80, 0x13, 0xf2 +; fneg.d ft2, ft0 +; .byte 0x53, 0x00, 0x14, 0xf2 +; fneg.d ft3, ft0 +; .byte 0x53, 0x80, 0x14, 0xf2 +; fneg.d ft4, ft0 +; .byte 0x53, 0x00, 0x15, 0xf2 +; fneg.d ft5, ft0 +; .byte 0x53, 0x80, 0x15, 0xf2 +; fneg.d ft6, ft0 +; .byte 0x53, 0x00, 0x16, 0xf2 +; fneg.d ft7, ft0 +; .byte 0x53, 0x80, 0x16, 0xf2 +; fneg.d fs0, ft0 +; .byte 0x53, 0x00, 0x17, 0xf2 +; fneg.d fs1, ft0 +; .byte 0x53, 0x80, 0x17, 0xf2 +; fneg.d fa6, ft0 +; .byte 0x53, 0x00, 0x10, 0xf2 +; .byte 0xd3, 0x88, 0x18, 0xf2 +; fneg.d fa7, fa7 +; .byte 0x53, 0x09, 0x19, 0xf2 +; fneg.d fs2, fs2 +; .byte 0xd3, 0x89, 0x19, 0xf2 +; fneg.d fs3, fs3 +; .byte 0x53, 0x0a, 0x1a, 0xf2 +; fneg.d fs4, fs4 +; .byte 0xd3, 0x8a, 0x1a, 0xf2 +; fneg.d fs5, fs5 +; .byte 0x53, 0x0b, 0x1b, 0xf2 +; fneg.d fs6, fs6 +; .byte 0xd3, 0x8b, 0x1b, 0xf2 +; fneg.d fs7, fs7 +; .byte 0x53, 0x0c, 0x1c, 0xf2 +; fneg.d fs8, fs8 +; .byte 0xd3, 0x8c, 0x1c, 0xf2 +; fneg.d fs9, fs9 +; .byte 0x53, 0x0d, 0x1d, 0xf2 +; fneg.d fs10, fs10 +; .byte 0xd3, 0x8d, 0x1d, 0xf2 +; fneg.d fs11, fs11 ; .byte 0x53, 0x0e, 0x1e, 0xf2 -; fneg.d ft7, ft8 -; .byte 0x53, 0x8e, 0x1e, 0xf2 -; fneg.d fa6, ft8 -; .byte 0x53, 0x0e, 0x1f, 0xf2 -; fneg.d fa7, ft8 -; .byte 0x53, 0x8e, 0x1f, 0xf2 ; fneg.d ft8, ft8 -; fsd fa4, 0(a0) -; fsd fa5, 8(a0) -; fsd fa2, 0x10(a0) -; fsd fa3, 0x18(a0) -; fsd ft10, 0x20(a0) -; fsd ft11, 0x28(a0) -; fsd fs0, 0x30(a0) -; fsd fs1, 0x38(a0) -; fsd fs2, 0x40(a0) -; fsd fs3, 0x48(a0) -; fsd fs4, 0x50(a0) -; fsd fs5, 0x58(a0) -; fsd fs6, 0x60(a0) -; fsd fs7, 0x68(a0) -; fsd ft9, 0x70(a0) -; fsd fs8, 0x78(a0) -; fsd fs9, 0x80(a0) -; fsd fs10, 0x88(a0) -; fsd fs11, 0x90(a0) -; fsd ft0, 0x98(a0) -; fsd ft1, 0xa0(a0) -; fsd ft2, 0xa8(a0) -; fsd ft3, 0xb0(a0) -; fsd ft4, 0xb8(a0) -; fsd ft5, 0xc0(a0) -; fsd 
ft6, 0xc8(a0) -; fsd ft7, 0xd0(a0) -; fsd fa6, 0xd8(a0) -; fsd fa7, 0xe0(a0) -; fsd ft8, 0xe8(a0) +; .byte 0xd3, 0x8e, 0x1e, 0xf2 +; fneg.d ft9, ft9 +; .byte 0x53, 0x0f, 0x1f, 0xf2 +; fneg.d ft10, ft10 +; .byte 0xd3, 0x8f, 0x1f, 0xf2 +; fneg.d ft11, ft11 +; fsd fa2, 0(a0) +; fsd fa3, 8(a0) +; fsd fa4, 0x10(a0) +; fsd fa5, 0x18(a0) +; fsd ft1, 0x20(a0) +; fsd ft2, 0x28(a0) +; fsd ft3, 0x30(a0) +; fsd ft4, 0x38(a0) +; fsd ft5, 0x40(a0) +; fsd ft6, 0x48(a0) +; fsd ft7, 0x50(a0) +; fsd fs0, 0x58(a0) +; fsd fs1, 0x60(a0) +; fsd fa6, 0x68(a0) +; fsd ft0, 0x70(a0) +; fsd fa7, 0x78(a0) +; fsd fs2, 0x80(a0) +; fsd fs3, 0x88(a0) +; fsd fs4, 0x90(a0) +; fsd fs5, 0x98(a0) +; fsd fs6, 0xa0(a0) +; fsd fs7, 0xa8(a0) +; fsd fs8, 0xb0(a0) +; fsd fs9, 0xb8(a0) +; fsd fs10, 0xc0(a0) +; fsd fs11, 0xc8(a0) +; fsd ft8, 0xd0(a0) +; fsd ft9, 0xd8(a0) +; fsd ft10, 0xe0(a0) +; fsd ft11, 0xe8(a0) ; fld fs0, 0x58(sp) ; fld fs2, 0x50(sp) ; fld fs3, 0x48(sp) diff --git a/cranelift/filetests/filetests/isa/riscv64/zicond.clif b/cranelift/filetests/filetests/isa/riscv64/zicond.clif index 00b0c37dd024..4479497f6d6a 100644 --- a/cranelift/filetests/filetests/isa/riscv64/zicond.clif +++ b/cranelift/filetests/filetests/isa/riscv64/zicond.clif @@ -104,16 +104,16 @@ block0(v0: i64, v1: i64, v2: i64): ; VCode: ; block0: -; czero.nez a4,a1,a0 +; czero.nez a1,a1,a0 ; czero.eqz a0,a2,a0 -; or a0,a4,a0 +; or a0,a1,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x33, 0xf7, 0xa5, 0x0e +; .byte 0xb3, 0xf5, 0xa5, 0x0e ; .byte 0x33, 0x55, 0xa6, 0x0e -; or a0, a4, a0 +; or a0, a1, a0 ; ret function %select_icmp_ne_zero(i64, i64, i64) -> i64 { @@ -126,19 +126,18 @@ block0(v0: i64, v1: i64, v2: i64): ; VCode: ; block0: -; czero.eqz a4,a1,a0 +; czero.eqz a1,a1,a0 ; czero.nez a0,a2,a0 -; or a0,a4,a0 +; or a0,a1,a0 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; .byte 0x33, 0xd7, 0xa5, 0x0e +; .byte 0xb3, 0xd5, 0xa5, 0x0e ; .byte 0x33, 0x75, 0xa6, 0x0e -; or a0, a4, a0 +; or a0, a1, a0 ; ret - function %select_icmp_sle(i64, i64, i64, i64) -> i64 { block0(v0: i64, v1: i64, v2: i64, v3: i64): v4 = icmp.i64 slt v0, v1 @@ -148,18 +147,18 @@ block0(v0: i64, v1: i64, v2: i64, v3: i64): ; VCode: ; block0: -; slt a5,a0,a1 -; czero.eqz a1,a2,a5 -; czero.nez a3,a3,a5 -; or a0,a1,a3 +; slt a1,a0,a1 +; czero.eqz a0,a2,a1 +; czero.nez a1,a3,a1 +; or a0,a0,a1 ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; slt a5, a0, a1 -; .byte 0xb3, 0x55, 0xf6, 0x0e -; .byte 0xb3, 0xf6, 0xf6, 0x0e -; or a0, a1, a3 +; slt a1, a0, a1 +; .byte 0x33, 0x55, 0xb6, 0x0e +; .byte 0xb3, 0xf5, 0xb6, 0x0e +; or a0, a0, a1 ; ret ;; This can trigger a stack overflow with if the rules don't prevent @@ -174,12 +173,13 @@ block0(v0: i64, v1: i64, v2: i64): ; VCode: ; block0: -; li a1,0 -; czero.nez a0,a1,zero +; li a0,0 +; czero.nez a0,a0,zero ; ret ; ; Disassembled: ; block0: ; offset 0x0 -; mv a1, zero -; .byte 0x33, 0xf5, 0x05, 0x0e +; mv a0, zero +; .byte 0x33, 0x75, 0x05, 0x0e ; ret + diff --git a/cranelift/filetests/filetests/isa/s390x/arithmetic-arch15.clif b/cranelift/filetests/filetests/isa/s390x/arithmetic-arch15.clif index 2ea9a070ed26..e2f9046560e4 100644 --- a/cranelift/filetests/filetests/isa/s390x/arithmetic-arch15.clif +++ b/cranelift/filetests/filetests/isa/s390x/arithmetic-arch15.clif @@ -88,11 +88,11 @@ block0(v0: i128, v1: i128): ; vl %v3, 0(%r4) ; vlcq %v6, %v3 ; veval %v16, %v1, %v3, %v6, 9 -; vrepib %v18, 255 -; vecq %v16, %v18 +; vrepib %v17, 255 +; vecq %v16, %v17 ; jge .+2 # trap=int_ovf -; vdq %v22, %v1, %v3, 0 -; vst %v22, 0(%r2) +; vdq %v16, 
%v1, %v3, 0 +; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: @@ -102,14 +102,14 @@ block0(v0: i128, v1: i128): ; vlc %v6, %v3, 4 ; .byte 0xe7, 0x01 ; lper %f0, %f9 -; ld %f8, 0x720(%r8, %r14) +; ld %f8, 0x710(%r8, %r14) ; .byte 0x00, 0xff ; .byte 0x08, 0x45 -; vec %v16, %v18, 4 +; vec %v16, %v17, 4 ; jge 0x26 ; trap: int_ovf -; .byte 0xe7, 0x61 +; .byte 0xe7, 0x01 ; lper %f0, %f0 -; lh %r11, 0x760(%r2, %r14) ; trap: int_divz +; lh %r11, 0x700(%r2, %r14) ; trap: int_divz ; lpdr %f0, %f0 ; .byte 0x08, 0x0e ; br %r14 @@ -153,8 +153,8 @@ block0(v0: i128, v1: i128): ; vgbm %v16, 0 ; vecq %v3, %v7 ; jne 10 ; vlr %v6, %v16 -; vrq %v21, %v6, %v3, 0 -; vst %v21, 0(%r2) +; vrq %v16, %v6, %v3, 0 +; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: @@ -166,9 +166,9 @@ block0(v0: i128, v1: i128): ; vec %v3, %v7, 4 ; jne 0x28 ; vlr %v6, %v16 -; .byte 0xe7, 0x56 +; .byte 0xe7, 0x06 ; lper %f0, %f0 -; lh %r11, 0x750(%r3, %r14) ; trap: int_divz +; lh %r11, 0x700(%r3, %r14) ; trap: int_divz ; lpdr %f0, %f0 ; .byte 0x08, 0x0e ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/arithmetic.clif b/cranelift/filetests/filetests/isa/s390x/arithmetic.clif index 6d4378562d92..ae819219032d 100644 --- a/cranelift/filetests/filetests/isa/s390x/arithmetic.clif +++ b/cranelift/filetests/filetests/isa/s390x/arithmetic.clif @@ -352,14 +352,14 @@ block0(v0: i8, v1: i64): ; VCode: ; block0: -; llc %r3, 0(%r3) -; ar %r2, %r3 +; llc %r4, 0(%r3) +; ar %r2, %r4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llc %r3, 0(%r3) ; trap: heap_oob -; ar %r2, %r3 +; llc %r4, 0(%r3) ; trap: heap_oob +; ar %r2, %r4 ; br %r14 function %isub_i128(i128, i128) -> i128 { @@ -712,14 +712,14 @@ block0(v0: i8, v1: i64): ; VCode: ; block0: -; llc %r3, 0(%r3) -; sr %r2, %r3 +; llc %r4, 0(%r3) +; sr %r2, %r4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llc %r3, 0(%r3) ; trap: heap_oob -; sr %r2, %r3 +; llc %r4, 0(%r3) ; trap: heap_oob +; sr %r2, %r4 ; br %r14 function %iabs_i128(i128) -> i128 { @@ -734,9 +734,9 @@ block0(v0: i128): ; vgbm %v4, 0 ; vsq %v6, %v4, %v1 ; vrepg %v16, %v1, 0 -; vchg %v18, %v4, %v16 -; vsel %v20, %v6, %v1, %v18 -; vst %v20, 0(%r2) +; vchg %v16, %v4, %v16 +; vsel %v16, %v6, %v1, %v16 +; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: @@ -745,9 +745,9 @@ block0(v0: i128): ; vzero %v4 ; vsq %v6, %v4, %v1 ; vrepg %v16, %v1, 0 -; vchg %v18, %v4, %v16 -; vsel %v20, %v6, %v1, %v18 -; vst %v20, 0(%r2) +; vchg %v16, %v4, %v16 +; vsel %v16, %v6, %v1, %v16 +; vst %v16, 0(%r2) ; br %r14 function %iabs_i64(i64) -> i64 { @@ -807,14 +807,14 @@ block0(v0: i16): ; VCode: ; block0: -; lhr %r4, %r2 -; lpr %r2, %r4 +; lhr %r2, %r2 +; lpr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lhr %r4, %r2 -; lpr %r2, %r4 +; lhr %r2, %r2 +; lpr %r2, %r2 ; br %r14 function %iabs_i8(i8) -> i8 { @@ -825,14 +825,14 @@ block0(v0: i8): ; VCode: ; block0: -; lbr %r4, %r2 -; lpr %r2, %r4 +; lbr %r2, %r2 +; lpr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lbr %r4, %r2 -; lpr %r2, %r4 +; lbr %r2, %r2 +; lpr %r2, %r2 ; br %r14 function %ineg_i128(i128) -> i128 { @@ -952,17 +952,17 @@ block0(v0: i128, v1: i128): ; vl %v3, 0(%r4) ; lgdr %r4, %f1 ; vlgvg %r5, %v1, 1 -; lgdr %r7, %f3 -; vlgvg %r9, %v3, 1 +; lgdr %r10, %f3 +; vlgvg %r12, %v3, 1 ; lgr %r3, %r5 -; mlgr %r2, %r9 -; msgrkc %r14, %r5, %r7 -; msgrkc %r5, %r4, %r9 -; agrk %r4, %r14, %r2 -; agr %r5, %r4 -; vlvgp %v1, %r5, %r3 +; mlgr %r2, %r12 +; msgr %r5, %r10 +; msgr %r4, %r12 +; agrk %r2, %r5, %r2 +; agrk %r2, %r4, %r2 +; vlvgp %v25, %r2, %r3 ; lgr %r2, 
%r6 -; vst %v1, 0(%r2) +; vst %v25, 0(%r2) ; lmg %r6, %r15, 48(%r15) ; br %r14 ; @@ -975,17 +975,17 @@ block0(v0: i128, v1: i128): ; vl %v3, 0(%r4) ; lgdr %r4, %f1 ; vlgvg %r5, %v1, 1 -; lgdr %r7, %f3 -; vlgvg %r9, %v3, 1 +; lgdr %r10, %f3 +; vlgvg %r12, %v3, 1 ; lgr %r3, %r5 -; mlgr %r2, %r9 -; msgrkc %r14, %r5, %r7 -; msgrkc %r5, %r4, %r9 -; agrk %r4, %r14, %r2 -; agr %r5, %r4 -; vlvgp %v1, %r5, %r3 +; mlgr %r2, %r12 +; msgr %r5, %r10 +; msgr %r4, %r12 +; agrk %r2, %r5, %r2 +; agrk %r2, %r4, %r2 +; vlvgp %v25, %r2, %r3 ; lgr %r2, %r6 -; vst %v1, 0(%r2) +; vst %v25, 0(%r2) ; lmg %r6, %r15, 0x30(%r15) ; br %r14 @@ -1300,14 +1300,14 @@ block0(v0: i8, v1: i64): ; VCode: ; block0: -; llc %r3, 0(%r3) -; msr %r2, %r3 +; llc %r4, 0(%r3) +; msr %r2, %r4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llc %r3, 0(%r3) ; trap: heap_oob -; msr %r2, %r3 +; llc %r4, 0(%r3) ; trap: heap_oob +; msr %r2, %r4 ; br %r14 function %umulhi_i64(i64, i64) -> i64 { @@ -1318,16 +1318,18 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; lgr %r4, %r3 -; lgr %r3, %r2 -; mlgr %r2, %r4 +; lgr %r4, %r2 +; lgr %r2, %r3 +; lgr %r3, %r4 +; mlgr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgr %r4, %r3 -; lgr %r3, %r2 -; mlgr %r2, %r4 +; lgr %r4, %r2 +; lgr %r2, %r3 +; lgr %r3, %r4 +; mlgr %r2, %r2 ; br %r14 function %umulhi_i32(i32, i32) -> i32 { @@ -1338,18 +1340,22 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: +; lgr %r4, %r3 +; llgfr %r3, %r2 +; lgr %r2, %r4 ; llgfr %r5, %r2 -; llgfr %r3, %r3 -; msgr %r5, %r3 -; srlg %r2, %r5, 32 +; msgrkc %r2, %r3, %r5 +; srlg %r2, %r2, 32 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 +; lgr %r4, %r3 +; llgfr %r3, %r2 +; lgr %r2, %r4 ; llgfr %r5, %r2 -; llgfr %r3, %r3 -; msgr %r5, %r3 -; srlg %r2, %r5, 0x20 +; msgrkc %r2, %r3, %r5 +; srlg %r2, %r2, 0x20 ; br %r14 function %umulhi_i16(i16, i16) -> i16 { @@ -1360,18 +1366,22 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: +; lgr %r4, %r3 +; llhr %r3, %r2 +; lgr %r2, %r4 ; llhr %r5, %r2 -; llhr %r3, %r3 -; msr %r5, %r3 -; srlk %r2, %r5, 16 +; msrkc %r2, %r3, %r5 +; srlk %r2, %r2, 16 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 +; lgr %r4, %r3 +; llhr %r3, %r2 +; lgr %r2, %r4 ; llhr %r5, %r2 -; llhr %r3, %r3 -; msr %r5, %r3 -; srlk %r2, %r5, 0x10 +; msrkc %r2, %r3, %r5 +; srlk %r2, %r2, 0x10 ; br %r14 function %umulhi_i8(i8, i8) -> i8 { @@ -1382,18 +1392,22 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: +; lgr %r4, %r3 +; llcr %r3, %r2 +; lgr %r2, %r4 ; llcr %r5, %r2 -; llcr %r3, %r3 -; msr %r5, %r3 -; srlk %r2, %r5, 8 +; msrkc %r2, %r3, %r5 +; srlk %r2, %r2, 8 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 +; lgr %r4, %r3 +; llcr %r3, %r2 +; lgr %r2, %r4 ; llcr %r5, %r2 -; llcr %r3, %r3 -; msr %r5, %r3 -; srlk %r2, %r5, 8 +; msrkc %r2, %r3, %r5 +; srlk %r2, %r2, 8 ; br %r14 function %smulhi_i64(i64, i64) -> i64 { @@ -1420,18 +1434,22 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: +; lgr %r4, %r3 +; lgfr %r3, %r2 +; lgr %r2, %r4 ; lgfr %r5, %r2 -; lgfr %r3, %r3 -; msgr %r5, %r3 -; srag %r2, %r5, 32 +; msgrkc %r2, %r3, %r5 +; srag %r2, %r2, 32 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 +; lgr %r4, %r3 +; lgfr %r3, %r2 +; lgr %r2, %r4 ; lgfr %r5, %r2 -; lgfr %r3, %r3 -; msgr %r5, %r3 -; srag %r2, %r5, 0x20 +; msgrkc %r2, %r3, %r5 +; srag %r2, %r2, 0x20 ; br %r14 function %smulhi_i16(i16, i16) -> i16 { @@ -1442,18 +1460,22 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: +; lgr %r4, %r3 +; lhr %r3, %r2 +; lgr %r2, %r4 ; lhr %r5, %r2 -; lhr %r3, %r3 -; msr %r5, %r3 -; srak %r2, %r5, 16 +; msrkc 
%r2, %r3, %r5 +; srak %r2, %r2, 16 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 +; lgr %r4, %r3 +; lhr %r3, %r2 +; lgr %r2, %r4 ; lhr %r5, %r2 -; lhr %r3, %r3 -; msr %r5, %r3 -; srak %r2, %r5, 0x10 +; msrkc %r2, %r3, %r5 +; srak %r2, %r2, 0x10 ; br %r14 function %smulhi_i8(i8, i8) -> i8 { @@ -1464,18 +1486,22 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: +; lgr %r4, %r3 +; lbr %r3, %r2 +; lgr %r2, %r4 ; lbr %r5, %r2 -; lbr %r3, %r3 -; msr %r5, %r3 -; srak %r2, %r5, 8 +; msrkc %r2, %r3, %r5 +; srak %r2, %r2, 8 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 +; lgr %r4, %r3 +; lbr %r3, %r2 +; lgr %r2, %r4 ; lbr %r5, %r2 -; lbr %r3, %r3 -; msr %r5, %r3 -; srak %r2, %r5, 8 +; msrkc %r2, %r3, %r5 +; srak %r2, %r2, 8 ; br %r14 function %sdiv_i64(i64, i64) -> i64 { @@ -1488,12 +1514,12 @@ block0(v0: i64, v1: i64): ; block0: ; llihf %r4, 2147483647 ; iilf %r4, 4294967295 -; xgrk %r5, %r4, %r2 -; ngrk %r4, %r5, %r3 -; lgr %r5, %r3 +; xgr %r4, %r2 +; ngr %r4, %r3 ; cgite %r4, -1 -; lgr %r3, %r2 -; lgr %r2, %r5 +; lgr %r4, %r2 +; lgr %r2, %r3 +; lgr %r3, %r4 ; dsgr %r2, %r2 ; lgr %r2, %r3 ; br %r14 @@ -1502,12 +1528,12 @@ block0(v0: i64, v1: i64): ; block0: ; offset 0x0 ; llihf %r4, 0x7fffffff ; iilf %r4, 0xffffffff -; xgrk %r5, %r4, %r2 -; ngrk %r4, %r5, %r3 -; lgr %r5, %r3 +; xgr %r4, %r2 +; ngr %r4, %r3 ; cgite %r4, -1 ; trap: int_ovf -; lgr %r3, %r2 -; lgr %r2, %r5 +; lgr %r4, %r2 +; lgr %r2, %r3 +; lgr %r3, %r4 ; dsgr %r2, %r2 ; trap: int_divz ; lgr %r2, %r3 ; br %r14 @@ -1542,34 +1568,34 @@ block0(v0: i32, v1: i32): } ; VCode: -; stmg %r6, %r15, 48(%r15) +; stmg %r9, %r15, 72(%r15) ; block0: -; lgr %r6, %r3 +; lgr %r9, %r3 ; lgfr %r3, %r2 -; iilf %r4, 2147483647 -; xrk %r5, %r4, %r3 -; lgr %r2, %r6 -; nrk %r4, %r5, %r2 +; iilf %r5, 2147483647 +; xrk %r4, %r5, %r3 +; lgr %r2, %r9 +; nr %r4, %r2 ; cite %r4, -1 ; dsgfr %r2, %r2 ; lgr %r2, %r3 -; lmg %r6, %r15, 48(%r15) +; lmg %r9, %r15, 72(%r15) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; stmg %r6, %r15, 0x30(%r15) +; stmg %r9, %r15, 0x48(%r15) ; block1: ; offset 0x6 -; lgr %r6, %r3 +; lgr %r9, %r3 ; lgfr %r3, %r2 -; iilf %r4, 0x7fffffff -; xrk %r5, %r4, %r3 -; lgr %r2, %r6 -; nrk %r4, %r5, %r2 +; iilf %r5, 0x7fffffff +; xrk %r4, %r5, %r3 +; lgr %r2, %r9 +; nr %r4, %r2 ; cite %r4, -1 ; trap: int_ovf ; dsgfr %r2, %r2 ; trap: int_divz ; lgr %r2, %r3 -; lmg %r6, %r15, 0x30(%r15) +; lmg %r9, %r15, 0x48(%r15) ; br %r14 function %sdiv_i32_imm(i32) -> i32 { @@ -1582,16 +1608,16 @@ block0(v0: i32): ; VCode: ; block0: ; lgfr %r3, %r2 -; lhi %r2, 2 -; dsgfr %r2, %r2 +; lhi %r4, 2 +; dsgfr %r2, %r4 ; lgr %r2, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; lgfr %r3, %r2 -; lhi %r2, 2 -; dsgfr %r2, %r2 ; trap: int_divz +; lhi %r4, 2 +; dsgfr %r2, %r4 ; trap: int_divz ; lgr %r2, %r3 ; br %r14 @@ -1603,27 +1629,27 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; lghr %r5, %r2 -; lhr %r4, %r3 +; lghr %r4, %r2 +; lhr %r5, %r3 ; lhi %r2, 32767 -; xrk %r3, %r2, %r5 -; nrk %r2, %r3, %r4 +; xr %r2, %r4 +; nr %r2, %r5 ; cite %r2, -1 -; lgr %r3, %r5 -; dsgfr %r2, %r4 +; lgr %r3, %r4 +; dsgfr %r2, %r5 ; lgr %r2, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lghr %r5, %r2 -; lhr %r4, %r3 +; lghr %r4, %r2 +; lhr %r5, %r3 ; lhi %r2, 0x7fff -; xrk %r3, %r2, %r5 -; nrk %r2, %r3, %r4 +; xr %r2, %r4 +; nr %r2, %r5 ; cite %r2, -1 ; trap: int_ovf -; lgr %r3, %r5 -; dsgfr %r2, %r4 ; trap: int_divz +; lgr %r3, %r4 +; dsgfr %r2, %r5 ; trap: int_divz ; lgr %r2, %r3 ; br %r14 @@ -1637,16 +1663,16 @@ block0(v0: i16): ; VCode: ; block0: ; lghr 
%r3, %r2 -; lhi %r2, 2 -; dsgfr %r2, %r2 +; lhi %r4, 2 +; dsgfr %r2, %r4 ; lgr %r2, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; lghr %r3, %r2 -; lhi %r2, 2 -; dsgfr %r2, %r2 ; trap: int_divz +; lhi %r4, 2 +; dsgfr %r2, %r4 ; trap: int_divz ; lgr %r2, %r3 ; br %r14 @@ -1658,27 +1684,27 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; lgbr %r5, %r2 -; lbr %r4, %r3 +; lgbr %r4, %r2 +; lbr %r5, %r3 ; lhi %r2, 127 -; xrk %r3, %r2, %r5 -; nrk %r2, %r3, %r4 +; xr %r2, %r4 +; nr %r2, %r5 ; cite %r2, -1 -; lgr %r3, %r5 -; dsgfr %r2, %r4 +; lgr %r3, %r4 +; dsgfr %r2, %r5 ; lgr %r2, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgbr %r5, %r2 -; lbr %r4, %r3 +; lgbr %r4, %r2 +; lbr %r5, %r3 ; lhi %r2, 0x7f -; xrk %r3, %r2, %r5 -; nrk %r2, %r3, %r4 +; xr %r2, %r4 +; nr %r2, %r5 ; cite %r2, -1 ; trap: int_ovf -; lgr %r3, %r5 -; dsgfr %r2, %r4 ; trap: int_divz +; lgr %r3, %r4 +; dsgfr %r2, %r5 ; trap: int_divz ; lgr %r2, %r3 ; br %r14 @@ -1692,16 +1718,16 @@ block0(v0: i8): ; VCode: ; block0: ; lgbr %r3, %r2 -; lhi %r2, 2 -; dsgfr %r2, %r2 +; lhi %r4, 2 +; dsgfr %r2, %r4 ; lgr %r2, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; lgbr %r3, %r2 -; lhi %r2, 2 -; dsgfr %r2, %r2 ; trap: int_divz +; lhi %r4, 2 +; dsgfr %r2, %r4 ; trap: int_divz ; lgr %r2, %r3 ; br %r14 @@ -1713,19 +1739,19 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; lgr %r5, %r3 +; lgr %r4, %r3 ; lgr %r3, %r2 ; lghi %r2, 0 -; dlgr %r2, %r5 +; dlgr %r2, %r4 ; lgr %r2, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgr %r5, %r3 +; lgr %r4, %r3 ; lgr %r3, %r2 ; lghi %r2, 0 -; dlgr %r2, %r5 ; trap: int_divz +; dlgr %r2, %r4 ; trap: int_divz ; lgr %r2, %r3 ; br %r14 @@ -1762,19 +1788,19 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; lgr %r5, %r3 +; lgr %r4, %r3 ; lgr %r3, %r2 ; lhi %r2, 0 -; dlr %r2, %r5 +; dlr %r2, %r4 ; lgr %r2, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgr %r5, %r3 +; lgr %r4, %r3 ; lgr %r3, %r2 ; lhi %r2, 0 -; dlr %r2, %r5 ; trap: int_divz +; dlr %r2, %r4 ; trap: int_divz ; lgr %r2, %r3 ; br %r14 @@ -1811,23 +1837,25 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; lgr %r4, %r3 -; lhi %r5, 0 +; lgr %r5, %r3 +; lhi %r4, 0 ; llhr %r3, %r2 -; llhr %r4, %r4 ; lgr %r2, %r5 -; dlr %r2, %r4 +; llhr %r5, %r2 +; lgr %r2, %r4 +; dlr %r2, %r5 ; lgr %r2, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgr %r4, %r3 -; lhi %r5, 0 +; lgr %r5, %r3 +; lhi %r4, 0 ; llhr %r3, %r2 -; llhr %r4, %r4 ; lgr %r2, %r5 -; dlr %r2, %r4 ; trap: int_divz +; llhr %r5, %r2 +; lgr %r2, %r4 +; dlr %r2, %r5 ; trap: int_divz ; lgr %r2, %r3 ; br %r14 @@ -1866,23 +1894,25 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; lgr %r4, %r3 -; lhi %r5, 0 +; lgr %r5, %r3 +; lhi %r4, 0 ; llcr %r3, %r2 -; llcr %r4, %r4 ; lgr %r2, %r5 -; dlr %r2, %r4 +; llcr %r5, %r2 +; lgr %r2, %r4 +; dlr %r2, %r5 ; lgr %r2, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgr %r4, %r3 -; lhi %r5, 0 +; lgr %r5, %r3 +; lhi %r4, 0 ; llcr %r3, %r2 -; llcr %r4, %r4 ; lgr %r2, %r5 -; dlr %r2, %r4 ; trap: int_divz +; llcr %r5, %r2 +; lgr %r2, %r4 +; dlr %r2, %r5 ; trap: int_divz ; lgr %r2, %r3 ; br %r14 @@ -1947,16 +1977,18 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; lgr %r5, %r3 +; lgr %r4, %r3 ; lgfr %r3, %r2 -; dsgfr %r2, %r5 +; lgr %r2, %r4 +; dsgfr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgr %r5, %r3 +; lgr %r4, %r3 ; lgfr %r3, %r2 -; dsgfr %r2, %r5 ; trap: int_divz +; lgr %r2, %r4 +; dsgfr %r2, %r2 ; trap: int_divz ; br %r14 function %srem_i16(i16, i16) -> i16 { @@ 
-1967,20 +1999,20 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; lgr %r5, %r3 +; lgr %r4, %r3 ; lghr %r3, %r2 -; lgr %r2, %r5 -; lhr %r4, %r2 -; dsgfr %r2, %r4 +; lgr %r2, %r4 +; lhr %r5, %r2 +; dsgfr %r2, %r5 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgr %r5, %r3 +; lgr %r4, %r3 ; lghr %r3, %r2 -; lgr %r2, %r5 -; lhr %r4, %r2 -; dsgfr %r2, %r4 ; trap: int_divz +; lgr %r2, %r4 +; lhr %r5, %r2 +; dsgfr %r2, %r5 ; trap: int_divz ; br %r14 function %srem_i8(i8, i8) -> i8 { @@ -1991,20 +2023,20 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; lgr %r5, %r3 +; lgr %r4, %r3 ; lgbr %r3, %r2 -; lgr %r2, %r5 -; lbr %r4, %r2 -; dsgfr %r2, %r4 +; lgr %r2, %r4 +; lbr %r5, %r2 +; dsgfr %r2, %r5 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgr %r5, %r3 +; lgr %r4, %r3 ; lgbr %r3, %r2 -; lgr %r2, %r5 -; lbr %r4, %r2 -; dsgfr %r2, %r4 ; trap: int_divz +; lgr %r2, %r4 +; lbr %r5, %r2 +; dsgfr %r2, %r5 ; trap: int_divz ; br %r14 function %urem_i64(i64, i64) -> i64 { @@ -2059,22 +2091,24 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; lgr %r4, %r3 -; lhi %r5, 0 +; lgr %r5, %r3 +; lhi %r4, 0 ; llhr %r3, %r2 -; llhr %r4, %r4 ; lgr %r2, %r5 -; dlr %r2, %r4 +; llhr %r5, %r2 +; lgr %r2, %r4 +; dlr %r2, %r5 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgr %r4, %r3 -; lhi %r5, 0 +; lgr %r5, %r3 +; lhi %r4, 0 ; llhr %r3, %r2 -; llhr %r4, %r4 ; lgr %r2, %r5 -; dlr %r2, %r4 ; trap: int_divz +; llhr %r5, %r2 +; lgr %r2, %r4 +; dlr %r2, %r5 ; trap: int_divz ; br %r14 function %urem_i8(i8, i8) -> i8 { @@ -2085,21 +2119,23 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; lgr %r4, %r3 -; lhi %r5, 0 +; lgr %r5, %r3 +; lhi %r4, 0 ; llcr %r3, %r2 -; llcr %r4, %r4 ; lgr %r2, %r5 -; dlr %r2, %r4 +; llcr %r5, %r2 +; lgr %r2, %r4 +; dlr %r2, %r5 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgr %r4, %r3 -; lhi %r5, 0 +; lgr %r5, %r3 +; lhi %r4, 0 ; llcr %r3, %r2 -; llcr %r4, %r4 ; lgr %r2, %r5 -; dlr %r2, %r4 ; trap: int_divz +; llcr %r5, %r2 +; lgr %r2, %r4 +; dlr %r2, %r5 ; trap: int_divz ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/atomic_cas-little.clif b/cranelift/filetests/filetests/isa/s390x/atomic_cas-little.clif index 50c62ab0c84c..d8f95ba265f0 100644 --- a/cranelift/filetests/filetests/isa/s390x/atomic_cas-little.clif +++ b/cranelift/filetests/filetests/isa/s390x/atomic_cas-little.clif @@ -13,18 +13,18 @@ block0(v0: i64, v1: i64, v2: i64): ; VCode: ; block0: -; lrvgr %r5, %r2 -; lrvgr %r2, %r3 -; csg %r5, %r2, 0(%r4) -; lrvgr %r2, %r5 +; lrvgr %r2, %r2 +; lrvgr %r3, %r3 +; csg %r2, %r3, 0(%r4) +; lrvgr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvgr %r5, %r2 -; lrvgr %r2, %r3 -; csg %r5, %r2, 0(%r4) ; trap: heap_oob -; lrvgr %r2, %r5 +; lrvgr %r2, %r2 +; lrvgr %r3, %r3 +; csg %r2, %r3, 0(%r4) ; trap: heap_oob +; lrvgr %r2, %r2 ; br %r14 function %atomic_cas_i32(i32, i32, i64) -> i32 { @@ -35,18 +35,18 @@ block0(v0: i32, v1: i32, v2: i64): ; VCode: ; block0: -; lrvr %r5, %r2 -; lrvr %r2, %r3 -; cs %r5, %r2, 0(%r4) -; lrvr %r2, %r5 +; lrvr %r2, %r2 +; lrvr %r3, %r3 +; cs %r2, %r3, 0(%r4) +; lrvr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvr %r5, %r2 -; lrvr %r2, %r3 -; cs %r5, %r2, 0(%r4) ; trap: heap_oob -; lrvr %r2, %r5 +; lrvr %r2, %r2 +; lrvr %r3, %r3 +; cs %r2, %r3, 0(%r4) ; trap: heap_oob +; lrvr %r2, %r2 ; br %r14 function %atomic_cas_i16(i64, i16, i16, i64) -> i16 { @@ -56,33 +56,42 @@ block0(v0: i64, v1: i16, v2: i16, v3: i64): } ; VCode: +; stmg %r10, %r15, 80(%r15) ; block0: -; sllk %r2, %r5, 3 +; lgr %r10, 
%r4 +; sllk %r4, %r5, 3 ; nill %r5, 65532 +; lrvr %r2, %r3 +; lgr %r3, %r10 ; lrvr %r3, %r3 -; lrvr %r4, %r4 ; l %r0, 0(%r5) -; 0: rll %r1, %r0, 16(%r2) ; rxsbg %r1, %r3, 176, 64, 48 ; jglh 1f ; risbgn %r1, %r4, 48, 64, 48 ; rll %r1, %r1, 16(%r2) ; cs %r0, %r1, 0(%r5) ; jglh 0b ; 1: -; rll %r4, %r0, 0(%r2) -; lrvr %r2, %r4 +; 0: rll %r1, %r0, 16(%r4) ; rxsbg %r1, %r2, 176, 64, 48 ; jglh 1f ; risbgn %r1, %r3, 48, 64, 48 ; rll %r1, %r1, 16(%r4) ; cs %r0, %r1, 0(%r5) ; jglh 0b ; 1: +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 +; lmg %r10, %r15, 80(%r15) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r2, %r5, 3 +; stmg %r10, %r15, 0x50(%r15) +; block1: ; offset 0x6 +; lgr %r10, %r4 +; sllk %r4, %r5, 3 ; nill %r5, 0xfffc +; lrvr %r2, %r3 +; lgr %r3, %r10 ; lrvr %r3, %r3 -; lrvr %r4, %r4 ; l %r0, 0(%r5) ; trap: heap_oob -; rll %r1, %r0, 0x10(%r2) -; rxsbg %r1, %r3, 0xb0, 0x40, 0x30 -; jglh 0x3e -; risbgn %r1, %r4, 0x30, 0x40, 0x30 -; rll %r1, %r1, 0x10(%r2) +; rll %r1, %r0, 0x10(%r4) +; rxsbg %r1, %r2, 0xb0, 0x40, 0x30 +; jglh 0x4c +; risbgn %r1, %r3, 0x30, 0x40, 0x30 +; rll %r1, %r1, 0x10(%r4) ; cs %r0, %r1, 0(%r5) ; trap: heap_oob -; jglh 0x16 -; rll %r4, %r0, 0(%r2) -; lrvr %r2, %r4 +; jglh 0x24 +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 +; lmg %r10, %r15, 0x50(%r15) ; br %r14 function %atomic_cas_i8(i64, i8, i8, i64) -> i8 { @@ -92,33 +101,35 @@ block0(v0: i64, v1: i8, v2: i8, v3: i64): } ; VCode: -; stmg %r14, %r15, 112(%r15) +; stmg %r12, %r15, 96(%r15) ; block0: -; sllk %r2, %r5, 3 +; lgr %r12, %r4 +; sllk %r4, %r5, 3 ; nill %r5, 65532 -; lcr %r14, %r2 +; lcr %r2, %r4 ; l %r0, 0(%r5) -; 0: rll %r1, %r0, 0(%r2) ; rxsbg %r1, %r3, 160, 40, 24 ; jglh 1f ; risbgn %r1, %r4, 32, 40, 24 ; rll %r1, %r1, 0(%r14) ; cs %r0, %r1, 0(%r5) ; jglh 0b ; 1: -; rll %r2, %r0, 8(%r2) -; lmg %r14, %r15, 112(%r15) +; 0: rll %r1, %r0, 0(%r4) ; rxsbg %r1, %r3, 160, 40, 24 ; jglh 1f ; risbgn %r1, %r12, 32, 40, 24 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r5) ; jglh 0b ; 1: +; rll %r2, %r0, 8(%r4) +; lmg %r12, %r15, 96(%r15) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; stmg %r14, %r15, 0x70(%r15) +; stmg %r12, %r15, 0x60(%r15) ; block1: ; offset 0x6 -; sllk %r2, %r5, 3 +; lgr %r12, %r4 +; sllk %r4, %r5, 3 ; nill %r5, 0xfffc -; lcr %r14, %r2 +; lcr %r2, %r4 ; l %r0, 0(%r5) ; trap: heap_oob -; rll %r1, %r0, 0(%r2) +; rll %r1, %r0, 0(%r4) ; rxsbg %r1, %r3, 0xa0, 0x28, 0x18 -; jglh 0x3e -; risbgn %r1, %r4, 0x20, 0x28, 0x18 -; rll %r1, %r1, 0(%r14) +; jglh 0x42 +; risbgn %r1, %r12, 0x20, 0x28, 0x18 +; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r5) ; trap: heap_oob -; jglh 0x16 -; rll %r2, %r0, 8(%r2) -; lmg %r14, %r15, 0x70(%r15) +; jglh 0x1a +; rll %r2, %r0, 8(%r4) +; lmg %r12, %r15, 0x60(%r15) ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/atomic_cas.clif b/cranelift/filetests/filetests/isa/s390x/atomic_cas.clif index bab8d64b714b..7e6f65bb97c2 100644 --- a/cranelift/filetests/filetests/isa/s390x/atomic_cas.clif +++ b/cranelift/filetests/filetests/isa/s390x/atomic_cas.clif @@ -45,26 +45,28 @@ block0(v0: i64, v1: i16, v2: i16, v3: i64): ; VCode: ; block0: -; sllk %r2, %r5, 3 +; lgr %r2, %r4 +; sllk %r4, %r5, 3 ; nill %r5, 65532 ; l %r0, 0(%r5) -; 0: rll %r1, %r0, 0(%r2) ; rxsbg %r1, %r3, 160, 48, 16 ; jglh 1f ; risbgn %r1, %r4, 32, 48, 16 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r5) ; jglh 0b ; 1: -; rll %r2, %r0, 16(%r2) +; 0: rll %r1, %r0, 0(%r4) ; rxsbg %r1, %r3, 160, 48, 16 ; jglh 1f ; risbgn %r1, %r2, 32, 48, 16 ; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r5) ; jglh 0b ; 1: +; rll %r2, %r0, 
16(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r2, %r5, 3 +; lgr %r2, %r4 +; sllk %r4, %r5, 3 ; nill %r5, 0xfffc ; l %r0, 0(%r5) ; trap: heap_oob -; rll %r1, %r0, 0(%r2) +; rll %r1, %r0, 0(%r4) ; rxsbg %r1, %r3, 0xa0, 0x30, 0x10 -; jglh 0x36 -; risbgn %r1, %r4, 0x20, 0x30, 0x10 -; rll %r1, %r1, 0(%r2) +; jglh 0x3a +; risbgn %r1, %r2, 0x20, 0x30, 0x10 +; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r5) ; trap: heap_oob -; jglh 0xe -; rll %r2, %r0, 0x10(%r2) +; jglh 0x12 +; rll %r2, %r0, 0x10(%r4) ; br %r14 function %atomic_cas_i8(i64, i8, i8, i64) -> i8 { @@ -74,33 +76,35 @@ block0(v0: i64, v1: i8, v2: i8, v3: i64): } ; VCode: -; stmg %r14, %r15, 112(%r15) +; stmg %r12, %r15, 96(%r15) ; block0: -; sllk %r2, %r5, 3 +; lgr %r12, %r4 +; sllk %r4, %r5, 3 ; nill %r5, 65532 -; lcr %r14, %r2 +; lcr %r2, %r4 ; l %r0, 0(%r5) -; 0: rll %r1, %r0, 0(%r2) ; rxsbg %r1, %r3, 160, 40, 24 ; jglh 1f ; risbgn %r1, %r4, 32, 40, 24 ; rll %r1, %r1, 0(%r14) ; cs %r0, %r1, 0(%r5) ; jglh 0b ; 1: -; rll %r2, %r0, 8(%r2) -; lmg %r14, %r15, 112(%r15) +; 0: rll %r1, %r0, 0(%r4) ; rxsbg %r1, %r3, 160, 40, 24 ; jglh 1f ; risbgn %r1, %r12, 32, 40, 24 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r5) ; jglh 0b ; 1: +; rll %r2, %r0, 8(%r4) +; lmg %r12, %r15, 96(%r15) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; stmg %r14, %r15, 0x70(%r15) +; stmg %r12, %r15, 0x60(%r15) ; block1: ; offset 0x6 -; sllk %r2, %r5, 3 +; lgr %r12, %r4 +; sllk %r4, %r5, 3 ; nill %r5, 0xfffc -; lcr %r14, %r2 +; lcr %r2, %r4 ; l %r0, 0(%r5) ; trap: heap_oob -; rll %r1, %r0, 0(%r2) +; rll %r1, %r0, 0(%r4) ; rxsbg %r1, %r3, 0xa0, 0x28, 0x18 -; jglh 0x3e -; risbgn %r1, %r4, 0x20, 0x28, 0x18 -; rll %r1, %r1, 0(%r14) +; jglh 0x42 +; risbgn %r1, %r12, 0x20, 0x28, 0x18 +; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r5) ; trap: heap_oob -; jglh 0x16 -; rll %r2, %r0, 8(%r2) -; lmg %r14, %r15, 0x70(%r15) +; jglh 0x1a +; rll %r2, %r0, 8(%r4) +; lmg %r12, %r15, 0x60(%r15) ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/atomic_rmw-arch13.clif b/cranelift/filetests/filetests/isa/s390x/atomic_rmw-arch13.clif index aada84480f31..7518b21dd9fe 100644 --- a/cranelift/filetests/filetests/isa/s390x/atomic_rmw-arch13.clif +++ b/cranelift/filetests/filetests/isa/s390x/atomic_rmw-arch13.clif @@ -56,25 +56,27 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; rnsbg %r1, %r4, 32, 48, 16 ; xilf %r1, 4294901760 ; rll %r1, %r1, 0(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 16(%r5) +; 0: rll %r1, %r0, 0(%r4) ; rnsbg %r1, %r2, 32, 48, 16 ; xilf %r1, 4294901760 ; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 16(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) -; rnsbg %r1, %r4, 0x20, 0x30, 0x10 +; rll %r1, %r0, 0(%r4) +; rnsbg %r1, %r2, 0x20, 0x30, 0x10 ; xilf %r1, 0xffff0000 -; rll %r1, %r1, 0(%r5) +; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0xe -; rll %r2, %r0, 0x10(%r5) +; jglh 0x12 +; rll %r2, %r0, 0x10(%r4) ; br %r14 function %atomic_rmw_nand_i8(i64, i64, i8) -> i8 { @@ -85,27 +87,29 @@ block0(v0: i64, v1: i64, v2: i8): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; rnsbg %r1, %r4, 32, 40, 24 ; xilf 
%r1, 4278190080 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 8(%r5) +; 0: rll %r1, %r0, 0(%r4) ; rnsbg %r1, %r5, 32, 40, 24 ; xilf %r1, 4278190080 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 8(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) -; rnsbg %r1, %r4, 0x20, 0x28, 0x18 +; rll %r1, %r0, 0(%r4) +; rnsbg %r1, %r5, 0x20, 0x28, 0x18 ; xilf %r1, 0xff000000 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x10 -; rll %r2, %r0, 8(%r5) +; jglh 0x14 +; rll %r2, %r0, 8(%r4) ; br %r14 function %atomic_rmw_nand_i64(i64, i64, i64) -> i64 { @@ -116,18 +120,18 @@ block0(v0: i64, v1: i64, v2: i64): ; VCode: ; block0: -; lrvgr %r5, %r4 +; lrvgr %r4, %r4 ; lg %r0, 0(%r3) -; 0: nngrk %r1, %r0, %r5 ; csg %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; 0: nngrk %r1, %r0, %r4 ; csg %r0, %r1, 0(%r3) ; jglh 0b ; 1: ; lrvgr %r2, %r0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvgr %r5, %r4 +; lrvgr %r4, %r4 ; lg %r0, 0(%r3) ; trap: heap_oob ; .byte 0xb9, 0x64 -; st %r1, 0xb01(%r14) ; trap: heap_oob +; sth %r1, 0xb01(%r14) ; trap: heap_oob ; lper %f0, %f0 ; .byte 0x00, 0x30 ; jglh 0xa @@ -142,18 +146,18 @@ block0(v0: i64, v1: i64, v2: i32): ; VCode: ; block0: -; lrvr %r5, %r4 +; lrvr %r4, %r4 ; l %r0, 0(%r3) -; 0: nnrk %r1, %r0, %r5 ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; 0: nnrk %r1, %r0, %r4 ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: ; lrvr %r2, %r0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvr %r5, %r4 +; lrvr %r4, %r4 ; l %r0, 0(%r3) ; trap: heap_oob ; .byte 0xb9, 0x74 -; st %r1, 0xa01(%r11) ; trap: heap_oob +; sth %r1, 0xa01(%r11) ; trap: heap_oob ; lper %f0, %f0 ; jglh 8 ; lrvr %r2, %r0 @@ -167,29 +171,31 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; lrvr %r2, %r4 +; lrvr %r2, %r2 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 16(%r5) ; rnsbg %r1, %r2, 48, 64, 48 ; xilf %r1, 65535 ; rll %r1, %r1, 16(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; 0: rll %r1, %r0, 16(%r4) ; rnsbg %r1, %r2, 48, 64, 48 ; xilf %r1, 65535 ; rll %r1, %r1, 16(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; lrvr %r2, %r4 +; lrvr %r2, %r2 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0x10(%r5) +; rll %r1, %r0, 0x10(%r4) ; rnsbg %r1, %r2, 0x30, 0x40, 0x30 ; xilf %r1, 0xffff -; rll %r1, %r1, 0x10(%r5) +; rll %r1, %r1, 0x10(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x12 -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; jglh 0x16 +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 function %atomic_rmw_nand_i8(i64, i64, i8) -> i8 { @@ -200,26 +206,28 @@ block0(v0: i64, v1: i64, v2: i8): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; rnsbg %r1, %r4, 32, 40, 24 ; xilf %r1, 4278190080 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 8(%r5) +; 0: rll %r1, %r0, 0(%r4) ; rnsbg %r1, %r5, 32, 40, 24 ; xilf %r1, 4278190080 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 8(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 
-; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) -; rnsbg %r1, %r4, 0x20, 0x28, 0x18 +; rll %r1, %r0, 0(%r4) +; rnsbg %r1, %r5, 0x20, 0x28, 0x18 ; xilf %r1, 0xff000000 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x10 -; rll %r2, %r0, 8(%r5) +; jglh 0x14 +; rll %r2, %r0, 8(%r4) ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/atomic_rmw-little.clif b/cranelift/filetests/filetests/isa/s390x/atomic_rmw-little.clif index e06d6fff265c..87934dc5f7d5 100644 --- a/cranelift/filetests/filetests/isa/s390x/atomic_rmw-little.clif +++ b/cranelift/filetests/filetests/isa/s390x/atomic_rmw-little.clif @@ -13,17 +13,17 @@ block0(v0: i64, v1: i64, v2: i64): ; VCode: ; block0: -; lrvgr %r5, %r4 +; lrvgr %r4, %r4 ; lg %r0, 0(%r3) -; 0: csg %r0, %r5, 0(%r3) ; jglh 0b ; 1: +; 0: csg %r0, %r4, 0(%r3) ; jglh 0b ; 1: ; lrvgr %r2, %r0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvgr %r5, %r4 +; lrvgr %r4, %r4 ; lg %r0, 0(%r3) ; trap: heap_oob -; csg %r0, %r5, 0(%r3) ; trap: heap_oob +; csg %r0, %r4, 0(%r3) ; trap: heap_oob ; jglh 0xa ; lrvgr %r2, %r0 ; br %r14 @@ -36,17 +36,17 @@ block0(v0: i64, v1: i64, v2: i32): ; VCode: ; block0: -; lrvr %r5, %r4 +; lrvr %r4, %r4 ; l %r0, 0(%r3) -; 0: cs %r0, %r5, 0(%r3) ; jglh 0b ; 1: +; 0: cs %r0, %r4, 0(%r3) ; jglh 0b ; 1: ; lrvr %r2, %r0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvr %r5, %r4 +; lrvr %r4, %r4 ; l %r0, 0(%r3) ; trap: heap_oob -; cs %r0, %r5, 0(%r3) ; trap: heap_oob +; cs %r0, %r4, 0(%r3) ; trap: heap_oob ; jglh 8 ; lrvr %r2, %r0 ; br %r14 @@ -59,28 +59,30 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; lrvr %r2, %r4 +; lrvr %r2, %r2 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 16(%r5) ; risbgn %r1, %r2, 48, 64, 48 ; rll %r1, %r1, 16(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; 0: rll %r1, %r0, 16(%r4) ; risbgn %r1, %r2, 48, 64, 48 ; rll %r1, %r1, 16(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; lrvr %r2, %r4 +; lrvr %r2, %r2 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0x10(%r5) +; rll %r1, %r0, 0x10(%r4) ; risbgn %r1, %r2, 0x30, 0x40, 0x30 -; rll %r1, %r1, 0x10(%r5) +; rll %r1, %r1, 0x10(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x12 -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; jglh 0x16 +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 function %atomic_rmw_xchg_i8(i64, i64, i8) -> i8 { @@ -91,26 +93,28 @@ block0(v0: i64, v1: i64, v2: i8): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; risbgn %r1, %r4, 32, 40, 24 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 8(%r5) +; 0: rll %r1, %r0, 0(%r4) ; risbgn %r1, %r5, 32, 40, 24 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 8(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) -; risbgn %r1, %r4, 0x20, 0x28, 0x18 +; rll %r1, %r0, 0(%r4) +; risbgn %r1, %r5, 0x20, 0x28, 0x18 ; rll %r1, %r1, 0(%r2) ; cs 
%r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x10 -; rll %r2, %r0, 8(%r5) +; jglh 0x14 +; rll %r2, %r0, 8(%r4) ; br %r14 function %atomic_rmw_add_i64(i64, i64, i64) -> i64 { @@ -169,30 +173,32 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; sllk %r2, %r4, 16 +; sllk %r2, %r2, 16 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 16(%r5) ; lrvr %r1, %r1 ; ar %r1, %r2 ; lrvr %r1, %r1 ; rll %r1, %r1, 16(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; 0: rll %r1, %r0, 16(%r4) ; lrvr %r1, %r1 ; ar %r1, %r2 ; lrvr %r1, %r1 ; rll %r1, %r1, 16(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; sllk %r2, %r4, 0x10 +; sllk %r2, %r2, 0x10 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0x10(%r5) +; rll %r1, %r0, 0x10(%r4) ; lrvr %r1, %r1 ; ar %r1, %r2 ; lrvr %r1, %r1 -; rll %r1, %r1, 0x10(%r5) +; rll %r1, %r1, 0x10(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x14 -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; jglh 0x18 +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 function %atomic_rmw_add_i8(i64, i64, i8) -> i8 { @@ -283,30 +289,32 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; sllk %r2, %r4, 16 +; sllk %r2, %r2, 16 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 16(%r5) ; lrvr %r1, %r1 ; sr %r1, %r2 ; lrvr %r1, %r1 ; rll %r1, %r1, 16(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; 0: rll %r1, %r0, 16(%r4) ; lrvr %r1, %r1 ; sr %r1, %r2 ; lrvr %r1, %r1 ; rll %r1, %r1, 16(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; sllk %r2, %r4, 0x10 +; sllk %r2, %r2, 0x10 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0x10(%r5) +; rll %r1, %r0, 0x10(%r4) ; lrvr %r1, %r1 ; sr %r1, %r2 ; lrvr %r1, %r1 -; rll %r1, %r1, 0x10(%r5) +; rll %r1, %r1, 0x10(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x14 -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; jglh 0x18 +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 function %atomic_rmw_sub_i8(i64, i64, i8) -> i8 { @@ -349,16 +357,16 @@ block0(v0: i64, v1: i64, v2: i64): ; VCode: ; block0: -; lrvgr %r5, %r4 -; lang %r3, %r5, 0(%r3) -; lrvgr %r2, %r3 +; lrvgr %r4, %r4 +; lang %r5, %r4, 0(%r3) +; lrvgr %r2, %r5 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvgr %r5, %r4 -; lang %r3, %r5, 0(%r3) ; trap: heap_oob -; lrvgr %r2, %r3 +; lrvgr %r4, %r4 +; lang %r5, %r4, 0(%r3) ; trap: heap_oob +; lrvgr %r2, %r5 ; br %r14 function %atomic_rmw_and_i32(i64, i64, i32) -> i32 { @@ -369,16 +377,16 @@ block0(v0: i64, v1: i64, v2: i32): ; VCode: ; block0: -; lrvr %r5, %r4 -; lan %r3, %r5, 0(%r3) -; lrvr %r2, %r3 +; lrvr %r4, %r4 +; lan %r5, %r4, 0(%r3) +; lrvr %r2, %r5 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvr %r5, %r4 -; lan %r3, %r5, 0(%r3) ; trap: heap_oob -; lrvr %r2, %r3 +; lrvr %r4, %r4 +; lan %r5, %r4, 0(%r3) ; trap: heap_oob +; lrvr %r2, %r5 ; br %r14 function %atomic_rmw_and_i16(i64, i64, i16) -> i16 { @@ -389,28 +397,30 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; lrvr %r2, %r4 +; lrvr %r2, %r2 ; l %r0, 0(%r3) -; 0: 
rll %r1, %r0, 16(%r5) ; rnsbg %r1, %r2, 48, 64, 48 ; rll %r1, %r1, 16(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; 0: rll %r1, %r0, 16(%r4) ; rnsbg %r1, %r2, 48, 64, 48 ; rll %r1, %r1, 16(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; lrvr %r2, %r4 +; lrvr %r2, %r2 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0x10(%r5) +; rll %r1, %r0, 0x10(%r4) ; rnsbg %r1, %r2, 0x30, 0x40, 0x30 -; rll %r1, %r1, 0x10(%r5) +; rll %r1, %r1, 0x10(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x12 -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; jglh 0x16 +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 function %atomic_rmw_and_i8(i64, i64, i8) -> i8 { @@ -421,26 +431,28 @@ block0(v0: i64, v1: i64, v2: i8): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; rnsbg %r1, %r4, 32, 40, 24 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 8(%r5) +; 0: rll %r1, %r0, 0(%r4) ; rnsbg %r1, %r5, 32, 40, 24 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 8(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) -; rnsbg %r1, %r4, 0x20, 0x28, 0x18 +; rll %r1, %r0, 0(%r4) +; rnsbg %r1, %r5, 0x20, 0x28, 0x18 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x10 -; rll %r2, %r0, 8(%r5) +; jglh 0x14 +; rll %r2, %r0, 8(%r4) ; br %r14 function %atomic_rmw_or_i64(i64, i64, i64) -> i64 { @@ -451,16 +463,16 @@ block0(v0: i64, v1: i64, v2: i64): ; VCode: ; block0: -; lrvgr %r5, %r4 -; laog %r3, %r5, 0(%r3) -; lrvgr %r2, %r3 +; lrvgr %r4, %r4 +; laog %r5, %r4, 0(%r3) +; lrvgr %r2, %r5 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvgr %r5, %r4 -; laog %r3, %r5, 0(%r3) ; trap: heap_oob -; lrvgr %r2, %r3 +; lrvgr %r4, %r4 +; laog %r5, %r4, 0(%r3) ; trap: heap_oob +; lrvgr %r2, %r5 ; br %r14 function %atomic_rmw_or_i32(i64, i64, i32) -> i32 { @@ -471,16 +483,16 @@ block0(v0: i64, v1: i64, v2: i32): ; VCode: ; block0: -; lrvr %r5, %r4 -; lao %r3, %r5, 0(%r3) -; lrvr %r2, %r3 +; lrvr %r4, %r4 +; lao %r5, %r4, 0(%r3) +; lrvr %r2, %r5 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvr %r5, %r4 -; lao %r3, %r5, 0(%r3) ; trap: heap_oob -; lrvr %r2, %r3 +; lrvr %r4, %r4 +; lao %r5, %r4, 0(%r3) ; trap: heap_oob +; lrvr %r2, %r5 ; br %r14 function %atomic_rmw_or_i16(i64, i64, i16) -> i16 { @@ -491,28 +503,30 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; lrvr %r2, %r4 +; lrvr %r2, %r2 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 16(%r5) ; rosbg %r1, %r2, 48, 64, 48 ; rll %r1, %r1, 16(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; 0: rll %r1, %r0, 16(%r4) ; rosbg %r1, %r2, 48, 64, 48 ; rll %r1, %r1, 16(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; lrvr %r2, %r4 +; lrvr %r2, %r2 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0x10(%r5) +; rll %r1, %r0, 0x10(%r4) ; rosbg %r1, %r2, 0x30, 
0x40, 0x30 -; rll %r1, %r1, 0x10(%r5) +; rll %r1, %r1, 0x10(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x12 -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; jglh 0x16 +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 function %atomic_rmw_or_i8(i64, i64, i8) -> i8 { @@ -523,26 +537,28 @@ block0(v0: i64, v1: i64, v2: i8): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; rosbg %r1, %r4, 32, 40, 24 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 8(%r5) +; 0: rll %r1, %r0, 0(%r4) ; rosbg %r1, %r5, 32, 40, 24 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 8(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) -; rosbg %r1, %r4, 0x20, 0x28, 0x18 +; rll %r1, %r0, 0(%r4) +; rosbg %r1, %r5, 0x20, 0x28, 0x18 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x10 -; rll %r2, %r0, 8(%r5) +; jglh 0x14 +; rll %r2, %r0, 8(%r4) ; br %r14 function %atomic_rmw_xor_i64(i64, i64, i64) -> i64 { @@ -553,16 +569,16 @@ block0(v0: i64, v1: i64, v2: i64): ; VCode: ; block0: -; lrvgr %r5, %r4 -; laxg %r3, %r5, 0(%r3) -; lrvgr %r2, %r3 +; lrvgr %r4, %r4 +; laxg %r5, %r4, 0(%r3) +; lrvgr %r2, %r5 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvgr %r5, %r4 -; laxg %r3, %r5, 0(%r3) ; trap: heap_oob -; lrvgr %r2, %r3 +; lrvgr %r4, %r4 +; laxg %r5, %r4, 0(%r3) ; trap: heap_oob +; lrvgr %r2, %r5 ; br %r14 function %atomic_rmw_xor_i32(i64, i64, i32) -> i32 { @@ -573,16 +589,16 @@ block0(v0: i64, v1: i64, v2: i32): ; VCode: ; block0: -; lrvr %r5, %r4 -; lax %r3, %r5, 0(%r3) -; lrvr %r2, %r3 +; lrvr %r4, %r4 +; lax %r5, %r4, 0(%r3) +; lrvr %r2, %r5 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvr %r5, %r4 -; lax %r3, %r5, 0(%r3) ; trap: heap_oob -; lrvr %r2, %r3 +; lrvr %r4, %r4 +; lax %r5, %r4, 0(%r3) ; trap: heap_oob +; lrvr %r2, %r5 ; br %r14 function %atomic_rmw_xor_i16(i64, i64, i16) -> i16 { @@ -593,28 +609,30 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; lrvr %r2, %r4 +; lrvr %r2, %r2 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 16(%r5) ; rxsbg %r1, %r2, 48, 64, 48 ; rll %r1, %r1, 16(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; 0: rll %r1, %r0, 16(%r4) ; rxsbg %r1, %r2, 48, 64, 48 ; rll %r1, %r1, 16(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; lrvr %r2, %r4 +; lrvr %r2, %r2 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0x10(%r5) +; rll %r1, %r0, 0x10(%r4) ; rxsbg %r1, %r2, 0x30, 0x40, 0x30 -; rll %r1, %r1, 0x10(%r5) +; rll %r1, %r1, 0x10(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x12 -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; jglh 0x16 +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 function %atomic_rmw_xor_i8(i64, i64, i8) -> i8 { @@ -625,26 +643,28 @@ block0(v0: i64, v1: i64, v2: i8): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; rxsbg %r1, %r4, 32, 40, 24 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 
1: -; rll %r2, %r0, 8(%r5) +; 0: rll %r1, %r0, 0(%r4) ; rxsbg %r1, %r5, 32, 40, 24 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 8(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) -; rxsbg %r1, %r4, 0x20, 0x28, 0x18 +; rll %r1, %r0, 0(%r4) +; rxsbg %r1, %r5, 0x20, 0x28, 0x18 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x10 -; rll %r2, %r0, 8(%r5) +; jglh 0x14 +; rll %r2, %r0, 8(%r4) ; br %r14 function %atomic_rmw_nand_i64(i64, i64, i64) -> i64 { @@ -655,17 +675,17 @@ block0(v0: i64, v1: i64, v2: i64): ; VCode: ; block0: -; lrvgr %r5, %r4 +; lrvgr %r4, %r4 ; lg %r0, 0(%r3) -; 0: ngrk %r1, %r0, %r5 ; xilf %r1, 4294967295 ; xihf %r1, 4294967295 ; csg %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; 0: ngrk %r1, %r0, %r4 ; xilf %r1, 4294967295 ; xihf %r1, 4294967295 ; csg %r0, %r1, 0(%r3) ; jglh 0b ; 1: ; lrvgr %r2, %r0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvgr %r5, %r4 +; lrvgr %r4, %r4 ; lg %r0, 0(%r3) ; trap: heap_oob -; ngrk %r1, %r0, %r5 +; ngrk %r1, %r0, %r4 ; xilf %r1, 0xffffffff ; xihf %r1, 0xffffffff ; csg %r0, %r1, 0(%r3) ; trap: heap_oob @@ -681,17 +701,17 @@ block0(v0: i64, v1: i64, v2: i32): ; VCode: ; block0: -; lrvr %r5, %r4 +; lrvr %r4, %r4 ; l %r0, 0(%r3) -; 0: nrk %r1, %r0, %r5 ; xilf %r1, 4294967295 ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; 0: nrk %r1, %r0, %r4 ; xilf %r1, 4294967295 ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: ; lrvr %r2, %r0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvr %r5, %r4 +; lrvr %r4, %r4 ; l %r0, 0(%r3) ; trap: heap_oob -; nrk %r1, %r0, %r5 +; nrk %r1, %r0, %r4 ; xilf %r1, 0xffffffff ; cs %r0, %r1, 0(%r3) ; trap: heap_oob ; jglh 8 @@ -706,29 +726,31 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; lrvr %r2, %r4 +; lrvr %r2, %r2 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 16(%r5) ; rnsbg %r1, %r2, 48, 64, 48 ; xilf %r1, 65535 ; rll %r1, %r1, 16(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; 0: rll %r1, %r0, 16(%r4) ; rnsbg %r1, %r2, 48, 64, 48 ; xilf %r1, 65535 ; rll %r1, %r1, 16(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; lrvr %r2, %r4 +; lrvr %r2, %r2 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0x10(%r5) +; rll %r1, %r0, 0x10(%r4) ; rnsbg %r1, %r2, 0x30, 0x40, 0x30 ; xilf %r1, 0xffff -; rll %r1, %r1, 0x10(%r5) +; rll %r1, %r1, 0x10(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x12 -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; jglh 0x16 +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 function %atomic_rmw_nand_i8(i64, i64, i8) -> i8 { @@ -739,27 +761,29 @@ block0(v0: i64, v1: i64, v2: i8): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; rnsbg %r1, %r4, 32, 40, 24 ; xilf %r1, 4278190080 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 8(%r5) +; 0: rll %r1, %r0, 0(%r4) ; rnsbg %r1, %r5, 32, 40, 24 ; xilf %r1, 4278190080 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 8(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr 
%r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) -; rnsbg %r1, %r4, 0x20, 0x28, 0x18 +; rll %r1, %r0, 0(%r4) +; rnsbg %r1, %r5, 0x20, 0x28, 0x18 ; xilf %r1, 0xff000000 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x10 -; rll %r2, %r0, 8(%r5) +; jglh 0x14 +; rll %r2, %r0, 8(%r4) ; br %r14 function %atomic_rmw_smin_i64(i64, i64, i64) -> i64 { @@ -820,32 +844,34 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; sllk %r2, %r4, 16 +; sllk %r2, %r2, 16 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 16(%r5) ; lrvr %r1, %r1 ; cr %r2, %r1 ; jgnl 1f ; risbgn %r1, %r2, 32, 48, 0 ; lrvr %r1, %r1 ; rll %r1, %r1, 16(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; 0: rll %r1, %r0, 16(%r4) ; lrvr %r1, %r1 ; cr %r2, %r1 ; jgnl 1f ; risbgn %r1, %r2, 32, 48, 0 ; lrvr %r1, %r1 ; rll %r1, %r1, 16(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; sllk %r2, %r4, 0x10 +; sllk %r2, %r2, 0x10 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0x10(%r5) +; rll %r1, %r0, 0x10(%r4) ; lrvr %r1, %r1 ; cr %r2, %r1 -; jgnl 0x40 +; jgnl 0x44 ; risbgn %r1, %r2, 0x20, 0x30, 0 ; lrvr %r1, %r1 -; rll %r1, %r1, 0x10(%r5) +; rll %r1, %r1, 0x10(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x14 -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; jglh 0x18 +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 function %atomic_rmw_smin_i8(i64, i64, i8) -> i8 { @@ -940,32 +966,34 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; sllk %r2, %r4, 16 +; sllk %r2, %r2, 16 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 16(%r5) ; lrvr %r1, %r1 ; cr %r2, %r1 ; jgnh 1f ; risbgn %r1, %r2, 32, 48, 0 ; lrvr %r1, %r1 ; rll %r1, %r1, 16(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; 0: rll %r1, %r0, 16(%r4) ; lrvr %r1, %r1 ; cr %r2, %r1 ; jgnh 1f ; risbgn %r1, %r2, 32, 48, 0 ; lrvr %r1, %r1 ; rll %r1, %r1, 16(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; sllk %r2, %r4, 0x10 +; sllk %r2, %r2, 0x10 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0x10(%r5) +; rll %r1, %r0, 0x10(%r4) ; lrvr %r1, %r1 ; cr %r2, %r1 -; jgnh 0x40 +; jgnh 0x44 ; risbgn %r1, %r2, 0x20, 0x30, 0 ; lrvr %r1, %r1 -; rll %r1, %r1, 0x10(%r5) +; rll %r1, %r1, 0x10(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x14 -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; jglh 0x18 +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 function %atomic_rmw_smax_i8(i64, i64, i8) -> i8 { @@ -1060,32 +1088,34 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; sllk %r2, %r4, 16 +; sllk %r2, %r2, 16 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 16(%r5) ; lrvr %r1, %r1 ; clr %r2, %r1 ; jgnl 1f ; risbgn %r1, %r2, 32, 48, 0 ; lrvr %r1, %r1 ; rll %r1, %r1, 16(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; 0: rll %r1, %r0, 16(%r4) ; lrvr %r1, %r1 ; clr %r2, %r1 ; jgnl 1f ; risbgn %r1, %r2, 32, 48, 0 ; lrvr %r1, %r1 ; rll %r1, %r1, 16(%r4) ; cs 
%r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; sllk %r2, %r4, 0x10 +; sllk %r2, %r2, 0x10 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0x10(%r5) +; rll %r1, %r0, 0x10(%r4) ; lrvr %r1, %r1 ; clr %r2, %r1 -; jgnl 0x40 +; jgnl 0x44 ; risbgn %r1, %r2, 0x20, 0x30, 0 ; lrvr %r1, %r1 -; rll %r1, %r1, 0x10(%r5) +; rll %r1, %r1, 0x10(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x14 -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; jglh 0x18 +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 function %atomic_rmw_umin_i8(i64, i64, i8) -> i8 { @@ -1180,32 +1210,34 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; sllk %r2, %r4, 16 +; sllk %r2, %r2, 16 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 16(%r5) ; lrvr %r1, %r1 ; clr %r2, %r1 ; jgnh 1f ; risbgn %r1, %r2, 32, 48, 0 ; lrvr %r1, %r1 ; rll %r1, %r1, 16(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; 0: rll %r1, %r0, 16(%r4) ; lrvr %r1, %r1 ; clr %r2, %r1 ; jgnh 1f ; risbgn %r1, %r2, 32, 48, 0 ; lrvr %r1, %r1 ; rll %r1, %r1, 16(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; sllk %r2, %r4, 0x10 +; sllk %r2, %r2, 0x10 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0x10(%r5) +; rll %r1, %r0, 0x10(%r4) ; lrvr %r1, %r1 ; clr %r2, %r1 -; jgnh 0x40 +; jgnh 0x44 ; risbgn %r1, %r2, 0x20, 0x30, 0 ; lrvr %r1, %r1 -; rll %r1, %r1, 0x10(%r5) +; rll %r1, %r1, 0x10(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x14 -; rll %r5, %r0, 0(%r5) -; lrvr %r2, %r5 +; jglh 0x18 +; rll %r2, %r0, 0(%r4) +; lrvr %r2, %r2 ; br %r14 function %atomic_rmw_umax_i8(i64, i64, i8) -> i8 { diff --git a/cranelift/filetests/filetests/isa/s390x/atomic_rmw.clif b/cranelift/filetests/filetests/isa/s390x/atomic_rmw.clif index ec38ec433af4..357f7dc56192 100644 --- a/cranelift/filetests/filetests/isa/s390x/atomic_rmw.clif +++ b/cranelift/filetests/filetests/isa/s390x/atomic_rmw.clif @@ -55,24 +55,26 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; risbgn %r1, %r4, 32, 48, 16 ; rll %r1, %r1, 0(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 16(%r5) +; 0: rll %r1, %r0, 0(%r4) ; risbgn %r1, %r2, 32, 48, 16 ; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 16(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) -; risbgn %r1, %r4, 0x20, 0x30, 0x10 -; rll %r1, %r1, 0(%r5) +; rll %r1, %r0, 0(%r4) +; risbgn %r1, %r2, 0x20, 0x30, 0x10 +; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0xe -; rll %r2, %r0, 0x10(%r5) +; jglh 0x12 +; rll %r2, %r0, 0x10(%r4) ; br %r14 function %atomic_rmw_xchg_i8(i64, i64, i8) -> i8 { @@ -83,26 +85,28 @@ block0(v0: i64, v1: i64, v2: i8): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; risbgn %r1, %r4, 32, 40, 24 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 8(%r5) +; 
0: rll %r1, %r0, 0(%r4) ; risbgn %r1, %r5, 32, 40, 24 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 8(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) -; risbgn %r1, %r4, 0x20, 0x28, 0x18 +; rll %r1, %r0, 0(%r4) +; risbgn %r1, %r5, 0x20, 0x28, 0x18 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x10 -; rll %r2, %r0, 8(%r5) +; jglh 0x14 +; rll %r2, %r0, 8(%r4) ; br %r14 function %atomic_rmw_add_i64(i64, i64) -> i64 { @@ -145,26 +149,28 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; sllk %r2, %r4, 16 +; sllk %r2, %r2, 16 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; ar %r1, %r2 ; rll %r1, %r1, 0(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 16(%r5) +; 0: rll %r1, %r0, 0(%r4) ; ar %r1, %r2 ; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 16(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; sllk %r2, %r4, 0x10 +; sllk %r2, %r2, 0x10 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) +; rll %r1, %r0, 0(%r4) ; ar %r1, %r2 -; rll %r1, %r1, 0(%r5) +; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x14 -; rll %r2, %r0, 0x10(%r5) +; jglh 0x18 +; rll %r2, %r0, 0x10(%r4) ; br %r14 function %atomic_rmw_add_i8(i64, i64, i8) -> i8 { @@ -207,14 +213,14 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; lcgr %r5, %r3 -; laag %r2, %r5, 0(%r2) +; lcgr %r3, %r3 +; laag %r2, %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lcgr %r5, %r3 -; laag %r2, %r5, 0(%r2) ; trap: heap_oob +; lcgr %r3, %r3 +; laag %r2, %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %atomic_rmw_sub_i32(i64, i32) -> i32 { @@ -225,14 +231,14 @@ block0(v0: i64, v1: i32): ; VCode: ; block0: -; lcr %r5, %r3 -; laa %r2, %r5, 0(%r2) +; lcr %r3, %r3 +; laa %r2, %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lcr %r5, %r3 -; laa %r2, %r5, 0(%r2) ; trap: heap_oob +; lcr %r3, %r3 +; laa %r2, %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %atomic_rmw_sub_i16(i64, i64, i16) -> i16 { @@ -243,26 +249,28 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; sllk %r2, %r4, 16 +; sllk %r2, %r2, 16 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; sr %r1, %r2 ; rll %r1, %r1, 0(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 16(%r5) +; 0: rll %r1, %r0, 0(%r4) ; sr %r1, %r2 ; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 16(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; sllk %r2, %r4, 0x10 +; sllk %r2, %r2, 0x10 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) +; rll %r1, %r0, 0(%r4) ; sr %r1, %r2 -; rll %r1, %r1, 0(%r5) +; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x14 -; rll %r2, %r0, 0x10(%r5) +; jglh 0x18 +; rll %r2, %r0, 0x10(%r4) ; br %r14 function %atomic_rmw_sub_i8(i64, i64, i8) -> i8 { @@ -337,24 +345,26 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; rnsbg %r1, %r4, 32, 48, 16 ; rll %r1, %r1, 0(%r5) ; 
cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 16(%r5) +; 0: rll %r1, %r0, 0(%r4) ; rnsbg %r1, %r2, 32, 48, 16 ; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 16(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) -; rnsbg %r1, %r4, 0x20, 0x30, 0x10 -; rll %r1, %r1, 0(%r5) +; rll %r1, %r0, 0(%r4) +; rnsbg %r1, %r2, 0x20, 0x30, 0x10 +; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0xe -; rll %r2, %r0, 0x10(%r5) +; jglh 0x12 +; rll %r2, %r0, 0x10(%r4) ; br %r14 function %atomic_rmw_and_i8(i64, i64, i8) -> i8 { @@ -365,26 +375,28 @@ block0(v0: i64, v1: i64, v2: i8): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; rnsbg %r1, %r4, 32, 40, 24 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 8(%r5) +; 0: rll %r1, %r0, 0(%r4) ; rnsbg %r1, %r5, 32, 40, 24 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 8(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) -; rnsbg %r1, %r4, 0x20, 0x28, 0x18 +; rll %r1, %r0, 0(%r4) +; rnsbg %r1, %r5, 0x20, 0x28, 0x18 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x10 -; rll %r2, %r0, 8(%r5) +; jglh 0x14 +; rll %r2, %r0, 8(%r4) ; br %r14 function %atomic_rmw_or_i64(i64, i64) -> i64 { @@ -427,24 +439,26 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; rosbg %r1, %r4, 32, 48, 16 ; rll %r1, %r1, 0(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 16(%r5) +; 0: rll %r1, %r0, 0(%r4) ; rosbg %r1, %r2, 32, 48, 16 ; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 16(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) -; rosbg %r1, %r4, 0x20, 0x30, 0x10 -; rll %r1, %r1, 0(%r5) +; rll %r1, %r0, 0(%r4) +; rosbg %r1, %r2, 0x20, 0x30, 0x10 +; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0xe -; rll %r2, %r0, 0x10(%r5) +; jglh 0x12 +; rll %r2, %r0, 0x10(%r4) ; br %r14 function %atomic_rmw_or_i8(i64, i64, i8) -> i8 { @@ -455,26 +469,28 @@ block0(v0: i64, v1: i64, v2: i8): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; rosbg %r1, %r4, 32, 40, 24 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 8(%r5) +; 0: rll %r1, %r0, 0(%r4) ; rosbg %r1, %r5, 32, 40, 24 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 8(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) -; rosbg %r1, %r4, 0x20, 0x28, 0x18 +; rll %r1, %r0, 0(%r4) +; rosbg %r1, %r5, 0x20, 0x28, 0x18 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x10 -; rll %r2, %r0, 8(%r5) +; jglh 0x14 +; rll %r2, 
%r0, 8(%r4) ; br %r14 function %atomic_rmw_xor_i64(i64, i64) -> i64 { @@ -517,24 +533,26 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; rxsbg %r1, %r4, 32, 48, 16 ; rll %r1, %r1, 0(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 16(%r5) +; 0: rll %r1, %r0, 0(%r4) ; rxsbg %r1, %r2, 32, 48, 16 ; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 16(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) -; rxsbg %r1, %r4, 0x20, 0x30, 0x10 -; rll %r1, %r1, 0(%r5) +; rll %r1, %r0, 0(%r4) +; rxsbg %r1, %r2, 0x20, 0x30, 0x10 +; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0xe -; rll %r2, %r0, 0x10(%r5) +; jglh 0x12 +; rll %r2, %r0, 0x10(%r4) ; br %r14 function %atomic_rmw_xor_i8(i64, i64, i8) -> i8 { @@ -545,26 +563,28 @@ block0(v0: i64, v1: i64, v2: i8): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; rxsbg %r1, %r4, 32, 40, 24 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 8(%r5) +; 0: rll %r1, %r0, 0(%r4) ; rxsbg %r1, %r5, 32, 40, 24 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 8(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) -; rxsbg %r1, %r4, 0x20, 0x28, 0x18 +; rll %r1, %r0, 0(%r4) +; rxsbg %r1, %r5, 0x20, 0x28, 0x18 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x10 -; rll %r2, %r0, 8(%r5) +; jglh 0x14 +; rll %r2, %r0, 8(%r4) ; br %r14 function %atomic_rmw_nand_i64(i64, i64, i64) -> i64 { @@ -622,25 +642,27 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; rnsbg %r1, %r4, 32, 48, 16 ; xilf %r1, 4294901760 ; rll %r1, %r1, 0(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 16(%r5) +; 0: rll %r1, %r0, 0(%r4) ; rnsbg %r1, %r2, 32, 48, 16 ; xilf %r1, 4294901760 ; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 16(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) -; rnsbg %r1, %r4, 0x20, 0x30, 0x10 +; rll %r1, %r0, 0(%r4) +; rnsbg %r1, %r2, 0x20, 0x30, 0x10 ; xilf %r1, 0xffff0000 -; rll %r1, %r1, 0(%r5) +; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0xe -; rll %r2, %r0, 0x10(%r5) +; jglh 0x12 +; rll %r2, %r0, 0x10(%r4) ; br %r14 function %atomic_rmw_nand_i8(i64, i64, i8) -> i8 { @@ -651,27 +673,29 @@ block0(v0: i64, v1: i64, v2: i8): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; rnsbg %r1, %r4, 32, 40, 24 ; xilf %r1, 4278190080 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 8(%r5) +; 0: rll %r1, %r0, 0(%r4) ; rnsbg %r1, %r5, 32, 40, 24 ; xilf %r1, 4278190080 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 
8(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r5, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; lcr %r2, %r5 +; lcr %r2, %r4 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) -; rnsbg %r1, %r4, 0x20, 0x28, 0x18 +; rll %r1, %r0, 0(%r4) +; rnsbg %r1, %r5, 0x20, 0x28, 0x18 ; xilf %r1, 0xff000000 ; rll %r1, %r1, 0(%r2) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x10 -; rll %r2, %r0, 8(%r5) +; jglh 0x14 +; rll %r2, %r0, 8(%r4) ; br %r14 function %atomic_rmw_smin_i64(i64, i64, i64) -> i64 { @@ -728,28 +752,30 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; sllk %r2, %r4, 16 +; sllk %r2, %r2, 16 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; cr %r2, %r1 ; jgnl 1f ; risbgn %r1, %r2, 32, 48, 0 ; rll %r1, %r1, 0(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 16(%r5) +; 0: rll %r1, %r0, 0(%r4) ; cr %r2, %r1 ; jgnl 1f ; risbgn %r1, %r2, 32, 48, 0 ; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 16(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; sllk %r2, %r4, 0x10 +; sllk %r2, %r2, 0x10 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) +; rll %r1, %r0, 0(%r4) ; cr %r2, %r1 -; jgnl 0x38 +; jgnl 0x3c ; risbgn %r1, %r2, 0x20, 0x30, 0 -; rll %r1, %r1, 0(%r5) +; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x14 -; rll %r2, %r0, 0x10(%r5) +; jglh 0x18 +; rll %r2, %r0, 0x10(%r4) ; br %r14 function %atomic_rmw_smin_i8(i64, i64, i8) -> i8 { @@ -840,28 +866,30 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; sllk %r2, %r4, 16 +; sllk %r2, %r2, 16 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; cr %r2, %r1 ; jgnh 1f ; risbgn %r1, %r2, 32, 48, 0 ; rll %r1, %r1, 0(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 16(%r5) +; 0: rll %r1, %r0, 0(%r4) ; cr %r2, %r1 ; jgnh 1f ; risbgn %r1, %r2, 32, 48, 0 ; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 16(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; sllk %r2, %r4, 0x10 +; sllk %r2, %r2, 0x10 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) +; rll %r1, %r0, 0(%r4) ; cr %r2, %r1 -; jgnh 0x38 +; jgnh 0x3c ; risbgn %r1, %r2, 0x20, 0x30, 0 -; rll %r1, %r1, 0(%r5) +; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x14 -; rll %r2, %r0, 0x10(%r5) +; jglh 0x18 +; rll %r2, %r0, 0x10(%r4) ; br %r14 function %atomic_rmw_smax_i8(i64, i64, i8) -> i8 { @@ -952,28 +980,30 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; sllk %r2, %r4, 16 +; sllk %r2, %r2, 16 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; clr %r2, %r1 ; jgnl 1f ; risbgn %r1, %r2, 32, 48, 0 ; rll %r1, %r1, 0(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 16(%r5) +; 0: rll %r1, %r0, 0(%r4) ; clr %r2, %r1 ; jgnl 1f ; risbgn %r1, %r2, 32, 48, 0 ; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 16(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; sllk %r2, %r4, 0x10 +; sllk %r2, %r2, 0x10 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) +; rll %r1, %r0, 0(%r4) ; clr %r2, %r1 -; jgnl 0x38 +; jgnl 
0x3c ; risbgn %r1, %r2, 0x20, 0x30, 0 -; rll %r1, %r1, 0(%r5) +; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x14 -; rll %r2, %r0, 0x10(%r5) +; jglh 0x18 +; rll %r2, %r0, 0x10(%r4) ; br %r14 function %atomic_rmw_umin_i8(i64, i64, i8) -> i8 { @@ -1064,28 +1094,30 @@ block0(v0: i64, v1: i64, v2: i16): ; VCode: ; block0: -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 65532 -; sllk %r2, %r4, 16 +; sllk %r2, %r2, 16 ; l %r0, 0(%r3) -; 0: rll %r1, %r0, 0(%r5) ; clr %r2, %r1 ; jgnh 1f ; risbgn %r1, %r2, 32, 48, 0 ; rll %r1, %r1, 0(%r5) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: -; rll %r2, %r0, 16(%r5) +; 0: rll %r1, %r0, 0(%r4) ; clr %r2, %r1 ; jgnh 1f ; risbgn %r1, %r2, 32, 48, 0 ; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; jglh 0b ; 1: +; rll %r2, %r0, 16(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; sllk %r5, %r3, 3 +; lgr %r2, %r4 +; sllk %r4, %r3, 3 ; nill %r3, 0xfffc -; sllk %r2, %r4, 0x10 +; sllk %r2, %r2, 0x10 ; l %r0, 0(%r3) ; trap: heap_oob -; rll %r1, %r0, 0(%r5) +; rll %r1, %r0, 0(%r4) ; clr %r2, %r1 -; jgnh 0x38 +; jgnh 0x3c ; risbgn %r1, %r2, 0x20, 0x30, 0 -; rll %r1, %r1, 0(%r5) +; rll %r1, %r1, 0(%r4) ; cs %r0, %r1, 0(%r3) ; trap: heap_oob -; jglh 0x14 -; rll %r2, %r0, 0x10(%r5) +; jglh 0x18 +; rll %r2, %r0, 0x10(%r4) ; br %r14 function %atomic_rmw_umax_i8(i64, i64, i8) -> i8 { diff --git a/cranelift/filetests/filetests/isa/s390x/atomic_store-little.clif b/cranelift/filetests/filetests/isa/s390x/atomic_store-little.clif index 17f8591e4ed6..8fe0c35ad140 100644 --- a/cranelift/filetests/filetests/isa/s390x/atomic_store-little.clif +++ b/cranelift/filetests/filetests/isa/s390x/atomic_store-little.clif @@ -49,15 +49,15 @@ block0(v0: i64): ; VCode: ; block0: -; lghi %r4, 12345 -; strvg %r4, 0(%r2) +; lghi %r3, 12345 +; strvg %r3, 0(%r2) ; bcr 14, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lghi %r4, 0x3039 -; strvg %r4, 0(%r2) ; trap: heap_oob +; lghi %r3, 0x3039 +; strvg %r3, 0(%r2) ; trap: heap_oob ; bnor %r0 ; br %r14 @@ -109,15 +109,15 @@ block0(v0: i64): ; VCode: ; block0: -; lhi %r4, 12345 -; strv %r4, 0(%r2) +; lhi %r3, 12345 +; strv %r3, 0(%r2) ; bcr 14, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lhi %r4, 0x3039 -; strv %r4, 0(%r2) ; trap: heap_oob +; lhi %r3, 0x3039 +; strv %r3, 0(%r2) ; trap: heap_oob ; bnor %r0 ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/bitops-arch13.clif b/cranelift/filetests/filetests/isa/s390x/bitops-arch13.clif index 3e02e5b0d916..ec0feb034847 100644 --- a/cranelift/filetests/filetests/isa/s390x/bitops-arch13.clif +++ b/cranelift/filetests/filetests/isa/s390x/bitops-arch13.clif @@ -30,15 +30,15 @@ block0(v0: i32): ; VCode: ; block0: -; llgfr %r4, %r2 -; popcnt %r2, %r4, 8 +; llgfr %r2, %r2 +; popcnt %r2, %r2, 8 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llgfr %r4, %r2 +; llgfr %r2, %r2 ; .byte 0xb9, 0xe1 -; .byte 0x80, 0x24 +; .byte 0x80, 0x22 ; br %r14 function %popcnt_i16(i16) -> i16 { @@ -49,15 +49,15 @@ block0(v0: i16): ; VCode: ; block0: -; llghr %r4, %r2 -; popcnt %r2, %r4, 8 +; llghr %r2, %r2 +; popcnt %r2, %r2, 8 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llghr %r4, %r2 +; llghr %r2, %r2 ; .byte 0xb9, 0xe1 -; .byte 0x80, 0x24 +; .byte 0x80, 0x22 ; br %r14 function %popcnt_i8(i8) -> i8 { diff --git a/cranelift/filetests/filetests/isa/s390x/bitops-arch15.clif b/cranelift/filetests/filetests/isa/s390x/bitops-arch15.clif index 8f9b3b30ea89..27b4ed5ce785 100644 --- a/cranelift/filetests/filetests/isa/s390x/bitops-arch15.clif +++ 
b/cranelift/filetests/filetests/isa/s390x/bitops-arch15.clif @@ -47,17 +47,17 @@ block0(v0: i32): ; VCode: ; block0: -; llgfr %r4, %r2 -; clzg %r2, %r4 -; ahi %r2, -32 +; llgfr %r2, %r2 +; clzg %r4, %r2 +; ahik %r2, %r4, -32 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llgfr %r4, %r2 +; llgfr %r2, %r2 ; .byte 0xb9, 0x68 -; .byte 0x00, 0x24 -; ahi %r2, -0x20 +; .byte 0x00, 0x42 +; ahik %r2, %r4, -0x20 ; br %r14 function %clz_i16(i16) -> i16 { @@ -68,17 +68,17 @@ block0(v0: i16): ; VCode: ; block0: -; llghr %r4, %r2 -; clzg %r2, %r4 -; ahi %r2, -48 +; llghr %r2, %r2 +; clzg %r4, %r2 +; ahik %r2, %r4, -48 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llghr %r4, %r2 +; llghr %r2, %r2 ; .byte 0xb9, 0x68 -; .byte 0x00, 0x24 -; ahi %r2, -0x30 +; .byte 0x00, 0x42 +; ahik %r2, %r4, -0x30 ; br %r14 function %clz_i8(i8) -> i8 { @@ -89,17 +89,17 @@ block0(v0: i8): ; VCode: ; block0: -; llgcr %r4, %r2 -; clzg %r2, %r4 -; ahi %r2, -56 +; llgcr %r2, %r2 +; clzg %r4, %r2 +; ahik %r2, %r4, -56 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llgcr %r4, %r2 +; llgcr %r2, %r2 ; .byte 0xb9, 0x68 -; .byte 0x00, 0x24 -; ahi %r2, -0x38 +; .byte 0x00, 0x42 +; ahik %r2, %r4, -0x38 ; br %r14 function %cls_i128(i128) -> i128 { @@ -114,10 +114,10 @@ block0(v0: i128): ; vrepib %v4, 255 ; vsrab %v6, %v1, %v4 ; vsra %v16, %v6, %v4 -; vx %v18, %v1, %v16 -; vclzq %v20, %v18 -; vaq %v22, %v20, %v4 -; vst %v22, 0(%r2) +; vx %v16, %v1, %v16 +; vclzq %v16, %v16 +; vaq %v16, %v16, %v4 +; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: @@ -126,10 +126,10 @@ block0(v0: i128): ; vrepib %v4, 0xff ; vsrab %v6, %v1, %v4 ; vsra %v16, %v6, %v4 -; vx %v18, %v1, %v16 -; vclz %v20, %v18, 4 -; vaq %v22, %v20, %v4 -; vst %v22, 0(%r2) +; vx %v16, %v1, %v16 +; vclz %v16, %v16, 4 +; vaq %v16, %v16, %v4 +; vst %v16, 0(%r2) ; br %r14 function %cls_i64(i64) -> i64 { @@ -140,19 +140,19 @@ block0(v0: i64): ; VCode: ; block0: -; srag %r4, %r2, 63 -; xgr %r2, %r4 -; clzg %r4, %r2 -; aghik %r2, %r4, -1 +; srag %r3, %r2, 63 +; xgrk %r4, %r2, %r3 +; clzg %r2, %r4 +; aghi %r2, -1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; srag %r4, %r2, 0x3f -; xgr %r2, %r4 +; srag %r3, %r2, 0x3f +; xgrk %r4, %r2, %r3 ; .byte 0xb9, 0x68 -; .byte 0x00, 0x42 -; aghik %r2, %r4, -1 +; .byte 0x00, 0x24 +; aghi %r2, -1 ; br %r14 function %cls_i32(i32) -> i32 { @@ -163,20 +163,20 @@ block0(v0: i32): ; VCode: ; block0: -; lgfr %r4, %r2 -; srag %r2, %r4, 63 -; xgr %r4, %r2 -; clzg %r2, %r4 +; lgfr %r2, %r2 +; srag %r4, %r2, 63 +; xgr %r2, %r4 +; clzg %r2, %r2 ; ahi %r2, -33 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgfr %r4, %r2 -; srag %r2, %r4, 0x3f -; xgr %r4, %r2 +; lgfr %r2, %r2 +; srag %r4, %r2, 0x3f +; xgr %r2, %r4 ; .byte 0xb9, 0x68 -; .byte 0x00, 0x24 +; .byte 0x00, 0x22 ; ahi %r2, -0x21 ; br %r14 @@ -188,20 +188,20 @@ block0(v0: i16): ; VCode: ; block0: -; lghr %r4, %r2 -; srag %r2, %r4, 63 -; xgr %r4, %r2 -; clzg %r2, %r4 +; lghr %r2, %r2 +; srag %r4, %r2, 63 +; xgr %r2, %r4 +; clzg %r2, %r2 ; ahi %r2, -49 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lghr %r4, %r2 -; srag %r2, %r4, 0x3f -; xgr %r4, %r2 +; lghr %r2, %r2 +; srag %r4, %r2, 0x3f +; xgr %r2, %r4 ; .byte 0xb9, 0x68 -; .byte 0x00, 0x24 +; .byte 0x00, 0x22 ; ahi %r2, -0x31 ; br %r14 @@ -213,20 +213,20 @@ block0(v0: i8): ; VCode: ; block0: -; lgbr %r4, %r2 -; srag %r2, %r4, 63 -; xgr %r4, %r2 -; clzg %r2, %r4 +; lgbr %r2, %r2 +; srag %r4, %r2, 63 +; xgr %r2, %r4 +; clzg %r2, %r2 ; ahi %r2, -57 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgbr %r4, %r2 
-; srag %r2, %r4, 0x3f -; xgr %r4, %r2 +; lgbr %r2, %r2 +; srag %r4, %r2, 0x3f +; xgr %r2, %r4 ; .byte 0xb9, 0x68 -; .byte 0x00, 0x24 +; .byte 0x00, 0x22 ; ahi %r2, -0x39 ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/bitops.clif b/cranelift/filetests/filetests/isa/s390x/bitops.clif index 41967ddd1239..267bff8dfb27 100644 --- a/cranelift/filetests/filetests/isa/s390x/bitops.clif +++ b/cranelift/filetests/filetests/isa/s390x/bitops.clif @@ -14,21 +14,21 @@ block0(v0: i128): ; vrepib %v4, 170 ; vrepib %v6, 1 ; vsl %v16, %v1, %v6 -; vsrl %v18, %v1, %v6 -; vsel %v20, %v16, %v18, %v4 -; vrepib %v22, 204 -; vrepib %v24, 2 -; vsl %v26, %v20, %v24 -; vsrl %v28, %v20, %v24 -; vsel %v30, %v26, %v28, %v22 -; vrepib %v0, 240 -; vrepib %v2, 4 -; vsl %v4, %v30, %v2 -; vsrl %v6, %v30, %v2 -; vsel %v16, %v4, %v6, %v0 -; larl %r1, [const(0)] ; vl %v18, 0(%r1) -; vperm %v20, %v16, %v16, %v18 -; vst %v20, 0(%r2) +; vsrl %v17, %v1, %v6 +; vsel %v17, %v16, %v17, %v4 +; vrepib %v16, 204 +; vrepib %v19, 2 +; vsl %v18, %v17, %v19 +; vsrl %v20, %v17, %v19 +; vsel %v22, %v18, %v20, %v16 +; vrepib %v24, 240 +; vrepib %v26, 4 +; vsl %v28, %v22, %v26 +; vsrl %v30, %v22, %v26 +; vsel %v0, %v28, %v30, %v24 +; larl %r1, [const(0)] ; vl %v1, 0(%r1) +; vperm %v0, %v0, %v0, %v1 +; vst %v0, 0(%r2) ; br %r14 ; ; Disassembled: @@ -37,22 +37,22 @@ block0(v0: i128): ; vrepib %v4, 0xaa ; vrepib %v6, 1 ; vsl %v16, %v1, %v6 -; vsrl %v18, %v1, %v6 -; vsel %v20, %v16, %v18, %v4 -; vrepib %v22, 0xcc -; vrepib %v24, 2 -; vsl %v26, %v20, %v24 -; vsrl %v28, %v20, %v24 -; vsel %v30, %v26, %v28, %v22 -; vrepib %v0, 0xf0 -; vrepib %v2, 4 -; vsl %v4, %v30, %v2 -; vsrl %v6, %v30, %v2 -; vsel %v16, %v4, %v6, %v0 +; vsrl %v17, %v1, %v6 +; vsel %v17, %v16, %v17, %v4 +; vrepib %v16, 0xcc +; vrepib %v19, 2 +; vsl %v18, %v17, %v19 +; vsrl %v20, %v17, %v19 +; vsel %v22, %v18, %v20, %v16 +; vrepib %v24, 0xf0 +; vrepib %v26, 4 +; vsl %v28, %v22, %v26 +; vsrl %v30, %v22, %v26 +; vsel %v0, %v28, %v30, %v24 ; larl %r1, 0x80 -; vl %v18, 0(%r1) -; vperm %v20, %v16, %v16, %v18 -; vst %v20, 0(%r2) +; vl %v1, 0(%r1) +; vperm %v0, %v0, %v0, %v1 +; vst %v0, 0(%r2) ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -74,70 +74,70 @@ block0(v0: i64): ; VCode: ; block0: -; lgr %r4, %r2 +; lgr %r3, %r2 ; llihf %r2, 2863311530 ; iilf %r2, 2863311530 -; lgr %r3, %r4 -; sllg %r4, %r3, 1 -; srlg %r3, %r3, 1 -; ngr %r4, %r2 +; lgr %r4, %r3 +; sllg %r3, %r4, 1 +; srlg %r4, %r4, 1 +; ngr %r3, %r2 ; xilf %r2, 4294967295 ; xihf %r2, 4294967295 -; ngrk %r2, %r3, %r2 -; ogrk %r5, %r4, %r2 -; llihf %r4, 3435973836 -; iilf %r4, 3435973836 -; sllg %r2, %r5, 2 -; srlg %r5, %r5, 2 -; ngr %r2, %r4 -; xilf %r4, 4294967295 -; xihf %r4, 4294967295 -; ngrk %r4, %r5, %r4 -; ogrk %r3, %r2, %r4 +; ngrk %r2, %r4, %r2 +; ogr %r3, %r2 +; llihf %r2, 3435973836 +; iilf %r2, 3435973836 +; sllg %r5, %r3, 2 +; srlg %r4, %r3, 2 +; ngrk %r3, %r5, %r2 +; xilf %r2, 4294967295 +; xihf %r2, 4294967295 +; ngrk %r2, %r4, %r2 +; ogr %r3, %r2 ; llihf %r2, 4042322160 ; iilf %r2, 4042322160 -; sllg %r4, %r3, 4 -; srlg %r3, %r3, 4 -; ngr %r4, %r2 +; sllg %r5, %r3, 4 +; srlg %r4, %r3, 4 +; ngrk %r3, %r5, %r2 ; xilf %r2, 4294967295 ; xihf %r2, 4294967295 -; ngrk %r2, %r3, %r2 -; ogr %r4, %r2 -; lrvgr %r2, %r4 +; ngrk %r2, %r4, %r2 +; ogrk %r2, %r3, %r2 +; lrvgr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgr %r4, %r2 +; lgr %r3, %r2 ; llihf %r2, 0xaaaaaaaa ; iilf %r2, 0xaaaaaaaa -; lgr %r3, %r4 -; sllg %r4, %r3, 1 -; srlg %r3, %r3, 1 -; ngr %r4, %r2 +; lgr %r4, %r3 +; sllg %r3, 
%r4, 1 +; srlg %r4, %r4, 1 +; ngr %r3, %r2 ; xilf %r2, 0xffffffff ; xihf %r2, 0xffffffff -; ngrk %r2, %r3, %r2 -; ogrk %r5, %r4, %r2 -; llihf %r4, 0xcccccccc -; iilf %r4, 0xcccccccc -; sllg %r2, %r5, 2 -; srlg %r5, %r5, 2 -; ngr %r2, %r4 -; xilf %r4, 0xffffffff -; xihf %r4, 0xffffffff -; ngrk %r4, %r5, %r4 -; ogrk %r3, %r2, %r4 +; ngrk %r2, %r4, %r2 +; ogr %r3, %r2 +; llihf %r2, 0xcccccccc +; iilf %r2, 0xcccccccc +; sllg %r5, %r3, 2 +; srlg %r4, %r3, 2 +; ngrk %r3, %r5, %r2 +; xilf %r2, 0xffffffff +; xihf %r2, 0xffffffff +; ngrk %r2, %r4, %r2 +; ogr %r3, %r2 ; llihf %r2, 0xf0f0f0f0 ; iilf %r2, 0xf0f0f0f0 -; sllg %r4, %r3, 4 -; srlg %r3, %r3, 4 -; ngr %r4, %r2 +; sllg %r5, %r3, 4 +; srlg %r4, %r3, 4 +; ngrk %r3, %r5, %r2 ; xilf %r2, 0xffffffff ; xihf %r2, 0xffffffff -; ngrk %r2, %r3, %r2 -; ogr %r4, %r2 -; lrvgr %r2, %r4 +; ngrk %r2, %r4, %r2 +; ogrk %r2, %r3, %r2 +; lrvgr %r2, %r2 ; br %r14 function %bitrev_i32(i32) -> i32 { @@ -148,54 +148,54 @@ block0(v0: i32): ; VCode: ; block0: -; iilf %r4, 2863311530 +; iilf %r5, 2863311530 ; sllk %r3, %r2, 1 -; srlk %r5, %r2, 1 -; nrk %r2, %r3, %r4 -; xilf %r4, 4294967295 -; nrk %r3, %r5, %r4 -; ork %r4, %r2, %r3 +; srlk %r4, %r2, 1 +; nr %r3, %r5 +; xilf %r5, 4294967295 +; nrk %r2, %r4, %r5 +; or %r3, %r2 ; iilf %r2, 3435973836 -; sllk %r5, %r4, 2 -; srlk %r3, %r4, 2 -; nrk %r4, %r5, %r2 +; sllk %r5, %r3, 2 +; srlk %r4, %r3, 2 +; nrk %r3, %r5, %r2 +; xilf %r2, 4294967295 +; nrk %r2, %r4, %r2 +; or %r3, %r2 +; iilf %r2, 4042322160 +; sllk %r5, %r3, 4 +; srlk %r4, %r3, 4 +; nrk %r3, %r5, %r2 ; xilf %r2, 4294967295 -; nrk %r5, %r3, %r2 -; ork %r2, %r4, %r5 -; iilf %r4, 4042322160 -; sllk %r3, %r2, 4 -; srlk %r5, %r2, 4 -; nrk %r2, %r3, %r4 -; xilf %r4, 4294967295 -; nrk %r3, %r5, %r4 -; ork %r4, %r2, %r3 -; lrvr %r2, %r4 +; nrk %r2, %r4, %r2 +; ork %r2, %r3, %r2 +; lrvr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; iilf %r4, 0xaaaaaaaa +; iilf %r5, 0xaaaaaaaa ; sllk %r3, %r2, 1 -; srlk %r5, %r2, 1 -; nrk %r2, %r3, %r4 -; xilf %r4, 0xffffffff -; nrk %r3, %r5, %r4 -; ork %r4, %r2, %r3 +; srlk %r4, %r2, 1 +; nr %r3, %r5 +; xilf %r5, 0xffffffff +; nrk %r2, %r4, %r5 +; or %r3, %r2 ; iilf %r2, 0xcccccccc -; sllk %r5, %r4, 2 -; srlk %r3, %r4, 2 -; nrk %r4, %r5, %r2 +; sllk %r5, %r3, 2 +; srlk %r4, %r3, 2 +; nrk %r3, %r5, %r2 +; xilf %r2, 0xffffffff +; nrk %r2, %r4, %r2 +; or %r3, %r2 +; iilf %r2, 0xf0f0f0f0 +; sllk %r5, %r3, 4 +; srlk %r4, %r3, 4 +; nrk %r3, %r5, %r2 ; xilf %r2, 0xffffffff -; nrk %r5, %r3, %r2 -; ork %r2, %r4, %r5 -; iilf %r4, 0xf0f0f0f0 -; sllk %r3, %r2, 4 -; srlk %r5, %r2, 4 -; nrk %r2, %r3, %r4 -; xilf %r4, 0xffffffff -; nrk %r3, %r5, %r4 -; ork %r4, %r2, %r3 -; lrvr %r2, %r4 +; nrk %r2, %r4, %r2 +; ork %r2, %r3, %r2 +; lrvr %r2, %r2 ; br %r14 function %bitrev_i16(i16) -> i16 { @@ -206,55 +206,55 @@ block0(v0: i16): ; VCode: ; block0: -; lhi %r4, -21846 +; lhi %r5, -21846 ; sllk %r3, %r2, 1 -; srlk %r5, %r2, 1 -; nrk %r2, %r3, %r4 -; xilf %r4, 4294967295 -; nrk %r3, %r5, %r4 -; ork %r4, %r2, %r3 +; srlk %r4, %r2, 1 +; nr %r3, %r5 +; xilf %r5, 4294967295 +; nrk %r2, %r4, %r5 +; or %r3, %r2 ; lhi %r2, -13108 -; sllk %r5, %r4, 2 -; srlk %r3, %r4, 2 -; nrk %r4, %r5, %r2 +; sllk %r5, %r3, 2 +; srlk %r4, %r3, 2 +; nrk %r3, %r5, %r2 ; xilf %r2, 4294967295 -; nrk %r5, %r3, %r2 -; ork %r2, %r4, %r5 -; lhi %r4, -3856 -; sllk %r3, %r2, 4 -; srlk %r5, %r2, 4 -; nrk %r2, %r3, %r4 -; xilf %r4, 4294967295 -; nrk %r3, %r5, %r4 -; ork %r4, %r2, %r3 -; lrvr %r2, %r4 +; nrk %r2, %r4, %r2 +; or %r3, %r2 +; lhi %r2, -3856 +; sllk %r5, %r3, 4 
+; srlk %r4, %r3, 4 +; nrk %r3, %r5, %r2 +; xilf %r2, 4294967295 +; nrk %r2, %r4, %r2 +; ork %r2, %r3, %r2 +; lrvr %r2, %r2 ; srlk %r2, %r2, 16 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lhi %r4, -0x5556 +; lhi %r5, -0x5556 ; sllk %r3, %r2, 1 -; srlk %r5, %r2, 1 -; nrk %r2, %r3, %r4 -; xilf %r4, 0xffffffff -; nrk %r3, %r5, %r4 -; ork %r4, %r2, %r3 +; srlk %r4, %r2, 1 +; nr %r3, %r5 +; xilf %r5, 0xffffffff +; nrk %r2, %r4, %r5 +; or %r3, %r2 ; lhi %r2, -0x3334 -; sllk %r5, %r4, 2 -; srlk %r3, %r4, 2 -; nrk %r4, %r5, %r2 +; sllk %r5, %r3, 2 +; srlk %r4, %r3, 2 +; nrk %r3, %r5, %r2 +; xilf %r2, 0xffffffff +; nrk %r2, %r4, %r2 +; or %r3, %r2 +; lhi %r2, -0xf10 +; sllk %r5, %r3, 4 +; srlk %r4, %r3, 4 +; nrk %r3, %r5, %r2 ; xilf %r2, 0xffffffff -; nrk %r5, %r3, %r2 -; ork %r2, %r4, %r5 -; lhi %r4, -0xf10 -; sllk %r3, %r2, 4 -; srlk %r5, %r2, 4 -; nrk %r2, %r3, %r4 -; xilf %r4, 0xffffffff -; nrk %r3, %r5, %r4 -; ork %r4, %r2, %r3 -; lrvr %r2, %r4 +; nrk %r2, %r4, %r2 +; ork %r2, %r3, %r2 +; lrvr %r2, %r2 ; srlk %r2, %r2, 0x10 ; br %r14 @@ -266,52 +266,52 @@ block0(v0: i8): ; VCode: ; block0: -; lhi %r4, -21846 +; lhi %r5, -21846 ; sllk %r3, %r2, 1 -; srlk %r5, %r2, 1 -; nrk %r2, %r3, %r4 -; xilf %r4, 4294967295 -; nrk %r3, %r5, %r4 -; ork %r4, %r2, %r3 +; srlk %r4, %r2, 1 +; nr %r3, %r5 +; xilf %r5, 4294967295 +; nrk %r2, %r4, %r5 +; or %r3, %r2 ; lhi %r2, -13108 -; sllk %r5, %r4, 2 -; srlk %r3, %r4, 2 -; nrk %r4, %r5, %r2 +; sllk %r5, %r3, 2 +; srlk %r4, %r3, 2 +; nrk %r3, %r5, %r2 ; xilf %r2, 4294967295 -; nrk %r5, %r3, %r2 -; ork %r2, %r4, %r5 -; lhi %r4, -3856 -; sllk %r3, %r2, 4 -; srlk %r5, %r2, 4 -; nrk %r2, %r3, %r4 -; xilf %r4, 4294967295 -; nrk %r3, %r5, %r4 -; or %r2, %r3 +; nrk %r2, %r4, %r2 +; or %r3, %r2 +; lhi %r2, -3856 +; sllk %r5, %r3, 4 +; srlk %r4, %r3, 4 +; nrk %r3, %r5, %r2 +; xilf %r2, 4294967295 +; nrk %r2, %r4, %r2 +; ork %r2, %r3, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lhi %r4, -0x5556 +; lhi %r5, -0x5556 ; sllk %r3, %r2, 1 -; srlk %r5, %r2, 1 -; nrk %r2, %r3, %r4 -; xilf %r4, 0xffffffff -; nrk %r3, %r5, %r4 -; ork %r4, %r2, %r3 +; srlk %r4, %r2, 1 +; nr %r3, %r5 +; xilf %r5, 0xffffffff +; nrk %r2, %r4, %r5 +; or %r3, %r2 ; lhi %r2, -0x3334 -; sllk %r5, %r4, 2 -; srlk %r3, %r4, 2 -; nrk %r4, %r5, %r2 +; sllk %r5, %r3, 2 +; srlk %r4, %r3, 2 +; nrk %r3, %r5, %r2 +; xilf %r2, 0xffffffff +; nrk %r2, %r4, %r2 +; or %r3, %r2 +; lhi %r2, -0xf10 +; sllk %r5, %r3, 4 +; srlk %r4, %r3, 4 +; nrk %r3, %r5, %r2 ; xilf %r2, 0xffffffff -; nrk %r5, %r3, %r2 -; ork %r2, %r4, %r5 -; lhi %r4, -0xf10 -; sllk %r3, %r2, 4 -; srlk %r5, %r2, 4 -; nrk %r2, %r3, %r4 -; xilf %r4, 0xffffffff -; nrk %r3, %r5, %r4 -; or %r2, %r3 +; nrk %r2, %r4, %r2 +; ork %r2, %r3, %r2 ; br %r14 function %clz_i128(i128) -> i128 { @@ -326,12 +326,12 @@ block0(v0: i128): ; vclzg %v4, %v1 ; vgbm %v6, 0 ; vpdi %v16, %v6, %v4, 0 -; vpdi %v18, %v6, %v4, 1 -; vag %v20, %v16, %v18 -; vrepig %v22, 64 -; vceqg %v24, %v16, %v22 -; vsel %v26, %v20, %v16, %v24 -; vst %v26, 0(%r2) +; vpdi %v17, %v6, %v4, 1 +; vag %v17, %v16, %v17 +; vrepig %v18, 64 +; vceqg %v18, %v16, %v18 +; vsel %v18, %v17, %v16, %v18 +; vst %v18, 0(%r2) ; br %r14 ; ; Disassembled: @@ -340,12 +340,12 @@ block0(v0: i128): ; vclzg %v4, %v1 ; vzero %v6 ; vpdi %v16, %v6, %v4, 0 -; vpdi %v18, %v6, %v4, 1 -; vag %v20, %v16, %v18 -; vrepig %v22, 0x40 -; vceqg %v24, %v16, %v22 -; vsel %v26, %v20, %v16, %v24 -; vst %v26, 0(%r2) +; vpdi %v17, %v6, %v4, 1 +; vag %v17, %v16, %v17 +; vrepig %v18, 0x40 +; vceqg %v18, %v16, %v18 +; vsel %v18, 
%v17, %v16, %v18 +; vst %v18, 0(%r2) ; br %r14 function %clz_i64(i64) -> i64 { @@ -372,15 +372,15 @@ block0(v0: i32): ; VCode: ; block0: -; llgfr %r4, %r2 -; flogr %r2, %r4 +; llgfr %r2, %r2 +; flogr %r2, %r2 ; ahi %r2, -32 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llgfr %r4, %r2 -; flogr %r2, %r4 +; llgfr %r2, %r2 +; flogr %r2, %r2 ; ahi %r2, -0x20 ; br %r14 @@ -392,15 +392,15 @@ block0(v0: i16): ; VCode: ; block0: -; llghr %r4, %r2 -; flogr %r2, %r4 +; llghr %r2, %r2 +; flogr %r2, %r2 ; ahi %r2, -48 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llghr %r4, %r2 -; flogr %r2, %r4 +; llghr %r2, %r2 +; flogr %r2, %r2 ; ahi %r2, -0x30 ; br %r14 @@ -412,15 +412,15 @@ block0(v0: i8): ; VCode: ; block0: -; llgcr %r4, %r2 -; flogr %r2, %r4 +; llgcr %r2, %r2 +; flogr %r2, %r2 ; ahi %r2, -56 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llgcr %r4, %r2 -; flogr %r2, %r4 +; llgcr %r2, %r2 +; flogr %r2, %r2 ; ahi %r2, -0x38 ; br %r14 @@ -436,17 +436,17 @@ block0(v0: i128): ; vrepib %v4, 255 ; vsrab %v6, %v1, %v4 ; vsra %v16, %v6, %v4 -; vx %v18, %v1, %v16 -; vclzg %v20, %v18 -; vgbm %v22, 0 -; vpdi %v24, %v22, %v20, 0 -; vpdi %v26, %v22, %v20, 1 -; vag %v28, %v24, %v26 -; vrepig %v30, 64 -; vceqg %v0, %v24, %v30 -; vsel %v2, %v28, %v24, %v0 -; vaq %v4, %v2, %v4 -; vst %v4, 0(%r2) +; vx %v16, %v1, %v16 +; vclzg %v17, %v16 +; vgbm %v18, 0 +; vpdi %v16, %v18, %v17, 0 +; vpdi %v18, %v18, %v17, 1 +; vag %v20, %v16, %v18 +; vrepig %v22, 64 +; vceqg %v24, %v16, %v22 +; vsel %v26, %v20, %v16, %v24 +; vaq %v28, %v26, %v4 +; vst %v28, 0(%r2) ; br %r14 ; ; Disassembled: @@ -455,17 +455,17 @@ block0(v0: i128): ; vrepib %v4, 0xff ; vsrab %v6, %v1, %v4 ; vsra %v16, %v6, %v4 -; vx %v18, %v1, %v16 -; vclzg %v20, %v18 -; vzero %v22 -; vpdi %v24, %v22, %v20, 0 -; vpdi %v26, %v22, %v20, 1 -; vag %v28, %v24, %v26 -; vrepig %v30, 0x40 -; vceqg %v0, %v24, %v30 -; vsel %v2, %v28, %v24, %v0 -; vaq %v4, %v2, %v4 -; vst %v4, 0(%r2) +; vx %v16, %v1, %v16 +; vclzg %v17, %v16 +; vzero %v18 +; vpdi %v16, %v18, %v17, 0 +; vpdi %v18, %v18, %v17, 1 +; vag %v20, %v16, %v18 +; vrepig %v22, 0x40 +; vceqg %v24, %v16, %v22 +; vsel %v26, %v20, %v16, %v24 +; vaq %v28, %v26, %v4 +; vst %v28, 0(%r2) ; br %r14 function %cls_i64(i64) -> i64 { @@ -476,17 +476,17 @@ block0(v0: i64): ; VCode: ; block0: -; srag %r4, %r2, 63 -; xgr %r2, %r4 -; flogr %r2, %r2 +; srag %r3, %r2, 63 +; xgrk %r4, %r2, %r3 +; flogr %r2, %r4 ; aghi %r2, -1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; srag %r4, %r2, 0x3f -; xgr %r2, %r4 -; flogr %r2, %r2 +; srag %r3, %r2, 0x3f +; xgrk %r4, %r2, %r3 +; flogr %r2, %r4 ; aghi %r2, -1 ; br %r14 @@ -498,19 +498,19 @@ block0(v0: i32): ; VCode: ; block0: -; lgfr %r4, %r2 -; srag %r2, %r4, 63 -; xgr %r4, %r2 -; flogr %r2, %r4 +; lgfr %r2, %r2 +; srag %r4, %r2, 63 +; xgr %r2, %r4 +; flogr %r2, %r2 ; ahi %r2, -33 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgfr %r4, %r2 -; srag %r2, %r4, 0x3f -; xgr %r4, %r2 -; flogr %r2, %r4 +; lgfr %r2, %r2 +; srag %r4, %r2, 0x3f +; xgr %r2, %r4 +; flogr %r2, %r2 ; ahi %r2, -0x21 ; br %r14 @@ -522,19 +522,19 @@ block0(v0: i16): ; VCode: ; block0: -; lghr %r4, %r2 -; srag %r2, %r4, 63 -; xgr %r4, %r2 -; flogr %r2, %r4 +; lghr %r2, %r2 +; srag %r4, %r2, 63 +; xgr %r2, %r4 +; flogr %r2, %r2 ; ahi %r2, -49 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lghr %r4, %r2 -; srag %r2, %r4, 0x3f -; xgr %r4, %r2 -; flogr %r2, %r4 +; lghr %r2, %r2 +; srag %r4, %r2, 0x3f +; xgr %r2, %r4 +; flogr %r2, %r2 ; ahi %r2, -0x31 ; br %r14 @@ -546,19 +546,19 @@ 
block0(v0: i8): ; VCode: ; block0: -; lgbr %r4, %r2 -; srag %r2, %r4, 63 -; xgr %r4, %r2 -; flogr %r2, %r4 +; lgbr %r2, %r2 +; srag %r4, %r2, 63 +; xgr %r2, %r4 +; flogr %r2, %r2 ; ahi %r2, -57 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgbr %r4, %r2 -; srag %r2, %r4, 0x3f -; xgr %r4, %r2 -; flogr %r2, %r4 +; lgbr %r2, %r2 +; srag %r4, %r2, 0x3f +; xgr %r2, %r4 +; flogr %r2, %r2 ; ahi %r2, -0x39 ; br %r14 @@ -573,13 +573,13 @@ block0(v0: i128): ; vl %v1, 0(%r3) ; vctzg %v4, %v1 ; vgbm %v6, 0 -; vpdi %v16, %v6, %v4, 0 -; vpdi %v18, %v6, %v4, 1 -; vag %v20, %v16, %v18 -; vrepig %v22, 64 -; vceqg %v24, %v18, %v22 -; vsel %v26, %v20, %v18, %v24 -; vst %v26, 0(%r2) +; vpdi %v17, %v6, %v4, 0 +; vpdi %v16, %v6, %v4, 1 +; vag %v17, %v17, %v16 +; vrepig %v18, 64 +; vceqg %v18, %v16, %v18 +; vsel %v18, %v17, %v16, %v18 +; vst %v18, 0(%r2) ; br %r14 ; ; Disassembled: @@ -587,13 +587,13 @@ block0(v0: i128): ; vl %v1, 0(%r3) ; vctzg %v4, %v1 ; vzero %v6 -; vpdi %v16, %v6, %v4, 0 -; vpdi %v18, %v6, %v4, 1 -; vag %v20, %v16, %v18 -; vrepig %v22, 0x40 -; vceqg %v24, %v18, %v22 -; vsel %v26, %v20, %v18, %v24 -; vst %v26, 0(%r2) +; vpdi %v17, %v6, %v4, 0 +; vpdi %v16, %v6, %v4, 1 +; vag %v17, %v17, %v16 +; vrepig %v18, 0x40 +; vceqg %v18, %v16, %v18 +; vsel %v18, %v17, %v16, %v18 +; vst %v18, 0(%r2) ; br %r14 function %ctz_i64(i64) -> i64 { @@ -604,22 +604,22 @@ block0(v0: i64): ; VCode: ; block0: -; lcgr %r4, %r2 -; ngr %r2, %r4 -; flogr %r2, %r2 +; lcgr %r3, %r2 +; ngrk %r4, %r2, %r3 +; flogr %r2, %r4 ; locghie %r2, -1 -; lghi %r5, 63 -; sgrk %r2, %r5, %r2 +; lghi %r3, 63 +; sgrk %r2, %r3, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lcgr %r4, %r2 -; ngr %r2, %r4 -; flogr %r2, %r2 +; lcgr %r3, %r2 +; ngrk %r4, %r2, %r3 +; flogr %r2, %r4 ; locghie %r2, -1 -; lghi %r5, 0x3f -; sgrk %r2, %r5, %r2 +; lghi %r3, 0x3f +; sgrk %r2, %r3, %r2 ; br %r14 function %ctz_i32(i32) -> i32 { @@ -631,21 +631,21 @@ block0(v0: i32): ; VCode: ; block0: ; oihl %r2, 1 -; lcgr %r3, %r2 -; ngrk %r4, %r2, %r3 -; flogr %r2, %r4 -; lhi %r5, 63 -; srk %r2, %r5, %r2 +; lcgr %r4, %r2 +; ngr %r2, %r4 +; flogr %r2, %r2 +; lhi %r3, 63 +; srk %r2, %r3, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; oihl %r2, 1 -; lcgr %r3, %r2 -; ngrk %r4, %r2, %r3 -; flogr %r2, %r4 -; lhi %r5, 0x3f -; srk %r2, %r5, %r2 +; lcgr %r4, %r2 +; ngr %r2, %r4 +; flogr %r2, %r2 +; lhi %r3, 0x3f +; srk %r2, %r3, %r2 ; br %r14 function %ctz_i16(i16) -> i16 { @@ -657,21 +657,21 @@ block0(v0: i16): ; VCode: ; block0: ; oilh %r2, 1 -; lcgr %r3, %r2 -; ngrk %r4, %r2, %r3 -; flogr %r2, %r4 -; lhi %r5, 63 -; srk %r2, %r5, %r2 +; lcgr %r4, %r2 +; ngr %r2, %r4 +; flogr %r2, %r2 +; lhi %r3, 63 +; srk %r2, %r3, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; oilh %r2, 1 -; lcgr %r3, %r2 -; ngrk %r4, %r2, %r3 -; flogr %r2, %r4 -; lhi %r5, 0x3f -; srk %r2, %r5, %r2 +; lcgr %r4, %r2 +; ngr %r2, %r4 +; flogr %r2, %r2 +; lhi %r3, 0x3f +; srk %r2, %r3, %r2 ; br %r14 function %ctz_i8(i8) -> i8 { @@ -683,21 +683,21 @@ block0(v0: i8): ; VCode: ; block0: ; oill %r2, 256 -; lcgr %r3, %r2 -; ngrk %r4, %r2, %r3 -; flogr %r2, %r4 -; lhi %r5, 63 -; srk %r2, %r5, %r2 +; lcgr %r4, %r2 +; ngr %r2, %r4 +; flogr %r2, %r2 +; lhi %r3, 63 +; srk %r2, %r3, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; oill %r2, 0x100 -; lcgr %r3, %r2 -; ngrk %r4, %r2, %r3 -; flogr %r2, %r4 -; lhi %r5, 0x3f -; srk %r2, %r5, %r2 +; lcgr %r4, %r2 +; ngr %r2, %r4 +; flogr %r2, %r2 +; lhi %r3, 0x3f +; srk %r2, %r3, %r2 ; br %r14 function %popcnt_i128(i128) -> i128 { @@ 
-712,9 +712,9 @@ block0(v0: i128): ; vpopctg %v4, %v1 ; vgbm %v6, 0 ; vpdi %v16, %v6, %v4, 0 -; vpdi %v18, %v6, %v4, 1 -; vag %v20, %v16, %v18 -; vst %v20, 0(%r2) +; vpdi %v17, %v6, %v4, 1 +; vag %v16, %v16, %v17 +; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: @@ -723,9 +723,9 @@ block0(v0: i128): ; vpopctg %v4, %v1 ; vzero %v6 ; vpdi %v16, %v6, %v4, 0 -; vpdi %v18, %v6, %v4, 1 -; vag %v20, %v16, %v18 -; vst %v20, 0(%r2) +; vpdi %v17, %v6, %v4, 1 +; vag %v16, %v16, %v17 +; vst %v16, 0(%r2) ; br %r14 function %popcnt_i64(i64) -> i64 { @@ -736,26 +736,26 @@ block0(v0: i64): ; VCode: ; block0: -; popcnt %r4, %r2 -; sllg %r2, %r4, 32 -; agr %r4, %r2 -; sllg %r2, %r4, 16 -; agr %r4, %r2 -; sllg %r2, %r4, 8 -; agr %r4, %r2 -; srlg %r2, %r4, 56 +; popcnt %r2, %r2 +; sllg %r4, %r2, 32 +; agr %r2, %r4 +; sllg %r3, %r2, 16 +; agr %r2, %r3 +; sllg %r3, %r2, 8 +; agr %r2, %r3 +; srlg %r2, %r2, 56 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; popcnt %r4, %r2 -; sllg %r2, %r4, 0x20 -; agr %r4, %r2 -; sllg %r2, %r4, 0x10 -; agr %r4, %r2 -; sllg %r2, %r4, 8 -; agr %r4, %r2 -; srlg %r2, %r4, 0x38 +; popcnt %r2, %r2 +; sllg %r4, %r2, 0x20 +; agr %r2, %r4 +; sllg %r3, %r2, 0x10 +; agr %r2, %r3 +; sllg %r3, %r2, 8 +; agr %r2, %r3 +; srlg %r2, %r2, 0x38 ; br %r14 function %popcnt_i32(i32) -> i32 { @@ -766,22 +766,22 @@ block0(v0: i32): ; VCode: ; block0: -; popcnt %r4, %r2 -; sllk %r2, %r4, 16 -; ar %r4, %r2 -; sllk %r2, %r4, 8 -; ar %r4, %r2 -; srlk %r2, %r4, 24 +; popcnt %r2, %r2 +; sllk %r4, %r2, 16 +; ar %r2, %r4 +; sllk %r3, %r2, 8 +; ar %r2, %r3 +; srlk %r2, %r2, 24 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; popcnt %r4, %r2 -; sllk %r2, %r4, 0x10 -; ar %r4, %r2 -; sllk %r2, %r4, 8 -; ar %r4, %r2 -; srlk %r2, %r4, 0x18 +; popcnt %r2, %r2 +; sllk %r4, %r2, 0x10 +; ar %r2, %r4 +; sllk %r3, %r2, 8 +; ar %r2, %r3 +; srlk %r2, %r2, 0x18 ; br %r14 function %popcnt_i16(i16) -> i16 { @@ -792,17 +792,17 @@ block0(v0: i16): ; VCode: ; block0: -; popcnt %r4, %r2 -; srlk %r2, %r4, 8 -; ark %r2, %r4, %r2 +; popcnt %r2, %r2 +; srlk %r4, %r2, 8 +; ar %r2, %r4 ; nill %r2, 255 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; popcnt %r4, %r2 -; srlk %r2, %r4, 8 -; ark %r2, %r4, %r2 +; popcnt %r2, %r2 +; srlk %r4, %r2, 8 +; ar %r2, %r4 ; nill %r2, 0xff ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/bitwise-arch13.clif b/cranelift/filetests/filetests/isa/s390x/bitwise-arch13.clif index 3ddf146c696f..465c697f2ee8 100644 --- a/cranelift/filetests/filetests/isa/s390x/bitwise-arch13.clif +++ b/cranelift/filetests/filetests/isa/s390x/bitwise-arch13.clif @@ -286,17 +286,21 @@ block0(v0: i64, v1: i64, v2: i64): ; VCode: ; block0: -; ngr %r3, %r2 -; ncgrk %r4, %r4, %r2 -; ogrk %r2, %r4, %r3 +; lgr %r5, %r4 +; ngrk %r4, %r3, %r2 +; lgr %r3, %r5 +; ncgrk %r2, %r3, %r2 +; ogr %r2, %r4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; ngr %r3, %r2 +; lgr %r5, %r4 +; ngrk %r4, %r3, %r2 +; lgr %r3, %r5 ; .byte 0xb9, 0xe5 -; lpdr %f4, %f4 -; ogrk %r2, %r4, %r3 +; lpdr %f2, %f3 +; ogr %r2, %r4 ; br %r14 function %bitselect_i32(i32, i32, i32) -> i32 { @@ -307,17 +311,21 @@ block0(v0: i32, v1: i32, v2: i32): ; VCode: ; block0: -; nr %r3, %r2 -; ncrk %r4, %r4, %r2 -; ork %r2, %r4, %r3 +; lgr %r5, %r4 +; nrk %r4, %r3, %r2 +; lgr %r3, %r5 +; ncrk %r2, %r3, %r2 +; or %r2, %r4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; nr %r3, %r2 +; lgr %r5, %r4 +; nrk %r4, %r3, %r2 +; lgr %r3, %r5 ; .byte 0xb9, 0xf5 -; lpdr %f4, %f4 -; ork %r2, %r4, %r3 +; lpdr %f2, %f3 +; or %r2, %r4 ; br %r14 function 
%bitselect_i16(i16, i16, i16) -> i16 { @@ -328,17 +336,21 @@ block0(v0: i16, v1: i16, v2: i16): ; VCode: ; block0: -; nr %r3, %r2 -; ncrk %r4, %r4, %r2 -; ork %r2, %r4, %r3 +; lgr %r5, %r4 +; nrk %r4, %r3, %r2 +; lgr %r3, %r5 +; ncrk %r2, %r3, %r2 +; or %r2, %r4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; nr %r3, %r2 +; lgr %r5, %r4 +; nrk %r4, %r3, %r2 +; lgr %r3, %r5 ; .byte 0xb9, 0xf5 -; lpdr %f4, %f4 -; ork %r2, %r4, %r3 +; lpdr %f2, %f3 +; or %r2, %r4 ; br %r14 function %bitselect_i8(i8, i8, i8) -> i8 { @@ -349,16 +361,20 @@ block0(v0: i8, v1: i8, v2: i8): ; VCode: ; block0: -; nr %r3, %r2 -; ncrk %r4, %r4, %r2 -; ork %r2, %r4, %r3 +; lgr %r5, %r4 +; nrk %r4, %r3, %r2 +; lgr %r3, %r5 +; ncrk %r2, %r3, %r2 +; or %r2, %r4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; nr %r3, %r2 +; lgr %r5, %r4 +; nrk %r4, %r3, %r2 +; lgr %r3, %r5 ; .byte 0xb9, 0xf5 -; lpdr %f4, %f4 -; ork %r2, %r4, %r3 +; lpdr %f2, %f3 +; or %r2, %r4 ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/bitwise.clif b/cranelift/filetests/filetests/isa/s390x/bitwise.clif index c9eab079996d..aa617f16064e 100644 --- a/cranelift/filetests/filetests/isa/s390x/bitwise.clif +++ b/cranelift/filetests/filetests/isa/s390x/bitwise.clif @@ -134,14 +134,14 @@ block0(v0: i16, v1: i64): ; VCode: ; block0: -; llh %r3, 0(%r3) -; nr %r2, %r3 +; llh %r4, 0(%r3) +; nr %r2, %r4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llh %r3, 0(%r3) ; trap: heap_oob -; nr %r2, %r3 +; llh %r4, 0(%r3) ; trap: heap_oob +; nr %r2, %r4 ; br %r14 function %band_i8(i8, i8) -> i8 { @@ -169,14 +169,14 @@ block0(v0: i8, v1: i64): ; VCode: ; block0: -; llc %r3, 0(%r3) -; nr %r2, %r3 +; llc %r4, 0(%r3) +; nr %r2, %r4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llc %r3, 0(%r3) ; trap: heap_oob -; nr %r2, %r3 +; llc %r4, 0(%r3) ; trap: heap_oob +; nr %r2, %r4 ; br %r14 function %bor_i128(i128, i128) -> i128 { @@ -309,14 +309,14 @@ block0(v0: i16, v1: i64): ; VCode: ; block0: -; llh %r3, 0(%r3) -; or %r2, %r3 +; llh %r4, 0(%r3) +; or %r2, %r4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llh %r3, 0(%r3) ; trap: heap_oob -; or %r2, %r3 +; llh %r4, 0(%r3) ; trap: heap_oob +; or %r2, %r4 ; br %r14 function %bor_i8(i8, i8) -> i8 { @@ -344,14 +344,14 @@ block0(v0: i8, v1: i64): ; VCode: ; block0: -; llc %r3, 0(%r3) -; or %r2, %r3 +; llc %r4, 0(%r3) +; or %r2, %r4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llc %r3, 0(%r3) ; trap: heap_oob -; or %r2, %r3 +; llc %r4, 0(%r3) ; trap: heap_oob +; or %r2, %r4 ; br %r14 function %bxor_i128(i128, i128) -> i128 { @@ -484,14 +484,14 @@ block0(v0: i16, v1: i64): ; VCode: ; block0: -; llh %r3, 0(%r3) -; xr %r2, %r3 +; llh %r4, 0(%r3) +; xr %r2, %r4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llh %r3, 0(%r3) ; trap: heap_oob -; xr %r2, %r3 +; llh %r4, 0(%r3) ; trap: heap_oob +; xr %r2, %r4 ; br %r14 function %bxor_i8(i8, i8) -> i8 { @@ -519,14 +519,14 @@ block0(v0: i8, v1: i64): ; VCode: ; block0: -; llc %r3, 0(%r3) -; xr %r2, %r3 +; llc %r4, 0(%r3) +; xr %r2, %r4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llc %r3, 0(%r3) ; trap: heap_oob -; xr %r2, %r3 +; llc %r4, 0(%r3) ; trap: heap_oob +; xr %r2, %r4 ; br %r14 function %band_not_i128(i128, i128) -> i128 { @@ -935,20 +935,24 @@ block0(v0: i64, v1: i64, v2: i64): ; VCode: ; block0: -; ngr %r3, %r2 +; lgr %r5, %r4 +; ngrk %r4, %r3, %r2 ; xilf %r2, 4294967295 ; xihf %r2, 4294967295 -; ngr %r4, %r2 -; ogrk %r2, %r4, %r3 +; lgr %r3, %r5 +; ngrk %r2, %r3, %r2 +; ogr %r2, %r4 ; br %r14 ; ; Disassembled: ; 
block0: ; offset 0x0 -; ngr %r3, %r2 +; lgr %r5, %r4 +; ngrk %r4, %r3, %r2 ; xilf %r2, 0xffffffff ; xihf %r2, 0xffffffff -; ngr %r4, %r2 -; ogrk %r2, %r4, %r3 +; lgr %r3, %r5 +; ngrk %r2, %r3, %r2 +; ogr %r2, %r4 ; br %r14 function %bitselect_i32(i32, i32, i32) -> i32 { @@ -959,18 +963,22 @@ block0(v0: i32, v1: i32, v2: i32): ; VCode: ; block0: -; nr %r3, %r2 +; lgr %r5, %r4 +; nrk %r4, %r3, %r2 ; xilf %r2, 4294967295 -; nrk %r2, %r4, %r2 -; or %r2, %r3 +; lgr %r3, %r5 +; nrk %r2, %r3, %r2 +; or %r2, %r4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; nr %r3, %r2 +; lgr %r5, %r4 +; nrk %r4, %r3, %r2 ; xilf %r2, 0xffffffff -; nrk %r2, %r4, %r2 -; or %r2, %r3 +; lgr %r3, %r5 +; nrk %r2, %r3, %r2 +; or %r2, %r4 ; br %r14 function %bitselect_i16(i16, i16, i16) -> i16 { @@ -981,18 +989,22 @@ block0(v0: i16, v1: i16, v2: i16): ; VCode: ; block0: -; nr %r3, %r2 +; lgr %r5, %r4 +; nrk %r4, %r3, %r2 ; xilf %r2, 4294967295 -; nrk %r2, %r4, %r2 -; or %r2, %r3 +; lgr %r3, %r5 +; nrk %r2, %r3, %r2 +; or %r2, %r4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; nr %r3, %r2 +; lgr %r5, %r4 +; nrk %r4, %r3, %r2 ; xilf %r2, 0xffffffff -; nrk %r2, %r4, %r2 -; or %r2, %r3 +; lgr %r3, %r5 +; nrk %r2, %r3, %r2 +; or %r2, %r4 ; br %r14 function %bitselect_i8(i8, i8, i8) -> i8 { @@ -1003,18 +1015,22 @@ block0(v0: i8, v1: i8, v2: i8): ; VCode: ; block0: -; nr %r3, %r2 +; lgr %r5, %r4 +; nrk %r4, %r3, %r2 ; xilf %r2, 4294967295 -; nrk %r2, %r4, %r2 -; or %r2, %r3 +; lgr %r3, %r5 +; nrk %r2, %r3, %r2 +; or %r2, %r4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; nr %r3, %r2 +; lgr %r5, %r4 +; nrk %r4, %r3, %r2 ; xilf %r2, 0xffffffff -; nrk %r2, %r4, %r2 -; or %r2, %r3 +; lgr %r3, %r5 +; nrk %r2, %r3, %r2 +; or %r2, %r4 ; br %r14 function %bnot_of_bxor(i32, i32) -> i32 { diff --git a/cranelift/filetests/filetests/isa/s390x/bswap.clif b/cranelift/filetests/filetests/isa/s390x/bswap.clif index ac78af401377..91781949562f 100644 --- a/cranelift/filetests/filetests/isa/s390x/bswap.clif +++ b/cranelift/filetests/filetests/isa/s390x/bswap.clif @@ -41,13 +41,13 @@ block0(v0: i16): ; VCode: ; block0: -; lrvr %r4, %r2 -; srlk %r2, %r4, 16 +; lrvr %r2, %r2 +; srlk %r2, %r2, 16 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvr %r4, %r2 -; srlk %r2, %r4, 0x10 +; lrvr %r2, %r2 +; srlk %r2, %r2, 0x10 ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/call-tail.clif b/cranelift/filetests/filetests/isa/s390x/call-tail.clif index 1ce224b65ce8..8761631b19c2 100644 --- a/cranelift/filetests/filetests/isa/s390x/call-tail.clif +++ b/cranelift/filetests/filetests/isa/s390x/call-tail.clif @@ -17,8 +17,8 @@ block0(v0: i64): ; stmg %r14, %r15, 112(%r15) ; aghi %r15, -160 ; block0: -; bras %r1, 12 ; data %g + 0 ; lg %r5, 0(%r1) -; basr %r14, %r5 +; bras %r1, 12 ; data %g + 0 ; lg %r3, 0(%r1) +; basr %r14, %r3 ; lmg %r14, %r15, 272(%r15) ; br %r14 ; @@ -32,8 +32,8 @@ block0(v0: i64): ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r5, 0(%r1) -; basr %r14, %r5 +; lg %r3, 0(%r1) +; basr %r14, %r3 ; lmg %r14, %r15, 0x110(%r15) ; br %r14 @@ -111,71 +111,65 @@ block0(v0: i64, v1: i32, v2: i32, v3: i32, v4: i16, v5: i16, v6: i16, v7: i8, v8 ; VCode: ; stmg %r8, %r15, 256(%r15) -; aghi %r15, -8 ; block0: -; lgr %r14, %r7 -; stg %r2, 0(%r15) -; lg %r8, 168(%r15) -; llgc %r10, 183(%r15) -; lg %r7, 184(%r15) -; lg %r2, 192(%r15) -; llgfr %r11, %r3 -; llgfr %r9, %r4 -; llgfr %r12, %r5 -; llghr %r13, %r6 -; lgr %r3, %r14 -; llghr %r6, %r3 -; llghr %r14, %r8 -; llgcr %r5, %r10 -; llgcr %r7, %r7 -; llgcr 
%r4, %r2 -; lg %r2, 0(%r15) -; agr %r2, %r11 -; agrk %r3, %r9, %r12 -; agrk %r6, %r13, %r6 -; agrk %r5, %r14, %r5 -; agrk %r4, %r7, %r4 -; agrk %r7, %r2, %r3 -; agrk %r5, %r6, %r5 -; agr %r4, %r7 -; agrk %r2, %r5, %r4 -; aghi %r15, 200 -; lmg %r8, %r14, 64(%r15) +; lgr %r12, %r7 +; lg %r8, 160(%r15) +; llgc %r7, 175(%r15) +; lg %r11, 176(%r15) +; lg %r13, 184(%r15) +; llgfr %r3, %r3 +; llgfr %r4, %r4 +; llgfr %r9, %r5 +; llghr %r10, %r6 +; lgr %r5, %r12 +; llghr %r6, %r5 +; llghr %r8, %r8 +; llgcr %r5, %r7 +; llgcr %r7, %r11 +; llgcr %r11, %r13 +; agr %r2, %r3 +; agrk %r3, %r4, %r9 +; agrk %r4, %r10, %r6 +; agrk %r5, %r8, %r5 +; agrk %r6, %r7, %r11 +; agr %r2, %r3 +; agrk %r3, %r4, %r5 +; agrk %r2, %r6, %r2 +; agrk %r2, %r3, %r2 +; aghi %r15, 192 +; lmg %r8, %r13, 64(%r15) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; stmg %r8, %r15, 0x100(%r15) -; aghi %r15, -8 -; block1: ; offset 0xa -; lgr %r14, %r7 -; stg %r2, 0(%r15) -; lg %r8, 0xa8(%r15) -; llgc %r10, 0xb7(%r15) -; lg %r7, 0xb8(%r15) -; lg %r2, 0xc0(%r15) -; llgfr %r11, %r3 -; llgfr %r9, %r4 -; llgfr %r12, %r5 -; llghr %r13, %r6 -; lgr %r3, %r14 -; llghr %r6, %r3 -; llghr %r14, %r8 -; llgcr %r5, %r10 -; llgcr %r7, %r7 -; llgcr %r4, %r2 -; lg %r2, 0(%r15) -; agr %r2, %r11 -; agrk %r3, %r9, %r12 -; agrk %r6, %r13, %r6 -; agrk %r5, %r14, %r5 -; agrk %r4, %r7, %r4 -; agrk %r7, %r2, %r3 -; agrk %r5, %r6, %r5 -; agr %r4, %r7 -; agrk %r2, %r5, %r4 -; aghi %r15, 0xc8 -; lmg %r8, %r14, 0x40(%r15) +; block1: ; offset 0x6 +; lgr %r12, %r7 +; lg %r8, 0xa0(%r15) +; llgc %r7, 0xaf(%r15) +; lg %r11, 0xb0(%r15) +; lg %r13, 0xb8(%r15) +; llgfr %r3, %r3 +; llgfr %r4, %r4 +; llgfr %r9, %r5 +; llghr %r10, %r6 +; lgr %r5, %r12 +; llghr %r6, %r5 +; llghr %r8, %r8 +; llgcr %r5, %r7 +; llgcr %r7, %r11 +; llgcr %r11, %r13 +; agr %r2, %r3 +; agrk %r3, %r4, %r9 +; agrk %r4, %r10, %r6 +; agrk %r5, %r8, %r5 +; agrk %r6, %r7, %r11 +; agr %r2, %r3 +; agrk %r3, %r4, %r5 +; agrk %r2, %r6, %r2 +; agrk %r2, %r3, %r2 +; aghi %r15, 0xc0 +; lmg %r8, %r13, 0x40(%r15) ; br %r14 function %outgoing_args(i64) -> i64 tail { @@ -243,17 +237,17 @@ block0(v0: i64): ; stmg %r8, %r15, 64(%r15) ; aghi %r15, -192 ; block0: -; lgr %r10, %r2 -; brasl %r14, %f -; lgr %r13, %r2 -; brasl %r14, %f -; lgr %r9, %r2 +; lgr %r11, %r2 ; brasl %r14, %f ; lgr %r12, %r2 ; brasl %r14, %f ; lgr %r8, %r2 ; brasl %r14, %f -; lgr %r11, %r2 +; lgr %r9, %r2 +; brasl %r14, %f +; lgr %r10, %r2 +; brasl %r14, %f +; lgr %r13, %r2 ; brasl %r14, %f ; stg %r2, 184(%r15) ; brasl %r14, %f @@ -265,12 +259,12 @@ block0(v0: i64): ; stg %r2, 160(%r15) ; lg %r2, 352(%r15) ; stg %r2, 168(%r15) -; lgr %r2, %r10 -; lgr %r3, %r13 -; lgr %r4, %r9 -; lgr %r5, %r12 -; lgr %r6, %r8 -; lgr %r7, %r11 +; lgr %r2, %r11 +; lgr %r3, %r12 +; lgr %r4, %r8 +; lgr %r5, %r9 +; lgr %r6, %r10 +; lgr %r7, %r13 ; brasl %r14, %g ; callee_pop_size 176 ; lg %r2, 168(%r15) ; lmg %r8, %r15, 256(%r15) @@ -281,17 +275,17 @@ block0(v0: i64): ; stmg %r8, %r15, 0x40(%r15) ; aghi %r15, -0xc0 ; block1: ; offset 0xa -; lgr %r10, %r2 +; lgr %r11, %r2 ; brasl %r14, 0xe ; reloc_external PLTRel32Dbl %f 2 -; lgr %r13, %r2 +; lgr %r12, %r2 ; brasl %r14, 0x18 ; reloc_external PLTRel32Dbl %f 2 -; lgr %r9, %r2 +; lgr %r8, %r2 ; brasl %r14, 0x22 ; reloc_external PLTRel32Dbl %f 2 -; lgr %r12, %r2 +; lgr %r9, %r2 ; brasl %r14, 0x2c ; reloc_external PLTRel32Dbl %f 2 -; lgr %r8, %r2 +; lgr %r10, %r2 ; brasl %r14, 0x36 ; reloc_external PLTRel32Dbl %f 2 -; lgr %r11, %r2 +; lgr %r13, %r2 ; brasl %r14, 0x40 ; reloc_external PLTRel32Dbl %f 2 ; stg %r2, 0xb8(%r15) ; 
brasl %r14, 0x4c ; reloc_external PLTRel32Dbl %f 2 @@ -303,12 +297,12 @@ block0(v0: i64): ; stg %r2, 0xa0(%r15) ; lg %r2, 0x160(%r15) ; stg %r2, 0xa8(%r15) -; lgr %r2, %r10 -; lgr %r3, %r13 -; lgr %r4, %r9 -; lgr %r5, %r12 -; lgr %r6, %r8 -; lgr %r7, %r11 +; lgr %r2, %r11 +; lgr %r3, %r12 +; lgr %r4, %r8 +; lgr %r5, %r9 +; lgr %r6, %r10 +; lgr %r7, %r13 ; brasl %r14, 0x98 ; reloc_external PLTRel32Dbl %g 2 ; lg %r2, 0xa8(%r15) ; lmg %r8, %r15, 0x100(%r15) diff --git a/cranelift/filetests/filetests/isa/s390x/call.clif b/cranelift/filetests/filetests/isa/s390x/call.clif index 368966bd4c01..e202fe9c2939 100644 --- a/cranelift/filetests/filetests/isa/s390x/call.clif +++ b/cranelift/filetests/filetests/isa/s390x/call.clif @@ -18,8 +18,8 @@ block0(v0: i64): ; stmg %r14, %r15, 112(%r15) ; aghi %r15, -160 ; block0: -; bras %r1, 12 ; data %g + 0 ; lg %r5, 0(%r1) -; basr %r14, %r5 +; bras %r1, 12 ; data %g + 0 ; lg %r3, 0(%r1) +; basr %r14, %r3 ; lmg %r14, %r15, 272(%r15) ; br %r14 ; @@ -33,8 +33,8 @@ block0(v0: i64): ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r5, 0(%r1) -; basr %r14, %r5 +; lg %r3, 0(%r1) +; basr %r14, %r3 ; lmg %r14, %r15, 0x110(%r15) ; br %r14 @@ -51,8 +51,8 @@ block0(v0: i32): ; aghi %r15, -160 ; block0: ; llgfr %r2, %r2 -; bras %r1, 12 ; data %g + 0 ; lg %r3, 0(%r1) -; basr %r14, %r3 +; bras %r1, 12 ; data %g + 0 ; lg %r5, 0(%r1) +; basr %r14, %r5 ; lmg %r14, %r15, 272(%r15) ; br %r14 ; @@ -67,8 +67,8 @@ block0(v0: i32): ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r3, 0(%r1) -; basr %r14, %r3 +; lg %r5, 0(%r1) +; basr %r14, %r5 ; lmg %r14, %r15, 0x110(%r15) ; br %r14 @@ -100,8 +100,8 @@ block0(v0: i32): ; aghi %r15, -160 ; block0: ; lgfr %r2, %r2 -; bras %r1, 12 ; data %g + 0 ; lg %r3, 0(%r1) -; basr %r14, %r3 +; bras %r1, 12 ; data %g + 0 ; lg %r5, 0(%r1) +; basr %r14, %r5 ; lmg %r14, %r15, 272(%r15) ; br %r14 ; @@ -116,8 +116,8 @@ block0(v0: i32): ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r3, 0(%r1) -; basr %r14, %r3 +; lg %r5, 0(%r1) +; basr %r14, %r5 ; lmg %r14, %r15, 0x110(%r15) ; br %r14 @@ -174,8 +174,8 @@ block0(v0: i32): ; aghi %r15, -160 ; block0: ; llgfr %r2, %r2 -; bras %r1, 12 ; data %g + 0 ; lg %r3, 0(%r1) -; basr %r14, %r3 +; bras %r1, 12 ; data %g + 0 ; lg %r5, 0(%r1) +; basr %r14, %r5 ; lmg %r14, %r15, 272(%r15) ; br %r14 ; @@ -190,8 +190,8 @@ block0(v0: i32): ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r3, 0(%r1) -; basr %r14, %r3 +; lg %r5, 0(%r1) +; basr %r14, %r5 ; lmg %r14, %r15, 0x110(%r15) ; br %r14 @@ -245,29 +245,31 @@ block0(v0: i64, v1: i32, v2: i32, v3: i32, v4: i16, v5: i16, v6: i16, v7: i8, v8 ; VCode: ; stmg %r6, %r15, 48(%r15) ; block0: -; lg %r12, 160(%r15) -; lg %r14, 168(%r15) -; llgc %r7, 183(%r15) -; lg %r9, 184(%r15) -; lg %r11, 192(%r15) +; lgr %r9, %r6 +; lg %r6, 160(%r15) +; lg %r8, 168(%r15) +; llgc %r10, 183(%r15) +; lg %r12, 184(%r15) +; lg %r14, 192(%r15) ; llgfr %r3, %r3 ; llgfr %r4, %r4 -; llgfr %r13, %r5 -; llghr %r6, %r6 -; llghr %r5, %r12 -; llghr %r12, %r14 -; llgcr %r14, %r7 -; llgcr %r7, %r9 -; llgcr %r8, %r11 -; agrk %r3, %r2, %r3 -; agr %r4, %r13 -; agrk %r5, %r6, %r5 -; agrk %r2, %r12, %r14 -; agrk %r12, %r7, %r8 -; agr %r3, %r4 -; agrk %r4, %r5, %r2 -; agrk %r3, %r12, %r3 -; agrk %r2, %r4, %r3 +; llgfr %r7, %r5 +; lgr %r5, %r9 +; llghr %r9, %r5 +; llghr %r5, %r6 +; llghr %r6, %r8 +; llgcr %r8, %r10 +; llgcr %r10, %r12 +; llgcr %r11, %r14 +; agr %r2, %r3 +; agrk %r3, %r4, %r7 +; agrk %r4, %r9, %r5 +; agrk %r5, %r6, %r8 +; agrk %r6, %r10, 
%r11 +; agr %r2, %r3 +; agrk %r3, %r4, %r5 +; agrk %r2, %r6, %r2 +; agrk %r2, %r3, %r2 ; lmg %r6, %r15, 48(%r15) ; br %r14 ; @@ -275,29 +277,31 @@ block0(v0: i64, v1: i32, v2: i32, v3: i32, v4: i16, v5: i16, v6: i16, v7: i8, v8 ; block0: ; offset 0x0 ; stmg %r6, %r15, 0x30(%r15) ; block1: ; offset 0x6 -; lg %r12, 0xa0(%r15) -; lg %r14, 0xa8(%r15) -; llgc %r7, 0xb7(%r15) -; lg %r9, 0xb8(%r15) -; lg %r11, 0xc0(%r15) +; lgr %r9, %r6 +; lg %r6, 0xa0(%r15) +; lg %r8, 0xa8(%r15) +; llgc %r10, 0xb7(%r15) +; lg %r12, 0xb8(%r15) +; lg %r14, 0xc0(%r15) ; llgfr %r3, %r3 ; llgfr %r4, %r4 -; llgfr %r13, %r5 -; llghr %r6, %r6 -; llghr %r5, %r12 -; llghr %r12, %r14 -; llgcr %r14, %r7 -; llgcr %r7, %r9 -; llgcr %r8, %r11 -; agrk %r3, %r2, %r3 -; agr %r4, %r13 -; agrk %r5, %r6, %r5 -; agrk %r2, %r12, %r14 -; agrk %r12, %r7, %r8 -; agr %r3, %r4 -; agrk %r4, %r5, %r2 -; agrk %r3, %r12, %r3 -; agrk %r2, %r4, %r3 +; llgfr %r7, %r5 +; lgr %r5, %r9 +; llghr %r9, %r5 +; llghr %r5, %r6 +; llghr %r6, %r8 +; llgcr %r8, %r10 +; llgcr %r10, %r12 +; llgcr %r11, %r14 +; agr %r2, %r3 +; agrk %r3, %r4, %r7 +; agrk %r4, %r9, %r5 +; agrk %r5, %r6, %r8 +; agrk %r6, %r10, %r11 +; agr %r2, %r3 +; agrk %r3, %r4, %r5 +; agrk %r2, %r6, %r2 +; agrk %r2, %r3, %r2 ; lmg %r6, %r15, 0x30(%r15) ; br %r14 @@ -321,21 +325,21 @@ block0(v0: i128, v1: i128, v2: i128, v3: i128, v4: i128, v5: i128, v6: i128, v7: ; vl %v5, 0(%r5) ; vl %v7, 0(%r6) ; lg %r3, 160(%r15) -; vl %v18, 0(%r3) +; vl %v16, 0(%r3) ; lg %r3, 168(%r15) -; vl %v21, 0(%r3) -; lg %r5, 176(%r15) -; vl %v24, 0(%r5) -; lg %r4, 184(%r15) -; vl %v27, 0(%r4) -; vaq %v4, %v1, %v3 -; vaq %v5, %v5, %v7 -; vaq %v6, %v18, %v21 -; vaq %v7, %v24, %v27 -; vaq %v4, %v4, %v5 -; vaq %v5, %v6, %v7 -; vaq %v4, %v4, %v5 -; vst %v4, 0(%r2) +; vl %v17, 0(%r3) +; lg %r3, 176(%r15) +; vl %v18, 0(%r3) +; lg %r3, 184(%r15) +; vl %v19, 0(%r3) +; vaq %v28, %v1, %v3 +; vaq %v29, %v5, %v7 +; vaq %v30, %v16, %v17 +; vaq %v31, %v18, %v19 +; vaq %v28, %v28, %v29 +; vaq %v29, %v30, %v31 +; vaq %v28, %v28, %v29 +; vst %v28, 0(%r2) ; lmg %r6, %r15, 48(%r15) ; br %r14 ; @@ -348,21 +352,21 @@ block0(v0: i128, v1: i128, v2: i128, v3: i128, v4: i128, v5: i128, v6: i128, v7: ; vl %v5, 0(%r5) ; vl %v7, 0(%r6) ; lg %r3, 0xa0(%r15) -; vl %v18, 0(%r3) +; vl %v16, 0(%r3) ; lg %r3, 0xa8(%r15) -; vl %v21, 0(%r3) -; lg %r5, 0xb0(%r15) -; vl %v24, 0(%r5) -; lg %r4, 0xb8(%r15) -; vl %v27, 0(%r4) -; vaq %v4, %v1, %v3 -; vaq %v5, %v5, %v7 -; vaq %v6, %v18, %v21 -; vaq %v7, %v24, %v27 -; vaq %v4, %v4, %v5 -; vaq %v5, %v6, %v7 -; vaq %v4, %v4, %v5 -; vst %v4, 0(%r2) +; vl %v17, 0(%r3) +; lg %r3, 0xb0(%r15) +; vl %v18, 0(%r3) +; lg %r3, 0xb8(%r15) +; vl %v19, 0(%r3) +; vaq %v28, %v1, %v3 +; vaq %v29, %v5, %v7 +; vaq %v30, %v16, %v17 +; vaq %v31, %v18, %v19 +; vaq %v28, %v28, %v29 +; vaq %v29, %v30, %v31 +; vaq %v28, %v28, %v29 +; vst %v28, 0(%r2) ; lmg %r6, %r15, 0x30(%r15) ; br %r14 @@ -433,28 +437,28 @@ block0(v0: f128): } ; VCode: -; stmg %r6, %r15, 48(%r15) +; stmg %r8, %r15, 64(%r15) ; aghi %r15, -208 ; block0: -; lgr %r6, %r2 +; lgr %r8, %r2 ; vl %v1, 0(%r3) ; la %r3, 160(%r15) ; vst %v1, 0(%r3) ; la %r2, 176(%r15) ; bras %r1, 12 ; data %g + 0 ; lg %r4, 0(%r1) ; basr %r14, %r4 ; vl %v1, 176(%r15) ; vst %v1, 192(%r15) -; lgr %r2, %r6 +; lgr %r2, %r8 ; vl %v16, 192(%r15) ; vst %v16, 0(%r2) -; lmg %r6, %r15, 256(%r15) +; lmg %r8, %r15, 272(%r15) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; stmg %r6, %r15, 0x30(%r15) +; stmg %r8, %r15, 0x40(%r15) ; aghi %r15, -0xd0 ; block1: ; offset 0xa -; lgr %r6, %r2 +; lgr %r8, %r2 
; vl %v1, 0(%r3) ; la %r3, 0xa0(%r15) ; vst %v1, 0(%r3) @@ -468,9 +472,9 @@ block0(v0: f128): ; basr %r14, %r4 ; vl %v1, 0xb0(%r15) ; vst %v1, 0xc0(%r15) -; lgr %r2, %r6 +; lgr %r2, %r8 ; vl %v16, 0xc0(%r15) ; vst %v16, 0(%r2) -; lmg %r6, %r15, 0x100(%r15) +; lmg %r8, %r15, 0x110(%r15) ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/condops.clif b/cranelift/filetests/filetests/isa/s390x/condops.clif index 0b902516598d..b49422cff3ca 100644 --- a/cranelift/filetests/filetests/isa/s390x/condops.clif +++ b/cranelift/filetests/filetests/isa/s390x/condops.clif @@ -11,16 +11,16 @@ block0(v0: i8, v1: i64, v2: i64): ; VCode: ; block0: -; llcr %r2, %r2 -; clfi %r2, 42 +; llcr %r5, %r2 +; clfi %r5, 42 ; lgr %r2, %r4 ; locgre %r2, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llcr %r2, %r2 -; clfi %r2, 0x2a +; llcr %r5, %r2 +; clfi %r5, 0x2a ; lgr %r2, %r4 ; locgre %r2, %r3 ; br %r14 @@ -33,16 +33,16 @@ block0(v0: i8, v1: i8, v2: i8): ; VCode: ; block0: -; lbr %r2, %r2 -; chi %r2, 0 +; lbr %r5, %r2 +; chi %r5, 0 ; lgr %r2, %r4 ; locrlh %r2, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lbr %r2, %r2 -; chi %r2, 0 +; lbr %r5, %r2 +; chi %r5, 0 ; lgr %r2, %r4 ; locrlh %r2, %r3 ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/conversions.clif b/cranelift/filetests/filetests/isa/s390x/conversions.clif index 883dbd348325..473085cfee9a 100644 --- a/cranelift/filetests/filetests/isa/s390x/conversions.clif +++ b/cranelift/filetests/filetests/isa/s390x/conversions.clif @@ -186,15 +186,15 @@ block0(v0: i64): ; VCode: ; block0: -; srag %r5, %r3, 63 -; vlvgp %v5, %r5, %r3 +; srag %r4, %r3, 63 +; vlvgp %v5, %r4, %r3 ; vst %v5, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; srag %r5, %r3, 0x3f -; vlvgp %v5, %r5, %r3 +; srag %r4, %r3, 0x3f +; vlvgp %v5, %r4, %r3 ; vst %v5, 0(%r2) ; br %r14 @@ -206,17 +206,17 @@ block0(v0: i32): ; VCode: ; block0: -; lgfr %r5, %r3 -; srag %r3, %r5, 63 -; vlvgp %v7, %r3, %r5 +; lgfr %r3, %r3 +; srag %r5, %r3, 63 +; vlvgp %v7, %r5, %r3 ; vst %v7, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgfr %r5, %r3 -; srag %r3, %r5, 0x3f -; vlvgp %v7, %r3, %r5 +; lgfr %r3, %r3 +; srag %r5, %r3, 0x3f +; vlvgp %v7, %r5, %r3 ; vst %v7, 0(%r2) ; br %r14 @@ -244,17 +244,17 @@ block0(v0: i16): ; VCode: ; block0: -; lghr %r5, %r3 -; srag %r3, %r5, 63 -; vlvgp %v7, %r3, %r5 +; lghr %r3, %r3 +; srag %r5, %r3, 63 +; vlvgp %v7, %r5, %r3 ; vst %v7, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lghr %r5, %r3 -; srag %r3, %r5, 0x3f -; vlvgp %v7, %r3, %r5 +; lghr %r3, %r3 +; srag %r5, %r3, 0x3f +; vlvgp %v7, %r5, %r3 ; vst %v7, 0(%r2) ; br %r14 @@ -298,17 +298,17 @@ block0(v0: i8): ; VCode: ; block0: -; lgbr %r5, %r3 -; srag %r3, %r5, 63 -; vlvgp %v7, %r3, %r5 +; lgbr %r3, %r3 +; srag %r5, %r3, 63 +; vlvgp %v7, %r5, %r3 ; vst %v7, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgbr %r5, %r3 -; srag %r3, %r5, 0x3f -; vlvgp %v7, %r3, %r5 +; lgbr %r3, %r3 +; srag %r5, %r3, 0x3f +; vlvgp %v7, %r5, %r3 ; vst %v7, 0(%r2) ; br %r14 @@ -541,8 +541,8 @@ block0(v0: i128): ; vceqgs %v6, %v1, %v4 ; lghi %r3, 0 ; locghine %r3, -1 -; vlvgp %v20, %r3, %r3 -; vst %v20, 0(%r2) +; vlvgp %v16, %r3, %r3 +; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: @@ -552,8 +552,8 @@ block0(v0: i128): ; vceqgs %v6, %v1, %v4 ; lghi %r3, 0 ; locghine %r3, -1 -; vlvgp %v20, %r3, %r3 -; vst %v20, 0(%r2) +; vlvgp %v16, %r3, %r3 +; vst %v16, 0(%r2) ; br %r14 function %bmask_i128_i64(i128) -> i64 { @@ -661,18 +661,18 @@ block0(v0: i64, v1: 
i64): ; VCode: ; block0: ; cghi %r4, 0 -; lghi %r3, 0 -; locghilh %r3, -1 -; vlvgp %v16, %r3, %r3 +; lghi %r5, 0 +; locghilh %r5, -1 +; vlvgp %v16, %r5, %r5 ; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; cghi %r4, 0 -; lghi %r3, 0 -; locghilh %r3, -1 -; vlvgp %v16, %r3, %r3 +; lghi %r5, 0 +; locghilh %r5, -1 +; vlvgp %v16, %r5, %r5 ; vst %v16, 0(%r2) ; br %r14 @@ -765,18 +765,18 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: ; chi %r4, 0 -; lghi %r3, 0 -; locghilh %r3, -1 -; vlvgp %v16, %r3, %r3 +; lghi %r5, 0 +; locghilh %r5, -1 +; vlvgp %v16, %r5, %r5 ; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; chi %r4, 0 -; lghi %r3, 0 -; locghilh %r3, -1 -; vlvgp %v16, %r3, %r3 +; lghi %r5, 0 +; locghilh %r5, -1 +; vlvgp %v16, %r5, %r5 ; vst %v16, 0(%r2) ; br %r14 @@ -868,22 +868,22 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; lhr %r5, %r4 -; chi %r5, 0 -; lghi %r5, 0 -; locghilh %r5, -1 -; vlvgp %v18, %r5, %r5 -; vst %v18, 0(%r2) +; lhr %r3, %r4 +; chi %r3, 0 +; lghi %r3, 0 +; locghilh %r3, -1 +; vlvgp %v16, %r3, %r3 +; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lhr %r5, %r4 -; chi %r5, 0 -; lghi %r5, 0 -; locghilh %r5, -1 -; vlvgp %v18, %r5, %r5 -; vst %v18, 0(%r2) +; lhr %r3, %r4 +; chi %r3, 0 +; lghi %r3, 0 +; locghilh %r3, -1 +; vlvgp %v16, %r3, %r3 +; vst %v16, 0(%r2) ; br %r14 function %bmask_i16_i64(i16, i16) -> i64 { @@ -894,16 +894,16 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; lhr %r4, %r3 -; chi %r4, 0 +; lhr %r2, %r3 +; chi %r2, 0 ; lghi %r2, 0 ; locghilh %r2, -1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lhr %r4, %r3 -; chi %r4, 0 +; lhr %r2, %r3 +; chi %r2, 0 ; lghi %r2, 0 ; locghilh %r2, -1 ; br %r14 @@ -916,16 +916,16 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; lhr %r4, %r3 -; chi %r4, 0 +; lhr %r2, %r3 +; chi %r2, 0 ; lhi %r2, 0 ; lochilh %r2, -1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lhr %r4, %r3 -; chi %r4, 0 +; lhr %r2, %r3 +; chi %r2, 0 ; lhi %r2, 0 ; lochilh %r2, -1 ; br %r14 @@ -938,16 +938,16 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; lhr %r4, %r3 -; chi %r4, 0 +; lhr %r2, %r3 +; chi %r2, 0 ; lhi %r2, 0 ; lochilh %r2, -1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lhr %r4, %r3 -; chi %r4, 0 +; lhr %r2, %r3 +; chi %r2, 0 ; lhi %r2, 0 ; lochilh %r2, -1 ; br %r14 @@ -960,16 +960,16 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; lhr %r4, %r3 -; chi %r4, 0 +; lhr %r2, %r3 +; chi %r2, 0 ; lhi %r2, 0 ; lochilh %r2, -1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lhr %r4, %r3 -; chi %r4, 0 +; lhr %r2, %r3 +; chi %r2, 0 ; lhi %r2, 0 ; lochilh %r2, -1 ; br %r14 @@ -982,22 +982,22 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; lbr %r5, %r4 -; chi %r5, 0 -; lghi %r5, 0 -; locghilh %r5, -1 -; vlvgp %v18, %r5, %r5 -; vst %v18, 0(%r2) +; lbr %r3, %r4 +; chi %r3, 0 +; lghi %r3, 0 +; locghilh %r3, -1 +; vlvgp %v16, %r3, %r3 +; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lbr %r5, %r4 -; chi %r5, 0 -; lghi %r5, 0 -; locghilh %r5, -1 -; vlvgp %v18, %r5, %r5 -; vst %v18, 0(%r2) +; lbr %r3, %r4 +; chi %r3, 0 +; lghi %r3, 0 +; locghilh %r3, -1 +; vlvgp %v16, %r3, %r3 +; vst %v16, 0(%r2) ; br %r14 function %bmask_i8_i64(i8, i8) -> i64 { @@ -1008,16 +1008,16 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; lbr %r4, %r3 -; chi %r4, 0 +; lbr %r2, %r3 +; chi %r2, 0 ; lghi %r2, 0 ; locghilh %r2, -1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lbr %r4, %r3 -; chi %r4, 0 +; lbr %r2, %r3 +; chi %r2, 0 ; lghi %r2, 0 
; locghilh %r2, -1 ; br %r14 @@ -1030,16 +1030,16 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; lbr %r4, %r3 -; chi %r4, 0 +; lbr %r2, %r3 +; chi %r2, 0 ; lhi %r2, 0 ; lochilh %r2, -1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lbr %r4, %r3 -; chi %r4, 0 +; lbr %r2, %r3 +; chi %r2, 0 ; lhi %r2, 0 ; lochilh %r2, -1 ; br %r14 @@ -1052,16 +1052,16 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; lbr %r4, %r3 -; chi %r4, 0 +; lbr %r2, %r3 +; chi %r2, 0 ; lhi %r2, 0 ; lochilh %r2, -1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lbr %r4, %r3 -; chi %r4, 0 +; lbr %r2, %r3 +; chi %r2, 0 ; lhi %r2, 0 ; lochilh %r2, -1 ; br %r14 @@ -1074,16 +1074,16 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; lbr %r4, %r3 -; chi %r4, 0 +; lbr %r2, %r3 +; chi %r2, 0 ; lhi %r2, 0 ; lochilh %r2, -1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lbr %r4, %r3 -; chi %r4, 0 +; lbr %r2, %r3 +; chi %r2, 0 ; lhi %r2, 0 ; lochilh %r2, -1 ; br %r14 @@ -1096,22 +1096,22 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; lbr %r5, %r4 -; chi %r5, 0 -; lghi %r5, 0 -; locghilh %r5, -1 -; vlvgp %v18, %r5, %r5 -; vst %v18, 0(%r2) +; lbr %r3, %r4 +; chi %r3, 0 +; lghi %r3, 0 +; locghilh %r3, -1 +; vlvgp %v16, %r3, %r3 +; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lbr %r5, %r4 -; chi %r5, 0 -; lghi %r5, 0 -; locghilh %r5, -1 -; vlvgp %v18, %r5, %r5 -; vst %v18, 0(%r2) +; lbr %r3, %r4 +; chi %r3, 0 +; lghi %r3, 0 +; locghilh %r3, -1 +; vlvgp %v16, %r3, %r3 +; vst %v16, 0(%r2) ; br %r14 function %bmask_i8_i64(i8, i8) -> i64 { @@ -1122,16 +1122,16 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; lbr %r4, %r3 -; chi %r4, 0 +; lbr %r2, %r3 +; chi %r2, 0 ; lghi %r2, 0 ; locghilh %r2, -1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lbr %r4, %r3 -; chi %r4, 0 +; lbr %r2, %r3 +; chi %r2, 0 ; lghi %r2, 0 ; locghilh %r2, -1 ; br %r14 @@ -1144,16 +1144,16 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; lbr %r4, %r3 -; chi %r4, 0 +; lbr %r2, %r3 +; chi %r2, 0 ; lhi %r2, 0 ; lochilh %r2, -1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lbr %r4, %r3 -; chi %r4, 0 +; lbr %r2, %r3 +; chi %r2, 0 ; lhi %r2, 0 ; lochilh %r2, -1 ; br %r14 @@ -1166,16 +1166,16 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; lbr %r4, %r3 -; chi %r4, 0 +; lbr %r2, %r3 +; chi %r2, 0 ; lhi %r2, 0 ; lochilh %r2, -1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lbr %r4, %r3 -; chi %r4, 0 +; lbr %r2, %r3 +; chi %r2, 0 ; lhi %r2, 0 ; lochilh %r2, -1 ; br %r14 @@ -1188,16 +1188,16 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; lbr %r4, %r3 -; chi %r4, 0 +; lbr %r2, %r3 +; chi %r2, 0 ; lhi %r2, 0 ; lochilh %r2, -1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lbr %r4, %r3 -; chi %r4, 0 +; lbr %r2, %r3 +; chi %r2, 0 ; lhi %r2, 0 ; lochilh %r2, -1 ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/exceptions.clif b/cranelift/filetests/filetests/isa/s390x/exceptions.clif index 17cc9e96b5af..ac6587186422 100644 --- a/cranelift/filetests/filetests/isa/s390x/exceptions.clif +++ b/cranelift/filetests/filetests/isa/s390x/exceptions.clif @@ -151,8 +151,8 @@ function %f2(i32) -> i32, f32, f64 { ; block0: ; larl %r1, [const(1)] ; ld %f2, 0(%r1) ; vst %v2, 160(%r15) -; bras %r1, 12 ; data %g + 0 ; lg %r5, 0(%r1) -; basr %r14, %r5; jg MachLabel(1); catch [default: MachLabel(2)] +; bras %r1, 12 ; data %g + 0 ; lg %r3, 0(%r1) +; basr %r14, %r3; jg MachLabel(1); catch [default: MachLabel(2)] ; block1: ; lhi %r2, 1 ; vl %v2, 160(%r15) @@ -202,8 +202,8 @@ function %f2(i32) -> i32, f32, f64 { ; 
.byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r5, 0(%r1) -; basr %r14, %r5 +; lg %r3, 0(%r1) +; basr %r14, %r3 ; block2: ; offset 0x4e ; lhi %r2, 1 ; vl %v2, 0xa0(%r15) @@ -430,8 +430,8 @@ function %f5() -> i64 { ; stg %r2, 160(%r15) ; jg label1 ; block1: -; bras %r1, 12 ; data %g + 0 ; lg %r2, 0(%r1) -; basr %r14, %r2; jg MachLabel(2); catch [default: MachLabel(3)] +; bras %r1, 12 ; data %g + 0 ; lg %r4, 0(%r1) +; basr %r14, %r4; jg MachLabel(2); catch [default: MachLabel(3)] ; block2: ; lg %r2, 160(%r15) ; ld %f8, 168(%r15) @@ -478,8 +478,8 @@ function %f5() -> i64 { ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r2, 0(%r1) -; basr %r14, %r2 +; lg %r4, 0(%r1) +; basr %r14, %r4 ; block3: ; offset 0x4a ; lg %r2, 0xa0(%r15) ; ld %f8, 0xa8(%r15) diff --git a/cranelift/filetests/filetests/isa/s390x/floating-point-arch13.clif b/cranelift/filetests/filetests/isa/s390x/floating-point-arch13.clif index e17324120347..49603dc43979 100644 --- a/cranelift/filetests/filetests/isa/s390x/floating-point-arch13.clif +++ b/cranelift/filetests/filetests/isa/s390x/floating-point-arch13.clif @@ -17,8 +17,8 @@ block0(v0: f32): ; larl %r1, [const(1)] ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wclfeb %v20, %f0, 0, 5 -; vlgvf %r2, %v20, 0 +; wclfeb %v16, %f0, 0, 5 +; vlgvf %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -33,8 +33,8 @@ block0(v0: f32): ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; vclgd %v20, %v0, 2, 8, 5 -; vlgvf %r2, %v20, 0 +; vclgd %v16, %v0, 2, 8, 5 +; vlgvf %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -61,8 +61,8 @@ block0(v0: f32): ; larl %r1, [const(1)] ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wcfeb %v20, %f0, 0, 5 -; vlgvf %r2, %v20, 0 +; wcfeb %v16, %f0, 0, 5 +; vlgvf %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -77,8 +77,8 @@ block0(v0: f32): ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; vcgd %v20, %v0, 2, 8, 5 -; vlgvf %r2, %v20, 0 +; vcgd %v16, %v0, 2, 8, 5 +; vlgvf %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -106,8 +106,8 @@ block0(v0: f32): ; larl %r1, [const(1)] ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wclfeb %v20, %f0, 0, 5 -; vlgvf %r2, %v20, 0 +; wclfeb %v16, %f0, 0, 5 +; vlgvf %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -122,8 +122,8 @@ block0(v0: f32): ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; vclgd %v20, %v0, 2, 8, 5 -; vlgvf %r2, %v20, 0 +; vclgd %v16, %v0, 2, 8, 5 +; vlgvf %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -150,8 +150,8 @@ block0(v0: f32): ; larl %r1, [const(1)] ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wcfeb %v20, %f0, 0, 5 -; vlgvf %r2, %v20, 0 +; wcfeb %v16, %f0, 0, 5 +; vlgvf %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -166,8 +166,8 @@ block0(v0: f32): ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; vcgd %v20, %v0, 2, 8, 5 -; vlgvf %r2, %v20, 0 +; vcgd %v16, %v0, 2, 8, 5 +; vlgvf %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -193,8 +193,8 @@ block0(v0: f32): ; larl %r1, [const(1)] ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wclfeb %v20, %f0, 0, 5 -; vlgvf %r2, %v20, 0 +; wclfeb %v16, %f0, 0, 5 +; vlgvf %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -209,8 +209,8 @@ block0(v0: f32): ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; vclgd %v20, %v0, 2, 8, 5 -; vlgvf %r2, %v20, 0 +; vclgd %v16, %v0, 
2, 8, 5 +; vlgvf %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -237,8 +237,8 @@ block0(v0: f32): ; larl %r1, [const(1)] ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wcfeb %v20, %f0, 0, 5 -; vlgvf %r2, %v20, 0 +; wcfeb %v16, %f0, 0, 5 +; vlgvf %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -253,8 +253,8 @@ block0(v0: f32): ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; vcgd %v20, %v0, 2, 8, 5 -; vlgvf %r2, %v20, 0 +; vcgd %v16, %v0, 2, 8, 5 +; vlgvf %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -282,9 +282,9 @@ block0(v0: f32): ; larl %r1, [const(1)] ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wldeb %v20, %f0 -; wclgdb %v22, %v20, 0, 5 -; vlgvg %r2, %v22, 0 +; wldeb %v16, %f0 +; wclgdb %v16, %v16, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -299,9 +299,9 @@ block0(v0: f32): ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wldeb %v20, %f0 -; wclgdb %v22, %v20, 0, 5 -; vlgvg %r2, %v22, 0 +; wldeb %v16, %f0 +; wclgdb %v16, %v16, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -329,9 +329,9 @@ block0(v0: f32): ; larl %r1, [const(1)] ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wldeb %v20, %f0 -; wcgdb %v22, %v20, 0, 5 -; vlgvg %r2, %v22, 0 +; wldeb %v16, %f0 +; wcgdb %v16, %v16, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -346,9 +346,9 @@ block0(v0: f32): ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wldeb %v20, %f0 -; wcgdb %v22, %v20, 0, 5 -; vlgvg %r2, %v22, 0 +; wldeb %v16, %f0 +; wcgdb %v16, %v16, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -375,8 +375,8 @@ block0(v0: f64): ; larl %r1, [const(1)] ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wclgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wclgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -391,8 +391,8 @@ block0(v0: f64): ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wclgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wclgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -419,8 +419,8 @@ block0(v0: f64): ; larl %r1, [const(1)] ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wcgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wcgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -435,8 +435,8 @@ block0(v0: f64): ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wcgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wcgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -462,8 +462,8 @@ block0(v0: f64): ; larl %r1, [const(1)] ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wclgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wclgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -478,8 +478,8 @@ block0(v0: f64): ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wclgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wclgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -506,8 +506,8 @@ block0(v0: f64): ; larl %r1, [const(1)] ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wcgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wcgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -522,8 +522,8 @@ block0(v0: f64): ; vleg %v16, 0(%r1), 0 ; wfcdb 
%f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wcgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wcgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -549,8 +549,8 @@ block0(v0: f64): ; larl %r1, [const(1)] ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wclgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wclgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -565,8 +565,8 @@ block0(v0: f64): ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wclgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wclgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -593,8 +593,8 @@ block0(v0: f64): ; larl %r1, [const(1)] ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wcgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wcgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -609,8 +609,8 @@ block0(v0: f64): ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wcgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wcgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -638,8 +638,8 @@ block0(v0: f64): ; larl %r1, [const(1)] ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wclgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wclgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -654,8 +654,8 @@ block0(v0: f64): ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wclgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wclgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -682,8 +682,8 @@ block0(v0: f64): ; larl %r1, [const(1)] ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wcgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wcgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -698,8 +698,8 @@ block0(v0: f64): ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wcgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wcgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -719,15 +719,15 @@ block0(v0: i8): ; VCode: ; block0: -; llcr %r4, %r2 -; vlvgf %v4, %r4, 0 +; llcr %r2, %r2 +; vlvgf %v4, %r2, 0 ; wcelfb %f0, %f4, 0, 4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llcr %r4, %r2 -; vlvgf %v4, %r4, 0 +; llcr %r2, %r2 +; vlvgf %v4, %r2, 0 ; vcdlg %v0, %v4, 2, 8, 4 ; br %r14 @@ -739,15 +739,15 @@ block0(v0: i8): ; VCode: ; block0: -; lbr %r4, %r2 -; vlvgf %v4, %r4, 0 +; lbr %r2, %r2 +; vlvgf %v4, %r2, 0 ; wcefb %f0, %f4, 0, 4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lbr %r4, %r2 -; vlvgf %v4, %r4, 0 +; lbr %r2, %r2 +; vlvgf %v4, %r2, 0 ; vcdg %v0, %v4, 2, 8, 4 ; br %r14 @@ -759,15 +759,15 @@ block0(v0: i16): ; VCode: ; block0: -; llhr %r4, %r2 -; vlvgf %v4, %r4, 0 +; llhr %r2, %r2 +; vlvgf %v4, %r2, 0 ; wcelfb %f0, %f4, 0, 4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llhr %r4, %r2 -; vlvgf %v4, %r4, 0 +; llhr %r2, %r2 +; vlvgf %v4, %r2, 0 ; vcdlg %v0, %v4, 2, 8, 4 ; br %r14 @@ -779,15 +779,15 @@ block0(v0: i16): ; VCode: ; block0: -; lhr %r4, %r2 -; vlvgf %v4, %r4, 0 +; lhr %r2, %r2 +; vlvgf %v4, %r2, 0 ; wcefb %f0, %f4, 0, 4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lhr %r4, %r2 -; vlvgf %v4, %r4, 0 +; lhr %r2, %r2 +; vlvgf %v4, %r2, 0 ; vcdg %v0, %v4, 2, 8, 4 ; br %r14 @@ -875,15 +875,15 @@ block0(v0: i8): ; VCode: ; block0: -; llgcr %r4, %r2 -; ldgr %f4, %r4 +; llgcr %r2, 
%r2 +; ldgr %f4, %r2 ; wcdlgb %f0, %f4, 0, 4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llgcr %r4, %r2 -; ldgr %f4, %r4 +; llgcr %r2, %r2 +; ldgr %f4, %r2 ; wcdlgb %f0, %f4, 0, 4 ; br %r14 @@ -895,15 +895,15 @@ block0(v0: i8): ; VCode: ; block0: -; lgbr %r4, %r2 -; ldgr %f4, %r4 +; lgbr %r2, %r2 +; ldgr %f4, %r2 ; wcdgb %f0, %f4, 0, 4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgbr %r4, %r2 -; ldgr %f4, %r4 +; lgbr %r2, %r2 +; ldgr %f4, %r2 ; wcdgb %f0, %f4, 0, 4 ; br %r14 @@ -915,15 +915,15 @@ block0(v0: i16): ; VCode: ; block0: -; llghr %r4, %r2 -; ldgr %f4, %r4 +; llghr %r2, %r2 +; ldgr %f4, %r2 ; wcdlgb %f0, %f4, 0, 4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llghr %r4, %r2 -; ldgr %f4, %r4 +; llghr %r2, %r2 +; ldgr %f4, %r2 ; wcdlgb %f0, %f4, 0, 4 ; br %r14 @@ -935,15 +935,15 @@ block0(v0: i16): ; VCode: ; block0: -; lghr %r4, %r2 -; ldgr %f4, %r4 +; lghr %r2, %r2 +; ldgr %f4, %r2 ; wcdgb %f0, %f4, 0, 4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lghr %r4, %r2 -; ldgr %f4, %r4 +; lghr %r2, %r2 +; ldgr %f4, %r2 ; wcdgb %f0, %f4, 0, 4 ; br %r14 @@ -955,15 +955,15 @@ block0(v0: i32): ; VCode: ; block0: -; llgfr %r4, %r2 -; ldgr %f4, %r4 +; llgfr %r2, %r2 +; ldgr %f4, %r2 ; wcdlgb %f0, %f4, 0, 4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llgfr %r4, %r2 -; ldgr %f4, %r4 +; llgfr %r2, %r2 +; ldgr %f4, %r2 ; wcdlgb %f0, %f4, 0, 4 ; br %r14 @@ -975,15 +975,15 @@ block0(v0: i32): ; VCode: ; block0: -; lgfr %r4, %r2 -; ldgr %f4, %r4 +; lgfr %r2, %r2 +; ldgr %f4, %r2 ; wcdgb %f0, %f4, 0, 4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgfr %r4, %r2 -; ldgr %f4, %r4 +; lgfr %r2, %r2 +; ldgr %f4, %r2 ; wcdgb %f0, %f4, 0, 4 ; br %r14 @@ -1325,18 +1325,18 @@ block0(v0: f64): ; block0: ; wclgdb %f2, %f0, 0, 5 ; lgdr %r2, %f2 -; llilf %r4, 4294967295 -; clgr %r2, %r4 -; locgrh %r2, %r4 +; llilf %r3, 4294967295 +; clgr %r2, %r3 +; locgrh %r2, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; wclgdb %f2, %f0, 0, 5 ; lgdr %r2, %f2 -; llilf %r4, 0xffffffff -; clgr %r2, %r4 -; locgrh %r2, %r4 +; llilf %r3, 0xffffffff +; clgr %r2, %r3 +; locgrh %r2, %r3 ; br %r14 function %fcvt_to_sint_sat_f64_i32(f64) -> i32 { @@ -1354,9 +1354,9 @@ block0(v0: f64): ; lgfi %r3, 2147483647 ; cgr %r2, %r3 ; locgrh %r2, %r3 -; lgfi %r4, -2147483648 -; cgr %r2, %r4 -; locgrl %r2, %r4 +; lgfi %r3, -2147483648 +; cgr %r2, %r3 +; locgrl %r2, %r3 ; br %r14 ; ; Disassembled: @@ -1368,9 +1368,9 @@ block0(v0: f64): ; lgfi %r3, 0x7fffffff ; cgr %r2, %r3 ; locgrh %r2, %r3 -; lgfi %r4, -0x80000000 -; cgr %r2, %r4 -; locgrl %r2, %r4 +; lgfi %r3, -0x80000000 +; cgr %r2, %r3 +; locgrl %r2, %r3 ; br %r14 function %fcvt_to_uint_sat_f64_i64(f64) -> i64 { diff --git a/cranelift/filetests/filetests/isa/s390x/floating-point.clif b/cranelift/filetests/filetests/isa/s390x/floating-point.clif index c9fc1065671e..b68041152a26 100644 --- a/cranelift/filetests/filetests/isa/s390x/floating-point.clif +++ b/cranelift/filetests/filetests/isa/s390x/floating-point.clif @@ -1253,9 +1253,9 @@ block0(v0: f32): ; larl %r1, [const(1)] ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wldeb %v20, %f0 -; wclgdb %v22, %v20, 0, 5 -; vlgvg %r2, %v22, 0 +; wldeb %v16, %f0 +; wclgdb %v16, %v16, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -1270,9 +1270,9 @@ block0(v0: f32): ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wldeb %v20, %f0 -; wclgdb %v22, %v20, 0, 5 -; vlgvg %r2, %v22, 0 +; wldeb %v16, %f0 +; wclgdb %v16, %v16, 0, 5 +; vlgvg %r2, 
%v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -1300,9 +1300,9 @@ block0(v0: f32): ; larl %r1, [const(1)] ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wldeb %v20, %f0 -; wcgdb %v22, %v20, 0, 5 -; vlgvg %r2, %v22, 0 +; wldeb %v16, %f0 +; wcgdb %v16, %v16, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -1317,9 +1317,9 @@ block0(v0: f32): ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wldeb %v20, %f0 -; wcgdb %v22, %v20, 0, 5 -; vlgvg %r2, %v22, 0 +; wldeb %v16, %f0 +; wcgdb %v16, %v16, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -1348,9 +1348,9 @@ block0(v0: f32): ; larl %r1, [const(1)] ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wldeb %v20, %f0 -; wclgdb %v22, %v20, 0, 5 -; vlgvg %r2, %v22, 0 +; wldeb %v16, %f0 +; wclgdb %v16, %v16, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -1365,9 +1365,9 @@ block0(v0: f32): ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wldeb %v20, %f0 -; wclgdb %v22, %v20, 0, 5 -; vlgvg %r2, %v22, 0 +; wldeb %v16, %f0 +; wclgdb %v16, %v16, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -1395,9 +1395,9 @@ block0(v0: f32): ; larl %r1, [const(1)] ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wldeb %v20, %f0 -; wcgdb %v22, %v20, 0, 5 -; vlgvg %r2, %v22, 0 +; wldeb %v16, %f0 +; wcgdb %v16, %v16, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -1412,9 +1412,9 @@ block0(v0: f32): ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wldeb %v20, %f0 -; wcgdb %v22, %v20, 0, 5 -; vlgvg %r2, %v22, 0 +; wldeb %v16, %f0 +; wcgdb %v16, %v16, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -1441,9 +1441,9 @@ block0(v0: f32): ; larl %r1, [const(1)] ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wldeb %v20, %f0 -; wclgdb %v22, %v20, 0, 5 -; vlgvg %r2, %v22, 0 +; wldeb %v16, %f0 +; wclgdb %v16, %v16, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -1458,9 +1458,9 @@ block0(v0: f32): ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wldeb %v20, %f0 -; wclgdb %v22, %v20, 0, 5 -; vlgvg %r2, %v22, 0 +; wldeb %v16, %f0 +; wclgdb %v16, %v16, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -1488,9 +1488,9 @@ block0(v0: f32): ; larl %r1, [const(1)] ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wldeb %v20, %f0 -; wcgdb %v22, %v20, 0, 5 -; vlgvg %r2, %v22, 0 +; wldeb %v16, %f0 +; wcgdb %v16, %v16, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -1505,9 +1505,9 @@ block0(v0: f32): ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wldeb %v20, %f0 -; wcgdb %v22, %v20, 0, 5 -; vlgvg %r2, %v22, 0 +; wldeb %v16, %f0 +; wcgdb %v16, %v16, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -1536,9 +1536,9 @@ block0(v0: f32): ; larl %r1, [const(1)] ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wldeb %v20, %f0 -; wclgdb %v22, %v20, 0, 5 -; vlgvg %r2, %v22, 0 +; wldeb %v16, %f0 +; wclgdb %v16, %v16, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -1553,9 +1553,9 @@ block0(v0: f32): ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wldeb %v20, %f0 -; wclgdb %v22, %v20, 0, 5 -; vlgvg %r2, %v22, 0 +; wldeb %v16, %f0 +; wclgdb %v16, %v16, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 
@@ -1583,9 +1583,9 @@ block0(v0: f32): ; larl %r1, [const(1)] ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wldeb %v20, %f0 -; wcgdb %v22, %v20, 0, 5 -; vlgvg %r2, %v22, 0 +; wldeb %v16, %f0 +; wcgdb %v16, %v16, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -1600,9 +1600,9 @@ block0(v0: f32): ; vlef %v16, 0(%r1), 0 ; wfcsb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wldeb %v20, %f0 -; wcgdb %v22, %v20, 0, 5 -; vlgvg %r2, %v22, 0 +; wldeb %v16, %f0 +; wcgdb %v16, %v16, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -1629,8 +1629,8 @@ block0(v0: f64): ; larl %r1, [const(1)] ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wclgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wclgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -1645,8 +1645,8 @@ block0(v0: f64): ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wclgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wclgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -1673,8 +1673,8 @@ block0(v0: f64): ; larl %r1, [const(1)] ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wcgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wcgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -1689,8 +1689,8 @@ block0(v0: f64): ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wcgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wcgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -1716,8 +1716,8 @@ block0(v0: f64): ; larl %r1, [const(1)] ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wclgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wclgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -1732,8 +1732,8 @@ block0(v0: f64): ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wclgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wclgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -1760,8 +1760,8 @@ block0(v0: f64): ; larl %r1, [const(1)] ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wcgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wcgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -1776,8 +1776,8 @@ block0(v0: f64): ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wcgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wcgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -1803,8 +1803,8 @@ block0(v0: f64): ; larl %r1, [const(1)] ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wclgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wclgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -1819,8 +1819,8 @@ block0(v0: f64): ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wclgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wclgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -1847,8 +1847,8 @@ block0(v0: f64): ; larl %r1, [const(1)] ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wcgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wcgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -1863,8 +1863,8 @@ block0(v0: f64): ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wcgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wcgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; 
br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -1892,8 +1892,8 @@ block0(v0: f64): ; larl %r1, [const(1)] ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wclgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wclgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -1908,8 +1908,8 @@ block0(v0: f64): ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wclgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wclgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -1936,8 +1936,8 @@ block0(v0: f64): ; larl %r1, [const(1)] ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle .+2 # trap=int_ovf -; wcgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wcgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; ; Disassembled: @@ -1952,8 +1952,8 @@ block0(v0: f64): ; vleg %v16, 0(%r1), 0 ; wfcdb %f0, %v16 ; jgle 0x32 ; trap: int_ovf -; wcgdb %v20, %f0, 0, 5 -; vlgvg %r2, %v20, 0 +; wcgdb %v16, %f0, 0, 5 +; vlgvg %r2, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -1979,8 +1979,8 @@ block0(v0: f128): ; larl %r1, [const(0)] ; vl %v5, 0(%r1) ; wfcxb %f1, %f5 ; jghe .+2 # trap=int_ovf -; larl %r1, [const(1)] ; vl %v17, 0(%r1) -; wfcxb %f1, %v17 +; larl %r1, [const(1)] ; vl %v16, 0(%r1) +; wfcxb %f1, %v16 ; jgle .+2 # trap=int_ovf ; vrepg %v3, %v1, 1 ; clfxbr %r2, 5, %f1, 0 @@ -1996,8 +1996,8 @@ block0(v0: f128): ; wfcxb %v1, %v5 ; jghe 0x26 ; trap: int_ovf ; larl %r1, 0x60 -; vl %v17, 0(%r1) -; wfcxb %v1, %v17 +; vl %v16, 0(%r1) +; wfcxb %v1, %v16 ; jgle 0x3e ; trap: int_ovf ; vrepg %v3, %v1, 1 ; clfxbr %r2, 5, %f1, 0 @@ -2032,8 +2032,8 @@ block0(v0: f128): ; larl %r1, [const(0)] ; vl %v5, 0(%r1) ; wfcxb %f1, %f5 ; jghe .+2 # trap=int_ovf -; larl %r1, [const(1)] ; vl %v17, 0(%r1) -; wfcxb %f1, %v17 +; larl %r1, [const(1)] ; vl %v16, 0(%r1) +; wfcxb %f1, %v16 ; jgle .+2 # trap=int_ovf ; vrepg %v3, %v1, 1 ; cfxbra %r2, 5, %f1, 0 @@ -2049,8 +2049,8 @@ block0(v0: f128): ; wfcxb %v1, %v5 ; jghe 0x26 ; trap: int_ovf ; larl %r1, 0x60 -; vl %v17, 0(%r1) -; wfcxb %v1, %v17 +; vl %v16, 0(%r1) +; wfcxb %v1, %v16 ; jgle 0x3e ; trap: int_ovf ; vrepg %v3, %v1, 1 ; cfxbr %r2, 5, %f1 @@ -2084,8 +2084,8 @@ block0(v0: f128): ; larl %r1, [const(0)] ; vl %v5, 0(%r1) ; wfcxb %f1, %f5 ; jghe .+2 # trap=int_ovf -; larl %r1, [const(1)] ; vl %v17, 0(%r1) -; wfcxb %f1, %v17 +; larl %r1, [const(1)] ; vl %v16, 0(%r1) +; wfcxb %f1, %v16 ; jgle .+2 # trap=int_ovf ; vrepg %v3, %v1, 1 ; clfxbr %r2, 5, %f1, 0 @@ -2101,8 +2101,8 @@ block0(v0: f128): ; wfcxb %v1, %v5 ; jghe 0x26 ; trap: int_ovf ; larl %r1, 0x60 -; vl %v17, 0(%r1) -; wfcxb %v1, %v17 +; vl %v16, 0(%r1) +; wfcxb %v1, %v16 ; jgle 0x3e ; trap: int_ovf ; vrepg %v3, %v1, 1 ; clfxbr %r2, 5, %f1, 0 @@ -2137,8 +2137,8 @@ block0(v0: f128): ; larl %r1, [const(0)] ; vl %v5, 0(%r1) ; wfcxb %f1, %f5 ; jghe .+2 # trap=int_ovf -; larl %r1, [const(1)] ; vl %v17, 0(%r1) -; wfcxb %f1, %v17 +; larl %r1, [const(1)] ; vl %v16, 0(%r1) +; wfcxb %f1, %v16 ; jgle .+2 # trap=int_ovf ; vrepg %v3, %v1, 1 ; cfxbra %r2, 5, %f1, 0 @@ -2154,8 +2154,8 @@ block0(v0: f128): ; wfcxb %v1, %v5 ; jghe 0x26 ; trap: int_ovf ; larl %r1, 0x60 -; vl %v17, 0(%r1) -; wfcxb %v1, %v17 +; vl %v16, 0(%r1) +; wfcxb %v1, %v16 ; jgle 0x3e ; trap: int_ovf ; vrepg %v3, %v1, 1 ; cfxbr %r2, 5, %f1 @@ -2189,8 +2189,8 @@ block0(v0: f128): ; larl %r1, [const(0)] ; vl %v5, 0(%r1) ; wfcxb %f1, %f5 ; jghe .+2 # trap=int_ovf -; larl %r1, [const(1)] ; vl %v17, 0(%r1) -; wfcxb %f1, %v17 +; larl %r1, [const(1)] ; vl %v16, 0(%r1) +; wfcxb %f1, %v16 ; 
jgle .+2 # trap=int_ovf ; vrepg %v3, %v1, 1 ; clfxbr %r2, 5, %f1, 0 @@ -2206,8 +2206,8 @@ block0(v0: f128): ; wfcxb %v1, %v5 ; jghe 0x26 ; trap: int_ovf ; larl %r1, 0x60 -; vl %v17, 0(%r1) -; wfcxb %v1, %v17 +; vl %v16, 0(%r1) +; wfcxb %v1, %v16 ; jgle 0x3e ; trap: int_ovf ; vrepg %v3, %v1, 1 ; clfxbr %r2, 5, %f1, 0 @@ -2242,8 +2242,8 @@ block0(v0: f128): ; larl %r1, [const(0)] ; vl %v5, 0(%r1) ; wfcxb %f1, %f5 ; jghe .+2 # trap=int_ovf -; larl %r1, [const(1)] ; vl %v17, 0(%r1) -; wfcxb %f1, %v17 +; larl %r1, [const(1)] ; vl %v16, 0(%r1) +; wfcxb %f1, %v16 ; jgle .+2 # trap=int_ovf ; vrepg %v3, %v1, 1 ; cfxbra %r2, 5, %f1, 0 @@ -2259,8 +2259,8 @@ block0(v0: f128): ; wfcxb %v1, %v5 ; jghe 0x26 ; trap: int_ovf ; larl %r1, 0x60 -; vl %v17, 0(%r1) -; wfcxb %v1, %v17 +; vl %v16, 0(%r1) +; wfcxb %v1, %v16 ; jgle 0x3e ; trap: int_ovf ; vrepg %v3, %v1, 1 ; cfxbr %r2, 5, %f1 @@ -2294,8 +2294,8 @@ block0(v0: f128): ; larl %r1, [const(0)] ; vl %v5, 0(%r1) ; wfcxb %f1, %f5 ; jghe .+2 # trap=int_ovf -; larl %r1, [const(1)] ; vl %v17, 0(%r1) -; wfcxb %f1, %v17 +; larl %r1, [const(1)] ; vl %v16, 0(%r1) +; wfcxb %f1, %v16 ; jgle .+2 # trap=int_ovf ; vrepg %v3, %v1, 1 ; clgxbr %r2, 5, %f1, 0 @@ -2311,8 +2311,8 @@ block0(v0: f128): ; wfcxb %v1, %v5 ; jghe 0x26 ; trap: int_ovf ; larl %r1, 0x60 -; vl %v17, 0(%r1) -; wfcxb %v1, %v17 +; vl %v16, 0(%r1) +; wfcxb %v1, %v16 ; jgle 0x3e ; trap: int_ovf ; vrepg %v3, %v1, 1 ; clgxbr %r2, 5, %f1, 0 @@ -2347,8 +2347,8 @@ block0(v0: f128): ; larl %r1, [const(0)] ; vl %v5, 0(%r1) ; wfcxb %f1, %f5 ; jghe .+2 # trap=int_ovf -; larl %r1, [const(1)] ; vl %v17, 0(%r1) -; wfcxb %f1, %v17 +; larl %r1, [const(1)] ; vl %v16, 0(%r1) +; wfcxb %f1, %v16 ; jgle .+2 # trap=int_ovf ; vrepg %v3, %v1, 1 ; cgxbra %r2, 5, %f1, 0 @@ -2364,8 +2364,8 @@ block0(v0: f128): ; wfcxb %v1, %v5 ; jghe 0x26 ; trap: int_ovf ; larl %r1, 0x60 -; vl %v17, 0(%r1) -; wfcxb %v1, %v17 +; vl %v16, 0(%r1) +; wfcxb %v1, %v16 ; jgle 0x3e ; trap: int_ovf ; vrepg %v3, %v1, 1 ; cgxbr %r2, 5, %f1 @@ -2393,16 +2393,16 @@ block0(v0: i8): ; VCode: ; block0: -; llgcr %r4, %r2 -; ldgr %f4, %r4 +; llgcr %r2, %r2 +; ldgr %f4, %r2 ; wcdlgb %f6, %f4, 0, 3 ; ledbra %f0, 4, %f6, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llgcr %r4, %r2 -; ldgr %f4, %r4 +; llgcr %r2, %r2 +; ldgr %f4, %r2 ; wcdlgb %f6, %f4, 0, 3 ; ledbra %f0, 4, %f6, 0 ; br %r14 @@ -2415,16 +2415,16 @@ block0(v0: i8): ; VCode: ; block0: -; lgbr %r4, %r2 -; ldgr %f4, %r4 +; lgbr %r2, %r2 +; ldgr %f4, %r2 ; wcdgb %f6, %f4, 0, 3 ; ledbra %f0, 4, %f6, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgbr %r4, %r2 -; ldgr %f4, %r4 +; lgbr %r2, %r2 +; ldgr %f4, %r2 ; wcdgb %f6, %f4, 0, 3 ; ledbra %f0, 4, %f6, 0 ; br %r14 @@ -2437,16 +2437,16 @@ block0(v0: i16): ; VCode: ; block0: -; llghr %r4, %r2 -; ldgr %f4, %r4 +; llghr %r2, %r2 +; ldgr %f4, %r2 ; wcdlgb %f6, %f4, 0, 3 ; ledbra %f0, 4, %f6, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llghr %r4, %r2 -; ldgr %f4, %r4 +; llghr %r2, %r2 +; ldgr %f4, %r2 ; wcdlgb %f6, %f4, 0, 3 ; ledbra %f0, 4, %f6, 0 ; br %r14 @@ -2459,16 +2459,16 @@ block0(v0: i16): ; VCode: ; block0: -; lghr %r4, %r2 -; ldgr %f4, %r4 +; lghr %r2, %r2 +; ldgr %f4, %r2 ; wcdgb %f6, %f4, 0, 3 ; ledbra %f0, 4, %f6, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lghr %r4, %r2 -; ldgr %f4, %r4 +; lghr %r2, %r2 +; ldgr %f4, %r2 ; wcdgb %f6, %f4, 0, 3 ; ledbra %f0, 4, %f6, 0 ; br %r14 @@ -2481,16 +2481,16 @@ block0(v0: i32): ; VCode: ; block0: -; llgfr %r4, %r2 -; ldgr %f4, %r4 +; llgfr %r2, %r2 +; ldgr %f4, %r2 ; 
wcdlgb %f6, %f4, 0, 3 ; ledbra %f0, 4, %f6, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llgfr %r4, %r2 -; ldgr %f4, %r4 +; llgfr %r2, %r2 +; ldgr %f4, %r2 ; wcdlgb %f6, %f4, 0, 3 ; ledbra %f0, 4, %f6, 0 ; br %r14 @@ -2503,16 +2503,16 @@ block0(v0: i32): ; VCode: ; block0: -; lgfr %r4, %r2 -; ldgr %f4, %r4 +; lgfr %r2, %r2 +; ldgr %f4, %r2 ; wcdgb %f6, %f4, 0, 3 ; ledbra %f0, 4, %f6, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgfr %r4, %r2 -; ldgr %f4, %r4 +; lgfr %r2, %r2 +; ldgr %f4, %r2 ; wcdgb %f6, %f4, 0, 3 ; ledbra %f0, 4, %f6, 0 ; br %r14 @@ -2565,15 +2565,15 @@ block0(v0: i8): ; VCode: ; block0: -; llgcr %r4, %r2 -; ldgr %f4, %r4 +; llgcr %r2, %r2 +; ldgr %f4, %r2 ; wcdlgb %f0, %f4, 0, 4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llgcr %r4, %r2 -; ldgr %f4, %r4 +; llgcr %r2, %r2 +; ldgr %f4, %r2 ; wcdlgb %f0, %f4, 0, 4 ; br %r14 @@ -2585,15 +2585,15 @@ block0(v0: i8): ; VCode: ; block0: -; lgbr %r4, %r2 -; ldgr %f4, %r4 +; lgbr %r2, %r2 +; ldgr %f4, %r2 ; wcdgb %f0, %f4, 0, 4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgbr %r4, %r2 -; ldgr %f4, %r4 +; lgbr %r2, %r2 +; ldgr %f4, %r2 ; wcdgb %f0, %f4, 0, 4 ; br %r14 @@ -2605,15 +2605,15 @@ block0(v0: i16): ; VCode: ; block0: -; llghr %r4, %r2 -; ldgr %f4, %r4 +; llghr %r2, %r2 +; ldgr %f4, %r2 ; wcdlgb %f0, %f4, 0, 4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llghr %r4, %r2 -; ldgr %f4, %r4 +; llghr %r2, %r2 +; ldgr %f4, %r2 ; wcdlgb %f0, %f4, 0, 4 ; br %r14 @@ -2625,15 +2625,15 @@ block0(v0: i16): ; VCode: ; block0: -; lghr %r4, %r2 -; ldgr %f4, %r4 +; lghr %r2, %r2 +; ldgr %f4, %r2 ; wcdgb %f0, %f4, 0, 4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lghr %r4, %r2 -; ldgr %f4, %r4 +; lghr %r2, %r2 +; ldgr %f4, %r2 ; wcdgb %f0, %f4, 0, 4 ; br %r14 @@ -2645,15 +2645,15 @@ block0(v0: i32): ; VCode: ; block0: -; llgfr %r4, %r2 -; ldgr %f4, %r4 +; llgfr %r2, %r2 +; ldgr %f4, %r2 ; wcdlgb %f0, %f4, 0, 4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llgfr %r4, %r2 -; ldgr %f4, %r4 +; llgfr %r2, %r2 +; ldgr %f4, %r2 ; wcdlgb %f0, %f4, 0, 4 ; br %r14 @@ -2665,15 +2665,15 @@ block0(v0: i32): ; VCode: ; block0: -; lgfr %r4, %r2 -; ldgr %f4, %r4 +; lgfr %r2, %r2 +; ldgr %f4, %r2 ; wcdgb %f0, %f4, 0, 4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgfr %r4, %r2 -; ldgr %f4, %r4 +; lgfr %r2, %r2 +; ldgr %f4, %r2 ; wcdgb %f0, %f4, 0, 4 ; br %r14 @@ -2721,16 +2721,16 @@ block0(v0: i8): ; VCode: ; block0: -; llcr %r5, %r3 -; cxlfbr %f1, 4, %r5, 0 +; llcr %r3, %r3 +; cxlfbr %f1, 4, %r3, 0 ; vmrhg %v16, %v1, %v3 ; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llcr %r5, %r3 -; cxlfbr %f1, 4, %r5, 0 +; llcr %r3, %r3 +; cxlfbr %f1, 4, %r3, 0 ; vmrhg %v16, %v1, %v3 ; vst %v16, 0(%r2) ; br %r14 @@ -2743,16 +2743,16 @@ block0(v0: i8): ; VCode: ; block0: -; lbr %r5, %r3 -; cxfbra %f1, 4, %r5, 0 +; lbr %r3, %r3 +; cxfbra %f1, 4, %r3, 0 ; vmrhg %v16, %v1, %v3 ; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lbr %r5, %r3 -; cxfbra %f1, 4, %r5, 0 +; lbr %r3, %r3 +; cxfbra %f1, 4, %r3, 0 ; vmrhg %v16, %v1, %v3 ; vst %v16, 0(%r2) ; br %r14 @@ -2765,16 +2765,16 @@ block0(v0: i16): ; VCode: ; block0: -; llhr %r5, %r3 -; cxlfbr %f1, 4, %r5, 0 +; llhr %r3, %r3 +; cxlfbr %f1, 4, %r3, 0 ; vmrhg %v16, %v1, %v3 ; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llhr %r5, %r3 -; cxlfbr %f1, 4, %r5, 0 +; llhr %r3, %r3 +; cxlfbr %f1, 4, %r3, 0 ; vmrhg %v16, %v1, %v3 ; vst %v16, 0(%r2) ; br %r14 @@ -2787,16 +2787,16 @@ 
block0(v0: i16): ; VCode: ; block0: -; lhr %r5, %r3 -; cxfbra %f1, 4, %r5, 0 +; lhr %r3, %r3 +; cxfbra %f1, 4, %r3, 0 ; vmrhg %v16, %v1, %v3 ; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lhr %r5, %r3 -; cxfbra %f1, 4, %r5, 0 +; lhr %r3, %r3 +; cxfbra %f1, 4, %r3, 0 ; vmrhg %v16, %v1, %v3 ; vst %v16, 0(%r2) ; br %r14 @@ -3032,9 +3032,9 @@ block0(v0: f32): ; lgdr %r2, %f4 ; cebr %f0, %f0 ; locghio %r2, 0 -; lgfi %r5, 2147483647 -; cgr %r2, %r5 -; locgrh %r2, %r5 +; lgfi %r3, 2147483647 +; cgr %r2, %r3 +; locgrh %r2, %r3 ; lgfi %r3, -2147483648 ; cgr %r2, %r3 ; locgrl %r2, %r3 @@ -3047,9 +3047,9 @@ block0(v0: f32): ; lgdr %r2, %f4 ; cebr %f0, %f0 ; locghio %r2, 0 -; lgfi %r5, 0x7fffffff -; cgr %r2, %r5 -; locgrh %r2, %r5 +; lgfi %r3, 0x7fffffff +; cgr %r2, %r3 +; locgrh %r2, %r3 ; lgfi %r3, -0x80000000 ; cgr %r2, %r3 ; locgrl %r2, %r3 @@ -3213,18 +3213,18 @@ block0(v0: f64): ; block0: ; wclgdb %f2, %f0, 0, 5 ; lgdr %r2, %f2 -; llilf %r4, 4294967295 -; clgr %r2, %r4 -; locgrh %r2, %r4 +; llilf %r3, 4294967295 +; clgr %r2, %r3 +; locgrh %r2, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; wclgdb %f2, %f0, 0, 5 ; lgdr %r2, %f2 -; llilf %r4, 0xffffffff -; clgr %r2, %r4 -; locgrh %r2, %r4 +; llilf %r3, 0xffffffff +; clgr %r2, %r3 +; locgrh %r2, %r3 ; br %r14 function %fcvt_to_sint_sat_f64_i32(f64) -> i32 { @@ -3242,9 +3242,9 @@ block0(v0: f64): ; lgfi %r3, 2147483647 ; cgr %r2, %r3 ; locgrh %r2, %r3 -; lgfi %r4, -2147483648 -; cgr %r2, %r4 -; locgrl %r2, %r4 +; lgfi %r3, -2147483648 +; cgr %r2, %r3 +; locgrl %r2, %r3 ; br %r14 ; ; Disassembled: @@ -3256,9 +3256,9 @@ block0(v0: f64): ; lgfi %r3, 0x7fffffff ; cgr %r2, %r3 ; locgrh %r2, %r3 -; lgfi %r4, -0x80000000 -; cgr %r2, %r4 -; locgrl %r2, %r4 +; lgfi %r3, -0x80000000 +; cgr %r2, %r3 +; locgrl %r2, %r3 ; br %r14 function %fcvt_to_uint_sat_f64_i64(f64) -> i64 { diff --git a/cranelift/filetests/filetests/isa/s390x/fpmem.clif b/cranelift/filetests/filetests/isa/s390x/fpmem.clif index 63361d4a084c..ab7e4e6a0b70 100644 --- a/cranelift/filetests/filetests/isa/s390x/fpmem.clif +++ b/cranelift/filetests/filetests/isa/s390x/fpmem.clif @@ -41,14 +41,14 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; ldgr %f0, %r4 +; lrvg %r2, 0(%r2) +; ldgr %f0, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; ldgr %f0, %r4 +; lrvg %r2, 0(%r2) ; trap: heap_oob +; ldgr %f0, %r2 ; br %r14 function %load_f32_little(i64) -> f32 { @@ -59,14 +59,14 @@ block0(v0: i64): ; VCode: ; block0: -; lrv %r4, 0(%r2) -; vlvgf %v0, %r4, 0 +; lrv %r2, 0(%r2) +; vlvgf %v0, %r2, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrv %r4, 0(%r2) ; trap: heap_oob -; vlvgf %v0, %r4, 0 +; lrv %r2, 0(%r2) ; trap: heap_oob +; vlvgf %v0, %r2, 0 ; br %r14 function %store_f64(f64, i64) { @@ -109,14 +109,14 @@ block0(v0: f64, v1: i64): ; VCode: ; block0: -; lgdr %r5, %f0 -; strvg %r5, 0(%r2) +; lgdr %r3, %f0 +; strvg %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgdr %r5, %f0 -; strvg %r5, 0(%r2) ; trap: heap_oob +; lgdr %r3, %f0 +; strvg %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %store_f32_little(f32, i64) { @@ -127,13 +127,13 @@ block0(v0: f32, v1: i64): ; VCode: ; block0: -; vlgvf %r5, %v0, 0 -; strv %r5, 0(%r2) +; vlgvf %r3, %v0, 0 +; strv %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvf %r5, %v0, 0 -; strv %r5, 0(%r2) ; trap: heap_oob +; vlgvf %r3, %v0, 0 +; strv %r3, 0(%r2) ; trap: heap_oob ; br %r14 diff --git 
a/cranelift/filetests/filetests/isa/s390x/i128.clif b/cranelift/filetests/filetests/isa/s390x/i128.clif index 7229593b0828..6bbef8b28ee0 100644 --- a/cranelift/filetests/filetests/isa/s390x/i128.clif +++ b/cranelift/filetests/filetests/isa/s390x/i128.clif @@ -89,7 +89,7 @@ block0(v0: i64, v1: i64): ; mgrk %r2, %r2, %r3 ; vlvgp %v16, %r2, %r3 ; vlgvg %r2, %v16, 0 -; vlgvg %r5, %v16, 1 +; vlgvg %r3, %v16, 1 ; br %r14 ; ; Disassembled: @@ -97,7 +97,7 @@ block0(v0: i64, v1: i64): ; mgrk %r2, %r2, %r3 ; vlvgp %v16, %r2, %r3 ; vlgvg %r2, %v16, 0 -; vlgvg %r5, %v16, 1 +; vlgvg %r3, %v16, 1 ; br %r14 function %umul_high_i64_pattern(i64, i64) -> i64 { @@ -112,16 +112,18 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; lgr %r4, %r3 -; lgr %r3, %r2 -; mlgr %r2, %r4 +; lgr %r4, %r2 +; lgr %r2, %r3 +; lgr %r3, %r4 +; mlgr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgr %r4, %r3 -; lgr %r3, %r2 -; mlgr %r2, %r4 +; lgr %r4, %r2 +; lgr %r2, %r3 +; lgr %r3, %r4 +; mlgr %r2, %r2 ; br %r14 function %umul_high_i64_isplit(i64, i64) -> i64 { @@ -135,21 +137,23 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; lgr %r4, %r3 -; lgr %r3, %r2 -; mlgr %r2, %r4 +; lgr %r4, %r2 +; lgr %r2, %r3 +; lgr %r3, %r4 +; mlgr %r2, %r2 ; vlvgp %v16, %r2, %r3 ; vlgvg %r2, %v16, 0 -; vlgvg %r5, %v16, 1 +; vlgvg %r3, %v16, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgr %r4, %r3 -; lgr %r3, %r2 -; mlgr %r2, %r4 +; lgr %r4, %r2 +; lgr %r2, %r3 +; lgr %r3, %r4 +; mlgr %r2, %r2 ; vlvgp %v16, %r2, %r3 ; vlgvg %r2, %v16, 0 -; vlgvg %r5, %v16, 1 +; vlgvg %r3, %v16, 1 ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/icmp.clif b/cranelift/filetests/filetests/isa/s390x/icmp.clif index 6b8707eb4039..bee448a674d0 100644 --- a/cranelift/filetests/filetests/isa/s390x/icmp.clif +++ b/cranelift/filetests/filetests/isa/s390x/icmp.clif @@ -416,18 +416,22 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: +; lgr %r4, %r3 +; lhr %r3, %r2 +; lgr %r2, %r4 ; lhr %r5, %r2 -; lhr %r3, %r3 -; cr %r5, %r3 +; cr %r3, %r5 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 +; lgr %r4, %r3 +; lhr %r3, %r2 +; lgr %r2, %r4 ; lhr %r5, %r2 -; lhr %r3, %r3 -; cr %r5, %r3 +; cr %r3, %r5 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 @@ -441,16 +445,16 @@ block0(v0: i16): ; VCode: ; block0: -; lhr %r4, %r2 -; chi %r4, 1 +; lhr %r2, %r2 +; chi %r2, 1 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lhr %r4, %r2 -; chi %r4, 1 +; lhr %r2, %r2 +; chi %r2, 1 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 @@ -464,16 +468,16 @@ block0(v0: i16, v1: i64): ; VCode: ; block0: -; lhr %r5, %r2 -; ch %r5, 0(%r3) +; lhr %r4, %r2 +; ch %r4, 0(%r3) ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lhr %r5, %r2 -; ch %r5, 0(%r3) ; trap: heap_oob +; lhr %r4, %r2 +; ch %r4, 0(%r3) ; trap: heap_oob ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 @@ -489,16 +493,16 @@ block0(v0: i16): ; VCode: ; block0: -; lhr %r4, %r2 -; chrl %r4, %sym + 0 +; lhr %r2, %r2 +; chrl %r2, %sym + 0 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lhr %r4, %r2 -; chrl %r4, 4 ; reloc_external PCRel32Dbl %sym 2 ; trap: heap_oob +; lhr %r2, %r2 +; chrl %r2, 4 ; reloc_external PCRel32Dbl %sym 2 ; trap: heap_oob ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 @@ -511,18 +515,22 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: +; lgr %r4, %r3 +; lbr %r3, %r2 +; lgr %r2, %r4 ; lbr %r5, %r2 -; lbr %r3, %r3 -; cr %r5, %r3 +; cr %r3, %r5 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 ; ; 
Disassembled: ; block0: ; offset 0x0 +; lgr %r4, %r3 +; lbr %r3, %r2 +; lgr %r2, %r4 ; lbr %r5, %r2 -; lbr %r3, %r3 -; cr %r5, %r3 +; cr %r3, %r5 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 @@ -536,16 +544,16 @@ block0(v0: i8): ; VCode: ; block0: -; lbr %r4, %r2 -; chi %r4, 1 +; lbr %r2, %r2 +; chi %r2, 1 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lbr %r4, %r2 -; chi %r4, 1 +; lbr %r2, %r2 +; chi %r2, 1 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 @@ -559,18 +567,22 @@ block0(v0: i8, v1: i64): ; VCode: ; block0: -; lbr %r5, %r2 -; lb %r3, 0(%r3) -; cr %r5, %r3 +; lgr %r4, %r3 +; lbr %r3, %r2 +; lgr %r2, %r4 +; lb %r5, 0(%r2) +; cr %r3, %r5 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lbr %r5, %r2 -; lb %r3, 0(%r3) ; trap: heap_oob -; cr %r5, %r3 +; lgr %r4, %r3 +; lbr %r3, %r2 +; lgr %r2, %r4 +; lb %r5, 0(%r2) ; trap: heap_oob +; cr %r3, %r5 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 @@ -734,16 +746,16 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; llgh %r3, 0(%r3) -; clgr %r2, %r3 +; llgh %r5, 0(%r3) +; clgr %r2, %r5 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llgh %r3, 0(%r3) ; trap: heap_oob -; clgr %r2, %r3 +; llgh %r5, 0(%r3) ; trap: heap_oob +; clgr %r2, %r5 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 @@ -886,16 +898,16 @@ block0(v0: i32, v1: i64): ; VCode: ; block0: -; llh %r3, 0(%r3) -; clr %r2, %r3 +; llh %r5, 0(%r3) +; clr %r2, %r5 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llh %r3, 0(%r3) ; trap: heap_oob -; clr %r2, %r3 +; llh %r5, 0(%r3) ; trap: heap_oob +; clr %r2, %r5 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 @@ -931,18 +943,22 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: +; lgr %r4, %r3 +; llhr %r3, %r2 +; lgr %r2, %r4 ; llhr %r5, %r2 -; llhr %r3, %r3 -; clr %r5, %r3 +; clr %r3, %r5 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 +; lgr %r4, %r3 +; llhr %r3, %r2 +; lgr %r2, %r4 ; llhr %r5, %r2 -; llhr %r3, %r3 -; clr %r5, %r3 +; clr %r3, %r5 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 @@ -956,16 +972,16 @@ block0(v0: i16): ; VCode: ; block0: -; llhr %r4, %r2 -; clfi %r4, 1 +; llhr %r2, %r2 +; clfi %r2, 1 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llhr %r4, %r2 -; clfi %r4, 1 +; llhr %r2, %r2 +; clfi %r2, 1 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 @@ -979,18 +995,22 @@ block0(v0: i16, v1: i64): ; VCode: ; block0: -; llhr %r5, %r2 -; llh %r3, 0(%r3) -; clr %r5, %r3 +; lgr %r4, %r3 +; llhr %r3, %r2 +; lgr %r2, %r4 +; llh %r5, 0(%r2) +; clr %r3, %r5 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llhr %r5, %r2 -; llh %r3, 0(%r3) ; trap: heap_oob -; clr %r5, %r3 +; lgr %r4, %r3 +; llhr %r3, %r2 +; lgr %r2, %r4 +; llh %r5, 0(%r2) ; trap: heap_oob +; clr %r3, %r5 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 @@ -1006,16 +1026,16 @@ block0(v0: i16): ; VCode: ; block0: -; llhr %r4, %r2 -; clhrl %r4, %sym + 0 +; llhr %r2, %r2 +; clhrl %r2, %sym + 0 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llhr %r4, %r2 -; clhrl %r4, 4 ; reloc_external PCRel32Dbl %sym 2 ; trap: heap_oob +; llhr %r2, %r2 +; clhrl %r2, 4 ; reloc_external PCRel32Dbl %sym 2 ; trap: heap_oob ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 @@ -1028,18 +1048,22 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: +; lgr %r4, %r3 +; llcr %r3, %r2 +; lgr %r2, %r4 ; llcr %r5, %r2 -; llcr %r3, %r3 -; clr %r5, %r3 +; clr %r3, %r5 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 ; ; 
Disassembled: ; block0: ; offset 0x0 +; lgr %r4, %r3 +; llcr %r3, %r2 +; lgr %r2, %r4 ; llcr %r5, %r2 -; llcr %r3, %r3 -; clr %r5, %r3 +; clr %r3, %r5 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 @@ -1053,16 +1077,16 @@ block0(v0: i8): ; VCode: ; block0: -; llcr %r4, %r2 -; clfi %r4, 1 +; llcr %r2, %r2 +; clfi %r2, 1 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llcr %r4, %r2 -; clfi %r4, 1 +; llcr %r2, %r2 +; clfi %r2, 1 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 @@ -1076,18 +1100,22 @@ block0(v0: i8, v1: i64): ; VCode: ; block0: -; llcr %r5, %r2 -; llc %r3, 0(%r3) -; clr %r5, %r3 +; lgr %r4, %r3 +; llcr %r3, %r2 +; lgr %r2, %r4 +; llc %r5, 0(%r2) +; clr %r3, %r5 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llcr %r5, %r2 -; llc %r3, 0(%r3) ; trap: heap_oob -; clr %r5, %r3 +; lgr %r4, %r3 +; llcr %r3, %r2 +; lgr %r2, %r4 +; llc %r5, 0(%r2) ; trap: heap_oob +; clr %r3, %r5 ; lhi %r2, 0 ; lochil %r2, 1 ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/jumptable.clif b/cranelift/filetests/filetests/isa/s390x/jumptable.clif index 32acf6ea6779..7e4a64a16a9e 100644 --- a/cranelift/filetests/filetests/isa/s390x/jumptable.clif +++ b/cranelift/filetests/filetests/isa/s390x/jumptable.clif @@ -28,34 +28,34 @@ block5(v5: i32): ; VCode: ; block0: -; llgfr %r3, %r2 -; sllg %r4, %r3, 2 -; clgfi %r3, 3 -; jghe label4 ; larl %r1, 14 ; agf %r1, 0(%r1, %r4) ; br %r1 ; jt_entries label3 label2 label1 +; llgfr %r4, %r2 +; sllg %r3, %r4, 2 +; clgfi %r4, 3 +; jghe label4 ; larl %r1, 14 ; agf %r1, 0(%r1, %r3) ; br %r1 ; jt_entries label3 label2 label1 ; block1: -; lhi %r4, 3 +; lhi %r3, 3 ; jg label5 ; block2: -; lhi %r4, 2 +; lhi %r3, 2 ; jg label5 ; block3: -; lhi %r4, 1 +; lhi %r3, 1 ; jg label5 ; block4: -; lhi %r4, 4 +; lhi %r3, 4 ; jg label5 ; block5: -; ar %r2, %r4 +; ar %r2, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llgfr %r3, %r2 -; sllg %r4, %r3, 2 -; clgfi %r3, 3 +; llgfr %r4, %r2 +; sllg %r3, %r4, 2 +; clgfi %r4, 3 ; jghe 0x4e ; larl %r1, 0x24 -; agf %r1, 0(%r4, %r1) +; agf %r1, 0(%r3, %r1) ; br %r1 ; .byte 0x00, 0x00 ; .byte 0x00, 0x20 @@ -64,17 +64,17 @@ block5(v5: i32): ; .byte 0x00, 0x00 ; .byte 0x00, 0x0c ; block1: ; offset 0x30 -; lhi %r4, 3 +; lhi %r3, 3 ; jg 0x52 ; block2: ; offset 0x3a -; lhi %r4, 2 +; lhi %r3, 2 ; jg 0x52 ; block3: ; offset 0x44 -; lhi %r4, 1 +; lhi %r3, 1 ; jg 0x52 ; block4: ; offset 0x4e -; lhi %r4, 4 +; lhi %r3, 4 ; block5: ; offset 0x52 -; ar %r2, %r4 +; ar %r2, %r3 ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/leaf_function_detection.clif b/cranelift/filetests/filetests/isa/s390x/leaf_function_detection.clif index 874a686352fe..1298bb861fa0 100644 --- a/cranelift/filetests/filetests/isa/s390x/leaf_function_detection.clif +++ b/cranelift/filetests/filetests/isa/s390x/leaf_function_detection.clif @@ -48,11 +48,11 @@ block3(v6: i32): ; VCode: ; block0: -; lhi %r3, 0 +; lhi %r5, 0 ; chi %r2, 0 ; jgh label2 ; jg label1 ; block1: -; srk %r2, %r3, %r2 +; srk %r2, %r5, %r2 ; jg label3 ; block2: ; mhi %r2, 2 @@ -62,11 +62,11 @@ block3(v6: i32): ; ; Disassembled: ; block0: ; offset 0x0 -; lhi %r3, 0 +; lhi %r5, 0 ; chi %r2, 0 ; jgh 0x18 ; block1: ; offset 0xe -; srk %r2, %r3, %r2 +; srk %r2, %r5, %r2 ; jg 0x1c ; block2: ; offset 0x18 ; mhi %r2, 2 diff --git a/cranelift/filetests/filetests/isa/s390x/load-little.clif b/cranelift/filetests/filetests/isa/s390x/load-little.clif index 237dcfcc090f..4f10a4e941a3 100644 --- 
a/cranelift/filetests/filetests/isa/s390x/load-little.clif +++ b/cranelift/filetests/filetests/isa/s390x/load-little.clif @@ -77,14 +77,14 @@ block0(v0: i64): ; VCode: ; block0: -; lrvh %r4, 0(%r2) -; llghr %r2, %r4 +; lrvh %r2, 0(%r2) +; llghr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvh %r4, 0(%r2) ; trap: heap_oob -; llghr %r2, %r4 +; lrvh %r2, 0(%r2) ; trap: heap_oob +; llghr %r2, %r2 ; br %r14 function %uload16_i64_sym() -> i64 { @@ -116,14 +116,14 @@ block0(v0: i64): ; VCode: ; block0: -; lrvh %r4, 0(%r2) -; lghr %r2, %r4 +; lrvh %r2, 0(%r2) +; lghr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvh %r4, 0(%r2) ; trap: heap_oob -; lghr %r2, %r4 +; lrvh %r2, 0(%r2) ; trap: heap_oob +; lghr %r2, %r2 ; br %r14 function %sload16_i64_sym() -> i64 { @@ -155,14 +155,14 @@ block0(v0: i64): ; VCode: ; block0: -; lrv %r4, 0(%r2) -; llgfr %r2, %r4 +; lrv %r2, 0(%r2) +; llgfr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrv %r4, 0(%r2) ; trap: heap_oob -; llgfr %r2, %r4 +; lrv %r2, 0(%r2) ; trap: heap_oob +; llgfr %r2, %r2 ; br %r14 function %uload32_i64_sym() -> i64 { @@ -194,14 +194,14 @@ block0(v0: i64): ; VCode: ; block0: -; lrv %r4, 0(%r2) -; lgfr %r2, %r4 +; lrv %r2, 0(%r2) +; lgfr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrv %r4, 0(%r2) ; trap: heap_oob -; lgfr %r2, %r4 +; lrv %r2, 0(%r2) ; trap: heap_oob +; lgfr %r2, %r2 ; br %r14 function %sload32_i64_sym() -> i64 { @@ -300,14 +300,14 @@ block0(v0: i64): ; VCode: ; block0: -; lrvh %r4, 0(%r2) -; llhr %r2, %r4 +; lrvh %r2, 0(%r2) +; llhr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvh %r4, 0(%r2) ; trap: heap_oob -; llhr %r2, %r4 +; lrvh %r2, 0(%r2) ; trap: heap_oob +; llhr %r2, %r2 ; br %r14 function %uload16_i32_sym() -> i32 { @@ -339,14 +339,14 @@ block0(v0: i64): ; VCode: ; block0: -; lrvh %r4, 0(%r2) -; lhr %r2, %r4 +; lrvh %r2, 0(%r2) +; lhr %r2, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvh %r4, 0(%r2) ; trap: heap_oob -; lhr %r2, %r4 +; lrvh %r2, 0(%r2) ; trap: heap_oob +; lhr %r2, %r2 ; br %r14 function %sload16_i32_sym() -> i32 { @@ -461,14 +461,14 @@ block0(v0: i64): ; VCode: ; block0: -; lrvh %r4, 0(%r2) -; vlvgh %v0, %r4, 0 +; lrvh %r2, 0(%r2) +; vlvgh %v0, %r2, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvh %r4, 0(%r2) ; trap: heap_oob -; vlvgh %v0, %r4, 0 +; lrvh %r2, 0(%r2) ; trap: heap_oob +; vlvgh %v0, %r2, 0 ; br %r14 function %load_f32(i64) -> f32 { @@ -479,14 +479,14 @@ block0(v0: i64): ; VCode: ; block0: -; lrv %r4, 0(%r2) -; vlvgf %v0, %r4, 0 +; lrv %r2, 0(%r2) +; vlvgf %v0, %r2, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrv %r4, 0(%r2) ; trap: heap_oob -; vlvgf %v0, %r4, 0 +; lrv %r2, 0(%r2) ; trap: heap_oob +; vlvgf %v0, %r2, 0 ; br %r14 function %load_f64(i64) -> f64 { @@ -497,14 +497,14 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; ldgr %f0, %r4 +; lrvg %r2, 0(%r2) +; ldgr %f0, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; ldgr %f0, %r4 +; lrvg %r2, 0(%r2) ; trap: heap_oob +; ldgr %f0, %r2 ; br %r14 function %load_f128(i64) -> f128 { @@ -515,17 +515,17 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r5, 0(%r3) -; lrvg %r3, 8(%r3) -; vlvgp %v7, %r3, %r5 +; lrvg %r4, 0(%r3) +; lrvg %r5, 8(%r3) +; vlvgp %v7, %r5, %r4 ; vst %v7, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r5, 0(%r3) ; trap: heap_oob -; lrvg %r3, 8(%r3) ; trap: heap_oob -; vlvgp %v7, %r3, %r5 +; lrvg %r4, 0(%r3) ; trap: 
heap_oob +; lrvg %r5, 8(%r3) ; trap: heap_oob +; vlvgp %v7, %r5, %r4 ; vst %v7, 0(%r2) ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/load.clif b/cranelift/filetests/filetests/isa/s390x/load.clif index a4574504e03d..0916b312a3b1 100644 --- a/cranelift/filetests/filetests/isa/s390x/load.clif +++ b/cranelift/filetests/filetests/isa/s390x/load.clif @@ -78,15 +78,15 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; agr %r2, %r3 -; lgfi %r1, 10000000 ; llgc %r2, 0(%r1,%r2) +; agrk %r4, %r2, %r3 +; lgfi %r1, 10000000 ; llgc %r2, 0(%r1,%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; agr %r2, %r3 +; agrk %r4, %r2, %r3 ; lgfi %r1, 0x989680 -; llgc %r2, 0(%r1, %r2) ; trap: heap_oob +; llgc %r2, 0(%r1, %r4) ; trap: heap_oob ; br %r14 function %sload8_i64(i64) -> i64 { diff --git a/cranelift/filetests/filetests/isa/s390x/minmax.clif b/cranelift/filetests/filetests/isa/s390x/minmax.clif index f1587ff54a74..da00548c3373 100644 --- a/cranelift/filetests/filetests/isa/s390x/minmax.clif +++ b/cranelift/filetests/filetests/isa/s390x/minmax.clif @@ -74,17 +74,17 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: ; llhr %r2, %r2 -; llhr %r3, %r3 -; clr %r2, %r3 -; locrl %r2, %r3 +; llhr %r5, %r3 +; clr %r2, %r5 +; locrl %r2, %r5 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; llhr %r2, %r2 -; llhr %r3, %r3 -; clr %r2, %r3 -; locrl %r2, %r3 +; llhr %r5, %r3 +; clr %r2, %r5 +; locrl %r2, %r5 ; br %r14 function %umax_i8(i8, i8) -> i8 { @@ -96,17 +96,17 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: ; llcr %r2, %r2 -; llcr %r3, %r3 -; clr %r2, %r3 -; locrl %r2, %r3 +; llcr %r5, %r3 +; clr %r2, %r5 +; locrl %r2, %r5 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; llcr %r2, %r2 -; llcr %r3, %r3 -; clr %r2, %r3 -; locrl %r2, %r3 +; llcr %r5, %r3 +; clr %r2, %r5 +; locrl %r2, %r5 ; br %r14 function %umin_i128(i128, i128) -> i128 { @@ -181,17 +181,17 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: ; llhr %r2, %r2 -; llhr %r3, %r3 -; clr %r2, %r3 -; locrh %r2, %r3 +; llhr %r5, %r3 +; clr %r2, %r5 +; locrh %r2, %r5 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; llhr %r2, %r2 -; llhr %r3, %r3 -; clr %r2, %r3 -; locrh %r2, %r3 +; llhr %r5, %r3 +; clr %r2, %r5 +; locrh %r2, %r5 ; br %r14 function %umin_i8(i8, i8) -> i8 { @@ -203,17 +203,17 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: ; llcr %r2, %r2 -; llcr %r3, %r3 -; clr %r2, %r3 -; locrh %r2, %r3 +; llcr %r5, %r3 +; clr %r2, %r5 +; locrh %r2, %r5 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; llcr %r2, %r2 -; llcr %r3, %r3 -; clr %r2, %r3 -; locrh %r2, %r3 +; llcr %r5, %r3 +; clr %r2, %r5 +; locrh %r2, %r5 ; br %r14 function %smax_i128(i128, i128) -> i128 { @@ -288,17 +288,17 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: ; lhr %r2, %r2 -; lhr %r3, %r3 -; cr %r2, %r3 -; locrl %r2, %r3 +; lhr %r5, %r3 +; cr %r2, %r5 +; locrl %r2, %r5 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; lhr %r2, %r2 -; lhr %r3, %r3 -; cr %r2, %r3 -; locrl %r2, %r3 +; lhr %r5, %r3 +; cr %r2, %r5 +; locrl %r2, %r5 ; br %r14 function %smax_i8(i8, i8) -> i8 { @@ -310,17 +310,17 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: ; lbr %r2, %r2 -; lbr %r3, %r3 -; cr %r2, %r3 -; locrl %r2, %r3 +; lbr %r5, %r3 +; cr %r2, %r5 +; locrl %r2, %r5 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; lbr %r2, %r2 -; lbr %r3, %r3 -; cr %r2, %r3 -; locrl %r2, %r3 +; lbr %r5, %r3 +; cr %r2, %r5 +; locrl %r2, %r5 ; br %r14 function %smin_i128(i128, i128) -> i128 { @@ -395,17 +395,17 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: ; lhr %r2, %r2 -; lhr %r3, %r3 -; 
cr %r2, %r3 -; locrh %r2, %r3 +; lhr %r5, %r3 +; cr %r2, %r5 +; locrh %r2, %r5 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; lhr %r2, %r2 -; lhr %r3, %r3 -; cr %r2, %r3 -; locrh %r2, %r3 +; lhr %r5, %r3 +; cr %r2, %r5 +; locrh %r2, %r5 ; br %r14 function %smin_i8(i8, i8) -> i8 { @@ -417,16 +417,16 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: ; lbr %r2, %r2 -; lbr %r3, %r3 -; cr %r2, %r3 -; locrh %r2, %r3 +; lbr %r5, %r3 +; cr %r2, %r5 +; locrh %r2, %r5 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; lbr %r2, %r2 -; lbr %r3, %r3 -; cr %r2, %r3 -; locrh %r2, %r3 +; lbr %r5, %r3 +; cr %r2, %r5 +; locrh %r2, %r5 ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/multivalue-ret.clif b/cranelift/filetests/filetests/isa/s390x/multivalue-ret.clif index d1f01ff09ba3..755b7fa9539a 100644 --- a/cranelift/filetests/filetests/isa/s390x/multivalue-ret.clif +++ b/cranelift/filetests/filetests/isa/s390x/multivalue-ret.clif @@ -39,34 +39,34 @@ block1: } ; VCode: -; stmg %r6, %r15, 48(%r15) +; stmg %r7, %r15, 56(%r15) ; block0: -; lghi %r13, 1 +; lghi %r7, 1 ; lghi %r3, 2 ; lghi %r4, 3 ; lghi %r5, 4 -; lghi %r14, 5 -; lghi %r6, 6 -; stg %r14, 0(%r2) -; stg %r6, 8(%r2) -; lgr %r2, %r13 -; lmg %r6, %r15, 48(%r15) +; lghi %r8, 5 +; lghi %r9, 6 +; stg %r8, 0(%r2) +; stg %r9, 8(%r2) +; lgr %r2, %r7 +; lmg %r7, %r15, 56(%r15) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; stmg %r6, %r15, 0x30(%r15) +; stmg %r7, %r15, 0x38(%r15) ; block1: ; offset 0x6 -; lghi %r13, 1 +; lghi %r7, 1 ; lghi %r3, 2 ; lghi %r4, 3 ; lghi %r5, 4 -; lghi %r14, 5 -; lghi %r6, 6 -; stg %r14, 0(%r2) -; stg %r6, 8(%r2) -; lgr %r2, %r13 -; lmg %r6, %r15, 0x30(%r15) +; lghi %r8, 5 +; lghi %r9, 6 +; stg %r8, 0(%r2) +; stg %r9, 8(%r2) +; lgr %r2, %r7 +; lmg %r7, %r15, 0x38(%r15) ; br %r14 function %f3() -> f64, f64, f64, f64 { diff --git a/cranelift/filetests/filetests/isa/s390x/nan-canonicalization.clif b/cranelift/filetests/filetests/isa/s390x/nan-canonicalization.clif index 7c14d329d83e..638871475170 100644 --- a/cranelift/filetests/filetests/isa/s390x/nan-canonicalization.clif +++ b/cranelift/filetests/filetests/isa/s390x/nan-canonicalization.clif @@ -10,25 +10,25 @@ block0(v0: f32x4, v1: f32x4): ; VCode: ; block0: -; vfasb %v17, %v24, %v25 -; larl %r1, [const(0)] ; vlef %v18, 0(%r1), 0 -; vrepf %v18, %v18, 0 -; vfchesb %v7, %v17, %v17 -; vfchesb %v19, %v17, %v17 -; vno %v19, %v7, %v19 -; vsel %v24, %v18, %v17, %v19 +; vfasb %v16, %v24, %v25 +; larl %r1, [const(0)] ; vlef %v17, 0(%r1), 0 +; vrepf %v17, %v17, 0 +; vfchesb %v7, %v16, %v16 +; vfchesb %v18, %v16, %v16 +; vno %v18, %v7, %v18 +; vsel %v24, %v17, %v16, %v18 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vfasb %v17, %v24, %v25 +; vfasb %v16, %v24, %v25 ; larl %r1, 0x38 -; vlef %v18, 0(%r1), 0 -; vrepf %v18, %v18, 0 -; vfchesb %v7, %v17, %v17 -; vfchesb %v19, %v17, %v17 -; vno %v19, %v7, %v19 -; vsel %v24, %v18, %v17, %v19 +; vlef %v17, 0(%r1), 0 +; vrepf %v17, %v17, 0 +; vfchesb %v7, %v16, %v16 +; vfchesb %v18, %v16, %v16 +; vno %v18, %v7, %v18 +; vsel %v24, %v17, %v16, %v18 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -45,33 +45,33 @@ block0(v0: f64, v1: f64): ; VCode: ; block0: -; wfadb %v21, %f0, %f2 -; larl %r1, [const(0)] ; vleg %v22, 0(%r1), 0 -; vgbm %v20, 0 -; vpdi %v22, %v22, %v20, 0 -; vgbm %v20, 0 -; vpdi %v23, %v21, %v20, 0 -; vfchedb %v19, %v23, %v23 -; vfchedb %v21, %v23, %v23 -; vno %v24, %v19, %v21 -; vsel %v21, %v22, %v23, %v24 -; vrepg %v0, %v21, 0 +; wfadb %v17, %f0, %f2 +; larl %r1, [const(0)] ; vleg %v16, 0(%r1), 0 +; 
vgbm %v18, 0 +; vpdi %v16, %v16, %v18, 0 +; vgbm %v18, 0 +; vpdi %v17, %v17, %v18, 0 +; vfchedb %v18, %v17, %v17 +; vfchedb %v19, %v17, %v17 +; vno %v18, %v18, %v19 +; vsel %v16, %v16, %v17, %v18 +; vrepg %v0, %v16, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; wfadb %v21, %f0, %f2 +; wfadb %v17, %f0, %f2 ; larl %r1, 0x50 -; vleg %v22, 0(%r1), 0 -; vzero %v20 -; vpdi %v22, %v22, %v20, 0 -; vzero %v20 -; vpdi %v23, %v21, %v20, 0 -; vfchedb %v19, %v23, %v23 -; vfchedb %v21, %v23, %v23 -; vno %v24, %v19, %v21 -; vsel %v21, %v22, %v23, %v24 -; vrepg %v0, %v21, 0 +; vleg %v16, 0(%r1), 0 +; vzero %v18 +; vpdi %v16, %v16, %v18, 0 +; vzero %v18 +; vpdi %v17, %v17, %v18, 0 +; vfchedb %v18, %v17, %v17 +; vfchedb %v19, %v17, %v17 +; vno %v18, %v18, %v19 +; vsel %v16, %v16, %v17, %v18 +; vrepg %v0, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -88,33 +88,33 @@ block0(v0: f32, v1: f32): ; VCode: ; block0: -; wfasb %v21, %f0, %f2 -; larl %r1, [const(0)] ; vlef %v22, 0(%r1), 0 -; vgbm %v20, 61440 -; vn %v22, %v22, %v20 -; vgbm %v20, 61440 -; vn %v23, %v21, %v20 -; vfchesb %v19, %v23, %v23 -; vfchesb %v21, %v23, %v23 -; vno %v24, %v19, %v21 -; vsel %v21, %v22, %v23, %v24 -; vrepf %v0, %v21, 0 +; wfasb %v17, %f0, %f2 +; larl %r1, [const(0)] ; vlef %v16, 0(%r1), 0 +; vgbm %v18, 61440 +; vn %v16, %v16, %v18 +; vgbm %v18, 61440 +; vn %v17, %v17, %v18 +; vfchesb %v18, %v17, %v17 +; vfchesb %v19, %v17, %v17 +; vno %v18, %v18, %v19 +; vsel %v16, %v16, %v17, %v18 +; vrepf %v0, %v16, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; wfasb %v21, %f0, %f2 +; wfasb %v17, %f0, %f2 ; larl %r1, 0x50 -; vlef %v22, 0(%r1), 0 -; vgbm %v20, 0xf000 -; vn %v22, %v22, %v20 -; vgbm %v20, 0xf000 -; vn %v23, %v21, %v20 -; vfchesb %v19, %v23, %v23 -; vfchesb %v21, %v23, %v23 -; vno %v24, %v19, %v21 -; vsel %v21, %v22, %v23, %v24 -; vrepf %v0, %v21, 0 +; vlef %v16, 0(%r1), 0 +; vgbm %v18, 0xf000 +; vn %v16, %v16, %v18 +; vgbm %v18, 0xf000 +; vn %v17, %v17, %v18 +; vfchesb %v18, %v17, %v17 +; vfchesb %v19, %v17, %v17 +; vno %v18, %v18, %v19 +; vsel %v16, %v16, %v17, %v18 +; vrepf %v0, %v16, 0 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 diff --git a/cranelift/filetests/filetests/isa/s390x/preserve-all.clif b/cranelift/filetests/filetests/isa/s390x/preserve-all.clif index 5036cedbe78d..b484e9f7cafd 100644 --- a/cranelift/filetests/filetests/isa/s390x/preserve-all.clif +++ b/cranelift/filetests/filetests/isa/s390x/preserve-all.clif @@ -78,8 +78,8 @@ block0(v0: i64): ; vst %v31, 704(%r15) ; stmg %r0, %r5, 160(%r15) ; block0: -; bras %r1, 12 ; data %libcall + 0 ; lg %r4, 0(%r1) -; basr %r14, %r4 +; bras %r1, 12 ; data %libcall + 0 ; lg %r3, 0(%r1) +; basr %r14, %r3 ; vl %v0, 208(%r15) ; vl %v1, 224(%r15) ; vl %v2, 240(%r15) @@ -159,8 +159,8 @@ block0(v0: i64): ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r4, 0(%r1) -; basr %r14, %r4 +; lg %r3, 0(%r1) +; basr %r14, %r3 ; vl %v0, 0xd0(%r15) ; vl %v1, 0xe0(%r15) ; vl %v2, 0xf0(%r15) diff --git a/cranelift/filetests/filetests/isa/s390x/return-call-indirect.clif b/cranelift/filetests/filetests/isa/s390x/return-call-indirect.clif index 5451c38ea033..6d426ad9da38 100644 --- a/cranelift/filetests/filetests/isa/s390x/return-call-indirect.clif +++ b/cranelift/filetests/filetests/isa/s390x/return-call-indirect.clif @@ -46,8 +46,8 @@ block0(v0: i64): ; aghi %r15, -160 ; stg %r1, 0(%r15) ; block0: -; bras %r1, 12 ; data %callee_i64 + 0 ; lg %r4, 0(%r1) -; lmg %r14, %r15, 272(%r15) ; br %r4 +; bras %r1, 12 ; data %callee_i64 + 0 ; lg %r3, 0(%r1) +; 
lmg %r14, %r15, 272(%r15) ; br %r3 ; ; Disassembled: ; block0: ; offset 0x0 @@ -61,9 +61,9 @@ block0(v0: i64): ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r4, 0(%r1) +; lg %r3, 0(%r1) ; lmg %r14, %r15, 0x110(%r15) -; br %r4 +; br %r3 ;;;; Test colocated tail calls ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -82,8 +82,8 @@ block0(v0: i64): ; aghi %r15, -160 ; stg %r1, 0(%r15) ; block0: -; larl %r4, %callee_i64 + 0 -; lmg %r14, %r15, 272(%r15) ; br %r4 +; larl %r3, %callee_i64 + 0 +; lmg %r14, %r15, 272(%r15) ; br %r3 ; ; Disassembled: ; block0: ; offset 0x0 @@ -92,9 +92,9 @@ block0(v0: i64): ; aghi %r15, -0xa0 ; stg %r1, 0(%r15) ; block1: ; offset 0x14 -; larl %r4, 0x14 ; reloc_external PCRel32Dbl %callee_i64 2 +; larl %r3, 0x14 ; reloc_external PCRel32Dbl %callee_i64 2 ; lmg %r14, %r15, 0x110(%r15) -; br %r4 +; br %r3 ;;;; Test passing `f64`s ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -150,8 +150,8 @@ block0(v0: f64): ; aghi %r15, -160 ; stg %r1, 0(%r15) ; block0: -; bras %r1, 12 ; data %callee_f64 + 0 ; lg %r4, 0(%r1) -; lmg %r14, %r15, 272(%r15) ; br %r4 +; bras %r1, 12 ; data %callee_f64 + 0 ; lg %r2, 0(%r1) +; lmg %r14, %r15, 272(%r15) ; br %r2 ; ; Disassembled: ; block0: ; offset 0x0 @@ -165,9 +165,9 @@ block0(v0: f64): ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r4, 0(%r1) +; lg %r2, 0(%r1) ; lmg %r14, %r15, 0x110(%r15) -; br %r4 +; br %r2 ;;;; Test passing `i8`s ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -184,8 +184,8 @@ block0(v0: i8): ; aghi %r15, -160 ; stg %r1, 0(%r15) ; block0: -; llcr %r4, %r2 -; clfi %r4, 0 +; llcr %r2, %r2 +; clfi %r2, 0 ; lhi %r2, 0 ; lochie %r2, 1 ; lmg %r14, %r15, 272(%r15) @@ -198,8 +198,8 @@ block0(v0: i8): ; aghi %r15, -0xa0 ; stg %r1, 0(%r15) ; block1: ; offset 0x14 -; llcr %r4, %r2 -; clfi %r4, 0 +; llcr %r2, %r2 +; clfi %r2, 0 ; lhi %r2, 0 ; lochie %r2, 1 ; lmg %r14, %r15, 0x110(%r15) @@ -220,8 +220,8 @@ block0(v0: i8): ; aghi %r15, -160 ; stg %r1, 0(%r15) ; block0: -; bras %r1, 12 ; data %callee_i8 + 0 ; lg %r4, 0(%r1) -; lmg %r14, %r15, 272(%r15) ; br %r4 +; bras %r1, 12 ; data %callee_i8 + 0 ; lg %r3, 0(%r1) +; lmg %r14, %r15, 272(%r15) ; br %r3 ; ; Disassembled: ; block0: ; offset 0x0 @@ -235,9 +235,9 @@ block0(v0: i8): ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r4, 0(%r1) +; lg %r3, 0(%r1) ; lmg %r14, %r15, 0x110(%r15) -; br %r4 +; br %r3 ;;;; Test passing many arguments on stack ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -293,74 +293,74 @@ block0: ; stg %r7, 240(%r15) ; lghi %r7, 35 ; stg %r7, 232(%r15) -; lghi %r14, 40 -; lghi %r8, 45 -; lghi %r9, 50 -; lghi %r10, 55 -; lghi %r11, 60 -; lghi %r12, 65 -; lghi %r13, 70 -; lghi %r2, 75 +; lghi %r8, 40 +; lghi %r9, 45 +; lghi %r10, 50 +; lghi %r11, 55 +; lghi %r12, 60 +; lghi %r13, 65 +; lghi %r14, 70 +; lghi %r3, 75 +; stg %r3, 224(%r15) ; lghi %r3, 80 ; lghi %r4, 85 -; lghi %r6, 90 -; stg %r6, 224(%r15) +; lghi %r5, 90 ; lghi %r6, 95 ; lghi %r7, 100 -; lghi %r5, 105 -; stg %r5, 216(%r15) -; lghi %r5, 110 -; stg %r5, 208(%r15) -; lghi %r5, 115 -; stg %r5, 200(%r15) -; lghi %r5, 120 -; stg %r5, 192(%r15) -; lghi %r5, 125 -; stg %r5, 184(%r15) -; lghi %r5, 130 -; stg %r5, 176(%r15) -; lghi %r5, 135 -; stg %r5, 168(%r15) -; bras %r1, 12 ; data %tail_callee_stack_args + 0 ; lg %r5, 0(%r1) -; stg %r5, 160(%r15) -; lg %r5, 600(%r15) -; stg %r5, 280(%r15) -; stg %r14, 440(%r15) -; stg %r8, 448(%r15) -; stg %r9, 456(%r15) -; stg %r10, 464(%r15) -; stg %r11, 472(%r15) -; stg %r12, 480(%r15) -; stg %r13, 
488(%r15) +; lghi %r2, 105 +; stg %r2, 216(%r15) +; lghi %r2, 110 +; stg %r2, 208(%r15) +; lghi %r2, 115 +; stg %r2, 200(%r15) +; lghi %r2, 120 +; stg %r2, 192(%r15) +; lghi %r2, 125 +; stg %r2, 184(%r15) +; lghi %r2, 130 +; stg %r2, 176(%r15) +; lghi %r2, 135 +; stg %r2, 168(%r15) +; bras %r1, 12 ; data %tail_callee_stack_args + 0 ; lg %r2, 0(%r1) +; stg %r2, 160(%r15) +; lg %r2, 600(%r15) +; stg %r2, 280(%r15) +; stg %r8, 440(%r15) +; stg %r9, 448(%r15) +; stg %r10, 456(%r15) +; stg %r11, 464(%r15) +; stg %r12, 472(%r15) +; stg %r13, 480(%r15) +; stg %r14, 488(%r15) +; lg %r2, 224(%r15) ; stg %r2, 496(%r15) ; stg %r3, 504(%r15) ; stg %r4, 512(%r15) -; lg %r2, 224(%r15) -; stg %r2, 520(%r15) +; stg %r5, 520(%r15) ; stg %r6, 528(%r15) ; stg %r7, 536(%r15) -; lg %r5, 216(%r15) -; stg %r5, 544(%r15) -; lg %r5, 208(%r15) -; stg %r5, 552(%r15) -; lg %r5, 200(%r15) -; stg %r5, 560(%r15) -; lg %r5, 192(%r15) -; stg %r5, 568(%r15) -; lg %r5, 184(%r15) -; stg %r5, 576(%r15) -; lg %r5, 176(%r15) -; stg %r5, 584(%r15) -; lg %r5, 168(%r15) -; stg %r5, 592(%r15) +; lg %r2, 216(%r15) +; stg %r2, 544(%r15) +; lg %r2, 208(%r15) +; stg %r2, 552(%r15) +; lg %r2, 200(%r15) +; stg %r2, 560(%r15) +; lg %r2, 192(%r15) +; stg %r2, 568(%r15) +; lg %r2, 184(%r15) +; stg %r2, 576(%r15) +; lg %r2, 176(%r15) +; stg %r2, 584(%r15) +; lg %r2, 168(%r15) +; stg %r2, 592(%r15) ; lg %r2, 272(%r15) ; lg %r3, 264(%r15) ; lg %r4, 256(%r15) ; lg %r5, 248(%r15) ; lg %r6, 240(%r15) ; lg %r7, 232(%r15) -; lg %r12, 160(%r15) -; lgr %r1, %r12 ; aghi %r15, 280 ; lmg %r8, %r14, 384(%r15) ; br %r1 ; callee_pop_size 320 +; lg %r8, 160(%r15) +; lgr %r1, %r8 ; aghi %r15, 280 ; lmg %r8, %r14, 384(%r15) ; br %r1 ; callee_pop_size 320 ; ; Disassembled: ; block0: ; offset 0x0 @@ -381,79 +381,79 @@ block0: ; stg %r7, 0xf0(%r15) ; lghi %r7, 0x23 ; stg %r7, 0xe8(%r15) -; lghi %r14, 0x28 -; lghi %r8, 0x2d -; lghi %r9, 0x32 -; lghi %r10, 0x37 -; lghi %r11, 0x3c -; lghi %r12, 0x41 -; lghi %r13, 0x46 -; lghi %r2, 0x4b +; lghi %r8, 0x28 +; lghi %r9, 0x2d +; lghi %r10, 0x32 +; lghi %r11, 0x37 +; lghi %r12, 0x3c +; lghi %r13, 0x41 +; lghi %r14, 0x46 +; lghi %r3, 0x4b +; stg %r3, 0xe0(%r15) ; lghi %r3, 0x50 ; lghi %r4, 0x55 -; lghi %r6, 0x5a -; stg %r6, 0xe0(%r15) +; lghi %r5, 0x5a ; lghi %r6, 0x5f ; lghi %r7, 0x64 -; lghi %r5, 0x69 -; stg %r5, 0xd8(%r15) -; lghi %r5, 0x6e -; stg %r5, 0xd0(%r15) -; lghi %r5, 0x73 -; stg %r5, 0xc8(%r15) -; lghi %r5, 0x78 -; stg %r5, 0xc0(%r15) -; lghi %r5, 0x7d -; stg %r5, 0xb8(%r15) -; lghi %r5, 0x82 -; stg %r5, 0xb0(%r15) -; lghi %r5, 0x87 -; stg %r5, 0xa8(%r15) +; lghi %r2, 0x69 +; stg %r2, 0xd8(%r15) +; lghi %r2, 0x6e +; stg %r2, 0xd0(%r15) +; lghi %r2, 0x73 +; stg %r2, 0xc8(%r15) +; lghi %r2, 0x78 +; stg %r2, 0xc0(%r15) +; lghi %r2, 0x7d +; stg %r2, 0xb8(%r15) +; lghi %r2, 0x82 +; stg %r2, 0xb0(%r15) +; lghi %r2, 0x87 +; stg %r2, 0xa8(%r15) ; bras %r1, 0xdc ; .byte 0x00, 0x00 ; reloc_external Abs8 %tail_callee_stack_args 0 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r5, 0(%r1) -; stg %r5, 0xa0(%r15) -; lg %r5, 0x258(%r15) -; stg %r5, 0x118(%r15) -; stg %r14, 0x1b8(%r15) -; stg %r8, 0x1c0(%r15) -; stg %r9, 0x1c8(%r15) -; stg %r10, 0x1d0(%r15) -; stg %r11, 0x1d8(%r15) -; stg %r12, 0x1e0(%r15) -; stg %r13, 0x1e8(%r15) +; lg %r2, 0(%r1) +; stg %r2, 0xa0(%r15) +; lg %r2, 0x258(%r15) +; stg %r2, 0x118(%r15) +; stg %r8, 0x1b8(%r15) +; stg %r9, 0x1c0(%r15) +; stg %r10, 0x1c8(%r15) +; stg %r11, 0x1d0(%r15) +; stg %r12, 0x1d8(%r15) +; stg %r13, 0x1e0(%r15) +; stg %r14, 0x1e8(%r15) +; lg %r2, 0xe0(%r15) ; 
stg %r2, 0x1f0(%r15) ; stg %r3, 0x1f8(%r15) ; stg %r4, 0x200(%r15) -; lg %r2, 0xe0(%r15) -; stg %r2, 0x208(%r15) +; stg %r5, 0x208(%r15) ; stg %r6, 0x210(%r15) ; stg %r7, 0x218(%r15) -; lg %r5, 0xd8(%r15) -; stg %r5, 0x220(%r15) -; lg %r5, 0xd0(%r15) -; stg %r5, 0x228(%r15) -; lg %r5, 0xc8(%r15) -; stg %r5, 0x230(%r15) -; lg %r5, 0xc0(%r15) -; stg %r5, 0x238(%r15) -; lg %r5, 0xb8(%r15) -; stg %r5, 0x240(%r15) -; lg %r5, 0xb0(%r15) -; stg %r5, 0x248(%r15) -; lg %r5, 0xa8(%r15) -; stg %r5, 0x250(%r15) +; lg %r2, 0xd8(%r15) +; stg %r2, 0x220(%r15) +; lg %r2, 0xd0(%r15) +; stg %r2, 0x228(%r15) +; lg %r2, 0xc8(%r15) +; stg %r2, 0x230(%r15) +; lg %r2, 0xc0(%r15) +; stg %r2, 0x238(%r15) +; lg %r2, 0xb8(%r15) +; stg %r2, 0x240(%r15) +; lg %r2, 0xb0(%r15) +; stg %r2, 0x248(%r15) +; lg %r2, 0xa8(%r15) +; stg %r2, 0x250(%r15) ; lg %r2, 0x110(%r15) ; lg %r3, 0x108(%r15) ; lg %r4, 0x100(%r15) ; lg %r5, 0xf8(%r15) ; lg %r6, 0xf0(%r15) ; lg %r7, 0xe8(%r15) -; lg %r12, 0xa0(%r15) -; lgr %r1, %r12 +; lg %r8, 0xa0(%r15) +; lgr %r1, %r8 ; aghi %r15, 0x118 ; lmg %r8, %r14, 0x180(%r15) ; br %r1 diff --git a/cranelift/filetests/filetests/isa/s390x/return-call.clif b/cranelift/filetests/filetests/isa/s390x/return-call.clif index f8c9bf5cc8e0..71f42fa45db0 100644 --- a/cranelift/filetests/filetests/isa/s390x/return-call.clif +++ b/cranelift/filetests/filetests/isa/s390x/return-call.clif @@ -45,8 +45,8 @@ block0(v0: i64): ; aghi %r15, -160 ; stg %r1, 0(%r15) ; block0: -; bras %r1, 12 ; data %callee_i64 + 0 ; lg %r4, 0(%r1) -; lmg %r14, %r15, 272(%r15) ; br %r4 +; bras %r1, 12 ; data %callee_i64 + 0 ; lg %r3, 0(%r1) +; lmg %r14, %r15, 272(%r15) ; br %r3 ; ; Disassembled: ; block0: ; offset 0x0 @@ -60,9 +60,9 @@ block0(v0: i64): ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r4, 0(%r1) +; lg %r3, 0(%r1) ; lmg %r14, %r15, 0x110(%r15) -; br %r4 +; br %r3 function %call_i64_multiret(i64) -> i64, i64, i64, i64, i64, i64, i64, i64 tail { fn0 = %callee_i64_multiret(i64) -> i64, i64, i64, i64, i64, i64, i64, i64 tail @@ -77,8 +77,8 @@ block0(v0: i64): ; aghi %r15, -160 ; stg %r1, 0(%r15) ; block0: -; bras %r1, 12 ; data %callee_i64_multiret + 0 ; lg %r5, 0(%r1) -; lmg %r14, %r15, 272(%r15) ; br %r5 +; bras %r1, 12 ; data %callee_i64_multiret + 0 ; lg %r4, 0(%r1) +; lmg %r14, %r15, 272(%r15) ; br %r4 ; ; Disassembled: ; block0: ; offset 0x0 @@ -92,9 +92,9 @@ block0(v0: i64): ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r5, 0(%r1) +; lg %r4, 0(%r1) ; lmg %r14, %r15, 0x110(%r15) -; br %r5 +; br %r4 function %call_i64_multiret_clobber(i64) -> i64, i64, i64, i64, i64, i64, i64, i64 tail { fn0 = %callee_i64_multiret(i64) -> i64, i64, i64, i64, i64, i64, i64, i64 tail @@ -106,46 +106,46 @@ block0(v0: i64): } ; VCode: -; stmg %r9, %r15, 72(%r15) +; stmg %r8, %r15, 64(%r15) ; lgr %r1, %r15 ; aghi %r15, -160 ; stg %r1, 0(%r15) ; block0: -; lgr %r9, %r2 -; bras %r1, 12 ; data %callee_i64 + 0 ; lg %r7, 0(%r1) +; lgr %r8, %r2 +; bras %r1, 12 ; data %callee_i64 + 0 ; lg %r5, 0(%r1) ; lgr %r2, %r3 -; basr %r14, %r7 -; bras %r1, 12 ; data %callee_i64_multiret + 0 ; lg %r7, 0(%r1) +; basr %r14, %r5 +; bras %r1, 12 ; data %callee_i64_multiret + 0 ; lg %r5, 0(%r1) ; lgr %r3, %r2 -; lgr %r2, %r9 -; lmg %r9, %r15, 232(%r15) ; br %r7 +; lgr %r2, %r8 +; lmg %r8, %r15, 224(%r15) ; br %r5 ; ; Disassembled: ; block0: ; offset 0x0 -; stmg %r9, %r15, 0x48(%r15) +; stmg %r8, %r15, 0x40(%r15) ; lgr %r1, %r15 ; aghi %r15, -0xa0 ; stg %r1, 0(%r15) ; block1: ; offset 0x14 -; lgr %r9, %r2 +; lgr %r8, %r2 ; bras %r1, 0x24 
; .byte 0x00, 0x00 ; reloc_external Abs8 %callee_i64 0 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r7, 0(%r1) +; lg %r5, 0(%r1) ; lgr %r2, %r3 -; basr %r14, %r7 +; basr %r14, %r5 ; bras %r1, 0x3c ; .byte 0x00, 0x00 ; reloc_external Abs8 %callee_i64_multiret 0 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r7, 0(%r1) +; lg %r5, 0(%r1) ; lgr %r3, %r2 -; lgr %r2, %r9 -; lmg %r9, %r15, 0xe8(%r15) -; br %r7 +; lgr %r2, %r8 +; lmg %r8, %r15, 0xe0(%r15) +; br %r5 ;;;; Test colocated tail calls ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -226,8 +226,8 @@ block0(v0: f64): ; aghi %r15, -160 ; stg %r1, 0(%r15) ; block0: -; bras %r1, 12 ; data %callee_f64 + 0 ; lg %r4, 0(%r1) -; lmg %r14, %r15, 272(%r15) ; br %r4 +; bras %r1, 12 ; data %callee_f64 + 0 ; lg %r2, 0(%r1) +; lmg %r14, %r15, 272(%r15) ; br %r2 ; ; Disassembled: ; block0: ; offset 0x0 @@ -241,9 +241,9 @@ block0(v0: f64): ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r4, 0(%r1) +; lg %r2, 0(%r1) ; lmg %r14, %r15, 0x110(%r15) -; br %r4 +; br %r2 ;;;; Test passing `i8`s ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -260,8 +260,8 @@ block0(v0: i8): ; aghi %r15, -160 ; stg %r1, 0(%r15) ; block0: -; llcr %r4, %r2 -; clfi %r4, 0 +; llcr %r2, %r2 +; clfi %r2, 0 ; lhi %r2, 0 ; lochie %r2, 1 ; lmg %r14, %r15, 272(%r15) @@ -274,8 +274,8 @@ block0(v0: i8): ; aghi %r15, -0xa0 ; stg %r1, 0(%r15) ; block1: ; offset 0x14 -; llcr %r4, %r2 -; clfi %r4, 0 +; llcr %r2, %r2 +; clfi %r2, 0 ; lhi %r2, 0 ; lochie %r2, 1 ; lmg %r14, %r15, 0x110(%r15) @@ -294,8 +294,8 @@ block0(v0: i8): ; aghi %r15, -160 ; stg %r1, 0(%r15) ; block0: -; bras %r1, 12 ; data %callee_i8 + 0 ; lg %r4, 0(%r1) -; lmg %r14, %r15, 272(%r15) ; br %r4 +; bras %r1, 12 ; data %callee_i8 + 0 ; lg %r3, 0(%r1) +; lmg %r14, %r15, 272(%r15) ; br %r3 ; ; Disassembled: ; block0: ; offset 0x0 @@ -309,9 +309,9 @@ block0(v0: i8): ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r4, 0(%r1) +; lg %r3, 0(%r1) ; lmg %r14, %r15, 0x110(%r15) -; br %r4 +; br %r3 ;;;; Test passing fewer arguments on the stack ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -349,13 +349,13 @@ block0(v0: i32, v1: i32, v2: i32, v3: i32, v4: i32, v5: i32, v6: i32, v7: i32, v } ; VCode: -; stmg %r9, %r15, 256(%r15) +; stmg %r8, %r15, 248(%r15) ; la %r1, 184(%r15) ; aghi %r15, -160 ; stg %r1, 0(%r15) ; block0: -; lgr %r9, %r7 -; lgr %r14, %r6 +; lgr %r8, %r7 +; lgr %r13, %r6 ; llgf %r6, 324(%r15) ; llgf %r7, 332(%r15) ; llgf %r2, 340(%r15) @@ -363,20 +363,20 @@ block0(v0: i32, v1: i32, v2: i32, v3: i32, v4: i32, v5: i32, v6: i32, v7: i32, v ; stg %r3, 176(%r15) ; st %r2, 340(%r15) ; lgr %r3, %r5 -; lgr %r5, %r9 +; lgr %r5, %r8 ; lgr %r2, %r4 -; lgr %r4, %r14 -; aghi %r15, 176 ; lmg %r9, %r14, 240(%r15) ; jg %one_stack_arg ; callee_pop_size 168 +; lgr %r4, %r13 +; aghi %r15, 176 ; lmg %r8, %r14, 232(%r15) ; jg %one_stack_arg ; callee_pop_size 168 ; ; Disassembled: ; block0: ; offset 0x0 -; stmg %r9, %r15, 0x100(%r15) +; stmg %r8, %r15, 0xf8(%r15) ; la %r1, 0xb8(%r15) ; aghi %r15, -0xa0 ; stg %r1, 0(%r15) ; block1: ; offset 0x14 -; lgr %r9, %r7 -; lgr %r14, %r6 +; lgr %r8, %r7 +; lgr %r13, %r6 ; llgf %r6, 0x144(%r15) ; llgf %r7, 0x14c(%r15) ; llgf %r2, 0x154(%r15) @@ -384,11 +384,11 @@ block0(v0: i32, v1: i32, v2: i32, v3: i32, v4: i32, v5: i32, v6: i32, v7: i32, v ; stg %r3, 0xb0(%r15) ; st %r2, 0x154(%r15) ; lgr %r3, %r5 -; lgr %r5, %r9 +; lgr %r5, %r8 ; lgr %r2, %r4 -; lgr %r4, %r14 +; lgr %r4, %r13 ; aghi %r15, 0xb0 -; lmg %r9, %r14, 0xf0(%r15) +; lmg 
%r8, %r14, 0xe8(%r15) ; jg 0x58 ; reloc_external PLTRel32Dbl %one_stack_arg 2 function %call_zero_stack_args(i32, i32, i32, i32, i32, i32, i32, i32, i8) -> i8 tail { @@ -429,15 +429,15 @@ block0(v0: i32, v1: i32, v2: i32, v3: i32, v4: i32, v5: i32, v6: i32): } ; VCode: -; stmg %r8, %r15, 232(%r15) +; stmg %r9, %r15, 240(%r15) ; la %r1, 168(%r15) ; aghi %r15, -176 ; stg %r1, 0(%r15) ; block0: -; lgr %r8, %r7 +; lgr %r14, %r7 ; llgf %r7, 340(%r15) -; lg %r10, 344(%r15) -; stg %r10, 160(%r15) +; lg %r9, 344(%r15) +; stg %r9, 160(%r15) ; st %r2, 324(%r15) ; st %r2, 332(%r15) ; st %r3, 340(%r15) @@ -445,20 +445,20 @@ block0(v0: i32, v1: i32, v2: i32, v3: i32, v4: i32, v5: i32, v6: i32): ; lgr %r3, %r4 ; lgr %r4, %r5 ; lgr %r5, %r6 -; lgr %r6, %r8 -; aghi %r15, 160 ; lmg %r8, %r14, 248(%r15) ; jg %call_one_stack_arg ; callee_pop_size 184 +; lgr %r6, %r14 +; aghi %r15, 160 ; lmg %r9, %r14, 256(%r15) ; jg %call_one_stack_arg ; callee_pop_size 184 ; ; Disassembled: ; block0: ; offset 0x0 -; stmg %r8, %r15, 0xe8(%r15) +; stmg %r9, %r15, 0xf0(%r15) ; la %r1, 0xa8(%r15) ; aghi %r15, -0xb0 ; stg %r1, 0(%r15) ; block1: ; offset 0x14 -; lgr %r8, %r7 +; lgr %r14, %r7 ; llgf %r7, 0x154(%r15) -; lg %r10, 0x158(%r15) -; stg %r10, 0xa0(%r15) +; lg %r9, 0x158(%r15) +; stg %r9, 0xa0(%r15) ; st %r2, 0x144(%r15) ; st %r2, 0x14c(%r15) ; st %r3, 0x154(%r15) @@ -466,9 +466,9 @@ block0(v0: i32, v1: i32, v2: i32, v3: i32, v4: i32, v5: i32, v6: i32): ; lgr %r3, %r4 ; lgr %r4, %r5 ; lgr %r5, %r6 -; lgr %r6, %r8 +; lgr %r6, %r14 ; aghi %r15, 0xa0 -; lmg %r8, %r14, 0xf8(%r15) +; lmg %r9, %r14, 0x100(%r15) ; jg 0x54 ; reloc_external PLTRel32Dbl %call_one_stack_arg 2 ;;;; Test passing many arguments on stack ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -552,64 +552,64 @@ block0: ; stg %r7, 232(%r15) ; lghi %r7, 35 ; stg %r7, 224(%r15) -; lghi %r14, 40 -; lghi %r8, 45 -; lghi %r9, 50 -; lghi %r10, 55 -; lghi %r11, 60 -; lghi %r12, 65 -; lghi %r13, 70 -; lghi %r2, 75 +; lghi %r8, 40 +; lghi %r9, 45 +; lghi %r10, 50 +; lghi %r11, 55 +; lghi %r12, 60 +; lghi %r13, 65 +; lghi %r14, 70 +; lghi %r3, 75 +; stg %r3, 216(%r15) ; lghi %r3, 80 ; lghi %r4, 85 -; lghi %r6, 90 -; stg %r6, 216(%r15) +; lghi %r5, 90 ; lghi %r6, 95 ; lghi %r7, 100 -; lghi %r5, 105 -; stg %r5, 208(%r15) -; lghi %r5, 110 -; stg %r5, 200(%r15) -; lghi %r5, 115 -; stg %r5, 192(%r15) -; lghi %r5, 120 -; stg %r5, 184(%r15) -; lghi %r5, 125 -; stg %r5, 176(%r15) -; lghi %r5, 130 -; stg %r5, 168(%r15) -; lghi %r5, 135 -; stg %r5, 160(%r15) -; lg %r5, 592(%r15) -; stg %r5, 272(%r15) -; stg %r14, 432(%r15) -; stg %r8, 440(%r15) -; stg %r9, 448(%r15) -; stg %r10, 456(%r15) -; stg %r11, 464(%r15) -; stg %r12, 472(%r15) -; stg %r13, 480(%r15) +; lghi %r2, 105 +; stg %r2, 208(%r15) +; lghi %r2, 110 +; stg %r2, 200(%r15) +; lghi %r2, 115 +; stg %r2, 192(%r15) +; lghi %r2, 120 +; stg %r2, 184(%r15) +; lghi %r2, 125 +; stg %r2, 176(%r15) +; lghi %r2, 130 +; stg %r2, 168(%r15) +; lghi %r2, 135 +; stg %r2, 160(%r15) +; lg %r2, 592(%r15) +; stg %r2, 272(%r15) +; stg %r8, 432(%r15) +; stg %r9, 440(%r15) +; stg %r10, 448(%r15) +; stg %r11, 456(%r15) +; stg %r12, 464(%r15) +; stg %r13, 472(%r15) +; stg %r14, 480(%r15) +; lg %r2, 216(%r15) ; stg %r2, 488(%r15) ; stg %r3, 496(%r15) ; stg %r4, 504(%r15) -; lg %r4, 216(%r15) -; stg %r4, 512(%r15) +; stg %r5, 512(%r15) ; stg %r6, 520(%r15) ; stg %r7, 528(%r15) -; lg %r5, 208(%r15) -; stg %r5, 536(%r15) -; lg %r5, 200(%r15) -; stg %r5, 544(%r15) -; lg %r5, 192(%r15) -; stg %r5, 552(%r15) -; lg %r5, 184(%r15) -; stg %r5, 560(%r15) -; lg 
%r5, 176(%r15) -; stg %r5, 568(%r15) -; lg %r5, 168(%r15) -; stg %r5, 576(%r15) -; lg %r5, 160(%r15) -; stg %r5, 584(%r15) +; lg %r2, 208(%r15) +; stg %r2, 536(%r15) +; lg %r2, 200(%r15) +; stg %r2, 544(%r15) +; lg %r2, 192(%r15) +; stg %r2, 552(%r15) +; lg %r2, 184(%r15) +; stg %r2, 560(%r15) +; lg %r2, 176(%r15) +; stg %r2, 568(%r15) +; lg %r2, 168(%r15) +; stg %r2, 576(%r15) +; lg %r2, 160(%r15) +; stg %r2, 584(%r15) ; bras %r1, 12 ; data %tail_callee_stack_args + 0 ; lg %r8, 0(%r1) ; lg %r2, 264(%r15) ; lg %r3, 256(%r15) @@ -638,64 +638,64 @@ block0: ; stg %r7, 0xe8(%r15) ; lghi %r7, 0x23 ; stg %r7, 0xe0(%r15) -; lghi %r14, 0x28 -; lghi %r8, 0x2d -; lghi %r9, 0x32 -; lghi %r10, 0x37 -; lghi %r11, 0x3c -; lghi %r12, 0x41 -; lghi %r13, 0x46 -; lghi %r2, 0x4b +; lghi %r8, 0x28 +; lghi %r9, 0x2d +; lghi %r10, 0x32 +; lghi %r11, 0x37 +; lghi %r12, 0x3c +; lghi %r13, 0x41 +; lghi %r14, 0x46 +; lghi %r3, 0x4b +; stg %r3, 0xd8(%r15) ; lghi %r3, 0x50 ; lghi %r4, 0x55 -; lghi %r6, 0x5a -; stg %r6, 0xd8(%r15) +; lghi %r5, 0x5a ; lghi %r6, 0x5f ; lghi %r7, 0x64 -; lghi %r5, 0x69 -; stg %r5, 0xd0(%r15) -; lghi %r5, 0x6e -; stg %r5, 0xc8(%r15) -; lghi %r5, 0x73 -; stg %r5, 0xc0(%r15) -; lghi %r5, 0x78 -; stg %r5, 0xb8(%r15) -; lghi %r5, 0x7d -; stg %r5, 0xb0(%r15) -; lghi %r5, 0x82 -; stg %r5, 0xa8(%r15) -; lghi %r5, 0x87 -; stg %r5, 0xa0(%r15) -; lg %r5, 0x250(%r15) -; stg %r5, 0x110(%r15) -; stg %r14, 0x1b0(%r15) -; stg %r8, 0x1b8(%r15) -; stg %r9, 0x1c0(%r15) -; stg %r10, 0x1c8(%r15) -; stg %r11, 0x1d0(%r15) -; stg %r12, 0x1d8(%r15) -; stg %r13, 0x1e0(%r15) +; lghi %r2, 0x69 +; stg %r2, 0xd0(%r15) +; lghi %r2, 0x6e +; stg %r2, 0xc8(%r15) +; lghi %r2, 0x73 +; stg %r2, 0xc0(%r15) +; lghi %r2, 0x78 +; stg %r2, 0xb8(%r15) +; lghi %r2, 0x7d +; stg %r2, 0xb0(%r15) +; lghi %r2, 0x82 +; stg %r2, 0xa8(%r15) +; lghi %r2, 0x87 +; stg %r2, 0xa0(%r15) +; lg %r2, 0x250(%r15) +; stg %r2, 0x110(%r15) +; stg %r8, 0x1b0(%r15) +; stg %r9, 0x1b8(%r15) +; stg %r10, 0x1c0(%r15) +; stg %r11, 0x1c8(%r15) +; stg %r12, 0x1d0(%r15) +; stg %r13, 0x1d8(%r15) +; stg %r14, 0x1e0(%r15) +; lg %r2, 0xd8(%r15) ; stg %r2, 0x1e8(%r15) ; stg %r3, 0x1f0(%r15) ; stg %r4, 0x1f8(%r15) -; lg %r4, 0xd8(%r15) -; stg %r4, 0x200(%r15) +; stg %r5, 0x200(%r15) ; stg %r6, 0x208(%r15) ; stg %r7, 0x210(%r15) -; lg %r5, 0xd0(%r15) -; stg %r5, 0x218(%r15) -; lg %r5, 0xc8(%r15) -; stg %r5, 0x220(%r15) -; lg %r5, 0xc0(%r15) -; stg %r5, 0x228(%r15) -; lg %r5, 0xb8(%r15) -; stg %r5, 0x230(%r15) -; lg %r5, 0xb0(%r15) -; stg %r5, 0x238(%r15) -; lg %r5, 0xa8(%r15) -; stg %r5, 0x240(%r15) -; lg %r5, 0xa0(%r15) -; stg %r5, 0x248(%r15) +; lg %r2, 0xd0(%r15) +; stg %r2, 0x218(%r15) +; lg %r2, 0xc8(%r15) +; stg %r2, 0x220(%r15) +; lg %r2, 0xc0(%r15) +; stg %r2, 0x228(%r15) +; lg %r2, 0xb8(%r15) +; stg %r2, 0x230(%r15) +; lg %r2, 0xb0(%r15) +; stg %r2, 0x238(%r15) +; lg %r2, 0xa8(%r15) +; stg %r2, 0x240(%r15) +; lg %r2, 0xa0(%r15) +; stg %r2, 0x248(%r15) ; bras %r1, 0x190 ; .byte 0x00, 0x00 ; reloc_external Abs8 %tail_callee_stack_args 0 ; .byte 0x00, 0x00 diff --git a/cranelift/filetests/filetests/isa/s390x/select-float.clif b/cranelift/filetests/filetests/isa/s390x/select-float.clif index a9ce284cac04..96847c7fffba 100644 --- a/cranelift/filetests/filetests/isa/s390x/select-float.clif +++ b/cranelift/filetests/filetests/isa/s390x/select-float.clif @@ -13,8 +13,8 @@ block0(v0: i8, v1: f16, v2: f16): ; VCode: ; block0: -; llcr %r2, %r2 -; clfi %r2, 42 +; llcr %r4, %r2 +; clfi %r4, 42 ; vlr %v16, %v0 ; vlr %v0, %v2 ; jne 10 ; vlr %v0, %v16 @@ -22,8 +22,8 @@ 
block0(v0: i8, v1: f16, v2: f16): ; ; Disassembled: ; block0: ; offset 0x0 -; llcr %r2, %r2 -; clfi %r2, 0x2a +; llcr %r4, %r2 +; clfi %r4, 0x2a ; vlr %v16, %v0 ; vlr %v0, %v2 ; jne 0x20 @@ -40,8 +40,8 @@ block0(v0: i8, v1: f32, v2: f32): ; VCode: ; block0: -; llcr %r2, %r2 -; clfi %r2, 42 +; llcr %r4, %r2 +; clfi %r4, 42 ; vlr %v16, %v0 ; vlr %v0, %v2 ; jne 10 ; vlr %v0, %v16 @@ -49,8 +49,8 @@ block0(v0: i8, v1: f32, v2: f32): ; ; Disassembled: ; block0: ; offset 0x0 -; llcr %r2, %r2 -; clfi %r2, 0x2a +; llcr %r4, %r2 +; clfi %r4, 0x2a ; vlr %v16, %v0 ; vlr %v0, %v2 ; jne 0x20 @@ -67,8 +67,8 @@ block0(v0: i8, v1: f64, v2: f64): ; VCode: ; block0: -; llcr %r2, %r2 -; clfi %r2, 42 +; llcr %r4, %r2 +; clfi %r4, 42 ; vlr %v16, %v0 ; vlr %v0, %v2 ; jne 10 ; vlr %v0, %v16 @@ -76,8 +76,8 @@ block0(v0: i8, v1: f64, v2: f64): ; ; Disassembled: ; block0: ; offset 0x0 -; llcr %r2, %r2 -; clfi %r2, 0x2a +; llcr %r4, %r2 +; clfi %r4, 0x2a ; vlr %v16, %v0 ; vlr %v0, %v2 ; jne 0x20 @@ -96,8 +96,8 @@ block0(v0: i8, v1: f128, v2: f128): ; block0: ; vl %v2, 0(%r4) ; vl %v7, 0(%r5) -; llcr %r5, %r3 -; clfi %r5, 42 +; llcr %r3, %r3 +; clfi %r3, 42 ; jne 10 ; vlr %v7, %v2 ; vst %v7, 0(%r2) ; br %r14 @@ -106,8 +106,8 @@ block0(v0: i8, v1: f128, v2: f128): ; block0: ; offset 0x0 ; vl %v2, 0(%r4) ; vl %v7, 0(%r5) -; llcr %r5, %r3 -; clfi %r5, 0x2a +; llcr %r3, %r3 +; clfi %r3, 0x2a ; jne 0x20 ; vlr %v7, %v2 ; vst %v7, 0(%r2) @@ -123,8 +123,8 @@ block0(v0: i16, v1: f16, v2: f16): ; VCode: ; block0: -; llhr %r2, %r2 -; clfi %r2, 42 +; llhr %r4, %r2 +; clfi %r4, 42 ; vlr %v16, %v0 ; vlr %v0, %v2 ; jne 10 ; vlr %v0, %v16 @@ -132,8 +132,8 @@ block0(v0: i16, v1: f16, v2: f16): ; ; Disassembled: ; block0: ; offset 0x0 -; llhr %r2, %r2 -; clfi %r2, 0x2a +; llhr %r4, %r2 +; clfi %r4, 0x2a ; vlr %v16, %v0 ; vlr %v0, %v2 ; jne 0x20 @@ -150,8 +150,8 @@ block0(v0: i16, v1: f32, v2: f32): ; VCode: ; block0: -; llhr %r2, %r2 -; clfi %r2, 42 +; llhr %r4, %r2 +; clfi %r4, 42 ; vlr %v16, %v0 ; vlr %v0, %v2 ; jne 10 ; vlr %v0, %v16 @@ -159,8 +159,8 @@ block0(v0: i16, v1: f32, v2: f32): ; ; Disassembled: ; block0: ; offset 0x0 -; llhr %r2, %r2 -; clfi %r2, 0x2a +; llhr %r4, %r2 +; clfi %r4, 0x2a ; vlr %v16, %v0 ; vlr %v0, %v2 ; jne 0x20 @@ -177,8 +177,8 @@ block0(v0: i16, v1: f64, v2: f64): ; VCode: ; block0: -; llhr %r2, %r2 -; clfi %r2, 42 +; llhr %r4, %r2 +; clfi %r4, 42 ; vlr %v16, %v0 ; vlr %v0, %v2 ; jne 10 ; vlr %v0, %v16 @@ -186,8 +186,8 @@ block0(v0: i16, v1: f64, v2: f64): ; ; Disassembled: ; block0: ; offset 0x0 -; llhr %r2, %r2 -; clfi %r2, 0x2a +; llhr %r4, %r2 +; clfi %r4, 0x2a ; vlr %v16, %v0 ; vlr %v0, %v2 ; jne 0x20 @@ -206,8 +206,8 @@ block0(v0: i16, v1: f128, v2: f128): ; block0: ; vl %v2, 0(%r4) ; vl %v7, 0(%r5) -; llhr %r5, %r3 -; clfi %r5, 42 +; llhr %r3, %r3 +; clfi %r3, 42 ; jne 10 ; vlr %v7, %v2 ; vst %v7, 0(%r2) ; br %r14 @@ -216,8 +216,8 @@ block0(v0: i16, v1: f128, v2: f128): ; block0: ; offset 0x0 ; vl %v2, 0(%r4) ; vl %v7, 0(%r5) -; llhr %r5, %r3 -; clfi %r5, 0x2a +; llhr %r3, %r3 +; clfi %r3, 0x2a ; jne 0x20 ; vlr %v7, %v2 ; vst %v7, 0(%r2) @@ -439,26 +439,26 @@ block0(v0: i128, v1: f16, v2: f16): ; VCode: ; block0: ; vl %v1, 0(%r2) -; lghi %r3, 42 -; vgbm %v17, 0 -; vlvgg %v17, %r3, 1 -; vceqgs %v16, %v1, %v17 -; vlr %v23, %v0 +; lghi %r2, 42 +; vgbm %v16, 0 +; vlvgg %v16, %r2, 1 +; vceqgs %v16, %v1, %v16 +; vlr %v16, %v0 ; vlr %v0, %v2 -; jne 10 ; vlr %v0, %v23 +; jne 10 ; vlr %v0, %v16 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v1, 0(%r2) -; lghi %r3, 0x2a -; vzero %v17 -; vlvgg %v17, 
%r3, 1 -; vceqgs %v16, %v1, %v17 -; vlr %v23, %v0 +; lghi %r2, 0x2a +; vzero %v16 +; vlvgg %v16, %r2, 1 +; vceqgs %v16, %v1, %v16 +; vlr %v16, %v0 ; vlr %v0, %v2 ; jne 0x32 -; vlr %v0, %v23 +; vlr %v0, %v16 ; br %r14 function %select_icmp_i128_f32(i128, f32, f32) -> f32 { @@ -473,26 +473,26 @@ block0(v0: i128, v1: f32, v2: f32): ; VCode: ; block0: ; vl %v1, 0(%r2) -; lghi %r3, 42 -; vgbm %v17, 0 -; vlvgg %v17, %r3, 1 -; vceqgs %v16, %v1, %v17 -; vlr %v23, %v0 +; lghi %r2, 42 +; vgbm %v16, 0 +; vlvgg %v16, %r2, 1 +; vceqgs %v16, %v1, %v16 +; vlr %v16, %v0 ; vlr %v0, %v2 -; jne 10 ; vlr %v0, %v23 +; jne 10 ; vlr %v0, %v16 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v1, 0(%r2) -; lghi %r3, 0x2a -; vzero %v17 -; vlvgg %v17, %r3, 1 -; vceqgs %v16, %v1, %v17 -; vlr %v23, %v0 +; lghi %r2, 0x2a +; vzero %v16 +; vlvgg %v16, %r2, 1 +; vceqgs %v16, %v1, %v16 +; vlr %v16, %v0 ; vlr %v0, %v2 ; jne 0x32 -; vlr %v0, %v23 +; vlr %v0, %v16 ; br %r14 function %select_icmp_i128_f64(i128, f64, f64) -> f64 { @@ -507,26 +507,26 @@ block0(v0: i128, v1: f64, v2: f64): ; VCode: ; block0: ; vl %v1, 0(%r2) -; lghi %r3, 42 -; vgbm %v17, 0 -; vlvgg %v17, %r3, 1 -; vceqgs %v16, %v1, %v17 -; vlr %v23, %v0 +; lghi %r2, 42 +; vgbm %v16, 0 +; vlvgg %v16, %r2, 1 +; vceqgs %v16, %v1, %v16 +; vlr %v16, %v0 ; vlr %v0, %v2 -; jne 10 ; vlr %v0, %v23 +; jne 10 ; vlr %v0, %v16 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v1, 0(%r2) -; lghi %r3, 0x2a -; vzero %v17 -; vlvgg %v17, %r3, 1 -; vceqgs %v16, %v1, %v17 -; vlr %v23, %v0 +; lghi %r2, 0x2a +; vzero %v16 +; vlvgg %v16, %r2, 1 +; vceqgs %v16, %v1, %v16 +; vlr %v16, %v0 ; vlr %v0, %v2 ; jne 0x32 -; vlr %v0, %v23 +; vlr %v0, %v16 ; br %r14 function %select_icmp_i128_f128(i128, f128, f128) -> f128 { @@ -544,9 +544,9 @@ block0(v0: i128, v1: f128, v2: f128): ; vl %v3, 0(%r4) ; vl %v16, 0(%r5) ; lghi %r3, 42 -; vgbm %v20, 0 -; vlvgg %v20, %r3, 1 -; vceqgs %v19, %v1, %v20 +; vgbm %v17, 0 +; vlvgg %v17, %r3, 1 +; vceqgs %v17, %v1, %v17 ; jne 10 ; vlr %v16, %v3 ; vst %v16, 0(%r2) ; br %r14 @@ -557,9 +557,9 @@ block0(v0: i128, v1: f128, v2: f128): ; vl %v3, 0(%r4) ; vl %v16, 0(%r5) ; lghi %r3, 0x2a -; vzero %v20 -; vlvgg %v20, %r3, 1 -; vceqgs %v19, %v1, %v20 +; vzero %v17 +; vlvgg %v17, %r3, 1 +; vceqgs %v17, %v1, %v17 ; jne 0x32 ; vlr %v16, %v3 ; vst %v16, 0(%r2) diff --git a/cranelift/filetests/filetests/isa/s390x/shift-rotate.clif b/cranelift/filetests/filetests/isa/s390x/shift-rotate.clif index 62776cbe83ba..ddf109cc14e4 100644 --- a/cranelift/filetests/filetests/isa/s390x/shift-rotate.clif +++ b/cranelift/filetests/filetests/isa/s390x/shift-rotate.clif @@ -14,12 +14,12 @@ block0(v0: i128, v1: i128): ; vl %v3, 0(%r4) ; vrepb %v6, %v3, 15 ; vlcb %v16, %v6 -; vslb %v18, %v1, %v16 -; vsl %v20, %v18, %v16 -; vsrlb %v22, %v1, %v6 -; vsrl %v24, %v22, %v6 -; vo %v26, %v20, %v24 -; vst %v26, 0(%r2) +; vslb %v17, %v1, %v16 +; vsl %v16, %v17, %v16 +; vsrlb %v17, %v1, %v6 +; vsrl %v17, %v17, %v6 +; vo %v18, %v16, %v17 +; vst %v18, 0(%r2) ; br %r14 ; ; Disassembled: @@ -28,12 +28,12 @@ block0(v0: i128, v1: i128): ; vl %v3, 0(%r4) ; vrepb %v6, %v3, 0xf ; vlcb %v16, %v6 -; vslb %v18, %v1, %v16 -; vsl %v20, %v18, %v16 -; vsrlb %v22, %v1, %v6 -; vsrl %v24, %v22, %v6 -; vo %v26, %v20, %v24 -; vst %v26, 0(%r2) +; vslb %v17, %v1, %v16 +; vsl %v16, %v17, %v16 +; vsrlb %v17, %v1, %v6 +; vsrl %v17, %v17, %v6 +; vo %v18, %v16, %v17 +; vst %v18, 0(%r2) ; br %r14 function %rotr_i128_reg(i128, i64) -> i128 { @@ -47,13 +47,13 @@ block0(v0: i128, v1: i64): ; vl %v1, 0(%r3) ; vlvgb %v5, 
%r4, 0 ; vrepb %v7, %v5, 0 -; vlcb %v17, %v7 -; vslb %v19, %v1, %v17 -; vsl %v21, %v19, %v17 -; vsrlb %v23, %v1, %v7 -; vsrl %v25, %v23, %v7 -; vo %v27, %v21, %v25 -; vst %v27, 0(%r2) +; vlcb %v16, %v7 +; vslb %v17, %v1, %v16 +; vsl %v16, %v17, %v16 +; vsrlb %v17, %v1, %v7 +; vsrl %v17, %v17, %v7 +; vo %v19, %v16, %v17 +; vst %v19, 0(%r2) ; br %r14 ; ; Disassembled: @@ -61,13 +61,13 @@ block0(v0: i128, v1: i64): ; vl %v1, 0(%r3) ; vlvgb %v5, %r4, 0 ; vrepb %v7, %v5, 0 -; vlcb %v17, %v7 -; vslb %v19, %v1, %v17 -; vsl %v21, %v19, %v17 -; vsrlb %v23, %v1, %v7 -; vsrl %v25, %v23, %v7 -; vo %v27, %v21, %v25 -; vst %v27, 0(%r2) +; vlcb %v16, %v7 +; vslb %v17, %v1, %v16 +; vsl %v16, %v17, %v16 +; vsrlb %v17, %v1, %v7 +; vsrl %v17, %v17, %v7 +; vo %v19, %v16, %v17 +; vst %v19, 0(%r2) ; br %r14 function %rotr_i128_imm(i128) -> i128 { @@ -83,11 +83,11 @@ block0(v0: i128): ; vrepib %v4, 17 ; vlcb %v6, %v4 ; vslb %v16, %v1, %v6 -; vsl %v18, %v16, %v6 -; vsrlb %v20, %v1, %v4 -; vsrl %v22, %v20, %v4 -; vo %v24, %v18, %v22 -; vst %v24, 0(%r2) +; vsl %v16, %v16, %v6 +; vsrlb %v17, %v1, %v4 +; vsrl %v17, %v17, %v4 +; vo %v16, %v16, %v17 +; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: @@ -96,11 +96,11 @@ block0(v0: i128): ; vrepib %v4, 0x11 ; vlcb %v6, %v4 ; vslb %v16, %v1, %v6 -; vsl %v18, %v16, %v6 -; vsrlb %v20, %v1, %v4 -; vsrl %v22, %v20, %v4 -; vo %v24, %v18, %v22 -; vst %v24, 0(%r2) +; vsl %v16, %v16, %v6 +; vsrlb %v17, %v1, %v4 +; vsrl %v17, %v17, %v4 +; vo %v16, %v16, %v17 +; vst %v16, 0(%r2) ; br %r14 function %rotr_i64_vr(i64, i128) -> i64 { @@ -112,17 +112,17 @@ block0(v0: i64, v1: i128): ; VCode: ; block0: ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; lcr %r4, %r3 -; rllg %r2, %r2, 0(%r4) +; vlgvg %r4, %v2, 1 +; lcr %r3, %r4 +; rllg %r2, %r2, 0(%r3) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; lcr %r4, %r3 -; rllg %r2, %r2, 0(%r4) +; vlgvg %r4, %v2, 1 +; lcr %r3, %r4 +; rllg %r2, %r2, 0(%r3) ; br %r14 function %rotr_i64_reg(i64, i64) -> i64 { @@ -133,14 +133,14 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; lcr %r5, %r3 -; rllg %r2, %r2, 0(%r5) +; lcr %r3, %r3 +; rllg %r2, %r2, 0(%r3) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lcr %r5, %r3 -; rllg %r2, %r2, 0(%r5) +; lcr %r3, %r3 +; rllg %r2, %r2, 0(%r3) ; br %r14 function %rotr_i64_imm(i64) -> i64 { @@ -169,17 +169,17 @@ block0(v0: i32, v1: i128): ; VCode: ; block0: ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; lcr %r4, %r3 -; rll %r2, %r2, 0(%r4) +; vlgvg %r4, %v2, 1 +; lcr %r3, %r4 +; rll %r2, %r2, 0(%r3) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; lcr %r4, %r3 -; rll %r2, %r2, 0(%r4) +; vlgvg %r4, %v2, 1 +; lcr %r3, %r4 +; rll %r2, %r2, 0(%r3) ; br %r14 function %rotr_i32_reg(i32, i32) -> i32 { @@ -190,14 +190,14 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; lcr %r5, %r3 -; rll %r2, %r2, 0(%r5) +; lcr %r3, %r3 +; rll %r2, %r2, 0(%r3) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lcr %r5, %r3 -; rll %r2, %r2, 0(%r5) +; lcr %r3, %r3 +; rll %r2, %r2, 0(%r3) ; br %r14 function %rotr_i32_imm(i32) -> i32 { @@ -226,27 +226,27 @@ block0(v0: i16, v1: i128): ; VCode: ; block0: ; vl %v2, 0(%r3) -; llhr %r2, %r2 -; vlgvg %r3, %v2, 1 -; lcr %r4, %r3 +; llhr %r4, %r2 +; vlgvg %r2, %v2, 1 +; lcr %r3, %r2 +; nill %r2, 15 ; nill %r3, 15 -; nill %r4, 15 -; sllk %r4, %r2, 0(%r4) -; srlk %r2, %r2, 0(%r3) -; ork %r2, %r4, %r2 +; sllk %r3, %r4, 0(%r3) +; srlk %r2, %r4, 0(%r2) +; ork %r2, %r3, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl 
%v2, 0(%r3) -; llhr %r2, %r2 -; vlgvg %r3, %v2, 1 -; lcr %r4, %r3 +; llhr %r4, %r2 +; vlgvg %r2, %v2, 1 +; lcr %r3, %r2 +; nill %r2, 0xf ; nill %r3, 0xf -; nill %r4, 0xf -; sllk %r4, %r2, 0(%r4) -; srlk %r2, %r2, 0(%r3) -; ork %r2, %r4, %r2 +; sllk %r3, %r4, 0(%r3) +; srlk %r2, %r4, 0(%r2) +; ork %r2, %r3, %r2 ; br %r14 function %rotr_i16_reg(i16, i16) -> i16 { @@ -257,23 +257,23 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; llhr %r5, %r2 +; llhr %r4, %r2 ; lcr %r2, %r3 ; nill %r3, 15 ; nill %r2, 15 -; sllk %r2, %r5, 0(%r2) -; srlk %r3, %r5, 0(%r3) +; sllk %r2, %r4, 0(%r2) +; srlk %r3, %r4, 0(%r3) ; or %r2, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llhr %r5, %r2 +; llhr %r4, %r2 ; lcr %r2, %r3 ; nill %r3, 0xf ; nill %r2, 0xf -; sllk %r2, %r5, 0(%r2) -; srlk %r3, %r5, 0(%r3) +; sllk %r2, %r4, 0(%r2) +; srlk %r3, %r4, 0(%r3) ; or %r2, %r3 ; br %r14 @@ -286,18 +286,18 @@ block0(v0: i16): ; VCode: ; block0: -; llhr %r4, %r2 -; sllk %r2, %r4, 6 -; srlk %r4, %r4, 10 -; or %r2, %r4 +; llhr %r2, %r2 +; sllk %r4, %r2, 6 +; srlk %r2, %r2, 10 +; ork %r2, %r4, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llhr %r4, %r2 -; sllk %r2, %r4, 6 -; srlk %r4, %r4, 0xa -; or %r2, %r4 +; llhr %r2, %r2 +; sllk %r4, %r2, 6 +; srlk %r2, %r2, 0xa +; ork %r2, %r4, %r2 ; br %r14 function %rotr_i8_vr(i8, i128) -> i8 { @@ -309,27 +309,27 @@ block0(v0: i8, v1: i128): ; VCode: ; block0: ; vl %v2, 0(%r3) -; llcr %r2, %r2 -; vlgvg %r3, %v2, 1 -; lcr %r4, %r3 +; llcr %r4, %r2 +; vlgvg %r2, %v2, 1 +; lcr %r3, %r2 +; nill %r2, 7 ; nill %r3, 7 -; nill %r4, 7 -; sllk %r4, %r2, 0(%r4) -; srlk %r2, %r2, 0(%r3) -; ork %r2, %r4, %r2 +; sllk %r3, %r4, 0(%r3) +; srlk %r2, %r4, 0(%r2) +; ork %r2, %r3, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v2, 0(%r3) -; llcr %r2, %r2 -; vlgvg %r3, %v2, 1 -; lcr %r4, %r3 +; llcr %r4, %r2 +; vlgvg %r2, %v2, 1 +; lcr %r3, %r2 +; nill %r2, 7 ; nill %r3, 7 -; nill %r4, 7 -; sllk %r4, %r2, 0(%r4) -; srlk %r2, %r2, 0(%r3) -; ork %r2, %r4, %r2 +; sllk %r3, %r4, 0(%r3) +; srlk %r2, %r4, 0(%r2) +; ork %r2, %r3, %r2 ; br %r14 function %rotr_i8_reg(i8, i8) -> i8 { @@ -340,23 +340,23 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; llcr %r5, %r2 +; llcr %r4, %r2 ; lcr %r2, %r3 ; nill %r3, 7 ; nill %r2, 7 -; sllk %r2, %r5, 0(%r2) -; srlk %r3, %r5, 0(%r3) +; sllk %r2, %r4, 0(%r2) +; srlk %r3, %r4, 0(%r3) ; or %r2, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llcr %r5, %r2 +; llcr %r4, %r2 ; lcr %r2, %r3 ; nill %r3, 7 ; nill %r2, 7 -; sllk %r2, %r5, 0(%r2) -; srlk %r3, %r5, 0(%r3) +; sllk %r2, %r4, 0(%r2) +; srlk %r3, %r4, 0(%r3) ; or %r2, %r3 ; br %r14 @@ -369,18 +369,18 @@ block0(v0: i8): ; VCode: ; block0: -; llcr %r4, %r2 -; sllk %r2, %r4, 5 -; srlk %r4, %r4, 3 -; or %r2, %r4 +; llcr %r2, %r2 +; sllk %r4, %r2, 5 +; srlk %r2, %r2, 3 +; ork %r2, %r4, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llcr %r4, %r2 -; sllk %r2, %r4, 5 -; srlk %r4, %r4, 3 -; or %r2, %r4 +; llcr %r2, %r2 +; sllk %r4, %r2, 5 +; srlk %r2, %r2, 3 +; ork %r2, %r4, %r2 ; br %r14 function %rotl_i128_vr(i128, i128) -> i128 { @@ -395,12 +395,12 @@ block0(v0: i128, v1: i128): ; vl %v3, 0(%r4) ; vrepb %v6, %v3, 15 ; vlcb %v16, %v6 -; vslb %v18, %v1, %v6 -; vsl %v20, %v18, %v6 -; vsrlb %v22, %v1, %v16 -; vsrl %v24, %v22, %v16 -; vo %v26, %v20, %v24 -; vst %v26, 0(%r2) +; vslb %v17, %v1, %v6 +; vsl %v17, %v17, %v6 +; vsrlb %v18, %v1, %v16 +; vsrl %v16, %v18, %v16 +; vo %v18, %v17, %v16 +; vst %v18, 0(%r2) ; br %r14 ; ; Disassembled: @@ -409,12 +409,12 @@ block0(v0: i128, 
v1: i128): ; vl %v3, 0(%r4) ; vrepb %v6, %v3, 0xf ; vlcb %v16, %v6 -; vslb %v18, %v1, %v6 -; vsl %v20, %v18, %v6 -; vsrlb %v22, %v1, %v16 -; vsrl %v24, %v22, %v16 -; vo %v26, %v20, %v24 -; vst %v26, 0(%r2) +; vslb %v17, %v1, %v6 +; vsl %v17, %v17, %v6 +; vsrlb %v18, %v1, %v16 +; vsrl %v16, %v18, %v16 +; vo %v18, %v17, %v16 +; vst %v18, 0(%r2) ; br %r14 function %rotl_i128_reg(i128, i64) -> i128 { @@ -428,13 +428,13 @@ block0(v0: i128, v1: i64): ; vl %v1, 0(%r3) ; vlvgb %v5, %r4, 0 ; vrepb %v7, %v5, 0 -; vlcb %v17, %v7 -; vslb %v19, %v1, %v7 -; vsl %v21, %v19, %v7 -; vsrlb %v23, %v1, %v17 -; vsrl %v25, %v23, %v17 -; vo %v27, %v21, %v25 -; vst %v27, 0(%r2) +; vlcb %v16, %v7 +; vslb %v17, %v1, %v7 +; vsl %v17, %v17, %v7 +; vsrlb %v18, %v1, %v16 +; vsrl %v18, %v18, %v16 +; vo %v19, %v17, %v18 +; vst %v19, 0(%r2) ; br %r14 ; ; Disassembled: @@ -442,13 +442,13 @@ block0(v0: i128, v1: i64): ; vl %v1, 0(%r3) ; vlvgb %v5, %r4, 0 ; vrepb %v7, %v5, 0 -; vlcb %v17, %v7 -; vslb %v19, %v1, %v7 -; vsl %v21, %v19, %v7 -; vsrlb %v23, %v1, %v17 -; vsrl %v25, %v23, %v17 -; vo %v27, %v21, %v25 -; vst %v27, 0(%r2) +; vlcb %v16, %v7 +; vslb %v17, %v1, %v7 +; vsl %v17, %v17, %v7 +; vsrlb %v18, %v1, %v16 +; vsrl %v18, %v18, %v16 +; vo %v19, %v17, %v18 +; vst %v19, 0(%r2) ; br %r14 function %rotl_i128_imm(i128) -> i128 { @@ -464,11 +464,11 @@ block0(v0: i128): ; vrepib %v4, 17 ; vlcb %v6, %v4 ; vslb %v16, %v1, %v4 -; vsl %v18, %v16, %v4 -; vsrlb %v20, %v1, %v6 -; vsrl %v22, %v20, %v6 -; vo %v24, %v18, %v22 -; vst %v24, 0(%r2) +; vsl %v16, %v16, %v4 +; vsrlb %v17, %v1, %v6 +; vsrl %v17, %v17, %v6 +; vo %v16, %v16, %v17 +; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: @@ -477,11 +477,11 @@ block0(v0: i128): ; vrepib %v4, 0x11 ; vlcb %v6, %v4 ; vslb %v16, %v1, %v4 -; vsl %v18, %v16, %v4 -; vsrlb %v20, %v1, %v6 -; vsrl %v22, %v20, %v6 -; vo %v24, %v18, %v22 -; vst %v24, 0(%r2) +; vsl %v16, %v16, %v4 +; vsrlb %v17, %v1, %v6 +; vsrl %v17, %v17, %v6 +; vo %v16, %v16, %v17 +; vst %v16, 0(%r2) ; br %r14 function %rotl_i64_vr(i64, i128) -> i64 { @@ -493,15 +493,15 @@ block0(v0: i64, v1: i128): ; VCode: ; block0: ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; rllg %r2, %r2, 0(%r3) +; vlgvg %r4, %v2, 1 +; rllg %r2, %r2, 0(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; rllg %r2, %r2, 0(%r3) +; vlgvg %r4, %v2, 1 +; rllg %r2, %r2, 0(%r4) ; br %r14 function %rotl_i64_reg(i64, i64) -> i64 { @@ -546,15 +546,15 @@ block0(v0: i32, v1: i128): ; VCode: ; block0: ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; rll %r2, %r2, 0(%r3) +; vlgvg %r4, %v2, 1 +; rll %r2, %r2, 0(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; rll %r2, %r2, 0(%r3) +; vlgvg %r4, %v2, 1 +; rll %r2, %r2, 0(%r4) ; br %r14 function %rotl_i32_reg(i32, i32) -> i32 { @@ -599,27 +599,27 @@ block0(v0: i16, v1: i128): ; VCode: ; block0: ; vl %v2, 0(%r3) -; llhr %r2, %r2 +; llhr %r4, %r2 ; vlgvg %r3, %v2, 1 -; lcr %r4, %r3 +; lcr %r2, %r3 ; nill %r3, 15 -; nill %r4, 15 -; sllk %r5, %r2, 0(%r3) -; srlk %r2, %r2, 0(%r4) -; ork %r2, %r5, %r2 +; nill %r2, 15 +; sllk %r3, %r4, 0(%r3) +; srlk %r2, %r4, 0(%r2) +; ork %r2, %r3, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v2, 0(%r3) -; llhr %r2, %r2 +; llhr %r4, %r2 ; vlgvg %r3, %v2, 1 -; lcr %r4, %r3 +; lcr %r2, %r3 ; nill %r3, 0xf -; nill %r4, 0xf -; sllk %r5, %r2, 0(%r3) -; srlk %r2, %r2, 0(%r4) -; ork %r2, %r5, %r2 +; nill %r2, 0xf +; sllk %r3, %r4, 0(%r3) +; srlk %r2, %r4, 0(%r2) +; ork %r2, %r3, %r2 ; br %r14 function 
%rotl_i16_reg(i16, i16) -> i16 { @@ -630,24 +630,24 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; llhr %r5, %r2 +; llhr %r4, %r2 ; lcr %r2, %r3 ; nill %r3, 15 ; nill %r2, 15 -; sllk %r3, %r5, 0(%r3) -; srlk %r4, %r5, 0(%r2) -; ork %r2, %r3, %r4 +; sllk %r3, %r4, 0(%r3) +; srlk %r2, %r4, 0(%r2) +; ork %r2, %r3, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llhr %r5, %r2 +; llhr %r4, %r2 ; lcr %r2, %r3 ; nill %r3, 0xf ; nill %r2, 0xf -; sllk %r3, %r5, 0(%r3) -; srlk %r4, %r5, 0(%r2) -; ork %r2, %r3, %r4 +; sllk %r3, %r4, 0(%r3) +; srlk %r2, %r4, 0(%r2) +; ork %r2, %r3, %r2 ; br %r14 function %rotl_i16_imm(i16) -> i16 { @@ -659,18 +659,18 @@ block0(v0: i16): ; VCode: ; block0: -; llhr %r4, %r2 -; sllk %r2, %r4, 10 -; srlk %r4, %r4, 6 -; or %r2, %r4 +; llhr %r2, %r2 +; sllk %r4, %r2, 10 +; srlk %r2, %r2, 6 +; ork %r2, %r4, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llhr %r4, %r2 -; sllk %r2, %r4, 0xa -; srlk %r4, %r4, 6 -; or %r2, %r4 +; llhr %r2, %r2 +; sllk %r4, %r2, 0xa +; srlk %r2, %r2, 6 +; ork %r2, %r4, %r2 ; br %r14 function %rotl_i8_vr(i8, i128) -> i8 { @@ -682,27 +682,27 @@ block0(v0: i8, v1: i128): ; VCode: ; block0: ; vl %v2, 0(%r3) -; llcr %r2, %r2 +; llcr %r4, %r2 ; vlgvg %r3, %v2, 1 -; lcr %r4, %r3 +; lcr %r2, %r3 ; nill %r3, 7 -; nill %r4, 7 -; sllk %r5, %r2, 0(%r3) -; srlk %r2, %r2, 0(%r4) -; ork %r2, %r5, %r2 +; nill %r2, 7 +; sllk %r3, %r4, 0(%r3) +; srlk %r2, %r4, 0(%r2) +; ork %r2, %r3, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v2, 0(%r3) -; llcr %r2, %r2 +; llcr %r4, %r2 ; vlgvg %r3, %v2, 1 -; lcr %r4, %r3 +; lcr %r2, %r3 ; nill %r3, 7 -; nill %r4, 7 -; sllk %r5, %r2, 0(%r3) -; srlk %r2, %r2, 0(%r4) -; ork %r2, %r5, %r2 +; nill %r2, 7 +; sllk %r3, %r4, 0(%r3) +; srlk %r2, %r4, 0(%r2) +; ork %r2, %r3, %r2 ; br %r14 function %rotl_i8_reg(i8, i8) -> i8 { @@ -713,24 +713,24 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; llcr %r5, %r2 +; llcr %r4, %r2 ; lcr %r2, %r3 ; nill %r3, 7 ; nill %r2, 7 -; sllk %r3, %r5, 0(%r3) -; srlk %r4, %r5, 0(%r2) -; ork %r2, %r3, %r4 +; sllk %r3, %r4, 0(%r3) +; srlk %r2, %r4, 0(%r2) +; ork %r2, %r3, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llcr %r5, %r2 +; llcr %r4, %r2 ; lcr %r2, %r3 ; nill %r3, 7 ; nill %r2, 7 -; sllk %r3, %r5, 0(%r3) -; srlk %r4, %r5, 0(%r2) -; ork %r2, %r3, %r4 +; sllk %r3, %r4, 0(%r3) +; srlk %r2, %r4, 0(%r2) +; ork %r2, %r3, %r2 ; br %r14 function %rotr_i8_imm(i8) -> i8 { @@ -742,18 +742,18 @@ block0(v0: i8): ; VCode: ; block0: -; llcr %r4, %r2 -; sllk %r2, %r4, 3 -; srlk %r4, %r4, 5 -; or %r2, %r4 +; llcr %r2, %r2 +; sllk %r4, %r2, 3 +; srlk %r2, %r2, 5 +; ork %r2, %r4, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llcr %r4, %r2 -; sllk %r2, %r4, 3 -; srlk %r4, %r4, 5 -; or %r2, %r4 +; llcr %r2, %r2 +; sllk %r4, %r2, 3 +; srlk %r2, %r2, 5 +; ork %r2, %r4, %r2 ; br %r14 function %ushr_i128_vr(i128, i128) -> i128 { @@ -768,8 +768,8 @@ block0(v0: i128, v1: i128): ; vl %v3, 0(%r4) ; vrepb %v6, %v3, 15 ; vsrlb %v16, %v1, %v6 -; vsrl %v18, %v16, %v6 -; vst %v18, 0(%r2) +; vsrl %v16, %v16, %v6 +; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: @@ -778,8 +778,8 @@ block0(v0: i128, v1: i128): ; vl %v3, 0(%r4) ; vrepb %v6, %v3, 0xf ; vsrlb %v16, %v1, %v6 -; vsrl %v18, %v16, %v6 -; vst %v18, 0(%r2) +; vsrl %v16, %v16, %v6 +; vst %v16, 0(%r2) ; br %r14 function %ushr_i128_reg(i128, i64) -> i128 { @@ -793,9 +793,9 @@ block0(v0: i128, v1: i64): ; vl %v1, 0(%r3) ; vlvgb %v5, %r4, 0 ; vrepb %v7, %v5, 0 -; vsrlb %v17, %v1, %v7 -; vsrl %v19, %v17, %v7 -; vst %v19, 
0(%r2) +; vsrlb %v16, %v1, %v7 +; vsrl %v16, %v16, %v7 +; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: @@ -803,9 +803,9 @@ block0(v0: i128, v1: i64): ; vl %v1, 0(%r3) ; vlvgb %v5, %r4, 0 ; vrepb %v7, %v5, 0 -; vsrlb %v17, %v1, %v7 -; vsrl %v19, %v17, %v7 -; vst %v19, 0(%r2) +; vsrlb %v16, %v1, %v7 +; vsrl %v16, %v16, %v7 +; vst %v16, 0(%r2) ; br %r14 function %ushr_i128_imm(i128) -> i128 { @@ -842,15 +842,15 @@ block0(v0: i64, v1: i128): ; VCode: ; block0: ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; srlg %r2, %r2, 0(%r3) +; vlgvg %r4, %v2, 1 +; srlg %r2, %r2, 0(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; srlg %r2, %r2, 0(%r3) +; vlgvg %r4, %v2, 1 +; srlg %r2, %r2, 0(%r4) ; br %r14 function %ushr_i64_reg(i64, i64) -> i64 { @@ -895,17 +895,17 @@ block0(v0: i32, v1: i128): ; VCode: ; block0: ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; nill %r3, 31 -; srlk %r2, %r2, 0(%r3) +; vlgvg %r5, %v2, 1 +; nill %r5, 31 +; srlk %r2, %r2, 0(%r5) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; nill %r3, 0x1f -; srlk %r2, %r2, 0(%r3) +; vlgvg %r5, %v2, 1 +; nill %r5, 0x1f +; srlk %r2, %r2, 0(%r5) ; br %r14 function %ushr_i32_reg(i32, i32) -> i32 { @@ -952,19 +952,19 @@ block0(v0: i16, v1: i128): ; VCode: ; block0: ; vl %v2, 0(%r3) -; llhr %r2, %r2 -; vlgvg %r5, %v2, 1 -; nill %r5, 15 -; srlk %r2, %r2, 0(%r5) +; llhr %r4, %r2 +; vlgvg %r2, %v2, 1 +; nill %r2, 15 +; srlk %r2, %r4, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v2, 0(%r3) -; llhr %r2, %r2 -; vlgvg %r5, %v2, 1 -; nill %r5, 0xf -; srlk %r2, %r2, 0(%r5) +; llhr %r4, %r2 +; vlgvg %r2, %v2, 1 +; nill %r2, 0xf +; srlk %r2, %r4, 0(%r2) ; br %r14 function %ushr_i16_reg(i16, i16) -> i16 { @@ -975,16 +975,16 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; llhr %r5, %r2 +; llhr %r4, %r2 ; nill %r3, 15 -; srlk %r2, %r5, 0(%r3) +; srlk %r2, %r4, 0(%r3) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llhr %r5, %r2 +; llhr %r4, %r2 ; nill %r3, 0xf -; srlk %r2, %r5, 0(%r3) +; srlk %r2, %r4, 0(%r3) ; br %r14 function %ushr_i16_imm(i16) -> i16 { @@ -996,14 +996,14 @@ block0(v0: i16): ; VCode: ; block0: -; llhr %r4, %r2 -; srlk %r2, %r4, 10 +; llhr %r2, %r2 +; srlk %r2, %r2, 10 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llhr %r4, %r2 -; srlk %r2, %r4, 0xa +; llhr %r2, %r2 +; srlk %r2, %r2, 0xa ; br %r14 function %ushr_i8_vr(i8, i128) -> i8 { @@ -1015,19 +1015,19 @@ block0(v0: i8, v1: i128): ; VCode: ; block0: ; vl %v2, 0(%r3) -; llcr %r2, %r2 -; vlgvg %r5, %v2, 1 -; nill %r5, 7 -; srlk %r2, %r2, 0(%r5) +; llcr %r4, %r2 +; vlgvg %r2, %v2, 1 +; nill %r2, 7 +; srlk %r2, %r4, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v2, 0(%r3) -; llcr %r2, %r2 -; vlgvg %r5, %v2, 1 -; nill %r5, 7 -; srlk %r2, %r2, 0(%r5) +; llcr %r4, %r2 +; vlgvg %r2, %v2, 1 +; nill %r2, 7 +; srlk %r2, %r4, 0(%r2) ; br %r14 function %ushr_i8_reg(i8, i8) -> i8 { @@ -1038,16 +1038,16 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; llcr %r5, %r2 +; llcr %r4, %r2 ; nill %r3, 7 -; srlk %r2, %r5, 0(%r3) +; srlk %r2, %r4, 0(%r3) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llcr %r5, %r2 +; llcr %r4, %r2 ; nill %r3, 7 -; srlk %r2, %r5, 0(%r3) +; srlk %r2, %r4, 0(%r3) ; br %r14 function %ushr_i8_imm(i8) -> i8 { @@ -1059,14 +1059,14 @@ block0(v0: i8): ; VCode: ; block0: -; llcr %r4, %r2 -; srlk %r2, %r4, 3 +; llcr %r2, %r2 +; srlk %r2, %r2, 3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; llcr %r4, %r2 -; srlk %r2, %r4, 3 +; llcr %r2, %r2 +; 
srlk %r2, %r2, 3 ; br %r14 function %ishl_i128_vr(i128, i128) -> i128 { @@ -1081,8 +1081,8 @@ block0(v0: i128, v1: i128): ; vl %v3, 0(%r4) ; vrepb %v6, %v3, 15 ; vslb %v16, %v1, %v6 -; vsl %v18, %v16, %v6 -; vst %v18, 0(%r2) +; vsl %v16, %v16, %v6 +; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: @@ -1091,8 +1091,8 @@ block0(v0: i128, v1: i128): ; vl %v3, 0(%r4) ; vrepb %v6, %v3, 0xf ; vslb %v16, %v1, %v6 -; vsl %v18, %v16, %v6 -; vst %v18, 0(%r2) +; vsl %v16, %v16, %v6 +; vst %v16, 0(%r2) ; br %r14 function %ishl_i128_reg(i128, i64) -> i128 { @@ -1106,9 +1106,9 @@ block0(v0: i128, v1: i64): ; vl %v1, 0(%r3) ; vlvgb %v5, %r4, 0 ; vrepb %v7, %v5, 0 -; vslb %v17, %v1, %v7 -; vsl %v19, %v17, %v7 -; vst %v19, 0(%r2) +; vslb %v16, %v1, %v7 +; vsl %v16, %v16, %v7 +; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: @@ -1116,9 +1116,9 @@ block0(v0: i128, v1: i64): ; vl %v1, 0(%r3) ; vlvgb %v5, %r4, 0 ; vrepb %v7, %v5, 0 -; vslb %v17, %v1, %v7 -; vsl %v19, %v17, %v7 -; vst %v19, 0(%r2) +; vslb %v16, %v1, %v7 +; vsl %v16, %v16, %v7 +; vst %v16, 0(%r2) ; br %r14 function %ishl_i128_imm(i128) -> i128 { @@ -1155,15 +1155,15 @@ block0(v0: i64, v1: i128): ; VCode: ; block0: ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; sllg %r2, %r2, 0(%r3) +; vlgvg %r4, %v2, 1 +; sllg %r2, %r2, 0(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; sllg %r2, %r2, 0(%r3) +; vlgvg %r4, %v2, 1 +; sllg %r2, %r2, 0(%r4) ; br %r14 function %ishl_i64_reg(i64, i64) -> i64 { @@ -1208,17 +1208,17 @@ block0(v0: i32, v1: i128): ; VCode: ; block0: ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; nill %r3, 31 -; sllk %r2, %r2, 0(%r3) +; vlgvg %r5, %v2, 1 +; nill %r5, 31 +; sllk %r2, %r2, 0(%r5) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; nill %r3, 0x1f -; sllk %r2, %r2, 0(%r3) +; vlgvg %r5, %v2, 1 +; nill %r5, 0x1f +; sllk %r2, %r2, 0(%r5) ; br %r14 function %ishl_i32_reg(i32, i32) -> i32 { @@ -1265,17 +1265,17 @@ block0(v0: i16, v1: i128): ; VCode: ; block0: ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; nill %r3, 15 -; sllk %r2, %r2, 0(%r3) +; vlgvg %r5, %v2, 1 +; nill %r5, 15 +; sllk %r2, %r2, 0(%r5) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; nill %r3, 0xf -; sllk %r2, %r2, 0(%r3) +; vlgvg %r5, %v2, 1 +; nill %r5, 0xf +; sllk %r2, %r2, 0(%r5) ; br %r14 function %ishl_i16_reg(i16, i16) -> i16 { @@ -1322,17 +1322,17 @@ block0(v0: i8, v1: i128): ; VCode: ; block0: ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; nill %r3, 7 -; sllk %r2, %r2, 0(%r3) +; vlgvg %r5, %v2, 1 +; nill %r5, 7 +; sllk %r2, %r2, 0(%r5) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; nill %r3, 7 -; sllk %r2, %r2, 0(%r3) +; vlgvg %r5, %v2, 1 +; nill %r5, 7 +; sllk %r2, %r2, 0(%r5) ; br %r14 function %ishl_i8_reg(i8, i8) -> i8 { @@ -1382,8 +1382,8 @@ block0(v0: i128, v1: i128): ; vl %v3, 0(%r4) ; vrepb %v6, %v3, 15 ; vsrab %v16, %v1, %v6 -; vsra %v18, %v16, %v6 -; vst %v18, 0(%r2) +; vsra %v16, %v16, %v6 +; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: @@ -1392,8 +1392,8 @@ block0(v0: i128, v1: i128): ; vl %v3, 0(%r4) ; vrepb %v6, %v3, 0xf ; vsrab %v16, %v1, %v6 -; vsra %v18, %v16, %v6 -; vst %v18, 0(%r2) +; vsra %v16, %v16, %v6 +; vst %v16, 0(%r2) ; br %r14 function %sshr_i128_reg(i128, i64) -> i128 { @@ -1407,9 +1407,9 @@ block0(v0: i128, v1: i64): ; vl %v1, 0(%r3) ; vlvgb %v5, %r4, 0 ; vrepb %v7, %v5, 0 -; vsrab %v17, %v1, %v7 -; vsra %v19, %v17, %v7 -; vst %v19, 0(%r2) +; vsrab %v16, %v1, %v7 +; vsra %v16, %v16, 
%v7 +; vst %v16, 0(%r2) ; br %r14 ; ; Disassembled: @@ -1417,9 +1417,9 @@ block0(v0: i128, v1: i64): ; vl %v1, 0(%r3) ; vlvgb %v5, %r4, 0 ; vrepb %v7, %v5, 0 -; vsrab %v17, %v1, %v7 -; vsra %v19, %v17, %v7 -; vst %v19, 0(%r2) +; vsrab %v16, %v1, %v7 +; vsra %v16, %v16, %v7 +; vst %v16, 0(%r2) ; br %r14 function %sshr_i128_imm(i128) -> i128 { @@ -1456,15 +1456,15 @@ block0(v0: i64, v1: i128): ; VCode: ; block0: ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; srag %r2, %r2, 0(%r3) +; vlgvg %r4, %v2, 1 +; srag %r2, %r2, 0(%r4) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; srag %r2, %r2, 0(%r3) +; vlgvg %r4, %v2, 1 +; srag %r2, %r2, 0(%r4) ; br %r14 function %sshr_i64_reg(i64, i64) -> i64 { @@ -1509,17 +1509,17 @@ block0(v0: i32, v1: i128): ; VCode: ; block0: ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; nill %r3, 31 -; srak %r2, %r2, 0(%r3) +; vlgvg %r5, %v2, 1 +; nill %r5, 31 +; srak %r2, %r2, 0(%r5) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v2, 0(%r3) -; vlgvg %r3, %v2, 1 -; nill %r3, 0x1f -; srak %r2, %r2, 0(%r3) +; vlgvg %r5, %v2, 1 +; nill %r5, 0x1f +; srak %r2, %r2, 0(%r5) ; br %r14 function %sshr_i32_reg(i32, i32) -> i32 { @@ -1566,19 +1566,19 @@ block0(v0: i16, v1: i128): ; VCode: ; block0: ; vl %v2, 0(%r3) -; lhr %r2, %r2 -; vlgvg %r5, %v2, 1 -; nill %r5, 15 -; srak %r2, %r2, 0(%r5) +; lhr %r4, %r2 +; vlgvg %r2, %v2, 1 +; nill %r2, 15 +; srak %r2, %r4, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v2, 0(%r3) -; lhr %r2, %r2 -; vlgvg %r5, %v2, 1 -; nill %r5, 0xf -; srak %r2, %r2, 0(%r5) +; lhr %r4, %r2 +; vlgvg %r2, %v2, 1 +; nill %r2, 0xf +; srak %r2, %r4, 0(%r2) ; br %r14 function %sshr_i16_reg(i16, i16) -> i16 { @@ -1589,16 +1589,16 @@ block0(v0: i16, v1: i16): ; VCode: ; block0: -; lhr %r5, %r2 +; lhr %r4, %r2 ; nill %r3, 15 -; srak %r2, %r5, 0(%r3) +; srak %r2, %r4, 0(%r3) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lhr %r5, %r2 +; lhr %r4, %r2 ; nill %r3, 0xf -; srak %r2, %r5, 0(%r3) +; srak %r2, %r4, 0(%r3) ; br %r14 function %sshr_i16_imm(i16) -> i16 { @@ -1610,14 +1610,14 @@ block0(v0: i16): ; VCode: ; block0: -; lhr %r4, %r2 -; srak %r2, %r4, 10 +; lhr %r2, %r2 +; srak %r2, %r2, 10 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lhr %r4, %r2 -; srak %r2, %r4, 0xa +; lhr %r2, %r2 +; srak %r2, %r2, 0xa ; br %r14 function %sshr_i8_vr(i8, i128) -> i8 { @@ -1629,19 +1629,19 @@ block0(v0: i8, v1: i128): ; VCode: ; block0: ; vl %v2, 0(%r3) -; lbr %r2, %r2 -; vlgvg %r5, %v2, 1 -; nill %r5, 7 -; srak %r2, %r2, 0(%r5) +; lbr %r4, %r2 +; vlgvg %r2, %v2, 1 +; nill %r2, 7 +; srak %r2, %r4, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v2, 0(%r3) -; lbr %r2, %r2 -; vlgvg %r5, %v2, 1 -; nill %r5, 7 -; srak %r2, %r2, 0(%r5) +; lbr %r4, %r2 +; vlgvg %r2, %v2, 1 +; nill %r2, 7 +; srak %r2, %r4, 0(%r2) ; br %r14 function %sshr_i8_reg(i8, i8) -> i8 { @@ -1652,16 +1652,16 @@ block0(v0: i8, v1: i8): ; VCode: ; block0: -; lbr %r5, %r2 +; lbr %r4, %r2 ; nill %r3, 7 -; srak %r2, %r5, 0(%r3) +; srak %r2, %r4, 0(%r3) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lbr %r5, %r2 +; lbr %r4, %r2 ; nill %r3, 7 -; srak %r2, %r5, 0(%r3) +; srak %r2, %r4, 0(%r3) ; br %r14 function %sshr_i8_imm(i8) -> i8 { @@ -1673,13 +1673,13 @@ block0(v0: i8): ; VCode: ; block0: -; lbr %r4, %r2 -; srak %r2, %r4, 3 +; lbr %r2, %r2 +; srak %r2, %r2, 3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lbr %r4, %r2 -; srak %r2, %r4, 3 +; lbr %r2, %r2 +; srak %r2, %r2, 3 ; br %r14 diff --git 
a/cranelift/filetests/filetests/isa/s390x/stack.clif b/cranelift/filetests/filetests/isa/s390x/stack.clif index eeeae73a39b0..5277ddd868f7 100644 --- a/cranelift/filetests/filetests/isa/s390x/stack.clif +++ b/cranelift/filetests/filetests/isa/s390x/stack.clif @@ -61,8 +61,8 @@ block0: ; VCode: ; aghi %r15, -8 ; block0: -; la %r3, 0(%r15) -; lg %r2, 0(%r3) +; la %r2, 0(%r15) +; lg %r2, 0(%r2) ; aghi %r15, 8 ; br %r14 ; @@ -70,8 +70,8 @@ block0: ; block0: ; offset 0x0 ; aghi %r15, -8 ; block1: ; offset 0x4 -; la %r3, 0(%r15) -; lg %r2, 0(%r3) +; la %r2, 0(%r15) +; lg %r2, 0(%r2) ; aghi %r15, 8 ; br %r14 @@ -87,8 +87,8 @@ block0: ; VCode: ; agfi %r15, -100008 ; block0: -; la %r3, 0(%r15) -; lg %r2, 0(%r3) +; la %r2, 0(%r15) +; lg %r2, 0(%r2) ; agfi %r15, 100008 ; br %r14 ; @@ -96,8 +96,8 @@ block0: ; block0: ; offset 0x0 ; agfi %r15, -0x186a8 ; block1: ; offset 0x6 -; la %r3, 0(%r15) -; lg %r2, 0(%r3) +; la %r2, 0(%r15) +; lg %r2, 0(%r2) ; agfi %r15, 0x186a8 ; br %r14 @@ -112,8 +112,8 @@ block0(v0: i64): ; VCode: ; aghi %r15, -8 ; block0: -; la %r4, 0(%r15) -; stg %r2, 0(%r4) +; la %r3, 0(%r15) +; stg %r2, 0(%r3) ; aghi %r15, 8 ; br %r14 ; @@ -121,8 +121,8 @@ block0(v0: i64): ; block0: ; offset 0x0 ; aghi %r15, -8 ; block1: ; offset 0x4 -; la %r4, 0(%r15) -; stg %r2, 0(%r4) +; la %r3, 0(%r15) +; stg %r2, 0(%r3) ; aghi %r15, 8 ; br %r14 @@ -138,8 +138,8 @@ block0(v0: i64): ; VCode: ; agfi %r15, -100008 ; block0: -; la %r4, 0(%r15) -; stg %r2, 0(%r4) +; la %r3, 0(%r15) +; stg %r2, 0(%r3) ; agfi %r15, 100008 ; br %r14 ; @@ -147,8 +147,8 @@ block0(v0: i64): ; block0: ; offset 0x0 ; agfi %r15, -0x186a8 ; block1: ; offset 0x6 -; la %r4, 0(%r15) -; stg %r2, 0(%r4) +; la %r3, 0(%r15) +; stg %r2, 0(%r3) ; agfi %r15, 0x186a8 ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/store-little.clif b/cranelift/filetests/filetests/isa/s390x/store-little.clif index c99669a18e4d..5b781f09ed2c 100644 --- a/cranelift/filetests/filetests/isa/s390x/store-little.clif +++ b/cranelift/filetests/filetests/isa/s390x/store-little.clif @@ -45,14 +45,14 @@ block0(v0: i64): ; VCode: ; block0: -; lghi %r4, 12345 -; strvg %r4, 0(%r2) +; lghi %r3, 12345 +; strvg %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lghi %r4, 0x3039 -; strvg %r4, 0(%r2) ; trap: heap_oob +; lghi %r3, 0x3039 +; strvg %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %istore8_i64(i64, i64) { @@ -184,14 +184,14 @@ block0(v0: i64): ; VCode: ; block0: -; lghi %r4, 12345 -; strv %r4, 0(%r2) +; lghi %r3, 12345 +; strv %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lghi %r4, 0x3039 -; strv %r4, 0(%r2) ; trap: heap_oob +; lghi %r3, 0x3039 +; strv %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %store_i32(i32, i64) { @@ -238,14 +238,14 @@ block0(v0: i64): ; VCode: ; block0: -; lhi %r4, 12345 -; strv %r4, 0(%r2) +; lhi %r3, 12345 +; strv %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lhi %r4, 0x3039 -; strv %r4, 0(%r2) ; trap: heap_oob +; lhi %r3, 0x3039 +; strv %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %istore8_i32(i32, i64) { @@ -492,14 +492,14 @@ block0(v0: f16, v1: i64): ; VCode: ; block0: -; vlgvh %r5, %v0, 0 -; strvh %r5, 0(%r2) +; vlgvh %r3, %v0, 0 +; strvh %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvh %r5, %v0, 0 -; strvh %r5, 0(%r2) ; trap: heap_oob +; vlgvh %r3, %v0, 0 +; strvh %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %store_f32(f32, i64) { @@ -510,14 +510,14 @@ block0(v0: f32, v1: i64): ; VCode: ; block0: -; vlgvf %r5, %v0, 0 -; strv %r5, 0(%r2) +; 
vlgvf %r3, %v0, 0 +; strv %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvf %r5, %v0, 0 -; strv %r5, 0(%r2) ; trap: heap_oob +; vlgvf %r3, %v0, 0 +; strv %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %store_f64(f64, i64) { @@ -528,14 +528,14 @@ block0(v0: f64, v1: i64): ; VCode: ; block0: -; lgdr %r5, %f0 -; strvg %r5, 0(%r2) +; lgdr %r3, %f0 +; strvg %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lgdr %r5, %f0 -; strvg %r5, 0(%r2) ; trap: heap_oob +; lgdr %r3, %f0 +; strvg %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %store_f16(f128, i64) { @@ -547,18 +547,18 @@ block0(v0: f128, v1: i64): ; VCode: ; block0: ; vl %v1, 0(%r2) -; vlgvg %r2, %v1, 1 -; lgdr %r4, %f1 -; strvg %r2, 0(%r3) -; strvg %r4, 8(%r3) +; vlgvg %r4, %v1, 1 +; lgdr %r2, %f1 +; strvg %r4, 0(%r3) +; strvg %r2, 8(%r3) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v1, 0(%r2) -; vlgvg %r2, %v1, 1 -; lgdr %r4, %f1 -; strvg %r2, 0(%r3) ; trap: heap_oob -; strvg %r4, 8(%r3) ; trap: heap_oob +; vlgvg %r4, %v1, 1 +; lgdr %r2, %f1 +; strvg %r4, 0(%r3) ; trap: heap_oob +; strvg %r2, 8(%r3) ; trap: heap_oob ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/tls_elf.clif b/cranelift/filetests/filetests/isa/s390x/tls_elf.clif index b31136506915..a685783fbd13 100644 --- a/cranelift/filetests/filetests/isa/s390x/tls_elf.clif +++ b/cranelift/filetests/filetests/isa/s390x/tls_elf.clif @@ -17,8 +17,8 @@ block0(v0: i32): ; larl %r12, %ElfGlobalOffsetTable + 0 ; bras %r1, 12 ; data userextname0@tlsgd ; lg %r2, 0(%r1) ; brasl %r14, tls_gdcall:userextname0 -; ear %r4, %a0 -; sllg %r3, %r4, 32 +; ear %r3, %a0 +; sllg %r3, %r3, 32 ; ear %r3, %a1 ; agr %r2, %r3 ; lmg %r12, %r15, 256(%r15) @@ -37,8 +37,8 @@ block0(v0: i32): ; .byte 0x00, 0x00 ; lg %r2, 0(%r1) ; brasl %r14, 0x22 ; reloc_external PLTRel32Dbl %ElfTlsGetOffset 2 ; reloc_external TlsGdCall u1:0 0 -; ear %r4, %a0 -; sllg %r3, %r4, 0x20 +; ear %r3, %a0 +; sllg %r3, %r3, 0x20 ; ear %r3, %a1 ; agr %r2, %r3 ; lmg %r12, %r15, 0x100(%r15) diff --git a/cranelift/filetests/filetests/isa/s390x/uadd_overflow.clif b/cranelift/filetests/filetests/isa/s390x/uadd_overflow.clif index 823ce56f08d9..6668bfb01111 100644 --- a/cranelift/filetests/filetests/isa/s390x/uadd_overflow.clif +++ b/cranelift/filetests/filetests/isa/s390x/uadd_overflow.clif @@ -9,20 +9,20 @@ block0(v0: i32, v1: i32): ; VCode: ; block0: -; ark %r5, %r2, %r3 -; clr %r5, %r2 +; ark %r4, %r2, %r3 +; clr %r4, %r2 ; lhi %r3, 0 ; lochil %r3, 1 -; lgr %r2, %r5 +; lgr %r2, %r4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; ark %r5, %r2, %r3 -; clr %r5, %r2 +; ark %r4, %r2, %r3 +; clr %r4, %r2 ; lhi %r3, 0 ; lochil %r3, 1 -; lgr %r2, %r5 +; lgr %r2, %r4 ; br %r14 function %f4(i64, i64) -> i64, i8 { @@ -33,19 +33,19 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; agrk %r5, %r2, %r3 -; clgr %r5, %r2 +; agrk %r4, %r2, %r3 +; clgr %r4, %r2 ; lhi %r3, 0 ; lochil %r3, 1 -; lgr %r2, %r5 +; lgr %r2, %r4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; agrk %r5, %r2, %r3 -; clgr %r5, %r2 +; agrk %r4, %r2, %r3 +; clgr %r4, %r2 ; lhi %r3, 0 ; lochil %r3, 1 -; lgr %r2, %r5 +; lgr %r2, %r4 ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/user_stack_maps.clif b/cranelift/filetests/filetests/isa/s390x/user_stack_maps.clif index e7b88750eca8..77afd9aaeafa 100644 --- a/cranelift/filetests/filetests/isa/s390x/user_stack_maps.clif +++ b/cranelift/filetests/filetests/isa/s390x/user_stack_maps.clif @@ -32,67 +32,67 @@ block0: } ; VCode: -; stmg %r6, %r15, 48(%r15) +; 
stmg %r8, %r15, 64(%r15) ; aghi %r15, -176 ; block0: -; lhi %r8, 0 -; lhi %r6, 1 -; lhi %r7, 2 +; lhi %r10, 0 +; lhi %r8, 1 +; lhi %r9, 2 ; la %r2, 160(%r15) ; mvhi 0(%r2), 0 -; la %r3, 164(%r15) -; mvhi 0(%r3), 1 -; la %r4, 168(%r15) -; mvhi 0(%r4), 2 -; lgr %r2, %r8 +; la %r2, 164(%r15) +; mvhi 0(%r2), 1 +; la %r2, 168(%r15) +; mvhi 0(%r2), 2 +; lgr %r2, %r10 ; brasl %r14, userextname0 ; ; UserStackMap { by_type: [(types::I32, CompoundBitSet {0, 4, 8})], sp_to_sized_stack_slots: None } ; la %r2, 160(%r15) ; mvhi 0(%r2), 1 -; la %r3, 164(%r15) -; mvhi 0(%r3), 2 -; lgr %r2, %r8 +; la %r2, 164(%r15) +; mvhi 0(%r2), 2 +; lgr %r2, %r10 ; brasl %r14, userextname0 ; ; UserStackMap { by_type: [(types::I32, CompoundBitSet {0, 4})], sp_to_sized_stack_slots: None } -; la %r5, 160(%r15) -; mvhi 0(%r5), 2 -; lgr %r2, %r6 +; la %r2, 160(%r15) +; mvhi 0(%r2), 2 +; lgr %r2, %r8 ; brasl %r14, userextname0 ; ; UserStackMap { by_type: [(types::I32, CompoundBitSet {0})], sp_to_sized_stack_slots: None } -; lgr %r2, %r7 +; lgr %r2, %r9 ; brasl %r14, userextname0 -; lmg %r6, %r15, 224(%r15) +; lmg %r8, %r15, 240(%r15) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; stmg %r6, %r15, 0x30(%r15) +; stmg %r8, %r15, 0x40(%r15) ; aghi %r15, -0xb0 ; block1: ; offset 0xa -; lhi %r8, 0 -; lhi %r6, 1 -; lhi %r7, 2 +; lhi %r10, 0 +; lhi %r8, 1 +; lhi %r9, 2 ; la %r2, 0xa0(%r15) ; mvhi 0(%r2), 0 -; la %r3, 0xa4(%r15) -; mvhi 0(%r3), 1 -; la %r4, 0xa8(%r15) -; mvhi 0(%r4), 2 -; lgr %r2, %r8 +; la %r2, 0xa4(%r15) +; mvhi 0(%r2), 1 +; la %r2, 0xa8(%r15) +; mvhi 0(%r2), 2 +; lgr %r2, %r10 ; brasl %r14, 0x38 ; reloc_external PLTRel32Dbl u0:0 2 ; la %r2, 0xa0(%r15) ; mvhi 0(%r2), 1 -; la %r3, 0xa4(%r15) -; mvhi 0(%r3), 2 -; lgr %r2, %r8 +; la %r2, 0xa4(%r15) +; mvhi 0(%r2), 2 +; lgr %r2, %r10 ; brasl %r14, 0x56 ; reloc_external PLTRel32Dbl u0:0 2 -; la %r5, 0xa0(%r15) -; mvhi 0(%r5), 2 -; lgr %r2, %r6 +; la %r2, 0xa0(%r15) +; mvhi 0(%r2), 2 +; lgr %r2, %r8 ; brasl %r14, 0x6a ; reloc_external PLTRel32Dbl u0:0 2 -; lgr %r2, %r7 +; lgr %r2, %r9 ; brasl %r14, 0x74 ; reloc_external PLTRel32Dbl u0:0 2 -; lmg %r6, %r15, 0xe0(%r15) +; lmg %r8, %r15, 0xf0(%r15) ; br %r14 function %different_types(i8, i16, i32, i64, f32, f64) -> i8, i16, i32, i64, f32, f64 fast { @@ -127,30 +127,30 @@ block0(v0: i8, v1: i16, v2: i32, v3: i64, v4: f32, v5: f64): ; std %f14, 336(%r15) ; std %f15, 344(%r15) ; block0: -; la %r9, 160(%r15) -; stc %r2, 0(%r9) +; la %r12, 160(%r15) +; stc %r2, 0(%r12) ; lgr %r9, %r2 ; la %r2, 168(%r15) ; sth %r3, 0(%r2) -; lgr %r6, %r3 +; lgr %r8, %r3 ; la %r2, 176(%r15) ; st %r4, 0(%r2) -; lgr %r11, %r4 +; lgr %r7, %r4 ; la %r2, 180(%r15) ; ste %f0, 0(%r2) ; vst %v0, 256(%r15) ; la %r2, 184(%r15) ; stg %r5, 0(%r2) -; lgr %r8, %r5 -; la %r3, 192(%r15) -; std %f2, 0(%r3) +; lgr %r6, %r5 +; la %r2, 192(%r15) +; std %f2, 0(%r2) ; vst %v2, 272(%r15) ; brasl %r14, userextname0 ; ; UserStackMap { by_type: [(types::I8, CompoundBitSet {0}), (types::I16, CompoundBitSet {8}), (types::I32, CompoundBitSet {16}), (types::F32, CompoundBitSet {20}), (types::I64, CompoundBitSet {24}), (types::F64, CompoundBitSet {32})], sp_to_sized_stack_slots: None } ; lgr %r2, %r9 -; lgr %r3, %r6 -; lgr %r4, %r11 -; lgr %r5, %r8 +; lgr %r3, %r8 +; lgr %r4, %r7 +; lgr %r5, %r6 ; vl %v0, 256(%r15) ; vl %v2, 272(%r15) ; ld %f8, 288(%r15) @@ -177,29 +177,29 @@ block0(v0: i8, v1: i16, v2: i32, v3: i64, v4: f32, v5: f64): ; std %f14, 0x150(%r15) ; std %f15, 0x158(%r15) ; block1: ; offset 0x2a -; la %r9, 0xa0(%r15) -; stc %r2, 0(%r9) +; la %r12, 0xa0(%r15) +; 
stc %r2, 0(%r12) ; lgr %r9, %r2 ; la %r2, 0xa8(%r15) ; sth %r3, 0(%r2) -; lgr %r6, %r3 +; lgr %r8, %r3 ; la %r2, 0xb0(%r15) ; st %r4, 0(%r2) -; lgr %r11, %r4 +; lgr %r7, %r4 ; la %r2, 0xb4(%r15) ; ste %f0, 0(%r2) ; vst %v0, 0x100(%r15) ; la %r2, 0xb8(%r15) ; stg %r5, 0(%r2) -; lgr %r8, %r5 -; la %r3, 0xc0(%r15) -; std %f2, 0(%r3) +; lgr %r6, %r5 +; la %r2, 0xc0(%r15) +; std %f2, 0(%r2) ; vst %v2, 0x110(%r15) ; brasl %r14, 0x78 ; reloc_external PLTRel32Dbl u0:0 2 ; lgr %r2, %r9 -; lgr %r3, %r6 -; lgr %r4, %r11 -; lgr %r5, %r8 +; lgr %r3, %r8 +; lgr %r4, %r7 +; lgr %r5, %r6 ; vl %v0, 0x100(%r15) ; vl %v2, 0x110(%r15) ; ld %f8, 0x120(%r15) diff --git a/cranelift/filetests/filetests/isa/s390x/vec-abi-128.clif b/cranelift/filetests/filetests/isa/s390x/vec-abi-128.clif index 256b04e84c2d..cdbf8310fea2 100644 --- a/cranelift/filetests/filetests/isa/s390x/vec-abi-128.clif +++ b/cranelift/filetests/filetests/isa/s390x/vec-abi-128.clif @@ -11,28 +11,28 @@ block0(v0: i128): } ; VCode: -; stmg %r6, %r15, 48(%r15) +; stmg %r8, %r15, 64(%r15) ; aghi %r15, -208 ; block0: -; lgr %r6, %r2 +; lgr %r8, %r2 ; vl %v1, 0(%r3) ; la %r3, 160(%r15) ; vst %v1, 0(%r3) ; la %r2, 176(%r15) ; bras %r1, 12 ; data %callee_be + 0 ; lg %r4, 0(%r1) ; basr %r14, %r4 ; vl %v1, 176(%r15) ; vst %v1, 192(%r15) -; lgr %r2, %r6 +; lgr %r2, %r8 ; vl %v16, 192(%r15) ; vst %v16, 0(%r2) -; lmg %r6, %r15, 256(%r15) +; lmg %r8, %r15, 272(%r15) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; stmg %r6, %r15, 0x30(%r15) +; stmg %r8, %r15, 0x40(%r15) ; aghi %r15, -0xd0 ; block1: ; offset 0xa -; lgr %r6, %r2 +; lgr %r8, %r2 ; vl %v1, 0(%r3) ; la %r3, 0xa0(%r15) ; vst %v1, 0(%r3) @@ -46,10 +46,10 @@ block0(v0: i128): ; basr %r14, %r4 ; vl %v1, 0xb0(%r15) ; vst %v1, 0xc0(%r15) -; lgr %r2, %r6 +; lgr %r2, %r8 ; vl %v16, 0xc0(%r15) ; vst %v16, 0(%r2) -; lmg %r6, %r15, 0x100(%r15) +; lmg %r8, %r15, 0x110(%r15) ; br %r14 function %caller_be_to_le(i128) -> i128 { @@ -78,11 +78,11 @@ block0(v0: i128): ; la %r3, 160(%r15) ; vst %v1, 0(%r3) ; la %r2, 336(%r15) -; bras %r1, 12 ; data %callee_le + 0 ; lg %r5, 0(%r1) -; basr %r14, %r5 ; callee_pop_size 176 ; vl %v1, 160(%r15) ; vst %v1, 176(%r15) +; bras %r1, 12 ; data %callee_le + 0 ; lg %r4, 0(%r1) +; basr %r14, %r4 ; callee_pop_size 176 ; vl %v1, 160(%r15) ; vst %v1, 176(%r15) ; lgr %r2, %r8 -; vl %v17, 176(%r15) -; vst %v17, 0(%r2) +; vl %v16, 176(%r15) +; vst %v16, 0(%r2) ; ld %f8, 192(%r15) ; ld %f9, 200(%r15) ; ld %f10, 208(%r15) @@ -118,13 +118,13 @@ block0(v0: i128): ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r5, 0(%r1) -; basr %r14, %r5 +; lg %r4, 0(%r1) +; basr %r14, %r4 ; vl %v1, 0xa0(%r15) ; vst %v1, 0xb0(%r15) ; lgr %r2, %r8 -; vl %v17, 0xb0(%r15) -; vst %v17, 0(%r2) +; vl %v16, 0xb0(%r15) +; vst %v16, 0(%r2) ; ld %f8, 0xc0(%r15) ; ld %f9, 0xc8(%r15) ; ld %f10, 0xd0(%r15) @@ -156,14 +156,14 @@ block0(v0: i128): ; std %f14, 256(%r15) ; std %f15, 264(%r15) ; block0: -; lgr %r7, %r2 +; lgr %r6, %r2 ; vl %v1, 0(%r3) ; la %r3, 160(%r15) ; vst %v1, 0(%r3) ; la %r2, 176(%r15) -; bras %r1, 12 ; data %callee_be + 0 ; lg %r6, 0(%r1) -; basr %r14, %r6 ; vl %v1, 176(%r15) ; vst %v1, 192(%r15) -; lgr %r2, %r7 +; bras %r1, 12 ; data %callee_be + 0 ; lg %r4, 0(%r1) +; basr %r14, %r4 ; vl %v1, 176(%r15) ; vst %v1, 192(%r15) +; lgr %r2, %r6 ; vl %v16, 192(%r15) ; vst %v16, 0(%r2) ; ld %f8, 208(%r15) @@ -191,7 +191,7 @@ block0(v0: i128): ; std %f14, 0x100(%r15) ; std %f15, 0x108(%r15) ; block1: ; offset 0x2a -; lgr %r7, %r2 +; lgr %r6, %r2 ; vl %v1, 0(%r3) ; la %r3, 0xa0(%r15) ; 
vst %v1, 0(%r3) @@ -201,11 +201,11 @@ block0(v0: i128): ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r6, 0(%r1) -; basr %r14, %r6 +; lg %r4, 0(%r1) +; basr %r14, %r4 ; vl %v1, 0xb0(%r15) ; vst %v1, 0xc0(%r15) -; lgr %r2, %r7 +; lgr %r2, %r6 ; vl %v16, 0xc0(%r15) ; vst %v16, 0(%r2) ; ld %f8, 0xd0(%r15) @@ -229,30 +229,30 @@ block0(v0: i128): } ; VCode: -; stmg %r9, %r15, 248(%r15) +; stmg %r8, %r15, 240(%r15) ; aghi %r15, -192 ; block0: -; lgr %r9, %r2 +; lgr %r8, %r2 ; vl %v1, 0(%r3) ; aghi %r15, -176 ; la %r3, 160(%r15) ; vst %v1, 0(%r3) ; la %r2, 336(%r15) -; bras %r1, 12 ; data %callee_le + 0 ; lg %r7, 0(%r1) -; basr %r14, %r7 ; callee_pop_size 176 ; vl %v1, 160(%r15) ; vst %v1, 176(%r15) -; lgr %r2, %r9 -; vl %v17, 176(%r15) -; vst %v17, 0(%r2) +; bras %r1, 12 ; data %callee_le + 0 ; lg %r4, 0(%r1) +; basr %r14, %r4 ; callee_pop_size 176 ; vl %v1, 160(%r15) ; vst %v1, 176(%r15) +; lgr %r2, %r8 +; vl %v16, 176(%r15) +; vst %v16, 0(%r2) ; aghi %r15, 368 -; lmg %r9, %r14, 72(%r15) +; lmg %r8, %r14, 64(%r15) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; stmg %r9, %r15, 0xf8(%r15) +; stmg %r8, %r15, 0xf0(%r15) ; aghi %r15, -0xc0 ; block1: ; offset 0xa -; lgr %r9, %r2 +; lgr %r8, %r2 ; vl %v1, 0(%r3) ; aghi %r15, -0xb0 ; la %r3, 0xa0(%r15) @@ -263,14 +263,14 @@ block0(v0: i128): ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r7, 0(%r1) -; basr %r14, %r7 +; lg %r4, 0(%r1) +; basr %r14, %r4 ; vl %v1, 0xa0(%r15) ; vst %v1, 0xb0(%r15) -; lgr %r2, %r9 -; vl %v17, 0xb0(%r15) -; vst %v17, 0(%r2) +; lgr %r2, %r8 +; vl %v16, 0xb0(%r15) +; vst %v16, 0(%r2) ; aghi %r15, 0x170 -; lmg %r9, %r14, 0x48(%r15) +; lmg %r8, %r14, 0x40(%r15) ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/vec-abi.clif b/cranelift/filetests/filetests/isa/s390x/vec-abi.clif index 98c6521c6f7b..4333c8404a72 100644 --- a/cranelift/filetests/filetests/isa/s390x/vec-abi.clif +++ b/cranelift/filetests/filetests/isa/s390x/vec-abi.clif @@ -13,16 +13,16 @@ block0(v0: i64x2, v1: i32x4, v2: i16x8, v3: i8x16, v4: i64x2, v5: i32x4, v6: i16 ; stmg %r14, %r15, 112(%r15) ; aghi %r15, -224 ; block0: -; vl %v17, 384(%r15) -; vl %v19, 400(%r15) -; vl %v21, 416(%r15) -; vl %v23, 432(%r15) -; vst %v17, 160(%r15) -; vst %v19, 176(%r15) -; vst %v21, 192(%r15) -; vst %v23, 208(%r15) -; bras %r1, 12 ; data %callee_be + 0 ; lg %r4, 0(%r1) -; basr %r14, %r4 +; vl %v19, 384(%r15) +; vl %v18, 400(%r15) +; vl %v17, 416(%r15) +; vl %v16, 432(%r15) +; vst %v19, 160(%r15) +; vst %v18, 176(%r15) +; vst %v17, 192(%r15) +; vst %v16, 208(%r15) +; bras %r1, 12 ; data %callee_be + 0 ; lg %r2, 0(%r1) +; basr %r14, %r2 ; lmg %r14, %r15, 336(%r15) ; br %r14 ; @@ -31,21 +31,21 @@ block0(v0: i64x2, v1: i32x4, v2: i16x8, v3: i8x16, v4: i64x2, v5: i32x4, v6: i16 ; stmg %r14, %r15, 0x70(%r15) ; aghi %r15, -0xe0 ; block1: ; offset 0xa -; vl %v17, 0x180(%r15) -; vl %v19, 0x190(%r15) -; vl %v21, 0x1a0(%r15) -; vl %v23, 0x1b0(%r15) -; vst %v17, 0xa0(%r15) -; vst %v19, 0xb0(%r15) -; vst %v21, 0xc0(%r15) -; vst %v23, 0xd0(%r15) +; vl %v19, 0x180(%r15) +; vl %v18, 0x190(%r15) +; vl %v17, 0x1a0(%r15) +; vl %v16, 0x1b0(%r15) +; vst %v19, 0xa0(%r15) +; vst %v18, 0xb0(%r15) +; vst %v17, 0xc0(%r15) +; vst %v16, 0xd0(%r15) ; bras %r1, 0x46 ; .byte 0x00, 0x00 ; reloc_external Abs8 %callee_be 0 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r4, 0(%r1) -; basr %r14, %r4 +; lg %r2, 0(%r1) +; basr %r14, %r2 ; lmg %r14, %r15, 0x150(%r15) ; br %r14 @@ -69,10 +69,10 @@ block0(v0: i64x2, v1: i32x4, v2: i16x8, v3: i8x16, 
v4: i64x2, v5: i32x4, v6: i16 ; std %f14, 208(%r15) ; std %f15, 216(%r15) ; block0: -; vl %v17, 384(%r15) -; vl %v19, 400(%r15) -; vl %v21, 416(%r15) -; vl %v23, 432(%r15) +; vl %v19, 384(%r15) +; vl %v18, 400(%r15) +; vl %v17, 416(%r15) +; vl %v16, 432(%r15) ; aghi %r15, -224 ; vpdi %v24, %v24, %v24, 4 ; vpdi %v25, %v25, %v25, 4 ; verllg %v25, %v25, 32 @@ -82,16 +82,16 @@ block0(v0: i64x2, v1: i32x4, v2: i16x8, v3: i8x16, v4: i64x2, v5: i32x4, v6: i16 ; vpdi %v29, %v29, %v29, 4 ; verllg %v29, %v29, 32 ; vpdi %v30, %v30, %v30, 4 ; verllg %v30, %v30, 32 ; verllf %v30, %v30, 16 ; vpdi %v31, %v31, %v31, 4 ; verllg %v31, %v31, 32 ; verllf %v31, %v31, 16 ; verllh %v31, %v31, 8 -; vpdi %v20, %v17, %v17, 4 -; vpdi %v22, %v19, %v19, 4 ; verllg %v19, %v19, 32 -; vpdi %v1, %v21, %v21, 4 ; verllg %v21, %v21, 32 ; verllf %v21, %v21, 16 -; vpdi %v0, %v23, %v23, 4 ; verllg %v23, %v23, 32 ; verllf %v23, %v23, 16 ; verllh %v23, %v23, 8 -; vst %v20, 160(%r15) -; vst %v22, 176(%r15) +; vpdi %v3, %v19, %v19, 4 +; vpdi %v2, %v18, %v18, 4 ; verllg %v18, %v18, 32 +; vpdi %v1, %v17, %v17, 4 ; verllg %v17, %v17, 32 ; verllf %v17, %v17, 16 +; vpdi %v0, %v16, %v16, 4 ; verllg %v16, %v16, 32 ; verllf %v16, %v16, 16 ; verllh %v16, %v16, 8 +; vst %v3, 160(%r15) +; vst %v2, 176(%r15) ; vst %v1, 192(%r15) ; vst %v0, 208(%r15) -; bras %r1, 12 ; data %callee_le + 0 ; lg %r5, 0(%r1) -; basr %r14, %r5 ; callee_pop_size 224 ; vpdi %v24, %v24, %v24, 4 ; verllg %v24, %v24, 32 +; bras %r1, 12 ; data %callee_le + 0 ; lg %r2, 0(%r1) +; basr %r14, %r2 ; callee_pop_size 224 ; vpdi %v24, %v24, %v24, 4 ; verllg %v24, %v24, 32 ; ld %f8, 160(%r15) ; ld %f9, 168(%r15) ; ld %f10, 176(%r15) @@ -116,10 +116,10 @@ block0(v0: i64x2, v1: i32x4, v2: i16x8, v3: i8x16, v4: i64x2, v5: i32x4, v6: i16 ; std %f14, 0xd0(%r15) ; std %f15, 0xd8(%r15) ; block1: ; offset 0x2a -; vl %v17, 0x180(%r15) -; vl %v19, 0x190(%r15) -; vl %v21, 0x1a0(%r15) -; vl %v23, 0x1b0(%r15) +; vl %v19, 0x180(%r15) +; vl %v18, 0x190(%r15) +; vl %v17, 0x1a0(%r15) +; vl %v16, 0x1b0(%r15) ; aghi %r15, -0xe0 ; vpdi %v24, %v24, %v24, 4 ; vpdi %v25, %v25, %v25, 4 @@ -141,18 +141,18 @@ block0(v0: i64x2, v1: i32x4, v2: i16x8, v3: i8x16, v4: i64x2, v5: i32x4, v6: i16 ; verllg %v31, %v31, 0x20 ; verllf %v31, %v31, 0x10 ; verllh %v31, %v31, 8 -; vpdi %v20, %v17, %v17, 4 -; vpdi %v22, %v19, %v19, 4 -; verllg %v22, %v22, 0x20 -; vpdi %v1, %v21, %v21, 4 +; vpdi %v3, %v19, %v19, 4 +; vpdi %v2, %v18, %v18, 4 +; verllg %v2, %v2, 0x20 +; vpdi %v1, %v17, %v17, 4 ; verllg %v1, %v1, 0x20 ; verllf %v1, %v1, 0x10 -; vpdi %v0, %v23, %v23, 4 +; vpdi %v0, %v16, %v16, 4 ; verllg %v0, %v0, 0x20 ; verllf %v0, %v0, 0x10 ; verllh %v0, %v0, 8 -; vst %v20, 0xa0(%r15) -; vst %v22, 0xb0(%r15) +; vst %v3, 0xa0(%r15) +; vst %v2, 0xb0(%r15) ; vst %v1, 0xc0(%r15) ; vst %v0, 0xd0(%r15) ; bras %r1, 0x11e @@ -160,8 +160,8 @@ block0(v0: i64x2, v1: i32x4, v2: i16x8, v3: i8x16, v4: i64x2, v5: i32x4, v6: i16 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r5, 0(%r1) -; basr %r14, %r5 +; lg %r2, 0(%r1) +; basr %r14, %r2 ; vpdi %v24, %v24, %v24, 4 ; verllg %v24, %v24, 0x20 ; ld %f8, 0xa0(%r15) @@ -195,10 +195,10 @@ block0(v0: i64x2, v1: i32x4, v2: i16x8, v3: i8x16, v4: i64x2, v5: i32x4, v6: i16 ; std %f14, 272(%r15) ; std %f15, 280(%r15) ; block0: -; vl %v17, 448(%r15) -; vl %v19, 464(%r15) -; vl %v21, 480(%r15) -; vl %v23, 496(%r15) +; vl %v19, 448(%r15) +; vl %v18, 464(%r15) +; vl %v17, 480(%r15) +; vl %v16, 496(%r15) ; vpdi %v24, %v24, %v24, 4 ; vpdi %v25, %v25, %v25, 4 ; verllg %v25, %v25, 32 ; vpdi 
%v26, %v26, %v26, 4 ; verllg %v26, %v26, 32 ; verllf %v26, %v26, 16 @@ -207,16 +207,16 @@ block0(v0: i64x2, v1: i32x4, v2: i16x8, v3: i8x16, v4: i64x2, v5: i32x4, v6: i16 ; vpdi %v29, %v29, %v29, 4 ; verllg %v29, %v29, 32 ; vpdi %v30, %v30, %v30, 4 ; verllg %v30, %v30, 32 ; verllf %v30, %v30, 16 ; vpdi %v31, %v31, %v31, 4 ; verllg %v31, %v31, 32 ; verllf %v31, %v31, 16 ; verllh %v31, %v31, 8 -; vpdi %v18, %v17, %v17, 4 -; vpdi %v20, %v19, %v19, 4 ; verllg %v19, %v19, 32 -; vpdi %v22, %v21, %v21, 4 ; verllg %v21, %v21, 32 ; verllf %v21, %v21, 16 -; vpdi %v0, %v23, %v23, 4 ; verllg %v23, %v23, 32 ; verllf %v23, %v23, 16 ; verllh %v23, %v23, 8 -; vst %v18, 160(%r15) -; vst %v20, 176(%r15) -; vst %v22, 192(%r15) +; vpdi %v3, %v19, %v19, 4 +; vpdi %v2, %v18, %v18, 4 ; verllg %v18, %v18, 32 +; vpdi %v1, %v17, %v17, 4 ; verllg %v17, %v17, 32 ; verllf %v17, %v17, 16 +; vpdi %v0, %v16, %v16, 4 ; verllg %v16, %v16, 32 ; verllf %v16, %v16, 16 ; verllh %v16, %v16, 8 +; vst %v3, 160(%r15) +; vst %v2, 176(%r15) +; vst %v1, 192(%r15) ; vst %v0, 208(%r15) -; bras %r1, 12 ; data %callee_be + 0 ; lg %r6, 0(%r1) -; basr %r14, %r6 ; vpdi %v24, %v24, %v24, 4 ; verllg %v24, %v24, 32 +; bras %r1, 12 ; data %callee_be + 0 ; lg %r2, 0(%r1) +; basr %r14, %r2 ; vpdi %v24, %v24, %v24, 4 ; verllg %v24, %v24, 32 ; ld %f8, 224(%r15) ; ld %f9, 232(%r15) ; ld %f10, 240(%r15) @@ -242,10 +242,10 @@ block0(v0: i64x2, v1: i32x4, v2: i16x8, v3: i8x16, v4: i64x2, v5: i32x4, v6: i16 ; std %f14, 0x110(%r15) ; std %f15, 0x118(%r15) ; block1: ; offset 0x2a -; vl %v17, 0x1c0(%r15) -; vl %v19, 0x1d0(%r15) -; vl %v21, 0x1e0(%r15) -; vl %v23, 0x1f0(%r15) +; vl %v19, 0x1c0(%r15) +; vl %v18, 0x1d0(%r15) +; vl %v17, 0x1e0(%r15) +; vl %v16, 0x1f0(%r15) ; vpdi %v24, %v24, %v24, 4 ; vpdi %v25, %v25, %v25, 4 ; verllg %v25, %v25, 0x20 @@ -266,27 +266,27 @@ block0(v0: i64x2, v1: i32x4, v2: i16x8, v3: i8x16, v4: i64x2, v5: i32x4, v6: i16 ; verllg %v31, %v31, 0x20 ; verllf %v31, %v31, 0x10 ; verllh %v31, %v31, 8 -; vpdi %v18, %v17, %v17, 4 -; vpdi %v20, %v19, %v19, 4 -; verllg %v20, %v20, 0x20 -; vpdi %v22, %v21, %v21, 4 -; verllg %v22, %v22, 0x20 -; verllf %v22, %v22, 0x10 -; vpdi %v0, %v23, %v23, 4 +; vpdi %v3, %v19, %v19, 4 +; vpdi %v2, %v18, %v18, 4 +; verllg %v2, %v2, 0x20 +; vpdi %v1, %v17, %v17, 4 +; verllg %v1, %v1, 0x20 +; verllf %v1, %v1, 0x10 +; vpdi %v0, %v16, %v16, 4 ; verllg %v0, %v0, 0x20 ; verllf %v0, %v0, 0x10 ; verllh %v0, %v0, 8 -; vst %v18, 0xa0(%r15) -; vst %v20, 0xb0(%r15) -; vst %v22, 0xc0(%r15) +; vst %v3, 0xa0(%r15) +; vst %v2, 0xb0(%r15) +; vst %v1, 0xc0(%r15) ; vst %v0, 0xd0(%r15) ; bras %r1, 0x11a ; .byte 0x00, 0x00 ; reloc_external Abs8 %callee_be 0 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r6, 0(%r1) -; basr %r14, %r6 +; lg %r2, 0(%r1) +; basr %r14, %r2 ; vpdi %v24, %v24, %v24, 4 ; verllg %v24, %v24, 0x20 ; ld %f8, 0xe0(%r15) @@ -313,17 +313,17 @@ block0(v0: i64x2, v1: i32x4, v2: i16x8, v3: i8x16, v4: i64x2, v5: i32x4, v6: i16 ; stmg %r14, %r15, 336(%r15) ; aghi %r15, -160 ; block0: -; vl %v17, 320(%r15) -; vl %v19, 336(%r15) -; vl %v21, 352(%r15) -; vl %v23, 368(%r15) +; vl %v19, 320(%r15) +; vl %v18, 336(%r15) +; vl %v17, 352(%r15) +; vl %v16, 368(%r15) ; aghi %r15, -224 -; vst %v17, 160(%r15) -; vst %v19, 176(%r15) -; vst %v21, 192(%r15) -; vst %v23, 208(%r15) -; bras %r1, 12 ; data %callee_le + 0 ; lg %r7, 0(%r1) -; basr %r14, %r7 ; callee_pop_size 224 +; vst %v19, 160(%r15) +; vst %v18, 176(%r15) +; vst %v17, 192(%r15) +; vst %v16, 208(%r15) +; bras %r1, 12 ; data %callee_le + 0 ; lg %r2, 
0(%r1) +; basr %r14, %r2 ; callee_pop_size 224 ; aghi %r15, 384 ; lmg %r14, %r14, 112(%r15) ; br %r14 @@ -333,22 +333,22 @@ block0(v0: i64x2, v1: i32x4, v2: i16x8, v3: i8x16, v4: i64x2, v5: i32x4, v6: i16 ; stmg %r14, %r15, 0x150(%r15) ; aghi %r15, -0xa0 ; block1: ; offset 0xa -; vl %v17, 0x140(%r15) -; vl %v19, 0x150(%r15) -; vl %v21, 0x160(%r15) -; vl %v23, 0x170(%r15) +; vl %v19, 0x140(%r15) +; vl %v18, 0x150(%r15) +; vl %v17, 0x160(%r15) +; vl %v16, 0x170(%r15) ; aghi %r15, -0xe0 -; vst %v17, 0xa0(%r15) -; vst %v19, 0xb0(%r15) -; vst %v21, 0xc0(%r15) -; vst %v23, 0xd0(%r15) +; vst %v19, 0xa0(%r15) +; vst %v18, 0xb0(%r15) +; vst %v17, 0xc0(%r15) +; vst %v16, 0xd0(%r15) ; bras %r1, 0x4a ; .byte 0x00, 0x00 ; reloc_external Abs8 %callee_le 0 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 -; lg %r7, 0(%r1) -; basr %r14, %r7 +; lg %r2, 0(%r1) +; basr %r14, %r2 ; aghi %r15, 0x180 ; lmg %r14, %r14, 0x70(%r15) ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/vec-arithmetic.clif b/cranelift/filetests/filetests/isa/s390x/vec-arithmetic.clif index ec6bbd530641..e0acb1def1c1 100644 --- a/cranelift/filetests/filetests/isa/s390x/vec-arithmetic.clif +++ b/cranelift/filetests/filetests/isa/s390x/vec-arithmetic.clif @@ -668,10 +668,10 @@ block0(v0: i32x4, v1: i32x4): ; vuphf %v3, %v24 ; vuphf %v5, %v25 ; vag %v7, %v3, %v5 -; vuplf %v17, %v24 -; vuplf %v19, %v25 -; vag %v21, %v17, %v19 -; vpksg %v24, %v7, %v21 +; vuplf %v16, %v24 +; vuplf %v17, %v25 +; vag %v16, %v16, %v17 +; vpksg %v24, %v7, %v16 ; br %r14 ; ; Disassembled: @@ -679,10 +679,10 @@ block0(v0: i32x4, v1: i32x4): ; vuphf %v3, %v24 ; vuphf %v5, %v25 ; vag %v7, %v3, %v5 -; vuplf %v17, %v24 -; vuplf %v19, %v25 -; vag %v21, %v17, %v19 -; vpksg %v24, %v7, %v21 +; vuplf %v16, %v24 +; vuplf %v17, %v25 +; vag %v16, %v16, %v17 +; vpksg %v24, %v7, %v16 ; br %r14 function %sadd_sat16x8(i16x8, i16x8) -> i16x8 { @@ -696,10 +696,10 @@ block0(v0: i16x8, v1: i16x8): ; vuphh %v3, %v24 ; vuphh %v5, %v25 ; vaf %v7, %v3, %v5 -; vuplh %v17, %v24 -; vuplh %v19, %v25 -; vaf %v21, %v17, %v19 -; vpksf %v24, %v7, %v21 +; vuplh %v16, %v24 +; vuplh %v17, %v25 +; vaf %v16, %v16, %v17 +; vpksf %v24, %v7, %v16 ; br %r14 ; ; Disassembled: @@ -707,10 +707,10 @@ block0(v0: i16x8, v1: i16x8): ; vuphh %v3, %v24 ; vuphh %v5, %v25 ; vaf %v7, %v3, %v5 -; vuplhw %v17, %v24 -; vuplhw %v19, %v25 -; vaf %v21, %v17, %v19 -; vpksf %v24, %v7, %v21 +; vuplhw %v16, %v24 +; vuplhw %v17, %v25 +; vaf %v16, %v16, %v17 +; vpksf %v24, %v7, %v16 ; br %r14 function %sadd_sat8x16(i8x16, i8x16) -> i8x16 { @@ -724,10 +724,10 @@ block0(v0: i8x16, v1: i8x16): ; vuphb %v3, %v24 ; vuphb %v5, %v25 ; vah %v7, %v3, %v5 -; vuplb %v17, %v24 -; vuplb %v19, %v25 -; vah %v21, %v17, %v19 -; vpksh %v24, %v7, %v21 +; vuplb %v16, %v24 +; vuplb %v17, %v25 +; vah %v16, %v16, %v17 +; vpksh %v24, %v7, %v16 ; br %r14 ; ; Disassembled: @@ -735,10 +735,10 @@ block0(v0: i8x16, v1: i8x16): ; vuphb %v3, %v24 ; vuphb %v5, %v25 ; vah %v7, %v3, %v5 -; vuplb %v17, %v24 -; vuplb %v19, %v25 -; vah %v21, %v17, %v19 -; vpksh %v24, %v7, %v21 +; vuplb %v16, %v24 +; vuplb %v17, %v25 +; vah %v16, %v16, %v17 +; vpksh %v24, %v7, %v16 ; br %r14 function %usub_sat64x2(i64x2, i64x2) -> i64x2 { @@ -832,10 +832,10 @@ block0(v0: i32x4, v1: i32x4): ; vuphf %v3, %v24 ; vuphf %v5, %v25 ; vsg %v7, %v3, %v5 -; vuplf %v17, %v24 -; vuplf %v19, %v25 -; vsg %v21, %v17, %v19 -; vpksg %v24, %v7, %v21 +; vuplf %v16, %v24 +; vuplf %v17, %v25 +; vsg %v16, %v16, %v17 +; vpksg %v24, %v7, %v16 ; br %r14 ; ; Disassembled: @@ -843,10 
+843,10 @@ block0(v0: i32x4, v1: i32x4): ; vuphf %v3, %v24 ; vuphf %v5, %v25 ; vsg %v7, %v3, %v5 -; vuplf %v17, %v24 -; vuplf %v19, %v25 -; vsg %v21, %v17, %v19 -; vpksg %v24, %v7, %v21 +; vuplf %v16, %v24 +; vuplf %v17, %v25 +; vsg %v16, %v16, %v17 +; vpksg %v24, %v7, %v16 ; br %r14 function %ssub_sat16x8(i16x8, i16x8) -> i16x8 { @@ -860,10 +860,10 @@ block0(v0: i16x8, v1: i16x8): ; vuphh %v3, %v24 ; vuphh %v5, %v25 ; vsf %v7, %v3, %v5 -; vuplh %v17, %v24 -; vuplh %v19, %v25 -; vsf %v21, %v17, %v19 -; vpksf %v24, %v7, %v21 +; vuplh %v16, %v24 +; vuplh %v17, %v25 +; vsf %v16, %v16, %v17 +; vpksf %v24, %v7, %v16 ; br %r14 ; ; Disassembled: @@ -871,10 +871,10 @@ block0(v0: i16x8, v1: i16x8): ; vuphh %v3, %v24 ; vuphh %v5, %v25 ; vsf %v7, %v3, %v5 -; vuplhw %v17, %v24 -; vuplhw %v19, %v25 -; vsf %v21, %v17, %v19 -; vpksf %v24, %v7, %v21 +; vuplhw %v16, %v24 +; vuplhw %v17, %v25 +; vsf %v16, %v16, %v17 +; vpksf %v24, %v7, %v16 ; br %r14 function %ssub_sat8x16(i8x16, i8x16) -> i8x16 { @@ -888,10 +888,10 @@ block0(v0: i8x16, v1: i8x16): ; vuphb %v3, %v24 ; vuphb %v5, %v25 ; vsh %v7, %v3, %v5 -; vuplb %v17, %v24 -; vuplb %v19, %v25 -; vsh %v21, %v17, %v19 -; vpksh %v24, %v7, %v21 +; vuplb %v16, %v24 +; vuplb %v17, %v25 +; vsh %v16, %v16, %v17 +; vpksh %v24, %v7, %v16 ; br %r14 ; ; Disassembled: @@ -899,10 +899,10 @@ block0(v0: i8x16, v1: i8x16): ; vuphb %v3, %v24 ; vuphb %v5, %v25 ; vsh %v7, %v3, %v5 -; vuplb %v17, %v24 -; vuplb %v19, %v25 -; vsh %v21, %v17, %v19 -; vpksh %v24, %v7, %v21 +; vuplb %v16, %v24 +; vuplb %v17, %v25 +; vsh %v16, %v16, %v17 +; vpksh %v24, %v7, %v16 ; br %r14 function %iadd_pairwise_i32x4_be(i32x4, i32x4) -> i32x4 { @@ -916,9 +916,9 @@ block0(v0: i32x4, v1: i32x4): ; vrepib %v3, 32 ; vsrlb %v5, %v24, %v3 ; vaf %v7, %v24, %v5 -; vsrlb %v17, %v25, %v3 -; vaf %v19, %v25, %v17 -; vpkg %v24, %v7, %v19 +; vsrlb %v16, %v25, %v3 +; vaf %v16, %v25, %v16 +; vpkg %v24, %v7, %v16 ; br %r14 ; ; Disassembled: @@ -926,9 +926,9 @@ block0(v0: i32x4, v1: i32x4): ; vrepib %v3, 0x20 ; vsrlb %v5, %v24, %v3 ; vaf %v7, %v24, %v5 -; vsrlb %v17, %v25, %v3 -; vaf %v19, %v25, %v17 -; vpkg %v24, %v7, %v19 +; vsrlb %v16, %v25, %v3 +; vaf %v16, %v25, %v16 +; vpkg %v24, %v7, %v16 ; br %r14 function %iadd_pairwise_i16x8_be(i16x8, i16x8) -> i16x8 { @@ -942,9 +942,9 @@ block0(v0: i16x8, v1: i16x8): ; vrepib %v3, 16 ; vsrlb %v5, %v24, %v3 ; vah %v7, %v24, %v5 -; vsrlb %v17, %v25, %v3 -; vah %v19, %v25, %v17 -; vpkf %v24, %v7, %v19 +; vsrlb %v16, %v25, %v3 +; vah %v16, %v25, %v16 +; vpkf %v24, %v7, %v16 ; br %r14 ; ; Disassembled: @@ -952,9 +952,9 @@ block0(v0: i16x8, v1: i16x8): ; vrepib %v3, 0x10 ; vsrlb %v5, %v24, %v3 ; vah %v7, %v24, %v5 -; vsrlb %v17, %v25, %v3 -; vah %v19, %v25, %v17 -; vpkf %v24, %v7, %v19 +; vsrlb %v16, %v25, %v3 +; vah %v16, %v25, %v16 +; vpkf %v24, %v7, %v16 ; br %r14 function %iadd_pairwise_i8x16_be(i8x16, i8x16) -> i8x16 { @@ -968,9 +968,9 @@ block0(v0: i8x16, v1: i8x16): ; vrepib %v3, 8 ; vsrlb %v5, %v24, %v3 ; vab %v7, %v24, %v5 -; vsrlb %v17, %v25, %v3 -; vab %v19, %v25, %v17 -; vpkh %v24, %v7, %v19 +; vsrlb %v16, %v25, %v3 +; vab %v16, %v25, %v16 +; vpkh %v24, %v7, %v16 ; br %r14 ; ; Disassembled: @@ -978,9 +978,9 @@ block0(v0: i8x16, v1: i8x16): ; vrepib %v3, 8 ; vsrlb %v5, %v24, %v3 ; vab %v7, %v24, %v5 -; vsrlb %v17, %v25, %v3 -; vab %v19, %v25, %v17 -; vpkh %v24, %v7, %v19 +; vsrlb %v16, %v25, %v3 +; vab %v16, %v25, %v16 +; vpkh %v24, %v7, %v16 ; br %r14 function %iadd_pairwise_i32x4_le(i32x4, i32x4) -> i32x4 tail { @@ -994,9 +994,9 @@ block0(v0: i32x4, v1: i32x4): ; 
vrepib %v3, 32 ; vsrlb %v5, %v24, %v3 ; vaf %v7, %v24, %v5 -; vsrlb %v17, %v25, %v3 -; vaf %v19, %v25, %v17 -; vpkg %v24, %v19, %v7 +; vsrlb %v16, %v25, %v3 +; vaf %v16, %v25, %v16 +; vpkg %v24, %v16, %v7 ; br %r14 ; ; Disassembled: @@ -1004,9 +1004,9 @@ block0(v0: i32x4, v1: i32x4): ; vrepib %v3, 0x20 ; vsrlb %v5, %v24, %v3 ; vaf %v7, %v24, %v5 -; vsrlb %v17, %v25, %v3 -; vaf %v19, %v25, %v17 -; vpkg %v24, %v19, %v7 +; vsrlb %v16, %v25, %v3 +; vaf %v16, %v25, %v16 +; vpkg %v24, %v16, %v7 ; br %r14 function %iadd_pairwise_i16x8_le(i16x8, i16x8) -> i16x8 tail { @@ -1020,9 +1020,9 @@ block0(v0: i16x8, v1: i16x8): ; vrepib %v3, 16 ; vsrlb %v5, %v24, %v3 ; vah %v7, %v24, %v5 -; vsrlb %v17, %v25, %v3 -; vah %v19, %v25, %v17 -; vpkf %v24, %v19, %v7 +; vsrlb %v16, %v25, %v3 +; vah %v16, %v25, %v16 +; vpkf %v24, %v16, %v7 ; br %r14 ; ; Disassembled: @@ -1030,9 +1030,9 @@ block0(v0: i16x8, v1: i16x8): ; vrepib %v3, 0x10 ; vsrlb %v5, %v24, %v3 ; vah %v7, %v24, %v5 -; vsrlb %v17, %v25, %v3 -; vah %v19, %v25, %v17 -; vpkf %v24, %v19, %v7 +; vsrlb %v16, %v25, %v3 +; vah %v16, %v25, %v16 +; vpkf %v24, %v16, %v7 ; br %r14 function %iadd_pairwise_i8x16_le(i8x16, i8x16) -> i8x16 tail { @@ -1046,9 +1046,9 @@ block0(v0: i8x16, v1: i8x16): ; vrepib %v3, 8 ; vsrlb %v5, %v24, %v3 ; vab %v7, %v24, %v5 -; vsrlb %v17, %v25, %v3 -; vab %v19, %v25, %v17 -; vpkh %v24, %v19, %v7 +; vsrlb %v16, %v25, %v3 +; vab %v16, %v25, %v16 +; vpkh %v24, %v16, %v7 ; br %r14 ; ; Disassembled: @@ -1056,9 +1056,9 @@ block0(v0: i8x16, v1: i8x16): ; vrepib %v3, 8 ; vsrlb %v5, %v24, %v3 ; vab %v7, %v24, %v5 -; vsrlb %v17, %v25, %v3 -; vab %v19, %v25, %v17 -; vpkh %v24, %v19, %v7 +; vsrlb %v16, %v25, %v3 +; vab %v16, %v25, %v16 +; vpkh %v24, %v16, %v7 ; br %r14 function %imul_i64x2(i64x2, i64x2) -> i64x2 { @@ -1069,24 +1069,24 @@ block0(v0: i64x2, v1: i64x2): ; VCode: ; block0: -; vlgvg %r5, %v24, 0 -; vlgvg %r3, %v25, 0 -; msgr %r5, %r3 +; vlgvg %r3, %v24, 0 +; vlgvg %r5, %v25, 0 +; msgrkc %r2, %r3, %r5 ; vlgvg %r3, %v24, 1 -; vlgvg %r2, %v25, 1 -; msgr %r3, %r2 -; vlvgp %v24, %r5, %r3 +; vlgvg %r4, %v25, 1 +; msgr %r3, %r4 +; vlvgp %v24, %r2, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvg %r5, %v24, 0 -; vlgvg %r3, %v25, 0 -; msgr %r5, %r3 +; vlgvg %r3, %v24, 0 +; vlgvg %r5, %v25, 0 +; msgrkc %r2, %r3, %r5 ; vlgvg %r3, %v24, 1 -; vlgvg %r2, %v25, 1 -; msgr %r3, %r2 -; vlvgp %v24, %r5, %r3 +; vlgvg %r4, %v25, 1 +; msgr %r3, %r4 +; vlvgp %v24, %r2, %r3 ; br %r14 function %imul_i32x4(i32x4, i32x4) -> i32x4 { @@ -1146,25 +1146,27 @@ block0(v0: i64x2, v1: i64x2): ; VCode: ; block0: ; vlgvg %r3, %v24, 0 -; vlgvg %r4, %v25, 0 -; mlgr %r2, %r4 +; vlgvg %r5, %v25, 0 +; mlgr %r2, %r5 ; lgr %r4, %r2 ; vlgvg %r3, %v24, 1 ; vlgvg %r2, %v25, 1 ; mlgr %r2, %r2 -; vlvgp %v24, %r4, %r2 +; lgr %r3, %r4 +; vlvgp %v24, %r3, %r2 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vlgvg %r3, %v24, 0 -; vlgvg %r4, %v25, 0 -; mlgr %r2, %r4 +; vlgvg %r5, %v25, 0 +; mlgr %r2, %r5 ; lgr %r4, %r2 ; vlgvg %r3, %v24, 1 ; vlgvg %r2, %v25, 1 ; mlgr %r2, %r2 -; vlvgp %v24, %r4, %r2 +; lgr %r3, %r4 +; vlvgp %v24, %r3, %r2 ; br %r14 function %umulhi_i32x4(i32x4, i32x4) -> i32x4 { @@ -1223,25 +1225,25 @@ block0(v0: i64x2, v1: i64x2): ; VCode: ; block0: -; vlgvg %r5, %v24, 0 -; vlgvg %r3, %v25, 0 -; mgrk %r2, %r5, %r3 +; vlgvg %r3, %v24, 0 +; vlgvg %r5, %v25, 0 +; mgrk %r2, %r3, %r5 ; lgr %r4, %r2 -; vlgvg %r2, %v24, 1 -; vlgvg %r5, %v25, 1 -; mgrk %r2, %r2, %r5 +; vlgvg %r3, %v24, 1 +; vlgvg %r2, %v25, 1 +; mgrk %r2, %r3, %r2 ; vlvgp %v24, %r4, %r2 ; br 
%r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvg %r5, %v24, 0 -; vlgvg %r3, %v25, 0 -; mgrk %r2, %r5, %r3 +; vlgvg %r3, %v24, 0 +; vlgvg %r5, %v25, 0 +; mgrk %r2, %r3, %r5 ; lgr %r4, %r2 -; vlgvg %r2, %v24, 1 -; vlgvg %r5, %v25, 1 -; mgrk %r2, %r2, %r5 +; vlgvg %r3, %v24, 1 +; vlgvg %r2, %v25, 1 +; mgrk %r2, %r3, %r2 ; vlvgp %v24, %r4, %r2 ; br %r14 @@ -1330,16 +1332,16 @@ block0(v0: i16x8, v1: i16x8): ; vuphh %v3, %v24 ; vuphh %v5, %v25 ; vmlf %v7, %v3, %v5 -; vgmf %v17, 17, 17 -; vaf %v19, %v7, %v17 -; vesraf %v21, %v19, 15 -; vuplh %v23, %v24 -; vuplh %v25, %v25 -; vmlf %v27, %v23, %v25 -; vgmf %v29, 17, 17 -; vaf %v31, %v27, %v29 -; vesraf %v1, %v31, 15 -; vpksf %v24, %v21, %v1 +; vgmf %v16, 17, 17 +; vaf %v16, %v7, %v16 +; vesraf %v16, %v16, 15 +; vuplh %v17, %v24 +; vuplh %v18, %v25 +; vmlf %v19, %v17, %v18 +; vgmf %v21, 17, 17 +; vaf %v23, %v19, %v21 +; vesraf %v25, %v23, 15 +; vpksf %v24, %v16, %v25 ; br %r14 ; ; Disassembled: @@ -1347,16 +1349,16 @@ block0(v0: i16x8, v1: i16x8): ; vuphh %v3, %v24 ; vuphh %v5, %v25 ; vmlf %v7, %v3, %v5 -; vgmf %v17, 0x11, 0x11 -; vaf %v19, %v7, %v17 -; vesraf %v21, %v19, 0xf -; vuplhw %v23, %v24 -; vuplhw %v25, %v25 -; vmlf %v27, %v23, %v25 -; vgmf %v29, 0x11, 0x11 -; vaf %v31, %v27, %v29 -; vesraf %v1, %v31, 0xf -; vpksf %v24, %v21, %v1 +; vgmf %v16, 0x11, 0x11 +; vaf %v16, %v7, %v16 +; vesraf %v16, %v16, 0xf +; vuplhw %v17, %v24 +; vuplhw %v18, %v25 +; vmlf %v19, %v17, %v18 +; vgmf %v21, 0x11, 0x11 +; vaf %v23, %v19, %v21 +; vesraf %v25, %v23, 0xf +; vpksf %v24, %v16, %v25 ; br %r14 function %sqmul_round_sat(i32x4, i32x4) -> i32x4 { @@ -1369,57 +1371,57 @@ block0(v0: i32x4, v1: i32x4): ; block0: ; vuphf %v3, %v24 ; vuphf %v5, %v25 -; lgdr %r5, %f3 -; lgdr %r3, %f5 -; msgr %r5, %r3 -; vlgvg %r3, %v3, 1 -; vlgvg %r2, %v5, 1 -; msgr %r3, %r2 -; vlvgp %v27, %r5, %r3 -; vgmg %v29, 33, 33 -; vag %v31, %v27, %v29 -; vesrag %v1, %v31, 31 -; vuplf %v3, %v24 -; vuplf %v5, %v25 -; lgdr %r5, %f3 +; lgdr %r2, %f3 ; lgdr %r3, %f5 -; msgr %r5, %r3 +; msgr %r2, %r3 ; vlgvg %r3, %v3, 1 -; vlgvg %r2, %v5, 1 -; msgr %r3, %r2 -; vlvgp %v27, %r5, %r3 -; vgmg %v29, 33, 33 -; vag %v31, %v27, %v29 -; vesrag %v2, %v31, 31 -; vpksg %v24, %v1, %v2 +; vlgvg %r4, %v5, 1 +; msgr %r3, %r4 +; vlvgp %v19, %r2, %r3 +; vgmg %v21, 33, 33 +; vag %v23, %v19, %v21 +; vesrag %v26, %v23, 31 +; vuplf %v27, %v24 +; vuplf %v29, %v25 +; vlgvg %r2, %v27, 0 +; vlgvg %r3, %v29, 0 +; msgr %r2, %r3 +; vlgvg %r3, %v27, 1 +; vlgvg %r4, %v29, 1 +; msgr %r3, %r4 +; vlvgp %v0, %r2, %r3 +; vgmg %v1, 33, 33 +; vag %v0, %v0, %v1 +; vesrag %v0, %v0, 31 +; vpksg %v24, %v26, %v0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vuphf %v3, %v24 ; vuphf %v5, %v25 -; lgdr %r5, %f3 -; lgdr %r3, %f5 -; msgr %r5, %r3 -; vlgvg %r3, %v3, 1 -; vlgvg %r2, %v5, 1 -; msgr %r3, %r2 -; vlvgp %v27, %r5, %r3 -; vgmg %v29, 0x21, 0x21 -; vag %v31, %v27, %v29 -; vesrag %v1, %v31, 0x1f -; vuplf %v3, %v24 -; vuplf %v5, %v25 -; lgdr %r5, %f3 +; lgdr %r2, %f3 ; lgdr %r3, %f5 -; msgr %r5, %r3 +; msgr %r2, %r3 ; vlgvg %r3, %v3, 1 -; vlgvg %r2, %v5, 1 -; msgr %r3, %r2 -; vlvgp %v27, %r5, %r3 -; vgmg %v29, 0x21, 0x21 -; vag %v31, %v27, %v29 -; vesrag %v2, %v31, 0x1f -; vpksg %v24, %v1, %v2 +; vlgvg %r4, %v5, 1 +; msgr %r3, %r4 +; vlvgp %v19, %r2, %r3 +; vgmg %v21, 0x21, 0x21 +; vag %v23, %v19, %v21 +; vesrag %v26, %v23, 0x1f +; vuplf %v27, %v24 +; vuplf %v29, %v25 +; vlgvg %r2, %v27, 0 +; vlgvg %r3, %v29, 0 +; msgr %r2, %r3 +; vlgvg %r3, %v27, 1 +; vlgvg %r4, %v29, 1 +; msgr %r3, %r4 +; vlvgp %v0, %r2, %r3 +; vgmg %v1, 
0x21, 0x21 +; vag %v0, %v0, %v1 +; vesrag %v0, %v0, 0x1f +; vpksg %v24, %v26, %v0 ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/vec-fp.clif b/cranelift/filetests/filetests/isa/s390x/vec-fp.clif index c1b2f7ce4edb..4257a0bbb3c3 100644 --- a/cranelift/filetests/filetests/isa/s390x/vec-fp.clif +++ b/cranelift/filetests/filetests/isa/s390x/vec-fp.clif @@ -777,10 +777,10 @@ block0(v0: i32x4): ; vcdlgb %v4, %v2, 0, 3 ; vledb %v6, %v4, 0, 4 ; vupllf %v16, %v24 -; vcdlgb %v18, %v16, 0, 3 -; vledb %v20, %v18, 0, 4 -; larl %r1, [const(0)] ; vl %v22, 0(%r1) -; vperm %v24, %v6, %v20, %v22 +; vcdlgb %v16, %v16, 0, 3 +; vledb %v16, %v16, 0, 4 +; larl %r1, [const(0)] ; vl %v17, 0(%r1) +; vperm %v24, %v6, %v16, %v17 ; br %r14 ; ; Disassembled: @@ -789,11 +789,11 @@ block0(v0: i32x4): ; vcdlgb %v4, %v2, 0, 3 ; vledb %v6, %v4, 0, 4 ; vupllf %v16, %v24 -; vcdlgb %v18, %v16, 0, 3 -; vledb %v20, %v18, 0, 4 +; vcdlgb %v16, %v16, 0, 3 +; vledb %v16, %v16, 0, 4 ; larl %r1, 0x40 -; vl %v22, 0(%r1) -; vperm %v24, %v6, %v20, %v22 +; vl %v17, 0(%r1) +; vperm %v24, %v6, %v16, %v17 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -820,10 +820,10 @@ block0(v0: i32x4): ; vcdgb %v4, %v2, 0, 3 ; vledb %v6, %v4, 0, 4 ; vuplf %v16, %v24 -; vcdgb %v18, %v16, 0, 3 -; vledb %v20, %v18, 0, 4 -; larl %r1, [const(0)] ; vl %v22, 0(%r1) -; vperm %v24, %v6, %v20, %v22 +; vcdgb %v16, %v16, 0, 3 +; vledb %v16, %v16, 0, 4 +; larl %r1, [const(0)] ; vl %v17, 0(%r1) +; vperm %v24, %v6, %v16, %v17 ; br %r14 ; ; Disassembled: @@ -832,11 +832,11 @@ block0(v0: i32x4): ; vcdgb %v4, %v2, 0, 3 ; vledb %v6, %v4, 0, 4 ; vuplf %v16, %v24 -; vcdgb %v18, %v16, 0, 3 -; vledb %v20, %v18, 0, 4 +; vcdgb %v16, %v16, 0, 3 +; vledb %v16, %v16, 0, 4 ; larl %r1, 0x40 -; vl %v22, 0(%r1) -; vperm %v24, %v6, %v20, %v22 +; vl %v17, 0(%r1) +; vperm %v24, %v6, %v16, %v17 ; br %r14 ; .byte 0x00, 0x00 ; .byte 0x00, 0x00 @@ -933,9 +933,9 @@ block0(v0: f32x4): ; vldeb %v4, %v2 ; vclgdb %v6, %v4, 0, 5 ; vmrlf %v16, %v24, %v24 -; vldeb %v18, %v16 -; vclgdb %v20, %v18, 0, 5 -; vpklsg %v24, %v6, %v20 +; vldeb %v16, %v16 +; vclgdb %v16, %v16, 0, 5 +; vpklsg %v24, %v6, %v16 ; br %r14 ; ; Disassembled: @@ -944,9 +944,9 @@ block0(v0: f32x4): ; vldeb %v4, %v2 ; vclgdb %v6, %v4, 0, 5 ; vmrlf %v16, %v24, %v24 -; vldeb %v18, %v16 -; vclgdb %v20, %v18, 0, 5 -; vpklsg %v24, %v6, %v20 +; vldeb %v16, %v16 +; vclgdb %v16, %v16, 0, 5 +; vpklsg %v24, %v6, %v16 ; br %r14 function %fcvt_to_sint_sat_f32x4_i32x4(f32x4) -> i32x4 { @@ -961,12 +961,12 @@ block0(v0: f32x4): ; vldeb %v4, %v2 ; vcgdb %v6, %v4, 0, 5 ; vmrlf %v16, %v24, %v24 -; vldeb %v18, %v16 -; vcgdb %v20, %v18, 0, 5 -; vpksg %v22, %v6, %v20 -; vgbm %v25, 0 -; vfcesb %v26, %v24, %v24 -; vsel %v24, %v22, %v25, %v26 +; vldeb %v16, %v16 +; vcgdb %v16, %v16, 0, 5 +; vpksg %v16, %v6, %v16 +; vgbm %v17, 0 +; vfcesb %v18, %v24, %v24 +; vsel %v24, %v16, %v17, %v18 ; br %r14 ; ; Disassembled: @@ -975,12 +975,12 @@ block0(v0: f32x4): ; vldeb %v4, %v2 ; vcgdb %v6, %v4, 0, 5 ; vmrlf %v16, %v24, %v24 -; vldeb %v18, %v16 -; vcgdb %v20, %v18, 0, 5 -; vpksg %v22, %v6, %v20 -; vzero %v25 -; vfcesb %v26, %v24, %v24 -; vsel %v24, %v22, %v25, %v26 +; vldeb %v16, %v16 +; vcgdb %v16, %v16, 0, 5 +; vpksg %v16, %v6, %v16 +; vzero %v17 +; vfcesb %v18, %v24, %v24 +; vsel %v24, %v16, %v17, %v18 ; br %r14 function %fcvt_to_uint_sat_f64x2_i64x2(f64x2) -> i64x2 { diff --git a/cranelift/filetests/filetests/isa/s390x/vec-lane-le-lane.clif b/cranelift/filetests/filetests/isa/s390x/vec-lane-le-lane.clif index e6d3ca13a837..442afa1cf8cd 100644 
--- a/cranelift/filetests/filetests/isa/s390x/vec-lane-le-lane.clif +++ b/cranelift/filetests/filetests/isa/s390x/vec-lane-le-lane.clif @@ -178,14 +178,14 @@ block0(v0: i64x2, v1: i64): ; VCode: ; block0: -; lrvg %r5, 0(%r2) -; vlvgg %v24, %r5, 1 +; lrvg %r3, 0(%r2) +; vlvgg %v24, %r3, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r5, 0(%r2) ; trap: heap_oob -; vlvgg %v24, %r5, 1 +; lrvg %r3, 0(%r2) ; trap: heap_oob +; vlvgg %v24, %r3, 1 ; br %r14 function %insertlane_i64x2_mem_little_1(i64x2, i64) -> i64x2 tail { @@ -197,14 +197,14 @@ block0(v0: i64x2, v1: i64): ; VCode: ; block0: -; lrvg %r5, 0(%r2) -; vlvgg %v24, %r5, 0 +; lrvg %r3, 0(%r2) +; vlvgg %v24, %r3, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r5, 0(%r2) ; trap: heap_oob -; vlvgg %v24, %r5, 0 +; lrvg %r3, 0(%r2) ; trap: heap_oob +; vlvgg %v24, %r3, 0 ; br %r14 function %insertlane_i32x4_0(i32x4, i32) -> i32x4 tail { @@ -396,14 +396,14 @@ block0(v0: i32x4, v1: i64): ; VCode: ; block0: -; lrv %r5, 0(%r2) -; vlvgf %v24, %r5, 3 +; lrv %r3, 0(%r2) +; vlvgf %v24, %r3, 3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrv %r5, 0(%r2) ; trap: heap_oob -; vlvgf %v24, %r5, 3 +; lrv %r3, 0(%r2) ; trap: heap_oob +; vlvgf %v24, %r3, 3 ; br %r14 function %insertlane_i32x4_mem_little_3(i32x4, i64) -> i32x4 tail { @@ -415,14 +415,14 @@ block0(v0: i32x4, v1: i64): ; VCode: ; block0: -; lrv %r5, 0(%r2) -; vlvgf %v24, %r5, 0 +; lrv %r3, 0(%r2) +; vlvgf %v24, %r3, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrv %r5, 0(%r2) ; trap: heap_oob -; vlvgf %v24, %r5, 0 +; lrv %r3, 0(%r2) ; trap: heap_oob +; vlvgf %v24, %r3, 0 ; br %r14 function %insertlane_i16x8_0(i16x8, i16) -> i16x8 tail { @@ -614,14 +614,14 @@ block0(v0: i16x8, v1: i64): ; VCode: ; block0: -; lrvh %r5, 0(%r2) -; vlvgh %v24, %r5, 7 +; lrvh %r3, 0(%r2) +; vlvgh %v24, %r3, 7 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvh %r5, 0(%r2) ; trap: heap_oob -; vlvgh %v24, %r5, 7 +; lrvh %r3, 0(%r2) ; trap: heap_oob +; vlvgh %v24, %r3, 7 ; br %r14 function %insertlane_i16x8_mem_little_7(i16x8, i64) -> i16x8 tail { @@ -633,14 +633,14 @@ block0(v0: i16x8, v1: i64): ; VCode: ; block0: -; lrvh %r5, 0(%r2) -; vlvgh %v24, %r5, 0 +; lrvh %r3, 0(%r2) +; vlvgh %v24, %r3, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvh %r5, 0(%r2) ; trap: heap_oob -; vlvgh %v24, %r5, 0 +; lrvh %r3, 0(%r2) ; trap: heap_oob +; vlvgh %v24, %r3, 0 ; br %r14 function %insertlane_i8x16_0(i8x16, i8) -> i8x16 tail { @@ -1000,14 +1000,14 @@ block0(v0: f64x2, v1: i64): ; VCode: ; block0: -; lrvg %r5, 0(%r2) -; vlvgg %v24, %r5, 1 +; lrvg %r3, 0(%r2) +; vlvgg %v24, %r3, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r5, 0(%r2) ; trap: heap_oob -; vlvgg %v24, %r5, 1 +; lrvg %r3, 0(%r2) ; trap: heap_oob +; vlvgg %v24, %r3, 1 ; br %r14 function %insertlane_f64x2_mem_little_1(f64x2, i64) -> f64x2 tail { @@ -1019,14 +1019,14 @@ block0(v0: f64x2, v1: i64): ; VCode: ; block0: -; lrvg %r5, 0(%r2) -; vlvgg %v24, %r5, 0 +; lrvg %r3, 0(%r2) +; vlvgg %v24, %r3, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r5, 0(%r2) ; trap: heap_oob -; vlvgg %v24, %r5, 0 +; lrvg %r3, 0(%r2) ; trap: heap_oob +; vlvgg %v24, %r3, 0 ; br %r14 function %insertlane_f32x4_0(f32x4, f32) -> f32x4 tail { @@ -1190,14 +1190,14 @@ block0(v0: f32x4, v1: i64): ; VCode: ; block0: -; lrv %r5, 0(%r2) -; vlvgf %v24, %r5, 3 +; lrv %r3, 0(%r2) +; vlvgf %v24, %r3, 3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrv %r5, 0(%r2) ; trap: heap_oob -; vlvgf %v24, %r5, 3 +; 
lrv %r3, 0(%r2) ; trap: heap_oob +; vlvgf %v24, %r3, 3 ; br %r14 function %insertlane_i32x4_mem_little_3(i32x4, i64) -> i32x4 tail { @@ -1209,14 +1209,14 @@ block0(v0: i32x4, v1: i64): ; VCode: ; block0: -; lrv %r5, 0(%r2) -; vlvgf %v24, %r5, 0 +; lrv %r3, 0(%r2) +; vlvgf %v24, %r3, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrv %r5, 0(%r2) ; trap: heap_oob -; vlvgf %v24, %r5, 0 +; lrv %r3, 0(%r2) ; trap: heap_oob +; vlvgf %v24, %r3, 0 ; br %r14 function %extractlane_i64x2_0(i64x2) -> i64 tail { @@ -1294,14 +1294,14 @@ block0(v0: i64x2, v1: i64): ; VCode: ; block0: -; vlgvg %r5, %v24, 1 -; strvg %r5, 0(%r2) +; vlgvg %r3, %v24, 1 +; strvg %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvg %r5, %v24, 1 -; strvg %r5, 0(%r2) ; trap: heap_oob +; vlgvg %r3, %v24, 1 +; strvg %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %extractlane_i64x2_mem_little_1(i64x2, i64) tail { @@ -1313,14 +1313,14 @@ block0(v0: i64x2, v1: i64): ; VCode: ; block0: -; vlgvg %r5, %v24, 0 -; strvg %r5, 0(%r2) +; vlgvg %r3, %v24, 0 +; strvg %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvg %r5, %v24, 0 -; strvg %r5, 0(%r2) ; trap: heap_oob +; vlgvg %r3, %v24, 0 +; strvg %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %extractlane_i32x4_0(i32x4) -> i32 tail { @@ -1398,14 +1398,14 @@ block0(v0: i32x4, v1: i64): ; VCode: ; block0: -; vlgvf %r5, %v24, 3 -; strv %r5, 0(%r2) +; vlgvf %r3, %v24, 3 +; strv %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvf %r5, %v24, 3 -; strv %r5, 0(%r2) ; trap: heap_oob +; vlgvf %r3, %v24, 3 +; strv %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %extractlane_i32x4_mem_little_3(i32x4, i64) tail { @@ -1417,14 +1417,14 @@ block0(v0: i32x4, v1: i64): ; VCode: ; block0: -; vlgvf %r5, %v24, 0 -; strv %r5, 0(%r2) +; vlgvf %r3, %v24, 0 +; strv %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvf %r5, %v24, 0 -; strv %r5, 0(%r2) ; trap: heap_oob +; vlgvf %r3, %v24, 0 +; strv %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %extractlane_i16x8_0(i16x8) -> i16 tail { @@ -1502,14 +1502,14 @@ block0(v0: i16x8, v1: i64): ; VCode: ; block0: -; vlgvh %r5, %v24, 7 -; strvh %r5, 0(%r2) +; vlgvh %r3, %v24, 7 +; strvh %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvh %r5, %v24, 7 -; strvh %r5, 0(%r2) ; trap: heap_oob +; vlgvh %r3, %v24, 7 +; strvh %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %extractlane_i16x8_mem_little_7(i16x8, i64) tail { @@ -1521,14 +1521,14 @@ block0(v0: i16x8, v1: i64): ; VCode: ; block0: -; vlgvh %r5, %v24, 0 -; strvh %r5, 0(%r2) +; vlgvh %r3, %v24, 0 +; strvh %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvh %r5, %v24, 0 -; strvh %r5, 0(%r2) ; trap: heap_oob +; vlgvh %r3, %v24, 0 +; strvh %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %extractlane_i8x16_0(i8x16) -> i8 tail { @@ -1706,14 +1706,14 @@ block0(v0: f64x2, v1: i64): ; VCode: ; block0: -; vlgvg %r5, %v24, 1 -; strvg %r5, 0(%r2) +; vlgvg %r3, %v24, 1 +; strvg %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvg %r5, %v24, 1 -; strvg %r5, 0(%r2) ; trap: heap_oob +; vlgvg %r3, %v24, 1 +; strvg %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %extractlane_f64x2_mem_little_1(f64x2, i64) tail { @@ -1725,14 +1725,14 @@ block0(v0: f64x2, v1: i64): ; VCode: ; block0: -; vlgvg %r5, %v24, 0 -; strvg %r5, 0(%r2) +; vlgvg %r3, %v24, 0 +; strvg %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvg %r5, %v24, 0 -; strvg %r5, 0(%r2) ; trap: heap_oob +; vlgvg %r3, %v24, 0 
+; strvg %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %extractlane_f32x4_0(f32x4) -> f32 tail { @@ -1810,14 +1810,14 @@ block0(v0: f32x4, v1: i64): ; VCode: ; block0: -; vlgvf %r5, %v24, 3 -; strv %r5, 0(%r2) +; vlgvf %r3, %v24, 3 +; strv %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvf %r5, %v24, 3 -; strv %r5, 0(%r2) ; trap: heap_oob +; vlgvf %r3, %v24, 3 +; strv %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %extractlane_f32x4_mem_little_3(f32x4, i64) tail { @@ -1829,14 +1829,14 @@ block0(v0: f32x4, v1: i64): ; VCode: ; block0: -; vlgvf %r5, %v24, 0 -; strv %r5, 0(%r2) +; vlgvf %r3, %v24, 0 +; strv %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvf %r5, %v24, 0 -; strv %r5, 0(%r2) ; trap: heap_oob +; vlgvf %r3, %v24, 0 +; strv %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %splat_i64x2(i64) -> i64x2 tail { @@ -1934,15 +1934,15 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) +; ldgr %f4, %r2 ; vrepg %v24, %v4, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) ; trap: heap_oob +; ldgr %f4, %r2 ; vrepg %v24, %v4, 0 ; br %r14 @@ -2041,15 +2041,15 @@ block0(v0: i64): ; VCode: ; block0: -; lrv %r4, 0(%r2) -; vlvgf %v4, %r4, 0 +; lrv %r2, 0(%r2) +; vlvgf %v4, %r2, 0 ; vrepf %v24, %v4, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrv %r4, 0(%r2) ; trap: heap_oob -; vlvgf %v4, %r4, 0 +; lrv %r2, 0(%r2) ; trap: heap_oob +; vlvgf %v4, %r2, 0 ; vrepf %v24, %v4, 0 ; br %r14 @@ -2148,15 +2148,15 @@ block0(v0: i64): ; VCode: ; block0: -; lrvh %r4, 0(%r2) -; vlvgh %v4, %r4, 0 +; lrvh %r2, 0(%r2) +; vlvgh %v4, %r2, 0 ; vreph %v24, %v4, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvh %r4, 0(%r2) ; trap: heap_oob -; vlvgh %v4, %r4, 0 +; lrvh %r2, 0(%r2) ; trap: heap_oob +; vlvgh %v4, %r2, 0 ; vreph %v24, %v4, 0 ; br %r14 @@ -2339,15 +2339,15 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) +; ldgr %f4, %r2 ; vrepg %v24, %v4, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) ; trap: heap_oob +; ldgr %f4, %r2 ; vrepg %v24, %v4, 0 ; br %r14 @@ -2427,15 +2427,15 @@ block0(v0: i64): ; VCode: ; block0: -; lrv %r4, 0(%r2) -; vlvgf %v4, %r4, 0 +; lrv %r2, 0(%r2) +; vlvgf %v4, %r2, 0 ; vrepf %v24, %v4, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrv %r4, 0(%r2) ; trap: heap_oob -; vlvgf %v4, %r4, 0 +; lrv %r2, 0(%r2) ; trap: heap_oob +; vlvgf %v4, %r2, 0 ; vrepf %v24, %v4, 0 ; br %r14 @@ -2543,15 +2543,15 @@ block0(v0: i64): ; VCode: ; block0: ; vgbm %v24, 0 -; lrvg %r6, 0(%r2) -; vlvgg %v24, %r6, 1 +; lrvg %r4, 0(%r2) +; vlvgg %v24, %r4, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vzero %v24 -; lrvg %r6, 0(%r2) ; trap: heap_oob -; vlvgg %v24, %r6, 1 +; lrvg %r4, 0(%r2) ; trap: heap_oob +; vlvgg %v24, %r4, 1 ; br %r14 function %scalar_to_vector_i32x4(i32) -> i32x4 tail { @@ -2660,15 +2660,15 @@ block0(v0: i64): ; VCode: ; block0: ; vgbm %v24, 0 -; lrv %r6, 0(%r2) -; vlvgf %v24, %r6, 3 +; lrv %r4, 0(%r2) +; vlvgf %v24, %r4, 3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vzero %v24 -; lrv %r6, 0(%r2) ; trap: heap_oob -; vlvgf %v24, %r6, 3 +; lrv %r4, 0(%r2) ; trap: heap_oob +; vlvgf %v24, %r4, 3 ; br %r14 function %scalar_to_vector_i16x8(i16) -> i16x8 tail { @@ -2777,15 +2777,15 @@ block0(v0: i64): ; VCode: ; block0: ; vgbm %v24, 0 -; lrvh %r6, 0(%r2) -; vlvgh %v24, 
%r6, 7 +; lrvh %r4, 0(%r2) +; vlvgh %v24, %r4, 7 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vzero %v24 -; lrvh %r6, 0(%r2) ; trap: heap_oob -; vlvgh %v24, %r6, 7 +; lrvh %r4, 0(%r2) ; trap: heap_oob +; vlvgh %v24, %r4, 7 ; br %r14 function %scalar_to_vector_i8x16(i8) -> i8x16 tail { @@ -2988,15 +2988,15 @@ block0(v0: i64): ; VCode: ; block0: ; vgbm %v24, 0 -; lrvg %r6, 0(%r2) -; vlvgg %v24, %r6, 1 +; lrvg %r4, 0(%r2) +; vlvgg %v24, %r4, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vzero %v24 -; lrvg %r6, 0(%r2) ; trap: heap_oob -; vlvgg %v24, %r6, 1 +; lrvg %r4, 0(%r2) ; trap: heap_oob +; vlvgg %v24, %r4, 1 ; br %r14 function %scalar_to_vector_f32x4(f32) -> f32x4 tail { @@ -3088,14 +3088,14 @@ block0(v0: i64): ; VCode: ; block0: ; vgbm %v24, 0 -; lrv %r6, 0(%r2) -; vlvgf %v24, %r6, 3 +; lrv %r4, 0(%r2) +; vlvgf %v24, %r4, 3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vzero %v24 -; lrv %r6, 0(%r2) ; trap: heap_oob -; vlvgf %v24, %r6, 3 +; lrv %r4, 0(%r2) ; trap: heap_oob +; vlvgf %v24, %r4, 3 ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/vec-lane.clif b/cranelift/filetests/filetests/isa/s390x/vec-lane.clif index 587faaf42343..4a0a1863721a 100644 --- a/cranelift/filetests/filetests/isa/s390x/vec-lane.clif +++ b/cranelift/filetests/filetests/isa/s390x/vec-lane.clif @@ -178,14 +178,14 @@ block0(v0: i64x2, v1: i64): ; VCode: ; block0: -; lrvg %r5, 0(%r2) -; vlvgg %v24, %r5, 0 +; lrvg %r3, 0(%r2) +; vlvgg %v24, %r3, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r5, 0(%r2) ; trap: heap_oob -; vlvgg %v24, %r5, 0 +; lrvg %r3, 0(%r2) ; trap: heap_oob +; vlvgg %v24, %r3, 0 ; br %r14 function %insertlane_i64x2_mem_little_1(i64x2, i64) -> i64x2 { @@ -197,14 +197,14 @@ block0(v0: i64x2, v1: i64): ; VCode: ; block0: -; lrvg %r5, 0(%r2) -; vlvgg %v24, %r5, 1 +; lrvg %r3, 0(%r2) +; vlvgg %v24, %r3, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r5, 0(%r2) ; trap: heap_oob -; vlvgg %v24, %r5, 1 +; lrvg %r3, 0(%r2) ; trap: heap_oob +; vlvgg %v24, %r3, 1 ; br %r14 function %insertlane_i32x4_0(i32x4, i32) -> i32x4 { @@ -396,14 +396,14 @@ block0(v0: i32x4, v1: i64): ; VCode: ; block0: -; lrv %r5, 0(%r2) -; vlvgf %v24, %r5, 0 +; lrv %r3, 0(%r2) +; vlvgf %v24, %r3, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrv %r5, 0(%r2) ; trap: heap_oob -; vlvgf %v24, %r5, 0 +; lrv %r3, 0(%r2) ; trap: heap_oob +; vlvgf %v24, %r3, 0 ; br %r14 function %insertlane_i32x4_mem_little_3(i32x4, i64) -> i32x4 { @@ -415,14 +415,14 @@ block0(v0: i32x4, v1: i64): ; VCode: ; block0: -; lrv %r5, 0(%r2) -; vlvgf %v24, %r5, 3 +; lrv %r3, 0(%r2) +; vlvgf %v24, %r3, 3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrv %r5, 0(%r2) ; trap: heap_oob -; vlvgf %v24, %r5, 3 +; lrv %r3, 0(%r2) ; trap: heap_oob +; vlvgf %v24, %r3, 3 ; br %r14 function %insertlane_i16x8_0(i16x8, i16) -> i16x8 { @@ -614,14 +614,14 @@ block0(v0: i16x8, v1: i64): ; VCode: ; block0: -; lrvh %r5, 0(%r2) -; vlvgh %v24, %r5, 0 +; lrvh %r3, 0(%r2) +; vlvgh %v24, %r3, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvh %r5, 0(%r2) ; trap: heap_oob -; vlvgh %v24, %r5, 0 +; lrvh %r3, 0(%r2) ; trap: heap_oob +; vlvgh %v24, %r3, 0 ; br %r14 function %insertlane_i16x8_mem_little_7(i16x8, i64) -> i16x8 { @@ -633,14 +633,14 @@ block0(v0: i16x8, v1: i64): ; VCode: ; block0: -; lrvh %r5, 0(%r2) -; vlvgh %v24, %r5, 7 +; lrvh %r3, 0(%r2) +; vlvgh %v24, %r3, 7 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvh %r5, 0(%r2) ; trap: heap_oob -; vlvgh %v24, %r5, 7 +; lrvh %r3, 
0(%r2) ; trap: heap_oob +; vlvgh %v24, %r3, 7 ; br %r14 function %insertlane_i8x16_0(i8x16, i8) -> i8x16 { @@ -1000,14 +1000,14 @@ block0(v0: f64x2, v1: i64): ; VCode: ; block0: -; lrvg %r5, 0(%r2) -; vlvgg %v24, %r5, 0 +; lrvg %r3, 0(%r2) +; vlvgg %v24, %r3, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r5, 0(%r2) ; trap: heap_oob -; vlvgg %v24, %r5, 0 +; lrvg %r3, 0(%r2) ; trap: heap_oob +; vlvgg %v24, %r3, 0 ; br %r14 function %insertlane_f64x2_mem_little_1(f64x2, i64) -> f64x2 { @@ -1019,14 +1019,14 @@ block0(v0: f64x2, v1: i64): ; VCode: ; block0: -; lrvg %r5, 0(%r2) -; vlvgg %v24, %r5, 1 +; lrvg %r3, 0(%r2) +; vlvgg %v24, %r3, 1 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r5, 0(%r2) ; trap: heap_oob -; vlvgg %v24, %r5, 1 +; lrvg %r3, 0(%r2) ; trap: heap_oob +; vlvgg %v24, %r3, 1 ; br %r14 function %insertlane_f32x4_0(f32x4, f32) -> f32x4 { @@ -1190,14 +1190,14 @@ block0(v0: f32x4, v1: i64): ; VCode: ; block0: -; lrv %r5, 0(%r2) -; vlvgf %v24, %r5, 0 +; lrv %r3, 0(%r2) +; vlvgf %v24, %r3, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrv %r5, 0(%r2) ; trap: heap_oob -; vlvgf %v24, %r5, 0 +; lrv %r3, 0(%r2) ; trap: heap_oob +; vlvgf %v24, %r3, 0 ; br %r14 function %insertlane_i32x4_mem_little_3(i32x4, i64) -> i32x4 { @@ -1209,14 +1209,14 @@ block0(v0: i32x4, v1: i64): ; VCode: ; block0: -; lrv %r5, 0(%r2) -; vlvgf %v24, %r5, 3 +; lrv %r3, 0(%r2) +; vlvgf %v24, %r3, 3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrv %r5, 0(%r2) ; trap: heap_oob -; vlvgf %v24, %r5, 3 +; lrv %r3, 0(%r2) ; trap: heap_oob +; vlvgf %v24, %r3, 3 ; br %r14 function %extractlane_i64x2_0(i64x2) -> i64 { @@ -1294,14 +1294,14 @@ block0(v0: i64x2, v1: i64): ; VCode: ; block0: -; vlgvg %r5, %v24, 0 -; strvg %r5, 0(%r2) +; vlgvg %r3, %v24, 0 +; strvg %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvg %r5, %v24, 0 -; strvg %r5, 0(%r2) ; trap: heap_oob +; vlgvg %r3, %v24, 0 +; strvg %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %extractlane_i64x2_mem_little_1(i64x2, i64) { @@ -1313,14 +1313,14 @@ block0(v0: i64x2, v1: i64): ; VCode: ; block0: -; vlgvg %r5, %v24, 1 -; strvg %r5, 0(%r2) +; vlgvg %r3, %v24, 1 +; strvg %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvg %r5, %v24, 1 -; strvg %r5, 0(%r2) ; trap: heap_oob +; vlgvg %r3, %v24, 1 +; strvg %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %extractlane_i32x4_0(i32x4) -> i32 { @@ -1398,14 +1398,14 @@ block0(v0: i32x4, v1: i64): ; VCode: ; block0: -; vlgvf %r5, %v24, 0 -; strv %r5, 0(%r2) +; vlgvf %r3, %v24, 0 +; strv %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvf %r5, %v24, 0 -; strv %r5, 0(%r2) ; trap: heap_oob +; vlgvf %r3, %v24, 0 +; strv %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %extractlane_i32x4_mem_little_3(i32x4, i64) { @@ -1417,14 +1417,14 @@ block0(v0: i32x4, v1: i64): ; VCode: ; block0: -; vlgvf %r5, %v24, 3 -; strv %r5, 0(%r2) +; vlgvf %r3, %v24, 3 +; strv %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvf %r5, %v24, 3 -; strv %r5, 0(%r2) ; trap: heap_oob +; vlgvf %r3, %v24, 3 +; strv %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %extractlane_i16x8_0(i16x8) -> i16 { @@ -1502,14 +1502,14 @@ block0(v0: i16x8, v1: i64): ; VCode: ; block0: -; vlgvh %r5, %v24, 0 -; strvh %r5, 0(%r2) +; vlgvh %r3, %v24, 0 +; strvh %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvh %r5, %v24, 0 -; strvh %r5, 0(%r2) ; trap: heap_oob +; vlgvh %r3, %v24, 0 +; strvh %r3, 0(%r2) ; trap: heap_oob ; br %r14 function 
%extractlane_i16x8_mem_little_7(i16x8, i64) { @@ -1521,14 +1521,14 @@ block0(v0: i16x8, v1: i64): ; VCode: ; block0: -; vlgvh %r5, %v24, 7 -; strvh %r5, 0(%r2) +; vlgvh %r3, %v24, 7 +; strvh %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvh %r5, %v24, 7 -; strvh %r5, 0(%r2) ; trap: heap_oob +; vlgvh %r3, %v24, 7 +; strvh %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %extractlane_i8x16_0(i8x16) -> i8 { @@ -1706,14 +1706,14 @@ block0(v0: f64x2, v1: i64): ; VCode: ; block0: -; vlgvg %r5, %v24, 0 -; strvg %r5, 0(%r2) +; vlgvg %r3, %v24, 0 +; strvg %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvg %r5, %v24, 0 -; strvg %r5, 0(%r2) ; trap: heap_oob +; vlgvg %r3, %v24, 0 +; strvg %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %extractlane_f64x2_mem_little_1(f64x2, i64) { @@ -1725,14 +1725,14 @@ block0(v0: f64x2, v1: i64): ; VCode: ; block0: -; vlgvg %r5, %v24, 1 -; strvg %r5, 0(%r2) +; vlgvg %r3, %v24, 1 +; strvg %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvg %r5, %v24, 1 -; strvg %r5, 0(%r2) ; trap: heap_oob +; vlgvg %r3, %v24, 1 +; strvg %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %extractlane_f32x4_0(f32x4) -> f32 { @@ -1810,14 +1810,14 @@ block0(v0: f32x4, v1: i64): ; VCode: ; block0: -; vlgvf %r5, %v24, 0 -; strv %r5, 0(%r2) +; vlgvf %r3, %v24, 0 +; strv %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvf %r5, %v24, 0 -; strv %r5, 0(%r2) ; trap: heap_oob +; vlgvf %r3, %v24, 0 +; strv %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %extractlane_f32x4_mem_little_3(f32x4, i64) { @@ -1829,14 +1829,14 @@ block0(v0: f32x4, v1: i64): ; VCode: ; block0: -; vlgvf %r5, %v24, 3 -; strv %r5, 0(%r2) +; vlgvf %r3, %v24, 3 +; strv %r3, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvf %r5, %v24, 3 -; strv %r5, 0(%r2) ; trap: heap_oob +; vlgvf %r3, %v24, 3 +; strv %r3, 0(%r2) ; trap: heap_oob ; br %r14 function %splat_i64x2(i64) -> i64x2 { @@ -1934,15 +1934,15 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) +; ldgr %f4, %r2 ; vrepg %v24, %v4, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) ; trap: heap_oob +; ldgr %f4, %r2 ; vrepg %v24, %v4, 0 ; br %r14 @@ -2041,15 +2041,15 @@ block0(v0: i64): ; VCode: ; block0: -; lrv %r4, 0(%r2) -; vlvgf %v4, %r4, 0 +; lrv %r2, 0(%r2) +; vlvgf %v4, %r2, 0 ; vrepf %v24, %v4, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrv %r4, 0(%r2) ; trap: heap_oob -; vlvgf %v4, %r4, 0 +; lrv %r2, 0(%r2) ; trap: heap_oob +; vlvgf %v4, %r2, 0 ; vrepf %v24, %v4, 0 ; br %r14 @@ -2148,15 +2148,15 @@ block0(v0: i64): ; VCode: ; block0: -; lrvh %r4, 0(%r2) -; vlvgh %v4, %r4, 0 +; lrvh %r2, 0(%r2) +; vlvgh %v4, %r2, 0 ; vreph %v24, %v4, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvh %r4, 0(%r2) ; trap: heap_oob -; vlvgh %v4, %r4, 0 +; lrvh %r2, 0(%r2) ; trap: heap_oob +; vlvgh %v4, %r2, 0 ; vreph %v24, %v4, 0 ; br %r14 @@ -2339,15 +2339,15 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) +; ldgr %f4, %r2 ; vrepg %v24, %v4, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) ; trap: heap_oob +; ldgr %f4, %r2 ; vrepg %v24, %v4, 0 ; br %r14 @@ -2427,15 +2427,15 @@ block0(v0: i64): ; VCode: ; block0: -; lrv %r4, 0(%r2) -; vlvgf %v4, %r4, 0 +; lrv %r2, 0(%r2) +; vlvgf %v4, %r2, 0 ; vrepf %v24, %v4, 0 ; br %r14 ; ; 
Disassembled: ; block0: ; offset 0x0 -; lrv %r4, 0(%r2) ; trap: heap_oob -; vlvgf %v4, %r4, 0 +; lrv %r2, 0(%r2) ; trap: heap_oob +; vlvgf %v4, %r2, 0 ; vrepf %v24, %v4, 0 ; br %r14 @@ -2543,15 +2543,15 @@ block0(v0: i64): ; VCode: ; block0: ; vgbm %v24, 0 -; lrvg %r2, 0(%r2) -; vlvgg %v24, %r2, 0 +; lrvg %r4, 0(%r2) +; vlvgg %v24, %r4, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vzero %v24 -; lrvg %r2, 0(%r2) ; trap: heap_oob -; vlvgg %v24, %r2, 0 +; lrvg %r4, 0(%r2) ; trap: heap_oob +; vlvgg %v24, %r4, 0 ; br %r14 function %scalar_to_vector_i32x4(i32) -> i32x4 { @@ -2660,15 +2660,15 @@ block0(v0: i64): ; VCode: ; block0: ; vgbm %v24, 0 -; lrv %r2, 0(%r2) -; vlvgf %v24, %r2, 0 +; lrv %r4, 0(%r2) +; vlvgf %v24, %r4, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vzero %v24 -; lrv %r2, 0(%r2) ; trap: heap_oob -; vlvgf %v24, %r2, 0 +; lrv %r4, 0(%r2) ; trap: heap_oob +; vlvgf %v24, %r4, 0 ; br %r14 function %scalar_to_vector_i16x8(i16) -> i16x8 { @@ -2777,15 +2777,15 @@ block0(v0: i64): ; VCode: ; block0: ; vgbm %v24, 0 -; lrvh %r2, 0(%r2) -; vlvgh %v24, %r2, 0 +; lrvh %r4, 0(%r2) +; vlvgh %v24, %r4, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vzero %v24 -; lrvh %r2, 0(%r2) ; trap: heap_oob -; vlvgh %v24, %r2, 0 +; lrvh %r4, 0(%r2) ; trap: heap_oob +; vlvgh %v24, %r4, 0 ; br %r14 function %scalar_to_vector_i8x16(i8) -> i8x16 { @@ -2988,15 +2988,15 @@ block0(v0: i64): ; VCode: ; block0: ; vgbm %v24, 0 -; lrvg %r2, 0(%r2) -; vlvgg %v24, %r2, 0 +; lrvg %r4, 0(%r2) +; vlvgg %v24, %r4, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vzero %v24 -; lrvg %r2, 0(%r2) ; trap: heap_oob -; vlvgg %v24, %r2, 0 +; lrvg %r4, 0(%r2) ; trap: heap_oob +; vlvgg %v24, %r4, 0 ; br %r14 function %scalar_to_vector_f32x4(f32) -> f32x4 { @@ -3086,14 +3086,14 @@ block0(v0: i64): ; VCode: ; block0: ; vgbm %v24, 0 -; lrv %r2, 0(%r2) -; vlvgf %v24, %r2, 0 +; lrv %r4, 0(%r2) +; vlvgf %v24, %r4, 0 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vzero %v24 -; lrv %r2, 0(%r2) ; trap: heap_oob -; vlvgf %v24, %r2, 0 +; lrv %r4, 0(%r2) ; trap: heap_oob +; vlvgf %v24, %r4, 0 ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/vec-permute-le-lane.clif b/cranelift/filetests/filetests/isa/s390x/vec-permute-le-lane.clif index 8c50b88bc916..d400a920e7d3 100644 --- a/cranelift/filetests/filetests/isa/s390x/vec-permute-le-lane.clif +++ b/cranelift/filetests/filetests/isa/s390x/vec-permute-le-lane.clif @@ -12,8 +12,8 @@ block0(v0: i8x16, v1: i8x16): ; vgbm %v3, 0 ; vrepib %v5, 239 ; vno %v7, %v25, %v25 -; vmxlb %v17, %v5, %v7 -; vperm %v24, %v3, %v24, %v17 +; vmxlb %v16, %v5, %v7 +; vperm %v24, %v3, %v24, %v16 ; br %r14 ; ; Disassembled: @@ -21,8 +21,8 @@ block0(v0: i8x16, v1: i8x16): ; vzero %v3 ; vrepib %v5, 0xef ; vno %v7, %v25, %v25 -; vmxlb %v17, %v5, %v7 -; vperm %v24, %v3, %v24, %v17 +; vmxlb %v16, %v5, %v7 +; vperm %v24, %v3, %v24, %v16 ; br %r14 function %shuffle_0(i8x16, i8x16) -> i8x16 tail { diff --git a/cranelift/filetests/filetests/isa/s390x/vec-shift-rotate.clif b/cranelift/filetests/filetests/isa/s390x/vec-shift-rotate.clif index 6b7d8f0f94cc..4eea0cb51214 100644 --- a/cranelift/filetests/filetests/isa/s390x/vec-shift-rotate.clif +++ b/cranelift/filetests/filetests/isa/s390x/vec-shift-rotate.clif @@ -9,14 +9,14 @@ block0(v0: i64x2, v1: i64): ; VCode: ; block0: -; lcr %r5, %r2 -; verllg %v24, %v24, 0(%r5) +; lcr %r3, %r2 +; verllg %v24, %v24, 0(%r3) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lcr %r5, %r2 -; verllg %v24, %v24, 0(%r5) +; lcr %r3, %r2 +; verllg 
%v24, %v24, 0(%r3) ; br %r14 function %rotr_i64x4_imm(i64x2) -> i64x2 { @@ -44,14 +44,14 @@ block0(v0: i32x4, v1: i32): ; VCode: ; block0: -; lcr %r5, %r2 -; verllf %v24, %v24, 0(%r5) +; lcr %r3, %r2 +; verllf %v24, %v24, 0(%r3) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lcr %r5, %r2 -; verllf %v24, %v24, 0(%r5) +; lcr %r3, %r2 +; verllf %v24, %v24, 0(%r3) ; br %r14 function %rotr_i32x4_imm(i32x4) -> i32x4 { @@ -79,14 +79,14 @@ block0(v0: i16x8, v1: i16): ; VCode: ; block0: -; lcr %r5, %r2 -; verllh %v24, %v24, 0(%r5) +; lcr %r3, %r2 +; verllh %v24, %v24, 0(%r3) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lcr %r5, %r2 -; verllh %v24, %v24, 0(%r5) +; lcr %r3, %r2 +; verllh %v24, %v24, 0(%r3) ; br %r14 function %rotr_i16x8_imm(i16x8) -> i16x8 { @@ -114,14 +114,14 @@ block0(v0: i8x16, v1: i8): ; VCode: ; block0: -; lcr %r5, %r2 -; verllb %v24, %v24, 0(%r5) +; lcr %r3, %r2 +; verllb %v24, %v24, 0(%r3) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lcr %r5, %r2 -; verllb %v24, %v24, 0(%r5) +; lcr %r3, %r2 +; verllb %v24, %v24, 0(%r3) ; br %r14 function %rotr_i8x16_imm(i8x16) -> i8x16 { diff --git a/cranelift/filetests/filetests/isa/s390x/vecmem-le-lane.clif b/cranelift/filetests/filetests/isa/s390x/vecmem-le-lane.clif index da55d504c65e..8610808f0c8e 100644 --- a/cranelift/filetests/filetests/isa/s390x/vecmem-le-lane.clif +++ b/cranelift/filetests/filetests/isa/s390x/vecmem-le-lane.clif @@ -9,15 +9,15 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) +; ldgr %f4, %r2 ; vuplhb %v24, %v4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) ; trap: heap_oob +; ldgr %f4, %r2 ; vuplhb %v24, %v4 ; br %r14 @@ -29,16 +29,16 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) +; ldgr %f4, %r2 ; verllh %v6, %v4, 8 ; vuplhh %v24, %v6 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) ; trap: heap_oob +; ldgr %f4, %r2 ; verllh %v6, %v4, 8 ; vuplhh %v24, %v6 ; br %r14 @@ -71,15 +71,15 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) +; ldgr %f4, %r2 ; vuphb %v24, %v4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) ; trap: heap_oob +; ldgr %f4, %r2 ; vuphb %v24, %v4 ; br %r14 @@ -91,16 +91,16 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) +; ldgr %f4, %r2 ; verllh %v6, %v4, 8 ; vuphh %v24, %v6 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) ; trap: heap_oob +; ldgr %f4, %r2 ; verllh %v6, %v4, 8 ; vuphh %v24, %v6 ; br %r14 @@ -133,16 +133,16 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; lrvg %r6, 8(%r2) -; vlvgp %v24, %r6, %r4 +; lrvg %r3, 0(%r2) +; lrvg %r4, 8(%r2) +; vlvgp %v24, %r4, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; lrvg %r6, 8(%r2) ; trap: heap_oob -; vlvgp %v24, %r6, %r4 +; lrvg %r3, 0(%r2) ; trap: heap_oob +; lrvg %r4, 8(%r2) ; trap: heap_oob +; vlvgp %v24, %r4, %r3 ; br %r14 function %load_i16x8_big(i64) -> i16x8 tail { @@ -247,18 +247,18 @@ block0(v0: i8x16, v1: i64): ; VCode: ; block0: -; vlgvg %r5, %v24, 1 -; vlgvg %r7, %v24, 0 -; strvg %r5, 0(%r2) -; strvg %r7, 8(%r2) +; vlgvg %r3, %v24, 1 +; vlgvg %r5, %v24, 0 +; 
strvg %r3, 0(%r2) +; strvg %r5, 8(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvg %r5, %v24, 1 -; vlgvg %r7, %v24, 0 -; strvg %r5, 0(%r2) ; trap: heap_oob -; strvg %r7, 8(%r2) ; trap: heap_oob +; vlgvg %r3, %v24, 1 +; vlgvg %r5, %v24, 0 +; strvg %r3, 0(%r2) ; trap: heap_oob +; strvg %r5, 8(%r2) ; trap: heap_oob ; br %r14 function %store_i16x8_big(i16x8, i64) tail { @@ -363,15 +363,15 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) +; ldgr %f4, %r2 ; vuplhb %v24, %v4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) ; trap: heap_oob +; ldgr %f4, %r2 ; vuplhb %v24, %v4 ; br %r14 @@ -383,15 +383,15 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) +; ldgr %f4, %r2 ; vuplhh %v24, %v4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) ; trap: heap_oob +; ldgr %f4, %r2 ; vuplhh %v24, %v4 ; br %r14 @@ -403,15 +403,15 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) +; ldgr %f4, %r2 ; vuplhf %v24, %v4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) ; trap: heap_oob +; ldgr %f4, %r2 ; vuplhf %v24, %v4 ; br %r14 @@ -423,15 +423,15 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) +; ldgr %f4, %r2 ; vuphb %v24, %v4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) ; trap: heap_oob +; ldgr %f4, %r2 ; vuphb %v24, %v4 ; br %r14 @@ -443,15 +443,15 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) +; ldgr %f4, %r2 ; vuphh %v24, %v4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) ; trap: heap_oob +; ldgr %f4, %r2 ; vuphh %v24, %v4 ; br %r14 @@ -463,15 +463,15 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) +; ldgr %f4, %r2 ; vuphf %v24, %v4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) ; trap: heap_oob +; ldgr %f4, %r2 ; vuphf %v24, %v4 ; br %r14 @@ -483,16 +483,16 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; lrvg %r6, 8(%r2) -; vlvgp %v24, %r6, %r4 +; lrvg %r3, 0(%r2) +; lrvg %r4, 8(%r2) +; vlvgp %v24, %r4, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; lrvg %r6, 8(%r2) ; trap: heap_oob -; vlvgp %v24, %r6, %r4 +; lrvg %r3, 0(%r2) ; trap: heap_oob +; lrvg %r4, 8(%r2) ; trap: heap_oob +; vlvgp %v24, %r4, %r3 ; br %r14 function %load_i16x8_little(i64) -> i16x8 tail { @@ -503,16 +503,16 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; lrvg %r6, 8(%r2) -; vlvgp %v24, %r6, %r4 +; lrvg %r3, 0(%r2) +; lrvg %r4, 8(%r2) +; vlvgp %v24, %r4, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; lrvg %r6, 8(%r2) ; trap: heap_oob -; vlvgp %v24, %r6, %r4 +; lrvg %r3, 0(%r2) ; trap: heap_oob +; lrvg %r4, 8(%r2) ; trap: heap_oob +; vlvgp %v24, %r4, %r3 ; br %r14 function %load_i32x4_little(i64) -> i32x4 tail { @@ -523,16 +523,16 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; lrvg %r6, 8(%r2) -; vlvgp %v24, %r6, %r4 +; lrvg %r3, 0(%r2) +; lrvg %r4, 
8(%r2) +; vlvgp %v24, %r4, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; lrvg %r6, 8(%r2) ; trap: heap_oob -; vlvgp %v24, %r6, %r4 +; lrvg %r3, 0(%r2) ; trap: heap_oob +; lrvg %r4, 8(%r2) ; trap: heap_oob +; vlvgp %v24, %r4, %r3 ; br %r14 function %load_i64x2_little(i64) -> i64x2 tail { @@ -543,16 +543,16 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; lrvg %r6, 8(%r2) -; vlvgp %v24, %r6, %r4 +; lrvg %r3, 0(%r2) +; lrvg %r4, 8(%r2) +; vlvgp %v24, %r4, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; lrvg %r6, 8(%r2) ; trap: heap_oob -; vlvgp %v24, %r6, %r4 +; lrvg %r3, 0(%r2) ; trap: heap_oob +; lrvg %r4, 8(%r2) ; trap: heap_oob +; vlvgp %v24, %r4, %r3 ; br %r14 function %load_f32x4_little(i64) -> f32x4 tail { @@ -563,16 +563,16 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; lrvg %r6, 8(%r2) -; vlvgp %v24, %r6, %r4 +; lrvg %r3, 0(%r2) +; lrvg %r4, 8(%r2) +; vlvgp %v24, %r4, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; lrvg %r6, 8(%r2) ; trap: heap_oob -; vlvgp %v24, %r6, %r4 +; lrvg %r3, 0(%r2) ; trap: heap_oob +; lrvg %r4, 8(%r2) ; trap: heap_oob +; vlvgp %v24, %r4, %r3 ; br %r14 function %load_f64x2_little(i64) -> f64x2 tail { @@ -583,16 +583,16 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; lrvg %r6, 8(%r2) -; vlvgp %v24, %r6, %r4 +; lrvg %r3, 0(%r2) +; lrvg %r4, 8(%r2) +; vlvgp %v24, %r4, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; lrvg %r6, 8(%r2) ; trap: heap_oob -; vlvgp %v24, %r6, %r4 +; lrvg %r3, 0(%r2) ; trap: heap_oob +; lrvg %r4, 8(%r2) ; trap: heap_oob +; vlvgp %v24, %r4, %r3 ; br %r14 function %load_f64x2_sum_little(i64, i64) -> f64x2 tail { @@ -604,16 +604,16 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; lrvg %r5, 0(%r3,%r2) -; lrvg %r7, 8(%r3,%r2) -; vlvgp %v24, %r7, %r5 +; lrvg %r4, 0(%r3,%r2) +; lrvg %r5, 8(%r3,%r2) +; vlvgp %v24, %r5, %r4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r5, 0(%r3, %r2) ; trap: heap_oob -; lrvg %r7, 8(%r3, %r2) ; trap: heap_oob -; vlvgp %v24, %r7, %r5 +; lrvg %r4, 0(%r3, %r2) ; trap: heap_oob +; lrvg %r5, 8(%r3, %r2) ; trap: heap_oob +; vlvgp %v24, %r5, %r4 ; br %r14 function %load_f64x2_off_little(i64) -> f64x2 tail { @@ -624,16 +624,16 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 128(%r2) -; lrvg %r6, 136(%r2) -; vlvgp %v24, %r6, %r4 +; lrvg %r3, 128(%r2) +; lrvg %r4, 136(%r2) +; vlvgp %v24, %r4, %r3 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0x80(%r2) ; trap: heap_oob -; lrvg %r6, 0x88(%r2) ; trap: heap_oob -; vlvgp %v24, %r6, %r4 +; lrvg %r3, 0x80(%r2) ; trap: heap_oob +; lrvg %r4, 0x88(%r2) ; trap: heap_oob +; vlvgp %v24, %r4, %r3 ; br %r14 function %store_i8x16_little(i8x16, i64) tail { @@ -644,18 +644,18 @@ block0(v0: i8x16, v1: i64): ; VCode: ; block0: -; vlgvg %r5, %v24, 1 -; vlgvg %r7, %v24, 0 -; strvg %r5, 0(%r2) -; strvg %r7, 8(%r2) +; vlgvg %r3, %v24, 1 +; vlgvg %r5, %v24, 0 +; strvg %r3, 0(%r2) +; strvg %r5, 8(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvg %r5, %v24, 1 -; vlgvg %r7, %v24, 0 -; strvg %r5, 0(%r2) ; trap: heap_oob -; strvg %r7, 8(%r2) ; trap: heap_oob +; vlgvg %r3, %v24, 1 +; vlgvg %r5, %v24, 0 +; strvg %r3, 0(%r2) ; trap: heap_oob +; strvg %r5, 8(%r2) ; trap: heap_oob ; br %r14 function %store_i16x8_little(i16x8, i64) tail { @@ -666,18 +666,18 @@ block0(v0: i16x8, v1: i64): ; VCode: ; block0: -; vlgvg %r5, %v24, 1 
-; vlgvg %r7, %v24, 0 -; strvg %r5, 0(%r2) -; strvg %r7, 8(%r2) +; vlgvg %r3, %v24, 1 +; vlgvg %r5, %v24, 0 +; strvg %r3, 0(%r2) +; strvg %r5, 8(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvg %r5, %v24, 1 -; vlgvg %r7, %v24, 0 -; strvg %r5, 0(%r2) ; trap: heap_oob -; strvg %r7, 8(%r2) ; trap: heap_oob +; vlgvg %r3, %v24, 1 +; vlgvg %r5, %v24, 0 +; strvg %r3, 0(%r2) ; trap: heap_oob +; strvg %r5, 8(%r2) ; trap: heap_oob ; br %r14 function %store_i32x4_little(i32x4, i64) tail { @@ -688,18 +688,18 @@ block0(v0: i32x4, v1: i64): ; VCode: ; block0: -; vlgvg %r5, %v24, 1 -; vlgvg %r7, %v24, 0 -; strvg %r5, 0(%r2) -; strvg %r7, 8(%r2) +; vlgvg %r3, %v24, 1 +; vlgvg %r5, %v24, 0 +; strvg %r3, 0(%r2) +; strvg %r5, 8(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvg %r5, %v24, 1 -; vlgvg %r7, %v24, 0 -; strvg %r5, 0(%r2) ; trap: heap_oob -; strvg %r7, 8(%r2) ; trap: heap_oob +; vlgvg %r3, %v24, 1 +; vlgvg %r5, %v24, 0 +; strvg %r3, 0(%r2) ; trap: heap_oob +; strvg %r5, 8(%r2) ; trap: heap_oob ; br %r14 function %store_i64x2_little(i64x2, i64) tail { @@ -710,18 +710,18 @@ block0(v0: i64x2, v1: i64): ; VCode: ; block0: -; vlgvg %r5, %v24, 1 -; vlgvg %r7, %v24, 0 -; strvg %r5, 0(%r2) -; strvg %r7, 8(%r2) +; vlgvg %r3, %v24, 1 +; vlgvg %r5, %v24, 0 +; strvg %r3, 0(%r2) +; strvg %r5, 8(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvg %r5, %v24, 1 -; vlgvg %r7, %v24, 0 -; strvg %r5, 0(%r2) ; trap: heap_oob -; strvg %r7, 8(%r2) ; trap: heap_oob +; vlgvg %r3, %v24, 1 +; vlgvg %r5, %v24, 0 +; strvg %r3, 0(%r2) ; trap: heap_oob +; strvg %r5, 8(%r2) ; trap: heap_oob ; br %r14 function %store_f32x4_little(f32x4, i64) tail { @@ -732,18 +732,18 @@ block0(v0: f32x4, v1: i64): ; VCode: ; block0: -; vlgvg %r5, %v24, 1 -; vlgvg %r7, %v24, 0 -; strvg %r5, 0(%r2) -; strvg %r7, 8(%r2) +; vlgvg %r3, %v24, 1 +; vlgvg %r5, %v24, 0 +; strvg %r3, 0(%r2) +; strvg %r5, 8(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvg %r5, %v24, 1 -; vlgvg %r7, %v24, 0 -; strvg %r5, 0(%r2) ; trap: heap_oob -; strvg %r7, 8(%r2) ; trap: heap_oob +; vlgvg %r3, %v24, 1 +; vlgvg %r5, %v24, 0 +; strvg %r3, 0(%r2) ; trap: heap_oob +; strvg %r5, 8(%r2) ; trap: heap_oob ; br %r14 function %store_f64x2_little(f64x2, i64) tail { @@ -754,18 +754,18 @@ block0(v0: f64x2, v1: i64): ; VCode: ; block0: -; vlgvg %r5, %v24, 1 -; vlgvg %r7, %v24, 0 -; strvg %r5, 0(%r2) -; strvg %r7, 8(%r2) +; vlgvg %r3, %v24, 1 +; vlgvg %r5, %v24, 0 +; strvg %r3, 0(%r2) +; strvg %r5, 8(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvg %r5, %v24, 1 -; vlgvg %r7, %v24, 0 -; strvg %r5, 0(%r2) ; trap: heap_oob -; strvg %r7, 8(%r2) ; trap: heap_oob +; vlgvg %r3, %v24, 1 +; vlgvg %r5, %v24, 0 +; strvg %r3, 0(%r2) ; trap: heap_oob +; strvg %r5, 8(%r2) ; trap: heap_oob ; br %r14 function %store_f64x2_sum_little(f64x2, i64, i64) tail { @@ -777,18 +777,18 @@ block0(v0: f64x2, v1: i64, v2: i64): ; VCode: ; block0: -; vlgvg %r6, %v24, 1 -; vlgvg %r4, %v24, 0 -; strvg %r6, 0(%r3,%r2) -; strvg %r4, 8(%r3,%r2) +; vlgvg %r4, %v24, 1 +; vlgvg %r6, %v24, 0 +; strvg %r4, 0(%r3,%r2) +; strvg %r6, 8(%r3,%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvg %r6, %v24, 1 -; vlgvg %r4, %v24, 0 -; strvg %r6, 0(%r3, %r2) ; trap: heap_oob -; strvg %r4, 8(%r3, %r2) ; trap: heap_oob +; vlgvg %r4, %v24, 1 +; vlgvg %r6, %v24, 0 +; strvg %r4, 0(%r3, %r2) ; trap: heap_oob +; strvg %r6, 8(%r3, %r2) ; trap: heap_oob ; br %r14 function %store_f64x2_off_little(f64x2, i64) tail { @@ -799,17 +799,17 @@ block0(v0: f64x2, v1: 
i64): ; VCode: ; block0: -; vlgvg %r5, %v24, 1 -; vlgvg %r7, %v24, 0 -; strvg %r5, 128(%r2) -; strvg %r7, 136(%r2) +; vlgvg %r3, %v24, 1 +; vlgvg %r5, %v24, 0 +; strvg %r3, 128(%r2) +; strvg %r5, 136(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; vlgvg %r5, %v24, 1 -; vlgvg %r7, %v24, 0 -; strvg %r5, 0x80(%r2) ; trap: heap_oob -; strvg %r7, 0x88(%r2) ; trap: heap_oob +; vlgvg %r3, %v24, 1 +; vlgvg %r5, %v24, 0 +; strvg %r3, 0x80(%r2) ; trap: heap_oob +; strvg %r5, 0x88(%r2) ; trap: heap_oob ; br %r14 diff --git a/cranelift/filetests/filetests/isa/s390x/vecmem.clif b/cranelift/filetests/filetests/isa/s390x/vecmem.clif index 6e49064c24b5..71c6fd813a42 100644 --- a/cranelift/filetests/filetests/isa/s390x/vecmem.clif +++ b/cranelift/filetests/filetests/isa/s390x/vecmem.clif @@ -384,16 +384,16 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) +; ldgr %f4, %r2 ; verllg %v6, %v4, 32 ; vuplhf %v24, %v6 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) ; trap: heap_oob +; ldgr %f4, %r2 ; verllg %v6, %v4, 0x20 ; vuplhf %v24, %v6 ; br %r14 @@ -444,16 +444,16 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) +; ldgr %f4, %r2 ; verllg %v6, %v4, 32 ; vuphf %v24, %v6 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; ldgr %f4, %r4 +; lrvg %r2, 0(%r2) ; trap: heap_oob +; ldgr %f4, %r2 ; verllg %v6, %v4, 0x20 ; vuphf %v24, %v6 ; br %r14 @@ -482,17 +482,17 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; lrvg %r2, 8(%r2) -; vlvgp %v6, %r2, %r4 +; lrvg %r3, 0(%r2) +; lrvg %r4, 8(%r2) +; vlvgp %v6, %r4, %r3 ; vpdi %v24, %v6, %v6, 4 ; verllg %v6, %v6, 32 ; verllf %v6, %v6, 16 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; lrvg %r2, 8(%r2) ; trap: heap_oob -; vlvgp %v6, %r2, %r4 +; lrvg %r3, 0(%r2) ; trap: heap_oob +; lrvg %r4, 8(%r2) ; trap: heap_oob +; vlvgp %v6, %r4, %r3 ; vpdi %v24, %v6, %v6, 4 ; verllg %v24, %v24, 0x20 ; verllf %v24, %v24, 0x10 @@ -506,17 +506,17 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; lrvg %r2, 8(%r2) -; vlvgp %v6, %r2, %r4 +; lrvg %r3, 0(%r2) +; lrvg %r4, 8(%r2) +; vlvgp %v6, %r4, %r3 ; vpdi %v24, %v6, %v6, 4 ; verllg %v6, %v6, 32 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; lrvg %r2, 8(%r2) ; trap: heap_oob -; vlvgp %v6, %r2, %r4 +; lrvg %r3, 0(%r2) ; trap: heap_oob +; lrvg %r4, 8(%r2) ; trap: heap_oob +; vlvgp %v6, %r4, %r3 ; vpdi %v24, %v6, %v6, 4 ; verllg %v24, %v24, 0x20 ; br %r14 @@ -529,17 +529,17 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; lrvg %r2, 8(%r2) -; vlvgp %v6, %r2, %r4 +; lrvg %r3, 0(%r2) +; lrvg %r4, 8(%r2) +; vlvgp %v6, %r4, %r3 ; vpdi %v24, %v6, %v6, 4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; lrvg %r2, 8(%r2) ; trap: heap_oob -; vlvgp %v6, %r2, %r4 +; lrvg %r3, 0(%r2) ; trap: heap_oob +; lrvg %r4, 8(%r2) ; trap: heap_oob +; vlvgp %v6, %r4, %r3 ; vpdi %v24, %v6, %v6, 4 ; br %r14 @@ -551,17 +551,17 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r5, 0(%r3) -; lrvg %r3, 8(%r3) -; vlvgp %v7, %r3, %r5 +; lrvg %r4, 0(%r3) +; lrvg %r5, 8(%r3) +; vlvgp %v7, %r5, %r4 ; vst %v7, 0(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r5, 0(%r3) ; trap: heap_oob -; lrvg %r3, 8(%r3) ; trap: heap_oob -; vlvgp %v7, %r3, %r5 +; lrvg %r4, 0(%r3) ; trap: heap_oob +; 
lrvg %r5, 8(%r3) ; trap: heap_oob +; vlvgp %v7, %r5, %r4 ; vst %v7, 0(%r2) ; br %r14 @@ -573,17 +573,17 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; lrvg %r2, 8(%r2) -; vlvgp %v6, %r2, %r4 +; lrvg %r3, 0(%r2) +; lrvg %r4, 8(%r2) +; vlvgp %v6, %r4, %r3 ; vpdi %v24, %v6, %v6, 4 ; verllg %v6, %v6, 32 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; lrvg %r2, 8(%r2) ; trap: heap_oob -; vlvgp %v6, %r2, %r4 +; lrvg %r3, 0(%r2) ; trap: heap_oob +; lrvg %r4, 8(%r2) ; trap: heap_oob +; vlvgp %v6, %r4, %r3 ; vpdi %v24, %v6, %v6, 4 ; verllg %v24, %v24, 0x20 ; br %r14 @@ -596,17 +596,17 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 0(%r2) -; lrvg %r2, 8(%r2) -; vlvgp %v6, %r2, %r4 +; lrvg %r3, 0(%r2) +; lrvg %r4, 8(%r2) +; vlvgp %v6, %r4, %r3 ; vpdi %v24, %v6, %v6, 4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0(%r2) ; trap: heap_oob -; lrvg %r2, 8(%r2) ; trap: heap_oob -; vlvgp %v6, %r2, %r4 +; lrvg %r3, 0(%r2) ; trap: heap_oob +; lrvg %r4, 8(%r2) ; trap: heap_oob +; vlvgp %v6, %r4, %r3 ; vpdi %v24, %v6, %v6, 4 ; br %r14 @@ -619,17 +619,17 @@ block0(v0: i64, v1: i64): ; VCode: ; block0: -; lrvg %r5, 0(%r3,%r2) -; lrvg %r3, 8(%r3,%r2) -; vlvgp %v7, %r3, %r5 +; lrvg %r4, 0(%r3,%r2) +; lrvg %r5, 8(%r3,%r2) +; vlvgp %v7, %r5, %r4 ; vpdi %v24, %v7, %v7, 4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r5, 0(%r3, %r2) ; trap: heap_oob -; lrvg %r3, 8(%r3, %r2) ; trap: heap_oob -; vlvgp %v7, %r3, %r5 +; lrvg %r4, 0(%r3, %r2) ; trap: heap_oob +; lrvg %r5, 8(%r3, %r2) ; trap: heap_oob +; vlvgp %v7, %r5, %r4 ; vpdi %v24, %v7, %v7, 4 ; br %r14 @@ -641,17 +641,17 @@ block0(v0: i64): ; VCode: ; block0: -; lrvg %r4, 128(%r2) -; lrvg %r2, 136(%r2) -; vlvgp %v6, %r2, %r4 +; lrvg %r3, 128(%r2) +; lrvg %r4, 136(%r2) +; vlvgp %v6, %r4, %r3 ; vpdi %v24, %v6, %v6, 4 ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 -; lrvg %r4, 0x80(%r2) ; trap: heap_oob -; lrvg %r2, 0x88(%r2) ; trap: heap_oob -; vlvgp %v6, %r2, %r4 +; lrvg %r3, 0x80(%r2) ; trap: heap_oob +; lrvg %r4, 0x88(%r2) ; trap: heap_oob +; vlvgp %v6, %r4, %r3 ; vpdi %v24, %v6, %v6, 4 ; br %r14 @@ -680,10 +680,10 @@ block0(v0: i16x8, v1: i64): ; VCode: ; block0: ; vpdi %v3, %v24, %v24, 4 ; verllg %v24, %v24, 32 ; verllf %v24, %v24, 16 -; vlgvg %r3, %v3, 1 -; lgdr %r5, %f3 -; strvg %r3, 0(%r2) -; strvg %r5, 8(%r2) +; vlgvg %r5, %v3, 1 +; lgdr %r3, %f3 +; strvg %r5, 0(%r2) +; strvg %r3, 8(%r2) ; br %r14 ; ; Disassembled: @@ -691,10 +691,10 @@ block0(v0: i16x8, v1: i64): ; vpdi %v3, %v24, %v24, 4 ; verllg %v3, %v3, 0x20 ; verllf %v3, %v3, 0x10 -; vlgvg %r3, %v3, 1 -; lgdr %r5, %f3 -; strvg %r3, 0(%r2) ; trap: heap_oob -; strvg %r5, 8(%r2) ; trap: heap_oob +; vlgvg %r5, %v3, 1 +; lgdr %r3, %f3 +; strvg %r5, 0(%r2) ; trap: heap_oob +; strvg %r3, 8(%r2) ; trap: heap_oob ; br %r14 function %store_i32x4_little(i32x4, i64) { @@ -706,20 +706,20 @@ block0(v0: i32x4, v1: i64): ; VCode: ; block0: ; vpdi %v3, %v24, %v24, 4 ; verllg %v24, %v24, 32 -; vlgvg %r3, %v3, 1 -; lgdr %r5, %f3 -; strvg %r3, 0(%r2) -; strvg %r5, 8(%r2) +; vlgvg %r5, %v3, 1 +; lgdr %r3, %f3 +; strvg %r5, 0(%r2) +; strvg %r3, 8(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vpdi %v3, %v24, %v24, 4 ; verllg %v3, %v3, 0x20 -; vlgvg %r3, %v3, 1 -; lgdr %r5, %f3 -; strvg %r3, 0(%r2) ; trap: heap_oob -; strvg %r5, 8(%r2) ; trap: heap_oob +; vlgvg %r5, %v3, 1 +; lgdr %r3, %f3 +; strvg %r5, 0(%r2) ; trap: heap_oob +; strvg %r3, 8(%r2) ; trap: heap_oob ; br %r14 function %store_i64x2_little(i64x2, i64) { @@ 
-731,19 +731,19 @@ block0(v0: i64x2, v1: i64): ; VCode: ; block0: ; vpdi %v3, %v24, %v24, 4 -; vlgvg %r3, %v3, 1 -; lgdr %r5, %f3 -; strvg %r3, 0(%r2) -; strvg %r5, 8(%r2) +; vlgvg %r5, %v3, 1 +; lgdr %r3, %f3 +; strvg %r5, 0(%r2) +; strvg %r3, 8(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vpdi %v3, %v24, %v24, 4 -; vlgvg %r3, %v3, 1 -; lgdr %r5, %f3 -; strvg %r3, 0(%r2) ; trap: heap_oob -; strvg %r5, 8(%r2) ; trap: heap_oob +; vlgvg %r5, %v3, 1 +; lgdr %r3, %f3 +; strvg %r5, 0(%r2) ; trap: heap_oob +; strvg %r3, 8(%r2) ; trap: heap_oob ; br %r14 function %store_i128_little(i128, i64) { @@ -755,19 +755,19 @@ block0(v0: i128, v1: i64): ; VCode: ; block0: ; vl %v1, 0(%r2) -; vlgvg %r2, %v1, 1 -; lgdr %r4, %f1 -; strvg %r2, 0(%r3) -; strvg %r4, 8(%r3) +; vlgvg %r4, %v1, 1 +; lgdr %r2, %f1 +; strvg %r4, 0(%r3) +; strvg %r2, 8(%r3) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vl %v1, 0(%r2) -; vlgvg %r2, %v1, 1 -; lgdr %r4, %f1 -; strvg %r2, 0(%r3) ; trap: heap_oob -; strvg %r4, 8(%r3) ; trap: heap_oob +; vlgvg %r4, %v1, 1 +; lgdr %r2, %f1 +; strvg %r4, 0(%r3) ; trap: heap_oob +; strvg %r2, 8(%r3) ; trap: heap_oob ; br %r14 function %store_f32x4_little(f32x4, i64) { @@ -779,20 +779,20 @@ block0(v0: f32x4, v1: i64): ; VCode: ; block0: ; vpdi %v3, %v24, %v24, 4 ; verllg %v24, %v24, 32 -; vlgvg %r3, %v3, 1 -; lgdr %r5, %f3 -; strvg %r3, 0(%r2) -; strvg %r5, 8(%r2) +; vlgvg %r5, %v3, 1 +; lgdr %r3, %f3 +; strvg %r5, 0(%r2) +; strvg %r3, 8(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vpdi %v3, %v24, %v24, 4 ; verllg %v3, %v3, 0x20 -; vlgvg %r3, %v3, 1 -; lgdr %r5, %f3 -; strvg %r3, 0(%r2) ; trap: heap_oob -; strvg %r5, 8(%r2) ; trap: heap_oob +; vlgvg %r5, %v3, 1 +; lgdr %r3, %f3 +; strvg %r5, 0(%r2) ; trap: heap_oob +; strvg %r3, 8(%r2) ; trap: heap_oob ; br %r14 function %store_f64x2_little(f64x2, i64) { @@ -804,19 +804,19 @@ block0(v0: f64x2, v1: i64): ; VCode: ; block0: ; vpdi %v3, %v24, %v24, 4 -; vlgvg %r3, %v3, 1 -; lgdr %r5, %f3 -; strvg %r3, 0(%r2) -; strvg %r5, 8(%r2) +; vlgvg %r5, %v3, 1 +; lgdr %r3, %f3 +; strvg %r5, 0(%r2) +; strvg %r3, 8(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vpdi %v3, %v24, %v24, 4 -; vlgvg %r3, %v3, 1 -; lgdr %r5, %f3 -; strvg %r3, 0(%r2) ; trap: heap_oob -; strvg %r5, 8(%r2) ; trap: heap_oob +; vlgvg %r5, %v3, 1 +; lgdr %r3, %f3 +; strvg %r5, 0(%r2) ; trap: heap_oob +; strvg %r3, 8(%r2) ; trap: heap_oob ; br %r14 function %store_f64x2_sum_little(f64x2, i64, i64) { @@ -853,18 +853,18 @@ block0(v0: f64x2, v1: i64): ; VCode: ; block0: ; vpdi %v3, %v24, %v24, 4 -; vlgvg %r3, %v3, 1 -; lgdr %r5, %f3 -; strvg %r3, 128(%r2) -; strvg %r5, 136(%r2) +; vlgvg %r5, %v3, 1 +; lgdr %r3, %f3 +; strvg %r5, 128(%r2) +; strvg %r3, 136(%r2) ; br %r14 ; ; Disassembled: ; block0: ; offset 0x0 ; vpdi %v3, %v24, %v24, 4 -; vlgvg %r3, %v3, 1 -; lgdr %r5, %f3 -; strvg %r3, 0x80(%r2) ; trap: heap_oob -; strvg %r5, 0x88(%r2) ; trap: heap_oob +; vlgvg %r5, %v3, 1 +; lgdr %r3, %f3 +; strvg %r5, 0x80(%r2) ; trap: heap_oob +; strvg %r3, 0x88(%r2) ; trap: heap_oob ; br %r14 diff --git a/cranelift/filetests/filetests/isa/x64/amode-opt.clif b/cranelift/filetests/filetests/isa/x64/amode-opt.clif index e3304a8ab6fc..c85d4b76c264 100644 --- a/cranelift/filetests/filetests/isa/x64/amode-opt.clif +++ b/cranelift/filetests/filetests/isa/x64/amode-opt.clif @@ -243,9 +243,9 @@ block0(v0: i64, v1: i32, v2: i32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; leal (%rsi, %rdx), %r8d -; shll $0x2, %r8d -; movq -1(%rdi, %r8), %rax +; leal (%rsi, %rdx), %esi +; 
shll $0x2, %esi +; movq -1(%rdi, %rsi), %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -255,9 +255,9 @@ block0(v0: i64, v1: i32, v2: i32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; leal (%rsi, %rdx), %r8d -; shll $2, %r8d -; movq -1(%rdi, %r8), %rax ; trap: heap_oob +; addl %edx, %esi +; shll $2, %esi +; movq -1(%rdi, %rsi), %rax ; trap: heap_oob ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/atomic-128.clif b/cranelift/filetests/filetests/isa/x64/atomic-128.clif index eb4cfb7737e4..fd5796f5b966 100644 --- a/cranelift/filetests/filetests/isa/x64/atomic-128.clif +++ b/cranelift/filetests/filetests/isa/x64/atomic-128.clif @@ -61,8 +61,8 @@ block0(v0: i128, v1: i64): ; block0: ; movq %rsi, %rcx ; movq %rdi, %rbx -; movq %rdx, %r11 -; atomically { %rdx:%rax = 0(%r11); 0(%r11) = %rcx:%rbx } +; movq %rdx, %r8 +; atomically { %rdx:%rax = 0(%r8); 0(%r8) = %rcx:%rbx } ; movq (%rsp), %rbx ; addq $0x10, %rsp ; movq %rbp, %rsp @@ -78,10 +78,10 @@ block0(v0: i128, v1: i64): ; block1: ; offset 0xc ; movq %rsi, %rcx ; movq %rdi, %rbx -; movq %rdx, %r11 -; movq (%r11), %rax ; trap: heap_oob -; movq 8(%r11), %rdx ; trap: heap_oob -; lock cmpxchg16b (%r11) ; trap: heap_oob +; movq %rdx, %r8 +; movq (%r8), %rax ; trap: heap_oob +; movq 8(%r8), %rdx ; trap: heap_oob +; lock cmpxchg16b (%r8) ; trap: heap_oob ; jne 0x1c ; movq (%rsp), %rbx ; addq $0x10, %rsp @@ -140,8 +140,8 @@ block0(v0: i64, v1: i128): ; subq $0x10, %rsp ; movq %rbx, (%rsp) ; block0: -; movq %rdx, %r11 -; atomically { %rdx:%rax = 0(%rdi); %rcx:%rbx = %rdx:%rax Add %r11:%rsi; 0(%rdi) = %rcx:%rbx } +; movq %rdx, %r8 +; atomically { %rdx:%rax = 0(%rdi); %rcx:%rbx = %rdx:%rax Add %r8:%rsi; 0(%rdi) = %rcx:%rbx } ; movq (%rsp), %rbx ; addq $0x10, %rsp ; movq %rbp, %rsp @@ -155,13 +155,13 @@ block0(v0: i64, v1: i128): ; subq $0x10, %rsp ; movq %rbx, (%rsp) ; block1: ; offset 0xc -; movq %rdx, %r11 +; movq %rdx, %r8 ; movq (%rdi), %rax ; trap: heap_oob ; movq 8(%rdi), %rdx ; trap: heap_oob ; movq %rax, %rbx ; movq %rdx, %rcx ; addq %rsi, %rbx -; adcq %r11, %rcx +; adcq %r8, %rcx ; lock cmpxchg16b (%rdi) ; trap: heap_oob ; jne 0x16 ; movq (%rsp), %rbx @@ -182,8 +182,8 @@ block0(v0: i64, v1: i128): ; subq $0x10, %rsp ; movq %rbx, (%rsp) ; block0: -; movq %rdx, %r11 -; atomically { %rdx:%rax = 0(%rdi); %rcx:%rbx = %rdx:%rax Sub %r11:%rsi; 0(%rdi) = %rcx:%rbx } +; movq %rdx, %r8 +; atomically { %rdx:%rax = 0(%rdi); %rcx:%rbx = %rdx:%rax Sub %r8:%rsi; 0(%rdi) = %rcx:%rbx } ; movq (%rsp), %rbx ; addq $0x10, %rsp ; movq %rbp, %rsp @@ -197,13 +197,13 @@ block0(v0: i64, v1: i128): ; subq $0x10, %rsp ; movq %rbx, (%rsp) ; block1: ; offset 0xc -; movq %rdx, %r11 +; movq %rdx, %r8 ; movq (%rdi), %rax ; trap: heap_oob ; movq 8(%rdi), %rdx ; trap: heap_oob ; movq %rax, %rbx ; movq %rdx, %rcx ; subq %rsi, %rbx -; sbbq %r11, %rcx +; sbbq %r8, %rcx ; lock cmpxchg16b (%rdi) ; trap: heap_oob ; jne 0x16 ; movq (%rsp), %rbx @@ -224,8 +224,8 @@ block0(v0: i64, v1: i128): ; subq $0x10, %rsp ; movq %rbx, (%rsp) ; block0: -; movq %rdx, %r11 -; atomically { %rdx:%rax = 0(%rdi); %rcx:%rbx = %rdx:%rax And %r11:%rsi; 0(%rdi) = %rcx:%rbx } +; movq %rdx, %r8 +; atomically { %rdx:%rax = 0(%rdi); %rcx:%rbx = %rdx:%rax And %r8:%rsi; 0(%rdi) = %rcx:%rbx } ; movq (%rsp), %rbx ; addq $0x10, %rsp ; movq %rbp, %rsp @@ -239,13 +239,13 @@ block0(v0: i64, v1: i128): ; subq $0x10, %rsp ; movq %rbx, (%rsp) ; block1: ; offset 0xc -; movq %rdx, %r11 +; movq %rdx, %r8 ; movq (%rdi), %rax ; trap: heap_oob ; movq 8(%rdi), %rdx ; trap: heap_oob ; 
movq %rax, %rbx ; movq %rdx, %rcx ; andq %rsi, %rbx -; andq %r11, %rcx +; andq %r8, %rcx ; lock cmpxchg16b (%rdi) ; trap: heap_oob ; jne 0x16 ; movq (%rsp), %rbx @@ -266,8 +266,8 @@ block0(v0: i64, v1: i128): ; subq $0x10, %rsp ; movq %rbx, (%rsp) ; block0: -; movq %rdx, %r11 -; atomically { %rdx:%rax = 0(%rdi); %rcx:%rbx = %rdx:%rax Nand %r11:%rsi; 0(%rdi) = %rcx:%rbx } +; movq %rdx, %r8 +; atomically { %rdx:%rax = 0(%rdi); %rcx:%rbx = %rdx:%rax Nand %r8:%rsi; 0(%rdi) = %rcx:%rbx } ; movq (%rsp), %rbx ; addq $0x10, %rsp ; movq %rbp, %rsp @@ -281,13 +281,13 @@ block0(v0: i64, v1: i128): ; subq $0x10, %rsp ; movq %rbx, (%rsp) ; block1: ; offset 0xc -; movq %rdx, %r11 +; movq %rdx, %r8 ; movq (%rdi), %rax ; trap: heap_oob ; movq 8(%rdi), %rdx ; trap: heap_oob ; movq %rax, %rbx ; movq %rdx, %rcx ; andq %rsi, %rbx -; andq %r11, %rcx +; andq %r8, %rcx ; notq %rbx ; notq %rcx ; lock cmpxchg16b (%rdi) ; trap: heap_oob @@ -310,8 +310,8 @@ block0(v0: i64, v1: i128): ; subq $0x10, %rsp ; movq %rbx, (%rsp) ; block0: -; movq %rdx, %r11 -; atomically { %rdx:%rax = 0(%rdi); %rcx:%rbx = %rdx:%rax Or %r11:%rsi; 0(%rdi) = %rcx:%rbx } +; movq %rdx, %r8 +; atomically { %rdx:%rax = 0(%rdi); %rcx:%rbx = %rdx:%rax Or %r8:%rsi; 0(%rdi) = %rcx:%rbx } ; movq (%rsp), %rbx ; addq $0x10, %rsp ; movq %rbp, %rsp @@ -325,13 +325,13 @@ block0(v0: i64, v1: i128): ; subq $0x10, %rsp ; movq %rbx, (%rsp) ; block1: ; offset 0xc -; movq %rdx, %r11 +; movq %rdx, %r8 ; movq (%rdi), %rax ; trap: heap_oob ; movq 8(%rdi), %rdx ; trap: heap_oob ; movq %rax, %rbx ; movq %rdx, %rcx ; orq %rsi, %rbx -; orq %r11, %rcx +; orq %r8, %rcx ; lock cmpxchg16b (%rdi) ; trap: heap_oob ; jne 0x16 ; movq (%rsp), %rbx @@ -352,8 +352,8 @@ block0(v0: i64, v1: i128): ; subq $0x10, %rsp ; movq %rbx, (%rsp) ; block0: -; movq %rdx, %r11 -; atomically { %rdx:%rax = 0(%rdi); %rcx:%rbx = %rdx:%rax Xor %r11:%rsi; 0(%rdi) = %rcx:%rbx } +; movq %rdx, %r8 +; atomically { %rdx:%rax = 0(%rdi); %rcx:%rbx = %rdx:%rax Xor %r8:%rsi; 0(%rdi) = %rcx:%rbx } ; movq (%rsp), %rbx ; addq $0x10, %rsp ; movq %rbp, %rsp @@ -367,13 +367,13 @@ block0(v0: i64, v1: i128): ; subq $0x10, %rsp ; movq %rbx, (%rsp) ; block1: ; offset 0xc -; movq %rdx, %r11 +; movq %rdx, %r8 ; movq (%rdi), %rax ; trap: heap_oob ; movq 8(%rdi), %rdx ; trap: heap_oob ; movq %rax, %rbx ; movq %rdx, %rcx ; xorq %rsi, %rbx -; xorq %r11, %rcx +; xorq %r8, %rcx ; lock cmpxchg16b (%rdi) ; trap: heap_oob ; jne 0x16 ; movq (%rsp), %rbx @@ -434,8 +434,8 @@ block0(v0: i64, v1: i128): ; subq $0x10, %rsp ; movq %rbx, (%rsp) ; block0: -; movq %rdx, %r11 -; atomically { %rdx:%rax = 0(%rdi); %rcx:%rbx = %rdx:%rax Umin %r11:%rsi; 0(%rdi) = %rcx:%rbx } +; movq %rdx, %r8 +; atomically { %rdx:%rax = 0(%rdi); %rcx:%rbx = %rdx:%rax Umin %r8:%rsi; 0(%rdi) = %rcx:%rbx } ; movq (%rsp), %rbx ; addq $0x10, %rsp ; movq %rbp, %rsp @@ -449,16 +449,16 @@ block0(v0: i64, v1: i128): ; subq $0x10, %rsp ; movq %rbx, (%rsp) ; block1: ; offset 0xc -; movq %rdx, %r11 +; movq %rdx, %r8 ; movq (%rdi), %rax ; trap: heap_oob ; movq 8(%rdi), %rdx ; trap: heap_oob ; movq %rax, %rbx ; movq %rdx, %rcx ; cmpq %rsi, %rbx -; sbbq %r11, %rcx +; sbbq %r8, %rcx ; movq %rdx, %rcx ; cmovaeq %rsi, %rbx -; cmovaeq %r11, %rcx +; cmovaeq %r8, %rcx ; lock cmpxchg16b (%rdi) ; trap: heap_oob ; jne 0x16 ; movq (%rsp), %rbx @@ -479,8 +479,8 @@ block0(v0: i64, v1: i128): ; subq $0x10, %rsp ; movq %rbx, (%rsp) ; block0: -; movq %rdx, %r11 -; atomically { %rdx:%rax = 0(%rdi); %rcx:%rbx = %rdx:%rax Umax %r11:%rsi; 0(%rdi) = %rcx:%rbx } +; movq %rdx, %r8 +; atomically { 
%rdx:%rax = 0(%rdi); %rcx:%rbx = %rdx:%rax Umax %r8:%rsi; 0(%rdi) = %rcx:%rbx } ; movq (%rsp), %rbx ; addq $0x10, %rsp ; movq %rbp, %rsp @@ -494,16 +494,16 @@ block0(v0: i64, v1: i128): ; subq $0x10, %rsp ; movq %rbx, (%rsp) ; block1: ; offset 0xc -; movq %rdx, %r11 +; movq %rdx, %r8 ; movq (%rdi), %rax ; trap: heap_oob ; movq 8(%rdi), %rdx ; trap: heap_oob ; movq %rax, %rbx ; movq %rdx, %rcx ; cmpq %rsi, %rbx -; sbbq %r11, %rcx +; sbbq %r8, %rcx ; movq %rdx, %rcx ; cmovbq %rsi, %rbx -; cmovbq %r11, %rcx +; cmovbq %r8, %rcx ; lock cmpxchg16b (%rdi) ; trap: heap_oob ; jne 0x16 ; movq (%rsp), %rbx @@ -524,8 +524,8 @@ block0(v0: i64, v1: i128): ; subq $0x10, %rsp ; movq %rbx, (%rsp) ; block0: -; movq %rdx, %r11 -; atomically { %rdx:%rax = 0(%rdi); %rcx:%rbx = %rdx:%rax Smin %r11:%rsi; 0(%rdi) = %rcx:%rbx } +; movq %rdx, %r8 +; atomically { %rdx:%rax = 0(%rdi); %rcx:%rbx = %rdx:%rax Smin %r8:%rsi; 0(%rdi) = %rcx:%rbx } ; movq (%rsp), %rbx ; addq $0x10, %rsp ; movq %rbp, %rsp @@ -539,16 +539,16 @@ block0(v0: i64, v1: i128): ; subq $0x10, %rsp ; movq %rbx, (%rsp) ; block1: ; offset 0xc -; movq %rdx, %r11 +; movq %rdx, %r8 ; movq (%rdi), %rax ; trap: heap_oob ; movq 8(%rdi), %rdx ; trap: heap_oob ; movq %rax, %rbx ; movq %rdx, %rcx ; cmpq %rsi, %rbx -; sbbq %r11, %rcx +; sbbq %r8, %rcx ; movq %rdx, %rcx ; cmovgeq %rsi, %rbx -; cmovgeq %r11, %rcx +; cmovgeq %r8, %rcx ; lock cmpxchg16b (%rdi) ; trap: heap_oob ; jne 0x16 ; movq (%rsp), %rbx @@ -569,8 +569,8 @@ block0(v0: i64, v1: i128): ; subq $0x10, %rsp ; movq %rbx, (%rsp) ; block0: -; movq %rdx, %r11 -; atomically { %rdx:%rax = 0(%rdi); %rcx:%rbx = %rdx:%rax Smax %r11:%rsi; 0(%rdi) = %rcx:%rbx } +; movq %rdx, %r8 +; atomically { %rdx:%rax = 0(%rdi); %rcx:%rbx = %rdx:%rax Smax %r8:%rsi; 0(%rdi) = %rcx:%rbx } ; movq (%rsp), %rbx ; addq $0x10, %rsp ; movq %rbp, %rsp @@ -584,16 +584,16 @@ block0(v0: i64, v1: i128): ; subq $0x10, %rsp ; movq %rbx, (%rsp) ; block1: ; offset 0xc -; movq %rdx, %r11 +; movq %rdx, %r8 ; movq (%rdi), %rax ; trap: heap_oob ; movq 8(%rdi), %rdx ; trap: heap_oob ; movq %rax, %rbx ; movq %rdx, %rcx ; cmpq %rsi, %rbx -; sbbq %r11, %rcx +; sbbq %r8, %rcx ; movq %rdx, %rcx ; cmovlq %rsi, %rbx -; cmovlq %r11, %rcx +; cmovlq %r8, %rcx ; lock cmpxchg16b (%rdi) ; trap: heap_oob ; jne 0x16 ; movq (%rsp), %rbx diff --git a/cranelift/filetests/filetests/isa/x64/atomic-rmw.clif b/cranelift/filetests/filetests/isa/x64/atomic-rmw.clif index c84703fa1571..3de7073c2245 100644 --- a/cranelift/filetests/filetests/isa/x64/atomic-rmw.clif +++ b/cranelift/filetests/filetests/isa/x64/atomic-rmw.clif @@ -446,9 +446,9 @@ block0(v0: i64, v1: i64): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; andq %rsi, %rdx -; lock cmpxchgq %rdx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; andq %rsi, %r8 +; lock cmpxchgq %r8, (%rdi) ; trap: heap_oob ; jne 7 ; movq %rbp, %rsp ; popq %rbp @@ -475,9 +475,9 @@ block0(v0: i64, v1: i32): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movl (%rdi), %eax ; trap: heap_oob -; movq %rax, %rdx -; andq %rsi, %rdx -; lock cmpxchgl %edx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; andq %rsi, %r8 +; lock cmpxchgl %r8d, (%rdi) ; trap: heap_oob ; jne 6 ; movq %rbp, %rsp ; popq %rbp @@ -504,9 +504,9 @@ block0(v0: i64, v1: i16): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movzwq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; andq %rsi, %rdx -; lock cmpxchgw %dx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; andq %rsi, %r8 +; lock cmpxchgw %r8w, (%rdi) ; trap: heap_oob ; jne 8 ; movq 
%rbp, %rsp ; popq %rbp @@ -533,9 +533,9 @@ block0(v0: i64, v1: i8): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movzbq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; andq %rsi, %rdx -; lock cmpxchgb %dl, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; andq %rsi, %r8 +; lock cmpxchgb %r8b, (%rdi) ; trap: heap_oob ; jne 8 ; movq %rbp, %rsp ; popq %rbp @@ -662,10 +662,10 @@ block0(v0: i64, v1: i64): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; andq %rsi, %rdx -; notq %rdx -; lock cmpxchgq %rdx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; andq %rsi, %r8 +; notq %r8 +; lock cmpxchgq %r8, (%rdi) ; trap: heap_oob ; jne 7 ; movq %rbp, %rsp ; popq %rbp @@ -692,10 +692,10 @@ block0(v0: i64, v1: i32): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movl (%rdi), %eax ; trap: heap_oob -; movq %rax, %rdx -; andq %rsi, %rdx -; notq %rdx -; lock cmpxchgl %edx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; andq %rsi, %r8 +; notq %r8 +; lock cmpxchgl %r8d, (%rdi) ; trap: heap_oob ; jne 6 ; movq %rbp, %rsp ; popq %rbp @@ -722,10 +722,10 @@ block0(v0: i64, v1: i16): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movzwq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; andq %rsi, %rdx -; notq %rdx -; lock cmpxchgw %dx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; andq %rsi, %r8 +; notq %r8 +; lock cmpxchgw %r8w, (%rdi) ; trap: heap_oob ; jne 8 ; movq %rbp, %rsp ; popq %rbp @@ -752,10 +752,10 @@ block0(v0: i64, v1: i8): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movzbq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; andq %rsi, %rdx -; notq %rdx -; lock cmpxchgb %dl, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; andq %rsi, %r8 +; notq %r8 +; lock cmpxchgb %r8b, (%rdi) ; trap: heap_oob ; jne 8 ; movq %rbp, %rsp ; popq %rbp @@ -782,9 +782,9 @@ block0(v0: i64, v1: i64): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; orq %rsi, %rdx -; lock cmpxchgq %rdx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; orq %rsi, %r8 +; lock cmpxchgq %r8, (%rdi) ; trap: heap_oob ; jne 7 ; movq %rbp, %rsp ; popq %rbp @@ -811,9 +811,9 @@ block0(v0: i64, v1: i32): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movl (%rdi), %eax ; trap: heap_oob -; movq %rax, %rdx -; orq %rsi, %rdx -; lock cmpxchgl %edx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; orq %rsi, %r8 +; lock cmpxchgl %r8d, (%rdi) ; trap: heap_oob ; jne 6 ; movq %rbp, %rsp ; popq %rbp @@ -840,9 +840,9 @@ block0(v0: i64, v1: i16): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movzwq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; orq %rsi, %rdx -; lock cmpxchgw %dx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; orq %rsi, %r8 +; lock cmpxchgw %r8w, (%rdi) ; trap: heap_oob ; jne 8 ; movq %rbp, %rsp ; popq %rbp @@ -869,9 +869,9 @@ block0(v0: i64, v1: i8): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movzbq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; orq %rsi, %rdx -; lock cmpxchgb %dl, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; orq %rsi, %r8 +; lock cmpxchgb %r8b, (%rdi) ; trap: heap_oob ; jne 8 ; movq %rbp, %rsp ; popq %rbp @@ -998,9 +998,9 @@ block0(v0: i64, v1: i64): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; xorq %rsi, %rdx -; lock cmpxchgq %rdx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; xorq %rsi, %r8 +; lock cmpxchgq %r8, (%rdi) ; trap: heap_oob ; jne 7 ; movq %rbp, %rsp ; popq %rbp @@ -1027,9 +1027,9 @@ block0(v0: i64, v1: i32): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movl (%rdi), %eax ; trap: heap_oob -; 
movq %rax, %rdx -; xorq %rsi, %rdx -; lock cmpxchgl %edx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; xorq %rsi, %r8 +; lock cmpxchgl %r8d, (%rdi) ; trap: heap_oob ; jne 6 ; movq %rbp, %rsp ; popq %rbp @@ -1056,9 +1056,9 @@ block0(v0: i64, v1: i16): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movzwq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; xorq %rsi, %rdx -; lock cmpxchgw %dx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; xorq %rsi, %r8 +; lock cmpxchgw %r8w, (%rdi) ; trap: heap_oob ; jne 8 ; movq %rbp, %rsp ; popq %rbp @@ -1085,9 +1085,9 @@ block0(v0: i64, v1: i8): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movzbq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; xorq %rsi, %rdx -; lock cmpxchgb %dl, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; xorq %rsi, %r8 +; lock cmpxchgb %r8b, (%rdi) ; trap: heap_oob ; jne 8 ; movq %rbp, %rsp ; popq %rbp @@ -1322,10 +1322,10 @@ block0(v0: i64, v1: i64): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; cmpq %rdx, %rsi -; cmovbeq %rsi, %rdx -; lock cmpxchgq %rdx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; cmpq %r8, %rsi +; cmovbeq %rsi, %r8 +; lock cmpxchgq %r8, (%rdi) ; trap: heap_oob ; jne 7 ; movq %rbp, %rsp ; popq %rbp @@ -1352,10 +1352,10 @@ block0(v0: i64, v1: i32): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movl (%rdi), %eax ; trap: heap_oob -; movq %rax, %rdx -; cmpl %edx, %esi -; cmovbeq %rsi, %rdx -; lock cmpxchgl %edx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; cmpl %r8d, %esi +; cmovbeq %rsi, %r8 +; lock cmpxchgl %r8d, (%rdi) ; trap: heap_oob ; jne 6 ; movq %rbp, %rsp ; popq %rbp @@ -1382,10 +1382,10 @@ block0(v0: i64, v1: i16): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movzwq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; cmpw %dx, %si -; cmovbeq %rsi, %rdx -; lock cmpxchgw %dx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; cmpw %r8w, %si +; cmovbeq %rsi, %r8 +; lock cmpxchgw %r8w, (%rdi) ; trap: heap_oob ; jne 8 ; movq %rbp, %rsp ; popq %rbp @@ -1412,10 +1412,10 @@ block0(v0: i64, v1: i8): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movzbq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; cmpb %dl, %sil -; cmovbeq %rsi, %rdx -; lock cmpxchgb %dl, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; cmpb %r8b, %sil +; cmovbeq %rsi, %r8 +; lock cmpxchgb %r8b, (%rdi) ; trap: heap_oob ; jne 8 ; movq %rbp, %rsp ; popq %rbp @@ -1442,10 +1442,10 @@ block0(v0: i64, v1: i64): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; cmpq %rdx, %rsi -; cmovaeq %rsi, %rdx -; lock cmpxchgq %rdx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; cmpq %r8, %rsi +; cmovaeq %rsi, %r8 +; lock cmpxchgq %r8, (%rdi) ; trap: heap_oob ; jne 7 ; movq %rbp, %rsp ; popq %rbp @@ -1472,10 +1472,10 @@ block0(v0: i64, v1: i32): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movl (%rdi), %eax ; trap: heap_oob -; movq %rax, %rdx -; cmpl %edx, %esi -; cmovaeq %rsi, %rdx -; lock cmpxchgl %edx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; cmpl %r8d, %esi +; cmovaeq %rsi, %r8 +; lock cmpxchgl %r8d, (%rdi) ; trap: heap_oob ; jne 6 ; movq %rbp, %rsp ; popq %rbp @@ -1502,10 +1502,10 @@ block0(v0: i64, v1: i16): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movzwq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; cmpw %dx, %si -; cmovaeq %rsi, %rdx -; lock cmpxchgw %dx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; cmpw %r8w, %si +; cmovaeq %rsi, %r8 +; lock cmpxchgw %r8w, (%rdi) ; trap: heap_oob ; jne 8 ; movq %rbp, %rsp ; popq %rbp @@ -1532,10 +1532,10 @@ block0(v0: i64, v1: i8): 
; movq %rsp, %rbp ; block1: ; offset 0x4 ; movzbq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; cmpb %dl, %sil -; cmovaeq %rsi, %rdx -; lock cmpxchgb %dl, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; cmpb %r8b, %sil +; cmovaeq %rsi, %r8 +; lock cmpxchgb %r8b, (%rdi) ; trap: heap_oob ; jne 8 ; movq %rbp, %rsp ; popq %rbp @@ -1562,10 +1562,10 @@ block0(v0: i64, v1: i64): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; cmpq %rdx, %rsi -; cmovleq %rsi, %rdx -; lock cmpxchgq %rdx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; cmpq %r8, %rsi +; cmovleq %rsi, %r8 +; lock cmpxchgq %r8, (%rdi) ; trap: heap_oob ; jne 7 ; movq %rbp, %rsp ; popq %rbp @@ -1592,10 +1592,10 @@ block0(v0: i64, v1: i32): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movl (%rdi), %eax ; trap: heap_oob -; movq %rax, %rdx -; cmpl %edx, %esi -; cmovleq %rsi, %rdx -; lock cmpxchgl %edx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; cmpl %r8d, %esi +; cmovleq %rsi, %r8 +; lock cmpxchgl %r8d, (%rdi) ; trap: heap_oob ; jne 6 ; movq %rbp, %rsp ; popq %rbp @@ -1622,10 +1622,10 @@ block0(v0: i64, v1: i16): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movzwq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; cmpw %dx, %si -; cmovleq %rsi, %rdx -; lock cmpxchgw %dx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; cmpw %r8w, %si +; cmovleq %rsi, %r8 +; lock cmpxchgw %r8w, (%rdi) ; trap: heap_oob ; jne 8 ; movq %rbp, %rsp ; popq %rbp @@ -1652,10 +1652,10 @@ block0(v0: i64, v1: i8): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movzbq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; cmpb %dl, %sil -; cmovleq %rsi, %rdx -; lock cmpxchgb %dl, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; cmpb %r8b, %sil +; cmovleq %rsi, %r8 +; lock cmpxchgb %r8b, (%rdi) ; trap: heap_oob ; jne 8 ; movq %rbp, %rsp ; popq %rbp @@ -1682,10 +1682,10 @@ block0(v0: i64, v1: i64): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; cmpq %rdx, %rsi -; cmovgeq %rsi, %rdx -; lock cmpxchgq %rdx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; cmpq %r8, %rsi +; cmovgeq %rsi, %r8 +; lock cmpxchgq %r8, (%rdi) ; trap: heap_oob ; jne 7 ; movq %rbp, %rsp ; popq %rbp @@ -1712,10 +1712,10 @@ block0(v0: i64, v1: i32): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movl (%rdi), %eax ; trap: heap_oob -; movq %rax, %rdx -; cmpl %edx, %esi -; cmovgeq %rsi, %rdx -; lock cmpxchgl %edx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; cmpl %r8d, %esi +; cmovgeq %rsi, %r8 +; lock cmpxchgl %r8d, (%rdi) ; trap: heap_oob ; jne 6 ; movq %rbp, %rsp ; popq %rbp @@ -1742,10 +1742,10 @@ block0(v0: i64, v1: i16): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movzwq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; cmpw %dx, %si -; cmovgeq %rsi, %rdx -; lock cmpxchgw %dx, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; cmpw %r8w, %si +; cmovgeq %rsi, %r8 +; lock cmpxchgw %r8w, (%rdi) ; trap: heap_oob ; jne 8 ; movq %rbp, %rsp ; popq %rbp @@ -1772,10 +1772,10 @@ block0(v0: i64, v1: i8): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movzbq (%rdi), %rax ; trap: heap_oob -; movq %rax, %rdx -; cmpb %dl, %sil -; cmovgeq %rsi, %rdx -; lock cmpxchgb %dl, (%rdi) ; trap: heap_oob +; movq %rax, %r8 +; cmpb %r8b, %sil +; cmovgeq %rsi, %r8 +; lock cmpxchgb %r8b, (%rdi) ; trap: heap_oob ; jne 8 ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/bmask.clif b/cranelift/filetests/filetests/isa/x64/bmask.clif index eccd8f4d9a24..dc2e36cc39d5 100644 --- a/cranelift/filetests/filetests/isa/x64/bmask.clif +++ 
b/cranelift/filetests/filetests/isa/x64/bmask.clif @@ -13,8 +13,8 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; negq %rax +; movq %rdi, %rdx +; negq %rdx ; movq %rdi, %rax ; sbbq %rdi, %rax ; movq %rbp, %rsp @@ -26,8 +26,8 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; negq %rax +; movq %rdi, %rdx +; negq %rdx ; movq %rdi, %rax ; sbbq %rdi, %rax ; movq %rbp, %rsp @@ -44,8 +44,8 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; negq %rax +; movq %rdi, %rdx +; negq %rdx ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -57,8 +57,8 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; negq %rax +; movq %rdi, %rdx +; negq %rdx ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -75,8 +75,8 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; negq %rax +; movq %rdi, %rdx +; negq %rdx ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -88,8 +88,8 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; negq %rax +; movq %rdi, %rdx +; negq %rdx ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -106,8 +106,8 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; negq %rax +; movq %rdi, %rdx +; negq %rdx ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -119,8 +119,8 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; negq %rax +; movq %rdi, %rdx +; negq %rdx ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -137,8 +137,8 @@ block0(v0: i32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; negl %eax +; movq %rdi, %rdx +; negl %edx ; movq %rdi, %rax ; sbbq %rdi, %rax ; movq %rbp, %rsp @@ -150,8 +150,8 @@ block0(v0: i32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; negl %eax +; movq %rdi, %rdx +; negl %edx ; movq %rdi, %rax ; sbbq %rdi, %rax ; movq %rbp, %rsp @@ -168,8 +168,8 @@ block0(v0: i32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; negl %eax +; movq %rdi, %rdx +; negl %edx ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -181,8 +181,8 @@ block0(v0: i32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; negl %eax +; movq %rdi, %rdx +; negl %edx ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -199,8 +199,8 @@ block0(v0: i32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; negl %eax +; movq %rdi, %rdx +; negl %edx ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -212,8 +212,8 @@ block0(v0: i32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; negl %eax +; movq %rdi, %rdx +; negl %edx ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -230,8 +230,8 @@ block0(v0: i32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; negl %eax +; movq %rdi, %rdx +; negl %edx ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -243,8 +243,8 @@ block0(v0: i32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; negl %eax +; movq %rdi, %rdx +; negl %edx ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -261,8 +261,8 @@ block0(v0: i16): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; negw %ax +; movq %rdi, %rdx +; negw %dx ; movq %rdi, %rax ; sbbq %rdi, %rax ; movq %rbp, %rsp @@ -274,8 +274,8 @@ block0(v0: i16): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; 
offset 0x4 -; movq %rdi, %rax -; negw %ax +; movq %rdi, %rdx +; negw %dx ; movq %rdi, %rax ; sbbq %rdi, %rax ; movq %rbp, %rsp @@ -292,8 +292,8 @@ block0(v0: i16): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; negw %ax +; movq %rdi, %rdx +; negw %dx ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -305,8 +305,8 @@ block0(v0: i16): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; negw %ax +; movq %rdi, %rdx +; negw %dx ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -323,8 +323,8 @@ block0(v0: i16): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; negw %ax +; movq %rdi, %rdx +; negw %dx ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -336,8 +336,8 @@ block0(v0: i16): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; negw %ax +; movq %rdi, %rdx +; negw %dx ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -354,8 +354,8 @@ block0(v0: i16): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; negw %ax +; movq %rdi, %rdx +; negw %dx ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -367,8 +367,8 @@ block0(v0: i16): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; negw %ax +; movq %rdi, %rdx +; negw %dx ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -385,8 +385,8 @@ block0(v0: i8): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; negb %al +; movq %rdi, %rdx +; negb %dl ; movq %rdi, %rax ; sbbq %rdi, %rax ; movq %rbp, %rsp @@ -398,8 +398,8 @@ block0(v0: i8): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; negb %al +; movq %rdi, %rdx +; negb %dl ; movq %rdi, %rax ; sbbq %rdi, %rax ; movq %rbp, %rsp @@ -416,8 +416,8 @@ block0(v0: i8): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; negb %al +; movq %rdi, %rdx +; negb %dl ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -429,8 +429,8 @@ block0(v0: i8): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; negb %al +; movq %rdi, %rdx +; negb %dl ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -447,8 +447,8 @@ block0(v0: i8): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; negb %al +; movq %rdi, %rdx +; negb %dl ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -460,8 +460,8 @@ block0(v0: i8): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; negb %al +; movq %rdi, %rdx +; negb %dl ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -478,8 +478,8 @@ block0(v0: i8): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; negb %al +; movq %rdi, %rdx +; negb %dl ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -491,8 +491,8 @@ block0(v0: i8): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; negb %al +; movq %rdi, %rdx +; negb %dl ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -510,8 +510,8 @@ block0(v0: i128): ; movq %rsp, %rbp ; block0: ; orq %rsi, %rdi -; movq %rdi, %r8 -; negq %r8 +; movq %rdi, %rsi +; negq %rsi ; movq %rdi, %rdx ; sbbq %rdi, %rdx ; movq %rdx, %rax @@ -525,8 +525,8 @@ block0(v0: i128): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; orq %rsi, %rdi -; movq %rdi, %r8 -; negq %r8 +; movq %rdi, %rsi +; negq %rsi ; movq %rdi, %rdx ; sbbq %rdi, %rdx ; movq %rdx, %rax @@ -545,8 +545,8 @@ block0(v0: i128): ; movq %rsp, %rbp ; block0: ; orq %rsi, %rdi -; movq %rdi, %r8 -; negq %r8 +; movq %rdi, %rsi +; negq %rsi ; movq %rdi, %rax ; sbbq %rdi, %rax ; movq %rbp, %rsp @@ -559,8 +559,8 
@@ block0(v0: i128): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; orq %rsi, %rdi -; movq %rdi, %r8 -; negq %r8 +; movq %rdi, %rsi +; negq %rsi ; movq %rdi, %rax ; sbbq %rdi, %rax ; movq %rbp, %rsp @@ -578,8 +578,8 @@ block0(v0: i128): ; movq %rsp, %rbp ; block0: ; orq %rsi, %rdi -; movq %rdi, %r8 -; negq %r8 +; movq %rdi, %rsi +; negq %rsi ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -592,8 +592,8 @@ block0(v0: i128): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; orq %rsi, %rdi -; movq %rdi, %r8 -; negq %r8 +; movq %rdi, %rsi +; negq %rsi ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -611,8 +611,8 @@ block0(v0: i128): ; movq %rsp, %rbp ; block0: ; orq %rsi, %rdi -; movq %rdi, %r8 -; negq %r8 +; movq %rdi, %rsi +; negq %rsi ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -625,8 +625,8 @@ block0(v0: i128): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; orq %rsi, %rdi -; movq %rdi, %r8 -; negq %r8 +; movq %rdi, %rsi +; negq %rsi ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -644,8 +644,8 @@ block0(v0: i128): ; movq %rsp, %rbp ; block0: ; orq %rsi, %rdi -; movq %rdi, %r8 -; negq %r8 +; movq %rdi, %rsi +; negq %rsi ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -658,8 +658,8 @@ block0(v0: i128): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; orq %rsi, %rdi -; movq %rdi, %r8 -; negq %r8 +; movq %rdi, %rsi +; negq %rsi ; movq %rdi, %rax ; sbbl %edi, %eax ; movq %rbp, %rsp @@ -676,8 +676,8 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; negq %rax +; movq %rdi, %rdx +; negq %rdx ; movq %rdi, %rdx ; sbbq %rdi, %rdx ; movq %rdx, %rax @@ -690,8 +690,8 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; negq %rax +; movq %rdi, %rdx +; negq %rdx ; movq %rdi, %rdx ; sbbq %rdi, %rdx ; movq %rdx, %rax @@ -709,8 +709,8 @@ block0(v0: i32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; negl %eax +; movq %rdi, %rdx +; negl %edx ; movq %rdi, %rdx ; sbbq %rdi, %rdx ; movq %rdx, %rax @@ -723,8 +723,8 @@ block0(v0: i32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; negl %eax +; movq %rdi, %rdx +; negl %edx ; movq %rdi, %rdx ; sbbq %rdi, %rdx ; movq %rdx, %rax @@ -742,8 +742,8 @@ block0(v0: i16): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; negw %ax +; movq %rdi, %rdx +; negw %dx ; movq %rdi, %rdx ; sbbq %rdi, %rdx ; movq %rdx, %rax @@ -756,8 +756,8 @@ block0(v0: i16): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; negw %ax +; movq %rdi, %rdx +; negw %dx ; movq %rdi, %rdx ; sbbq %rdi, %rdx ; movq %rdx, %rax @@ -775,8 +775,8 @@ block0(v0: i8): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; negb %al +; movq %rdi, %rdx +; negb %dl ; movq %rdi, %rdx ; sbbq %rdi, %rdx ; movq %rdx, %rax @@ -789,8 +789,8 @@ block0(v0: i8): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; negb %al +; movq %rdi, %rdx +; negb %dl ; movq %rdi, %rdx ; sbbq %rdi, %rdx ; movq %rdx, %rax @@ -811,8 +811,8 @@ block0(v0: i32, v1: i32): ; block0: ; cmpl %esi, %edi ; setg %al -; movq %rax, %r8 -; negb %r8b +; movq %rax, %rsi +; negb %sil ; sbbl %eax, %eax ; movq %rbp, %rsp ; popq %rbp @@ -825,8 +825,8 @@ block0(v0: i32, v1: i32): ; block1: ; offset 0x4 ; cmpl %esi, %edi ; setg %al -; movq %rax, %r8 -; negb %r8b +; movq %rax, %rsi +; negb %sil ; sbbl %eax, %eax ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/branches.clif 
b/cranelift/filetests/filetests/isa/x64/branches.clif index 6abe49276ff0..da477c19893b 100644 --- a/cranelift/filetests/filetests/isa/x64/branches.clif +++ b/cranelift/filetests/filetests/isa/x64/branches.clif @@ -313,11 +313,12 @@ block2: ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movl $0x2, %r10d -; movl %edi, %r11d -; cmpl %r10d, %r11d -; cmovbl %r11d, %r10d -; br_table %r10, %rcx, %rdx +; movq %rdi, %r11 +; movl $0x2, %edi +; movl %r11d, %r8d +; cmpl %edi, %r8d +; cmovbl %r8d, %edi +; br_table %rdi, %rcx, %rax ; block1: ; jmp label4 ; block2: @@ -339,13 +340,14 @@ block2: ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl $2, %r10d -; movl %edi, %r11d -; cmpl %r10d, %r11d -; cmovbl %r11d, %r10d +; movq %rdi, %r11 +; movl $2, %edi +; movl %r11d, %r8d +; cmpl %edi, %r8d +; cmovbl %r8d, %edi ; leaq 9(%rip), %rcx -; movslq (%rcx, %r10, 4), %rdx -; addq %rdx, %rcx +; movslq (%rcx, %rdi, 4), %rax +; addq %rax, %rcx ; jmpq *%rcx ; sbbb %al, (%rax) ; addb %al, (%rax) @@ -353,14 +355,14 @@ block2: ; addb %al, (%rax) ; sbbb %al, (%rax) ; addb %al, (%rax) -; block2: ; offset 0x30 -; jmp 0x3c -; block3: ; offset 0x35 +; block2: ; offset 0x32 +; jmp 0x3e +; block3: ; offset 0x37 ; xorl %eax, %eax ; movq %rbp, %rsp ; popq %rbp ; retq -; block4: ; offset 0x3c +; block4: ; offset 0x3e ; movl $1, %eax ; movq %rbp, %rsp ; popq %rbp @@ -960,29 +962,29 @@ block5(v5: i32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movl $0x4, %eax -; movl %edi, %ecx -; cmpl %eax, %ecx -; cmovbl %ecx, %eax -; br_table %rax, %r9, %r10 +; movl $0x4, %r10d +; movl %edi, %r11d +; cmpl %r10d, %r11d +; cmovbl %r11d, %r10d +; br_table %r10, %rcx, %rax ; block1: ; jmp label4 ; block2: ; jmp label4 ; block3: -; movl $0x3, %esi +; movl $0x3, %r9d ; jmp label7 ; block4: -; movl $0x2, %esi +; movl $0x2, %r9d ; jmp label7 ; block5: -; movl $0x1, %esi +; movl $0x1, %r9d ; jmp label7 ; block6: -; movl $0x4, %esi +; movl $0x4, %r9d ; jmp label7 ; block7: -; leal (%rdi, %rsi), %eax +; leal (%rdi, %r9), %eax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -992,37 +994,29 @@ block5(v5: i32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl $4, %eax -; movl %edi, %ecx -; cmpl %eax, %ecx -; cmovbl %ecx, %eax -; leaq 0xa(%rip), %r9 -; movslq (%r9, %rax, 4), %r10 -; addq %r10, %r9 -; jmpq *%r9 -; subl $0x23000000, %eax -; addb %al, (%rax) -; addb %ah, (%rbx) -; addb %al, (%rax) -; addb %bl, (%rcx) -; addb %al, (%rax) -; addb %dh, (%rdi) -; addb %al, (%rax) -; block2: ; offset 0x35 -; jmp 0x44 -; block3: ; offset 0x3a -; movl $3, %esi -; jmp 0x5d -; block4: ; offset 0x44 -; movl $2, %esi -; jmp 0x5d -; block5: ; offset 0x4e -; movl $1, %esi -; jmp 0x5d -; block6: ; offset 0x58 -; movl $4, %esi -; block7: ; offset 0x5d -; leal (%rdi, %rsi), %eax +; movl $4, %r10d +; movl %edi, %r11d +; cmpl %r10d, %r11d +; cmovbl %r11d, %r10d +; leaq 9(%rip), %rcx +; movslq (%rcx, %r10, 4), %rax +; addq %rax, %rcx +; jmpq *%rcx +; block2: ; offset 0x38 +; jmp 0x48 +; block3: ; offset 0x3d +; movl $3, %r9d +; jmp 0x64 +; block4: ; offset 0x48 +; movl $2, %r9d +; jmp 0x64 +; block5: ; offset 0x53 +; movl $1, %r9d +; jmp 0x64 +; block6: ; offset 0x5e +; movl $4, %r9d +; block7: ; offset 0x64 +; leal (%rdi, %r9), %eax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1043,28 +1037,28 @@ block1(v5: i32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movl $0x1, %r8d -; movl $0x2, %ecx -; movl $0x3, %edx +; movl $0x1, %r11d +; movl $0x2, %edx +; movl $0x3, %ecx ; movl $0x4, %eax -; movl $0x4, %esi -; movl %edi, %edi -; cmpl %esi, %edi -; cmovbl %edi, %esi -; 
br_table %rsi, %r10, %r9 +; movl $0x4, %r9d +; movl %edi, %r10d +; cmpl %r9d, %r10d +; cmovbl %r10d, %r9d +; br_table %r9, %rdi, %rsi ; block1: ; jmp label6 ; block2: -; movq %r8, %rax +; movq %r11, %rax ; jmp label6 ; block3: -; movq %rcx, %rax +; movq %rdx, %rax ; jmp label6 ; block4: -; movq %rcx, %rax +; movq %rdx, %rax ; jmp label6 ; block5: -; movq %rdx, %rax +; movq %rcx, %rax ; jmp label6 ; block6: ; movq %rbp, %rsp @@ -1076,18 +1070,18 @@ block1(v5: i32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl $1, %r8d -; movl $2, %ecx -; movl $3, %edx +; movl $1, %r11d +; movl $2, %edx +; movl $3, %ecx ; movl $4, %eax -; movl $4, %esi -; movl %edi, %edi -; cmpl %esi, %edi -; cmovbl %edi, %esi -; leaq 0xa(%rip), %r10 -; movslq (%r10, %rsi, 4), %r9 -; addq %r9, %r10 -; jmpq *%r10 +; movl $4, %r9d +; movl %edi, %r10d +; cmpl %r9d, %r10d +; cmovbl %r10d, %r9d +; leaq 9(%rip), %rdi +; movslq (%rdi, %r9, 4), %rsi +; addq %rsi, %rdi +; jmpq *%rdi ; sbbl %eax, (%rax) ; addb %al, (%rax) ; andl %eax, (%rax) @@ -1098,20 +1092,20 @@ block1(v5: i32): ; addb %al, (%rax) ; xorb $0, %al ; addb %al, (%rax) -; block2: ; offset 0x4a -; jmp 0x6a -; block3: ; offset 0x4f -; movq %r8, %rax -; jmp 0x6a -; block4: ; offset 0x57 -; movq %rcx, %rax -; jmp 0x6a -; block5: ; offset 0x5f -; movq %rcx, %rax -; jmp 0x6a -; block6: ; offset 0x67 +; block2: ; offset 0x4d +; jmp 0x6d +; block3: ; offset 0x52 +; movq %r11, %rax +; jmp 0x6d +; block4: ; offset 0x5a ; movq %rdx, %rax -; block7: ; offset 0x6a +; jmp 0x6d +; block5: ; offset 0x62 +; movq %rdx, %rax +; jmp 0x6d +; block6: ; offset 0x6a +; movq %rcx, %rax +; block7: ; offset 0x6d ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/call-conv.clif b/cranelift/filetests/filetests/isa/x64/call-conv.clif index b0a50999b3a5..9ebe76441d35 100644 --- a/cranelift/filetests/filetests/isa/x64/call-conv.clif +++ b/cranelift/filetests/filetests/isa/x64/call-conv.clif @@ -194,9 +194,9 @@ block0( ; subq $0x90, %rsp ; block0: ; movq +-0x20(%rbp), %r10 -; movq +-0x18(%rbp), %r11 -; movss +-0x10(%rbp), %xmm11 -; movsd +-8(%rbp), %xmm13 +; movq +-0x18(%rbp), %rax +; movss +-0x10(%rbp), %xmm8 +; movsd +-8(%rbp), %xmm9 ; movq %r8, 0x20(%rsp) ; movq %r9, 0x28(%rsp) ; movsd %xmm0, 0x30(%rsp) @@ -208,9 +208,9 @@ block0( ; movsd %xmm6, 0x60(%rsp) ; movsd %xmm7, 0x68(%rsp) ; movq %r10, 0x70(%rsp) -; movl %r11d, 0x78(%rsp) -; movss %xmm11, 0x80(%rsp) -; movsd %xmm13, 0x88(%rsp) +; movl %eax, 0x78(%rsp) +; movss %xmm8, 0x80(%rsp) +; movsd %xmm9, 0x88(%rsp) ; movq %rcx, %r9 ; movq %rdx, %r8 ; movq %rsi, %rdx @@ -228,9 +228,9 @@ block0( ; subq $0x90, %rsp ; block1: ; offset 0xb ; movq 0x10(%rbp), %r10 -; movq 0x18(%rbp), %r11 -; movss 0x20(%rbp), %xmm11 -; movsd 0x28(%rbp), %xmm13 +; movq 0x18(%rbp), %rax +; movss 0x20(%rbp), %xmm8 +; movsd 0x28(%rbp), %xmm9 ; movq %r8, 0x20(%rsp) ; movq %r9, 0x28(%rsp) ; movsd %xmm0, 0x30(%rsp) @@ -242,9 +242,9 @@ block0( ; movsd %xmm6, 0x60(%rsp) ; movsd %xmm7, 0x68(%rsp) ; movq %r10, 0x70(%rsp) -; movl %r11d, 0x78(%rsp) -; movss %xmm11, 0x80(%rsp) -; movsd %xmm13, 0x88(%rsp) +; movl %eax, 0x78(%rsp) +; movss %xmm8, 0x80(%rsp) +; movsd %xmm9, 0x88(%rsp) ; movq %rcx, %r9 ; movq %rdx, %r8 ; movq %rsi, %rdx @@ -443,9 +443,9 @@ block0(v0: i32, v1: i8x16): ; movq %rsp, %rbp ; subq $0x40, %rsp ; block0: -; leaq 0x30(%rsp), %rcx -; movdqu %xmm0, (%rcx) -; movq %rcx, 0x20(%rsp) +; leaq 0x30(%rsp), %rsi +; movdqu %xmm0, (%rsi) +; movq %rsi, 0x20(%rsp) ; movq %rdi, %r9 ; movq %r9, %rcx ; movq %r9, %rdx @@ -462,9 
+462,9 @@ block0(v0: i32, v1: i8x16): ; movq %rsp, %rbp ; subq $0x40, %rsp ; block1: ; offset 0x8 -; leaq 0x30(%rsp), %rcx -; movdqu %xmm0, (%rcx) -; movq %rcx, 0x20(%rsp) +; leaq 0x30(%rsp), %rsi +; movdqu %xmm0, (%rsi) +; movq %rsi, 0x20(%rsp) ; movq %rdi, %r9 ; movq %r9, %rcx ; movq %r9, %rdx @@ -487,12 +487,12 @@ block0(v0: i32, v1: i8x16): ; movq %rsp, %rbp ; subq $0x50, %rsp ; block0: -; leaq 0x30(%rsp), %rcx -; movdqu %xmm0, (%rcx) -; movq %rcx, 0x20(%rsp) -; leaq 0x40(%rsp), %r10 -; movdqu %xmm0, (%r10) -; movq %r10, 0x28(%rsp) +; leaq 0x30(%rsp), %rsi +; movdqu %xmm0, (%rsi) +; movq %rsi, 0x20(%rsp) +; leaq 0x40(%rsp), %r8 +; movdqu %xmm0, (%r8) +; movq %r8, 0x28(%rsp) ; movq %rdi, %r9 ; movq %r9, %rcx ; movq %r9, %rdx @@ -509,12 +509,12 @@ block0(v0: i32, v1: i8x16): ; movq %rsp, %rbp ; subq $0x50, %rsp ; block1: ; offset 0x8 -; leaq 0x30(%rsp), %rcx -; movdqu %xmm0, (%rcx) -; movq %rcx, 0x20(%rsp) -; leaq 0x40(%rsp), %r10 -; movdqu %xmm0, (%r10) -; movq %r10, 0x28(%rsp) +; leaq 0x30(%rsp), %rsi +; movdqu %xmm0, (%rsi) +; movq %rsi, 0x20(%rsp) +; leaq 0x40(%rsp), %r8 +; movdqu %xmm0, (%r8) +; movq %r8, 0x28(%rsp) ; movq %rdi, %r9 ; movq %r9, %rcx ; movq %r9, %rdx @@ -539,12 +539,12 @@ block0(v0: i32, v1: i8x16): ; block0: ; leaq 0x30(%rsp), %rdx ; movdqu %xmm0, (%rdx) -; leaq 0x40(%rsp), %r9 -; movdqu %xmm0, (%r9) -; movq %r9, 0x20(%rsp) -; leaq 0x50(%rsp), %rax -; movdqu %xmm0, (%rax) -; movq %rax, 0x28(%rsp) +; leaq 0x40(%rsp), %rsi +; movdqu %xmm0, (%rsi) +; movq %rsi, 0x20(%rsp) +; leaq 0x50(%rsp), %r10 +; movdqu %xmm0, (%r10) +; movq %r10, 0x28(%rsp) ; movq %rdi, %r9 ; movq %r9, %rcx ; movq %r9, %r8 @@ -562,12 +562,12 @@ block0(v0: i32, v1: i8x16): ; block1: ; offset 0x8 ; leaq 0x30(%rsp), %rdx ; movdqu %xmm0, (%rdx) -; leaq 0x40(%rsp), %r9 -; movdqu %xmm0, (%r9) -; movq %r9, 0x20(%rsp) -; leaq 0x50(%rsp), %rax -; movdqu %xmm0, (%rax) -; movq %rax, 0x28(%rsp) +; leaq 0x40(%rsp), %rsi +; movdqu %xmm0, (%rsi) +; movq %rsi, 0x20(%rsp) +; leaq 0x50(%rsp), %r10 +; movdqu %xmm0, (%r10) +; movq %r10, 0x28(%rsp) ; movq %rdi, %r9 ; movq %r9, %rcx ; movq %r9, %r8 diff --git a/cranelift/filetests/filetests/isa/x64/call-with-retval-insts.clif b/cranelift/filetests/filetests/isa/x64/call-with-retval-insts.clif index ffa6f2c6b796..e61280c914b4 100644 --- a/cranelift/filetests/filetests/isa/x64/call-with-retval-insts.clif +++ b/cranelift/filetests/filetests/isa/x64/call-with-retval-insts.clif @@ -45,40 +45,40 @@ block0(v0: i32): ; block0: ; movq %rdi, %rsi ; leaq (%rsp), %rdi -; load_ext_name %ext+0, %r10 -; call *%r10 -; leaq (%rax, %rdx), %r8 -; leaq (%rbx, %r15), %r9 -; leaq (%r13, %r12), %r10 -; movq +(%rsp), %rcx -; leaq (%rcx, %r14), %r11 -; movq +8(%rsp), %rcx -; movq +0x10(%rsp), %rdi -; leaq (%rcx, %rdi), %rsi -; movq +0x20(%rsp), %rdx -; movq +0x18(%rsp), %rdi -; leaq (%rdi, %rdx), %rdi -; movq +0x28(%rsp), %rax -; movq +0x30(%rsp), %rcx -; leaq (%rax, %rcx), %rax -; movq +0x40(%rsp), %rcx +; load_ext_name %ext+0, %rax +; call *%rax +; leaq (%rax, %rdx), %rcx +; leaq (%r12, %rbx), %rsi +; leaq (%r13, %r15), %rdi +; movq +(%rsp), %rax +; leaq (%rax, %r14), %r8 +; movq +8(%rsp), %rax +; movq +0x10(%rsp), %r10 +; leaq (%rax, %r10), %r9 +; movq +0x18(%rsp), %r10 +; movq +0x20(%rsp), %r11 +; leaq (%r10, %r11), %r10 +; movq +0x28(%rsp), %rdx +; movq +0x30(%rsp), %r11 +; leaq (%rdx, %r11), %rax ; movq +0x38(%rsp), %rdx +; movq +0x40(%rsp), %r11 +; leaq (%rdx, %r11), %rdx +; movq +0x50(%rsp), %rbx +; movq +0x48(%rsp), %r11 +; leaq (%r11, %rbx), %r11 +; movq +0x58(%rsp), %rbx +; movq 
+0x60(%rsp), %r12 +; leaq (%rbx, %r12), %rbx +; leaq (%rcx, %rsi), %rcx +; leaq (%rdi, %r8), %rsi +; leaq (%r9, %r10), %rdi +; leaq (%rax, %rdx), %rax +; leaq (%r11, %rbx), %rdx +; leaq (%rcx, %rsi), %rcx +; leaq (%rdi, %rax), %rax ; leaq (%rdx, %rcx), %rcx -; movq +0x50(%rsp), %rdx -; movq +0x48(%rsp), %r14 -; leaq (%r14, %rdx), %rdx -; movq +0x60(%rsp), %rbx -; movq +0x58(%rsp), %r13 -; leaq (%r13, %rbx), %r14 -; leaq (%r8, %r9), %r8 -; leaq (%r10, %r11), %r9 -; leaq (%rsi, %rdi), %r10 -; leaq (%rax, %rcx), %r11 -; leaq (%rdx, %r14), %rsi -; leaq (%r8, %r9), %r8 -; leaq (%r10, %r11), %r9 -; leaq (%rsi, %r8), %r8 -; leaq (%r9, %r8), %rax +; leaq (%rax, %rcx), %rax ; movq 0x100(%rsp), %rbx ; movq 0x108(%rsp), %r12 ; movq 0x110(%rsp), %r13 @@ -102,12 +102,12 @@ block0(v0: i32): ; block1: ; offset 0x33 ; movq %rdi, %rsi ; leaq (%rsp), %rdi -; movabsq $0, %r10 ; reloc_external Abs8 %ext 0 -; callq *%r10 -; movq (%rsp), %rbx -; movq 8(%rsp), %r15 +; movabsq $0, %rax ; reloc_external Abs8 %ext 0 +; callq *%rax +; movq (%rsp), %r12 +; movq 8(%rsp), %rbx ; movq 0x10(%rsp), %r13 -; movq 0x18(%rsp), %r12 +; movq 0x18(%rsp), %r15 ; movq 0x20(%rsp), %r11 ; movq %r11, 0x90(%rsp) ; movq 0x28(%rsp), %r14 @@ -135,38 +135,38 @@ block0(v0: i32): ; movq %r11, 0xe8(%rsp) ; movq 0x88(%rsp), %r11 ; movq %r11, 0xf0(%rsp) -; leaq (%rax, %rdx), %r8 -; leaq (%rbx, %r15), %r9 -; leaq (%r13, %r12), %r10 -; movq 0x90(%rsp), %rcx -; leaq (%rcx, %r14), %r11 -; movq 0x98(%rsp), %rcx -; movq 0xa0(%rsp), %rdi -; leaq (%rcx, %rdi), %rsi -; movq 0xb0(%rsp), %rdx -; movq 0xa8(%rsp), %rdi -; addq %rdx, %rdi -; movq 0xb8(%rsp), %rax -; movq 0xc0(%rsp), %rcx -; addq %rcx, %rax -; movq 0xd0(%rsp), %rcx +; leaq (%rax, %rdx), %rcx +; leaq (%r12, %rbx), %rsi +; leaq (%r13, %r15), %rdi +; movq 0x90(%rsp), %rax +; leaq (%rax, %r14), %r8 +; movq 0x98(%rsp), %rax +; movq 0xa0(%rsp), %r10 +; leaq (%rax, %r10), %r9 +; movq 0xa8(%rsp), %r10 +; movq 0xb0(%rsp), %r11 +; addq %r11, %r10 +; movq 0xb8(%rsp), %rdx +; movq 0xc0(%rsp), %r11 +; leaq (%rdx, %r11), %rax ; movq 0xc8(%rsp), %rdx +; movq 0xd0(%rsp), %r11 +; addq %r11, %rdx +; movq 0xe0(%rsp), %rbx +; movq 0xd8(%rsp), %r11 +; addq %rbx, %r11 +; movq 0xe8(%rsp), %rbx +; movq 0xf0(%rsp), %r12 +; addq %r12, %rbx +; addq %rsi, %rcx +; leaq (%rdi, %r8), %rsi +; leaq (%r9, %r10), %rdi +; addq %rdx, %rax +; leaq (%r11, %rbx), %rdx +; addq %rsi, %rcx +; addq %rdi, %rax ; addq %rdx, %rcx -; movq 0xe0(%rsp), %rdx -; movq 0xd8(%rsp), %r14 -; addq %r14, %rdx -; movq 0xf0(%rsp), %rbx -; movq 0xe8(%rsp), %r13 -; leaq (%r13, %rbx), %r14 -; addq %r9, %r8 -; leaq (%r10, %r11), %r9 -; leaq (%rsi, %rdi), %r10 -; leaq (%rax, %rcx), %r11 -; leaq (%rdx, %r14), %rsi -; addq %r9, %r8 -; leaq (%r10, %r11), %r9 -; addq %rsi, %r8 -; leaq (%r9, %r8), %rax +; addq %rcx, %rax ; movq 0x100(%rsp), %rbx ; movq 0x108(%rsp), %r12 ; movq 0x110(%rsp), %r13 diff --git a/cranelift/filetests/filetests/isa/x64/ceil-libcall.clif b/cranelift/filetests/filetests/isa/x64/ceil-libcall.clif index 1e81e685a792..db87669cc779 100644 --- a/cranelift/filetests/filetests/isa/x64/ceil-libcall.clif +++ b/cranelift/filetests/filetests/isa/x64/ceil-libcall.clif @@ -11,8 +11,8 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; load_ext_name %CeilF32+0, %rcx -; call *%rcx +; load_ext_name %CeilF32+0, %rsi +; call *%rsi ; movq %rbp, %rsp ; popq %rbp ; retq @@ -22,8 +22,8 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $0, %rcx ; reloc_external Abs8 %CeilF32 0 -; callq *%rcx +; movabsq $0, %rsi ; 
reloc_external Abs8 %CeilF32 0 +; callq *%rsi ; movq %rbp, %rsp ; popq %rbp ; retq @@ -38,8 +38,8 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; load_ext_name %CeilF64+0, %rcx -; call *%rcx +; load_ext_name %CeilF64+0, %rsi +; call *%rsi ; movq %rbp, %rsp ; popq %rbp ; retq @@ -49,8 +49,8 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $0, %rcx ; reloc_external Abs8 %CeilF64 0 -; callq *%rcx +; movabsq $0, %rsi ; reloc_external Abs8 %CeilF64 0 +; callq *%rsi ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/clz-lzcnt.clif b/cranelift/filetests/filetests/isa/x64/clz-lzcnt.clif index 356d03b70a68..f5e60980fd56 100644 --- a/cranelift/filetests/filetests/isa/x64/clz-lzcnt.clif +++ b/cranelift/filetests/filetests/isa/x64/clz-lzcnt.clif @@ -13,11 +13,11 @@ block0(v0: i128): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; lzcntq %rsi, %rcx +; lzcntq %rsi, %rsi ; lzcntq %rdi, %rax ; addq $0x40, %rax -; cmpq $0x40, %rcx -; cmovneq %rcx, %rax +; cmpq $0x40, %rsi +; cmovneq %rsi, %rax ; uninit %rdx ; xorq %rdx, %rdx ; movq %rbp, %rsp @@ -29,11 +29,11 @@ block0(v0: i128): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; lzcntq %rsi, %rcx +; lzcntq %rsi, %rsi ; lzcntq %rdi, %rax ; addq $0x40, %rax -; cmpq $0x40, %rcx -; cmovneq %rcx, %rax +; cmpq $0x40, %rsi +; cmovneq %rsi, %rax ; xorq %rdx, %rdx ; movq %rbp, %rsp ; popq %rbp @@ -99,8 +99,8 @@ block0(v0: i16): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movzwq %di, %rax -; lzcntq %rax, %rax +; movzwq %di, %rdx +; lzcntq %rdx, %rax ; subq $0x30, %rax ; movq %rbp, %rsp ; popq %rbp @@ -111,8 +111,8 @@ block0(v0: i16): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movzwq %di, %rax -; lzcntq %rax, %rax +; movzwq %di, %rdx +; lzcntq %rdx, %rax ; subq $0x30, %rax ; movq %rbp, %rsp ; popq %rbp @@ -128,8 +128,8 @@ block0(v0: i8): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movzbq %dil, %rax -; lzcntq %rax, %rax +; movzbq %dil, %rdx +; lzcntq %rdx, %rax ; subq $0x38, %rax ; movq %rbp, %rsp ; popq %rbp @@ -140,8 +140,8 @@ block0(v0: i8): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movzbq %dil, %rax -; lzcntq %rax, %rax +; movzbq %dil, %rdx +; lzcntq %rdx, %rax ; subq $0x38, %rax ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/clz.clif b/cranelift/filetests/filetests/isa/x64/clz.clif index 93329b74274d..2d4f38d6af67 100644 --- a/cranelift/filetests/filetests/isa/x64/clz.clif +++ b/cranelift/filetests/filetests/isa/x64/clz.clif @@ -13,20 +13,19 @@ block0(v0: i128): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %r8 -; movq $0xffffffffffffffff, %rcx -; bsrq %rsi, %r9 -; cmoveq %rcx, %r9 -; movl $0x3f, %edi -; subq %r9, %rdi -; movq $0xffffffffffffffff, %rdx -; bsrq %r8, %r10 -; cmoveq %rdx, %r10 +; movq $0xffffffffffffffff, %r8 +; bsrq %rsi, %rsi +; cmoveq %r8, %rsi +; movl $0x3f, %r10d +; subq %rsi, %r10 +; movq $0xffffffffffffffff, %rax +; bsrq %rdi, %rcx +; cmoveq %rax, %rcx ; movl $0x3f, %eax -; subq %r10, %rax +; subq %rcx, %rax ; addq $0x40, %rax -; cmpq $0x40, %rdi -; cmovneq %rdi, %rax +; cmpq $0x40, %r10 +; cmovneq %r10, %rax ; uninit %rdx ; xorq %rdx, %rdx ; movq %rbp, %rsp @@ -38,20 +37,19 @@ block0(v0: i128): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %r8 -; movq $18446744073709551615, %rcx -; bsrq %rsi, %r9 -; cmoveq %rcx, %r9 -; movl $0x3f, %edi -; subq %r9, %rdi -; movq $18446744073709551615, %rdx -; bsrq %r8, %r10 -; cmoveq %rdx, %r10 +; movq $18446744073709551615, 
%r8 +; bsrq %rsi, %rsi +; cmoveq %r8, %rsi +; movl $0x3f, %r10d +; subq %rsi, %r10 +; movq $18446744073709551615, %rax +; bsrq %rdi, %rcx +; cmoveq %rax, %rcx ; movl $0x3f, %eax -; subq %r10, %rax +; subq %rcx, %rax ; addq $0x40, %rax -; cmpq $0x40, %rdi -; cmovneq %rdi, %rax +; cmpq $0x40, %r10 +; cmovneq %r10, %rax ; xorq %rdx, %rdx ; movq %rbp, %rsp ; popq %rbp @@ -67,11 +65,11 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq $0xffffffffffffffff, %rax -; bsrq %rdi, %r8 -; cmoveq %rax, %r8 +; movq $0xffffffffffffffff, %rdx +; bsrq %rdi, %rsi +; cmoveq %rdx, %rsi ; movl $0x3f, %eax -; subq %r8, %rax +; subq %rsi, %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -81,11 +79,11 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq $18446744073709551615, %rax -; bsrq %rdi, %r8 -; cmoveq %rax, %r8 +; movq $18446744073709551615, %rdx +; bsrq %rdi, %rsi +; cmoveq %rdx, %rsi ; movl $0x3f, %eax -; subq %r8, %rax +; subq %rsi, %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -100,11 +98,11 @@ block0(v0: i32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq $0xffffffffffffffff, %rax -; bsrl %edi, %r8d -; cmovel %eax, %r8d +; movq $0xffffffffffffffff, %rdx +; bsrl %edi, %esi +; cmovel %edx, %esi ; movl $0x1f, %eax -; subl %r8d, %eax +; subl %esi, %eax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -114,11 +112,11 @@ block0(v0: i32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq $18446744073709551615, %rax -; bsrl %edi, %r8d -; cmovel %eax, %r8d +; movq $18446744073709551615, %rdx +; bsrl %edi, %esi +; cmovel %edx, %esi ; movl $0x1f, %eax -; subl %r8d, %eax +; subl %esi, %eax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -133,12 +131,12 @@ block0(v0: i16): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movzwq %di, %rax -; movq $0xffffffffffffffff, %rdx -; bsrq %rax, %r10 -; cmoveq %rdx, %r10 +; movzwq %di, %rdx +; movq $0xffffffffffffffff, %rsi +; bsrq %rdx, %rdi +; cmoveq %rsi, %rdi ; movl $0x3f, %eax -; subq %r10, %rax +; subq %rdi, %rax ; subq $0x30, %rax ; movq %rbp, %rsp ; popq %rbp @@ -149,12 +147,12 @@ block0(v0: i16): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movzwq %di, %rax -; movq $18446744073709551615, %rdx -; bsrq %rax, %r10 -; cmoveq %rdx, %r10 +; movzwq %di, %rdx +; movq $18446744073709551615, %rsi +; bsrq %rdx, %rdi +; cmoveq %rsi, %rdi ; movl $0x3f, %eax -; subq %r10, %rax +; subq %rdi, %rax ; subq $0x30, %rax ; movq %rbp, %rsp ; popq %rbp @@ -170,12 +168,12 @@ block0(v0: i8): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movzbq %dil, %rax -; movq $0xffffffffffffffff, %rdx -; bsrq %rax, %r10 -; cmoveq %rdx, %r10 +; movzbq %dil, %rdx +; movq $0xffffffffffffffff, %rsi +; bsrq %rdx, %rdi +; cmoveq %rsi, %rdi ; movl $0x3f, %eax -; subq %r10, %rax +; subq %rdi, %rax ; subq $0x38, %rax ; movq %rbp, %rsp ; popq %rbp @@ -186,12 +184,12 @@ block0(v0: i8): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movzbq %dil, %rax -; movq $18446744073709551615, %rdx -; bsrq %rax, %r10 -; cmoveq %rdx, %r10 +; movzbq %dil, %rdx +; movq $18446744073709551615, %rsi +; bsrq %rdx, %rdi +; cmoveq %rsi, %rdi ; movl $0x3f, %eax -; subq %r10, %rax +; subq %rdi, %rax ; subq $0x38, %rax ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/cmp-mem-bug.clif b/cranelift/filetests/filetests/isa/x64/cmp-mem-bug.clif index 0f076c2fe2e2..64c22d23b9f1 100644 --- a/cranelift/filetests/filetests/isa/x64/cmp-mem-bug.clif +++ b/cranelift/filetests/filetests/isa/x64/cmp-mem-bug.clif @@ -14,11 +14,11 @@ block0(v0: i64, v1: i64): ; pushq 
%rbp ; movq %rsp, %rbp ; block0: -; movq (%rsi), %r9 -; cmpq %r9, %rdi -; sete %r10b -; movzbq %r10b, %rax -; cmpq %r9, %rdi +; movq (%rsi), %r8 +; cmpq %r8, %rdi +; sete %r9b +; movzbq %r9b, %rax +; cmpq %r8, %rdi ; movq %rsi, %rdx ; cmoveq %rdi, %rdx ; movq %rbp, %rsp @@ -30,11 +30,11 @@ block0(v0: i64, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq (%rsi), %r9 ; trap: heap_oob -; cmpq %r9, %rdi -; sete %r10b -; movzbq %r10b, %rax -; cmpq %r9, %rdi +; movq (%rsi), %r8 ; trap: heap_oob +; cmpq %r8, %rdi +; sete %r9b +; movzbq %r9b, %rax +; cmpq %r8, %rdi ; movq %rsi, %rdx ; cmoveq %rdi, %rdx ; movq %rbp, %rsp @@ -56,14 +56,14 @@ block0(v0: f64, v1: i64): ; block0: ; movsd (%rdi), %xmm1 ; ucomisd %xmm1, %xmm0 -; setnp %dil -; sete %al -; andl %eax, %edi -; movzbq %dil, %rax +; setnp %r10b +; sete %r11b +; andl %r11d, %r10d +; movzbq %r10b, %rax ; ucomisd %xmm1, %xmm0 -; movdqa %xmm0, %xmm2 -; movsd %xmm0, %xmm0; jnp $next; movsd %xmm2, %xmm0; $next: -; movsd %xmm0, %xmm0; jz $next; movsd %xmm2, %xmm0; $next: +; movdqa %xmm0, %xmm1 +; movsd %xmm0, %xmm0; jnp $next; movsd %xmm1, %xmm0; $next: +; movsd %xmm0, %xmm0; jz $next; movsd %xmm1, %xmm0; $next: ; movq %rbp, %rsp ; popq %rbp ; retq @@ -75,16 +75,16 @@ block0(v0: f64, v1: i64): ; block1: ; offset 0x4 ; movsd (%rdi), %xmm1 ; trap: heap_oob ; ucomisd %xmm1, %xmm0 -; setnp %dil -; sete %al -; andl %eax, %edi -; movzbq %dil, %rax +; setnp %r10b +; sete %r11b +; andl %r11d, %r10d +; movzbq %r10b, %rax ; ucomisd %xmm1, %xmm0 -; movdqa %xmm0, %xmm2 -; jnp 0x2a -; movaps %xmm2, %xmm0 -; je 0x33 -; movaps %xmm2, %xmm0 +; movdqa %xmm0, %xmm1 +; jnp 0x2c +; movaps %xmm1, %xmm0 +; je 0x35 +; movaps %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/conditional-values.clif b/cranelift/filetests/filetests/isa/x64/conditional-values.clif index 509c1175b132..231cbd8d90e5 100644 --- a/cranelift/filetests/filetests/isa/x64/conditional-values.clif +++ b/cranelift/filetests/filetests/isa/x64/conditional-values.clif @@ -140,8 +140,8 @@ block2: ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movl (%rdi), %edx -; cmpl $0x1, %edx +; movl (%rdi), %esi +; cmpl $0x1, %esi ; jz label2; j label1 ; block1: ; movl $0x1, %eax @@ -159,8 +159,8 @@ block2: ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl (%rdi), %edx ; trap: heap_oob -; cmpl $1, %edx +; movl (%rdi), %esi ; trap: heap_oob +; cmpl $1, %esi ; je 0x19 ; block2: ; offset 0xf ; movl $1, %eax @@ -191,8 +191,8 @@ block2: ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movl (%rdi), %edx -; cmpl $0x1, %edx +; movl (%rdi), %esi +; cmpl $0x1, %esi ; jz label2; j label1 ; block1: ; movl $0x1, %eax @@ -210,8 +210,8 @@ block2: ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl (%rdi), %edx ; trap: heap_oob -; cmpl $1, %edx +; movl (%rdi), %esi ; trap: heap_oob +; cmpl $1, %esi ; je 0x19 ; block2: ; offset 0xf ; movl $1, %eax diff --git a/cranelift/filetests/filetests/isa/x64/ctz-bmi1.clif b/cranelift/filetests/filetests/isa/x64/ctz-bmi1.clif index e25c93f3c782..97f5f058c97a 100644 --- a/cranelift/filetests/filetests/isa/x64/ctz-bmi1.clif +++ b/cranelift/filetests/filetests/isa/x64/ctz-bmi1.clif @@ -14,10 +14,10 @@ block0(v0: i128): ; movq %rsp, %rbp ; block0: ; tzcntq %rdi, %rax -; tzcntq %rsi, %r9 -; addq $0x40, %r9 +; tzcntq %rsi, %rsi +; addq $0x40, %rsi ; cmpq $0x40, %rax -; cmoveq %r9, %rax +; cmoveq %rsi, %rax ; uninit %rdx ; xorq %rdx, %rdx ; movq %rbp, %rsp @@ -30,10 +30,10 @@ block0(v0: i128): ; movq %rsp, %rbp ; block1: 
; offset 0x4 ; tzcntq %rdi, %rax -; tzcntq %rsi, %r9 -; addq $0x40, %r9 +; tzcntq %rsi, %rsi +; addq $0x40, %rsi ; cmpq $0x40, %rax -; cmoveq %r9, %rax +; cmoveq %rsi, %rax ; xorq %rdx, %rdx ; movq %rbp, %rsp ; popq %rbp @@ -99,9 +99,9 @@ block0(v0: i16): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movzwl %di, %ecx -; orl $0x10000, %ecx -; tzcntl %ecx, %eax +; movzwl %di, %esi +; orl $0x10000, %esi +; tzcntl %esi, %eax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -111,9 +111,9 @@ block0(v0: i16): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movzwl %di, %ecx -; orl $0x10000, %ecx -; tzcntl %ecx, %eax +; movzwl %di, %esi +; orl $0x10000, %esi +; tzcntl %esi, %eax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -128,9 +128,9 @@ block0(v0: i8): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movzbl %dil, %ecx -; orl $0x100, %ecx -; tzcntl %ecx, %eax +; movzbl %dil, %esi +; orl $0x100, %esi +; tzcntl %esi, %eax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -140,9 +140,9 @@ block0(v0: i8): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movzbl %dil, %ecx -; orl $0x100, %ecx -; tzcntl %ecx, %eax +; movzbl %dil, %esi +; orl $0x100, %esi +; tzcntl %esi, %eax ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/ctz.clif b/cranelift/filetests/filetests/isa/x64/ctz.clif index 8b8a626111f3..3a3b1e441eeb 100644 --- a/cranelift/filetests/filetests/isa/x64/ctz.clif +++ b/cranelift/filetests/filetests/isa/x64/ctz.clif @@ -13,15 +13,15 @@ block0(v0: i128): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movl $0x40, %ecx +; movl $0x40, %r8d ; bsfq %rdi, %rax -; cmoveq %rcx, %rax -; movl $0x40, %edi -; bsfq %rsi, %rdx -; cmoveq %rdi, %rdx -; addq $0x40, %rdx +; cmoveq %r8, %rax +; movl $0x40, %r9d +; bsfq %rsi, %rcx +; cmoveq %r9, %rcx +; addq $0x40, %rcx ; cmpq $0x40, %rax -; cmoveq %rdx, %rax +; cmoveq %rcx, %rax ; uninit %rdx ; xorq %rdx, %rdx ; movq %rbp, %rsp @@ -33,15 +33,15 @@ block0(v0: i128): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl $0x40, %ecx +; movl $0x40, %r8d ; bsfq %rdi, %rax -; cmoveq %rcx, %rax -; movl $0x40, %edi -; bsfq %rsi, %rdx -; cmoveq %rdi, %rdx -; addq $0x40, %rdx +; cmoveq %r8, %rax +; movl $0x40, %r9d +; bsfq %rsi, %rcx +; cmoveq %r9, %rcx +; addq $0x40, %rcx ; cmpq $0x40, %rax -; cmoveq %rdx, %rax +; cmoveq %rcx, %rax ; xorq %rdx, %rdx ; movq %rbp, %rsp ; popq %rbp @@ -57,9 +57,9 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movl $0x40, %ecx +; movl $0x40, %edx ; bsfq %rdi, %rax -; cmoveq %rcx, %rax +; cmoveq %rdx, %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -69,9 +69,9 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl $0x40, %ecx +; movl $0x40, %edx ; bsfq %rdi, %rax -; cmoveq %rcx, %rax +; cmoveq %rdx, %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -86,9 +86,9 @@ block0(v0: i32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movl $0x20, %ecx +; movl $0x20, %edx ; bsfl %edi, %eax -; cmovel %ecx, %eax +; cmovel %edx, %eax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -98,9 +98,9 @@ block0(v0: i32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl $0x20, %ecx +; movl $0x20, %edx ; bsfl %edi, %eax -; cmovel %ecx, %eax +; cmovel %edx, %eax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -115,11 +115,11 @@ block0(v0: i16): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movzwl %di, %ecx -; orl $0x10000, %ecx -; movl $0x10, %r9d -; bsfl %ecx, %eax -; cmovel %r9d, %eax +; movzwl %di, %edi +; orl $0x10000, %edi +; movl $0x10, %esi +; bsfl %edi, %eax +; cmovel %esi, %eax ; movq %rbp, %rsp ; popq 
%rbp ; retq @@ -129,11 +129,11 @@ block0(v0: i16): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movzwl %di, %ecx -; orl $0x10000, %ecx -; movl $0x10, %r9d -; bsfl %ecx, %eax -; cmovel %r9d, %eax +; movzwl %di, %edi +; orl $0x10000, %edi +; movl $0x10, %esi +; bsfl %edi, %eax +; cmovel %esi, %eax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -148,11 +148,11 @@ block0(v0: i8): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movzbl %dil, %ecx -; orl $0x100, %ecx -; movl $0x8, %r9d -; bsfl %ecx, %eax -; cmovel %r9d, %eax +; movzbl %dil, %edi +; orl $0x100, %edi +; movl $0x8, %esi +; bsfl %edi, %eax +; cmovel %esi, %eax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -162,11 +162,11 @@ block0(v0: i8): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movzbl %dil, %ecx -; orl $0x100, %ecx -; movl $8, %r9d -; bsfl %ecx, %eax -; cmovel %r9d, %eax +; movzbl %dil, %edi +; orl $0x100, %edi +; movl $8, %esi +; bsfl %edi, %eax +; cmovel %esi, %eax ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/debug.clif b/cranelift/filetests/filetests/isa/x64/debug.clif index 16fc15245e76..9070b5bd9f04 100644 --- a/cranelift/filetests/filetests/isa/x64/debug.clif +++ b/cranelift/filetests/filetests/isa/x64/debug.clif @@ -18,19 +18,19 @@ block0(v0: i32, v1: i32): ; movq %rsp, %rbp ; subq $0x50, %rsp ; movq %r12, 0x40(%rsp) -; movq %r14, 0x48(%rsp) +; movq %r13, 0x48(%rsp) ; block0: -; load_ext_name %f1+0, %rdx +; load_ext_name %f1+0, %r8 ; ; ^-- debug @ Pre: [StackSlot(ss0), User(0), User(0)] -; movq %rsi, %r12 -; movq %rdi, %r14 -; call *%rdx +; movq %rdi, %r12 +; movq %rsi, %r13 +; call *%r8 ; ; ^-- debug @ Post: [StackSlot(ss0), User(0), User(0)] ; sequence_point ; ; ^-- debug @ Pre: [StackSlot(ss0), User(1), User(0)] -; leal (%r14, %r12), %eax +; leal (%r12, %r13), %eax ; movq 0x40(%rsp), %r12 -; movq 0x48(%rsp), %r14 +; movq 0x48(%rsp), %r13 ; addq $0x50, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -42,15 +42,15 @@ block0(v0: i32, v1: i32): ; movq %rsp, %rbp ; subq $0x50, %rsp ; movq %r12, 0x40(%rsp) -; movq %r14, 0x48(%rsp) +; movq %r13, 0x48(%rsp) ; block1: ; offset 0x12 -; movabsq $0, %rdx ; reloc_external Abs8 %f1 0 -; movq %rsi, %r12 -; movq %rdi, %r14 -; callq *%rdx -; leal (%r14, %r12), %eax +; movabsq $0, %r8 ; reloc_external Abs8 %f1 0 +; movq %rdi, %r12 +; movq %rsi, %r13 +; callq *%r8 +; leal (%r12, %r13), %eax ; movq 0x40(%rsp), %r12 -; movq 0x48(%rsp), %r14 +; movq 0x48(%rsp), %r13 ; addq $0x50, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -70,18 +70,18 @@ block0(v0: i32, v1: i32): ; pushq %rbp ; movq %rsp, %rbp ; subq $0x50, %rsp -; movq %rbx, 0x40(%rsp) -; movq %r15, 0x48(%rsp) +; movq %r12, 0x40(%rsp) +; movq %r13, 0x48(%rsp) ; block0: -; movq %rdi, %rbx -; movq %rsi, %r15 +; movq %rdi, %r12 +; movq %rsi, %r13 ; call TestCase(%f1) ; ; ^-- debug @ Post: [StackSlot(ss0), User(0), User(0)] ; sequence_point ; ; ^-- debug @ Pre: [StackSlot(ss0), User(1), User(0)] -; leal (%rbx, %r15), %eax -; movq 0x40(%rsp), %rbx -; movq 0x48(%rsp), %r15 +; leal (%r12, %r13), %eax +; movq 0x40(%rsp), %r12 +; movq 0x48(%rsp), %r13 ; addq $0x50, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -92,15 +92,15 @@ block0(v0: i32, v1: i32): ; pushq %rbp ; movq %rsp, %rbp ; subq $0x50, %rsp -; movq %rbx, 0x40(%rsp) -; movq %r15, 0x48(%rsp) +; movq %r12, 0x40(%rsp) +; movq %r13, 0x48(%rsp) ; block1: ; offset 0x12 -; movq %rdi, %rbx -; movq %rsi, %r15 +; movq %rdi, %r12 +; movq %rsi, %r13 ; callq 0x1d ; reloc_external CallPCRel4 %f1 -4 -; leal (%rbx, %r15), %eax -; movq 0x40(%rsp), %rbx -; movq 0x48(%rsp), %r15 
+; leal (%r12, %r13), %eax +; movq 0x40(%rsp), %r12 +; movq 0x48(%rsp), %r13 ; addq $0x50, %rsp ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/exceptions.clif b/cranelift/filetests/filetests/isa/x64/exceptions.clif index 1afdc690525f..c9a55a903d0b 100644 --- a/cranelift/filetests/filetests/isa/x64/exceptions.clif +++ b/cranelift/filetests/filetests/isa/x64/exceptions.clif @@ -30,11 +30,11 @@ function %f0(i32) -> i32, f32, f64 { ; movq %r14, 0x28(%rsp) ; movq %r15, 0x30(%rsp) ; block0: -; movabsq $0x3ff0000000000000, %rcx -; movq %rcx, %xmm1 +; movabsq $0x3ff0000000000000, %rax +; movq %rax, %xmm1 ; movdqu %xmm1, +(%rsp) -; load_ext_name %g+0, %rdx -; call *%rdx; jmp MachLabel(1); catch [default: MachLabel(2)] +; load_ext_name %g+0, %rax +; call *%rax; jmp MachLabel(1); catch [default: MachLabel(2)] ; block1: ; movl $0x1, %eax ; movdqu +(%rsp), %xmm1 @@ -73,11 +73,11 @@ function %f0(i32) -> i32, f32, f64 { ; movq %r14, 0x28(%rsp) ; movq %r15, 0x30(%rsp) ; block1: ; offset 0x21 -; movabsq $0x3ff0000000000000, %rcx -; movq %rcx, %xmm1 +; movabsq $0x3ff0000000000000, %rax +; movq %rax, %xmm1 ; movdqu %xmm1, (%rsp) -; movabsq $0, %rdx ; reloc_external Abs8 %g 0 -; callq *%rdx +; movabsq $0, %rax ; reloc_external Abs8 %g 0 +; callq *%rax ; block2: ; offset 0x41 ; movl $1, %eax ; movdqu (%rsp), %xmm1 @@ -154,20 +154,19 @@ function %f1(i32) -> i32, f32, f64 { ; testl %edi, %edi ; jnz label2; j label3 ; block2: -; movq %rax, %r11 +; movq %rax, %rcx ; jmp label8 ; block3: ; movdqu %xmm1, +(%rsp) ; jmp label7 ; block4: -; movabsq $0x3ff0000000000000, %r10 -; movq %r10, %xmm1 +; movabsq $0x3ff0000000000000, %rax +; movq %rax, %xmm1 ; movdqu %xmm1, +(%rsp) -; load_ext_name %g+0, %r11 -; call *%r11; jmp MachLabel(6); catch [default: MachLabel(5)] +; load_ext_name %g+0, %rax +; call *%rax; jmp MachLabel(6); catch [default: MachLabel(5)] ; block5: -; movq %rax, %rsi -; movq %rsi, %r11 +; movq %rax, %rcx ; jmp label8 ; block6: ; jmp label7 @@ -184,11 +183,10 @@ function %f1(i32) -> i32, f32, f64 { ; popq %rbp ; retq ; block8: -; leal 1(%r11), %eax -; movq %r11, %rsi +; leal 1(%rcx), %eax ; uninit %xmm0 ; xorps %xmm0, %xmm0 -; movq %rsi, %xmm1 +; movq %rcx, %xmm1 ; movq 0x10(%rsp), %rbx ; movq 0x18(%rsp), %r12 ; movq 0x20(%rsp), %r13 @@ -221,23 +219,22 @@ function %f1(i32) -> i32, f32, f64 { ; testl %edi, %edi ; je 0x56 ; block3: ; offset 0x4e -; movq %rax, %r11 -; jmp 0xbd +; movq %rax, %rcx +; jmp 0xb9 ; block4: ; offset 0x56 ; movdqu %xmm1, (%rsp) -; jmp 0x91 +; jmp 0x8d ; block5: ; offset 0x60 -; movabsq $0x3ff0000000000000, %r10 -; movq %r10, %xmm1 +; movabsq $0x3ff0000000000000, %rax +; movq %rax, %xmm1 ; movdqu %xmm1, (%rsp) -; movabsq $0, %r11 ; reloc_external Abs8 %g 0 -; callq *%r11 -; jmp 0x91 -; block6: ; offset 0x86 -; movq %rax, %rsi -; movq %rsi, %r11 -; jmp 0xbd -; block7: ; offset 0x91 +; movabsq $0, %rax ; reloc_external Abs8 %g 0 +; callq *%rax +; jmp 0x8d +; block6: ; offset 0x85 +; movq %rax, %rcx +; jmp 0xb9 +; block7: ; offset 0x8d ; movl $1, %eax ; movdqu (%rsp), %xmm1 ; movq 0x10(%rsp), %rbx @@ -249,11 +246,10 @@ function %f1(i32) -> i32, f32, f64 { ; movq %rbp, %rsp ; popq %rbp ; retq -; block8: ; offset 0xbd -; leal 1(%r11), %eax -; movq %r11, %rsi +; block8: ; offset 0xb9 +; leal 1(%rcx), %eax ; xorps %xmm0, %xmm0 -; movq %rsi, %xmm1 +; movq %rcx, %xmm1 ; movq 0x10(%rsp), %rbx ; movq 0x18(%rsp), %r12 ; movq 0x20(%rsp), %r13 @@ -294,11 +290,11 @@ function %f2(i32) -> i32, f32, f64 { ; movq %r14, 0x28(%rsp) ; movq %r15, 0x30(%rsp) ; block0: -; 
movabsq $0x3ff0000000000000, %rcx -; movq %rcx, %xmm1 +; movabsq $0x3ff0000000000000, %rax +; movq %rax, %xmm1 ; movdqu %xmm1, +(%rsp) -; load_ext_name %g+0, %rdx -; call *%rdx; jmp MachLabel(1); catch [default: MachLabel(2)] +; load_ext_name %g+0, %rax +; call *%rax; jmp MachLabel(1); catch [default: MachLabel(2)] ; block1: ; movl $0x1, %eax ; movdqu +(%rsp), %xmm1 @@ -337,11 +333,11 @@ function %f2(i32) -> i32, f32, f64 { ; movq %r14, 0x28(%rsp) ; movq %r15, 0x30(%rsp) ; block1: ; offset 0x21 -; movabsq $0x3ff0000000000000, %rcx -; movq %rcx, %xmm1 +; movabsq $0x3ff0000000000000, %rax +; movq %rax, %xmm1 ; movdqu %xmm1, (%rsp) -; movabsq $0, %rdx ; reloc_external Abs8 %g 0 -; callq *%rdx +; movabsq $0, %rax ; reloc_external Abs8 %g 0 +; callq *%rax ; block2: ; offset 0x41 ; movl $1, %eax ; movdqu (%rsp), %xmm1 @@ -388,8 +384,8 @@ block2: ; block0: ; jmp label1 ; block1: -; load_ext_name userextname0+0, %rcx -; call *%rcx; jmp MachLabel(2); catch [] +; load_ext_name userextname0+0, %rsi +; call *%rsi; jmp MachLabel(2); catch [] ; block2: ; jmp label3 ; block3: @@ -400,8 +396,8 @@ block2: ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $0, %rcx ; reloc_external Abs8 u0:1 0 -; callq *%rcx +; movabsq $0, %rsi ; reloc_external Abs8 u0:1 0 +; callq *%rsi ; block2: ; offset 0x10 ; jmp 0x10 @@ -438,13 +434,13 @@ function %f4(i64, i32) -> i32, f32, f64 { ; movq %r15, 0x40(%rsp) ; block0: ; movq %rdi, +8(%rsp) -; movabsq $0x3ff0000000000000, %rdx -; movq %rdx, %xmm1 +; movabsq $0x3ff0000000000000, %rax +; movq %rax, %xmm1 ; movdqu %xmm1, +0x10(%rsp) -; load_ext_name %g+0, %r8 +; load_ext_name %g+0, %rax ; movq %rsi, %rdi ; movq %rsi, +(%rsp) -; call *%r8; jmp MachLabel(3); catch [context stack1, tag0: MachLabel(1), tag1: MachLabel(2), context stack0, tag0: MachLabel(4)] +; call *%rax; jmp MachLabel(3); catch [context stack1, tag0: MachLabel(1), tag1: MachLabel(2), context stack0, tag0: MachLabel(4)] ; block1: ; movq %rax, %rdi ; movdqu +0x10(%rsp), %xmm1 @@ -495,23 +491,23 @@ function %f4(i64, i32) -> i32, f32, f64 { ; movq %r15, 0x40(%rsp) ; block1: ; offset 0x21 ; movq %rdi, 8(%rsp) -; movabsq $0x3ff0000000000000, %rdx -; movq %rdx, %xmm1 +; movabsq $0x3ff0000000000000, %rax +; movq %rax, %xmm1 ; movdqu %xmm1, 0x10(%rsp) -; movabsq $0, %r8 ; reloc_external Abs8 %g 0 +; movabsq $0, %rax ; reloc_external Abs8 %g 0 ; movq %rsi, %rdi ; movq %rsi, (%rsp) -; callq *%r8 -; jmp 0x70 -; block2: ; offset 0x54 +; callq *%rax +; jmp 0x6f +; block2: ; offset 0x53 ; movq %rax, %rdi ; movdqu 0x10(%rsp), %xmm1 -; jmp 0xa8 -; block3: ; offset 0x62 +; jmp 0xa7 +; block3: ; offset 0x61 ; movq %rax, %rdi ; movdqu 0x10(%rsp), %xmm1 -; jmp 0xa8 -; block4: ; offset 0x70 +; jmp 0xa7 +; block4: ; offset 0x6f ; movl $1, %eax ; movdqu 0x10(%rsp), %xmm1 ; movq 0x20(%rsp), %rbx @@ -523,10 +519,10 @@ function %f4(i64, i32) -> i32, f32, f64 { ; movq %rbp, %rsp ; popq %rbp ; retq -; block5: ; offset 0x9d +; block5: ; offset 0x9c ; movdqu 0x10(%rsp), %xmm1 ; movq 8(%rsp), %rdi -; block6: ; offset 0xa8 +; block6: ; offset 0xa7 ; leal 1(%rdi), %eax ; xorps %xmm0, %xmm0 ; movq 0x20(%rsp), %rbx @@ -539,7 +535,6 @@ function %f4(i64, i32) -> i32, f32, f64 { ; popq %rbp ; retq - function %f5() -> i64 { sig0 = () tail fn0 = %g() tail @@ -572,8 +567,8 @@ function %f5() -> i64 { ; movq %rax, +(%rsp) ; jmp label1 ; block1: -; load_ext_name %g+0, %rdx -; call *%rdx; jmp MachLabel(2); catch [default: MachLabel(3)] +; load_ext_name %g+0, %rsi +; call *%rsi; jmp MachLabel(2); catch [default: MachLabel(3)] ; block2: ; movq 
+(%rsp), %rax ; movq 0x10(%rsp), %rbx @@ -611,8 +606,8 @@ function %f5() -> i64 { ; leaq 0x36(%rip), %rax ; movq %rax, (%rsp) ; block2: ; offset 0x2c -; movabsq $0, %rdx ; reloc_external Abs8 %g 0 -; callq *%rdx +; movabsq $0, %rsi ; reloc_external Abs8 %g 0 +; callq *%rsi ; block3: ; offset 0x38 ; movq (%rsp), %rax ; movq 0x10(%rsp), %rbx diff --git a/cranelift/filetests/filetests/isa/x64/f16const.clif b/cranelift/filetests/filetests/isa/x64/f16const.clif index d4862c40e106..04716612f8c3 100644 --- a/cranelift/filetests/filetests/isa/x64/f16const.clif +++ b/cranelift/filetests/filetests/isa/x64/f16const.clif @@ -37,10 +37,10 @@ block0(): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movl $0x3c00, %esi +; movl $0x3c00, %eax ; uninit %xmm0 ; pxor %xmm0, %xmm0 -; pinsrw $0x0, %esi, %xmm0 +; pinsrw $0x0, %eax, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -50,9 +50,9 @@ block0(): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl $0x3c00, %esi +; movl $0x3c00, %eax ; pxor %xmm0, %xmm0 -; pinsrw $0, %esi, %xmm0 +; pinsrw $0, %eax, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/fabs.clif b/cranelift/filetests/filetests/isa/x64/fabs.clif index 0f1648c3531f..0373c254677d 100644 --- a/cranelift/filetests/filetests/isa/x64/fabs.clif +++ b/cranelift/filetests/filetests/isa/x64/fabs.clif @@ -11,8 +11,8 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movl $0x7fffffff, %eax -; movd %eax, %xmm4 +; movl $0x7fffffff, %edx +; movd %edx, %xmm4 ; andps %xmm4, %xmm0 ; movq %rbp, %rsp ; popq %rbp @@ -23,8 +23,8 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl $0x7fffffff, %eax -; movd %eax, %xmm4 +; movl $0x7fffffff, %edx +; movd %edx, %xmm4 ; andps %xmm4, %xmm0 ; movq %rbp, %rsp ; popq %rbp @@ -40,8 +40,8 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movabsq $0x7fffffffffffffff, %rax -; movq %rax, %xmm4 +; movabsq $0x7fffffffffffffff, %rdx +; movq %rdx, %xmm4 ; andpd %xmm4, %xmm0 ; movq %rbp, %rsp ; popq %rbp @@ -52,8 +52,8 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $0x7fffffffffffffff, %rax -; movq %rax, %xmm4 +; movabsq $0x7fffffffffffffff, %rdx +; movq %rdx, %xmm4 ; andpd %xmm4, %xmm0 ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/fastcall.clif b/cranelift/filetests/filetests/isa/x64/fastcall.clif index b4192435627f..54f12e2cc555 100644 --- a/cranelift/filetests/filetests/isa/x64/fastcall.clif +++ b/cranelift/filetests/filetests/isa/x64/fastcall.clif @@ -245,10 +245,10 @@ block0(v0: i64): ; cvtsi2sdq %rcx, %xmm3 ; movq %rcx, 0x20(%rsp) ; movq %rcx, 0x28(%rsp) -; load_ext_name %g+0, %r11 +; load_ext_name %g+0, %r8 ; movq %rcx, %rdx ; movdqa %xmm3, %xmm2 -; call *%r11 +; call *%r8 ; addq $0x30, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -264,10 +264,10 @@ block0(v0: i64): ; cvtsi2sdq %rcx, %xmm3 ; movq %rcx, 0x20(%rsp) ; movq %rcx, 0x28(%rsp) -; movabsq $0, %r11 ; reloc_external Abs8 %g 0 +; movabsq $0, %r8 ; reloc_external Abs8 %g 0 ; movq %rcx, %rdx ; movdqa %xmm3, %xmm2 -; callq *%r11 +; callq *%r8 ; addq $0x30, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -351,53 +351,53 @@ block0(v0: i64): ; unwind SaveReg { clobber_offset: 144, reg: p15f } ; block0: ; movsd (%rcx), %xmm0 -; movsd 8(%rcx), %xmm8 -; movdqu %xmm8, +0x30(%rsp) -; movsd 0x10(%rcx), %xmm10 -; movdqu %xmm10, +(%rsp) -; movsd 0x18(%rcx), %xmm9 -; movdqa %xmm9, %xmm10 -; movsd 0x20(%rcx), %xmm5 -; movsd 0x28(%rcx), %xmm6 -; movdqu %xmm6, +0x20(%rsp) -; movsd 
0x30(%rcx), %xmm7 -; movsd 0x38(%rcx), %xmm12 -; movdqu %xmm12, +0x10(%rsp) -; movsd 0x40(%rcx), %xmm4 -; movsd 0x48(%rcx), %xmm12 -; movsd 0x50(%rcx), %xmm1 -; movsd 0x58(%rcx), %xmm14 -; movsd 0x60(%rcx), %xmm3 -; movsd 0x68(%rcx), %xmm15 -; movsd 0x70(%rcx), %xmm11 -; movsd 0x78(%rcx), %xmm8 -; movsd 0x80(%rcx), %xmm2 -; movsd 0x88(%rcx), %xmm9 +; movsd 8(%rcx), %xmm10 +; movdqu %xmm10, +0x30(%rsp) +; movsd 0x10(%rcx), %xmm12 +; movdqu %xmm12, +(%rsp) +; movsd 0x18(%rcx), %xmm11 +; movdqa %xmm11, %xmm12 +; movsd 0x20(%rcx), %xmm1 +; movsd 0x28(%rcx), %xmm3 +; movdqu %xmm3, +0x20(%rsp) +; movsd 0x30(%rcx), %xmm4 +; movsd 0x38(%rcx), %xmm14 +; movdqu %xmm14, +0x10(%rsp) +; movsd 0x40(%rcx), %xmm2 +; movsd 0x48(%rcx), %xmm14 +; movsd 0x50(%rcx), %xmm6 +; movsd 0x58(%rcx), %xmm15 +; movsd 0x60(%rcx), %xmm5 +; movsd 0x68(%rcx), %xmm8 +; movsd 0x70(%rcx), %xmm9 +; movsd 0x78(%rcx), %xmm10 +; movsd 0x80(%rcx), %xmm7 +; movsd 0x88(%rcx), %xmm11 ; movsd 0x90(%rcx), %xmm13 -; movdqu +0x30(%rsp), %xmm6 -; addsd %xmm6, %xmm0 -; movdqa %xmm10, %xmm6 -; movdqu +(%rsp), %xmm10 -; addsd %xmm6, %xmm10 -; movdqu +0x20(%rsp), %xmm6 -; addsd %xmm6, %xmm5 -; movdqu +0x10(%rsp), %xmm6 -; addsd %xmm6, %xmm7 -; addsd %xmm12, %xmm4 -; addsd %xmm14, %xmm1 -; addsd %xmm15, %xmm3 -; addsd %xmm8, %xmm11 -; addsd %xmm9, %xmm2 -; addsd 0x98(%rcx), %xmm13 -; addsd %xmm10, %xmm0 -; addsd %xmm7, %xmm5 -; addsd %xmm1, %xmm4 -; addsd %xmm11, %xmm3 -; addsd %xmm13, %xmm2 -; addsd %xmm5, %xmm0 +; movdqu +0x30(%rsp), %xmm3 +; addsd %xmm3, %xmm0 +; movdqa %xmm12, %xmm3 +; movdqu +(%rsp), %xmm12 +; addsd %xmm3, %xmm12 +; movdqu +0x20(%rsp), %xmm3 +; addsd %xmm3, %xmm1 +; movdqu +0x10(%rsp), %xmm3 ; addsd %xmm3, %xmm4 -; addsd %xmm4, %xmm0 +; addsd %xmm14, %xmm2 +; addsd %xmm15, %xmm6 +; addsd %xmm8, %xmm5 +; addsd %xmm10, %xmm9 +; addsd %xmm11, %xmm7 +; addsd 0x98(%rcx), %xmm13 +; addsd %xmm12, %xmm0 +; addsd %xmm4, %xmm1 +; addsd %xmm6, %xmm2 +; addsd %xmm9, %xmm5 +; addsd %xmm13, %xmm7 +; addsd %xmm1, %xmm0 +; addsd %xmm5, %xmm2 ; addsd %xmm2, %xmm0 +; addsd %xmm7, %xmm0 ; movdqu 0x40(%rsp), %xmm6 ; movdqu 0x50(%rsp), %xmm7 ; movdqu 0x60(%rsp), %xmm8 @@ -430,53 +430,53 @@ block0(v0: i64): ; movdqu %xmm15, 0xd0(%rsp) ; block1: ; offset 0x61 ; movsd (%rcx), %xmm0 ; trap: heap_oob -; movsd 8(%rcx), %xmm8 ; trap: heap_oob -; movdqu %xmm8, 0x30(%rsp) -; movsd 0x10(%rcx), %xmm10 ; trap: heap_oob -; movdqu %xmm10, (%rsp) -; movsd 0x18(%rcx), %xmm9 ; trap: heap_oob -; movdqa %xmm9, %xmm10 -; movsd 0x20(%rcx), %xmm5 ; trap: heap_oob -; movsd 0x28(%rcx), %xmm6 ; trap: heap_oob -; movdqu %xmm6, 0x20(%rsp) -; movsd 0x30(%rcx), %xmm7 ; trap: heap_oob -; movsd 0x38(%rcx), %xmm12 ; trap: heap_oob -; movdqu %xmm12, 0x10(%rsp) -; movsd 0x40(%rcx), %xmm4 ; trap: heap_oob -; movsd 0x48(%rcx), %xmm12 ; trap: heap_oob -; movsd 0x50(%rcx), %xmm1 ; trap: heap_oob -; movsd 0x58(%rcx), %xmm14 ; trap: heap_oob -; movsd 0x60(%rcx), %xmm3 ; trap: heap_oob -; movsd 0x68(%rcx), %xmm15 ; trap: heap_oob -; movsd 0x70(%rcx), %xmm11 ; trap: heap_oob -; movsd 0x78(%rcx), %xmm8 ; trap: heap_oob -; movsd 0x80(%rcx), %xmm2 ; trap: heap_oob -; movsd 0x88(%rcx), %xmm9 ; trap: heap_oob +; movsd 8(%rcx), %xmm10 ; trap: heap_oob +; movdqu %xmm10, 0x30(%rsp) +; movsd 0x10(%rcx), %xmm12 ; trap: heap_oob +; movdqu %xmm12, (%rsp) +; movsd 0x18(%rcx), %xmm11 ; trap: heap_oob +; movdqa %xmm11, %xmm12 +; movsd 0x20(%rcx), %xmm1 ; trap: heap_oob +; movsd 0x28(%rcx), %xmm3 ; trap: heap_oob +; movdqu %xmm3, 0x20(%rsp) +; movsd 0x30(%rcx), %xmm4 ; trap: heap_oob +; movsd 
0x38(%rcx), %xmm14 ; trap: heap_oob +; movdqu %xmm14, 0x10(%rsp) +; movsd 0x40(%rcx), %xmm2 ; trap: heap_oob +; movsd 0x48(%rcx), %xmm14 ; trap: heap_oob +; movsd 0x50(%rcx), %xmm6 ; trap: heap_oob +; movsd 0x58(%rcx), %xmm15 ; trap: heap_oob +; movsd 0x60(%rcx), %xmm5 ; trap: heap_oob +; movsd 0x68(%rcx), %xmm8 ; trap: heap_oob +; movsd 0x70(%rcx), %xmm9 ; trap: heap_oob +; movsd 0x78(%rcx), %xmm10 ; trap: heap_oob +; movsd 0x80(%rcx), %xmm7 ; trap: heap_oob +; movsd 0x88(%rcx), %xmm11 ; trap: heap_oob ; movsd 0x90(%rcx), %xmm13 ; trap: heap_oob -; movdqu 0x30(%rsp), %xmm6 -; addsd %xmm6, %xmm0 -; movdqa %xmm10, %xmm6 -; movdqu (%rsp), %xmm10 -; addsd %xmm6, %xmm10 -; movdqu 0x20(%rsp), %xmm6 -; addsd %xmm6, %xmm5 -; movdqu 0x10(%rsp), %xmm6 -; addsd %xmm6, %xmm7 -; addsd %xmm12, %xmm4 -; addsd %xmm14, %xmm1 -; addsd %xmm15, %xmm3 -; addsd %xmm8, %xmm11 -; addsd %xmm9, %xmm2 -; addsd 0x98(%rcx), %xmm13 ; trap: heap_oob -; addsd %xmm10, %xmm0 -; addsd %xmm7, %xmm5 -; addsd %xmm1, %xmm4 -; addsd %xmm11, %xmm3 -; addsd %xmm13, %xmm2 -; addsd %xmm5, %xmm0 +; movdqu 0x30(%rsp), %xmm3 +; addsd %xmm3, %xmm0 +; movdqa %xmm12, %xmm3 +; movdqu (%rsp), %xmm12 +; addsd %xmm3, %xmm12 +; movdqu 0x20(%rsp), %xmm3 +; addsd %xmm3, %xmm1 +; movdqu 0x10(%rsp), %xmm3 ; addsd %xmm3, %xmm4 -; addsd %xmm4, %xmm0 +; addsd %xmm14, %xmm2 +; addsd %xmm15, %xmm6 +; addsd %xmm8, %xmm5 +; addsd %xmm10, %xmm9 +; addsd %xmm11, %xmm7 +; addsd 0x98(%rcx), %xmm13 ; trap: heap_oob +; addsd %xmm12, %xmm0 +; addsd %xmm4, %xmm1 +; addsd %xmm6, %xmm2 +; addsd %xmm9, %xmm5 +; addsd %xmm13, %xmm7 +; addsd %xmm1, %xmm0 +; addsd %xmm5, %xmm2 ; addsd %xmm2, %xmm0 +; addsd %xmm7, %xmm0 ; movdqu 0x40(%rsp), %xmm6 ; movdqu 0x50(%rsp), %xmm7 ; movdqu 0x60(%rsp), %xmm8 diff --git a/cranelift/filetests/filetests/isa/x64/fcopysign.clif b/cranelift/filetests/filetests/isa/x64/fcopysign.clif index 2d96faa16f17..b94c88a30655 100644 --- a/cranelift/filetests/filetests/isa/x64/fcopysign.clif +++ b/cranelift/filetests/filetests/isa/x64/fcopysign.clif @@ -11,8 +11,8 @@ block0(v0: f32, v1: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movl $0x80000000, %ecx -; movd %ecx, %xmm7 +; movl $0x80000000, %esi +; movd %esi, %xmm7 ; movdqa %xmm0, %xmm2 ; movdqa %xmm7, %xmm0 ; andnps %xmm2, %xmm0 @@ -27,8 +27,8 @@ block0(v0: f32, v1: f32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl $0x80000000, %ecx -; movd %ecx, %xmm7 +; movl $0x80000000, %esi +; movd %esi, %xmm7 ; movdqa %xmm0, %xmm2 ; movdqa %xmm7, %xmm0 ; andnps %xmm2, %xmm0 @@ -48,8 +48,8 @@ block0(v0: f64, v1: f64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movabsq $0x8000000000000000, %rcx -; movq %rcx, %xmm7 +; movabsq $0x8000000000000000, %rsi +; movq %rsi, %xmm7 ; movdqa %xmm0, %xmm2 ; movdqa %xmm7, %xmm0 ; andnpd %xmm2, %xmm0 @@ -64,8 +64,8 @@ block0(v0: f64, v1: f64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $9223372036854775808, %rcx -; movq %rcx, %xmm7 +; movabsq $9223372036854775808, %rsi +; movq %rsi, %xmm7 ; movdqa %xmm0, %xmm2 ; movdqa %xmm7, %xmm0 ; andnpd %xmm2, %xmm0 diff --git a/cranelift/filetests/filetests/isa/x64/fcvt-avx.clif b/cranelift/filetests/filetests/isa/x64/fcvt-avx.clif index 88a67ad34527..6548ad9f3cb2 100644 --- a/cranelift/filetests/filetests/isa/x64/fcvt-avx.clif +++ b/cranelift/filetests/filetests/isa/x64/fcvt-avx.clif @@ -127,8 +127,8 @@ block0(v0: i64x2): ; vpor (%rip), %xmm2, %xmm4 ; vpsrlq $0x20, %xmm0, %xmm6 ; vpor (%rip), %xmm6, %xmm0 -; vsubpd (%rip), %xmm0, %xmm2 -; vaddpd %xmm2, %xmm4, %xmm0 +; vsubpd (%rip), 
%xmm0, %xmm0 +; vaddpd %xmm0, %xmm4, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -142,8 +142,8 @@ block0(v0: i64x2): ; vpor 0x3c(%rip), %xmm2, %xmm4 ; vpsrlq $0x20, %xmm0, %xmm6 ; vpor 0x3f(%rip), %xmm6, %xmm0 -; vsubpd 0x47(%rip), %xmm0, %xmm2 -; vaddpd %xmm2, %xmm4, %xmm0 +; vsubpd 0x47(%rip), %xmm0, %xmm0 +; vaddpd %xmm0, %xmm4, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -167,12 +167,12 @@ block0(v0: i64x2): ; block0: ; uninit %xmm2 ; vxorpd %xmm2, %xmm2, %xmm4 -; vmovq %xmm0, %r9 -; vcvtsi2sdq %r9, %xmm4, %xmm1 -; vpshufd $0xee, %xmm0, %xmm2 -; vmovq %xmm2, %rcx -; vcvtsi2sdq %rcx, %xmm4, %xmm6 -; vunpcklpd %xmm6, %xmm1, %xmm0 +; vmovq %xmm0, %rsi +; vcvtsi2sdq %rsi, %xmm4, %xmm1 +; vpshufd $0xee, %xmm0, %xmm0 +; vmovq %xmm0, %rax +; vcvtsi2sdq %rax, %xmm4, %xmm0 +; vunpcklpd %xmm0, %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -183,12 +183,12 @@ block0(v0: i64x2): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; vxorpd %xmm2, %xmm2, %xmm4 -; vmovq %xmm0, %r9 -; vcvtsi2sdq %r9, %xmm4, %xmm1 -; vpshufd $0xee, %xmm0, %xmm2 -; vmovq %xmm2, %rcx -; vcvtsi2sdq %rcx, %xmm4, %xmm6 -; vunpcklpd %xmm6, %xmm1, %xmm0 +; vmovq %xmm0, %rsi +; vcvtsi2sdq %rsi, %xmm4, %xmm1 +; vpshufd $0xee, %xmm0, %xmm0 +; vmovq %xmm0, %rax +; vcvtsi2sdq %rax, %xmm4, %xmm0 +; vunpcklpd %xmm0, %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/fcvt.clif b/cranelift/filetests/filetests/isa/x64/fcvt.clif index 4a5f40ebf5ef..a9ff6d48cc00 100644 --- a/cranelift/filetests/filetests/isa/x64/fcvt.clif +++ b/cranelift/filetests/filetests/isa/x64/fcvt.clif @@ -13,8 +13,8 @@ block0(v0: i8): ; block0: ; uninit %xmm0 ; xorps %xmm0, %xmm0 -; movsbl %dil, %r9d -; cvtsi2ssl %r9d, %xmm0 +; movsbl %dil, %esi +; cvtsi2ssl %esi, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -25,8 +25,8 @@ block0(v0: i8): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; xorps %xmm0, %xmm0 -; movsbl %dil, %r9d -; cvtsi2ssl %r9d, %xmm0 +; movsbl %dil, %esi +; cvtsi2ssl %esi, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -43,8 +43,8 @@ block0(v0: i16): ; block0: ; uninit %xmm0 ; xorps %xmm0, %xmm0 -; movswl %di, %r9d -; cvtsi2ssl %r9d, %xmm0 +; movswl %di, %esi +; cvtsi2ssl %esi, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -55,8 +55,8 @@ block0(v0: i16): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; xorps %xmm0, %xmm0 -; movswl %di, %r9d -; cvtsi2ssl %r9d, %xmm0 +; movswl %di, %esi +; cvtsi2ssl %esi, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -129,8 +129,8 @@ block0(v0: i8): ; block0: ; uninit %xmm0 ; xorpd %xmm0, %xmm0 -; movsbl %dil, %r9d -; cvtsi2sdl %r9d, %xmm0 +; movsbl %dil, %esi +; cvtsi2sdl %esi, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -141,8 +141,8 @@ block0(v0: i8): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; xorpd %xmm0, %xmm0 -; movsbl %dil, %r9d -; cvtsi2sdl %r9d, %xmm0 +; movsbl %dil, %esi +; cvtsi2sdl %esi, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -159,8 +159,8 @@ block0(v0: i16): ; block0: ; uninit %xmm0 ; xorpd %xmm0, %xmm0 -; movswl %di, %r9d -; cvtsi2sdl %r9d, %xmm0 +; movswl %di, %esi +; cvtsi2sdl %esi, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -171,8 +171,8 @@ block0(v0: i16): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; xorpd %xmm0, %xmm0 -; movswl %di, %r9d -; cvtsi2sdl %r9d, %xmm0 +; movswl %di, %esi +; cvtsi2sdl %esi, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -277,20 +277,20 @@ block0(v0: i8, v1: i16, v2: i32, v3: i64): ; block0: ; uninit %xmm0 ; xorps %xmm0, %xmm0 -; movzbq %dil, %r8 -; cvtsi2ssq %r8, %xmm0 -; uninit %xmm6 -; xorps %xmm6, %xmm6 -; movzwq %si, %r8 
-; cvtsi2ssq %r8, %xmm6 -; uninit %xmm7 -; xorps %xmm7, %xmm7 -; movl %edx, %r8d -; cvtsi2ssq %r8, %xmm7 -; u64_to_f32_seq %rcx, %xmm4, %r8, %rdx -; addss %xmm6, %xmm0 -; addss %xmm7, %xmm0 -; addss %xmm4, %xmm0 +; movzbq %dil, %rax +; cvtsi2ssq %rax, %xmm0 +; uninit %xmm1 +; xorps %xmm1, %xmm1 +; movzwq %si, %rax +; cvtsi2ssq %rax, %xmm1 +; uninit %xmm2 +; xorps %xmm2, %xmm2 +; movl %edx, %eax +; cvtsi2ssq %rax, %xmm2 +; u64_to_f32_seq %rcx, %xmm3, %rdx, %rax +; addss %xmm1, %xmm0 +; addss %xmm2, %xmm0 +; addss %xmm3, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -301,28 +301,28 @@ block0(v0: i8, v1: i16, v2: i32, v3: i64): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; xorps %xmm0, %xmm0 -; movzbq %dil, %r8 -; cvtsi2ssq %r8, %xmm0 -; xorps %xmm6, %xmm6 -; movzwq %si, %r8 -; cvtsi2ssq %r8, %xmm6 -; xorps %xmm7, %xmm7 -; movl %edx, %r8d -; cvtsi2ssq %r8, %xmm7 +; movzbq %dil, %rax +; cvtsi2ssq %rax, %xmm0 +; xorps %xmm1, %xmm1 +; movzwq %si, %rax +; cvtsi2ssq %rax, %xmm1 +; xorps %xmm2, %xmm2 +; movl %edx, %eax +; cvtsi2ssq %rax, %xmm2 ; cmpq $0, %rcx -; jl 0x3b -; cvtsi2ssq %rcx, %xmm4 -; jmp 0x55 -; movq %rcx, %r8 -; shrq $1, %r8 +; jl 0x3a +; cvtsi2ssq %rcx, %xmm3 +; jmp 0x54 ; movq %rcx, %rdx -; andq $1, %rdx -; orq %r8, %rdx -; cvtsi2ssq %rdx, %xmm4 -; addss %xmm4, %xmm4 -; addss %xmm6, %xmm0 -; addss %xmm7, %xmm0 -; addss %xmm4, %xmm0 +; shrq $1, %rdx +; movq %rcx, %rax +; andq $1, %rax +; orq %rdx, %rax +; cvtsi2ssq %rax, %xmm3 +; addss %xmm3, %xmm3 +; addss %xmm1, %xmm0 +; addss %xmm2, %xmm0 +; addss %xmm3, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -424,7 +424,7 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; cvt_float32_to_uint32_seq %xmm0, %eax, %r8, %xmm3, %xmm4 +; cvt_float32_to_uint32_seq %xmm0, %eax, %rsi, %xmm3, %xmm4 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -434,20 +434,20 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl $0x4f000000, %r8d -; movd %r8d, %xmm3 +; movl $0x4f000000, %esi +; movd %esi, %xmm3 ; ucomiss %xmm3, %xmm0 -; jae 0x2d -; jp 0x4b +; jae 0x2b +; jp 0x49 ; cvttss2si %xmm0, %eax ; cmpl $0, %eax -; jge 0x46 +; jge 0x44 ; ud2 ; trap: int_ovf ; movaps %xmm0, %xmm4 ; subss %xmm3, %xmm4 ; cvttss2si %xmm4, %eax ; cmpl $0, %eax -; jl 0x4d +; jl 0x4b ; addl $0x80000000, %eax ; movq %rbp, %rsp ; popq %rbp @@ -465,7 +465,7 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; cvt_float32_to_uint64_seq %xmm0, %rax, %r8, %xmm3, %xmm4 +; cvt_float32_to_uint64_seq %xmm0, %rax, %rsi, %xmm3, %xmm4 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -475,22 +475,22 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl $0x5f000000, %r8d -; movd %r8d, %xmm3 +; movl $0x5f000000, %esi +; movd %esi, %xmm3 ; ucomiss %xmm3, %xmm0 -; jae 0x2f -; jp 0x57 +; jae 0x2d +; jp 0x55 ; cvttss2si %xmm0, %rax ; cmpq $0, %rax -; jge 0x52 +; jge 0x50 ; ud2 ; trap: int_ovf ; movaps %xmm0, %xmm4 ; subss %xmm3, %xmm4 ; cvttss2si %xmm4, %rax ; cmpq $0, %rax -; jl 0x59 -; movabsq $9223372036854775808, %r8 -; addq %r8, %rax +; jl 0x57 +; movabsq $9223372036854775808, %rsi +; addq %rsi, %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -507,7 +507,7 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; cvt_float64_to_uint32_seq %xmm0, %eax, %r8, %xmm3, %xmm4 +; cvt_float64_to_uint32_seq %xmm0, %eax, %rsi, %xmm3, %xmm4 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -517,8 +517,8 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $0x41e0000000000000, %r8 -; movq %r8, %xmm3 +; movabsq 
$0x41e0000000000000, %rsi +; movq %rsi, %xmm3 ; ucomisd %xmm3, %xmm0 ; jae 0x32 ; jp 0x50 @@ -548,7 +548,7 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; cvt_float64_to_uint64_seq %xmm0, %rax, %r8, %xmm3, %xmm4 +; cvt_float64_to_uint64_seq %xmm0, %rax, %rsi, %xmm3, %xmm4 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -558,8 +558,8 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $0x43e0000000000000, %r8 -; movq %r8, %xmm3 +; movabsq $0x43e0000000000000, %rsi +; movq %rsi, %xmm3 ; ucomisd %xmm3, %xmm0 ; jae 0x34 ; jp 0x5c @@ -572,8 +572,8 @@ block0(v0: f64): ; cvttsd2si %xmm4, %rax ; cmpq $0, %rax ; jl 0x5e -; movabsq $9223372036854775808, %r8 -; addq %r8, %rax +; movabsq $9223372036854775808, %rsi +; addq %rsi, %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -590,7 +590,7 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; cvt_float32_to_uint32_sat_seq %xmm0, %eax, %r8, %xmm3, %xmm4 +; cvt_float32_to_uint32_sat_seq %xmm0, %eax, %rsi, %xmm3, %xmm4 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -600,25 +600,25 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl $0x4f000000, %r8d -; movd %r8d, %xmm3 +; movl $0x4f000000, %esi +; movd %esi, %xmm3 ; ucomiss %xmm3, %xmm0 -; jae 0x39 -; jnp 0x25 +; jae 0x37 +; jnp 0x23 ; xorl %eax, %eax -; jmp 0x5c +; jmp 0x5a ; cvttss2si %xmm0, %eax ; cmpl $0, %eax -; jge 0x5c +; jge 0x5a ; xorl %eax, %eax -; jmp 0x5c +; jmp 0x5a ; movaps %xmm0, %xmm4 ; subss %xmm3, %xmm4 ; cvttss2si %xmm4, %eax ; cmpl $0, %eax -; jge 0x57 +; jge 0x55 ; movl $0xffffffff, %eax -; jmp 0x5c +; jmp 0x5a ; addl $0x80000000, %eax ; movq %rbp, %rsp ; popq %rbp @@ -634,7 +634,7 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; cvt_float32_to_uint64_sat_seq %xmm0, %rax, %r8, %xmm3, %xmm4 +; cvt_float32_to_uint64_sat_seq %xmm0, %rax, %rsi, %xmm3, %xmm4 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -644,27 +644,27 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl $0x5f000000, %r8d -; movd %r8d, %xmm3 +; movl $0x5f000000, %esi +; movd %esi, %xmm3 ; ucomiss %xmm3, %xmm0 -; jae 0x3d -; jnp 0x26 +; jae 0x3b +; jnp 0x24 ; xorq %rax, %rax -; jmp 0x6c +; jmp 0x6a ; cvttss2si %xmm0, %rax ; cmpq $0, %rax -; jge 0x6c +; jge 0x6a ; xorq %rax, %rax -; jmp 0x6c +; jmp 0x6a ; movaps %xmm0, %xmm4 ; subss %xmm3, %xmm4 ; cvttss2si %xmm4, %rax ; cmpq $0, %rax -; jge 0x5f +; jge 0x5d ; movq $18446744073709551615, %rax -; jmp 0x6c -; movabsq $9223372036854775808, %r8 -; addq %r8, %rax +; jmp 0x6a +; movabsq $9223372036854775808, %rsi +; addq %rsi, %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -679,7 +679,7 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; cvt_float64_to_uint32_sat_seq %xmm0, %eax, %r8, %xmm3, %xmm4 +; cvt_float64_to_uint32_sat_seq %xmm0, %eax, %rsi, %xmm3, %xmm4 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -689,8 +689,8 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $0x41e0000000000000, %r8 -; movq %r8, %xmm3 +; movabsq $0x41e0000000000000, %rsi +; movq %rsi, %xmm3 ; ucomisd %xmm3, %xmm0 ; jae 0x3e ; jnp 0x2a @@ -723,7 +723,7 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; cvt_float64_to_uint64_sat_seq %xmm0, %rax, %r8, %xmm3, %xmm4 +; cvt_float64_to_uint64_sat_seq %xmm0, %rax, %rsi, %xmm3, %xmm4 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -733,8 +733,8 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $0x43e0000000000000, %r8 -; movq %r8, %xmm3 +; movabsq $0x43e0000000000000, %rsi +; 
movq %rsi, %xmm3 ; ucomisd %xmm3, %xmm0 ; jae 0x42 ; jnp 0x2b @@ -752,8 +752,8 @@ block0(v0: f64): ; jge 0x64 ; movq $18446744073709551615, %rax ; jmp 0x71 -; movabsq $9223372036854775808, %r8 -; addq %r8, %rax +; movabsq $9223372036854775808, %rsi +; addq %rsi, %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -768,7 +768,7 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; cvt_float32_to_sint32_seq %xmm0, %eax, %rdx, %xmm3 +; cvt_float32_to_sint32_seq %xmm0, %eax, %rsi, %xmm3 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -783,8 +783,8 @@ block0(v0: f32): ; jno 0x39 ; ucomiss %xmm0, %xmm0 ; jp 0x3e -; movl $0xcf000000, %edx -; movd %edx, %xmm3 +; movl $0xcf000000, %esi +; movd %esi, %xmm3 ; ucomiss %xmm3, %xmm0 ; jb 0x40 ; xorpd %xmm3, %xmm3 @@ -807,7 +807,7 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; cvt_float32_to_sint64_seq %xmm0, %rax, %rdx, %xmm3 +; cvt_float32_to_sint64_seq %xmm0, %rax, %rsi, %xmm3 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -822,8 +822,8 @@ block0(v0: f32): ; jno 0x3b ; ucomiss %xmm0, %xmm0 ; jp 0x40 -; movl $0xdf000000, %edx -; movd %edx, %xmm3 +; movl $0xdf000000, %esi +; movd %esi, %xmm3 ; ucomiss %xmm3, %xmm0 ; jb 0x42 ; xorpd %xmm3, %xmm3 @@ -846,7 +846,7 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; cvt_float64_to_sint32_seq %xmm0, %eax, %rdx, %xmm3 +; cvt_float64_to_sint32_seq %xmm0, %eax, %rsi, %xmm3 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -861,8 +861,8 @@ block0(v0: f64): ; jno 0x42 ; ucomisd %xmm0, %xmm0 ; jp 0x47 -; movabsq $13970166044105375744, %rdx -; movq %rdx, %xmm3 +; movabsq $13970166044105375744, %rsi +; movq %rsi, %xmm3 ; ucomisd %xmm3, %xmm0 ; jbe 0x49 ; xorpd %xmm3, %xmm3 @@ -885,7 +885,7 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; cvt_float64_to_sint64_seq %xmm0, %rax, %rdx, %xmm3 +; cvt_float64_to_sint64_seq %xmm0, %rax, %rsi, %xmm3 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -900,8 +900,8 @@ block0(v0: f64): ; jno 0x44 ; ucomisd %xmm0, %xmm0 ; jp 0x49 -; movabsq $14114281232179134464, %rdx -; movq %rdx, %xmm3 +; movabsq $14114281232179134464, %rsi +; movq %rsi, %xmm3 ; ucomisd %xmm3, %xmm0 ; jb 0x4b ; xorpd %xmm3, %xmm3 @@ -924,7 +924,7 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; cvt_float32_to_sint32_sat_seq %xmm0, %eax, %rdx, %xmm3 +; cvt_float32_to_sint32_sat_seq %xmm0, %eax, %rsi, %xmm3 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -959,7 +959,7 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; cvt_float32_to_sint64_sat_seq %xmm0, %rax, %rdx, %xmm3 +; cvt_float32_to_sint64_sat_seq %xmm0, %rax, %rsi, %xmm3 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -994,7 +994,7 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; cvt_float64_to_sint32_sat_seq %xmm0, %eax, %rdx, %xmm3 +; cvt_float64_to_sint32_sat_seq %xmm0, %eax, %rsi, %xmm3 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1029,7 +1029,7 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; cvt_float64_to_sint64_sat_seq %xmm0, %rax, %rdx, %xmm3 +; cvt_float64_to_sint64_sat_seq %xmm0, %rax, %rsi, %xmm3 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1069,16 +1069,16 @@ block0(v0: f32x4): ; maxps %xmm6, %xmm0 ; pcmpeqd %xmm6, %xmm6 ; psrld $0x1, %xmm6 -; cvtdq2ps %xmm6, %xmm7 -; cvttps2dq %xmm0, %xmm6 -; subps %xmm7, %xmm0 -; cmpleps %xmm0, %xmm7 +; cvtdq2ps %xmm6, %xmm2 +; cvttps2dq %xmm0, %xmm1 +; subps %xmm2, %xmm0 +; cmpleps %xmm0, %xmm2 ; cvttps2dq %xmm0, %xmm0 -; pxor %xmm7, %xmm0 -; uninit %xmm1 -; pxor %xmm1, %xmm1 -; pmaxsd %xmm1, %xmm0 -; paddd %xmm6, %xmm0 +; pxor %xmm2, %xmm0 +; uninit %xmm2 +; 
pxor %xmm2, %xmm2 +; pmaxsd %xmm2, %xmm0 +; paddd %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1092,15 +1092,15 @@ block0(v0: f32x4): ; maxps %xmm6, %xmm0 ; pcmpeqd %xmm6, %xmm6 ; psrld $1, %xmm6 -; cvtdq2ps %xmm6, %xmm7 -; cvttps2dq %xmm0, %xmm6 -; subps %xmm7, %xmm0 -; cmpleps %xmm0, %xmm7 +; cvtdq2ps %xmm6, %xmm2 +; cvttps2dq %xmm0, %xmm1 +; subps %xmm2, %xmm0 +; cmpleps %xmm0, %xmm2 ; cvttps2dq %xmm0, %xmm0 -; pxor %xmm7, %xmm0 -; pxor %xmm1, %xmm1 -; pmaxsd %xmm1, %xmm0 -; paddd %xmm6, %xmm0 +; pxor %xmm2, %xmm0 +; pxor %xmm2, %xmm2 +; pmaxsd %xmm2, %xmm0 +; paddd %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1206,16 +1206,18 @@ block0(v0: i64x2): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; uninit %xmm1 -; xorpd %xmm1, %xmm1 -; movdqa %xmm0, %xmm6 -; movq %xmm6, %r9 -; movdqa %xmm1, %xmm0 -; cvtsi2sdq %r9, %xmm0 -; pshufd $0xee, %xmm6, %xmm2 -; movq %xmm2, %rcx -; cvtsi2sdq %rcx, %xmm1 -; unpcklpd %xmm1, %xmm0 +; uninit %xmm2 +; xorpd %xmm2, %xmm2 +; movdqa %xmm0, %xmm1 +; movq %xmm1, %rsi +; movdqa %xmm2, %xmm0 +; cvtsi2sdq %rsi, %xmm0 +; movdqa %xmm0, %xmm3 +; pshufd $0xee, %xmm1, %xmm0 +; movq %xmm0, %rax +; cvtsi2sdq %rax, %xmm2 +; movdqa %xmm3, %xmm0 +; unpcklpd %xmm2, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1225,15 +1227,17 @@ block0(v0: i64x2): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; xorpd %xmm1, %xmm1 -; movdqa %xmm0, %xmm6 -; movq %xmm6, %r9 -; movdqa %xmm1, %xmm0 -; cvtsi2sdq %r9, %xmm0 -; pshufd $0xee, %xmm6, %xmm2 -; movq %xmm2, %rcx -; cvtsi2sdq %rcx, %xmm1 -; unpcklpd %xmm1, %xmm0 +; xorpd %xmm2, %xmm2 +; movdqa %xmm0, %xmm1 +; movq %xmm1, %rsi +; movdqa %xmm2, %xmm0 +; cvtsi2sdq %rsi, %xmm0 +; movdqa %xmm0, %xmm3 +; pshufd $0xee, %xmm1, %xmm0 +; movq %xmm0, %rax +; cvtsi2sdq %rax, %xmm2 +; movdqa %xmm3, %xmm0 +; unpcklpd %xmm2, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/float-avx.clif b/cranelift/filetests/filetests/isa/x64/float-avx.clif index ca9b91d7b5ab..37bbc6428afd 100644 --- a/cranelift/filetests/filetests/isa/x64/float-avx.clif +++ b/cranelift/filetests/filetests/isa/x64/float-avx.clif @@ -445,10 +445,10 @@ block0(v0: i32x4): ; vpsrld $0x10, %xmm2, %xmm4 ; vpsubd %xmm4, %xmm0, %xmm6 ; vcvtdq2ps %xmm4, %xmm0 -; vpsrld $0x1, %xmm6, %xmm2 -; vcvtdq2ps %xmm2, %xmm4 -; vaddps %xmm4, %xmm4, %xmm6 -; vaddps %xmm0, %xmm6, %xmm0 +; vpsrld $0x1, %xmm6, %xmm1 +; vcvtdq2ps %xmm1, %xmm1 +; vaddps %xmm1, %xmm1, %xmm1 +; vaddps %xmm0, %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -462,10 +462,10 @@ block0(v0: i32x4): ; vpsrld $0x10, %xmm2, %xmm4 ; vpsubd %xmm4, %xmm0, %xmm6 ; vcvtdq2ps %xmm4, %xmm0 -; vpsrld $1, %xmm6, %xmm2 -; vcvtdq2ps %xmm2, %xmm4 -; vaddps %xmm4, %xmm4, %xmm6 -; vaddps %xmm0, %xmm6, %xmm0 +; vpsrld $1, %xmm6, %xmm1 +; vcvtdq2ps %xmm1, %xmm1 +; vaddps %xmm1, %xmm1, %xmm1 +; vaddps %xmm0, %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -534,9 +534,9 @@ block0(v0: f32x4): ; vandps %xmm2, %xmm0, %xmm4 ; vpxor %xmm4, %xmm2, %xmm6 ; vcvttps2dq %xmm4, %xmm0 -; vpand %xmm6, %xmm0, %xmm2 -; vpsrad $0x1f, %xmm2, %xmm4 -; vpxor %xmm0, %xmm4, %xmm0 +; vpand %xmm6, %xmm0, %xmm1 +; vpsrad $0x1f, %xmm1, %xmm1 +; vpxor %xmm0, %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -550,9 +550,9 @@ block0(v0: f32x4): ; vandps %xmm2, %xmm0, %xmm4 ; vpxor %xmm4, %xmm2, %xmm6 ; vcvttps2dq %xmm4, %xmm0 -; vpand %xmm6, %xmm0, %xmm2 -; vpsrad $0x1f, %xmm2, %xmm4 -; vpxor %xmm0, %xmm4, %xmm0 +; vpand %xmm6, %xmm0, %xmm1 +; vpsrad $0x1f, %xmm1, %xmm1 +; vpxor %xmm0, %xmm1, %xmm0 ; 
movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/floating-point.clif b/cranelift/filetests/filetests/isa/x64/floating-point.clif index 57f0285300e3..d067d1b2ff4f 100644 --- a/cranelift/filetests/filetests/isa/x64/floating-point.clif +++ b/cranelift/filetests/filetests/isa/x64/floating-point.clif @@ -11,8 +11,8 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movabsq $0x7fffffffffffffff, %rax -; movq %rax, %xmm4 +; movabsq $0x7fffffffffffffff, %rdx +; movq %rdx, %xmm4 ; andpd %xmm4, %xmm0 ; movq %rbp, %rsp ; popq %rbp @@ -23,8 +23,8 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $0x7fffffffffffffff, %rax -; movq %rax, %xmm4 +; movabsq $0x7fffffffffffffff, %rdx +; movq %rdx, %xmm4 ; andpd %xmm4, %xmm0 ; movq %rbp, %rsp ; popq %rbp @@ -42,8 +42,8 @@ block0(v0: i64): ; movq %rsp, %rbp ; block0: ; movsd (%rdi), %xmm0 -; movabsq $0x7fffffffffffffff, %rcx -; movq %rcx, %xmm5 +; movabsq $0x7fffffffffffffff, %rsi +; movq %rsi, %xmm5 ; andpd %xmm5, %xmm0 ; movq %rbp, %rsp ; popq %rbp @@ -55,8 +55,8 @@ block0(v0: i64): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movsd (%rdi), %xmm0 ; trap: heap_oob -; movabsq $0x7fffffffffffffff, %rcx -; movq %rcx, %xmm5 +; movabsq $0x7fffffffffffffff, %rsi +; movq %rsi, %xmm5 ; andpd %xmm5, %xmm0 ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/floor-libcall.clif b/cranelift/filetests/filetests/isa/x64/floor-libcall.clif index 370f8de32195..e581c72b318e 100644 --- a/cranelift/filetests/filetests/isa/x64/floor-libcall.clif +++ b/cranelift/filetests/filetests/isa/x64/floor-libcall.clif @@ -11,8 +11,8 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; load_ext_name %FloorF32+0, %rcx -; call *%rcx +; load_ext_name %FloorF32+0, %rsi +; call *%rsi ; movq %rbp, %rsp ; popq %rbp ; retq @@ -22,8 +22,8 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $0, %rcx ; reloc_external Abs8 %FloorF32 0 -; callq *%rcx +; movabsq $0, %rsi ; reloc_external Abs8 %FloorF32 0 +; callq *%rsi ; movq %rbp, %rsp ; popq %rbp ; retq @@ -38,8 +38,8 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; load_ext_name %FloorF64+0, %rcx -; call *%rcx +; load_ext_name %FloorF64+0, %rsi +; call *%rsi ; movq %rbp, %rsp ; popq %rbp ; retq @@ -49,8 +49,8 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $0, %rcx ; reloc_external Abs8 %FloorF64 0 -; callq *%rcx +; movabsq $0, %rsi ; reloc_external Abs8 %FloorF64 0 +; callq *%rsi ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/fma-call.clif b/cranelift/filetests/filetests/isa/x64/fma-call.clif index b8d2263895d5..ccfb9d70db6c 100644 --- a/cranelift/filetests/filetests/isa/x64/fma-call.clif +++ b/cranelift/filetests/filetests/isa/x64/fma-call.clif @@ -11,8 +11,8 @@ block0(v0: f32, v1: f32, v2: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; load_ext_name %FmaF32+0, %r8 -; call *%r8 +; load_ext_name %FmaF32+0, %rsi +; call *%rsi ; movq %rbp, %rsp ; popq %rbp ; retq @@ -22,8 +22,8 @@ block0(v0: f32, v1: f32, v2: f32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $0, %r8 ; reloc_external Abs8 %FmaF32 0 -; callq *%r8 +; movabsq $0, %rsi ; reloc_external Abs8 %FmaF32 0 +; callq *%rsi ; movq %rbp, %rsp ; popq %rbp ; retq @@ -38,8 +38,8 @@ block0(v0: f64, v1: f64, v2: f64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; load_ext_name %FmaF64+0, %r8 -; call *%r8 +; load_ext_name %FmaF64+0, %rsi +; 
call *%rsi ; movq %rbp, %rsp ; popq %rbp ; retq @@ -49,8 +49,8 @@ block0(v0: f64, v1: f64, v2: f64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $0, %r8 ; reloc_external Abs8 %FmaF64 0 -; callq *%r8 +; movabsq $0, %rsi ; reloc_external Abs8 %FmaF64 0 +; callq *%rsi ; movq %rbp, %rsp ; popq %rbp ; retq @@ -69,43 +69,43 @@ block0(v0: f32x4, v1: f32x4, v2: f32x4): ; movdqu %xmm0, +(%rsp) ; movdqu %xmm1, +0x10(%rsp) ; movdqu %xmm2, +0x20(%rsp) -; load_ext_name %FmaF32+0, %r8 -; call *%r8 -; movdqu +(%rsp), %xmm4 +; load_ext_name %FmaF32+0, %rsi +; call *%rsi +; movdqu +(%rsp), %xmm1 ; movdqu %xmm0, +0x50(%rsp) -; pshufd $0x1, %xmm4, %xmm0 +; pshufd $0x1, %xmm1, %xmm0 ; movdqu +0x10(%rsp), %xmm1 ; pshufd $0x1, %xmm1, %xmm1 ; movdqu +0x20(%rsp), %xmm2 ; pshufd $0x1, %xmm2, %xmm2 -; load_ext_name %FmaF32+0, %r9 -; call *%r9 -; movdqu +(%rsp), %xmm4 +; load_ext_name %FmaF32+0, %rax +; call *%rax +; movdqu +(%rsp), %xmm1 ; movdqu %xmm0, +0x30(%rsp) -; pshufd $0x2, %xmm4, %xmm0 +; pshufd $0x2, %xmm1, %xmm0 ; movdqu +0x10(%rsp), %xmm1 ; pshufd $0x2, %xmm1, %xmm1 ; movdqu +0x20(%rsp), %xmm2 ; pshufd $0x2, %xmm2, %xmm2 -; load_ext_name %FmaF32+0, %r10 -; call *%r10 -; movdqu +(%rsp), %xmm4 +; load_ext_name %FmaF32+0, %rax +; call *%rax +; movdqu +(%rsp), %xmm1 ; movdqu %xmm0, +0x40(%rsp) -; pshufd $0x3, %xmm4, %xmm0 +; pshufd $0x3, %xmm1, %xmm0 ; movdqu +0x10(%rsp), %xmm1 ; pshufd $0x3, %xmm1, %xmm1 ; movdqu +0x20(%rsp), %xmm2 ; pshufd $0x3, %xmm2, %xmm2 -; load_ext_name %FmaF32+0, %r11 -; call *%r11 +; load_ext_name %FmaF32+0, %rax +; call *%rax ; movdqa %xmm0, %xmm2 ; movdqu +0x30(%rsp), %xmm1 ; movdqu +0x50(%rsp), %xmm0 ; insertps $0x10, %xmm1, %xmm0 -; movdqu +0x40(%rsp), %xmm6 -; insertps $0x20, %xmm6, %xmm0 -; movdqa %xmm2, %xmm3 -; insertps $0x30, %xmm3, %xmm0 +; movdqu +0x40(%rsp), %xmm1 +; insertps $0x20, %xmm1, %xmm0 +; movdqa %xmm2, %xmm4 +; insertps $0x30, %xmm4, %xmm0 ; addq $0x60, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -120,43 +120,43 @@ block0(v0: f32x4, v1: f32x4, v2: f32x4): ; movdqu %xmm0, (%rsp) ; movdqu %xmm1, 0x10(%rsp) ; movdqu %xmm2, 0x20(%rsp) -; movabsq $0, %r8 ; reloc_external Abs8 %FmaF32 0 -; callq *%r8 -; movdqu (%rsp), %xmm4 +; movabsq $0, %rsi ; reloc_external Abs8 %FmaF32 0 +; callq *%rsi +; movdqu (%rsp), %xmm1 ; movdqu %xmm0, 0x50(%rsp) -; pshufd $1, %xmm4, %xmm0 +; pshufd $1, %xmm1, %xmm0 ; movdqu 0x10(%rsp), %xmm1 ; pshufd $1, %xmm1, %xmm1 ; movdqu 0x20(%rsp), %xmm2 ; pshufd $1, %xmm2, %xmm2 -; movabsq $0, %r9 ; reloc_external Abs8 %FmaF32 0 -; callq *%r9 -; movdqu (%rsp), %xmm4 +; movabsq $0, %rax ; reloc_external Abs8 %FmaF32 0 +; callq *%rax +; movdqu (%rsp), %xmm1 ; movdqu %xmm0, 0x30(%rsp) -; pshufd $2, %xmm4, %xmm0 +; pshufd $2, %xmm1, %xmm0 ; movdqu 0x10(%rsp), %xmm1 ; pshufd $2, %xmm1, %xmm1 ; movdqu 0x20(%rsp), %xmm2 ; pshufd $2, %xmm2, %xmm2 -; movabsq $0, %r10 ; reloc_external Abs8 %FmaF32 0 -; callq *%r10 -; movdqu (%rsp), %xmm4 +; movabsq $0, %rax ; reloc_external Abs8 %FmaF32 0 +; callq *%rax +; movdqu (%rsp), %xmm1 ; movdqu %xmm0, 0x40(%rsp) -; pshufd $3, %xmm4, %xmm0 +; pshufd $3, %xmm1, %xmm0 ; movdqu 0x10(%rsp), %xmm1 ; pshufd $3, %xmm1, %xmm1 ; movdqu 0x20(%rsp), %xmm2 ; pshufd $3, %xmm2, %xmm2 -; movabsq $0, %r11 ; reloc_external Abs8 %FmaF32 0 -; callq *%r11 +; movabsq $0, %rax ; reloc_external Abs8 %FmaF32 0 +; callq *%rax ; movdqa %xmm0, %xmm2 ; movdqu 0x30(%rsp), %xmm1 ; movdqu 0x50(%rsp), %xmm0 ; insertps $0x10, %xmm1, %xmm0 -; movdqu 0x40(%rsp), %xmm6 -; insertps $0x20, %xmm6, %xmm0 -; movdqa %xmm2, %xmm3 -; insertps 
$0x30, %xmm3, %xmm0 +; movdqu 0x40(%rsp), %xmm1 +; insertps $0x20, %xmm1, %xmm0 +; movdqa %xmm2, %xmm4 +; insertps $0x30, %xmm4, %xmm0 ; addq $0x60, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -176,8 +176,8 @@ block0(v0: f64x2, v1: f64x2, v2: f64x2): ; movdqu %xmm0, +(%rsp) ; movdqu %xmm1, +0x10(%rsp) ; movdqu %xmm2, +0x20(%rsp) -; load_ext_name %FmaF64+0, %r8 -; call *%r8 +; load_ext_name %FmaF64+0, %rsi +; call *%rsi ; movdqu %xmm0, +0x30(%rsp) ; movdqu +(%rsp), %xmm0 ; pshufd $0xee, %xmm0, %xmm0 @@ -185,11 +185,11 @@ block0(v0: f64x2, v1: f64x2, v2: f64x2): ; pshufd $0xee, %xmm1, %xmm1 ; movdqu +0x20(%rsp), %xmm2 ; pshufd $0xee, %xmm2, %xmm2 -; load_ext_name %FmaF64+0, %r9 -; call *%r9 -; movdqa %xmm0, %xmm6 +; load_ext_name %FmaF64+0, %rax +; call *%rax +; movdqa %xmm0, %xmm1 ; movdqu +0x30(%rsp), %xmm0 -; movlhps %xmm6, %xmm0 +; movlhps %xmm1, %xmm0 ; addq $0x40, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -204,8 +204,8 @@ block0(v0: f64x2, v1: f64x2, v2: f64x2): ; movdqu %xmm0, (%rsp) ; movdqu %xmm1, 0x10(%rsp) ; movdqu %xmm2, 0x20(%rsp) -; movabsq $0, %r8 ; reloc_external Abs8 %FmaF64 0 -; callq *%r8 +; movabsq $0, %rsi ; reloc_external Abs8 %FmaF64 0 +; callq *%rsi ; movdqu %xmm0, 0x30(%rsp) ; movdqu (%rsp), %xmm0 ; pshufd $0xee, %xmm0, %xmm0 @@ -213,11 +213,11 @@ block0(v0: f64x2, v1: f64x2, v2: f64x2): ; pshufd $0xee, %xmm1, %xmm1 ; movdqu 0x20(%rsp), %xmm2 ; pshufd $0xee, %xmm2, %xmm2 -; movabsq $0, %r9 ; reloc_external Abs8 %FmaF64 0 -; callq *%r9 -; movdqa %xmm0, %xmm6 +; movabsq $0, %rax ; reloc_external Abs8 %FmaF64 0 +; callq *%rax +; movdqa %xmm0, %xmm1 ; movdqu 0x30(%rsp), %xmm0 -; movlhps %xmm6, %xmm0 +; movlhps %xmm1, %xmm0 ; addq $0x40, %rsp ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/fma-inst.clif b/cranelift/filetests/filetests/isa/x64/fma-inst.clif index 0549a9e7e4dd..97681e12c764 100644 --- a/cranelift/filetests/filetests/isa/x64/fma-inst.clif +++ b/cranelift/filetests/filetests/isa/x64/fma-inst.clif @@ -434,18 +434,18 @@ block0(v0: f64x2, v1: f64x2, v2: f64x2): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; uninit %xmm5 -; vpcmpeqd %xmm5, %xmm5, %xmm7 -; vpsllq $0x3f, %xmm7, %xmm3 -; vxorpd %xmm3, %xmm0, %xmm3 -; uninit %xmm5 -; vpcmpeqd %xmm5, %xmm5, %xmm7 -; vpsllq $0x3f, %xmm7, %xmm4 -; vxorpd %xmm4, %xmm3, %xmm3 -; uninit %xmm5 -; vpcmpeqd %xmm5, %xmm5, %xmm7 -; vpsllq $0x3f, %xmm7, %xmm4 -; vxorpd %xmm4, %xmm3, %xmm0 +; uninit %xmm3 +; vpcmpeqd %xmm3, %xmm3, %xmm3 +; vpsllq $0x3f, %xmm3, %xmm3 +; vxorpd %xmm3, %xmm0, %xmm0 +; uninit %xmm3 +; vpcmpeqd %xmm3, %xmm3, %xmm3 +; vpsllq $0x3f, %xmm3, %xmm3 +; vxorpd %xmm3, %xmm0, %xmm0 +; uninit %xmm3 +; vpcmpeqd %xmm3, %xmm3, %xmm3 +; vpsllq $0x3f, %xmm3, %xmm3 +; vxorpd %xmm3, %xmm0, %xmm0 ; vfnmadd213pd %xmm2, %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp @@ -456,15 +456,15 @@ block0(v0: f64x2, v1: f64x2, v2: f64x2): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; vpcmpeqd %xmm5, %xmm5, %xmm7 -; vpsllq $0x3f, %xmm7, %xmm3 -; vxorpd %xmm3, %xmm0, %xmm3 -; vpcmpeqd %xmm5, %xmm5, %xmm7 -; vpsllq $0x3f, %xmm7, %xmm4 -; vxorpd %xmm4, %xmm3, %xmm3 -; vpcmpeqd %xmm5, %xmm5, %xmm7 -; vpsllq $0x3f, %xmm7, %xmm4 -; vxorpd %xmm4, %xmm3, %xmm0 +; vpcmpeqd %xmm3, %xmm3, %xmm3 +; vpsllq $0x3f, %xmm3, %xmm3 +; vxorpd %xmm3, %xmm0, %xmm0 +; vpcmpeqd %xmm3, %xmm3, %xmm3 +; vpsllq $0x3f, %xmm3, %xmm3 +; vxorpd %xmm3, %xmm0, %xmm0 +; vpcmpeqd %xmm3, %xmm3, %xmm3 +; vpsllq $0x3f, %xmm3, %xmm3 +; vxorpd %xmm3, %xmm0, %xmm0 ; vfnmadd213pd %xmm2, %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp diff --git 
a/cranelift/filetests/filetests/isa/x64/fmsub-inst.clif b/cranelift/filetests/filetests/isa/x64/fmsub-inst.clif index 98975b85a089..7e4c73b91b2c 100644 --- a/cranelift/filetests/filetests/isa/x64/fmsub-inst.clif +++ b/cranelift/filetests/filetests/isa/x64/fmsub-inst.clif @@ -457,9 +457,9 @@ block0(v0: f64, v1: f64, v2: f64): ; movq %rsp, %rbp ; subq $0x10, %rsp ; block0: -; leaq +(%rsp), %r8 +; leaq +(%rsp), %rsi ; vmovsd %xmm2, +(%rsp) -; vfnmsub213sd (%r8), %xmm1, %xmm0 +; vfnmsub213sd (%rsi), %xmm1, %xmm0 ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -471,9 +471,9 @@ block0(v0: f64, v1: f64, v2: f64): ; movq %rsp, %rbp ; subq $0x10, %rsp ; block1: ; offset 0x8 -; leaq (%rsp), %r8 +; leaq (%rsp), %rsi ; vmovsd %xmm2, (%rsp) -; vfnmsub213sd (%r8), %xmm1, %xmm0 ; trap: heap_oob +; vfnmsub213sd (%rsi), %xmm1, %xmm0 ; trap: heap_oob ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -495,9 +495,9 @@ block0(v0: f64, v1: f64, v2: f64): ; movq %rsp, %rbp ; subq $0x10, %rsp ; block0: -; leaq +(%rsp), %r8 +; leaq +(%rsp), %rsi ; vmovsd %xmm2, +(%rsp) -; vfmsub213sd (%r8), %xmm1, %xmm0 +; vfmsub213sd (%rsi), %xmm1, %xmm0 ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -509,9 +509,9 @@ block0(v0: f64, v1: f64, v2: f64): ; movq %rsp, %rbp ; subq $0x10, %rsp ; block1: ; offset 0x8 -; leaq (%rsp), %r8 +; leaq (%rsp), %rsi ; vmovsd %xmm2, (%rsp) -; vfmsub213sd (%r8), %xmm1, %xmm0 ; trap: heap_oob +; vfmsub213sd (%rsi), %xmm1, %xmm0 ; trap: heap_oob ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -533,9 +533,9 @@ block0(v0: f32, v1: f32, v2: f32): ; movq %rsp, %rbp ; subq $0x10, %rsp ; block0: -; leaq +(%rsp), %r8 +; leaq +(%rsp), %rsi ; vmovss %xmm1, +(%rsp) -; vfmsub132ss (%r8), %xmm2, %xmm0 +; vfmsub132ss (%rsi), %xmm2, %xmm0 ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -547,9 +547,9 @@ block0(v0: f32, v1: f32, v2: f32): ; movq %rsp, %rbp ; subq $0x10, %rsp ; block1: ; offset 0x8 -; leaq (%rsp), %r8 +; leaq (%rsp), %rsi ; vmovss %xmm1, (%rsp) -; vfmsub132ss (%r8), %xmm2, %xmm0 ; trap: heap_oob +; vfmsub132ss (%rsi), %xmm2, %xmm0 ; trap: heap_oob ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -573,9 +573,9 @@ block0(v0: f32, v1: f32, v2: f32): ; movq %rsp, %rbp ; subq $0x10, %rsp ; block0: -; leaq +(%rsp), %r8 +; leaq +(%rsp), %rsi ; vmovss %xmm1, +(%rsp) -; vfnmsub132ss (%r8), %xmm2, %xmm0 +; vfnmsub132ss (%rsi), %xmm2, %xmm0 ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -587,9 +587,9 @@ block0(v0: f32, v1: f32, v2: f32): ; movq %rsp, %rbp ; subq $0x10, %rsp ; block1: ; offset 0x8 -; leaq (%rsp), %r8 +; leaq (%rsp), %rsi ; vmovss %xmm1, (%rsp) -; vfnmsub132ss (%r8), %xmm2, %xmm0 ; trap: heap_oob +; vfnmsub132ss (%rsi), %xmm2, %xmm0 ; trap: heap_oob ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -616,12 +616,12 @@ block0(v0: f32, v1: f32, v2: f32): ; movq %rsp, %rbp ; subq $0x10, %rsp ; block0: -; leaq +(%rsp), %r11 -; movl $0x80000000, %r9d -; vmovd %r9d, %xmm3 -; vxorps %xmm3, %xmm1, %xmm3 -; vmovss %xmm3, +(%rsp) -; vfmsub132ss (%r11), %xmm2, %xmm0 +; leaq +(%rsp), %r8 +; movl $0x80000000, %esi +; vmovd %esi, %xmm3 +; vxorps %xmm3, %xmm1, %xmm1 +; vmovss %xmm1, +(%rsp) +; vfmsub132ss (%r8), %xmm2, %xmm0 ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -633,12 +633,12 @@ block0(v0: f32, v1: f32, v2: f32): ; movq %rsp, %rbp ; subq $0x10, %rsp ; block1: ; offset 0x8 -; leaq (%rsp), %r11 -; movl $0x80000000, %r9d -; vmovd %r9d, %xmm3 -; vxorps %xmm3, %xmm1, %xmm3 -; vmovss %xmm3, (%rsp) -; vfmsub132ss (%r11), %xmm2, %xmm0 ; trap: heap_oob +; leaq (%rsp), %r8 +; 
movl $0x80000000, %esi +; vmovd %esi, %xmm3 +; vxorps %xmm3, %xmm1, %xmm1 +; vmovss %xmm1, (%rsp) +; vfmsub132ss (%r8), %xmm2, %xmm0 ; trap: heap_oob ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -660,10 +660,10 @@ block0(v0: f32x4, v1: f32x4, v2: f32x4): ; movq %rsp, %rbp ; subq $0x10, %rsp ; block0: -; leaq +(%rsp), %r8 +; leaq +(%rsp), %rsi ; vmovups %xmm0, +(%rsp) ; movdqa %xmm1, %xmm0 -; vfmsub132ps (%r8), %xmm2, %xmm0 +; vfmsub132ps (%rsi), %xmm2, %xmm0 ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -675,10 +675,10 @@ block0(v0: f32x4, v1: f32x4, v2: f32x4): ; movq %rsp, %rbp ; subq $0x10, %rsp ; block1: ; offset 0x8 -; leaq (%rsp), %r8 +; leaq (%rsp), %rsi ; vmovups %xmm0, (%rsp) ; movdqa %xmm1, %xmm0 -; vfmsub132ps (%r8), %xmm2, %xmm0 ; trap: heap_oob +; vfmsub132ps (%rsi), %xmm2, %xmm0 ; trap: heap_oob ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/fneg.clif b/cranelift/filetests/filetests/isa/x64/fneg.clif index 00e3c9055159..501a853b9394 100644 --- a/cranelift/filetests/filetests/isa/x64/fneg.clif +++ b/cranelift/filetests/filetests/isa/x64/fneg.clif @@ -11,8 +11,8 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movl $0x80000000, %eax -; movd %eax, %xmm4 +; movl $0x80000000, %edx +; movd %edx, %xmm4 ; xorps %xmm4, %xmm0 ; movq %rbp, %rsp ; popq %rbp @@ -23,8 +23,8 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl $0x80000000, %eax -; movd %eax, %xmm4 +; movl $0x80000000, %edx +; movd %edx, %xmm4 ; xorps %xmm4, %xmm0 ; movq %rbp, %rsp ; popq %rbp @@ -40,8 +40,8 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movabsq $0x8000000000000000, %rax -; movq %rax, %xmm4 +; movabsq $0x8000000000000000, %rdx +; movq %rdx, %xmm4 ; xorpd %xmm4, %xmm0 ; movq %rbp, %rsp ; popq %rbp @@ -52,8 +52,8 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $9223372036854775808, %rax -; movq %rax, %xmm4 +; movabsq $9223372036854775808, %rdx +; movq %rdx, %xmm4 ; xorpd %xmm4, %xmm0 ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/fp_sp_pc.clif b/cranelift/filetests/filetests/isa/x64/fp_sp_pc.clif index 9cbe6b2e54bb..4e5f25f0ee7f 100644 --- a/cranelift/filetests/filetests/isa/x64/fp_sp_pc.clif +++ b/cranelift/filetests/filetests/isa/x64/fp_sp_pc.clif @@ -62,8 +62,8 @@ block0: ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rbp, %rsi -; movq 8(%rsi), %rax +; movq %rbp, %rax +; movq 8(%rax), %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -73,8 +73,8 @@ block0: ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rbp, %rsi -; movq 8(%rsi), %rax +; movq %rbp, %rax +; movq 8(%rax), %rax ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/fpromote-demote-avx.clif b/cranelift/filetests/filetests/isa/x64/fpromote-demote-avx.clif index 66fbe4a2ce4d..7637a3bff43e 100644 --- a/cranelift/filetests/filetests/isa/x64/fpromote-demote-avx.clif +++ b/cranelift/filetests/filetests/isa/x64/fpromote-demote-avx.clif @@ -45,11 +45,11 @@ block0(v1: i64, v2: f32): ; movq %rsp, %rbp ; subq $0x10, %rsp ; block0: -; leaq +(%rsp), %r8 +; leaq +(%rsp), %rsi ; vmovss %xmm0, +(%rsp) ; uninit %xmm4 ; vxorpd %xmm4, %xmm4, %xmm6 -; vcvtss2sd (%r8), %xmm6, %xmm0 +; vcvtss2sd (%rsi), %xmm6, %xmm0 ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -61,10 +61,10 @@ block0(v1: i64, v2: f32): ; movq %rsp, %rbp ; subq $0x10, %rsp ; block1: ; offset 0x8 -; leaq (%rsp), %r8 +; leaq (%rsp), %rsi ; vmovss %xmm0, 
(%rsp) ; vxorpd %xmm4, %xmm4, %xmm6 -; vcvtss2sd (%r8), %xmm6, %xmm0 ; trap: heap_oob +; vcvtss2sd (%rsi), %xmm6, %xmm0 ; trap: heap_oob ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -114,11 +114,11 @@ block0(v1: i64, v2: f64): ; movq %rsp, %rbp ; subq $0x10, %rsp ; block0: -; leaq +(%rsp), %r8 +; leaq +(%rsp), %rsi ; vmovsd %xmm0, +(%rsp) ; uninit %xmm4 ; vxorps %xmm4, %xmm4, %xmm6 -; vcvtsd2ss (%r8), %xmm6, %xmm0 +; vcvtsd2ss (%rsi), %xmm6, %xmm0 ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -130,10 +130,10 @@ block0(v1: i64, v2: f64): ; movq %rsp, %rbp ; subq $0x10, %rsp ; block1: ; offset 0x8 -; leaq (%rsp), %r8 +; leaq (%rsp), %rsi ; vmovsd %xmm0, (%rsp) ; vxorps %xmm4, %xmm4, %xmm6 -; vcvtsd2ss (%r8), %xmm6, %xmm0 ; trap: heap_oob +; vcvtsd2ss (%rsi), %xmm6, %xmm0 ; trap: heap_oob ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/fpromote-demote.clif b/cranelift/filetests/filetests/isa/x64/fpromote-demote.clif index a7f89bcbbbd8..a7775d26af6f 100644 --- a/cranelift/filetests/filetests/isa/x64/fpromote-demote.clif +++ b/cranelift/filetests/filetests/isa/x64/fpromote-demote.clif @@ -49,11 +49,11 @@ block0(v1: i64, v2: f32): ; movq %rsp, %rbp ; subq $0x10, %rsp ; block0: -; leaq +(%rsp), %r8 +; leaq +(%rsp), %rsi ; movss %xmm0, +(%rsp) ; uninit %xmm0 ; xorpd %xmm0, %xmm0 -; cvtss2sd (%r8), %xmm0 +; cvtss2sd (%rsi), %xmm0 ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -65,10 +65,10 @@ block0(v1: i64, v2: f32): ; movq %rsp, %rbp ; subq $0x10, %rsp ; block1: ; offset 0x8 -; leaq (%rsp), %r8 +; leaq (%rsp), %rsi ; movss %xmm0, (%rsp) ; xorpd %xmm0, %xmm0 -; cvtss2sd (%r8), %xmm0 ; trap: heap_oob +; cvtss2sd (%rsi), %xmm0 ; trap: heap_oob ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -122,11 +122,11 @@ block0(v1: i64, v2: f64): ; movq %rsp, %rbp ; subq $0x10, %rsp ; block0: -; leaq +(%rsp), %r8 +; leaq +(%rsp), %rsi ; movsd %xmm0, +(%rsp) ; uninit %xmm0 ; xorps %xmm0, %xmm0 -; cvtsd2ss (%r8), %xmm0 +; cvtsd2ss (%rsi), %xmm0 ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -138,10 +138,10 @@ block0(v1: i64, v2: f64): ; movq %rsp, %rbp ; subq $0x10, %rsp ; block1: ; offset 0x8 -; leaq (%rsp), %r8 +; leaq (%rsp), %rsi ; movsd %xmm0, (%rsp) ; xorps %xmm0, %xmm0 -; cvtsd2ss (%r8), %xmm0 ; trap: heap_oob +; cvtsd2ss (%rsi), %xmm0 ; trap: heap_oob ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/i128.clif b/cranelift/filetests/filetests/isa/x64/i128.clif index 2b2813d33b5a..51777c75c2cc 100644 --- a/cranelift/filetests/filetests/isa/x64/i128.clif +++ b/cranelift/filetests/filetests/isa/x64/i128.clif @@ -199,17 +199,17 @@ block0(v0: i128, v1: i128): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdx, %rax +; movq %rdx, %r11 ; movq %rdi, %rdx ; imulq %rcx, %rdx -; movq %rax, %rcx +; movq %r11, %rcx ; imulq %rcx, %rsi ; addq %rsi, %rdx ; movq %rdi, %rax -; movq %rdx, %r8 +; movq %rdx, %rsi ; mulq %rcx ;; implicit: %rax, %rdx ; movq %rdx, %rcx -; movq %r8, %rdx +; movq %rsi, %rdx ; addq %rcx, %rdx ; movq %rbp, %rsp ; popq %rbp @@ -220,17 +220,17 @@ block0(v0: i128, v1: i128): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdx, %rax +; movq %rdx, %r11 ; movq %rdi, %rdx ; imulq %rcx, %rdx -; movq %rax, %rcx +; movq %r11, %rcx ; imulq %rcx, %rsi ; addq %rsi, %rdx ; movq %rdi, %rax -; movq %rdx, %r8 +; movq %rdx, %rsi ; mulq %rcx ; movq %rdx, %rcx -; movq %r8, %rdx +; movq %rsi, %rdx ; addq %rcx, %rdx ; movq %rbp, %rsp ; popq %rbp @@ -317,12 +317,11 @@ block0(v0: 
i128, v1: i128): ; VCode: ; pushq %rbp ; movq %rsp, %rbp -; subq $0x30, %rsp +; subq $0x20, %rsp ; movq %rbx, (%rsp) ; movq %r12, 8(%rsp) ; movq %r13, 0x10(%rsp) ; movq %r14, 0x18(%rsp) -; movq %r15, 0x20(%rsp) ; block0: ; movq %rdi, %rax ; xorq %rdx, %rax @@ -351,37 +350,36 @@ block0(v0: i128, v1: i128): ; cmpq %rdx, %rdi ; movq %rsi, %rbx ; sbbq %rcx, %rbx -; setge %r14b +; setge %r12b ; cmpq %rdx, %rdi -; movq %rsi, %r12 -; sbbq %rcx, %r12 -; setb %r13b +; movq %rsi, %rbx +; sbbq %rcx, %rbx +; setb %bl ; cmpq %rdi, %rdx -; movq %rcx, %r15 -; sbbq %rsi, %r15 -; setae %bl +; movq %rcx, %r13 +; sbbq %rsi, %r13 +; setae %r14b ; cmpq %rdi, %rdx -; movq %rcx, %r15 -; sbbq %rsi, %r15 -; setb %r15b +; movq %rcx, %r13 +; sbbq %rsi, %r13 +; setb %r13b ; cmpq %rdx, %rdi ; sbbq %rcx, %rsi -; setae %dil +; setae %cl ; andl %r9d, %eax ; andl %r11d, %r8d -; andl %r14d, %r10d -; andl %ebx, %r13d -; andl %edi, %r15d +; andl %r12d, %r10d +; andl %r14d, %ebx +; andl %ecx, %r13d ; andl %r8d, %eax -; andl %r13d, %r10d +; andl %ebx, %r10d ; andl %r10d, %eax -; andl %r15d, %eax +; andl %r13d, %eax ; movq (%rsp), %rbx ; movq 8(%rsp), %r12 ; movq 0x10(%rsp), %r13 ; movq 0x18(%rsp), %r14 -; movq 0x20(%rsp), %r15 -; addq $0x30, %rsp +; addq $0x20, %rsp ; movq %rbp, %rsp ; popq %rbp ; retq @@ -390,13 +388,12 @@ block0(v0: i128, v1: i128): ; block0: ; offset 0x0 ; pushq %rbp ; movq %rsp, %rbp -; subq $0x30, %rsp +; subq $0x20, %rsp ; movq %rbx, (%rsp) ; movq %r12, 8(%rsp) ; movq %r13, 0x10(%rsp) ; movq %r14, 0x18(%rsp) -; movq %r15, 0x20(%rsp) -; block1: ; offset 0x20 +; block1: ; offset 0x1b ; movq %rdi, %rax ; xorq %rdx, %rax ; movq %rsi, %r8 @@ -424,37 +421,36 @@ block0(v0: i128, v1: i128): ; cmpq %rdx, %rdi ; movq %rsi, %rbx ; sbbq %rcx, %rbx -; setge %r14b +; setge %r12b ; cmpq %rdx, %rdi -; movq %rsi, %r12 -; sbbq %rcx, %r12 -; setb %r13b +; movq %rsi, %rbx +; sbbq %rcx, %rbx +; setb %bl ; cmpq %rdi, %rdx -; movq %rcx, %r15 -; sbbq %rsi, %r15 -; setae %bl +; movq %rcx, %r13 +; sbbq %rsi, %r13 +; setae %r14b ; cmpq %rdi, %rdx -; movq %rcx, %r15 -; sbbq %rsi, %r15 -; setb %r15b +; movq %rcx, %r13 +; sbbq %rsi, %r13 +; setb %r13b ; cmpq %rdx, %rdi ; sbbq %rcx, %rsi -; setae %dil +; setae %cl ; andl %r9d, %eax ; andl %r11d, %r8d -; andl %r14d, %r10d -; andl %ebx, %r13d -; andl %edi, %r15d +; andl %r12d, %r10d +; andl %r14d, %ebx +; andl %ecx, %r13d ; andl %r8d, %eax -; andl %r13d, %r10d +; andl %ebx, %r10d ; andl %r10d, %eax -; andl %r15d, %eax +; andl %r13d, %eax ; movq (%rsp), %rbx ; movq 8(%rsp), %r12 ; movq 0x10(%rsp), %r13 ; movq 0x18(%rsp), %r14 -; movq 0x20(%rsp), %r15 -; addq $0x30, %rsp +; addq $0x20, %rsp ; movq %rbp, %rsp ; popq %rbp ; retq @@ -757,45 +753,45 @@ block0(v0: i128): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; shrq $0x1, %rax +; movq %rdi, %r11 +; shrq $0x1, %r11 ; movabsq $0x7777777777777777, %r8 -; andq %r8, %rax -; subq %rax, %rdi -; shrq $0x1, %rax -; andq %r8, %rax -; subq %rax, %rdi -; shrq $0x1, %rax -; andq %r8, %rax -; subq %rax, %rdi +; andq %r8, %r11 +; subq %r11, %rdi +; shrq $0x1, %r11 +; andq %r8, %r11 +; subq %r11, %rdi +; shrq $0x1, %r11 +; andq %r8, %r11 +; subq %r11, %rdi ; movq %rdi, %rax ; shrq $0x4, %rax ; addq %rdi, %rax -; movabsq $0xf0f0f0f0f0f0f0f, %rdi -; andq %rdi, %rax -; movabsq $0x101010101010101, %rdx -; imulq %rdx, %rax -; shrq $0x38, %rax -; movq %rsi, %rdi -; shrq $0x1, %rdi -; movabsq $0x7777777777777777, %rcx -; andq %rcx, %rdi -; subq %rdi, %rsi -; shrq $0x1, %rdi -; andq %rcx, %rdi -; subq %rdi, %rsi -; shrq $0x1, %rdi -; andq %rcx, 
%rdi -; subq %rdi, %rsi -; movq %rsi, %rdi -; shrq $0x4, %rdi -; addq %rsi, %rdi -; movabsq $0xf0f0f0f0f0f0f0f, %r10 -; andq %r10, %rdi +; movabsq $0xf0f0f0f0f0f0f0f, %rcx +; andq %rcx, %rax ; movabsq $0x101010101010101, %rcx -; imulq %rcx, %rdi -; shrq $0x38, %rdi -; addq %rdi, %rax +; imulq %rcx, %rax +; shrq $0x38, %rax +; movq %rsi, %rcx +; shrq $0x1, %rcx +; movabsq $0x7777777777777777, %rdx +; andq %rdx, %rcx +; subq %rcx, %rsi +; shrq $0x1, %rcx +; andq %rdx, %rcx +; subq %rcx, %rsi +; shrq $0x1, %rcx +; andq %rdx, %rcx +; subq %rcx, %rsi +; movq %rsi, %rcx +; shrq $0x4, %rcx +; addq %rsi, %rcx +; movabsq $0xf0f0f0f0f0f0f0f, %rdx +; andq %rdx, %rcx +; movabsq $0x101010101010101, %rdx +; imulq %rdx, %rcx +; shrq $0x38, %rcx +; addq %rcx, %rax ; uninit %rdx ; xorq %rdx, %rdx ; movq %rbp, %rsp @@ -807,45 +803,45 @@ block0(v0: i128): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; shrq $1, %rax +; movq %rdi, %r11 +; shrq $1, %r11 ; movabsq $0x7777777777777777, %r8 -; andq %r8, %rax -; subq %rax, %rdi -; shrq $1, %rax -; andq %r8, %rax -; subq %rax, %rdi -; shrq $1, %rax -; andq %r8, %rax -; subq %rax, %rdi +; andq %r8, %r11 +; subq %r11, %rdi +; shrq $1, %r11 +; andq %r8, %r11 +; subq %r11, %rdi +; shrq $1, %r11 +; andq %r8, %r11 +; subq %r11, %rdi ; movq %rdi, %rax ; shrq $4, %rax ; addq %rdi, %rax -; movabsq $0xf0f0f0f0f0f0f0f, %rdi -; andq %rdi, %rax -; movabsq $0x101010101010101, %rdx -; imulq %rdx, %rax -; shrq $0x38, %rax -; movq %rsi, %rdi -; shrq $1, %rdi -; movabsq $0x7777777777777777, %rcx -; andq %rcx, %rdi -; subq %rdi, %rsi -; shrq $1, %rdi -; andq %rcx, %rdi -; subq %rdi, %rsi -; shrq $1, %rdi -; andq %rcx, %rdi -; subq %rdi, %rsi -; movq %rsi, %rdi -; shrq $4, %rdi -; addq %rsi, %rdi -; movabsq $0xf0f0f0f0f0f0f0f, %r10 -; andq %r10, %rdi +; movabsq $0xf0f0f0f0f0f0f0f, %rcx +; andq %rcx, %rax ; movabsq $0x101010101010101, %rcx -; imulq %rcx, %rdi -; shrq $0x38, %rdi -; addq %rdi, %rax +; imulq %rcx, %rax +; shrq $0x38, %rax +; movq %rsi, %rcx +; shrq $1, %rcx +; movabsq $0x7777777777777777, %rdx +; andq %rdx, %rcx +; subq %rcx, %rsi +; shrq $1, %rcx +; andq %rdx, %rcx +; subq %rcx, %rsi +; shrq $1, %rcx +; andq %rdx, %rcx +; subq %rcx, %rsi +; movq %rsi, %rcx +; shrq $4, %rcx +; addq %rsi, %rcx +; movabsq $0xf0f0f0f0f0f0f0f, %rdx +; andq %rdx, %rcx +; movabsq $0x101010101010101, %rdx +; imulq %rdx, %rcx +; shrq $0x38, %rcx +; addq %rcx, %rax ; xorq %rdx, %rdx ; movq %rbp, %rsp ; popq %rbp @@ -861,88 +857,88 @@ block0(v0: i128): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movabsq $0x5555555555555555, %rcx -; movq %rsi, %rdx -; andq %rcx, %rdx +; movabsq $0x5555555555555555, %r8 +; movq %rsi, %rcx +; andq %r8, %rcx ; shrq $1, %rsi -; andq %rcx, %rsi -; shlq $1, %rdx -; orq %rsi, %rdx -; movabsq $0x3333333333333333, %r9 -; movq %rdx, %r10 -; andq %r9, %r10 -; shrq $0x2, %rdx -; andq %r9, %rdx -; shlq $0x2, %r10 -; orq %rdx, %r10 -; movabsq $0xf0f0f0f0f0f0f0f, %rsi -; movq %r10, %rax -; andq %rsi, %rax -; shrq $0x4, %r10 -; andq %rsi, %r10 -; shlq $0x4, %rax -; orq %r10, %rax -; movabsq $0xff00ff00ff00ff, %rcx -; movq %rax, %rdx -; andq %rcx, %rdx -; shrq $0x8, %rax -; andq %rcx, %rax -; shlq $0x8, %rdx -; orq %rax, %rdx -; movabsq $0xffff0000ffff, %r10 -; movq %rdx, %r9 -; andq %r10, %r9 -; shrq $0x10, %rdx -; andq %r10, %rdx -; shlq $0x10, %r9 -; orq %rdx, %r9 -; movl $0xffffffff, %esi -; movq %r9, %rax -; andq %rsi, %rax -; shrq $0x20, %r9 -; shlq $0x20, %rax -; orq %r9, %rax -; movabsq $0x5555555555555555, %rdx -; movq %rdi, %rcx -; andq %rdx, 
%rcx -; shrq $1, %rdi -; andq %rdx, %rdi +; andq %r8, %rsi ; shlq $1, %rcx -; orq %rdi, %rcx +; orq %rsi, %rcx ; movabsq $0x3333333333333333, %rdx -; movq %rcx, %r8 -; andq %rdx, %r8 +; movq %rcx, %rax +; andq %rdx, %rax ; shrq $0x2, %rcx ; andq %rdx, %rcx -; shlq $0x2, %r8 -; orq %rcx, %r8 -; movabsq $0xf0f0f0f0f0f0f0f, %r10 -; movq %r8, %r11 -; andq %r10, %r11 -; shrq $0x4, %r8 -; andq %r10, %r8 -; shlq $0x4, %r11 -; orq %r8, %r11 -; movabsq $0xff00ff00ff00ff, %rdi -; movq %r11, %rcx -; andq %rdi, %rcx -; shrq $0x8, %r11 -; andq %rdi, %r11 +; shlq $0x2, %rax +; orq %rcx, %rax +; movabsq $0xf0f0f0f0f0f0f0f, %rdx +; movq %rax, %rcx +; andq %rdx, %rcx +; shrq $0x4, %rax +; andq %rdx, %rax +; shlq $0x4, %rcx +; orq %rax, %rcx +; movabsq $0xff00ff00ff00ff, %rdx +; movq %rcx, %rax +; andq %rdx, %rax +; shrq $0x8, %rcx +; andq %rdx, %rcx +; shlq $0x8, %rax +; orq %rcx, %rax +; movabsq $0xffff0000ffff, %rdx +; movq %rax, %rcx +; andq %rdx, %rcx +; shrq $0x10, %rax +; andq %rdx, %rax +; shlq $0x10, %rcx +; orq %rax, %rcx +; movl $0xffffffff, %edx +; movq %rcx, %rax +; andq %rdx, %rax +; shrq $0x20, %rcx +; shlq $0x20, %rax +; orq %rcx, %rax +; movabsq $0x5555555555555555, %r10 +; movq %rdi, %rdx +; andq %r10, %rdx +; shrq $1, %rdi +; andq %r10, %rdi +; shlq $1, %rdx +; orq %rdi, %rdx +; movabsq $0x3333333333333333, %rsi +; movq %rdx, %rcx +; andq %rsi, %rcx +; shrq $0x2, %rdx +; andq %rsi, %rdx +; shlq $0x2, %rcx +; orq %rdx, %rcx +; movabsq $0xf0f0f0f0f0f0f0f, %rsi +; movq %rcx, %rdx +; andq %rsi, %rdx +; shrq $0x4, %rcx +; andq %rsi, %rcx +; shlq $0x4, %rdx +; orq %rcx, %rdx +; movabsq $0xff00ff00ff00ff, %rsi +; movq %rdx, %rcx +; andq %rsi, %rcx +; shrq $0x8, %rdx +; andq %rsi, %rdx ; shlq $0x8, %rcx -; orq %r11, %rcx +; orq %rdx, %rcx ; movabsq $0xffff0000ffff, %rdx -; movq %rcx, %r8 -; andq %rdx, %r8 +; movq %rcx, %rsi +; andq %rdx, %rsi ; shrq $0x10, %rcx ; andq %rdx, %rcx -; shlq $0x10, %r8 -; orq %rcx, %r8 -; movl $0xffffffff, %r10d -; movq %r8, %rdx -; andq %r10, %rdx -; shrq $0x20, %r8 +; shlq $0x10, %rsi +; orq %rcx, %rsi +; movl $0xffffffff, %edi +; movq %rsi, %rdx +; andq %rdi, %rdx +; shrq $0x20, %rsi ; shlq $0x20, %rdx -; orq %r8, %rdx +; orq %rsi, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -952,88 +948,88 @@ block0(v0: i128): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $0x5555555555555555, %rcx -; movq %rsi, %rdx -; andq %rcx, %rdx +; movabsq $0x5555555555555555, %r8 +; movq %rsi, %rcx +; andq %r8, %rcx ; shrq $1, %rsi -; andq %rcx, %rsi -; shlq $1, %rdx -; orq %rsi, %rdx -; movabsq $0x3333333333333333, %r9 -; movq %rdx, %r10 -; andq %r9, %r10 -; shrq $2, %rdx -; andq %r9, %rdx -; shlq $2, %r10 -; orq %rdx, %r10 -; movabsq $0xf0f0f0f0f0f0f0f, %rsi -; movq %r10, %rax -; andq %rsi, %rax -; shrq $4, %r10 -; andq %rsi, %r10 -; shlq $4, %rax -; orq %r10, %rax -; movabsq $0xff00ff00ff00ff, %rcx -; movq %rax, %rdx -; andq %rcx, %rdx -; shrq $8, %rax -; andq %rcx, %rax -; shlq $8, %rdx -; orq %rax, %rdx -; movabsq $0xffff0000ffff, %r10 -; movq %rdx, %r9 -; andq %r10, %r9 -; shrq $0x10, %rdx -; andq %r10, %rdx -; shlq $0x10, %r9 -; orq %rdx, %r9 -; movl $0xffffffff, %esi -; movq %r9, %rax -; andq %rsi, %rax -; shrq $0x20, %r9 -; shlq $0x20, %rax -; orq %r9, %rax -; movabsq $0x5555555555555555, %rdx -; movq %rdi, %rcx -; andq %rdx, %rcx -; shrq $1, %rdi -; andq %rdx, %rdi +; andq %r8, %rsi ; shlq $1, %rcx -; orq %rdi, %rcx +; orq %rsi, %rcx ; movabsq $0x3333333333333333, %rdx -; movq %rcx, %r8 -; andq %rdx, %r8 +; movq %rcx, %rax +; andq %rdx, %rax ; shrq $2, %rcx ; 
andq %rdx, %rcx -; shlq $2, %r8 -; orq %rcx, %r8 -; movabsq $0xf0f0f0f0f0f0f0f, %r10 -; movq %r8, %r11 -; andq %r10, %r11 -; shrq $4, %r8 -; andq %r10, %r8 -; shlq $4, %r11 -; orq %r8, %r11 -; movabsq $0xff00ff00ff00ff, %rdi -; movq %r11, %rcx -; andq %rdi, %rcx -; shrq $8, %r11 -; andq %rdi, %r11 +; shlq $2, %rax +; orq %rcx, %rax +; movabsq $0xf0f0f0f0f0f0f0f, %rdx +; movq %rax, %rcx +; andq %rdx, %rcx +; shrq $4, %rax +; andq %rdx, %rax +; shlq $4, %rcx +; orq %rax, %rcx +; movabsq $0xff00ff00ff00ff, %rdx +; movq %rcx, %rax +; andq %rdx, %rax +; shrq $8, %rcx +; andq %rdx, %rcx +; shlq $8, %rax +; orq %rcx, %rax +; movabsq $0xffff0000ffff, %rdx +; movq %rax, %rcx +; andq %rdx, %rcx +; shrq $0x10, %rax +; andq %rdx, %rax +; shlq $0x10, %rcx +; orq %rax, %rcx +; movl $0xffffffff, %edx +; movq %rcx, %rax +; andq %rdx, %rax +; shrq $0x20, %rcx +; shlq $0x20, %rax +; orq %rcx, %rax +; movabsq $0x5555555555555555, %r10 +; movq %rdi, %rdx +; andq %r10, %rdx +; shrq $1, %rdi +; andq %r10, %rdi +; shlq $1, %rdx +; orq %rdi, %rdx +; movabsq $0x3333333333333333, %rsi +; movq %rdx, %rcx +; andq %rsi, %rcx +; shrq $2, %rdx +; andq %rsi, %rdx +; shlq $2, %rcx +; orq %rdx, %rcx +; movabsq $0xf0f0f0f0f0f0f0f, %rsi +; movq %rcx, %rdx +; andq %rsi, %rdx +; shrq $4, %rcx +; andq %rsi, %rcx +; shlq $4, %rdx +; orq %rcx, %rdx +; movabsq $0xff00ff00ff00ff, %rsi +; movq %rdx, %rcx +; andq %rsi, %rcx +; shrq $8, %rdx +; andq %rsi, %rdx ; shlq $8, %rcx -; orq %r11, %rcx +; orq %rdx, %rcx ; movabsq $0xffff0000ffff, %rdx -; movq %rcx, %r8 -; andq %rdx, %r8 +; movq %rcx, %rsi +; andq %rdx, %rsi ; shrq $0x10, %rcx ; andq %rdx, %rcx -; shlq $0x10, %r8 -; orq %rcx, %r8 -; movl $0xffffffff, %r10d -; movq %r8, %rdx -; andq %r10, %rdx -; shrq $0x20, %r8 +; shlq $0x10, %rsi +; orq %rcx, %rsi +; movl $0xffffffff, %edi +; movq %rsi, %rdx +; andq %rdi, %rdx +; shrq $0x20, %rsi ; shlq $0x20, %rdx -; orq %r8, %rdx +; orq %rsi, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1121,8 +1117,8 @@ block2(v8: i128): ; jnz label2; j label1 ; block1: ; addq $0x2, %rax -; setb %cl -; movzbq %cl, %rdx +; setb %r11b +; movzbq %r11b, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1141,15 +1137,15 @@ block2(v8: i128): ; block1: ; offset 0x4 ; xorq %rax, %rax ; testb %dl, %dl -; jne 0x1f +; jne 0x20 ; block2: ; offset 0xf ; addq $2, %rax -; setb %cl -; movzbq %cl, %rdx +; setb %r11b +; movzbq %r11b, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq -; block3: ; offset 0x1f +; block3: ; offset 0x20 ; addq $1, %rax ; setb %cl ; movzbq %cl, %rdx @@ -1172,21 +1168,20 @@ block0(v0: i128, v1: i128, v2: i64, v3: i128, v4: i128, v5: i128): ; VCode: ; pushq %rbp ; movq %rsp, %rbp -; subq $0x20, %rsp -; movq %r13, (%rsp) -; movq %r14, 8(%rsp) -; movq %r15, 0x10(%rsp) +; subq $0x10, %rsp +; movq %rbx, (%rsp) +; movq %r12, 8(%rsp) ; block0: -; movq %rcx, %r13 -; movq %rdx, %r15 +; movq %rdx, %rbx +; movq %rcx, %r12 ; movq +-0x30(%rbp), %rcx ; movq +-0x28(%rbp), %rax ; movq +-0x20(%rbp), %rdx ; movq +-0x18(%rbp), %r11 ; movq +-0x10(%rbp), %r10 -; addq %r15, %rdi -; movq %r13, %r14 -; adcq %r14, %rsi +; addq %rbx, %rdi +; movq %r12, %rbx +; adcq %rbx, %rsi ; addq %r8, %r9 ; adcq $0x0, %rcx ; addq %r11, %rax @@ -1195,10 +1190,9 @@ block0(v0: i128, v1: i128, v2: i64, v3: i128, v4: i128, v5: i128): ; adcq %rcx, %rsi ; addq %rdi, %rax ; adcq %rsi, %rdx -; movq (%rsp), %r13 -; movq 8(%rsp), %r14 -; movq 0x10(%rsp), %r15 -; addq $0x20, %rsp +; movq (%rsp), %rbx +; movq 8(%rsp), %r12 +; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1207,21 +1201,20 @@ 
block0(v0: i128, v1: i128, v2: i64, v3: i128, v4: i128, v5: i128): ; block0: ; offset 0x0 ; pushq %rbp ; movq %rsp, %rbp -; subq $0x20, %rsp -; movq %r13, (%rsp) -; movq %r14, 8(%rsp) -; movq %r15, 0x10(%rsp) -; block1: ; offset 0x16 -; movq %rcx, %r13 -; movq %rdx, %r15 +; subq $0x10, %rsp +; movq %rbx, (%rsp) +; movq %r12, 8(%rsp) +; block1: ; offset 0x11 +; movq %rdx, %rbx +; movq %rcx, %r12 ; movq 0x10(%rbp), %rcx ; movq 0x18(%rbp), %rax ; movq 0x20(%rbp), %rdx ; movq 0x28(%rbp), %r11 ; movq 0x30(%rbp), %r10 -; addq %r15, %rdi -; movq %r13, %r14 -; adcq %r14, %rsi +; addq %rbx, %rdi +; movq %r12, %rbx +; adcq %rbx, %rsi ; addq %r8, %r9 ; adcq $0, %rcx ; addq %r11, %rax @@ -1230,10 +1223,9 @@ block0(v0: i128, v1: i128, v2: i64, v3: i128, v4: i128, v5: i128): ; adcq %rcx, %rsi ; addq %rdi, %rax ; adcq %rsi, %rdx -; movq (%rsp), %r13 -; movq 8(%rsp), %r14 -; movq 0x10(%rsp), %r15 -; addq $0x20, %rsp +; movq (%rsp), %rbx +; movq 8(%rsp), %r12 +; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1297,8 +1289,8 @@ block0(v0: i128, v1: i128): ; block0: ; movq %rdi, %r13 ; leaq (%rsp), %rdi -; load_ext_name %g+0, %r9 -; call *%r9 +; load_ext_name %g+0, %rax +; call *%rax ; movq %r13, %rdi ; movq %r12, (%rdi) ; movq 0x10(%rsp), %r12 @@ -1318,8 +1310,8 @@ block0(v0: i128, v1: i128): ; block1: ; offset 0x12 ; movq %rdi, %r13 ; leaq (%rsp), %rdi -; movabsq $0, %r9 ; reloc_external Abs8 %g 0 -; callq *%r9 +; movabsq $0, %rax ; reloc_external Abs8 %g 0 +; callq *%rax ; movq (%rsp), %r12 ; movq %r13, %rdi ; movq %r12, (%rdi) @@ -1340,20 +1332,19 @@ block0(v0: i128): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %r8 -; movq $0xffffffffffffffff, %rcx -; bsrq %rsi, %r9 -; cmoveq %rcx, %r9 -; movl $0x3f, %edi -; subq %r9, %rdi -; movq $0xffffffffffffffff, %rdx -; bsrq %r8, %r10 -; cmoveq %rdx, %r10 +; movq $0xffffffffffffffff, %r8 +; bsrq %rsi, %rsi +; cmoveq %r8, %rsi +; movl $0x3f, %r10d +; subq %rsi, %r10 +; movq $0xffffffffffffffff, %rax +; bsrq %rdi, %rcx +; cmoveq %rax, %rcx ; movl $0x3f, %eax -; subq %r10, %rax +; subq %rcx, %rax ; addq $0x40, %rax -; cmpq $0x40, %rdi -; cmovneq %rdi, %rax +; cmpq $0x40, %r10 +; cmovneq %r10, %rax ; uninit %rdx ; xorq %rdx, %rdx ; movq %rbp, %rsp @@ -1365,20 +1356,19 @@ block0(v0: i128): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %r8 -; movq $18446744073709551615, %rcx -; bsrq %rsi, %r9 -; cmoveq %rcx, %r9 -; movl $0x3f, %edi -; subq %r9, %rdi -; movq $18446744073709551615, %rdx -; bsrq %r8, %r10 -; cmoveq %rdx, %r10 +; movq $18446744073709551615, %r8 +; bsrq %rsi, %rsi +; cmoveq %r8, %rsi +; movl $0x3f, %r10d +; subq %rsi, %r10 +; movq $18446744073709551615, %rax +; bsrq %rdi, %rcx +; cmoveq %rax, %rcx ; movl $0x3f, %eax -; subq %r10, %rax +; subq %rcx, %rax ; addq $0x40, %rax -; cmpq $0x40, %rdi -; cmovneq %rdi, %rax +; cmpq $0x40, %r10 +; cmovneq %r10, %rax ; xorq %rdx, %rdx ; movq %rbp, %rsp ; popq %rbp @@ -1394,15 +1384,15 @@ block0(v0: i128): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movl $0x40, %ecx +; movl $0x40, %r8d ; bsfq %rdi, %rax -; cmoveq %rcx, %rax -; movl $0x40, %edi -; bsfq %rsi, %rdx -; cmoveq %rdi, %rdx -; addq $0x40, %rdx +; cmoveq %r8, %rax +; movl $0x40, %r9d +; bsfq %rsi, %rcx +; cmoveq %r9, %rcx +; addq $0x40, %rcx ; cmpq $0x40, %rax -; cmoveq %rdx, %rax +; cmoveq %rcx, %rax ; uninit %rdx ; xorq %rdx, %rdx ; movq %rbp, %rsp @@ -1414,15 +1404,15 @@ block0(v0: i128): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl $0x40, %ecx +; movl $0x40, %r8d ; bsfq %rdi, %rax -; cmoveq %rcx, %rax 
-; movl $0x40, %edi -; bsfq %rsi, %rdx -; cmoveq %rdi, %rdx -; addq $0x40, %rdx +; cmoveq %r8, %rax +; movl $0x40, %r9d +; bsfq %rsi, %rcx +; cmoveq %r9, %rcx +; addq $0x40, %rcx ; cmpq $0x40, %rax -; cmoveq %rdx, %rax +; cmoveq %rcx, %rax ; xorq %rdx, %rdx ; movq %rbp, %rsp ; popq %rbp @@ -1469,14 +1459,14 @@ block0(v0: i128, v1: i128): ; pushq %rbp ; movq %rsp, %rbp ; block0: +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r11 ; movq %rdi, %rdx ; shlq %cl, %rdx ; shlq %cl, %rsi -; movq %rcx, %r11 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r11, %r8 +; movq %rax, %r8 ; subq %r8, %rcx ; shrq %cl, %rdi ; uninit %rax @@ -1496,14 +1486,14 @@ block0(v0: i128, v1: i128): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r11 ; movq %rdi, %rdx ; shlq %cl, %rdx ; shlq %cl, %rsi -; movq %rcx, %r11 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r11, %r8 +; movq %rax, %r8 ; subq %r8, %rcx ; shrq %cl, %rdi ; xorq %rax, %rax @@ -1527,14 +1517,13 @@ block0(v0: i128, v1: i128): ; pushq %rbp ; movq %rsp, %rbp ; block0: +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r11 ; shrq %cl, %rdi -; movq %rsi, %r10 -; shrq %cl, %r10 -; movq %rcx, %r11 +; movq %rsi, %r8 +; shrq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r11, %rax ; subq %rax, %rcx ; shlq %cl, %rsi ; uninit %rdx @@ -1543,9 +1532,9 @@ block0(v0: i128, v1: i128): ; cmoveq %rdx, %rsi ; orq %rdi, %rsi ; testq $0x40, %rax -; movq %r10, %rax +; movq %r8, %rax ; cmoveq %rsi, %rax -; cmoveq %r10, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1555,14 +1544,13 @@ block0(v0: i128, v1: i128): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r11 ; shrq %cl, %rdi -; movq %rsi, %r10 -; shrq %cl, %r10 -; movq %rcx, %r11 +; movq %rsi, %r8 +; shrq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r11, %rax ; subq %rax, %rcx ; shlq %cl, %rsi ; xorq %rdx, %rdx @@ -1570,9 +1558,9 @@ block0(v0: i128, v1: i128): ; cmoveq %rdx, %rsi ; orq %rdi, %rsi ; testq $0x40, %rax -; movq %r10, %rax +; movq %r8, %rax ; cmoveq %rsi, %rax -; cmoveq %r10, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1590,25 +1578,25 @@ block0(v0: i128, v1: i128): ; movq %rdx, %rax ; movq %rdx, %rcx ; shrq %cl, %rdi -; movq %rsi, %r10 -; sarq %cl, %r10 +; movq %rsi, %r8 +; sarq %cl, %r8 ; movq %rcx, %rax ; movl $0x40, %ecx -; movq %rax, %rdx -; subq %rdx, %rcx -; movq %rsi, %r11 -; shlq %cl, %r11 -; uninit %rax -; xorq %rax, %rax -; testq $0x7f, %rdx -; cmoveq %rax, %r11 -; orq %r11, %rdi +; movq %rax, %r9 +; subq %r9, %rcx +; movq %rsi, %rax +; shlq %cl, %rax +; uninit %rdx +; xorq %rdx, %rdx +; testq $0x7f, %r9 +; cmoveq %rdx, %rax +; orq %rax, %rdi ; sarq $0x3f, %rsi -; testq $0x40, %rdx -; movq %r10, %rax +; testq $0x40, %r9 +; movq %r8, %rax ; cmoveq %rdi, %rax ; movq %rsi, %rdx -; cmoveq %r10, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1621,24 +1609,24 @@ block0(v0: i128, v1: i128): ; movq %rdx, %rax ; movq %rdx, %rcx ; shrq %cl, %rdi -; movq %rsi, %r10 -; sarq %cl, %r10 +; movq %rsi, %r8 +; sarq %cl, %r8 ; movq %rcx, %rax ; movl $0x40, %ecx -; movq %rax, %rdx -; subq %rdx, %rcx -; movq %rsi, %r11 -; shlq %cl, %r11 -; xorq %rax, %rax -; testq $0x7f, %rdx -; cmoveq %rax, %r11 -; orq %r11, %rdi +; movq %rax, %r9 +; subq %r9, %rcx +; movq %rsi, %rax +; shlq %cl, %rax +; xorq %rdx, %rdx +; testq $0x7f, %r9 +; cmoveq %rdx, %rax +; orq %rax, %rdi ; sarq $0x3f, %rsi -; testq $0x40, %rdx -; movq %r10, 
%rax +; testq $0x40, %r9 +; movq %r8, %rax ; cmoveq %rdi, %rax ; movq %rsi, %rdx -; cmoveq %r10, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1654,46 +1642,48 @@ block0(v0: i128, v1: i128): ; movq %rsp, %rbp ; block0: ; movq %rdx, %rcx -; movq %rdx, %r9 +; movq %rdx, %r10 ; movq %rdi, %rdx ; shlq %cl, %rdx -; movq %rcx, %r9 -; movq %rsi, %r10 -; shlq %cl, %r10 +; movq %rcx, %r10 +; movq %rsi, %r9 +; shlq %cl, %r9 ; movl $0x40, %ecx -; movq %r9, %r8 -; subq %r8, %rcx -; movq %rdi, %r11 -; shrq %cl, %r11 +; movq %r10, %rax +; subq %rax, %rcx +; movq %rdi, %r8 +; shrq %cl, %r8 ; uninit %rax ; xorq %rax, %rax -; movq %r9, %rcx +; movq %r10, %rcx ; testq $0x7f, %rcx -; cmoveq %rax, %r11 -; orq %r10, %r11 +; cmoveq %rax, %r8 +; orq %r9, %r8 ; testq $0x40, %rcx ; cmoveq %rdx, %rax -; cmoveq %r11, %rdx +; cmoveq %r8, %rdx ; movl $0x80, %ecx +; movq %r10, %r8 ; subq %r8, %rcx ; shrq %cl, %rdi -; movq %rsi, %r11 -; shrq %cl, %r11 -; movq %rcx, %r10 +; movq %rsi, %r8 +; shrq %cl, %r8 +; movq %rcx, %r9 ; movl $0x40, %ecx +; movq %r9, %r10 ; subq %r10, %rcx ; shlq %cl, %rsi -; uninit %r8 -; xorq %r8, %r8 +; uninit %r9 +; xorq %r9, %r9 ; testq $0x7f, %r10 -; cmoveq %r8, %rsi +; cmoveq %r9, %rsi ; orq %rdi, %rsi ; testq $0x40, %r10 -; movq %r11, %rdi -; cmoveq %rsi, %rdi -; cmoveq %r11, %r8 -; orq %rdi, %rax -; orq %r8, %rdx +; movq %r8, %rcx +; cmoveq %rsi, %rcx +; cmoveq %r8, %r9 +; orq %rcx, %rax +; orq %r9, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1704,44 +1694,46 @@ block0(v0: i128, v1: i128): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movq %rdx, %rcx -; movq %rdx, %r9 +; movq %rdx, %r10 ; movq %rdi, %rdx ; shlq %cl, %rdx -; movq %rcx, %r9 -; movq %rsi, %r10 -; shlq %cl, %r10 +; movq %rcx, %r10 +; movq %rsi, %r9 +; shlq %cl, %r9 ; movl $0x40, %ecx -; movq %r9, %r8 -; subq %r8, %rcx -; movq %rdi, %r11 -; shrq %cl, %r11 +; movq %r10, %rax +; subq %rax, %rcx +; movq %rdi, %r8 +; shrq %cl, %r8 ; xorq %rax, %rax -; movq %r9, %rcx +; movq %r10, %rcx ; testq $0x7f, %rcx -; cmoveq %rax, %r11 -; orq %r10, %r11 +; cmoveq %rax, %r8 +; orq %r9, %r8 ; testq $0x40, %rcx ; cmoveq %rdx, %rax -; cmoveq %r11, %rdx +; cmoveq %r8, %rdx ; movl $0x80, %ecx +; movq %r10, %r8 ; subq %r8, %rcx ; shrq %cl, %rdi -; movq %rsi, %r11 -; shrq %cl, %r11 -; movq %rcx, %r10 +; movq %rsi, %r8 +; shrq %cl, %r8 +; movq %rcx, %r9 ; movl $0x40, %ecx +; movq %r9, %r10 ; subq %r10, %rcx ; shlq %cl, %rsi -; xorq %r8, %r8 +; xorq %r9, %r9 ; testq $0x7f, %r10 -; cmoveq %r8, %rsi +; cmoveq %r9, %rsi ; orq %rdi, %rsi ; testq $0x40, %r10 -; movq %r11, %rdi -; cmoveq %rsi, %rdi -; cmoveq %r11, %r8 -; orq %rdi, %rax -; orq %r8, %rdx +; movq %r8, %rcx +; cmoveq %rsi, %rcx +; cmoveq %r8, %r9 +; orq %rcx, %rax +; orq %r9, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1757,47 +1749,48 @@ block0(v0: i128, v1: i128): ; movq %rsp, %rbp ; block0: ; movq %rdx, %rcx -; movq %rdx, %r9 -; movq %rdi, %r8 +; movq %rdx, %r11 +; movq %rdi, %r9 +; shrq %cl, %r9 +; movq %rcx, %r11 +; movq %rsi, %r8 ; shrq %cl, %r8 -; movq %rcx, %r9 -; movq %rsi, %r10 -; shrq %cl, %r10 ; movl $0x40, %ecx -; movq %r9, %rdx -; subq %rdx, %rcx -; movq %rsi, %r11 -; shlq %cl, %r11 +; movq %r11, %rax +; subq %rax, %rcx +; movq %rsi, %r10 +; shlq %cl, %r10 ; uninit %rdx ; xorq %rdx, %rdx -; movq %r9, %rcx +; movq %r11, %rcx ; testq $0x7f, %rcx -; cmoveq %rdx, %r11 -; orq %r8, %r11 +; cmoveq %rdx, %r10 +; orq %r9, %r10 ; testq $0x40, %rcx -; movq %r10, %rax -; cmoveq %r11, %rax -; cmoveq %r10, %rdx +; movq %r8, %rax +; cmoveq %r10, %rax +; cmoveq %r8, %rdx ; movl 
$0x80, %ecx -; movq %r9, %r8 +; movq %r11, %r8 ; subq %r8, %rcx -; movq %rdi, %r10 -; shlq %cl, %r10 +; movq %rdi, %r8 +; shlq %cl, %r8 ; shlq %cl, %rsi -; movq %rcx, %r11 +; movq %rcx, %r9 ; movl $0x40, %ecx -; subq %r11, %rcx +; movq %r9, %r10 +; subq %r10, %rcx ; shrq %cl, %rdi -; uninit %r8 -; xorq %r8, %r8 -; testq $0x7f, %r11 -; cmoveq %r8, %rdi +; uninit %r9 +; xorq %r9, %r9 +; testq $0x7f, %r10 +; cmoveq %r9, %rdi ; orq %rsi, %rdi -; testq $0x40, %r11 -; cmoveq %r10, %r8 -; cmoveq %rdi, %r10 -; orq %r8, %rax -; orq %r10, %rdx +; testq $0x40, %r10 +; cmoveq %r8, %r9 +; cmoveq %rdi, %r8 +; orq %r9, %rax +; orq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1808,45 +1801,46 @@ block0(v0: i128, v1: i128): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movq %rdx, %rcx -; movq %rdx, %r9 -; movq %rdi, %r8 +; movq %rdx, %r11 +; movq %rdi, %r9 +; shrq %cl, %r9 +; movq %rcx, %r11 +; movq %rsi, %r8 ; shrq %cl, %r8 -; movq %rcx, %r9 -; movq %rsi, %r10 -; shrq %cl, %r10 ; movl $0x40, %ecx -; movq %r9, %rdx -; subq %rdx, %rcx -; movq %rsi, %r11 -; shlq %cl, %r11 +; movq %r11, %rax +; subq %rax, %rcx +; movq %rsi, %r10 +; shlq %cl, %r10 ; xorq %rdx, %rdx -; movq %r9, %rcx +; movq %r11, %rcx ; testq $0x7f, %rcx -; cmoveq %rdx, %r11 -; orq %r8, %r11 +; cmoveq %rdx, %r10 +; orq %r9, %r10 ; testq $0x40, %rcx -; movq %r10, %rax -; cmoveq %r11, %rax -; cmoveq %r10, %rdx +; movq %r8, %rax +; cmoveq %r10, %rax +; cmoveq %r8, %rdx ; movl $0x80, %ecx -; movq %r9, %r8 +; movq %r11, %r8 ; subq %r8, %rcx -; movq %rdi, %r10 -; shlq %cl, %r10 +; movq %rdi, %r8 +; shlq %cl, %r8 ; shlq %cl, %rsi -; movq %rcx, %r11 +; movq %rcx, %r9 ; movl $0x40, %ecx -; subq %r11, %rcx +; movq %r9, %r10 +; subq %r10, %rcx ; shrq %cl, %rdi -; xorq %r8, %r8 -; testq $0x7f, %r11 -; cmoveq %r8, %rdi +; xorq %r9, %r9 +; testq $0x7f, %r10 +; cmoveq %r9, %rdi ; orq %rsi, %rdi -; testq $0x40, %r11 -; cmoveq %r10, %r8 -; cmoveq %rdi, %r10 -; orq %r8, %rax -; orq %r10, %rdx +; testq $0x40, %r10 +; cmoveq %r8, %r9 +; cmoveq %rdi, %r8 +; orq %r9, %rax +; orq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -2063,8 +2057,8 @@ block0(v0: i64, v1: i64): ; block0: ; movq %rsi, %rax ; addq (%rdi), %rax -; setb %r8b -; movzbq %r8b, %rdx +; setb %sil +; movzbq %sil, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -2076,8 +2070,8 @@ block0(v0: i64, v1: i64): ; block1: ; offset 0x4 ; movq %rsi, %rax ; addq (%rdi), %rax ; trap: heap_oob -; setb %r8b -; movzbq %r8b, %rdx +; setb %sil +; movzbq %sil, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -2096,11 +2090,11 @@ block0(v0: i64, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq (%rdi), %r10 +; movq (%rdi), %r8 ; uninit %rdx ; xorq %rdx, %rdx ; movq %rsi, %rax -; addq %r10, %rax +; addq %r8, %rax ; adcq 8(%rdi), %rdx ; movq %rbp, %rsp ; popq %rbp @@ -2111,10 +2105,10 @@ block0(v0: i64, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq (%rdi), %r10 ; trap: heap_oob +; movq (%rdi), %r8 ; trap: heap_oob ; xorq %rdx, %rdx ; movq %rsi, %rax -; addq %r10, %rax +; addq %r8, %rax ; adcq 8(%rdi), %rdx ; trap: heap_oob ; movq %rbp, %rsp ; popq %rbp @@ -2134,11 +2128,11 @@ block0(v0: i64, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq (%rdi), %r10 +; movq (%rdi), %r8 ; uninit %rdx ; xorq %rdx, %rdx ; movq %rsi, %rax -; subq %r10, %rax +; subq %r8, %rax ; sbbq 8(%rdi), %rdx ; movq %rbp, %rsp ; popq %rbp @@ -2149,10 +2143,10 @@ block0(v0: i64, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq (%rdi), %r10 ; trap: heap_oob +; movq (%rdi), %r8 ; trap: 
heap_oob ; xorq %rdx, %rdx ; movq %rsi, %rax -; subq %r10, %rax +; subq %r8, %rax ; sbbq 8(%rdi), %rdx ; trap: heap_oob ; movq %rbp, %rsp ; popq %rbp @@ -2212,8 +2206,8 @@ block0(v0: i64, v1: i64): ; block0: ; movq %rdi, %rax ; addq %rsi, %rax -; setb %r8b -; movzbq %r8b, %rdx +; setb %sil +; movzbq %sil, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -2225,8 +2219,8 @@ block0(v0: i64, v1: i64): ; block1: ; offset 0x4 ; movq %rdi, %rax ; addq %rsi, %rax -; setb %r8b -; movzbq %r8b, %rdx +; setb %sil +; movzbq %sil, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/icmp-recursion-opt.clif b/cranelift/filetests/filetests/isa/x64/icmp-recursion-opt.clif index 6d4b2f62eab3..59c4e2ff17a1 100644 --- a/cranelift/filetests/filetests/isa/x64/icmp-recursion-opt.clif +++ b/cranelift/filetests/filetests/isa/x64/icmp-recursion-opt.clif @@ -12012,8 +12012,8 @@ block0(v0: i32): ; movq %rsp, %rbp ; block0: ; testl %edi, %edi -; setne %dl -; movzbl %dl, %eax +; setne %sil +; movzbl %sil, %eax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -12024,8 +12024,8 @@ block0(v0: i32): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; testl %edi, %edi -; setne %dl -; movzbl %dl, %eax +; setne %sil +; movzbl %sil, %eax ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/icmp-recursion.clif b/cranelift/filetests/filetests/isa/x64/icmp-recursion.clif index aa816ea840b4..25a154c81e9b 100644 --- a/cranelift/filetests/filetests/isa/x64/icmp-recursion.clif +++ b/cranelift/filetests/filetests/isa/x64/icmp-recursion.clif @@ -12012,483 +12012,99 @@ block0(v0: i32): ; movq %rsp, %rbp ; block0: ; testl %edi, %edi -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl 
%ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, 
%r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; 
movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -12498,33 +12114,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -12537,444 +12141,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; 
sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, 
%r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; 
movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -12984,33 +12303,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -13023,417 +12330,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; 
sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, 
%r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -13443,33 +12492,21 @@ block0(v0: 
i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -13482,417 +12519,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; 
sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; 
testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -13902,33 +12681,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl 
%esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -13941,444 +12708,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; 
[... flattened diff hunks: regenerated x64 disassembly expectations in the Cranelift filetests. In each hunk, long runs of `sete` / `movzbl` / `testl` triples that previously cycled through %cl, %dl, %r8b-%r11b, %sil and %dil are replaced by repeated %al/%eax triples (the regalloc2 0.14.0 allocator reuses %eax and %esi instead of spreading across registers). Hunks covered in this span:
@@ -14388,33 +12870,21 @@ block0(v0: i32):
@@ -14427,417 +12897,159 @@ block0(v0: i32):
@@ -14847,33 +13059,21 @@ block0(v0: i32):
@@ -14886,417 +13086,159 @@ block0(v0: i32):
@@ -15306,33 +13248,21 @@ block0(v0: i32):
@@ -15345,444 +13275,159 @@ block0(v0: i32):
@@ -15792,33 +13437,21 @@ block0(v0: i32):
@@ -15831,417 +13464,159 @@ block0(v0: i32):
@@ -16251,33 +13626,21 @@ block0(v0: i32):
@@ -16290,444 +13653,159 @@ block0(v0: i32):
@@ -16737,33 +13815,21 @@ block0(v0: i32):
@@ -16776,417 +13842,159 @@ block0(v0: i32):
@@ -17196,33 +14004,21 @@ block0(v0: i32):
@@ -17235,417 +14031,159 @@ block0(v0: i32): ...]
-; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete 
%sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax 
+; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -17655,33 +14193,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -17694,444 +14220,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; 
testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b 
-; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, 
%eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -18141,33 +14382,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -18180,417 +14409,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; 
testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b 
-; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, 
%eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -18600,33 +14571,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -18639,444 +14598,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; 
testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; 
movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl 
%eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -19086,33 +14760,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -19125,417 +14787,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx 
-; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; 
movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; 
movzbl %al, %eax ; testl %eax, %eax @@ -19545,33 +14949,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -19584,417 +14976,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; 
testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl 
%r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -20004,33 +15138,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, 
%esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -20043,444 +15165,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; 
testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl 
%r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -20490,33 +15327,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, 
%esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -20529,417 +15354,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; 
testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl 
%r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -20949,33 +15516,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -20988,417 +15543,159 @@ block0(v0: i32): ; 
sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; 
testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, 
%edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -21408,33 +15705,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -21447,444 +15732,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b 
-; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; 
testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl 
%cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -21894,33 +15894,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -21933,417 +15921,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete 
%r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi 
-; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; 
movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -22353,33 +16083,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -22392,444 +16110,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete 
%r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; 
testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; 
movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -22839,33 +16272,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -22878,417 +16299,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete 
%r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; 
testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; 
testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -23298,33 +16461,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -23337,417 +16488,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete 
%r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl 
%r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; 
sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -23757,33 +16650,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -23796,444 +16677,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl 
-; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, 
%r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; 
sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -24243,33 +16839,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -24282,417 +16866,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl 
-; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, 
%r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl 
%al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -24702,33 +17028,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -24741,417 +17055,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl 
-; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d 
-; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl 
%eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -25161,33 +17217,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -25200,444 +17244,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete 
%al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx 
-; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl 
%eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -25647,33 +17406,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -25686,417 +17433,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete 
%al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx 
-; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -26106,33 +17595,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete 
%sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -26145,444 +17622,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil 
-; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; 
sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -26592,33 +17784,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete 
%sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -26631,417 +17811,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil 
-; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; 
sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -27051,33 +17973,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl 
%r8b, %r8d ; testl %r8d, %r8d @@ -27090,417 +18000,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil 
-; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax 
-; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -27510,33 +18162,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -27549,444 +18189,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl 
%r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; 
[Elided: extraction-garbled span of regenerated precise-output expectations for block0(v0: i32). Across many near-identical hunks, the old expected x64 assembly cycled the repeated `sete`/`movzbl`/`testl` chain through %cl, %dl, %sil, %dil and %r8b-%r11b, while the updated expectations reuse %al (and %sil) throughout, shrinking the expected output considerably (e.g. one hunk goes from 417 to 159 lines). A representative excerpt of the change:

-; sete %cl
-; movzbl %cl, %ecx
-; testl %ecx, %ecx
-; sete %dl
-; movzbl %dl, %edx
-; testl %edx, %edx
+; sete %al
+; movzbl %al, %eax
+; testl %eax, %eax
+; sete %al
+; movzbl %al, %eax
+; testl %eax, %eax

The same pattern repeats in both the VCode listing and the `Disassembled:` listing of this test.]
movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl 
%r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -31074,33 +19863,21 @@ block0(v0: i32): ; sete 
%dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -31113,444 +19890,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; 
movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, 
%r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -31560,33 +20052,21 @@ block0(v0: i32): ; sete 
%dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -31599,417 +20079,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; 
movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, 
%r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -32019,33 +20241,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete 
%sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -32058,444 +20268,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; 
movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d 
-; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -32505,33 +20430,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; 
sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -32544,417 +20457,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; 
movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d 
-; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -32964,33 +20619,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -33003,417 +20646,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, 
%esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; 
movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; 
sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -33423,33 +20808,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -33462,444 +20835,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d 
-; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; 
movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; 
sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -33909,33 +20997,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -33948,174 +21024,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d 
-; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax 
+; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -34125,33 +21186,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -34164,12 +21213,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al 
+; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -34179,33 +21375,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -34218,22 +21402,7933 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al 
+; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl +; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi +; sete %r8b +; movzbl %r8b, %r8d +; testl %r8d, %r8d +; sete %r9b +; movzbl %r9b, %r9d +; testl %r9d, %r9d +; sete %r10b +; movzbl %r10b, %r10d +; testl %r10d, %r10d +; sete %r11b +; movzbl %r11b, %r11d +; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; 
sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl +; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi +; sete %r8b +; movzbl %r8b, %r8d +; testl %r8d, %r8d +; sete %r9b +; movzbl %r9b, %r9d +; testl %r9d, %r9d +; sete %r10b +; movzbl %r10b, %r10d +; testl %r10d, %r10d +; sete %r11b +; movzbl %r11b, %r11d +; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl 
%eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl +; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi +; sete %r8b +; movzbl %r8b, %r8d +; testl %r8d, %r8d +; sete %r9b +; movzbl %r9b, %r9d +; testl %r9d, %r9d +; sete %r10b +; movzbl %r10b, %r10d +; testl %r10d, %r10d +; sete %r11b +; movzbl %r11b, %r11d +; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax 
+; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl +; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi +; sete %r8b +; movzbl %r8b, %r8d +; testl %r8d, %r8d +; sete %r9b +; movzbl %r9b, %r9d +; testl %r9d, %r9d +; sete %r10b +; movzbl %r10b, %r10d +; testl %r10d, %r10d +; sete %r11b +; movzbl %r11b, %r11d +; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl 
%al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl +; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi +; sete %r8b +; movzbl %r8b, %r8d +; testl %r8d, %r8d +; sete %r9b +; movzbl %r9b, %r9d +; testl %r9d, %r9d +; sete %r10b +; movzbl %r10b, %r10d +; testl %r10d, %r10d +; sete %r11b +; movzbl %r11b, %r11d +; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al 
+; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl +; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi +; sete %r8b +; movzbl %r8b, %r8d +; testl %r8d, %r8d +; sete %r9b +; movzbl %r9b, %r9d +; testl %r9d, %r9d +; sete %r10b +; movzbl %r10b, %r10d +; testl %r10d, %r10d +; sete %r11b +; movzbl %r11b, %r11d +; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; 
sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl +; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi +; sete %r8b +; movzbl %r8b, %r8d +; testl %r8d, %r8d +; sete %r9b +; movzbl %r9b, %r9d +; testl %r9d, %r9d +; sete %r10b +; movzbl %r10b, %r10d +; testl %r10d, %r10d +; sete %r11b +; movzbl %r11b, %r11d +; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl 
%eax, %eax
+; sete %al
+; movzbl %al, %eax
+; testl %eax, %eax
+; sete %cl
+; movzbl %cl, %ecx
+; testl %ecx, %ecx
+; sete %dl
+; movzbl %dl, %edx
+; testl %edx, %edx
+; sete %sil
+; movzbl %sil, %esi
+; testl %esi, %esi
+; sete %sil
+; movzbl %sil, %esi
+; testl %esi, %esi
+; sete %sil
+; movzbl %sil, %esi
+; testl %esi, %esi
+; sete %sil
+; movzbl %sil, %esi
+; testl %esi, %esi
+; sete %dil
+; movzbl %dil, %edi
+; testl %edi, %edi
+; sete %r8b
+; movzbl %r8b, %r8d
+; testl %r8d, %r8d
+; sete %r9b
+; movzbl %r9b, %r9d
+; testl %r9d, %r9d
+; sete %r10b
+; movzbl %r10b, %r10d
+; testl %r10d, %r10d
+; sete %r11b
+; movzbl %r11b, %r11d
+; testl %r11d, %r11d
[The updated precise-output expectation continues at length with this same pattern: the sete/movzbl/testl triple repeated in long runs on %al, interleaved with the %cl, %dl, %sil (four times), %dil, and %r8b-%r11b cycle shown above.]
+; sete %al
+; movzbl %al, %eax
+; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl +; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi +; sete %r8b +; movzbl %r8b, %r8d +; testl %r8d, %r8d +; sete %r9b +; movzbl %r9b, %r9d +; testl %r9d, %r9d +; sete %r10b +; movzbl %r10b, %r10d +; testl %r10d, %r10d +; sete %r11b +; movzbl %r11b, %r11d +; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl 
%al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl +; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi +; sete %r8b +; movzbl %r8b, %r8d +; testl %r8d, %r8d +; sete %r9b +; movzbl %r9b, %r9d +; testl %r9d, %r9d +; sete %r10b +; movzbl %r10b, %r10d +; testl %r10d, %r10d +; sete %r11b +; movzbl %r11b, %r11d +; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al 
+; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl +; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi +; sete %r8b +; movzbl %r8b, %r8d +; testl %r8d, %r8d +; sete %r9b +; movzbl %r9b, %r9d +; testl %r9d, %r9d +; sete %r10b +; movzbl %r10b, %r10d +; testl %r10d, %r10d +; sete %r11b +; movzbl %r11b, %r11d +; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; 
sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl +; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi +; sete %r8b +; movzbl %r8b, %r8d +; testl %r8d, %r8d +; sete %r9b +; movzbl %r9b, %r9d +; testl %r9d, %r9d +; sete %r10b +; movzbl %r10b, %r10d +; testl %r10d, %r10d +; sete %r11b +; movzbl %r11b, %r11d +; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl 
%eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl +; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi +; sete %r8b +; movzbl %r8b, %r8d +; testl %r8d, %r8d +; sete %r9b +; movzbl %r9b, %r9d +; testl %r9d, %r9d +; sete %r10b +; movzbl %r10b, %r10d +; testl %r10d, %r10d +; sete %r11b +; movzbl %r11b, %r11d +; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax 
+; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl +; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi +; sete %r8b +; movzbl %r8b, %r8d +; testl %r8d, %r8d +; sete %r9b +; movzbl %r9b, %r9d +; testl %r9d, %r9d +; sete %r10b +; movzbl %r10b, %r10d +; testl %r10d, %r10d +; sete %r11b +; movzbl %r11b, %r11d +; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl 
%al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl +; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi +; sete %r8b +; movzbl %r8b, %r8d +; testl %r8d, %r8d +; sete %r9b +; movzbl %r9b, %r9d +; testl %r9d, %r9d +; sete %r10b +; movzbl %r10b, %r10d +; testl %r10d, %r10d +; sete %r11b +; movzbl %r11b, %r11d +; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl 
+; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi +; sete %r8b +; movzbl %r8b, %r8d +; testl %r8d, %r8d +; sete %r9b +; movzbl %r9b, %r9d +; testl %r9d, %r9d +; sete %r10b +; movzbl %r10b, %r10d +; testl %r10d, %r10d +; sete %r11b +; movzbl %r11b, %r11d +; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl +; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, 
%esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi +; sete %r8b +; movzbl %r8b, %r8d +; testl %r8d, %r8d +; sete %r9b +; movzbl %r9b, %r9d +; testl %r9d, %r9d +; sete %r10b +; movzbl %r10b, %r10d +; testl %r10d, %r10d +; sete %r11b +; movzbl %r11b, %r11d +; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl +; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi +; sete %r8b +; movzbl %r8b, %r8d +; testl %r8d, %r8d +; sete %r9b +; movzbl %r9b, %r9d +; testl %r9d, %r9d +; sete %r10b +; movzbl %r10b, %r10d +; testl %r10d, %r10d +; sete %r11b +; movzbl 
%r11b, %r11d +; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl +; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi +; sete %r8b +; movzbl %r8b, %r8d +; testl %r8d, %r8d +; sete %r9b +; movzbl %r9b, %r9d +; testl %r9d, %r9d +; sete %r10b +; movzbl %r10b, %r10d +; testl %r10d, %r10d +; sete %r11b +; movzbl %r11b, %r11d +; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete 
%al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl +; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi +; sete %r8b +; movzbl %r8b, %r8d +; testl %r8d, %r8d +; sete %r9b +; movzbl %r9b, %r9d +; testl %r9d, %r9d +; sete %r10b +; movzbl %r10b, %r10d +; testl %r10d, %r10d +; sete %r11b +; movzbl %r11b, %r11d +; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, 
%eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl +; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi +; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d ; sete %r9b @@ -34245,12 +29340,348 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, 
%eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %cl +; movzbl %cl, %ecx +; testl %ecx, %ecx +; sete %dl +; movzbl %dl, %edx +; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi +; sete %r8b +; movzbl %r8b, %r8d +; testl %r8d, %r8d +; sete %r9b +; movzbl %r9b, %r9d +; testl %r9d, %r9d +; sete %r10b +; movzbl %r10b, %r10d +; testl %r10d, %r10d +; sete %r11b +; movzbl %r11b, %r11d +; testl %r11d, %r11d +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl 
%eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -34260,33 +29691,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -34299,12 +29718,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax 
+; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -34314,6 +29880,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %dil +; movzbl %dil, %edi +; testl %edi, %edi ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -34326,147 +29907,213 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl 
%eax, %eax
[... repeated sete/movzbl/testl expectation updates for block0(v0: i32) (hunks @@ -34476,114 +30123,21 @@ through @@ -36405,12 +32229,159 @@ and the preceding VCode/"Disassembled:" blocks, ending in movq %rbp, %rsp; popq %rbp; retq): "-" lines rotating each triple through %al, %cl, %dl, %r8b-%r11b, %sil and %dil are replaced by "+" lines that reuse %al, with short runs on %sil, %dil and %r8b-%r11b ...]
+; sete
%al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -36420,33 +32391,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -36459,12 +32418,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; 
movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -36474,114 +32580,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete 
%dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -36594,120 +32607,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; 
movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -36717,33 +32769,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -36756,120 +32796,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete 
%sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; 
movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -36879,87 +32958,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -36972,12 +32985,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl 
%eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -36987,33 +33147,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -37026,66 +33174,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; 
movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -37095,114 +33336,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete 
%dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -37215,66 +33363,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; 
sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -37284,33 +33525,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -37323,174 +33552,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl 
%r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; 
movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -37500,33 +33714,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -37539,12 +33741,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, 
%eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -37554,33 +33903,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -37593,93 +33930,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; 
testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -37689,114 +34092,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete 
%cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -37809,12 +34119,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -37824,33 
+34281,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -37863,39 +34308,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; 
sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -37905,114 +34470,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -38025,93 +34497,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, 
%r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -38121,33 +34659,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d 
-; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -38160,147 +34686,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, 
%eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -38310,60 +34848,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -38376,12 +34875,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl 
%eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -38391,33 +35037,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -38430,66 +35064,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; 
sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax 
+; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -38499,114 +35226,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -38619,39 +35253,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl 
%al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -38661,33 +35415,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -38700,12 +35442,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; 
testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -38715,114 +35604,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete 
%al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -38835,120 +35631,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; 
testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -38958,33 +35793,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -38997,120 +35820,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl 
%r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; 
testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -39120,87 +35982,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -39213,12 +36009,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al 
+; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -39228,33 +36171,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -39267,39 +36198,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; 
sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -39309,114 +36360,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; 
movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -39429,93 +36387,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, 
%eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -39525,33 +36549,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -39564,174 +36576,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; 
testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, 
%eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -39741,33 +36738,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -39780,12 +36765,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete 
%al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -39795,33 +36927,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -39834,93 +36954,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; 
movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -39930,114 +37116,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl 
%dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -40050,12 +37143,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -40065,33 +37305,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; 
testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -40104,12 +37332,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -40119,114 +37494,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; 
sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -40239,120 +37521,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete 
%cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -40362,33 +37683,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, 
%eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -40401,147 +37710,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl 
%al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -40551,60 +37872,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -40617,12 +37899,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; 
movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -40632,33 +38061,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -40671,66 +38088,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl 
%r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -40740,114 +38250,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete 
%r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -40860,66 +38277,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax 
+; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -40929,33 +38439,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -40968,174 +38466,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, 
%eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax 
+; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -41145,33 +38628,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -41184,12 +38655,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al 
+; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -41199,33 +38817,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -41238,120 +38844,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; 
testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -41361,87 +39006,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; 
movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -41454,12 +39033,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl 
%eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -41469,33 +39195,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -41508,39 +39222,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; 
testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -41550,114 +39384,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -41670,93 +39411,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, 
%esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; 
sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -41766,33 +39573,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -41805,147 +39600,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl 
%ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -41955,60 +39762,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl 
%r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -42021,12 +39789,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -42036,33 +39951,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl 
%r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -42075,93 +39978,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; 
testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -42171,114 +40140,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -42291,12 +40167,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, 
%esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -42306,33 +40329,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ 
-42345,12 +40356,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -42360,114 +40518,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b 
-; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -42480,120 +40545,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete 
%dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -42603,33 +40707,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -42642,120 +40734,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; 
movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete 
%al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -42765,87 +40896,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -42858,12 +40923,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; 
testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -42873,33 +41085,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -42912,66 +41112,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, 
%r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -42981,114 +41274,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; 
movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -43101,66 +41301,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, 
%eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -43170,33 +41463,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -43209,174 +41490,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl 
%sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al 
+; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -43386,33 +41652,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -43425,12 +41679,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl 
%al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -43440,33 +41841,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -43479,93 +41868,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, 
%eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -43575,114 +42030,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; 
movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -43695,12 +42057,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax 
+; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -43710,33 +42219,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -43749,39 +42246,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl 
%eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -43791,114 +42408,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -43911,93 +42435,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, 
%r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; 
movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -44007,33 +42597,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -44046,147 +42624,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; 
testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -44196,60 +42786,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, 
%ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -44262,12 +42813,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -44277,33 +42975,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; 
testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -44316,66 +43002,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; 
movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -44385,114 +43164,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -44505,39 +43191,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete 
%al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -44547,33 +43353,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -44586,12 +43380,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; 
movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -44601,114 +43542,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete 
%dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -44721,120 +43569,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; 
movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -44844,33 +43731,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -44883,120 +43758,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete 
%sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; 
movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -45006,87 +43920,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -45099,12 +43947,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl 
%eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -45114,33 +44109,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -45153,39 +44136,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; 
testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -45195,114 +44298,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete 
%cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -45315,93 +44325,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; 
sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -45411,33 +44487,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -45450,174 +44514,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl 
%r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; 
movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -45627,33 +44676,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -45666,12 +44703,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, 
%eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -45681,33 +44865,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -45720,93 +44892,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; 
testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -45816,114 +45054,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete 
%cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -45936,12 +45081,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -45951,33 
+45243,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -45990,12 +45270,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl 
%al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -46005,114 +45432,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -46125,120 +45459,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, 
%edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -46248,33 +45621,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, 
%r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -46287,147 +45648,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl 
%eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -46437,60 +45810,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -46503,12 +45837,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; 
testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -46518,33 +45999,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -46557,66 +46026,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d 
-; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, 
%eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -46626,114 +46188,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -46746,66 +46215,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax 
+; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -46815,33 +46377,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -46854,174 +46404,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; 
movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; 
movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -47031,33 +46566,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -47070,12 +46593,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, 
%eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -47085,33 +46755,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -47124,120 +46782,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; 
movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax 
+; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -47247,87 +46944,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -47340,12 +46971,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete 
%al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -47355,33 +47133,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -47394,39 +47160,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax 
+; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -47436,114 +47322,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete 
%dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -47556,93 +47349,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl 
%al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -47652,33 +47511,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -47691,147 +47538,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, 
%r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi ; sete %al ; movzbl %al, %eax ; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -47841,114 +47700,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; 
sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -47961,12 +47727,159 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %edi -; testl %edi, %edi +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; 
movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax ; sete %al ; movzbl %al, %eax ; testl %eax, %eax @@ -47976,33 +47889,21 @@ block0(v0: i32): ; sete %dl ; movzbl %dl, %edx ; testl %edx, %edx -; sete %r8b -; movzbl %r8b, %r8d -; testl %r8d, %r8d -; sete %r9b -; movzbl %r9b, %r9d -; testl %r9d, %r9d -; sete %r10b -; movzbl %r10b, %r10d -; testl %r10d, %r10d -; sete %r11b -; movzbl %r11b, %r11d -; testl %r11d, %r11d +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi +; sete %sil +; movzbl %sil, %esi +; testl %esi, %esi ; sete %sil ; movzbl %sil, %esi ; testl %esi, %esi ; sete %dil ; movzbl %dil, %edi ; testl %edi, %edi -; sete %al -; movzbl %al, %eax -; testl %eax, %eax -; sete %cl -; movzbl %cl, %ecx -; testl %ecx, %ecx -; sete %dl -; movzbl %dl, %edx -; testl %edx, %edx ; sete %r8b ; movzbl %r8b, %r8d ; testl %r8d, %r8d @@ -48015,11 +47916,110 @@ block0(v0: i32): ; sete %r11b ; movzbl %r11b, %r11d ; testl %r11d, %r11d -; sete %sil -; movzbl %sil, %esi -; testl %esi, %esi -; sete %dil -; movzbl %dil, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, 
%eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax +; testl %eax, %eax +; sete %al +; movzbl %al, %eax ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/immediates.clif b/cranelift/filetests/filetests/isa/x64/immediates.clif index 995528e7be10..ff73a774f753 100644 --- a/cranelift/filetests/filetests/isa/x64/immediates.clif +++ b/cranelift/filetests/filetests/isa/x64/immediates.clif @@ -19,15 +19,15 @@ block0(v0: i64, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movabsq $0xffffeeeeddddcccc, %r10 -; leaq (%rdi, %r10), %r10 -; movq %r10, (%rsi) -; movq %rdi, %r11 -; subq (%rip), %r11 -; movq %r11, (%rsi) -; movq %rdi, %rax -; andq (%rip), %rax -; movq %rax, (%rsi) +; movabsq $0xffffeeeeddddcccc, %r8 +; leaq (%rdi, %r8), %r8 +; movq %r8, (%rsi) +; movq %rdi, %r8 +; subq (%rip), %r8 +; movq %r8, (%rsi) +; movq %rdi, %r9 +; andq (%rip), %r9 +; movq %r9, (%rsi) ; orq (%rip), %rdi ; movq %rdi, (%rsi) ; movq %rbp, %rsp @@ -39,15 +39,15 @@ block0(v0: i64, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $18446725308424768716, %r10 -; addq %rdi, %r10 -; movq %r10, (%rsi) ; trap: heap_oob -; movq %rdi, %r11 -; subq 0x22(%rip), %r11 -; movq %r11, (%rsi) ; trap: heap_oob -; movq %rdi, %rax -; andq 0x15(%rip), %rax -; movq %rax, (%rsi) ; trap: heap_oob +; movabsq $18446725308424768716, %r8 +; addq %rdi, %r8 +; movq %r8, (%rsi) ; trap: heap_oob +; movq %rdi, %r8 +; subq 0x22(%rip), %r8 +; movq %r8, (%rsi) ; trap: heap_oob +; movq %rdi, %r9 +; andq 0x15(%rip), %r9 +; movq %r9, (%rsi) ; trap: heap_oob ; orq 0xb(%rip), %rdi ; movq %rdi, (%rsi) ; trap: heap_oob ; movq %rbp, %rsp diff --git a/cranelift/filetests/filetests/isa/x64/ishl.clif b/cranelift/filetests/filetests/isa/x64/ishl.clif index f9bcd845284a..5d8ce0a929f2 100644 --- a/cranelift/filetests/filetests/isa/x64/ishl.clif +++ b/cranelift/filetests/filetests/isa/x64/ishl.clif @@ -22,9 +22,9 @@ block0(v0: i128, v1: i8): ; movq %rdi, %rdx ; shlq %cl, %rdx ; shlq %cl, %rsi -; movq %rcx, %r10 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r10, %r8 +; movq %rax, %r8 ; subq %r8, %rcx ; shrq %cl, %rdi ; uninit %rax @@ -48,9 +48,9 @@ block0(v0: i128, v1: i8): ; movq %rdi, %rdx ; shlq %cl, %rdx ; shlq %cl, %rsi -; movq %rcx, %r10 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r10, %r8 +; movq %rax, %r8 ; subq %r8, %rcx ; shrq %cl, %rdi ; xorq %rax, %rax @@ -74,14 +74,14 @@ block0(v0: i128, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r10 ; movq %rdi, %rdx ; shlq %cl, %rdx ; shlq %cl, %rsi -; movq %rcx, %r10 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r10, %r8 +; movq %rax, %r8 ; subq %r8, %rcx ; shrq %cl, %rdi ; uninit %rax @@ -101,14 +101,14 @@ block0(v0: i128, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r10 ; movq %rdi, %rdx ; shlq %cl, %rdx ; shlq %cl, %rsi -; movq %rcx, %r10 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r10, %r8 +; movq %rax, %r8 ; subq %r8, %rcx ; shrq %cl, %rdi ; xorq %rax, %rax @@ -132,14 +132,14 @@ block0(v0: i128, v1: i32): ; pushq %rbp ; movq %rsp, %rbp ; block0: +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r10 ; movq %rdi, %rdx ; shlq %cl, %rdx ; shlq %cl, %rsi -; movq %rcx, %r10 +; 
movq %rcx, %rax ; movl $0x40, %ecx -; movq %r10, %r8 +; movq %rax, %r8 ; subq %r8, %rcx ; shrq %cl, %rdi ; uninit %rax @@ -159,14 +159,14 @@ block0(v0: i128, v1: i32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r10 ; movq %rdi, %rdx ; shlq %cl, %rdx ; shlq %cl, %rsi -; movq %rcx, %r10 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r10, %r8 +; movq %rax, %r8 ; subq %r8, %rcx ; shrq %cl, %rdi ; xorq %rax, %rax @@ -190,14 +190,14 @@ block0(v0: i128, v1: i16): ; pushq %rbp ; movq %rsp, %rbp ; block0: +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r10 ; movq %rdi, %rdx ; shlq %cl, %rdx ; shlq %cl, %rsi -; movq %rcx, %r10 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r10, %r8 +; movq %rax, %r8 ; subq %r8, %rcx ; shrq %cl, %rdi ; uninit %rax @@ -217,14 +217,14 @@ block0(v0: i128, v1: i16): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r10 ; movq %rdi, %rdx ; shlq %cl, %rdx ; shlq %cl, %rsi -; movq %rcx, %r10 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r10, %r8 +; movq %rax, %r8 ; subq %r8, %rcx ; shrq %cl, %rdi ; xorq %rax, %rax @@ -248,14 +248,14 @@ block0(v0: i128, v1: i8): ; pushq %rbp ; movq %rsp, %rbp ; block0: +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r10 ; movq %rdi, %rdx ; shlq %cl, %rdx ; shlq %cl, %rsi -; movq %rcx, %r10 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r10, %r8 +; movq %rax, %r8 ; subq %r8, %rcx ; shrq %cl, %rdi ; uninit %rax @@ -275,14 +275,14 @@ block0(v0: i128, v1: i8): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r10 ; movq %rdi, %rdx ; shlq %cl, %rdx ; shlq %cl, %rsi -; movq %rcx, %r10 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r10, %r8 +; movq %rax, %r8 ; subq %r8, %rcx ; shrq %cl, %rdi ; xorq %rax, %rax diff --git a/cranelift/filetests/filetests/isa/x64/issue-10906.clif b/cranelift/filetests/filetests/isa/x64/issue-10906.clif index aecbf5f4cec9..36333e2320b9 100644 --- a/cranelift/filetests/filetests/isa/x64/issue-10906.clif +++ b/cranelift/filetests/filetests/isa/x64/issue-10906.clif @@ -46,12 +46,12 @@ block0: ; pushq %rbp ; movq %rsp, %rbp ; block0: -; load_ext_name %munge_xmm0+0, %r8 -; call *%r8 -; movl $0x2a, %r8d +; load_ext_name %munge_xmm0+0, %rsi +; call *%rsi +; movl $0x2a, %esi ; uninit %xmm0 ; pxor %xmm0, %xmm0 -; pinsrw $0x0, %r8d, %xmm0 +; pinsrw $0x0, %esi, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -61,11 +61,11 @@ block0: ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $0, %r8 ; reloc_external Abs8 %munge_xmm0 0 -; callq *%r8 -; movl $0x2a, %r8d +; movabsq $0, %rsi ; reloc_external Abs8 %munge_xmm0 0 +; callq *%rsi +; movl $0x2a, %esi ; pxor %xmm0, %xmm0 -; pinsrw $0, %r8d, %xmm0 +; pinsrw $0, %esi, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/leaf_function_detection.clif b/cranelift/filetests/filetests/isa/x64/leaf_function_detection.clif index 52851abcd9be..df52810e2de7 100644 --- a/cranelift/filetests/filetests/isa/x64/leaf_function_detection.clif +++ b/cranelift/filetests/filetests/isa/x64/leaf_function_detection.clif @@ -203,10 +203,10 @@ block0(v0: i32): ; movq %rsp, %rbp ; block0: ; imull $0x2a, %edi, %eax -; movl $0x7, %r11d +; movl $0x7, %r8d ; uninit %rdx ; xorq %rdx, %rdx -; divl %r11d ;; implicit: %eax, %edx, trap=254 +; divl %r8d ;; implicit: %eax, %edx, trap=254 ; andl $0x3, %eax ; movq %rbp, %rsp ; popq %rbp @@ -218,10 +218,11 @@ block0(v0: i32): ; movq %rsp, 
%rbp ; block1: ; offset 0x4 ; imull $0x2a, %edi, %eax -; movl $7, %r11d +; movl $7, %r8d ; xorq %rdx, %rdx -; divl %r11d ; trap: int_divz +; divl %r8d ; trap: int_divz ; andl $3, %eax ; movq %rbp, %rsp ; popq %rbp ; retq + diff --git a/cranelift/filetests/filetests/isa/x64/load-op.clif b/cranelift/filetests/filetests/isa/x64/load-op.clif index b6a296104826..a0fb8aa25745 100644 --- a/cranelift/filetests/filetests/isa/x64/load-op.clif +++ b/cranelift/filetests/filetests/isa/x64/load-op.clif @@ -154,10 +154,11 @@ block0(v0: i64, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq (%rdi), %r8 -; leaq (%r8, %rdi), %r9 -; movq %r9, (%rsi) -; movq (%r8, %rdi), %rax +; movq %rsi, %r9 +; movq (%rdi), %rsi +; leaq (%rsi, %rdi), %r8 +; movq %r8, (%r9) +; movq (%rsi, %rdi), %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -167,10 +168,11 @@ block0(v0: i64, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq (%rdi), %r8 ; trap: heap_oob -; leaq (%r8, %rdi), %r9 -; movq %r9, (%rsi) ; trap: heap_oob -; movq (%r8, %rdi), %rax ; trap: heap_oob +; movq %rsi, %r9 +; movq (%rdi), %rsi ; trap: heap_oob +; leaq (%rsi, %rdi), %r8 +; movq %r8, (%r9) ; trap: heap_oob +; movq (%rsi, %rdi), %rax ; trap: heap_oob ; movq %rbp, %rsp ; popq %rbp ; retq @@ -220,8 +222,8 @@ block0(v0: i64): ; movq %rsp, %rbp ; block0: ; cmpq (%rdi), %rdi -; sete %dl -; movzbq %dl, %rax +; sete %sil +; movzbq %sil, %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -232,8 +234,8 @@ block0(v0: i64): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; cmpq (%rdi), %rdi ; trap: heap_oob -; sete %dl -; movzbq %dl, %rax +; sete %sil +; movzbq %sil, %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -249,8 +251,8 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq (%rdi), %rcx -; testq %rcx, %rcx +; movq (%rdi), %rsi +; testq %rsi, %rsi ; sete %al ; movq %rbp, %rsp ; popq %rbp @@ -261,8 +263,8 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq (%rdi), %rcx ; trap: heap_oob -; testq %rcx, %rcx +; movq (%rdi), %rsi ; trap: heap_oob +; testq %rsi, %rsi ; sete %al ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/mul.clif b/cranelift/filetests/filetests/isa/x64/mul.clif index e32a6d9c0d7b..ecf6aa3e72a5 100644 --- a/cranelift/filetests/filetests/isa/x64/mul.clif +++ b/cranelift/filetests/filetests/isa/x64/mul.clif @@ -495,8 +495,8 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movzwq (%rdi), %rcx -; imulw $0x3fd, %cx, %ax +; movzwq (%rdi), %rsi +; imulw $0x3fd, %si, %ax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -506,8 +506,8 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movzwq (%rdi), %rcx ; trap: heap_oob -; imulw $0x3fd, %cx, %ax +; movzwq (%rdi), %rsi ; trap: heap_oob +; imulw $0x3fd, %si, %ax ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/nan-canonicalization-sse41.clif b/cranelift/filetests/filetests/isa/x64/nan-canonicalization-sse41.clif index 50187249f677..830008b45e99 100644 --- a/cranelift/filetests/filetests/isa/x64/nan-canonicalization-sse41.clif +++ b/cranelift/filetests/filetests/isa/x64/nan-canonicalization-sse41.clif @@ -13,8 +13,8 @@ block0(v0: f32x4, v1: f32x4): ; movq %rsp, %rbp ; block0: ; addps %xmm1, %xmm0 -; movl $0x7fc00000, %r10d -; movd %r10d, %xmm7 +; movl $0x7fc00000, %edi +; movd %edi, %xmm7 ; shufps $0x0, (%rip), %xmm7 ; movdqa %xmm0, %xmm1 ; cmpunordps %xmm0, %xmm1 @@ -33,9 +33,9 @@ block0(v0: f32x4, v1: f32x4): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; 
addps %xmm1, %xmm0 -; movl $0x7fc00000, %r10d -; movd %r10d, %xmm7 -; shufps $0, 0x26(%rip), %xmm7 +; movl $0x7fc00000, %edi +; movd %edi, %xmm7 +; shufps $0, 0x28(%rip), %xmm7 ; movdqa %xmm0, %xmm1 ; cmpunordps %xmm0, %xmm1 ; movdqa %xmm0, %xmm2 @@ -49,6 +49,7 @@ block0(v0: f32x4, v1: f32x4): ; addb %al, (%rax) ; addb %al, (%rax) ; addb %al, (%rax) +; addb %al, (%rax) ; sarb $0, (%rdi) ; addb %al, (%rax) ; addb %al, (%rax) @@ -67,22 +68,22 @@ block0(v0: f64, v1: f64): ; movq %rsp, %rbp ; block0: ; addsd %xmm1, %xmm0 -; movdqa %xmm0, %xmm7 -; movabsq $0x7ff8000000000000, %rcx -; movq %rcx, %xmm6 -; uninit %xmm5 -; xorpd %xmm5, %xmm5 -; movsd %xmm6, %xmm5 +; movdqa %xmm0, %xmm2 +; movabsq $0x7ff8000000000000, %rax +; movq %rax, %xmm0 +; uninit %xmm1 +; xorpd %xmm1, %xmm1 +; movsd %xmm0, %xmm1 ; uninit %xmm0 ; xorpd %xmm0, %xmm0 -; movdqa %xmm7, %xmm6 -; movsd %xmm6, %xmm0 -; movdqa %xmm0, %xmm6 -; cmpunordpd %xmm0, %xmm6 +; movsd %xmm2, %xmm0 +; movdqa %xmm0, %xmm2 +; cmpunordpd %xmm0, %xmm2 ; movdqa %xmm0, %xmm3 -; movdqa %xmm6, %xmm0 -; pblendvb %xmm0, %xmm5, %xmm3 -; movdqa %xmm3, %xmm0 +; movdqa %xmm2, %xmm0 +; movdqa %xmm3, %xmm2 +; pblendvb %xmm0, %xmm1, %xmm2 +; movdqa %xmm2, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -93,20 +94,20 @@ block0(v0: f64, v1: f64): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; addsd %xmm1, %xmm0 -; movdqa %xmm0, %xmm7 -; movabsq $0x7ff8000000000000, %rcx -; movq %rcx, %xmm6 -; xorpd %xmm5, %xmm5 -; movsd %xmm6, %xmm5 +; movdqa %xmm0, %xmm2 +; movabsq $0x7ff8000000000000, %rax +; movq %rax, %xmm0 +; xorpd %xmm1, %xmm1 +; movsd %xmm0, %xmm1 ; xorpd %xmm0, %xmm0 -; movdqa %xmm7, %xmm6 -; movsd %xmm6, %xmm0 -; movdqa %xmm0, %xmm6 -; cmpunordpd %xmm0, %xmm6 +; movsd %xmm2, %xmm0 +; movdqa %xmm0, %xmm2 +; cmpunordpd %xmm0, %xmm2 ; movdqa %xmm0, %xmm3 -; movdqa %xmm6, %xmm0 -; pblendvb %xmm0, %xmm5, %xmm3 -; movdqa %xmm3, %xmm0 +; movdqa %xmm2, %xmm0 +; movdqa %xmm3, %xmm2 +; pblendvb %xmm0, %xmm1, %xmm2 +; movdqa %xmm2, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -122,22 +123,22 @@ block0(v0: f32, v1: f32): ; movq %rsp, %rbp ; block0: ; addss %xmm1, %xmm0 -; movdqa %xmm0, %xmm7 -; movl $0x7fc00000, %ecx -; movd %ecx, %xmm6 -; uninit %xmm5 -; xorps %xmm5, %xmm5 -; movss %xmm6, %xmm5 +; movdqa %xmm0, %xmm2 +; movl $0x7fc00000, %eax +; movd %eax, %xmm0 +; uninit %xmm1 +; xorps %xmm1, %xmm1 +; movss %xmm0, %xmm1 ; uninit %xmm0 ; xorps %xmm0, %xmm0 -; movdqa %xmm7, %xmm6 -; movss %xmm6, %xmm0 -; movdqa %xmm0, %xmm6 -; cmpunordps %xmm0, %xmm6 +; movss %xmm2, %xmm0 +; movdqa %xmm0, %xmm2 +; cmpunordps %xmm0, %xmm2 ; movdqa %xmm0, %xmm3 -; movdqa %xmm6, %xmm0 -; pblendvb %xmm0, %xmm5, %xmm3 -; movdqa %xmm3, %xmm0 +; movdqa %xmm2, %xmm0 +; movdqa %xmm3, %xmm2 +; pblendvb %xmm0, %xmm1, %xmm2 +; movdqa %xmm2, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -148,20 +149,20 @@ block0(v0: f32, v1: f32): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; addss %xmm1, %xmm0 -; movdqa %xmm0, %xmm7 -; movl $0x7fc00000, %ecx -; movd %ecx, %xmm6 -; xorps %xmm5, %xmm5 -; movss %xmm6, %xmm5 +; movdqa %xmm0, %xmm2 +; movl $0x7fc00000, %eax +; movd %eax, %xmm0 +; xorps %xmm1, %xmm1 +; movss %xmm0, %xmm1 ; xorps %xmm0, %xmm0 -; movdqa %xmm7, %xmm6 -; movss %xmm6, %xmm0 -; movdqa %xmm0, %xmm6 -; cmpunordps %xmm0, %xmm6 +; movss %xmm2, %xmm0 +; movdqa %xmm0, %xmm2 +; cmpunordps %xmm0, %xmm2 ; movdqa %xmm0, %xmm3 -; movdqa %xmm6, %xmm0 -; pblendvb %xmm0, %xmm5, %xmm3 -; movdqa %xmm3, %xmm0 +; movdqa %xmm2, %xmm0 +; movdqa %xmm3, %xmm2 +; pblendvb %xmm0, %xmm1, %xmm2 +; movdqa %xmm2, %xmm0 ; movq 
%rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/nan-canonicalization.clif b/cranelift/filetests/filetests/isa/x64/nan-canonicalization.clif index 90f85b2e9ac2..a9f1cd3735d9 100644 --- a/cranelift/filetests/filetests/isa/x64/nan-canonicalization.clif +++ b/cranelift/filetests/filetests/isa/x64/nan-canonicalization.clif @@ -14,8 +14,8 @@ block0(v0: f32x4, v1: f32x4): ; block0: ; addps %xmm1, %xmm0 ; movdqa %xmm0, %xmm1 -; movl $0x7fc00000, %esi -; movd %esi, %xmm5 +; movl $0x7fc00000, %r9d +; movd %r9d, %xmm5 ; shufps $0x0, (%rip), %xmm5 ; cmpunordps %xmm1, %xmm0 ; andps %xmm0, %xmm5 @@ -32,9 +32,9 @@ block0(v0: f32x4, v1: f32x4): ; block1: ; offset 0x4 ; addps %xmm1, %xmm0 ; movdqa %xmm0, %xmm1 -; movl $0x7fc00000, %esi -; movd %esi, %xmm5 -; shufps $0, 0x14(%rip), %xmm5 +; movl $0x7fc00000, %r9d +; movd %r9d, %xmm5 +; shufps $0, 0x12(%rip), %xmm5 ; cmpunordps %xmm1, %xmm0 ; andps %xmm0, %xmm5 ; andnps %xmm1, %xmm0 @@ -43,7 +43,6 @@ block0(v0: f32x4, v1: f32x4): ; popq %rbp ; retq ; addb %al, (%rax) -; addb %al, (%rax) ; sarb $0, (%rdi) ; addb %al, (%rax) ; addb %al, (%rax) @@ -62,18 +61,18 @@ block0(v0: f64, v1: f64): ; movq %rsp, %rbp ; block0: ; addsd %xmm1, %xmm0 -; movabsq $0x7ff8000000000000, %r8 -; movq %r8, %xmm1 +; movabsq $0x7ff8000000000000, %rax +; movq %rax, %xmm1 ; uninit %xmm6 ; xorpd %xmm6, %xmm6 ; movsd %xmm1, %xmm6 -; uninit %xmm7 -; xorpd %xmm7, %xmm7 -; movsd %xmm0, %xmm7 -; movdqa %xmm7, %xmm0 -; cmpunordpd %xmm7, %xmm0 +; uninit %xmm1 +; xorpd %xmm1, %xmm1 +; movsd %xmm0, %xmm1 +; movdqa %xmm1, %xmm0 +; cmpunordpd %xmm1, %xmm0 ; andpd %xmm0, %xmm6 -; andnpd %xmm7, %xmm0 +; andnpd %xmm1, %xmm0 ; orpd %xmm6, %xmm0 ; movq %rbp, %rsp ; popq %rbp @@ -85,16 +84,16 @@ block0(v0: f64, v1: f64): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; addsd %xmm1, %xmm0 -; movabsq $0x7ff8000000000000, %r8 -; movq %r8, %xmm1 +; movabsq $0x7ff8000000000000, %rax +; movq %rax, %xmm1 ; xorpd %xmm6, %xmm6 ; movsd %xmm1, %xmm6 -; xorpd %xmm7, %xmm7 -; movsd %xmm0, %xmm7 -; movdqa %xmm7, %xmm0 -; cmpunordpd %xmm7, %xmm0 +; xorpd %xmm1, %xmm1 +; movsd %xmm0, %xmm1 +; movdqa %xmm1, %xmm0 +; cmpunordpd %xmm1, %xmm0 ; andpd %xmm0, %xmm6 -; andnpd %xmm7, %xmm0 +; andnpd %xmm1, %xmm0 ; orpd %xmm6, %xmm0 ; movq %rbp, %rsp ; popq %rbp @@ -111,18 +110,18 @@ block0(v0: f32, v1: f32): ; movq %rsp, %rbp ; block0: ; addss %xmm1, %xmm0 -; movl $0x7fc00000, %r8d -; movd %r8d, %xmm1 +; movl $0x7fc00000, %eax +; movd %eax, %xmm1 ; uninit %xmm6 ; xorps %xmm6, %xmm6 ; movss %xmm1, %xmm6 -; uninit %xmm7 -; xorps %xmm7, %xmm7 -; movss %xmm0, %xmm7 -; movdqa %xmm7, %xmm0 -; cmpunordps %xmm7, %xmm0 +; uninit %xmm1 +; xorps %xmm1, %xmm1 +; movss %xmm0, %xmm1 +; movdqa %xmm1, %xmm0 +; cmpunordps %xmm1, %xmm0 ; andps %xmm0, %xmm6 -; andnps %xmm7, %xmm0 +; andnps %xmm1, %xmm0 ; orps %xmm6, %xmm0 ; movq %rbp, %rsp ; popq %rbp @@ -134,16 +133,16 @@ block0(v0: f32, v1: f32): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; addss %xmm1, %xmm0 -; movl $0x7fc00000, %r8d -; movd %r8d, %xmm1 +; movl $0x7fc00000, %eax +; movd %eax, %xmm1 ; xorps %xmm6, %xmm6 ; movss %xmm1, %xmm6 -; xorps %xmm7, %xmm7 -; movss %xmm0, %xmm7 -; movdqa %xmm7, %xmm0 -; cmpunordps %xmm7, %xmm0 +; xorps %xmm1, %xmm1 +; movss %xmm0, %xmm1 +; movdqa %xmm1, %xmm0 +; cmpunordps %xmm1, %xmm0 ; andps %xmm0, %xmm6 -; andnps %xmm7, %xmm0 +; andnps %xmm1, %xmm0 ; orps %xmm6, %xmm0 ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/nearest-libcall.clif 
b/cranelift/filetests/filetests/isa/x64/nearest-libcall.clif index b6f2a2041d35..86f4e95896ca 100644 --- a/cranelift/filetests/filetests/isa/x64/nearest-libcall.clif +++ b/cranelift/filetests/filetests/isa/x64/nearest-libcall.clif @@ -11,8 +11,8 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; load_ext_name %NearestF32+0, %rcx -; call *%rcx +; load_ext_name %NearestF32+0, %rsi +; call *%rsi ; movq %rbp, %rsp ; popq %rbp ; retq @@ -22,8 +22,8 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $0, %rcx ; reloc_external Abs8 %NearestF32 0 -; callq *%rcx +; movabsq $0, %rsi ; reloc_external Abs8 %NearestF32 0 +; callq *%rsi ; movq %rbp, %rsp ; popq %rbp ; retq @@ -38,8 +38,8 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; load_ext_name %NearestF64+0, %rcx -; call *%rcx +; load_ext_name %NearestF64+0, %rsi +; call *%rsi ; movq %rbp, %rsp ; popq %rbp ; retq @@ -49,8 +49,8 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $0, %rcx ; reloc_external Abs8 %NearestF64 0 -; callq *%rcx +; movabsq $0, %rsi ; reloc_external Abs8 %NearestF64 0 +; callq *%rsi ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/pinned-reg.clif b/cranelift/filetests/filetests/isa/x64/pinned-reg.clif index d6da9cf0dd7c..97899e0db831 100644 --- a/cranelift/filetests/filetests/isa/x64/pinned-reg.clif +++ b/cranelift/filetests/filetests/isa/x64/pinned-reg.clif @@ -14,9 +14,9 @@ block0: ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %r15, %rdi -; leaq 1(%rdi), %rdi -; movq %rdi, %r15 +; movq %r15, %rcx +; leaq 1(%rcx), %rcx +; movq %rcx, %r15 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -26,9 +26,9 @@ block0: ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %r15, %rdi -; addq $1, %rdi -; movq %rdi, %r15 +; movq %r15, %rcx +; addq $1, %rcx +; movq %rcx, %r15 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -44,14 +44,10 @@ block0: ; VCode: ; pushq %rbp ; movq %rsp, %rbp -; subq $0x10, %rsp -; movq %rdi, (%rsp) ; block0: -; movq %r15, %rdi -; leaq 1(%rdi), %rdi -; movq %rdi, %r15 -; movq (%rsp), %rdi -; addq $0x10, %rsp +; movq %r15, %rcx +; leaq 1(%rcx), %rcx +; movq %rcx, %r15 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -60,14 +56,10 @@ block0: ; block0: ; offset 0x0 ; pushq %rbp ; movq %rsp, %rbp -; subq $0x10, %rsp -; movq %rdi, (%rsp) -; block1: ; offset 0xc -; movq %r15, %rdi -; addq $1, %rdi -; movq %rdi, %r15 -; movq (%rsp), %rdi -; addq $0x10, %rsp +; block1: ; offset 0x4 +; movq %r15, %rcx +; addq $1, %rcx +; movq %rcx, %r15 ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/popcnt.clif b/cranelift/filetests/filetests/isa/x64/popcnt.clif index 69f4469fccda..6d5a78c95e21 100644 --- a/cranelift/filetests/filetests/isa/x64/popcnt.clif +++ b/cranelift/filetests/filetests/isa/x64/popcnt.clif @@ -11,22 +11,22 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %rax -; shrq $0x1, %rax -; movabsq $0x7777777777777777, %rdx -; andq %rdx, %rax -; subq %rax, %rdi -; shrq $0x1, %rax -; andq %rdx, %rax -; subq %rax, %rdi -; shrq $0x1, %rax -; andq %rdx, %rax -; subq %rax, %rdi +; movq %rdi, %r10 +; shrq $0x1, %r10 +; movabsq $0x7777777777777777, %rsi +; andq %rsi, %r10 +; subq %r10, %rdi +; shrq $0x1, %r10 +; andq %rsi, %r10 +; subq %r10, %rdi +; shrq $0x1, %r10 +; andq %rsi, %r10 +; subq %r10, %rdi ; movq %rdi, %rax ; shrq $0x4, %rax ; addq %rdi, %rax -; movabsq $0xf0f0f0f0f0f0f0f, %r11 -; andq %r11, %rax +; movabsq $0xf0f0f0f0f0f0f0f, %rcx +; 
andq %rcx, %rax ; movabsq $0x101010101010101, %rcx ; imulq %rcx, %rax ; shrq $0x38, %rax @@ -39,22 +39,22 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %rax -; shrq $1, %rax -; movabsq $0x7777777777777777, %rdx -; andq %rdx, %rax -; subq %rax, %rdi -; shrq $1, %rax -; andq %rdx, %rax -; subq %rax, %rdi -; shrq $1, %rax -; andq %rdx, %rax -; subq %rax, %rdi +; movq %rdi, %r10 +; shrq $1, %r10 +; movabsq $0x7777777777777777, %rsi +; andq %rsi, %r10 +; subq %r10, %rdi +; shrq $1, %r10 +; andq %rsi, %r10 +; subq %r10, %rdi +; shrq $1, %r10 +; andq %rsi, %r10 +; subq %r10, %rdi ; movq %rdi, %rax ; shrq $4, %rax ; addq %rdi, %rax -; movabsq $0xf0f0f0f0f0f0f0f, %r11 -; andq %r11, %rax +; movabsq $0xf0f0f0f0f0f0f0f, %rcx +; andq %rcx, %rax ; movabsq $0x101010101010101, %rcx ; imulq %rcx, %rax ; shrq $0x38, %rax @@ -73,25 +73,25 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq (%rdi), %rdx -; movq %rdx, %rcx -; shrq $0x1, %rcx -; movabsq $0x7777777777777777, %r8 -; andq %r8, %rcx -; subq %rcx, %rdx -; shrq $0x1, %rcx -; andq %r8, %rcx -; subq %rcx, %rdx -; shrq $0x1, %rcx -; andq %r8, %rcx -; subq %rcx, %rdx -; movq %rdx, %rax -; shrq $0x4, %rax -; addq %rdx, %rax -; movabsq $0xf0f0f0f0f0f0f0f, %rsi +; movq (%rdi), %r11 +; movq %r11, %rax +; shrq $0x1, %rax +; movabsq $0x7777777777777777, %rsi ; andq %rsi, %rax -; movabsq $0x101010101010101, %rdx -; imulq %rdx, %rax +; subq %rax, %r11 +; shrq $0x1, %rax +; andq %rsi, %rax +; subq %rax, %r11 +; shrq $0x1, %rax +; andq %rsi, %rax +; subq %rax, %r11 +; movq %r11, %rax +; shrq $0x4, %rax +; addq %r11, %rax +; movabsq $0xf0f0f0f0f0f0f0f, %rcx +; andq %rcx, %rax +; movabsq $0x101010101010101, %rcx +; imulq %rcx, %rax ; shrq $0x38, %rax ; movq %rbp, %rsp ; popq %rbp @@ -102,25 +102,25 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq (%rdi), %rdx ; trap: heap_oob -; movq %rdx, %rcx -; shrq $1, %rcx -; movabsq $0x7777777777777777, %r8 -; andq %r8, %rcx -; subq %rcx, %rdx -; shrq $1, %rcx -; andq %r8, %rcx -; subq %rcx, %rdx -; shrq $1, %rcx -; andq %r8, %rcx -; subq %rcx, %rdx -; movq %rdx, %rax -; shrq $4, %rax -; addq %rdx, %rax -; movabsq $0xf0f0f0f0f0f0f0f, %rsi +; movq (%rdi), %r11 ; trap: heap_oob +; movq %r11, %rax +; shrq $1, %rax +; movabsq $0x7777777777777777, %rsi ; andq %rsi, %rax -; movabsq $0x101010101010101, %rdx -; imulq %rdx, %rax +; subq %rax, %r11 +; shrq $1, %rax +; andq %rsi, %rax +; subq %rax, %r11 +; shrq $1, %rax +; andq %rsi, %rax +; subq %rax, %r11 +; movq %r11, %rax +; shrq $4, %rax +; addq %r11, %rax +; movabsq $0xf0f0f0f0f0f0f0f, %rcx +; andq %rcx, %rax +; movabsq $0x101010101010101, %rcx +; imulq %rcx, %rax ; shrq $0x38, %rax ; movq %rbp, %rsp ; popq %rbp @@ -136,22 +136,22 @@ block0(v0: i32): ; pushq %rbp ; movq %rsp, %rbp ; block0: +; movq %rdi, %r10 +; shrl $0x1, %r10d +; movl $0x77777777, %esi +; andl %esi, %r10d +; subl %r10d, %edi +; shrl $0x1, %r10d +; andl %esi, %r10d +; subl %r10d, %edi +; shrl $0x1, %r10d +; andl %esi, %r10d +; subl %r10d, %edi ; movq %rdi, %rax -; shrl $0x1, %eax -; movl $0x77777777, %edx -; andl %edx, %eax -; subl %eax, %edi -; shrl $0x1, %eax -; andl %edx, %eax -; subl %eax, %edi -; shrl $0x1, %eax -; andl %edx, %eax -; subl %eax, %edi -; movq %rdi, %r9 -; shrl $0x4, %r9d -; addl %edi, %r9d -; andl $0xf0f0f0f, %r9d -; imull $0x1010101, %r9d, %eax +; shrl $0x4, %eax +; addl %edi, %eax +; andl $0xf0f0f0f, %eax +; imull $0x1010101, %eax, %eax ; shrl $0x18, %eax ; movq %rbp, %rsp ; popq %rbp @@ -162,22 
+162,22 @@ block0(v0: i32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 +; movq %rdi, %r10 +; shrl $1, %r10d +; movl $0x77777777, %esi +; andl %esi, %r10d +; subl %r10d, %edi +; shrl $1, %r10d +; andl %esi, %r10d +; subl %r10d, %edi +; shrl $1, %r10d +; andl %esi, %r10d +; subl %r10d, %edi ; movq %rdi, %rax -; shrl $1, %eax -; movl $0x77777777, %edx -; andl %edx, %eax -; subl %eax, %edi -; shrl $1, %eax -; andl %edx, %eax -; subl %eax, %edi -; shrl $1, %eax -; andl %edx, %eax -; subl %eax, %edi -; movq %rdi, %r9 -; shrl $4, %r9d -; addl %edi, %r9d -; andl $0xf0f0f0f, %r9d -; imull $0x1010101, %r9d, %eax +; shrl $4, %eax +; addl %edi, %eax +; andl $0xf0f0f0f, %eax +; imull $0x1010101, %eax, %eax ; shrl $0x18, %eax ; movq %rbp, %rsp ; popq %rbp @@ -194,23 +194,23 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movl (%rdi), %eax -; movq %rax, %rcx -; shrl $0x1, %ecx -; movl $0x77777777, %r8d -; andl %r8d, %ecx -; subl %ecx, %eax -; shrl $0x1, %ecx -; andl %r8d, %ecx -; subl %ecx, %eax -; shrl $0x1, %ecx -; andl %r8d, %ecx -; subl %ecx, %eax -; movq %rax, %r10 -; shrl $0x4, %r10d -; addl %eax, %r10d -; andl $0xf0f0f0f, %r10d -; imull $0x1010101, %r10d, %eax +; movl (%rdi), %r11d +; movq %r11, %rax +; shrl $0x1, %eax +; movl $0x77777777, %esi +; andl %esi, %eax +; subl %eax, %r11d +; shrl $0x1, %eax +; andl %esi, %eax +; subl %eax, %r11d +; shrl $0x1, %eax +; andl %esi, %eax +; subl %eax, %r11d +; movq %r11, %rax +; shrl $0x4, %eax +; addl %r11d, %eax +; andl $0xf0f0f0f, %eax +; imull $0x1010101, %eax, %eax ; shrl $0x18, %eax ; movq %rbp, %rsp ; popq %rbp @@ -221,23 +221,23 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl (%rdi), %eax ; trap: heap_oob -; movq %rax, %rcx -; shrl $1, %ecx -; movl $0x77777777, %r8d -; andl %r8d, %ecx -; subl %ecx, %eax -; shrl $1, %ecx -; andl %r8d, %ecx -; subl %ecx, %eax -; shrl $1, %ecx -; andl %r8d, %ecx -; subl %ecx, %eax -; movq %rax, %r10 -; shrl $4, %r10d -; addl %eax, %r10d -; andl $0xf0f0f0f, %r10d -; imull $0x1010101, %r10d, %eax +; movl (%rdi), %r11d ; trap: heap_oob +; movq %r11, %rax +; shrl $1, %eax +; movl $0x77777777, %esi +; andl %esi, %eax +; subl %eax, %r11d +; shrl $1, %eax +; andl %esi, %eax +; subl %eax, %r11d +; shrl $1, %eax +; andl %esi, %eax +; subl %eax, %r11d +; movq %r11, %rax +; shrl $4, %eax +; addl %r11d, %eax +; andl $0xf0f0f0f, %eax +; imull $0x1010101, %eax, %eax ; shrl $0x18, %eax ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/preserve-all.clif b/cranelift/filetests/filetests/isa/x64/preserve-all.clif index a38e8f046f43..3cfbb6c85be3 100644 --- a/cranelift/filetests/filetests/isa/x64/preserve-all.clif +++ b/cranelift/filetests/filetests/isa/x64/preserve-all.clif @@ -73,8 +73,8 @@ block0(v0: i64): ; movdqu %xmm14, 0x130(%rsp) ; movdqu %xmm15, 0x140(%rsp) ; block0: -; load_ext_name %libcall+0, %rax -; call *%rax +; load_ext_name %libcall+0, %rdx +; call *%rdx ; movq (%rsp), %rax ; movq 8(%rsp), %rcx ; movq 0x10(%rsp), %rdx @@ -136,8 +136,8 @@ block0(v0: i64): ; movdqu %xmm14, 0x130(%rsp) ; movdqu %xmm15, 0x140(%rsp) ; block1: ; offset 0xc6 -; movabsq $0, %rax ; reloc_external Abs8 %libcall 0 -; callq *%rax +; movabsq $0, %rdx ; reloc_external Abs8 %libcall 0 +; callq *%rdx ; movq (%rsp), %rax ; movq 8(%rsp), %rcx ; movq 0x10(%rsp), %rdx diff --git a/cranelift/filetests/filetests/isa/x64/return-call-indirect.clif b/cranelift/filetests/filetests/isa/x64/return-call-indirect.clif index ca226e7fe055..ed4014449136 100644 --- 
a/cranelift/filetests/filetests/isa/x64/return-call-indirect.clif +++ b/cranelift/filetests/filetests/isa/x64/return-call-indirect.clif @@ -272,62 +272,62 @@ block0: ; movq %r9, +0x48(%rsp) ; movl $0x23, %r9d ; movq %r9, +0x40(%rsp) -; movl $0x28, %eax +; movl $0x28, %ecx +; movq %rcx, +0x38(%rsp) ; movl $0x2d, %r10d ; movl $0x32, %r11d -; movl $0x37, %r13d -; movl $0x3c, %r14d -; movl $0x41, %r15d -; movl $0x46, %ebx -; movl $0x4b, %r12d +; movl $0x37, %ebx +; movl $0x3c, %r12d +; movl $0x41, %r13d +; movl $0x46, %r14d +; movl $0x4b, %r15d ; movl $0x50, %edi ; movl $0x55, %esi -; movq %rsi, +0x38(%rsp) ; movl $0x5a, %edx ; movl $0x5f, %ecx ; movl $0x64, %r8d ; movl $0x69, %r9d -; movl $0x6e, %esi -; movq %rsi, +0x30(%rsp) -; movl $0x73, %esi -; movq %rsi, +0x28(%rsp) -; movl $0x78, %esi -; movq %rsi, +0x20(%rsp) -; movl $0x7d, %esi -; movq %rsi, +0x18(%rsp) -; movl $0x82, %esi -; movq %rsi, +0x10(%rsp) -; movl $0x87, %esi -; movq %rsi, +8(%rsp) -; load_ext_name %tail_callee_stack_args+0, %rsi -; movq %rsi, +(%rsp) +; movl $0x6e, %eax +; movq %rax, +0x30(%rsp) +; movl $0x73, %eax +; movq %rax, +0x28(%rsp) +; movl $0x78, %eax +; movq %rax, +0x20(%rsp) +; movl $0x7d, %eax +; movq %rax, +0x18(%rsp) +; movl $0x82, %eax +; movq %rax, +0x10(%rsp) +; movl $0x87, %eax +; movq %rax, +8(%rsp) +; load_ext_name %tail_callee_stack_args+0, %rax +; movq %rax, +(%rsp) +; movq +0x38(%rsp), %rax ; movq %rax, +-0xa0(%rbp) ; movq %r10, +-0x98(%rbp) ; movq %r11, +-0x90(%rbp) -; movq %r13, +-0x88(%rbp) -; movq %r14, +-0x80(%rbp) -; movq %r15, +-0x78(%rbp) -; movq %rbx, +-0x70(%rbp) -; movq %r12, +-0x68(%rbp) +; movq %rbx, +-0x88(%rbp) +; movq %r12, +-0x80(%rbp) +; movq %r13, +-0x78(%rbp) +; movq %r14, +-0x70(%rbp) +; movq %r15, +-0x68(%rbp) ; movq %rdi, +-0x60(%rbp) -; movq +0x38(%rsp), %rdi -; movq %rdi, +-0x58(%rbp) +; movq %rsi, +-0x58(%rbp) ; movq %rdx, +-0x50(%rbp) ; movq %rcx, +-0x48(%rbp) ; movq %r8, +-0x40(%rbp) ; movq %r9, +-0x38(%rbp) -; movq +0x30(%rsp), %rsi -; movq %rsi, +-0x30(%rbp) -; movq +0x28(%rsp), %rsi -; movq %rsi, +-0x28(%rbp) -; movq +0x20(%rsp), %rsi -; movq %rsi, +-0x20(%rbp) -; movq +0x18(%rsp), %rsi -; movq %rsi, +-0x18(%rbp) -; movq +0x10(%rsp), %rsi -; movq %rsi, +-0x10(%rbp) -; movq +8(%rsp), %rsi -; movq %rsi, +-8(%rbp) +; movq +0x30(%rsp), %rax +; movq %rax, +-0x30(%rbp) +; movq +0x28(%rsp), %rax +; movq %rax, +-0x28(%rbp) +; movq +0x20(%rsp), %rax +; movq %rax, +-0x20(%rbp) +; movq +0x18(%rsp), %rax +; movq %rax, +-0x18(%rbp) +; movq +0x10(%rsp), %rax +; movq %rax, +-0x10(%rbp) +; movq +8(%rsp), %rax +; movq %rax, +-8(%rbp) ; movq +0x50(%rsp), %rcx ; movq +0x58(%rsp), %rdx ; movq +0x60(%rsp), %rsi @@ -366,62 +366,62 @@ block0: ; movq %r9, 0x48(%rsp) ; movl $0x23, %r9d ; movq %r9, 0x40(%rsp) -; movl $0x28, %eax +; movl $0x28, %ecx +; movq %rcx, 0x38(%rsp) ; movl $0x2d, %r10d ; movl $0x32, %r11d -; movl $0x37, %r13d -; movl $0x3c, %r14d -; movl $0x41, %r15d -; movl $0x46, %ebx -; movl $0x4b, %r12d +; movl $0x37, %ebx +; movl $0x3c, %r12d +; movl $0x41, %r13d +; movl $0x46, %r14d +; movl $0x4b, %r15d ; movl $0x50, %edi ; movl $0x55, %esi -; movq %rsi, 0x38(%rsp) ; movl $0x5a, %edx ; movl $0x5f, %ecx ; movl $0x64, %r8d ; movl $0x69, %r9d -; movl $0x6e, %esi -; movq %rsi, 0x30(%rsp) -; movl $0x73, %esi -; movq %rsi, 0x28(%rsp) -; movl $0x78, %esi -; movq %rsi, 0x20(%rsp) -; movl $0x7d, %esi -; movq %rsi, 0x18(%rsp) -; movl $0x82, %esi -; movq %rsi, 0x10(%rsp) -; movl $0x87, %esi -; movq %rsi, 8(%rsp) -; movabsq $0, %rsi ; reloc_external Abs8 %tail_callee_stack_args 0 -; movq %rsi, 
(%rsp) +; movl $0x6e, %eax +; movq %rax, 0x30(%rsp) +; movl $0x73, %eax +; movq %rax, 0x28(%rsp) +; movl $0x78, %eax +; movq %rax, 0x20(%rsp) +; movl $0x7d, %eax +; movq %rax, 0x18(%rsp) +; movl $0x82, %eax +; movq %rax, 0x10(%rsp) +; movl $0x87, %eax +; movq %rax, 8(%rsp) +; movabsq $0, %rax ; reloc_external Abs8 %tail_callee_stack_args 0 +; movq %rax, (%rsp) +; movq 0x38(%rsp), %rax ; movq %rax, 0x10(%rbp) ; movq %r10, 0x18(%rbp) ; movq %r11, 0x20(%rbp) -; movq %r13, 0x28(%rbp) -; movq %r14, 0x30(%rbp) -; movq %r15, 0x38(%rbp) -; movq %rbx, 0x40(%rbp) -; movq %r12, 0x48(%rbp) +; movq %rbx, 0x28(%rbp) +; movq %r12, 0x30(%rbp) +; movq %r13, 0x38(%rbp) +; movq %r14, 0x40(%rbp) +; movq %r15, 0x48(%rbp) ; movq %rdi, 0x50(%rbp) -; movq 0x38(%rsp), %rdi -; movq %rdi, 0x58(%rbp) +; movq %rsi, 0x58(%rbp) ; movq %rdx, 0x60(%rbp) ; movq %rcx, 0x68(%rbp) ; movq %r8, 0x70(%rbp) ; movq %r9, 0x78(%rbp) -; movq 0x30(%rsp), %rsi -; movq %rsi, 0x80(%rbp) -; movq 0x28(%rsp), %rsi -; movq %rsi, 0x88(%rbp) -; movq 0x20(%rsp), %rsi -; movq %rsi, 0x90(%rbp) -; movq 0x18(%rsp), %rsi -; movq %rsi, 0x98(%rbp) -; movq 0x10(%rsp), %rsi -; movq %rsi, 0xa0(%rbp) -; movq 8(%rsp), %rsi -; movq %rsi, 0xa8(%rbp) +; movq 0x30(%rsp), %rax +; movq %rax, 0x80(%rbp) +; movq 0x28(%rsp), %rax +; movq %rax, 0x88(%rbp) +; movq 0x20(%rsp), %rax +; movq %rax, 0x90(%rbp) +; movq 0x18(%rsp), %rax +; movq %rax, 0x98(%rbp) +; movq 0x10(%rsp), %rax +; movq %rax, 0xa0(%rbp) +; movq 8(%rsp), %rax +; movq %rax, 0xa8(%rbp) ; movq 0x50(%rsp), %rcx ; movq 0x58(%rsp), %rdx ; movq 0x60(%rsp), %rsi diff --git a/cranelift/filetests/filetests/isa/x64/return-call.clif b/cranelift/filetests/filetests/isa/x64/return-call.clif index 9d5f7ddc00f6..03ee2ef4dcd8 100644 --- a/cranelift/filetests/filetests/isa/x64/return-call.clif +++ b/cranelift/filetests/filetests/isa/x64/return-call.clif @@ -233,16 +233,17 @@ block0(v0: i32, v1: i32, v2: i32, v3: i32, v4: i32, v5: i32, v6: i32, v7: i32, v ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %r8, %r10 -; movq %r9, %r11 +; movq %r8, %rax +; movq %r9, %rsi ; movq +-0x20(%rbp), %r8 ; movq +-0x18(%rbp), %r9 -; movq +-0x10(%rbp), %rsi -; movl %esi, +-0x10(%rbp) +; movq +-0x10(%rbp), %r10 +; movl %r10d, +-0x10(%rbp) ; movq %rdx, %rdi -; movq %rcx, %rsi -; movq %r10, %rdx -; movq %r11, %rcx +; movq %rax, %rdx +; movq %rcx, %r10 +; movq %rsi, %rcx +; movq %r10, %rsi ; return_call_known TestCase(%one_stack_arg) (16) tmp=%r11 %rdi=%rdi %rsi=%rsi %rdx=%rdx %rcx=%rcx %r8=%r8 %r9=%r9 ; ; Disassembled: @@ -250,22 +251,23 @@ block0(v0: i32, v1: i32, v2: i32, v3: i32, v4: i32, v5: i32, v6: i32, v7: i32, v ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %r8, %r10 -; movq %r9, %r11 +; movq %r8, %rax +; movq %r9, %rsi ; movq 0x10(%rbp), %r8 ; movq 0x18(%rbp), %r9 -; movq 0x20(%rbp), %rsi -; movl %esi, 0x20(%rbp) +; movq 0x20(%rbp), %r10 +; movl %r10d, 0x20(%rbp) ; movq %rdx, %rdi -; movq %rcx, %rsi -; movq %r10, %rdx -; movq %r11, %rcx +; movq %rax, %rdx +; movq %rcx, %r10 +; movq %rsi, %rcx +; movq %r10, %rsi ; movq %rbp, %rsp ; popq %rbp ; movq (%rsp), %r11 ; movq %r11, 0x10(%rsp) ; addq $0x10, %rsp -; jmp 0x3b ; reloc_external CallPCRel4 %one_stack_arg -4 +; jmp 0x3f ; reloc_external CallPCRel4 %one_stack_arg -4 function %call_zero_stack_args(i32, i32, i32, i32, i32, i32, i32, i32, i8) -> i8 tail { fn0 = colocated %callee_i8(i8) -> i8 tail @@ -313,7 +315,7 @@ block0(v0: i32, v1: i32, v2: i32, v3: i32, v4: i32, v5: i32, v6: i32): ; movq 0x18(%rsp), %r11 ; movq %r11, 8(%rsp) ; block0: -; movq %r9, %r10 +; movq 
%r9, %rax ; movq +-0x10(%rbp), %r9 ; movl %edi, +-0x20(%rbp) ; movl %edi, +-0x18(%rbp) @@ -322,7 +324,7 @@ block0(v0: i32, v1: i32, v2: i32, v3: i32, v4: i32, v5: i32, v6: i32): ; movq %rdx, %rsi ; movq %rcx, %rdx ; movq %r8, %rcx -; movq %r10, %r8 +; movq %rax, %r8 ; return_call_known TestCase(%call_one_stack_arg) (32) tmp=%r11 %rdi=%rdi %rsi=%rsi %rdx=%rdx %rcx=%rcx %r8=%r8 %r9=%r9 ; ; Disassembled: @@ -336,7 +338,7 @@ block0(v0: i32, v1: i32, v2: i32, v3: i32, v4: i32, v5: i32, v6: i32): ; movq 0x18(%rsp), %r11 ; movq %r11, 8(%rsp) ; block1: ; offset 0x1e -; movq %r9, %r10 +; movq %r9, %rax ; movq 0x20(%rbp), %r9 ; movl %edi, 0x10(%rbp) ; movl %edi, 0x18(%rbp) @@ -345,7 +347,7 @@ block0(v0: i32, v1: i32, v2: i32, v3: i32, v4: i32, v5: i32, v6: i32): ; movq %rdx, %rsi ; movq %rcx, %rdx ; movq %r8, %rcx -; movq %r10, %r8 +; movq %rax, %r8 ; movq %rbp, %rsp ; popq %rbp ; jmp 0x46 ; reloc_external CallPCRel4 %call_one_stack_arg -4 @@ -437,60 +439,60 @@ block0: ; movq %r9, +0x40(%rsp) ; movl $0x23, %r9d ; movq %r9, +0x38(%rsp) -; movl $0x28, %eax +; movl $0x28, %ecx +; movq %rcx, +0x30(%rsp) ; movl $0x2d, %r10d ; movl $0x32, %r11d -; movl $0x37, %r13d -; movl $0x3c, %r14d -; movl $0x41, %r15d -; movl $0x46, %ebx -; movl $0x4b, %r12d +; movl $0x37, %ebx +; movl $0x3c, %r12d +; movl $0x41, %r13d +; movl $0x46, %r14d +; movl $0x4b, %r15d ; movl $0x50, %edi ; movl $0x55, %esi -; movq %rsi, +0x30(%rsp) ; movl $0x5a, %edx ; movl $0x5f, %ecx ; movl $0x64, %r8d ; movl $0x69, %r9d -; movl $0x6e, %esi -; movq %rsi, +0x28(%rsp) -; movl $0x73, %esi -; movq %rsi, +0x20(%rsp) -; movl $0x78, %esi -; movq %rsi, +0x18(%rsp) -; movl $0x7d, %esi -; movq %rsi, +0x10(%rsp) -; movl $0x82, %esi -; movq %rsi, +8(%rsp) -; movl $0x87, %esi -; movq %rsi, +(%rsp) +; movl $0x6e, %eax +; movq %rax, +0x28(%rsp) +; movl $0x73, %eax +; movq %rax, +0x20(%rsp) +; movl $0x78, %eax +; movq %rax, +0x18(%rsp) +; movl $0x7d, %eax +; movq %rax, +0x10(%rsp) +; movl $0x82, %eax +; movq %rax, +8(%rsp) +; movl $0x87, %eax +; movq %rax, +(%rsp) +; movq +0x30(%rsp), %rax ; movq %rax, +-0xa0(%rbp) ; movq %r10, +-0x98(%rbp) ; movq %r11, +-0x90(%rbp) -; movq %r13, +-0x88(%rbp) -; movq %r14, +-0x80(%rbp) -; movq %r15, +-0x78(%rbp) -; movq %rbx, +-0x70(%rbp) -; movq %r12, +-0x68(%rbp) +; movq %rbx, +-0x88(%rbp) +; movq %r12, +-0x80(%rbp) +; movq %r13, +-0x78(%rbp) +; movq %r14, +-0x70(%rbp) +; movq %r15, +-0x68(%rbp) ; movq %rdi, +-0x60(%rbp) -; movq +0x30(%rsp), %rdi -; movq %rdi, +-0x58(%rbp) +; movq %rsi, +-0x58(%rbp) ; movq %rdx, +-0x50(%rbp) ; movq %rcx, +-0x48(%rbp) ; movq %r8, +-0x40(%rbp) ; movq %r9, +-0x38(%rbp) -; movq +0x28(%rsp), %rsi -; movq %rsi, +-0x30(%rbp) -; movq +0x20(%rsp), %rsi -; movq %rsi, +-0x28(%rbp) -; movq +0x18(%rsp), %rsi -; movq %rsi, +-0x20(%rbp) -; movq +0x10(%rsp), %rsi -; movq %rsi, +-0x18(%rbp) -; movq +8(%rsp), %rsi -; movq %rsi, +-0x10(%rbp) -; movq +(%rsp), %rsi -; movq %rsi, +-8(%rbp) +; movq +0x28(%rsp), %rax +; movq %rax, +-0x30(%rbp) +; movq +0x20(%rsp), %rax +; movq %rax, +-0x28(%rbp) +; movq +0x18(%rsp), %rax +; movq %rax, +-0x20(%rbp) +; movq +0x10(%rsp), %rax +; movq %rax, +-0x18(%rbp) +; movq +8(%rsp), %rax +; movq %rax, +-0x10(%rbp) +; movq +(%rsp), %rax +; movq %rax, +-8(%rbp) ; load_ext_name %tail_callee_stack_args+0, %r10 ; movq +0x48(%rsp), %rcx ; movq +0x50(%rsp), %rdx @@ -529,60 +531,60 @@ block0: ; movq %r9, 0x40(%rsp) ; movl $0x23, %r9d ; movq %r9, 0x38(%rsp) -; movl $0x28, %eax +; movl $0x28, %ecx +; movq %rcx, 0x30(%rsp) ; movl $0x2d, %r10d ; movl $0x32, %r11d -; movl $0x37, %r13d -; 
movl $0x3c, %r14d -; movl $0x41, %r15d -; movl $0x46, %ebx -; movl $0x4b, %r12d +; movl $0x37, %ebx +; movl $0x3c, %r12d +; movl $0x41, %r13d +; movl $0x46, %r14d +; movl $0x4b, %r15d ; movl $0x50, %edi ; movl $0x55, %esi -; movq %rsi, 0x30(%rsp) ; movl $0x5a, %edx ; movl $0x5f, %ecx ; movl $0x64, %r8d ; movl $0x69, %r9d -; movl $0x6e, %esi -; movq %rsi, 0x28(%rsp) -; movl $0x73, %esi -; movq %rsi, 0x20(%rsp) -; movl $0x78, %esi -; movq %rsi, 0x18(%rsp) -; movl $0x7d, %esi -; movq %rsi, 0x10(%rsp) -; movl $0x82, %esi -; movq %rsi, 8(%rsp) -; movl $0x87, %esi -; movq %rsi, (%rsp) +; movl $0x6e, %eax +; movq %rax, 0x28(%rsp) +; movl $0x73, %eax +; movq %rax, 0x20(%rsp) +; movl $0x78, %eax +; movq %rax, 0x18(%rsp) +; movl $0x7d, %eax +; movq %rax, 0x10(%rsp) +; movl $0x82, %eax +; movq %rax, 8(%rsp) +; movl $0x87, %eax +; movq %rax, (%rsp) +; movq 0x30(%rsp), %rax ; movq %rax, 0x10(%rbp) ; movq %r10, 0x18(%rbp) ; movq %r11, 0x20(%rbp) -; movq %r13, 0x28(%rbp) -; movq %r14, 0x30(%rbp) -; movq %r15, 0x38(%rbp) -; movq %rbx, 0x40(%rbp) -; movq %r12, 0x48(%rbp) +; movq %rbx, 0x28(%rbp) +; movq %r12, 0x30(%rbp) +; movq %r13, 0x38(%rbp) +; movq %r14, 0x40(%rbp) +; movq %r15, 0x48(%rbp) ; movq %rdi, 0x50(%rbp) -; movq 0x30(%rsp), %rdi -; movq %rdi, 0x58(%rbp) +; movq %rsi, 0x58(%rbp) ; movq %rdx, 0x60(%rbp) ; movq %rcx, 0x68(%rbp) ; movq %r8, 0x70(%rbp) ; movq %r9, 0x78(%rbp) -; movq 0x28(%rsp), %rsi -; movq %rsi, 0x80(%rbp) -; movq 0x20(%rsp), %rsi -; movq %rsi, 0x88(%rbp) -; movq 0x18(%rsp), %rsi -; movq %rsi, 0x90(%rbp) -; movq 0x10(%rsp), %rsi -; movq %rsi, 0x98(%rbp) -; movq 8(%rsp), %rsi -; movq %rsi, 0xa0(%rbp) -; movq (%rsp), %rsi -; movq %rsi, 0xa8(%rbp) +; movq 0x28(%rsp), %rax +; movq %rax, 0x80(%rbp) +; movq 0x20(%rsp), %rax +; movq %rax, 0x88(%rbp) +; movq 0x18(%rsp), %rax +; movq %rax, 0x90(%rbp) +; movq 0x10(%rsp), %rax +; movq %rax, 0x98(%rbp) +; movq 8(%rsp), %rax +; movq %rax, 0xa0(%rbp) +; movq (%rsp), %rax +; movq %rax, 0xa8(%rbp) ; movabsq $0, %r10 ; reloc_external Abs8 %tail_callee_stack_args 0 ; movq 0x48(%rsp), %rcx ; movq 0x50(%rsp), %rdx diff --git a/cranelift/filetests/filetests/isa/x64/select-i128.clif b/cranelift/filetests/filetests/isa/x64/select-i128.clif index 6c807e37e14e..7fc54cfb5150 100644 --- a/cranelift/filetests/filetests/isa/x64/select-i128.clif +++ b/cranelift/filetests/filetests/isa/x64/select-i128.clif @@ -17,9 +17,9 @@ block0(v0: i32, v1: i128, v2: i128): ; cmpl $0x2a, %edi ; movq %rcx, %rax ; cmoveq %rsi, %rax -; movq %rdx, %rdi +; movq %rdx, %r10 ; movq %r8, %rdx -; cmoveq %rdi, %rdx +; cmoveq %r10, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -32,9 +32,9 @@ block0(v0: i32, v1: i128, v2: i128): ; cmpl $0x2a, %edi ; movq %rcx, %rax ; cmoveq %rsi, %rax -; movq %rdx, %rdi +; movq %rdx, %r10 ; movq %r8, %rdx -; cmoveq %rdi, %rdx +; cmoveq %r10, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/simd-arith-avx.clif b/cranelift/filetests/filetests/isa/x64/simd-arith-avx.clif index cb5104ada747..38f879f37ab8 100644 --- a/cranelift/filetests/filetests/isa/x64/simd-arith-avx.clif +++ b/cranelift/filetests/filetests/isa/x64/simd-arith-avx.clif @@ -918,10 +918,10 @@ block0(v0: i8x16, v1: i32): ; vpunpcklbw %xmm0, %xmm0, %xmm5 ; vpunpckhbw %xmm0, %xmm0, %xmm7 ; addl $0x8, %edi -; vmovd %edi, %xmm3 -; vpsraw %xmm3, %xmm5, %xmm5 -; vpsraw %xmm3, %xmm7, %xmm7 -; vpacksswb %xmm7, %xmm5, %xmm0 +; vmovd %edi, %xmm1 +; vpsraw %xmm1, %xmm5, %xmm0 +; vpsraw %xmm1, %xmm7, %xmm1 +; vpacksswb %xmm1, %xmm0, %xmm0 ; movq %rbp, %rsp ; popq 
%rbp ; retq @@ -935,10 +935,10 @@ block0(v0: i8x16, v1: i32): ; vpunpcklbw %xmm0, %xmm0, %xmm5 ; vpunpckhbw %xmm0, %xmm0, %xmm7 ; addl $8, %edi -; vmovd %edi, %xmm3 -; vpsraw %xmm3, %xmm5, %xmm5 -; vpsraw %xmm3, %xmm7, %xmm7 -; vpacksswb %xmm7, %xmm5, %xmm0 +; vmovd %edi, %xmm1 +; vpsraw %xmm1, %xmm5, %xmm0 +; vpsraw %xmm1, %xmm7, %xmm1 +; vpacksswb %xmm1, %xmm0, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1345,9 +1345,9 @@ block0(v0: f64x2): ; vxorpd %xmm2, %xmm2, %xmm4 ; vmaxpd %xmm4, %xmm0, %xmm6 ; vminpd (%rip), %xmm6, %xmm0 -; vroundpd $0x3, %xmm0, %xmm2 -; vaddpd (%rip), %xmm2, %xmm5 -; vshufps $0x88, %xmm4, %xmm5, %xmm0 +; vroundpd $0x3, %xmm0, %xmm0 +; vaddpd (%rip), %xmm0, %xmm0 +; vshufps $0x88, %xmm4, %xmm0, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1360,9 +1360,9 @@ block0(v0: f64x2): ; vxorpd %xmm2, %xmm2, %xmm4 ; vmaxpd %xmm4, %xmm0, %xmm6 ; vminpd 0x1c(%rip), %xmm6, %xmm0 -; vroundpd $3, %xmm0, %xmm2 -; vaddpd 0x1e(%rip), %xmm2, %xmm5 -; vshufps $0x88, %xmm4, %xmm5, %xmm0 +; vroundpd $3, %xmm0, %xmm0 +; vaddpd 0x1e(%rip), %xmm0, %xmm0 +; vshufps $0x88, %xmm4, %xmm0, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1384,10 +1384,10 @@ block0(v0: i8x16, v1: i32): ; andq $0x7, %rdi ; vmovd %edi, %xmm5 ; vpsllw %xmm5, %xmm0, %xmm7 -; leaq (%rip), %rsi +; leaq (%rip), %r9 ; shlq $0x4, %rdi -; vmovdqu (%rsi, %rdi), %xmm5 -; vpand %xmm5, %xmm7, %xmm0 +; vmovdqu (%r9, %rdi), %xmm0 +; vpand %xmm0, %xmm7, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1400,17 +1400,16 @@ block0(v0: i8x16, v1: i32): ; andq $7, %rdi ; vmovd %edi, %xmm5 ; vpsllw %xmm5, %xmm0, %xmm7 -; leaq 0x19(%rip), %rsi +; leaq 0x19(%rip), %r9 ; shlq $4, %rdi -; vmovdqu (%rsi, %rdi), %xmm5 -; vpand %xmm5, %xmm7, %xmm0 +; vmovdqu (%r9, %rdi), %xmm0 +; vpand %xmm0, %xmm7, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq ; addb %al, (%rax) ; addb %al, (%rax) ; addb %al, (%rax) -; addb %bh, %bh function %i8x16_shl_imm(i8x16) -> i8x16 { block0(v0: i8x16): @@ -1623,9 +1622,9 @@ block0(v0: i8x16, v1: i32): ; andq $0x7, %rdi ; vmovd %edi, %xmm5 ; vpsrlw %xmm5, %xmm0, %xmm7 -; leaq (%rip), %rsi +; leaq (%rip), %r9 ; shlq $0x4, %rdi -; vpand (%rsi, %rdi), %xmm7, %xmm0 +; vpand (%r9, %rdi), %xmm7, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1638,9 +1637,9 @@ block0(v0: i8x16, v1: i32): ; andq $7, %rdi ; vmovd %edi, %xmm5 ; vpsrlw %xmm5, %xmm0, %xmm7 -; leaq 0x19(%rip), %rsi +; leaq 0x19(%rip), %r9 ; shlq $4, %rdi -; vpand (%rsi, %rdi), %xmm7, %xmm0 +; vpand (%r9, %rdi), %xmm7, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -1649,7 +1648,6 @@ block0(v0: i8x16, v1: i32): ; addb %al, (%rax) ; addb %al, (%rax) ; addb %al, (%rax) -; addb %bh, %bh function %i8x16_ushr_imm(i8x16) -> i8x16 { block0(v0: i8x16): diff --git a/cranelift/filetests/filetests/isa/x64/simd-bitwise-avx.clif b/cranelift/filetests/filetests/isa/x64/simd-bitwise-avx.clif index ddbce91191a0..bbb426694f0c 100644 --- a/cranelift/filetests/filetests/isa/x64/simd-bitwise-avx.clif +++ b/cranelift/filetests/filetests/isa/x64/simd-bitwise-avx.clif @@ -40,13 +40,13 @@ block0(v0: i64): ; movq %rsp, %rbp ; block0: ; uninit %xmm0 -; vxorps %xmm0, %xmm0, %xmm2 +; vxorps %xmm0, %xmm0, %xmm0 ; vmovss (%rdi), %xmm1 -; movl $0x80000000, %r8d -; vmovd %r8d, %xmm7 -; vandnps %xmm2, %xmm7, %xmm2 -; vandps %xmm1, %xmm7, %xmm3 -; vorps %xmm3, %xmm2, %xmm0 +; movl $0x80000000, %esi +; vmovd %esi, %xmm7 +; vandnps %xmm0, %xmm7, %xmm0 +; vandps %xmm1, %xmm7, %xmm1 +; vorps %xmm1, %xmm0, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -56,13 +56,13 @@ block0(v0: i64): ; pushq %rbp ; movq 
%rsp, %rbp ; block1: ; offset 0x4 -; vxorps %xmm0, %xmm0, %xmm2 +; vxorps %xmm0, %xmm0, %xmm0 ; vmovss (%rdi), %xmm1 -; movl $0x80000000, %r8d -; vmovd %r8d, %xmm7 -; vandnps %xmm2, %xmm7, %xmm2 -; vandps %xmm1, %xmm7, %xmm3 -; vorps %xmm3, %xmm2, %xmm0 +; movl $0x80000000, %esi +; vmovd %esi, %xmm7 +; vandnps %xmm0, %xmm7, %xmm0 +; vandps %xmm1, %xmm7, %xmm1 +; vorps %xmm1, %xmm0, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/simd-bitwise-compile.clif b/cranelift/filetests/filetests/isa/x64/simd-bitwise-compile.clif index ceea1d5c4b7c..09fb7d7bf6d7 100644 --- a/cranelift/filetests/filetests/isa/x64/simd-bitwise-compile.clif +++ b/cranelift/filetests/filetests/isa/x64/simd-bitwise-compile.clif @@ -328,10 +328,10 @@ block0(v0: i32): ; andq $0x7, %rdi ; movd %edi, %xmm5 ; psllw %xmm5, %xmm0 -; leaq (%rip), %rsi +; leaq (%rip), %r9 ; shlq $0x4, %rdi -; movdqu (%rsi, %rdi), %xmm5 -; pand %xmm5, %xmm0 +; movdqu (%r9, %rdi), %xmm1 +; pand %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -345,10 +345,10 @@ block0(v0: i32): ; andq $7, %rdi ; movd %edi, %xmm5 ; psllw %xmm5, %xmm0 -; leaq 0x31(%rip), %rsi +; leaq 0x31(%rip), %r9 ; shlq $4, %rdi -; movdqu (%rsi, %rdi), %xmm5 -; pand %xmm5, %xmm0 +; movdqu (%r9, %rdi), %xmm1 +; pand %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -359,11 +359,9 @@ block0(v0: i32): ; addb %al, (%rax) ; addb %al, (%rax) ; addb %al, (%rax) -; addb %al, (%rax) -; addl %eax, (%rdx) -; addl 0x9080706(, %rax), %eax -; orb (%rbx), %cl -; orb $0xd, %al +; addb %al, (%rcx) +; addb (%rbx), %al +; addb $5, %al function %ishl_i8x16_imm(i8x16) -> i8x16 { block0(v0: i8x16): @@ -606,9 +604,9 @@ block0(v0: i32): ; punpcklbw %xmm1, %xmm0 ; punpckhbw %xmm1, %xmm1 ; addl $0x8, %edi -; movd %edi, %xmm3 -; psraw %xmm3, %xmm0 -; psraw %xmm3, %xmm1 +; movd %edi, %xmm2 +; psraw %xmm2, %xmm0 +; psraw %xmm2, %xmm1 ; packsswb %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp @@ -625,9 +623,9 @@ block0(v0: i32): ; punpcklbw %xmm1, %xmm0 ; punpckhbw %xmm1, %xmm1 ; addl $8, %edi -; movd %edi, %xmm3 -; psraw %xmm3, %xmm0 -; psraw %xmm3, %xmm1 +; movd %edi, %xmm2 +; psraw %xmm2, %xmm0 +; psraw %xmm2, %xmm1 ; packsswb %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp @@ -655,11 +653,11 @@ block0(v0: i8x16, v1: i32): ; movdqa %xmm0, %xmm6 ; punpcklbw %xmm0, %xmm6 ; punpckhbw %xmm0, %xmm0 -; movdqa %xmm0, %xmm4 +; movdqa %xmm0, %xmm1 ; movdqa %xmm6, %xmm0 ; psraw $0xb, %xmm0 -; psraw $0xb, %xmm4 -; packsswb %xmm4, %xmm0 +; psraw $0xb, %xmm1 +; packsswb %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -672,11 +670,11 @@ block0(v0: i8x16, v1: i32): ; movdqa %xmm0, %xmm6 ; punpcklbw %xmm0, %xmm6 ; punpckhbw %xmm0, %xmm0 -; movdqa %xmm0, %xmm4 +; movdqa %xmm0, %xmm1 ; movdqa %xmm6, %xmm0 ; psraw $0xb, %xmm0 -; psraw $0xb, %xmm4 -; packsswb %xmm4, %xmm0 +; psraw $0xb, %xmm1 +; packsswb %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -890,9 +888,9 @@ block0(v0: i64x2, v1: i32): ; movdqu (%rip), %xmm1 ; psrlq %xmm5, %xmm1 ; psrlq %xmm5, %xmm0 -; movdqa %xmm0, %xmm7 +; movdqa %xmm0, %xmm2 ; movdqa %xmm1, %xmm0 -; pxor %xmm7, %xmm0 +; pxor %xmm2, %xmm0 ; psubq %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp @@ -908,9 +906,9 @@ block0(v0: i64x2, v1: i32): ; movdqu 0x2b(%rip), %xmm1 ; psrlq %xmm5, %xmm1 ; psrlq %xmm5, %xmm0 -; movdqa %xmm0, %xmm7 +; movdqa %xmm0, %xmm2 ; movdqa %xmm1, %xmm0 -; pxor %xmm7, %xmm0 +; pxor %xmm2, %xmm0 ; psubq %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/simd-cmp-avx.clif 
b/cranelift/filetests/filetests/isa/x64/simd-cmp-avx.clif index ac9ecf9f71a8..053735ef21fb 100644 --- a/cranelift/filetests/filetests/isa/x64/simd-cmp-avx.clif +++ b/cranelift/filetests/filetests/isa/x64/simd-cmp-avx.clif @@ -215,9 +215,9 @@ block0(v0: f32x4, v1: f32x4): ; vminps %xmm0, %xmm1, %xmm5 ; vorps %xmm5, %xmm3, %xmm7 ; vcmpunordps %xmm5, %xmm7, %xmm1 -; vorps %xmm1, %xmm7, %xmm3 -; vpsrld $0xa, %xmm1, %xmm5 -; vandnps %xmm3, %xmm5, %xmm0 +; vorps %xmm1, %xmm7, %xmm0 +; vpsrld $0xa, %xmm1, %xmm1 +; vandnps %xmm0, %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -231,9 +231,9 @@ block0(v0: f32x4, v1: f32x4): ; vminps %xmm0, %xmm1, %xmm5 ; vorps %xmm5, %xmm3, %xmm7 ; vcmpunordps %xmm5, %xmm7, %xmm1 -; vorps %xmm1, %xmm7, %xmm3 -; vpsrld $0xa, %xmm1, %xmm5 -; vandnps %xmm3, %xmm5, %xmm0 +; vorps %xmm1, %xmm7, %xmm0 +; vpsrld $0xa, %xmm1, %xmm1 +; vandnps %xmm0, %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -252,9 +252,9 @@ block0(v0: f64x2, v1: f64x2): ; vminpd %xmm0, %xmm1, %xmm5 ; vorpd %xmm5, %xmm3, %xmm7 ; vcmpunordpd %xmm5, %xmm3, %xmm1 -; vorpd %xmm1, %xmm7, %xmm3 -; vpsrlq $0xd, %xmm1, %xmm5 -; vandnpd %xmm3, %xmm5, %xmm0 +; vorpd %xmm1, %xmm7, %xmm0 +; vpsrlq $0xd, %xmm1, %xmm1 +; vandnpd %xmm0, %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -268,9 +268,9 @@ block0(v0: f64x2, v1: f64x2): ; vminpd %xmm0, %xmm1, %xmm5 ; vorpd %xmm5, %xmm3, %xmm7 ; vcmpunordpd %xmm5, %xmm3, %xmm1 -; vorpd %xmm1, %xmm7, %xmm3 -; vpsrlq $0xd, %xmm1, %xmm5 -; vandnpd %xmm3, %xmm5, %xmm0 +; vorpd %xmm1, %xmm7, %xmm0 +; vpsrlq $0xd, %xmm1, %xmm1 +; vandnpd %xmm0, %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -289,10 +289,10 @@ block0(v0: f32x4, v1: f32x4): ; vmaxps %xmm0, %xmm1, %xmm5 ; vxorps %xmm5, %xmm3, %xmm7 ; vorps %xmm7, %xmm3, %xmm1 -; vsubps %xmm7, %xmm1, %xmm3 -; vcmpunordps %xmm1, %xmm1, %xmm5 -; vpsrld $0xa, %xmm5, %xmm7 -; vandnps %xmm3, %xmm7, %xmm0 +; vsubps %xmm7, %xmm1, %xmm0 +; vcmpunordps %xmm1, %xmm1, %xmm1 +; vpsrld $0xa, %xmm1, %xmm1 +; vandnps %xmm0, %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -306,10 +306,10 @@ block0(v0: f32x4, v1: f32x4): ; vmaxps %xmm0, %xmm1, %xmm5 ; vxorps %xmm5, %xmm3, %xmm7 ; vorps %xmm7, %xmm3, %xmm1 -; vsubps %xmm7, %xmm1, %xmm3 -; vcmpunordps %xmm1, %xmm1, %xmm5 -; vpsrld $0xa, %xmm5, %xmm7 -; vandnps %xmm3, %xmm7, %xmm0 +; vsubps %xmm7, %xmm1, %xmm0 +; vcmpunordps %xmm1, %xmm1, %xmm1 +; vpsrld $0xa, %xmm1, %xmm1 +; vandnps %xmm0, %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -328,10 +328,10 @@ block0(v0: f64x2, v1: f64x2): ; vmaxpd %xmm0, %xmm1, %xmm5 ; vxorpd %xmm5, %xmm3, %xmm7 ; vorpd %xmm7, %xmm3, %xmm1 -; vsubpd %xmm7, %xmm1, %xmm3 -; vcmpunordpd %xmm1, %xmm1, %xmm5 -; vpsrlq $0xd, %xmm5, %xmm7 -; vandnpd %xmm3, %xmm7, %xmm0 +; vsubpd %xmm7, %xmm1, %xmm0 +; vcmpunordpd %xmm1, %xmm1, %xmm1 +; vpsrlq $0xd, %xmm1, %xmm1 +; vandnpd %xmm0, %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -345,10 +345,10 @@ block0(v0: f64x2, v1: f64x2): ; vmaxpd %xmm0, %xmm1, %xmm5 ; vxorpd %xmm5, %xmm3, %xmm7 ; vorpd %xmm7, %xmm3, %xmm1 -; vsubpd %xmm7, %xmm1, %xmm3 -; vcmpunordpd %xmm1, %xmm1, %xmm5 -; vpsrlq $0xd, %xmm5, %xmm7 -; vandnpd %xmm3, %xmm7, %xmm0 +; vsubpd %xmm7, %xmm1, %xmm0 +; vcmpunordpd %xmm1, %xmm1, %xmm1 +; vpsrlq $0xd, %xmm1, %xmm1 +; vandnpd %xmm0, %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/simd-float-min-max.clif b/cranelift/filetests/filetests/isa/x64/simd-float-min-max.clif index 95b440bb00ce..4aa95a740a14 100644 --- 
a/cranelift/filetests/filetests/isa/x64/simd-float-min-max.clif +++ b/cranelift/filetests/filetests/isa/x64/simd-float-min-max.clif @@ -13,17 +13,17 @@ block0(v0: i64, v1: f32x4): ; movq %rsp, %rbp ; block0: ; movups (%rdi), %xmm4 -; movdqa %xmm0, %xmm6 +; movdqa %xmm0, %xmm1 ; maxps %xmm4, %xmm0 -; maxps %xmm6, %xmm4 +; maxps %xmm1, %xmm4 +; movdqa %xmm0, %xmm2 +; xorps %xmm4, %xmm2 +; orps %xmm2, %xmm0 ; movdqa %xmm0, %xmm1 -; xorps %xmm4, %xmm1 -; orps %xmm1, %xmm0 -; movdqa %xmm0, %xmm4 -; subps %xmm1, %xmm4 +; subps %xmm2, %xmm1 ; cmpunordps %xmm0, %xmm0 ; psrld $0xa, %xmm0 -; andnps %xmm4, %xmm0 +; andnps %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -34,17 +34,17 @@ block0(v0: i64, v1: f32x4): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movups (%rdi), %xmm4 ; trap: heap_oob -; movdqa %xmm0, %xmm6 +; movdqa %xmm0, %xmm1 ; maxps %xmm4, %xmm0 -; maxps %xmm6, %xmm4 +; maxps %xmm1, %xmm4 +; movdqa %xmm0, %xmm2 +; xorps %xmm4, %xmm2 +; orps %xmm2, %xmm0 ; movdqa %xmm0, %xmm1 -; xorps %xmm4, %xmm1 -; orps %xmm1, %xmm0 -; movdqa %xmm0, %xmm4 -; subps %xmm1, %xmm4 +; subps %xmm2, %xmm1 ; cmpunordps %xmm0, %xmm0 ; psrld $0xa, %xmm0 -; andnps %xmm4, %xmm0 +; andnps %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -105,17 +105,17 @@ block0(v0: i64, v1: f64x2): ; movq %rsp, %rbp ; block0: ; movupd (%rdi), %xmm4 -; movdqa %xmm0, %xmm6 +; movdqa %xmm0, %xmm1 ; maxpd %xmm4, %xmm0 -; maxpd %xmm6, %xmm4 +; maxpd %xmm1, %xmm4 +; movdqa %xmm0, %xmm2 +; xorpd %xmm4, %xmm2 +; orpd %xmm2, %xmm0 ; movdqa %xmm0, %xmm1 -; xorpd %xmm4, %xmm1 -; orpd %xmm1, %xmm0 -; movdqa %xmm0, %xmm4 -; subpd %xmm1, %xmm4 +; subpd %xmm2, %xmm1 ; cmpunordpd %xmm0, %xmm0 ; psrlq $0xd, %xmm0 -; andnpd %xmm4, %xmm0 +; andnpd %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -126,17 +126,17 @@ block0(v0: i64, v1: f64x2): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movupd (%rdi), %xmm4 ; trap: heap_oob -; movdqa %xmm0, %xmm6 +; movdqa %xmm0, %xmm1 ; maxpd %xmm4, %xmm0 -; maxpd %xmm6, %xmm4 +; maxpd %xmm1, %xmm4 +; movdqa %xmm0, %xmm2 +; xorpd %xmm4, %xmm2 +; orpd %xmm2, %xmm0 ; movdqa %xmm0, %xmm1 -; xorpd %xmm4, %xmm1 -; orpd %xmm1, %xmm0 -; movdqa %xmm0, %xmm4 -; subpd %xmm1, %xmm4 +; subpd %xmm2, %xmm1 ; cmpunordpd %xmm0, %xmm0 ; psrlq $0xd, %xmm0 -; andnpd %xmm4, %xmm0 +; andnpd %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -153,15 +153,15 @@ block0(v0: i64, v1: f64x2): ; movq %rsp, %rbp ; block0: ; movupd (%rdi), %xmm4 -; movdqa %xmm0, %xmm5 +; movdqa %xmm0, %xmm1 ; minpd %xmm4, %xmm0 -; minpd %xmm5, %xmm4 -; movdqa %xmm0, %xmm2 -; orpd %xmm4, %xmm2 +; minpd %xmm1, %xmm4 +; movdqa %xmm0, %xmm1 +; orpd %xmm4, %xmm1 ; cmpunordpd %xmm4, %xmm0 -; orpd %xmm0, %xmm2 +; orpd %xmm0, %xmm1 ; psrlq $0xd, %xmm0 -; andnpd %xmm2, %xmm0 +; andnpd %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -172,15 +172,15 @@ block0(v0: i64, v1: f64x2): ; movq %rsp, %rbp ; block1: ; offset 0x4 ; movupd (%rdi), %xmm4 ; trap: heap_oob -; movdqa %xmm0, %xmm5 +; movdqa %xmm0, %xmm1 ; minpd %xmm4, %xmm0 -; minpd %xmm5, %xmm4 -; movdqa %xmm0, %xmm2 -; orpd %xmm4, %xmm2 +; minpd %xmm1, %xmm4 +; movdqa %xmm0, %xmm1 +; orpd %xmm4, %xmm1 ; cmpunordpd %xmm4, %xmm0 -; orpd %xmm0, %xmm2 +; orpd %xmm0, %xmm1 ; psrlq $0xd, %xmm0 -; andnpd %xmm2, %xmm0 +; andnpd %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/simd-i64x2-shift-avx512.clif b/cranelift/filetests/filetests/isa/x64/simd-i64x2-shift-avx512.clif index 3facc4a2859b..99541018a29d 100644 --- 
a/cranelift/filetests/filetests/isa/x64/simd-i64x2-shift-avx512.clif +++ b/cranelift/filetests/filetests/isa/x64/simd-i64x2-shift-avx512.clif @@ -14,9 +14,9 @@ block0(v0: i64x2, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movq %rdi, %r9 -; andq $0x3f, %r9 -; vmovd %r9d, %xmm1 +; movq %rdi, %rsi +; andq $0x3f, %rsi +; vmovd %esi, %xmm1 ; vpsraq %xmm1, %xmm0, %xmm0 ; andq $0x3f, %rdi ; vmovd %edi, %xmm1 @@ -30,9 +30,9 @@ block0(v0: i64x2, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movq %rdi, %r9 -; andq $0x3f, %r9 -; vmovd %r9d, %xmm1 +; movq %rdi, %rsi +; andq $0x3f, %rsi +; vmovd %esi, %xmm1 ; vpsraq %xmm1, %xmm0, %xmm0 ; andq $0x3f, %rdi ; vmovd %edi, %xmm1 diff --git a/cranelift/filetests/filetests/isa/x64/simd-lane-access-compile.clif b/cranelift/filetests/filetests/isa/x64/simd-lane-access-compile.clif index 7940a0c6feac..0d1915f97895 100644 --- a/cranelift/filetests/filetests/isa/x64/simd-lane-access-compile.clif +++ b/cranelift/filetests/filetests/isa/x64/simd-lane-access-compile.clif @@ -188,8 +188,8 @@ block0: ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movl $0xffff, %ecx -; movd %ecx, %xmm1 +; movl $0xffff, %esi +; movd %esi, %xmm1 ; pshuflw $0x0, %xmm1, %xmm3 ; pshufd $0x0, %xmm3, %xmm0 ; movq %rbp, %rsp @@ -201,8 +201,8 @@ block0: ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl $0xffff, %ecx -; movd %ecx, %xmm1 +; movl $0xffff, %esi +; movd %esi, %xmm1 ; pshuflw $0, %xmm1, %xmm3 ; pshufd $0, %xmm3, %xmm0 ; movq %rbp, %rsp diff --git a/cranelift/filetests/filetests/isa/x64/simd-widen-mul.clif b/cranelift/filetests/filetests/isa/x64/simd-widen-mul.clif index 1044529cb88e..9d0d14824691 100644 --- a/cranelift/filetests/filetests/isa/x64/simd-widen-mul.clif +++ b/cranelift/filetests/filetests/isa/x64/simd-widen-mul.clif @@ -51,9 +51,9 @@ block0(v0: i16x8, v1: i16x8): ; movdqa %xmm0, %xmm5 ; pmullw %xmm1, %xmm5 ; pmulhw %xmm1, %xmm0 -; movdqa %xmm0, %xmm2 +; movdqa %xmm0, %xmm1 ; movdqa %xmm5, %xmm0 -; punpckhwd %xmm2, %xmm0 +; punpckhwd %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -66,9 +66,9 @@ block0(v0: i16x8, v1: i16x8): ; movdqa %xmm0, %xmm5 ; pmullw %xmm1, %xmm5 ; pmulhw %xmm1, %xmm0 -; movdqa %xmm0, %xmm2 +; movdqa %xmm0, %xmm1 ; movdqa %xmm5, %xmm0 -; punpckhwd %xmm2, %xmm0 +; punpckhwd %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -150,9 +150,9 @@ block0(v0: i16x8, v1: i16x8): ; movdqa %xmm0, %xmm5 ; pmullw %xmm1, %xmm5 ; pmulhw %xmm1, %xmm0 -; movdqa %xmm0, %xmm2 +; movdqa %xmm0, %xmm1 ; movdqa %xmm5, %xmm0 -; punpcklwd %xmm2, %xmm0 +; punpcklwd %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -165,9 +165,9 @@ block0(v0: i16x8, v1: i16x8): ; movdqa %xmm0, %xmm5 ; pmullw %xmm1, %xmm5 ; pmulhw %xmm1, %xmm0 -; movdqa %xmm0, %xmm2 +; movdqa %xmm0, %xmm1 ; movdqa %xmm5, %xmm0 -; punpcklwd %xmm2, %xmm0 +; punpcklwd %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -255,9 +255,9 @@ block0(v0: i16x8, v1: i16x8): ; movdqa %xmm0, %xmm5 ; pmullw %xmm1, %xmm5 ; pmulhuw %xmm1, %xmm0 -; movdqa %xmm0, %xmm2 +; movdqa %xmm0, %xmm1 ; movdqa %xmm5, %xmm0 -; punpckhwd %xmm2, %xmm0 +; punpckhwd %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -270,9 +270,9 @@ block0(v0: i16x8, v1: i16x8): ; movdqa %xmm0, %xmm5 ; pmullw %xmm1, %xmm5 ; pmulhuw %xmm1, %xmm0 -; movdqa %xmm0, %xmm2 +; movdqa %xmm0, %xmm1 ; movdqa %xmm5, %xmm0 -; punpckhwd %xmm2, %xmm0 +; punpckhwd %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -354,9 +354,9 @@ block0(v0: i16x8, v1: i16x8): ; movdqa %xmm0, %xmm5 ; pmullw %xmm1, %xmm5 ; pmulhuw %xmm1, 
%xmm0 -; movdqa %xmm0, %xmm2 +; movdqa %xmm0, %xmm1 ; movdqa %xmm5, %xmm0 -; punpcklwd %xmm2, %xmm0 +; punpcklwd %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq @@ -369,9 +369,9 @@ block0(v0: i16x8, v1: i16x8): ; movdqa %xmm0, %xmm5 ; pmullw %xmm1, %xmm5 ; pmulhuw %xmm1, %xmm0 -; movdqa %xmm0, %xmm2 +; movdqa %xmm0, %xmm1 ; movdqa %xmm5, %xmm0 -; punpcklwd %xmm2, %xmm0 +; punpcklwd %xmm1, %xmm0 ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/sink-load-store-of-bitwise-op-on-float.clif b/cranelift/filetests/filetests/isa/x64/sink-load-store-of-bitwise-op-on-float.clif index cfc9d780ce23..a5a3adcd53f6 100644 --- a/cranelift/filetests/filetests/isa/x64/sink-load-store-of-bitwise-op-on-float.clif +++ b/cranelift/filetests/filetests/isa/x64/sink-load-store-of-bitwise-op-on-float.clif @@ -13,8 +13,8 @@ block0(v0: i64, v1: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movd %xmm0, %ecx -; orl %ecx, (%rdi) +; movd %xmm0, %esi +; orl %esi, (%rdi) ; movq %rbp, %rsp ; popq %rbp ; retq @@ -24,8 +24,8 @@ block0(v0: i64, v1: f32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movd %xmm0, %ecx -; orl %ecx, (%rdi) ; trap: heap_oob +; movd %xmm0, %esi +; orl %esi, (%rdi) ; trap: heap_oob ; movq %rbp, %rsp ; popq %rbp ; retq @@ -42,8 +42,8 @@ block0(v0: i64, v1: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movd %xmm0, %ecx -; orl %ecx, (%rdi) +; movd %xmm0, %esi +; orl %esi, (%rdi) ; movq %rbp, %rsp ; popq %rbp ; retq @@ -53,8 +53,8 @@ block0(v0: i64, v1: f32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movd %xmm0, %ecx -; orl %ecx, (%rdi) ; trap: heap_oob +; movd %xmm0, %esi +; orl %esi, (%rdi) ; trap: heap_oob ; movq %rbp, %rsp ; popq %rbp ; retq @@ -71,8 +71,8 @@ block0(v0: i64, v1: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movd %xmm0, %ecx -; andl %ecx, (%rdi) +; movd %xmm0, %esi +; andl %esi, (%rdi) ; movq %rbp, %rsp ; popq %rbp ; retq @@ -82,8 +82,8 @@ block0(v0: i64, v1: f32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movd %xmm0, %ecx -; andl %ecx, (%rdi) ; trap: heap_oob +; movd %xmm0, %esi +; andl %esi, (%rdi) ; trap: heap_oob ; movq %rbp, %rsp ; popq %rbp ; retq @@ -100,8 +100,8 @@ block0(v0: i64, v1: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movd %xmm0, %ecx -; andl %ecx, (%rdi) +; movd %xmm0, %esi +; andl %esi, (%rdi) ; movq %rbp, %rsp ; popq %rbp ; retq @@ -111,8 +111,8 @@ block0(v0: i64, v1: f32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movd %xmm0, %ecx -; andl %ecx, (%rdi) ; trap: heap_oob +; movd %xmm0, %esi +; andl %esi, (%rdi) ; trap: heap_oob ; movq %rbp, %rsp ; popq %rbp ; retq @@ -129,8 +129,8 @@ block0(v0: i64, v1: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movd %xmm0, %ecx -; xorl %ecx, (%rdi) +; movd %xmm0, %esi +; xorl %esi, (%rdi) ; movq %rbp, %rsp ; popq %rbp ; retq @@ -140,8 +140,8 @@ block0(v0: i64, v1: f32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movd %xmm0, %ecx -; xorl %ecx, (%rdi) ; trap: heap_oob +; movd %xmm0, %esi +; xorl %esi, (%rdi) ; trap: heap_oob ; movq %rbp, %rsp ; popq %rbp ; retq @@ -158,8 +158,8 @@ block0(v0: i64, v1: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movd %xmm0, %ecx -; xorl %ecx, (%rdi) +; movd %xmm0, %esi +; xorl %esi, (%rdi) ; movq %rbp, %rsp ; popq %rbp ; retq @@ -169,8 +169,8 @@ block0(v0: i64, v1: f32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movd %xmm0, %ecx -; xorl %ecx, (%rdi) ; trap: heap_oob +; movd %xmm0, %esi +; xorl %esi, (%rdi) ; trap: heap_oob ; movq %rbp, %rsp ; popq 
%rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/srem.clif b/cranelift/filetests/filetests/isa/x64/srem.clif index c1fce4858743..865caa7496ca 100644 --- a/cranelift/filetests/filetests/isa/x64/srem.clif +++ b/cranelift/filetests/filetests/isa/x64/srem.clif @@ -154,8 +154,8 @@ block0(v0: i8): ; block0: ; movq %rdi, %rax ; cbtw ;; implicit: %ax -; movl $0x11, %edx -; idivb %dl ;; implicit: %ax, trap=254 +; movl $0x11, %esi +; idivb %sil ;; implicit: %ax, trap=254 ; shrq $0x8, %rax ; movq %rbp, %rsp ; popq %rbp @@ -168,8 +168,8 @@ block0(v0: i8): ; block1: ; offset 0x4 ; movq %rdi, %rax ; cbtw -; movl $0x11, %edx -; idivb %dl ; trap: int_divz +; movl $0x11, %esi +; idivb %sil ; trap: int_divz ; shrq $8, %rax ; movq %rbp, %rsp ; popq %rbp @@ -188,8 +188,8 @@ block0(v0: i16): ; block0: ; movq %rdi, %rax ; cwtd ;; implicit: %dx, %ax -; movl $0x11, %r8d -; idivw %r8w ;; implicit: %ax, %dx, trap=254 +; movl $0x11, %esi +; idivw %si ;; implicit: %ax, %dx, trap=254 ; movq %rdx, %rax ; movq %rbp, %rsp ; popq %rbp @@ -202,8 +202,8 @@ block0(v0: i16): ; block1: ; offset 0x4 ; movq %rdi, %rax ; cwtd -; movl $0x11, %r8d -; idivw %r8w ; trap: int_divz +; movl $0x11, %esi +; idivw %si ; trap: int_divz ; movq %rdx, %rax ; movq %rbp, %rsp ; popq %rbp @@ -222,8 +222,8 @@ block0(v0: i32): ; block0: ; movq %rdi, %rax ; cltd ;; implicit: %edx, %eax -; movl $0x11, %r8d -; idivl %r8d ;; implicit: %eax, %edx, trap=254 +; movl $0x11, %esi +; idivl %esi ;; implicit: %eax, %edx, trap=254 ; movq %rdx, %rax ; movq %rbp, %rsp ; popq %rbp @@ -236,8 +236,8 @@ block0(v0: i32): ; block1: ; offset 0x4 ; movq %rdi, %rax ; cltd -; movl $0x11, %r8d -; idivl %r8d ; trap: int_divz +; movl $0x11, %esi +; idivl %esi ; trap: int_divz ; movq %rdx, %rax ; movq %rbp, %rsp ; popq %rbp @@ -256,8 +256,8 @@ block0(v0: i64): ; block0: ; movq %rdi, %rax ; cqto ;; implicit: %rdx, %rax -; movl $0x11, %r8d -; idivq %r8 ;; implicit: %rax, %rdx, trap=254 +; movl $0x11, %esi +; idivq %rsi ;; implicit: %rax, %rdx, trap=254 ; movq %rdx, %rax ; movq %rbp, %rsp ; popq %rbp @@ -270,8 +270,8 @@ block0(v0: i64): ; block1: ; offset 0x4 ; movq %rdi, %rax ; cqto -; movl $0x11, %r8d -; idivq %r8 ; trap: int_divz +; movl $0x11, %esi +; idivq %rsi ; trap: int_divz ; movq %rdx, %rax ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/sshr.clif b/cranelift/filetests/filetests/isa/x64/sshr.clif index 2ca381cc54cb..882e0fa246d1 100644 --- a/cranelift/filetests/filetests/isa/x64/sshr.clif +++ b/cranelift/filetests/filetests/isa/x64/sshr.clif @@ -19,25 +19,25 @@ block0(v0: i128, v1: i8): ; block0: ; movzbq %dl, %rcx ; shrq %cl, %rdi -; movq %rsi, %r10 -; sarq %cl, %r10 -; movq %rcx, %r11 +; movq %rsi, %r8 +; sarq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r11, %rdx -; subq %rdx, %rcx -; movq %rsi, %r11 -; shlq %cl, %r11 -; uninit %rax -; xorq %rax, %rax -; testq $0x7f, %rdx -; cmoveq %rax, %r11 -; orq %r11, %rdi +; movq %rax, %r9 +; subq %r9, %rcx +; movq %rsi, %rax +; shlq %cl, %rax +; uninit %rdx +; xorq %rdx, %rdx +; testq $0x7f, %r9 +; cmoveq %rdx, %rax +; orq %rax, %rdi ; sarq $0x3f, %rsi -; testq $0x40, %rdx -; movq %r10, %rax +; testq $0x40, %r9 +; movq %r8, %rax ; cmoveq %rdi, %rax ; movq %rsi, %rdx -; cmoveq %r10, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -49,24 +49,24 @@ block0(v0: i128, v1: i8): ; block1: ; offset 0x4 ; movzbq %dl, %rcx ; shrq %cl, %rdi -; movq %rsi, %r10 -; sarq %cl, %r10 -; movq %rcx, %r11 +; movq %rsi, %r8 +; sarq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, 
%ecx -; movq %r11, %rdx -; subq %rdx, %rcx -; movq %rsi, %r11 -; shlq %cl, %r11 -; xorq %rax, %rax -; testq $0x7f, %rdx -; cmoveq %rax, %r11 -; orq %r11, %rdi +; movq %rax, %r9 +; subq %r9, %rcx +; movq %rsi, %rax +; shlq %cl, %rax +; xorq %rdx, %rdx +; testq $0x7f, %r9 +; cmoveq %rdx, %rax +; orq %rax, %rdi ; sarq $0x3f, %rsi -; testq $0x40, %rdx -; movq %r10, %rax +; testq $0x40, %r9 +; movq %r8, %rax ; cmoveq %rdi, %rax ; movq %rsi, %rdx -; cmoveq %r10, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -81,28 +81,28 @@ block0(v0: i128, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r11 ; shrq %cl, %rdi -; movq %rsi, %r9 -; sarq %cl, %r9 -; movq %rcx, %r11 +; movq %rsi, %r8 +; sarq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r11, %rax -; subq %rax, %rcx -; movq %rsi, %r10 -; shlq %cl, %r10 -; uninit %r11 -; xorq %r11, %r11 -; testq $0x7f, %rax -; cmoveq %r11, %r10 -; orq %r10, %rdi +; movq %rax, %r9 +; subq %r9, %rcx +; movq %rsi, %rax +; shlq %cl, %rax +; uninit %rdx +; xorq %rdx, %rdx +; testq $0x7f, %r9 +; cmoveq %rdx, %rax +; orq %rax, %rdi ; sarq $0x3f, %rsi -; testq $0x40, %rax -; movq %r9, %rax +; testq $0x40, %r9 +; movq %r8, %rax ; cmoveq %rdi, %rax ; movq %rsi, %rdx -; cmoveq %r9, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -112,27 +112,27 @@ block0(v0: i128, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r11 ; shrq %cl, %rdi -; movq %rsi, %r9 -; sarq %cl, %r9 -; movq %rcx, %r11 +; movq %rsi, %r8 +; sarq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r11, %rax -; subq %rax, %rcx -; movq %rsi, %r10 -; shlq %cl, %r10 -; xorq %r11, %r11 -; testq $0x7f, %rax -; cmoveq %r11, %r10 -; orq %r10, %rdi +; movq %rax, %r9 +; subq %r9, %rcx +; movq %rsi, %rax +; shlq %cl, %rax +; xorq %rdx, %rdx +; testq $0x7f, %r9 +; cmoveq %rdx, %rax +; orq %rax, %rdi ; sarq $0x3f, %rsi -; testq $0x40, %rax -; movq %r9, %rax +; testq $0x40, %r9 +; movq %r8, %rax ; cmoveq %rdi, %rax ; movq %rsi, %rdx -; cmoveq %r9, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -147,28 +147,28 @@ block0(v0: i128, v1: i32): ; pushq %rbp ; movq %rsp, %rbp ; block0: +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r11 ; shrq %cl, %rdi -; movq %rsi, %r9 -; sarq %cl, %r9 -; movq %rcx, %r11 +; movq %rsi, %r8 +; sarq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r11, %rax -; subq %rax, %rcx -; movq %rsi, %r10 -; shlq %cl, %r10 -; uninit %r11 -; xorq %r11, %r11 -; testq $0x7f, %rax -; cmoveq %r11, %r10 -; orq %r10, %rdi +; movq %rax, %r9 +; subq %r9, %rcx +; movq %rsi, %rax +; shlq %cl, %rax +; uninit %rdx +; xorq %rdx, %rdx +; testq $0x7f, %r9 +; cmoveq %rdx, %rax +; orq %rax, %rdi ; sarq $0x3f, %rsi -; testq $0x40, %rax -; movq %r9, %rax +; testq $0x40, %r9 +; movq %r8, %rax ; cmoveq %rdi, %rax ; movq %rsi, %rdx -; cmoveq %r9, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -178,27 +178,27 @@ block0(v0: i128, v1: i32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r11 ; shrq %cl, %rdi -; movq %rsi, %r9 -; sarq %cl, %r9 -; movq %rcx, %r11 +; movq %rsi, %r8 +; sarq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r11, %rax -; subq %rax, %rcx -; movq %rsi, %r10 -; shlq %cl, %r10 -; xorq %r11, %r11 -; testq $0x7f, %rax -; cmoveq %r11, %r10 -; orq %r10, %rdi +; movq %rax, %r9 +; subq %r9, %rcx +; movq %rsi, %rax +; shlq %cl, %rax +; xorq 
%rdx, %rdx +; testq $0x7f, %r9 +; cmoveq %rdx, %rax +; orq %rax, %rdi ; sarq $0x3f, %rsi -; testq $0x40, %rax -; movq %r9, %rax +; testq $0x40, %r9 +; movq %r8, %rax ; cmoveq %rdi, %rax ; movq %rsi, %rdx -; cmoveq %r9, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -213,28 +213,28 @@ block0(v0: i128, v1: i16): ; pushq %rbp ; movq %rsp, %rbp ; block0: +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r11 ; shrq %cl, %rdi -; movq %rsi, %r9 -; sarq %cl, %r9 -; movq %rcx, %r11 +; movq %rsi, %r8 +; sarq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r11, %rax -; subq %rax, %rcx -; movq %rsi, %r10 -; shlq %cl, %r10 -; uninit %r11 -; xorq %r11, %r11 -; testq $0x7f, %rax -; cmoveq %r11, %r10 -; orq %r10, %rdi +; movq %rax, %r9 +; subq %r9, %rcx +; movq %rsi, %rax +; shlq %cl, %rax +; uninit %rdx +; xorq %rdx, %rdx +; testq $0x7f, %r9 +; cmoveq %rdx, %rax +; orq %rax, %rdi ; sarq $0x3f, %rsi -; testq $0x40, %rax -; movq %r9, %rax +; testq $0x40, %r9 +; movq %r8, %rax ; cmoveq %rdi, %rax ; movq %rsi, %rdx -; cmoveq %r9, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -244,27 +244,27 @@ block0(v0: i128, v1: i16): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r11 ; shrq %cl, %rdi -; movq %rsi, %r9 -; sarq %cl, %r9 -; movq %rcx, %r11 +; movq %rsi, %r8 +; sarq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r11, %rax -; subq %rax, %rcx -; movq %rsi, %r10 -; shlq %cl, %r10 -; xorq %r11, %r11 -; testq $0x7f, %rax -; cmoveq %r11, %r10 -; orq %r10, %rdi +; movq %rax, %r9 +; subq %r9, %rcx +; movq %rsi, %rax +; shlq %cl, %rax +; xorq %rdx, %rdx +; testq $0x7f, %r9 +; cmoveq %rdx, %rax +; orq %rax, %rdi ; sarq $0x3f, %rsi -; testq $0x40, %rax -; movq %r9, %rax +; testq $0x40, %r9 +; movq %r8, %rax ; cmoveq %rdi, %rax ; movq %rsi, %rdx -; cmoveq %r9, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -279,28 +279,28 @@ block0(v0: i128, v1: i8): ; pushq %rbp ; movq %rsp, %rbp ; block0: +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r11 ; shrq %cl, %rdi -; movq %rsi, %r9 -; sarq %cl, %r9 -; movq %rcx, %r11 +; movq %rsi, %r8 +; sarq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r11, %rax -; subq %rax, %rcx -; movq %rsi, %r10 -; shlq %cl, %r10 -; uninit %r11 -; xorq %r11, %r11 -; testq $0x7f, %rax -; cmoveq %r11, %r10 -; orq %r10, %rdi +; movq %rax, %r9 +; subq %r9, %rcx +; movq %rsi, %rax +; shlq %cl, %rax +; uninit %rdx +; xorq %rdx, %rdx +; testq $0x7f, %r9 +; cmoveq %rdx, %rax +; orq %rax, %rdi ; sarq $0x3f, %rsi -; testq $0x40, %rax -; movq %r9, %rax +; testq $0x40, %r9 +; movq %r8, %rax ; cmoveq %rdi, %rax ; movq %rsi, %rdx -; cmoveq %r9, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -310,27 +310,27 @@ block0(v0: i128, v1: i8): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r11 ; shrq %cl, %rdi -; movq %rsi, %r9 -; sarq %cl, %r9 -; movq %rcx, %r11 +; movq %rsi, %r8 +; sarq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r11, %rax -; subq %rax, %rcx -; movq %rsi, %r10 -; shlq %cl, %r10 -; xorq %r11, %r11 -; testq $0x7f, %rax -; cmoveq %r11, %r10 -; orq %r10, %rdi +; movq %rax, %r9 +; subq %r9, %rcx +; movq %rsi, %rax +; shlq %cl, %rax +; xorq %rdx, %rdx +; testq $0x7f, %r9 +; cmoveq %rdx, %rax +; orq %rax, %rdi ; sarq $0x3f, %rsi -; testq $0x40, %rax -; movq %r9, %rax +; testq $0x40, %r9 +; movq %r8, %rax ; cmoveq %rdi, %rax ; movq %rsi, %rdx -; cmoveq %r9, %rdx +; cmoveq %r8, %rdx ; 
movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/stack_switch.clif b/cranelift/filetests/filetests/isa/x64/stack_switch.clif index 5ae44e43cdc3..12f720ab5d0f 100644 --- a/cranelift/filetests/filetests/isa/x64/stack_switch.clif +++ b/cranelift/filetests/filetests/isa/x64/stack_switch.clif @@ -20,9 +20,9 @@ block0(v0: i64, v1: i64, v2: i64): ; movq %r14, 0x18(%rsp) ; movq %r15, 0x20(%rsp) ; block0: -; movq %rdi, %r10 +; movq %rdi, %r8 ; movq %rdx, %rdi -; %rdi = stack_switch_basic %r10, %rsi, %rdi +; %rdi = stack_switch_basic %r8, %rsi, %rdi ; movq %rdi, %rax ; movq (%rsp), %rbx ; movq 8(%rsp), %r12 @@ -45,17 +45,17 @@ block0(v0: i64, v1: i64, v2: i64): ; movq %r14, 0x18(%rsp) ; movq %r15, 0x20(%rsp) ; block1: ; offset 0x20 -; movq %rdi, %r10 +; movq %rdi, %r8 ; movq %rdx, %rdi ; movq (%rsi), %rax -; movq %rsp, (%r10) +; movq %rsp, (%r8) ; movq %rax, %rsp ; movq 8(%rsi), %rax -; movq %rbp, 8(%r10) +; movq %rbp, 8(%r8) ; movq %rax, %rbp ; movq 0x10(%rsi), %rax ; leaq 6(%rip), %rcx -; movq %rcx, 0x10(%r10) +; movq %rcx, 0x10(%r8) ; jmpq *%rax ; movq %rdi, %rax ; movq (%rsp), %rbx @@ -184,36 +184,36 @@ block0(v0: i64, v1: i64): ; uninit %rdi ; xorq %rdi, %rdi ; %rdi = stack_switch_basic %rsi, %rsi, %rdi -; movq +0x78(%rsp), %rsi -; addq (%rsi), %rdi -; movq +0x70(%rsp), %rsi -; addq (%rsi), %rdi -; movq +0x68(%rsp), %rsi -; addq (%rsi), %rdi -; movq +0x60(%rsp), %rsi -; addq (%rsi), %rdi -; movq +0x58(%rsp), %rsi -; addq (%rsi), %rdi -; movq +0x50(%rsp), %rsi -; addq (%rsi), %rdi -; movq +0x48(%rsp), %rsi -; addq (%rsi), %rdi -; movq +0x40(%rsp), %rsi -; addq (%rsi), %rdi -; movq +0x38(%rsp), %rsi -; addq (%rsi), %rdi -; movq +0x30(%rsp), %rsi -; addq (%rsi), %rdi -; movq +0x28(%rsp), %rsi -; addq (%rsi), %rdi -; movq +0x20(%rsp), %rsi -; addq (%rsi), %rdi -; movq +0x18(%rsp), %rsi -; addq (%rsi), %rdi -; movq +0x10(%rsp), %rsi -; addq (%rsi), %rdi -; movq +8(%rsp), %rsi -; addq (%rsi), %rdi +; movq +0x78(%rsp), %rax +; addq (%rax), %rdi +; movq +0x70(%rsp), %rax +; addq (%rax), %rdi +; movq +0x68(%rsp), %rax +; addq (%rax), %rdi +; movq +0x60(%rsp), %rax +; addq (%rax), %rdi +; movq +0x58(%rsp), %rax +; addq (%rax), %rdi +; movq +0x50(%rsp), %rax +; addq (%rax), %rdi +; movq +0x48(%rsp), %rax +; addq (%rax), %rdi +; movq +0x40(%rsp), %rax +; addq (%rax), %rdi +; movq +0x38(%rsp), %rax +; addq (%rax), %rdi +; movq +0x30(%rsp), %rax +; addq (%rax), %rdi +; movq +0x28(%rsp), %rax +; addq (%rax), %rdi +; movq +0x20(%rsp), %rax +; addq (%rax), %rdi +; movq +0x18(%rsp), %rax +; addq (%rax), %rdi +; movq +0x10(%rsp), %rax +; addq (%rax), %rdi +; movq +8(%rsp), %rax +; addq (%rax), %rdi ; movq +(%rsp), %rax ; leaq (%rdi, %rax), %rax ; movq 0x80(%rsp), %rbx @@ -279,36 +279,36 @@ block0(v0: i64, v1: i64): ; leaq 6(%rip), %rcx ; movq %rcx, 0x10(%rsi) ; jmpq *%rax -; movq 0x78(%rsp), %rsi -; addq (%rsi), %rdi ; trap: heap_oob -; movq 0x70(%rsp), %rsi -; addq (%rsi), %rdi ; trap: heap_oob -; movq 0x68(%rsp), %rsi -; addq (%rsi), %rdi ; trap: heap_oob -; movq 0x60(%rsp), %rsi -; addq (%rsi), %rdi ; trap: heap_oob -; movq 0x58(%rsp), %rsi -; addq (%rsi), %rdi ; trap: heap_oob -; movq 0x50(%rsp), %rsi -; addq (%rsi), %rdi ; trap: heap_oob -; movq 0x48(%rsp), %rsi -; addq (%rsi), %rdi ; trap: heap_oob -; movq 0x40(%rsp), %rsi -; addq (%rsi), %rdi ; trap: heap_oob -; movq 0x38(%rsp), %rsi -; addq (%rsi), %rdi ; trap: heap_oob -; movq 0x30(%rsp), %rsi -; addq (%rsi), %rdi ; trap: heap_oob -; movq 0x28(%rsp), %rsi -; addq (%rsi), %rdi ; trap: heap_oob -; movq 0x20(%rsp), %rsi 
-; addq (%rsi), %rdi ; trap: heap_oob -; movq 0x18(%rsp), %rsi -; addq (%rsi), %rdi ; trap: heap_oob -; movq 0x10(%rsp), %rsi -; addq (%rsi), %rdi ; trap: heap_oob -; movq 8(%rsp), %rsi -; addq (%rsi), %rdi ; trap: heap_oob +; movq 0x78(%rsp), %rax +; addq (%rax), %rdi ; trap: heap_oob +; movq 0x70(%rsp), %rax +; addq (%rax), %rdi ; trap: heap_oob +; movq 0x68(%rsp), %rax +; addq (%rax), %rdi ; trap: heap_oob +; movq 0x60(%rsp), %rax +; addq (%rax), %rdi ; trap: heap_oob +; movq 0x58(%rsp), %rax +; addq (%rax), %rdi ; trap: heap_oob +; movq 0x50(%rsp), %rax +; addq (%rax), %rdi ; trap: heap_oob +; movq 0x48(%rsp), %rax +; addq (%rax), %rdi ; trap: heap_oob +; movq 0x40(%rsp), %rax +; addq (%rax), %rdi ; trap: heap_oob +; movq 0x38(%rsp), %rax +; addq (%rax), %rdi ; trap: heap_oob +; movq 0x30(%rsp), %rax +; addq (%rax), %rdi ; trap: heap_oob +; movq 0x28(%rsp), %rax +; addq (%rax), %rdi ; trap: heap_oob +; movq 0x20(%rsp), %rax +; addq (%rax), %rdi ; trap: heap_oob +; movq 0x18(%rsp), %rax +; addq (%rax), %rdi ; trap: heap_oob +; movq 0x10(%rsp), %rax +; addq (%rax), %rdi ; trap: heap_oob +; movq 8(%rsp), %rax +; addq (%rax), %rdi ; trap: heap_oob ; movq (%rsp), %rax ; addq %rdi, %rax ; movq 0x80(%rsp), %rbx @@ -419,39 +419,39 @@ block0(v0: i64, v1: i64): ; uninit %rdi ; xorq %rdi, %rdi ; %rdi = stack_switch_basic %rsi, %rsi, %rdi -; u64_to_f64_seq %rdi, %xmm0, %rcx, %rdx -; movdqu +0xf0(%rsp), %xmm5 -; addsd %xmm5, %xmm0 -; movdqu +0xe0(%rsp), %xmm1 +; u64_to_f64_seq %rdi, %xmm0, %rcx, %rax +; movdqu +0xf0(%rsp), %xmm1 ; addsd %xmm1, %xmm0 -; movdqu +0xd0(%rsp), %xmm3 -; addsd %xmm3, %xmm0 -; movdqu +0xc0(%rsp), %xmm6 -; addsd %xmm6, %xmm0 -; movdqu +0xb0(%rsp), %xmm1 +; movdqu +0xe0(%rsp), %xmm1 ; addsd %xmm1, %xmm0 -; movdqu +0xa0(%rsp), %xmm4 +; movdqu +0xd0(%rsp), %xmm4 ; addsd %xmm4, %xmm0 -; movdqu +0x90(%rsp), %xmm7 +; movdqu +0xc0(%rsp), %xmm7 ; addsd %xmm7, %xmm0 -; movdqu +0x80(%rsp), %xmm2 -; addsd %xmm2, %xmm0 -; movdqu +0x70(%rsp), %xmm5 -; addsd %xmm5, %xmm0 +; movdqu +0xb0(%rsp), %xmm1 +; addsd %xmm1, %xmm0 +; movdqu +0xa0(%rsp), %xmm1 +; addsd %xmm1, %xmm0 +; movdqu +0x90(%rsp), %xmm1 +; addsd %xmm1, %xmm0 +; movdqu +0x80(%rsp), %xmm1 +; addsd %xmm1, %xmm0 +; movdqu +0x70(%rsp), %xmm1 +; addsd %xmm1, %xmm0 ; movdqu +0x60(%rsp), %xmm1 ; addsd %xmm1, %xmm0 -; movdqu +0x50(%rsp), %xmm3 -; addsd %xmm3, %xmm0 -; movdqu +0x40(%rsp), %xmm6 -; addsd %xmm6, %xmm0 +; movdqu +0x50(%rsp), %xmm1 +; addsd %xmm1, %xmm0 +; movdqu +0x40(%rsp), %xmm1 +; addsd %xmm1, %xmm0 ; movdqu +0x30(%rsp), %xmm1 ; addsd %xmm1, %xmm0 -; movdqu +0x20(%rsp), %xmm4 -; addsd %xmm4, %xmm0 -; movdqu +0x10(%rsp), %xmm7 -; addsd %xmm7, %xmm0 -; movdqu +(%rsp), %xmm2 -; addsd %xmm2, %xmm0 +; movdqu +0x20(%rsp), %xmm1 +; addsd %xmm1, %xmm0 +; movdqu +0x10(%rsp), %xmm1 +; addsd %xmm1, %xmm0 +; movdqu +(%rsp), %xmm1 +; addsd %xmm1, %xmm0 ; movq 0x100(%rsp), %rbx ; movq 0x108(%rsp), %r12 ; movq 0x110(%rsp), %r13 @@ -522,43 +522,43 @@ block0(v0: i64, v1: i64): ; jmp 0x14f ; movq %rdi, %rcx ; shrq $1, %rcx -; movq %rdi, %rdx -; andq $1, %rdx -; orq %rcx, %rdx -; cvtsi2sdq %rdx, %xmm0 +; movq %rdi, %rax +; andq $1, %rax +; orq %rcx, %rax +; cvtsi2sdq %rax, %xmm0 ; addsd %xmm0, %xmm0 -; movdqu 0xf0(%rsp), %xmm5 -; addsd %xmm5, %xmm0 -; movdqu 0xe0(%rsp), %xmm1 +; movdqu 0xf0(%rsp), %xmm1 ; addsd %xmm1, %xmm0 -; movdqu 0xd0(%rsp), %xmm3 -; addsd %xmm3, %xmm0 -; movdqu 0xc0(%rsp), %xmm6 -; addsd %xmm6, %xmm0 -; movdqu 0xb0(%rsp), %xmm1 +; movdqu 0xe0(%rsp), %xmm1 ; addsd %xmm1, %xmm0 -; movdqu 0xa0(%rsp), %xmm4 +; movdqu 
0xd0(%rsp), %xmm4 ; addsd %xmm4, %xmm0 -; movdqu 0x90(%rsp), %xmm7 +; movdqu 0xc0(%rsp), %xmm7 ; addsd %xmm7, %xmm0 -; movdqu 0x80(%rsp), %xmm2 -; addsd %xmm2, %xmm0 -; movdqu 0x70(%rsp), %xmm5 -; addsd %xmm5, %xmm0 +; movdqu 0xb0(%rsp), %xmm1 +; addsd %xmm1, %xmm0 +; movdqu 0xa0(%rsp), %xmm1 +; addsd %xmm1, %xmm0 +; movdqu 0x90(%rsp), %xmm1 +; addsd %xmm1, %xmm0 +; movdqu 0x80(%rsp), %xmm1 +; addsd %xmm1, %xmm0 +; movdqu 0x70(%rsp), %xmm1 +; addsd %xmm1, %xmm0 ; movdqu 0x60(%rsp), %xmm1 ; addsd %xmm1, %xmm0 -; movdqu 0x50(%rsp), %xmm3 -; addsd %xmm3, %xmm0 -; movdqu 0x40(%rsp), %xmm6 -; addsd %xmm6, %xmm0 +; movdqu 0x50(%rsp), %xmm1 +; addsd %xmm1, %xmm0 +; movdqu 0x40(%rsp), %xmm1 +; addsd %xmm1, %xmm0 ; movdqu 0x30(%rsp), %xmm1 ; addsd %xmm1, %xmm0 -; movdqu 0x20(%rsp), %xmm4 -; addsd %xmm4, %xmm0 -; movdqu 0x10(%rsp), %xmm7 -; addsd %xmm7, %xmm0 -; movdqu (%rsp), %xmm2 -; addsd %xmm2, %xmm0 +; movdqu 0x20(%rsp), %xmm1 +; addsd %xmm1, %xmm0 +; movdqu 0x10(%rsp), %xmm1 +; addsd %xmm1, %xmm0 +; movdqu (%rsp), %xmm1 +; addsd %xmm1, %xmm0 ; movq 0x100(%rsp), %rbx ; movq 0x108(%rsp), %r12 ; movq 0x110(%rsp), %r13 diff --git a/cranelift/filetests/filetests/isa/x64/stackslot.clif b/cranelift/filetests/filetests/isa/x64/stackslot.clif index baf869fbb331..7726719d16eb 100644 --- a/cranelift/filetests/filetests/isa/x64/stackslot.clif +++ b/cranelift/filetests/filetests/isa/x64/stackslot.clif @@ -29,10 +29,10 @@ block0(v0: i64): ; block0: ; leaq +(%rsp), %rax ; leaq +0x10(%rsp), %rdx -; leaq +0x18(%rsp), %r8 -; leaq +0x20(%rsp), %r9 -; movq %r8, (%rdi) -; movq %r9, 8(%rdi) +; leaq +0x18(%rsp), %rsi +; leaq +0x20(%rsp), %r8 +; movq %rsi, (%rdi) +; movq %r8, 8(%rdi) ; addq $0x30, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -50,10 +50,10 @@ block0(v0: i64): ; block1: ; offset 0x18 ; leaq (%rsp), %rax ; leaq 0x10(%rsp), %rdx -; leaq 0x18(%rsp), %r8 -; leaq 0x20(%rsp), %r9 -; movq %r8, (%rdi) -; movq %r9, 8(%rdi) +; leaq 0x18(%rsp), %rsi +; leaq 0x20(%rsp), %r8 +; movq %rsi, (%rdi) +; movq %r8, 8(%rdi) ; addq $0x30, %rsp ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/store-f16-f128.clif b/cranelift/filetests/filetests/isa/x64/store-f16-f128.clif index 819a00e0247c..4a790bc48066 100644 --- a/cranelift/filetests/filetests/isa/x64/store-f16-f128.clif +++ b/cranelift/filetests/filetests/isa/x64/store-f16-f128.clif @@ -11,8 +11,8 @@ block0(v0: f16, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; pextrw $0x0, %xmm0, %ecx -; movw %cx, (%rdi) +; pextrw $0x0, %xmm0, %esi +; movw %si, (%rdi) ; movq %rbp, %rsp ; popq %rbp ; retq @@ -22,8 +22,8 @@ block0(v0: f16, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; pextrw $0, %xmm0, %ecx -; movw %cx, (%rdi) ; trap: heap_oob +; pextrw $0, %xmm0, %esi +; movw %si, (%rdi) ; trap: heap_oob ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/store-imm.clif b/cranelift/filetests/filetests/isa/x64/store-imm.clif index d6d902b73d6c..e49d199fc4d7 100644 --- a/cranelift/filetests/filetests/isa/x64/store-imm.clif +++ b/cranelift/filetests/filetests/isa/x64/store-imm.clif @@ -180,8 +180,8 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movabsq $0x7fffffffffffffff, %rax -; movq %rax, (%rdi) +; movabsq $0x7fffffffffffffff, %rdx +; movq %rdx, (%rdi) ; movq %rdi, %rax ; movq %rbp, %rsp ; popq %rbp @@ -192,8 +192,8 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $0x7fffffffffffffff, %rax -; movq %rax, (%rdi) ; trap: heap_oob +; movabsq 
$0x7fffffffffffffff, %rdx +; movq %rdx, (%rdi) ; trap: heap_oob ; movq %rdi, %rax ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/store-small-vector.clif b/cranelift/filetests/filetests/isa/x64/store-small-vector.clif index 8fdd34829a4e..82d0b913d54a 100644 --- a/cranelift/filetests/filetests/isa/x64/store-small-vector.clif +++ b/cranelift/filetests/filetests/isa/x64/store-small-vector.clif @@ -11,8 +11,8 @@ block0(v0: i8x2, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; pextrw $0x0, %xmm0, %ecx -; movw %cx, (%rdi) +; pextrw $0x0, %xmm0, %esi +; movw %si, (%rdi) ; movq %rbp, %rsp ; popq %rbp ; retq @@ -22,8 +22,8 @@ block0(v0: i8x2, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; pextrw $0, %xmm0, %ecx -; movw %cx, (%rdi) ; trap: heap_oob +; pextrw $0, %xmm0, %esi +; movw %si, (%rdi) ; trap: heap_oob ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/struct-arg.clif b/cranelift/filetests/filetests/isa/x64/struct-arg.clif index a7106936e942..0df9df8897d6 100644 --- a/cranelift/filetests/filetests/isa/x64/struct-arg.clif +++ b/cranelift/filetests/filetests/isa/x64/struct-arg.clif @@ -11,8 +11,8 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; leaq +-0x40(%rbp), %rsi -; movzbq (%rsi), %rax +; leaq +-0x40(%rbp), %rax +; movzbq (%rax), %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -22,8 +22,8 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; leaq 0x10(%rbp), %rsi -; movzbq (%rsi), %rax ; trap: heap_oob +; leaq 0x10(%rbp), %rax +; movzbq (%rax), %rax ; trap: heap_oob ; movq %rbp, %rsp ; popq %rbp ; retq @@ -42,8 +42,8 @@ block0(v0: i64, v1: i64): ; block0: ; leaq +-0x40(%rbp), %rcx ; movzbq (%rdi), %rax -; movzbq (%rcx), %r9 -; addl %r9d, %eax +; movzbq (%rcx), %rsi +; addl %esi, %eax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -55,8 +55,8 @@ block0(v0: i64, v1: i64): ; block1: ; offset 0x4 ; leaq 0x10(%rbp), %rcx ; movzbq (%rdi), %rax ; trap: heap_oob -; movzbq (%rcx), %r9 ; trap: heap_oob -; addl %r9d, %eax +; movzbq (%rcx), %rsi ; trap: heap_oob +; addl %esi, %eax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -77,8 +77,8 @@ block0(v0: i64): ; movq %rdi, %rsi ; leaq (%rsp), %rdi ; movl $0x40, %edx -; load_ext_name %Memcpy+0, %r10 -; call *%r10 +; load_ext_name %Memcpy+0, %r8 +; call *%r8 ; call User(userextname0) ; addq $0x40, %rsp ; movq %rbp, %rsp @@ -94,8 +94,8 @@ block0(v0: i64): ; movq %rdi, %rsi ; leaq (%rsp), %rdi ; movl $0x40, %edx -; movabsq $0, %r10 ; reloc_external Abs8 %Memcpy 0 -; callq *%r10 +; movabsq $0, %r8 ; reloc_external Abs8 %Memcpy 0 +; callq *%r8 ; callq 0x26 ; reloc_external CallPCRel4 u0:0 -4 ; addq $0x40, %rsp ; movq %rbp, %rsp @@ -114,16 +114,16 @@ block0(v0: i64, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; subq $0x50, %rsp -; movq %r14, 0x40(%rsp) +; movq %r12, 0x40(%rsp) ; block0: -; movq %rdi, %r14 +; movq %rdi, %r12 ; leaq (%rsp), %rdi ; movl $0x40, %edx -; load_ext_name %Memcpy+0, %r11 -; call *%r11 -; movq %r14, %rdi +; load_ext_name %Memcpy+0, %r8 +; call *%r8 +; movq %r12, %rdi ; call User(userextname0) -; movq 0x40(%rsp), %r14 +; movq 0x40(%rsp), %r12 ; addq $0x50, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -134,16 +134,16 @@ block0(v0: i64, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; subq $0x50, %rsp -; movq %r14, 0x40(%rsp) +; movq %r12, 0x40(%rsp) ; block1: ; offset 0xd -; movq %rdi, %r14 +; movq %rdi, %r12 ; leaq (%rsp), %rdi ; movl $0x40, %edx -; movabsq $0, %r11 ; reloc_external Abs8 %Memcpy 0 -; callq *%r11 -; movq %r14, %rdi +; 
movabsq $0, %r8 ; reloc_external Abs8 %Memcpy 0 +; callq *%r8 +; movq %r12, %rdi ; callq 0x2e ; reloc_external CallPCRel4 u0:0 -4 -; movq 0x40(%rsp), %r14 +; movq 0x40(%rsp), %r12 ; addq $0x50, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -161,11 +161,11 @@ block0(v0: i64, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; leaq +-0xc0(%rbp), %rsi -; leaq +-0x40(%rbp), %rcx -; movzbq (%rsi), %rax -; movzbq (%rcx), %r9 -; addl %r9d, %eax +; leaq +-0xc0(%rbp), %rax +; leaq +-0x40(%rbp), %rdx +; movzbq (%rax), %rax +; movzbq (%rdx), %rsi +; addl %esi, %eax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -175,11 +175,11 @@ block0(v0: i64, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; leaq 0x10(%rbp), %rsi -; leaq 0x90(%rbp), %rcx -; movzbq (%rsi), %rax ; trap: heap_oob -; movzbq (%rcx), %r9 ; trap: heap_oob -; addl %r9d, %eax +; leaq 0x10(%rbp), %rax +; leaq 0x90(%rbp), %rdx +; movzbq (%rax), %rax ; trap: heap_oob +; movzbq (%rdx), %rsi ; trap: heap_oob +; addl %esi, %eax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -196,24 +196,24 @@ block0(v0: i64, v1: i64, v2: i64): ; pushq %rbp ; movq %rsp, %rbp ; subq $0xd0, %rsp -; movq %rbx, 0xc0(%rsp) -; movq %r13, 0xc8(%rsp) +; movq %r12, 0xc0(%rsp) +; movq %r15, 0xc8(%rsp) ; block0: -; movq %rdx, %rbx -; movq %rdi, %r13 +; movq %rdi, %r12 +; movq %rdx, %r15 ; leaq (%rsp), %rdi ; movl $0x80, %edx -; load_ext_name %Memcpy+0, %rax -; call *%rax +; load_ext_name %Memcpy+0, %r9 +; call *%r9 ; leaq 0x80(%rsp), %rdi ; movl $0x40, %edx -; load_ext_name %Memcpy+0, %r10 -; movq %rbx, %rsi -; call *%r10 -; movq %r13, %rdi +; load_ext_name %Memcpy+0, %rax +; movq %r15, %rsi +; call *%rax +; movq %r12, %rdi ; call User(userextname0) -; movq 0xc0(%rsp), %rbx -; movq 0xc8(%rsp), %r13 +; movq 0xc0(%rsp), %r12 +; movq 0xc8(%rsp), %r15 ; addq $0xd0, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -224,24 +224,24 @@ block0(v0: i64, v1: i64, v2: i64): ; pushq %rbp ; movq %rsp, %rbp ; subq $0xd0, %rsp -; movq %rbx, 0xc0(%rsp) -; movq %r13, 0xc8(%rsp) +; movq %r12, 0xc0(%rsp) +; movq %r15, 0xc8(%rsp) ; block1: ; offset 0x1b -; movq %rdx, %rbx -; movq %rdi, %r13 +; movq %rdi, %r12 +; movq %rdx, %r15 ; leaq (%rsp), %rdi ; movl $0x80, %edx -; movabsq $0, %rax ; reloc_external Abs8 %Memcpy 0 -; callq *%rax +; movabsq $0, %r9 ; reloc_external Abs8 %Memcpy 0 +; callq *%r9 ; leaq 0x80(%rsp), %rdi ; movl $0x40, %edx -; movabsq $0, %r10 ; reloc_external Abs8 %Memcpy 0 -; movq %rbx, %rsi -; callq *%r10 -; movq %r13, %rdi +; movabsq $0, %rax ; reloc_external Abs8 %Memcpy 0 +; movq %r15, %rsi +; callq *%rax +; movq %r12, %rdi ; callq 0x5b ; reloc_external CallPCRel4 u0:0 -4 -; movq 0xc0(%rsp), %rbx -; movq 0xc8(%rsp), %r13 +; movq 0xc0(%rsp), %r12 +; movq 0xc8(%rsp), %r15 ; addq $0xd0, %rsp ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/struct-ret.clif b/cranelift/filetests/filetests/isa/x64/struct-ret.clif index 5aadbba0d611..fd22a44199ab 100644 --- a/cranelift/filetests/filetests/isa/x64/struct-ret.clif +++ b/cranelift/filetests/filetests/isa/x64/struct-ret.clif @@ -41,14 +41,14 @@ block0(v0: i64, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; subq $0x10, %rsp -; movq %r15, (%rsp) +; movq %r12, (%rsp) ; block0: -; movq %rsi, %r15 -; load_ext_name %f2+0, %rax -; movq %r15, %rdi -; call *%rax -; movq %r15, %rax -; movq (%rsp), %r15 +; movq %rsi, %r12 +; load_ext_name %f2+0, %rdx +; movq %r12, %rdi +; call *%rdx +; movq %r12, %rax +; movq (%rsp), %r12 ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -59,14 +59,14 @@ block0(v0: i64, v1: i64): ; pushq %rbp ; 
movq %rsp, %rbp ; subq $0x10, %rsp -; movq %r15, (%rsp) +; movq %r12, (%rsp) ; block1: ; offset 0xc -; movq %rsi, %r15 -; movabsq $0, %rax ; reloc_external Abs8 %f2 0 -; movq %r15, %rdi -; callq *%rax -; movq %r15, %rax -; movq (%rsp), %r15 +; movq %rsi, %r12 +; movabsq $0, %rdx ; reloc_external Abs8 %f2 0 +; movq %r12, %rdi +; callq *%rdx +; movq %r12, %rax +; movq (%rsp), %r12 ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -84,13 +84,13 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; subq $0x10, %rsp -; movq %r15, (%rsp) +; movq %r12, (%rsp) ; block0: -; movq %rdi, %r15 -; load_ext_name %f4+0, %rax -; call *%rax -; movq %r15, %rax -; movq (%rsp), %r15 +; movq %rdi, %r12 +; load_ext_name %f4+0, %rdx +; call *%rdx +; movq %r12, %rax +; movq (%rsp), %r12 ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -101,13 +101,13 @@ block0(v0: i64): ; pushq %rbp ; movq %rsp, %rbp ; subq $0x10, %rsp -; movq %r15, (%rsp) +; movq %r12, (%rsp) ; block1: ; offset 0xc -; movq %rdi, %r15 -; movabsq $0, %rax ; reloc_external Abs8 %f4 0 -; callq *%rax -; movq %r15, %rax -; movq (%rsp), %r15 +; movq %rdi, %r12 +; movabsq $0, %rdx ; reloc_external Abs8 %f4 0 +; callq *%rdx +; movq %r12, %rax +; movq (%rsp), %r12 ; addq $0x10, %rsp ; movq %rbp, %rsp ; popq %rbp diff --git a/cranelift/filetests/filetests/isa/x64/tail-call-conv.clif b/cranelift/filetests/filetests/isa/x64/tail-call-conv.clif index d3bcfb2260dd..c65689aa0c21 100644 --- a/cranelift/filetests/filetests/isa/x64/tail-call-conv.clif +++ b/cranelift/filetests/filetests/isa/x64/tail-call-conv.clif @@ -70,27 +70,27 @@ block0: ; movl $0x19, %ecx ; movl $0x1e, %r8d ; movl $0x23, %r9d -; movl $0x28, %r10d -; movl $0x2d, %r11d -; movl $0x32, %eax -; movl $0x37, %r12d -; movl $0x3c, %r13d -; movl $0x41, %r14d -; movl $0x46, %r15d -; movl $0x4b, %ebx +; movl $0x28, %eax +; movl $0x2d, %r10d +; movl $0x32, %r11d +; movl $0x37, %ebx +; movl $0x3c, %r12d +; movl $0x41, %r13d +; movl $0x46, %r14d +; movl $0x4b, %r15d ; movl $0x50, %edi -; movq %r10, (%rsp) -; movq %r11, 8(%rsp) -; movq %rax, 0x10(%rsp) -; movq %r12, 0x18(%rsp) -; movq %r13, 0x20(%rsp) -; movq %r14, 0x28(%rsp) -; movq %r15, 0x30(%rsp) -; movq %rbx, 0x38(%rsp) +; movq %rax, (%rsp) +; movq %r10, 8(%rsp) +; movq %r11, 0x10(%rsp) +; movq %rbx, 0x18(%rsp) +; movq %r12, 0x20(%rsp) +; movq %r13, 0x28(%rsp) +; movq %r14, 0x30(%rsp) +; movq %r15, 0x38(%rsp) ; movq %rdi, 0x40(%rsp) -; load_ext_name %tail_callee_stack_args+0, %r10 +; load_ext_name %tail_callee_stack_args+0, %rax ; movq +(%rsp), %rdi -; call *%r10 +; call *%rax ; movq 0x60(%rsp), %rbx ; movq 0x68(%rsp), %r12 ; movq 0x70(%rsp), %r13 @@ -119,27 +119,27 @@ block0: ; movl $0x19, %ecx ; movl $0x1e, %r8d ; movl $0x23, %r9d -; movl $0x28, %r10d -; movl $0x2d, %r11d -; movl $0x32, %eax -; movl $0x37, %r12d -; movl $0x3c, %r13d -; movl $0x41, %r14d -; movl $0x46, %r15d -; movl $0x4b, %ebx +; movl $0x28, %eax +; movl $0x2d, %r10d +; movl $0x32, %r11d +; movl $0x37, %ebx +; movl $0x3c, %r12d +; movl $0x41, %r13d +; movl $0x46, %r14d +; movl $0x4b, %r15d ; movl $0x50, %edi -; movq %r10, (%rsp) -; movq %r11, 8(%rsp) -; movq %rax, 0x10(%rsp) -; movq %r12, 0x18(%rsp) -; movq %r13, 0x20(%rsp) -; movq %r14, 0x28(%rsp) -; movq %r15, 0x30(%rsp) -; movq %rbx, 0x38(%rsp) +; movq %rax, (%rsp) +; movq %r10, 8(%rsp) +; movq %r11, 0x10(%rsp) +; movq %rbx, 0x18(%rsp) +; movq %r12, 0x20(%rsp) +; movq %r13, 0x28(%rsp) +; movq %r14, 0x30(%rsp) +; movq %r15, 0x38(%rsp) ; movq %rdi, 0x40(%rsp) -; movabsq $0, %r10 ; reloc_external Abs8 %tail_callee_stack_args 0 +; 
movabsq $0, %rax ; reloc_external Abs8 %tail_callee_stack_args 0 ; movq 0x50(%rsp), %rdi -; callq *%r10 +; callq *%rax ; subq $0x50, %rsp ; movq 0x60(%rsp), %rbx ; movq 0x68(%rsp), %r12 @@ -210,54 +210,54 @@ block0: ; movq %r10, +0x38(%rsp) ; movl $0x2d, %r10d ; movq %r10, +0x30(%rsp) -; movl $0x32, %r13d -; movl $0x37, %r14d -; movl $0x3c, %r15d -; movl $0x41, %ebx -; movl $0x46, %r12d +; movl $0x32, %ebx +; movl $0x37, %r12d +; movl $0x3c, %r13d +; movl $0x41, %r14d +; movl $0x46, %r15d ; movl $0x4b, %r11d -; movl $0x50, %eax +; movl $0x50, %ecx +; movq %rcx, +0x28(%rsp) ; movl $0x55, %ecx ; movl $0x5a, %edx ; movl $0x5f, %esi -; movq %rsi, +0x28(%rsp) ; movl $0x64, %r8d ; movl $0x69, %r9d ; movl $0x6e, %r10d -; movl $0x73, %esi -; movq %rsi, +0x20(%rsp) -; movl $0x78, %esi -; movq %rsi, +0x18(%rsp) -; movl $0x7d, %esi -; movq %rsi, +0x10(%rsp) -; movl $0x82, %esi -; movq %rsi, +8(%rsp) -; movl $0x87, %esi -; movq %rsi, +(%rsp) -; movq %r13, (%rdi) -; movq %r14, 8(%rdi) -; movq %r15, 0x10(%rdi) -; movq %rbx, 0x18(%rdi) -; movq %r12, 0x20(%rdi) +; movl $0x73, %eax +; movq %rax, +0x20(%rsp) +; movl $0x78, %eax +; movq %rax, +0x18(%rsp) +; movl $0x7d, %eax +; movq %rax, +0x10(%rsp) +; movl $0x82, %eax +; movq %rax, +8(%rsp) +; movl $0x87, %eax +; movq %rax, +(%rsp) +; movq %rbx, (%rdi) +; movq %r12, 8(%rdi) +; movq %r13, 0x10(%rdi) +; movq %r14, 0x18(%rdi) +; movq %r15, 0x20(%rdi) ; movq %r11, 0x28(%rdi) +; movq +0x28(%rsp), %rax ; movq %rax, 0x30(%rdi) ; movq %rcx, 0x38(%rdi) ; movq %rdx, 0x40(%rdi) -; movq +0x28(%rsp), %rax -; movq %rax, 0x48(%rdi) +; movq %rsi, 0x48(%rdi) ; movq %r8, 0x50(%rdi) ; movq %r9, 0x58(%rdi) ; movq %r10, 0x60(%rdi) -; movq +0x20(%rsp), %rsi -; movq %rsi, 0x68(%rdi) -; movq +0x18(%rsp), %rsi -; movq %rsi, 0x70(%rdi) -; movq +0x10(%rsp), %rsi -; movq %rsi, 0x78(%rdi) -; movq +8(%rsp), %rsi -; movq %rsi, 0x80(%rdi) -; movq +(%rsp), %rsi -; movq %rsi, 0x88(%rdi) +; movq +0x20(%rsp), %rax +; movq %rax, 0x68(%rdi) +; movq +0x18(%rsp), %rax +; movq %rax, 0x70(%rdi) +; movq +0x10(%rsp), %rax +; movq %rax, 0x78(%rdi) +; movq +8(%rsp), %rax +; movq %rax, 0x80(%rdi) +; movq +(%rsp), %rax +; movq %rax, 0x88(%rdi) ; movq +0x68(%rsp), %rax ; movq +0x60(%rsp), %rcx ; movq +0x58(%rsp), %rdx @@ -303,54 +303,54 @@ block0: ; movq %r10, 0x38(%rsp) ; movl $0x2d, %r10d ; movq %r10, 0x30(%rsp) -; movl $0x32, %r13d -; movl $0x37, %r14d -; movl $0x3c, %r15d -; movl $0x41, %ebx -; movl $0x46, %r12d +; movl $0x32, %ebx +; movl $0x37, %r12d +; movl $0x3c, %r13d +; movl $0x41, %r14d +; movl $0x46, %r15d ; movl $0x4b, %r11d -; movl $0x50, %eax +; movl $0x50, %ecx +; movq %rcx, 0x28(%rsp) ; movl $0x55, %ecx ; movl $0x5a, %edx ; movl $0x5f, %esi -; movq %rsi, 0x28(%rsp) ; movl $0x64, %r8d ; movl $0x69, %r9d ; movl $0x6e, %r10d -; movl $0x73, %esi -; movq %rsi, 0x20(%rsp) -; movl $0x78, %esi -; movq %rsi, 0x18(%rsp) -; movl $0x7d, %esi -; movq %rsi, 0x10(%rsp) -; movl $0x82, %esi -; movq %rsi, 8(%rsp) -; movl $0x87, %esi -; movq %rsi, (%rsp) -; movq %r13, (%rdi) -; movq %r14, 8(%rdi) -; movq %r15, 0x10(%rdi) -; movq %rbx, 0x18(%rdi) -; movq %r12, 0x20(%rdi) +; movl $0x73, %eax +; movq %rax, 0x20(%rsp) +; movl $0x78, %eax +; movq %rax, 0x18(%rsp) +; movl $0x7d, %eax +; movq %rax, 0x10(%rsp) +; movl $0x82, %eax +; movq %rax, 8(%rsp) +; movl $0x87, %eax +; movq %rax, (%rsp) +; movq %rbx, (%rdi) +; movq %r12, 8(%rdi) +; movq %r13, 0x10(%rdi) +; movq %r14, 0x18(%rdi) +; movq %r15, 0x20(%rdi) ; movq %r11, 0x28(%rdi) +; movq 0x28(%rsp), %rax ; movq %rax, 0x30(%rdi) ; movq %rcx, 0x38(%rdi) ; movq %rdx, 
0x40(%rdi) -; movq 0x28(%rsp), %rax -; movq %rax, 0x48(%rdi) +; movq %rsi, 0x48(%rdi) ; movq %r8, 0x50(%rdi) ; movq %r9, 0x58(%rdi) ; movq %r10, 0x60(%rdi) -; movq 0x20(%rsp), %rsi -; movq %rsi, 0x68(%rdi) -; movq 0x18(%rsp), %rsi -; movq %rsi, 0x70(%rdi) -; movq 0x10(%rsp), %rsi -; movq %rsi, 0x78(%rdi) -; movq 8(%rsp), %rsi -; movq %rsi, 0x80(%rdi) -; movq (%rsp), %rsi -; movq %rsi, 0x88(%rdi) +; movq 0x20(%rsp), %rax +; movq %rax, 0x68(%rdi) +; movq 0x18(%rsp), %rax +; movq %rax, 0x70(%rdi) +; movq 0x10(%rsp), %rax +; movq %rax, 0x78(%rdi) +; movq 8(%rsp), %rax +; movq %rax, 0x80(%rdi) +; movq (%rsp), %rax +; movq %rax, 0x88(%rdi) ; movq 0x68(%rsp), %rax ; movq 0x60(%rsp), %rcx ; movq 0x58(%rsp), %rdx @@ -437,11 +437,11 @@ block0: ; movq %r11, 0xe0(%rsp) ; movq 0x58(%rsp), %r11 ; movq %r11, 0xe8(%rsp) -; movq 0x60(%rsp), %rbx -; movq 0x68(%rsp), %r12 +; movq 0x60(%rsp), %r15 +; movq 0x68(%rsp), %r14 ; movq 0x70(%rsp), %r13 -; movq 0x78(%rsp), %r14 -; movq 0x80(%rsp), %r15 +; movq 0x78(%rsp), %r12 +; movq 0x80(%rsp), %rbx ; movq 0x88(%rsp), %r11 ; movq %r11, 0xf0(%rsp) ; movq 0xf0(%rsp), %rax @@ -484,54 +484,54 @@ block0(v0: i64, v1: i64, v2: i64, v3: i64, v4: i64, v5: i64, v6: i64, v7: i64, v ; movq %r10, +0x30(%rsp) ; movq +-0xa0(%rbp), %r10 ; movq %r10, +0x38(%rsp) -; movq +-0x98(%rbp), %rcx -; movq %rcx, +0x40(%rsp) -; movq +-0x90(%rbp), %r8 -; movq %r8, +0x48(%rsp) -; movq +-0x88(%rbp), %r10 -; movq %r10, +0x50(%rsp) -; movq +-0x80(%rbp), %rsi -; movq %rsi, +0x58(%rsp) +; movq +-0x98(%rbp), %rax +; movq %rax, +0x40(%rsp) +; movq +-0x90(%rbp), %rax +; movq %rax, +0x48(%rsp) +; movq +-0x88(%rbp), %rax +; movq %rax, +0x50(%rsp) +; movq +-0x80(%rbp), %rax +; movq %rax, +0x58(%rsp) ; movq +-0x78(%rbp), %rax ; movq %rax, +0x60(%rsp) ; movq +-0x70(%rbp), %r10 ; movq +-0x68(%rbp), %r9 ; movq +-0x60(%rbp), %r8 -; movq +-0x58(%rbp), %rdx -; movq +-0x50(%rbp), %rcx -; movq %rcx, +0x68(%rsp) -; movq +-0x48(%rbp), %rsi +; movq +-0x58(%rbp), %rsi +; movq +-0x50(%rbp), %rdx +; movq +-0x48(%rbp), %rcx ; movq +-0x40(%rbp), %r15 -; movq +-0x38(%rbp), %r12 -; movq +-0x30(%rbp), %r14 -; movq +-0x28(%rbp), %rbx -; movq +-0x20(%rbp), %r13 +; movq +-0x38(%rbp), %r14 +; movq +-0x30(%rbp), %r13 +; movq +-0x28(%rbp), %r12 +; movq +-0x20(%rbp), %rbx ; movq +-0x18(%rbp), %r11 ; movq +-0x10(%rbp), %rax -; movq +0x40(%rsp), %rcx -; movq %rcx, (%rdi) -; movq +0x48(%rsp), %rcx -; movq %rcx, 8(%rdi) -; movq +0x50(%rsp), %rcx -; movq %rcx, 0x10(%rdi) -; movq +0x58(%rsp), %rcx -; movq %rcx, 0x18(%rdi) -; movq +0x60(%rsp), %rcx -; movq %rcx, 0x20(%rdi) +; movq %rax, +0x68(%rsp) +; movq +0x40(%rsp), %rax +; movq %rax, (%rdi) +; movq +0x48(%rsp), %rax +; movq %rax, 8(%rdi) +; movq +0x50(%rsp), %rax +; movq %rax, 0x10(%rdi) +; movq +0x58(%rsp), %rax +; movq %rax, 0x18(%rdi) +; movq +0x60(%rsp), %rax +; movq %rax, 0x20(%rdi) ; movq %r10, 0x28(%rdi) ; movq %r9, 0x30(%rdi) ; movq %r8, 0x38(%rdi) -; movq %rdx, 0x40(%rdi) -; movq +0x68(%rsp), %rdx +; movq %rsi, 0x40(%rdi) ; movq %rdx, 0x48(%rdi) -; movq %rsi, 0x50(%rdi) +; movq %rcx, 0x50(%rdi) ; movq %r15, 0x58(%rdi) -; movq %r12, 0x60(%rdi) -; movq %r14, 0x68(%rdi) -; movq %rbx, 0x70(%rdi) -; movq %r13, 0x78(%rdi) +; movq %r14, 0x60(%rdi) +; movq %r13, 0x68(%rdi) +; movq %r12, 0x70(%rdi) +; movq %rbx, 0x78(%rdi) ; movq %r11, 0x80(%rdi) -; movq %rax, 0x88(%rdi) +; movq +0x68(%rsp), %rcx +; movq %rcx, 0x88(%rdi) ; movq +(%rsp), %rax ; movq +8(%rsp), %rcx ; movq +0x10(%rsp), %rdx @@ -572,54 +572,54 @@ block0(v0: i64, v1: i64, v2: i64, v3: i64, v4: i64, v5: i64, v6: i64, v7: 
i64, v ; movq %r10, 0x30(%rsp) ; movq 0x20(%rbp), %r10 ; movq %r10, 0x38(%rsp) -; movq 0x28(%rbp), %rcx -; movq %rcx, 0x40(%rsp) -; movq 0x30(%rbp), %r8 -; movq %r8, 0x48(%rsp) -; movq 0x38(%rbp), %r10 -; movq %r10, 0x50(%rsp) -; movq 0x40(%rbp), %rsi -; movq %rsi, 0x58(%rsp) +; movq 0x28(%rbp), %rax +; movq %rax, 0x40(%rsp) +; movq 0x30(%rbp), %rax +; movq %rax, 0x48(%rsp) +; movq 0x38(%rbp), %rax +; movq %rax, 0x50(%rsp) +; movq 0x40(%rbp), %rax +; movq %rax, 0x58(%rsp) ; movq 0x48(%rbp), %rax ; movq %rax, 0x60(%rsp) ; movq 0x50(%rbp), %r10 ; movq 0x58(%rbp), %r9 ; movq 0x60(%rbp), %r8 -; movq 0x68(%rbp), %rdx -; movq 0x70(%rbp), %rcx -; movq %rcx, 0x68(%rsp) -; movq 0x78(%rbp), %rsi +; movq 0x68(%rbp), %rsi +; movq 0x70(%rbp), %rdx +; movq 0x78(%rbp), %rcx ; movq 0x80(%rbp), %r15 -; movq 0x88(%rbp), %r12 -; movq 0x90(%rbp), %r14 -; movq 0x98(%rbp), %rbx -; movq 0xa0(%rbp), %r13 +; movq 0x88(%rbp), %r14 +; movq 0x90(%rbp), %r13 +; movq 0x98(%rbp), %r12 +; movq 0xa0(%rbp), %rbx ; movq 0xa8(%rbp), %r11 ; movq 0xb0(%rbp), %rax -; movq 0x40(%rsp), %rcx -; movq %rcx, (%rdi) -; movq 0x48(%rsp), %rcx -; movq %rcx, 8(%rdi) -; movq 0x50(%rsp), %rcx -; movq %rcx, 0x10(%rdi) -; movq 0x58(%rsp), %rcx -; movq %rcx, 0x18(%rdi) -; movq 0x60(%rsp), %rcx -; movq %rcx, 0x20(%rdi) +; movq %rax, 0x68(%rsp) +; movq 0x40(%rsp), %rax +; movq %rax, (%rdi) +; movq 0x48(%rsp), %rax +; movq %rax, 8(%rdi) +; movq 0x50(%rsp), %rax +; movq %rax, 0x10(%rdi) +; movq 0x58(%rsp), %rax +; movq %rax, 0x18(%rdi) +; movq 0x60(%rsp), %rax +; movq %rax, 0x20(%rdi) ; movq %r10, 0x28(%rdi) ; movq %r9, 0x30(%rdi) ; movq %r8, 0x38(%rdi) -; movq %rdx, 0x40(%rdi) -; movq 0x68(%rsp), %rdx +; movq %rsi, 0x40(%rdi) ; movq %rdx, 0x48(%rdi) -; movq %rsi, 0x50(%rdi) +; movq %rcx, 0x50(%rdi) ; movq %r15, 0x58(%rdi) -; movq %r12, 0x60(%rdi) -; movq %r14, 0x68(%rdi) -; movq %rbx, 0x70(%rdi) -; movq %r13, 0x78(%rdi) +; movq %r14, 0x60(%rdi) +; movq %r13, 0x68(%rdi) +; movq %r12, 0x70(%rdi) +; movq %rbx, 0x78(%rdi) ; movq %r11, 0x80(%rdi) -; movq %rax, 0x88(%rdi) +; movq 0x68(%rsp), %rcx +; movq %rcx, 0x88(%rdi) ; movq (%rsp), %rax ; movq 8(%rsp), %rcx ; movq 0x10(%rsp), %rdx @@ -692,72 +692,72 @@ block0: ; movq %r9, +0x40(%rsp) ; movl $0x1e, %r9d ; movq %r9, +0x38(%rsp) -; movl $0x23, %esi -; movq %rsi, +0x30(%rsp) +; movl $0x23, %ecx +; movq %rcx, +0x30(%rsp) ; movl $0x28, %edi -; movl $0x2d, %eax -; movl $0x32, %r10d -; movl $0x37, %r14d -; movl $0x3c, %r15d -; movl $0x41, %ebx -; movl $0x46, %r12d -; movl $0x4b, %r13d +; movl $0x2d, %r10d +; movl $0x32, %r11d +; movl $0x37, %ebx +; movl $0x3c, %r12d +; movl $0x41, %r13d +; movl $0x46, %r14d +; movl $0x4b, %r15d ; movl $0x50, %esi ; movl $0x55, %edx ; movl $0x5a, %ecx ; movl $0x5f, %r8d ; movl $0x64, %r9d -; movl $0x69, %r11d -; movq %r11, +0x28(%rsp) -; movl $0x6e, %r11d -; movq %r11, +0x20(%rsp) -; movl $0x73, %r11d -; movq %r11, +0x18(%rsp) -; movl $0x78, %r11d -; movq %r11, +0x10(%rsp) -; movl $0x7d, %r11d -; movq %r11, +8(%rsp) -; movl $0x82, %r11d -; movq %r11, +(%rsp) -; movl $0x87, %r11d -; movq %r11, +0x60(%rsp) -; movq +0x30(%rsp), %r11 -; movq %r11, (%rsp) +; movl $0x69, %eax +; movq %rax, +0x28(%rsp) +; movl $0x6e, %eax +; movq %rax, +0x20(%rsp) +; movl $0x73, %eax +; movq %rax, +0x18(%rsp) +; movl $0x78, %eax +; movq %rax, +0x10(%rsp) +; movl $0x7d, %eax +; movq %rax, +8(%rsp) +; movl $0x82, %eax +; movq %rax, +(%rsp) +; movl $0x87, %eax +; movq %rax, +0x60(%rsp) +; movq +0x30(%rsp), %rax +; movq %rax, (%rsp) ; movq %rdi, 8(%rsp) -; movq %rax, 0x10(%rsp) -; movq %r10, 
0x18(%rsp) -; movq %r14, 0x20(%rsp) -; movq %r15, 0x28(%rsp) -; movq %rbx, 0x30(%rsp) -; movq %r12, 0x38(%rsp) -; movq %r13, 0x40(%rsp) +; movq %r10, 0x10(%rsp) +; movq %r11, 0x18(%rsp) +; movq %rbx, 0x20(%rsp) +; movq %r12, 0x28(%rsp) +; movq %r13, 0x30(%rsp) +; movq %r14, 0x38(%rsp) +; movq %r15, 0x40(%rsp) ; movq %rsi, 0x48(%rsp) ; movq %rdx, 0x50(%rsp) ; movq %rcx, 0x58(%rsp) ; movq %r8, 0x60(%rsp) ; movq %r9, 0x68(%rsp) -; movq +0x28(%rsp), %r11 -; movq %r11, 0x70(%rsp) -; movq +0x20(%rsp), %r11 -; movq %r11, 0x78(%rsp) -; movq +0x18(%rsp), %r11 -; movq %r11, 0x80(%rsp) -; movq +0x10(%rsp), %r11 -; movq %r11, 0x88(%rsp) -; movq +8(%rsp), %r11 -; movq %r11, 0x90(%rsp) -; movq +(%rsp), %r11 -; movq %r11, 0x98(%rsp) -; movq +0x60(%rsp), %r11 -; movq %r11, 0xa0(%rsp) +; movq +0x28(%rsp), %rax +; movq %rax, 0x70(%rsp) +; movq +0x20(%rsp), %rax +; movq %rax, 0x78(%rsp) +; movq +0x18(%rsp), %rax +; movq %rax, 0x80(%rsp) +; movq +0x10(%rsp), %rax +; movq %rax, 0x88(%rsp) +; movq +8(%rsp), %rax +; movq %rax, 0x90(%rsp) +; movq +(%rsp), %rax +; movq %rax, 0x98(%rsp) +; movq +0x60(%rsp), %rax +; movq %rax, 0xa0(%rsp) ; leaq 0xb0(%rsp), %rdi -; load_ext_name %tail_callee_stack_args_and_rets+0, %r10 +; load_ext_name %tail_callee_stack_args_and_rets+0, %rax ; movq +0x48(%rsp), %rcx ; movq +0x50(%rsp), %rdx ; movq +0x58(%rsp), %rsi ; movq +0x40(%rsp), %r8 ; movq +0x38(%rsp), %r9 -; call *%r10 +; call *%rax ; movq +0x60(%rsp), %rax ; movq 0x1b0(%rsp), %rbx ; movq 0x1b8(%rsp), %r12 @@ -790,72 +790,72 @@ block0: ; movq %r9, 0x180(%rsp) ; movl $0x1e, %r9d ; movq %r9, 0x178(%rsp) -; movl $0x23, %esi -; movq %rsi, 0x170(%rsp) +; movl $0x23, %ecx +; movq %rcx, 0x170(%rsp) ; movl $0x28, %edi -; movl $0x2d, %eax -; movl $0x32, %r10d -; movl $0x37, %r14d -; movl $0x3c, %r15d -; movl $0x41, %ebx -; movl $0x46, %r12d -; movl $0x4b, %r13d +; movl $0x2d, %r10d +; movl $0x32, %r11d +; movl $0x37, %ebx +; movl $0x3c, %r12d +; movl $0x41, %r13d +; movl $0x46, %r14d +; movl $0x4b, %r15d ; movl $0x50, %esi ; movl $0x55, %edx ; movl $0x5a, %ecx ; movl $0x5f, %r8d ; movl $0x64, %r9d -; movl $0x69, %r11d -; movq %r11, 0x168(%rsp) -; movl $0x6e, %r11d -; movq %r11, 0x160(%rsp) -; movl $0x73, %r11d -; movq %r11, 0x158(%rsp) -; movl $0x78, %r11d -; movq %r11, 0x150(%rsp) -; movl $0x7d, %r11d -; movq %r11, 0x148(%rsp) -; movl $0x82, %r11d -; movq %r11, 0x140(%rsp) -; movl $0x87, %r11d -; movq %r11, 0x1a0(%rsp) -; movq 0x170(%rsp), %r11 -; movq %r11, (%rsp) +; movl $0x69, %eax +; movq %rax, 0x168(%rsp) +; movl $0x6e, %eax +; movq %rax, 0x160(%rsp) +; movl $0x73, %eax +; movq %rax, 0x158(%rsp) +; movl $0x78, %eax +; movq %rax, 0x150(%rsp) +; movl $0x7d, %eax +; movq %rax, 0x148(%rsp) +; movl $0x82, %eax +; movq %rax, 0x140(%rsp) +; movl $0x87, %eax +; movq %rax, 0x1a0(%rsp) +; movq 0x170(%rsp), %rax +; movq %rax, (%rsp) ; movq %rdi, 8(%rsp) -; movq %rax, 0x10(%rsp) -; movq %r10, 0x18(%rsp) -; movq %r14, 0x20(%rsp) -; movq %r15, 0x28(%rsp) -; movq %rbx, 0x30(%rsp) -; movq %r12, 0x38(%rsp) -; movq %r13, 0x40(%rsp) +; movq %r10, 0x10(%rsp) +; movq %r11, 0x18(%rsp) +; movq %rbx, 0x20(%rsp) +; movq %r12, 0x28(%rsp) +; movq %r13, 0x30(%rsp) +; movq %r14, 0x38(%rsp) +; movq %r15, 0x40(%rsp) ; movq %rsi, 0x48(%rsp) ; movq %rdx, 0x50(%rsp) ; movq %rcx, 0x58(%rsp) ; movq %r8, 0x60(%rsp) ; movq %r9, 0x68(%rsp) -; movq 0x168(%rsp), %r11 -; movq %r11, 0x70(%rsp) -; movq 0x160(%rsp), %r11 -; movq %r11, 0x78(%rsp) -; movq 0x158(%rsp), %r11 -; movq %r11, 0x80(%rsp) -; movq 0x150(%rsp), %r11 -; movq %r11, 0x88(%rsp) -; movq 0x148(%rsp), %r11 
-; movq %r11, 0x90(%rsp) -; movq 0x140(%rsp), %r11 -; movq %r11, 0x98(%rsp) -; movq 0x1a0(%rsp), %r11 -; movq %r11, 0xa0(%rsp) +; movq 0x168(%rsp), %rax +; movq %rax, 0x70(%rsp) +; movq 0x160(%rsp), %rax +; movq %rax, 0x78(%rsp) +; movq 0x158(%rsp), %rax +; movq %rax, 0x80(%rsp) +; movq 0x150(%rsp), %rax +; movq %rax, 0x88(%rsp) +; movq 0x148(%rsp), %rax +; movq %rax, 0x90(%rsp) +; movq 0x140(%rsp), %rax +; movq %rax, 0x98(%rsp) +; movq 0x1a0(%rsp), %rax +; movq %rax, 0xa0(%rsp) ; leaq 0xb0(%rsp), %rdi -; movabsq $0, %r10 ; reloc_external Abs8 %tail_callee_stack_args_and_rets 0 +; movabsq $0, %rax ; reloc_external Abs8 %tail_callee_stack_args_and_rets 0 ; movq 0x188(%rsp), %rcx ; movq 0x190(%rsp), %rdx ; movq 0x198(%rsp), %rsi ; movq 0x180(%rsp), %r8 ; movq 0x178(%rsp), %r9 -; callq *%r10 +; callq *%rax ; subq $0xb0, %rsp ; movq 0xb0(%rsp), %r11 ; movq %r11, 0x140(%rsp) @@ -881,11 +881,11 @@ block0: ; movq %r11, 0x190(%rsp) ; movq 0x108(%rsp), %r11 ; movq %r11, 0x198(%rsp) -; movq 0x110(%rsp), %rbx -; movq 0x118(%rsp), %r12 +; movq 0x110(%rsp), %r15 +; movq 0x118(%rsp), %r14 ; movq 0x120(%rsp), %r13 -; movq 0x128(%rsp), %r14 -; movq 0x130(%rsp), %r15 +; movq 0x128(%rsp), %r12 +; movq 0x130(%rsp), %rbx ; movq 0x138(%rsp), %r11 ; movq %r11, 0x1a0(%rsp) ; movq 0x1a0(%rsp), %rax diff --git a/cranelift/filetests/filetests/isa/x64/trunc-libcall.clif b/cranelift/filetests/filetests/isa/x64/trunc-libcall.clif index d5e5e2631ca8..1b6268e1c3dc 100644 --- a/cranelift/filetests/filetests/isa/x64/trunc-libcall.clif +++ b/cranelift/filetests/filetests/isa/x64/trunc-libcall.clif @@ -11,8 +11,8 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; load_ext_name %TruncF32+0, %rcx -; call *%rcx +; load_ext_name %TruncF32+0, %rsi +; call *%rsi ; movq %rbp, %rsp ; popq %rbp ; retq @@ -22,8 +22,8 @@ block0(v0: f32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $0, %rcx ; reloc_external Abs8 %TruncF32 0 -; callq *%rcx +; movabsq $0, %rsi ; reloc_external Abs8 %TruncF32 0 +; callq *%rsi ; movq %rbp, %rsp ; popq %rbp ; retq @@ -38,8 +38,8 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; load_ext_name %TruncF64+0, %rcx -; call *%rcx +; load_ext_name %TruncF64+0, %rsi +; call *%rsi ; movq %rbp, %rsp ; popq %rbp ; retq @@ -49,8 +49,8 @@ block0(v0: f64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movabsq $0, %rcx ; reloc_external Abs8 %TruncF64 0 -; callq *%rcx +; movabsq $0, %rsi ; reloc_external Abs8 %TruncF64 0 +; callq *%rsi ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/udivrem.clif b/cranelift/filetests/filetests/isa/x64/udivrem.clif index 26adf926feba..283f0a689cc8 100644 --- a/cranelift/filetests/filetests/isa/x64/udivrem.clif +++ b/cranelift/filetests/filetests/isa/x64/udivrem.clif @@ -16,12 +16,12 @@ block0(v0: i8, v1: i8): ; block0: ; movzbl %dil, %eax ; divb %sil ;; implicit: %ax, trap=254 -; movq %rax, %rcx +; movq %rax, %r9 ; movzbl %dil, %eax ; divb %sil ;; implicit: %ax, trap=254 ; movq %rax, %rdx ; shrq $0x8, %rdx -; movq %rcx, %rax +; movq %r9, %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -33,12 +33,12 @@ block0(v0: i8, v1: i8): ; block1: ; offset 0x4 ; movzbl %dil, %eax ; divb %sil ; trap: int_divz -; movq %rax, %rcx +; movq %rax, %r9 ; movzbl %dil, %eax ; divb %sil ; trap: int_divz ; movq %rax, %rdx ; shrq $8, %rdx -; movq %rcx, %rax +; movq %r9, %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -58,12 +58,12 @@ block0(v0: i16, v1: i16): ; xorq %rdx, %rdx ; movq %rdi, %rax ; divw %si ;; implicit: %ax, 
%dx, trap=254 -; movq %rax, %r8 +; movq %rax, %rcx ; uninit %rdx ; xorq %rdx, %rdx ; movq %rdi, %rax ; divw %si ;; implicit: %ax, %dx, trap=254 -; movq %r8, %rax +; movq %rcx, %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -76,11 +76,11 @@ block0(v0: i16, v1: i16): ; xorq %rdx, %rdx ; movq %rdi, %rax ; divw %si ; trap: int_divz -; movq %rax, %r8 +; movq %rax, %rcx ; xorq %rdx, %rdx ; movq %rdi, %rax ; divw %si ; trap: int_divz -; movq %r8, %rax +; movq %rcx, %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -100,12 +100,12 @@ block0(v0: i32, v1: i32): ; xorq %rdx, %rdx ; movq %rdi, %rax ; divl %esi ;; implicit: %eax, %edx, trap=254 -; movq %rax, %r8 +; movq %rax, %rcx ; uninit %rdx ; xorq %rdx, %rdx ; movq %rdi, %rax ; divl %esi ;; implicit: %eax, %edx, trap=254 -; movq %r8, %rax +; movq %rcx, %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -118,11 +118,11 @@ block0(v0: i32, v1: i32): ; xorq %rdx, %rdx ; movq %rdi, %rax ; divl %esi ; trap: int_divz -; movq %rax, %r8 +; movq %rax, %rcx ; xorq %rdx, %rdx ; movq %rdi, %rax ; divl %esi ; trap: int_divz -; movq %r8, %rax +; movq %rcx, %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -142,12 +142,12 @@ block0(v0: i64, v1: i64): ; xorq %rdx, %rdx ; movq %rdi, %rax ; divq %rsi ;; implicit: %rax, %rdx, trap=254 -; movq %rax, %r8 +; movq %rax, %rcx ; uninit %rdx ; xorq %rdx, %rdx ; movq %rdi, %rax ; divq %rsi ;; implicit: %rax, %rdx, trap=254 -; movq %r8, %rax +; movq %rcx, %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -160,11 +160,11 @@ block0(v0: i64, v1: i64): ; xorq %rdx, %rdx ; movq %rdi, %rax ; divq %rsi ; trap: int_divz -; movq %rax, %r8 +; movq %rax, %rcx ; xorq %rdx, %rdx ; movq %rdi, %rax ; divq %rsi ; trap: int_divz -; movq %r8, %rax +; movq %rcx, %rax ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/uext-sext-handling.clif b/cranelift/filetests/filetests/isa/x64/uext-sext-handling.clif index 9f1eba812b6a..9155c449a01e 100644 --- a/cranelift/filetests/filetests/isa/x64/uext-sext-handling.clif +++ b/cranelift/filetests/filetests/isa/x64/uext-sext-handling.clif @@ -1,4 +1,4 @@ -test compile +test compile precise-output target x86_64 ; The x86_64 system_v call conv respects uext and sext @@ -11,14 +11,29 @@ block0(v0: i8): return } -; check: pushq %rbp -; nextln: movq %rsp, %rbp -; nextln: block0: -; nextln: movzbq %dil, %rdi -; nextln: load_ext_name userextname0+0, %rdx -; nextln: call *%rdx +; VCode: +; pushq %rbp +; movq %rsp, %rbp +; block0: +; movzbq %dil, %rdi +; load_ext_name userextname0+0, %rsi +; call *%rsi +; movq %rbp, %rsp +; popq %rbp +; retq +; +; Disassembled: +; block0: ; offset 0x0 +; pushq %rbp +; movq %rsp, %rbp +; block1: ; offset 0x4 +; movzbq %dil, %rdi +; movabsq $0, %rsi ; reloc_external Abs8 u0:0 0 +; callq *%rsi +; movq %rbp, %rsp +; popq %rbp +; retq -; The x86_64 windows_fastcall call conv respects uext and sext function u0:0(i8) windows_fastcall { sig0 = (i8 uext) windows_fastcall fn0 = u0:0 sig0 @@ -28,10 +43,34 @@ block0(v0: i8): return } -; check: pushq %rbp -; nextln: movq %rsp, %rbp -; nextln: subq $$0x20, %rsp -; nextln: block0: -; nextln: movzbq %cl, %rcx -; nextln: load_ext_name userextname0+0, %rdx -; nextln: call *%rdx +; VCode: +; pushq %rbp +; movq %rsp, %rbp +; subq $0x30, %rsp +; movq %rsi, 0x20(%rsp) +; block0: +; movzbq %cl, %rcx +; load_ext_name userextname0+0, %rsi +; call *%rsi +; movq 0x20(%rsp), %rsi +; addq $0x30, %rsp +; movq %rbp, %rsp +; popq %rbp +; retq +; +; Disassembled: +; block0: ; offset 0x0 +; pushq %rbp +; movq %rsp, %rbp +; subq $0x30, %rsp +; movq %rsi, 
0x20(%rsp) +; block1: ; offset 0xd +; movzbq %cl, %rcx +; movabsq $0, %rsi ; reloc_external Abs8 u0:0 0 +; callq *%rsi +; movq 0x20(%rsp), %rsi +; addq $0x30, %rsp +; movq %rbp, %rsp +; popq %rbp +; retq + diff --git a/cranelift/filetests/filetests/isa/x64/umax-bug.clif b/cranelift/filetests/filetests/isa/x64/umax-bug.clif index fe77aec7f7e7..da0154a8b01a 100644 --- a/cranelift/filetests/filetests/isa/x64/umax-bug.clif +++ b/cranelift/filetests/filetests/isa/x64/umax-bug.clif @@ -12,10 +12,10 @@ block0(v1: i32, v2: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movl (%rsi), %edx -; cmpl %edi, %edx +; movl (%rsi), %esi +; cmpl %edi, %esi ; movq %rdi, %rax -; cmovael %edx, %eax +; cmovael %esi, %eax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -25,10 +25,10 @@ block0(v1: i32, v2: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl (%rsi), %edx -; cmpl %edi, %edx +; movl (%rsi), %esi +; cmpl %edi, %esi ; movq %rdi, %rax -; cmovael %edx, %eax +; cmovael %esi, %eax ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/user_stack_maps.clif b/cranelift/filetests/filetests/isa/x64/user_stack_maps.clif index a9e15e1c9c5f..741d7c2df43c 100644 --- a/cranelift/filetests/filetests/isa/x64/user_stack_maps.clif +++ b/cranelift/filetests/filetests/isa/x64/user_stack_maps.clif @@ -37,14 +37,14 @@ block0: ; movq %rsp, %rbp ; subq $0x30, %rsp ; movq %rbx, 0x10(%rsp) -; movq %r14, 0x18(%rsp) -; movq %r15, 0x20(%rsp) +; movq %r12, 0x18(%rsp) +; movq %r13, 0x20(%rsp) ; block0: ; uninit %rdi ; xorl %edi, %edi -; movq %rdi, %rbx -; movl $0x1, %r14d -; movl $0x2, %r15d +; movq %rdi, %r13 +; movl $0x1, %ebx +; movl $0x2, %r12d ; movl $0x0, +(%rsp) ; movl $0x1, +4(%rsp) ; movl $0x2, +8(%rsp) @@ -52,18 +52,18 @@ block0: ; ; UserStackMap { by_type: [(types::I32, CompoundBitSet {0, 4, 8})], sp_to_sized_stack_slots: None } ; movl $0x1, +(%rsp) ; movl $0x2, +4(%rsp) -; movq %rbx, %rdi +; movq %r13, %rdi ; call User(userextname0) ; ; UserStackMap { by_type: [(types::I32, CompoundBitSet {0, 4})], sp_to_sized_stack_slots: None } ; movl $0x2, +(%rsp) -; movq %r14, %rdi +; movq %rbx, %rdi ; call User(userextname0) ; ; UserStackMap { by_type: [(types::I32, CompoundBitSet {0})], sp_to_sized_stack_slots: None } -; movq %r15, %rdi +; movq %r12, %rdi ; call User(userextname0) ; movq 0x10(%rsp), %rbx -; movq 0x18(%rsp), %r14 -; movq 0x20(%rsp), %r15 +; movq 0x18(%rsp), %r12 +; movq 0x20(%rsp), %r13 ; addq $0x30, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -75,29 +75,29 @@ block0: ; movq %rsp, %rbp ; subq $0x30, %rsp ; movq %rbx, 0x10(%rsp) -; movq %r14, 0x18(%rsp) -; movq %r15, 0x20(%rsp) +; movq %r12, 0x18(%rsp) +; movq %r13, 0x20(%rsp) ; block1: ; offset 0x17 ; xorl %edi, %edi -; movq %rdi, %rbx -; movl $1, %r14d -; movl $2, %r15d +; movq %rdi, %r13 +; movl $1, %ebx +; movl $2, %r12d ; movl $0, (%rsp) ; movl $1, 4(%rsp) ; movl $2, 8(%rsp) -; callq 0x44 ; reloc_external CallPCRel4 u0:0 -4 +; callq 0x43 ; reloc_external CallPCRel4 u0:0 -4 ; movl $1, (%rsp) ; movl $2, 4(%rsp) -; movq %rbx, %rdi -; callq 0x5b ; reloc_external CallPCRel4 u0:0 -4 +; movq %r13, %rdi +; callq 0x5a ; reloc_external CallPCRel4 u0:0 -4 ; movl $2, (%rsp) -; movq %r14, %rdi -; callq 0x6a ; reloc_external CallPCRel4 u0:0 -4 -; movq %r15, %rdi -; callq 0x72 ; reloc_external CallPCRel4 u0:0 -4 +; movq %rbx, %rdi +; callq 0x69 ; reloc_external CallPCRel4 u0:0 -4 +; movq %r12, %rdi +; callq 0x71 ; reloc_external CallPCRel4 u0:0 -4 ; movq 0x10(%rsp), %rbx -; movq 0x18(%rsp), %r14 -; movq 0x20(%rsp), %r15 +; movq 0x18(%rsp), 
%r12 +; movq 0x20(%rsp), %r13 ; addq $0x30, %rsp ; movq %rbp, %rsp ; popq %rbp @@ -135,11 +135,11 @@ block0(v0: i8, v1: i16, v2: i32, v3: i64, v4: f32, v5: f64): ; block0: ; movq %rdi, %r12 ; movb %sil, +(%rsp) -; movq %rsi, %r14 +; movq %rsi, %rbx ; movw %dx, +8(%rsp) -; movq %rdx, %rbx +; movq %rdx, %r15 ; movl %ecx, +0x10(%rsp) -; movq %rcx, %r15 +; movq %rcx, %r14 ; movss %xmm0, +0x14(%rsp) ; movdqu %xmm0, +0x60(%rsp) ; movq %r8, +0x18(%rsp) @@ -148,13 +148,13 @@ block0(v0: i8, v1: i16, v2: i32, v3: i64, v4: f32, v5: f64): ; movdqu %xmm1, +0x70(%rsp) ; call User(userextname0) ; ; UserStackMap { by_type: [(types::I8, CompoundBitSet {0}), (types::I16, CompoundBitSet {8}), (types::I32, CompoundBitSet {16}), (types::F32, CompoundBitSet {20}), (types::I64, CompoundBitSet {24}), (types::F64, CompoundBitSet {32})], sp_to_sized_stack_slots: None } -; movq %r15, %rcx +; movq %r14, %rcx ; movq %r12, %rdi ; movl %ecx, (%rdi) ; movq %r13, %r8 ; movq %r8, 8(%rdi) -; movq %r14, %rax -; movq %rbx, %rdx +; movq %rbx, %rax +; movq %r15, %rdx ; movdqu +0x60(%rsp), %xmm0 ; movdqu +0x70(%rsp), %xmm1 ; movq 0x80(%rsp), %rbx @@ -180,11 +180,11 @@ block0(v0: i8, v1: i16, v2: i32, v3: i64, v4: f32, v5: f64): ; block1: ; offset 0x33 ; movq %rdi, %r12 ; movb %sil, (%rsp) -; movq %rsi, %r14 +; movq %rsi, %rbx ; movw %dx, 8(%rsp) -; movq %rdx, %rbx +; movq %rdx, %r15 ; movl %ecx, 0x10(%rsp) -; movq %rcx, %r15 +; movq %rcx, %r14 ; movss %xmm0, 0x14(%rsp) ; movdqu %xmm0, 0x60(%rsp) ; movq %r8, 0x18(%rsp) @@ -192,13 +192,13 @@ block0(v0: i8, v1: i16, v2: i32, v3: i64, v4: f32, v5: f64): ; movsd %xmm1, 0x20(%rsp) ; movdqu %xmm1, 0x70(%rsp) ; callq 0x71 ; reloc_external CallPCRel4 u0:0 -4 -; movq %r15, %rcx +; movq %r14, %rcx ; movq %r12, %rdi ; movl %ecx, (%rdi) ; movq %r13, %r8 ; movq %r8, 8(%rdi) -; movq %r14, %rax -; movq %rbx, %rdx +; movq %rbx, %rax +; movq %r15, %rdx ; movdqu 0x60(%rsp), %xmm0 ; movdqu 0x70(%rsp), %xmm1 ; movq 0x80(%rsp), %rbx diff --git a/cranelift/filetests/filetests/isa/x64/ushr.clif b/cranelift/filetests/filetests/isa/x64/ushr.clif index cf35ce6a50d0..276164338a49 100644 --- a/cranelift/filetests/filetests/isa/x64/ushr.clif +++ b/cranelift/filetests/filetests/isa/x64/ushr.clif @@ -18,11 +18,10 @@ block0(v0: i128, v1: i8): ; block0: ; movzbq %dl, %rcx ; shrq %cl, %rdi -; movq %rsi, %r10 -; shrq %cl, %r10 -; movq %rcx, %r11 +; movq %rsi, %r8 +; shrq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r11, %rax ; subq %rax, %rcx ; shlq %cl, %rsi ; uninit %rdx @@ -31,9 +30,9 @@ block0(v0: i128, v1: i8): ; cmoveq %rdx, %rsi ; orq %rdi, %rsi ; testq $0x40, %rax -; movq %r10, %rax +; movq %r8, %rax ; cmoveq %rsi, %rax -; cmoveq %r10, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -45,11 +44,10 @@ block0(v0: i128, v1: i8): ; block1: ; offset 0x4 ; movzbq %dl, %rcx ; shrq %cl, %rdi -; movq %rsi, %r10 -; shrq %cl, %r10 -; movq %rcx, %r11 +; movq %rsi, %r8 +; shrq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r11, %rax ; subq %rax, %rcx ; shlq %cl, %rsi ; xorq %rdx, %rdx @@ -57,9 +55,9 @@ block0(v0: i128, v1: i8): ; cmoveq %rdx, %rsi ; orq %rdi, %rsi ; testq $0x40, %rax -; movq %r10, %rax +; movq %r8, %rax ; cmoveq %rsi, %rax -; cmoveq %r10, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -74,14 +72,13 @@ block0(v0: i128, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block0: +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r10 ; shrq %cl, %rdi -; movq %rsi, %r9 -; shrq %cl, %r9 -; movq %rcx, %r10 +; movq %rsi, %r8 +; shrq %cl, %r8 +; movq %rcx, %rax ; 
movl $0x40, %ecx -; movq %r10, %rax ; subq %rax, %rcx ; shlq %cl, %rsi ; uninit %rdx @@ -90,9 +87,9 @@ block0(v0: i128, v1: i64): ; cmoveq %rdx, %rsi ; orq %rdi, %rsi ; testq $0x40, %rax -; movq %r9, %rax +; movq %r8, %rax ; cmoveq %rsi, %rax -; cmoveq %r9, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -102,14 +99,13 @@ block0(v0: i128, v1: i64): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r10 ; shrq %cl, %rdi -; movq %rsi, %r9 -; shrq %cl, %r9 -; movq %rcx, %r10 +; movq %rsi, %r8 +; shrq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r10, %rax ; subq %rax, %rcx ; shlq %cl, %rsi ; xorq %rdx, %rdx @@ -117,9 +113,9 @@ block0(v0: i128, v1: i64): ; cmoveq %rdx, %rsi ; orq %rdi, %rsi ; testq $0x40, %rax -; movq %r9, %rax +; movq %r8, %rax ; cmoveq %rsi, %rax -; cmoveq %r9, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -134,14 +130,13 @@ block0(v0: i128, v1: i32): ; pushq %rbp ; movq %rsp, %rbp ; block0: +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r10 ; shrq %cl, %rdi -; movq %rsi, %r9 -; shrq %cl, %r9 -; movq %rcx, %r10 +; movq %rsi, %r8 +; shrq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r10, %rax ; subq %rax, %rcx ; shlq %cl, %rsi ; uninit %rdx @@ -150,9 +145,9 @@ block0(v0: i128, v1: i32): ; cmoveq %rdx, %rsi ; orq %rdi, %rsi ; testq $0x40, %rax -; movq %r9, %rax +; movq %r8, %rax ; cmoveq %rsi, %rax -; cmoveq %r9, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -162,14 +157,13 @@ block0(v0: i128, v1: i32): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r10 ; shrq %cl, %rdi -; movq %rsi, %r9 -; shrq %cl, %r9 -; movq %rcx, %r10 +; movq %rsi, %r8 +; shrq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r10, %rax ; subq %rax, %rcx ; shlq %cl, %rsi ; xorq %rdx, %rdx @@ -177,9 +171,9 @@ block0(v0: i128, v1: i32): ; cmoveq %rdx, %rsi ; orq %rdi, %rsi ; testq $0x40, %rax -; movq %r9, %rax +; movq %r8, %rax ; cmoveq %rsi, %rax -; cmoveq %r9, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -194,14 +188,13 @@ block0(v0: i128, v1: i16): ; pushq %rbp ; movq %rsp, %rbp ; block0: +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r10 ; shrq %cl, %rdi -; movq %rsi, %r9 -; shrq %cl, %r9 -; movq %rcx, %r10 +; movq %rsi, %r8 +; shrq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r10, %rax ; subq %rax, %rcx ; shlq %cl, %rsi ; uninit %rdx @@ -210,9 +203,9 @@ block0(v0: i128, v1: i16): ; cmoveq %rdx, %rsi ; orq %rdi, %rsi ; testq $0x40, %rax -; movq %r9, %rax +; movq %r8, %rax ; cmoveq %rsi, %rax -; cmoveq %r9, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -222,14 +215,13 @@ block0(v0: i128, v1: i16): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r10 ; shrq %cl, %rdi -; movq %rsi, %r9 -; shrq %cl, %r9 -; movq %rcx, %r10 +; movq %rsi, %r8 +; shrq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r10, %rax ; subq %rax, %rcx ; shlq %cl, %rsi ; xorq %rdx, %rdx @@ -237,9 +229,9 @@ block0(v0: i128, v1: i16): ; cmoveq %rdx, %rsi ; orq %rdi, %rsi ; testq $0x40, %rax -; movq %r9, %rax +; movq %r8, %rax ; cmoveq %rsi, %rax -; cmoveq %r9, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -254,14 +246,13 @@ block0(v0: i128, v1: i8): ; pushq %rbp ; movq %rsp, %rbp ; block0: +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r10 ; shrq %cl, %rdi -; movq %rsi, %r9 -; shrq %cl, %r9 -; movq 
%rcx, %r10 +; movq %rsi, %r8 +; shrq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r10, %rax ; subq %rax, %rcx ; shlq %cl, %rsi ; uninit %rdx @@ -270,9 +261,9 @@ block0(v0: i128, v1: i8): ; cmoveq %rdx, %rsi ; orq %rdi, %rsi ; testq $0x40, %rax -; movq %r9, %rax +; movq %r8, %rax ; cmoveq %rsi, %rax -; cmoveq %r9, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq @@ -282,14 +273,13 @@ block0(v0: i128, v1: i8): ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 +; movq %rdx, %rax ; movq %rdx, %rcx -; movq %rdx, %r10 ; shrq %cl, %rdi -; movq %rsi, %r9 -; shrq %cl, %r9 -; movq %rcx, %r10 +; movq %rsi, %r8 +; shrq %cl, %r8 +; movq %rcx, %rax ; movl $0x40, %ecx -; movq %r10, %rax ; subq %rax, %rcx ; shlq %cl, %rsi ; xorq %rdx, %rdx @@ -297,9 +287,9 @@ block0(v0: i128, v1: i8): ; cmoveq %rdx, %rsi ; orq %rdi, %rsi ; testq $0x40, %rax -; movq %r9, %rax +; movq %r8, %rax ; cmoveq %rsi, %rax -; cmoveq %r9, %rdx +; cmoveq %r8, %rdx ; movq %rbp, %rsp ; popq %rbp ; retq diff --git a/cranelift/filetests/filetests/isa/x64/winch.clif b/cranelift/filetests/filetests/isa/x64/winch.clif index 9b5e63d2ba77..0067b8fc177e 100644 --- a/cranelift/filetests/filetests/isa/x64/winch.clif +++ b/cranelift/filetests/filetests/isa/x64/winch.clif @@ -302,8 +302,8 @@ block0(v0:i64): ; load_ext_name %g+0, %r10 ; call *%r10 ; movq +(%rsp), %rax -; movq +8(%rsp), %rdx -; andl %edx, %eax +; movq +8(%rsp), %rsi +; andl %esi, %eax ; movq 0x20(%rsp), %rbx ; movq 0x28(%rsp), %r12 ; movq 0x30(%rsp), %r13 @@ -333,8 +333,8 @@ block0(v0:i64): ; movq (%rsp), %r11 ; movq %r11, 0x18(%rsp) ; movq 0x10(%rsp), %rax -; movq 0x18(%rsp), %rdx -; andl %edx, %eax +; movq 0x18(%rsp), %rsi +; andl %esi, %eax ; movq 0x20(%rsp), %rbx ; movq 0x28(%rsp), %r12 ; movq 0x30(%rsp), %r13 @@ -387,13 +387,13 @@ block0: ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movl $0xffff2222, %r11d -; movl $0x55, %r9d -; movl $0xb, %r10d -; movq %r11, 8(%rdi) -; movzbq %r9b, %r11 -; movq %r9, (%rdi) -; movzbq %r10b, %rax +; movl $0xffff2222, %r9d +; movl $0x55, %esi +; movl $0xb, %r8d +; movq %r9, 8(%rdi) +; movzbq %sil, %r9 +; movq %rsi, (%rdi) +; movzbq %r8b, %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -403,13 +403,13 @@ block0: ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl $0xffff2222, %r11d -; movl $0x55, %r9d -; movl $0xb, %r10d -; movq %r11, 8(%rdi) -; movzbq %r9b, %r11 -; movq %r9, (%rdi) -; movzbq %r10b, %rax +; movl $0xffff2222, %r9d +; movl $0x55, %esi +; movl $0xb, %r8d +; movq %r9, 8(%rdi) +; movzbq %sil, %r9 +; movq %rsi, (%rdi) +; movzbq %r8b, %rax ; movq %rbp, %rsp ; popq %rbp ; retq @@ -426,10 +426,10 @@ block0: ; pushq %rbp ; movq %rsp, %rbp ; block0: -; movl $0xffff2222, %edx +; movl $0xffff2222, %esi ; movl $0x55, %r8d ; movl $0xb, %eax -; movq %rdx, 1(%rdi) +; movq %rsi, 1(%rdi) ; movb %r8b, (%rdi) ; movq %rbp, %rsp ; popq %rbp @@ -440,10 +440,10 @@ block0: ; pushq %rbp ; movq %rsp, %rbp ; block1: ; offset 0x4 -; movl $0xffff2222, %edx +; movl $0xffff2222, %esi ; movl $0x55, %r8d ; movl $0xb, %eax -; movq %rdx, 1(%rdi) +; movq %rsi, 1(%rdi) ; movb %r8b, (%rdi) ; movq %rbp, %rsp ; popq %rbp diff --git a/supply-chain/imports.lock b/supply-chain/imports.lock index 92e14eded7d1..7b0fcfd70869 100644 --- a/supply-chain/imports.lock +++ b/supply-chain/imports.lock @@ -1112,8 +1112,8 @@ user-login = "dtolnay" user-name = "David Tolnay" [[publisher.regalloc2]] -version = "0.13.4" -when = "2026-01-06" +version = "0.14.0" +when = "2026-02-14" user-id = 3726 user-login = "cfallin" user-name = "Chris Fallin" diff --git 
a/tests/disas/aarch64-relaxed-simd.wat b/tests/disas/aarch64-relaxed-simd.wat index b4f036a53a15..d63e668f70fa 100644 --- a/tests/disas/aarch64-relaxed-simd.wat +++ b/tests/disas/aarch64-relaxed-simd.wat @@ -78,10 +78,10 @@ ;; wasm[0]::function[5]: ;; stp x29, x30, [sp, #-0x10]! ;; mov x29, sp -;; smull v17.8h, v0.8b, v1.8b -;; smull2 v18.8h, v0.16b, v1.16b -;; addp v17.8h, v17.8h, v18.8h -;; saddlp v17.4s, v17.8h -;; add v0.4s, v17.4s, v2.4s +;; smull v16.8h, v0.8b, v1.8b +;; smull2 v17.8h, v0.16b, v1.16b +;; addp v16.8h, v16.8h, v17.8h +;; saddlp v16.4s, v16.8h +;; add v0.4s, v16.4s, v2.4s ;; ldp x29, x30, [sp], #0x10 ;; ret diff --git a/tests/disas/component-model/direct-adapter-calls-x64.wat b/tests/disas/component-model/direct-adapter-calls-x64.wat index fc93912611fa..9ac0812f50af 100644 --- a/tests/disas/component-model/direct-adapter-calls-x64.wat +++ b/tests/disas/component-model/direct-adapter-calls-x64.wat @@ -87,39 +87,39 @@ ;; movq 0x18(%r10), %r10 ;; addq $0x20, %r10 ;; cmpq %rsp, %r10 -;; ja 0xf2 +;; ja 0xe4 ;; 79: subq $0x10, %rsp -;; movq %r12, (%rsp) -;; movq 0x78(%rdi), %r12 -;; movl (%r12), %r10d -;; testl $1, %r10d -;; je 0xdd -;; 96: movq 0x60(%rdi), %rsi -;; movl (%rsi), %r10d -;; movq %r10, %rax -;; andl $0xfffffffe, %eax -;; movl %eax, (%rsi) -;; orl $1, %r10d -;; movl %r10d, (%rsi) +;; movq %rbx, (%rsp) +;; movq 0x78(%rdi), %rbx +;; movl (%rbx), %eax +;; testl $1, %eax +;; je 0xd0 +;; 92: movq 0x60(%rdi), %rax +;; movl (%rax), %ecx +;; movq %rcx, %rsi +;; andl $0xfffffffe, %esi +;; movl %esi, (%rax) +;; orl $1, %ecx +;; movl %ecx, (%rax) ;; movq %rdi, %rax ;; movq 0x40(%rax), %rdi ;; movq %rax, %rsi ;; callq 0 -;; movl (%r12), %esi -;; movq %rsi, %rcx -;; andl $0xfffffffe, %ecx -;; movl %ecx, (%r12) -;; orl $1, %esi -;; movl %esi, (%r12) -;; movq (%rsp), %r12 +;; movl (%rbx), %ecx +;; movq %rcx, %rdx +;; andl $0xfffffffe, %edx +;; movl %edx, (%rbx) +;; orl $1, %ecx +;; movl %ecx, (%rbx) +;; movq (%rsp), %rbx ;; addq $0x10, %rsp ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; dd: movq %rdi, %rsi -;; e0: movq 0x48(%rsi), %r9 -;; e4: movq 0x58(%rsi), %rdi -;; e8: movl $0x17, %edx -;; ed: callq *%r9 -;; f0: ud2 -;; f2: ud2 +;; d0: movq %rdi, %rsi +;; d3: movq 0x48(%rsi), %rax +;; d7: movq 0x58(%rsi), %rdi +;; db: movl $0x17, %edx +;; e0: callq *%rax +;; e2: ud2 +;; e4: ud2 diff --git a/tests/disas/debug-exceptions.wat b/tests/disas/debug-exceptions.wat index 146dfb1d0915..b21f539483bc 100644 --- a/tests/disas/debug-exceptions.wat +++ b/tests/disas/debug-exceptions.wat @@ -26,11 +26,11 @@ ;; stp d10, d11, [sp, #-0x10]! ;; stp d8, d9, [sp, #-0x10]! 
;; sub sp, sp, #0x30 -;; ldr x3, [x2, #8] -;; ldr x3, [x3, #0x18] -;; mov x4, sp -;; cmp x4, x3 -;; b.lo #0x190 +;; ldr x0, [x2, #8] +;; ldr x0, [x0, #0x18] +;; mov x1, sp +;; cmp x1, x0 +;; b.lo #0x194 ;; 44: stur x2, [sp] ;; mov x0, x2 ;; stur x2, [sp, #0x10] @@ -45,21 +45,21 @@ ;; nop ;; ├─╼ debug frame state (after previous inst): func key DefinedWasmFunction(StaticModuleIndex(0), DefinedFuncIndex(0)), wasm PC 61, slot at FP-0xc0, locals , stack ;; ╰─╼ breakpoint patch: wasm PC 61, patch bytes [34, 1, 0, 148] -;; mov w22, #0x2a -;; stur w22, [sp, #8] +;; mov w19, #0x2a +;; stur w19, [sp, #8] ;; nop ;; ├─╼ debug frame state (after previous inst): func key DefinedWasmFunction(StaticModuleIndex(0), DefinedFuncIndex(0)), wasm PC 63, slot at FP-0xc0, locals , stack I32 @ slot+0x8 ;; ╰─╼ breakpoint patch: wasm PC 63, patch bytes [31, 1, 0, 148] ;; nop ;; ├─╼ debug frame state (after previous inst): func key DefinedWasmFunction(StaticModuleIndex(0), DefinedFuncIndex(0)), wasm PC 64, slot at FP-0xc0, locals , stack ;; ╰─╼ breakpoint patch: wasm PC 64, patch bytes [30, 1, 0, 148] -;; stur w22, [sp, #8] +;; stur w19, [sp, #8] ;; nop ;; ├─╼ debug frame state (after previous inst): func key DefinedWasmFunction(StaticModuleIndex(0), DefinedFuncIndex(0)), wasm PC 66, slot at FP-0xc0, locals , stack I32 @ slot+0x8 ;; ╰─╼ breakpoint patch: wasm PC 66, patch bytes [28, 1, 0, 148] ;; ldur x2, [sp, #0x10] ;; bl #0x448 -;; 84: mov x24, x2 +;; 84: mov x20, x2 ;; mov w3, #0x4000000 ;; mov w4, #2 ;; mov w5, #0x28 @@ -70,11 +70,11 @@ ;; ldr x5, [x0, #8] ;; ldr x6, [x5, #0x20] ;; stur x5, [sp, #0x20] -;; add x15, x6, #0x20 -;; str w22, [x15, w2, uxtw] +;; add x3, x6, #0x20 +;; str w19, [x3, w2, uxtw] ;; add x3, x6, #0x18 -;; mov x1, x24 -;; str w1, [x3, w2, uxtw] +;; mov x0, x20 +;; str w0, [x3, w2, uxtw] ;; mov w3, #0 ;; add x4, x6, #0x1c ;; stur x6, [sp, #0x18] @@ -90,39 +90,40 @@ ;; ec: ldur x2, [sp, #0x10] ;; bl #0x414 ;; f4: .byte 0x1f, 0xc1, 0x00, 0x00 -;; mov x9, x0 -;; mov w6, w9 -;; mov x7, #0x28 -;; adds x5, x6, x7 -;; cset x7, hs -;; uxtb w6, w7 -;; cbnz x6, #0x1a8 -;; 114: ldur x4, [sp, #0x20] -;; ldr x8, [x4, #0x28] -;; cmp x5, x8 -;; cset x10, hi -;; uxtb w10, w10 -;; cbnz x10, #0x1ac +;; mov x2, x0 +;; mov w3, w2 +;; mov x4, #0x28 +;; adds x3, x3, x4 +;; cset x4, hs +;; uxtb w4, w4 +;; cbnz x4, #0x1ac +;; 114: ldur x5, [sp, #0x20] +;; ldr x1, [x5, #0x28] +;; cmp x3, x1 +;; cset x1, hi +;; uxtb w1, w1 +;; cbnz x1, #0x1b0 ;; 12c: ldur x6, [sp, #0x18] -;; add x10, x6, #0x20 -;; ldr w12, [x10, w9, uxtw] -;; stur w12, [sp, #8] +;; add x0, x6, #0x20 +;; ldr w0, [x0, w2, uxtw] +;; stur w0, [sp, #8] ;; ldur x0, [sp, #0x10] ;; nop ;; ├─╼ debug frame state (after previous inst): func key DefinedWasmFunction(StaticModuleIndex(0), DefinedFuncIndex(0)), wasm PC 72, slot at FP-0xc0, locals , stack I32 @ slot+0x8 ;; ╰─╼ breakpoint patch: wasm PC 72, patch bytes [234, 0, 0, 148] -;; ldr x14, [x0, #0x30] -;; ldr x2, [x0, #0x40] +;; ldur x1, [sp, #0x10] +;; ldr x0, [x1, #0x30] +;; ldr x2, [x1, #0x40] ;; ldur x3, [sp, #0x10] -;; blr x14 +;; blr x0 ;; ╰─╼ debug frame state (after previous inst): func key DefinedWasmFunction(StaticModuleIndex(0), DefinedFuncIndex(0)), wasm PC 72, slot at FP-0xc0, locals , stack I32 @ slot+0x8 -;; 154: ldur x0, [sp, #0x10] +;; 158: ldur x0, [sp, #0x10] ;; nop ;; ├─╼ debug frame state (after previous inst): func key DefinedWasmFunction(StaticModuleIndex(0), DefinedFuncIndex(0)), wasm PC 74, slot at FP-0xc0, locals , stack I32 @ slot+0x8 -;; ╰─╼ breakpoint patch: wasm PC 74, patch 
bytes [228, 0, 0, 148] +;; ╰─╼ breakpoint patch: wasm PC 74, patch bytes [227, 0, 0, 148] ;; nop ;; ├─╼ debug frame state (after previous inst): func key DefinedWasmFunction(StaticModuleIndex(0), DefinedFuncIndex(0)), wasm PC 75, slot at FP-0xc0, locals , stack -;; ╰─╼ breakpoint patch: wasm PC 75, patch bytes [227, 0, 0, 148] +;; ╰─╼ breakpoint patch: wasm PC 75, patch bytes [226, 0, 0, 148] ;; add sp, sp, #0x30 ;; ldp d8, d9, [sp], #0x10 ;; ldp d10, d11, [sp], #0x10 @@ -135,11 +136,11 @@ ;; ldp x27, x28, [sp], #0x10 ;; ldp x29, x30, [sp], #0x10 ;; ret -;; 190: stur x2, [sp, #0x10] -;; 194: mov w3, #0 -;; 198: bl #0x3dc -;; 19c: ldur x2, [sp, #0x10] -;; 1a0: bl #0x414 -;; 1a4: .byte 0x1f, 0xc1, 0x00, 0x00 +;; 194: stur x2, [sp, #0x10] +;; 198: mov w3, #0 +;; 19c: bl #0x3dc +;; 1a0: ldur x2, [sp, #0x10] +;; 1a4: bl #0x414 ;; 1a8: .byte 0x1f, 0xc1, 0x00, 0x00 ;; 1ac: .byte 0x1f, 0xc1, 0x00, 0x00 +;; 1b0: .byte 0x1f, 0xc1, 0x00, 0x00 diff --git a/tests/disas/debug.wat b/tests/disas/debug.wat index f75351596162..285b5d40e502 100644 --- a/tests/disas/debug.wat +++ b/tests/disas/debug.wat @@ -12,42 +12,42 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; subq $0x30, %rsp -;; movq %rbx, 0x20(%rsp) +;; movq %r12, 0x20(%rsp) ;; movl %edx, 8(%rsp) ;; movl %ecx, 0xc(%rsp) -;; movq 8(%rdi), %rax -;; movq 0x18(%rax), %rax -;; movq %rsp, %r8 -;; cmpq %rax, %r8 +;; movq 8(%rdi), %r11 +;; movq 0x18(%r11), %r11 +;; movq %rsp, %rax +;; cmpq %r11, %rax ;; jb 0x62 ;; 29: movq %rdi, (%rsp) ;; nopl (%rax, %rax) ;; ├─╼ debug frame state (after previous inst): func key DefinedWasmFunction(StaticModuleIndex(0), DefinedFuncIndex(0)), wasm PC 36, slot at FP-0x30, locals I32 @ slot+0x8, I32 @ slot+0xc, stack -;; ╰─╼ breakpoint patch: wasm PC 36, patch bytes [232, 190, 1, 0, 0] +;; ╰─╼ breakpoint patch: wasm PC 36, patch bytes [232, 184, 1, 0, 0] ;; movl %edx, 0x10(%rsp) ;; nopl (%rax, %rax) ;; ├─╼ debug frame state (after previous inst): func key DefinedWasmFunction(StaticModuleIndex(0), DefinedFuncIndex(0)), wasm PC 38, slot at FP-0x30, locals I32 @ slot+0x8, I32 @ slot+0xc, stack I32 @ slot+0x10 -;; ╰─╼ breakpoint patch: wasm PC 38, patch bytes [232, 181, 1, 0, 0] +;; ╰─╼ breakpoint patch: wasm PC 38, patch bytes [232, 175, 1, 0, 0] ;; movl %ecx, 0x14(%rsp) ;; nopl (%rax, %rax) ;; ├─╼ debug frame state (after previous inst): func key DefinedWasmFunction(StaticModuleIndex(0), DefinedFuncIndex(0)), wasm PC 40, slot at FP-0x30, locals I32 @ slot+0x8, I32 @ slot+0xc, stack I32 @ slot+0x10, I32 @ slot+0x14 -;; ╰─╼ breakpoint patch: wasm PC 40, patch bytes [232, 172, 1, 0, 0] +;; ╰─╼ breakpoint patch: wasm PC 40, patch bytes [232, 166, 1, 0, 0] ;; leal (%rdx, %rcx), %eax ;; movl %eax, 0x10(%rsp) ;; nopl (%rax, %rax) ;; ├─╼ debug frame state (after previous inst): func key DefinedWasmFunction(StaticModuleIndex(0), DefinedFuncIndex(0)), wasm PC 41, slot at FP-0x30, locals I32 @ slot+0x8, I32 @ slot+0xc, stack I32 @ slot+0x10 -;; ╰─╼ breakpoint patch: wasm PC 41, patch bytes [232, 160, 1, 0, 0] +;; ╰─╼ breakpoint patch: wasm PC 41, patch bytes [232, 154, 1, 0, 0] ;; movl %eax, 0x10(%rsp) -;; movq 0x20(%rsp), %rbx +;; movq 0x20(%rsp), %r12 ;; addq $0x30, %rsp ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 62: movq %rdi, %rbx +;; 62: movq %rdi, %r12 ;; 65: xorl %esi, %esi -;; 67: callq 0x194 -;; 6c: movq %rbx, %rdi -;; 6f: callq 0x1c4 +;; 67: callq 0x18c +;; 6c: movq %r12, %rdi +;; 6f: callq 0x1bd ;; 74: ud2 ;; ;; wasm[0]::array_to_wasm_trampoline[0]: @@ -59,20 +59,20 @@ ;; movq %r13, 0x20(%rsp) ;; movq %r14, 0x28(%rsp) ;; movq %r15, 
0x30(%rsp) -;; movl (%rdx), %r10d +;; movl (%rdx), %eax ;; movl 0x10(%rdx), %ecx ;; movq %rdx, (%rsp) -;; movq 8(%rdi), %r11 -;; movq %rbp, %rax -;; movq %rax, 0x48(%r11) -;; movq %rsp, %rax -;; movq %rax, 0x40(%r11) -;; leaq 0x39(%rip), %rax -;; movq %rax, 0x50(%r11) -;; movq %r10, %rdx +;; movq 8(%rdi), %r8 +;; movq %rbp, %r9 +;; movq %r9, 0x48(%r8) +;; movq %rsp, %r9 +;; movq %r9, 0x40(%r8) +;; leaq 0x39(%rip), %r9 +;; movq %r9, 0x50(%r8) +;; movq %rax, %rdx ;; callq 0 ;; ├─╼ exception frame offset: SP = FP - 0x40 -;; ╰─╼ exception handler: default handler, no dynamic context, handler=0xf3 +;; ╰─╼ exception handler: default handler, no dynamic context, handler=0xf2 ;; movq (%rsp), %rdx ;; movl %eax, (%rdx) ;; movl $1, %eax @@ -85,68 +85,66 @@ ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; f3: xorl %eax, %eax -;; f5: movq 0x10(%rsp), %rbx -;; fa: movq 0x18(%rsp), %r12 -;; ff: movq 0x20(%rsp), %r13 -;; 104: movq 0x28(%rsp), %r14 -;; 109: movq 0x30(%rsp), %r15 -;; 10e: addq $0x40, %rsp -;; 112: movq %rbp, %rsp -;; 115: popq %rbp -;; 116: retq +;; f2: xorl %eax, %eax +;; f4: movq 0x10(%rsp), %rbx +;; f9: movq 0x18(%rsp), %r12 +;; fe: movq 0x20(%rsp), %r13 +;; 103: movq 0x28(%rsp), %r14 +;; 108: movq 0x30(%rsp), %r15 +;; 10d: addq $0x40, %rsp +;; 111: movq %rbp, %rsp +;; 114: popq %rbp +;; 115: retq ;; ;; signatures[0]::wasm_to_array_trampoline: ;; pushq %rbp ;; movq %rsp, %rbp ;; subq $0x30, %rsp -;; movq %r15, 0x20(%rsp) -;; movq %rdx, %rax -;; movq 8(%rsi), %r10 -;; movq %rsi, %r8 -;; movq %rbp, %r11 -;; movq %r11, 0x30(%r10) -;; movq %rbp, %r11 -;; movq 8(%r11), %rsi -;; movq %rsi, 0x38(%r10) +;; movq %rbx, 0x20(%rsp) +;; movq %rdx, %r8 +;; movq 8(%rsi), %rax +;; movq %rbp, %rdx +;; movq %rdx, 0x30(%rax) +;; movq %rbp, %rdx +;; movq 8(%rdx), %rdx +;; movq %rdx, 0x38(%rax) ;; leaq (%rsp), %rdx -;; movq %rax, %rsi -;; movl %esi, (%rsp) +;; movq %r8, %rax +;; movl %eax, (%rsp) ;; movl %ecx, 0x10(%rsp) ;; movq 8(%rdi), %rax ;; movl $2, %ecx -;; movq %r8, %r15 -;; movq %r15, %rsi +;; movq %rsi, %rbx ;; callq *%rax -;; movq 8(%r15), %rcx +;; movq 8(%rbx), %rcx ;; addq $1, 0x10(%rcx) ;; testb %al, %al -;; je 0x181 -;; 170: movl (%rsp), %eax -;; movq 0x20(%rsp), %r15 +;; je 0x17a +;; 169: movl (%rsp), %eax +;; movq 0x20(%rsp), %rbx ;; addq $0x30, %rsp ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 181: movq 0x10(%r15), %r8 -;; 185: movq 0x198(%r8), %r8 -;; 18c: movq %r15, %rdi -;; 18f: callq *%r8 -;; 192: ud2 +;; 17a: movq 0x10(%rbx), %rax +;; 17e: movq 0x198(%rax), %rax +;; 185: movq %rbx, %rdi +;; 188: callq *%rax +;; 18a: ud2 ;; ;; wasmtime_builtin_trap: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 8(%rdi), %rax -;; movq %rbp, %rcx -;; movq %rcx, 0x30(%rax) -;; movq %rbp, %rcx -;; movq 8(%rcx), %rcx -;; movq %rcx, 0x38(%rax) -;; movq 0x10(%rdi), %rax -;; movq 0x190(%rax), %rax +;; movq 8(%rdi), %r9 +;; movq %rbp, %r10 +;; movq %r10, 0x30(%r9) +;; movq %rbp, %r10 +;; movq 8(%r10), %r11 +;; movq %r11, 0x38(%r9) +;; movq 0x10(%rdi), %r11 +;; movq 0x190(%r11), %r11 ;; movzbq %sil, %rsi -;; callq *%rax +;; callq *%r11 ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -154,15 +152,15 @@ ;; wasmtime_builtin_raise: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 8(%rdi), %r10 -;; movq %rbp, %r11 -;; movq %r11, 0x30(%r10) -;; movq %rbp, %r11 -;; movq 8(%r11), %rsi -;; movq %rsi, 0x38(%r10) -;; movq 0x10(%rdi), %rsi -;; movq 0x198(%rsi), %rsi -;; callq *%rsi +;; movq 8(%rdi), %r8 +;; movq %rbp, %r9 +;; movq %r9, 0x30(%r8) +;; movq %rbp, %r9 +;; movq 8(%r9), %r9 +;; movq %r9, 0x38(%r8) +;; movq 0x10(%rdi), %r9 +;; movq 
0x198(%r9), %r9 +;; callq *%r9 ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -197,19 +195,19 @@ ;; movdqu %xmm13, 0x120(%rsp) ;; movdqu %xmm14, 0x130(%rsp) ;; movdqu %xmm15, 0x140(%rsp) -;; movq 8(%rdi), %rax -;; movq %rbp, %rcx -;; movq %rcx, 0x30(%rax) -;; movq %rbp, %rcx -;; movq 8(%rcx), %rcx -;; movq %rcx, 0x38(%rax) -;; movq 0x10(%rdi), %rcx -;; movq 0x1c8(%rcx), %rcx +;; movq 8(%rdi), %r10 +;; movq %rbp, %r11 +;; movq %r11, 0x30(%r10) +;; movq %rbp, %r11 +;; movq 8(%r11), %rax +;; movq %rax, 0x38(%r10) +;; movq 0x10(%rdi), %rax +;; movq 0x1c8(%rax), %rcx ;; movq %rdi, %rbx ;; callq *%rcx ;; testb %al, %al -;; je 0x3b5 -;; 2e9: movq (%rsp), %rax +;; je 0x3af +;; 2e3: movq (%rsp), %rax ;; movq 8(%rsp), %rcx ;; movq 0x10(%rsp), %rdx ;; movq 0x18(%rsp), %rbx @@ -239,8 +237,8 @@ ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 3b5: movq 0x10(%rbx), %r8 -;; 3b9: movq 0x198(%r8), %r8 -;; 3c0: movq %rbx, %rdi -;; 3c3: callq *%r8 -;; 3c6: ud2 +;; 3af: movq 0x10(%rbx), %rax +;; 3b3: movq 0x198(%rax), %rax +;; 3ba: movq %rbx, %rdi +;; 3bd: callq *%rax +;; 3bf: ud2 diff --git a/tests/disas/epoch-interruption-x86.wat b/tests/disas/epoch-interruption-x86.wat index 81d6b1792623..0b8d75b564fd 100644 --- a/tests/disas/epoch-interruption-x86.wat +++ b/tests/disas/epoch-interruption-x86.wat @@ -11,29 +11,29 @@ ;; movq 0x18(%r10), %r10 ;; addq $0x30, %r10 ;; cmpq %rsp, %r10 -;; ja 0x7e +;; ja 0x82 ;; 19: subq $0x20, %rsp -;; movq %r13, (%rsp) -;; movq %r14, 8(%rsp) -;; movq %r15, 0x10(%rsp) -;; movq 0x18(%rdi), %r14 -;; movq (%r14), %rcx -;; movq 8(%rdi), %r13 -;; movq %rdi, %r15 -;; movq 8(%r13), %rax +;; movq %r12, (%rsp) +;; movq %r13, 8(%rsp) +;; movq %r14, 0x10(%rsp) +;; movq 0x18(%rdi), %r13 +;; movq (%r13), %rcx +;; movq 8(%rdi), %r12 +;; movq %rdi, %r14 +;; movq 8(%r12), %rax ;; cmpq %rax, %rcx -;; jae 0x57 -;; 46: movq (%r14), %r11 -;; cmpq %rax, %r11 -;; jae 0x64 -;; jmp 0x46 -;; 57: movq %r15, %rdi -;; callq 0xe1 -;; jmp 0x46 -;; 64: movq 8(%r13), %rax -;; cmpq %rax, %r11 -;; jb 0x46 -;; 71: movq %r15, %rdi -;; callq 0xe1 -;; jmp 0x46 -;; 7e: ud2 +;; jae 0x5a +;; 48: movq (%r13), %rcx +;; cmpq %rax, %rcx +;; jae 0x67 +;; jmp 0x48 +;; 5a: movq %r14, %rdi +;; callq 0xde +;; jmp 0x48 +;; 67: movq 8(%r12), %rax +;; cmpq %rax, %rcx +;; jb 0x48 +;; 75: movq %r14, %rdi +;; callq 0xde +;; jmp 0x48 +;; 82: ud2 diff --git a/tests/disas/exceptions.wat b/tests/disas/exceptions.wat index 4354a908c7f0..bad9fd79703e 100644 --- a/tests/disas/exceptions.wat +++ b/tests/disas/exceptions.wat @@ -23,38 +23,38 @@ ;; movq 0x18(%r10), %r10 ;; addq $0x50, %r10 ;; cmpq %rsp, %r10 -;; ja 0xa2 +;; ja 0x9d ;; 19: subq $0x40, %rsp ;; movq %rbx, 0x10(%rsp) ;; movq %r12, 0x18(%rsp) ;; movq %r13, 0x20(%rsp) ;; movq %r14, 0x28(%rsp) ;; movq %r15, 0x30(%rsp) -;; movq %rdi, %r12 -;; movq %rcx, %r13 -;; movq %rdx, %r15 -;; callq 0x3b2 +;; movq %rdi, %rbx +;; movq %rcx, %r12 +;; movq %rdx, %r13 +;; callq 0x385 ;; movq %rax, %r14 ;; movl $0x4000000, %esi ;; movl $3, %edx ;; movl $0x30, %ecx ;; movl $8, %r8d -;; movq %r12, %rdi -;; callq 0x34f -;; movq 8(%r12), %r8 -;; movq 0x20(%r8), %r8 -;; movl %eax, %r9d -;; movq %r15, %rdx -;; movl %edx, 0x20(%r8, %r9) -;; movq %r13, %rcx -;; movq %rcx, 0x28(%r8, %r9) -;; movq %r14, %r10 -;; movl %r10d, 0x18(%r8, %r9) -;; movl $0, 0x1c(%r8, %r9) +;; movq %rbx, %rdi +;; callq 0x322 +;; movq 8(%rbx), %rcx +;; movq 0x20(%rcx), %rcx +;; movl %eax, %edx +;; movq %r13, %rsi +;; movl %esi, 0x20(%rcx, %rdx) +;; movq %r12, %rsi +;; movq %rsi, 0x28(%rcx, %rdx) +;; movq %r14, %rsi +;; movl %esi, 0x18(%rcx, 
%rdx) +;; movl $0, 0x1c(%rcx, %rdx) ;; movq %rax, %rsi -;; movq %r12, %rdi -;; movq %r12, (%rsp) -;; callq 0x3de +;; movq %rbx, %rdi +;; movq %rbx, (%rsp) +;; callq 0x3b2 ;; ud2 ;; ud2 ;; @@ -65,8 +65,8 @@ ;; movq 0x18(%r10), %r10 ;; addq $0x50, %r10 ;; cmpq %rsp, %r10 -;; ja 0x14f -;; d9: subq $0x40, %rsp +;; ja 0x135 +;; b9: subq $0x40, %rsp ;; movq %rbx, 0x10(%rsp) ;; movq %r12, 0x18(%rsp) ;; movq %r13, 0x20(%rsp) @@ -77,13 +77,15 @@ ;; movq (%rsp), %rdi ;; callq 0 ;; ├─╼ exception frame offset: SP = FP - 0x40 -;; ╰─╼ exception handler: tag=0, context at [SP+0x0], handler=0x116 +;; ╰─╼ exception handler: tag=0, context at [SP+0x0], handler=0xf6 ;; movl $0x2a, %eax ;; movl $0x64, %ecx -;; jmp 0x12d -;; 116: movq (%rsp), %rdi -;; movq 8(%rdi), %rcx -;; movq 0x20(%rcx), %rcx +;; jmp 0x113 +;; f6: movq %rax, %rdx +;; movq (%rsp), %rsi +;; movq 8(%rsi), %rax +;; movq 0x20(%rax), %rcx +;; movq %rdx, %rax ;; movl %eax, %edx ;; movl 0x20(%rcx, %rdx), %eax ;; movq 0x28(%rcx, %rdx), %rcx @@ -96,4 +98,4 @@ ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 14f: ud2 +;; 135: ud2 diff --git a/tests/disas/f64-copysign.wat b/tests/disas/f64-copysign.wat index d9d2762479d4..d3543c3644ca 100644 --- a/tests/disas/f64-copysign.wat +++ b/tests/disas/f64-copysign.wat @@ -30,15 +30,15 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %rcx -;; vmovsd 0xfff3(%rcx), %xmm4 -;; vxorpd %xmm3, %xmm3, %xmm5 -;; movabsq $9223372036854775808, %r11 -;; vmovq %r11, %xmm2 -;; vandnpd %xmm5, %xmm2, %xmm5 -;; vandpd %xmm4, %xmm2, %xmm6 -;; vorpd %xmm6, %xmm5, %xmm0 -;; vmovsd %xmm0, (%rcx) +;; movq 0x38(%rdi), %rax +;; vmovsd 0xfff3(%rax), %xmm0 +;; vxorpd %xmm1, %xmm1, %xmm1 +;; movabsq $9223372036854775808, %r8 +;; vmovq %r8, %xmm2 +;; vandnpd %xmm1, %xmm2, %xmm1 +;; vandpd %xmm0, %xmm2, %xmm0 +;; vorpd %xmm0, %xmm1, %xmm0 +;; vmovsd %xmm0, (%rax) ;; xorl %eax, %eax ;; movq %rbp, %rsp ;; popq %rbp diff --git a/tests/disas/gc/struct-new-stack-map.wat b/tests/disas/gc/struct-new-stack-map.wat index b8b248201fab..a00f86460538 100644 --- a/tests/disas/gc/struct-new-stack-map.wat +++ b/tests/disas/gc/struct-new-stack-map.wat @@ -18,45 +18,45 @@ ;; movq 0x18(%r10), %r10 ;; addq $0x40, %r10 ;; cmpq %rsp, %r10 -;; ja 0xb5 +;; ja 0xaf ;; 19: subq $0x30, %rsp -;; movq %r13, 0x20(%rsp) -;; movq %r14, 0x28(%rsp) -;; movq %rdx, %r14 +;; movq %rbx, 0x20(%rsp) +;; movq %r12, 0x28(%rsp) +;; movq %rdx, %r12 ;; movdqu %xmm0, 8(%rsp) ;; movl %ecx, (%rsp) ;; movl $0xb0000000, %esi ;; xorl %edx, %edx ;; movl $0x28, %ecx ;; movl $8, %r8d -;; movq %rdi, %r13 -;; callq 0x12f -;; movq 8(%r13), %rdx +;; movq %rdi, %rbx +;; callq 0x125 +;; movq 8(%rbx), %rcx ;; ╰─╼ stack_map: frame_size=48, frame_offsets=[0] -;; movq 0x20(%rdx), %rdx -;; movl %eax, %r8d +;; movq 0x20(%rcx), %rcx +;; movl %eax, %edx ;; movdqu 8(%rsp), %xmm0 -;; movss %xmm0, 0x18(%rdx, %r8) -;; movq %r14, %r9 -;; movb %r9b, 0x1c(%rdx, %r8) -;; movl (%rsp), %r9d -;; movq %r9, %rcx -;; andl $1, %ecx -;; testl %r9d, %r9d -;; sete %r10b -;; movzbl %r10b, %r10d -;; orl %r10d, %ecx -;; testl %ecx, %ecx -;; jne 0x9a -;; 8d: movl %r9d, %edi -;; leaq (%rdx, %rdi), %rcx -;; addq $1, 8(%rdx, %rdi) -;; movl (%rsp), %ecx -;; movl %ecx, 0x20(%rdx, %r8) -;; movq 0x20(%rsp), %r13 -;; movq 0x28(%rsp), %r14 +;; movss %xmm0, 0x18(%rcx, %rdx) +;; movq %r12, %rsi +;; movb %sil, 0x1c(%rcx, %rdx) +;; movl (%rsp), %esi +;; movq %rsi, %rdi +;; andl $1, %edi +;; testl %esi, %esi +;; sete %r8b +;; movzbl %r8b, %r8d +;; orl %r8d, %edi +;; testl %edi, %edi +;; jne 0x95 +;; 89: movl 
%esi, %esi +;; leaq (%rcx, %rsi), %rdi +;; addq $1, 8(%rcx, %rsi) +;; movl (%rsp), %esi +;; movl %esi, 0x20(%rcx, %rdx) +;; movq 0x20(%rsp), %rbx +;; movq 0x28(%rsp), %r12 ;; addq $0x30, %rsp ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; b5: ud2 +;; af: ud2 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat index 9e1d35664a1c..04810e511431 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat @@ -23,15 +23,15 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a5, 0x40(a0) -;; slli a2, a2, 0x20 -;; srli a4, a2, 0x20 -;; addi a5, a5, -4 -;; bgeu a5, a4, 8 +;; ld a4, 0x40(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; addi a2, a4, -4 +;; bgeu a2, a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a5, 0x38(a0) -;; add a4, a5, a4 -;; sw a3, 0(a4) +;; ld a0, 0x38(a0) +;; add a0, a0, a1 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -43,14 +43,14 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; ld a3, 0x40(a0) -;; slli a2, a2, 0x20 -;; srli a4, a2, 0x20 -;; addi a3, a3, -4 -;; bgeu a3, a4, 8 +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; addi a2, a3, -4 +;; bgeu a2, a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a5, 0x38(a0) -;; add a4, a5, a4 -;; lw a0, 0(a4) +;; ld a0, 0x38(a0) +;; add a0, a0, a1 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat index 399783a52f4d..bee25558009e 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat @@ -23,18 +23,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a5, 0x40(a0) -;; slli a4, a2, 0x20 -;; srli a1, a4, 0x20 +;; ld a1, 0x40(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 ;; lui a4, 1 -;; addi a2, a4, 4 -;; sub a5, a5, a2 -;; bgeu a5, a1, 8 +;; addi a4, a4, 4 +;; sub a1, a1, a4 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a0, 0x38(a0) -;; add a0, a0, a1 +;; ld a4, 0x38(a0) +;; add a2, a4, a2 ;; lui t6, 1 -;; add t6, t6, a0 +;; add t6, t6, a2 ;; sw a3, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -46,18 +46,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a5, 0x40(a0) -;; slli a4, a2, 0x20 -;; srli a1, a4, 0x20 -;; lui a4, 1 -;; addi a2, a4, 4 -;; sub a5, a5, a2 -;; bgeu a5, a1, 8 +;; ld a1, 0x40(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 +;; lui a3, 1 +;; addi a3, a3, 4 +;; sub a1, a1, a3 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a0, 0x38(a0) -;; add a0, a0, a1 +;; ld a3, 0x38(a0) +;; add a2, a3, a2 ;; lui t6, 1 -;; add t6, t6, a0 +;; add t6, t6, a2 ;; lw a0, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat index 4b85f8c1a6cf..9c9031b57d8a 100644 --- 
a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -23,23 +23,23 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a2, a2, 0x20 -;; srli a4, a2, 0x20 -;; lui a1, 0x3fffc -;; addi a5, a1, 1 -;; slli a5, a5, 2 -;; add a5, a4, a5 -;; bgeu a5, a4, 8 +;; slli a4, a2, 0x20 +;; srli a1, a4, 0x20 +;; lui a4, 0x3fffc +;; addi a5, a4, 1 +;; slli a2, a5, 2 +;; add a5, a1, a2 +;; bgeu a5, a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x40(a0) -;; bgeu a1, a5, 8 +;; ld a2, 0x40(a0) +;; bgeu a2, a5, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a5, 0x38(a0) -;; add a5, a5, a4 -;; lui a4, 0xffff -;; slli a0, a4, 4 -;; add a5, a5, a0 -;; sw a3, 0(a5) +;; ld a0, 0x38(a0) +;; add a0, a0, a1 +;; lui a1, 0xffff +;; slli a1, a1, 4 +;; add a0, a0, a1 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -50,23 +50,23 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a2, a2, 0x20 -;; srli a4, a2, 0x20 -;; lui a1, 0x3fffc -;; addi a3, a1, 1 -;; slli a5, a3, 2 -;; add a3, a4, a5 -;; bgeu a3, a4, 8 +;; slli a4, a2, 0x20 +;; srli a1, a4, 0x20 +;; lui a3, 0x3fffc +;; addi a5, a3, 1 +;; slli a2, a5, 2 +;; add a5, a1, a2 +;; bgeu a5, a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a5, 0x40(a0) -;; bgeu a5, a3, 8 +;; ld a2, 0x40(a0) +;; bgeu a2, a5, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a5, 0x38(a0) -;; add a5, a5, a4 -;; lui a4, 0xffff -;; slli a0, a4, 4 -;; add a5, a5, a0 -;; lw a0, 0(a5) +;; ld a0, 0x38(a0) +;; add a0, a0, a1 +;; lui a1, 0xffff +;; slli a1, a1, 4 +;; add a0, a0, a1 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat index 6e3e373b562d..a3bd06af9ef0 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat @@ -23,14 +23,14 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a5, 0x40(a0) -;; slli a1, a2, 0x20 -;; srli a4, a1, 0x20 -;; bltu a4, a5, 8 +;; ld a1, 0x40(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 +;; bltu a2, a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a5, 0x38(a0) -;; add a4, a5, a4 -;; sb a3, 0(a4) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -41,14 +41,14 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x40(a0) -;; slli a1, a2, 0x20 -;; srli a3, a1, 0x20 -;; bltu a3, a4, 8 +;; ld a1, 0x40(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 +;; bltu a2, a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a3, a4, a3 -;; lbu a0, 0(a3) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat index 30a93f724f01..8daf6bb2a48a 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat +++ 
b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat @@ -23,18 +23,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a5, 0x40(a0) -;; slli a4, a2, 0x20 -;; srli a1, a4, 0x20 +;; ld a1, 0x40(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 ;; lui a4, 1 -;; addi a2, a4, 1 -;; sub a5, a5, a2 -;; bgeu a5, a1, 8 +;; addi a4, a4, 1 +;; sub a1, a1, a4 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a0, 0x38(a0) -;; add a0, a0, a1 +;; ld a4, 0x38(a0) +;; add a2, a4, a2 ;; lui t6, 1 -;; add t6, t6, a0 +;; add t6, t6, a2 ;; sb a3, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -46,18 +46,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a5, 0x40(a0) -;; slli a4, a2, 0x20 -;; srli a1, a4, 0x20 -;; lui a4, 1 -;; addi a2, a4, 1 -;; sub a5, a5, a2 -;; bgeu a5, a1, 8 +;; ld a1, 0x40(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 +;; lui a3, 1 +;; addi a3, a3, 1 +;; sub a1, a1, a3 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a0, 0x38(a0) -;; add a0, a0, a1 +;; ld a3, 0x38(a0) +;; add a2, a3, a2 ;; lui t6, 1 -;; add t6, t6, a0 +;; add t6, t6, a2 ;; lbu a0, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat index f68d1fbbf656..c2c1bdeef847 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -23,22 +23,22 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a1, a2, 0x20 -;; srli a2, a1, 0x20 -;; auipc a1, 0 -;; ld a1, 0x48(a1) -;; add a1, a2, a1 -;; bgeu a1, a2, 8 +;; slli a2, a2, 0x20 +;; srli a4, a2, 0x20 +;; auipc a5, 0 +;; ld a5, 0x48(a5) +;; add a5, a4, a5 +;; bgeu a5, a4, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x40(a0) -;; bgeu a4, a1, 8 +;; ld a1, 0x40(a0) +;; bgeu a1, a5, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a4, a4, a2 -;; lui a2, 0xffff -;; slli a5, a2, 4 -;; add a4, a4, a5 -;; sb a3, 0(a4) +;; ld a5, 0x38(a0) +;; add a5, a5, a4 +;; lui a4, 0xffff +;; slli a0, a4, 4 +;; add a5, a5, a0 +;; sb a3, 0(a5) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -51,22 +51,22 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a1, a2, 0x20 -;; srli a2, a1, 0x20 -;; auipc a1, 0 -;; ld a1, 0x48(a1) -;; add a1, a2, a1 -;; bgeu a1, a2, 8 +;; slli a2, a2, 0x20 +;; srli a4, a2, 0x20 +;; auipc a3, 0 +;; ld a3, 0x48(a3) +;; add a3, a4, a3 +;; bgeu a3, a4, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a3, 0x40(a0) -;; bgeu a3, a1, 8 +;; ld a5, 0x40(a0) +;; bgeu a5, a3, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a3, 0x38(a0) -;; add a3, a3, a2 -;; lui a2, 0xffff -;; slli a4, a2, 4 -;; add a3, a3, a4 -;; lbu a0, 0(a3) +;; ld a5, 0x38(a0) +;; add a5, a5, a4 +;; lui a4, 0xffff +;; slli a0, a4, 4 +;; add a5, a5, a0 +;; lbu a0, 0(a5) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat index 2f9892af9de1..395a96d59eb3 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat 
+++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat @@ -24,16 +24,16 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; ld a5, 0x40(a0) -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a4, a0, 0x20 -;; addi a2, a5, -4 -;; sltu a2, a2, a4 -;; add a1, a1, a4 -;; neg a5, a2 -;; not a2, a5 -;; and a4, a1, a2 -;; sw a3, 0(a4) +;; ld a4, 0x38(a0) +;; slli a2, a2, 0x20 +;; srli a0, a2, 0x20 +;; addi a5, a5, -4 +;; sltu a5, a5, a0 +;; add a4, a4, a0 +;; neg a1, a5 +;; not a5, a1 +;; and a5, a4, a5 +;; sw a3, 0(a5) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -45,16 +45,16 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; ld a4, 0x40(a0) -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a3, a0, 0x20 -;; addi a2, a4, -4 -;; sltu a2, a2, a3 -;; add a1, a1, a3 -;; neg a5, a2 -;; not a2, a5 -;; and a3, a1, a2 -;; lw a0, 0(a3) +;; ld a3, 0x38(a0) +;; slli a2, a2, 0x20 +;; srli a5, a2, 0x20 +;; addi a4, a4, -4 +;; sltu a4, a4, a5 +;; add a3, a3, a5 +;; neg a1, a4 +;; not a4, a1 +;; and a5, a3, a4 +;; lw a0, 0(a5) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat index 4083de3694e3..ab979cba6fe1 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -23,21 +23,21 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x40(a0) -;; ld a5, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a1, a0, 0x20 -;; lui a0, 1 -;; addi a0, a0, 4 -;; sub a0, a4, a0 -;; sltu a0, a0, a1 -;; add a5, a5, a1 -;; lui a1, 1 -;; add a5, a5, a1 -;; neg a4, a0 -;; not a0, a4 -;; and a1, a5, a0 -;; sw a3, 0(a1) +;; ld a1, 0x40(a0) +;; ld a0, 0x38(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 +;; lui a4, 1 +;; addi a4, a4, 4 +;; sub a1, a1, a4 +;; sltu a1, a1, a2 +;; add a0, a0, a2 +;; lui a2, 1 +;; add a0, a0, a2 +;; neg a5, a1 +;; not a1, a5 +;; and a0, a0, a1 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -48,21 +48,21 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a3, 0x40(a0) -;; ld a5, 0x38(a0) -;; slli a4, a2, 0x20 -;; srli a1, a4, 0x20 -;; lui a4, 1 -;; addi a0, a4, 4 -;; sub a0, a3, a0 -;; sltu a0, a0, a1 -;; add a5, a5, a1 -;; lui a1, 1 -;; add a5, a5, a1 -;; neg a3, a0 -;; not a0, a3 -;; and a1, a5, a0 -;; lw a0, 0(a1) +;; ld a1, 0x40(a0) +;; ld a0, 0x38(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 +;; lui a3, 1 +;; addi a3, a3, 4 +;; sub a1, a1, a3 +;; sltu a1, a1, a2 +;; add a0, a0, a2 +;; lui a2, 1 +;; add a0, a0, a2 +;; neg a5, a1 +;; not a1, a5 +;; and a0, a0, a1 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat index 44624a4e4094..8d97c871da99 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -23,25 +23,26 @@ ;; sd ra, 8(sp) ;; sd s0, 
0(sp) ;; mv s0, sp -;; slli a1, a2, 0x20 -;; srli a2, a1, 0x20 -;; lui a5, 0x3fffc -;; addi a1, a5, 1 -;; slli a4, a1, 2 -;; add a1, a2, a4 -;; bgeu a1, a2, 8 +;; mv a4, a0 +;; slli a0, a2, 0x20 +;; srli a0, a0, 0x20 +;; lui a1, 0x3fffc +;; addi a1, a1, 1 +;; slli a1, a1, 2 +;; add a1, a0, a1 +;; bgeu a1, a0, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x40(a0) -;; ld a5, 0x38(a0) -;; sltu a4, a4, a1 -;; add a2, a5, a2 -;; lui a1, 0xffff -;; slli a5, a1, 4 -;; add a2, a2, a5 -;; neg a0, a4 -;; not a4, a0 -;; and a4, a2, a4 -;; sw a3, 0(a4) +;; ld a2, 0x40(a4) +;; ld a4, 0x38(a4) +;; sltu a1, a2, a1 +;; add a0, a4, a0 +;; lui a2, 0xffff +;; slli a2, a2, 4 +;; add a0, a0, a2 +;; neg a1, a1 +;; not a1, a1 +;; and a0, a0, a1 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -52,25 +53,26 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a1, a2, 0x20 -;; srli a2, a1, 0x20 -;; lui a5, 0x3fffc -;; addi a1, a5, 1 -;; slli a3, a1, 2 -;; add a1, a2, a3 -;; bgeu a1, a2, 8 +;; mv a3, a0 +;; slli a0, a2, 0x20 +;; srli a0, a0, 0x20 +;; lui a1, 0x3fffc +;; addi a1, a1, 1 +;; slli a1, a1, 2 +;; add a1, a0, a1 +;; bgeu a1, a0, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a3, 0x40(a0) -;; ld a4, 0x38(a0) -;; sltu a3, a3, a1 -;; add a2, a4, a2 -;; lui a1, 0xffff -;; slli a4, a1, 4 -;; add a2, a2, a4 -;; neg a0, a3 -;; not a3, a0 -;; and a4, a2, a3 -;; lw a0, 0(a4) +;; ld a2, 0x40(a3) +;; ld a3, 0x38(a3) +;; sltu a1, a2, a1 +;; add a0, a3, a0 +;; lui a2, 0xffff +;; slli a2, a2, 4 +;; add a0, a0, a2 +;; neg a1, a1 +;; not a1, a1 +;; and a0, a0, a1 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat index 66165a9c0747..0e3b4d265439 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat @@ -24,16 +24,16 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; ld a5, 0x40(a0) -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a4, a0, 0x20 -;; sltu a0, a4, a5 -;; xori a2, a0, 1 -;; add a1, a1, a4 -;; neg a5, a2 -;; not a2, a5 -;; and a4, a1, a2 -;; sb a3, 0(a4) +;; ld a4, 0x38(a0) +;; slli a2, a2, 0x20 +;; srli a0, a2, 0x20 +;; sltu a2, a0, a5 +;; xori a5, a2, 1 +;; add a4, a4, a0 +;; neg a1, a5 +;; not a5, a1 +;; and a5, a4, a5 +;; sb a3, 0(a5) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -45,16 +45,16 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; ld a4, 0x40(a0) -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a3, a0, 0x20 -;; sltu a0, a3, a4 -;; xori a2, a0, 1 -;; add a1, a1, a3 -;; neg a5, a2 -;; not a2, a5 -;; and a3, a1, a2 -;; lbu a0, 0(a3) +;; ld a3, 0x38(a0) +;; slli a2, a2, 0x20 +;; srli a5, a2, 0x20 +;; sltu a2, a5, a4 +;; xori a4, a2, 1 +;; add a3, a3, a5 +;; neg a1, a4 +;; not a4, a1 +;; and a5, a3, a4 +;; lbu a0, 0(a5) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat index 983e073fca91..f83e0d069978 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat +++ 
b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -23,21 +23,21 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x40(a0) -;; ld a5, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a1, a0, 0x20 -;; lui a0, 1 -;; addi a0, a0, 1 -;; sub a0, a4, a0 -;; sltu a0, a0, a1 -;; add a5, a5, a1 -;; lui a1, 1 -;; add a5, a5, a1 -;; neg a4, a0 -;; not a0, a4 -;; and a1, a5, a0 -;; sb a3, 0(a1) +;; ld a1, 0x40(a0) +;; ld a0, 0x38(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 +;; lui a4, 1 +;; addi a4, a4, 1 +;; sub a1, a1, a4 +;; sltu a1, a1, a2 +;; add a0, a0, a2 +;; lui a2, 1 +;; add a0, a0, a2 +;; neg a5, a1 +;; not a1, a5 +;; and a0, a0, a1 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -48,21 +48,21 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a3, 0x40(a0) -;; ld a5, 0x38(a0) -;; slli a4, a2, 0x20 -;; srli a1, a4, 0x20 -;; lui a4, 1 -;; addi a0, a4, 1 -;; sub a0, a3, a0 -;; sltu a0, a0, a1 -;; add a5, a5, a1 -;; lui a1, 1 -;; add a5, a5, a1 -;; neg a3, a0 -;; not a0, a3 -;; and a1, a5, a0 -;; lbu a0, 0(a1) +;; ld a1, 0x40(a0) +;; ld a0, 0x38(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 +;; lui a3, 1 +;; addi a3, a3, 1 +;; sub a1, a1, a3 +;; sltu a1, a1, a2 +;; add a0, a0, a2 +;; lui a2, 1 +;; add a0, a0, a2 +;; neg a5, a1 +;; not a1, a5 +;; and a0, a0, a1 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat index d1803d03c10c..924ca2c55163 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -23,30 +23,30 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; mv a1, a0 -;; slli a4, a2, 0x20 -;; srli a0, a4, 0x20 -;; auipc a5, 0 -;; ld a5, 0x54(a5) -;; add a5, a0, a5 -;; bgeu a5, a0, 8 +;; mv a4, a0 +;; slli a0, a2, 0x20 +;; srli a0, a0, 0x20 +;; auipc a1, 0 +;; ld a1, 0x54(a1) +;; add a1, a0, a1 +;; bgeu a1, a0, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; mv a2, a1 -;; ld a1, 0x40(a2) -;; ld a2, 0x38(a2) -;; sltu a1, a1, a5 -;; add a0, a2, a0 -;; lui a5, 0xffff -;; slli a2, a5, 4 +;; ld a2, 0x40(a4) +;; ld a4, 0x38(a4) +;; sltu a1, a2, a1 +;; add a0, a4, a0 +;; lui a2, 0xffff +;; slli a2, a2, 4 ;; add a0, a0, a2 -;; neg a4, a1 -;; not a1, a4 -;; and a2, a0, a1 -;; sb a3, 0(a2) +;; neg a1, a1 +;; not a1, a1 +;; and a0, a0, a1 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 ;; ret +;; .byte 0x00, 0x00, 0x00, 0x00 ;; .byte 0x01, 0x00, 0xff, 0xff ;; .byte 0x00, 0x00, 0x00, 0x00 ;; @@ -55,29 +55,29 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; mv a1, a0 -;; slli a4, a2, 0x20 -;; srli a0, a4, 0x20 -;; auipc a5, 0 -;; ld a5, 0x54(a5) -;; add a5, a0, a5 -;; bgeu a5, a0, 8 +;; mv a3, a0 +;; slli a0, a2, 0x20 +;; srli a0, a0, 0x20 +;; auipc a1, 0 +;; ld a1, 0x54(a1) +;; add a1, a0, a1 +;; bgeu a1, a0, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; mv a2, a1 -;; ld a1, 0x40(a2) -;; ld a2, 0x38(a2) -;; sltu a1, a1, a5 -;; add a0, a2, a0 -;; lui a5, 0xffff -;; slli a2, a5, 4 +;; ld a2, 0x40(a3) +;; ld a3, 0x38(a3) +;; sltu a1, a2, a1 +;; add a0, a3, a0 +;; lui a2, 0xffff +;; slli a2, a2, 4 ;; add a0, a0, a2 -;; neg a4, a1 -;; not 
a1, a4 -;; and a2, a0, a1 -;; lbu a0, 0(a2) +;; neg a1, a1 +;; not a1, a1 +;; and a0, a0, a1 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 ;; ret +;; .byte 0x00, 0x00, 0x00, 0x00 ;; .byte 0x01, 0x00, 0xff, 0xff ;; .byte 0x00, 0x00, 0x00, 0x00 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat index b42149089e7a..6d3e0c6ab8ac 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat @@ -23,14 +23,14 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a5, 0x40(a0) -;; slli a1, a2, 0x20 -;; srli a4, a1, 0x20 -;; bgeu a5, a4, 8 +;; ld a1, 0x40(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a5, 0x38(a0) -;; add a4, a5, a4 -;; sw a3, 0(a4) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -41,14 +41,14 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x40(a0) -;; slli a1, a2, 0x20 -;; srli a3, a1, 0x20 -;; bgeu a4, a3, 8 +;; ld a1, 0x40(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a3, a4, a3 -;; lw a0, 0(a3) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat index dd8d46652065..29b865870407 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat @@ -23,15 +23,15 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a5, 0x40(a0) -;; slli a1, a2, 0x20 -;; srli a4, a1, 0x20 -;; bgeu a5, a4, 8 +;; ld a1, 0x40(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a5, 0x38(a0) -;; add a4, a5, a4 +;; ld a0, 0x38(a0) +;; add a0, a0, a2 ;; lui t6, 1 -;; add t6, t6, a4 +;; add t6, t6, a0 ;; sw a3, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -43,15 +43,15 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x40(a0) -;; slli a1, a2, 0x20 -;; srli a3, a1, 0x20 -;; bgeu a4, a3, 8 +;; ld a1, 0x40(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a3, a4, a3 +;; ld a0, 0x38(a0) +;; add a0, a0, a2 ;; lui t6, 1 -;; add t6, t6, a3 +;; add t6, t6, a0 ;; lw a0, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat index e00b2ec58297..a765c5163298 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ 
b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -23,17 +23,17 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a5, 0x40(a0) -;; slli a4, a2, 0x20 -;; srli a1, a4, 0x20 -;; bgeu a5, a1, 8 +;; ld a1, 0x40(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a0, 0x38(a0) -;; add a0, a0, a1 -;; lui a5, 0xffff -;; slli a1, a5, 4 -;; add a0, a0, a1 -;; sw a3, 0(a0) +;; ld a4, 0x38(a0) +;; add a2, a4, a2 +;; lui a1, 0xffff +;; slli a4, a1, 4 +;; add a2, a2, a4 +;; sw a3, 0(a2) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -44,17 +44,17 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a5, 0x40(a0) -;; slli a4, a2, 0x20 -;; srli a1, a4, 0x20 -;; bgeu a5, a1, 8 +;; ld a1, 0x40(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a0, 0x38(a0) -;; add a0, a0, a1 -;; lui a5, 0xffff -;; slli a1, a5, 4 -;; add a0, a0, a1 -;; lw a0, 0(a0) +;; ld a3, 0x38(a0) +;; add a2, a3, a2 +;; lui a1, 0xffff +;; slli a3, a1, 4 +;; add a2, a2, a3 +;; lw a0, 0(a2) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat index a149d2ac481d..6847b10d4116 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat @@ -23,14 +23,14 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a5, 0x40(a0) -;; slli a1, a2, 0x20 -;; srli a4, a1, 0x20 -;; bltu a4, a5, 8 +;; ld a1, 0x40(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 +;; bltu a2, a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a5, 0x38(a0) -;; add a4, a5, a4 -;; sb a3, 0(a4) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -41,14 +41,14 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x40(a0) -;; slli a1, a2, 0x20 -;; srli a3, a1, 0x20 -;; bltu a3, a4, 8 +;; ld a1, 0x40(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 +;; bltu a2, a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a3, a4, a3 -;; lbu a0, 0(a3) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat index bd83e64e2c69..85ba452cd620 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat @@ -23,15 +23,15 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a5, 0x40(a0) -;; slli a1, a2, 0x20 -;; srli a4, a1, 0x20 -;; bgeu a5, a4, 8 +;; ld a1, 0x40(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a5, 0x38(a0) -;; add a4, a5, a4 +;; ld a0, 0x38(a0) +;; add a0, a0, a2 ;; lui t6, 1 -;; add t6, t6, a4 +;; add t6, t6, a0 ;; sb a3, 
0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -43,15 +43,15 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x40(a0) -;; slli a1, a2, 0x20 -;; srli a3, a1, 0x20 -;; bgeu a4, a3, 8 +;; ld a1, 0x40(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a3, a4, a3 +;; ld a0, 0x38(a0) +;; add a0, a0, a2 ;; lui t6, 1 -;; add t6, t6, a3 +;; add t6, t6, a0 ;; lbu a0, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat index b203605ccbbb..1bc6bbeeaa35 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -23,17 +23,17 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a5, 0x40(a0) -;; slli a4, a2, 0x20 -;; srli a1, a4, 0x20 -;; bgeu a5, a1, 8 +;; ld a1, 0x40(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a0, 0x38(a0) -;; add a0, a0, a1 -;; lui a5, 0xffff -;; slli a1, a5, 4 -;; add a0, a0, a1 -;; sb a3, 0(a0) +;; ld a4, 0x38(a0) +;; add a2, a4, a2 +;; lui a1, 0xffff +;; slli a4, a1, 4 +;; add a2, a2, a4 +;; sb a3, 0(a2) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -44,17 +44,17 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a5, 0x40(a0) -;; slli a4, a2, 0x20 -;; srli a1, a4, 0x20 -;; bgeu a5, a1, 8 +;; ld a1, 0x40(a0) +;; slli a2, a2, 0x20 +;; srli a2, a2, 0x20 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a0, 0x38(a0) -;; add a0, a0, a1 -;; lui a5, 0xffff -;; slli a1, a5, 4 -;; add a0, a0, a1 -;; lbu a0, 0(a0) +;; ld a3, 0x38(a0) +;; add a2, a3, a2 +;; lui a1, 0xffff +;; slli a3, a1, 4 +;; add a2, a2, a3 +;; lbu a0, 0(a2) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat index 741679eb44d7..516c3b9060e7 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat @@ -23,16 +23,16 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x40(a0) -;; ld a1, 0x38(a0) -;; slli a5, a2, 0x20 -;; srli a2, a5, 0x20 -;; sltu a4, a4, a2 -;; add a0, a1, a2 -;; neg a4, a4 -;; not a1, a4 -;; and a2, a0, a1 -;; sw a3, 0(a2) +;; ld a5, 0x40(a0) +;; ld a4, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a0, a1, 0x20 +;; sltu a5, a5, a0 +;; add a2, a4, a0 +;; neg a0, a5 +;; not a4, a0 +;; and a4, a2, a4 +;; sw a3, 0(a4) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -43,16 +43,16 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a3, 0x40(a0) -;; ld a1, 0x38(a0) -;; slli a5, a2, 0x20 -;; srli a2, a5, 0x20 -;; sltu a3, a3, a2 -;; add a0, a1, a2 -;; neg a4, a3 -;; not a1, a4 -;; and a2, a0, a1 -;; lw a0, 0(a2) +;; ld a4, 0x40(a0) +;; ld a3, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a5, a1, 0x20 +;; sltu a4, a4, a5 +;; add a2, a3, a5 +;; neg 
a0, a4 +;; not a3, a0 +;; and a4, a2, a3 +;; lw a0, 0(a4) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat index 6d946d0be148..1f71b5c482b6 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -23,18 +23,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a5, 0x40(a0) -;; ld a4, 0x38(a0) -;; slli a1, a2, 0x20 -;; srli a0, a1, 0x20 -;; sltu a2, a5, a0 -;; add a4, a4, a0 -;; lui a5, 1 -;; add a4, a4, a5 -;; neg a0, a2 -;; not a2, a0 -;; and a4, a4, a2 -;; sw a3, 0(a4) +;; ld a4, 0x40(a0) +;; ld a5, 0x38(a0) +;; slli a0, a2, 0x20 +;; srli a0, a0, 0x20 +;; sltu a4, a4, a0 +;; add a5, a5, a0 +;; lui a0, 1 +;; add a5, a5, a0 +;; neg a2, a4 +;; not a4, a2 +;; and a0, a5, a4 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -46,17 +46,17 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; ld a4, 0x40(a0) -;; ld a3, 0x38(a0) -;; slli a1, a2, 0x20 -;; srli a5, a1, 0x20 -;; sltu a2, a4, a5 -;; add a3, a3, a5 -;; lui a4, 1 -;; add a3, a3, a4 -;; neg a0, a2 -;; not a2, a0 -;; and a4, a3, a2 -;; lw a0, 0(a4) +;; ld a5, 0x38(a0) +;; slli a3, a2, 0x20 +;; srli a0, a3, 0x20 +;; sltu a4, a4, a0 +;; add a5, a5, a0 +;; lui a0, 1 +;; add a5, a5, a0 +;; neg a2, a4 +;; not a4, a2 +;; and a0, a5, a4 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat index 08f29ea1f2f7..f00556ba92d6 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -23,19 +23,19 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x40(a0) -;; ld a5, 0x38(a0) -;; slli a2, a2, 0x20 -;; srli a0, a2, 0x20 -;; sltu a4, a4, a0 -;; add a5, a5, a0 -;; lui a2, 0xffff -;; slli a0, a2, 4 -;; add a5, a5, a0 -;; neg a1, a4 -;; not a4, a1 -;; and a5, a5, a4 -;; sw a3, 0(a5) +;; ld a5, 0x40(a0) +;; ld a0, 0x38(a0) +;; slli a4, a2, 0x20 +;; srli a1, a4, 0x20 +;; sltu a5, a5, a1 +;; add a0, a0, a1 +;; lui a4, 0xffff +;; slli a1, a4, 4 +;; add a0, a0, a1 +;; neg a4, a5 +;; not a5, a4 +;; and a0, a0, a5 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -46,19 +46,19 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a3, 0x40(a0) -;; ld a4, 0x38(a0) -;; slli a2, a2, 0x20 -;; srli a5, a2, 0x20 -;; sltu a3, a3, a5 -;; add a4, a4, a5 -;; lui a2, 0xffff -;; slli a5, a2, 4 -;; add a4, a4, a5 -;; neg a1, a3 -;; not a3, a1 -;; and a5, a4, a3 -;; lw a0, 0(a5) +;; ld a5, 0x40(a0) +;; ld a0, 0x38(a0) +;; slli a4, a2, 0x20 +;; srli a1, a4, 0x20 +;; sltu a5, a5, a1 +;; add a0, a0, a1 +;; lui a4, 0xffff +;; slli a1, a4, 4 +;; add a0, a0, a1 +;; neg a3, a5 +;; not a5, a3 +;; and a0, a0, a5 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 
diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat index 3077a212ee8d..9d5d60581f96 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat @@ -24,16 +24,16 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; ld a5, 0x40(a0) -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a4, a0, 0x20 -;; sltu a0, a4, a5 -;; xori a2, a0, 1 -;; add a1, a1, a4 -;; neg a5, a2 -;; not a2, a5 -;; and a4, a1, a2 -;; sb a3, 0(a4) +;; ld a4, 0x38(a0) +;; slli a2, a2, 0x20 +;; srli a0, a2, 0x20 +;; sltu a2, a0, a5 +;; xori a5, a2, 1 +;; add a4, a4, a0 +;; neg a1, a5 +;; not a5, a1 +;; and a5, a4, a5 +;; sb a3, 0(a5) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -45,16 +45,16 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; ld a4, 0x40(a0) -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a3, a0, 0x20 -;; sltu a0, a3, a4 -;; xori a2, a0, 1 -;; add a1, a1, a3 -;; neg a5, a2 -;; not a2, a5 -;; and a3, a1, a2 -;; lbu a0, 0(a3) +;; ld a3, 0x38(a0) +;; slli a2, a2, 0x20 +;; srli a5, a2, 0x20 +;; sltu a2, a5, a4 +;; xori a4, a2, 1 +;; add a3, a3, a5 +;; neg a1, a4 +;; not a4, a1 +;; and a5, a3, a4 +;; lbu a0, 0(a5) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat index 073f47275a82..99e8c709eb33 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -23,18 +23,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a5, 0x40(a0) -;; ld a4, 0x38(a0) -;; slli a1, a2, 0x20 -;; srli a0, a1, 0x20 -;; sltu a2, a5, a0 -;; add a4, a4, a0 -;; lui a5, 1 -;; add a4, a4, a5 -;; neg a0, a2 -;; not a2, a0 -;; and a4, a4, a2 -;; sb a3, 0(a4) +;; ld a4, 0x40(a0) +;; ld a5, 0x38(a0) +;; slli a0, a2, 0x20 +;; srli a0, a0, 0x20 +;; sltu a4, a4, a0 +;; add a5, a5, a0 +;; lui a0, 1 +;; add a5, a5, a0 +;; neg a2, a4 +;; not a4, a2 +;; and a0, a5, a4 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -46,17 +46,17 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; ld a4, 0x40(a0) -;; ld a3, 0x38(a0) -;; slli a1, a2, 0x20 -;; srli a5, a1, 0x20 -;; sltu a2, a4, a5 -;; add a3, a3, a5 -;; lui a4, 1 -;; add a3, a3, a4 -;; neg a0, a2 -;; not a2, a0 -;; and a4, a3, a2 -;; lbu a0, 0(a4) +;; ld a5, 0x38(a0) +;; slli a3, a2, 0x20 +;; srli a0, a3, 0x20 +;; sltu a4, a4, a0 +;; add a5, a5, a0 +;; lui a0, 1 +;; add a5, a5, a0 +;; neg a2, a4 +;; not a4, a2 +;; and a0, a5, a4 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat index 8dd93052fef5..2d687d207e4f 100644 --- 
a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -23,19 +23,19 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x40(a0) -;; ld a5, 0x38(a0) -;; slli a2, a2, 0x20 -;; srli a0, a2, 0x20 -;; sltu a4, a4, a0 -;; add a5, a5, a0 -;; lui a2, 0xffff -;; slli a0, a2, 4 -;; add a5, a5, a0 -;; neg a1, a4 -;; not a4, a1 -;; and a5, a5, a4 -;; sb a3, 0(a5) +;; ld a5, 0x40(a0) +;; ld a0, 0x38(a0) +;; slli a4, a2, 0x20 +;; srli a1, a4, 0x20 +;; sltu a5, a5, a1 +;; add a0, a0, a1 +;; lui a4, 0xffff +;; slli a1, a4, 4 +;; add a0, a0, a1 +;; neg a4, a5 +;; not a5, a4 +;; and a0, a0, a5 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -46,19 +46,19 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a3, 0x40(a0) -;; ld a4, 0x38(a0) -;; slli a2, a2, 0x20 -;; srli a5, a2, 0x20 -;; sltu a3, a3, a5 -;; add a4, a4, a5 -;; lui a2, 0xffff -;; slli a5, a2, 4 -;; add a4, a4, a5 -;; neg a1, a3 -;; not a3, a1 -;; and a5, a4, a3 -;; lbu a0, 0(a5) +;; ld a5, 0x40(a0) +;; ld a0, 0x38(a0) +;; slli a4, a2, 0x20 +;; srli a1, a4, 0x20 +;; sltu a5, a5, a1 +;; add a0, a0, a1 +;; lui a4, 0xffff +;; slli a1, a4, 4 +;; add a0, a0, a1 +;; neg a3, a5 +;; not a5, a3 +;; and a0, a0, a5 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat index 37407311e7e0..1a9af0df7ada 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat @@ -27,9 +27,9 @@ ;; addi a1, a1, -4 ;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a2, a4, a2 -;; sw a3, 0(a2) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -44,9 +44,9 @@ ;; addi a1, a1, -4 ;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a3, 0x38(a0) -;; add a2, a3, a2 -;; lw a0, 0(a2) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat index e4f19d285f62..84e30b79ebb0 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat @@ -23,16 +23,16 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x40(a0) -;; lui a5, 1 -;; addi a5, a5, 4 -;; sub a4, a4, a5 -;; bgeu a4, a2, 8 +;; ld a1, 0x40(a0) +;; lui a4, 1 +;; addi a4, a4, 4 +;; sub a1, a1, a4 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a4, a4, a2 +;; ld a0, 0x38(a0) +;; add a0, a0, a2 ;; lui t6, 1 -;; add t6, t6, a4 +;; add t6, t6, a0 ;; sw a3, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -44,16 +44,16 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a3, 0x40(a0) -;; lui 
a4, 1 -;; addi a4, a4, 4 -;; sub a3, a3, a4 -;; bgeu a3, a2, 8 +;; ld a1, 0x40(a0) +;; lui a3, 1 +;; addi a3, a3, 4 +;; sub a1, a1, a3 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a4, a4, a2 +;; ld a0, 0x38(a0) +;; add a0, a0, a2 ;; lui t6, 1 -;; add t6, t6, a4 +;; add t6, t6, a0 ;; lw a0, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat index 54909bead59c..3fd5dc1968bc 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -23,21 +23,21 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a5, 0x3fffc -;; addi a1, a5, 1 -;; slli a4, a1, 2 -;; add a1, a2, a4 -;; bgeu a1, a2, 8 +;; lui a1, 0x3fffc +;; addi a4, a1, 1 +;; slli a5, a4, 2 +;; add a4, a2, a5 +;; bgeu a4, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x40(a0) -;; bgeu a4, a1, 8 +;; ld a5, 0x40(a0) +;; bgeu a5, a4, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a4, a4, a2 -;; lui a2, 0xffff -;; slli a5, a2, 4 -;; add a4, a4, a5 -;; sw a3, 0(a4) +;; ld a5, 0x38(a0) +;; add a5, a5, a2 +;; lui a4, 0xffff +;; slli a0, a4, 4 +;; add a5, a5, a0 +;; sw a3, 0(a5) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -48,21 +48,21 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a5, 0x3fffc -;; addi a1, a5, 1 -;; slli a3, a1, 2 -;; add a1, a2, a3 -;; bgeu a1, a2, 8 +;; lui a1, 0x3fffc +;; addi a3, a1, 1 +;; slli a5, a3, 2 +;; add a3, a2, a5 +;; bgeu a3, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a3, 0x40(a0) -;; bgeu a3, a1, 8 +;; ld a4, 0x40(a0) +;; bgeu a4, a3, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a3, 0x38(a0) -;; add a3, a3, a2 -;; lui a2, 0xffff -;; slli a4, a2, 4 -;; add a3, a3, a4 -;; lw a0, 0(a3) +;; ld a5, 0x38(a0) +;; add a5, a5, a2 +;; lui a4, 0xffff +;; slli a0, a4, 4 +;; add a5, a5, a0 +;; lw a0, 0(a5) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat index 14222768a6e3..bf3af20e59bc 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat @@ -26,9 +26,9 @@ ;; ld a1, 0x40(a0) ;; bltu a2, a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x38(a0) -;; add a1, a1, a2 -;; sb a3, 0(a1) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -42,9 +42,9 @@ ;; ld a1, 0x40(a0) ;; bltu a2, a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x38(a0) -;; add a1, a1, a2 -;; lbu a0, 0(a1) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat index d55fbb921779..1ee992f61dba 100644 --- 
a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat @@ -23,16 +23,16 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x40(a0) -;; lui a5, 1 -;; addi a5, a5, 1 -;; sub a4, a4, a5 -;; bgeu a4, a2, 8 +;; ld a1, 0x40(a0) +;; lui a4, 1 +;; addi a4, a4, 1 +;; sub a1, a1, a4 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a4, a4, a2 +;; ld a0, 0x38(a0) +;; add a0, a0, a2 ;; lui t6, 1 -;; add t6, t6, a4 +;; add t6, t6, a0 ;; sb a3, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -44,16 +44,16 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a3, 0x40(a0) -;; lui a4, 1 -;; addi a4, a4, 1 -;; sub a3, a3, a4 -;; bgeu a3, a2, 8 +;; ld a1, 0x40(a0) +;; lui a3, 1 +;; addi a3, a3, 1 +;; sub a1, a1, a3 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a4, a4, a2 +;; ld a0, 0x38(a0) +;; add a0, a0, a2 ;; lui t6, 1 -;; add t6, t6, a4 +;; add t6, t6, a0 ;; lbu a0, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat index 7855ec9cd382..0fe055ccd229 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -23,20 +23,20 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; auipc a5, 0 -;; ld a5, 0x48(a5) -;; add a5, a2, a5 -;; bgeu a5, a2, 8 +;; auipc a1, 0 +;; ld a1, 0x48(a1) +;; add a1, a2, a1 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x40(a0) -;; bgeu a1, a5, 8 +;; ld a4, 0x40(a0) +;; bgeu a4, a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x38(a0) -;; add a1, a1, a2 -;; lui a0, 0xffff -;; slli a2, a0, 4 -;; add a1, a1, a2 -;; sb a3, 0(a1) +;; ld a4, 0x38(a0) +;; add a4, a4, a2 +;; lui a2, 0xffff +;; slli a5, a2, 4 +;; add a4, a4, a5 +;; sb a3, 0(a4) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -49,20 +49,20 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; auipc a5, 0 -;; ld a5, 0x48(a5) -;; add a5, a2, a5 -;; bgeu a5, a2, 8 +;; auipc a1, 0 +;; ld a1, 0x48(a1) +;; add a1, a2, a1 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x40(a0) -;; bgeu a1, a5, 8 +;; ld a3, 0x40(a0) +;; bgeu a3, a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x38(a0) -;; add a1, a1, a2 -;; lui a0, 0xffff -;; slli a2, a0, 4 -;; add a1, a1, a2 -;; lbu a0, 0(a1) +;; ld a3, 0x38(a0) +;; add a3, a3, a2 +;; lui a2, 0xffff +;; slli a4, a2, 4 +;; add a3, a3, a4 +;; lbu a0, 0(a3) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat index f6d4ccf2459d..cfbc4baba114 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat @@ -23,15 +23,15 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x40(a0) -;; ld a5, 0x38(a0) 
-;; addi a0, a1, -4 -;; sltu a0, a0, a2 -;; add a5, a5, a2 -;; neg a4, a0 -;; not a0, a4 -;; and a1, a5, a0 -;; sw a3, 0(a1) +;; ld a4, 0x40(a0) +;; ld a1, 0x38(a0) +;; addi a4, a4, -4 +;; sltu a4, a4, a2 +;; add a1, a1, a2 +;; neg a0, a4 +;; not a2, a0 +;; and a4, a1, a2 +;; sw a3, 0(a4) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -42,15 +42,15 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x40(a0) -;; ld a5, 0x38(a0) -;; addi a0, a1, -4 -;; sltu a0, a0, a2 -;; add a5, a5, a2 -;; neg a3, a0 -;; not a0, a3 -;; and a1, a5, a0 -;; lw a0, 0(a1) +;; ld a3, 0x40(a0) +;; ld a1, 0x38(a0) +;; addi a3, a3, -4 +;; sltu a3, a3, a2 +;; add a1, a1, a2 +;; neg a0, a3 +;; not a2, a0 +;; and a3, a1, a2 +;; lw a0, 0(a3) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat index 15e20a6efa66..4097c076defb 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -23,19 +23,19 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a5, 0x40(a0) -;; ld a4, 0x38(a0) -;; lui a0, 1 -;; addi a0, a0, 4 -;; sub a5, a5, a0 -;; sltu a5, a5, a2 -;; add a4, a4, a2 -;; lui a0, 1 -;; add a4, a4, a0 -;; neg a1, a5 -;; not a5, a1 -;; and a5, a4, a5 -;; sw a3, 0(a5) +;; ld a1, 0x40(a0) +;; ld a5, 0x38(a0) +;; lui a4, 1 +;; addi a0, a4, 4 +;; sub a0, a1, a0 +;; sltu a0, a0, a2 +;; add a5, a5, a2 +;; lui a1, 1 +;; add a5, a5, a1 +;; neg a4, a0 +;; not a0, a4 +;; and a0, a5, a0 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -46,19 +46,19 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x40(a0) -;; ld a3, 0x38(a0) -;; lui a5, 1 -;; addi a5, a5, 4 -;; sub a4, a4, a5 -;; sltu a4, a4, a2 -;; add a3, a3, a2 -;; lui a5, 1 -;; add a3, a3, a5 -;; neg a1, a4 -;; not a4, a1 -;; and a5, a3, a4 -;; lw a0, 0(a5) +;; ld a1, 0x40(a0) +;; ld a5, 0x38(a0) +;; lui a4, 1 +;; addi a0, a4, 4 +;; sub a0, a1, a0 +;; sltu a0, a0, a2 +;; add a5, a5, a2 +;; lui a1, 1 +;; add a5, a5, a1 +;; neg a3, a0 +;; not a0, a3 +;; and a0, a5, a0 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat index 1c88ea8dc6d0..6a1e9c901978 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -23,23 +23,23 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a4, 0x3fffc -;; addi a5, a4, 1 -;; slli a1, a5, 2 -;; add a5, a2, a1 -;; bgeu a5, a2, 8 +;; lui a5, 0x3fffc +;; addi a1, a5, 1 +;; slli a1, a1, 2 +;; add a1, a2, a1 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x40(a0) -;; ld a4, 0x38(a0) -;; sltu a0, a1, a5 -;; add a1, a4, a2 -;; lui a5, 0xffff -;; slli a2, a5, 4 +;; ld a4, 0x40(a0) +;; ld a5, 0x38(a0) +;; sltu a0, a4, a1 +;; add a1, a5, a2 +;; lui a2, 0xffff +;; slli a2, a2, 4 ;; add a1, a1, a2 -;; 
neg a4, a0 -;; not a0, a4 -;; and a2, a1, a0 -;; sw a3, 0(a2) +;; neg a0, a0 +;; not a0, a0 +;; and a0, a1, a0 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -50,23 +50,23 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a3, 0x3fffc -;; addi a5, a3, 1 -;; slli a1, a5, 2 -;; add a5, a2, a1 -;; bgeu a5, a2, 8 +;; lui a5, 0x3fffc +;; addi a1, a5, 1 +;; slli a1, a1, 2 +;; add a1, a2, a1 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x40(a0) -;; ld a3, 0x38(a0) -;; sltu a0, a1, a5 -;; add a1, a3, a2 -;; lui a5, 0xffff -;; slli a2, a5, 4 +;; ld a3, 0x40(a0) +;; ld a4, 0x38(a0) +;; sltu a0, a3, a1 +;; add a1, a4, a2 +;; lui a2, 0xffff +;; slli a2, a2, 4 ;; add a1, a1, a2 -;; neg a4, a0 -;; not a0, a4 -;; and a2, a1, a0 -;; lw a0, 0(a2) +;; neg a0, a0 +;; not a0, a0 +;; and a0, a1, a0 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat index cd7ea6055b77..0f23cece3ac9 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat @@ -23,15 +23,15 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x40(a0) -;; ld a5, 0x38(a0) -;; sltu a4, a2, a1 -;; xori a0, a4, 1 -;; add a5, a5, a2 -;; neg a4, a0 -;; not a0, a4 -;; and a1, a5, a0 -;; sb a3, 0(a1) +;; ld a4, 0x40(a0) +;; ld a1, 0x38(a0) +;; sltu a0, a2, a4 +;; xori a4, a0, 1 +;; add a1, a1, a2 +;; neg a0, a4 +;; not a2, a0 +;; and a4, a1, a2 +;; sb a3, 0(a4) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -42,15 +42,15 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x40(a0) -;; ld a5, 0x38(a0) -;; sltu a4, a2, a1 -;; xori a0, a4, 1 -;; add a5, a5, a2 -;; neg a3, a0 -;; not a0, a3 -;; and a1, a5, a0 -;; lbu a0, 0(a1) +;; ld a3, 0x40(a0) +;; ld a1, 0x38(a0) +;; sltu a0, a2, a3 +;; xori a3, a0, 1 +;; add a1, a1, a2 +;; neg a0, a3 +;; not a2, a0 +;; and a3, a1, a2 +;; lbu a0, 0(a3) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat index 62ca3f3f4dcc..ffda43338d7c 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -23,19 +23,19 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a5, 0x40(a0) -;; ld a4, 0x38(a0) -;; lui a0, 1 -;; addi a0, a0, 1 -;; sub a5, a5, a0 -;; sltu a5, a5, a2 -;; add a4, a4, a2 -;; lui a0, 1 -;; add a4, a4, a0 -;; neg a1, a5 -;; not a5, a1 -;; and a5, a4, a5 -;; sb a3, 0(a5) +;; ld a1, 0x40(a0) +;; ld a5, 0x38(a0) +;; lui a4, 1 +;; addi a0, a4, 1 +;; sub a0, a1, a0 +;; sltu a0, a0, a2 +;; add a5, a5, a2 +;; lui a1, 1 +;; add a5, a5, a1 +;; neg a4, a0 +;; not a0, a4 +;; and a0, a5, a0 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -46,19 +46,19 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x40(a0) -;; ld a3, 0x38(a0) -;; lui a5, 1 -;; addi a5, a5, 1 -;; sub a4, a4, a5 
-;; sltu a4, a4, a2 -;; add a3, a3, a2 -;; lui a5, 1 -;; add a3, a3, a5 -;; neg a1, a4 -;; not a4, a1 -;; and a5, a3, a4 -;; lbu a0, 0(a5) +;; ld a1, 0x40(a0) +;; ld a5, 0x38(a0) +;; lui a4, 1 +;; addi a0, a4, 1 +;; sub a0, a1, a0 +;; sltu a0, a0, a2 +;; add a5, a5, a2 +;; lui a1, 1 +;; add a5, a5, a1 +;; neg a3, a0 +;; not a0, a3 +;; and a0, a5, a0 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat index 38f9f85af67d..e2b8f3b5508e 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -23,21 +23,21 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; auipc a4, 0 -;; ld a4, 0x50(a4) -;; add a4, a2, a4 -;; bgeu a4, a2, 8 +;; auipc a5, 0 +;; ld a5, 0x50(a5) +;; add a5, a2, a5 +;; bgeu a5, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a5, 0x40(a0) -;; ld a0, 0x38(a0) -;; sltu a4, a5, a4 -;; add a5, a0, a2 -;; lui a0, 0xffff -;; slli a0, a0, 4 -;; add a5, a5, a0 -;; neg a2, a4 -;; not a4, a2 -;; and a0, a5, a4 +;; ld a1, 0x40(a0) +;; ld a4, 0x38(a0) +;; sltu a0, a1, a5 +;; add a1, a4, a2 +;; lui a5, 0xffff +;; slli a2, a5, 4 +;; add a1, a1, a2 +;; neg a4, a0 +;; not a0, a4 +;; and a0, a1, a0 ;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -51,21 +51,21 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; auipc a3, 0 -;; ld a3, 0x50(a3) -;; add a3, a2, a3 -;; bgeu a3, a2, 8 +;; auipc a5, 0 +;; ld a5, 0x50(a5) +;; add a5, a2, a5 +;; bgeu a5, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x40(a0) -;; ld a5, 0x38(a0) -;; sltu a4, a4, a3 -;; add a5, a5, a2 -;; lui a3, 0xffff -;; slli a0, a3, 4 -;; add a5, a5, a0 -;; neg a2, a4 -;; not a4, a2 -;; and a0, a5, a4 +;; ld a1, 0x40(a0) +;; ld a3, 0x38(a0) +;; sltu a0, a1, a5 +;; add a1, a3, a2 +;; lui a5, 0xffff +;; slli a2, a5, 4 +;; add a1, a1, a2 +;; neg a4, a0 +;; not a0, a4 +;; and a0, a1, a0 ;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat index abbeaca24606..bcf9e96404f2 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat @@ -26,9 +26,9 @@ ;; ld a1, 0x40(a0) ;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x38(a0) -;; add a1, a1, a2 -;; sw a3, 0(a1) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -42,9 +42,9 @@ ;; ld a1, 0x40(a0) ;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x38(a0) -;; add a1, a1, a2 -;; lw a0, 0(a1) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat 
b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat index 05941b3338bb..8c16342183cd 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat @@ -26,10 +26,10 @@ ;; ld a1, 0x40(a0) ;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x38(a0) -;; add a1, a1, a2 +;; ld a0, 0x38(a0) +;; add a0, a0, a2 ;; lui t6, 1 -;; add t6, t6, a1 +;; add t6, t6, a0 ;; sw a3, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -44,10 +44,10 @@ ;; ld a1, 0x40(a0) ;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x38(a0) -;; add a1, a1, a2 +;; ld a0, 0x38(a0) +;; add a0, a0, a2 ;; lui t6, 1 -;; add t6, t6, a1 +;; add t6, t6, a0 ;; lw a0, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat index 65e8ed0358b5..c92d9f347375 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -23,15 +23,15 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x40(a0) -;; bgeu a4, a2, 8 +;; ld a1, 0x40(a0) +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a4, a4, a2 -;; lui a5, 0xffff -;; slli a5, a5, 4 -;; add a4, a4, a5 -;; sw a3, 0(a4) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; lui a1, 0xffff +;; slli a1, a1, 4 +;; add a0, a0, a1 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -42,15 +42,15 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a3, 0x40(a0) -;; bgeu a3, a2, 8 +;; ld a1, 0x40(a0) +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a4, a4, a2 -;; lui a3, 0xffff -;; slli a5, a3, 4 -;; add a4, a4, a5 -;; lw a0, 0(a4) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; lui a1, 0xffff +;; slli a1, a1, 4 +;; add a0, a0, a1 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat index 5cbaba85ee26..cfe3097b43d4 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat @@ -26,9 +26,9 @@ ;; ld a1, 0x40(a0) ;; bltu a2, a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x38(a0) -;; add a1, a1, a2 -;; sb a3, 0(a1) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -42,9 +42,9 @@ ;; ld a1, 0x40(a0) ;; bltu a2, a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x38(a0) -;; add a1, a1, a2 -;; lbu a0, 0(a1) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git 
a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat index 01b27fe4ad66..097f02b7b836 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat @@ -26,10 +26,10 @@ ;; ld a1, 0x40(a0) ;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x38(a0) -;; add a1, a1, a2 +;; ld a0, 0x38(a0) +;; add a0, a0, a2 ;; lui t6, 1 -;; add t6, t6, a1 +;; add t6, t6, a0 ;; sb a3, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -44,10 +44,10 @@ ;; ld a1, 0x40(a0) ;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x38(a0) -;; add a1, a1, a2 +;; ld a0, 0x38(a0) +;; add a0, a0, a2 ;; lui t6, 1 -;; add t6, t6, a1 +;; add t6, t6, a0 ;; lbu a0, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat index 60f26b40a860..0dd426c1226d 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -23,15 +23,15 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x40(a0) -;; bgeu a4, a2, 8 +;; ld a1, 0x40(a0) +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a4, a4, a2 -;; lui a5, 0xffff -;; slli a5, a5, 4 -;; add a4, a4, a5 -;; sb a3, 0(a4) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; lui a1, 0xffff +;; slli a1, a1, 4 +;; add a0, a0, a1 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -42,15 +42,15 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a3, 0x40(a0) -;; bgeu a3, a2, 8 +;; ld a1, 0x40(a0) +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a4, a4, a2 -;; lui a3, 0xffff -;; slli a5, a3, 4 -;; add a4, a4, a5 -;; lbu a0, 0(a4) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; lui a1, 0xffff +;; slli a1, a1, 4 +;; add a0, a0, a1 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat index 8a336075b40c..1aa07f4509cb 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat @@ -24,13 +24,13 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; ld a4, 0x40(a0) -;; ld a5, 0x38(a0) -;; sltu a0, a4, a2 -;; add a4, a5, a2 -;; neg a2, a0 -;; not a5, a2 -;; and a0, a4, a5 -;; sw a3, 0(a0) +;; ld a1, 0x38(a0) +;; sltu a4, a4, a2 +;; add a0, a1, a2 +;; neg a1, a4 +;; not a1, a1 +;; and a2, a0, a1 +;; sw a3, 0(a2) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -41,14 +41,14 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x40(a0) -;; ld 
a5, 0x38(a0) -;; sltu a0, a4, a2 -;; add a4, a5, a2 -;; neg a2, a0 -;; not a5, a2 -;; and a0, a4, a5 -;; lw a0, 0(a0) +;; ld a3, 0x40(a0) +;; ld a1, 0x38(a0) +;; sltu a3, a3, a2 +;; add a0, a1, a2 +;; neg a1, a3 +;; not a1, a1 +;; and a2, a0, a1 +;; lw a0, 0(a2) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat index 4dc1096f95c9..6cfaa3db9a2b 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -24,15 +24,15 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; ld a4, 0x40(a0) -;; ld a1, 0x38(a0) -;; sltu a0, a4, a2 -;; add a1, a1, a2 -;; lui a2, 1 -;; add a1, a1, a2 -;; neg a4, a0 -;; not a0, a4 -;; and a2, a1, a0 -;; sw a3, 0(a2) +;; ld a5, 0x38(a0) +;; sltu a4, a4, a2 +;; add a2, a5, a2 +;; lui a5, 1 +;; add a2, a2, a5 +;; neg a0, a4 +;; not a4, a0 +;; and a4, a2, a4 +;; sw a3, 0(a4) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -44,15 +44,15 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; ld a3, 0x40(a0) -;; ld a1, 0x38(a0) -;; sltu a0, a3, a2 -;; add a1, a1, a2 -;; lui a2, 1 -;; add a1, a1, a2 -;; neg a4, a0 -;; not a0, a4 -;; and a2, a1, a0 -;; lw a0, 0(a2) +;; ld a4, 0x38(a0) +;; sltu a3, a3, a2 +;; add a2, a4, a2 +;; lui a4, 1 +;; add a2, a2, a4 +;; neg a0, a3 +;; not a3, a0 +;; and a4, a2, a3 +;; lw a0, 0(a4) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat index a460ef3546e8..788d9e43c072 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -23,17 +23,17 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x40(a0) -;; ld a4, 0x38(a0) -;; sltu a1, a1, a2 -;; add a2, a4, a2 -;; lui a0, 0xffff -;; slli a4, a0, 4 -;; add a2, a2, a4 -;; neg a5, a1 -;; not a1, a5 -;; and a4, a2, a1 -;; sw a3, 0(a4) +;; ld a4, 0x40(a0) +;; ld a5, 0x38(a0) +;; sltu a4, a4, a2 +;; add a5, a5, a2 +;; lui a2, 0xffff +;; slli a0, a2, 4 +;; add a5, a5, a0 +;; neg a1, a4 +;; not a4, a1 +;; and a5, a5, a4 +;; sw a3, 0(a5) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -44,17 +44,17 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x40(a0) -;; ld a3, 0x38(a0) -;; sltu a1, a1, a2 -;; add a2, a3, a2 -;; lui a0, 0xffff -;; slli a3, a0, 4 -;; add a2, a2, a3 -;; neg a5, a1 -;; not a1, a5 -;; and a3, a2, a1 -;; lw a0, 0(a3) +;; ld a3, 0x40(a0) +;; ld a4, 0x38(a0) +;; sltu a3, a3, a2 +;; add a4, a4, a2 +;; lui a2, 0xffff +;; slli a5, a2, 4 +;; add a4, a4, a5 +;; neg a1, a3 +;; not a3, a1 +;; and a5, a4, a3 +;; lw a0, 0(a5) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat 
b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat index 30356e2a7d78..1f68e34e19d2 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat @@ -23,15 +23,15 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x40(a0) -;; ld a5, 0x38(a0) -;; sltu a4, a2, a1 -;; xori a0, a4, 1 -;; add a5, a5, a2 -;; neg a4, a0 -;; not a0, a4 -;; and a1, a5, a0 -;; sb a3, 0(a1) +;; ld a4, 0x40(a0) +;; ld a1, 0x38(a0) +;; sltu a0, a2, a4 +;; xori a4, a0, 1 +;; add a1, a1, a2 +;; neg a0, a4 +;; not a2, a0 +;; and a4, a1, a2 +;; sb a3, 0(a4) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -42,15 +42,15 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x40(a0) -;; ld a5, 0x38(a0) -;; sltu a4, a2, a1 -;; xori a0, a4, 1 -;; add a5, a5, a2 -;; neg a3, a0 -;; not a0, a3 -;; and a1, a5, a0 -;; lbu a0, 0(a1) +;; ld a3, 0x40(a0) +;; ld a1, 0x38(a0) +;; sltu a0, a2, a3 +;; xori a3, a0, 1 +;; add a1, a1, a2 +;; neg a0, a3 +;; not a2, a0 +;; and a3, a1, a2 +;; lbu a0, 0(a3) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat index d764ca198089..918a374b48dd 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -24,15 +24,15 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; ld a4, 0x40(a0) -;; ld a1, 0x38(a0) -;; sltu a0, a4, a2 -;; add a1, a1, a2 -;; lui a2, 1 -;; add a1, a1, a2 -;; neg a4, a0 -;; not a0, a4 -;; and a2, a1, a0 -;; sb a3, 0(a2) +;; ld a5, 0x38(a0) +;; sltu a4, a4, a2 +;; add a2, a5, a2 +;; lui a5, 1 +;; add a2, a2, a5 +;; neg a0, a4 +;; not a4, a0 +;; and a4, a2, a4 +;; sb a3, 0(a4) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -44,15 +44,15 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; ld a3, 0x40(a0) -;; ld a1, 0x38(a0) -;; sltu a0, a3, a2 -;; add a1, a1, a2 -;; lui a2, 1 -;; add a1, a1, a2 -;; neg a4, a0 -;; not a0, a4 -;; and a2, a1, a0 -;; lbu a0, 0(a2) +;; ld a4, 0x38(a0) +;; sltu a3, a3, a2 +;; add a2, a4, a2 +;; lui a4, 1 +;; add a2, a2, a4 +;; neg a0, a3 +;; not a3, a0 +;; and a4, a2, a3 +;; lbu a0, 0(a4) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat index 74ddd12bb58b..40f25d638755 100644 --- a/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -23,17 +23,17 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x40(a0) -;; ld a4, 0x38(a0) -;; sltu a1, a1, a2 -;; add a2, a4, a2 -;; lui a0, 0xffff -;; slli a4, a0, 4 -;; add a2, a2, a4 -;; neg a5, a1 -;; not a1, a5 -;; and a4, a2, a1 -;; 
sb a3, 0(a4) +;; ld a4, 0x40(a0) +;; ld a5, 0x38(a0) +;; sltu a4, a4, a2 +;; add a5, a5, a2 +;; lui a2, 0xffff +;; slli a0, a2, 4 +;; add a5, a5, a0 +;; neg a1, a4 +;; not a4, a1 +;; and a5, a5, a4 +;; sb a3, 0(a5) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -44,17 +44,17 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x40(a0) -;; ld a3, 0x38(a0) -;; sltu a1, a1, a2 -;; add a2, a3, a2 -;; lui a0, 0xffff -;; slli a3, a0, 4 -;; add a2, a2, a3 -;; neg a5, a1 -;; not a1, a5 -;; and a3, a2, a1 -;; lbu a0, 0(a3) +;; ld a3, 0x40(a0) +;; ld a4, 0x38(a0) +;; sltu a3, a3, a2 +;; add a4, a4, a2 +;; lui a2, 0xffff +;; slli a5, a2, 4 +;; add a4, a4, a5 +;; neg a1, a3 +;; not a3, a1 +;; and a5, a4, a3 +;; lbu a0, 0(a5) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat index 7831f09a2ce6..e8caa6750b5d 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat @@ -23,16 +23,16 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a4, a2, 0x20 -;; srli a5, a4, 0x20 +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 ;; lui a2, 0x40000 -;; addi a4, a2, -1 -;; slli a1, a4, 2 -;; bgeu a1, a5, 8 +;; addi a2, a2, -1 +;; slli a2, a2, 2 +;; bgeu a2, a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a0, 0x38(a0) -;; add a5, a0, a5 -;; sw a3, 0(a5) +;; ld a2, 0x38(a0) +;; add a1, a2, a1 +;; sw a3, 0(a1) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -43,16 +43,16 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a3, a2, 0x20 -;; srli a5, a3, 0x20 +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 ;; lui a2, 0x40000 -;; addi a4, a2, -1 -;; slli a1, a4, 2 -;; bgeu a1, a5, 8 +;; addi a2, a2, -1 +;; slli a2, a2, 2 +;; bgeu a2, a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a0, 0x38(a0) -;; add a5, a0, a5 -;; lw a0, 0(a5) +;; ld a2, 0x38(a0) +;; add a1, a2, a1 +;; lw a0, 0(a1) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat index d2ca21dd165a..398da6d69d06 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat @@ -23,17 +23,17 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a4, a2, 0x20 -;; srli a5, a4, 0x20 +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 ;; lui a2, 0x40000 -;; addi a4, a2, -0x401 -;; slli a1, a4, 2 -;; bgeu a1, a5, 8 +;; addi a2, a2, -0x401 +;; slli a2, a2, 2 +;; bgeu a2, a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a0, 0x38(a0) -;; add a5, a0, a5 +;; ld a2, 0x38(a0) +;; add a1, a2, a1 ;; lui t6, 1 -;; add t6, t6, a5 +;; add t6, t6, a1 ;; sw a3, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -45,17 +45,17 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a3, a2, 0x20 -;; srli a5, a3, 0x20 +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 ;; lui a2, 0x40000 -;; addi a4, a2, -0x401 -;; slli a1, a4, 2 -;; bgeu a1, a5, 8 +;; addi a2, a2, -0x401 +;; slli a2, a2, 2 
+;; bgeu a2, a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a0, 0x38(a0) -;; add a5, a0, a5 +;; ld a2, 0x38(a0) +;; add a1, a2, a1 ;; lui t6, 1 -;; add t6, t6, a5 +;; add t6, t6, a1 ;; lw a0, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat index 3068b62bea8b..863c41c30d79 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -23,18 +23,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a5, a2, 0x20 -;; srli a1, a5, 0x20 -;; lui a5, 0x10 -;; addi a2, a5, -4 -;; bgeu a2, a1, 8 +;; slli a1, a2, 0x20 +;; srli a4, a1, 0x20 +;; lui a1, 0x10 +;; addi a5, a1, -4 +;; bgeu a5, a4, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a2, 0x38(a0) -;; add a1, a2, a1 -;; lui a0, 0xffff -;; slli a2, a0, 4 -;; add a1, a1, a2 -;; sw a3, 0(a1) +;; ld a5, 0x38(a0) +;; add a4, a5, a4 +;; lui a2, 0xffff +;; slli a5, a2, 4 +;; add a4, a4, a5 +;; sw a3, 0(a4) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -45,18 +45,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a5, a2, 0x20 -;; srli a1, a5, 0x20 -;; lui a5, 0x10 -;; addi a2, a5, -4 -;; bgeu a2, a1, 8 +;; slli a1, a2, 0x20 +;; srli a3, a1, 0x20 +;; lui a1, 0x10 +;; addi a4, a1, -4 +;; bgeu a4, a3, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a2, 0x38(a0) -;; add a1, a2, a1 -;; lui a0, 0xffff -;; slli a2, a0, 4 -;; add a1, a1, a2 -;; lw a0, 0(a1) +;; ld a4, 0x38(a0) +;; add a3, a4, a3 +;; lui a2, 0xffff +;; slli a4, a2, 4 +;; add a3, a3, a4 +;; lw a0, 0(a3) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat index 3c18d22a0c12..a15e1c651ad8 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat @@ -23,11 +23,11 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a2, a0, 0x20 -;; add a1, a1, a2 -;; sb a3, 0(a1) +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -38,11 +38,11 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a2, a0, 0x20 -;; add a1, a1, a2 -;; lbu a0, 0(a1) +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat index 1341663d91f7..38e851bc9cd7 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat @@ 
-24,15 +24,15 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; slli a1, a2, 0x20 -;; srli a4, a1, 0x20 +;; srli a1, a1, 0x20 ;; auipc a2, 0 ;; ld a2, 0x38(a2) -;; bgeu a2, a4, 8 +;; bgeu a2, a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a5, 0x38(a0) -;; add a4, a5, a4 +;; ld a0, 0x38(a0) +;; add a0, a0, a1 ;; lui t6, 1 -;; add t6, t6, a4 +;; add t6, t6, a0 ;; sb a3, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -48,15 +48,15 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; slli a1, a2, 0x20 -;; srli a3, a1, 0x20 +;; srli a1, a1, 0x20 ;; auipc a2, 0 ;; ld a2, 0x38(a2) -;; bgeu a2, a3, 8 +;; bgeu a2, a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a3, a4, a3 +;; ld a0, 0x38(a0) +;; add a0, a0, a1 ;; lui t6, 1 -;; add t6, t6, a3 +;; add t6, t6, a0 ;; lbu a0, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat index f798bb1eebd1..b5ab03f110b4 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -23,18 +23,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a5, a2, 0x20 -;; srli a1, a5, 0x20 -;; lui a5, 0x10 -;; addi a2, a5, -1 -;; bgeu a2, a1, 8 +;; slli a1, a2, 0x20 +;; srli a4, a1, 0x20 +;; lui a1, 0x10 +;; addi a5, a1, -1 +;; bgeu a5, a4, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a2, 0x38(a0) -;; add a1, a2, a1 -;; lui a0, 0xffff -;; slli a2, a0, 4 -;; add a1, a1, a2 -;; sb a3, 0(a1) +;; ld a5, 0x38(a0) +;; add a4, a5, a4 +;; lui a2, 0xffff +;; slli a5, a2, 4 +;; add a4, a4, a5 +;; sb a3, 0(a4) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -45,18 +45,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a5, a2, 0x20 -;; srli a1, a5, 0x20 -;; lui a5, 0x10 -;; addi a2, a5, -1 -;; bgeu a2, a1, 8 +;; slli a1, a2, 0x20 +;; srli a3, a1, 0x20 +;; lui a1, 0x10 +;; addi a4, a1, -1 +;; bgeu a4, a3, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a2, 0x38(a0) -;; add a1, a2, a1 -;; lui a0, 0xffff -;; slli a2, a0, 4 -;; add a1, a1, a2 -;; lbu a0, 0(a1) +;; ld a4, 0x38(a0) +;; add a3, a4, a3 +;; lui a2, 0xffff +;; slli a4, a2, 4 +;; add a3, a3, a4 +;; lbu a0, 0(a3) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat index 2209b4696247..5f7290f57d02 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat @@ -23,18 +23,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a1, a2, 0x20 -;; srli a4, a1, 0x20 -;; lui a1, 0x40000 -;; addi a2, a1, -1 -;; slli a5, a2, 2 -;; sltu a2, a5, a4 -;; ld a5, 0x38(a0) -;; add a4, a5, a4 -;; neg a0, a2 -;; not a2, a0 -;; and a4, a4, a2 -;; sw a3, 0(a4) +;; slli a4, a2, 0x20 +;; srli a5, a4, 0x20 +;; lui a2, 0x40000 +;; addi a4, a2, -1 +;; slli a1, a4, 2 +;; sltu a4, a1, a5 +;; ld a0, 0x38(a0) +;; add a5, a0, a5 +;; neg a2, a4 +;; not a4, a2 +;; and a0, a5, a4 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -45,18 
+45,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a1, a2, 0x20 -;; srli a3, a1, 0x20 -;; lui a1, 0x40000 -;; addi a2, a1, -1 -;; slli a4, a2, 2 -;; sltu a2, a4, a3 -;; ld a4, 0x38(a0) -;; add a3, a4, a3 -;; neg a0, a2 -;; not a2, a0 -;; and a4, a3, a2 -;; lw a0, 0(a4) +;; slli a3, a2, 0x20 +;; srli a5, a3, 0x20 +;; lui a2, 0x40000 +;; addi a4, a2, -1 +;; slli a1, a4, 2 +;; sltu a4, a1, a5 +;; ld a0, 0x38(a0) +;; add a5, a0, a5 +;; neg a2, a4 +;; not a4, a2 +;; and a0, a5, a4 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat index 76274dc69f1e..2d30a99d535a 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -23,19 +23,19 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a4, a2, 0x20 -;; srli a5, a4, 0x20 -;; lui a2, 0x40000 -;; addi a4, a2, -0x401 -;; slli a1, a4, 2 -;; sltu a4, a1, a5 +;; slli a5, a2, 0x20 +;; srli a1, a5, 0x20 +;; lui a4, 0x40000 +;; addi a2, a4, -0x401 +;; slli a2, a2, 2 +;; sltu a2, a2, a1 ;; ld a0, 0x38(a0) -;; add a5, a0, a5 -;; lui a0, 1 -;; add a5, a5, a0 -;; neg a2, a4 -;; not a4, a2 -;; and a0, a5, a4 +;; add a0, a0, a1 +;; lui a1, 1 +;; add a0, a0, a1 +;; neg a4, a2 +;; not a1, a4 +;; and a0, a0, a1 ;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -47,19 +47,19 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a3, a2, 0x20 -;; srli a5, a3, 0x20 -;; lui a2, 0x40000 -;; addi a4, a2, -0x401 -;; slli a1, a4, 2 -;; sltu a4, a1, a5 +;; slli a5, a2, 0x20 +;; srli a1, a5, 0x20 +;; lui a4, 0x40000 +;; addi a2, a4, -0x401 +;; slli a2, a2, 2 +;; sltu a2, a2, a1 ;; ld a0, 0x38(a0) -;; add a5, a0, a5 -;; lui a0, 1 -;; add a5, a5, a0 -;; neg a2, a4 -;; not a4, a2 -;; and a0, a5, a4 +;; add a0, a0, a1 +;; lui a1, 1 +;; add a0, a0, a1 +;; neg a4, a2 +;; not a1, a4 +;; and a0, a0, a1 ;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat index cd96c556500c..64863a0a6660 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -23,19 +23,21 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a4, a2, 0x20 -;; srli a5, a4, 0x20 -;; lui a4, 0x10 -;; addi a1, a4, -4 -;; sltu a4, a1, a5 -;; ld a0, 0x38(a0) -;; add a5, a0, a5 -;; lui a0, 0xffff -;; slli a0, a0, 4 -;; add a5, a5, a0 -;; neg a2, a4 -;; not a4, a2 -;; and a0, a5, a4 +;; mv a4, a0 +;; slli a5, a2, 0x20 +;; srli a1, a5, 0x20 +;; lui a5, 0x10 +;; addi a0, a5, -4 +;; sltu a0, a0, a1 +;; mv a2, a4 +;; ld a2, 0x38(a2) +;; add a1, a2, a1 +;; lui a5, 0xffff +;; slli a2, a5, 4 +;; add a1, a1, a2 +;; neg a4, a0 +;; not a0, a4 +;; and a0, a1, a0 ;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -47,19 +49,21 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a3, a2, 0x20 -;; srli a5, a3, 0x20 -;; lui a3, 0x10 
-;; addi a1, a3, -4 -;; sltu a4, a1, a5 -;; ld a0, 0x38(a0) -;; add a5, a0, a5 -;; lui a3, 0xffff -;; slli a0, a3, 4 -;; add a5, a5, a0 -;; neg a2, a4 -;; not a4, a2 -;; and a0, a5, a4 +;; mv a3, a0 +;; slli a5, a2, 0x20 +;; srli a1, a5, 0x20 +;; lui a5, 0x10 +;; addi a0, a5, -4 +;; sltu a0, a0, a1 +;; mv a2, a3 +;; ld a2, 0x38(a2) +;; add a1, a2, a1 +;; lui a5, 0xffff +;; slli a2, a5, 4 +;; add a1, a1, a2 +;; neg a4, a0 +;; not a0, a4 +;; and a0, a1, a0 ;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat index 0e1b37e23fe3..4ed12a84d7bb 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat @@ -23,11 +23,11 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a2, a0, 0x20 -;; add a1, a1, a2 -;; sb a3, 0(a1) +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -38,11 +38,11 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a2, a0, 0x20 -;; add a1, a1, a2 -;; lbu a0, 0(a1) +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat index 4c97c95af07a..7cf01f6ddb8e 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -23,19 +23,19 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a1, a2, 0x20 -;; srli a4, a1, 0x20 -;; auipc a2, 0 -;; ld a2, 0x40(a2) -;; sltu a2, a2, a4 -;; ld a5, 0x38(a0) -;; add a4, a5, a4 -;; lui a5, 1 -;; add a4, a4, a5 -;; neg a0, a2 -;; not a2, a0 -;; and a4, a4, a2 -;; sb a3, 0(a4) +;; slli a4, a2, 0x20 +;; srli a5, a4, 0x20 +;; auipc a4, 0 +;; ld a4, 0x40(a4) +;; sltu a4, a4, a5 +;; ld a0, 0x38(a0) +;; add a5, a0, a5 +;; lui a0, 1 +;; add a5, a5, a0 +;; neg a2, a4 +;; not a4, a2 +;; and a0, a5, a4 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -49,19 +49,19 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a1, a2, 0x20 -;; srli a3, a1, 0x20 -;; auipc a2, 0 -;; ld a2, 0x40(a2) -;; sltu a2, a2, a3 -;; ld a4, 0x38(a0) -;; add a3, a4, a3 -;; lui a4, 1 -;; add a3, a3, a4 -;; neg a0, a2 -;; not a2, a0 -;; and a4, a3, a2 -;; lbu a0, 0(a4) +;; slli a3, a2, 0x20 +;; srli a5, a3, 0x20 +;; auipc a4, 0 +;; ld a4, 0x40(a4) +;; sltu a4, a4, a5 +;; ld a0, 0x38(a0) +;; add a5, a0, a5 +;; lui a0, 1 +;; add a5, a5, a0 +;; neg a2, a4 +;; not a4, a2 +;; and a0, a5, a4 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat 
b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat index e131a9444130..906d2a950e04 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -23,19 +23,21 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a4, a2, 0x20 -;; srli a5, a4, 0x20 -;; lui a4, 0x10 -;; addi a1, a4, -1 -;; sltu a4, a1, a5 -;; ld a0, 0x38(a0) -;; add a5, a0, a5 -;; lui a0, 0xffff -;; slli a0, a0, 4 -;; add a5, a5, a0 -;; neg a2, a4 -;; not a4, a2 -;; and a0, a5, a4 +;; mv a4, a0 +;; slli a5, a2, 0x20 +;; srli a1, a5, 0x20 +;; lui a5, 0x10 +;; addi a0, a5, -1 +;; sltu a0, a0, a1 +;; mv a2, a4 +;; ld a2, 0x38(a2) +;; add a1, a2, a1 +;; lui a5, 0xffff +;; slli a2, a5, 4 +;; add a1, a1, a2 +;; neg a4, a0 +;; not a0, a4 +;; and a0, a1, a0 ;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -47,19 +49,21 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; slli a3, a2, 0x20 -;; srli a5, a3, 0x20 -;; lui a3, 0x10 -;; addi a1, a3, -1 -;; sltu a4, a1, a5 -;; ld a0, 0x38(a0) -;; add a5, a0, a5 -;; lui a3, 0xffff -;; slli a0, a3, 4 -;; add a5, a5, a0 -;; neg a2, a4 -;; not a4, a2 -;; and a0, a5, a4 +;; mv a3, a0 +;; slli a5, a2, 0x20 +;; srli a1, a5, 0x20 +;; lui a5, 0x10 +;; addi a0, a5, -1 +;; sltu a0, a0, a1 +;; mv a2, a3 +;; ld a2, 0x38(a2) +;; add a1, a2, a1 +;; lui a5, 0xffff +;; slli a2, a5, 4 +;; add a1, a1, a2 +;; neg a4, a0 +;; not a0, a4 +;; and a0, a1, a0 ;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat index 5c9255e13601..973a3af6d295 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat @@ -23,11 +23,11 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a2, a0, 0x20 -;; add a1, a1, a2 -;; sw a3, 0(a1) +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -38,11 +38,11 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a2, a0, 0x20 -;; add a1, a1, a2 -;; lw a0, 0(a1) +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat index 4a90dd2f39e1..15c8340cbafd 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat @@ -23,12 +23,12 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a2, a0, 0x20 -;; add a1, a1, a2 +;; ld a0, 0x38(a0) 
+;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 ;; lui t6, 1 -;; add t6, t6, a1 +;; add t6, t6, a0 ;; sw a3, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -40,12 +40,12 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a2, a0, 0x20 -;; add a1, a1, a2 +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 ;; lui t6, 1 -;; add t6, t6, a1 +;; add t6, t6, a0 ;; lw a0, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat index 5a7de923a8ef..3e2698d910f9 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -23,14 +23,14 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x38(a0) -;; slli a5, a2, 0x20 -;; srli a5, a5, 0x20 -;; add a4, a4, a5 -;; lui a5, 0xffff -;; slli a5, a5, 4 -;; add a4, a4, a5 -;; sw a3, 0(a4) +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 +;; lui a1, 0xffff +;; slli a1, a1, 4 +;; add a0, a0, a1 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -41,14 +41,14 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x38(a0) -;; slli a3, a2, 0x20 -;; srli a5, a3, 0x20 -;; add a4, a4, a5 -;; lui a3, 0xffff -;; slli a5, a3, 4 -;; add a4, a4, a5 -;; lw a0, 0(a4) +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 +;; lui a1, 0xffff +;; slli a1, a1, 4 +;; add a0, a0, a1 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat index c663d26fa798..b96f03eb82f9 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat @@ -23,11 +23,11 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a2, a0, 0x20 -;; add a1, a1, a2 -;; sb a3, 0(a1) +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -38,11 +38,11 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a2, a0, 0x20 -;; add a1, a1, a2 -;; lbu a0, 0(a1) +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat index cb980a4b3101..5e5f42b6a4b5 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat +++ 
b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat @@ -23,12 +23,12 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a2, a0, 0x20 -;; add a1, a1, a2 +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 ;; lui t6, 1 -;; add t6, t6, a1 +;; add t6, t6, a0 ;; sb a3, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -40,12 +40,12 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a2, a0, 0x20 -;; add a1, a1, a2 +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 ;; lui t6, 1 -;; add t6, t6, a1 +;; add t6, t6, a0 ;; lbu a0, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat index af593789193c..5cc7c66e1acc 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -23,14 +23,14 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x38(a0) -;; slli a5, a2, 0x20 -;; srli a5, a5, 0x20 -;; add a4, a4, a5 -;; lui a5, 0xffff -;; slli a5, a5, 4 -;; add a4, a4, a5 -;; sb a3, 0(a4) +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 +;; lui a1, 0xffff +;; slli a1, a1, 4 +;; add a0, a0, a1 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -41,14 +41,14 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x38(a0) -;; slli a3, a2, 0x20 -;; srli a5, a3, 0x20 -;; add a4, a4, a5 -;; lui a3, 0xffff -;; slli a5, a3, 4 -;; add a4, a4, a5 -;; lbu a0, 0(a4) +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 +;; lui a1, 0xffff +;; slli a1, a1, 4 +;; add a0, a0, a1 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat index d6374d2a874d..24970ba9ebfd 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat @@ -23,11 +23,11 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a2, a0, 0x20 -;; add a1, a1, a2 -;; sw a3, 0(a1) +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -38,11 +38,11 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a2, a0, 0x20 -;; add a1, a1, a2 -;; lw a0, 0(a1) +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat 
b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat index bb80a7b6bf94..d4df2560a895 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -23,12 +23,12 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a2, a0, 0x20 -;; add a1, a1, a2 +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 ;; lui t6, 1 -;; add t6, t6, a1 +;; add t6, t6, a0 ;; sw a3, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -40,12 +40,12 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a2, a0, 0x20 -;; add a1, a1, a2 +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 ;; lui t6, 1 -;; add t6, t6, a1 +;; add t6, t6, a0 ;; lw a0, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat index 9e9dcf602253..b648edd69a59 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -23,14 +23,14 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x38(a0) -;; slli a5, a2, 0x20 -;; srli a5, a5, 0x20 -;; add a4, a4, a5 -;; lui a5, 0xffff -;; slli a5, a5, 4 -;; add a4, a4, a5 -;; sw a3, 0(a4) +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 +;; lui a1, 0xffff +;; slli a1, a1, 4 +;; add a0, a0, a1 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -41,14 +41,14 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x38(a0) -;; slli a3, a2, 0x20 -;; srli a5, a3, 0x20 -;; add a4, a4, a5 -;; lui a3, 0xffff -;; slli a5, a3, 4 -;; add a4, a4, a5 -;; lw a0, 0(a4) +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 +;; lui a1, 0xffff +;; slli a1, a1, 4 +;; add a0, a0, a1 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat index 696091388e3e..0671cdc17a69 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat @@ -23,11 +23,11 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a2, a0, 0x20 -;; add a1, a1, a2 -;; sb a3, 0(a1) +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -38,11 +38,11 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a2, a0, 0x20 -;; add a1, a1, a2 -;; lbu a0, 0(a1) +;; ld a0, 0x38(a0) +;; 
slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat index f83706a5465b..ecd3b838db4f 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -23,12 +23,12 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a2, a0, 0x20 -;; add a1, a1, a2 +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 ;; lui t6, 1 -;; add t6, t6, a1 +;; add t6, t6, a0 ;; sb a3, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -40,12 +40,12 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a1, 0x38(a0) -;; slli a0, a2, 0x20 -;; srli a2, a0, 0x20 -;; add a1, a1, a2 +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 ;; lui t6, 1 -;; add t6, t6, a1 +;; add t6, t6, a0 ;; lbu a0, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat index 3803cef1b4c6..80b5af74bcc5 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -23,14 +23,14 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x38(a0) -;; slli a5, a2, 0x20 -;; srli a5, a5, 0x20 -;; add a4, a4, a5 -;; lui a5, 0xffff -;; slli a5, a5, 4 -;; add a4, a4, a5 -;; sb a3, 0(a4) +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 +;; lui a1, 0xffff +;; slli a1, a1, 4 +;; add a0, a0, a1 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -41,14 +41,14 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; ld a4, 0x38(a0) -;; slli a3, a2, 0x20 -;; srli a5, a3, 0x20 -;; add a4, a4, a5 -;; lui a3, 0xffff -;; slli a5, a3, 4 -;; add a4, a4, a5 -;; lbu a0, 0(a4) +;; ld a0, 0x38(a0) +;; slli a1, a2, 0x20 +;; srli a1, a1, 0x20 +;; add a0, a0, a1 +;; lui a1, 0xffff +;; slli a1, a1, 4 +;; add a0, a0, a1 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat index 145881985365..6ca4d634a7a2 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat @@ -24,13 +24,13 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; lui a1, 0x40000 -;; addi a4, a1, -1 -;; slli a4, a4, 2 -;; bgeu a4, a2, 8 +;; addi a1, a1, -1 +;; slli a1, a1, 2 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a4, a4, a2 -;; sw a3, 0(a4) +;; ld a0, 
0x38(a0) +;; add a0, a0, a2 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -42,13 +42,13 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; lui a1, 0x40000 -;; addi a3, a1, -1 -;; slli a4, a3, 2 -;; bgeu a4, a2, 8 +;; addi a1, a1, -1 +;; slli a1, a1, 2 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a3, 0x38(a0) -;; add a3, a3, a2 -;; lw a0, 0(a3) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat index 104d3e5f31fd..1142623dc2c4 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat @@ -24,14 +24,14 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; lui a1, 0x40000 -;; addi a4, a1, -0x401 -;; slli a4, a4, 2 -;; bgeu a4, a2, 8 +;; addi a1, a1, -0x401 +;; slli a1, a1, 2 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a4, a4, a2 +;; ld a0, 0x38(a0) +;; add a0, a0, a2 ;; lui t6, 1 -;; add t6, t6, a4 +;; add t6, t6, a0 ;; sw a3, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -44,14 +44,14 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; lui a1, 0x40000 -;; addi a3, a1, -0x401 -;; slli a4, a3, 2 -;; bgeu a4, a2, 8 +;; addi a1, a1, -0x401 +;; slli a1, a1, 2 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a3, 0x38(a0) -;; add a3, a3, a2 +;; ld a0, 0x38(a0) +;; add a0, a0, a2 ;; lui t6, 1 -;; add t6, t6, a3 +;; add t6, t6, a0 ;; lw a0, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat index 47cc66bfd6c7..0cb02d68e2b8 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -23,16 +23,16 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a4, 0x10 -;; addi a5, a4, -4 -;; bgeu a5, a2, 8 +;; lui a1, 0x10 +;; addi a1, a1, -4 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a5, 0x38(a0) -;; add a5, a5, a2 -;; lui a4, 0xffff -;; slli a0, a4, 4 -;; add a5, a5, a0 -;; sw a3, 0(a5) +;; ld a1, 0x38(a0) +;; add a1, a1, a2 +;; lui a0, 0xffff +;; slli a2, a0, 4 +;; add a1, a1, a2 +;; sw a3, 0(a1) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -43,16 +43,16 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a3, 0x10 -;; addi a5, a3, -4 -;; bgeu a5, a2, 8 +;; lui a1, 0x10 +;; addi a1, a1, -4 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a5, 0x38(a0) -;; add a5, a5, a2 -;; lui a4, 0xffff -;; slli a0, a4, 4 -;; add a5, a5, a0 -;; lw a0, 0(a5) +;; ld a1, 0x38(a0) +;; add a1, a1, a2 +;; lui a0, 0xffff +;; slli a2, a0, 4 +;; add a1, a1, a2 +;; lw a0, 0(a1) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat index 
7da05ca0d9e9..b31486bb1ef2 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat @@ -27,9 +27,9 @@ ;; ld a1, 0x30(a1) ;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x38(a0) -;; add a1, a1, a2 -;; sb a3, 0(a1) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -47,9 +47,9 @@ ;; ld a1, 0x30(a1) ;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x38(a0) -;; add a1, a1, a2 -;; lbu a0, 0(a1) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat index 25e6ff5a153e..c47cc77f06f6 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat @@ -27,10 +27,10 @@ ;; ld a1, 0x38(a1) ;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x38(a0) -;; add a1, a1, a2 +;; ld a0, 0x38(a0) +;; add a0, a0, a2 ;; lui t6, 1 -;; add t6, t6, a1 +;; add t6, t6, a0 ;; sb a3, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -49,10 +49,10 @@ ;; ld a1, 0x38(a1) ;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x38(a0) -;; add a1, a1, a2 +;; ld a0, 0x38(a0) +;; add a0, a0, a2 ;; lui t6, 1 -;; add t6, t6, a1 +;; add t6, t6, a0 ;; lbu a0, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat index 7f6b1a964709..984f57973490 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -23,16 +23,16 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a4, 0x10 -;; addi a5, a4, -1 -;; bgeu a5, a2, 8 +;; lui a1, 0x10 +;; addi a1, a1, -1 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a5, 0x38(a0) -;; add a5, a5, a2 -;; lui a4, 0xffff -;; slli a0, a4, 4 -;; add a5, a5, a0 -;; sb a3, 0(a5) +;; ld a1, 0x38(a0) +;; add a1, a1, a2 +;; lui a0, 0xffff +;; slli a2, a0, 4 +;; add a1, a1, a2 +;; sb a3, 0(a1) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -43,16 +43,16 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a3, 0x10 -;; addi a5, a3, -1 -;; bgeu a5, a2, 8 +;; lui a1, 0x10 +;; addi a1, a1, -1 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a5, 0x38(a0) -;; add a5, a5, a2 -;; lui a4, 0xffff -;; slli a0, a4, 4 -;; add a5, a5, a0 -;; lbu a0, 0(a5) +;; ld a1, 0x38(a0) +;; add a1, a1, a2 +;; lui a0, 0xffff +;; slli a2, a0, 4 +;; add a1, a1, a2 +;; lbu a0, 0(a1) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat index 
888857ce0b81..1756dc4fd171 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat @@ -23,16 +23,16 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a4, 0x40000 -;; addi a1, a4, -1 -;; slli a4, a1, 2 -;; sltu a1, a4, a2 -;; ld a0, 0x38(a0) -;; add a0, a0, a2 -;; neg a4, a1 -;; not a1, a4 -;; and a2, a0, a1 -;; sw a3, 0(a2) +;; lui a1, 0x40000 +;; addi a4, a1, -1 +;; slli a4, a4, 2 +;; sltu a4, a4, a2 +;; ld a5, 0x38(a0) +;; add a2, a5, a2 +;; neg a0, a4 +;; not a4, a0 +;; and a4, a2, a4 +;; sw a3, 0(a4) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -43,16 +43,16 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a4, 0x40000 -;; addi a1, a4, -1 -;; slli a3, a1, 2 -;; sltu a1, a3, a2 -;; ld a0, 0x38(a0) -;; add a0, a0, a2 -;; neg a4, a1 -;; not a1, a4 -;; and a2, a0, a1 -;; lw a0, 0(a2) +;; lui a1, 0x40000 +;; addi a3, a1, -1 +;; slli a4, a3, 2 +;; sltu a3, a4, a2 +;; ld a4, 0x38(a0) +;; add a2, a4, a2 +;; neg a0, a3 +;; not a3, a0 +;; and a4, a2, a3 +;; lw a0, 0(a4) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat index 4d9d6c604e93..34ed7143a40b 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -23,18 +23,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a1, 0x40000 -;; addi a4, a1, -0x401 -;; slli a4, a4, 2 -;; sltu a4, a4, a2 +;; lui a4, 0x40000 +;; addi a4, a4, -0x401 +;; slli a1, a4, 2 +;; sltu a4, a1, a2 ;; ld a5, 0x38(a0) -;; add a2, a5, a2 -;; lui a5, 1 -;; add a2, a2, a5 -;; neg a0, a4 -;; not a4, a0 -;; and a4, a2, a4 -;; sw a3, 0(a4) +;; add a5, a5, a2 +;; lui a0, 1 +;; add a5, a5, a0 +;; neg a2, a4 +;; not a4, a2 +;; and a0, a5, a4 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -45,18 +45,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a1, 0x40000 -;; addi a3, a1, -0x401 -;; slli a4, a3, 2 -;; sltu a3, a4, a2 -;; ld a4, 0x38(a0) -;; add a2, a4, a2 -;; lui a4, 1 -;; add a2, a2, a4 -;; neg a0, a3 -;; not a3, a0 -;; and a4, a2, a3 -;; lw a0, 0(a4) +;; lui a3, 0x40000 +;; addi a4, a3, -0x401 +;; slli a1, a4, 2 +;; sltu a4, a1, a2 +;; ld a5, 0x38(a0) +;; add a5, a5, a2 +;; lui a0, 1 +;; add a5, a5, a0 +;; neg a2, a4 +;; not a4, a2 +;; and a0, a5, a4 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat index 9c8e748162ed..476264333e14 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -23,18 +23,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a1, 0x10 -;; addi a4, a1, -4 -;; sltu a4, a4, a2 +;; lui a4, 0x10 +;; addi a5, a4, -4 +;; sltu 
a4, a5, a2 ;; ld a5, 0x38(a0) -;; add a2, a5, a2 -;; lui a1, 0xffff -;; slli a5, a1, 4 -;; add a2, a2, a5 -;; neg a0, a4 -;; not a4, a0 -;; and a4, a2, a4 -;; sw a3, 0(a4) +;; add a5, a5, a2 +;; lui a0, 0xffff +;; slli a0, a0, 4 +;; add a5, a5, a0 +;; neg a2, a4 +;; not a4, a2 +;; and a0, a5, a4 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -45,18 +45,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a1, 0x10 -;; addi a3, a1, -4 -;; sltu a3, a3, a2 -;; ld a4, 0x38(a0) -;; add a2, a4, a2 -;; lui a1, 0xffff -;; slli a4, a1, 4 -;; add a2, a2, a4 -;; neg a0, a3 -;; not a3, a0 -;; and a4, a2, a3 -;; lw a0, 0(a4) +;; lui a3, 0x10 +;; addi a5, a3, -4 +;; sltu a4, a5, a2 +;; ld a5, 0x38(a0) +;; add a5, a5, a2 +;; lui a3, 0xffff +;; slli a0, a3, 4 +;; add a5, a5, a0 +;; neg a2, a4 +;; not a4, a2 +;; and a0, a5, a4 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat index c4666e4246ba..68c53e17b233 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat @@ -23,20 +23,20 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; auipc a4, 0 -;; ld a4, 0x38(a4) -;; sltu a4, a4, a2 -;; ld a5, 0x38(a0) -;; add a5, a5, a2 -;; neg a2, a4 -;; not a4, a2 -;; and a0, a5, a4 -;; sb a3, 0(a0) +;; mv a4, a0 +;; auipc a0, 0 +;; ld a0, 0x34(a0) +;; sltu a0, a0, a2 +;; ld a1, 0x38(a4) +;; add a1, a1, a2 +;; neg a0, a0 +;; not a0, a0 +;; and a2, a1, a0 +;; sb a3, 0(a2) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 ;; ret -;; .byte 0x00, 0x00, 0x00, 0x00 ;; .byte 0xff, 0xff, 0xff, 0xff ;; .byte 0x00, 0x00, 0x00, 0x00 ;; @@ -45,19 +45,19 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; auipc a4, 0 -;; ld a4, 0x38(a4) -;; sltu a4, a4, a2 -;; ld a5, 0x38(a0) -;; add a5, a5, a2 -;; neg a2, a4 -;; not a4, a2 -;; and a0, a5, a4 -;; lbu a0, 0(a0) +;; mv a3, a0 +;; auipc a0, 0 +;; ld a0, 0x34(a0) +;; sltu a0, a0, a2 +;; ld a1, 0x38(a3) +;; add a1, a1, a2 +;; neg a0, a0 +;; not a0, a0 +;; and a2, a1, a0 +;; lbu a0, 0(a2) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 ;; ret -;; .byte 0x00, 0x00, 0x00, 0x00 ;; .byte 0xff, 0xff, 0xff, 0xff ;; .byte 0x00, 0x00, 0x00, 0x00 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat index 86f6595e6de5..cd3217bb9e14 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -23,17 +23,17 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; auipc a1, 0 -;; ld a1, 0x40(a1) -;; sltu a4, a1, a2 -;; ld a0, 0x38(a0) -;; add a0, a0, a2 -;; lui a1, 1 -;; add a1, a0, a1 -;; neg a4, a4 -;; not a0, a4 -;; and a2, a1, a0 -;; sb a3, 0(a2) +;; auipc a4, 0 +;; ld a4, 0x40(a4) +;; sltu a4, a4, a2 +;; ld a5, 0x38(a0) +;; add a2, a5, a2 +;; lui a5, 1 +;; add a2, a2, a5 +;; neg a0, a4 +;; not a4, a0 +;; and a4, a2, a4 +;; sb a3, 0(a4) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 
0x10 @@ -47,17 +47,17 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; auipc a1, 0 -;; ld a1, 0x40(a1) -;; sltu a3, a1, a2 -;; ld a0, 0x38(a0) -;; add a0, a0, a2 -;; lui a1, 1 -;; add a1, a0, a1 -;; neg a4, a3 -;; not a0, a4 -;; and a2, a1, a0 -;; lbu a0, 0(a2) +;; auipc a3, 0 +;; ld a3, 0x40(a3) +;; sltu a3, a3, a2 +;; ld a4, 0x38(a0) +;; add a2, a4, a2 +;; lui a4, 1 +;; add a2, a2, a4 +;; neg a0, a3 +;; not a3, a0 +;; and a4, a2, a3 +;; lbu a0, 0(a4) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat index 4c3601b1a9de..9cce641a6ec5 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -23,18 +23,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a1, 0x10 -;; addi a4, a1, -1 -;; sltu a4, a4, a2 +;; lui a4, 0x10 +;; addi a5, a4, -1 +;; sltu a4, a5, a2 ;; ld a5, 0x38(a0) -;; add a2, a5, a2 -;; lui a1, 0xffff -;; slli a5, a1, 4 -;; add a2, a2, a5 -;; neg a0, a4 -;; not a4, a0 -;; and a4, a2, a4 -;; sb a3, 0(a4) +;; add a5, a5, a2 +;; lui a0, 0xffff +;; slli a0, a0, 4 +;; add a5, a5, a0 +;; neg a2, a4 +;; not a4, a2 +;; and a0, a5, a4 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -45,18 +45,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a1, 0x10 -;; addi a3, a1, -1 -;; sltu a3, a3, a2 -;; ld a4, 0x38(a0) -;; add a2, a4, a2 -;; lui a1, 0xffff -;; slli a4, a1, 4 -;; add a2, a2, a4 -;; neg a0, a3 -;; not a3, a0 -;; and a4, a2, a3 -;; lbu a0, 0(a4) +;; lui a3, 0x10 +;; addi a5, a3, -1 +;; sltu a4, a5, a2 +;; ld a5, 0x38(a0) +;; add a5, a5, a2 +;; lui a3, 0xffff +;; slli a0, a3, 4 +;; add a5, a5, a0 +;; neg a2, a4 +;; not a4, a2 +;; and a0, a5, a4 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat index 656e4d77e448..099e47a60aa6 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat @@ -24,13 +24,13 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; lui a1, 0x40000 -;; addi a4, a1, -1 -;; slli a4, a4, 2 -;; bgeu a4, a2, 8 +;; addi a1, a1, -1 +;; slli a1, a1, 2 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a4, a4, a2 -;; sw a3, 0(a4) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -42,13 +42,13 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; lui a1, 0x40000 -;; addi a3, a1, -1 -;; slli a4, a3, 2 -;; bgeu a4, a2, 8 +;; addi a1, a1, -1 +;; slli a1, a1, 2 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a3, 0x38(a0) -;; add a3, a3, a2 -;; lw a0, 0(a3) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git 
a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat index 05c0f301309a..8a7e47dc0dfc 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat @@ -24,14 +24,14 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; lui a1, 0x40000 -;; addi a4, a1, -0x401 -;; slli a4, a4, 2 -;; bgeu a4, a2, 8 +;; addi a1, a1, -0x401 +;; slli a1, a1, 2 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a4, 0x38(a0) -;; add a4, a4, a2 +;; ld a0, 0x38(a0) +;; add a0, a0, a2 ;; lui t6, 1 -;; add t6, t6, a4 +;; add t6, t6, a0 ;; sw a3, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -44,14 +44,14 @@ ;; sd s0, 0(sp) ;; mv s0, sp ;; lui a1, 0x40000 -;; addi a3, a1, -0x401 -;; slli a4, a3, 2 -;; bgeu a4, a2, 8 +;; addi a1, a1, -0x401 +;; slli a1, a1, 2 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a3, 0x38(a0) -;; add a3, a3, a2 +;; ld a0, 0x38(a0) +;; add a0, a0, a2 ;; lui t6, 1 -;; add t6, t6, a3 +;; add t6, t6, a0 ;; lw a0, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat index e33a9cf5afce..6284065eb6c6 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -23,16 +23,16 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a4, 0x10 -;; addi a5, a4, -4 -;; bgeu a5, a2, 8 +;; lui a1, 0x10 +;; addi a1, a1, -4 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a5, 0x38(a0) -;; add a5, a5, a2 -;; lui a4, 0xffff -;; slli a0, a4, 4 -;; add a5, a5, a0 -;; sw a3, 0(a5) +;; ld a1, 0x38(a0) +;; add a1, a1, a2 +;; lui a0, 0xffff +;; slli a2, a0, 4 +;; add a1, a1, a2 +;; sw a3, 0(a1) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -43,16 +43,16 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a3, 0x10 -;; addi a5, a3, -4 -;; bgeu a5, a2, 8 +;; lui a1, 0x10 +;; addi a1, a1, -4 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a5, 0x38(a0) -;; add a5, a5, a2 -;; lui a4, 0xffff -;; slli a0, a4, 4 -;; add a5, a5, a0 -;; lw a0, 0(a5) +;; ld a1, 0x38(a0) +;; add a1, a1, a2 +;; lui a0, 0xffff +;; slli a2, a0, 4 +;; add a1, a1, a2 +;; lw a0, 0(a1) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat index 23fa3a84e8a5..a103da674c5c 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat @@ -27,9 +27,9 @@ ;; ld a1, 0x30(a1) ;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x38(a0) -;; add a1, a1, a2 -;; sb a3, 0(a1) 
+;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -47,9 +47,9 @@ ;; ld a1, 0x30(a1) ;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x38(a0) -;; add a1, a1, a2 -;; lbu a0, 0(a1) +;; ld a0, 0x38(a0) +;; add a0, a0, a2 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat index 2711567cd1fd..5f63f7cee4c4 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat @@ -27,10 +27,10 @@ ;; ld a1, 0x38(a1) ;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x38(a0) -;; add a1, a1, a2 +;; ld a0, 0x38(a0) +;; add a0, a0, a2 ;; lui t6, 1 -;; add t6, t6, a1 +;; add t6, t6, a0 ;; sb a3, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) @@ -49,10 +49,10 @@ ;; ld a1, 0x38(a1) ;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a1, 0x38(a0) -;; add a1, a1, a2 +;; ld a0, 0x38(a0) +;; add a0, a0, a2 ;; lui t6, 1 -;; add t6, t6, a1 +;; add t6, t6, a0 ;; lbu a0, 0(t6) ;; ld ra, 8(sp) ;; ld s0, 0(sp) diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat index a79a01ca0f13..06453f0efd46 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -23,16 +23,16 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a4, 0x10 -;; addi a5, a4, -1 -;; bgeu a5, a2, 8 +;; lui a1, 0x10 +;; addi a1, a1, -1 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a5, 0x38(a0) -;; add a5, a5, a2 -;; lui a4, 0xffff -;; slli a0, a4, 4 -;; add a5, a5, a0 -;; sb a3, 0(a5) +;; ld a1, 0x38(a0) +;; add a1, a1, a2 +;; lui a0, 0xffff +;; slli a2, a0, 4 +;; add a1, a1, a2 +;; sb a3, 0(a1) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -43,16 +43,16 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a3, 0x10 -;; addi a5, a3, -1 -;; bgeu a5, a2, 8 +;; lui a1, 0x10 +;; addi a1, a1, -1 +;; bgeu a1, a2, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 -;; ld a5, 0x38(a0) -;; add a5, a5, a2 -;; lui a4, 0xffff -;; slli a0, a4, 4 -;; add a5, a5, a0 -;; lbu a0, 0(a5) +;; ld a1, 0x38(a0) +;; add a1, a1, a2 +;; lui a0, 0xffff +;; slli a2, a0, 4 +;; add a1, a1, a2 +;; lbu a0, 0(a1) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat index dbdf5a298078..b1162f668a73 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat @@ -23,16 +23,16 @@ ;; sd ra, 8(sp) 
;; sd s0, 0(sp) ;; mv s0, sp -;; lui a4, 0x40000 -;; addi a1, a4, -1 -;; slli a4, a1, 2 -;; sltu a1, a4, a2 -;; ld a0, 0x38(a0) -;; add a0, a0, a2 -;; neg a4, a1 -;; not a1, a4 -;; and a2, a0, a1 -;; sw a3, 0(a2) +;; lui a1, 0x40000 +;; addi a4, a1, -1 +;; slli a4, a4, 2 +;; sltu a4, a4, a2 +;; ld a5, 0x38(a0) +;; add a2, a5, a2 +;; neg a0, a4 +;; not a4, a0 +;; and a4, a2, a4 +;; sw a3, 0(a4) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -43,16 +43,16 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a4, 0x40000 -;; addi a1, a4, -1 -;; slli a3, a1, 2 -;; sltu a1, a3, a2 -;; ld a0, 0x38(a0) -;; add a0, a0, a2 -;; neg a4, a1 -;; not a1, a4 -;; and a2, a0, a1 -;; lw a0, 0(a2) +;; lui a1, 0x40000 +;; addi a3, a1, -1 +;; slli a4, a3, 2 +;; sltu a3, a4, a2 +;; ld a4, 0x38(a0) +;; add a2, a4, a2 +;; neg a0, a3 +;; not a3, a0 +;; and a4, a2, a3 +;; lw a0, 0(a4) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat index 2d8283581d93..f13e83f38f86 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -23,18 +23,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a1, 0x40000 -;; addi a4, a1, -0x401 -;; slli a4, a4, 2 -;; sltu a4, a4, a2 +;; lui a4, 0x40000 +;; addi a4, a4, -0x401 +;; slli a1, a4, 2 +;; sltu a4, a1, a2 ;; ld a5, 0x38(a0) -;; add a2, a5, a2 -;; lui a5, 1 -;; add a2, a2, a5 -;; neg a0, a4 -;; not a4, a0 -;; and a4, a2, a4 -;; sw a3, 0(a4) +;; add a5, a5, a2 +;; lui a0, 1 +;; add a5, a5, a0 +;; neg a2, a4 +;; not a4, a2 +;; and a0, a5, a4 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -45,18 +45,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a1, 0x40000 -;; addi a3, a1, -0x401 -;; slli a4, a3, 2 -;; sltu a3, a4, a2 -;; ld a4, 0x38(a0) -;; add a2, a4, a2 -;; lui a4, 1 -;; add a2, a2, a4 -;; neg a0, a3 -;; not a3, a0 -;; and a4, a2, a3 -;; lw a0, 0(a4) +;; lui a3, 0x40000 +;; addi a4, a3, -0x401 +;; slli a1, a4, 2 +;; sltu a4, a1, a2 +;; ld a5, 0x38(a0) +;; add a5, a5, a2 +;; lui a0, 1 +;; add a5, a5, a0 +;; neg a2, a4 +;; not a4, a2 +;; and a0, a5, a4 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat index a8400a6b5d1a..e15f35d7289d 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -23,18 +23,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a1, 0x10 -;; addi a4, a1, -4 -;; sltu a4, a4, a2 +;; lui a4, 0x10 +;; addi a5, a4, -4 +;; sltu a4, a5, a2 ;; ld a5, 0x38(a0) -;; add a2, a5, a2 -;; lui a1, 0xffff -;; slli a5, a1, 4 -;; add a2, a2, a5 -;; neg a0, a4 -;; not a4, a0 -;; and a4, a2, a4 -;; sw a3, 0(a4) +;; add a5, a5, a2 +;; lui a0, 0xffff +;; slli a0, 
a0, 4 +;; add a5, a5, a0 +;; neg a2, a4 +;; not a4, a2 +;; and a0, a5, a4 +;; sw a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -45,18 +45,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a1, 0x10 -;; addi a3, a1, -4 -;; sltu a3, a3, a2 -;; ld a4, 0x38(a0) -;; add a2, a4, a2 -;; lui a1, 0xffff -;; slli a4, a1, 4 -;; add a2, a2, a4 -;; neg a0, a3 -;; not a3, a0 -;; and a4, a2, a3 -;; lw a0, 0(a4) +;; lui a3, 0x10 +;; addi a5, a3, -4 +;; sltu a4, a5, a2 +;; ld a5, 0x38(a0) +;; add a5, a5, a2 +;; lui a3, 0xffff +;; slli a0, a3, 4 +;; add a5, a5, a0 +;; neg a2, a4 +;; not a4, a2 +;; and a0, a5, a4 +;; lw a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat index 40751faa3048..752beb8d24c2 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat @@ -23,20 +23,20 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; auipc a4, 0 -;; ld a4, 0x38(a4) -;; sltu a4, a4, a2 -;; ld a5, 0x38(a0) -;; add a5, a5, a2 -;; neg a2, a4 -;; not a4, a2 -;; and a0, a5, a4 -;; sb a3, 0(a0) +;; mv a4, a0 +;; auipc a0, 0 +;; ld a0, 0x34(a0) +;; sltu a0, a0, a2 +;; ld a1, 0x38(a4) +;; add a1, a1, a2 +;; neg a0, a0 +;; not a0, a0 +;; and a2, a1, a0 +;; sb a3, 0(a2) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 ;; ret -;; .byte 0x00, 0x00, 0x00, 0x00 ;; .byte 0xff, 0xff, 0xff, 0xff ;; .byte 0x00, 0x00, 0x00, 0x00 ;; @@ -45,19 +45,19 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; auipc a4, 0 -;; ld a4, 0x38(a4) -;; sltu a4, a4, a2 -;; ld a5, 0x38(a0) -;; add a5, a5, a2 -;; neg a2, a4 -;; not a4, a2 -;; and a0, a5, a4 -;; lbu a0, 0(a0) +;; mv a3, a0 +;; auipc a0, 0 +;; ld a0, 0x34(a0) +;; sltu a0, a0, a2 +;; ld a1, 0x38(a3) +;; add a1, a1, a2 +;; neg a0, a0 +;; not a0, a0 +;; and a2, a1, a0 +;; lbu a0, 0(a2) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 ;; ret -;; .byte 0x00, 0x00, 0x00, 0x00 ;; .byte 0xff, 0xff, 0xff, 0xff ;; .byte 0x00, 0x00, 0x00, 0x00 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat index 6a86dd32be4f..478c81d08358 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -23,17 +23,17 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; auipc a1, 0 -;; ld a1, 0x40(a1) -;; sltu a4, a1, a2 -;; ld a0, 0x38(a0) -;; add a0, a0, a2 -;; lui a1, 1 -;; add a1, a0, a1 -;; neg a4, a4 -;; not a0, a4 -;; and a2, a1, a0 -;; sb a3, 0(a2) +;; auipc a4, 0 +;; ld a4, 0x40(a4) +;; sltu a4, a4, a2 +;; ld a5, 0x38(a0) +;; add a2, a5, a2 +;; lui a5, 1 +;; add a2, a2, a5 +;; neg a0, a4 +;; not a4, a0 +;; and a4, a2, a4 +;; sb a3, 0(a4) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -47,17 +47,17 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; auipc a1, 0 -;; ld a1, 0x40(a1) -;; sltu a3, a1, a2 -;; ld a0, 0x38(a0) -;; 
add a0, a0, a2 -;; lui a1, 1 -;; add a1, a0, a1 -;; neg a4, a3 -;; not a0, a4 -;; and a2, a1, a0 -;; lbu a0, 0(a2) +;; auipc a3, 0 +;; ld a3, 0x40(a3) +;; sltu a3, a3, a2 +;; ld a4, 0x38(a0) +;; add a2, a4, a2 +;; lui a4, 1 +;; add a2, a2, a4 +;; neg a0, a3 +;; not a3, a0 +;; and a4, a2, a3 +;; lbu a0, 0(a4) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat index 00066f74c941..504198f4c47e 100644 --- a/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/riscv64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -23,18 +23,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a1, 0x10 -;; addi a4, a1, -1 -;; sltu a4, a4, a2 +;; lui a4, 0x10 +;; addi a5, a4, -1 +;; sltu a4, a5, a2 ;; ld a5, 0x38(a0) -;; add a2, a5, a2 -;; lui a1, 0xffff -;; slli a5, a1, 4 -;; add a2, a2, a5 -;; neg a0, a4 -;; not a4, a0 -;; and a4, a2, a4 -;; sb a3, 0(a4) +;; add a5, a5, a2 +;; lui a0, 0xffff +;; slli a0, a0, 4 +;; add a5, a5, a0 +;; neg a2, a4 +;; not a4, a2 +;; and a0, a5, a4 +;; sb a3, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -45,18 +45,18 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; lui a1, 0x10 -;; addi a3, a1, -1 -;; sltu a3, a3, a2 -;; ld a4, 0x38(a0) -;; add a2, a4, a2 -;; lui a1, 0xffff -;; slli a4, a1, 4 -;; add a2, a2, a4 -;; neg a0, a3 -;; not a3, a0 -;; and a4, a2, a3 -;; lbu a0, 0(a4) +;; lui a3, 0x10 +;; addi a5, a3, -1 +;; sltu a4, a5, a2 +;; ld a5, 0x38(a0) +;; add a5, a5, a2 +;; lui a3, 0xffff +;; slli a0, a3, 4 +;; add a5, a5, a0 +;; neg a2, a4 +;; not a4, a2 +;; and a0, a5, a4 +;; lbu a0, 0(a0) ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat index 7d2f692e8a7e..ec64806c0660 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat @@ -27,14 +27,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r6, %r4 -;; lg %r4, 0x40(%r2) -;; llgfr %r3, %r6 -;; aghi %r4, -4 -;; clgr %r3, %r4 -;; jgh 0x40 -;; lg %r6, 0x38(%r2) -;; strv %r5, 0(%r3, %r6) +;; lg %r3, 0x40(%r2) +;; llgfr %r7, %r4 +;; aghi %r3, -4 +;; clgr %r7, %r3 +;; jgh 0x3c +;; lg %r2, 0x38(%r2) +;; strv %r5, 0(%r7, %r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,13 +46,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r5, %r4 -;; lg %r4, 0x40(%r2) -;; llgfr %r3, %r5 -;; aghi %r4, -4 -;; clgr %r3, %r4 -;; jgh 0x98 -;; lg %r5, 0x38(%r2) -;; lrv %r2, 0(%r3, %r5) +;; lg %r3, 0x40(%r2) +;; llgfr %r7, %r4 +;; aghi %r3, -4 +;; clgr %r7, %r3 +;; jgh 0x90 +;; lg %r2, 0x38(%r2) +;; lrv %r2, 0(%r7, %r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat 
b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat index 126d25aa01af..bef7bedb2129 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat @@ -27,15 +27,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r6, %r4 -;; lg %r4, 0x40(%r2) -;; llgfr %r3, %r6 -;; aghi %r4, -0x1004 -;; clgr %r3, %r4 -;; jgh 0x40 -;; ag %r3, 0x38(%r2) -;; lghi %r6, 0x1000 -;; strv %r5, 0(%r6, %r3) +;; lg %r3, 0x40(%r2) +;; llgfr %r6, %r4 +;; aghi %r3, -0x1004 +;; clgr %r6, %r3 +;; jgh 0x3c +;; ag %r6, 0x38(%r2) +;; lghi %r2, 0x1000 +;; strv %r5, 0(%r2, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -48,14 +47,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r6, %r4 -;; lg %r4, 0x40(%r2) -;; llgfr %r3, %r6 -;; aghi %r4, -0x1004 -;; clgr %r3, %r4 -;; jgh 0x9c -;; ag %r3, 0x38(%r2) -;; lghi %r6, 0x1000 -;; lrv %r2, 0(%r6, %r3) +;; lg %r3, 0x40(%r2) +;; llgfr %r6, %r4 +;; aghi %r3, -0x1004 +;; clgr %r6, %r3 +;; jgh 0x94 +;; ag %r6, 0x38(%r2) +;; lghi %r2, 0x1000 +;; lrv %r2, 0(%r2, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat index 029395d23f9c..1c6cae09a5c4 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -27,17 +27,16 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; lgr %r3, %r4 -;; llilf %r4, 0xffff0004 -;; algfr %r4, %r3 -;; jgnle 0x3c -;; lg %r6, 0x40(%r2) -;; clgr %r4, %r6 -;; jgh 0x4c -;; ag %r7, 0x38(%r2) +;; llgfr %r6, %r4 +;; llilf %r3, 0xffff0004 +;; algfr %r3, %r4 +;; jgnle 0x38 +;; lg %r4, 0x40(%r2) +;; clgr %r3, %r4 +;; jgh 0x48 +;; ag %r6, 0x38(%r2) ;; llilh %r2, 0xffff -;; strv %r5, 0(%r2, %r7) +;; strv %r5, 0(%r2, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -50,16 +49,15 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; lgr %r3, %r4 -;; llilf %r4, 0xffff0004 -;; algfr %r4, %r3 -;; jgnle 0xa4 -;; lg %r6, 0x40(%r2) -;; clgr %r4, %r6 -;; jgh 0xb4 -;; ag %r7, 0x38(%r2) +;; llgfr %r5, %r4 +;; llilf %r3, 0xffff0004 +;; algfr %r3, %r4 +;; jgnle 0x9c +;; lg %r4, 0x40(%r2) +;; clgr %r3, %r4 +;; jgh 0xac +;; ag %r5, 0x38(%r2) ;; llilh %r2, 0xffff -;; lrv %r2, 0(%r2, %r7) +;; lrv %r2, 0(%r2, %r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat index 08707d119215..ae3d14588385 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat @@ -27,13 +27,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r6, %r4 -;; lg %r4, 0x40(%r2) -;; llgfr %r3, %r6 -;; clgr %r3, %r4 -;; jghe 0x3c -;; lg %r4, 0x38(%r2) -;; 
stc %r5, 0(%r3, %r4) +;; lg %r7, 0x40(%r2) +;; llgfr %r6, %r4 +;; clgr %r6, %r7 +;; jghe 0x38 +;; lg %r2, 0x38(%r2) +;; stc %r5, 0(%r6, %r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -46,12 +45,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r5, %r4 -;; lg %r4, 0x40(%r2) -;; llgfr %r3, %r5 -;; clgr %r3, %r4 -;; jghe 0x90 -;; lg %r4, 0x38(%r2) -;; llc %r2, 0(%r3, %r4) +;; lg %r7, 0x40(%r2) +;; llgfr %r6, %r4 +;; clgr %r6, %r7 +;; jghe 0x88 +;; lg %r2, 0x38(%r2) +;; llc %r2, 0(%r6, %r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat index 6995dbe147ea..c28dce417ead 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat @@ -27,15 +27,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r6, %r4 -;; lg %r4, 0x40(%r2) -;; llgfr %r3, %r6 -;; aghi %r4, -0x1001 -;; clgr %r3, %r4 -;; jgh 0x40 -;; ag %r3, 0x38(%r2) -;; lghi %r6, 0x1000 -;; stc %r5, 0(%r6, %r3) +;; lg %r3, 0x40(%r2) +;; llgfr %r6, %r4 +;; aghi %r3, -0x1001 +;; clgr %r6, %r3 +;; jgh 0x3c +;; ag %r6, 0x38(%r2) +;; lghi %r2, 0x1000 +;; stc %r5, 0(%r2, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -48,14 +47,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r6, %r4 -;; lg %r4, 0x40(%r2) -;; llgfr %r3, %r6 -;; aghi %r4, -0x1001 -;; clgr %r3, %r4 -;; jgh 0x9c -;; ag %r3, 0x38(%r2) -;; lghi %r6, 0x1000 -;; llc %r2, 0(%r6, %r3) +;; lg %r3, 0x40(%r2) +;; llgfr %r6, %r4 +;; aghi %r3, -0x1001 +;; clgr %r6, %r3 +;; jgh 0x94 +;; ag %r6, 0x38(%r2) +;; lghi %r2, 0x1000 +;; llc %r2, 0(%r2, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat index fcfdd59715ff..018b4d914ec7 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -27,17 +27,16 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; lgr %r3, %r4 -;; llilf %r4, 0xffff0001 -;; algfr %r4, %r3 -;; jgnle 0x3c -;; lg %r6, 0x40(%r2) -;; clgr %r4, %r6 -;; jgh 0x4c -;; ag %r7, 0x38(%r2) +;; llgfr %r6, %r4 +;; llilf %r3, 0xffff0001 +;; algfr %r3, %r4 +;; jgnle 0x38 +;; lg %r4, 0x40(%r2) +;; clgr %r3, %r4 +;; jgh 0x48 +;; ag %r6, 0x38(%r2) ;; llilh %r2, 0xffff -;; stc %r5, 0(%r2, %r7) +;; stc %r5, 0(%r2, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -50,16 +49,15 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; lgr %r3, %r4 -;; llilf %r4, 0xffff0001 -;; algfr %r4, %r3 -;; jgnle 0xa4 -;; lg %r6, 0x40(%r2) -;; clgr %r4, %r6 -;; jgh 0xb4 -;; ag %r7, 0x38(%r2) +;; llgfr %r5, %r4 +;; llilf %r3, 0xffff0001 +;; algfr %r3, %r4 +;; jgnle 0x9c +;; lg %r4, 0x40(%r2) +;; clgr %r3, %r4 +;; jgh 0xac +;; ag %r5, 0x38(%r2) ;; llilh %r2, 0xffff -;; llc %r2, 0(%r2, %r7) +;; llc %r2, 0(%r2, %r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff 
--git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat index 9b7464e25e7d..a3dc3ac91d25 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat @@ -27,15 +27,16 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x40(%r2) -;; llgfr %r6, %r4 -;; aghik %r3, %r7, -4 -;; lghi %r7, 0 -;; lgr %r4, %r6 -;; ag %r4, 0x38(%r2) -;; clgr %r6, %r3 -;; locgrh %r4, %r7 -;; strv %r5, 0(%r4) +;; lgr %r3, %r4 +;; lg %r4, 0x40(%r2) +;; llgfr %r3, %r3 +;; aghik %r6, %r4, -4 +;; lghi %r4, 0 +;; lgr %r7, %r3 +;; ag %r7, 0x38(%r2) +;; clgr %r3, %r6 +;; locgrh %r7, %r4 +;; strv %r5, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -48,14 +49,15 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r6, 0x40(%r2) -;; llgfr %r5, %r4 -;; aghik %r7, %r6, -4 -;; lghi %r6, 0 -;; lgr %r4, %r5 -;; ag %r4, 0x38(%r2) -;; clgr %r5, %r7 -;; locgrh %r4, %r6 -;; lrv %r2, 0(%r4) +;; lgr %r3, %r4 +;; lg %r4, 0x40(%r2) +;; llgfr %r3, %r3 +;; aghik %r5, %r4, -4 +;; lghi %r4, 0 +;; lgr %r6, %r3 +;; ag %r6, 0x38(%r2) +;; clgr %r3, %r5 +;; locgrh %r6, %r4 +;; lrv %r2, 0(%r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat index 091ba214b8fd..3399c24f20e3 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -27,15 +27,16 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x40(%r2) -;; llgfr %r6, %r4 -;; aghik %r3, %r7, -0x1004 -;; lghi %r7, 0 -;; lgr %r4, %r6 -;; ag %r4, 0x38(%r2) -;; aghik %r2, %r4, 0x1000 -;; clgr %r6, %r3 -;; locgrh %r2, %r7 +;; lgr %r3, %r4 +;; lg %r4, 0x40(%r2) +;; llgfr %r3, %r3 +;; aghik %r6, %r4, -0x1004 +;; lghi %r4, 0 +;; lgr %r7, %r3 +;; ag %r7, 0x38(%r2) +;; aghik %r2, %r7, 0x1000 +;; clgr %r3, %r6 +;; locgrh %r2, %r4 ;; strv %r5, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 @@ -49,15 +50,16 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x40(%r2) -;; llgfr %r6, %r4 -;; aghik %r3, %r7, -0x1004 -;; lghi %r7, 0 -;; lgr %r4, %r6 -;; ag %r4, 0x38(%r2) -;; aghik %r5, %r4, 0x1000 -;; clgr %r6, %r3 -;; locgrh %r5, %r7 -;; lrv %r2, 0(%r5) +;; lgr %r3, %r4 +;; lg %r4, 0x40(%r2) +;; llgfr %r3, %r3 +;; aghik %r5, %r4, -0x1004 +;; lghi %r4, 0 +;; lgr %r6, %r3 +;; ag %r6, 0x38(%r2) +;; aghik %r2, %r6, 0x1000 +;; clgr %r3, %r5 +;; locgrh %r2, %r4 +;; lrv %r2, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat index ea8fd5288c25..8bb5b6f50c94 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ 
b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -27,18 +27,20 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r3, %r4 -;; llilf %r7, 0xffff0004 -;; algfr %r7, %r4 -;; jgnle 0x38 -;; lg %r6, 0x40(%r2) -;; lghi %r4, 0 -;; ag %r3, 0x38(%r2) -;; llilh %r2, 0xffff -;; agrk %r2, %r3, %r2 -;; clgr %r7, %r6 -;; locgrh %r2, %r4 -;; strv %r5, 0(%r2) +;; lgr %r3, %r2 +;; llgfr %r7, %r4 +;; llilf %r2, 0xffff0004 +;; algfr %r2, %r4 +;; jgnle 0x3c +;; lgr %r6, %r3 +;; lg %r4, 0x40(%r6) +;; lghi %r3, 0 +;; ag %r7, 0x38(%r6) +;; llilh %r6, 0xffff +;; agrk %r6, %r7, %r6 +;; clgr %r2, %r4 +;; locgrh %r6, %r3 +;; strv %r5, 0(%r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -51,17 +53,19 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r3, %r4 -;; llilf %r7, 0xffff0004 -;; algfr %r7, %r4 -;; jgnle 0xa4 -;; lg %r5, 0x40(%r2) -;; lghi %r4, 0 -;; ag %r3, 0x38(%r2) -;; llilh %r6, 0xffff -;; agrk %r2, %r3, %r6 -;; clgr %r7, %r5 -;; locgrh %r2, %r4 -;; lrv %r2, 0(%r2) +;; lgr %r3, %r2 +;; llgfr %r7, %r4 +;; llilf %r2, 0xffff0004 +;; algfr %r2, %r4 +;; jgnle 0xb0 +;; lgr %r5, %r3 +;; lg %r4, 0x40(%r5) +;; lghi %r3, 0 +;; ag %r7, 0x38(%r5) +;; llilh %r5, 0xffff +;; agrk %r5, %r7, %r5 +;; clgr %r2, %r4 +;; locgrh %r5, %r3 +;; lrv %r2, 0(%r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat index 489224398cad..947ccb123ea4 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat @@ -23,19 +23,19 @@ ;; lg %r1, 0x18(%r1) ;; la %r1, 0xa0(%r1) ;; clgrtle %r15, %r1 -;; stmg %r9, %r15, 0x48(%r15) +;; stmg %r14, %r15, 0x70(%r15) ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r9, 0x40(%r2) -;; llgfr %r7, %r4 -;; lghi %r6, 0 -;; lgr %r3, %r7 -;; ag %r3, 0x38(%r2) -;; clgr %r7, %r9 -;; locgrhe %r3, %r6 -;; stc %r5, 0(%r3) -;; lmg %r9, %r15, 0xe8(%r15) +;; lg %r3, 0x40(%r2) +;; llgfr %r6, %r4 +;; lghi %r4, 0 +;; lgr %r7, %r6 +;; ag %r7, 0x38(%r2) +;; clgr %r6, %r3 +;; locgrhe %r7, %r4 +;; stc %r5, 0(%r7) +;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; ;; wasm[0]::function[1]: @@ -47,13 +47,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x40(%r2) -;; llgfr %r6, %r4 -;; lghi %r5, 0 -;; lgr %r3, %r6 -;; ag %r3, 0x38(%r2) -;; clgr %r6, %r7 -;; locgrhe %r3, %r5 -;; llc %r2, 0(%r3) +;; lg %r3, 0x40(%r2) +;; llgfr %r5, %r4 +;; lghi %r4, 0 +;; lgr %r7, %r5 +;; ag %r7, 0x38(%r2) +;; clgr %r5, %r3 +;; locgrhe %r7, %r4 +;; llc %r2, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat index 0092ff94edae..9c3a9f6d2a68 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -27,15 +27,16 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x40(%r2) -;; llgfr 
%r6, %r4 -;; aghik %r3, %r7, -0x1001 -;; lghi %r7, 0 -;; lgr %r4, %r6 -;; ag %r4, 0x38(%r2) -;; aghik %r2, %r4, 0x1000 -;; clgr %r6, %r3 -;; locgrh %r2, %r7 +;; lgr %r3, %r4 +;; lg %r4, 0x40(%r2) +;; llgfr %r3, %r3 +;; aghik %r6, %r4, -0x1001 +;; lghi %r4, 0 +;; lgr %r7, %r3 +;; ag %r7, 0x38(%r2) +;; aghik %r2, %r7, 0x1000 +;; clgr %r3, %r6 +;; locgrh %r2, %r4 ;; stc %r5, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 @@ -49,15 +50,16 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x40(%r2) -;; llgfr %r6, %r4 -;; aghik %r3, %r7, -0x1001 -;; lghi %r7, 0 -;; lgr %r4, %r6 -;; ag %r4, 0x38(%r2) -;; aghik %r5, %r4, 0x1000 -;; clgr %r6, %r3 -;; locgrh %r5, %r7 -;; llc %r2, 0(%r5) +;; lgr %r3, %r4 +;; lg %r4, 0x40(%r2) +;; llgfr %r3, %r3 +;; aghik %r5, %r4, -0x1001 +;; lghi %r4, 0 +;; lgr %r6, %r3 +;; ag %r6, 0x38(%r2) +;; aghik %r2, %r6, 0x1000 +;; clgr %r3, %r5 +;; locgrh %r2, %r4 +;; llc %r2, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat index 44e214cd501c..4097a5e53e65 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -27,18 +27,20 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r3, %r4 -;; llilf %r7, 0xffff0001 -;; algfr %r7, %r4 -;; jgnle 0x38 -;; lg %r6, 0x40(%r2) -;; lghi %r4, 0 -;; ag %r3, 0x38(%r2) -;; llilh %r2, 0xffff -;; agrk %r2, %r3, %r2 -;; clgr %r7, %r6 -;; locgrh %r2, %r4 -;; stc %r5, 0(%r2) +;; lgr %r3, %r2 +;; llgfr %r7, %r4 +;; llilf %r2, 0xffff0001 +;; algfr %r2, %r4 +;; jgnle 0x3c +;; lgr %r6, %r3 +;; lg %r4, 0x40(%r6) +;; lghi %r3, 0 +;; ag %r7, 0x38(%r6) +;; llilh %r6, 0xffff +;; agrk %r6, %r7, %r6 +;; clgr %r2, %r4 +;; locgrh %r6, %r3 +;; stc %r5, 0(%r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -51,17 +53,19 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r3, %r4 -;; llilf %r7, 0xffff0001 -;; algfr %r7, %r4 -;; jgnle 0xa0 -;; lg %r5, 0x40(%r2) -;; lghi %r4, 0 -;; ag %r3, 0x38(%r2) -;; llilh %r6, 0xffff -;; agrk %r2, %r3, %r6 -;; clgr %r7, %r5 -;; locgrh %r2, %r4 -;; llc %r2, 0(%r2) +;; lgr %r3, %r2 +;; llgfr %r7, %r4 +;; llilf %r2, 0xffff0001 +;; algfr %r2, %r4 +;; jgnle 0xac +;; lgr %r5, %r3 +;; lg %r4, 0x40(%r5) +;; lghi %r3, 0 +;; ag %r7, 0x38(%r5) +;; llilh %r5, 0xffff +;; agrk %r5, %r7, %r5 +;; clgr %r2, %r4 +;; locgrh %r5, %r3 +;; llc %r2, 0(%r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat index 486c049e29c1..bb5bbaa59520 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat @@ -27,13 +27,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r6, %r4 -;; lg %r4, 0x40(%r2) -;; llgfr %r3, %r6 -;; clgr %r3, %r4 -;; jgh 0x3c -;; lg %r4, 0x38(%r2) -;; strv %r5, 0(%r3, %r4) +;; lg %r7, 0x40(%r2) +;; llgfr 
%r6, %r4 +;; clgr %r6, %r7 +;; jgh 0x38 +;; lg %r2, 0x38(%r2) +;; strv %r5, 0(%r6, %r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -46,12 +45,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r5, %r4 -;; lg %r4, 0x40(%r2) -;; llgfr %r3, %r5 -;; clgr %r3, %r4 -;; jgh 0x90 -;; lg %r4, 0x38(%r2) -;; lrv %r2, 0(%r3, %r4) +;; lg %r7, 0x40(%r2) +;; llgfr %r6, %r4 +;; clgr %r6, %r7 +;; jgh 0x88 +;; lg %r2, 0x38(%r2) +;; lrv %r2, 0(%r6, %r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat index 28cae9ae49c0..159381d67c72 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat @@ -27,14 +27,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r6, %r4 -;; lg %r4, 0x40(%r2) -;; llgfr %r3, %r6 -;; clgr %r3, %r4 -;; jgh 0x3c -;; ag %r3, 0x38(%r2) -;; lghi %r6, 0x1000 -;; strv %r5, 0(%r6, %r3) +;; lg %r7, 0x40(%r2) +;; llgfr %r6, %r4 +;; clgr %r6, %r7 +;; jgh 0x38 +;; ag %r6, 0x38(%r2) +;; lghi %r2, 0x1000 +;; strv %r5, 0(%r2, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,13 +46,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r5, %r4 -;; lg %r4, 0x40(%r2) -;; llgfr %r3, %r5 -;; clgr %r3, %r4 -;; jgh 0x94 -;; ag %r3, 0x38(%r2) -;; lghi %r5, 0x1000 -;; lrv %r2, 0(%r5, %r3) +;; lg %r7, 0x40(%r2) +;; llgfr %r6, %r4 +;; clgr %r6, %r7 +;; jgh 0x8c +;; ag %r6, 0x38(%r2) +;; lghi %r2, 0x1000 +;; lrv %r2, 0(%r2, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat index 8b44d717b6ea..483d35294341 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -27,14 +27,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r6, %r4 -;; lg %r4, 0x40(%r2) -;; llgfr %r3, %r6 -;; clgr %r3, %r4 -;; jgh 0x3c -;; ag %r3, 0x38(%r2) -;; llilh %r6, 0xffff -;; strv %r5, 0(%r6, %r3) +;; lg %r7, 0x40(%r2) +;; llgfr %r6, %r4 +;; clgr %r6, %r7 +;; jgh 0x38 +;; ag %r6, 0x38(%r2) +;; llilh %r2, 0xffff +;; strv %r5, 0(%r2, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,13 +46,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r5, %r4 -;; lg %r4, 0x40(%r2) -;; llgfr %r3, %r5 -;; clgr %r3, %r4 -;; jgh 0x94 -;; ag %r3, 0x38(%r2) -;; llilh %r5, 0xffff -;; lrv %r2, 0(%r5, %r3) +;; lg %r7, 0x40(%r2) +;; llgfr %r6, %r4 +;; clgr %r6, %r7 +;; jgh 0x8c +;; ag %r6, 0x38(%r2) +;; llilh %r2, 0xffff +;; lrv %r2, 0(%r2, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat index 
b3b103dac536..82f549ee8a69 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat @@ -27,13 +27,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r6, %r4 -;; lg %r4, 0x40(%r2) -;; llgfr %r3, %r6 -;; clgr %r3, %r4 -;; jghe 0x3c -;; lg %r4, 0x38(%r2) -;; stc %r5, 0(%r3, %r4) +;; lg %r7, 0x40(%r2) +;; llgfr %r6, %r4 +;; clgr %r6, %r7 +;; jghe 0x38 +;; lg %r2, 0x38(%r2) +;; stc %r5, 0(%r6, %r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -46,12 +45,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r5, %r4 -;; lg %r4, 0x40(%r2) -;; llgfr %r3, %r5 -;; clgr %r3, %r4 -;; jghe 0x90 -;; lg %r4, 0x38(%r2) -;; llc %r2, 0(%r3, %r4) +;; lg %r7, 0x40(%r2) +;; llgfr %r6, %r4 +;; clgr %r6, %r7 +;; jghe 0x88 +;; lg %r2, 0x38(%r2) +;; llc %r2, 0(%r6, %r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat index 24a8d6d9fca8..2ba47791c56e 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat @@ -27,14 +27,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r6, %r4 -;; lg %r4, 0x40(%r2) -;; llgfr %r3, %r6 -;; clgr %r3, %r4 -;; jgh 0x3c -;; ag %r3, 0x38(%r2) -;; lghi %r6, 0x1000 -;; stc %r5, 0(%r6, %r3) +;; lg %r7, 0x40(%r2) +;; llgfr %r6, %r4 +;; clgr %r6, %r7 +;; jgh 0x38 +;; ag %r6, 0x38(%r2) +;; lghi %r2, 0x1000 +;; stc %r5, 0(%r2, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,13 +46,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r5, %r4 -;; lg %r4, 0x40(%r2) -;; llgfr %r3, %r5 -;; clgr %r3, %r4 -;; jgh 0x94 -;; ag %r3, 0x38(%r2) -;; lghi %r5, 0x1000 -;; llc %r2, 0(%r5, %r3) +;; lg %r7, 0x40(%r2) +;; llgfr %r6, %r4 +;; clgr %r6, %r7 +;; jgh 0x8c +;; ag %r6, 0x38(%r2) +;; lghi %r2, 0x1000 +;; llc %r2, 0(%r2, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat index 6e2640354142..0176c1bcca5f 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -27,14 +27,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r6, %r4 -;; lg %r4, 0x40(%r2) -;; llgfr %r3, %r6 -;; clgr %r3, %r4 -;; jgh 0x3c -;; ag %r3, 0x38(%r2) -;; llilh %r6, 0xffff -;; stc %r5, 0(%r6, %r3) +;; lg %r7, 0x40(%r2) +;; llgfr %r6, %r4 +;; clgr %r6, %r7 +;; jgh 0x38 +;; ag %r6, 0x38(%r2) +;; llilh %r2, 0xffff +;; stc %r5, 0(%r2, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,13 +46,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r5, %r4 -;; lg %r4, 0x40(%r2) -;; llgfr %r3, %r5 -;; clgr %r3, %r4 -;; jgh 0x94 
-;; ag %r3, 0x38(%r2) -;; llilh %r5, 0xffff -;; llc %r2, 0(%r5, %r3) +;; lg %r7, 0x40(%r2) +;; llgfr %r6, %r4 +;; clgr %r6, %r7 +;; jgh 0x8c +;; ag %r6, 0x38(%r2) +;; llilh %r2, 0xffff +;; llc %r2, 0(%r2, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat index a5ababf4854d..477c6d747e29 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat @@ -23,19 +23,19 @@ ;; lg %r1, 0x18(%r1) ;; la %r1, 0xa0(%r1) ;; clgrtle %r15, %r1 -;; stmg %r9, %r15, 0x48(%r15) +;; stmg %r14, %r15, 0x70(%r15) ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r9, 0x40(%r2) -;; llgfr %r7, %r4 -;; lghi %r6, 0 -;; lgr %r3, %r7 -;; ag %r3, 0x38(%r2) -;; clgr %r7, %r9 -;; locgrh %r3, %r6 -;; strv %r5, 0(%r3) -;; lmg %r9, %r15, 0xe8(%r15) +;; lg %r3, 0x40(%r2) +;; llgfr %r6, %r4 +;; lghi %r4, 0 +;; lgr %r7, %r6 +;; ag %r7, 0x38(%r2) +;; clgr %r6, %r3 +;; locgrh %r7, %r4 +;; strv %r5, 0(%r7) +;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; ;; wasm[0]::function[1]: @@ -47,13 +47,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x40(%r2) -;; llgfr %r6, %r4 -;; lghi %r5, 0 -;; lgr %r3, %r6 -;; ag %r3, 0x38(%r2) -;; clgr %r6, %r7 -;; locgrh %r3, %r5 -;; lrv %r2, 0(%r3) +;; lg %r3, 0x40(%r2) +;; llgfr %r5, %r4 +;; lghi %r4, 0 +;; lgr %r7, %r5 +;; ag %r7, 0x38(%r2) +;; clgr %r5, %r3 +;; locgrh %r7, %r4 +;; lrv %r2, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat index ef00afabd823..735ae82e1a83 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -27,15 +27,15 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r6, 0x40(%r2) -;; llgfr %r3, %r4 -;; lghi %r7, 0 -;; lgr %r4, %r3 +;; lg %r7, 0x40(%r2) +;; llgfr %r6, %r4 +;; lghi %r3, 0 +;; lgr %r4, %r6 ;; ag %r4, 0x38(%r2) -;; aghi %r4, 0x1000 -;; clgr %r3, %r6 -;; locgrh %r4, %r7 -;; strv %r5, 0(%r4) +;; aghik %r2, %r4, 0x1000 +;; clgr %r6, %r7 +;; locgrh %r2, %r3 +;; strv %r5, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -48,14 +48,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r5, 0x40(%r2) -;; llgfr %r7, %r4 -;; lghi %r6, 0 -;; lgr %r3, %r7 -;; ag %r3, 0x38(%r2) -;; aghik %r4, %r3, 0x1000 -;; clgr %r7, %r5 -;; locgrh %r4, %r6 -;; lrv %r2, 0(%r4) +;; lg %r6, 0x40(%r2) +;; llgfr %r5, %r4 +;; lghi %r3, 0 +;; lgr %r4, %r5 +;; ag %r4, 0x38(%r2) +;; aghik %r2, %r4, 0x1000 +;; clgr %r5, %r6 +;; locgrh %r2, %r3 +;; lrv %r2, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat 
b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat index b1e365e77eca..fbb21100e280 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -28,17 +28,15 @@ ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) ;; lg %r6, 0x40(%r2) -;; lgr %r3, %r2 -;; llgfr %r2, %r4 -;; lghi %r7, 0 -;; lgr %r4, %r3 -;; lgr %r3, %r2 -;; ag %r3, 0x38(%r4) -;; llilh %r4, 0xffff -;; agr %r3, %r4 -;; clgr %r2, %r6 -;; locgrh %r3, %r7 -;; strv %r5, 0(%r3) +;; llgfr %r4, %r4 +;; lghi %r3, 0 +;; lgr %r7, %r4 +;; ag %r7, 0x38(%r2) +;; llilh %r2, 0xffff +;; agrk %r2, %r7, %r2 +;; clgr %r4, %r6 +;; locgrh %r2, %r3 +;; strv %r5, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -51,17 +49,15 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r6, 0x40(%r2) -;; lgr %r3, %r2 -;; llgfr %r2, %r4 -;; lghi %r7, 0 -;; lgr %r4, %r3 -;; lgr %r3, %r2 -;; ag %r3, 0x38(%r4) -;; llilh %r4, 0xffff -;; agrk %r5, %r3, %r4 -;; clgr %r2, %r6 -;; locgrh %r5, %r7 -;; lrv %r2, 0(%r5) +;; lg %r5, 0x40(%r2) +;; llgfr %r4, %r4 +;; lghi %r3, 0 +;; lgr %r6, %r4 +;; ag %r6, 0x38(%r2) +;; llilh %r2, 0xffff +;; agrk %r2, %r6, %r2 +;; clgr %r4, %r5 +;; locgrh %r2, %r3 +;; lrv %r2, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat index 3a600b12cc05..0cfa47715727 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat @@ -23,19 +23,19 @@ ;; lg %r1, 0x18(%r1) ;; la %r1, 0xa0(%r1) ;; clgrtle %r15, %r1 -;; stmg %r9, %r15, 0x48(%r15) +;; stmg %r14, %r15, 0x70(%r15) ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r9, 0x40(%r2) -;; llgfr %r7, %r4 -;; lghi %r6, 0 -;; lgr %r3, %r7 -;; ag %r3, 0x38(%r2) -;; clgr %r7, %r9 -;; locgrhe %r3, %r6 -;; stc %r5, 0(%r3) -;; lmg %r9, %r15, 0xe8(%r15) +;; lg %r3, 0x40(%r2) +;; llgfr %r6, %r4 +;; lghi %r4, 0 +;; lgr %r7, %r6 +;; ag %r7, 0x38(%r2) +;; clgr %r6, %r3 +;; locgrhe %r7, %r4 +;; stc %r5, 0(%r7) +;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; ;; wasm[0]::function[1]: @@ -47,13 +47,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x40(%r2) -;; llgfr %r6, %r4 -;; lghi %r5, 0 -;; lgr %r3, %r6 -;; ag %r3, 0x38(%r2) -;; clgr %r6, %r7 -;; locgrhe %r3, %r5 -;; llc %r2, 0(%r3) +;; lg %r3, 0x40(%r2) +;; llgfr %r5, %r4 +;; lghi %r4, 0 +;; lgr %r7, %r5 +;; ag %r7, 0x38(%r2) +;; clgr %r5, %r3 +;; locgrhe %r7, %r4 +;; llc %r2, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat index a9fce097c391..57bfa481b1f0 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat +++ 
b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -27,15 +27,15 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r6, 0x40(%r2) -;; llgfr %r3, %r4 -;; lghi %r7, 0 -;; lgr %r4, %r3 +;; lg %r7, 0x40(%r2) +;; llgfr %r6, %r4 +;; lghi %r3, 0 +;; lgr %r4, %r6 ;; ag %r4, 0x38(%r2) -;; aghi %r4, 0x1000 -;; clgr %r3, %r6 -;; locgrh %r4, %r7 -;; stc %r5, 0(%r4) +;; aghik %r2, %r4, 0x1000 +;; clgr %r6, %r7 +;; locgrh %r2, %r3 +;; stc %r5, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -48,14 +48,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r5, 0x40(%r2) -;; llgfr %r7, %r4 -;; lghi %r6, 0 -;; lgr %r3, %r7 -;; ag %r3, 0x38(%r2) -;; aghik %r4, %r3, 0x1000 -;; clgr %r7, %r5 -;; locgrh %r4, %r6 -;; llc %r2, 0(%r4) +;; lg %r6, 0x40(%r2) +;; llgfr %r5, %r4 +;; lghi %r3, 0 +;; lgr %r4, %r5 +;; ag %r4, 0x38(%r2) +;; aghik %r2, %r4, 0x1000 +;; clgr %r5, %r6 +;; locgrh %r2, %r3 +;; llc %r2, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat index ff7597bade8e..ce77cf7a0c98 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -28,17 +28,15 @@ ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) ;; lg %r6, 0x40(%r2) -;; lgr %r3, %r2 -;; llgfr %r2, %r4 -;; lghi %r7, 0 -;; lgr %r4, %r3 -;; lgr %r3, %r2 -;; ag %r3, 0x38(%r4) -;; llilh %r4, 0xffff -;; agr %r3, %r4 -;; clgr %r2, %r6 -;; locgrh %r3, %r7 -;; stc %r5, 0(%r3) +;; llgfr %r4, %r4 +;; lghi %r3, 0 +;; lgr %r7, %r4 +;; ag %r7, 0x38(%r2) +;; llilh %r2, 0xffff +;; agrk %r2, %r7, %r2 +;; clgr %r4, %r6 +;; locgrh %r2, %r3 +;; stc %r5, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -51,17 +49,15 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r6, 0x40(%r2) -;; lgr %r3, %r2 -;; llgfr %r2, %r4 -;; lghi %r7, 0 -;; lgr %r4, %r3 -;; lgr %r3, %r2 -;; ag %r3, 0x38(%r4) -;; llilh %r4, 0xffff -;; agrk %r5, %r3, %r4 -;; clgr %r2, %r6 -;; locgrh %r5, %r7 -;; llc %r2, 0(%r5) +;; lg %r5, 0x40(%r2) +;; llgfr %r4, %r4 +;; lghi %r3, 0 +;; lgr %r6, %r4 +;; ag %r6, 0x38(%r2) +;; llilh %r2, 0xffff +;; agrk %r2, %r6, %r2 +;; clgr %r4, %r5 +;; locgrh %r2, %r3 +;; llc %r2, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat index ca13fc96b993..09e6aa539608 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat @@ -27,12 +27,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r3, 0x40(%r2) -;; aghi %r3, -4 -;; clgr %r4, %r3 +;; lg %r6, 0x40(%r2) +;; aghi %r6, -4 +;; clgr %r4, %r6 ;; jgh 0x38 -;; lg %r6, 0x38(%r2) -;; strv %r5, 0(%r4, %r6) +;; lg %r2, 0x38(%r2) +;; strv %r5, 0(%r4, %r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -45,11 +45,11 @@ ;; lgr %r1, %r15 ;; 
aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r3, 0x40(%r2) -;; aghi %r3, -4 -;; clgr %r4, %r3 +;; lg %r6, 0x40(%r2) +;; aghi %r6, -4 +;; clgr %r4, %r6 ;; jgh 0x88 -;; lg %r5, 0x38(%r2) -;; lrv %r2, 0(%r4, %r5) +;; lg %r2, 0x38(%r2) +;; lrv %r2, 0(%r4, %r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat index 92040dadb215..42b858149293 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat @@ -27,13 +27,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r3, 0x40(%r2) -;; aghi %r3, -0x1004 -;; clgr %r4, %r3 +;; lg %r7, 0x40(%r2) +;; aghi %r7, -0x1004 +;; clgr %r4, %r7 ;; jgh 0x38 ;; ag %r4, 0x38(%r2) -;; lghi %r6, 0x1000 -;; strv %r5, 0(%r6, %r4) +;; lghi %r2, 0x1000 +;; strv %r5, 0(%r2, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -46,12 +46,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r3, 0x40(%r2) -;; aghi %r3, -0x1004 -;; clgr %r4, %r3 +;; lg %r7, 0x40(%r2) +;; aghi %r7, -0x1004 +;; clgr %r4, %r7 ;; jgh 0x8c ;; ag %r4, 0x38(%r2) -;; lghi %r5, 0x1000 -;; lrv %r2, 0(%r5, %r4) +;; lghi %r2, 0x1000 +;; lrv %r2, 0(%r2, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat index 0a4cbfb263d2..b8509c210fbf 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -27,15 +27,15 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r3, %r4 -;; algfi %r3, 0xffff0004 +;; lgr %r7, %r4 +;; algfi %r7, 0xffff0004 ;; jgnle 0x34 -;; lg %r6, 0x40(%r2) -;; clgr %r3, %r6 +;; lg %r3, 0x40(%r2) +;; clgr %r7, %r3 ;; jgh 0x44 ;; ag %r4, 0x38(%r2) -;; llilh %r6, 0xffff -;; strv %r5, 0(%r6, %r4) +;; llilh %r2, 0xffff +;; strv %r5, 0(%r2, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -48,14 +48,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r3, %r4 -;; algfi %r3, 0xffff0004 +;; lgr %r7, %r4 +;; algfi %r7, 0xffff0004 ;; jgnle 0x94 -;; lg %r5, 0x40(%r2) -;; clgr %r3, %r5 +;; lg %r3, 0x40(%r2) +;; clgr %r7, %r3 ;; jgh 0xa4 ;; ag %r4, 0x38(%r2) -;; llilh %r6, 0xffff -;; lrv %r2, 0(%r6, %r4) +;; llilh %r2, 0xffff +;; lrv %r2, 0(%r2, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat index 5c2f998e7b03..e61d1b4ff808 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat @@ -27,11 +27,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x40(%r2) -;; clgr %r4, %r7 +;; lg %r6, 
0x40(%r2) +;; clgr %r4, %r6 ;; jghe 0x34 -;; lg %r3, 0x38(%r2) -;; stc %r5, 0(%r4, %r3) +;; lg %r7, 0x38(%r2) +;; stc %r5, 0(%r4, %r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -44,10 +44,10 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x40(%r2) -;; clgr %r4, %r7 +;; lg %r5, 0x40(%r2) +;; clgr %r4, %r5 ;; jghe 0x80 -;; lg %r3, 0x38(%r2) -;; llc %r2, 0(%r4, %r3) +;; lg %r7, 0x38(%r2) +;; llc %r2, 0(%r4, %r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat index 5e1504f74120..b6a6866f2a76 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat @@ -27,13 +27,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r3, 0x40(%r2) -;; aghi %r3, -0x1001 -;; clgr %r4, %r3 +;; lg %r7, 0x40(%r2) +;; aghi %r7, -0x1001 +;; clgr %r4, %r7 ;; jgh 0x38 ;; ag %r4, 0x38(%r2) -;; lghi %r6, 0x1000 -;; stc %r5, 0(%r6, %r4) +;; lghi %r2, 0x1000 +;; stc %r5, 0(%r2, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -46,12 +46,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r3, 0x40(%r2) -;; aghi %r3, -0x1001 -;; clgr %r4, %r3 +;; lg %r7, 0x40(%r2) +;; aghi %r7, -0x1001 +;; clgr %r4, %r7 ;; jgh 0x8c ;; ag %r4, 0x38(%r2) -;; lghi %r5, 0x1000 -;; llc %r2, 0(%r5, %r4) +;; lghi %r2, 0x1000 +;; llc %r2, 0(%r2, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat index 95e1d758331f..114ba3315523 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -27,15 +27,15 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r3, %r4 -;; algfi %r3, 0xffff0001 +;; lgr %r7, %r4 +;; algfi %r7, 0xffff0001 ;; jgnle 0x34 -;; lg %r6, 0x40(%r2) -;; clgr %r3, %r6 +;; lg %r3, 0x40(%r2) +;; clgr %r7, %r3 ;; jgh 0x44 ;; ag %r4, 0x38(%r2) -;; llilh %r6, 0xffff -;; stc %r5, 0(%r6, %r4) +;; llilh %r2, 0xffff +;; stc %r5, 0(%r2, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -48,14 +48,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r3, %r4 -;; algfi %r3, 0xffff0001 +;; lgr %r7, %r4 +;; algfi %r7, 0xffff0001 ;; jgnle 0x94 -;; lg %r5, 0x40(%r2) -;; clgr %r3, %r5 +;; lg %r3, 0x40(%r2) +;; clgr %r7, %r3 ;; jgh 0xa4 ;; ag %r4, 0x38(%r2) -;; llilh %r6, 0xffff -;; llc %r2, 0(%r6, %r4) +;; llilh %r2, 0xffff +;; llc %r2, 0(%r2, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat index 7685768ca2b8..de65474408c3 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat +++ 
b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat @@ -27,14 +27,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r6, 0x40(%r2) -;; aghik %r7, %r6, -4 -;; lghi %r6, 0 -;; lgr %r3, %r4 -;; ag %r3, 0x38(%r2) -;; clgr %r4, %r7 -;; locgrh %r3, %r6 -;; strv %r5, 0(%r3) +;; lg %r3, 0x40(%r2) +;; aghik %r6, %r3, -4 +;; lghi %r3, 0 +;; lgr %r7, %r4 +;; ag %r7, 0x38(%r2) +;; clgr %r4, %r6 +;; locgrh %r7, %r3 +;; strv %r5, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,13 +47,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r5, 0x40(%r2) -;; aghik %r6, %r5, -4 -;; lghi %r5, 0 -;; lgr %r3, %r4 -;; ag %r3, 0x38(%r2) -;; clgr %r4, %r6 -;; locgrh %r3, %r5 -;; lrv %r2, 0(%r3) +;; lg %r3, 0x40(%r2) +;; aghik %r5, %r3, -4 +;; lghi %r3, 0 +;; lgr %r7, %r4 +;; ag %r7, 0x38(%r2) +;; clgr %r4, %r5 +;; locgrh %r7, %r3 +;; lrv %r2, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat index 1fd892d91d57..3d67eefd3bd3 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -27,14 +27,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r6, 0x40(%r2) -;; aghik %r7, %r6, -0x1004 -;; lghi %r6, 0 -;; lgr %r3, %r4 -;; ag %r3, 0x38(%r2) -;; aghik %r2, %r3, 0x1000 -;; clgr %r4, %r7 -;; locgrh %r2, %r6 +;; lg %r3, 0x40(%r2) +;; aghik %r6, %r3, -0x1004 +;; lghi %r3, 0 +;; lgr %r7, %r4 +;; ag %r7, 0x38(%r2) +;; aghik %r2, %r7, 0x1000 +;; clgr %r4, %r6 +;; locgrh %r2, %r3 ;; strv %r5, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 @@ -48,14 +48,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r5, 0x40(%r2) -;; aghik %r6, %r5, -0x1004 -;; lghi %r5, 0 -;; lgr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; aghi %r7, 0x1000 -;; clgr %r4, %r6 -;; locgrh %r7, %r5 -;; lrv %r2, 0(%r7) +;; lg %r3, 0x40(%r2) +;; aghik %r5, %r3, -0x1004 +;; lghi %r3, 0 +;; lgr %r6, %r4 +;; ag %r6, 0x38(%r2) +;; aghik %r2, %r6, 0x1000 +;; clgr %r4, %r5 +;; locgrh %r2, %r3 +;; lrv %r2, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat index de995926e0a4..16bf0ca7a705 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -30,13 +30,13 @@ ;; lgr %r6, %r4 ;; algfi %r6, 0xffff0004 ;; jgnle 0x34 -;; lg %r3, 0x40(%r2) -;; lghi %r7, 0 +;; lg %r7, 0x40(%r2) +;; lghi %r3, 0 ;; ag %r4, 0x38(%r2) ;; llilh %r2, 0xffff ;; agrk %r2, %r4, %r2 -;; clgr %r6, %r3 -;; locgrh %r2, %r7 +;; clgr %r6, %r7 +;; locgrh %r2, %r3 ;; strv %r5, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 @@ -50,16 +50,16 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r6, %r4 -;; algfi %r6, 0xffff0004 +;; lgr %r5, %r4 +;; algfi %r5, 0xffff0004 ;; jgnle 0x9c -;; lg %r3, 0x40(%r2) 
-;; lghi %r7, 0 +;; lg %r6, 0x40(%r2) +;; lghi %r3, 0 ;; ag %r4, 0x38(%r2) ;; llilh %r2, 0xffff ;; agrk %r2, %r4, %r2 -;; clgr %r6, %r3 -;; locgrh %r2, %r7 +;; clgr %r5, %r6 +;; locgrh %r2, %r3 ;; lrv %r2, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat index b6bb4768ee56..229aa053de80 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat @@ -27,13 +27,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r6, 0x40(%r2) -;; lghi %r3, 0 -;; lgr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; clgr %r4, %r6 -;; locgrhe %r7, %r3 -;; stc %r5, 0(%r7) +;; lg %r3, 0x40(%r2) +;; lghi %r7, 0 +;; lgr %r6, %r4 +;; ag %r6, 0x38(%r2) +;; clgr %r4, %r3 +;; locgrhe %r6, %r7 +;; stc %r5, 0(%r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -46,12 +46,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r5, 0x40(%r2) -;; lghi %r3, 0 +;; lg %r3, 0x40(%r2) +;; lghi %r7, 0 ;; lgr %r6, %r4 ;; ag %r6, 0x38(%r2) -;; clgr %r4, %r5 -;; locgrhe %r6, %r3 +;; clgr %r4, %r3 +;; locgrhe %r6, %r7 ;; llc %r2, 0(%r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat index d9475da460a6..0097c9fa759c 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -27,14 +27,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r6, 0x40(%r2) -;; aghik %r7, %r6, -0x1001 -;; lghi %r6, 0 -;; lgr %r3, %r4 -;; ag %r3, 0x38(%r2) -;; aghik %r2, %r3, 0x1000 -;; clgr %r4, %r7 -;; locgrh %r2, %r6 +;; lg %r3, 0x40(%r2) +;; aghik %r6, %r3, -0x1001 +;; lghi %r3, 0 +;; lgr %r7, %r4 +;; ag %r7, 0x38(%r2) +;; aghik %r2, %r7, 0x1000 +;; clgr %r4, %r6 +;; locgrh %r2, %r3 ;; stc %r5, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 @@ -48,14 +48,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r5, 0x40(%r2) -;; aghik %r6, %r5, -0x1001 -;; lghi %r5, 0 -;; lgr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; aghi %r7, 0x1000 -;; clgr %r4, %r6 -;; locgrh %r7, %r5 -;; llc %r2, 0(%r7) +;; lg %r3, 0x40(%r2) +;; aghik %r5, %r3, -0x1001 +;; lghi %r3, 0 +;; lgr %r6, %r4 +;; ag %r6, 0x38(%r2) +;; aghik %r2, %r6, 0x1000 +;; clgr %r4, %r5 +;; locgrh %r2, %r3 +;; llc %r2, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat index 06b29f4030da..7d9c0ac17a07 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -30,13 +30,13 @@ ;; lgr %r6, %r4 ;; algfi %r6, 0xffff0001 ;; jgnle 0x34 
-;; lg %r3, 0x40(%r2) -;; lghi %r7, 0 +;; lg %r7, 0x40(%r2) +;; lghi %r3, 0 ;; ag %r4, 0x38(%r2) ;; llilh %r2, 0xffff ;; agrk %r2, %r4, %r2 -;; clgr %r6, %r3 -;; locgrh %r2, %r7 +;; clgr %r6, %r7 +;; locgrh %r2, %r3 ;; stc %r5, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 @@ -50,16 +50,16 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lgr %r6, %r4 -;; algfi %r6, 0xffff0001 +;; lgr %r5, %r4 +;; algfi %r5, 0xffff0001 ;; jgnle 0x98 -;; lg %r3, 0x40(%r2) -;; lghi %r7, 0 +;; lg %r6, 0x40(%r2) +;; lghi %r3, 0 ;; ag %r4, 0x38(%r2) ;; llilh %r2, 0xffff ;; agrk %r2, %r4, %r2 -;; clgr %r6, %r3 -;; locgrh %r2, %r7 +;; clgr %r5, %r6 +;; locgrh %r2, %r3 ;; llc %r2, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat index 22a35a1758bd..e8458db1ca32 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat @@ -27,11 +27,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x40(%r2) -;; clgr %r4, %r7 +;; lg %r6, 0x40(%r2) +;; clgr %r4, %r6 ;; jgh 0x34 -;; lg %r3, 0x38(%r2) -;; strv %r5, 0(%r4, %r3) +;; lg %r7, 0x38(%r2) +;; strv %r5, 0(%r4, %r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -44,10 +44,10 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x40(%r2) -;; clgr %r4, %r7 +;; lg %r5, 0x40(%r2) +;; clgr %r4, %r5 ;; jgh 0x80 -;; lg %r3, 0x38(%r2) -;; lrv %r2, 0(%r4, %r3) +;; lg %r7, 0x38(%r2) +;; lrv %r2, 0(%r4, %r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat index 7971670a19a0..d62139a13a80 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat @@ -27,12 +27,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r3, 0x40(%r2) -;; clgr %r4, %r3 +;; lg %r6, 0x40(%r2) +;; clgr %r4, %r6 ;; jgh 0x34 ;; ag %r4, 0x38(%r2) -;; lghi %r6, 0x1000 -;; strv %r5, 0(%r6, %r4) +;; lghi %r2, 0x1000 +;; strv %r5, 0(%r2, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -45,11 +45,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r3, 0x40(%r2) -;; clgr %r4, %r3 +;; lg %r6, 0x40(%r2) +;; clgr %r4, %r6 ;; jgh 0x84 ;; ag %r4, 0x38(%r2) -;; lghi %r5, 0x1000 -;; lrv %r2, 0(%r5, %r4) +;; lghi %r2, 0x1000 +;; lrv %r2, 0(%r2, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat index 6377d330d14a..ce943f1d12c2 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ 
b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -27,12 +27,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r3, 0x40(%r2) -;; clgr %r4, %r3 +;; lg %r6, 0x40(%r2) +;; clgr %r4, %r6 ;; jgh 0x34 ;; ag %r4, 0x38(%r2) -;; llilh %r6, 0xffff -;; strv %r5, 0(%r6, %r4) +;; llilh %r2, 0xffff +;; strv %r5, 0(%r2, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -45,11 +45,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r3, 0x40(%r2) -;; clgr %r4, %r3 +;; lg %r6, 0x40(%r2) +;; clgr %r4, %r6 ;; jgh 0x84 ;; ag %r4, 0x38(%r2) -;; llilh %r5, 0xffff -;; lrv %r2, 0(%r5, %r4) +;; llilh %r2, 0xffff +;; lrv %r2, 0(%r2, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat index fd850e493123..3a25bd59ee7f 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat @@ -27,11 +27,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x40(%r2) -;; clgr %r4, %r7 +;; lg %r6, 0x40(%r2) +;; clgr %r4, %r6 ;; jghe 0x34 -;; lg %r3, 0x38(%r2) -;; stc %r5, 0(%r4, %r3) +;; lg %r7, 0x38(%r2) +;; stc %r5, 0(%r4, %r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -44,10 +44,10 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x40(%r2) -;; clgr %r4, %r7 +;; lg %r5, 0x40(%r2) +;; clgr %r4, %r5 ;; jghe 0x80 -;; lg %r3, 0x38(%r2) -;; llc %r2, 0(%r4, %r3) +;; lg %r7, 0x38(%r2) +;; llc %r2, 0(%r4, %r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat index d98f86b8f727..f40e2f313fb3 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat @@ -27,12 +27,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r3, 0x40(%r2) -;; clgr %r4, %r3 +;; lg %r6, 0x40(%r2) +;; clgr %r4, %r6 ;; jgh 0x34 ;; ag %r4, 0x38(%r2) -;; lghi %r6, 0x1000 -;; stc %r5, 0(%r6, %r4) +;; lghi %r2, 0x1000 +;; stc %r5, 0(%r2, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -45,11 +45,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r3, 0x40(%r2) -;; clgr %r4, %r3 +;; lg %r6, 0x40(%r2) +;; clgr %r4, %r6 ;; jgh 0x84 ;; ag %r4, 0x38(%r2) -;; lghi %r5, 0x1000 -;; llc %r2, 0(%r5, %r4) +;; lghi %r2, 0x1000 +;; llc %r2, 0(%r2, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat index 827290efa998..4b1ac6e38d08 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ 
b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -27,12 +27,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r3, 0x40(%r2) -;; clgr %r4, %r3 +;; lg %r6, 0x40(%r2) +;; clgr %r4, %r6 ;; jgh 0x34 ;; ag %r4, 0x38(%r2) -;; llilh %r6, 0xffff -;; stc %r5, 0(%r6, %r4) +;; llilh %r2, 0xffff +;; stc %r5, 0(%r2, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -45,11 +45,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r3, 0x40(%r2) -;; clgr %r4, %r3 +;; lg %r6, 0x40(%r2) +;; clgr %r4, %r6 ;; jgh 0x84 ;; ag %r4, 0x38(%r2) -;; llilh %r5, 0xffff -;; llc %r2, 0(%r5, %r4) +;; llilh %r2, 0xffff +;; llc %r2, 0(%r2, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat index 4ed6d1bad2c1..35496c977e70 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat @@ -27,13 +27,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r6, 0x40(%r2) -;; lghi %r3, 0 -;; lgr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; clgr %r4, %r6 -;; locgrh %r7, %r3 -;; strv %r5, 0(%r7) +;; lg %r3, 0x40(%r2) +;; lghi %r7, 0 +;; lgr %r6, %r4 +;; ag %r6, 0x38(%r2) +;; clgr %r4, %r3 +;; locgrh %r6, %r7 +;; strv %r5, 0(%r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -46,12 +46,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r5, 0x40(%r2) -;; lghi %r3, 0 +;; lg %r3, 0x40(%r2) +;; lghi %r7, 0 ;; lgr %r6, %r4 ;; ag %r6, 0x38(%r2) -;; clgr %r4, %r5 -;; locgrh %r6, %r3 +;; clgr %r4, %r3 +;; locgrh %r6, %r7 ;; lrv %r2, 0(%r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat index b559c7322a38..24e12a146304 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -27,14 +27,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x40(%r2) +;; lg %r3, 0x40(%r2) ;; lghi %r6, 0 -;; lgr %r3, %r4 -;; ag %r3, 0x38(%r2) -;; aghi %r3, 0x1000 -;; clgr %r4, %r7 -;; locgrh %r3, %r6 -;; strv %r5, 0(%r3) +;; lgr %r7, %r4 +;; ag %r7, 0x38(%r2) +;; aghi %r7, 0x1000 +;; clgr %r4, %r3 +;; locgrh %r7, %r6 +;; strv %r5, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,13 +47,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r6, 0x40(%r2) +;; lg %r3, 0x40(%r2) ;; lghi %r5, 0 -;; lgr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; aghik %r3, %r7, 0x1000 -;; clgr %r4, %r6 -;; locgrh %r3, %r5 -;; lrv %r2, 0(%r3) +;; lgr %r6, %r4 +;; ag %r6, 0x38(%r2) +;; aghik %r7, %r6, 0x1000 +;; clgr %r4, %r3 +;; locgrh %r7, %r5 +;; lrv %r2, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git 
a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat index 6d0066b18b51..7fb26be60329 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -27,13 +27,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x40(%r2) +;; lg %r3, 0x40(%r2) ;; lghi %r6, 0 -;; lgr %r3, %r4 -;; ag %r3, 0x38(%r2) +;; lgr %r7, %r4 +;; ag %r7, 0x38(%r2) ;; llilh %r2, 0xffff -;; agrk %r2, %r3, %r2 -;; clgr %r4, %r7 +;; agrk %r2, %r7, %r2 +;; clgr %r4, %r3 ;; locgrh %r2, %r6 ;; strv %r5, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) @@ -48,14 +48,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r6, 0x40(%r2) +;; lg %r3, 0x40(%r2) ;; lghi %r5, 0 -;; lgr %r7, %r4 -;; ag %r7, 0x38(%r2) +;; lgr %r6, %r4 +;; ag %r6, 0x38(%r2) ;; llilh %r2, 0xffff -;; agr %r7, %r2 -;; clgr %r4, %r6 -;; locgrh %r7, %r5 -;; lrv %r2, 0(%r7) +;; agrk %r2, %r6, %r2 +;; clgr %r4, %r3 +;; locgrh %r2, %r5 +;; lrv %r2, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat index 59d9d6c04010..3dd1887fb4d2 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat @@ -27,13 +27,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r6, 0x40(%r2) -;; lghi %r3, 0 -;; lgr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; clgr %r4, %r6 -;; locgrhe %r7, %r3 -;; stc %r5, 0(%r7) +;; lg %r3, 0x40(%r2) +;; lghi %r7, 0 +;; lgr %r6, %r4 +;; ag %r6, 0x38(%r2) +;; clgr %r4, %r3 +;; locgrhe %r6, %r7 +;; stc %r5, 0(%r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -46,12 +46,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r5, 0x40(%r2) -;; lghi %r3, 0 +;; lg %r3, 0x40(%r2) +;; lghi %r7, 0 ;; lgr %r6, %r4 ;; ag %r6, 0x38(%r2) -;; clgr %r4, %r5 -;; locgrhe %r6, %r3 +;; clgr %r4, %r3 +;; locgrhe %r6, %r7 ;; llc %r2, 0(%r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat index e2e0cee776a5..811e88375fab 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -27,14 +27,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x40(%r2) +;; lg %r3, 0x40(%r2) ;; lghi %r6, 0 -;; lgr %r3, %r4 -;; ag %r3, 0x38(%r2) -;; aghi %r3, 0x1000 -;; clgr %r4, %r7 -;; locgrh %r3, %r6 -;; stc %r5, 0(%r3) +;; lgr %r7, %r4 +;; ag %r7, 0x38(%r2) +;; aghi %r7, 0x1000 +;; clgr %r4, %r3 +;; locgrh %r7, %r6 +;; stc %r5, 0(%r7) ;; lmg 
%r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,13 +47,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r6, 0x40(%r2) +;; lg %r3, 0x40(%r2) ;; lghi %r5, 0 -;; lgr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; aghik %r3, %r7, 0x1000 -;; clgr %r4, %r6 -;; locgrh %r3, %r5 -;; llc %r2, 0(%r3) +;; lgr %r6, %r4 +;; ag %r6, 0x38(%r2) +;; aghik %r7, %r6, 0x1000 +;; clgr %r4, %r3 +;; locgrh %r7, %r5 +;; llc %r2, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat index 7b80efa1a5c5..b234605b2837 100644 --- a/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -27,13 +27,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x40(%r2) +;; lg %r3, 0x40(%r2) ;; lghi %r6, 0 -;; lgr %r3, %r4 -;; ag %r3, 0x38(%r2) +;; lgr %r7, %r4 +;; ag %r7, 0x38(%r2) ;; llilh %r2, 0xffff -;; agrk %r2, %r3, %r2 -;; clgr %r4, %r7 +;; agrk %r2, %r7, %r2 +;; clgr %r4, %r3 ;; locgrh %r2, %r6 ;; stc %r5, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) @@ -48,14 +48,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r6, 0x40(%r2) +;; lg %r3, 0x40(%r2) ;; lghi %r5, 0 -;; lgr %r7, %r4 -;; ag %r7, 0x38(%r2) +;; lgr %r6, %r4 +;; ag %r6, 0x38(%r2) ;; llilh %r2, 0xffff -;; agr %r7, %r2 -;; clgr %r4, %r6 -;; locgrh %r7, %r5 -;; llc %r2, 0(%r7) +;; agrk %r2, %r6, %r2 +;; clgr %r4, %r3 +;; locgrh %r2, %r5 +;; llc %r2, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat index 213569671880..bc1713dee8d7 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat @@ -27,11 +27,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; clgfi %r7, 0xfffffffc +;; llgfr %r6, %r4 +;; clgfi %r6, 0xfffffffc ;; jgh 0x34 -;; lg %r3, 0x38(%r2) -;; strv %r5, 0(%r7, %r3) +;; lg %r7, 0x38(%r2) +;; strv %r5, 0(%r6, %r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -44,10 +44,10 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; clgfi %r7, 0xfffffffc +;; llgfr %r5, %r4 +;; clgfi %r5, 0xfffffffc ;; jgh 0x80 -;; lg %r3, 0x38(%r2) -;; lrv %r2, 0(%r7, %r3) +;; lg %r7, 0x38(%r2) +;; lrv %r2, 0(%r5, %r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat index 85b8f5553db0..b269e6755633 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat @@ -27,12 +27,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) 
-;; llgfr %r7, %r4 -;; clgfi %r7, 0xffffeffc +;; llgfr %r6, %r4 +;; clgfi %r6, 0xffffeffc ;; jgh 0x34 -;; ag %r7, 0x38(%r2) -;; lghi %r4, 0x1000 -;; strv %r5, 0(%r4, %r7) +;; ag %r6, 0x38(%r2) +;; lghi %r2, 0x1000 +;; strv %r5, 0(%r2, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -45,11 +45,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; clgfi %r7, 0xffffeffc +;; llgfr %r5, %r4 +;; clgfi %r5, 0xffffeffc ;; jgh 0x84 -;; ag %r7, 0x38(%r2) -;; lghi %r4, 0x1000 -;; lrv %r2, 0(%r4, %r7) +;; ag %r5, 0x38(%r2) +;; lghi %r2, 0x1000 +;; lrv %r2, 0(%r2, %r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat index d4c1a2d0e787..ff8892f05b72 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -27,12 +27,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; clgfi %r7, 0xfffc +;; llgfr %r6, %r4 +;; clgfi %r6, 0xfffc ;; jgh 0x34 -;; ag %r7, 0x38(%r2) -;; llilh %r4, 0xffff -;; strv %r5, 0(%r4, %r7) +;; ag %r6, 0x38(%r2) +;; llilh %r2, 0xffff +;; strv %r5, 0(%r2, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -45,11 +45,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; clgfi %r7, 0xfffc +;; llgfr %r5, %r4 +;; clgfi %r5, 0xfffc ;; jgh 0x84 -;; ag %r7, 0x38(%r2) -;; llilh %r4, 0xffff -;; lrv %r2, 0(%r4, %r7) +;; ag %r5, 0x38(%r2) +;; llilh %r2, 0xffff +;; lrv %r2, 0(%r2, %r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat index 2c8538938815..3c545cecab8c 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat @@ -27,9 +27,9 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x38(%r2) -;; llgfr %r2, %r4 -;; stc %r5, 0(%r2, %r7) +;; lg %r6, 0x38(%r2) +;; llgfr %r7, %r4 +;; stc %r5, 0(%r7, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -42,8 +42,8 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x38(%r2) -;; llgfr %r2, %r4 -;; llc %r2, 0(%r2, %r7) +;; lg %r5, 0x38(%r2) +;; llgfr %r6, %r4 +;; llc %r2, 0(%r6, %r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat index 26a6762d00a2..630a93fa9231 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat @@ -27,12 +27,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; clgfi %r7, 0xffffefff +;; llgfr %r6, %r4 +;; clgfi %r6, 0xffffefff ;; jgh 0x34 -;; ag %r7, 
0x38(%r2) -;; lghi %r4, 0x1000 -;; stc %r5, 0(%r4, %r7) +;; ag %r6, 0x38(%r2) +;; lghi %r2, 0x1000 +;; stc %r5, 0(%r2, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -45,11 +45,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; clgfi %r7, 0xffffefff +;; llgfr %r5, %r4 +;; clgfi %r5, 0xffffefff ;; jgh 0x84 -;; ag %r7, 0x38(%r2) -;; lghi %r4, 0x1000 -;; llc %r2, 0(%r4, %r7) +;; ag %r5, 0x38(%r2) +;; lghi %r2, 0x1000 +;; llc %r2, 0(%r2, %r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat index 189438612f58..a739b86a9655 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -27,12 +27,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; clgfi %r7, 0xffff +;; llgfr %r6, %r4 +;; clgfi %r6, 0xffff ;; jgh 0x34 -;; ag %r7, 0x38(%r2) -;; llilh %r4, 0xffff -;; stc %r5, 0(%r4, %r7) +;; ag %r6, 0x38(%r2) +;; llilh %r2, 0xffff +;; stc %r5, 0(%r2, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -45,11 +45,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; clgfi %r7, 0xffff +;; llgfr %r5, %r4 +;; clgfi %r5, 0xffff ;; jgh 0x84 -;; ag %r7, 0x38(%r2) -;; llilh %r4, 0xffff -;; llc %r2, 0(%r4, %r7) +;; ag %r5, 0x38(%r2) +;; llilh %r2, 0xffff +;; llc %r2, 0(%r2, %r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat index 4ea4955c26c8..0eeda60bc986 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat @@ -27,12 +27,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r4, %r4 -;; lghi %r3, 0 -;; lgr %r6, %r4 +;; llgfr %r3, %r4 +;; lghi %r7, 0 +;; lgr %r6, %r3 ;; ag %r6, 0x38(%r2) -;; clgfi %r4, 0xfffffffc -;; locgrh %r6, %r3 +;; clgfi %r3, 0xfffffffc +;; locgrh %r6, %r7 ;; strv %r5, 0(%r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 @@ -46,12 +46,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r4, %r4 -;; lghi %r3, 0 -;; lgr %r5, %r4 -;; ag %r5, 0x38(%r2) -;; clgfi %r4, 0xfffffffc -;; locgrh %r5, %r3 -;; lrv %r2, 0(%r5) +;; llgfr %r3, %r4 +;; lghi %r7, 0 +;; lgr %r6, %r3 +;; ag %r6, 0x38(%r2) +;; clgfi %r3, 0xfffffffc +;; locgrh %r6, %r7 +;; lrv %r2, 0(%r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat index ad44c4baba2e..ff369051be30 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -27,14 +27,14 @@ ;; lgr %r1, %r15 ;; aghi 
%r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r6, %r4 +;; llgfr %r3, %r4 ;; lghi %r4, 0 -;; lgr %r7, %r6 -;; ag %r7, 0x38(%r2) -;; aghik %r3, %r7, 0x1000 -;; clgfi %r6, 0xffffeffc -;; locgrh %r3, %r4 -;; strv %r5, 0(%r3) +;; lgr %r6, %r3 +;; ag %r6, 0x38(%r2) +;; aghik %r7, %r6, 0x1000 +;; clgfi %r3, 0xffffeffc +;; locgrh %r7, %r4 +;; strv %r5, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,13 +47,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r5, %r4 +;; llgfr %r3, %r4 ;; lghi %r4, 0 -;; lgr %r6, %r5 -;; ag %r6, 0x38(%r2) -;; aghik %r3, %r6, 0x1000 -;; clgfi %r5, 0xffffeffc -;; locgrh %r3, %r4 -;; lrv %r2, 0(%r3) +;; lgr %r5, %r3 +;; ag %r5, 0x38(%r2) +;; aghik %r7, %r5, 0x1000 +;; clgfi %r3, 0xffffeffc +;; locgrh %r7, %r4 +;; lrv %r2, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat index c8469318a3ac..da54a441ad0f 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -27,15 +27,15 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; lghi %r6, 0 -;; lgr %r3, %r7 -;; ag %r3, 0x38(%r2) +;; llgfr %r3, %r4 +;; lghi %r4, 0 +;; lgr %r6, %r3 +;; ag %r6, 0x38(%r2) ;; llilh %r2, 0xffff -;; agrk %r4, %r3, %r2 -;; clgfi %r7, 0xfffc -;; locgrh %r4, %r6 -;; strv %r5, 0(%r4) +;; agrk %r2, %r6, %r2 +;; clgfi %r3, 0xfffc +;; locgrh %r2, %r4 +;; strv %r5, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -48,14 +48,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r6, %r4 -;; lghi %r5, 0 -;; lgr %r7, %r6 -;; ag %r7, 0x38(%r2) +;; llgfr %r3, %r4 +;; lghi %r4, 0 +;; lgr %r5, %r3 +;; ag %r5, 0x38(%r2) ;; llilh %r2, 0xffff -;; agrk %r4, %r7, %r2 -;; clgfi %r6, 0xfffc -;; locgrh %r4, %r5 -;; lrv %r2, 0(%r4) +;; agrk %r2, %r5, %r2 +;; clgfi %r3, 0xfffc +;; locgrh %r2, %r4 +;; lrv %r2, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat index 9851d892bf5f..a79a0bc9dce4 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat @@ -27,9 +27,9 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x38(%r2) -;; llgfr %r2, %r4 -;; stc %r5, 0(%r2, %r7) +;; lg %r6, 0x38(%r2) +;; llgfr %r7, %r4 +;; stc %r5, 0(%r7, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -42,8 +42,8 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x38(%r2) -;; llgfr %r2, %r4 -;; llc %r2, 0(%r2, %r7) +;; lg %r5, 0x38(%r2) +;; llgfr %r6, %r4 +;; llc %r2, 0(%r6, %r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat index c6d3e031cbcd..1664e500da16 100644 --- 
a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -27,14 +27,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r6, %r4 +;; llgfr %r3, %r4 ;; lghi %r4, 0 -;; lgr %r7, %r6 -;; ag %r7, 0x38(%r2) -;; aghik %r3, %r7, 0x1000 -;; clgfi %r6, 0xffffefff -;; locgrh %r3, %r4 -;; stc %r5, 0(%r3) +;; lgr %r6, %r3 +;; ag %r6, 0x38(%r2) +;; aghik %r7, %r6, 0x1000 +;; clgfi %r3, 0xffffefff +;; locgrh %r7, %r4 +;; stc %r5, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,13 +47,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r5, %r4 +;; llgfr %r3, %r4 ;; lghi %r4, 0 -;; lgr %r6, %r5 -;; ag %r6, 0x38(%r2) -;; aghik %r3, %r6, 0x1000 -;; clgfi %r5, 0xffffefff -;; locgrh %r3, %r4 -;; llc %r2, 0(%r3) +;; lgr %r5, %r3 +;; ag %r5, 0x38(%r2) +;; aghik %r7, %r5, 0x1000 +;; clgfi %r3, 0xffffefff +;; locgrh %r7, %r4 +;; llc %r2, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat index c62b86503968..9dab79a09370 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -27,15 +27,15 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; lghi %r6, 0 -;; lgr %r3, %r7 -;; ag %r3, 0x38(%r2) +;; llgfr %r3, %r4 +;; lghi %r4, 0 +;; lgr %r6, %r3 +;; ag %r6, 0x38(%r2) ;; llilh %r2, 0xffff -;; agrk %r4, %r3, %r2 -;; clgfi %r7, 0xffff -;; locgrh %r4, %r6 -;; stc %r5, 0(%r4) +;; agrk %r2, %r6, %r2 +;; clgfi %r3, 0xffff +;; locgrh %r2, %r4 +;; stc %r5, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -48,14 +48,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r6, %r4 -;; lghi %r5, 0 -;; lgr %r7, %r6 -;; ag %r7, 0x38(%r2) +;; llgfr %r3, %r4 +;; lghi %r4, 0 +;; lgr %r5, %r3 +;; ag %r5, 0x38(%r2) ;; llilh %r2, 0xffff -;; agrk %r4, %r7, %r2 -;; clgfi %r6, 0xffff -;; locgrh %r4, %r5 -;; llc %r2, 0(%r4) +;; agrk %r2, %r5, %r2 +;; clgfi %r3, 0xffff +;; locgrh %r2, %r4 +;; llc %r2, 0(%r2) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat index 9cab85de3606..45bf164cb7c4 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat @@ -27,9 +27,9 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x38(%r2) -;; llgfr %r2, %r4 -;; strv %r5, 0(%r2, %r7) +;; lg %r6, 0x38(%r2) +;; llgfr %r7, %r4 +;; strv %r5, 0(%r7, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -42,8 +42,8 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x38(%r2) -;; llgfr %r2, %r4 -;; lrv %r2, 0(%r2, %r7) +;; lg %r5, 0x38(%r2) +;; llgfr %r6, %r4 +;; lrv %r2, 0(%r6, %r5) ;; lmg %r14, %r15, 
0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat index 62176a2207bc..7180cee55ae1 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat @@ -27,10 +27,10 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; lghi %r2, 0x1000 -;; strv %r5, 0(%r2, %r7) +;; llgfr %r6, %r4 +;; ag %r6, 0x38(%r2) +;; lghi %r7, 0x1000 +;; strv %r5, 0(%r7, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -43,9 +43,9 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; lghi %r2, 0x1000 -;; lrv %r2, 0(%r2, %r7) +;; llgfr %r5, %r4 +;; ag %r5, 0x38(%r2) +;; lghi %r6, 0x1000 +;; lrv %r2, 0(%r6, %r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat index fdfef2a089c8..7c7247699e84 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -27,10 +27,10 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; llilh %r2, 0xffff -;; strv %r5, 0(%r2, %r7) +;; llgfr %r6, %r4 +;; ag %r6, 0x38(%r2) +;; llilh %r7, 0xffff +;; strv %r5, 0(%r7, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -43,9 +43,9 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; llilh %r2, 0xffff -;; lrv %r2, 0(%r2, %r7) +;; llgfr %r5, %r4 +;; ag %r5, 0x38(%r2) +;; llilh %r6, 0xffff +;; lrv %r2, 0(%r6, %r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat index 4330ef6fad3b..4b09dfa79502 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat @@ -27,9 +27,9 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x38(%r2) -;; llgfr %r2, %r4 -;; stc %r5, 0(%r2, %r7) +;; lg %r6, 0x38(%r2) +;; llgfr %r7, %r4 +;; stc %r5, 0(%r7, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -42,8 +42,8 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x38(%r2) -;; llgfr %r2, %r4 -;; llc %r2, 0(%r2, %r7) +;; lg %r5, 0x38(%r2) +;; llgfr %r6, %r4 +;; llc %r2, 0(%r6, %r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat 
b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat index 61e4063ff4de..032bc28e3f59 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat @@ -27,10 +27,10 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; lghi %r2, 0x1000 -;; stc %r5, 0(%r2, %r7) +;; llgfr %r6, %r4 +;; ag %r6, 0x38(%r2) +;; lghi %r7, 0x1000 +;; stc %r5, 0(%r7, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -43,9 +43,9 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; lghi %r2, 0x1000 -;; llc %r2, 0(%r2, %r7) +;; llgfr %r5, %r4 +;; ag %r5, 0x38(%r2) +;; lghi %r6, 0x1000 +;; llc %r2, 0(%r6, %r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat index 75bd25718f98..4c00180be48d 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -27,10 +27,10 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; llilh %r2, 0xffff -;; stc %r5, 0(%r2, %r7) +;; llgfr %r6, %r4 +;; ag %r6, 0x38(%r2) +;; llilh %r7, 0xffff +;; stc %r5, 0(%r7, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -43,9 +43,9 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; llilh %r2, 0xffff -;; llc %r2, 0(%r2, %r7) +;; llgfr %r5, %r4 +;; ag %r5, 0x38(%r2) +;; llilh %r6, 0xffff +;; llc %r2, 0(%r6, %r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat index a4bf37f169fa..691d7fbb6e54 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat @@ -27,9 +27,9 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x38(%r2) -;; llgfr %r2, %r4 -;; strv %r5, 0(%r2, %r7) +;; lg %r6, 0x38(%r2) +;; llgfr %r7, %r4 +;; strv %r5, 0(%r7, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -42,8 +42,8 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x38(%r2) -;; llgfr %r2, %r4 -;; lrv %r2, 0(%r2, %r7) +;; lg %r5, 0x38(%r2) +;; llgfr %r6, %r4 +;; lrv %r2, 0(%r6, %r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat index 87a05c1af5fc..59aadb89b8f8 100644 --- 
a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -27,10 +27,10 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; lghi %r2, 0x1000 -;; strv %r5, 0(%r2, %r7) +;; llgfr %r6, %r4 +;; ag %r6, 0x38(%r2) +;; lghi %r7, 0x1000 +;; strv %r5, 0(%r7, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -43,9 +43,9 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; lghi %r2, 0x1000 -;; lrv %r2, 0(%r2, %r7) +;; llgfr %r5, %r4 +;; ag %r5, 0x38(%r2) +;; lghi %r6, 0x1000 +;; lrv %r2, 0(%r6, %r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat index 069fa3a1789e..9f9579a818c2 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -27,10 +27,10 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; llilh %r2, 0xffff -;; strv %r5, 0(%r2, %r7) +;; llgfr %r6, %r4 +;; ag %r6, 0x38(%r2) +;; llilh %r7, 0xffff +;; strv %r5, 0(%r7, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -43,9 +43,9 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; llilh %r2, 0xffff -;; lrv %r2, 0(%r2, %r7) +;; llgfr %r5, %r4 +;; ag %r5, 0x38(%r2) +;; llilh %r6, 0xffff +;; lrv %r2, 0(%r6, %r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat index a1b89395fd88..8082a99b4ef9 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat @@ -27,9 +27,9 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x38(%r2) -;; llgfr %r2, %r4 -;; stc %r5, 0(%r2, %r7) +;; lg %r6, 0x38(%r2) +;; llgfr %r7, %r4 +;; stc %r5, 0(%r7, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -42,8 +42,8 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lg %r7, 0x38(%r2) -;; llgfr %r2, %r4 -;; llc %r2, 0(%r2, %r7) +;; lg %r5, 0x38(%r2) +;; llgfr %r6, %r4 +;; llc %r2, 0(%r6, %r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat index d96d65a5c2cd..df93df83f3a2 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat +++ 
b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -27,10 +27,10 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; lghi %r2, 0x1000 -;; stc %r5, 0(%r2, %r7) +;; llgfr %r6, %r4 +;; ag %r6, 0x38(%r2) +;; lghi %r7, 0x1000 +;; stc %r5, 0(%r7, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -43,9 +43,9 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; lghi %r2, 0x1000 -;; llc %r2, 0(%r2, %r7) +;; llgfr %r5, %r4 +;; ag %r5, 0x38(%r2) +;; lghi %r6, 0x1000 +;; llc %r2, 0(%r6, %r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat index e7e6d0c57a07..b181948e7dff 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -27,10 +27,10 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; llilh %r2, 0xffff -;; stc %r5, 0(%r2, %r7) +;; llgfr %r6, %r4 +;; ag %r6, 0x38(%r2) +;; llilh %r7, 0xffff +;; stc %r5, 0(%r7, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -43,9 +43,9 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; llgfr %r7, %r4 -;; ag %r7, 0x38(%r2) -;; llilh %r2, 0xffff -;; llc %r2, 0(%r2, %r7) +;; llgfr %r5, %r4 +;; ag %r5, 0x38(%r2) +;; llilh %r6, 0xffff +;; llc %r2, 0(%r6, %r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat index 8ff93de5e16d..92692cd91972 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat @@ -29,8 +29,8 @@ ;; stg %r1, 0(%r15) ;; clgfi %r4, 0xfffffffc ;; jgh 0x30 -;; lg %r2, 0x38(%r2) -;; strv %r5, 0(%r4, %r2) +;; lg %r6, 0x38(%r2) +;; strv %r5, 0(%r4, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -45,7 +45,7 @@ ;; stg %r1, 0(%r15) ;; clgfi %r4, 0xfffffffc ;; jgh 0x78 -;; lg %r2, 0x38(%r2) -;; lrv %r2, 0(%r4, %r2) +;; lg %r6, 0x38(%r2) +;; lrv %r2, 0(%r4, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat index a0206196af97..0b7054e7f377 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat @@ -30,8 +30,8 @@ ;; clgfi %r4, 0xffffeffc ;; jgh 0x30 ;; ag %r4, 0x38(%r2) -;; lghi %r3, 0x1000 -;; strv %r5, 0(%r3, %r4) +;; lghi %r7, 0x1000 +;; strv %r5, 0(%r7, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,7 +47,7 @@ ;; clgfi %r4, 0xffffeffc ;; jgh 0x7c ;; ag 
%r4, 0x38(%r2) -;; lghi %r3, 0x1000 -;; lrv %r2, 0(%r3, %r4) +;; lghi %r7, 0x1000 +;; lrv %r2, 0(%r7, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat index 042b68893add..30b39d14071c 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -30,8 +30,8 @@ ;; clgfi %r4, 0xfffc ;; jgh 0x30 ;; ag %r4, 0x38(%r2) -;; llilh %r3, 0xffff -;; strv %r5, 0(%r3, %r4) +;; llilh %r7, 0xffff +;; strv %r5, 0(%r7, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,7 +47,7 @@ ;; clgfi %r4, 0xfffc ;; jgh 0x7c ;; ag %r4, 0x38(%r2) -;; llilh %r3, 0xffff -;; lrv %r2, 0(%r3, %r4) +;; llilh %r7, 0xffff +;; lrv %r2, 0(%r7, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat index eeeea78da3c4..cd7c4730ad4f 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat @@ -29,8 +29,8 @@ ;; stg %r1, 0(%r15) ;; clgfi %r4, 0xffffffff ;; jgh 0x30 -;; lg %r2, 0x38(%r2) -;; stc %r5, 0(%r4, %r2) +;; lg %r6, 0x38(%r2) +;; stc %r5, 0(%r4, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -45,7 +45,7 @@ ;; stg %r1, 0(%r15) ;; clgfi %r4, 0xffffffff ;; jgh 0x78 -;; lg %r2, 0x38(%r2) -;; llc %r2, 0(%r4, %r2) +;; lg %r6, 0x38(%r2) +;; llc %r2, 0(%r4, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat index 0b821c870880..60caa78cf929 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat @@ -30,8 +30,8 @@ ;; clgfi %r4, 0xffffefff ;; jgh 0x30 ;; ag %r4, 0x38(%r2) -;; lghi %r3, 0x1000 -;; stc %r5, 0(%r3, %r4) +;; lghi %r7, 0x1000 +;; stc %r5, 0(%r7, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,7 +47,7 @@ ;; clgfi %r4, 0xffffefff ;; jgh 0x7c ;; ag %r4, 0x38(%r2) -;; lghi %r3, 0x1000 -;; llc %r2, 0(%r3, %r4) +;; lghi %r7, 0x1000 +;; llc %r2, 0(%r7, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat index 6af33ad0bbe9..c90862aec147 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -30,8 +30,8 @@ ;; clgfi %r4, 0xffff ;; jgh 0x30 ;; ag %r4, 0x38(%r2) -;; llilh %r3, 0xffff -;; stc %r5, 
0(%r3, %r4) +;; llilh %r7, 0xffff +;; stc %r5, 0(%r7, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,7 +47,7 @@ ;; clgfi %r4, 0xffff ;; jgh 0x7c ;; ag %r4, 0x38(%r2) -;; llilh %r3, 0xffff -;; llc %r2, 0(%r3, %r4) +;; llilh %r7, 0xffff +;; llc %r2, 0(%r7, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat index ce7fdabce629..d82262f0d380 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat @@ -27,11 +27,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r3, 0 +;; lghi %r6, 0 ;; lgr %r7, %r4 ;; ag %r7, 0x38(%r2) ;; clgfi %r4, 0xfffffffc -;; locgrh %r7, %r3 +;; locgrh %r7, %r6 ;; strv %r5, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 @@ -45,11 +45,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r3, 0 -;; lgr %r7, %r4 -;; ag %r7, 0x38(%r2) +;; lghi %r6, 0 +;; lgr %r5, %r4 +;; ag %r5, 0x38(%r2) ;; clgfi %r4, 0xfffffffc -;; locgrh %r7, %r3 -;; lrv %r2, 0(%r7) +;; locgrh %r5, %r6 +;; lrv %r2, 0(%r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat index 4678d93e0563..5c77a996e0ba 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -27,13 +27,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r3, 0 -;; lgr %r6, %r4 -;; ag %r6, 0x38(%r2) -;; aghik %r2, %r6, 0x1000 +;; lghi %r7, 0 +;; lgr %r3, %r4 +;; ag %r3, 0x38(%r2) +;; aghik %r6, %r3, 0x1000 ;; clgfi %r4, 0xffffeffc -;; locgrh %r2, %r3 -;; strv %r5, 0(%r2) +;; locgrh %r6, %r7 +;; strv %r5, 0(%r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -46,12 +46,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r3, 0 -;; lgr %r5, %r4 -;; ag %r5, 0x38(%r2) -;; aghik %r2, %r5, 0x1000 +;; lghi %r7, 0 +;; lgr %r3, %r4 +;; ag %r3, 0x38(%r2) +;; aghik %r6, %r3, 0x1000 ;; clgfi %r4, 0xffffeffc -;; locgrh %r2, %r3 -;; lrv %r2, 0(%r2) +;; locgrh %r6, %r7 +;; lrv %r2, 0(%r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat index b7fcb6578977..5008f9aa36ca 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -27,14 +27,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r6, 0 -;; lgr %r7, %r4 -;; ag %r7, 0x38(%r2) +;; lghi %r3, 0 +;; lgr %r6, %r4 +;; ag %r6, 0x38(%r2) ;; llilh %r2, 0xffff -;; agrk %r3, %r7, %r2 +;; agrk %r7, %r6, %r2 ;; clgfi %r4, 0xfffc -;; locgrh %r3, %r6 -;; strv %r5, 0(%r3) +;; 
locgrh %r7, %r3 +;; strv %r5, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,13 +47,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r5, 0 -;; lgr %r6, %r4 -;; ag %r6, 0x38(%r2) -;; llilh %r7, 0xffff -;; agrk %r3, %r6, %r7 +;; lghi %r3, 0 +;; lgr %r5, %r4 +;; ag %r5, 0x38(%r2) +;; llilh %r2, 0xffff +;; agrk %r7, %r5, %r2 ;; clgfi %r4, 0xfffc -;; locgrh %r3, %r5 -;; lrv %r2, 0(%r3) +;; locgrh %r7, %r3 +;; lrv %r2, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat index 6baf1d9acdbd..0864c43320f4 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat @@ -27,11 +27,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r3, 0 +;; lghi %r6, 0 ;; lgr %r7, %r4 ;; ag %r7, 0x38(%r2) ;; clgfi %r4, 0xffffffff -;; locgrh %r7, %r3 +;; locgrh %r7, %r6 ;; stc %r5, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 @@ -45,11 +45,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r3, 0 -;; lgr %r7, %r4 -;; ag %r7, 0x38(%r2) +;; lghi %r6, 0 +;; lgr %r5, %r4 +;; ag %r5, 0x38(%r2) ;; clgfi %r4, 0xffffffff -;; locgrh %r7, %r3 -;; llc %r2, 0(%r7) +;; locgrh %r5, %r6 +;; llc %r2, 0(%r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat index f4ad4015301b..1f8f2cfcae42 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -27,13 +27,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r3, 0 -;; lgr %r6, %r4 -;; ag %r6, 0x38(%r2) -;; aghik %r2, %r6, 0x1000 +;; lghi %r7, 0 +;; lgr %r3, %r4 +;; ag %r3, 0x38(%r2) +;; aghik %r6, %r3, 0x1000 ;; clgfi %r4, 0xffffefff -;; locgrh %r2, %r3 -;; stc %r5, 0(%r2) +;; locgrh %r6, %r7 +;; stc %r5, 0(%r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -46,12 +46,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r3, 0 -;; lgr %r5, %r4 -;; ag %r5, 0x38(%r2) -;; aghik %r2, %r5, 0x1000 +;; lghi %r7, 0 +;; lgr %r3, %r4 +;; ag %r3, 0x38(%r2) +;; aghik %r6, %r3, 0x1000 ;; clgfi %r4, 0xffffefff -;; locgrh %r2, %r3 -;; llc %r2, 0(%r2) +;; locgrh %r6, %r7 +;; llc %r2, 0(%r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat index 8377da665fba..dff360de8e10 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -27,14 +27,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r6, 0 -;; lgr %r7, %r4 -;; ag %r7, 0x38(%r2) +;; lghi %r3, 0 
+;; lgr %r6, %r4 +;; ag %r6, 0x38(%r2) ;; llilh %r2, 0xffff -;; agrk %r3, %r7, %r2 +;; agrk %r7, %r6, %r2 ;; clgfi %r4, 0xffff -;; locgrh %r3, %r6 -;; stc %r5, 0(%r3) +;; locgrh %r7, %r3 +;; stc %r5, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,13 +47,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r5, 0 -;; lgr %r6, %r4 -;; ag %r6, 0x38(%r2) -;; llilh %r7, 0xffff -;; agrk %r3, %r6, %r7 +;; lghi %r3, 0 +;; lgr %r5, %r4 +;; ag %r5, 0x38(%r2) +;; llilh %r2, 0xffff +;; agrk %r7, %r5, %r2 ;; clgfi %r4, 0xffff -;; locgrh %r3, %r5 -;; llc %r2, 0(%r3) +;; locgrh %r7, %r3 +;; llc %r2, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat index a5e16ce752b3..f100752eecef 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat @@ -29,8 +29,8 @@ ;; stg %r1, 0(%r15) ;; clgfi %r4, 0xfffffffc ;; jgh 0x30 -;; lg %r2, 0x38(%r2) -;; strv %r5, 0(%r4, %r2) +;; lg %r6, 0x38(%r2) +;; strv %r5, 0(%r4, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -45,7 +45,7 @@ ;; stg %r1, 0(%r15) ;; clgfi %r4, 0xfffffffc ;; jgh 0x78 -;; lg %r2, 0x38(%r2) -;; lrv %r2, 0(%r4, %r2) +;; lg %r6, 0x38(%r2) +;; lrv %r2, 0(%r4, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat index 038a6a6388e9..cdc947c58edc 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat @@ -30,8 +30,8 @@ ;; clgfi %r4, 0xffffeffc ;; jgh 0x30 ;; ag %r4, 0x38(%r2) -;; lghi %r3, 0x1000 -;; strv %r5, 0(%r3, %r4) +;; lghi %r7, 0x1000 +;; strv %r5, 0(%r7, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,7 +47,7 @@ ;; clgfi %r4, 0xffffeffc ;; jgh 0x7c ;; ag %r4, 0x38(%r2) -;; lghi %r3, 0x1000 -;; lrv %r2, 0(%r3, %r4) +;; lghi %r7, 0x1000 +;; lrv %r2, 0(%r7, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat index 51a4e645475b..6b3d27757576 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -30,8 +30,8 @@ ;; clgfi %r4, 0xfffc ;; jgh 0x30 ;; ag %r4, 0x38(%r2) -;; llilh %r3, 0xffff -;; strv %r5, 0(%r3, %r4) +;; llilh %r7, 0xffff +;; strv %r5, 0(%r7, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,7 +47,7 @@ ;; clgfi %r4, 0xfffc ;; jgh 0x7c ;; ag %r4, 0x38(%r2) -;; llilh %r3, 0xffff -;; lrv %r2, 0(%r3, %r4) +;; llilh %r7, 0xffff +;; lrv %r2, 0(%r7, %r4) ;; lmg %r14, %r15, 
0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat index 6c53af79a132..690e5a4bd5d5 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat @@ -29,8 +29,8 @@ ;; stg %r1, 0(%r15) ;; clgfi %r4, 0xffffffff ;; jgh 0x30 -;; lg %r2, 0x38(%r2) -;; stc %r5, 0(%r4, %r2) +;; lg %r6, 0x38(%r2) +;; stc %r5, 0(%r4, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -45,7 +45,7 @@ ;; stg %r1, 0(%r15) ;; clgfi %r4, 0xffffffff ;; jgh 0x78 -;; lg %r2, 0x38(%r2) -;; llc %r2, 0(%r4, %r2) +;; lg %r6, 0x38(%r2) +;; llc %r2, 0(%r4, %r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat index ee9c6adaa0e8..f6ab577c9e60 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat @@ -30,8 +30,8 @@ ;; clgfi %r4, 0xffffefff ;; jgh 0x30 ;; ag %r4, 0x38(%r2) -;; lghi %r3, 0x1000 -;; stc %r5, 0(%r3, %r4) +;; lghi %r7, 0x1000 +;; stc %r5, 0(%r7, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,7 +47,7 @@ ;; clgfi %r4, 0xffffefff ;; jgh 0x7c ;; ag %r4, 0x38(%r2) -;; lghi %r3, 0x1000 -;; llc %r2, 0(%r3, %r4) +;; lghi %r7, 0x1000 +;; llc %r2, 0(%r7, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat index 8fec3d258ff3..db2fe252a60c 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -30,8 +30,8 @@ ;; clgfi %r4, 0xffff ;; jgh 0x30 ;; ag %r4, 0x38(%r2) -;; llilh %r3, 0xffff -;; stc %r5, 0(%r3, %r4) +;; llilh %r7, 0xffff +;; stc %r5, 0(%r7, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,7 +47,7 @@ ;; clgfi %r4, 0xffff ;; jgh 0x7c ;; ag %r4, 0x38(%r2) -;; llilh %r3, 0xffff -;; llc %r2, 0(%r3, %r4) +;; llilh %r7, 0xffff +;; llc %r2, 0(%r7, %r4) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat index 65c06c499a9b..a325faf41da9 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat @@ -27,11 +27,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r3, 0 +;; lghi %r6, 0 ;; lgr %r7, %r4 
;; ag %r7, 0x38(%r2) ;; clgfi %r4, 0xfffffffc -;; locgrh %r7, %r3 +;; locgrh %r7, %r6 ;; strv %r5, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 @@ -45,11 +45,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r3, 0 -;; lgr %r7, %r4 -;; ag %r7, 0x38(%r2) +;; lghi %r6, 0 +;; lgr %r5, %r4 +;; ag %r5, 0x38(%r2) ;; clgfi %r4, 0xfffffffc -;; locgrh %r7, %r3 -;; lrv %r2, 0(%r7) +;; locgrh %r5, %r6 +;; lrv %r2, 0(%r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat index e6c1878b7a50..028db52bacd5 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -27,13 +27,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r3, 0 -;; lgr %r6, %r4 -;; ag %r6, 0x38(%r2) -;; aghik %r2, %r6, 0x1000 +;; lghi %r7, 0 +;; lgr %r3, %r4 +;; ag %r3, 0x38(%r2) +;; aghik %r6, %r3, 0x1000 ;; clgfi %r4, 0xffffeffc -;; locgrh %r2, %r3 -;; strv %r5, 0(%r2) +;; locgrh %r6, %r7 +;; strv %r5, 0(%r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -46,12 +46,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r3, 0 -;; lgr %r5, %r4 -;; ag %r5, 0x38(%r2) -;; aghik %r2, %r5, 0x1000 +;; lghi %r7, 0 +;; lgr %r3, %r4 +;; ag %r3, 0x38(%r2) +;; aghik %r6, %r3, 0x1000 ;; clgfi %r4, 0xffffeffc -;; locgrh %r2, %r3 -;; lrv %r2, 0(%r2) +;; locgrh %r6, %r7 +;; lrv %r2, 0(%r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat index f991ba7d2479..424e1bde5ad5 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -27,14 +27,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r6, 0 -;; lgr %r7, %r4 -;; ag %r7, 0x38(%r2) +;; lghi %r3, 0 +;; lgr %r6, %r4 +;; ag %r6, 0x38(%r2) ;; llilh %r2, 0xffff -;; agrk %r3, %r7, %r2 +;; agrk %r7, %r6, %r2 ;; clgfi %r4, 0xfffc -;; locgrh %r3, %r6 -;; strv %r5, 0(%r3) +;; locgrh %r7, %r3 +;; strv %r5, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,13 +47,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r5, 0 -;; lgr %r6, %r4 -;; ag %r6, 0x38(%r2) -;; llilh %r7, 0xffff -;; agrk %r3, %r6, %r7 +;; lghi %r3, 0 +;; lgr %r5, %r4 +;; ag %r5, 0x38(%r2) +;; llilh %r2, 0xffff +;; agrk %r7, %r5, %r2 ;; clgfi %r4, 0xfffc -;; locgrh %r3, %r5 -;; lrv %r2, 0(%r3) +;; locgrh %r7, %r3 +;; lrv %r2, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat index 41adac7f6416..22ad5a12aead 100644 --- 
a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat @@ -27,11 +27,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r3, 0 +;; lghi %r6, 0 ;; lgr %r7, %r4 ;; ag %r7, 0x38(%r2) ;; clgfi %r4, 0xffffffff -;; locgrh %r7, %r3 +;; locgrh %r7, %r6 ;; stc %r5, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 @@ -45,11 +45,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r3, 0 -;; lgr %r7, %r4 -;; ag %r7, 0x38(%r2) +;; lghi %r6, 0 +;; lgr %r5, %r4 +;; ag %r5, 0x38(%r2) ;; clgfi %r4, 0xffffffff -;; locgrh %r7, %r3 -;; llc %r2, 0(%r7) +;; locgrh %r5, %r6 +;; llc %r2, 0(%r5) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat index 661674159dc7..c3ab3e384b12 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -27,13 +27,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r3, 0 -;; lgr %r6, %r4 -;; ag %r6, 0x38(%r2) -;; aghik %r2, %r6, 0x1000 +;; lghi %r7, 0 +;; lgr %r3, %r4 +;; ag %r3, 0x38(%r2) +;; aghik %r6, %r3, 0x1000 ;; clgfi %r4, 0xffffefff -;; locgrh %r2, %r3 -;; stc %r5, 0(%r2) +;; locgrh %r6, %r7 +;; stc %r5, 0(%r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -46,12 +46,12 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r3, 0 -;; lgr %r5, %r4 -;; ag %r5, 0x38(%r2) -;; aghik %r2, %r5, 0x1000 +;; lghi %r7, 0 +;; lgr %r3, %r4 +;; ag %r3, 0x38(%r2) +;; aghik %r6, %r3, 0x1000 ;; clgfi %r4, 0xffffefff -;; locgrh %r2, %r3 -;; llc %r2, 0(%r2) +;; locgrh %r6, %r7 +;; llc %r2, 0(%r6) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat index c0a1812909ff..04dccafa52c3 100644 --- a/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/s390x/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -27,14 +27,14 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r6, 0 -;; lgr %r7, %r4 -;; ag %r7, 0x38(%r2) +;; lghi %r3, 0 +;; lgr %r6, %r4 +;; ag %r6, 0x38(%r2) ;; llilh %r2, 0xffff -;; agrk %r3, %r7, %r2 +;; agrk %r7, %r6, %r2 ;; clgfi %r4, 0xffff -;; locgrh %r3, %r6 -;; stc %r5, 0(%r3) +;; locgrh %r7, %r3 +;; stc %r5, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -47,13 +47,13 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; lghi %r5, 0 -;; lgr %r6, %r4 -;; ag %r6, 0x38(%r2) -;; llilh %r7, 0xffff -;; agrk %r3, %r6, %r7 +;; lghi %r3, 0 +;; lgr %r5, %r4 +;; ag %r5, 0x38(%r2) +;; llilh %r2, 0xffff +;; agrk %r7, %r5, %r2 ;; clgfi %r4, 0xffff -;; locgrh %r3, %r5 -;; llc %r2, 0(%r3) +;; locgrh %r7, %r3 +;; llc %r2, 0(%r7) ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git 
a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat index 08e8358697b1..27c21f269e99 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat @@ -21,13 +21,13 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r8 -;; movl %edx, %r10d -;; subq $4, %r8 -;; cmpq %r8, %r10 +;; movq 0x40(%rdi), %rsi +;; movl %edx, %r8d +;; subq $4, %rsi +;; cmpq %rsi, %r8 ;; ja 0x25 -;; 18: movq 0x38(%rdi), %rsi -;; movl %ecx, (%rsi, %r10) +;; 18: movq 0x38(%rdi), %r9 +;; movl %ecx, (%r9, %r8) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -36,13 +36,13 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r8 -;; movl %edx, %r10d -;; subq $4, %r8 -;; cmpq %r8, %r10 +;; movq 0x40(%rdi), %rsi +;; movl %edx, %r8d +;; subq $4, %rsi +;; cmpq %rsi, %r8 ;; ja 0x65 -;; 58: movq 0x38(%rdi), %rsi -;; movl (%rsi, %r10), %eax +;; 58: movq 0x38(%rdi), %r9 +;; movl (%r9, %r8), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat index 4464a3f9b4af..80acc3fdb739 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat @@ -21,13 +21,13 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r8 -;; movl %edx, %r10d -;; subq $0x1004, %r8 -;; cmpq %r8, %r10 +;; movq 0x40(%rdi), %rsi +;; movl %edx, %r8d +;; subq $0x1004, %rsi +;; cmpq %rsi, %r8 ;; ja 0x2c -;; 1b: movq 0x38(%rdi), %rsi -;; movl %ecx, 0x1000(%rsi, %r10) +;; 1b: movq 0x38(%rdi), %r9 +;; movl %ecx, 0x1000(%r9, %r8) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -36,13 +36,13 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r8 -;; movl %edx, %r10d -;; subq $0x1004, %r8 -;; cmpq %r8, %r10 +;; movq 0x40(%rdi), %rsi +;; movl %edx, %r8d +;; subq $0x1004, %rsi +;; cmpq %rsi, %r8 ;; ja 0x6c -;; 5b: movq 0x38(%rdi), %rsi -;; movl 0x1000(%rsi, %r10), %eax +;; 5b: movq 0x38(%rdi), %r9 +;; movl 0x1000(%r9, %r8), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat index fddafcdb2bad..26b596c43394 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -21,15 +21,15 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; movq %r8, %r10 -;; addq 0x27(%rip), %r10 +;; movl %edx, %esi +;; movq %rsi, %r8 +;; addq 0x28(%rip), %r8 ;; jb 0x33 -;; 17: cmpq 0x40(%rdi), %r10 +;; 16: cmpq 0x40(%rdi), %r8 ;; ja 0x35 -;; 21: addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %edi -;; movl %ecx, (%r8, %rdi) +;; 20: addq 
0x38(%rdi), %rsi +;; movl $0xffff0000, %r10d +;; movl %ecx, (%rsi, %r10) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -40,15 +40,15 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; movq %r8, %r10 -;; addq 0x27(%rip), %r10 +;; movl %edx, %esi +;; movq %rsi, %r8 +;; addq 0x28(%rip), %r8 ;; jb 0x73 -;; 57: cmpq 0x40(%rdi), %r10 +;; 56: cmpq 0x40(%rdi), %r8 ;; ja 0x75 -;; 61: addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %edi -;; movl (%r8, %rdi), %eax +;; 60: addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %r10d +;; movl (%rsi, %r10), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat index df1371faf146..d6451d6a98a1 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat @@ -21,25 +21,25 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq 0x40(%rdi), %r8 -;; jae 0x1e -;; 11: movq 0x38(%rdi), %r10 -;; movb %cl, (%r10, %r8) +;; movl %edx, %esi +;; cmpq 0x40(%rdi), %rsi +;; jae 0x1c +;; 10: movq 0x38(%rdi), %rdi +;; movb %cl, (%rdi, %rsi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 1e: ud2 +;; 1c: ud2 ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq 0x40(%rdi), %r8 -;; jae 0x3f -;; 31: movq 0x38(%rdi), %r10 -;; movzbq (%r10, %r8), %rax +;; movl %edx, %esi +;; cmpq 0x40(%rdi), %rsi +;; jae 0x3e +;; 30: movq 0x38(%rdi), %rdi +;; movzbq (%rdi, %rsi), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 3f: ud2 +;; 3e: ud2 diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat index a07dc3d9832d..6fde4b25259f 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat @@ -21,13 +21,13 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r8 -;; movl %edx, %r10d -;; subq $0x1001, %r8 -;; cmpq %r8, %r10 +;; movq 0x40(%rdi), %rsi +;; movl %edx, %r8d +;; subq $0x1001, %rsi +;; cmpq %rsi, %r8 ;; ja 0x2c -;; 1b: movq 0x38(%rdi), %rsi -;; movb %cl, 0x1000(%rsi, %r10) +;; 1b: movq 0x38(%rdi), %r9 +;; movb %cl, 0x1000(%r9, %r8) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -36,13 +36,13 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r8 -;; movl %edx, %r10d -;; subq $0x1001, %r8 -;; cmpq %r8, %r10 +;; movq 0x40(%rdi), %rsi +;; movl %edx, %r8d +;; subq $0x1001, %rsi +;; cmpq %rsi, %r8 ;; ja 0x6d -;; 5b: movq 0x38(%rdi), %rsi -;; movzbq 0x1000(%rsi, %r10), %rax +;; 5b: movq 0x38(%rdi), %r9 +;; movzbq 0x1000(%r9, %r8), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat index cc0f18ddd16b..c46bdd18f671 100644 --- 
a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -21,15 +21,15 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; movq %r8, %r10 -;; addq 0x27(%rip), %r10 +;; movl %edx, %esi +;; movq %rsi, %r8 +;; addq 0x28(%rip), %r8 ;; jb 0x33 -;; 17: cmpq 0x40(%rdi), %r10 +;; 16: cmpq 0x40(%rdi), %r8 ;; ja 0x35 -;; 21: addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %edi -;; movb %cl, (%r8, %rdi) +;; 20: addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %r10d +;; movb %cl, (%rsi, %r10) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -43,15 +43,15 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; movq %r8, %r10 -;; addq 0x27(%rip), %r10 +;; movl %edx, %esi +;; movq %rsi, %r8 +;; addq 0x28(%rip), %r8 ;; jb 0x74 -;; 57: cmpq 0x40(%rdi), %r10 +;; 56: cmpq 0x40(%rdi), %r8 ;; ja 0x76 -;; 61: addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %edi -;; movzbq (%r8, %rdi), %rax +;; 60: addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %r10d +;; movzbq (%rsi, %r10), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat index 96fd84fa4e14..6698a502ad8a 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat @@ -21,15 +21,15 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r11 -;; movl %edx, %eax -;; subq $4, %r11 -;; xorq %rdx, %rdx -;; movq %rax, %rsi -;; addq 0x38(%rdi), %rsi -;; cmpq %r11, %rax -;; cmovaq %rdx, %rsi -;; movl %ecx, (%rsi) +;; movq 0x40(%rdi), %r8 +;; movl %edx, %r10d +;; subq $4, %r8 +;; xorq %r11, %r11 +;; movq %r10, %r9 +;; addq 0x38(%rdi), %r9 +;; cmpq %r8, %r10 +;; cmovaq %r11, %r9 +;; movl %ecx, (%r9) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -37,15 +37,15 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r11 -;; movl %edx, %eax -;; subq $4, %r11 -;; xorq %rcx, %rcx -;; movq %rax, %rsi -;; addq 0x38(%rdi), %rsi -;; cmpq %r11, %rax -;; cmovaq %rcx, %rsi -;; movl (%rsi), %eax +;; movq 0x40(%rdi), %r8 +;; movl %edx, %r10d +;; subq $4, %r8 +;; xorq %r11, %r11 +;; movq %r10, %r9 +;; addq 0x38(%rdi), %r9 +;; cmpq %r8, %r10 +;; cmovaq %r11, %r9 +;; movl (%r9), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat index 21ce00a4792c..17c2761d60cc 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -21,15 +21,15 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r11 -;; movq 0x38(%rdi), %rax -;; movl %edx, %edx -;; subq $0x1004, %r11 -;; xorq %r8, %r8 -;; leaq 0x1000(%rax, %rdx), %rdi -;; cmpq %r11, %rdx -;; cmovaq %r8, %rdi -;; movl %ecx, (%rdi) +;; movq 0x40(%rdi), %r8 +;; movq 
0x38(%rdi), %r11 +;; movl %edx, %eax +;; subq $0x1004, %r8 +;; xorq %rdx, %rdx +;; leaq 0x1000(%r11, %rax), %r10 +;; cmpq %r8, %rax +;; cmovaq %rdx, %r10 +;; movl %ecx, (%r10) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -37,15 +37,15 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r11 -;; movq 0x38(%rdi), %rax -;; movl %edx, %ecx -;; subq $0x1004, %r11 -;; xorq %rdx, %rdx -;; leaq 0x1000(%rax, %rcx), %rdi -;; cmpq %r11, %rcx -;; cmovaq %rdx, %rdi -;; movl (%rdi), %eax +;; movq 0x40(%rdi), %r8 +;; movq 0x38(%rdi), %r11 +;; movl %edx, %eax +;; subq $0x1004, %r8 +;; xorq %rcx, %rcx +;; leaq 0x1000(%r11, %rax), %r10 +;; cmpq %r8, %rax +;; cmovaq %rcx, %r10 +;; movl (%r10), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat index 3bf27bda4bcb..b727c1f0bf72 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -21,45 +21,45 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r10d -;; movq %r10, %rdx -;; addq 0x2f(%rip), %rdx -;; jb 0x3a -;; 17: movq 0x40(%rdi), %r9 -;; xorq %r8, %r8 -;; addq 0x38(%rdi), %r10 -;; movl $0xffff0000, %r11d -;; addq %r11, %r10 -;; cmpq %r9, %rdx -;; cmovaq %r8, %r10 -;; movl %ecx, (%r10) +;; movq %rdi, %rsi +;; movl %edx, %edi +;; movq %rdi, %rax +;; addq 0x2d(%rip), %rax +;; jb 0x3e +;; 19: movq %rsi, %r8 +;; movq 0x40(%r8), %rsi +;; xorq %rdx, %rdx +;; addq 0x38(%r8), %rdi +;; movl $0xffff0000, %r8d +;; addq %r8, %rdi +;; cmpq %rsi, %rax +;; cmovaq %rdx, %rdi +;; movl %ecx, (%rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 3a: ud2 -;; 3c: addb %al, (%rax) -;; 3e: addb %al, (%rax) +;; 3e: ud2 ;; 40: addb $0, %al ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r10d -;; movq %r10, %rcx -;; addq 0x2f(%rip), %rcx -;; jb 0x9a -;; 77: movq 0x40(%rdi), %r8 -;; xorq %rdx, %rdx -;; addq 0x38(%rdi), %r10 -;; movl $0xffff0000, %r9d -;; addq %r10, %r9 -;; cmpq %r8, %rcx -;; cmovaq %rdx, %r9 -;; movl (%r9), %eax +;; movq %rdi, %rcx +;; movl %edx, %edi +;; movq %rdi, %rax +;; addq 0x2d(%rip), %rax +;; jb 0x9d +;; 79: movq %rcx, %rsi +;; movq 0x40(%rsi), %rdx +;; xorq %rcx, %rcx +;; addq 0x38(%rsi), %rdi +;; movl $0xffff0000, %esi +;; addq %rdi, %rsi +;; cmpq %rdx, %rax +;; cmovaq %rcx, %rsi +;; movl (%rsi), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 9a: ud2 -;; 9c: addb %al, (%rax) -;; 9e: addb %al, (%rax) -;; a0: addb $0, %al +;; 9d: ud2 +;; 9f: addb %al, (%rax, %rax) diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat index e02853002d25..7e1864503007 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat @@ -21,14 +21,14 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rsi -;; movl %edx, %edx -;; xorq %rax, %rax -;; movq %rdx, %r11 -;; addq 0x38(%rdi), %r11 -;; cmpq %rsi, %rdx -;; cmovaeq 
%rax, %r11 -;; movb %cl, (%r11) +;; movq 0x40(%rdi), %r9 +;; movl %edx, %r11d +;; xorq %r10, %r10 +;; movq %r11, %r8 +;; addq 0x38(%rdi), %r8 +;; cmpq %r9, %r11 +;; cmovaeq %r10, %r8 +;; movb %cl, (%r8) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -36,14 +36,14 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rsi -;; movl %edx, %ecx -;; xorq %rax, %rax -;; movq %rcx, %r11 -;; addq 0x38(%rdi), %r11 -;; cmpq %rsi, %rcx -;; cmovaeq %rax, %r11 -;; movzbq (%r11), %rax +;; movq 0x40(%rdi), %r9 +;; movl %edx, %r11d +;; xorq %r10, %r10 +;; movq %r11, %r8 +;; addq 0x38(%rdi), %r8 +;; cmpq %r9, %r11 +;; cmovaeq %r10, %r8 +;; movzbq (%r8), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat index c8b2c75a8c8f..6c3e21af44ec 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -21,15 +21,15 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r11 -;; movq 0x38(%rdi), %rax -;; movl %edx, %edx -;; subq $0x1001, %r11 -;; xorq %r8, %r8 -;; leaq 0x1000(%rax, %rdx), %rdi -;; cmpq %r11, %rdx -;; cmovaq %r8, %rdi -;; movb %cl, (%rdi) +;; movq 0x40(%rdi), %r8 +;; movq 0x38(%rdi), %r11 +;; movl %edx, %eax +;; subq $0x1001, %r8 +;; xorq %rdx, %rdx +;; leaq 0x1000(%r11, %rax), %r10 +;; cmpq %r8, %rax +;; cmovaq %rdx, %r10 +;; movb %cl, (%r10) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -37,15 +37,15 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r11 -;; movq 0x38(%rdi), %rax -;; movl %edx, %ecx -;; subq $0x1001, %r11 -;; xorq %rdx, %rdx -;; leaq 0x1000(%rax, %rcx), %rdi -;; cmpq %r11, %rcx -;; cmovaq %rdx, %rdi -;; movzbq (%rdi), %rax +;; movq 0x40(%rdi), %r8 +;; movq 0x38(%rdi), %r11 +;; movl %edx, %eax +;; subq $0x1001, %r8 +;; xorq %rcx, %rcx +;; leaq 0x1000(%r11, %rax), %r10 +;; cmpq %r8, %rax +;; cmovaq %rcx, %r10 +;; movzbq (%r10), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat index 7221604c5bb2..eaa4144b7115 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -21,47 +21,51 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r10d -;; movq %r10, %rdx -;; addq 0x2f(%rip), %rdx -;; jb 0x3a -;; 17: movq 0x40(%rdi), %r9 -;; xorq %r8, %r8 -;; addq 0x38(%rdi), %r10 -;; movl $0xffff0000, %r11d -;; addq %r11, %r10 -;; cmpq %r9, %rdx -;; cmovaq %r8, %r10 -;; movb %cl, (%r10) +;; movq %rdi, %rsi +;; movl %edx, %edi +;; movq %rdi, %rax +;; addq 0x2d(%rip), %rax +;; jb 0x3e +;; 19: movq %rsi, %r8 +;; movq 0x40(%r8), %rsi +;; xorq %rdx, %rdx +;; addq 0x38(%r8), %rdi +;; movl $0xffff0000, %r8d +;; addq %r8, %rdi +;; cmpq %rsi, %rax +;; cmovaq %rdx, %rdi +;; movb %cl, (%rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 3a: ud2 -;; 3c: addb %al, (%rax) -;; 3e: addb %al, 
(%rax) +;; 3e: ud2 ;; 40: addl %eax, (%rax) ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r10d -;; movq %r10, %rcx -;; addq 0x2f(%rip), %rcx -;; jb 0x9b -;; 77: movq 0x40(%rdi), %r8 -;; xorq %rdx, %rdx -;; addq 0x38(%rdi), %r10 -;; movl $0xffff0000, %r9d -;; addq %r10, %r9 -;; cmpq %r8, %rcx -;; cmovaq %rdx, %r9 -;; movzbq (%r9), %rax +;; movq %rdi, %rcx +;; movl %edx, %edi +;; movq %rdi, %rax +;; addq 0x35(%rip), %rax +;; jb 0x9f +;; 79: movq %rcx, %rsi +;; movq 0x40(%rsi), %rdx +;; xorq %rcx, %rcx +;; addq 0x38(%rsi), %rdi +;; movl $0xffff0000, %esi +;; addq %rdi, %rsi +;; cmpq %rdx, %rax +;; cmovaq %rcx, %rsi +;; movzbq (%rsi), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 9b: ud2 -;; 9d: addb %al, (%rax) -;; 9f: addb %al, (%rcx) -;; a1: addb %bh, %bh -;; a3: incl (%rax) +;; 9f: ud2 +;; a1: addb %al, (%rax) +;; a3: addb %al, (%rax) ;; a5: addb %al, (%rax) +;; a7: addb %al, (%rcx) +;; a9: addb %bh, %bh +;; ab: incl (%rax) +;; ad: addb %al, (%rax) diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat index 09265b5e4478..8e58b1686fc4 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat @@ -21,25 +21,25 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq 0x40(%rdi), %r8 -;; ja 0x1e -;; 11: movq 0x38(%rdi), %r10 -;; movl %ecx, (%r10, %r8) +;; movl %edx, %esi +;; cmpq 0x40(%rdi), %rsi +;; ja 0x1c +;; 10: movq 0x38(%rdi), %rdi +;; movl %ecx, (%rdi, %rsi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 1e: ud2 +;; 1c: ud2 ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq 0x40(%rdi), %r8 -;; ja 0x3e -;; 31: movq 0x38(%rdi), %r10 -;; movl (%r10, %r8), %eax +;; movl %edx, %esi +;; cmpq 0x40(%rdi), %rsi +;; ja 0x3c +;; 30: movq 0x38(%rdi), %rdi +;; movl (%rdi, %rsi), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 3e: ud2 +;; 3c: ud2 diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat index 0095eb030536..7ac4b1842a45 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat @@ -21,25 +21,25 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq 0x40(%rdi), %r8 -;; ja 0x22 -;; 11: movq 0x38(%rdi), %r10 -;; movl %ecx, 0x1000(%r10, %r8) +;; movl %edx, %esi +;; cmpq 0x40(%rdi), %rsi +;; ja 0x20 +;; 10: movq 0x38(%rdi), %rdi +;; movl %ecx, 0x1000(%rdi, %rsi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 22: ud2 +;; 20: ud2 ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq 0x40(%rdi), %r8 -;; ja 0x62 -;; 51: movq 0x38(%rdi), %r10 -;; movl 0x1000(%r10, %r8), %eax +;; movl %edx, %esi +;; cmpq 0x40(%rdi), %rsi +;; ja 0x60 +;; 50: movq 0x38(%rdi), %rdi +;; movl 0x1000(%rdi, %rsi), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 62: ud2 +;; 60: ud2 diff --git 
a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat index d37de7a60e65..663ca03df4b6 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -21,27 +21,27 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq 0x40(%rdi), %r8 -;; ja 0x24 -;; 11: addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %r11d -;; movl %ecx, (%r8, %r11) +;; movl %edx, %esi +;; cmpq 0x40(%rdi), %rsi +;; ja 0x23 +;; 10: addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %r8d +;; movl %ecx, (%rsi, %r8) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 24: ud2 +;; 23: ud2 ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq 0x40(%rdi), %r8 -;; ja 0x64 -;; 51: addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %r11d -;; movl (%r8, %r11), %eax +;; movl %edx, %esi +;; cmpq 0x40(%rdi), %rsi +;; ja 0x63 +;; 50: addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %r8d +;; movl (%rsi, %r8), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 64: ud2 +;; 63: ud2 diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat index 373cdecb5011..4dd57842c3af 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat @@ -21,25 +21,25 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq 0x40(%rdi), %r8 -;; jae 0x1e -;; 11: movq 0x38(%rdi), %r10 -;; movb %cl, (%r10, %r8) +;; movl %edx, %esi +;; cmpq 0x40(%rdi), %rsi +;; jae 0x1c +;; 10: movq 0x38(%rdi), %rdi +;; movb %cl, (%rdi, %rsi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 1e: ud2 +;; 1c: ud2 ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq 0x40(%rdi), %r8 -;; jae 0x3f -;; 31: movq 0x38(%rdi), %r10 -;; movzbq (%r10, %r8), %rax +;; movl %edx, %esi +;; cmpq 0x40(%rdi), %rsi +;; jae 0x3e +;; 30: movq 0x38(%rdi), %rdi +;; movzbq (%rdi, %rsi), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 3f: ud2 +;; 3e: ud2 diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat index c24ec472873e..10754921fa46 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat @@ -21,25 +21,25 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq 0x40(%rdi), %r8 -;; ja 0x22 -;; 11: movq 0x38(%rdi), %r10 -;; movb %cl, 0x1000(%r10, %r8) +;; movl %edx, %esi +;; cmpq 0x40(%rdi), %rsi +;; ja 0x20 +;; 10: movq 0x38(%rdi), %rdi +;; movb %cl, 0x1000(%rdi, %rsi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 22: ud2 +;; 20: ud2 ;; ;; 
wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq 0x40(%rdi), %r8 -;; ja 0x63 -;; 51: movq 0x38(%rdi), %r10 -;; movzbq 0x1000(%r10, %r8), %rax +;; movl %edx, %esi +;; cmpq 0x40(%rdi), %rsi +;; ja 0x62 +;; 50: movq 0x38(%rdi), %rdi +;; movzbq 0x1000(%rdi, %rsi), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 63: ud2 +;; 62: ud2 diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat index a4d8e9579df9..40d17e4aa409 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -21,27 +21,27 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq 0x40(%rdi), %r8 -;; ja 0x24 -;; 11: addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %r11d -;; movb %cl, (%r8, %r11) +;; movl %edx, %esi +;; cmpq 0x40(%rdi), %rsi +;; ja 0x23 +;; 10: addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %r8d +;; movb %cl, (%rsi, %r8) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 24: ud2 +;; 23: ud2 ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq 0x40(%rdi), %r8 -;; ja 0x65 -;; 51: addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %r11d -;; movzbq (%r8, %r11), %rax +;; movl %edx, %esi +;; cmpq 0x40(%rdi), %rsi +;; ja 0x64 +;; 50: addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %r8d +;; movzbq (%rsi, %r8), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 65: ud2 +;; 64: ud2 diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat index ea4dd5917103..2dd21c33b079 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat @@ -21,14 +21,14 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rsi -;; movl %edx, %edx -;; xorq %rax, %rax -;; movq %rdx, %r11 -;; addq 0x38(%rdi), %r11 -;; cmpq %rsi, %rdx -;; cmovaq %rax, %r11 -;; movl %ecx, (%r11) +;; movq 0x40(%rdi), %r9 +;; movl %edx, %r11d +;; xorq %r10, %r10 +;; movq %r11, %r8 +;; addq 0x38(%rdi), %r8 +;; cmpq %r9, %r11 +;; cmovaq %r10, %r8 +;; movl %ecx, (%r8) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -36,14 +36,14 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rsi -;; movl %edx, %ecx -;; xorq %rax, %rax -;; movq %rcx, %r11 -;; addq 0x38(%rdi), %r11 -;; cmpq %rsi, %rcx -;; cmovaq %rax, %r11 -;; movl (%r11), %eax +;; movq 0x40(%rdi), %r9 +;; movl %edx, %r11d +;; xorq %r10, %r10 +;; movq %r11, %r8 +;; addq 0x38(%rdi), %r8 +;; cmpq %r9, %r11 +;; cmovaq %r10, %r8 +;; movl (%r8), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat index ec5440c1378f..b4550c689647 100644 --- 
a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -22,13 +22,13 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; movq 0x40(%rdi), %r10 -;; movq 0x38(%rdi), %r9 -;; movl %edx, %r8d -;; xorq %rax, %rax -;; leaq 0x1000(%r9, %r8), %rsi -;; cmpq %r10, %r8 -;; cmovaq %rax, %rsi -;; movl %ecx, (%rsi) +;; movq 0x38(%rdi), %rax +;; movl %edx, %edx +;; xorq %r11, %r11 +;; leaq 0x1000(%rax, %rdx), %r9 +;; cmpq %r10, %rdx +;; cmovaq %r11, %r9 +;; movl %ecx, (%r9) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -36,14 +36,14 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r8 -;; movq 0x38(%rdi), %rcx -;; movl %edx, %edx -;; xorq %rax, %rax -;; leaq 0x1000(%rcx, %rdx), %rsi -;; cmpq %r8, %rdx -;; cmovaq %rax, %rsi -;; movl (%rsi), %eax +;; movq 0x40(%rdi), %r10 +;; movq 0x38(%rdi), %rax +;; movl %edx, %ecx +;; xorq %r11, %r11 +;; leaq 0x1000(%rax, %rcx), %r9 +;; cmpq %r10, %rcx +;; cmovaq %r11, %r9 +;; movl (%r9), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat index 26b64f787866..72d120e6996a 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -21,16 +21,16 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rax -;; movl %edx, %r8d -;; xorq %rdx, %rdx -;; movq %r8, %r9 -;; addq 0x38(%rdi), %r9 -;; movl $0xffff0000, %r10d -;; leaq (%r9, %r10), %rdi -;; cmpq %rax, %r8 -;; cmovaq %rdx, %rdi -;; movl %ecx, (%rdi) +;; movq 0x40(%rdi), %r11 +;; movl %edx, %edx +;; xorq %rax, %rax +;; movq %rdx, %rsi +;; addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %edi +;; leaq (%rsi, %rdi), %r10 +;; cmpq %r11, %rdx +;; cmovaq %rax, %r10 +;; movl %ecx, (%r10) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -38,16 +38,16 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rax -;; movl %edx, %edx -;; xorq %rcx, %rcx -;; movq %rdx, %r8 -;; addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %r9d -;; leaq (%r8, %r9), %rdi -;; cmpq %rax, %rdx -;; cmovaq %rcx, %rdi -;; movl (%rdi), %eax +;; movq 0x40(%rdi), %r11 +;; movl %edx, %ecx +;; xorq %rax, %rax +;; movq %rcx, %rdx +;; addq 0x38(%rdi), %rdx +;; movl $0xffff0000, %esi +;; leaq (%rdx, %rsi), %r10 +;; cmpq %r11, %rcx +;; cmovaq %rax, %r10 +;; movl (%r10), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat index d2a169437f60..b65829a1f3c6 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat @@ -21,14 +21,14 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rsi -;; movl %edx, %edx -;; xorq %rax, %rax 
-;; movq %rdx, %r11 -;; addq 0x38(%rdi), %r11 -;; cmpq %rsi, %rdx -;; cmovaeq %rax, %r11 -;; movb %cl, (%r11) +;; movq 0x40(%rdi), %r9 +;; movl %edx, %r11d +;; xorq %r10, %r10 +;; movq %r11, %r8 +;; addq 0x38(%rdi), %r8 +;; cmpq %r9, %r11 +;; cmovaeq %r10, %r8 +;; movb %cl, (%r8) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -36,14 +36,14 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rsi -;; movl %edx, %ecx -;; xorq %rax, %rax -;; movq %rcx, %r11 -;; addq 0x38(%rdi), %r11 -;; cmpq %rsi, %rcx -;; cmovaeq %rax, %r11 -;; movzbq (%r11), %rax +;; movq 0x40(%rdi), %r9 +;; movl %edx, %r11d +;; xorq %r10, %r10 +;; movq %r11, %r8 +;; addq 0x38(%rdi), %r8 +;; cmpq %r9, %r11 +;; cmovaeq %r10, %r8 +;; movzbq (%r8), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat index 7ac59ec76e44..e216a9dfd10f 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -22,13 +22,13 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; movq 0x40(%rdi), %r10 -;; movq 0x38(%rdi), %r9 -;; movl %edx, %r8d -;; xorq %rax, %rax -;; leaq 0x1000(%r9, %r8), %rsi -;; cmpq %r10, %r8 -;; cmovaq %rax, %rsi -;; movb %cl, (%rsi) +;; movq 0x38(%rdi), %rax +;; movl %edx, %edx +;; xorq %r11, %r11 +;; leaq 0x1000(%rax, %rdx), %r9 +;; cmpq %r10, %rdx +;; cmovaq %r11, %r9 +;; movb %cl, (%r9) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -36,14 +36,14 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r8 -;; movq 0x38(%rdi), %rcx -;; movl %edx, %edx -;; xorq %rax, %rax -;; leaq 0x1000(%rcx, %rdx), %rsi -;; cmpq %r8, %rdx -;; cmovaq %rax, %rsi -;; movzbq (%rsi), %rax +;; movq 0x40(%rdi), %r10 +;; movq 0x38(%rdi), %rax +;; movl %edx, %ecx +;; xorq %r11, %r11 +;; leaq 0x1000(%rax, %rcx), %r9 +;; cmpq %r10, %rcx +;; cmovaq %r11, %r9 +;; movzbq (%r9), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat index b46837068910..b86151c596bb 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -21,16 +21,16 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rax -;; movl %edx, %r8d -;; xorq %rdx, %rdx -;; movq %r8, %r9 -;; addq 0x38(%rdi), %r9 -;; movl $0xffff0000, %r10d -;; leaq (%r9, %r10), %rdi -;; cmpq %rax, %r8 -;; cmovaq %rdx, %rdi -;; movb %cl, (%rdi) +;; movq 0x40(%rdi), %r11 +;; movl %edx, %edx +;; xorq %rax, %rax +;; movq %rdx, %rsi +;; addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %edi +;; leaq (%rsi, %rdi), %r10 +;; cmpq %r11, %rdx +;; cmovaq %rax, %r10 +;; movb %cl, (%r10) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -38,16 +38,16 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rax -;; movl %edx, %edx -;; xorq %rcx, %rcx -;; movq %rdx, 
%r8 -;; addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %r9d -;; leaq (%r8, %r9), %rdi -;; cmpq %rax, %rdx -;; cmovaq %rcx, %rdi -;; movzbq (%rdi), %rax +;; movq 0x40(%rdi), %r11 +;; movl %edx, %ecx +;; xorq %rax, %rax +;; movq %rcx, %rdx +;; addq 0x38(%rdi), %rdx +;; movl $0xffff0000, %esi +;; leaq (%rdx, %rsi), %r10 +;; cmpq %r11, %rcx +;; cmovaq %rax, %r10 +;; movzbq (%r10), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat index 9f06443c6856..b784250cd119 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat @@ -21,12 +21,12 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r8 -;; subq $4, %r8 -;; cmpq %r8, %rdx +;; movq 0x40(%rdi), %rsi +;; subq $4, %rsi +;; cmpq %rsi, %rdx ;; ja 0x22 -;; 15: movq 0x38(%rdi), %r11 -;; movl %ecx, (%r11, %rdx) +;; 15: movq 0x38(%rdi), %r8 +;; movl %ecx, (%r8, %rdx) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -35,12 +35,12 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r8 -;; subq $4, %r8 -;; cmpq %r8, %rdx +;; movq 0x40(%rdi), %rsi +;; subq $4, %rsi +;; cmpq %rsi, %rdx ;; ja 0x62 -;; 55: movq 0x38(%rdi), %r11 -;; movl (%r11, %rdx), %eax +;; 55: movq 0x38(%rdi), %r8 +;; movl (%r8, %rdx), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat index 699239f916c9..d9d1e1bc15ea 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat @@ -21,12 +21,12 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r8 -;; subq $0x1004, %r8 -;; cmpq %r8, %rdx +;; movq 0x40(%rdi), %rsi +;; subq $0x1004, %rsi +;; cmpq %rsi, %rdx ;; ja 0x29 -;; 18: movq 0x38(%rdi), %r11 -;; movl %ecx, 0x1000(%r11, %rdx) +;; 18: movq 0x38(%rdi), %r8 +;; movl %ecx, 0x1000(%r8, %rdx) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -35,12 +35,12 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r8 -;; subq $0x1004, %r8 -;; cmpq %r8, %rdx +;; movq 0x40(%rdi), %rsi +;; subq $0x1004, %rsi +;; cmpq %rsi, %rdx ;; ja 0x69 -;; 58: movq 0x38(%rdi), %r11 -;; movl 0x1000(%r11, %rdx), %eax +;; 58: movq 0x38(%rdi), %r8 +;; movl 0x1000(%r8, %rdx), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat index 71418f1b1266..a143e0ae017b 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -21,39 +21,37 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq %rdx, %r9 -;; addq 
0x2a(%rip), %r9 -;; jb 0x2f -;; 14: cmpq 0x40(%rdi), %r9 -;; ja 0x31 +;; movq %rdx, %rsi +;; addq 0x2a(%rip), %rsi +;; jb 0x31 +;; 14: cmpq 0x40(%rdi), %rsi +;; ja 0x33 ;; 1e: addq 0x38(%rdi), %rdx -;; movl $0xffff0000, %esi -;; movl %ecx, (%rdx, %rsi) +;; movl $0xffff0000, %r9d +;; movl %ecx, (%rdx, %r9) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 2f: ud2 ;; 31: ud2 -;; 33: addb %al, (%rax) +;; 33: ud2 ;; 35: addb %al, (%rax) ;; 37: addb %al, (%rax, %rax) ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq %rdx, %r9 -;; addq 0x2a(%rip), %r9 -;; jb 0x6f -;; 54: cmpq 0x40(%rdi), %r9 -;; ja 0x71 +;; movq %rdx, %rsi +;; addq 0x2a(%rip), %rsi +;; jb 0x71 +;; 54: cmpq 0x40(%rdi), %rsi +;; ja 0x73 ;; 5e: addq 0x38(%rdi), %rdx -;; movl $0xffff0000, %esi -;; movl (%rdx, %rsi), %eax +;; movl $0xffff0000, %r9d +;; movl (%rdx, %r9), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 6f: ud2 ;; 71: ud2 -;; 73: addb %al, (%rax) +;; 73: ud2 ;; 75: addb %al, (%rax) ;; 77: addb %al, (%rax, %rax) diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat index ddb64a5c1bed..977669f1d23d 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat @@ -22,21 +22,21 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x40(%rdi), %rdx -;; jae 0x1b -;; e: movq 0x38(%rdi), %r9 -;; movb %cl, (%r9, %rdx) +;; jae 0x1a +;; e: movq 0x38(%rdi), %rsi +;; movb %cl, (%rsi, %rdx) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 1b: ud2 +;; 1a: ud2 ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x40(%rdi), %rdx ;; jae 0x3c -;; 2e: movq 0x38(%rdi), %r9 -;; movzbq (%r9, %rdx), %rax +;; 2e: movq 0x38(%rdi), %rsi +;; movzbq (%rsi, %rdx), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat index 37dbfb1e0ca7..b5d1bbb55eb5 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat @@ -21,12 +21,12 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r8 -;; subq $0x1001, %r8 -;; cmpq %r8, %rdx +;; movq 0x40(%rdi), %rsi +;; subq $0x1001, %rsi +;; cmpq %rsi, %rdx ;; ja 0x29 -;; 18: movq 0x38(%rdi), %r11 -;; movb %cl, 0x1000(%r11, %rdx) +;; 18: movq 0x38(%rdi), %r8 +;; movb %cl, 0x1000(%r8, %rdx) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -35,12 +35,12 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r8 -;; subq $0x1001, %r8 -;; cmpq %r8, %rdx +;; movq 0x40(%rdi), %rsi +;; subq $0x1001, %rsi +;; cmpq %rsi, %rdx ;; ja 0x6a -;; 58: movq 0x38(%rdi), %r11 -;; movzbq 0x1000(%r11, %rdx), %rax +;; 58: movq 0x38(%rdi), %r8 +;; movzbq 0x1000(%r8, %rdx), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat index 
f197c06be19f..6e7979f01710 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -21,20 +21,19 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq %rdx, %r9 -;; addq 0x2a(%rip), %r9 -;; jb 0x2f -;; 14: cmpq 0x40(%rdi), %r9 -;; ja 0x31 +;; movq %rdx, %rsi +;; addq 0x2a(%rip), %rsi +;; jb 0x31 +;; 14: cmpq 0x40(%rdi), %rsi +;; ja 0x33 ;; 1e: addq 0x38(%rdi), %rdx -;; movl $0xffff0000, %esi -;; movb %cl, (%rdx, %rsi) +;; movl $0xffff0000, %r9d +;; movb %cl, (%rdx, %r9) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 2f: ud2 ;; 31: ud2 -;; 33: addb %al, (%rax) +;; 33: ud2 ;; 35: addb %al, (%rax) ;; 37: addb %al, (%rcx) ;; 39: addb %bh, %bh @@ -44,21 +43,18 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq %rdx, %r9 -;; addq 0x2a(%rip), %r9 -;; jb 0x71 -;; 54: cmpq 0x40(%rdi), %r9 -;; ja 0x73 +;; movq %rdx, %rsi +;; addq 0x2a(%rip), %rsi +;; jb 0x72 +;; 54: cmpq 0x40(%rdi), %rsi +;; ja 0x74 ;; 5e: addq 0x38(%rdi), %rdx -;; movl $0xffff0000, %esi -;; movzbq (%rdx, %rsi), %rax +;; movl $0xffff0000, %r9d +;; movzbq (%rdx, %r9), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 71: ud2 -;; 73: ud2 -;; 75: addb %al, (%rax) -;; 77: addb %al, (%rcx) -;; 79: addb %bh, %bh -;; 7b: incl (%rax) -;; 7d: addb %al, (%rax) +;; 72: ud2 +;; 74: ud2 +;; 76: addb %al, (%rax) +;; 78: addl %eax, (%rax) diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat index a753072375c0..72d0acb6e4bb 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat @@ -21,14 +21,14 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r11 -;; subq $4, %r11 -;; xorq %rsi, %rsi -;; movq %rdx, %rax -;; addq 0x38(%rdi), %rax -;; cmpq %r11, %rdx -;; cmovaq %rsi, %rax -;; movl %ecx, (%rax) +;; movq 0x40(%rdi), %r8 +;; subq $4, %r8 +;; xorq %r9, %r9 +;; movq %rdx, %r10 +;; addq 0x38(%rdi), %r10 +;; cmpq %r8, %rdx +;; cmovaq %r9, %r10 +;; movl %ecx, (%r10) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -36,14 +36,14 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r11 -;; subq $4, %r11 -;; xorq %rsi, %rsi -;; movq %rdx, %rax -;; addq 0x38(%rdi), %rax -;; cmpq %r11, %rdx -;; cmovaq %rsi, %rax -;; movl (%rax), %eax +;; movq 0x40(%rdi), %r8 +;; subq $4, %r8 +;; xorq %r9, %r9 +;; movq %rdx, %r10 +;; addq 0x38(%rdi), %r10 +;; cmpq %r8, %rdx +;; cmovaq %r9, %r10 +;; movl (%r10), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat index d719e745dfd7..465d104ab595 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -21,14 +21,14 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r11 -;; movq 
0x38(%rdi), %rax -;; subq $0x1004, %r11 -;; xorq %rdi, %rdi -;; leaq 0x1000(%rax, %rdx), %rsi -;; cmpq %r11, %rdx -;; cmovaq %rdi, %rsi -;; movl %ecx, (%rsi) +;; movq 0x40(%rdi), %r8 +;; movq 0x38(%rdi), %r11 +;; subq $0x1004, %r8 +;; xorq %r10, %r10 +;; leaq 0x1000(%r11, %rdx), %r9 +;; cmpq %r8, %rdx +;; cmovaq %r10, %r9 +;; movl %ecx, (%r9) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -36,14 +36,14 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r11 -;; movq 0x38(%rdi), %rax -;; subq $0x1004, %r11 -;; xorq %rdi, %rdi -;; leaq 0x1000(%rax, %rdx), %rsi -;; cmpq %r11, %rdx -;; cmovaq %rdi, %rsi -;; movl (%rsi), %eax +;; movq 0x40(%rdi), %r8 +;; movq 0x38(%rdi), %r11 +;; subq $0x1004, %r8 +;; xorq %r10, %r10 +;; leaq 0x1000(%r11, %rdx), %r9 +;; cmpq %r8, %rdx +;; cmovaq %r10, %r9 +;; movl (%r9), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat index 95a78dd48386..7a6a306f6d1d 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -21,39 +21,39 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq %rdx, %rax -;; addq 0x2a(%rip), %rax -;; jb 0x36 -;; 14: movq 0x40(%rdi), %r9 -;; xorq %r8, %r8 +;; movq %rdx, %r11 +;; addq 0x2a(%rip), %r11 +;; jb 0x35 +;; 14: movq 0x40(%rdi), %rsi +;; xorq %rax, %rax ;; addq 0x38(%rdi), %rdx -;; movl $0xffff0000, %r10d -;; addq %r10, %rdx -;; cmpq %r9, %rax -;; cmovaq %r8, %rdx +;; movl $0xffff0000, %edi +;; addq %rdi, %rdx +;; cmpq %rsi, %r11 +;; cmovaq %rax, %rdx ;; movl %ecx, (%rdx) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 36: ud2 -;; 38: addb $0, %al +;; 35: ud2 +;; 37: addb %al, (%rax, %rax) ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq %rdx, %rax -;; addq 0x2a(%rip), %rax -;; jb 0x76 -;; 54: movq 0x40(%rdi), %r8 -;; xorq %rcx, %rcx +;; movq %rdx, %r11 +;; addq 0x2a(%rip), %r11 +;; jb 0x75 +;; 54: movq 0x40(%rdi), %rcx +;; xorq %rax, %rax ;; addq 0x38(%rdi), %rdx -;; movl $0xffff0000, %r9d -;; addq %r9, %rdx -;; cmpq %r8, %rax -;; cmovaq %rcx, %rdx +;; movl $0xffff0000, %esi +;; addq %rsi, %rdx +;; cmpq %rcx, %r11 +;; cmovaq %rax, %rdx ;; movl (%rdx), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 76: ud2 -;; 78: addb $0, %al +;; 75: ud2 +;; 77: addb %al, (%rax, %rax) diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat index ac9434b6b921..3ec0f8333416 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat @@ -21,12 +21,12 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rsi -;; xorq %r11, %r11 +;; movq 0x40(%rdi), %r9 +;; xorq %r8, %r8 ;; movq %rdx, %r10 ;; addq 0x38(%rdi), %r10 -;; cmpq %rsi, %rdx -;; cmovaeq %r11, %r10 +;; cmpq %r9, %rdx +;; cmovaeq %r8, %r10 ;; movb %cl, (%r10) ;; movq %rbp, %rsp ;; popq %rbp @@ -35,12 +35,12 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, 
%rbp -;; movq 0x40(%rdi), %rsi -;; xorq %r11, %r11 +;; movq 0x40(%rdi), %r9 +;; xorq %r8, %r8 ;; movq %rdx, %r10 ;; addq 0x38(%rdi), %r10 -;; cmpq %rsi, %rdx -;; cmovaeq %r11, %r10 +;; cmpq %r9, %rdx +;; cmovaeq %r8, %r10 ;; movzbq (%r10), %rax ;; movq %rbp, %rsp ;; popq %rbp diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat index 26fa90935850..f948fd3b4d65 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -21,14 +21,14 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r11 -;; movq 0x38(%rdi), %rax -;; subq $0x1001, %r11 -;; xorq %rdi, %rdi -;; leaq 0x1000(%rax, %rdx), %rsi -;; cmpq %r11, %rdx -;; cmovaq %rdi, %rsi -;; movb %cl, (%rsi) +;; movq 0x40(%rdi), %r8 +;; movq 0x38(%rdi), %r11 +;; subq $0x1001, %r8 +;; xorq %r10, %r10 +;; leaq 0x1000(%r11, %rdx), %r9 +;; cmpq %r8, %rdx +;; cmovaq %r10, %r9 +;; movb %cl, (%r9) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -36,14 +36,14 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %r11 -;; movq 0x38(%rdi), %rax -;; subq $0x1001, %r11 -;; xorq %rdi, %rdi -;; leaq 0x1000(%rax, %rdx), %rsi -;; cmpq %r11, %rdx -;; cmovaq %rdi, %rsi -;; movzbq (%rsi), %rax +;; movq 0x40(%rdi), %r8 +;; movq 0x38(%rdi), %r11 +;; subq $0x1001, %r8 +;; xorq %r10, %r10 +;; leaq 0x1000(%r11, %rdx), %r9 +;; cmpq %r8, %rdx +;; cmovaq %r10, %r9 +;; movzbq (%r9), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat index 70e301ae7517..f5a1d6641718 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -21,42 +21,48 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq %rdx, %rax -;; addq 0x2a(%rip), %rax -;; jb 0x36 -;; 14: movq 0x40(%rdi), %r9 -;; xorq %r8, %r8 +;; movq %rdx, %r11 +;; addq 0x2a(%rip), %r11 +;; jb 0x35 +;; 14: movq 0x40(%rdi), %rsi +;; xorq %rax, %rax ;; addq 0x38(%rdi), %rdx -;; movl $0xffff0000, %r10d -;; addq %r10, %rdx -;; cmpq %r9, %rax -;; cmovaq %r8, %rdx +;; movl $0xffff0000, %edi +;; addq %rdi, %rdx +;; cmpq %rsi, %r11 +;; cmovaq %rax, %rdx ;; movb %cl, (%rdx) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 36: ud2 -;; 38: addl %eax, (%rax) +;; 35: ud2 +;; 37: addb %al, (%rcx) +;; 39: addb %bh, %bh +;; 3b: incl (%rax) +;; 3d: addb %al, (%rax) ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq %rdx, %rax -;; addq 0x32(%rip), %rax -;; jb 0x78 -;; 54: movq 0x40(%rdi), %r8 -;; xorq %rcx, %rcx +;; movq %rdx, %r11 +;; addq 0x32(%rip), %r11 +;; jb 0x77 +;; 54: movq 0x40(%rdi), %rcx +;; xorq %rax, %rax ;; addq 0x38(%rdi), %rdx -;; movl $0xffff0000, %r9d -;; addq %r9, %rdx -;; cmpq %r8, %rax -;; cmovaq %rcx, %rdx +;; movl $0xffff0000, %esi +;; addq %rsi, %rdx +;; cmpq %rcx, %r11 +;; cmovaq %rax, %rdx ;; movzbq (%rdx), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 78: ud2 -;; 7a: 
addb %al, (%rax) -;; 7c: addb %al, (%rax) -;; 7e: addb %al, (%rax) -;; 80: addl %eax, (%rax) +;; 77: ud2 +;; 79: addb %al, (%rax) +;; 7b: addb %al, (%rax) +;; 7d: addb %al, (%rax) +;; 7f: addb %al, (%rcx) +;; 81: addb %bh, %bh +;; 83: incl (%rax) +;; 85: addb %al, (%rax) diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat index 57f4f5edd03d..929fb383defd 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat @@ -22,22 +22,22 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x40(%rdi), %rdx -;; ja 0x1b -;; e: movq 0x38(%rdi), %r9 -;; movl %ecx, (%r9, %rdx) +;; ja 0x1a +;; e: movq 0x38(%rdi), %rsi +;; movl %ecx, (%rsi, %rdx) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 1b: ud2 +;; 1a: ud2 ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x40(%rdi), %rdx -;; ja 0x3b -;; 2e: movq 0x38(%rdi), %r9 -;; movl (%r9, %rdx), %eax +;; ja 0x3a +;; 2e: movq 0x38(%rdi), %rsi +;; movl (%rsi, %rdx), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 3b: ud2 +;; 3a: ud2 diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat index a239114880b7..e27f5439324d 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat @@ -22,22 +22,22 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x40(%rdi), %rdx -;; ja 0x1f -;; e: movq 0x38(%rdi), %r9 -;; movl %ecx, 0x1000(%r9, %rdx) +;; ja 0x1e +;; e: movq 0x38(%rdi), %rsi +;; movl %ecx, 0x1000(%rsi, %rdx) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 1f: ud2 +;; 1e: ud2 ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x40(%rdi), %rdx -;; ja 0x5f -;; 4e: movq 0x38(%rdi), %r9 -;; movl 0x1000(%r9, %rdx), %eax +;; ja 0x3e +;; 2e: movq 0x38(%rdi), %rsi +;; movl 0x1000(%rsi, %rdx), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 5f: ud2 +;; 3e: ud2 diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat index 5c84df8e1827..d07e508164f7 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -22,24 +22,24 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x40(%rdi), %rdx -;; ja 0x21 +;; ja 0x1f ;; e: addq 0x38(%rdi), %rdx -;; movl $0xffff0000, %r10d -;; movl %ecx, (%rdx, %r10) +;; movl $0xffff0000, %edi +;; movl %ecx, (%rdx, %rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 21: ud2 +;; 1f: ud2 ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x40(%rdi), %rdx -;; ja 0x61 +;; ja 0x5f ;; 4e: addq 0x38(%rdi), %rdx -;; movl $0xffff0000, %r10d -;; movl (%rdx, %r10), %eax +;; movl 
$0xffff0000, %edi +;; movl (%rdx, %rdi), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 61: ud2 +;; 5f: ud2 diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat index f1a3e8d47139..d8472853a757 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat @@ -22,21 +22,21 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x40(%rdi), %rdx -;; jae 0x1b -;; e: movq 0x38(%rdi), %r9 -;; movb %cl, (%r9, %rdx) +;; jae 0x1a +;; e: movq 0x38(%rdi), %rsi +;; movb %cl, (%rsi, %rdx) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 1b: ud2 +;; 1a: ud2 ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x40(%rdi), %rdx ;; jae 0x3c -;; 2e: movq 0x38(%rdi), %r9 -;; movzbq (%r9, %rdx), %rax +;; 2e: movq 0x38(%rdi), %rsi +;; movzbq (%rsi, %rdx), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat index 6a365117c90a..bf4700df796c 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat @@ -22,22 +22,22 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x40(%rdi), %rdx -;; ja 0x1f -;; e: movq 0x38(%rdi), %r9 -;; movb %cl, 0x1000(%r9, %rdx) +;; ja 0x1e +;; e: movq 0x38(%rdi), %rsi +;; movb %cl, 0x1000(%rsi, %rdx) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 1f: ud2 +;; 1e: ud2 ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x40(%rdi), %rdx -;; ja 0x60 -;; 4e: movq 0x38(%rdi), %r9 -;; movzbq 0x1000(%r9, %rdx), %rax +;; ja 0x40 +;; 2e: movq 0x38(%rdi), %rsi +;; movzbq 0x1000(%rsi, %rdx), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 60: ud2 +;; 40: ud2 diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat index 189e5d3dff00..de7e2c28aad3 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -22,24 +22,24 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x40(%rdi), %rdx -;; ja 0x21 +;; ja 0x1f ;; e: addq 0x38(%rdi), %rdx -;; movl $0xffff0000, %r10d -;; movb %cl, (%rdx, %r10) +;; movl $0xffff0000, %edi +;; movb %cl, (%rdx, %rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 21: ud2 +;; 1f: ud2 ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x40(%rdi), %rdx -;; ja 0x62 +;; ja 0x61 ;; 4e: addq 0x38(%rdi), %rdx -;; movl $0xffff0000, %r10d -;; movzbq (%rdx, %r10), %rax +;; movl $0xffff0000, %edi +;; movzbq (%rdx, %rdi), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 62: ud2 +;; 61: ud2 diff --git 
a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat index d53d966a5fed..9c0b868755c1 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat @@ -21,12 +21,12 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rsi -;; xorq %r11, %r11 +;; movq 0x40(%rdi), %r9 +;; xorq %r8, %r8 ;; movq %rdx, %r10 ;; addq 0x38(%rdi), %r10 -;; cmpq %rsi, %rdx -;; cmovaq %r11, %r10 +;; cmpq %r9, %rdx +;; cmovaq %r8, %r10 ;; movl %ecx, (%r10) ;; movq %rbp, %rsp ;; popq %rbp @@ -35,12 +35,12 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rsi -;; xorq %r11, %r11 +;; movq 0x40(%rdi), %r9 +;; xorq %r8, %r8 ;; movq %rdx, %r10 ;; addq 0x38(%rdi), %r10 -;; cmpq %rsi, %rdx -;; cmovaq %r11, %r10 +;; cmpq %r9, %rdx +;; cmovaq %r8, %r10 ;; movl (%r10), %eax ;; movq %rbp, %rsp ;; popq %rbp diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat index e4e7f6596047..13e8250c2cc9 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -21,13 +21,13 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rsi -;; movq 0x38(%rdi), %rax -;; xorq %rdi, %rdi -;; leaq 0x1000(%rax, %rdx), %r11 -;; cmpq %rsi, %rdx -;; cmovaq %rdi, %r11 -;; movl %ecx, (%r11) +;; movq 0x40(%rdi), %r9 +;; movq 0x38(%rdi), %r11 +;; xorq %r10, %r10 +;; leaq 0x1000(%r11, %rdx), %r8 +;; cmpq %r9, %rdx +;; cmovaq %r10, %r8 +;; movl %ecx, (%r8) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -35,13 +35,13 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rsi -;; movq 0x38(%rdi), %rax -;; xorq %rdi, %rdi -;; leaq 0x1000(%rax, %rdx), %r11 -;; cmpq %rsi, %rdx -;; cmovaq %rdi, %r11 -;; movl (%r11), %eax +;; movq 0x40(%rdi), %r9 +;; movq 0x38(%rdi), %r11 +;; xorq %r10, %r10 +;; leaq 0x1000(%r11, %rdx), %r8 +;; cmpq %r9, %rdx +;; cmovaq %r10, %r8 +;; movl (%r8), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat index b0889d5118de..eb5f51ab8070 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -21,16 +21,15 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rax -;; movq %rdi, %r9 -;; xorq %rdi, %rdi -;; movq %rdx, %r8 -;; addq 0x38(%r9), %r8 -;; movl $0xffff0000, %r9d -;; leaq (%r8, %r9), %rsi -;; cmpq %rax, %rdx -;; cmovaq %rdi, %rsi -;; movl %ecx, (%rsi) +;; movq 0x40(%rdi), %r11 +;; xorq 
%r10, %r10 +;; movq %rdx, %rax +;; addq 0x38(%rdi), %rax +;; movl $0xffff0000, %esi +;; leaq (%rax, %rsi), %r9 +;; cmpq %r11, %rdx +;; cmovaq %r10, %r9 +;; movl %ecx, (%r9) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -38,16 +37,15 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rax -;; movq %rdi, %r8 -;; xorq %rdi, %rdi -;; movq %rdx, %rcx -;; addq 0x38(%r8), %rcx -;; movl $0xffff0000, %r8d -;; leaq (%rcx, %r8), %rsi -;; cmpq %rax, %rdx -;; cmovaq %rdi, %rsi -;; movl (%rsi), %eax +;; movq 0x40(%rdi), %r11 +;; xorq %r10, %r10 +;; movq %rdx, %rax +;; addq 0x38(%rdi), %rax +;; movl $0xffff0000, %ecx +;; leaq (%rax, %rcx), %r9 +;; cmpq %r11, %rdx +;; cmovaq %r10, %r9 +;; movl (%r9), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat index f05c9e2ab1f9..269e328cd11c 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat @@ -21,12 +21,12 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rsi -;; xorq %r11, %r11 +;; movq 0x40(%rdi), %r9 +;; xorq %r8, %r8 ;; movq %rdx, %r10 ;; addq 0x38(%rdi), %r10 -;; cmpq %rsi, %rdx -;; cmovaeq %r11, %r10 +;; cmpq %r9, %rdx +;; cmovaeq %r8, %r10 ;; movb %cl, (%r10) ;; movq %rbp, %rsp ;; popq %rbp @@ -35,12 +35,12 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rsi -;; xorq %r11, %r11 +;; movq 0x40(%rdi), %r9 +;; xorq %r8, %r8 ;; movq %rdx, %r10 ;; addq 0x38(%rdi), %r10 -;; cmpq %rsi, %rdx -;; cmovaeq %r11, %r10 +;; cmpq %r9, %rdx +;; cmovaeq %r8, %r10 ;; movzbq (%r10), %rax ;; movq %rbp, %rsp ;; popq %rbp diff --git a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat index ee34508141c0..7005f734a5db 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -21,13 +21,13 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rsi -;; movq 0x38(%rdi), %rax -;; xorq %rdi, %rdi -;; leaq 0x1000(%rax, %rdx), %r11 -;; cmpq %rsi, %rdx -;; cmovaq %rdi, %r11 -;; movb %cl, (%r11) +;; movq 0x40(%rdi), %r9 +;; movq 0x38(%rdi), %r11 +;; xorq %r10, %r10 +;; leaq 0x1000(%r11, %rdx), %r8 +;; cmpq %r9, %rdx +;; cmovaq %r10, %r8 +;; movb %cl, (%r8) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -35,13 +35,13 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rsi -;; movq 0x38(%rdi), %rax -;; xorq %rdi, %rdi -;; leaq 0x1000(%rax, %rdx), %r11 -;; cmpq %rsi, %rdx -;; cmovaq %rdi, %r11 -;; movzbq (%r11), %rax +;; movq 0x40(%rdi), %r9 +;; movq 0x38(%rdi), %r11 +;; xorq %r10, %r10 +;; leaq 0x1000(%r11, %rdx), %r8 +;; cmpq %r9, %rdx +;; cmovaq %r10, %r8 +;; movzbq (%r8), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git 
a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat index 9e6d32c10860..f3cc89850b00 100644 --- a/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_dynamic_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -21,16 +21,15 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rax -;; movq %rdi, %r9 -;; xorq %rdi, %rdi -;; movq %rdx, %r8 -;; addq 0x38(%r9), %r8 -;; movl $0xffff0000, %r9d -;; leaq (%r8, %r9), %rsi -;; cmpq %rax, %rdx -;; cmovaq %rdi, %rsi -;; movb %cl, (%rsi) +;; movq 0x40(%rdi), %r11 +;; xorq %r10, %r10 +;; movq %rdx, %rax +;; addq 0x38(%rdi), %rax +;; movl $0xffff0000, %esi +;; leaq (%rax, %rsi), %r9 +;; cmpq %r11, %rdx +;; cmovaq %r10, %r9 +;; movb %cl, (%r9) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -38,16 +37,15 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x40(%rdi), %rax -;; movq %rdi, %r8 -;; xorq %rdi, %rdi -;; movq %rdx, %rcx -;; addq 0x38(%r8), %rcx -;; movl $0xffff0000, %r8d -;; leaq (%rcx, %r8), %rsi -;; cmpq %rax, %rdx -;; cmovaq %rdi, %rsi -;; movzbq (%rsi), %rax +;; movq 0x40(%rdi), %r11 +;; xorq %r10, %r10 +;; movq %rdx, %rax +;; addq 0x38(%rdi), %rax +;; movl $0xffff0000, %ecx +;; leaq (%rax, %rcx), %r9 +;; cmpq %r11, %rdx +;; cmovaq %r10, %r9 +;; movzbq (%r9), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat index 11b674294448..c702bbf8f0e5 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0_offset.wat @@ -21,15 +21,16 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq 0x1a(%rip), %r8 -;; ja 0x21 -;; 14: movq 0x38(%rdi), %r10 -;; movl %ecx, (%r10, %r8) +;; movl %edx, %esi +;; cmpq 0x1b(%rip), %rsi +;; ja 0x1f +;; 13: movq 0x38(%rdi), %rdi +;; movl %ecx, (%rdi, %rsi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 21: ud2 +;; 1f: ud2 +;; 21: addb %al, (%rax) ;; 23: addb %al, (%rax) ;; 25: addb %al, (%rax) ;; 27: addb %bh, %ah @@ -37,15 +38,16 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq 0x1a(%rip), %r8 -;; ja 0x61 -;; 54: movq 0x38(%rdi), %r10 -;; movl (%r10, %r8), %eax +;; movl %edx, %esi +;; cmpq 0x1b(%rip), %rsi +;; ja 0x5f +;; 53: movq 0x38(%rdi), %rdi +;; movl (%rdi, %rsi), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 61: ud2 +;; 5f: ud2 +;; 61: addb %al, (%rax) ;; 63: addb %al, (%rax) ;; 65: addb %al, (%rax) ;; 67: addb %bh, %ah diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat index a45a7116f2e8..23272c9d80b4 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat +++ 
b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0x1000_offset.wat @@ -21,29 +21,31 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq 0x1a(%rip), %r8 -;; ja 0x25 -;; 14: movq 0x38(%rdi), %r10 -;; movl %ecx, 0x1000(%r10, %r8) +;; movl %edx, %esi +;; cmpq 0x1b(%rip), %rsi +;; ja 0x23 +;; 13: movq 0x38(%rdi), %rdi +;; movl %ecx, 0x1000(%rdi, %rsi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 25: ud2 +;; 23: ud2 +;; 25: addb %al, (%rax) ;; 27: addb %bh, %ah ;; 29: outl %eax, %dx ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq 0x1a(%rip), %r8 -;; ja 0x65 -;; 54: movq 0x38(%rdi), %r10 -;; movl 0x1000(%r10, %r8), %eax +;; movl %edx, %esi +;; cmpq 0x1b(%rip), %rsi +;; ja 0x63 +;; 53: movq 0x38(%rdi), %rdi +;; movl 0x1000(%rdi, %rsi), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 65: ud2 +;; 63: ud2 +;; 65: addb %al, (%rax) ;; 67: addb %bh, %ah ;; 69: outl %eax, %dx diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat index 16f6933a053b..1dea223494c8 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -21,27 +21,27 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq $0xfffc, %r8 -;; ja 0x27 -;; 14: addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %r11d -;; movl %ecx, (%r8, %r11) +;; movl %edx, %esi +;; cmpq $0xfffc, %rsi +;; ja 0x26 +;; 13: addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %r8d +;; movl %ecx, (%rsi, %r8) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 27: ud2 +;; 26: ud2 ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq $0xfffc, %r8 -;; ja 0x67 -;; 54: addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %r11d -;; movl (%r8, %r11), %eax +;; movl %edx, %esi +;; cmpq $0xfffc, %rsi +;; ja 0x66 +;; 53: addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %r8d +;; movl (%rsi, %r8), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 67: ud2 +;; 66: ud2 diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat index c5b6c9253d4f..2e1baa117538 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0_offset.wat @@ -21,9 +21,9 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r8 -;; movl %edx, %r9d -;; movb %cl, (%r8, %r9) +;; movq 0x38(%rdi), %rsi +;; movl %edx, %edi +;; movb %cl, (%rsi, %rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -31,9 +31,9 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r8 -;; movl %edx, %r9d -;; movzbq (%r8, %r9), %rax +;; movq 0x38(%rdi), %rsi +;; movl %edx, %edi +;; movzbq (%rsi, %rdi), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat 
b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat index f0836d6f3e64..3ed0eb9837d4 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0x1000_offset.wat @@ -21,27 +21,30 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq 0x1a(%rip), %r8 -;; ja 0x25 -;; 14: movq 0x38(%rdi), %r10 -;; movb %cl, 0x1000(%r10, %r8) +;; movl %edx, %esi +;; cmpq 0x1b(%rip), %rsi +;; ja 0x23 +;; 13: movq 0x38(%rdi), %rdi +;; movb %cl, 0x1000(%rdi, %rsi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 25: ud2 +;; 23: ud2 +;; 25: addb %al, (%rax) ;; 27: addb %bh, %bh ;; 29: outl %eax, %dx ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq 0x1a(%rip), %r8 -;; ja 0x66 -;; 54: movq 0x38(%rdi), %r10 -;; movzbq 0x1000(%r10, %r8), %rax +;; movl %edx, %esi +;; cmpq 0x1b(%rip), %rsi +;; ja 0x65 +;; 53: movq 0x38(%rdi), %rdi +;; movzbq 0x1000(%rdi, %rsi), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 66: ud2 +;; 65: ud2 +;; 67: addb %bh, %bh +;; 69: outl %eax, %dx diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat index 61d0b04772ed..e8c5efb72968 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -21,27 +21,27 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq $0xffff, %r8 -;; ja 0x27 -;; 14: addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %r11d -;; movb %cl, (%r8, %r11) +;; movl %edx, %esi +;; cmpq $0xffff, %rsi +;; ja 0x26 +;; 13: addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %r8d +;; movb %cl, (%rsi, %r8) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 27: ud2 +;; 26: ud2 ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; cmpq $0xffff, %r8 -;; ja 0x68 -;; 54: addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %r11d -;; movzbq (%r8, %r11), %rax +;; movl %edx, %esi +;; cmpq $0xffff, %rsi +;; ja 0x67 +;; 53: addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %r8d +;; movzbq (%rsi, %r8), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 68: ud2 +;; 67: ud2 diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat index f3b5183089fc..fa171a9395f8 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0_offset.wat @@ -21,33 +21,33 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %esi -;; xorq %r11, %r11 -;; movq %rsi, %r10 +;; movl %edx, %r9d +;; xorq %r8, %r8 +;; movq %r9, %r10 ;; addq 0x38(%rdi), %r10 -;; cmpq 0x11(%rip), %rsi -;; cmovaq %r11, %r10 +;; cmpq 0x10(%rip), %r9 +;; cmovaq %r8, %r10 ;; movl %ecx, (%r10) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 23: addb %al, (%rax) -;; 25: addb %al, (%rax) -;; 27: addb %bh, %ah +;; 24: addb %al, (%rax) +;; 26: addb %al, (%rax) 
+;; 28: cld ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %esi -;; xorq %r11, %r11 -;; movq %rsi, %r10 +;; movl %edx, %r9d +;; xorq %r8, %r8 +;; movq %r9, %r10 ;; addq 0x38(%rdi), %r10 -;; cmpq 0x11(%rip), %rsi -;; cmovaq %r11, %r10 +;; cmpq 0x10(%rip), %r9 +;; cmovaq %r8, %r10 ;; movl (%r10), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 63: addb %al, (%rax) -;; 65: addb %al, (%rax) -;; 67: addb %bh, %ah +;; 64: addb %al, (%rax) +;; 66: addb %al, (%rax) +;; 68: cld diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat index 0932f9b97e66..b05e7586a2a8 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -21,37 +21,37 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq %rdi, %rax -;; movl %edx, %edi -;; xorq %rsi, %rsi -;; movq %rax, %rdx -;; movq 0x38(%rdx), %rax -;; leaq 0x1000(%rax, %rdi), %r11 -;; cmpq 0xe(%rip), %rdi -;; cmovaq %rsi, %r11 -;; movl %ecx, (%r11) +;; movl %edx, %r10d +;; xorq %r9, %r9 +;; movq 0x38(%rdi), %r11 +;; leaq 0x1000(%r11, %r10), %r8 +;; cmpq 0x13(%rip), %r10 +;; cmovaq %r9, %r8 +;; movl %ecx, (%r8) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 2e: addb %al, (%rax) -;; 30: cld +;; 29: addb %al, (%rax) +;; 2b: addb %al, (%rax) +;; 2d: addb %al, (%rax) +;; 2f: addb %bh, %ah ;; 31: outl %eax, %dx ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq %rdi, %rax -;; movl %edx, %edi -;; xorq %rsi, %rsi -;; movq %rax, %rdx -;; movq 0x38(%rdx), %rax -;; leaq 0x1000(%rax, %rdi), %r11 -;; cmpq 0xe(%rip), %rdi -;; cmovaq %rsi, %r11 -;; movl (%r11), %eax +;; movl %edx, %r10d +;; xorq %r9, %r9 +;; movq 0x38(%rdi), %r11 +;; leaq 0x1000(%r11, %r10), %r8 +;; cmpq 0x13(%rip), %r10 +;; cmovaq %r9, %r8 +;; movl (%r8), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 6e: addb %al, (%rax) -;; 70: cld +;; 69: addb %al, (%rax) +;; 6b: addb %al, (%rax) +;; 6d: addb %al, (%rax) +;; 6f: addb %bh, %ah ;; 71: outl %eax, %dx diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat index 4d125a77e7a8..755c0abd95dc 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -21,16 +21,15 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq %rdi, %r8 -;; movl %edx, %eax -;; xorq %rdi, %rdi -;; movq %rax, %rdx -;; addq 0x38(%r8), %rdx -;; movl $0xffff0000, %r8d -;; leaq (%rdx, %r8), %rsi -;; cmpq $0xfffc, %rax -;; cmovaq %rdi, %rsi -;; movl %ecx, (%rsi) +;; movl %edx, %r11d +;; xorq %r10, %r10 +;; movq %r11, %rax +;; addq 0x38(%rdi), %rax +;; movl $0xffff0000, %edx +;; leaq (%rax, %rdx), %r9 +;; cmpq $0xfffc, %r11 +;; cmovaq %r10, %r9 +;; movl %ecx, (%r9) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -38,17 +37,15 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq %rdi, %rcx -;; movl %edx, %eax -;; xorq %rdi, %rdi -;; movq %rcx, %r8 -;; movq %rax, %rcx -;; addq 0x38(%r8), 
%rcx -;; movl $0xffff0000, %edx -;; leaq (%rcx, %rdx), %rsi -;; cmpq $0xfffc, %rax -;; cmovaq %rdi, %rsi -;; movl (%rsi), %eax +;; movl %edx, %r11d +;; xorq %r10, %r10 +;; movq %r11, %rax +;; addq 0x38(%rdi), %rax +;; movl $0xffff0000, %ecx +;; leaq (%rax, %rcx), %r9 +;; cmpq $0xfffc, %r11 +;; cmovaq %r10, %r9 +;; movl (%r9), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat index a06edd44fbca..f748aee725e6 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0_offset.wat @@ -21,9 +21,9 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r8 -;; movl %edx, %r9d -;; movb %cl, (%r8, %r9) +;; movq 0x38(%rdi), %rsi +;; movl %edx, %edi +;; movb %cl, (%rsi, %rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -31,9 +31,9 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r8 -;; movl %edx, %r9d -;; movzbq (%r8, %r9), %rax +;; movq 0x38(%rdi), %rsi +;; movl %edx, %edi +;; movzbq (%rsi, %rdi), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat index 3735b47d5c64..3a996fce707f 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -21,34 +21,35 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq %rdi, %rax -;; movl %edx, %edi -;; xorq %rsi, %rsi -;; movq %rax, %rdx -;; movq 0x38(%rdx), %rax -;; leaq 0x1000(%rax, %rdi), %r11 -;; cmpq 0xe(%rip), %rdi -;; cmovaq %rsi, %r11 -;; movb %cl, (%r11) +;; movl %edx, %r10d +;; xorq %r9, %r9 +;; movq 0x38(%rdi), %r11 +;; leaq 0x1000(%r11, %r10), %r8 +;; cmpq 0x13(%rip), %r10 +;; cmovaq %r9, %r8 +;; movb %cl, (%r8) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 2e: addb %al, (%rax) +;; 29: addb %al, (%rax) +;; 2b: addb %al, (%rax) +;; 2d: addb %al, (%rax) +;; 2f: addb %bh, %bh +;; 31: outl %eax, %dx ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq %rdi, %rax -;; movl %edx, %edi -;; xorq %rsi, %rsi -;; movq %rax, %rdx -;; movq 0x38(%rdx), %rax -;; leaq 0x1000(%rax, %rdi), %r11 -;; cmpq 0xe(%rip), %rdi -;; cmovaq %rsi, %r11 -;; movzbq (%r11), %rax +;; movl %edx, %r10d +;; xorq %r9, %r9 +;; movq 0x38(%rdi), %r11 +;; leaq 0x1000(%r11, %r10), %r8 +;; cmpq 0x13(%rip), %r10 +;; cmovaq %r9, %r8 +;; movzbq (%r8), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 6f: addb %bh, %bh -;; 71: outl %eax, %dx +;; 6a: addb %al, (%rax) +;; 6c: addb %al, (%rax) +;; 6e: addb %al, (%rax) diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat index b258c5e946d2..6c9be967ee0f 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ 
b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -21,16 +21,15 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq %rdi, %r8 -;; movl %edx, %eax -;; xorq %rdi, %rdi -;; movq %rax, %rdx -;; addq 0x38(%r8), %rdx -;; movl $0xffff0000, %r8d -;; leaq (%rdx, %r8), %rsi -;; cmpq $0xffff, %rax -;; cmovaq %rdi, %rsi -;; movb %cl, (%rsi) +;; movl %edx, %r11d +;; xorq %r10, %r10 +;; movq %r11, %rax +;; addq 0x38(%rdi), %rax +;; movl $0xffff0000, %edx +;; leaq (%rax, %rdx), %r9 +;; cmpq $0xffff, %r11 +;; cmovaq %r10, %r9 +;; movb %cl, (%r9) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -38,17 +37,15 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq %rdi, %rcx -;; movl %edx, %eax -;; xorq %rdi, %rdi -;; movq %rcx, %r8 -;; movq %rax, %rcx -;; addq 0x38(%r8), %rcx -;; movl $0xffff0000, %edx -;; leaq (%rcx, %rdx), %rsi -;; cmpq $0xffff, %rax -;; cmovaq %rdi, %rsi -;; movzbq (%rsi), %rax +;; movl %edx, %r11d +;; xorq %r10, %r10 +;; movq %r11, %rax +;; addq 0x38(%rdi), %rax +;; movl $0xffff0000, %ecx +;; leaq (%rax, %rcx), %r9 +;; cmpq $0xffff, %r11 +;; cmovaq %r10, %r9 +;; movzbq (%r9), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat index b13b044cb9c7..59745722fa35 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat @@ -21,9 +21,9 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r8 -;; movl %edx, %r9d -;; movl %ecx, (%r8, %r9) +;; movq 0x38(%rdi), %rsi +;; movl %edx, %edi +;; movl %ecx, (%rsi, %rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -31,9 +31,9 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r8 -;; movl %edx, %r9d -;; movl (%r8, %r9), %eax +;; movq 0x38(%rdi), %rsi +;; movl %edx, %edi +;; movl (%rsi, %rdi), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat index 933423aaab0a..7b5466bd31b7 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat @@ -21,9 +21,9 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r8 -;; movl %edx, %r9d -;; movl %ecx, 0x1000(%r8, %r9) +;; movq 0x38(%rdi), %rsi +;; movl %edx, %edi +;; movl %ecx, 0x1000(%rsi, %rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -31,9 +31,9 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r8 -;; movl %edx, %r9d -;; movl 0x1000(%r8, %r9), %eax +;; movq 0x38(%rdi), %rsi +;; movl %edx, %edi +;; movl 0x1000(%rsi, %rdi), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat 
b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat index abe7d44c45b5..e75a56f72e9a 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -21,10 +21,10 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %r9d -;; movl %ecx, (%r8, %r9) +;; movl %edx, %esi +;; addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %edi +;; movl %ecx, (%rsi, %rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -32,10 +32,10 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %r9d -;; movl (%r8, %r9), %eax +;; movl %edx, %esi +;; addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %edi +;; movl (%rsi, %rdi), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat index f606e42b8442..dfe372fcf148 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat @@ -21,9 +21,9 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r8 -;; movl %edx, %r9d -;; movb %cl, (%r8, %r9) +;; movq 0x38(%rdi), %rsi +;; movl %edx, %edi +;; movb %cl, (%rsi, %rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -31,9 +31,9 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r8 -;; movl %edx, %r9d -;; movzbq (%r8, %r9), %rax +;; movq 0x38(%rdi), %rsi +;; movl %edx, %edi +;; movzbq (%rsi, %rdi), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat index 1b5a9543efb0..f6ab3b83deb8 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat @@ -21,9 +21,9 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r8 -;; movl %edx, %r9d -;; movb %cl, 0x1000(%r8, %r9) +;; movq 0x38(%rdi), %rsi +;; movl %edx, %edi +;; movb %cl, 0x1000(%rsi, %rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -31,9 +31,9 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r8 -;; movl %edx, %r9d -;; movzbq 0x1000(%r8, %r9), %rax +;; movq 0x38(%rdi), %rsi +;; movl %edx, %edi +;; movzbq 0x1000(%rsi, %rdi), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat index 84f01a1861de..d19090c1cc3c 100644 --- 
a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -21,10 +21,10 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %r9d -;; movb %cl, (%r8, %r9) +;; movl %edx, %esi +;; addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %edi +;; movb %cl, (%rsi, %rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -32,10 +32,10 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %r9d -;; movzbq (%r8, %r9), %rax +;; movl %edx, %esi +;; addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %edi +;; movzbq (%rsi, %rdi), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat index 5a85c570eb0e..dfc21bb851e8 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat @@ -21,9 +21,9 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r8 -;; movl %edx, %r9d -;; movl %ecx, (%r8, %r9) +;; movq 0x38(%rdi), %rsi +;; movl %edx, %edi +;; movl %ecx, (%rsi, %rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -31,9 +31,9 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r8 -;; movl %edx, %r9d -;; movl (%r8, %r9), %eax +;; movq 0x38(%rdi), %rsi +;; movl %edx, %edi +;; movl (%rsi, %rdi), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat index ec63232b8e50..1fe6c64fb9a9 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -21,9 +21,9 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r8 -;; movl %edx, %r9d -;; movl %ecx, 0x1000(%r8, %r9) +;; movq 0x38(%rdi), %rsi +;; movl %edx, %edi +;; movl %ecx, 0x1000(%rsi, %rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -31,9 +31,9 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r8 -;; movl %edx, %r9d -;; movl 0x1000(%r8, %r9), %eax +;; movq 0x38(%rdi), %rsi +;; movl %edx, %edi +;; movl 0x1000(%rsi, %rdi), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat index a83a35f12deb..53d8b3830ffc 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ 
b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -21,10 +21,10 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %r9d -;; movl %ecx, (%r8, %r9) +;; movl %edx, %esi +;; addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %edi +;; movl %ecx, (%rsi, %rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -32,10 +32,10 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %r9d -;; movl (%r8, %r9), %eax +;; movl %edx, %esi +;; addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %edi +;; movl (%rsi, %rdi), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat index 9a6ab1cca141..5a4a97b70ecd 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat @@ -21,9 +21,9 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r8 -;; movl %edx, %r9d -;; movb %cl, (%r8, %r9) +;; movq 0x38(%rdi), %rsi +;; movl %edx, %edi +;; movb %cl, (%rsi, %rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -31,9 +31,9 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r8 -;; movl %edx, %r9d -;; movzbq (%r8, %r9), %rax +;; movq 0x38(%rdi), %rsi +;; movl %edx, %edi +;; movzbq (%rsi, %rdi), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat index d4d8f8f12953..8320af5b5016 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -21,9 +21,9 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r8 -;; movl %edx, %r9d -;; movb %cl, 0x1000(%r8, %r9) +;; movq 0x38(%rdi), %rsi +;; movl %edx, %edi +;; movb %cl, 0x1000(%rsi, %rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -31,9 +31,9 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r8 -;; movl %edx, %r9d -;; movzbq 0x1000(%r8, %r9), %rax +;; movq 0x38(%rdi), %rsi +;; movl %edx, %edi +;; movzbq 0x1000(%rsi, %rdi), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat index 5e366fffd940..2feb9b7846c1 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i32_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -21,10 +21,10 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; addq 0x38(%rdi), %r8 
-;; movl $0xffff0000, %r9d -;; movb %cl, (%r8, %r9) +;; movl %edx, %esi +;; addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %edi +;; movb %cl, (%rsi, %rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -32,10 +32,10 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; addq 0x38(%rdi), %r8 -;; movl $0xffff0000, %r9d -;; movzbq (%r8, %r9), %rax +;; movl %edx, %esi +;; addq 0x38(%rdi), %rsi +;; movl $0xffff0000, %edi +;; movzbq (%rsi, %rdi), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat index fc07b08e8407..afd1e7dba259 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0_offset.wat @@ -22,24 +22,24 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x15(%rip), %rdx -;; ja 0x1e -;; 11: movq 0x38(%rdi), %r9 -;; movl %ecx, (%r9, %rdx) +;; ja 0x1d +;; 11: movq 0x38(%rdi), %rsi +;; movl %ecx, (%rsi, %rdx) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 1e: ud2 -;; 20: cld +;; 1d: ud2 +;; 1f: addb %bh, %ah ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x15(%rip), %rdx -;; ja 0x5e -;; 51: movq 0x38(%rdi), %r9 -;; movl (%r9, %rdx), %eax +;; ja 0x5d +;; 51: movq 0x38(%rdi), %rsi +;; movl (%rsi, %rdx), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 5e: ud2 -;; 60: cld +;; 5d: ud2 +;; 5f: addb %bh, %ah diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat index 47383fb1573e..5528db41b7e1 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0x1000_offset.wat @@ -22,30 +22,30 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x1d(%rip), %rdx -;; ja 0x22 -;; 11: movq 0x38(%rdi), %r9 -;; movl %ecx, 0x1000(%r9, %rdx) +;; ja 0x21 +;; 11: movq 0x38(%rdi), %rsi +;; movl %ecx, 0x1000(%rsi, %rdx) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 22: ud2 -;; 24: addb %al, (%rax) -;; 26: addb %al, (%rax) -;; 28: cld +;; 21: ud2 +;; 23: addb %al, (%rax) +;; 25: addb %al, (%rax) +;; 27: addb %bh, %ah ;; 29: outl %eax, %dx ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x1d(%rip), %rdx -;; ja 0x62 -;; 51: movq 0x38(%rdi), %r9 -;; movl 0x1000(%r9, %rdx), %eax +;; ja 0x61 +;; 51: movq 0x38(%rdi), %rsi +;; movl 0x1000(%rsi, %rdx), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 62: ud2 -;; 64: addb %al, (%rax) -;; 66: addb %al, (%rax) -;; 68: cld +;; 61: ud2 +;; 63: addb %al, (%rax) +;; 65: addb %al, (%rax) +;; 67: addb %bh, %ah ;; 69: outl %eax, %dx diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat index 1678765e9961..2f150dff40ca 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ 
-22,24 +22,24 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq $0xfffc, %rdx -;; ja 0x24 +;; ja 0x22 ;; 11: addq 0x38(%rdi), %rdx -;; movl $0xffff0000, %r10d -;; movl %ecx, (%rdx, %r10) +;; movl $0xffff0000, %edi +;; movl %ecx, (%rdx, %rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 24: ud2 +;; 22: ud2 ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq $0xfffc, %rdx -;; ja 0x64 +;; ja 0x62 ;; 51: addq 0x38(%rdi), %rdx -;; movl $0xffff0000, %r10d -;; movl (%rdx, %r10), %eax +;; movl $0xffff0000, %edi +;; movl (%rdx, %rdi), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 64: ud2 +;; 62: ud2 diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat index c8371fa25fd3..6c3359fbd293 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0_offset.wat @@ -22,21 +22,22 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x15(%rip), %rdx -;; ja 0x1e -;; 11: movq 0x38(%rdi), %r9 -;; movb %cl, (%r9, %rdx) +;; ja 0x1d +;; 11: movq 0x38(%rdi), %rsi +;; movb %cl, (%rsi, %rdx) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 1e: ud2 +;; 1d: ud2 +;; 1f: addb %bh, %bh ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x1d(%rip), %rdx ;; ja 0x5f -;; 51: movq 0x38(%rdi), %r9 -;; movzbq (%r9, %rdx), %rax +;; 51: movq 0x38(%rdi), %rsi +;; movzbq (%rsi, %rdx), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat index 38a38bcdbdef..d16cc8e7ec1c 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0x1000_offset.wat @@ -22,23 +22,25 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x1d(%rip), %rdx -;; ja 0x22 -;; 11: movq 0x38(%rdi), %r9 -;; movb %cl, 0x1000(%r9, %rdx) +;; ja 0x21 +;; 11: movq 0x38(%rdi), %rsi +;; movb %cl, 0x1000(%rsi, %rdx) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 22: ud2 -;; 24: addb %al, (%rax) -;; 26: addb %al, (%rax) +;; 21: ud2 +;; 23: addb %al, (%rax) +;; 25: addb %al, (%rax) +;; 27: addb %bh, %bh +;; 29: outl %eax, %dx ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x1d(%rip), %rdx ;; ja 0x63 -;; 51: movq 0x38(%rdi), %r9 -;; movzbq 0x1000(%r9, %rdx), %rax +;; 51: movq 0x38(%rdi), %rsi +;; movzbq 0x1000(%rsi, %rdx), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat index 8eceb8662e13..172e764fb57f 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -22,24 +22,24 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq $0xffff, %rdx -;; ja 0x24 +;; ja 0x22 ;; 11: addq 0x38(%rdi), %rdx -;; movl $0xffff0000, %r10d -;; movb %cl, (%rdx, %r10) +;; movl $0xffff0000, 
%edi +;; movb %cl, (%rdx, %rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 24: ud2 +;; 22: ud2 ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq $0xffff, %rdx -;; ja 0x65 +;; ja 0x64 ;; 51: addq 0x38(%rdi), %rdx -;; movl $0xffff0000, %r10d -;; movzbq (%rdx, %r10), %rax +;; movl $0xffff0000, %edi +;; movzbq (%rdx, %rdi), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 65: ud2 +;; 64: ud2 diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat index ccccf966e862..11c5be6eab18 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0_offset.wat @@ -21,33 +21,33 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %r10, %r10 -;; movq %rdx, %r9 -;; addq 0x38(%rdi), %r9 -;; cmpq 0x13(%rip), %rdx -;; cmovaq %r10, %r9 -;; movl %ecx, (%r9) +;; movq %rdi, %r8 +;; xorq %rdi, %rdi +;; movq %r8, %r10 +;; movq %rdx, %rsi +;; addq 0x38(%r10), %rsi +;; cmpq 0xd(%rip), %rdx +;; cmovaq %rdi, %rsi +;; movl %ecx, (%rsi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 21: addb %al, (%rax) -;; 23: addb %al, (%rax) -;; 25: addb %al, (%rax) -;; 27: addb %bh, %ah +;; 26: addb %al, (%rax) +;; 28: cld ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %r10, %r10 -;; movq %rdx, %r9 -;; addq 0x38(%rdi), %r9 -;; cmpq 0x13(%rip), %rdx -;; cmovaq %r10, %r9 -;; movl (%r9), %eax +;; movq %rdi, %r8 +;; xorq %rdi, %rdi +;; movq %r8, %r10 +;; movq %rdx, %rsi +;; addq 0x38(%r10), %rsi +;; cmpq 0xd(%rip), %rdx +;; cmovaq %rdi, %rsi +;; movl (%rsi), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 61: addb %al, (%rax) -;; 63: addb %al, (%rax) -;; 65: addb %al, (%rax) -;; 67: addb %bh, %ah +;; 66: addb %al, (%rax) +;; 68: cld diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat index 01ea353bc8c0..6f6a1ecada58 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -21,31 +21,31 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %r11, %r11 -;; movq 0x38(%rdi), %rsi -;; leaq 0x1000(%rsi, %rdx), %r10 +;; xorq %r8, %r8 +;; movq 0x38(%rdi), %r9 +;; leaq 0x1000(%r9, %rdx), %rdi ;; cmpq 0xe(%rip), %rdx -;; cmovaq %r11, %r10 -;; movl %ecx, (%r10) +;; cmovaq %r8, %rdi +;; movl %ecx, (%rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 26: addb %al, (%rax) -;; 28: cld +;; 25: addb %al, (%rax) +;; 27: addb %bh, %ah ;; 29: outl %eax, %dx ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %r11, %r11 -;; movq 0x38(%rdi), %rsi -;; leaq 0x1000(%rsi, %rdx), %r10 +;; xorq %r8, %r8 +;; movq 0x38(%rdi), %r9 +;; leaq 0x1000(%r9, %rdx), %rdi ;; cmpq 0xe(%rip), %rdx -;; cmovaq %r11, %r10 -;; movl (%r10), %eax +;; cmovaq %r8, %rdi +;; movl (%rdi), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 66: addb %al, (%rax) -;; 68: cld +;; 65: addb %al, (%rax) +;; 67: addb %bh, %ah ;; 69: outl %eax, %dx diff --git 
a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat index c585c161c6fb..f86e97f54e1d 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -21,14 +21,14 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %rsi, %rsi -;; movq %rdx, %rax -;; addq 0x38(%rdi), %rax -;; movl $0xffff0000, %edi -;; leaq (%rax, %rdi), %r11 +;; xorq %r9, %r9 +;; movq %rdx, %r10 +;; addq 0x38(%rdi), %r10 +;; movl $0xffff0000, %r11d +;; leaq (%r10, %r11), %r8 ;; cmpq $0xfffc, %rdx -;; cmovaq %rsi, %r11 -;; movl %ecx, (%r11) +;; cmovaq %r9, %r8 +;; movl %ecx, (%r8) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -36,14 +36,14 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %rsi, %rsi -;; movq %rdx, %rax -;; addq 0x38(%rdi), %rax -;; movl $0xffff0000, %edi -;; leaq (%rax, %rdi), %r11 +;; xorq %r9, %r9 +;; movq %rdx, %r10 +;; addq 0x38(%rdi), %r10 +;; movl $0xffff0000, %r11d +;; leaq (%r10, %r11), %r8 ;; cmpq $0xfffc, %rdx -;; cmovaq %rsi, %r11 -;; movl (%r11), %eax +;; cmovaq %r9, %r8 +;; movl (%r8), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat index 723acec2b6f9..6b5ce1034600 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0_offset.wat @@ -21,32 +21,30 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %r10, %r10 -;; movq %rdx, %r9 -;; addq 0x38(%rdi), %r9 -;; cmpq 0x13(%rip), %rdx -;; cmovaq %r10, %r9 -;; movb %cl, (%r9) +;; movq %rdi, %r8 +;; xorq %rdi, %rdi +;; movq %r8, %r10 +;; movq %rdx, %rsi +;; addq 0x38(%r10), %rsi +;; cmpq 0xd(%rip), %rdx +;; cmovaq %rdi, %rsi +;; movb %cl, (%rsi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 21: addb %al, (%rax) -;; 23: addb %al, (%rax) -;; 25: addb %al, (%rax) -;; 27: addb %bh, %bh +;; 26: addb %al, (%rax) ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %r10, %r10 -;; movq %rdx, %r9 -;; addq 0x38(%rdi), %r9 -;; cmpq 0x13(%rip), %rdx -;; cmovaq %r10, %r9 -;; movzbq (%r9), %rax +;; movq %rdi, %r8 +;; xorq %rdi, %rdi +;; movq %r8, %r10 +;; movq %rdx, %rsi +;; addq 0x38(%r10), %rsi +;; cmpq 0xd(%rip), %rdx +;; cmovaq %rdi, %rsi +;; movzbq (%rsi), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 62: addb %al, (%rax) -;; 64: addb %al, (%rax) -;; 66: addb %al, (%rax) diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat index 263ec5b1431d..80f2ef057a55 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -21,26 +21,28 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %r11, %r11 -;; movq 
0x38(%rdi), %rsi -;; leaq 0x1000(%rsi, %rdx), %r10 +;; xorq %r8, %r8 +;; movq 0x38(%rdi), %r9 +;; leaq 0x1000(%r9, %rdx), %rdi ;; cmpq 0xe(%rip), %rdx -;; cmovaq %r11, %r10 -;; movb %cl, (%r10) +;; cmovaq %r8, %rdi +;; movb %cl, (%rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 26: addb %al, (%rax) +;; 25: addb %al, (%rax) +;; 27: addb %bh, %bh +;; 29: outl %eax, %dx ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %r11, %r11 -;; movq 0x38(%rdi), %rsi -;; leaq 0x1000(%rsi, %rdx), %r10 +;; xorq %r8, %r8 +;; movq 0x38(%rdi), %r9 +;; leaq 0x1000(%r9, %rdx), %rdi ;; cmpq 0xe(%rip), %rdx -;; cmovaq %r11, %r10 -;; movzbq (%r10), %rax +;; cmovaq %r8, %rdi +;; movzbq (%rdi), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat index 9b59a7668932..501f2011d3bb 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -21,14 +21,14 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %rsi, %rsi -;; movq %rdx, %rax -;; addq 0x38(%rdi), %rax -;; movl $0xffff0000, %edi -;; leaq (%rax, %rdi), %r11 +;; xorq %r9, %r9 +;; movq %rdx, %r10 +;; addq 0x38(%rdi), %r10 +;; movl $0xffff0000, %r11d +;; leaq (%r10, %r11), %r8 ;; cmpq $0xffff, %rdx -;; cmovaq %rsi, %r11 -;; movb %cl, (%r11) +;; cmovaq %r9, %r8 +;; movb %cl, (%r8) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -36,14 +36,14 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %rsi, %rsi -;; movq %rdx, %rax -;; addq 0x38(%rdi), %rax -;; movl $0xffff0000, %edi -;; leaq (%rax, %rdi), %r11 +;; xorq %r9, %r9 +;; movq %rdx, %r10 +;; addq 0x38(%rdi), %r10 +;; movl $0xffff0000, %r11d +;; leaq (%r10, %r11), %r8 ;; cmpq $0xffff, %rdx -;; cmovaq %rsi, %r11 -;; movzbq (%r11), %rax +;; cmovaq %r9, %r8 +;; movzbq (%r8), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat index 47507d6bdc6f..cb0fbf9a7b47 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0_offset.wat @@ -22,24 +22,24 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x15(%rip), %rdx -;; ja 0x1e -;; 11: movq 0x38(%rdi), %r9 -;; movl %ecx, (%r9, %rdx) +;; ja 0x1d +;; 11: movq 0x38(%rdi), %rsi +;; movl %ecx, (%rsi, %rdx) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 1e: ud2 -;; 20: cld +;; 1d: ud2 +;; 1f: addb %bh, %ah ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x15(%rip), %rdx -;; ja 0x5e -;; 51: movq 0x38(%rdi), %r9 -;; movl (%r9, %rdx), %eax +;; ja 0x5d +;; 51: movq 0x38(%rdi), %rsi +;; movl (%rsi, %rdx), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 5e: ud2 -;; 60: cld +;; 5d: ud2 +;; 5f: addb %bh, %ah diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat 
b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat index 467e0ccc85b6..cd10c6abfaa1 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0x1000_offset.wat @@ -22,30 +22,30 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x1d(%rip), %rdx -;; ja 0x22 -;; 11: movq 0x38(%rdi), %r9 -;; movl %ecx, 0x1000(%r9, %rdx) +;; ja 0x21 +;; 11: movq 0x38(%rdi), %rsi +;; movl %ecx, 0x1000(%rsi, %rdx) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 22: ud2 -;; 24: addb %al, (%rax) -;; 26: addb %al, (%rax) -;; 28: cld +;; 21: ud2 +;; 23: addb %al, (%rax) +;; 25: addb %al, (%rax) +;; 27: addb %bh, %ah ;; 29: outl %eax, %dx ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x1d(%rip), %rdx -;; ja 0x62 -;; 51: movq 0x38(%rdi), %r9 -;; movl 0x1000(%r9, %rdx), %eax +;; ja 0x61 +;; 51: movq 0x38(%rdi), %rsi +;; movl 0x1000(%rsi, %rdx), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 62: ud2 -;; 64: addb %al, (%rax) -;; 66: addb %al, (%rax) -;; 68: cld +;; 61: ud2 +;; 63: addb %al, (%rax) +;; 65: addb %al, (%rax) +;; 67: addb %bh, %ah ;; 69: outl %eax, %dx diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat index 09cc2894400d..cc3458ef85ac 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i32_access_0xffff0000_offset.wat @@ -22,24 +22,24 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq $0xfffc, %rdx -;; ja 0x24 +;; ja 0x22 ;; 11: addq 0x38(%rdi), %rdx -;; movl $0xffff0000, %r10d -;; movl %ecx, (%rdx, %r10) +;; movl $0xffff0000, %edi +;; movl %ecx, (%rdx, %rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 24: ud2 +;; 22: ud2 ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq $0xfffc, %rdx -;; ja 0x64 +;; ja 0x62 ;; 51: addq 0x38(%rdi), %rdx -;; movl $0xffff0000, %r10d -;; movl (%rdx, %r10), %eax +;; movl $0xffff0000, %edi +;; movl (%rdx, %rdi), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 64: ud2 +;; 62: ud2 diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat index b67316e98865..ecbbd649aff7 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0_offset.wat @@ -22,21 +22,22 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x15(%rip), %rdx -;; ja 0x1e -;; 11: movq 0x38(%rdi), %r9 -;; movb %cl, (%r9, %rdx) +;; ja 0x1d +;; 11: movq 0x38(%rdi), %rsi +;; movb %cl, (%rsi, %rdx) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 1e: ud2 +;; 1d: ud2 +;; 1f: addb %bh, %bh ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x1d(%rip), %rdx ;; ja 0x5f -;; 51: movq 0x38(%rdi), %r9 -;; movzbq (%r9, %rdx), %rax +;; 51: movq 0x38(%rdi), %rsi +;; movzbq (%rsi, %rdx), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git 
a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat index 43ddafa0ceb9..b46f6f89b1d2 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0x1000_offset.wat @@ -22,23 +22,25 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x1d(%rip), %rdx -;; ja 0x22 -;; 11: movq 0x38(%rdi), %r9 -;; movb %cl, 0x1000(%r9, %rdx) +;; ja 0x21 +;; 11: movq 0x38(%rdi), %rsi +;; movb %cl, 0x1000(%rsi, %rdx) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 22: ud2 -;; 24: addb %al, (%rax) -;; 26: addb %al, (%rax) +;; 21: ud2 +;; 23: addb %al, (%rax) +;; 25: addb %al, (%rax) +;; 27: addb %bh, %bh +;; 29: outl %eax, %dx ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq 0x1d(%rip), %rdx ;; ja 0x63 -;; 51: movq 0x38(%rdi), %r9 -;; movzbq 0x1000(%r9, %rdx), %rax +;; 51: movq 0x38(%rdi), %rsi +;; movzbq 0x1000(%rsi, %rdx), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat index 467957ce4e79..d50cbcc90752 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_no_spectre_i8_access_0xffff0000_offset.wat @@ -22,24 +22,24 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq $0xffff, %rdx -;; ja 0x24 +;; ja 0x22 ;; 11: addq 0x38(%rdi), %rdx -;; movl $0xffff0000, %r10d -;; movb %cl, (%rdx, %r10) +;; movl $0xffff0000, %edi +;; movb %cl, (%rdx, %rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 24: ud2 +;; 22: ud2 ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp ;; cmpq $0xffff, %rdx -;; ja 0x65 +;; ja 0x64 ;; 51: addq 0x38(%rdi), %rdx -;; movl $0xffff0000, %r10d -;; movzbq (%rdx, %r10), %rax +;; movl $0xffff0000, %edi +;; movzbq (%rdx, %rdi), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 65: ud2 +;; 64: ud2 diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat index 460c43851f7c..730f2c7afb9d 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0_offset.wat @@ -21,33 +21,33 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %r10, %r10 -;; movq %rdx, %r9 -;; addq 0x38(%rdi), %r9 -;; cmpq 0x13(%rip), %rdx -;; cmovaq %r10, %r9 -;; movl %ecx, (%r9) +;; movq %rdi, %r8 +;; xorq %rdi, %rdi +;; movq %r8, %r10 +;; movq %rdx, %rsi +;; addq 0x38(%r10), %rsi +;; cmpq 0xd(%rip), %rdx +;; cmovaq %rdi, %rsi +;; movl %ecx, (%rsi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 21: addb %al, (%rax) -;; 23: addb %al, (%rax) -;; 25: addb %al, (%rax) -;; 27: addb %bh, %ah +;; 26: addb %al, (%rax) +;; 28: cld ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %r10, %r10 -;; movq 
%rdx, %r9 -;; addq 0x38(%rdi), %r9 -;; cmpq 0x13(%rip), %rdx -;; cmovaq %r10, %r9 -;; movl (%r9), %eax +;; movq %rdi, %r8 +;; xorq %rdi, %rdi +;; movq %r8, %r10 +;; movq %rdx, %rsi +;; addq 0x38(%r10), %rsi +;; cmpq 0xd(%rip), %rdx +;; cmovaq %rdi, %rsi +;; movl (%rsi), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 61: addb %al, (%rax) -;; 63: addb %al, (%rax) -;; 65: addb %al, (%rax) -;; 67: addb %bh, %ah +;; 66: addb %al, (%rax) +;; 68: cld diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat index 0e3c2906b193..65bea6916f8b 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0x1000_offset.wat @@ -21,31 +21,31 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %r11, %r11 -;; movq 0x38(%rdi), %rsi -;; leaq 0x1000(%rsi, %rdx), %r10 +;; xorq %r8, %r8 +;; movq 0x38(%rdi), %r9 +;; leaq 0x1000(%r9, %rdx), %rdi ;; cmpq 0xe(%rip), %rdx -;; cmovaq %r11, %r10 -;; movl %ecx, (%r10) +;; cmovaq %r8, %rdi +;; movl %ecx, (%rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 26: addb %al, (%rax) -;; 28: cld +;; 25: addb %al, (%rax) +;; 27: addb %bh, %ah ;; 29: outl %eax, %dx ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %r11, %r11 -;; movq 0x38(%rdi), %rsi -;; leaq 0x1000(%rsi, %rdx), %r10 +;; xorq %r8, %r8 +;; movq 0x38(%rdi), %r9 +;; leaq 0x1000(%r9, %rdx), %rdi ;; cmpq 0xe(%rip), %rdx -;; cmovaq %r11, %r10 -;; movl (%r10), %eax +;; cmovaq %r8, %rdi +;; movl (%rdi), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 66: addb %al, (%rax) -;; 68: cld +;; 65: addb %al, (%rax) +;; 67: addb %bh, %ah ;; 69: outl %eax, %dx diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat index 1fc7392274c1..e27eab948eda 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i32_access_0xffff0000_offset.wat @@ -21,14 +21,14 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %rsi, %rsi -;; movq %rdx, %rax -;; addq 0x38(%rdi), %rax -;; movl $0xffff0000, %edi -;; leaq (%rax, %rdi), %r11 +;; xorq %r9, %r9 +;; movq %rdx, %r10 +;; addq 0x38(%rdi), %r10 +;; movl $0xffff0000, %r11d +;; leaq (%r10, %r11), %r8 ;; cmpq $0xfffc, %rdx -;; cmovaq %rsi, %r11 -;; movl %ecx, (%r11) +;; cmovaq %r9, %r8 +;; movl %ecx, (%r8) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -36,14 +36,14 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %rsi, %rsi -;; movq %rdx, %rax -;; addq 0x38(%rdi), %rax -;; movl $0xffff0000, %edi -;; leaq (%rax, %rdi), %r11 +;; xorq %r9, %r9 +;; movq %rdx, %r10 +;; addq 0x38(%rdi), %r10 +;; movl $0xffff0000, %r11d +;; leaq (%r10, %r11), %r8 ;; cmpq $0xfffc, %rdx -;; cmovaq %rsi, %r11 -;; movl (%r11), %eax +;; cmovaq %r9, %r8 +;; movl (%r8), %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git 
a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat index f407ad1a07ac..82848d5099a9 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0_offset.wat @@ -21,32 +21,30 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %r10, %r10 -;; movq %rdx, %r9 -;; addq 0x38(%rdi), %r9 -;; cmpq 0x13(%rip), %rdx -;; cmovaq %r10, %r9 -;; movb %cl, (%r9) +;; movq %rdi, %r8 +;; xorq %rdi, %rdi +;; movq %r8, %r10 +;; movq %rdx, %rsi +;; addq 0x38(%r10), %rsi +;; cmpq 0xd(%rip), %rdx +;; cmovaq %rdi, %rsi +;; movb %cl, (%rsi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 21: addb %al, (%rax) -;; 23: addb %al, (%rax) -;; 25: addb %al, (%rax) -;; 27: addb %bh, %bh +;; 26: addb %al, (%rax) ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %r10, %r10 -;; movq %rdx, %r9 -;; addq 0x38(%rdi), %r9 -;; cmpq 0x13(%rip), %rdx -;; cmovaq %r10, %r9 -;; movzbq (%r9), %rax +;; movq %rdi, %r8 +;; xorq %rdi, %rdi +;; movq %r8, %r10 +;; movq %rdx, %rsi +;; addq 0x38(%r10), %rsi +;; cmpq 0xd(%rip), %rdx +;; cmovaq %rdi, %rsi +;; movzbq (%rsi), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 62: addb %al, (%rax) -;; 64: addb %al, (%rax) -;; 66: addb %al, (%rax) diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat index a5a4c14d014c..0a1cf839eab2 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0x1000_offset.wat @@ -21,26 +21,28 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %r11, %r11 -;; movq 0x38(%rdi), %rsi -;; leaq 0x1000(%rsi, %rdx), %r10 +;; xorq %r8, %r8 +;; movq 0x38(%rdi), %r9 +;; leaq 0x1000(%r9, %rdx), %rdi ;; cmpq 0xe(%rip), %rdx -;; cmovaq %r11, %r10 -;; movb %cl, (%r10) +;; cmovaq %r8, %rdi +;; movb %cl, (%rdi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 26: addb %al, (%rax) +;; 25: addb %al, (%rax) +;; 27: addb %bh, %bh +;; 29: outl %eax, %dx ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %r11, %r11 -;; movq 0x38(%rdi), %rsi -;; leaq 0x1000(%rsi, %rdx), %r10 +;; xorq %r8, %r8 +;; movq 0x38(%rdi), %r9 +;; leaq 0x1000(%r9, %rdx), %rdi ;; cmpq 0xe(%rip), %rdx -;; cmovaq %r11, %r10 -;; movzbq (%r10), %rax +;; cmovaq %r8, %rdi +;; movzbq (%rdi), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat index cf87058fdad6..46bc33c20659 100644 --- a/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat +++ b/tests/disas/load-store/x64/load_store_static_kind_i64_index_0xffffffff_guard_yes_spectre_i8_access_0xffff0000_offset.wat @@ -21,14 +21,14 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq 
%rsi, %rsi -;; movq %rdx, %rax -;; addq 0x38(%rdi), %rax -;; movl $0xffff0000, %edi -;; leaq (%rax, %rdi), %r11 +;; xorq %r9, %r9 +;; movq %rdx, %r10 +;; addq 0x38(%rdi), %r10 +;; movl $0xffff0000, %r11d +;; leaq (%r10, %r11), %r8 ;; cmpq $0xffff, %rdx -;; cmovaq %rsi, %r11 -;; movb %cl, (%r11) +;; cmovaq %r9, %r8 +;; movb %cl, (%r8) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -36,14 +36,14 @@ ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp -;; xorq %rsi, %rsi -;; movq %rdx, %rax -;; addq 0x38(%rdi), %rax -;; movl $0xffff0000, %edi -;; leaq (%rax, %rdi), %r11 +;; xorq %r9, %r9 +;; movq %rdx, %r10 +;; addq 0x38(%rdi), %r10 +;; movl $0xffff0000, %r11d +;; leaq (%r10, %r11), %r8 ;; cmpq $0xffff, %rdx -;; cmovaq %rsi, %r11 -;; movzbq (%r11), %rax +;; cmovaq %r9, %r8 +;; movzbq (%r8), %rax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/pcc-imported-memory.wat b/tests/disas/pcc-imported-memory.wat index 507a03dd792c..564422192f62 100644 --- a/tests/disas/pcc-imported-memory.wat +++ b/tests/disas/pcc-imported-memory.wat @@ -37,22 +37,22 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x30(%rdi), %rdx -;; movq 8(%rdx), %rcx -;; shrq $0x10, %rcx -;; movq %rcx, %rdx -;; shll $0x10, %edx -;; leal 4(%rcx), %r8d -;; cmpl %r8d, %edx -;; jbe 0x3d -;; 23: testl %ecx, %ecx -;; jle 0x3d -;; 2b: movq 0x30(%rdi), %rdi -;; movq (%rdi), %rdi -;; movl %ecx, %eax -;; movl (%rdi, %rax), %r10d -;; jmp 0x40 -;; 3d: xorl %r10d, %r10d +;; movq 0x30(%rdi), %rax +;; movq 8(%rax), %rax +;; shrq $0x10, %rax +;; movq %rax, %rcx +;; shll $0x10, %ecx +;; leal 4(%rax), %edx +;; cmpl %edx, %ecx +;; jbe 0x3a +;; 21: testl %eax, %eax +;; jle 0x3a +;; 29: movq 0x30(%rdi), %rcx +;; movq (%rcx), %rcx +;; movl %eax, %eax +;; movl (%rcx, %rax), %eax +;; jmp 0x3c +;; 3a: xorl %eax, %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/pcc-insertlane-x64-avx.wat b/tests/disas/pcc-insertlane-x64-avx.wat index 764f93954e2e..b57ffa38f044 100644 --- a/tests/disas/pcc-insertlane-x64-avx.wat +++ b/tests/disas/pcc-insertlane-x64-avx.wat @@ -69,27 +69,30 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; vmovdqu 0x14(%rip), %xmm6 -;; movl %edx, %r9d -;; movq 0x38(%rdi), %r10 -;; vpinsrb $1, (%r10, %r9), %xmm6, %xmm0 +;; movl %edx, %esi +;; movq 0x38(%rdi), %rdi +;; vpinsrb $1, (%rdi, %rsi), %xmm6, %xmm0 ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 1f: addb %ch, (%rax) -;; 21: subl %ebp, (%rcx) -;; 23: subl %ebp, (%rax) -;; 25: imull $0x616d286d, 0x20(%rsi), %ebp +;; 1e: addb %al, (%rax) +;; 20: subb %ch, (%rcx) +;; 22: subl %ebp, (%rcx) +;; 24: subb %ch, 0x6e(%rcx) +;; 27: andb %ch, 0x28(%rbp) +;; 2a: insl %dx, (%rdi) ;; ;; wasm[0]::function[1]: ;; pushq %rbp ;; movq %rsp, %rbp ;; vmovdqu 0x14(%rip), %xmm6 -;; movl %edx, %r9d -;; movq 0x38(%rdi), %r10 -;; vpinsrw $1, (%r10, %r9), %xmm6, %xmm0 +;; movl %edx, %esi +;; movq 0x38(%rdi), %rdi +;; vpinsrw $1, (%rdi, %rsi), %xmm6, %xmm0 ;; movq %rbp, %rsp ;; popq %rbp ;; retq +;; 5d: addb %al, (%rax) ;; 5f: addb %ch, (%rax) ;; 61: subl %ebp, (%rcx) ;; 63: subl %ebp, (%rax) @@ -99,38 +102,42 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; vmovdqu 0x14(%rip), %xmm6 -;; movl %edx, %r9d -;; movq 0x38(%rdi), %r10 -;; vpinsrd $1, (%r10, %r9), %xmm6, %xmm0 +;; movl %edx, %esi +;; movq 0x38(%rdi), %rdi +;; vpinsrd $1, (%rdi, %rsi), %xmm6, %xmm0 ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; 9f: addb %ch, (%rax) -;; a1: subl %ebp, (%rcx) -;; a3: subl %ebp, (%rax) -;; a5: imull $0x616d286d, 0x20(%rsi), %ebp +;; 9e: addb %al, (%rax) +;; a0: subb %ch, (%rcx) +;; a2: subl 
%ebp, (%rcx) +;; a4: subb %ch, 0x6e(%rcx) +;; a7: andb %ch, 0x28(%rbp) +;; aa: insl %dx, (%rdi) ;; ;; wasm[0]::function[3]: ;; pushq %rbp ;; movq %rsp, %rbp ;; vmovdqu 0x14(%rip), %xmm6 -;; movl %edx, %r9d -;; movq 0x38(%rdi), %r10 -;; vpinsrq $1, (%r10, %r9), %xmm6, %xmm0 +;; movl %edx, %esi +;; movq 0x38(%rdi), %rdi +;; vpinsrq $1, (%rdi, %rsi), %xmm6, %xmm0 ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; df: addb %ch, (%rax) -;; e1: subl %ebp, (%rcx) -;; e3: subl %ebp, (%rax) -;; e5: imull $0x616d286d, 0x20(%rsi), %ebp +;; de: addb %al, (%rax) +;; e0: subb %ch, (%rcx) +;; e2: subl %ebp, (%rcx) +;; e4: subb %ch, 0x6e(%rcx) +;; e7: andb %ch, 0x28(%rbp) +;; ea: insl %dx, (%rdi) ;; ;; wasm[0]::function[4]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r9d -;; movq 0x38(%rdi), %r10 -;; vinsertps $0, (%r10, %r9), %xmm0, %xmm0 +;; movl %edx, %esi +;; movq 0x38(%rdi), %rdi +;; vinsertps $0, (%rdi, %rsi), %xmm0, %xmm0 ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -138,9 +145,10 @@ ;; wasm[0]::function[5]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r10d -;; movq 0x38(%rdi), %r11 -;; vmovsd (%r11, %r10), %xmm7 +;; movq %rdi, %r9 +;; movl %edx, %edi +;; movq 0x38(%r9), %r8 +;; vmovsd (%r8, %rdi), %xmm7 ;; vmovlhps %xmm7, %xmm0, %xmm0 ;; movq %rbp, %rsp ;; popq %rbp @@ -149,9 +157,10 @@ ;; wasm[0]::function[6]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r10d -;; movq 0x38(%rdi), %r11 -;; vmovsd (%r11, %r10), %xmm7 +;; movq %rdi, %r9 +;; movl %edx, %edi +;; movq 0x38(%r9), %r8 +;; vmovsd (%r8, %rdi), %xmm7 ;; vmovsd %xmm7, %xmm0, %xmm0 ;; movq %rbp, %rsp ;; popq %rbp @@ -161,9 +170,9 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; vpshufd $0xee, %xmm0, %xmm6 -;; movl %edx, %r9d -;; movq 0x38(%rdi), %r10 -;; vmovsd %xmm6, (%r10, %r9) +;; movl %edx, %esi +;; movq 0x38(%rdi), %rdi +;; vmovsd %xmm6, (%rdi, %rsi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -172,9 +181,9 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; vpshufd $1, %xmm0, %xmm6 -;; movl %edx, %r9d -;; movq 0x38(%rdi), %r10 -;; vmovss %xmm6, (%r10, %r9) +;; movl %edx, %esi +;; movq 0x38(%rdi), %rdi +;; vmovss %xmm6, (%rdi, %rsi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -182,11 +191,11 @@ ;; wasm[0]::function[9]: ;; pushq %rbp ;; movq %rsp, %rbp -;; vpextrb $1, %xmm0, %r10d -;; movsbl %r10b, %r10d -;; movl %edx, %r11d -;; movq 0x38(%rdi), %rsi -;; movb %r10b, (%rsi, %r11) +;; vpextrb $1, %xmm0, %r8d +;; movsbl %r8b, %r8d +;; movl %edx, %r9d +;; movq 0x38(%rdi), %rdi +;; movb %r8b, (%rdi, %r9) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -194,11 +203,11 @@ ;; wasm[0]::function[10]: ;; pushq %rbp ;; movq %rsp, %rbp -;; vpextrw $1, %xmm0, %r10d -;; movswl %r10w, %r10d -;; movl %edx, %r11d -;; movq 0x38(%rdi), %rsi -;; movw %r10w, (%rsi, %r11) +;; vpextrw $1, %xmm0, %r8d +;; movswl %r8w, %r8d +;; movl %edx, %r9d +;; movq 0x38(%rdi), %rdi +;; movw %r8w, (%rdi, %r9) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -206,9 +215,9 @@ ;; wasm[0]::function[11]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; movq 0x38(%rdi), %r9 -;; vpextrd $1, %xmm0, (%r9, %r8) +;; movl %edx, %esi +;; movq 0x38(%rdi), %rdi +;; vpextrd $1, %xmm0, (%rdi, %rsi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -216,9 +225,9 @@ ;; wasm[0]::function[12]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; movq 0x38(%rdi), %r9 -;; vpextrq $1, %xmm0, (%r9, %r8) +;; movl %edx, %esi +;; movq 0x38(%rdi), %rdi +;; vpextrq $1, %xmm0, (%rdi, %rsi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/pcc-insertlane-x64.wat b/tests/disas/pcc-insertlane-x64.wat index 
3e81a9da0fcb..fb4dc8babc23 100644 --- a/tests/disas/pcc-insertlane-x64.wat +++ b/tests/disas/pcc-insertlane-x64.wat @@ -69,12 +69,13 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; movdqu 0x14(%rip), %xmm0 -;; movl %edx, %r9d -;; movq 0x38(%rdi), %r10 -;; pinsrb $1, (%r10, %r9), %xmm0 +;; movl %edx, %esi +;; movq 0x38(%rdi), %rdi +;; pinsrb $1, (%rdi, %rsi), %xmm0 ;; movq %rbp, %rsp ;; popq %rbp ;; retq +;; 1e: addb %al, (%rax) ;; 20: subb %ch, (%rcx) ;; 22: subl %ebp, (%rcx) ;; 24: subb %ch, 0x6e(%rcx) @@ -85,12 +86,13 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; movdqu 0x14(%rip), %xmm0 -;; movl %edx, %r9d -;; movq 0x38(%rdi), %r10 -;; pinsrw $1, (%r10, %r9), %xmm0 +;; movl %edx, %esi +;; movq 0x38(%rdi), %rdi +;; pinsrw $1, (%rdi, %rsi), %xmm0 ;; movq %rbp, %rsp ;; popq %rbp ;; retq +;; 5d: addb %al, (%rax) ;; 5f: addb %ch, (%rax) ;; 61: subl %ebp, (%rcx) ;; 63: subl %ebp, (%rax) @@ -100,12 +102,13 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; movdqu 0x14(%rip), %xmm0 -;; movl %edx, %r9d -;; movq 0x38(%rdi), %r10 -;; pinsrd $1, (%r10, %r9), %xmm0 +;; movl %edx, %esi +;; movq 0x38(%rdi), %rdi +;; pinsrd $1, (%rdi, %rsi), %xmm0 ;; movq %rbp, %rsp ;; popq %rbp ;; retq +;; 9e: addb %al, (%rax) ;; a0: subb %ch, (%rcx) ;; a2: subl %ebp, (%rcx) ;; a4: subb %ch, 0x6e(%rcx) @@ -116,24 +119,23 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; movdqu 0x14(%rip), %xmm0 -;; movl %edx, %r9d -;; movq 0x38(%rdi), %r10 -;; pinsrq $1, (%r10, %r9), %xmm0 +;; movl %edx, %esi +;; movq 0x38(%rdi), %rdi +;; pinsrq $1, (%rdi, %rsi), %xmm0 ;; movq %rbp, %rsp ;; popq %rbp ;; retq -;; e0: subb %ch, (%rcx) -;; e2: subl %ebp, (%rcx) -;; e4: subb %ch, 0x6e(%rcx) -;; e7: andb %ch, 0x28(%rbp) -;; ea: insl %dx, (%rdi) +;; df: addb %ch, (%rax) +;; e1: subl %ebp, (%rcx) +;; e3: subl %ebp, (%rax) +;; e5: imull $0x616d286d, 0x20(%rsi), %ebp ;; ;; wasm[0]::function[4]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r9d -;; movq 0x38(%rdi), %r10 -;; insertps $0, (%r10, %r9), %xmm0 +;; movl %edx, %esi +;; movq 0x38(%rdi), %rdi +;; insertps $0, (%rdi, %rsi), %xmm0 ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -141,9 +143,10 @@ ;; wasm[0]::function[5]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r10d -;; movq 0x38(%rdi), %r11 -;; movsd (%r11, %r10), %xmm7 +;; movq %rdi, %r9 +;; movl %edx, %edi +;; movq 0x38(%r9), %r8 +;; movsd (%r8, %rdi), %xmm7 ;; movlhps %xmm7, %xmm0 ;; movq %rbp, %rsp ;; popq %rbp @@ -152,9 +155,10 @@ ;; wasm[0]::function[6]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r10d -;; movq 0x38(%rdi), %r11 -;; movsd (%r11, %r10), %xmm7 +;; movq %rdi, %r9 +;; movl %edx, %edi +;; movq 0x38(%r9), %r8 +;; movsd (%r8, %rdi), %xmm7 ;; movsd %xmm7, %xmm0 ;; movq %rbp, %rsp ;; popq %rbp @@ -164,9 +168,9 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; pshufd $0xee, %xmm0, %xmm6 -;; movl %edx, %r9d -;; movq 0x38(%rdi), %r10 -;; movsd %xmm6, (%r10, %r9) +;; movl %edx, %esi +;; movq 0x38(%rdi), %rdi +;; movsd %xmm6, (%rdi, %rsi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -175,9 +179,9 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; pshufd $1, %xmm0, %xmm6 -;; movl %edx, %r9d -;; movq 0x38(%rdi), %r10 -;; movss %xmm6, (%r10, %r9) +;; movl %edx, %esi +;; movq 0x38(%rdi), %rdi +;; movss %xmm6, (%rdi, %rsi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -185,11 +189,11 @@ ;; wasm[0]::function[9]: ;; pushq %rbp ;; movq %rsp, %rbp -;; pextrb $1, %xmm0, %r10d -;; movsbl %r10b, %r10d -;; movl %edx, %r11d -;; movq 0x38(%rdi), %rsi -;; movb %r10b, (%rsi, %r11) +;; pextrb $1, %xmm0, %r8d +;; movsbl %r8b, %r8d +;; movl %edx, %r9d +;; movq 0x38(%rdi), %rdi +;; movb %r8b, (%rdi, %r9) 
;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -197,11 +201,11 @@ ;; wasm[0]::function[10]: ;; pushq %rbp ;; movq %rsp, %rbp -;; pextrw $1, %xmm0, %r10d -;; movswl %r10w, %r10d -;; movl %edx, %r11d -;; movq 0x38(%rdi), %rsi -;; movw %r10w, (%rsi, %r11) +;; pextrw $1, %xmm0, %r8d +;; movswl %r8w, %r8d +;; movl %edx, %r9d +;; movq 0x38(%rdi), %rdi +;; movw %r8w, (%rdi, %r9) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -209,9 +213,9 @@ ;; wasm[0]::function[11]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; movq 0x38(%rdi), %r9 -;; pextrd $1, %xmm0, (%r9, %r8) +;; movl %edx, %esi +;; movq 0x38(%rdi), %rdi +;; pextrd $1, %xmm0, (%rdi, %rsi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -219,9 +223,9 @@ ;; wasm[0]::function[12]: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r8d -;; movq 0x38(%rdi), %r9 -;; pextrq $1, %xmm0, (%r9, %r8) +;; movl %edx, %esi +;; movq 0x38(%rdi), %rdi +;; pextrq $1, %xmm0, (%rdi, %rsi) ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/pulley/coremark-1.wat b/tests/disas/pulley/coremark-1.wat index e20ee5af93f4..a1e637752111 100644 --- a/tests/disas/pulley/coremark-1.wat +++ b/tests/disas/pulley/coremark-1.wat @@ -70,28 +70,28 @@ (func $other) ) ;; wasm[0]::function[0]: -;; push_frame_save 16, x26 +;; push_frame_save 16, x24 ;; xzero x6 -;; xload64le_o32 x7, x0, 56 -;; xload64le_o32 x9, x0, 64 -;; xload16le_u32_g32 x8, x7, x9, x2, 0 -;; xload16le_u32_g32 x9, x7, x9, x3, 0 +;; xload64le_o32 x1, x0, 56 +;; xload64le_o32 x8, x0, 64 +;; xload16le_u32_g32 x7, x1, x8, x2, 0 +;; xload16le_u32_g32 x8, x1, x8, x3, 0 ;; xsub32_u8 x4, x4, 1 -;; xmul32 x8, x8, x9 -;; xshr32_u_u6 x9, x8, 2 -;; xband32_s8 x9, x9, 15 -;; xshr32_u_u6 x8, x8, 5 -;; xband32_s8 x8, x8, 127 -;; xmadd32 x6, x9, x8, x6 -;; xmov x26, x6 +;; xmul32 x7, x7, x8 +;; xshr32_u_u6 x8, x7, 2 +;; xband32_s8 x8, x8, 15 +;; xshr32_u_u6 x7, x7, 5 +;; xband32_s8 x7, x7, 127 +;; xmadd32 x6, x8, x7, x6 +;; xmov x24, x6 ;; xadd32 x2, x2, x5 ;; xadd32_u8 x3, x3, 2 ;; br_if_not32 x4, 0xe // target = 0x53 -;; 4b: xmov x6, x26 +;; 4b: xmov x6, x24 ;; jump -0x40 // target = 0xe ;; 53: call2 x0, x0, 0x10 // target = 0x63 -;; xmov x0, x26 -;; pop_frame_restore 16, x26 +;; xmov x0, x24 +;; pop_frame_restore 16, x24 ;; ret ;; ;; wasm[0]::function[1]::other: diff --git a/tests/disas/pulley/fib.wat b/tests/disas/pulley/fib.wat index 4bd80d0d16bf..98e0960766ab 100644 --- a/tests/disas/pulley/fib.wat +++ b/tests/disas/pulley/fib.wat @@ -49,23 +49,23 @@ ) ) ;; wasm[0]::function[0]::fib: -;; push_frame_save 32, x17, x24, x29 +;; push_frame_save 16, x16, x22 ;; br_if_xeq32_i8 x2, 0, 0x47 // target = 0x4c ;; br_if_xeq32_i8 x2, 1, 0x39 // target = 0x45 ;; 13: xsub32_u8 x14, x2, 1 -;; xmov x24, x0 -;; xmov x29, x2 -;; call3 x24, x24, x14, -0x1d // target = 0x0 -;; xmov x17, x0 -;; xmov x2, x29 -;; xmov x0, x24 +;; xmov x16, x2 +;; xmov x22, x0 +;; call3 x22, x22, x14, -0x1d // target = 0x0 +;; xmov x2, x16 +;; xmov x16, x0 +;; xmov x0, x22 ;; xsub32_u8 x14, x2, 2 ;; call3 x0, x0, x14, -0x32 // target = 0x0 -;; xmov x5, x17 -;; xadd32 x0, x5, x0 +;; xmov x1, x16 +;; xadd32 x0, x1, x0 ;; jump 0xe // target = 0x4e ;; 45: xone x0 ;; jump 0x7 // target = 0x4e ;; 4c: xone x0 -;; pop_frame_restore 32, x17, x24, x29 +;; pop_frame_restore 16, x16, x22 ;; ret diff --git a/tests/disas/riscv64-component-builtins-asm.wat b/tests/disas/riscv64-component-builtins-asm.wat index 3ee1b839fd13..88f9ee9f8489 100644 --- a/tests/disas/riscv64-component-builtins-asm.wat +++ b/tests/disas/riscv64-component-builtins-asm.wat @@ -17,41 +17,40 @@ ;; 
sd s0, 0(sp) ;; mv s0, sp ;; addi sp, sp, -0x10 -;; sd s1, 8(sp) -;; mv s1, a1 -;; mv a7, a2 +;; sd s4, 8(sp) +;; mv a3, a2 +;; mv s4, a1 ;; ld a1, 0x10(a0) ;; mv a2, s0 ;; sd a2, 0x30(a1) ;; ld a2, 8(s0) ;; sd a2, 0x38(a1) -;; lw a2, 0x20(a0) -;; andi a2, a2, 1 -;; bnez a2, 8 +;; lw a1, 0x20(a0) +;; andi a1, a1, 1 +;; bnez a1, 8 ;; .byte 0x00, 0x00, 0x00, 0x00 ;; ╰─╼ trap: CannotLeaveComponent -;; ld a3, 8(a0) -;; ld a5, 0x10(a3) +;; ld a1, 8(a0) +;; ld a5, 0x10(a1) ;; mv a4, zero ;; slli a1, a4, 0x20 ;; srai a1, a1, 0x20 ;; slli a2, a4, 0x20 ;; srai a2, a2, 0x20 -;; mv a3, a7 ;; slli a3, a3, 0x20 ;; srai a3, a3, 0x20 ;; jalr a5 -;; addi a3, zero, -1 -;; beq a0, a3, 0x1c -;; ld s1, 8(sp) +;; addi a1, zero, -1 +;; beq a0, a1, 0x1c +;; ld s4, 8(sp) ;; addi sp, sp, 0x10 ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 ;; ret -;; mv a1, s1 -;; ld a4, 0x10(a1) -;; ld a4, 0x198(a4) +;; mv a1, s4 +;; ld a0, 0x10(a1) +;; ld a2, 0x198(a0) ;; mv a0, a1 -;; jalr a4 +;; jalr a2 ;; .byte 0x00, 0x00, 0x00, 0x00 diff --git a/tests/disas/riscv64-entry-trampoline.wat b/tests/disas/riscv64-entry-trampoline.wat index 5b789b9d6f1c..e0fb731c4168 100644 --- a/tests/disas/riscv64-entry-trampoline.wat +++ b/tests/disas/riscv64-entry-trampoline.wat @@ -33,14 +33,14 @@ ;; fsd fs9, 0x18(sp) ;; fsd fs10, 0x10(sp) ;; fsd fs11, 8(sp) -;; ld a5, 8(a0) -;; mv a2, s0 -;; sd a2, 0x48(a5) -;; mv a2, sp -;; sd a2, 0x40(a5) -;; auipc a2, 0 -;; addi a2, a2, 0x88 -;; sd a2, 0x50(a5) +;; ld a2, 8(a0) +;; mv a3, s0 +;; sd a3, 0x48(a2) +;; mv a3, sp +;; sd a3, 0x40(a2) +;; auipc a3, 0 +;; addi a3, a3, 0x88 +;; sd a3, 0x50(a2) ;; auipc ra, 0 ;; jalr ra, ra, -0xb0 ;; ├─╼ exception frame offset: SP = FP - 0xc0 diff --git a/tests/disas/riscv64-wide-arithmetic.wat b/tests/disas/riscv64-wide-arithmetic.wat index dc81d5a1d97d..ab33fe2fc84c 100644 --- a/tests/disas/riscv64-wide-arithmetic.wat +++ b/tests/disas/riscv64-wide-arithmetic.wat @@ -51,8 +51,8 @@ ;; mv s0, sp ;; add a0, a2, a4 ;; sltu a1, a0, a4 -;; add a3, a3, a5 -;; add a1, a3, a1 +;; add a2, a3, a5 +;; add a1, a2, a1 ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -65,8 +65,8 @@ ;; mv s0, sp ;; sub a0, a2, a4 ;; sltu a1, a2, a0 -;; sub a3, a3, a5 -;; sub a1, a3, a1 +;; sub a2, a3, a5 +;; sub a1, a2, a1 ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/riscv64-zbs.wat b/tests/disas/riscv64-zbs.wat index 667f5d643234..d8403055da18 100644 --- a/tests/disas/riscv64-zbs.wat +++ b/tests/disas/riscv64-zbs.wat @@ -127,8 +127,8 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; andi a4, a3, 0x1f -;; .byte 0x33, 0x15, 0xe6, 0x48 +;; andi a0, a3, 0x1f +;; .byte 0x33, 0x15, 0xa6, 0x48 ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -194,8 +194,8 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; andi a4, a3, 0x1f -;; .byte 0x33, 0x55, 0xe6, 0x48 +;; andi a0, a3, 0x1f +;; .byte 0x33, 0x55, 0xa6, 0x48 ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -206,8 +206,8 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; andi a4, a3, 0x1f -;; .byte 0x33, 0x55, 0xe6, 0x48 +;; andi a0, a3, 0x1f +;; .byte 0x33, 0x55, 0xa6, 0x48 ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -218,8 +218,8 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; andi a4, a3, 0x1f -;; .byte 0x33, 0x55, 0xe6, 0x48 +;; andi a0, a3, 0x1f +;; .byte 0x33, 0x55, 0xa6, 0x48 ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -230,8 +230,8 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; andi a4, a3, 0x1f -;; .byte 0x33, 0x55, 0xe6, 0x48 +;; andi a0, a3, 0x1f 
+;; .byte 0x33, 0x55, 0xa6, 0x48 ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -374,8 +374,8 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; andi a4, a3, 0x1f -;; .byte 0x33, 0x15, 0xe6, 0x68 +;; andi a0, a3, 0x1f +;; .byte 0x33, 0x15, 0xa6, 0x68 ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -386,8 +386,8 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; andi a4, a3, 0x1f -;; .byte 0x33, 0x15, 0xe6, 0x68 +;; andi a0, a3, 0x1f +;; .byte 0x33, 0x15, 0xa6, 0x68 ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -464,8 +464,8 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; andi a4, a3, 0x1f -;; .byte 0x33, 0x15, 0xe6, 0x28 +;; andi a0, a3, 0x1f +;; .byte 0x33, 0x15, 0xa6, 0x28 ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 @@ -476,8 +476,8 @@ ;; sd ra, 8(sp) ;; sd s0, 0(sp) ;; mv s0, sp -;; andi a4, a3, 0x1f -;; .byte 0x33, 0x15, 0xe6, 0x28 +;; andi a0, a3, 0x1f +;; .byte 0x33, 0x15, 0xa6, 0x28 ;; ld ra, 8(sp) ;; ld s0, 0(sp) ;; addi sp, sp, 0x10 diff --git a/tests/disas/s390x-entry-trampoline.wat b/tests/disas/s390x-entry-trampoline.wat index 142100da8c78..512757e5dc8e 100644 --- a/tests/disas/s390x-entry-trampoline.wat +++ b/tests/disas/s390x-entry-trampoline.wat @@ -17,13 +17,13 @@ ;; std %f13, 0xc8(%r15) ;; std %f14, 0xd0(%r15) ;; std %f15, 0xd8(%r15) -;; lg %r5, 8(%r2) -;; lg %r4, 0(%r15) -;; stg %r4, 0x48(%r5) -;; lgr %r4, %r15 -;; stg %r4, 0x40(%r5) -;; larl %r4, 0xbe -;; stg %r4, 0x50(%r5) +;; lg %r4, 8(%r2) +;; lg %r5, 0(%r15) +;; stg %r5, 0x48(%r4) +;; lgr %r5, %r15 +;; stg %r5, 0x40(%r4) +;; larl %r5, 0xbe +;; stg %r5, 0x50(%r4) ;; brasl %r14, 0 ;; ├─╼ exception frame offset: SP = FP - 0xe0 ;; ╰─╼ exception handler: default handler, no dynamic context, handler=0xbe diff --git a/tests/disas/s390x-wide-arithmetic.wat b/tests/disas/s390x-wide-arithmetic.wat index 066100ad0b35..46de629ddff5 100644 --- a/tests/disas/s390x-wide-arithmetic.wat +++ b/tests/disas/s390x-wide-arithmetic.wat @@ -53,11 +53,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; vlvgp %v17, %r5, %r4 -;; vlvgp %v18, %r7, %r6 -;; vaq %v17, %v17, %v18 -;; vlgvg %r3, %v17, 0 -;; vlgvg %r2, %v17, 1 +;; vlvgp %v16, %r5, %r4 +;; vlvgp %v17, %r7, %r6 +;; vaq %v16, %v16, %v17 +;; vlgvg %r3, %v16, 0 +;; vlgvg %r2, %v16, 1 ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -70,11 +70,11 @@ ;; lgr %r1, %r15 ;; aghi %r15, -0xa0 ;; stg %r1, 0(%r15) -;; vlvgp %v17, %r5, %r4 -;; vlvgp %v18, %r7, %r6 -;; vsq %v17, %v17, %v18 -;; vlgvg %r3, %v17, 0 -;; vlgvg %r2, %v17, 1 +;; vlvgp %v16, %r5, %r4 +;; vlvgp %v17, %r7, %r6 +;; vsq %v16, %v16, %v17 +;; vlgvg %r3, %v16, 0 +;; vlgvg %r2, %v16, 1 ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -123,7 +123,7 @@ ;; mgrk %r2, %r4, %r5 ;; vlvgp %v16, %r2, %r3 ;; vlgvg %r2, %v16, 0 -;; vlgvg %r3, %v16, 1 +;; vlgvg %r7, %v16, 1 ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 ;; @@ -140,6 +140,6 @@ ;; mlgr %r2, %r5 ;; vlvgp %v16, %r2, %r3 ;; vlgvg %r2, %v16, 0 -;; vlgvg %r3, %v16, 1 +;; vlgvg %r7, %v16, 1 ;; lmg %r14, %r15, 0x110(%r15) ;; br %r14 diff --git a/tests/disas/trunc.wat b/tests/disas/trunc.wat index 6f69c3594b01..165db1d1a6ea 100644 --- a/tests/disas/trunc.wat +++ b/tests/disas/trunc.wat @@ -11,71 +11,71 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; subq $0x20, %rsp -;; movq %r14, 0x10(%rsp) -;; movq 8(%rdi), %r11 -;; movq %rdi, %r14 -;; movq 0x18(%r11), %r11 -;; movq %rsp, %rsi -;; cmpq %r11, %rsi +;; movq %rbx, 0x10(%rsp) +;; movq 8(%rdi), %rax +;; movq %rdi, %rbx +;; movq 0x18(%rax), %rax +;; movq %rsp, %rcx +;; cmpq %rax, %rcx ;; jb 0x118 ;; 
24: ucomisd %xmm0, %xmm0 ;; movdqu %xmm0, (%rsp) ;; jp 0x101 ;; jne 0x101 -;; 39: movq %r14, %rdi +;; 39: movq %rbx, %rdi ;; movdqu (%rsp), %xmm0 -;; callq 0x245 +;; callq 0x244 ;; movabsq $13830554455654793216, %rax -;; movq %rax, %xmm6 -;; ucomisd %xmm0, %xmm6 +;; movq %rax, %xmm1 +;; ucomisd %xmm0, %xmm1 ;; jae 0xea ;; 5f: ucomisd 0xc9(%rip), %xmm0 ;; jae 0xd3 -;; 6d: movdqu (%rsp), %xmm1 -;; movabsq $0x43e0000000000000, %r10 -;; movq %r10, %xmm7 -;; ucomisd %xmm7, %xmm1 +;; 6d: movdqu (%rsp), %xmm2 +;; movabsq $0x43e0000000000000, %rcx +;; movq %rcx, %xmm1 +;; ucomisd %xmm1, %xmm2 ;; jae 0xa2 ;; jp 0x12c -;; 91: cvttsd2si %xmm1, %rax +;; 91: cvttsd2si %xmm2, %rax ;; cmpq $0, %rax ;; jge 0xc5 ;; a0: ud2 -;; movaps %xmm1, %xmm0 -;; subsd %xmm7, %xmm0 +;; movaps %xmm2, %xmm0 +;; subsd %xmm1, %xmm0 ;; cvttsd2si %xmm0, %rax ;; cmpq $0, %rax ;; jl 0x12e -;; b8: movabsq $9223372036854775808, %r10 -;; addq %r10, %rax -;; movq 0x10(%rsp), %r14 +;; b8: movabsq $9223372036854775808, %rcx +;; addq %rcx, %rax +;; movq 0x10(%rsp), %rbx ;; addq $0x20, %rsp ;; movq %rbp, %rsp ;; popq %rbp ;; retq ;; d3: movl $6, %esi -;; d8: movq %r14, %rdi +;; d8: movq %rbx, %rdi ;; db: callq 0x271 -;; e0: movq %r14, %rdi -;; e3: callq 0x2a1 +;; e0: movq %rbx, %rdi +;; e3: callq 0x2a2 ;; e8: ud2 ;; ea: movl $6, %esi -;; ef: movq %r14, %rdi +;; ef: movq %rbx, %rdi ;; f2: callq 0x271 -;; f7: movq %r14, %rdi -;; fa: callq 0x2a1 +;; f7: movq %rbx, %rdi +;; fa: callq 0x2a2 ;; ff: ud2 ;; 101: movl $8, %esi -;; 106: movq %r14, %rdi +;; 106: movq %rbx, %rdi ;; 109: callq 0x271 -;; 10e: movq %r14, %rdi -;; 111: callq 0x2a1 +;; 10e: movq %rbx, %rdi +;; 111: callq 0x2a2 ;; 116: ud2 ;; 118: xorl %esi, %esi -;; 11a: movq %r14, %rdi +;; 11a: movq %rbx, %rdi ;; 11d: callq 0x271 -;; 122: movq %r14, %rdi -;; 125: callq 0x2a1 +;; 122: movq %rbx, %rdi +;; 125: callq 0x2a2 ;; 12a: ud2 ;; 12c: ud2 ;; 12e: ud2 diff --git a/tests/disas/trunc32.wat b/tests/disas/trunc32.wat index 019f36cbb35e..370a72b7d54f 100644 --- a/tests/disas/trunc32.wat +++ b/tests/disas/trunc32.wat @@ -11,72 +11,72 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; subq $0x20, %rsp -;; movq %r12, 0x10(%rsp) +;; movq %rbx, 0x10(%rsp) ;; movdqu %xmm0, (%rsp) ;; movq 8(%rdi), %rax -;; movq %rdi, %r12 +;; movq %rdi, %rbx ;; movq 0x18(%rax), %rax ;; movq %rsp, %rcx ;; cmpq %rax, %rcx ;; jb 0x10d ;; 29: xorpd %xmm0, %xmm0 -;; movdqu (%rsp), %xmm3 -;; cvtss2sd %xmm3, %xmm0 +;; movdqu (%rsp), %xmm1 +;; cvtss2sd %xmm1, %xmm0 ;; ucomisd %xmm0, %xmm0 ;; jp 0xf6 ;; jne 0xf6 -;; 46: movq %r12, %rdi -;; callq 0x243 -;; movabsq $13830554455654793216, %r8 -;; movq %r8, %xmm1 +;; 46: movq %rbx, %rdi +;; callq 0x242 +;; movabsq $13830554455654793216, %rax +;; movq %rax, %xmm1 ;; ucomisd %xmm0, %xmm1 ;; jae 0xdf ;; 67: ucomisd 0xc1(%rip), %xmm0 ;; jae 0xc8 -;; 75: movdqu (%rsp), %xmm7 -;; movl $0x4f000000, %edi -;; movd %edi, %xmm2 -;; ucomiss %xmm2, %xmm7 +;; 75: movdqu (%rsp), %xmm2 +;; movl $0x4f000000, %ecx +;; movd %ecx, %xmm1 +;; ucomiss %xmm1, %xmm2 ;; jae 0xa1 ;; jp 0x121 -;; 92: cvttss2si %xmm7, %eax +;; 92: cvttss2si %xmm2, %eax ;; cmpl $0, %eax ;; jge 0xba ;; 9f: ud2 -;; movaps %xmm7, %xmm3 -;; subss %xmm2, %xmm3 -;; cvttss2si %xmm3, %eax +;; movaps %xmm2, %xmm0 +;; subss %xmm1, %xmm0 +;; cvttss2si %xmm0, %eax ;; cmpl $0, %eax ;; jl 0x123 ;; b5: addl $0x80000000, %eax -;; movq 0x10(%rsp), %r12 +;; movq 0x10(%rsp), %rbx ;; addq $0x20, %rsp ;; movq %rbp, %rsp ;; popq %rbp ;; retq ;; c8: movl $6, %esi -;; cd: movq %r12, %rdi +;; cd: movq %rbx, %rdi ;; d0: callq 0x26f -;; d5: movq %r12, 
%rdi -;; d8: callq 0x29f +;; d5: movq %rbx, %rdi +;; d8: callq 0x2a0 ;; dd: ud2 ;; df: movl $6, %esi -;; e4: movq %r12, %rdi +;; e4: movq %rbx, %rdi ;; e7: callq 0x26f -;; ec: movq %r12, %rdi -;; ef: callq 0x29f +;; ec: movq %rbx, %rdi +;; ef: callq 0x2a0 ;; f4: ud2 ;; f6: movl $8, %esi -;; fb: movq %r12, %rdi +;; fb: movq %rbx, %rdi ;; fe: callq 0x26f -;; 103: movq %r12, %rdi -;; 106: callq 0x29f +;; 103: movq %rbx, %rdi +;; 106: callq 0x2a0 ;; 10b: ud2 ;; 10d: xorl %esi, %esi -;; 10f: movq %r12, %rdi +;; 10f: movq %rbx, %rdi ;; 112: callq 0x26f -;; 117: movq %r12, %rdi -;; 11a: callq 0x29f +;; 117: movq %rbx, %rdi +;; 11a: callq 0x2a0 ;; 11f: ud2 ;; 121: ud2 ;; 123: ud2 diff --git a/tests/disas/winch/aarch64/call_indirect/call_indirect.wat b/tests/disas/winch/aarch64/call_indirect/call_indirect.wat index ff45963ef7ac..941c6255db1c 100644 --- a/tests/disas/winch/aarch64/call_indirect/call_indirect.wat +++ b/tests/disas/winch/aarch64/call_indirect/call_indirect.wat @@ -85,7 +85,7 @@ ;; mov x0, x9 ;; mov x1, #0 ;; ldur w2, [x28] -;; bl #0x3e4 +;; bl #0x3f0 ;; e0: add x28, x28, #4 ;; mov sp, x28 ;; ldur x9, [x28, #0x14] @@ -153,7 +153,7 @@ ;; mov x0, x9 ;; mov x1, #0 ;; ldur w2, [x28, #0xc] -;; bl #0x3e4 +;; bl #0x3f0 ;; 1f0: add x28, x28, #0xc ;; mov sp, x28 ;; add x28, x28, #4 diff --git a/tests/disas/winch/aarch64/call_indirect/local_arg.wat b/tests/disas/winch/aarch64/call_indirect/local_arg.wat index 0e47ceacb9f1..cd66a45f03c5 100644 --- a/tests/disas/winch/aarch64/call_indirect/local_arg.wat +++ b/tests/disas/winch/aarch64/call_indirect/local_arg.wat @@ -91,7 +91,7 @@ ;; mov x0, x9 ;; mov x1, #0 ;; ldur w2, [x28] -;; bl #0x404 +;; bl #0x408 ;; 120: add x28, x28, #4 ;; mov sp, x28 ;; ldur x9, [x28, #0x14] diff --git a/tests/disas/winch/x64/atomic/notify/notify.wat b/tests/disas/winch/x64/atomic/notify/notify.wat index fac012d3ad69..9ebac8b87d53 100644 --- a/tests/disas/winch/x64/atomic/notify/notify.wat +++ b/tests/disas/winch/x64/atomic/notify/notify.wat @@ -27,7 +27,7 @@ ;; movl $0, %esi ;; movq 8(%rsp), %rdx ;; movl 4(%rsp), %ecx -;; callq 0x175 +;; callq 0x16e ;; addq $4, %rsp ;; addq $0xc, %rsp ;; movq 8(%rsp), %r14 diff --git a/tests/disas/winch/x64/atomic/notify/notify_offset.wat b/tests/disas/winch/x64/atomic/notify/notify_offset.wat index f7ba24a2c382..68ec40e22322 100644 --- a/tests/disas/winch/x64/atomic/notify/notify_offset.wat +++ b/tests/disas/winch/x64/atomic/notify/notify_offset.wat @@ -28,7 +28,7 @@ ;; movl $0, %esi ;; movq 8(%rsp), %rdx ;; movl 4(%rsp), %ecx -;; callq 0x17c +;; callq 0x175 ;; addq $4, %rsp ;; addq $0xc, %rsp ;; movq 8(%rsp), %r14 diff --git a/tests/disas/winch/x64/atomic/wait/wait32.wat b/tests/disas/winch/x64/atomic/wait/wait32.wat index 714dd8e63f4f..763669203e47 100644 --- a/tests/disas/winch/x64/atomic/wait/wait32.wat +++ b/tests/disas/winch/x64/atomic/wait/wait32.wat @@ -30,7 +30,7 @@ ;; movq 0x18(%rsp), %rdx ;; movl 0x14(%rsp), %ecx ;; movq 0xc(%rsp), %r8 -;; callq 0x182 +;; callq 0x17b ;; addq $0xc, %rsp ;; addq $0x14, %rsp ;; movq 8(%rsp), %r14 diff --git a/tests/disas/winch/x64/atomic/wait/wait32_offset.wat b/tests/disas/winch/x64/atomic/wait/wait32_offset.wat index fe7f95e70479..8967c73b3128 100644 --- a/tests/disas/winch/x64/atomic/wait/wait32_offset.wat +++ b/tests/disas/winch/x64/atomic/wait/wait32_offset.wat @@ -34,7 +34,7 @@ ;; movq 0x18(%rsp), %rdx ;; movl 0x14(%rsp), %ecx ;; movq 0xc(%rsp), %r8 -;; callq 0x189 +;; callq 0x182 ;; addq $0xc, %rsp ;; addq $0x14, %rsp ;; movq 8(%rsp), %r14 diff --git 
a/tests/disas/winch/x64/atomic/wait/wait64.wat b/tests/disas/winch/x64/atomic/wait/wait64.wat index a950e66394fe..5fa37e700b6e 100644 --- a/tests/disas/winch/x64/atomic/wait/wait64.wat +++ b/tests/disas/winch/x64/atomic/wait/wait64.wat @@ -29,7 +29,7 @@ ;; movq 0x18(%rsp), %rdx ;; movq 0x10(%rsp), %rcx ;; movq 8(%rsp), %r8 -;; callq 0x17a +;; callq 0x173 ;; addq $8, %rsp ;; addq $0x18, %rsp ;; movq 8(%rsp), %r14 diff --git a/tests/disas/winch/x64/atomic/wait/wait64_offset.wat b/tests/disas/winch/x64/atomic/wait/wait64_offset.wat index 01cf1bcbdae1..a6410773301a 100644 --- a/tests/disas/winch/x64/atomic/wait/wait64_offset.wat +++ b/tests/disas/winch/x64/atomic/wait/wait64_offset.wat @@ -33,7 +33,7 @@ ;; movq 0x18(%rsp), %rdx ;; movq 0x10(%rsp), %rcx ;; movq 8(%rsp), %r8 -;; callq 0x181 +;; callq 0x17a ;; addq $8, %rsp ;; addq $0x18, %rsp ;; movq 8(%rsp), %r14 diff --git a/tests/disas/winch/x64/call_indirect/call_indirect.wat b/tests/disas/winch/x64/call_indirect/call_indirect.wat index 26d788b04c48..667a67a86524 100644 --- a/tests/disas/winch/x64/call_indirect/call_indirect.wat +++ b/tests/disas/winch/x64/call_indirect/call_indirect.wat @@ -76,7 +76,7 @@ ;; movq %r14, %rdi ;; movl $0, %esi ;; movl 8(%rsp), %edx -;; callq 0x337 +;; callq 0x334 ;; addq $8, %rsp ;; addq $4, %rsp ;; movq 0x1c(%rsp), %r14 @@ -128,7 +128,7 @@ ;; movq %r14, %rdi ;; movl $0, %esi ;; movl 4(%rsp), %edx -;; callq 0x337 +;; callq 0x334 ;; addq $4, %rsp ;; addq $4, %rsp ;; movq 0x20(%rsp), %r14 diff --git a/tests/disas/winch/x64/call_indirect/local_arg.wat b/tests/disas/winch/x64/call_indirect/local_arg.wat index 8db6c8195a01..a061482e746e 100644 --- a/tests/disas/winch/x64/call_indirect/local_arg.wat +++ b/tests/disas/winch/x64/call_indirect/local_arg.wat @@ -72,7 +72,7 @@ ;; movq %r14, %rdi ;; movl $0, %esi ;; movl 8(%rsp), %edx -;; callq 0x32b +;; callq 0x31d ;; addq $8, %rsp ;; addq $4, %rsp ;; movq 0x1c(%rsp), %r14 diff --git a/tests/disas/winch/x64/epoch/func.wat b/tests/disas/winch/x64/epoch/func.wat index b087ee61cf43..9efcc83ad4dd 100644 --- a/tests/disas/winch/x64/epoch/func.wat +++ b/tests/disas/winch/x64/epoch/func.wat @@ -23,7 +23,7 @@ ;; cmpq %rcx, %rdx ;; jb 0x54 ;; 47: movq %r14, %rdi -;; callq 0x13b +;; callq 0x134 ;; movq 8(%rsp), %r14 ;; addq $0x10, %rsp ;; popq %rbp diff --git a/tests/disas/winch/x64/epoch/loop.wat b/tests/disas/winch/x64/epoch/loop.wat index 720ee49fe4da..23baa77a5d6d 100644 --- a/tests/disas/winch/x64/epoch/loop.wat +++ b/tests/disas/winch/x64/epoch/loop.wat @@ -25,7 +25,7 @@ ;; cmpq %rcx, %rdx ;; jb 0x54 ;; 47: movq %r14, %rdi -;; callq 0x165 +;; callq 0x15e ;; movq 8(%rsp), %r14 ;; movq 0x18(%r14), %rdx ;; movq (%rdx), %rdx @@ -34,7 +34,7 @@ ;; cmpq %rcx, %rdx ;; jb 0x79 ;; 6c: movq %r14, %rdi -;; callq 0x165 +;; callq 0x15e ;; movq 8(%rsp), %r14 ;; jmp 0x54 ;; 7e: addq $0x10, %rsp diff --git a/tests/disas/winch/x64/f32_ceil/f32_ceil_param.wat b/tests/disas/winch/x64/f32_ceil/f32_ceil_param.wat index 3ef7a29cc705..64604cb13f70 100644 --- a/tests/disas/winch/x64/f32_ceil/f32_ceil_param.wat +++ b/tests/disas/winch/x64/f32_ceil/f32_ceil_param.wat @@ -26,7 +26,7 @@ ;; subq $0xc, %rsp ;; movq %r14, %rdi ;; movss 0xc(%rsp), %xmm0 -;; callq 0xdc +;; callq 0xdb ;; addq $0xc, %rsp ;; addq $4, %rsp ;; movq 0x18(%rsp), %r14 diff --git a/tests/disas/winch/x64/f32_floor/f32_floor_param.wat b/tests/disas/winch/x64/f32_floor/f32_floor_param.wat index 191d1feb01ae..c860da414f11 100644 --- a/tests/disas/winch/x64/f32_floor/f32_floor_param.wat +++ 
b/tests/disas/winch/x64/f32_floor/f32_floor_param.wat @@ -26,7 +26,7 @@ ;; subq $0xc, %rsp ;; movq %r14, %rdi ;; movss 0xc(%rsp), %xmm0 -;; callq 0xdc +;; callq 0xdb ;; addq $0xc, %rsp ;; addq $4, %rsp ;; movq 0x18(%rsp), %r14 diff --git a/tests/disas/winch/x64/f32_nearest/f32_nearest_param.wat b/tests/disas/winch/x64/f32_nearest/f32_nearest_param.wat index 0ab1e3066430..1099f240c05a 100644 --- a/tests/disas/winch/x64/f32_nearest/f32_nearest_param.wat +++ b/tests/disas/winch/x64/f32_nearest/f32_nearest_param.wat @@ -26,7 +26,7 @@ ;; subq $0xc, %rsp ;; movq %r14, %rdi ;; movss 0xc(%rsp), %xmm0 -;; callq 0xdc +;; callq 0xdb ;; addq $0xc, %rsp ;; addq $4, %rsp ;; movq 0x18(%rsp), %r14 diff --git a/tests/disas/winch/x64/f32_trunc/f32_trunc_param.wat b/tests/disas/winch/x64/f32_trunc/f32_trunc_param.wat index 696a17c15a5d..04ea2d3771c2 100644 --- a/tests/disas/winch/x64/f32_trunc/f32_trunc_param.wat +++ b/tests/disas/winch/x64/f32_trunc/f32_trunc_param.wat @@ -26,7 +26,7 @@ ;; subq $0xc, %rsp ;; movq %r14, %rdi ;; movss 0xc(%rsp), %xmm0 -;; callq 0xdc +;; callq 0xdb ;; addq $0xc, %rsp ;; addq $4, %rsp ;; movq 0x18(%rsp), %r14 diff --git a/tests/disas/winch/x64/f64_ceil/f64_ceil_param.wat b/tests/disas/winch/x64/f64_ceil/f64_ceil_param.wat index cca7df3169ae..d0823c70d614 100644 --- a/tests/disas/winch/x64/f64_ceil/f64_ceil_param.wat +++ b/tests/disas/winch/x64/f64_ceil/f64_ceil_param.wat @@ -26,7 +26,7 @@ ;; subq $8, %rsp ;; movq %r14, %rdi ;; movsd 8(%rsp), %xmm0 -;; callq 0xdc +;; callq 0xdb ;; addq $8, %rsp ;; addq $8, %rsp ;; movq 0x18(%rsp), %r14 diff --git a/tests/disas/winch/x64/f64_floor/f64_floor_param.wat b/tests/disas/winch/x64/f64_floor/f64_floor_param.wat index a23d8194260d..87ada7f89f1d 100644 --- a/tests/disas/winch/x64/f64_floor/f64_floor_param.wat +++ b/tests/disas/winch/x64/f64_floor/f64_floor_param.wat @@ -26,7 +26,7 @@ ;; subq $8, %rsp ;; movq %r14, %rdi ;; movsd 8(%rsp), %xmm0 -;; callq 0xdc +;; callq 0xdb ;; addq $8, %rsp ;; addq $8, %rsp ;; movq 0x18(%rsp), %r14 diff --git a/tests/disas/winch/x64/f64_nearest/f64_nearest_param.wat b/tests/disas/winch/x64/f64_nearest/f64_nearest_param.wat index 43427ea2e9e3..64d7814165e2 100644 --- a/tests/disas/winch/x64/f64_nearest/f64_nearest_param.wat +++ b/tests/disas/winch/x64/f64_nearest/f64_nearest_param.wat @@ -26,7 +26,7 @@ ;; subq $8, %rsp ;; movq %r14, %rdi ;; movsd 8(%rsp), %xmm0 -;; callq 0xdc +;; callq 0xdb ;; addq $8, %rsp ;; addq $8, %rsp ;; movq 0x18(%rsp), %r14 diff --git a/tests/disas/winch/x64/f64_trunc/f64_trunc_param.wat b/tests/disas/winch/x64/f64_trunc/f64_trunc_param.wat index 8c98aa5367be..507039062c74 100644 --- a/tests/disas/winch/x64/f64_trunc/f64_trunc_param.wat +++ b/tests/disas/winch/x64/f64_trunc/f64_trunc_param.wat @@ -26,7 +26,7 @@ ;; subq $8, %rsp ;; movq %r14, %rdi ;; movsd 8(%rsp), %xmm0 -;; callq 0xdc +;; callq 0xdb ;; addq $8, %rsp ;; addq $8, %rsp ;; movq 0x18(%rsp), %r14 diff --git a/tests/disas/winch/x64/fuel/call.wat b/tests/disas/winch/x64/fuel/call.wat index 274046f66146..9669d13352b7 100644 --- a/tests/disas/winch/x64/fuel/call.wat +++ b/tests/disas/winch/x64/fuel/call.wat @@ -28,7 +28,7 @@ ;; cmpq $0, %rcx ;; jl 0x5e ;; 51: movq %r14, %rdi -;; callq 0x1f5 +;; callq 0x1ee ;; movq 8(%rsp), %r14 ;; movq 8(%r14), %rax ;; movq (%rax), %r11 @@ -74,7 +74,7 @@ ;; cmpq $0, %rcx ;; jl 0x10e ;; 101: movq %r14, %rdi -;; callq 0x1f5 +;; callq 0x1ee ;; movq 8(%rsp), %r14 ;; addq $0x10, %rsp ;; popq %rbp diff --git a/tests/disas/winch/x64/fuel/func.wat b/tests/disas/winch/x64/fuel/func.wat index 
f9b8da67a85d..b239b1370889 100644 --- a/tests/disas/winch/x64/fuel/func.wat +++ b/tests/disas/winch/x64/fuel/func.wat @@ -24,7 +24,7 @@ ;; cmpq $0, %rcx ;; jl 0x5e ;; 51: movq %r14, %rdi -;; callq 0x145 +;; callq 0x13e ;; movq 8(%rsp), %r14 ;; addq $0x10, %rsp ;; popq %rbp diff --git a/tests/disas/winch/x64/fuel/loop.wat b/tests/disas/winch/x64/fuel/loop.wat index b31304f35cc0..6d3bd20c2743 100644 --- a/tests/disas/winch/x64/fuel/loop.wat +++ b/tests/disas/winch/x64/fuel/loop.wat @@ -26,14 +26,14 @@ ;; cmpq $0, %rcx ;; jl 0x5e ;; 51: movq %r14, %rdi -;; callq 0x179 +;; callq 0x172 ;; movq 8(%rsp), %r14 ;; movq 8(%r14), %rcx ;; movq (%rcx), %rcx ;; cmpq $0, %rcx ;; jl 0x7c ;; 6f: movq %r14, %rdi -;; callq 0x179 +;; callq 0x172 ;; movq 8(%rsp), %r14 ;; movq 8(%r14), %rax ;; movq (%rax), %r11 diff --git a/tests/disas/winch/x64/load/grow_load.wat b/tests/disas/winch/x64/load/grow_load.wat index 09b070292334..5c88ae5e85eb 100644 --- a/tests/disas/winch/x64/load/grow_load.wat +++ b/tests/disas/winch/x64/load/grow_load.wat @@ -65,7 +65,7 @@ ;; movq %r14, %rdi ;; movl 0xc(%rsp), %esi ;; movl $0, %edx -;; callq 0x2e2 +;; callq 0x2e1 ;; addq $0xc, %rsp ;; addq $4, %rsp ;; movq 0x58(%rsp), %r14 diff --git a/tests/disas/winch/x64/table/fill.wat b/tests/disas/winch/x64/table/fill.wat index b5b703fd3a8a..0164d54991bb 100644 --- a/tests/disas/winch/x64/table/fill.wat +++ b/tests/disas/winch/x64/table/fill.wat @@ -113,7 +113,7 @@ ;; movq %r14, %rdi ;; movl $0, %esi ;; movl 0xc(%rsp), %edx -;; callq 0x4ee +;; callq 0x4e6 ;; addq $0xc, %rsp ;; addq $4, %rsp ;; movq 0x28(%rsp), %r14 @@ -133,7 +133,7 @@ ;; movl 0xc(%rsp), %edx ;; movq 4(%rsp), %rcx ;; movl (%rsp), %r8d -;; callq 0x51a +;; callq 0x511 ;; addq $0x10, %rsp ;; movq 0x28(%rsp), %r14 ;; addq $0x30, %rsp diff --git a/tests/disas/winch/x64/table/get.wat b/tests/disas/winch/x64/table/get.wat index 72fb93d2900c..934ceff6fa1f 100644 --- a/tests/disas/winch/x64/table/get.wat +++ b/tests/disas/winch/x64/table/get.wat @@ -65,7 +65,7 @@ ;; movq %r14, %rdi ;; movl $0, %esi ;; movl 0xc(%rsp), %edx -;; callq 0x2ef +;; callq 0x2e5 ;; addq $0xc, %rsp ;; addq $4, %rsp ;; movq 0x18(%rsp), %r14 diff --git a/tests/disas/winch/x64/table/grow.wat b/tests/disas/winch/x64/table/grow.wat index 4d2d538c5529..8994afbea2a1 100644 --- a/tests/disas/winch/x64/table/grow.wat +++ b/tests/disas/winch/x64/table/grow.wat @@ -30,7 +30,7 @@ ;; movl $0, %esi ;; movl $0xa, %edx ;; movq 8(%rsp), %rcx -;; callq 0x178 +;; callq 0x177 ;; addq $8, %rsp ;; addq $8, %rsp ;; movq 0x18(%rsp), %r14 diff --git a/tests/disas/winch/x64/table/init_copy_drop.wat b/tests/disas/winch/x64/table/init_copy_drop.wat index fdbe15349bc8..402bbc1b8f8c 100644 --- a/tests/disas/winch/x64/table/init_copy_drop.wat +++ b/tests/disas/winch/x64/table/init_copy_drop.wat @@ -142,11 +142,11 @@ ;; movl $7, %ecx ;; movl $0, %r8d ;; movl $4, %r9d -;; callq 0x94a +;; callq 0x939 ;; movq 8(%rsp), %r14 ;; movq %r14, %rdi ;; movl $1, %esi -;; callq 0x995 +;; callq 0x984 ;; movq 8(%rsp), %r14 ;; movq %r14, %rdi ;; movl $0, %esi @@ -154,11 +154,11 @@ ;; movl $0xf, %ecx ;; movl $1, %r8d ;; movl $3, %r9d -;; callq 0x94a +;; callq 0x939 ;; movq 8(%rsp), %r14 ;; movq %r14, %rdi ;; movl $3, %esi -;; callq 0x995 +;; callq 0x984 ;; movq 8(%rsp), %r14 ;; movq %r14, %rdi ;; movl $0, %esi @@ -166,7 +166,7 @@ ;; movl $0x14, %ecx ;; movl $0xf, %r8d ;; movl $5, %r9d -;; callq 0x8ff +;; callq 0x8ee ;; movq 8(%rsp), %r14 ;; movq %r14, %rdi ;; movl $0, %esi @@ -174,7 +174,7 @@ ;; movl $0x15, %ecx ;; movl $0x1d, %r8d ;; movl $1, %r9d -;; callq 
0x8ff +;; callq 0x8ee ;; movq 8(%rsp), %r14 ;; movq %r14, %rdi ;; movl $0, %esi @@ -182,7 +182,7 @@ ;; movl $0x18, %ecx ;; movl $0xa, %r8d ;; movl $1, %r9d -;; callq 0x8ff +;; callq 0x8ee ;; movq 8(%rsp), %r14 ;; movq %r14, %rdi ;; movl $0, %esi @@ -190,7 +190,7 @@ ;; movl $0xd, %ecx ;; movl $0xb, %r8d ;; movl $4, %r9d -;; callq 0x8ff +;; callq 0x8ee ;; movq 8(%rsp), %r14 ;; movq %r14, %rdi ;; movl $0, %esi @@ -198,7 +198,7 @@ ;; movl $0x13, %ecx ;; movl $0x14, %r8d ;; movl $5, %r9d -;; callq 0x8ff +;; callq 0x8ee ;; movq 8(%rsp), %r14 ;; addq $0x10, %rsp ;; popq %rbp @@ -243,7 +243,7 @@ ;; movq %r14, %rdi ;; movl $0, %esi ;; movl 0xc(%rsp), %edx -;; callq 0x9df +;; callq 0x9cc ;; addq $0xc, %rsp ;; addq $4, %rsp ;; movq 0x18(%rsp), %r14 diff --git a/tests/disas/winch/x64/table/set.wat b/tests/disas/winch/x64/table/set.wat index 3f40480a5b13..8ec3ab16217b 100644 --- a/tests/disas/winch/x64/table/set.wat +++ b/tests/disas/winch/x64/table/set.wat @@ -109,7 +109,7 @@ ;; movq %r14, %rdi ;; movl $0, %esi ;; movl 8(%rsp), %edx -;; callq 0x4ba +;; callq 0x4a5 ;; addq $8, %rsp ;; addq $4, %rsp ;; movq 0x1c(%rsp), %r14 diff --git a/tests/disas/x64-bit-and-condition.wat b/tests/disas/x64-bit-and-condition.wat index 0c00b62d456c..1e361e77933e 100644 --- a/tests/disas/x64-bit-and-condition.wat +++ b/tests/disas/x64-bit-and-condition.wat @@ -112,8 +112,8 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; testl $0x100000, %edx -;; sete %r8b -;; movzbl %r8b, %eax +;; sete %sil +;; movzbl %sil, %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -144,8 +144,8 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; btq $0x28, %rdx -;; setae %r8b -;; movzbl %r8b, %eax +;; setae %sil +;; movzbl %sil, %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -176,8 +176,8 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; btl %ecx, %edx -;; setae %r9b -;; movzbl %r9b, %eax +;; setae %sil +;; movzbl %sil, %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -208,8 +208,8 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; btq %rcx, %rdx -;; setae %r9b -;; movzbl %r9b, %eax +;; setae %sil +;; movzbl %sil, %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/x64-entry-trampoline.wat b/tests/disas/x64-entry-trampoline.wat index 7a552997718c..be475c03a484 100644 --- a/tests/disas/x64-entry-trampoline.wat +++ b/tests/disas/x64-entry-trampoline.wat @@ -13,13 +13,13 @@ ;; movq %r13, 0x10(%rsp) ;; movq %r14, 0x18(%rsp) ;; movq %r15, 0x20(%rsp) -;; movq 8(%rdi), %rcx -;; movq %rbp, %rdx -;; movq %rdx, 0x48(%rcx) -;; movq %rsp, %rdx -;; movq %rdx, 0x40(%rcx) -;; leaq 0x2f(%rip), %r8 -;; movq %r8, 0x50(%rcx) +;; movq 8(%rdi), %rax +;; movq %rbp, %rcx +;; movq %rcx, 0x48(%rax) +;; movq %rsp, %rcx +;; movq %rcx, 0x40(%rax) +;; leaq 0x2f(%rip), %rcx +;; movq %rcx, 0x50(%rax) ;; callq 0 ;; ├─╼ exception frame offset: SP = FP - 0x30 ;; ╰─╼ exception handler: default handler, no dynamic context, handler=0x71 diff --git a/tests/disas/x64-mul16-negative.wat b/tests/disas/x64-mul16-negative.wat index 2345fd1658ff..d94592dad1ba 100644 --- a/tests/disas/x64-mul16-negative.wat +++ b/tests/disas/x64-mul16-negative.wat @@ -12,8 +12,8 @@ ;; wasm[0]::function[0]: ;; pushq %rbp ;; movq %rsp, %rbp -;; imulw $0xe0ff, %dx, %dx -;; movswl %dx, %eax +;; imulw $0xe0ff, %dx, %si +;; movswl %si, %eax ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/x64-relaxed-simd-deterministic.wat b/tests/disas/x64-relaxed-simd-deterministic.wat index 241f8fb328a1..62456ff76d70 100644 --- a/tests/disas/x64-relaxed-simd-deterministic.wat +++ b/tests/disas/x64-relaxed-simd-deterministic.wat @@ -43,10 
+43,10 @@ ;; vcmpeqps %xmm0, %xmm0, %xmm3 ;; vandps %xmm3, %xmm0, %xmm5 ;; vpxor %xmm5, %xmm3, %xmm7 -;; vcvttps2dq %xmm5, %xmm1 -;; vpand %xmm7, %xmm1, %xmm3 -;; vpsrad $0x1f, %xmm3, %xmm5 -;; vpxor %xmm1, %xmm5, %xmm0 +;; vcvttps2dq %xmm5, %xmm0 +;; vpand %xmm7, %xmm0, %xmm1 +;; vpsrad $0x1f, %xmm1, %xmm1 +;; vpxor %xmm0, %xmm1, %xmm0 ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -55,18 +55,18 @@ ;; pushq %rbp ;; movq %rsp, %rbp ;; vxorps %xmm3, %xmm3, %xmm5 -;; vmaxps %xmm5, %xmm0, %xmm0 -;; vpcmpeqd %xmm5, %xmm5, %xmm1 -;; vpsrld $1, %xmm1, %xmm3 -;; vcvtdq2ps %xmm3, %xmm5 -;; vcvttps2dq %xmm0, %xmm7 -;; vsubps %xmm5, %xmm0, %xmm1 -;; vcmpleps %xmm1, %xmm5, %xmm3 -;; vcvttps2dq %xmm1, %xmm5 -;; vpxor %xmm3, %xmm5, %xmm0 -;; vpxor %xmm1, %xmm1, %xmm3 -;; vpmaxsd %xmm3, %xmm0, %xmm5 -;; vpaddd %xmm7, %xmm5, %xmm0 +;; vmaxps %xmm5, %xmm0, %xmm7 +;; vpcmpeqd %xmm5, %xmm5, %xmm0 +;; vpsrld $1, %xmm0, %xmm0 +;; vcvtdq2ps %xmm0, %xmm1 +;; vcvttps2dq %xmm7, %xmm0 +;; vsubps %xmm1, %xmm7, %xmm2 +;; vcmpleps %xmm2, %xmm1, %xmm1 +;; vcvttps2dq %xmm2, %xmm2 +;; vpxor %xmm1, %xmm2, %xmm1 +;; vpxor %xmm2, %xmm2, %xmm2 +;; vpmaxsd %xmm2, %xmm1, %xmm1 +;; vpaddd %xmm0, %xmm1, %xmm0 ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -90,10 +90,10 @@ ;; movq %rsp, %rbp ;; vxorpd %xmm3, %xmm3, %xmm5 ;; vmaxpd %xmm5, %xmm0, %xmm7 -;; vminpd 0x1c(%rip), %xmm7, %xmm1 -;; vroundpd $3, %xmm1, %xmm3 -;; vaddpd 0x1e(%rip), %xmm3, %xmm6 -;; vshufps $0x88, %xmm5, %xmm6, %xmm0 +;; vminpd 0x1c(%rip), %xmm7, %xmm0 +;; vroundpd $3, %xmm0, %xmm0 +;; vaddpd 0x1e(%rip), %xmm0, %xmm0 +;; vshufps $0x88, %xmm5, %xmm0, %xmm0 ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -105,15 +105,15 @@ ;; wasm[0]::function[4]: ;; pushq %rbp ;; movq %rsp, %rbp -;; vpmovsxbw %xmm0, %xmm4 -;; vpmovsxbw %xmm1, %xmm5 -;; vpmullw %xmm5, %xmm4, %xmm4 -;; vpalignr $8, %xmm0, %xmm0, %xmm3 -;; vpmovsxbw %xmm3, %xmm5 -;; vpalignr $8, %xmm1, %xmm1, %xmm3 -;; vpmovsxbw %xmm3, %xmm6 -;; vpmullw %xmm6, %xmm5, %xmm5 -;; vphaddw %xmm5, %xmm4, %xmm0 +;; vpmovsxbw %xmm0, %xmm2 +;; vpmovsxbw %xmm1, %xmm3 +;; vpmullw %xmm3, %xmm2, %xmm2 +;; vpalignr $8, %xmm0, %xmm0, %xmm0 +;; vpmovsxbw %xmm0, %xmm0 +;; vpalignr $8, %xmm1, %xmm1, %xmm1 +;; vpmovsxbw %xmm1, %xmm1 +;; vpmullw %xmm1, %xmm0, %xmm0 +;; vphaddw %xmm0, %xmm2, %xmm0 ;; movq %rbp, %rsp ;; popq %rbp ;; retq @@ -121,17 +121,17 @@ ;; wasm[0]::function[5]: ;; pushq %rbp ;; movq %rsp, %rbp -;; vpmovsxbw %xmm0, %xmm7 -;; vpmovsxbw %xmm1, %xmm3 -;; vpmullw %xmm3, %xmm7, %xmm7 -;; vpalignr $8, %xmm0, %xmm0, %xmm6 -;; vpmovsxbw %xmm6, %xmm0 -;; vpalignr $8, %xmm1, %xmm1, %xmm6 -;; vpmovsxbw %xmm6, %xmm1 +;; vpmovsxbw %xmm0, %xmm3 +;; vpmovsxbw %xmm1, %xmm4 +;; vpmullw %xmm4, %xmm3, %xmm3 +;; vpalignr $8, %xmm0, %xmm0, %xmm0 +;; vpmovsxbw %xmm0, %xmm0 +;; vpalignr $8, %xmm1, %xmm1, %xmm1 +;; vpmovsxbw %xmm1, %xmm1 ;; vpmullw %xmm1, %xmm0, %xmm0 -;; vphaddw %xmm0, %xmm7, %xmm7 -;; vpmaddwd 0x17(%rip), %xmm7, %xmm7 -;; vpaddd %xmm2, %xmm7, %xmm0 +;; vphaddw %xmm0, %xmm3, %xmm0 +;; vpmaddwd 0x17(%rip), %xmm0, %xmm0 +;; vpaddd %xmm2, %xmm0, %xmm0 ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/x64-relaxed-simd.wat b/tests/disas/x64-relaxed-simd.wat index 291b12d0efaf..5c5c3c9ba0e8 100644 --- a/tests/disas/x64-relaxed-simd.wat +++ b/tests/disas/x64-relaxed-simd.wat @@ -52,15 +52,15 @@ ;; maxps %xmm7, %xmm0 ;; pcmpeqd %xmm7, %xmm7 ;; psrld $1, %xmm7 -;; cvtdq2ps %xmm7, %xmm1 -;; cvttps2dq %xmm0, %xmm7 -;; subps %xmm1, %xmm0 -;; cmpleps %xmm0, %xmm1 +;; cvtdq2ps %xmm7, %xmm2 +;; cvttps2dq %xmm0, %xmm1 +;; subps 
%xmm2, %xmm0 +;; cmpleps %xmm0, %xmm2 ;; cvttps2dq %xmm0, %xmm0 -;; pxor %xmm1, %xmm0 +;; pxor %xmm2, %xmm0 ;; pxor %xmm2, %xmm2 ;; pmaxsd %xmm2, %xmm0 -;; paddd %xmm7, %xmm0 +;; paddd %xmm1, %xmm0 ;; movq %rbp, %rsp ;; popq %rbp ;; retq diff --git a/tests/disas/x64-simple-load.wat b/tests/disas/x64-simple-load.wat index 71475912d021..fdea5f178952 100644 --- a/tests/disas/x64-simple-load.wat +++ b/tests/disas/x64-simple-load.wat @@ -13,12 +13,12 @@ ;; wasm[0]::function[0]::load8: ;; pushq %rbp ;; movq %rsp, %rbp -;; movq 0x38(%rdi), %r9 +;; movq 0x38(%rdi), %rsi ;; ╰─╼ addrmap: 0x21 -;; movl %edx, %r10d -;; movzbq (%r9, %r10), %rax +;; movl %edx, %edi +;; movzbq (%rsi, %rdi), %rax ;; ╰─╼ trap: MemoryOutOfBounds -;; movzbq 4(%r9, %r10), %rcx +;; movzbq 4(%rsi, %rdi), %rcx ;; ├─╼ addrmap: 0x26 ;; ╰─╼ trap: MemoryOutOfBounds ;; movq %rbp, %rsp diff --git a/tests/disas/x64-sse-no-fold-unaligned-load.wat b/tests/disas/x64-sse-no-fold-unaligned-load.wat index 9ec1c784f1cb..b2464c6d7762 100644 --- a/tests/disas/x64-sse-no-fold-unaligned-load.wat +++ b/tests/disas/x64-sse-no-fold-unaligned-load.wat @@ -19,9 +19,9 @@ ;; wasm[0]::function[0]::punpckhbw: ;; pushq %rbp ;; movq %rsp, %rbp -;; movl %edx, %r9d -;; addq 0x38(%rdi), %r9 -;; movdqu 1(%r9), %xmm6 +;; movl %edx, %esi +;; addq 0x38(%rdi), %rsi +;; movdqu 1(%rsi), %xmm6 ;; punpckhbw %xmm6, %xmm0 ;; movq %rbp, %rsp ;; popq %rbp