POSTRISC virtual processor instruction set

instruction fields/arguments

assembler instructions

group syntax description
f128
abs_diff_f128 ra,rb,rc
absolute difference f128
f16
abs_diff_f16 ra,rb,rc
absolute difference f16
f32
abs_diff_f32 ra,rb,rc
absolute difference f32
f64
abs_diff_f64 ra,rb,rc
absolute difference f64
i128
abs_diff_i128 ra,rb,rc
absolute difference i128
base
abs_diff_i32 ra,rb,rc
absolute difference i32
base
abs_diff_i64 ra,rb,rc
absolute difference i64
f16
abs_diff_vf16 ra,rb,rc
absolute difference vf16
f32
abs_diff_vf32 ra,rb,rc
absolute difference vf32
f64
abs_diff_vf64 ra,rb,rc
absolute difference vf64
f128
abs_f128 ra,rb
absolute value f128
f16
abs_f16 ra,rb
absolute value f16
f32
abs_f32 ra,rb
absolute value f32
f64
abs_f64 ra,rb
absolute value f64
i128
abs_i128 ra,rb
absolute value i128
base
abs_i32 ra,rb
absolute value i32
base
abs_i64 ra,rb
absolute value i64
f128
abs_max_f128 ra,rb,rc
absolute maximum f128
f16
abs_max_f16 ra,rb,rc
absolute maximum f16
f32
abs_max_f32 ra,rb,rc
absolute maximum f32
f64
abs_max_f64 ra,rb,rc
absolute maximum f64
f16
abs_max_vf16 ra,rb,rc
absolute maximum vf16
f32
abs_max_vf32 ra,rb,rc
absolute maximum vf32
f64
abs_max_vf64 ra,rb,rc
absolute maximum vf64
f128
abs_min_f128 ra,rb,rc
absolute minimum f128
f16
abs_min_f16 ra,rb,rc
absolute minimum f16
f32
abs_min_f32 ra,rb,rc
absolute minimum f32
f64
abs_min_f64 ra,rb,rc
absolute minimum f64
f16
abs_min_vf16 ra,rb,rc
absolute minimum vf16
f32
abs_min_vf32 ra,rb,rc
absolute minimum vf32
f64
abs_min_vf64 ra,rb,rc
absolute minimum vf64
f16
abs_vf16 ra,rb
absolute value vf16
f32
abs_vf32 ra,rb
absolute value vf32
f64
abs_vf64 ra,rb
absolute value vf64
base
add_add_i64 ra,rb,rc,rd
add-add i64
base
add_addc_u64 ra,rb,rc,rd
add-add with carry-out u64
f16
add_alt_vf16 ra,rb,rc
add alternating vf16
f32
add_alt_vf32 ra,rb,rc
add alternating vf32
f64
add_alt_vf64 ra,rb,rc
add alternating vf64
f128
add_f128 ra,rb,rc
add f128
f16
add_f16 ra,rb,rc
add f16
f32
add_f32 ra,rb,rc
add f32
f64
add_f64 ra,rb,rc
add f64
f16
add_horiz_vf16 ra,rb,rc
add horizontal vf16
f32
add_horiz_vf32 ra,rb,rc
add horizontal vf32
f64
add_horiz_vf64 ra,rb,rc
add horizontal vf64
i128
add_i128 ra,rb,rc
add i128
base
add_i32 ra,rb,rc
add i32
base
add_i64 ra,rb,rc
add i64
i128
add_imm_i128.l ra,rb,simm21
add immediate i128
base
add_imm_i32.l ra,rb,simm21
add immediate i32
base
add_imm_i64.l ra,rb,simm21
add immediate i64
base
add_imm_u32.l ra,rb,simm21
add immediate u32
mmx
add_sat_vi16 ra,rb,rc
add saturate vi16
mmx
add_sat_vi32 ra,rb,rc
add saturate vi32
mmx
add_sat_vi64 ra,rb,rc
add saturate vi64
mmx
add_sat_vi8 ra,rb,rc
add saturate vi8
mmx
add_sat_vu16 ra,rb,rc
add saturate vu16
mmx
add_sat_vu32 ra,rb,rc
add saturate vu32
mmx
add_sat_vu64 ra,rb,rc
add saturate vu64
mmx
add_sat_vu8 ra,rb,rc
add saturate vu8
base
add_sub_i64 ra,rb,rc,rd
add-subtract i64
base
add_u32 ra,rb,rc
add u32
f16
add_vf16 ra,rb,rc
add vf16
f32
add_vf32 ra,rb,rc
add vf32
f64
add_vf64 ra,rb,rc
add vf64
mmx
add_vu16 ra,rb,rc
add vu16
mmx
add_vu32 ra,rb,rc
add vu32
mmx
add_vu64 ra,rb,rc
add vu64
mmx
add_vu8 ra,rb,rc
add vu8
base
addc_u64 ra,rb,rc
add carry-out u64
mmx
addc_vu16 ra,rb,rc
add carry-out vu16
mmx
addc_vu32 ra,rb,rc
add carry-out vu32
mmx
addc_vu64 ra,rb,rc
add carry-out vu64
mmx
addc_vu8 ra,rb,rc
add carry-out vu8
base
addo_i64 ra,rb,rc
add overflow i64
mmx
addo_vi16 ra,rb,rc
add overflow vi16
mmx
addo_vi32 ra,rb,rc
add overflow vi32
mmx
addo_vi64 ra,rb,rc
add overflow vi64
mmx
addo_vi8 ra,rb,rc
add overflow vi8
cipher
aes_dec ra,rb,rc
aes decrypt round
cipher
aes_dec_last ra,rb,rc
aes decrypt last round
cipher
aes_enc ra,rb,rc
aes encrypt round
cipher
aes_enc_last ra,rb,rc
aes encrypt last round
cipher
aes_imc ra,rb
aes inverse mix columns
cipher
aes_keygen_assist ra,rb,simm10
aes key generation assist
base
alignup_u64 ra,rb,sc,sd
align up shifted
special
alloc framesize
allocate register frame, update eip
special
alloc_sp.l framesize,uimm21
allocate register frame, update eip,sp
atomic
amq_cas_i128 ra,rb,rc,rd
atomic acquire compare and swap i128
atomic
amq_cas_i16 ra,rb,rc,rd
atomic acquire compare and swap i16
atomic
amq_cas_i32 ra,rb,rc,rd
atomic acquire compare and swap i32
atomic
amq_cas_i64 ra,rb,rc,rd
atomic acquire compare and swap i64
atomic
amq_cas_i8 ra,rb,rc,rd
atomic acquire compare and swap i8
atomic
amq_ld_add_u128 ra,rb,rc
atomic acquire load-add u128
atomic
amq_ld_add_u16 ra,rb,rc
atomic acquire load-add u16
atomic
amq_ld_add_u32 ra,rb,rc
atomic acquire load-add u32
atomic
amq_ld_add_u64 ra,rb,rc
atomic acquire load-add u64
atomic
amq_ld_add_u8 ra,rb,rc
atomic acquire load-add u8
atomic
amq_ld_and_u128 ra,rb,rc
atomic acquire load-and u128
atomic
amq_ld_and_u16 ra,rb,rc
atomic acquire load-and u16
atomic
amq_ld_and_u32 ra,rb,rc
atomic acquire load-and u32
atomic
amq_ld_and_u64 ra,rb,rc
atomic acquire load-and u64
atomic
amq_ld_and_u8 ra,rb,rc
atomic acquire load-and u8
atomic
amq_ld_i128 ra,rb
atomic acquire load i128
atomic
amq_ld_i16 ra,rb
atomic acquire load i16
atomic
amq_ld_i32 ra,rb
atomic acquire load i32
atomic
amq_ld_i64 ra,rb
atomic acquire load i64
atomic
amq_ld_i8 ra,rb
atomic acquire load i8
atomic
amq_ld_max_i128 ra,rb,rc
atomic acquire load-maximum i128
atomic
amq_ld_max_i16 ra,rb,rc
atomic acquire load-maximum i16
atomic
amq_ld_max_i32 ra,rb,rc
atomic acquire load-maximum i32
atomic
amq_ld_max_i64 ra,rb,rc
atomic acquire load-maximum i64
atomic
amq_ld_max_i8 ra,rb,rc
atomic acquire load-maximum i8
atomic
amq_ld_max_u128 ra,rb,rc
atomic acquire load-maximum u128
atomic
amq_ld_max_u16 ra,rb,rc
atomic acquire load-maximum u16
atomic
amq_ld_max_u32 ra,rb,rc
atomic acquire load-maximum u32
atomic
amq_ld_max_u64 ra,rb,rc
atomic acquire load-maximum u64
atomic
amq_ld_max_u8 ra,rb,rc
atomic acquire load-maximum u8
atomic
amq_ld_min_i128 ra,rb,rc
atomic acquire load-minimum i128
atomic
amq_ld_min_i16 ra,rb,rc
atomic acquire load-minimum i16
atomic
amq_ld_min_i32 ra,rb,rc
atomic acquire load-minimum i32
atomic
amq_ld_min_i64 ra,rb,rc
atomic acquire load-minimum i64
atomic
amq_ld_min_i8 ra,rb,rc
atomic acquire load-minimum i8
atomic
amq_ld_min_u128 ra,rb,rc
atomic acquire load-minimum u128
atomic
amq_ld_min_u16 ra,rb,rc
atomic acquire load-minimum u16
atomic
amq_ld_min_u32 ra,rb,rc
atomic acquire load-minimum u32
atomic
amq_ld_min_u64 ra,rb,rc
atomic acquire load-minimum u64
atomic
amq_ld_min_u8 ra,rb,rc
atomic acquire load-minimum u8
atomic
amq_ld_or_u128 ra,rb,rc
atomic acquire load-or u128
atomic
amq_ld_or_u16 ra,rb,rc
atomic acquire load-or u16
atomic
amq_ld_or_u32 ra,rb,rc
atomic acquire load-or u32
atomic
amq_ld_or_u64 ra,rb,rc
atomic acquire load-or u64
atomic
amq_ld_or_u8 ra,rb,rc
atomic acquire load-or u8
atomic
amq_ld_xor_u128 ra,rb,rc
atomic acquire load-xor u128
atomic
amq_ld_xor_u16 ra,rb,rc
atomic acquire load-xor u16
atomic
amq_ld_xor_u32 ra,rb,rc
atomic acquire load-xor u32
atomic
amq_ld_xor_u64 ra,rb,rc
atomic acquire load-xor u64
atomic
amq_ld_xor_u8 ra,rb,rc
atomic acquire load-xor u8
atomic
amq_swap_u128 ra,rb,rc
atomic acquire swap u128
atomic
amq_swap_u16 ra,rb,rc
atomic acquire swap u16
atomic
amq_swap_u32 ra,rb,rc
atomic acquire swap u32
atomic
amq_swap_u64 ra,rb,rc
atomic acquire swap u64
atomic
amq_swap_u8 ra,rb,rc
atomic acquire swap u8
atomic
amr_cas_i128 ra,rb,rc,rd
atomic release compare and swap i128
atomic
amr_cas_i16 ra,rb,rc,rd
atomic release compare and swap i16
atomic
amr_cas_i32 ra,rb,rc,rd
atomic release compare and swap i32
atomic
amr_cas_i64 ra,rb,rc,rd
atomic release compare and swap i64
atomic
amr_cas_i8 ra,rb,rc,rd
atomic release compare and swap i8
atomic
amr_ld_add_u128 ra,rb,rc
atomic release load-add u128
atomic
amr_ld_add_u16 ra,rb,rc
atomic release load-add u16
atomic
amr_ld_add_u32 ra,rb,rc
atomic release load-add u32
atomic
amr_ld_add_u64 ra,rb,rc
atomic release load-add u64
atomic
amr_ld_add_u8 ra,rb,rc
atomic release load-add u8
atomic
amr_ld_and_u128 ra,rb,rc
atomic release load-and u128
atomic
amr_ld_and_u16 ra,rb,rc
atomic release load-and u16
atomic
amr_ld_and_u32 ra,rb,rc
atomic release load-and u32
atomic
amr_ld_and_u64 ra,rb,rc
atomic release load-and u64
atomic
amr_ld_and_u8 ra,rb,rc
atomic release load-and u8
atomic
amr_ld_max_i128 ra,rb,rc
atomic release load-maximum i128
atomic
amr_ld_max_i16 ra,rb,rc
atomic release load-maximum i16
atomic
amr_ld_max_i32 ra,rb,rc
atomic release load-maximum i32
atomic
amr_ld_max_i64 ra,rb,rc
atomic release load-maximum i64
atomic
amr_ld_max_i8 ra,rb,rc
atomic release load-maximum i8
atomic
amr_ld_max_u128 ra,rb,rc
atomic release load-maximum u128
atomic
amr_ld_max_u16 ra,rb,rc
atomic release load-maximum u16
atomic
amr_ld_max_u32 ra,rb,rc
atomic release load-maximum u32
atomic
amr_ld_max_u64 ra,rb,rc
atomic release load-maximum u64
atomic
amr_ld_max_u8 ra,rb,rc
atomic release load-maximum u8
atomic
amr_ld_min_i128 ra,rb,rc
atomic release load-minimum i128
atomic
amr_ld_min_i16 ra,rb,rc
atomic release load-minimum i16
atomic
amr_ld_min_i32 ra,rb,rc
atomic release load-minimum i32
atomic
amr_ld_min_i64 ra,rb,rc
atomic release load-minimum i64
atomic
amr_ld_min_i8 ra,rb,rc
atomic release load-minimum i8
atomic
amr_ld_min_u128 ra,rb,rc
atomic release load-minimum u128
atomic
amr_ld_min_u16 ra,rb,rc
atomic release load-minimum u16
atomic
amr_ld_min_u32 ra,rb,rc
atomic release load-minimum u32
atomic
amr_ld_min_u64 ra,rb,rc
atomic release load-minimum u64
atomic
amr_ld_min_u8 ra,rb,rc
atomic release load-minimum u8
atomic
amr_ld_or_u128 ra,rb,rc
atomic release load-or u128
atomic
amr_ld_or_u16 ra,rb,rc
atomic release load-or u16
atomic
amr_ld_or_u32 ra,rb,rc
atomic release load-or u32
atomic
amr_ld_or_u64 ra,rb,rc
atomic release load-or u64
atomic
amr_ld_or_u8 ra,rb,rc
atomic release load-or u8
atomic
amr_ld_xor_u128 ra,rb,rc
atomic release load-xor u128
atomic
amr_ld_xor_u16 ra,rb,rc
atomic release load-xor u16
atomic
amr_ld_xor_u32 ra,rb,rc
atomic release load-xor u32
atomic
amr_ld_xor_u64 ra,rb,rc
atomic release load-xor u64
atomic
amr_ld_xor_u8 ra,rb,rc
atomic release load-xor u8
atomic
amr_st_add_u128 rb,rc
atomic release store-add u128
atomic
amr_st_add_u16 rb,rc
atomic release store-add u16
atomic
amr_st_add_u32 rb,rc
atomic release store-add u32
atomic
amr_st_add_u64 rb,rc
atomic release store-add u64
atomic
amr_st_add_u8 rb,rc
atomic release store-add u8
atomic
amr_st_and_u128 rb,rc
atomic release store-and u128
atomic
amr_st_and_u16 rb,rc
atomic release store-and u16
atomic
amr_st_and_u32 rb,rc
atomic release store-and u32
atomic
amr_st_and_u64 rb,rc
atomic release store-and u64
atomic
amr_st_and_u8 rb,rc
atomic release store-and u8
atomic
amr_st_i128 ra,rb
atomic release store i128
atomic
amr_st_i16 ra,rb
atomic release store i16
atomic
amr_st_i32 ra,rb
atomic release store i32
atomic
amr_st_i64 ra,rb
atomic release store i64
atomic
amr_st_i8 ra,rb
atomic release store i8
atomic
amr_st_max_i128 rb,rc
atomic release store-maximum i128
atomic
amr_st_max_i16 rb,rc
atomic release store-maximum i16
atomic
amr_st_max_i32 rb,rc
atomic release store-maximum i32
atomic
amr_st_max_i64 rb,rc
atomic release store-maximum i64
atomic
amr_st_max_i8 rb,rc
atomic release store-maximum i8
atomic
amr_st_max_u128 rb,rc
atomic release store-maximum u128
atomic
amr_st_max_u16 rb,rc
atomic release store-maximum u16
atomic
amr_st_max_u32 rb,rc
atomic release store-maximum u32
atomic
amr_st_max_u64 rb,rc
atomic release store-maximum u64
atomic
amr_st_max_u8 rb,rc
atomic release store-maximum u8
atomic
amr_st_min_i128 rb,rc
atomic release store-minimum i128
atomic
amr_st_min_i16 rb,rc
atomic release store-minimum i16
atomic
amr_st_min_i32 rb,rc
atomic release store-minimum i32
atomic
amr_st_min_i64 rb,rc
atomic release store-minimum i64
atomic
amr_st_min_i8 rb,rc
atomic release store-minimum i8
atomic
amr_st_min_u128 rb,rc
atomic release store-minimum u128
atomic
amr_st_min_u16 rb,rc
atomic release store-minimum u16
atomic
amr_st_min_u32 rb,rc
atomic release store-minimum u32
atomic
amr_st_min_u64 rb,rc
atomic release store-minimum u64
atomic
amr_st_min_u8 rb,rc
atomic release store-minimum u8
atomic
amr_st_or_u128 rb,rc
atomic release store-or u128
atomic
amr_st_or_u16 rb,rc
atomic release store-or u16
atomic
amr_st_or_u32 rb,rc
atomic release store-or u32
atomic
amr_st_or_u64 rb,rc
atomic release store-or u64
atomic
amr_st_or_u8 rb,rc
atomic release store-or u8
atomic
amr_st_xor_u128 rb,rc
atomic release store-xor u128
atomic
amr_st_xor_u16 rb,rc
atomic release store-xor u16
atomic
amr_st_xor_u32 rb,rc
atomic release store-xor u32
atomic
amr_st_xor_u64 rb,rc
atomic release store-xor u64
atomic
amr_st_xor_u8 rb,rc
atomic release store-xor u8
atomic
amr_swap_u128 ra,rb,rc
atomic release swap u128
atomic
amr_swap_u16 ra,rb,rc
atomic release swap u16
atomic
amr_swap_u32 ra,rb,rc
atomic release swap u32
atomic
amr_swap_u64 ra,rb,rc
atomic release swap u64
atomic
amr_swap_u8 ra,rb,rc
atomic release swap u8
atomic
amx_cas_i128 ra,rb,rc,rd
atomic relaxed compare and swap i128
atomic
amx_cas_i16 ra,rb,rc,rd
atomic relaxed compare and swap i16
atomic
amx_cas_i32 ra,rb,rc,rd
atomic relaxed compare and swap i32
atomic
amx_cas_i64 ra,rb,rc,rd
atomic relaxed compare and swap i64
atomic
amx_cas_i8 ra,rb,rc,rd
atomic relaxed compare and swap i8
atomic
amx_ld_add_u128 ra,rb,rc
atomic relaxed load-add u128
atomic
amx_ld_add_u16 ra,rb,rc
atomic relaxed load-add u16
atomic
amx_ld_add_u32 ra,rb,rc
atomic relaxed load-add u32
atomic
amx_ld_add_u64 ra,rb,rc
atomic relaxed load-add u64
atomic
amx_ld_add_u8 ra,rb,rc
atomic relaxed load-add u8
atomic
amx_ld_and_u128 ra,rb,rc
atomic relaxed load-and u128
atomic
amx_ld_and_u16 ra,rb,rc
atomic relaxed load-and u16
atomic
amx_ld_and_u32 ra,rb,rc
atomic relaxed load-and u32
atomic
amx_ld_and_u64 ra,rb,rc
atomic relaxed load-and u64
atomic
amx_ld_and_u8 ra,rb,rc
atomic relaxed load-and u8
atomic
amx_ld_i128 ra,rb
atomic relaxed load i128
atomic
amx_ld_i16 ra,rb
atomic relaxed load i16
atomic
amx_ld_i32 ra,rb
atomic relaxed load i32
atomic
amx_ld_i64 ra,rb
atomic relaxed load i64
atomic
amx_ld_i8 ra,rb
atomic relaxed load i8
atomic
amx_ld_max_i128 ra,rb,rc
atomic relaxed load-maximum i128
atomic
amx_ld_max_i16 ra,rb,rc
atomic relaxed load-maximum i16
atomic
amx_ld_max_i32 ra,rb,rc
atomic relaxed load-maximum i32
atomic
amx_ld_max_i64 ra,rb,rc
atomic relaxed load-maximum i64
atomic
amx_ld_max_i8 ra,rb,rc
atomic relaxed load-maximum i8
atomic
amx_ld_max_u128 ra,rb,rc
atomic relaxed load-maximum u128
atomic
amx_ld_max_u16 ra,rb,rc
atomic relaxed load-maximum u16
atomic
amx_ld_max_u32 ra,rb,rc
atomic relaxed load-maximum u32
atomic
amx_ld_max_u64 ra,rb,rc
atomic relaxed load-maximum u64
atomic
amx_ld_max_u8 ra,rb,rc
atomic relaxed load-maximum u8
atomic
amx_ld_min_i128 ra,rb,rc
atomic relaxed load-minimum i128
atomic
amx_ld_min_i16 ra,rb,rc
atomic relaxed load-minimum i16
atomic
amx_ld_min_i32 ra,rb,rc
atomic relaxed load-minimum i32
atomic
amx_ld_min_i64 ra,rb,rc
atomic relaxed load-minimum i64
atomic
amx_ld_min_i8 ra,rb,rc
atomic relaxed load-minimum i8
atomic
amx_ld_min_u128 ra,rb,rc
atomic relaxed load-minimum u128
atomic
amx_ld_min_u16 ra,rb,rc
atomic relaxed load-minimum u16
atomic
amx_ld_min_u32 ra,rb,rc
atomic relaxed load-minimum u32
atomic
amx_ld_min_u64 ra,rb,rc
atomic relaxed load-minimum u64
atomic
amx_ld_min_u8 ra,rb,rc
atomic relaxed load-minimum u8
atomic
amx_ld_or_u128 ra,rb,rc
atomic relaxed load-or u128
atomic
amx_ld_or_u16 ra,rb,rc
atomic relaxed load-or u16
atomic
amx_ld_or_u32 ra,rb,rc
atomic relaxed load-or u32
atomic
amx_ld_or_u64 ra,rb,rc
atomic relaxed load-or u64
atomic
amx_ld_or_u8 ra,rb,rc
atomic relaxed load-or u8
atomic
amx_ld_xor_u128 ra,rb,rc
atomic relaxed load-xor u128
atomic
amx_ld_xor_u16 ra,rb,rc
atomic relaxed load-xor u16
atomic
amx_ld_xor_u32 ra,rb,rc
atomic relaxed load-xor u32
atomic
amx_ld_xor_u64 ra,rb,rc
atomic relaxed load-xor u64
atomic
amx_ld_xor_u8 ra,rb,rc
atomic relaxed load-xor u8
atomic
amx_st_add_u128 rb,rc
atomic relaxed store-add u128
atomic
amx_st_add_u16 rb,rc
atomic relaxed store-add u16
atomic
amx_st_add_u32 rb,rc
atomic relaxed store-add u32
atomic
amx_st_add_u64 rb,rc
atomic relaxed store-add u64
atomic
amx_st_add_u8 rb,rc
atomic relaxed store-add u8
atomic
amx_st_and_u128 rb,rc
atomic relaxed store-and u128
atomic
amx_st_and_u16 rb,rc
atomic relaxed store-and u16
atomic
amx_st_and_u32 rb,rc
atomic relaxed store-and u32
atomic
amx_st_and_u64 rb,rc
atomic relaxed store-and u64
atomic
amx_st_and_u8 rb,rc
atomic relaxed store-and u8
atomic
amx_st_i128 ra,rb
atomic relaxed store i128
atomic
amx_st_i16 ra,rb
atomic relaxed store i16
atomic
amx_st_i32 ra,rb
atomic relaxed store i32
atomic
amx_st_i64 ra,rb
atomic relaxed store i64
atomic
amx_st_i8 ra,rb
atomic relaxed store i8
atomic
amx_st_max_i128 rb,rc
atomic relaxed store-maximum i128
atomic
amx_st_max_i16 rb,rc
atomic relaxed store-maximum i16
atomic
amx_st_max_i32 rb,rc
atomic relaxed store-maximum i32
atomic
amx_st_max_i64 rb,rc
atomic relaxed store-maximum i64
atomic
amx_st_max_i8 rb,rc
atomic relaxed store-maximum i8
atomic
amx_st_max_u128 rb,rc
atomic relaxed store-maximum u128
atomic
amx_st_max_u16 rb,rc
atomic relaxed store-maximum u16
atomic
amx_st_max_u32 rb,rc
atomic relaxed store-maximum u32
atomic
amx_st_max_u64 rb,rc
atomic relaxed store-maximum u64
atomic
amx_st_max_u8 rb,rc
atomic relaxed store-maximum u8
atomic
amx_st_min_i128 rb,rc
atomic relaxed store-minimum i128
atomic
amx_st_min_i16 rb,rc
atomic relaxed store-minimum i16
atomic
amx_st_min_i32 rb,rc
atomic relaxed store-minimum i32
atomic
amx_st_min_i64 rb,rc
atomic relaxed store-minimum i64
atomic
amx_st_min_i8 rb,rc
atomic relaxed store-minimum i8
atomic
amx_st_min_u128 rb,rc
atomic relaxed store-minimum u128
atomic
amx_st_min_u16 rb,rc
atomic relaxed store-minimum u16
atomic
amx_st_min_u32 rb,rc
atomic relaxed store-minimum u32
atomic
amx_st_min_u64 rb,rc
atomic relaxed store-minimum u64
atomic
amx_st_min_u8 rb,rc
atomic relaxed store-minimum u8
atomic
amx_st_or_u128 rb,rc
atomic relaxed store-or u128
atomic
amx_st_or_u16 rb,rc
atomic relaxed store-or u16
atomic
amx_st_or_u32 rb,rc
atomic relaxed store-or u32
atomic
amx_st_or_u64 rb,rc
atomic relaxed store-or u64
atomic
amx_st_or_u8 rb,rc
atomic relaxed store-or u8
atomic
amx_st_xor_u128 rb,rc
atomic relaxed store-xor u128
atomic
amx_st_xor_u16 rb,rc
atomic relaxed store-xor u16
atomic
amx_st_xor_u32 rb,rc
atomic relaxed store-xor u32
atomic
amx_st_xor_u64 rb,rc
atomic relaxed store-xor u64
atomic
amx_st_xor_u8 rb,rc
atomic relaxed store-xor u8
atomic
amx_swap_u128 ra,rb,rc
atomic relaxed swap u128
atomic
amx_swap_u16 ra,rb,rc
atomic relaxed swap u16
atomic
amx_swap_u32 ra,rb,rc
atomic relaxed swap u32
atomic
amx_swap_u64 ra,rb,rc
atomic relaxed swap u64
atomic
amx_swap_u8 ra,rb,rc
atomic relaxed swap u8
atomic
amz_cas_i128 ra,rb,rc,rd
atomic acq_rel compare and swap i128
atomic
amz_cas_i16 ra,rb,rc,rd
atomic acq_rel compare and swap i16
atomic
amz_cas_i32 ra,rb,rc,rd
atomic acq_rel compare and swap i32
atomic
amz_cas_i64 ra,rb,rc,rd
atomic acq_rel compare and swap i64
atomic
amz_cas_i8 ra,rb,rc,rd
atomic acq_rel compare and swap i8
atomic
amz_ld_add_u128 ra,rb,rc
atomic acq_rel load-add u128
atomic
amz_ld_add_u16 ra,rb,rc
atomic acq_rel load-add u16
atomic
amz_ld_add_u32 ra,rb,rc
atomic acq_rel load-add u32
atomic
amz_ld_add_u64 ra,rb,rc
atomic acq_rel load-add u64
atomic
amz_ld_add_u8 ra,rb,rc
atomic acq_rel load-add u8
atomic
amz_ld_and_u128 ra,rb,rc
atomic acq_rel load-and u128
atomic
amz_ld_and_u16 ra,rb,rc
atomic acq_rel load-and u16
atomic
amz_ld_and_u32 ra,rb,rc
atomic acq_rel load-and u32
atomic
amz_ld_and_u64 ra,rb,rc
atomic acq_rel load-and u64
atomic
amz_ld_and_u8 ra,rb,rc
atomic acq_rel load-and u8
atomic
amz_ld_max_i128 ra,rb,rc
atomic acq_rel load-maximum i128
atomic
amz_ld_max_i16 ra,rb,rc
atomic acq_rel load-maximum i16
atomic
amz_ld_max_i32 ra,rb,rc
atomic acq_rel load-maximum i32
atomic
amz_ld_max_i64 ra,rb,rc
atomic acq_rel load-maximum i64
atomic
amz_ld_max_i8 ra,rb,rc
atomic acq_rel load-maximum i8
atomic
amz_ld_max_u128 ra,rb,rc
atomic acq_rel load-maximum u128
atomic
amz_ld_max_u16 ra,rb,rc
atomic acq_rel load-maximum u16
atomic
amz_ld_max_u32 ra,rb,rc
atomic acq_rel load-maximum u32
atomic
amz_ld_max_u64 ra,rb,rc
atomic acq_rel load-maximum u64
atomic
amz_ld_max_u8 ra,rb,rc
atomic acq_rel load-maximum u8
atomic
amz_ld_min_i128 ra,rb,rc
atomic acq_rel load-minimum i128
atomic
amz_ld_min_i16 ra,rb,rc
atomic acq_rel load-minimum i16
atomic
amz_ld_min_i32 ra,rb,rc
atomic acq_rel load-minimum i32
atomic
amz_ld_min_i64 ra,rb,rc
atomic acq_rel load-minimum i64
atomic
amz_ld_min_i8 ra,rb,rc
atomic acq_rel load-minimum i8
atomic
amz_ld_min_u128 ra,rb,rc
atomic acq_rel load-minimum u128
atomic
amz_ld_min_u16 ra,rb,rc
atomic acq_rel load-minimum u16
atomic
amz_ld_min_u32 ra,rb,rc
atomic acq_rel load-minimum u32
atomic
amz_ld_min_u64 ra,rb,rc
atomic acq_rel load-minimum u64
atomic
amz_ld_min_u8 ra,rb,rc
atomic acq_rel load-minimum u8
atomic
amz_ld_or_u128 ra,rb,rc
atomic acq_rel load-or u128
atomic
amz_ld_or_u16 ra,rb,rc
atomic acq_rel load-or u16
atomic
amz_ld_or_u32 ra,rb,rc
atomic acq_rel load-or u32
atomic
amz_ld_or_u64 ra,rb,rc
atomic acq_rel load-or u64
atomic
amz_ld_or_u8 ra,rb,rc
atomic acq_rel load-or u8
atomic
amz_ld_xor_u128 ra,rb,rc
atomic acq_rel load-xor u128
atomic
amz_ld_xor_u16 ra,rb,rc
atomic acq_rel load-xor u16
atomic
amz_ld_xor_u32 ra,rb,rc
atomic acq_rel load-xor u32
atomic
amz_ld_xor_u64 ra,rb,rc
atomic acq_rel load-xor u64
atomic
amz_ld_xor_u8 ra,rb,rc
atomic acq_rel load-xor u8
atomic
amz_swap_u128 ra,rb,rc
atomic acq_rel swap u128
atomic
amz_swap_u16 ra,rb,rc
atomic acq_rel swap u16
atomic
amz_swap_u32 ra,rb,rc
atomic acq_rel swap u32
atomic
amz_swap_u64 ra,rb,rc
atomic acq_rel swap u64
atomic
amz_swap_u8 ra,rb,rc
atomic acq_rel swap u8
base
and ra,rb,rc
bitwise and
base
and_dec ra,rb,rc
bitwise and decremented
base
and_imm.l ra,rb,simm21
and bitwise with immediate
base
and_neg ra,rb,rc
bitwise and negate
base
andn ra,rb,rc
bitwise and-not
base
andn_imm.l ra,rb,simm21
bitwise and-not with immediate
mmx
avg_vi16 ra,rb,rc
average vi16
mmx
avg_vi32 ra,rb,rc
average vi32
mmx
avg_vi64 ra,rb,rc
average vi64
mmx
avg_vi8 ra,rb,rc
average vi8
mmx
avg_vu16 ra,rb,rc
average vu16
mmx
avg_vu32 ra,rb,rc
average vu32
mmx
avg_vu64 ra,rb,rc
average vu64
mmx
avg_vu8 ra,rb,rc
average vu8
bitmanip
bit_clear ra,rb,rc
bit clear
bitmanip
bit_clear_imm ra,rb,sc
bit clear immediate
bitmanip
bit_flip ra,rb,rc
bit flip
bitmanip
bit_flip_imm ra,rb,sc
bit flip immediate
bitmanip
bit_set ra,rb,rc
bit set
bitmanip
bit_set_imm ra,rb,sc
bit set immediate
branch
br_bc.l ra,rb,simm17x16
branch if bit clear
branch
br_bc_imm.l ra,sb,simm17x16
branch if bit clear immediate
branch
br_bs.l ra,rb,simm17x16
branch if bit set
branch
br_bs_imm.l ra,sb,simm17x16
branch if bit set immediate
branch
br_eq_i128.l ra,rb,simm17x16
branch if equal i128
branch
br_eq_i32.l ra,rb,simm17x16
branch if equal i32
branch
br_eq_i64.l ra,rb,simm17x16
branch if equal i64
branch
br_eq_imm_i128.l ra,simm11,simm17x16
branch if equal immediate i128
branch
br_eq_imm_i32.l ra,simm11,simm17x16
branch if equal immediate i32
branch
br_eq_imm_i64.l ra,simm11,simm17x16
branch if equal immediate i64
branch
br_ge_i128.l ra,rb,simm17x16
branch if greater or equal i128
branch
br_ge_i32.l ra,rb,simm17x16
branch if greater or equal i32
branch
br_ge_i64.l ra,rb,simm17x16
branch if greater or equal i64
branch
br_ge_imm_i128.l ra,simm11,simm17x16
branch if greater or equal immediate i128
branch
br_ge_imm_i32.l ra,simm11,simm17x16
branch if greater or equal immediate i32
branch
br_ge_imm_i64.l ra,simm11,simm17x16
branch if greater or equal immediate i64
branch
br_ge_imm_u128.l ra,uimm11,simm17x16
branch if greater or equal immediate u128
branch
br_ge_imm_u32.l ra,uimm11,simm17x16
branch if greater or equal immediate u32
branch
br_ge_imm_u64.l ra,uimm11,simm17x16
branch if greater or equal immediate u64
branch
br_ge_u128.l ra,rb,simm17x16
branch if greater or equal u128
branch
br_ge_u32.l ra,rb,simm17x16
branch if greater or equal u32
branch
br_ge_u64.l ra,rb,simm17x16
branch if greater or equal u64
branch
br_lt_i128.l ra,rb,simm17x16
branch if less i128
branch
br_lt_i32.l ra,rb,simm17x16
branch if less i32
branch
br_lt_i64.l ra,rb,simm17x16
branch if less i64
branch
br_lt_imm_i128.l ra,simm11,simm17x16
branch if less immediate i128
branch
br_lt_imm_i32.l ra,simm11,simm17x16
branch if less immediate i32
branch
br_lt_imm_i64.l ra,simm11,simm17x16
branch if less immediate i64
branch
br_lt_imm_u128.l ra,uimm11,simm17x16
branch if less immediate u128
branch
br_lt_imm_u32.l ra,uimm11,simm17x16
branch if less immediate u32
branch
br_lt_imm_u64.l ra,uimm11,simm17x16
branch if less immediate u64
branch
br_lt_u128.l ra,rb,simm17x16
branch if less u128
branch
br_lt_u32.l ra,rb,simm17x16
branch if less u32
branch
br_lt_u64.l ra,rb,simm17x16
branch if less u64
branch
br_mask_all.l ra,uimm11,simm17x16
branch if mask immediate all bits set
branch
br_mask_any.l ra,uimm11,simm17x16
branch if mask immediate any bit set
branch
br_mask_none.l ra,uimm11,simm17x16
branch if mask immediate no bits set
branch
br_mask_notall.l ra,uimm11,simm17x16
branch if mask immediate not all bits set
branch
br_ne_i128.l ra,rb,simm17x16
branch if not equal i128
branch
br_ne_i32.l ra,rb,simm17x16
branch if not equal i32
branch
br_ne_i64.l ra,rb,simm17x16
branch if not equal i64
branch
br_ne_imm_i128.l ra,simm11,simm17x16
branch if not equal immediate i128
branch
br_ne_imm_i32.l ra,simm11,simm17x16
branch if not equal immediate i32
branch
br_ne_imm_i64.l ra,simm11,simm17x16
branch if not equal immediate i64
branch
br_o_f128.l ra,rb,simm17x16
branch if ordered f128
branch
br_o_f32.l ra,rb,simm17x16
branch if ordered f32
branch
br_o_f64.l ra,rb,simm17x16
branch if ordered f64
branch
br_oeq_f128.l ra,rb,simm17x16
branch if ordered and equal f128
branch
br_oeq_f32.l ra,rb,simm17x16
branch if ordered and equal f32
branch
br_oeq_f64.l ra,rb,simm17x16
branch if ordered and equal f64
branch
br_oge_f128.l ra,rb,simm17x16
branch if ordered and greater-or-equal f128
branch
br_oge_f32.l ra,rb,simm17x16
branch if ordered and greater-or-equal f32
branch
br_oge_f64.l ra,rb,simm17x16
branch if ordered and greater-or-equal f64
branch
br_olt_f128.l ra,rb,simm17x16
branch if ordered and less f128
branch
br_olt_f32.l ra,rb,simm17x16
branch if ordered and less f32
branch
br_olt_f64.l ra,rb,simm17x16
branch if ordered and less f64
branch
br_one_f128.l ra,rb,simm17x16
branch if ordered and not-equal f128
branch
br_one_f32.l ra,rb,simm17x16
branch if ordered and not-equal f32
branch
br_one_f64.l ra,rb,simm17x16
branch if ordered and not-equal f64
branch
br_u_f128.l ra,rb,simm17x16
branch if unordered f128
branch
br_u_f32.l ra,rb,simm17x16
branch if unordered f32
branch
br_u_f64.l ra,rb,simm17x16
branch if unordered f64
branch
br_ueq_f128.l ra,rb,simm17x16
branch if unordered or equal f128
branch
br_ueq_f32.l ra,rb,simm17x16
branch if unordered or equal f32
branch
br_ueq_f64.l ra,rb,simm17x16
branch if unordered or equal f64
branch
br_uge_f128.l ra,rb,simm17x16
branch if unordered or greater-or-equal f128
branch
br_uge_f32.l ra,rb,simm17x16
branch if unordered or greater-or-equal f32
branch
br_uge_f64.l ra,rb,simm17x16
branch if unordered or greater-or-equal f64
branch
br_ult_f128.l ra,rb,simm17x16
branch if unordered or less f128
branch
br_ult_f32.l ra,rb,simm17x16
branch if unordered or less f32
branch
br_ult_f64.l ra,rb,simm17x16
branch if unordered or less f64
branch
br_une_f128.l ra,rb,simm17x16
branch if unordered or not-equal f128
branch
br_une_f32.l ra,rb,simm17x16
branch if unordered or not-equal f32
branch
br_une_f64.l ra,rb,simm17x16
branch if unordered or not-equal f64
jump
call.l ra,simm28x16
call relative
jump
call_mi.l ra,rb,simm14
call memory indirect
jump
call_plt.l ra,uimm28
call procedure linkage table
jump
call_ri ra,rb,rc
call register indirect
jump
call_rvt.l ra,rb,simm14
call relative vtable
f128
ceil_f128 ra,rb
ceil (round up) f128
f16
ceil_f16 ra,rb
ceil (round up) f16
f32
ceil_f32 ra,rb
ceil (round up) f32
f64
ceil_f64 ra,rb
ceil (round up) f64
f16
ceil_vf16 ra,rb
round upward vf16
f32
ceil_vf32 ra,rb
round upward vf32
f64
ceil_vf64 ra,rb
round upward vf64
f128
class_f128 ra,rb,simm10
classify f128
f16
class_f16 ra,rb,simm10
classify f16
f32
class_f32 ra,rb,simm10
classify f32
f64
class_f64 ra,rb,simm10
classify f64
cipher
clmul_hh ra,rb,rc
carry-less multiply high-high
cipher
clmul_hl ra,rb,rc
carry-less multiply high-low
cipher
clmul_ll ra,rb,rc
carry-less multiply low-low
i128
cmov_eq_i128 ra,rb,rc,rd
cond move if equal zero i128
base
cmov_eq_i32 ra,rb,rc,rd
cond move if equal zero i32
base
cmov_eq_i64 ra,rb,rc,rd
cond move if equal zero i64
i128
cmov_le_i128 ra,rb,rc,rd
cond move if less than or equal zero i128
base
cmov_le_i32 ra,rb,rc,rd
cond move if less than or equal zero i32
base
cmov_le_i64 ra,rb,rc,rd
cond move if less than or equal zero i64
base
cmov_lsb ra,rb,rc,rd
cond move if least significant bit
i128
cmov_lt_i128 ra,rb,rc,rd
cond move if less than zero i128
base
cmov_lt_i32 ra,rb,rc,rd
cond move if less than zero i32
base
cmov_lt_i64 ra,rb,rc,rd
cond move if less than zero i64
i128
cmp_eq_i128 ra,rb,rc
compare equal i128
base
cmp_eq_i32 ra,rb,rc
compare equal i32
base
cmp_eq_i64 ra,rb,rc
compare equal i64
i128
cmp_eq_imm_i128.l ra,rb,simm21
compare equal immediate i128
base
cmp_eq_imm_i32.l ra,rb,simm21
compare equal immediate i32
base
cmp_eq_imm_i64.l ra,rb,simm21
compare equal immediate i64
mmx
cmp_eq_vi16 ra,rb,rc
compare equal vi16
mmx
cmp_eq_vi32 ra,rb,rc
compare equal vi32
mmx
cmp_eq_vi64 ra,rb,rc
compare equal vi64
mmx
cmp_eq_vi8 ra,rb,rc
compare equal vi8
i128
cmp_ge_i128 ra,rb,rc
compare greater-or-equal i128
base
cmp_ge_i32 ra,rb,rc
compare greater-or-equal i32
base
cmp_ge_i64 ra,rb,rc
compare greater-or-equal i64
i128
cmp_ge_imm_i128.l ra,rb,simm21
compare greater or equal immediate i128
base
cmp_ge_imm_i32.l ra,rb,simm21
compare greater or equal immediate i32
base
cmp_ge_imm_i64.l ra,rb,simm21
compare greater or equal immediate i64
i128
cmp_ge_imm_u128.l ra,rb,uimm21
compare greater or equal immediate u128
base
cmp_ge_imm_u32.l ra,rb,uimm21
compare greater or equal immediate u32
base
cmp_ge_imm_u64.l ra,rb,uimm21
compare greater or equal immediate u64
i128
cmp_ge_u128 ra,rb,rc
compare greater-or-equal u128
base
cmp_ge_u32 ra,rb,rc
compare greater-or-equal u32
base
cmp_ge_u64 ra,rb,rc
compare greater-or-equal u64
i128
cmp_lt_i128 ra,rb,rc
compare less i128
base
cmp_lt_i32 ra,rb,rc
compare less i32
base
cmp_lt_i64 ra,rb,rc
compare less i64
i128
cmp_lt_imm_i128.l ra,rb,simm21
compare less immediate i128
base
cmp_lt_imm_i32.l ra,rb,simm21
compare less immediate i32
base
cmp_lt_imm_i64.l ra,rb,simm21
compare less immediate i64
i128
cmp_lt_imm_u128.l ra,rb,uimm21
compare less immediate u128
base
cmp_lt_imm_u32.l ra,rb,uimm21
compare less immediate u32
base
cmp_lt_imm_u64.l ra,rb,uimm21
compare less immediate u64
i128
cmp_lt_u128 ra,rb,rc
compare less u128
base
cmp_lt_u32 ra,rb,rc
compare less u32
base
cmp_lt_u64 ra,rb,rc
compare less u64
mmx
cmp_lt_vi16 ra,rb,rc
compare less vi16
mmx
cmp_lt_vi32 ra,rb,rc
compare less vi32
mmx
cmp_lt_vi64 ra,rb,rc
compare less vi64
mmx
cmp_lt_vi8 ra,rb,rc
compare less vi8
mmx
cmp_lt_vu16 ra,rb,rc
compare less vu16
mmx
cmp_lt_vu32 ra,rb,rc
compare less vu32
mmx
cmp_lt_vu64 ra,rb,rc
compare less vu64
mmx
cmp_lt_vu8 ra,rb,rc
compare less vu8
i128
cmp_ne_i128 ra,rb,rc
compare not equal i128
base
cmp_ne_i32 ra,rb,rc
compare not equal i32
base
cmp_ne_i64 ra,rb,rc
compare not equal i64
i128
cmp_ne_imm_i128.l ra,rb,simm21
compare not equal immediate i128
base
cmp_ne_imm_i32.l ra,rb,simm21
compare not equal immediate i32
base
cmp_ne_imm_i64.l ra,rb,simm21
compare not equal immediate i64
f128
cmp_o_f128 ra,rb,rc
compare ordered f128
f16
cmp_o_f16 ra,rb,rc
compare ordered f16
f32
cmp_o_f32 ra,rb,rc
compare ordered f32
f64
cmp_o_f64 ra,rb,rc
compare ordered f64
f16
cmp_o_vf16 ra,rb,rc
compare ordered vf16
f32
cmp_o_vf32 ra,rb,rc
compare ordered vf32
f64
cmp_o_vf64 ra,rb,rc
compare ordered vf64
f128
cmp_oeq_f128 ra,rb,rc
compare ordered and equal f128
f16
cmp_oeq_f16 ra,rb,rc
compare ordered and equal f16
f32
cmp_oeq_f32 ra,rb,rc
compare ordered and equal f32
f64
cmp_oeq_f64 ra,rb,rc
compare ordered and equal f64
f16
cmp_oeq_vf16 ra,rb,rc
compare ordered and equal vf16
f32
cmp_oeq_vf32 ra,rb,rc
compare ordered and equal vf32
f64
cmp_oeq_vf64 ra,rb,rc
compare ordered and equal vf64
f128
cmp_oge_f128 ra,rb,rc
compare ordered and greater-or-equal f128
f16
cmp_oge_f16 ra,rb,rc
compare ordered and greater-or-equal f16
f32
cmp_oge_f32 ra,rb,rc
compare ordered and greater-or-equal f32
f64
cmp_oge_f64 ra,rb,rc
compare ordered and greater-or-equal f64
f16
cmp_oge_vf16 ra,rb,rc
compare ordered and greater-or-equal vf16
f32
cmp_oge_vf32 ra,rb,rc
compare ordered and greater-or-equal vf32
f64
cmp_oge_vf64 ra,rb,rc
compare ordered and greater-or-equal vf64
f128
cmp_olt_f128 ra,rb,rc
compare ordered and less f128
f16
cmp_olt_f16 ra,rb,rc
compare ordered and less f16
f32
cmp_olt_f32 ra,rb,rc
compare ordered and less f32
f64
cmp_olt_f64 ra,rb,rc
compare ordered and less f64
f16
cmp_olt_vf16 ra,rb,rc
compare ordered and less vf16
f32
cmp_olt_vf32 ra,rb,rc
compare ordered and less vf32
f64
cmp_olt_vf64 ra,rb,rc
compare ordered and less vf64
f128
cmp_one_f128 ra,rb,rc
compare ordered and not-equal f128
f16
cmp_one_f16 ra,rb,rc
compare ordered and not-equal f16
f32
cmp_one_f32 ra,rb,rc
compare ordered and not-equal f32
f64
cmp_one_f64 ra,rb,rc
compare ordered and not-equal f64
f16
cmp_one_vf16 ra,rb,rc
compare ordered and not-equal vf16
f32
cmp_one_vf32 ra,rb,rc
compare ordered and not-equal vf32
f64
cmp_one_vf64 ra,rb,rc
compare ordered and not-equal vf64
f128
cmp_u_f128 ra,rb,rc
compare unordered f128
f16
cmp_u_f16 ra,rb,rc
compare unordered f16
f32
cmp_u_f32 ra,rb,rc
compare unordered f32
f64
cmp_u_f64 ra,rb,rc
compare unordered f64
f16
cmp_u_vf16 ra,rb,rc
compare unordered vf16
f32
cmp_u_vf32 ra,rb,rc
compare unordered vf32
f64
cmp_u_vf64 ra,rb,rc
compare unordered vf64
f128
cmp_ueq_f128 ra,rb,rc
compare unordered or equal f128
f16
cmp_ueq_f16 ra,rb,rc
compare unordered or equal f16
f32
cmp_ueq_f32 ra,rb,rc
compare unordered or equal f32
f64
cmp_ueq_f64 ra,rb,rc
compare unordered or equal f64
f16
cmp_ueq_vf16 ra,rb,rc
compare unordered or equal vf16
f32
cmp_ueq_vf32 ra,rb,rc
compare unordered or equal vf32
f64
cmp_ueq_vf64 ra,rb,rc
compare unordered or equal vf64
f128
cmp_uge_f128 ra,rb,rc
compare unordered or greater-or-equal f128
f16
cmp_uge_f16 ra,rb,rc
compare unordered or greater-or-equal f16
f32
cmp_uge_f32 ra,rb,rc
compare unordered or greater-or-equal f32
f64
cmp_uge_f64 ra,rb,rc
compare unordered or greater-or-equal f64
f16
cmp_uge_vf16 ra,rb,rc
compare unordered or greater-or-equal vf16
f32
cmp_uge_vf32 ra,rb,rc
compare unordered or greater-or-equal vf32
f64
cmp_uge_vf64 ra,rb,rc
compare unordered or greater-or-equal vf64
f128
cmp_ult_f128 ra,rb,rc
compare unordered or less f128
f16
cmp_ult_f16 ra,rb,rc
compare unordered or less f16
f32
cmp_ult_f32 ra,rb,rc
compare unordered or less f32
f64
cmp_ult_f64 ra,rb,rc
compare unordered or less f64
f16
cmp_ult_vf16 ra,rb,rc
compare unordered or less vf16
f32
cmp_ult_vf32 ra,rb,rc
compare unordered or less vf32
f64
cmp_ult_vf64 ra,rb,rc
compare unordered or less vf64
f128
cmp_une_f128 ra,rb,rc
compare unordered or not-equal f128
f16
cmp_une_f16 ra,rb,rc
compare unordered or not-equal f16
f32
cmp_une_f32 ra,rb,rc
compare unordered or not-equal f32
f64
cmp_une_f64 ra,rb,rc
compare unordered or not-equal f64
f16
cmp_une_vf16 ra,rb,rc
compare unordered or not-equal vf16
f32
cmp_une_vf32 ra,rb,rc
compare unordered or not-equal vf32
f64
cmp_une_vf64 ra,rb,rc
compare unordered or not-equal vf64
bitmanip
cnt_lz ra,rb,sc
count leading zeros
bitmanip
cnt_pop ra,rb,sc
count population
bitmanip
cnt_tz ra,rb,sc
count trailing zeros
special
cpuid ra,rb,simm10
cpu identification
cipher
crc32c ra,rb,rc,rd
crc32c
f128
cvt_f128_f16 ra,rb
convert f128 to f16
f128
cvt_f128_f32 ra,rb
convert f128 to f32
f128
cvt_f128_f64 ra,rb
convert f128 to f64
f16
cvt_f32_f16 ra,rb
convert f32 to f16
f16
cvt_f64_f16 ra,rb
convert f64 to f16
f32
cvt_f64_f32 ra,rb
convert f64 to f32
f128
cvt_i128_f128 ra,rb
convert i128 to f128
f16
cvt_i128_f16 ra,rb
convert i128 to f16
f32
cvt_i128_f32 ra,rb
convert i128 to f32
f64
cvt_i128_f64 ra,rb
convert i128 to f64
f128
cvt_i32_f128 ra,rb
convert i32 to f128
f16
cvt_i32_f16 ra,rb
convert i32 to f16
f32
cvt_i32_f32 ra,rb
convert i32 to f32
f64
cvt_i32_f64 ra,rb
convert i32 to f64
f128
cvt_i64_f128 ra,rb
convert i64 to f128
f16
cvt_i64_f16 ra,rb
convert i64 to f16
f32
cvt_i64_f32 ra,rb
convert i64 to f32
f64
cvt_i64_f64 ra,rb
convert i64 to f64
f128
cvt_u128_f128 ra,rb
convert u128 to f128
f16
cvt_u128_f16 ra,rb
convert u128 to f16
f32
cvt_u128_f32 ra,rb
convert u128 to f32
f64
cvt_u128_f64 ra,rb
convert u128 to f64
f128
cvt_u32_f128 ra,rb
convert u32 to f128
f16
cvt_u32_f16 ra,rb
convert u32 to f16
f32
cvt_u32_f32 ra,rb
convert u32 to f32
f64
cvt_u32_f64 ra,rb
convert u32 to f64
f128
cvt_u64_f128 ra,rb
convert u64 to f128
f16
cvt_u64_f16 ra,rb
convert u64 to f16
f32
cvt_u64_f32 ra,rb
convert u64 to f32
f64
cvt_u64_f64 ra,rb
convert u64 to f64
f16
cvt_vi16_vf16 ra,rb
convert vi16 to vf16
f32
cvt_vi32_vf32 ra,rb
convert vi32 to vf32
f64
cvt_vi64_vf64 ra,rb
convert vi64 to vf64
f16
cvt_vu16_vf16 ra,rb
convert vu16 to vf16
f32
cvt_vu32_vf32 ra,rb
convert vu32 to vf32
f64
cvt_vu64_vf64 ra,rb
convert vu64 to vf64
special
dcbf.l rb,simm21
data cache block flush
privileged
dcbi.l rb,simm21
data cache block invalidate
special
dcbt.l rb,simm21
data cache block touch
base
dep ra,rb,rc,sd,se
deposit
base
dep_r ra,rb,rc,rd
deposit register
f128
div_f128 ra,rb,rc
divide f128
f16
div_f16 ra,rb,rc
divide f16
f32
div_f32 ra,rb,rc
divide f32
f64
div_f64 ra,rb,rc
divide f64
i128
div_i128 ra,rb,rc
divide i128
base
div_i32 ra,rb,rc
divide i32
base
div_i64 ra,rb,rc
divide i64
base
div_imm_i32.l ra,rb,simm21
divide i32 immediate
base
div_imm_i64.l ra,rb,simm21
divide i64 immediate
base
div_imm_u32.l ra,rb,uimm21
divide u32 immediate
base
div_imm_u64.l ra,rb,uimm21
divide u64 immediate
i128
div_u128 ra,rb,rc
divide u128
base
div_u32 ra,rb,rc
divide u32
base
div_u64 ra,rb,rc
divide u64
f16
div_vf16 ra,rb,rc
divide vf16
f32
div_vf32 ra,rb,rc
divide vf32
f64
div_vf64 ra,rb,rc
divide vf64
f16
dot_vf16 ra,rb,rc
dot-product vf16
f32
dot_vf32 ra,rb,rc
dot-product vf32
f64
dot_vf64 ra,rb,rc
dot-product vf64
special
eh_adj.l simm28x16
exception handler adjust eip
special
eh_catch.l rb,simm17x16
exception handler catch
special
eh_next.l rb,simm17x16
exception handler next
special
eh_throw.l rb,simm21
exception handler throw
f128
extend_f16_f128 ra,rb
extend f16 to f128
f16
extend_f16_f32 ra,rb
extend f16 to f32
f16
extend_f16_f64 ra,rb
extend f16 to f64
f128
extend_f32_f128 ra,rb
extend f32 to f128
f32
extend_f32_f64 ra,rb
extend f32 to f64
f128
extend_f64_f128 ra,rb
extend f64 to f128
atomic
fence_a
fence acquire
atomic
fence_ar
fence acq-rel
atomic
fence_r
fence release
atomic
fence_sc
fence seq-cst
f128
floor_f128 ra,rb
floor (round down) f128
f16
floor_f16 ra,rb
floor (round down) f16
f32
floor_f32 ra,rb
floor (round down) f32
f64
floor_f64 ra,rb
floor (round down) f64
f16
floor_vf16 ra,rb
round downward vf16
f32
floor_vf32 ra,rb
round downward vf32
f64
floor_vf64 ra,rb
round downward vf64
privileged
get_dbr ra,rb,simm10
get data breakpoint register
privileged
get_ibr ra,rb,simm10
get instruction breakpoint register
privileged
get_mr ra,rb,simm10
get monitor register
special
get_spr ra,spr
get special-purpose register
bitmanip
gtb ra,rb
graycode to binary
privileged
halt
halt processor
special
icbi.l rb,simm21
instruction cache block invalidate
special
int rb,simm10
interrupt
jump
jmp.l simm28x16
jump relative
special
jmp_mi rb,rc,scale
jump memory indirect
jump
jmp_r rb,rc,scale
jump register indirect
jump
jmp_t rb,rc
jump table
jump
jmp_t_i32 rb,rc
jump table i32 index
jump
jmp_t_u32 rb,rc
jump table u32 index
memory
ld_i128.l ra,rb,simm21
load base i128
memory
ld_i16.l ra,rb,simm21
load base i16
memory
ld_i32.l ra,rb,simm21
load base i32
memory
ld_i64.l ra,rb,simm21
load base i64
memory
ld_i8.l ra,rb,simm21
load base i8
base
ld_imm.l ra,simm28
load immediate
f32
ld_imm_f32.l ra,fp32
load immediate f32*
f64
ld_imm_f64.l ra,fp64
load immediate f64*
base
ld_imm_high.l ra,simm28
load immediate high
f128
ld_iprel_f128.l ra,fp128
load relative f128*
f32
ld_iprel_f32.l ra,fp32
load relative f32*
f64
ld_iprel_f64.l ra,fp64
load relative f64*
memory
ld_iprel_i128.l ra,uimm28
load relative i128
memory
ld_iprel_i16.l ra,uimm28
load relative i16
memory
ld_iprel_i32.l ra,uimm28
load relative i32
memory
ld_iprel_i64.l ra,uimm28
load relative i64
memory
ld_iprel_i8.l ra,uimm28
load relative i8
memory
ld_iprel_u16.l ra,uimm28
load relative u16
memory
ld_iprel_u32.l ra,uimm28
load relative u32
memory
ld_iprel_u64.l ra,uimm28
load relative u64
memory
ld_iprel_u8.l ra,uimm28
load relative u8
memory
ld_mia_i128 ra,rb,simm10
load and modify immediate after i128
memory
ld_mia_i16 ra,rb,simm10
load and modify immediate after i16
memory
ld_mia_i32 ra,rb,simm10
load and modify immediate after i32
memory
ld_mia_i64 ra,rb,simm10
load and modify immediate after i64
memory
ld_mia_i8 ra,rb,simm10
load and modify immediate after i8
memory
ld_mia_u16 ra,rb,simm10
load and modify immediate after u16
memory
ld_mia_u32 ra,rb,simm10
load and modify immediate after u32
memory
ld_mia_u64 ra,rb,simm10
load and modify immediate after u64
memory
ld_mia_u8 ra,rb,simm10
load and modify immediate after u8
memory
ld_mib_i128 ra,rb,simm10
load and modify immediate before i128
memory
ld_mib_i16 ra,rb,simm10
load and modify immediate before i16
memory
ld_mib_i32 ra,rb,simm10
load and modify immediate before i32
memory
ld_mib_i64 ra,rb,simm10
load and modify immediate before i64
memory
ld_mib_i8 ra,rb,simm10
load and modify immediate before i8
memory
ld_mib_u16 ra,rb,simm10
load and modify immediate before u16
memory
ld_mib_u32 ra,rb,simm10
load and modify immediate before u32
memory
ld_mib_u64 ra,rb,simm10
load and modify immediate before u64
memory
ld_mib_u8 ra,rb,simm10
load and modify immediate before u8
memory
ld_u16.l ra,rb,simm21
load base u16
memory
ld_u32.l ra,rb,simm21
load base u32
memory
ld_u64.l ra,rb,simm21
load base u64
memory
ld_u8.l ra,rb,simm21
load base u8
memory
ld_xi32_i128.l ra,rb,rc,scale,simm7
load i32-indexed i128
memory
ld_xi32_i16.l ra,rb,rc,scale,simm7
load i32-indexed i16
memory
ld_xi32_i32.l ra,rb,rc,scale,simm7
load i32-indexed i32
memory
ld_xi32_i64.l ra,rb,rc,scale,simm7
load i32-indexed i64
memory
ld_xi32_i8.l ra,rb,rc,scale,simm7
load i32-indexed i8
memory
ld_xi32_u16.l ra,rb,rc,scale,simm7
load i32-indexed u16
memory
ld_xi32_u32.l ra,rb,rc,scale,simm7
load i32-indexed u32
memory
ld_xi32_u64.l ra,rb,rc,scale,simm7
load i32-indexed u64
memory
ld_xi32_u8.l ra,rb,rc,scale,simm7
load i32-indexed u8
memory
ld_xi64_i128.l ra,rb,rc,scale,simm7
load i64-indexed i128
memory
ld_xi64_i16.l ra,rb,rc,scale,simm7
load i64-indexed i16
memory
ld_xi64_i32.l ra,rb,rc,scale,simm7
load i64-indexed i32
memory
ld_xi64_i64.l ra,rb,rc,scale,simm7
load i64-indexed i64
memory
ld_xi64_i8.l ra,rb,rc,scale,simm7
load i64-indexed i8
memory
ld_xi64_u16.l ra,rb,rc,scale,simm7
load i64-indexed u16
memory
ld_xi64_u32.l ra,rb,rc,scale,simm7
load i64-indexed u32
memory
ld_xi64_u64.l ra,rb,rc,scale,simm7
load i64-indexed u64
memory
ld_xi64_u8.l ra,rb,rc,scale,simm7
load i64-indexed u8
memory
ld_xu32_i128.l ra,rb,rc,scale,simm7
load u32-indexed i128
memory
ld_xu32_i16.l ra,rb,rc,scale,simm7
load u32-indexed i16
memory
ld_xu32_i32.l ra,rb,rc,scale,simm7
load u32-indexed i32
memory
ld_xu32_i64.l ra,rb,rc,scale,simm7
load u32-indexed i64
memory
ld_xu32_i8.l ra,rb,rc,scale,simm7
load u32-indexed i8
memory
ld_xu32_u16.l ra,rb,rc,scale,simm7
load u32-indexed u16
memory
ld_xu32_u32.l ra,rb,rc,scale,simm7
load u32-indexed u32
memory
ld_xu32_u64.l ra,rb,rc,scale,simm7
load u32-indexed u64
memory
ld_xu32_u8.l ra,rb,rc,scale,simm7
load u32-indexed u8
base
lda_iprel.l ra,uimm28
load address relative forward
base
lda_n.l ra,rb,simm14
load address near
base
lda_nrc.l ra,rb,simm14
load address near relative
base
lda_r.l ra,simm28x16
load address relative
base
lda_xi32.l ra,rb,rc,scale,simm7
load address i32-indexed
base
lda_xi64.l ra,rb,rc,scale,simm7
load address i64-indexed
base
lda_xu32.l ra,rb,rc,scale,simm7
load address u32-indexed
f16
madd_alt_vf16 ra,rb,rc,rd
multiply-alternating add-subtract vf16
f32
madd_alt_vf32 ra,rb,rc,rd
multiply-alternating add-subtract vf32
f64
madd_alt_vf64 ra,rb,rc,rd
multiply-alternating add-subtract vf64
f128
madd_f128 ra,rb,rc,rd
multiply-add f128
f16
madd_f16 ra,rb,rc,rd
multiply-add f16
f32
madd_f32 ra,rb,rc,rd
multiply-add f32
f64
madd_f64 ra,rb,rc,rd
multiply-add f64
f16
madd_vf16 ra,rb,rc,rd
multiply-add vf16
f32
madd_vf32 ra,rb,rc,rd
multiply-add vf32
f64
madd_vf64 ra,rb,rc,rd
multiply-add vf64
f128
max_f128 ra,rb,rc
maximum f128
f16
max_f16 ra,rb,rc
maximum f16
f32
max_f32 ra,rb,rc
maximum f32
f64
max_f64 ra,rb,rc
maximum f64
i128
max_i128 ra,rb,rc
maximum i128
base
max_i32 ra,rb,rc
maximum i32
base
max_i64 ra,rb,rc
maximum i64
base
max_imm_i32.l ra,rb,simm21
maximum immediate i32
base
max_imm_i64.l ra,rb,simm21
maximum immediate i64
base
max_imm_u32.l ra,rb,uimm21
maximum immediate u32
base
max_imm_u64.l ra,rb,uimm21
maximum immediate u64
i128
max_u128 ra,rb,rc
maximum u128
base
max_u32 ra,rb,rc
maximum u32
base
max_u64 ra,rb,rc
maximum u64
f16
max_vf16 ra,rb,rc
maximum vf16
f32
max_vf32 ra,rb,rc
maximum vf32
f64
max_vf64 ra,rb,rc
maximum vf64
mmx
max_vi16 ra,rb,rc
maximum vi16
mmx
max_vi32 ra,rb,rc
maximum vi32
mmx
max_vi64 ra,rb,rc
maximum vi64
mmx
max_vi8 ra,rb,rc
maximum vi8
mmx
max_vu16 ra,rb,rc
maximum vu16
mmx
max_vu32 ra,rb,rc
maximum vu32
mmx
max_vu64 ra,rb,rc
maximum vu64
mmx
max_vu8 ra,rb,rc
maximum vu8
f128
maxnum_f128 ra,rb,rc
maximum number f128
f16
maxnum_f16 ra,rb,rc
maximum number f16
f32
maxnum_f32 ra,rb,rc
maximum number f32
f64
maxnum_f64 ra,rb,rc
maximum number f64
f16
maxnum_vf16 ra,rb,rc
maximum number vf16
f32
maxnum_vf32 ra,rb,rc
maximum number vf32
f64
maxnum_vf64 ra,rb,rc
maximum number vf64
bitmanip
mbgath ra,rb,rc
masked bit gather
bitmanip
mbscat ra,rb,rc
masked bit scatter
base
mbsel ra,rb,rc,rd
masked bit selection
f128
merge_f128 ra,rb,rc,rd
merge f128
f16
merge_f16 ra,rb,rc,rd
merge f16
f32
merge_f32 ra,rb,rc,rd
merge f32
f64
merge_f64 ra,rb,rc,rd
merge f64
f16
merge_high_vf16 ra,rb,rc
merge high parts vf16
f32
merge_high_vf32 ra,rb,rc
merge high parts vf32
f64
merge_high_vf64 ra,rb,rc
merge high parts vf64
mmx
merge_high_vu16 ra,rb,rc
merge high vu16
mmx
merge_high_vu32 ra,rb,rc
merge high vu32
mmx
merge_high_vu64 ra,rb,rc
merge high vu64
mmx
merge_high_vu8 ra,rb,rc
merge high vu8
f16
merge_low_vf16 ra,rb,rc
merge low parts vf16
f32
merge_low_vf32 ra,rb,rc
merge low parts vf32
f64
merge_low_vf64 ra,rb,rc
merge low parts vf64
mmx
merge_low_vu16 ra,rb,rc
merge low vu16
mmx
merge_low_vu32 ra,rb,rc
merge low vu32
mmx
merge_low_vu64 ra,rb,rc
merge low vu64
mmx
merge_low_vu8 ra,rb,rc
merge low vu8
f16
merge_vf16 ra,rb,rc,rd
merge vf16
f32
merge_vf32 ra,rb,rc,rd
merge vf32
f64
merge_vf64 ra,rb,rc,rd
merge vf64
f128
min_f128 ra,rb,rc
minimum f128
f16
min_f16 ra,rb,rc
minimum f16
f32
min_f32 ra,rb,rc
minimum f32
f64
min_f64 ra,rb,rc
minimum f64
i128
min_i128 ra,rb,rc
minimum i128
base
min_i32 ra,rb,rc
minimum i32
base
min_i64 ra,rb,rc
minimum i64
base
min_imm_i32.l ra,rb,simm21
minimum immediate i32
base
min_imm_i64.l ra,rb,simm21
minimum immediate i64
base
min_imm_u32.l ra,rb,uimm21
minimum immediate u32
base
min_imm_u64.l ra,rb,uimm21
minimum immediate u64
i128
min_u128 ra,rb,rc
minimum u128
base
min_u32 ra,rb,rc
minimum u32
base
min_u64 ra,rb,rc
minimum u64
f16
min_vf16 ra,rb,rc
minimum vf16
f32
min_vf32 ra,rb,rc
minimum vf32
f64
min_vf64 ra,rb,rc
minimum vf64
mmx
min_vi16 ra,rb,rc
minimum vi16
mmx
min_vi32 ra,rb,rc
minimum vi32
mmx
min_vi64 ra,rb,rc
minimum vi64
mmx
min_vi8 ra,rb,rc
minimum vi8
mmx
min_vu16 ra,rb,rc
minimum vu16
mmx
min_vu32 ra,rb,rc
minimum vu32
mmx
min_vu64 ra,rb,rc
minimum vu64
mmx
min_vu8 ra,rb,rc
minimum vu8
f128
minnum_f128 ra,rb,rc
minimum number f128
f16
minnum_f16 ra,rb,rc
minimum number f16
f32
minnum_f32 ra,rb,rc
minimum number f32
f64
minnum_f64 ra,rb,rc
minimum number f64
f16
minnum_vf16 ra,rb,rc
minimum number vf16
f32
minnum_vf32 ra,rb,rc
minimum number vf32
f64
minnum_vf64 ra,rb,rc
minimum number vf64
base
mov ra,rb
move general register
base
mov2 ra,rb,rc,rd
move 2 general registers
special
mprobe ra,rb,rc
memory probe access
f16
msub_alt_vf16 ra,rb,rc,rd
multiply-alternating subtract-add vf16
f32
msub_alt_vf32 ra,rb,rc,rd
multiply-alternating subtract-add vf32
f64
msub_alt_vf64 ra,rb,rc,rd
multiply-alternating subtract-add vf64
f128
msub_f128 ra,rb,rc,rd
multiply-subtract f128
f16
msub_f16 ra,rb,rc,rd
multiply-subtract f16
f32
msub_f32 ra,rb,rc,rd
multiply-subtract f32
f64
msub_f64 ra,rb,rc,rd
multiply-subtract f64
f16
msub_vf16 ra,rb,rc,rd
multiply-subtract vf16
f32
msub_vf32 ra,rb,rc,rd
multiply-subtract vf32
f64
msub_vf64 ra,rb,rc,rd
multiply-subtract vf64
base
mul_add ra,rb,rc,rd
multiply-add u64
f128
mul_f128 ra,rb,rc
multiply f128
f16
mul_f16 ra,rb,rc
multiply f16
f32
mul_f32 ra,rb,rc
multiply f32
f64
mul_f64 ra,rb,rc
multiply f64
base
mul_h ra,rb,rc
multiply high
f16
mul_horiz_vf16 ra,rb,rc
multiply horizontal vf16
f32
mul_horiz_vf32 ra,rb,rc
multiply horizontal vf32
f64
mul_horiz_vf64 ra,rb,rc
multiply horizontal vf64
i128
mul_i128 ra,rb,rc
multiply i128
base
mul_i32 ra,rb,rc
multiply i32
base
mul_i64 ra,rb,rc
multiply i64
base
mul_imm_i32.l ra,rb,simm21
multiply immediate i32
base
mul_imm_i64.l ra,rb,simm21
multiply immediate i64
base
mul_imm_u32.l ra,rb,uimm21
multiply immediate u32
base
mul_sub ra,rb,rc,rd
multiply-subtract i64
base
mul_subr ra,rb,rc,rd
multiply-subtract reverse i64
base
mul_u32 ra,rb,rc
multiply u32
f16
mul_vf16 ra,rb,rc
multiply vf16
f32
mul_vf32 ra,rb,rc
multiply vf32
f64
mul_vf64 ra,rb,rc
multiply vf64
f128
nabs_diff_f128 ra,rb,rc
negate absolute difference f128
f16
nabs_diff_f16 ra,rb,rc
negate absolute difference f16
f32
nabs_diff_f32 ra,rb,rc
negate absolute difference f32
f64
nabs_diff_f64 ra,rb,rc
negate absolute difference f64
f16
nabs_diff_vf16 ra,rb,rc
negate absolute difference vf16
f32
nabs_diff_vf32 ra,rb,rc
negate absolute difference vf32
f64
nabs_diff_vf64 ra,rb,rc
negate absolute difference vf64
f128
nabs_f128 ra,rb
negate absolute value f128
f16
nabs_f16 ra,rb
negate absolute value f16
f32
nabs_f32 ra,rb
negate absolute value f32
f64
nabs_f64 ra,rb
negate absolute value f64
f16
nabs_vf16 ra,rb
negate absolute value vf16
f32
nabs_vf32 ra,rb
negate absolute value vf32
f64
nabs_vf64 ra,rb
negate absolute value vf64
f128
nadd_f128 ra,rb,rc
negate add f128
f16
nadd_f16 ra,rb,rc
negate add f16
f32
nadd_f32 ra,rb,rc
negate add f32
f64
nadd_f64 ra,rb,rc
negate add f64
f16
nadd_vf16 ra,rb,rc
negate add vf16
f32
nadd_vf32 ra,rb,rc
negate add vf32
f64
nadd_vf64 ra,rb,rc
negate add vf64
base
nand ra,rb,rc
bitwise not-and
f128
nearbyint_f128 ra,rb
round dynamic f128
f16
nearbyint_f16 ra,rb
round dynamic f16
f32
nearbyint_f32 ra,rb
round dynamic f32
f64
nearbyint_f64 ra,rb
round dynamic f64
f16
nearbyint_vf16 ra,rb
round dynamic vf16
f32
nearbyint_vf32 ra,rb
round dynamic vf32
f64
nearbyint_vf64 ra,rb
round dynamic vf64
f128
neg_f128 ra,rb
negate f128
f16
neg_f16 ra,rb
negate f16
f32
neg_f32 ra,rb
negate f32
f64
neg_f64 ra,rb
negate f64
i128
neg_i128 ra,rb
negate i128
base
neg_i32 ra,rb
negate i32
base
neg_i64 ra,rb
negate i64
f16
neg_vf16 ra,rb
negate vf16
f32
neg_vf32 ra,rb
negate vf32
f64
neg_vf64 ra,rb
negate vf64
f128
nmadd_f128 ra,rb,rc,rd
negate multiply-add f128
f16
nmadd_f16 ra,rb,rc,rd
negate multiply-add f16
f32
nmadd_f32 ra,rb,rc,rd
negate multiply-add f32
f64
nmadd_f64 ra,rb,rc,rd
negate multiply-add f64
f16
nmadd_vf16 ra,rb,rc,rd
negate multiply-add vf16
f32
nmadd_vf32 ra,rb,rc,rd
negate multiply-add vf32
f64
nmadd_vf64 ra,rb,rc,rd
negate multiply-add vf64
f128
nmsub_f128 ra,rb,rc,rd
negate multiply-subtract f128
f16
nmsub_f16 ra,rb,rc,rd
negate multiply-subtract f16
f32
nmsub_f32 ra,rb,rc,rd
negate multiply-subtract f32
f64
nmsub_f64 ra,rb,rc,rd
negate multiply-subtract f64
f16
nmsub_vf16 ra,rb,rc,rd
negate multiply-subtract vf16
f32
nmsub_vf32 ra,rb,rc,rd
negate multiply-subtract vf32
f64
nmsub_vf64 ra,rb,rc,rd
negate multiply-subtract vf64
f128
nmul_f128 ra,rb,rc
negate multiply f128
f16
nmul_f16 ra,rb,rc
negate multiply f16
f32
nmul_f32 ra,rb,rc
negate multiply f32
f64
nmul_f64 ra,rb,rc
negate multiply f64
f16
nmul_vf16 ra,rb,rc
negate multiply vf16
f32
nmul_vf32 ra,rb,rc
negate multiply vf32
f64
nmul_vf64 ra,rb,rc
negate multiply vf64
base
nop.l simm28
no operation
base
nor ra,rb,rc
bitwise not-or
base
not ra,rb
bitwise not
nullifying
nul_bc ra,rb,dy,dn
nullify if bit clear
nullifying
nul_bc_imm ra,sb,dy,dn
nullify if bit clear immediate
nullifying
nul_bs ra,rb,dy,dn
nullify if bit set
nullifying
nul_bs_imm ra,sb,dy,dn
nullify if bit set immediate
nullifying
nul_eq_i128 ra,rb,dy,dn
nullify if equal i128
nullifying
nul_eq_i32 ra,rb,dy,dn
nullify if equal i32
nullifying
nul_eq_i64 ra,rb,dy,dn
nullify if equal i64
nullifying
nul_eq_imm_i128.l ra,simm11,dy,dn
nullify if equal immediate i128
nullifying
nul_eq_imm_i32.l ra,simm11,dy,dn
nullify if equal immediate i32
nullifying
nul_eq_imm_i64.l ra,simm11,dy,dn
nullify if equal immediate i64
nullifying
nul_ge_i128 ra,rb,dy,dn
nullify if greater or equal i128
nullifying
nul_ge_i32 ra,rb,dy,dn
nullify if greater or equal i32
nullifying
nul_ge_i64 ra,rb,dy,dn
nullify if greater or equal i64
nullifying
nul_ge_imm_i128.l ra,simm11,dy,dn
nullify if greater or equal immediate i128
nullifying
nul_ge_imm_i32.l ra,simm11,dy,dn
nullify if greater or equal immediate i32
nullifying
nul_ge_imm_i64.l ra,simm11,dy,dn
nullify if greater or equal immediate i64
nullifying
nul_ge_imm_u128.l ra,uimm11,dy,dn
nullify if greater or equal immediate u128
nullifying
nul_ge_imm_u32.l ra,uimm11,dy,dn
nullify if greater or equal immediate u32
nullifying
nul_ge_imm_u64.l ra,uimm11,dy,dn
nullify if greater or equal immediate u64
nullifying
nul_ge_u128 ra,rb,dy,dn
nullify if greater or equal u128
nullifying
nul_ge_u32 ra,rb,dy,dn
nullify if greater or equal u32
nullifying
nul_ge_u64 ra,rb,dy,dn
nullify if greater or equal u64
nullifying
nul_lt_i128 ra,rb,dy,dn
nullify if less i128
nullifying
nul_lt_i32 ra,rb,dy,dn
nullify if less i32
nullifying
nul_lt_i64 ra,rb,dy,dn
nullify if less i64
nullifying
nul_lt_imm_i128.l ra,simm11,dy,dn
nullify if less immediate i128
nullifying
nul_lt_imm_i32.l ra,simm11,dy,dn
nullify if less immediate i32
nullifying
nul_lt_imm_i64.l ra,simm11,dy,dn
nullify if less immediate i64
nullifying
nul_lt_imm_u128.l ra,uimm11,dy,dn
nullify if less immediate u128
nullifying
nul_lt_imm_u32.l ra,uimm11,dy,dn
nullify if less immediate u32
nullifying
nul_lt_imm_u64.l ra,uimm11,dy,dn
nullify if less immediate u64
nullifying
nul_lt_u128 ra,rb,dy,dn
nullify if less u128
nullifying
nul_lt_u32 ra,rb,dy,dn
nullify if less u32
nullifying
nul_lt_u64 ra,rb,dy,dn
nullify if less u64
nullifying
nul_mask_all.l ra,uimm11,dy,dn
nullify if mask immediate all bits set
nullifying
nul_mask_any.l ra,uimm11,dy,dn
nullify if mask immediate any bit set
nullifying
nul_mask_none.l ra,uimm11,dy,dn
nullify if mask immediate no bit set
nullifying
nul_mask_notall.l ra,uimm11,dy,dn
nullify if mask immediate not all bits set
nullifying
nul_ne_i128 ra,rb,dy,dn
nullify if not-equal i128
nullifying
nul_ne_i32 ra,rb,dy,dn
nullify if not-equal i32
nullifying
nul_ne_i64 ra,rb,dy,dn
nullify if not-equal i64
nullifying
nul_ne_imm_i128.l ra,simm11,dy,dn
nullify if not-equal immediate i128
nullifying
nul_ne_imm_i32.l ra,simm11,dy,dn
nullify if not-equal immediate i32
nullifying
nul_ne_imm_i64.l ra,simm11,dy,dn
nullify if not-equal immediate i64
nullifying
nul_o_f128 ra,rb,dy,dn
nullify if ordered f128
nullifying
nul_o_f32 ra,rb,dy,dn
nullify if ordered f32
nullifying
nul_o_f64 ra,rb,dy,dn
nullify if ordered f64
nullifying
nul_oeq_f128 ra,rb,dy,dn
nullify if ordered and equal f128
nullifying
nul_oeq_f32 ra,rb,dy,dn
nullify if ordered and equal f32
nullifying
nul_oeq_f64 ra,rb,dy,dn
nullify if ordered and equal f64
nullifying
nul_oge_f128 ra,rb,dy,dn
nullify if ordered and greater-or-equal f128
nullifying
nul_oge_f32 ra,rb,dy,dn
nullify if ordered and greater-or-equal f32
nullifying
nul_oge_f64 ra,rb,dy,dn
nullify if ordered and greater-or-equal f64
nullifying
nul_olt_f128 ra,rb,dy,dn
nullify if ordered and less f128
nullifying
nul_olt_f32 ra,rb,dy,dn
nullify if ordered and less f32
nullifying
nul_olt_f64 ra,rb,dy,dn
nullify if ordered and less f64
nullifying
nul_one_f128 ra,rb,dy,dn
nullify if ordered and not-equal f128
nullifying
nul_one_f32 ra,rb,dy,dn
nullify if ordered and not-equal f32
nullifying
nul_one_f64 ra,rb,dy,dn
nullify if ordered and not-equal f64
nullifying
nul_u_f128 ra,rb,dy,dn
nullify if unordered f128
nullifying
nul_u_f32 ra,rb,dy,dn
nullify if unordered f32
nullifying
nul_u_f64 ra,rb,dy,dn
nullify if unordered f64
nullifying
nul_ueq_f128 ra,rb,dy,dn
nullify if unordered or equal f128
nullifying
nul_ueq_f32 ra,rb,dy,dn
nullify if unordered or equal f32
nullifying
nul_ueq_f64 ra,rb,dy,dn
nullify if unordered or equal f64
nullifying
nul_uge_f128 ra,rb,dy,dn
nullify if unordered or greater-or-equal f128
nullifying
nul_uge_f32 ra,rb,dy,dn
nullify if unordered or greater-or-equal f32
nullifying
nul_uge_f64 ra,rb,dy,dn
nullify if unordered or greater-or-equal f64
nullifying
nul_ult_f128 ra,rb,dy,dn
nullify if unordered or less f128
nullifying
nul_ult_f32 ra,rb,dy,dn
nullify if unordered or less f32
nullifying
nul_ult_f64 ra,rb,dy,dn
nullify if unordered or less f64
nullifying
nul_une_f128 ra,rb,dy,dn
nullify if unordered or not-equal f128
nullifying
nul_une_f32 ra,rb,dy,dn
nullify if unordered or not-equal f32
nullifying
nul_une_f64 ra,rb,dy,dn
nullify if unordered or not-equal f64
base
or ra,rb,rc
bitwise or
base
or_imm.l ra,rb,simm21
bitwise or with immediate
base
orn ra,rb,rc
bitwise or-not
base
orn_imm.l ra,rb,simm21
bitwise or-not immediate
mmx
pack_mod_vu16 ra,rb,rc
pack unsigned modulo vu16
mmx
pack_mod_vu32 ra,rb,rc
pack unsigned modulo vu32
mmx
pack_mod_vu64 ra,rb,rc
pack unsigned modulo vu64
mmx
pack_sat_vi16 ra,rb,rc
pack saturated vi16
mmx
pack_sat_vi32 ra,rb,rc
pack saturated vi32
mmx
pack_sat_vi64 ra,rb,rc
pack saturated vi64
mmx
pack_sat_vu16 ra,rb,rc
pack saturated vu16
mmx
pack_sat_vu32 ra,rb,rc
pack saturated vu32
mmx
pack_sat_vu64 ra,rb,rc
pack saturated vu64
mmx
pack_usat_vi16 ra,rb,rc
pack unsigned saturated vi16
mmx
pack_usat_vi32 ra,rb,rc
pack unsigned saturated vi32
mmx
pack_usat_vi64 ra,rb,rc
pack unsigned saturated vi64
f16
pack_vf16 ra,rb,rc
pack vf16
f32
pack_vf32 ra,rb,rc
pack vf32
f64
pack_vf64 ra,rb,rc
pack vf64
bitmanip
perm ra,rb,rc,rd
permute bytes
bitmanip
permb ra,rb,sc
permute bits
privileged
ptc ra,rb,rc
purge translation cache
special
random ra,rb
random
i128
rem_i128 ra,rb,rc
remainder i128
base
rem_i32 ra,rb,rc
remainder i32
base
rem_i64 ra,rb,rc
remainder i64
base
rem_imm_i32.l ra,rb,simm21
remainder i32 immediate
base
rem_imm_i64.l ra,rb,simm21
remainder i64 immediate
base
rem_imm_u32.l ra,rb,uimm21
remainder u32 immediate
base
rem_imm_u64.l ra,rb,uimm21
remainder u64 immediate
i128
rem_u128 ra,rb,rc
remainder u128
base
rem_u32 ra,rb,rc
remainder u32
base
rem_u64 ra,rb,rc
remainder u64
jump
rep_ge_i32.l ra,rb,uimm6,simm11x16
repeat on greater or equal i32
jump
rep_ge_i64.l ra,rb,uimm6,simm11x16
repeat on greater or equal i64
jump
rep_ge_u32.l ra,rb,uimm6,simm11x16
repeat on greater or equal u32
jump
rep_ge_u64.l ra,rb,uimm6,simm11x16
repeat on greater or equal u64
jump
rep_gt_i32.l ra,rb,uimm6,simm11x16
repeat on greater i32
jump
rep_gt_i64.l ra,rb,uimm6,simm11x16
repeat on greater i64
jump
rep_gt_u32.l ra,rb,uimm6,simm11x16
repeat on greater u32
jump
rep_gt_u64.l ra,rb,uimm6,simm11x16
repeat on greater u64
jump
rep_le_i32.l ra,rb,uimm6,simm11x16
repeat on less or equal i32
jump
rep_le_i64.l ra,rb,uimm6,simm11x16
repeat on less or equal i64
jump
rep_le_u32.l ra,rb,uimm6,simm11x16
repeat on less or equal u32
jump
rep_le_u64.l ra,rb,uimm6,simm11x16
repeat on less or equal u64
jump
rep_lt_i32.l ra,rb,uimm6,simm11x16
repeat on less i32
jump
rep_lt_i64.l ra,rb,uimm6,simm11x16
repeat on less i64
jump
rep_lt_u32.l ra,rb,uimm6,simm11x16
repeat on less u32
jump
rep_lt_u64.l ra,rb,uimm6,simm11x16
repeat on less u64
jump
ret
return from subroutine
jump
retf.l uimm21
return from subroutine (rollback frame)
privileged
rfi
return from interruption
f128
rint_f128 ra,rb
round dynamic, detect inexact f128
f16
rint_f16 ra,rb
round dynamic, detect inexact f16
f32
rint_f32 ra,rb
round dynamic, detect inexact f32
f64
rint_f64 ra,rb
round dynamic, detect inexact f64
f16
rint_vf16 ra,rb
round dynamic, detect inexact vf16
f32
rint_vf32 ra,rb
round dynamic, detect inexact vf32
f64
rint_vf64 ra,rb
round dynamic, detect inexact vf64
mmx
rol_vu16 ra,rb,rc
rotate left vu16
mmx
rol_vu32 ra,rb,rc
rotate left vu32
mmx
rol_vu64 ra,rb,rc
rotate left vu64
mmx
rol_vu8 ra,rb,rc
rotate left vu8
mmx
ror_vu16 ra,rb,rc
rotate right vu16
mmx
ror_vu32 ra,rb,rc
rotate right vu32
mmx
ror_vu64 ra,rb,rc
rotate right vu64
mmx
ror_vu8 ra,rb,rc
rotate right vu8
f128
round_f128 ra,rb
round away from zero f128
f16
round_f16 ra,rb
round away from zero f16
f32
round_f32 ra,rb
round away from zero f32
f64
round_f64 ra,rb
round away from zero f64
f16
round_vf16 ra,rb
round away from zero vf16
f32
round_vf32 ra,rb
round away from zero vf32
f64
round_vf64 ra,rb
round away from zero vf64
f128
roundeven_f128 ra,rb
round to nearest even f128
f16
roundeven_f16 ra,rb
round to nearest even f16
f32
roundeven_f32 ra,rb
round to nearest even f32
f64
roundeven_f64 ra,rb
round to nearest even f64
f16
roundeven_vf16 ra,rb
round to nearest even vf16
f32
roundeven_vf32 ra,rb
round to nearest even vf32
f64
roundeven_vf64 ra,rb
round to nearest even vf64
privileged
rscover
register stack cover
privileged
rsflush
register stack flush
privileged
rsload
register stack load
f128
rsqrt_f128 ra,rb
reciprocal square root f128
f16
rsqrt_f16 ra,rb
reciprocal square root f16
f32
rsqrt_f32 ra,rb
reciprocal square root f32
f64
rsqrt_f64 ra,rb
reciprocal square root f64
f16
rsqrt_vf16 ra,rb
reciprocal square root vf16
f32
rsqrt_vf32 ra,rb
reciprocal square root vf32
f64
rsqrt_vf64 ra,rb
reciprocal square root vf64
f128
scale_f128 ra,rb,sc
scale f128
privileged
set_dbr ra,rb,simm10
set data breakpoint register
privileged
set_dtr ra,rb,rc
set data translation register
privileged
set_ibr ra,rb,simm10
set instruction breakpoint register
privileged
set_itr ra,rb,rc
set instruction translation register
privileged
set_mr ra,rb,simm10
set monitor register
special
set_spr ra,spr
set special-purpose register
base
sext_i16 ra,rb
sign extend i16
base
sext_i32 ra,rb
sign extend i32
base
sext_i64 ra,rb
sign extend i64
base
sext_i8 ra,rb
sign extend i8
base
sl_add_i32 ra,rb,rc,sd
shift left and add i32
base
sl_add_i64 ra,rb,rc,sd
shift left and add i64
base
sl_add_u32 ra,rb,rc,sd
shift left and add u32
base
sl_or ra,rb,rc,sd
shift left and or
base
sl_sub_i32 ra,rb,rc,sd
shift left and subtract i32
base
sl_sub_i64 ra,rb,rc,sd
shift left and subtract i64
base
sl_sub_u32 ra,rb,rc,sd
shift left and subtract u32
base
sl_subr_i32 ra,rb,rc,sd
shift left and subtract reverse i32
base
sl_subr_i64 ra,rb,rc,sd
shift left and subtract reverse i64
base
sl_subr_u32 ra,rb,rc,sd
shift left and subtract reverse u32
base
sl_xor ra,rb,rc,sd
shift left and xor
i128
sll_imm_u128 ra,rb,sc
shift left logical immediate u128
base
sll_imm_u32 ra,rb,sc
shift left logical immediate u32
base
sll_imm_u64 ra,rb,sc
shift left logical immediate u64
mmx
sll_imm_vu16 ra,rb,sc
shift left logical immediate vu16
mmx
sll_imm_vu32 ra,rb,sc
shift left logical immediate vu32
mmx
sll_imm_vu64 ra,rb,sc
shift left logical immediate vu64
mmx
sll_imm_vu8 ra,rb,sc
shift left logical immediate vu8
i128
sll_u128 ra,rb,rc
shift left logical u128
base
sll_u32 ra,rb,rc
shift left logical u32
base
sll_u64 ra,rb,rc
shift left logical u64
mmx
sll_vu16 ra,rb,rc
shift left logical vu16
mmx
sll_vu32 ra,rb,rc
shift left logical vu32
mmx
sll_vu64 ra,rb,rc
shift left logical vu64
mmx
sll_vu8 ra,rb,rc
shift left logical vu8
i128
slp_i128 ra,rb,rc,rd
shift left pair i128
base
slp_i32 ra,rb,rc,rd
shift left pair i32
base
slp_i64 ra,rb,rc,rd
shift left pair i64
base
slsra_i32 ra,rb,rc,rd
shift left and shift right algebraic i32
base
slsra_i64 ra,rb,rc,rd
shift left and shift right algebraic i64
base
slsra_imm_i64 ra,rb,sc,sd
shift left and right algebraic immediate i64
base
slsrl_imm_u64 ra,rb,sc,sd
shift left and right logical immediate u64
base
slsrl_u32 ra,rb,rc,rd
shift left and shift right logical u32
base
slsrl_u64 ra,rb,rc,rd
shift left and shift right logical u64
f128
sqrt_f128 ra,rb
square root f128
f16
sqrt_f16 ra,rb
square root f16
f32
sqrt_f32 ra,rb
square root f32
f64
sqrt_f64 ra,rb
square root f64
f16
sqrt_vf16 ra,rb
square root vf16
f32
sqrt_vf32 ra,rb
square root vf32
f64
sqrt_vf64 ra,rb
square root vf64
i128
sra_i128 ra,rb,rc
shift right algebraic i128
base
sra_i32 ra,rb,rc
shift right algebraic i32
base
sra_i64 ra,rb,rc
shift right algebraic i64
i128
sra_imm_i128 ra,rb,sc
shift right algebraic immediate i128
base
sra_imm_i32 ra,rb,sc
shift right algebraic immediate i32
base
sra_imm_i64 ra,rb,sc
shift right algebraic immediate i64
mmx
sra_imm_vi16 ra,rb,sc
shift right algebraic immediate vi16
mmx
sra_imm_vi32 ra,rb,sc
shift right algebraic immediate vi32
mmx
sra_imm_vi64 ra,rb,sc
shift right algebraic immediate vi64
mmx
sra_imm_vi8 ra,rb,sc
shift right algebraic immediate vi8
mmx
sra_vi16 ra,rb,rc
shift right algebraic vi16
mmx
sra_vi32 ra,rb,rc
shift right algebraic vi32
mmx
sra_vi64 ra,rb,rc
shift right algebraic vi64
mmx
sra_vi8 ra,rb,rc
shift right algebraic vi8
i128
srd_i128 ra,rb,rc
shift right dividing i128
base
srd_i32 ra,rb,rc
shift right dividing i32
base
srd_i64 ra,rb,rc
shift right dividing i64
i128
srd_imm_i128 ra,rb,sc
shift right dividing immediate i128
base
srd_imm_i32 ra,rb,sc
shift right dividing immediate i32
base
srd_imm_i64 ra,rb,sc
shift right dividing immediate i64
i128
srl_imm_u128 ra,rb,sc
shift right logical immediate u128
base
srl_imm_u32 ra,rb,sc
shift right logical immediate u32
base
srl_imm_u64 ra,rb,sc
shift right logical immediate u64
mmx
srl_imm_vu16 ra,rb,sc
shift right logical immediate vu16
mmx
srl_imm_vu32 ra,rb,sc
shift right logical immediate vu32
mmx
srl_imm_vu64 ra,rb,sc
shift right logical immediate vu64
mmx
srl_imm_vu8 ra,rb,sc
shift right logical immediate vu8
i128
srl_u128 ra,rb,rc
shift right logical u128
base
srl_u32 ra,rb,rc
shift right logical u32
base
srl_u64 ra,rb,rc
shift right logical u64
mmx
srl_vu16 ra,rb,rc
shift right logical vu16
mmx
srl_vu32 ra,rb,rc
shift right logical vu32
mmx
srl_vu64 ra,rb,rc
shift right logical vu64
mmx
srl_vu8 ra,rb,rc
shift right logical vu8
i128
srp_i128 ra,rb,rc,rd
shift right pair i128
base
srp_i32 ra,rb,rc,rd
shift right pair i32
base
srp_i64 ra,rb,rc,rd
shift right pair i64
i128
srp_imm_i128 ra,rb,rc,sd
shift right pair immediate i128
base
srp_imm_i32 ra,rb,rc,sd
shift right pair immediate i32
base
srp_imm_i64 ra,rb,rc,sd
shift right pair immediate i64
memory
st_i128.l ra,rb,simm21
store base i128
memory
st_i16.l ra,rb,simm21
store base i16
memory
st_i32.l ra,rb,simm21
store base i32
memory
st_i64.l ra,rb,simm21
store base i64
memory
st_i8.l ra,rb,simm21
store base i8
memory
st_iprel_i128.l ra,uimm28
store relative i128
memory
st_iprel_i16.l ra,uimm28
store relative i16
memory
st_iprel_i32.l ra,uimm28
store relative i32
memory
st_iprel_i64.l ra,uimm28
store relative i64
memory
st_iprel_i8.l ra,uimm28
store relative i8
memory
st_mia_i128 ra,rb,simm10
store and modify immediate after i128
memory
st_mia_i16 ra,rb,simm10
store and modify immediate after i16
memory
st_mia_i32 ra,rb,simm10
store and modify immediate after i32
memory
st_mia_i64 ra,rb,simm10
store and modify immediate after i64
memory
st_mia_i8 ra,rb,simm10
store and modify immediate after i8
memory
st_mib_i128 ra,rb,simm10
store and modify immediate before i128
memory
st_mib_i16 ra,rb,simm10
store and modify immediate before i16
memory
st_mib_i32 ra,rb,simm10
store and modify immediate before i32
memory
st_mib_i64 ra,rb,simm10
store and modify immediate before i64
memory
st_mib_i8 ra,rb,simm10
store and modify immediate before i8
memory
st_xi32_i128.l ra,rb,rc,scale,simm7
store i32-indexed i128
memory
st_xi32_i16.l ra,rb,rc,scale,simm7
store i32-indexed i16
memory
st_xi32_i32.l ra,rb,rc,scale,simm7
store i32-indexed i32
memory
st_xi32_i64.l ra,rb,rc,scale,simm7
store i32-indexed i64
memory
st_xi32_i8.l ra,rb,rc,scale,simm7
store i32-indexed i8
memory
st_xi64_i128.l ra,rb,rc,scale,simm7
store i64-indexed i128
memory
st_xi64_i16.l ra,rb,rc,scale,simm7
store i64-indexed i16
memory
st_xi64_i32.l ra,rb,rc,scale,simm7
store i64-indexed i32
memory
st_xi64_i64.l ra,rb,rc,scale,simm7
store i64-indexed i64
memory
st_xi64_i8.l ra,rb,rc,scale,simm7
store i64-indexed i8
memory
st_xu32_i128.l ra,rb,rc,scale,simm7
store u32-indexed i128
memory
st_xu32_i16.l ra,rb,rc,scale,simm7
store u32-indexed i16
memory
st_xu32_i32.l ra,rb,rc,scale,simm7
store u32-indexed i32
memory
st_xu32_i64.l ra,rb,rc,scale,simm7
store u32-indexed i64
memory
st_xu32_i8.l ra,rb,rc,scale,simm7
store u32-indexed i8
f16
sub_alt_vf16 ra,rb,rc
subtract alternating vf16
f32
sub_alt_vf32 ra,rb,rc
subtract alternating vf32
f64
sub_alt_vf64 ra,rb,rc
subtract alternating vf64
f128
sub_f128 ra,rb,rc
subtract f128
f16
sub_f16 ra,rb,rc
subtract f16
f32
sub_f32 ra,rb,rc
subtract f32
f64
sub_f64 ra,rb,rc
subtract f64
f16
sub_horiz_vf16 ra,rb,rc
subtract horizontal vf16
f32
sub_horiz_vf32 ra,rb,rc
subtract horizontal vf32
f64
sub_horiz_vf64 ra,rb,rc
subtract horizontal vf64
i128
sub_i128 ra,rb,rc
subtract i128
base
sub_i32 ra,rb,rc
subtract i32
base
sub_i64 ra,rb,rc
subtract i64
mmx
sub_sat_vi16 ra,rb,rc
subtract saturated vi16
mmx
sub_sat_vi32 ra,rb,rc
subtract saturated vi32
mmx
sub_sat_vi64 ra,rb,rc
subtract saturated vi64
mmx
sub_sat_vi8 ra,rb,rc
subtract saturated vi8
mmx
sub_sat_vu16 ra,rb,rc
subtract saturated vu16
mmx
sub_sat_vu32 ra,rb,rc
subtract saturated vu32
mmx
sub_sat_vu64 ra,rb,rc
subtract saturated vu64
mmx
sub_sat_vu8 ra,rb,rc
subtract saturated vu8
base
sub_sub_i64 ra,rb,rc,rd
subtract-subtract i64
base
sub_subb_u64 ra,rb,rc,rd
subtract-subtract with borrow-out u64
base
sub_u32 ra,rb,rc
subtract u32
f16
sub_vf16 ra,rb,rc
subtract vf16
f32
sub_vf32 ra,rb,rc
subtract vf32
f64
sub_vf64 ra,rb,rc
subtract vf64
mmx
sub_vu16 ra,rb,rc
subtract vu16
mmx
sub_vu32 ra,rb,rc
subtract vu32
mmx
sub_vu64 ra,rb,rc
subtract vu64
mmx
sub_vu8 ra,rb,rc
subtract vu8
base
subb_u64 ra,rb,rc
subtract with borrow u64
mmx
subb_vu16 ra,rb,rc
subtract with borrow vu16
mmx
subb_vu32 ra,rb,rc
subtract with borrow vu32
mmx
subb_vu64 ra,rb,rc
subtract with borrow vu64
mmx
subb_vu8 ra,rb,rc
subtract with borrow vu8
base
subo_i64 ra,rb,rc
subtract overflow i64
mmx
subo_vi16 ra,rb,rc
subtract overflow vi16
mmx
subo_vi32 ra,rb,rc
subtract overflow vi32
mmx
subo_vi64 ra,rb,rc
subtract overflow vi64
mmx
subo_vi8 ra,rb,rc
subtract overflow vi8
base
subr_imm_i32.l ra,rb,simm21
subtract reverse i32
base
subr_imm_i64.l ra,rb,simm21
subtract reverse i64
base
subr_imm_u32.l ra,rb,simm21
subtract reverse u32
special
syscall
system call
privileged
sysret
system return
privileged
tpa ra,rb,rc
translate to physical address
f128
trunk_f128 ra,rb
trunk (round to zero) f128
f128
trunk_f128_i128 ra,rb
trunk f128 to i128
f128
trunk_f128_i32 ra,rb
trunk f128 to i32
f128
trunk_f128_i64 ra,rb
trunk f128 to i64
f128
trunk_f128_u128 ra,rb
trunk f128 to u128
f128
trunk_f128_u32 ra,rb
trunk f128 to u32
f128
trunk_f128_u64 ra,rb
trunk f128 to u64
f16
trunk_f16 ra,rb
trunk (round to zero) f16
f16
trunk_f16_i128 ra,rb
trunk f16 to i128
f16
trunk_f16_i32 ra,rb
trunk f16 to i32
f16
trunk_f16_i64 ra,rb
trunk f16 to i64
f16
trunk_f16_u128 ra,rb
trunk f16 to u128
f16
trunk_f16_u32 ra,rb
trunk f16 to u32
f16
trunk_f16_u64 ra,rb
trunk f16 to u64
f32
trunk_f32 ra,rb
trunk (round to zero) f32
f32
trunk_f32_i128 ra,rb
trunk f32 to i128
f32
trunk_f32_i32 ra,rb
trunk f32 to i32
f32
trunk_f32_i64 ra,rb
trunk f32 to i64
f32
trunk_f32_u128 ra,rb
trunk f32 to u128
f32
trunk_f32_u32 ra,rb
trunk f32 to u32
f32
trunk_f32_u64 ra,rb
trunk f32 to u64
f64
trunk_f64 ra,rb
trunk (round to zero) f64
f64
trunk_f64_i128 ra,rb
trunk f64 to i128
f64
trunk_f64_i32 ra,rb
trunk f64 to i32
f64
trunk_f64_i64 ra,rb
trunk f64 to i64
f64
trunk_f64_u128 ra,rb
trunk f64 to u128
f64
trunk_f64_u32 ra,rb
trunk f64 to u32
f64
trunk_f64_u64 ra,rb
trunk f64 to u64
f16
trunk_vf16 ra,rb
round toward zero vf16
f16
trunk_vf16_vi16 ra,rb
trunk vf16 to vi16
f16
trunk_vf16_vu16 ra,rb
trunk vf16 to vu16
f32
trunk_vf32 ra,rb
round toward zero vf32
f32
trunk_vf32_vi32 ra,rb
trunk vf32 to vi32
f32
trunk_vf32_vu32 ra,rb
trunk vf32 to vu32
f64
trunk_vf64 ra,rb
round toward zero vf64
f64
trunk_vf64_vi64 ra,rb
trunk vf64 to vi64
f64
trunk_vf64_vu64 ra,rb
trunk vf64 to vu64
special
undef
undefined instruction
f16
unpack_high_vf16 ra,rb
unpack high part vf16
f32
unpack_high_vf32 ra,rb
unpack high part vf32
f64
unpack_high_vf64 ra,rb
unpack high part vf64
mmx
unpack_high_vi16 ra,rb
unpack high vi16
mmx
unpack_high_vi32 ra,rb
unpack high vi32
mmx
unpack_high_vi8 ra,rb
unpack high vi8
mmx
unpack_high_vu16 ra,rb
unpack high vu16
mmx
unpack_high_vu32 ra,rb
unpack high vu32
mmx
unpack_high_vu8 ra,rb
unpack high vu8
f16
unpack_low_vf16 ra,rb
unpack low part vf16
f32
unpack_low_vf32 ra,rb
unpack low part vf32
f64
unpack_low_vf64 ra,rb
unpack low part vf64
mmx
unpack_low_vi16 ra,rb
unpack low vi16
mmx
unpack_low_vi32 ra,rb
unpack low vi32
mmx
unpack_low_vi8 ra,rb
unpack low vi8
mmx
unpack_low_vu16 ra,rb
unpack low vu16
mmx
unpack_low_vu32 ra,rb
unpack low vu32
mmx
unpack_low_vu8 ra,rb
unpack low vu8
special
write.l uimm28
write formatted string
base
xnor ra,rb,rc
bitwise exclusive not-or
base
xor ra,rb,rc
bitwise exclusive or
base
xor_dec ra,rb,rc
bitwise exclusive-or decremented
base
xor_imm.l ra,rb,simm21
bitwise exclusive or with immediate
base
zext_i16 ra,rb
zero extend i16
base
zext_i32 ra,rb
zero extend i32
base
zext_i64 ra,rb
zero extend i64
base
zext_i8 ra,rb
zero extend i8

machine instruction formats

bundle formats:
slot 3
(42 bits)
slot 2
(42 bits)
slot 1
(42 bits)
template
(2 bits)
short 3 | short 2 | short 1 | template 00
long (slots 3–2) | short 1 | template 01
short 3 | long (slots 2–1) | template 10
very long (slots 3–1) | template 11
slot formats:
format
name
bit numbers
41 40 39 38 37 36 35 34 33 32 31 30 29 28 27 26 25 24 23 22 21 20 19 18 17 16 15 14 13 12 11 10 9 8 7 6 5 4 3 2 1 0
ld_imm opcode ra simm28
call opcode ra simm28x16
mem_iprel opcode ra uimm28
write opcode opx uimm28
jmp opcode opx simm28x16
nop opcode opx simm28
alloc opcode opx framesize 0
alloc_sp opcode opx framesize uimm21
retf opcode opx 0 uimm21
cache_simm opcode opx rb simm21
bin_simm opcode ra rb simm21
bin_uimm opcode ra rb uimm21
loop opcode ra rb opx uimm6 simm11x16
br_eh opcode opx rb 0 simm17x16
br_rr opcode ra rb opx simm17x16
br_rs opcode ra sb opx simm17x16
br_simm opcode ra simm11 simm17x16
br_uimm opcode ra uimm11 simm17x16
nul_simm opcode ra simm11 dn dy opx
nul_uimm opcode ra uimm11 dn dy opx
nul_rs opcode ra sb opx dn dy opx
nul_rr opcode ra rb opx dn dy opx
mid_bin_simm opcode ra rb simm14 opx
r4 opcode ra rb rc rd opx
r3s1 opcode ra rb rc sd opx
r2s2 opcode ra rb sc sd opx
r3s2 opcode ra rb rc sd se
gmemx opcode ra rb rc scale opx simm7
RbcScale opcode 0 rb rc scale opx
Rbc opcode 0 rb rc 0 opx
mspr opcode ra 0 spr 0 opx
r2 opcode ra rb 0 0 opx
r2s1 opcode ra rb sc 0 opx
r3 opcode ra rb rc 0 opx
gmemu opcode ra rb simm10 opx
int opcode 0 rb simm10 opx
NoArgs opcode 0 opx
color descriptions for instruction fields:
primary opcode
extended opcode
general-purpose register number
special-purpose register number
immediate constant
shift (bit count)
modifier
reserved (must be zero)

machine instruction description

instruction
mnemonic
bit numbers
41 40 39 38 37 36 35 34 33 32 31 30 29 28 27 26 25 24 23 22 21 20 19 18 17 16 15 14 13 12 11 10 9 8 7 6 5 4 3 2 1 0
abs_diff_f128
0 ra rb rc 0 1708
abs_diff_f16
0 ra rb rc 0 1228
abs_diff_f32
0 ra rb rc 0 1388
abs_diff_f64
0 ra rb rc 0 1548
abs_diff_i128
0 ra rb rc 0 306
abs_diff_i32
0 ra rb rc 0 146
abs_diff_i64
0 ra rb rc 0 226
abs_diff_vf16
0 ra rb rc 0 1299
abs_diff_vf32
0 ra rb rc 0 1459
abs_diff_vf64
0 ra rb rc 0 1619
abs_f128
0 ra rb 0 0 1706
abs_f16
0 ra rb 0 0 1226
abs_f32
0 ra rb 0 0 1386
abs_f64
0 ra rb 0 0 1546
abs_i128
0 ra rb 0 0 307
abs_i32
0 ra rb 0 0 147
abs_i64
0 ra rb 0 0 227
abs_max_f128
0 ra rb rc 0 1715
abs_max_f16
0 ra rb rc 0 1235
abs_max_f32
0 ra rb rc 0 1395
abs_max_f64
0 ra rb rc 0 1555
abs_max_vf16
0 ra rb rc 0 1314
abs_max_vf32
0 ra rb rc 0 1474
abs_max_vf64
0 ra rb rc 0 1634
abs_min_f128
0 ra rb rc 0 1714
abs_min_f16
0 ra rb rc 0 1234
abs_min_f32
0 ra rb rc 0 1394
abs_min_f64
0 ra rb rc 0 1554
abs_min_vf16
0 ra rb rc 0 1313
abs_min_vf32
0 ra rb rc 0 1473
abs_min_vf64
0 ra rb rc 0 1633
abs_vf16
0 ra rb 0 0 1297
abs_vf32
0 ra rb 0 0 1457
abs_vf64
0 ra rb 0 0 1617
add_add_i64
1 ra rb rc rd 8
add_addc_u64
1 ra rb rc rd 11
add_alt_vf16
0 ra rb rc 0 1336
add_alt_vf32
0 ra rb rc 0 1496
add_alt_vf64
0 ra rb rc 0 1656
add_f128
0 ra rb rc 0 1699
add_f16
0 ra rb rc 0 1219
add_f32
0 ra rb rc 0 1379
add_f64
0 ra rb rc 0 1539
add_horiz_vf16
0 ra rb rc 0 1338
add_horiz_vf32
0 ra rb rc 0 1498
add_horiz_vf64
0 ra rb rc 0 1658
add_i128
0 ra rb rc 0 288
add_i32
0 ra rb rc 0 128
add_i64
0 ra rb rc 0 208
add_imm_i128
70 ra rb simm21
add_imm_i32
24 ra rb simm21
add_imm_i64
30 ra rb simm21
add_imm_u32
25 ra rb simm21
add_sat_vi16
0 ra rb rc 0 1867
add_sat_vi32
0 ra rb rc 0 1931
add_sat_vi64
0 ra rb rc 0 1995
add_sat_vi8
0 ra rb rc 0 1803
add_sat_vu16
0 ra rb rc 0 1866
add_sat_vu32
0 ra rb rc 0 1930
add_sat_vu64
0 ra rb rc 0 1994
add_sat_vu8
0 ra rb rc 0 1802
add_sub_i64
1 ra rb rc rd 9
add_u32
0 ra rb rc 0 161
add_vf16
0 ra rb rc 0 1303
add_vf32
0 ra rb rc 0 1463
add_vf64
0 ra rb rc 0 1623
add_vu16
0 ra rb rc 0 1860
add_vu32
0 ra rb rc 0 1924
add_vu64
0 ra rb rc 0 1988
add_vu8
0 ra rb rc 0 1796
addc_u64
0 ra rb rc 0 245
addc_vu16
0 ra rb rc 0 1864
addc_vu32
0 ra rb rc 0 1928
addc_vu64
0 ra rb rc 0 1992
addc_vu8
0 ra rb rc 0 1800
addo_i64
0 ra rb rc 0 243
addo_vi16
0 ra rb rc 0 1862
addo_vi32
0 ra rb rc 0 1926
addo_vi64
0 ra rb rc 0 1990
addo_vi8
0 ra rb rc 0 1798
aes_dec
0 ra rb rc 0 554
aes_dec_last
0 ra rb rc 0 555
aes_enc
0 ra rb rc 0 552
aes_enc_last
0 ra rb rc 0 553
aes_imc
0 ra rb 0 0 556
aes_keygen_assist
0 ra rb simm10 557
alignup_u64
1 ra rb sc sd 31
alloc
2 3 framesize 0
alloc_sp
2 4 framesize uimm21
amq_cas_i128
11 ra rb rc rd 125
amq_cas_i16
11 ra rb rc rd 113
amq_cas_i32
11 ra rb rc rd 117
amq_cas_i64
11 ra rb rc rd 121
amq_cas_i8
11 ra rb rc rd 109
amq_ld_add_u128
0 ra rb rc 0 1101
amq_ld_add_u16
0 ra rb rc 0 861
amq_ld_add_u32
0 ra rb rc 0 941
amq_ld_add_u64
0 ra rb rc 0 1021
amq_ld_add_u8
0 ra rb rc 0 781
amq_ld_and_u128
0 ra rb rc 0 1105
amq_ld_and_u16
0 ra rb rc 0 865
amq_ld_and_u32
0 ra rb rc 0 945
amq_ld_and_u64
0 ra rb rc 0 1025
amq_ld_and_u8
0 ra rb rc 0 785
amq_ld_i128
0 ra rb 0 0 1089
amq_ld_i16
0 ra rb 0 0 849
amq_ld_i32
0 ra rb 0 0 929
amq_ld_i64
0 ra rb 0 0 1009
amq_ld_i8
0 ra rb 0 0 769
amq_ld_max_i128
0 ra rb rc 0 1121
amq_ld_max_i16
0 ra rb rc 0 881
amq_ld_max_i32
0 ra rb rc 0 961
amq_ld_max_i64
0 ra rb rc 0 1041
amq_ld_max_i8
0 ra rb rc 0 801
amq_ld_max_u128
0 ra rb rc 0 1129
amq_ld_max_u16
0 ra rb rc 0 889
amq_ld_max_u32
0 ra rb rc 0 969
amq_ld_max_u64
0 ra rb rc 0 1049
amq_ld_max_u8
0 ra rb rc 0 809
amq_ld_min_i128
0 ra rb rc 0 1117
amq_ld_min_i16
0 ra rb rc 0 877
amq_ld_min_i32
0 ra rb rc 0 957
amq_ld_min_i64
0 ra rb rc 0 1037
amq_ld_min_i8
0 ra rb rc 0 797
amq_ld_min_u128
0 ra rb rc 0 1125
amq_ld_min_u16
0 ra rb rc 0 885
amq_ld_min_u32
0 ra rb rc 0 965
amq_ld_min_u64
0 ra rb rc 0 1045
amq_ld_min_u8
0 ra rb rc 0 805
amq_ld_or_u128
0 ra rb rc 0 1109
amq_ld_or_u16
0 ra rb rc 0 869
amq_ld_or_u32
0 ra rb rc 0 949
amq_ld_or_u64
0 ra rb rc 0 1029
amq_ld_or_u8
0 ra rb rc 0 789
amq_ld_xor_u128
0 ra rb rc 0 1113
amq_ld_xor_u16
0 ra rb rc 0 873
amq_ld_xor_u32
0 ra rb rc 0 953
amq_ld_xor_u64
0 ra rb rc 0 1033
amq_ld_xor_u8
0 ra rb rc 0 793
amq_swap_u128
0 ra rb rc 0 1093
amq_swap_u16
0 ra rb rc 0 853
amq_swap_u32
0 ra rb rc 0 933
amq_swap_u64
0 ra rb rc 0 1013
amq_swap_u8
0 ra rb rc 0 773
amr_cas_i128
11 ra rb rc rd 126
amr_cas_i16
11 ra rb rc rd 114
amr_cas_i32
11 ra rb rc rd 118
amr_cas_i64
11 ra rb rc rd 122
amr_cas_i8
11 ra rb rc rd 110
amr_ld_add_u128
0 ra rb rc 0 1102
amr_ld_add_u16
0 ra rb rc 0 862
amr_ld_add_u32
0 ra rb rc 0 942
amr_ld_add_u64
0 ra rb rc 0 1022
amr_ld_add_u8
0 ra rb rc 0 782
amr_ld_and_u128
0 ra rb rc 0 1106
amr_ld_and_u16
0 ra rb rc 0 866
amr_ld_and_u32
0 ra rb rc 0 946
amr_ld_and_u64
0 ra rb rc 0 1026
amr_ld_and_u8
0 ra rb rc 0 786
amr_ld_max_i128
0 ra rb rc 0 1122
amr_ld_max_i16
0 ra rb rc 0 882
amr_ld_max_i32
0 ra rb rc 0 962
amr_ld_max_i64
0 ra rb rc 0 1042
amr_ld_max_i8
0 ra rb rc 0 802
amr_ld_max_u128
0 ra rb rc 0 1130
amr_ld_max_u16
0 ra rb rc 0 890
amr_ld_max_u32
0 ra rb rc 0 970
amr_ld_max_u64
0 ra rb rc 0 1050
amr_ld_max_u8
0 ra rb rc 0 810
amr_ld_min_i128
0 ra rb rc 0 1118
amr_ld_min_i16
0 ra rb rc 0 878
amr_ld_min_i32
0 ra rb rc 0 958
amr_ld_min_i64
0 ra rb rc 0 1038
amr_ld_min_i8
0 ra rb rc 0 798
amr_ld_min_u128
0 ra rb rc 0 1126
amr_ld_min_u16
0 ra rb rc 0 886
amr_ld_min_u32
0 ra rb rc 0 966
amr_ld_min_u64
0 ra rb rc 0 1046
amr_ld_min_u8
0 ra rb rc 0 806
amr_ld_or_u128
0 ra rb rc 0 1110
amr_ld_or_u16
0 ra rb rc 0 870
amr_ld_or_u32
0 ra rb rc 0 950
amr_ld_or_u64
0 ra rb rc 0 1030
amr_ld_or_u8
0 ra rb rc 0 790
amr_ld_xor_u128
0 ra rb rc 0 1114
amr_ld_xor_u16
0 ra rb rc 0 874
amr_ld_xor_u32
0 ra rb rc 0 954
amr_ld_xor_u64
0 ra rb rc 0 1034
amr_ld_xor_u8
0 ra rb rc 0 794
amr_st_add_u128
0 0 rb rc 0 1133
amr_st_add_u16
0 0 rb rc 0 893
amr_st_add_u32
0 0 rb rc 0 973
amr_st_add_u64
0 0 rb rc 0 1053
amr_st_add_u8
0 0 rb rc 0 813
amr_st_and_u128
0 0 rb rc 0 1135
amr_st_and_u16
0 0 rb rc 0 895
amr_st_and_u32
0 0 rb rc 0 975
amr_st_and_u64
0 0 rb rc 0 1055
amr_st_and_u8
0 0 rb rc 0 815
amr_st_i128
0 ra rb 0 0 1091
amr_st_i16
0 ra rb 0 0 851
amr_st_i32
0 ra rb 0 0 931
amr_st_i64
0 ra rb 0 0 1011
amr_st_i8
0 ra rb 0 0 771
amr_st_max_i128
0 0 rb rc 0 1143
amr_st_max_i16
0 0 rb rc 0 903
amr_st_max_i32
0 0 rb rc 0 983
amr_st_max_i64
0 0 rb rc 0 1063
amr_st_max_i8
0 0 rb rc 0 823
amr_st_max_u128
0 0 rb rc 0 1147
amr_st_max_u16
0 0 rb rc 0 907
amr_st_max_u32
0 0 rb rc 0 987
amr_st_max_u64
0 0 rb rc 0 1067
amr_st_max_u8
0 0 rb rc 0 827
amr_st_min_i128
0 0 rb rc 0 1141
amr_st_min_i16
0 0 rb rc 0 901
amr_st_min_i32
0 0 rb rc 0 981
amr_st_min_i64
0 0 rb rc 0 1061
amr_st_min_i8
0 0 rb rc 0 821
amr_st_min_u128
0 0 rb rc 0 1145
amr_st_min_u16
0 0 rb rc 0 905
amr_st_min_u32
0 0 rb rc 0 985
amr_st_min_u64
0 0 rb rc 0 1065
amr_st_min_u8
0 0 rb rc 0 825
amr_st_or_u128
0 0 rb rc 0 1137
amr_st_or_u16
0 0 rb rc 0 897
amr_st_or_u32
0 0 rb rc 0 977
amr_st_or_u64
0 0 rb rc 0 1057
amr_st_or_u8
0 0 rb rc 0 817
amr_st_xor_u128
0 0 rb rc 0 1139
amr_st_xor_u16
0 0 rb rc 0 899
amr_st_xor_u32
0 0 rb rc 0 979
amr_st_xor_u64
0 0 rb rc 0 1059
amr_st_xor_u8
0 0 rb rc 0 819
amr_swap_u128
0 ra rb rc 0 1094
amr_swap_u16
0 ra rb rc 0 854
amr_swap_u32
0 ra rb rc 0 934
amr_swap_u64
0 ra rb rc 0 1014
amr_swap_u8
0 ra rb rc 0 774
amx_cas_i128
11 ra rb rc rd 124
amx_cas_i16
11 ra rb rc rd 112
amx_cas_i32
11 ra rb rc rd 116
amx_cas_i64
11 ra rb rc rd 120
amx_cas_i8
11 ra rb rc rd 108
amx_ld_add_u128
0 ra rb rc 0 1100
amx_ld_add_u16
0 ra rb rc 0 860
amx_ld_add_u32
0 ra rb rc 0 940
amx_ld_add_u64
0 ra rb rc 0 1020
amx_ld_add_u8
0 ra rb rc 0 780
amx_ld_and_u128
0 ra rb rc 0 1104
amx_ld_and_u16
0 ra rb rc 0 864
amx_ld_and_u32
0 ra rb rc 0 944
amx_ld_and_u64
0 ra rb rc 0 1024
amx_ld_and_u8
0 ra rb rc 0 784
amx_ld_i128
0 ra rb 0 0 1088
amx_ld_i16
0 ra rb 0 0 848
amx_ld_i32
0 ra rb 0 0 928
amx_ld_i64
0 ra rb 0 0 1008
amx_ld_i8
0 ra rb 0 0 768
amx_ld_max_i128
0 ra rb rc 0 1120
amx_ld_max_i16
0 ra rb rc 0 880
amx_ld_max_i32
0 ra rb rc 0 960
amx_ld_max_i64
0 ra rb rc 0 1040
amx_ld_max_i8
0 ra rb rc 0 800
amx_ld_max_u128
0 ra rb rc 0 1128
amx_ld_max_u16
0 ra rb rc 0 888
amx_ld_max_u32
0 ra rb rc 0 968
amx_ld_max_u64
0 ra rb rc 0 1048
amx_ld_max_u8
0 ra rb rc 0 808
amx_ld_min_i128
0 ra rb rc 0 1116
amx_ld_min_i16
0 ra rb rc 0 876
amx_ld_min_i32
0 ra rb rc 0 956
amx_ld_min_i64
0 ra rb rc 0 1036
amx_ld_min_i8
0 ra rb rc 0 796
amx_ld_min_u128
0 ra rb rc 0 1124
amx_ld_min_u16
0 ra rb rc 0 884
amx_ld_min_u32
0 ra rb rc 0 964
amx_ld_min_u64
0 ra rb rc 0 1044
amx_ld_min_u8
0 ra rb rc 0 804
amx_ld_or_u128
0 ra rb rc 0 1108
amx_ld_or_u16
0 ra rb rc 0 868
amx_ld_or_u32
0 ra rb rc 0 948
amx_ld_or_u64
0 ra rb rc 0 1028
amx_ld_or_u8
0 ra rb rc 0 788
amx_ld_xor_u128
0 ra rb rc 0 1112
amx_ld_xor_u16
0 ra rb rc 0 872
amx_ld_xor_u32
0 ra rb rc 0 952
amx_ld_xor_u64
0 ra rb rc 0 1032
amx_ld_xor_u8
0 ra rb rc 0 792
amx_st_add_u128
0 0 rb rc 0 1132
amx_st_add_u16
0 0 rb rc 0 892
amx_st_add_u32
0 0 rb rc 0 972
amx_st_add_u64
0 0 rb rc 0 1052
amx_st_add_u8
0 0 rb rc 0 812
amx_st_and_u128
0 0 rb rc 0 1134
amx_st_and_u16
0 0 rb rc 0 894
amx_st_and_u32
0 0 rb rc 0 974
amx_st_and_u64
0 0 rb rc 0 1054
amx_st_and_u8
0 0 rb rc 0 814
amx_st_i128
0 ra rb 0 0 1090
amx_st_i16
0 ra rb 0 0 850
amx_st_i32
0 ra rb 0 0 930
amx_st_i64
0 ra rb 0 0 1010
amx_st_i8
0 ra rb 0 0 770
amx_st_max_i128
0 0 rb rc 0 1142
amx_st_max_i16
0 0 rb rc 0 902
amx_st_max_i32
0 0 rb rc 0 982
amx_st_max_i64
0 0 rb rc 0 1062
amx_st_max_i8
0 0 rb rc 0 822
amx_st_max_u128
0 0 rb rc 0 1146
amx_st_max_u16
0 0 rb rc 0 906
amx_st_max_u32
0 0 rb rc 0 986
amx_st_max_u64
0 0 rb rc 0 1066
amx_st_max_u8
0 0 rb rc 0 826
amx_st_min_i128
0 0 rb rc 0 1140
amx_st_min_i16
0 0 rb rc 0 900
amx_st_min_i32
0 0 rb rc 0 980
amx_st_min_i64
0 0 rb rc 0 1060
amx_st_min_i8
0 0 rb rc 0 820
amx_st_min_u128
0 0 rb rc 0 1144
amx_st_min_u16
0 0 rb rc 0 904
amx_st_min_u32
0 0 rb rc 0 984
amx_st_min_u64
0 0 rb rc 0 1064
amx_st_min_u8
0 0 rb rc 0 824
amx_st_or_u128
0 0 rb rc 0 1136
amx_st_or_u16
0 0 rb rc 0 896
amx_st_or_u32
0 0 rb rc 0 976
amx_st_or_u64
0 0 rb rc 0 1056
amx_st_or_u8
0 0 rb rc 0 816
amx_st_xor_u128
0 0 rb rc 0 1138
amx_st_xor_u16
0 0 rb rc 0 898
amx_st_xor_u32
0 0 rb rc 0 978
amx_st_xor_u64
0 0 rb rc 0 1058
amx_st_xor_u8
0 0 rb rc 0 818
amx_swap_u128
0 ra rb rc 0 1092
amx_swap_u16
0 ra rb rc 0 852
amx_swap_u32
0 ra rb rc 0 932
amx_swap_u64
0 ra rb rc 0 1012
amx_swap_u8
0 ra rb rc 0 772
amz_cas_i128
11 ra rb rc rd 127
amz_cas_i16
11 ra rb rc rd 115
amz_cas_i32
11 ra rb rc rd 119
amz_cas_i64
11 ra rb rc rd 123
amz_cas_i8
11 ra rb rc rd 111
amz_ld_add_u128
0 ra rb rc 0 1103
amz_ld_add_u16
0 ra rb rc 0 863
amz_ld_add_u32
0 ra rb rc 0 943
amz_ld_add_u64
0 ra rb rc 0 1023
amz_ld_add_u8
0 ra rb rc 0 783
amz_ld_and_u128
0 ra rb rc 0 1107
amz_ld_and_u16
0 ra rb rc 0 867
amz_ld_and_u32
0 ra rb rc 0 947
amz_ld_and_u64
0 ra rb rc 0 1027
amz_ld_and_u8
0 ra rb rc 0 787
amz_ld_max_i128
0 ra rb rc 0 1123
amz_ld_max_i16
0 ra rb rc 0 883
amz_ld_max_i32
0 ra rb rc 0 963
amz_ld_max_i64
0 ra rb rc 0 1043
amz_ld_max_i8
0 ra rb rc 0 803
amz_ld_max_u128
0 ra rb rc 0 1131
amz_ld_max_u16
0 ra rb rc 0 891
amz_ld_max_u32
0 ra rb rc 0 971
amz_ld_max_u64
0 ra rb rc 0 1051
amz_ld_max_u8
0 ra rb rc 0 811
amz_ld_min_i128
0 ra rb rc 0 1119
amz_ld_min_i16
0 ra rb rc 0 879
amz_ld_min_i32
0 ra rb rc 0 959
amz_ld_min_i64
0 ra rb rc 0 1039
amz_ld_min_i8
0 ra rb rc 0 799
amz_ld_min_u128
0 ra rb rc 0 1127
amz_ld_min_u16
0 ra rb rc 0 887
amz_ld_min_u32
0 ra rb rc 0 967
amz_ld_min_u64
0 ra rb rc 0 1047
amz_ld_min_u8
0 ra rb rc 0 807
amz_ld_or_u128
0 ra rb rc 0 1111
amz_ld_or_u16
0 ra rb rc 0 871
amz_ld_or_u32
0 ra rb rc 0 951
amz_ld_or_u64
0 ra rb rc 0 1031
amz_ld_or_u8
0 ra rb rc 0 791
amz_ld_xor_u128
0 ra rb rc 0 1115
amz_ld_xor_u16
0 ra rb rc 0 875
amz_ld_xor_u32
0 ra rb rc 0 955
amz_ld_xor_u64
0 ra rb rc 0 1035
amz_ld_xor_u8
0 ra rb rc 0 795
amz_swap_u128
0 ra rb rc 0 1095
amz_swap_u16
0 ra rb rc 0 855
amz_swap_u32
0 ra rb rc 0 935
amz_swap_u64
0 ra rb rc 0 1015
amz_swap_u8
0 ra rb rc 0 775
and
0 ra rb rc 0 4
and_dec
0 ra rb rc 0 241
and_imm
20 ra rb simm21
and_neg
0 ra rb rc 0 242
andn
0 ra rb rc 0 7
andn_imm
18 ra rb simm21
avg_vi16
0 ra rb rc 0 1870
avg_vi32
0 ra rb rc 0 1934
avg_vi64
0 ra rb rc 0 1998
avg_vi8
0 ra rb rc 0 1806
avg_vu16
0 ra rb rc 0 1871
avg_vu32
0 ra rb rc 0 1935
avg_vu64
0 ra rb rc 0 1999
avg_vu8
0 ra rb rc 0 1807
bit_clear
0 ra rb rc 0 28
bit_clear_imm
0 ra rb sc 0 29
bit_flip
0 ra rb rc 0 32
bit_flip_imm
0 ra rb sc 0 33
bit_set
0 ra rb rc 0 30
bit_set_imm
0 ra rb sc 0 31
br_bc
3 ra rb 0 simm17x16
br_bc_imm
3 ra sb 1 simm17x16
br_bs
3 ra rb 2 simm17x16
br_bs_imm
3 ra sb 3 simm17x16
br_eq_i128
7 ra rb 0 simm17x16
br_eq_i32
5 ra rb 0 simm17x16
br_eq_i64
6 ra rb 0 simm17x16
br_eq_imm_i128
118 ra simm11 simm17x16
br_eq_imm_i32
106 ra simm11 simm17x16
br_eq_imm_i64
112 ra simm11 simm17x16
br_ge_i128
7 ra rb 3 simm17x16
br_ge_i32
5 ra rb 3 simm17x16
br_ge_i64
6 ra rb 3 simm17x16
br_ge_imm_i128
121 ra simm11 simm17x16
br_ge_imm_i32
109 ra simm11 simm17x16
br_ge_imm_i64
115 ra simm11 simm17x16
br_ge_imm_u128
123 ra uimm11 simm17x16
br_ge_imm_u32
111 ra uimm11 simm17x16
br_ge_imm_u64
117 ra uimm11 simm17x16
br_ge_u128
7 ra rb 5 simm17x16
br_ge_u32
5 ra rb 5 simm17x16
br_ge_u64
6 ra rb 5 simm17x16
br_lt_i128
7 ra rb 2 simm17x16
br_lt_i32
5 ra rb 2 simm17x16
br_lt_i64
6 ra rb 2 simm17x16
br_lt_imm_i128
120 ra simm11 simm17x16
br_lt_imm_i32
108 ra simm11 simm17x16
br_lt_imm_i64
114 ra simm11 simm17x16
br_lt_imm_u128
122 ra uimm11 simm17x16
br_lt_imm_u32
110 ra uimm11 simm17x16
br_lt_imm_u64
116 ra uimm11 simm17x16
br_lt_u128
7 ra rb 4 simm17x16
br_lt_u32
5 ra rb 4 simm17x16
br_lt_u64
6 ra rb 4 simm17x16
br_mask_all
124 ra uimm11 simm17x16
br_mask_any
127 ra uimm11 simm17x16
br_mask_none
126 ra uimm11 simm17x16
br_mask_notall
125 ra uimm11 simm17x16
br_ne_i128
7 ra rb 1 simm17x16
br_ne_i32
5 ra rb 1 simm17x16
br_ne_i64
6 ra rb 1 simm17x16
br_ne_imm_i128
119 ra simm11 simm17x16
br_ne_imm_i32
107 ra simm11 simm17x16
br_ne_imm_i64
113 ra simm11 simm17x16
br_o_f128
7 ra rb 10 simm17x16
br_o_f32
5 ra rb 10 simm17x16
br_o_f64
6 ra rb 10 simm17x16
br_oeq_f128
7 ra rb 6 simm17x16
br_oeq_f32
5 ra rb 6 simm17x16
br_oeq_f64
6 ra rb 6 simm17x16
br_oge_f128
7 ra rb 9 simm17x16
br_oge_f32
5 ra rb 9 simm17x16
br_oge_f64
6 ra rb 9 simm17x16
br_olt_f128
7 ra rb 8 simm17x16
br_olt_f32
5 ra rb 8 simm17x16
br_olt_f64
6 ra rb 8 simm17x16
br_one_f128
7 ra rb 7 simm17x16
br_one_f32
5 ra rb 7 simm17x16
br_one_f64
6 ra rb 7 simm17x16
br_u_f128
7 ra rb 15 simm17x16
br_u_f32
5 ra rb 15 simm17x16
br_u_f64
6 ra rb 15 simm17x16
br_ueq_f128
7 ra rb 11 simm17x16
br_ueq_f32
5 ra rb 11 simm17x16
br_ueq_f64
6 ra rb 11 simm17x16
br_uge_f128
7 ra rb 14 simm17x16
br_uge_f32
5 ra rb 14 simm17x16
br_uge_f64
6 ra rb 14 simm17x16
br_ult_f128
7 ra rb 13 simm17x16
br_ult_f32
5 ra rb 13 simm17x16
br_ult_f64
6 ra rb 13 simm17x16
br_une_f128
7 ra rb 12 simm17x16
br_une_f32
5 ra rb 12 simm17x16
br_une_f64
6 ra rb 12 simm17x16
call
104 ra simm28x16
call_mi
1 ra rb simm14 100
call_plt
103 ra uimm28
call_ri
0 ra rb rc 0 16
call_rvt
1 ra rb simm14 101
ceil_f128
0 ra rb 0 0 1717
ceil_f16
0 ra rb 0 0 1237
ceil_f32
0 ra rb 0 0 1397
ceil_f64
0 ra rb 0 0 1557
ceil_vf16
0 ra rb 0 0 1316
ceil_vf32
0 ra rb 0 0 1476
ceil_vf64
0 ra rb 0 0 1636
class_f128
0 ra rb simm10 1696
class_f16
0 ra rb simm10 1216
class_f32
0 ra rb simm10 1376
class_f64
0 ra rb simm10 1536
clmul_hh
0 ra rb rc 0 546
clmul_hl
0 ra rb rc 0 545
clmul_ll
0 ra rb rc 0 544
cmov_eq_i128
11 ra rb rc rd 9
cmov_eq_i32
11 ra rb rc rd 1
cmov_eq_i64
11 ra rb rc rd 5
cmov_le_i128
11 ra rb rc rd 11
cmov_le_i32
11 ra rb rc rd 3
cmov_le_i64
11 ra rb rc rd 7
cmov_lsb
11 ra rb rc rd 0
cmov_lt_i128
11 ra rb rc rd 10
cmov_lt_i32
11 ra rb rc rd 2
cmov_lt_i64
11 ra rb rc rd 6
cmp_eq_i128
0 ra rb rc 0 300
cmp_eq_i32
0 ra rb rc 0 140
cmp_eq_i64
0 ra rb rc 0 220
cmp_eq_imm_i128
64 ra rb simm21
cmp_eq_imm_i32
58 ra rb simm21
cmp_eq_imm_i64
52 ra rb simm21
cmp_eq_vi16
0 ra rb rc 0 1872
cmp_eq_vi32
0 ra rb rc 0 1936
cmp_eq_vi64
0 ra rb rc 0 2000
cmp_eq_vi8
0 ra rb rc 0 1808
cmp_ge_i128
0 ra rb rc 0 303
cmp_ge_i32
0 ra rb rc 0 143
cmp_ge_i64
0 ra rb rc 0 223
cmp_ge_imm_i128
67 ra rb simm21
cmp_ge_imm_i32
61 ra rb simm21
cmp_ge_imm_i64
55 ra rb simm21
cmp_ge_imm_u128
69 ra rb uimm21
cmp_ge_imm_u32
63 ra rb uimm21
cmp_ge_imm_u64
57 ra rb uimm21
cmp_ge_u128
0 ra rb rc 0 305
cmp_ge_u32
0 ra rb rc 0 145
cmp_ge_u64
0 ra rb rc 0 225
cmp_lt_i128
0 ra rb rc 0 302
cmp_lt_i32
0 ra rb rc 0 142
cmp_lt_i64
0 ra rb rc 0 222
cmp_lt_imm_i128
66 ra rb simm21
cmp_lt_imm_i32
60 ra rb simm21
cmp_lt_imm_i64
54 ra rb simm21
cmp_lt_imm_u128
68 ra rb uimm21
cmp_lt_imm_u32
62 ra rb uimm21
cmp_lt_imm_u64
56 ra rb uimm21
cmp_lt_u128
0 ra rb rc 0 304
cmp_lt_u32
0 ra rb rc 0 144
cmp_lt_u64
0 ra rb rc 0 224
cmp_lt_vi16
0 ra rb rc 0 1873
cmp_lt_vi32
0 ra rb rc 0 1937
cmp_lt_vi64
0 ra rb rc 0 2001
cmp_lt_vi8
0 ra rb rc 0 1809
cmp_lt_vu16
0 ra rb rc 0 1874
cmp_lt_vu32
0 ra rb rc 0 1938
cmp_lt_vu64
0 ra rb rc 0 2002
cmp_lt_vu8
0 ra rb rc 0 1810
cmp_ne_i128
0 ra rb rc 0 301
cmp_ne_i32
0 ra rb rc 0 141
cmp_ne_i64
0 ra rb rc 0 221
cmp_ne_imm_i128
65 ra rb simm21
cmp_ne_imm_i32
59 ra rb simm21
cmp_ne_imm_i64
53 ra rb simm21
cmp_o_f128
0 ra rb rc 0 1728
cmp_o_f16
0 ra rb rc 0 1248
cmp_o_f32
0 ra rb rc 0 1408
cmp_o_f64
0 ra rb rc 0 1568
cmp_o_vf16
0 ra rb rc 0 1328
cmp_o_vf32
0 ra rb rc 0 1488
cmp_o_vf64
0 ra rb rc 0 1648
cmp_oeq_f128
0 ra rb rc 0 1724
cmp_oeq_f16
0 ra rb rc 0 1244
cmp_oeq_f32
0 ra rb rc 0 1404
cmp_oeq_f64
0 ra rb rc 0 1564
cmp_oeq_vf16
0 ra rb rc 0 1324
cmp_oeq_vf32
0 ra rb rc 0 1484
cmp_oeq_vf64
0 ra rb rc 0 1644
cmp_oge_f128
0 ra rb rc 0 1727
cmp_oge_f16
0 ra rb rc 0 1247
cmp_oge_f32
0 ra rb rc 0 1407
cmp_oge_f64
0 ra rb rc 0 1567
cmp_oge_vf16
0 ra rb rc 0 1327
cmp_oge_vf32
0 ra rb rc 0 1487
cmp_oge_vf64
0 ra rb rc 0 1647
cmp_olt_f128
0 ra rb rc 0 1726
cmp_olt_f16
0 ra rb rc 0 1246
cmp_olt_f32
0 ra rb rc 0 1406
cmp_olt_f64
0 ra rb rc 0 1566
cmp_olt_vf16
0 ra rb rc 0 1326
cmp_olt_vf32
0 ra rb rc 0 1486
cmp_olt_vf64
0 ra rb rc 0 1646
cmp_one_f128
0 ra rb rc 0 1725
cmp_one_f16
0 ra rb rc 0 1245
cmp_one_f32
0 ra rb rc 0 1405
cmp_one_f64
0 ra rb rc 0 1565
cmp_one_vf16
0 ra rb rc 0 1325
cmp_one_vf32
0 ra rb rc 0 1485
cmp_one_vf64
0 ra rb rc 0 1645
cmp_u_f128
0 ra rb rc 0 1733
cmp_u_f16
0 ra rb rc 0 1253
cmp_u_f32
0 ra rb rc 0 1413
cmp_u_f64
0 ra rb rc 0 1573
cmp_u_vf16
0 ra rb rc 0 1333
cmp_u_vf32
0 ra rb rc 0 1493
cmp_u_vf64
0 ra rb rc 0 1653
cmp_ueq_f128
0 ra rb rc 0 1729
cmp_ueq_f16
0 ra rb rc 0 1249
cmp_ueq_f32
0 ra rb rc 0 1409
cmp_ueq_f64
0 ra rb rc 0 1569
cmp_ueq_vf16
0 ra rb rc 0 1329
cmp_ueq_vf32
0 ra rb rc 0 1489
cmp_ueq_vf64
0 ra rb rc 0 1649
cmp_uge_f128
0 ra rb rc 0 1732
cmp_uge_f16
0 ra rb rc 0 1252
cmp_uge_f32
0 ra rb rc 0 1412
cmp_uge_f64
0 ra rb rc 0 1572
cmp_uge_vf16
0 ra rb rc 0 1332
cmp_uge_vf32
0 ra rb rc 0 1492
cmp_uge_vf64
0 ra rb rc 0 1652
cmp_ult_f128
0 ra rb rc 0 1731
cmp_ult_f16
0 ra rb rc 0 1251
cmp_ult_f32
0 ra rb rc 0 1411
cmp_ult_f64
0 ra rb rc 0 1571
cmp_ult_vf16
0 ra rb rc 0 1331
cmp_ult_vf32
0 ra rb rc 0 1491
cmp_ult_vf64
0 ra rb rc 0 1651
cmp_une_f128
0 ra rb rc 0 1730
cmp_une_f16
0 ra rb rc 0 1250
cmp_une_f32
0 ra rb rc 0 1410
cmp_une_f64
0 ra rb rc 0 1570
cmp_une_vf16
0 ra rb rc 0 1330
cmp_une_vf32
0 ra rb rc 0 1490
cmp_une_vf64
0 ra rb rc 0 1650
cnt_lz
0 ra rb sc 0 25
cnt_pop
0 ra rb sc 0 24
cnt_tz
0 ra rb sc 0 26
cpuid
0 ra rb simm10 513
crc32c
1 ra rb rc rd 32
cvt_f128_f16
0 ra rb 0 0 1757
cvt_f128_f32
0 ra rb 0 0 1756
cvt_f128_f64
0 ra rb 0 0 1755
cvt_f32_f16
0 ra rb 0 0 1274
cvt_f64_f16
0 ra rb 0 0 1275
cvt_f64_f32
0 ra rb 0 0 1433
cvt_i128_f128
0 ra rb 0 0 1746
cvt_i128_f16
0 ra rb 0 0 1266
cvt_i128_f32
0 ra rb 0 0 1426
cvt_i128_f64
0 ra rb 0 0 1586
cvt_i32_f128
0 ra rb 0 0 1738
cvt_i32_f16
0 ra rb 0 0 1258
cvt_i32_f32
0 ra rb 0 0 1418
cvt_i32_f64
0 ra rb 0 0 1578
cvt_i64_f128
0 ra rb 0 0 1742
cvt_i64_f16
0 ra rb 0 0 1262
cvt_i64_f32
0 ra rb 0 0 1422
cvt_i64_f64
0 ra rb 0 0 1582
cvt_u128_f128
0 ra rb 0 0 1747
cvt_u128_f16
0 ra rb 0 0 1267
cvt_u128_f32
0 ra rb 0 0 1427
cvt_u128_f64
0 ra rb 0 0 1587
cvt_u32_f128
0 ra rb 0 0 1739
cvt_u32_f16
0 ra rb 0 0 1259
cvt_u32_f32
0 ra rb 0 0 1419
cvt_u32_f64
0 ra rb 0 0 1579
cvt_u64_f128
0 ra rb 0 0 1743
cvt_u64_f16
0 ra rb 0 0 1263
cvt_u64_f32
0 ra rb 0 0 1423
cvt_u64_f64
0 ra rb 0 0 1583
cvt_vi16_vf16
0 ra rb 0 0 1349
cvt_vi32_vf32
0 ra rb 0 0 1509
cvt_vi64_vf64
0 ra rb 0 0 1669
cvt_vu16_vf16
0 ra rb 0 0 1350
cvt_vu32_vf32
0 ra rb 0 0 1510
cvt_vu64_vf64
0 ra rb 0 0 1670
dcbf
2 17 rb simm21
dcbi
2 18 rb simm21
dcbt
2 16 rb simm21
dep
23 ra rb rc sd se
dep_r
1 ra rb rc rd 43
div_f128
0 ra rb rc 0 1704
div_f16
0 ra rb rc 0 1224
div_f32
0 ra rb rc 0 1384
div_f64
0 ra rb rc 0 1544
div_i128
0 ra rb rc 0 308
div_i32
0 ra rb rc 0 148
div_i64
0 ra rb rc 0 228
div_imm_i32
40 ra rb simm21
div_imm_i64
36 ra rb simm21
div_imm_u32
41 ra rb uimm21
div_imm_u64
37 ra rb uimm21
div_u128
0 ra rb rc 0 309
div_u32
0 ra rb rc 0 149
div_u64
0 ra rb rc 0 229
div_vf16
0 ra rb rc 0 1308
div_vf32
0 ra rb rc 0 1468
div_vf64
0 ra rb rc 0 1628
dot_vf16
0 ra rb rc 0 1341
dot_vf32
0 ra rb rc 0 1501
dot_vf64
0 ra rb rc 0 1661
eh_adj
2 8 simm28x16
eh_catch
2 10 rb 0 simm17x16
eh_next
2 11 rb 0 simm17x16
eh_throw
2 9 rb simm21
extend_f16_f128
0 ra rb 0 0 1754
extend_f16_f32
0 ra rb 0 0 1272
extend_f16_f64
0 ra rb 0 0 1273
extend_f32_f128
0 ra rb 0 0 1752
extend_f32_f64
0 ra rb 0 0 1432
extend_f64_f128
0 ra rb 0 0 1753
fence_a
0 0 564
fence_ar
0 0 566
fence_r
0 0 565
fence_sc
0 0 567
floor_f128
0 ra rb 0 0 1718
floor_f16
0 ra rb 0 0 1238
floor_f32
0 ra rb 0 0 1398
floor_f64
0 ra rb 0 0 1558
floor_vf16
0 ra rb 0 0 1317
floor_vf32
0 ra rb 0 0 1477
floor_vf64
0 ra rb 0 0 1637
get_dbr
0 ra rb simm10 520
get_ibr
0 ra rb simm10 522
get_mr
0 ra rb simm10 524
get_spr
0 ra 0 spr 0 517
gtb
0 ra rb 0 0 22
halt
0 0 528
icbi
2 19 rb simm21
int
0 0 rb simm10 514
jmp
2 1 simm28x16
jmp_mi
0 0 rb rc scale 563
jmp_r
0 0 rb rc scale 12
jmp_t
0 0 rb rc 0 13
jmp_t_i32
0 0 rb rc 0 14
jmp_t_u32
0 0 rb rc 0 15
ld_i128
84 ra rb simm21
ld_i16
76 ra rb simm21
ld_i32
79 ra rb simm21
ld_i64
82 ra rb simm21
ld_i8
73 ra rb simm21
ld_imm
16 ra simm28
ld_imm_f32
16 ra simm28
ld_imm_f64
16 ra simm28
ld_imm_high
17 ra simm28
ld_iprel_f128
100 ra uimm28
ld_iprel_f32
94 ra uimm28
ld_iprel_f64
97 ra uimm28
ld_iprel_i128
100 ra uimm28
ld_iprel_i16
92 ra uimm28
ld_iprel_i32
95 ra uimm28
ld_iprel_i64
98 ra uimm28
ld_iprel_i8
89 ra uimm28
ld_iprel_u16
91 ra uimm28
ld_iprel_u32
94 ra uimm28
ld_iprel_u64
97 ra uimm28
ld_iprel_u8
88 ra uimm28
ld_mia_i128
0 ra rb simm10 108
ld_mia_i16
0 ra rb simm10 100
ld_mia_i32
0 ra rb simm10 103
ld_mia_i64
0 ra rb simm10 106
ld_mia_i8
0 ra rb simm10 97
ld_mia_u16
0 ra rb simm10 99
ld_mia_u32
0 ra rb simm10 102
ld_mia_u64
0 ra rb simm10 105
ld_mia_u8
0 ra rb simm10 96
ld_mib_i128
0 ra rb simm10 124
ld_mib_i16
0 ra rb simm10 116
ld_mib_i32
0 ra rb simm10 119
ld_mib_i64
0 ra rb simm10 122
ld_mib_i8
0 ra rb simm10 113
ld_mib_u16
0 ra rb simm10 115
ld_mib_u32
0 ra rb simm10 118
ld_mib_u64
0 ra rb simm10 121
ld_mib_u8
0 ra rb simm10 112
ld_u16
75 ra rb simm21
ld_u32
78 ra rb simm21
ld_u64
81 ra rb simm21
ld_u8
72 ra rb simm21
ld_xi32_i128
9 ra rb rc scale 12 simm7
ld_xi32_i16
9 ra rb rc scale 4 simm7
ld_xi32_i32
9 ra rb rc scale 7 simm7
ld_xi32_i64
9 ra rb rc scale 10 simm7
ld_xi32_i8
9 ra rb rc scale 1 simm7
ld_xi32_u16
9 ra rb rc scale 3 simm7
ld_xi32_u32
9 ra rb rc scale 6 simm7
ld_xi32_u64
9 ra rb rc scale 9 simm7
ld_xi32_u8
9 ra rb rc scale 0 simm7
ld_xi64_i128
8 ra rb rc scale 12 simm7
ld_xi64_i16
8 ra rb rc scale 4 simm7
ld_xi64_i32
8 ra rb rc scale 7 simm7
ld_xi64_i64
8 ra rb rc scale 10 simm7
ld_xi64_i8
8 ra rb rc scale 1 simm7
ld_xi64_u16
8 ra rb rc scale 3 simm7
ld_xi64_u32
8 ra rb rc scale 6 simm7
ld_xi64_u64
8 ra rb rc scale 9 simm7
ld_xi64_u8
8 ra rb rc scale 0 simm7
ld_xu32_i128
10 ra rb rc scale 12 simm7
ld_xu32_i16
10 ra rb rc scale 4 simm7
ld_xu32_i32
10 ra rb rc scale 7 simm7
ld_xu32_i64
10 ra rb rc scale 10 simm7
ld_xu32_i8
10 ra rb rc scale 1 simm7
ld_xu32_u16
10 ra rb rc scale 3 simm7
ld_xu32_u32
10 ra rb rc scale 6 simm7
ld_xu32_u64
10 ra rb rc scale 9 simm7
ld_xu32_u8
10 ra rb rc scale 0 simm7
lda_iprel
102 ra uimm28
lda_n
1 ra rb simm14 102
lda_nrc
1 ra rb simm14 103
lda_r
105 ra simm28x16
lda_xi32
9 ra rb rc scale 14 simm7
lda_xi64
8 ra rb rc scale 14 simm7
lda_xu32
10 ra rb rc scale 14 simm7
madd_alt_vf16
11 ra rb rc rd 96
madd_alt_vf32
11 ra rb rc rd 72
madd_alt_vf64
11 ra rb rc rd 48
madd_f128
11 ra rb rc rd 20
madd_f16
11 ra rb rc rd 84
madd_f32
11 ra rb rc rd 60
madd_f64
11 ra rb rc rd 36
madd_vf16
11 ra rb rc rd 92
madd_vf32
11 ra rb rc rd 68
madd_vf64
11 ra rb rc rd 44
max_f128
0 ra rb rc 0 1711
max_f16
0 ra rb rc 0 1231
max_f32
0 ra rb rc 0 1391
max_f64
0 ra rb rc 0 1551
max_i128
0 ra rb rc 0 312
max_i32
0 ra rb rc 0 152
max_i64
0 ra rb rc 0 232
max_imm_i32
48 ra rb simm21
max_imm_i64
44 ra rb simm21
max_imm_u32
49 ra rb uimm21
max_imm_u64
45 ra rb uimm21
max_u128
0 ra rb rc 0 313
max_u32
0 ra rb rc 0 153
max_u64
0 ra rb rc 0 233
max_vf16
0 ra rb rc 0 1310
max_vf32
0 ra rb rc 0 1470
max_vf64
0 ra rb rc 0 1630
max_vi16
0 ra rb rc 0 1856
max_vi32
0 ra rb rc 0 1920
max_vi64
0 ra rb rc 0 1984
max_vi8
0 ra rb rc 0 1792
max_vu16
0 ra rb rc 0 1857
max_vu32
0 ra rb rc 0 1921
max_vu64
0 ra rb rc 0 1985
max_vu8
0 ra rb rc 0 1793
maxnum_f128
0 ra rb rc 0 1713
maxnum_f16
0 ra rb rc 0 1233
maxnum_f32
0 ra rb rc 0 1393
maxnum_f64
0 ra rb rc 0 1553
maxnum_vf16
0 ra rb rc 0 1312
maxnum_vf32
0 ra rb rc 0 1472
maxnum_vf64
0 ra rb rc 0 1632
mbgath
0 ra rb rc 0 20
mbscat
0 ra rb rc 0 21
mbsel
1 ra rb rc rd 28
merge_f128
11 ra rb rc rd 24
merge_f16
11 ra rb rc rd 88
merge_f32
11 ra rb rc rd 64
merge_f64
11 ra rb rc rd 40
merge_high_vf16
0 ra rb rc 0 1343
merge_high_vf32
0 ra rb rc 0 1503
merge_high_vf64
0 ra rb rc 0 1663
merge_high_vu16
0 ra rb rc 0 1883
merge_high_vu32
0 ra rb rc 0 1947
merge_high_vu64
0 ra rb rc 0 2011
merge_high_vu8
0 ra rb rc 0 1819
merge_low_vf16
0 ra rb rc 0 1342
merge_low_vf32
0 ra rb rc 0 1502
merge_low_vf64
0 ra rb rc 0 1662
merge_low_vu16
0 ra rb rc 0 1884
merge_low_vu32
0 ra rb rc 0 1948
merge_low_vu64
0 ra rb rc 0 2012
merge_low_vu8
0 ra rb rc 0 1820
merge_vf16
11 ra rb rc rd 98
merge_vf32
11 ra rb rc rd 74
merge_vf64
11 ra rb rc rd 50
min_f128
0 ra rb rc 0 1710
min_f16
0 ra rb rc 0 1230
min_f32
0 ra rb rc 0 1390
min_f64
0 ra rb rc 0 1550
min_i128
0 ra rb rc 0 314
min_i32
0 ra rb rc 0 154
min_i64
0 ra rb rc 0 234
min_imm_i32
50 ra rb simm21
min_imm_i64
46 ra rb simm21
min_imm_u32
51 ra rb uimm21
min_imm_u64
47 ra rb uimm21
min_u128
0 ra rb rc 0 315
min_u32
0 ra rb rc 0 155
min_u64
0 ra rb rc 0 235
min_vf16
0 ra rb rc 0 1309
min_vf32
0 ra rb rc 0 1469
min_vf64
0 ra rb rc 0 1629
min_vi16
0 ra rb rc 0 1858
min_vi32
0 ra rb rc 0 1922
min_vi64
0 ra rb rc 0 1986
min_vi8
0 ra rb rc 0 1794
min_vu16
0 ra rb rc 0 1859
min_vu32
0 ra rb rc 0 1923
min_vu64
0 ra rb rc 0 1987
min_vu8
0 ra rb rc 0 1795
minnum_f128
0 ra rb rc 0 1712
minnum_f16
0 ra rb rc 0 1232
minnum_f32
0 ra rb rc 0 1392
minnum_f64
0 ra rb rc 0 1552
minnum_vf16
0 ra rb rc 0 1311
minnum_vf32
0 ra rb rc 0 1471
minnum_vf64
0 ra rb rc 0 1631
mov
0 ra rb 0 0 1
mov2
1 ra rb rc rd 30
mprobe
0 ra rb rc 0 512
msub_alt_vf16
11 ra rb rc rd 97
msub_alt_vf32
11 ra rb rc rd 73
msub_alt_vf64
11 ra rb rc rd 49
msub_f128
11 ra rb rc rd 21
msub_f16
11 ra rb rc rd 85
msub_f32
11 ra rb rc rd 61
msub_f64
11 ra rb rc rd 37
msub_vf16
11 ra rb rc rd 93
msub_vf32
11 ra rb rc rd 69
msub_vf64
11 ra rb rc rd 45
mul_add
11 ra rb rc rd 12
mul_f128
0 ra rb rc 0 1702
mul_f16
0 ra rb rc 0 1222
mul_f32
0 ra rb rc 0 1382
mul_f64
0 ra rb rc 0 1542
mul_h
0 ra rb rc 0 247
mul_horiz_vf16
0 ra rb rc 0 1340
mul_horiz_vf32
0 ra rb rc 0 1500
mul_horiz_vf64
0 ra rb rc 0 1660
mul_i128
0 ra rb rc 0 290
mul_i32
0 ra rb rc 0 130
mul_i64
0 ra rb rc 0 210
mul_imm_i32
28 ra rb simm21
mul_imm_i64
32 ra rb simm21
mul_imm_u32
29 ra rb uimm21
mul_sub
11 ra rb rc rd 13
mul_subr
11 ra rb rc rd 14
mul_u32
0 ra rb rc 0 160
mul_vf16
0 ra rb rc 0 1306
mul_vf32
0 ra rb rc 0 1466
mul_vf64
0 ra rb rc 0 1626
nabs_diff_f128
0 ra rb rc 0 1709
nabs_diff_f16
0 ra rb rc 0 1229
nabs_diff_f32
0 ra rb rc 0 1389
nabs_diff_f64
0 ra rb rc 0 1549
nabs_diff_vf16
0 ra rb rc 0 1300
nabs_diff_vf32
0 ra rb rc 0 1460
nabs_diff_vf64
0 ra rb rc 0 1620
nabs_f128
0 ra rb 0 0 1707
nabs_f16
0 ra rb 0 0 1227
nabs_f32
0 ra rb 0 0 1387
nabs_f64
0 ra rb 0 0 1547
nabs_vf16
0 ra rb 0 0 1298
nabs_vf32
0 ra rb 0 0 1458
nabs_vf64
0 ra rb 0 0 1618
nadd_f128
0 ra rb rc 0 1701
nadd_f16
0 ra rb rc 0 1221
nadd_f32
0 ra rb rc 0 1381
nadd_f64
0 ra rb rc 0 1541
nadd_vf16
0 ra rb rc 0 1305
nadd_vf32
0 ra rb rc 0 1465
nadd_vf64
0 ra rb rc 0 1625
nand
0 ra rb rc 0 8
nearbyint_f128
0 ra rb 0 0 1721
nearbyint_f16
0 ra rb 0 0 1241
nearbyint_f32
0 ra rb 0 0 1401
nearbyint_f64
0 ra rb 0 0 1561
nearbyint_vf16
0 ra rb 0 0 1320
nearbyint_vf32
0 ra rb 0 0 1480
nearbyint_vf64
0 ra rb 0 0 1640
neg_f128
0 ra rb 0 0 1705
neg_f16
0 ra rb 0 0 1225
neg_f32
0 ra rb 0 0 1385
neg_f64
0 ra rb 0 0 1545
neg_i128
0 ra rb 0 0 291
neg_i32
0 ra rb 0 0 131
neg_i64
0 ra rb 0 0 211
neg_vf16
0 ra rb 0 0 1296
neg_vf32
0 ra rb 0 0 1456
neg_vf64
0 ra rb 0 0 1616
nmadd_f128
11 ra rb rc rd 22
nmadd_f16
11 ra rb rc rd 86
nmadd_f32
11 ra rb rc rd 62
nmadd_f64
11 ra rb rc rd 38
nmadd_vf16
11 ra rb rc rd 94
nmadd_vf32
11 ra rb rc rd 70
nmadd_vf64
11 ra rb rc rd 46
nmsub_f128
11 ra rb rc rd 23
nmsub_f16
11 ra rb rc rd 87
nmsub_f32
11 ra rb rc rd 63
nmsub_f64
11 ra rb rc rd 39
nmsub_vf16
11 ra rb rc rd 95
nmsub_vf32
11 ra rb rc rd 71
nmsub_vf64
11 ra rb rc rd 47
nmul_f128
0 ra rb rc 0 1703
nmul_f16
0 ra rb rc 0 1223
nmul_f32
0 ra rb rc 0 1383
nmul_f64
0 ra rb rc 0 1543
nmul_vf16
0 ra rb rc 0 1307
nmul_vf32
0 ra rb rc 0 1467
nmul_vf64
0 ra rb rc 0 1627
nop
2 0 simm28
nor
0 ra rb rc 0 9
not
0 ra rb 0 0 3
nul_bc
1 ra rb 0 dn dy 0
nul_bc_imm
1 ra sb 1 dn dy 0
nul_bs
1 ra rb 2 dn dy 0
nul_bs_imm
1 ra sb 3 dn dy 0
nul_eq_i128
1 ra rb 0 dn dy 3
nul_eq_i32
1 ra rb 0 dn dy 1
nul_eq_i64
1 ra rb 0 dn dy 2
nul_eq_imm_i128
1 ra simm11 dn dy 118
nul_eq_imm_i32
1 ra simm11 dn dy 106
nul_eq_imm_i64
1 ra simm11 dn dy 112
nul_ge_i128
1 ra rb 3 dn dy 3
nul_ge_i32
1 ra rb 3 dn dy 1
nul_ge_i64
1 ra rb 3 dn dy 2
nul_ge_imm_i128
1 ra simm11 dn dy 121
nul_ge_imm_i32
1 ra simm11 dn dy 109
nul_ge_imm_i64
1 ra simm11 dn dy 115
nul_ge_imm_u128
1 ra uimm11 dn dy 123
nul_ge_imm_u32
1 ra uimm11 dn dy 111
nul_ge_imm_u64
1 ra uimm11 dn dy 117
nul_ge_u128
1 ra rb 5 dn dy 3
nul_ge_u32
1 ra rb 5 dn dy 1
nul_ge_u64
1 ra rb 5 dn dy 2
nul_lt_i128
1 ra rb 2 dn dy 3
nul_lt_i32
1 ra rb 2 dn dy 1
nul_lt_i64
1 ra rb 2 dn dy 2
nul_lt_imm_i128
1 ra simm11 dn dy 120
nul_lt_imm_i32
1 ra simm11 dn dy 108
nul_lt_imm_i64
1 ra simm11 dn dy 114
nul_lt_imm_u128
1 ra uimm11 dn dy 122
nul_lt_imm_u32
1 ra uimm11 dn dy 110
nul_lt_imm_u64
1 ra uimm11 dn dy 116
nul_lt_u128
1 ra rb 4 dn dy 3
nul_lt_u32
1 ra rb 4 dn dy 1
nul_lt_u64
1 ra rb 4 dn dy 2
nul_mask_all
1 ra uimm11 dn dy 124
nul_mask_any
1 ra uimm11 dn dy 127
nul_mask_none
1 ra uimm11 dn dy 126
nul_mask_notall
1 ra uimm11 dn dy 125
nul_ne_i128
1 ra rb 1 dn dy 3
nul_ne_i32
1 ra rb 1 dn dy 1
nul_ne_i64
1 ra rb 1 dn dy 2
nul_ne_imm_i128
1 ra simm11 dn dy 119
nul_ne_imm_i32
1 ra simm11 dn dy 107
nul_ne_imm_i64
1 ra simm11 dn dy 113
nul_o_f128
1 ra rb 10 dn dy 3
nul_o_f32
1 ra rb 10 dn dy 1
nul_o_f64
1 ra rb 10 dn dy 2
nul_oeq_f128
1 ra rb 6 dn dy 3
nul_oeq_f32
1 ra rb 6 dn dy 1
nul_oeq_f64
1 ra rb 6 dn dy 2
nul_oge_f128
1 ra rb 9 dn dy 3
nul_oge_f32
1 ra rb 9 dn dy 1
nul_oge_f64
1 ra rb 9 dn dy 2
nul_olt_f128
1 ra rb 8 dn dy 3
nul_olt_f32
1 ra rb 8 dn dy 1
nul_olt_f64
1 ra rb 8 dn dy 2
nul_one_f128
1 ra rb 7 dn dy 3
nul_one_f32
1 ra rb 7 dn dy 1
nul_one_f64
1 ra rb 7 dn dy 2
nul_u_f128
1 ra rb 15 dn dy 3
nul_u_f32
1 ra rb 15 dn dy 1
nul_u_f64
1 ra rb 15 dn dy 2
nul_ueq_f128
1 ra rb 11 dn dy 3
nul_ueq_f32
1 ra rb 11 dn dy 1
nul_ueq_f64
1 ra rb 11 dn dy 2
nul_uge_f128
1 ra rb 14 dn dy 3
nul_uge_f32
1 ra rb 14 dn dy 1
nul_uge_f64
1 ra rb 14 dn dy 2
nul_ult_f128
1 ra rb 13 dn dy 3
nul_ult_f32
1 ra rb 13 dn dy 1
nul_ult_f64
1 ra rb 13 dn dy 2
nul_une_f128
1 ra rb 12 dn dy 3
nul_une_f32
1 ra rb 12 dn dy 1
nul_une_f64
1 ra rb 12 dn dy 2
or
0 ra rb rc 0 5
or_imm
21 ra rb simm21
orn
0 ra rb rc 0 11
orn_imm
19 ra rb simm21
pack_mod_vu16
0 ra rb rc 0 1894
pack_mod_vu32
0 ra rb rc 0 1958
pack_mod_vu64
0 ra rb rc 0 2022
pack_sat_vi16
0 ra rb rc 0 1892
pack_sat_vi32
0 ra rb rc 0 1956
pack_sat_vi64
0 ra rb rc 0 2020
pack_sat_vu16
0 ra rb rc 0 1893
pack_sat_vu32
0 ra rb rc 0 1957
pack_sat_vu64
0 ra rb rc 0 2021
pack_usat_vi16
0 ra rb rc 0 1895
pack_usat_vi32
0 ra rb rc 0 1959
pack_usat_vi64
0 ra rb rc 0 2023
pack_vf16
0 ra rb rc 0 1346
pack_vf32
0 ra rb rc 0 1506
pack_vf64
0 ra rb rc 0 1666
perm
1 ra rb rc rd 29
permb
0 ra rb sc 0 27
ptc
0 ra rb rc 0 530
random
0 ra rb 0 0 516
rem_i128
0 ra rb rc 0 310
rem_i32
0 ra rb rc 0 150
rem_i64
0 ra rb rc 0 230
rem_imm_i32
42 ra rb simm21
rem_imm_i64
38 ra rb simm21
rem_imm_u32
43 ra rb uimm21
rem_imm_u64
39 ra rb uimm21
rem_u128
0 ra rb rc 0 311
rem_u32
0 ra rb rc 0 151
rem_u64
0 ra rb rc 0 231
rep_ge_i32
4 ra rb 11 uimm6 simm11x16
rep_ge_i64
4 ra rb 3 uimm6 simm11x16
rep_ge_u32
4 ra rb 15 uimm6 simm11x16
rep_ge_u64
4 ra rb 7 uimm6 simm11x16
rep_gt_i32
4 ra rb 9 uimm6 simm11x16
rep_gt_i64
4 ra rb 1 uimm6 simm11x16
rep_gt_u32
4 ra rb 13 uimm6 simm11x16
rep_gt_u64
4 ra rb 5 uimm6 simm11x16
rep_le_i32
4 ra rb 10 uimm6 simm11x16
rep_le_i64
4 ra rb 2 uimm6 simm11x16
rep_le_u32
4 ra rb 14 uimm6 simm11x16
rep_le_u64
4 ra rb 6 uimm6 simm11x16
rep_lt_i32
4 ra rb 8 uimm6 simm11x16
rep_lt_i64
4 ra rb 0 uimm6 simm11x16
rep_lt_u32
4 ra rb 12 uimm6 simm11x16
rep_lt_u64
4 ra rb 4 uimm6 simm11x16
ret
0 0 2
retf
2 2 0 uimm21
rfi
0 0 527
rint_f128
0 ra rb 0 0 1722
rint_f16
0 ra rb 0 0 1242
rint_f32
0 ra rb 0 0 1402
rint_f64
0 ra rb 0 0 1562
rint_vf16
0 ra rb 0 0 1321
rint_vf32
0 ra rb 0 0 1481
rint_vf64
0 ra rb 0 0 1641
rol_vu16
0 ra rb rc 0 1881
rol_vu32
0 ra rb rc 0 1945
rol_vu64
0 ra rb rc 0 2009
rol_vu8
0 ra rb rc 0 1817
ror_vu16
0 ra rb rc 0 1882
ror_vu32
0 ra rb rc 0 1946
ror_vu64
0 ra rb rc 0 2010
ror_vu8
0 ra rb rc 0 1818
round_f128
0 ra rb 0 0 1716
round_f16
0 ra rb 0 0 1236
round_f32
0 ra rb 0 0 1396
round_f64
0 ra rb 0 0 1556
round_vf16
0 ra rb 0 0 1315
round_vf32
0 ra rb 0 0 1475
round_vf64
0 ra rb 0 0 1635
roundeven_f128
0 ra rb 0 0 1720
roundeven_f16
0 ra rb 0 0 1240
roundeven_f32
0 ra rb 0 0 1400
roundeven_f64
0 ra rb 0 0 1560
roundeven_vf16
0 ra rb 0 0 1319
roundeven_vf32
0 ra rb 0 0 1479
roundeven_vf64
0 ra rb 0 0 1639
rscover
0 0 536
rsflush
0 0 537
rsload
0 0 538
rsqrt_f128
0 ra rb 0 0 1698
rsqrt_f16
0 ra rb 0 0 1218
rsqrt_f32
0 ra rb 0 0 1378
rsqrt_f64
0 ra rb 0 0 1538
rsqrt_vf16
0 ra rb 0 0 1301
rsqrt_vf32
0 ra rb 0 0 1461
rsqrt_vf64
0 ra rb 0 0 1621
scale_f128
0 ra rb sc 0 1758
set_dbr
0 ra rb simm10 519
set_dtr
0 ra rb rc 0 526
set_ibr
0 ra rb simm10 521
set_itr
0 ra rb rc 0 525
set_mr
0 ra rb simm10 523
set_spr
0 ra 0 spr 0 518
sext_i16
0 ra rb 0 0 37
sext_i32
0 ra rb 0 0 38
sext_i64
0 ra rb 0 0 39
sext_i8
0 ra rb 0 0 36
sl_add_i32
1 ra rb rc sd 37
sl_add_i64
1 ra rb rc sd 23
sl_add_u32
1 ra rb rc sd 38
sl_or
1 ra rb rc sd 50
sl_sub_i32
1 ra rb rc sd 41
sl_sub_i64
1 ra rb rc sd 24
sl_sub_u32
1 ra rb rc sd 42
sl_subr_i32
1 ra rb rc sd 39
sl_subr_i64
1 ra rb rc sd 25
sl_subr_u32
1 ra rb rc sd 40
sl_xor
1 ra rb rc sd 51
sll_imm_u128
0 ra rb sc 0 292
sll_imm_u32
0 ra rb sc 0 132
sll_imm_u64
0 ra rb sc 0 212
sll_imm_vu16
0 ra rb sc 0 1876
sll_imm_vu32
0 ra rb sc 0 1940
sll_imm_vu64
0 ra rb sc 0 2004
sll_imm_vu8
0 ra rb sc 0 1812
sll_u128
0 ra rb rc 0 296
sll_u32
0 ra rb rc 0 136
sll_u64
0 ra rb rc 0 216
sll_vu16
0 ra rb rc 0 1875
sll_vu32
0 ra rb rc 0 1939
sll_vu64
0 ra rb rc 0 2003
sll_vu8
0 ra rb rc 0 1811
slp_i128
1 ra rb rc rd 44
slp_i32
1 ra rb rc rd 13
slp_i64
1 ra rb rc rd 16
slsra_i32
1 ra rb rc rd 27
slsra_i64
1 ra rb rc rd 20
slsra_imm_i64
1 ra rb sc sd 22
slsrl_imm_u64
1 ra rb sc sd 21
slsrl_u32
1 ra rb rc rd 26
slsrl_u64
1 ra rb rc rd 19
sqrt_f128
0 ra rb 0 0 1697
sqrt_f16
0 ra rb 0 0 1217
sqrt_f32
0 ra rb 0 0 1377
sqrt_f64
0 ra rb 0 0 1537
sqrt_vf16
0 ra rb 0 0 1302
sqrt_vf32
0 ra rb 0 0 1462
sqrt_vf64
0 ra rb 0 0 1622
sra_i128
0 ra rb rc 0 298
sra_i32
0 ra rb rc 0 138
sra_i64
0 ra rb rc 0 218
sra_imm_i128
0 ra rb sc 0 294
sra_imm_i32
0 ra rb sc 0 134
sra_imm_i64
0 ra rb sc 0 214
sra_imm_vi16
0 ra rb sc 0 1880
sra_imm_vi32
0 ra rb sc 0 1944
sra_imm_vi64
0 ra rb sc 0 2008
sra_imm_vi8
0 ra rb sc 0 1816
sra_vi16
0 ra rb rc 0 1879
sra_vi32
0 ra rb rc 0 1943
sra_vi64
0 ra rb rc 0 2007
sra_vi8
0 ra rb rc 0 1815
srd_i128
0 ra rb rc 0 299
srd_i32
0 ra rb rc 0 139
srd_i64
0 ra rb rc 0 219
srd_imm_i128
0 ra rb sc 0 295
srd_imm_i32
0 ra rb sc 0 135
srd_imm_i64
0 ra rb sc 0 215
srl_imm_u128
0 ra rb sc 0 293
srl_imm_u32
0 ra rb sc 0 133
srl_imm_u64
0 ra rb sc 0 213
srl_imm_vu16
0 ra rb sc 0 1878
srl_imm_vu32
0 ra rb sc 0 1942
srl_imm_vu64
0 ra rb sc 0 2006
srl_imm_vu8
0 ra rb sc 0 1814
srl_u128
0 ra rb rc 0 297
srl_u32
0 ra rb rc 0 137
srl_u64
0 ra rb rc 0 217
srl_vu16
0 ra rb rc 0 1877
srl_vu32
0 ra rb rc 0 1941
srl_vu64
0 ra rb rc 0 2005
srl_vu8
0 ra rb rc 0 1813
srp_i128
1 ra rb rc rd 45
srp_i32
1 ra rb rc rd 14
srp_i64
1 ra rb rc rd 17
srp_imm_i128
1 ra rb rc sd 46
srp_imm_i32
1 ra rb rc sd 15
srp_imm_i64
1 ra rb rc sd 18
st_i128
85 ra rb simm21
st_i16
77 ra rb simm21
st_i32
80 ra rb simm21
st_i64
83 ra rb simm21
st_i8
74 ra rb simm21
st_iprel_i128
101 ra uimm28
st_iprel_i16
93 ra uimm28
st_iprel_i32
96 ra uimm28
st_iprel_i64
99 ra uimm28
st_iprel_i8
90 ra uimm28
st_mia_i128
0 ra rb simm10 109
st_mia_i16
0 ra rb simm10 101
st_mia_i32
0 ra rb simm10 104
st_mia_i64
0 ra rb simm10 107
st_mia_i8
0 ra rb simm10 98
st_mib_i128
0 ra rb simm10 125
st_mib_i16
0 ra rb simm10 117
st_mib_i32
0 ra rb simm10 120
st_mib_i64
0 ra rb simm10 123
st_mib_i8
0 ra rb simm10 114
st_xi32_i128
9 ra rb rc scale 13 simm7
st_xi32_i16
9 ra rb rc scale 5 simm7
st_xi32_i32
9 ra rb rc scale 8 simm7
st_xi32_i64
9 ra rb rc scale 11 simm7
st_xi32_i8
9 ra rb rc scale 2 simm7
st_xi64_i128
8 ra rb rc scale 13 simm7
st_xi64_i16
8 ra rb rc scale 5 simm7
st_xi64_i32
8 ra rb rc scale 8 simm7
st_xi64_i64
8 ra rb rc scale 11 simm7
st_xi64_i8
8 ra rb rc scale 2 simm7
st_xu32_i128
10 ra rb rc scale 13 simm7
st_xu32_i16
10 ra rb rc scale 5 simm7
st_xu32_i32
10 ra rb rc scale 8 simm7
st_xu32_i64
10 ra rb rc scale 11 simm7
st_xu32_i8
10 ra rb rc scale 2 simm7
sub_alt_vf16
0 ra rb rc 0 1337
sub_alt_vf32
0 ra rb rc 0 1497
sub_alt_vf64
0 ra rb rc 0 1657
sub_f128
0 ra rb rc 0 1700
sub_f16
0 ra rb rc 0 1220
sub_f32
0 ra rb rc 0 1380
sub_f64
0 ra rb rc 0 1540
sub_horiz_vf16
0 ra rb rc 0 1339
sub_horiz_vf32
0 ra rb rc 0 1499
sub_horiz_vf64
0 ra rb rc 0 1659
sub_i128
0 ra rb rc 0 289
sub_i32
0 ra rb rc 0 129
sub_i64
0 ra rb rc 0 209
sub_sat_vi16
0 ra rb rc 0 1868
sub_sat_vi32
0 ra rb rc 0 1932
sub_sat_vi64
0 ra rb rc 0 1996
sub_sat_vi8
0 ra rb rc 0 1804
sub_sat_vu16
0 ra rb rc 0 1869
sub_sat_vu32
0 ra rb rc 0 1933
sub_sat_vu64
0 ra rb rc 0 1997
sub_sat_vu8
0 ra rb rc 0 1805
sub_sub_i64
1 ra rb rc rd 10
sub_subb_u64
1 ra rb rc rd 12
sub_u32
0 ra rb rc 0 162
sub_vf16
0 ra rb rc 0 1304
sub_vf32
0 ra rb rc 0 1464
sub_vf64
0 ra rb rc 0 1624
sub_vu16
0 ra rb rc 0 1861
sub_vu32
0 ra rb rc 0 1925
sub_vu64
0 ra rb rc 0 1989
sub_vu8
0 ra rb rc 0 1797
subb_u64
0 ra rb rc 0 246
subb_vu16
0 ra rb rc 0 1865
subb_vu32
0 ra rb rc 0 1929
subb_vu64
0 ra rb rc 0 1993
subb_vu8
0 ra rb rc 0 1801
subo_i64
0 ra rb rc 0 244
subo_vi16
0 ra rb rc 0 1863
subo_vi32
0 ra rb rc 0 1927
subo_vi64
0 ra rb rc 0 1991
subo_vi8
0 ra rb rc 0 1799
subr_imm_i32
26 ra rb simm21
subr_imm_i64
31 ra rb simm21
subr_imm_u32
27 ra rb simm21
syscall
0 0 515
sysret
0 0 534
tpa
0 ra rb rc 0 529
trunk_f128
0 ra rb 0 0 1719
trunk_f128_i128
0 ra rb 0 0 1744
trunk_f128_i32
0 ra rb 0 0 1736
trunk_f128_i64
0 ra rb 0 0 1740
trunk_f128_u128
0 ra rb 0 0 1745
trunk_f128_u32
0 ra rb 0 0 1737
trunk_f128_u64
0 ra rb 0 0 1741
trunk_f16
0 ra rb 0 0 1239
trunk_f16_i128
0 ra rb 0 0 1264
trunk_f16_i32
0 ra rb 0 0 1256
trunk_f16_i64
0 ra rb 0 0 1260
trunk_f16_u128
0 ra rb 0 0 1265
trunk_f16_u32
0 ra rb 0 0 1257
trunk_f16_u64
0 ra rb 0 0 1261
trunk_f32
0 ra rb 0 0 1399
trunk_f32_i128
0 ra rb 0 0 1424
trunk_f32_i32
0 ra rb 0 0 1416
trunk_f32_i64
0 ra rb 0 0 1420
trunk_f32_u128
0 ra rb 0 0 1425
trunk_f32_u32
0 ra rb 0 0 1417
trunk_f32_u64
0 ra rb 0 0 1421
trunk_f64
0 ra rb 0 0 1559
trunk_f64_i128
0 ra rb 0 0 1584
trunk_f64_i32
0 ra rb 0 0 1576
trunk_f64_i64
0 ra rb 0 0 1580
trunk_f64_u128
0 ra rb 0 0 1585
trunk_f64_u32
0 ra rb 0 0 1577
trunk_f64_u64
0 ra rb 0 0 1581
trunk_vf16
0 ra rb 0 0 1318
trunk_vf16_vi16
0 ra rb 0 0 1347
trunk_vf16_vu16
0 ra rb 0 0 1348
trunk_vf32
0 ra rb 0 0 1478
trunk_vf32_vi32
0 ra rb 0 0 1507
trunk_vf32_vu32
0 ra rb 0 0 1508
trunk_vf64
0 ra rb 0 0 1638
trunk_vf64_vi64
0 ra rb 0 0 1667
trunk_vf64_vu64
0 ra rb 0 0 1668
undef
0 0 0
unpack_high_vf16
0 ra rb 0 0 1344
unpack_high_vf32
0 ra rb 0 0 1504
unpack_high_vf64
0 ra rb 0 0 1664
unpack_high_vi16
0 ra rb 0 0 1889
unpack_high_vi32
0 ra rb 0 0 1953
unpack_high_vi8
0 ra rb 0 0 1825
unpack_high_vu16
0 ra rb 0 0 1891
unpack_high_vu32
0 ra rb 0 0 1955
unpack_high_vu8
0 ra rb 0 0 1827
unpack_low_vf16
0 ra rb 0 0 1345
unpack_low_vf32
0 ra rb 0 0 1505
unpack_low_vf64
0 ra rb 0 0 1665
unpack_low_vi16
0 ra rb 0 0 1888
unpack_low_vi32
0 ra rb 0 0 1952
unpack_low_vi8
0 ra rb 0 0 1824
unpack_low_vu16
0 ra rb 0 0 1890
unpack_low_vu32
0 ra rb 0 0 1954
unpack_low_vu8
0 ra rb 0 0 1826
write
2 127 uimm28
xnor
0 ra rb rc 0 10
xor
0 ra rb rc 0 6
xor_dec
0 ra rb rc 0 240
xor_imm
22 ra rb simm21
zext_i16
0 ra rb 0 0 41
zext_i32
0 ra rb 0 0 42
zext_i64
0 ra rb 0 0 43
zext_i8
0 ra rb 0 0 40

register files

128-bit general purpose registers (128 from 128)
r0 r1 r2 r3 r4 r5 r6 r7
r8 r9 r10 r11 r12 r13 r14 r15
r16 r17 r18 r19 r20 r21 r22 r23
r24 r25 r26 r27 r28 r29 r30 r31
r32 r33 r34 r35 r36 r37 r38 r39
r40 r41 r42 r43 r44 r45 r46 r47
r48 r49 r50 r51 r52 r53 r54 r55
r56 r57 r58 r59 r60 r61 r62 r63
r64 r65 r66 r67 r68 r69 r70 r71
r72 r73 r74 r75 r76 r77 r78 r79
r80 r81 r82 r83 r84 r85 r86 r87
r88 r89 r90 r91 r92 r93 r94 r95
r96 r97 r98 r99 r100 r101 r102 r103
r104 r105 r106 r107 r108 r109 r110 r111
r112 r113 r114 r115 r116 r117 r118 r119
g0 g1 g2 g3 tp fp sp gz

64-bit special purpose registers (39 from 128)
ip (0) eip (1) fpcr (2) eca (3) 4 5 6 7
rsc (8) rsp (9) bsp (10) 11 12 13 14 15
psr (16) reip (17) kip (18) ksp (19) krsp (20) peb (21) teb (22) itc (23)
itm (24) pta (25) iva (26) 27 28 29 30 31
32 33 34 35 36 37 38 39
iip (40) iipa (41) ipsr (42) cause (43) ifa (44) iib (45) 46 47
48 49 50 51 52 53 54 55
56 57 58 59 60 61 62 63
irr0 (64) irr1 (65) irr2 (66) irr3 (67) 68 69 70 71
isr0 (72) isr1 (73) isr2 (74) isr3 (75) 76 77 78 79
iv (80) lid (81) tpr (82) itcv (83) tsv (84) pmv (85) cmcv (86) 87
88 89 90 91 92 93 94 95
96 97 98 99 100 101 102 103
104 105 106 107 108 109 110 111
112 113 114 115 116 117 118 119
120 121 122 123 124 125 126 127

machine instruction opcodes

primary opcodes (118 from 128)
misc 0 fused 1 raopx 2 br_misc 3
loop 4 br_32 5 br_64 6 br_128 7
mem_xi64 8 mem_xi32 9 mem_xu32 10 fma 11
reserved 12 reserved 13 reserved 14 reserved 15
ld_imm 16 ld_imm_high 17 andn_imm 18 orn_imm 19
and_imm 20 or_imm 21 xor_imm 22 dep 23
add_imm_i32 24 add_imm_u32 25 subr_imm_i32 26 subr_imm_u32 27
mul_imm_i32 28 mul_imm_u32 29 add_imm_i64 30 subr_imm_i64 31
mul_imm_i64 32 reserved 33 reserved 34 reserved 35
div_imm_i64 36 div_imm_u64 37 rem_imm_i64 38 rem_imm_u64 39
div_imm_i32 40 div_imm_u32 41 rem_imm_i32 42 rem_imm_u32 43
max_imm_i64 44 max_imm_u64 45 min_imm_i64 46 min_imm_u64 47
max_imm_i32 48 max_imm_u32 49 min_imm_i32 50 min_imm_u32 51
cmp_eq_imm_i64 52 cmp_ne_imm_i64 53 cmp_lt_imm_i64 54 cmp_ge_imm_i64 55
cmp_lt_imm_u64 56 cmp_ge_imm_u64 57 cmp_eq_imm_i32 58 cmp_ne_imm_i32 59
cmp_lt_imm_i32 60 cmp_ge_imm_i32 61 cmp_lt_imm_u32 62 cmp_ge_imm_u32 63
cmp_eq_imm_i128 64 cmp_ne_imm_i128 65 cmp_lt_imm_i128 66 cmp_ge_imm_i128 67
cmp_lt_imm_u128 68 cmp_ge_imm_u128 69 add_imm_i128 70 reserved 71
ld_u8 72 ld_i8 73 st_i8 74 ld_u16 75
ld_i16 76 st_i16 77 ld_u32 78 ld_i32 79
st_i32 80 ld_u64 81 ld_i64 82 st_i64 83
ld_i128 84 st_i128 85 reserved 86 reserved 87
ld_iprel_u8 88 ld_iprel_i8 89 st_iprel_i8 90 ld_iprel_u16 91
ld_iprel_i16 92 st_iprel_i16 93 ld_iprel_u32 94 ld_iprel_i32 95
st_iprel_i32 96 ld_iprel_u64 97 ld_iprel_i64 98 st_iprel_i64 99
ld_iprel_i128 100 st_iprel_i128 101 lda_iprel 102 call_plt 103
call 104 lda_r 105 br_eq_imm_i32 106 br_ne_imm_i32 107
br_lt_imm_i32 108 br_ge_imm_i32 109 br_lt_imm_u32 110 br_ge_imm_u32 111
br_eq_imm_i64 112 br_ne_imm_i64 113 br_lt_imm_i64 114 br_ge_imm_i64 115
br_lt_imm_u64 116 br_ge_imm_u64 117 br_eq_imm_i128 118 br_ne_imm_i128 119
br_lt_imm_i128 120 br_ge_imm_i128 121 br_lt_imm_u128 122 br_ge_imm_u128 123
br_mask_all 124 br_mask_notall 125 br_mask_none 126 br_mask_any 127

The «fused» extended opcodes (67 from 128)
nul_misc 0 nul_32 1 nul_64 2 nul_128 3
reserved 4 reserved 5 reserved 6 reserved 7
add_add_i64 8 add_sub_i64 9 sub_sub_i64 10 add_addc_u64 11
sub_subb_u64 12 slp_i32 13 srp_i32 14 srp_imm_i32 15
slp_i64 16 srp_i64 17 srp_imm_i64 18 slsrl_u64 19
slsra_i64 20 slsrl_imm_u64 21 slsra_imm_i64 22 sl_add_i64 23
sl_sub_i64 24 sl_subr_i64 25 slsrl_u32 26 slsra_i32 27
mbsel 28 perm 29 mov2 30 alignup_u64 31
crc32c 32 reserved 33 reserved 34 reserved 35
reserved 36 sl_add_i32 37 sl_add_u32 38 sl_subr_i32 39
sl_subr_u32 40 sl_sub_i32 41 sl_sub_u32 42 dep_r 43
slp_i128 44 srp_i128 45 srp_imm_i128 46 reserved 47
reserved 48 reserved 49 sl_or 50 sl_xor 51
reserved 52 reserved 53 reserved 54 reserved 55
reserved 56 reserved 57 reserved 58 reserved 59
reserved 60 reserved 61 reserved 62 reserved 63
reserved 64 reserved 65 reserved 66 reserved 67
reserved 68 reserved 69 reserved 70 reserved 71
reserved 72 reserved 73 reserved 74 reserved 75
reserved 76 reserved 77 reserved 78 reserved 79
reserved 80 reserved 81 reserved 82 reserved 83
reserved 84 reserved 85 reserved 86 reserved 87
reserved 88 reserved 89 reserved 90 reserved 91
reserved 92 reserved 93 reserved 94 reserved 95
reserved 96 reserved 97 reserved 98 reserved 99
call_mi 100 call_rvt 101 lda_n 102 lda_nrc 103
reserved 104 reserved 105 nul_eq_imm_i32 106 nul_ne_imm_i32 107
nul_lt_imm_i32 108 nul_ge_imm_i32 109 nul_lt_imm_u32 110 nul_ge_imm_u32 111
nul_eq_imm_i64 112 nul_ne_imm_i64 113 nul_lt_imm_i64 114 nul_ge_imm_i64 115
nul_lt_imm_u64 116 nul_ge_imm_u64 117 nul_eq_imm_i128 118 nul_ne_imm_i128 119
nul_lt_imm_i128 120 nul_ge_imm_i128 121 nul_lt_imm_u128 122 nul_ge_imm_u128 123
nul_mask_all 124 nul_mask_notall 125 nul_mask_none 126 nul_mask_any 127

The «nul_misc» extended opcodes (4 from 16)
nul_bc 0 nul_bc_imm 1 nul_bs 2 nul_bs_imm 3
reserved 4 reserved 5 reserved 6 reserved 7
reserved 8 reserved 9 reserved 10 reserved 11
reserved 12 reserved 13 reserved 14 reserved 15

The «nul_32» extended opcodes (16 from 16)
nul_eq_i32 0 nul_ne_i32 1 nul_lt_i32 2 nul_ge_i32 3
nul_lt_u32 4 nul_ge_u32 5 nul_oeq_f32 6 nul_one_f32 7
nul_olt_f32 8 nul_oge_f32 9 nul_o_f32 10 nul_ueq_f32 11
nul_une_f32 12 nul_ult_f32 13 nul_uge_f32 14 nul_u_f32 15

The «nul_64» extended opcodes (16 from 16)
nul_eq_i64 0 nul_ne_i64 1 nul_lt_i64 2 nul_ge_i64 3
nul_lt_u64 4 nul_ge_u64 5 nul_oeq_f64 6 nul_one_f64 7
nul_olt_f64 8 nul_oge_f64 9 nul_o_f64 10 nul_ueq_f64 11
nul_une_f64 12 nul_ult_f64 13 nul_uge_f64 14 nul_u_f64 15

The «nul_128» extended opcodes (16 from 16)
nul_eq_i128 0 nul_ne_i128 1 nul_lt_i128 2 nul_ge_i128 3
nul_lt_u128 4 nul_ge_u128 5 nul_oeq_f128 6 nul_one_f128 7
nul_olt_f128 8 nul_oge_f128 9 nul_o_f128 10 nul_ueq_f128 11
nul_une_f128 12 nul_ult_f128 13 nul_uge_f128 14 nul_u_f128 15

The «br_misc» extended opcodes (4 from 16)
br_bc 0 br_bc_imm 1 br_bs 2 br_bs_imm 3
reserved 4 reserved 5 reserved 6 reserved 7
reserved 8 reserved 9 reserved 10 reserved 11
reserved 12 reserved 13 reserved 14 reserved 15

The «br_32» extended opcodes (16 from 16)
br_eq_i32 0 br_ne_i32 1 br_lt_i32 2 br_ge_i32 3
br_lt_u32 4 br_ge_u32 5 br_oeq_f32 6 br_one_f32 7
br_olt_f32 8 br_oge_f32 9 br_o_f32 10 br_ueq_f32 11
br_une_f32 12 br_ult_f32 13 br_uge_f32 14 br_u_f32 15

The «br_64» extended opcodes (16 from 16)
br_eq_i64 0 br_ne_i64 1 br_lt_i64 2 br_ge_i64 3
br_lt_u64 4 br_ge_u64 5 br_oeq_f64 6 br_one_f64 7
br_olt_f64 8 br_oge_f64 9 br_o_f64 10 br_ueq_f64 11
br_une_f64 12 br_ult_f64 13 br_uge_f64 14 br_u_f64 15

The «br_128» extended opcodes (16 from 16)
br_eq_i128 0 br_ne_i128 1 br_lt_i128 2 br_ge_i128 3
br_lt_u128 4 br_ge_u128 5 br_oeq_f128 6 br_one_f128 7
br_olt_f128 8 br_oge_f128 9 br_o_f128 10 br_ueq_f128 11
br_une_f128 12 br_ult_f128 13 br_uge_f128 14 br_u_f128 15

The «loop» extended opcodes (16 from 16)
rep_lt_i64 0 rep_gt_i64 1 rep_le_i64 2 rep_ge_i64 3
rep_lt_u64 4 rep_gt_u64 5 rep_le_u64 6 rep_ge_u64 7
rep_lt_i32 8 rep_gt_i32 9 rep_le_i32 10 rep_ge_i32 11
rep_lt_u32 12 rep_gt_u32 13 rep_le_u32 14 rep_ge_u32 15

The «mem_xi64» extended opcodes (15 from 16)
ld_xi64_u8 0 ld_xi64_i8 1 st_xi64_i8 2 ld_xi64_u16 3
ld_xi64_i16 4 st_xi64_i16 5 ld_xi64_u32 6 ld_xi64_i32 7
st_xi64_i32 8 ld_xi64_u64 9 ld_xi64_i64 10 st_xi64_i64 11
ld_xi64_i128 12 st_xi64_i128 13 lda_xi64 14 reserved 15

The «mem_xi32» extended opcodes (15 from 16)
ld_xi32_u8 0 ld_xi32_i8 1 st_xi32_i8 2 ld_xi32_u16 3
ld_xi32_i16 4 st_xi32_i16 5 ld_xi32_u32 6 ld_xi32_i32 7
st_xi32_i32 8 ld_xi32_u64 9 ld_xi32_i64 10 st_xi32_i64 11
ld_xi32_i128 12 st_xi32_i128 13 lda_xi32 14 reserved 15

The «mem_xu32» extended opcodes (15 from 16)
ld_xu32_u8 0 ld_xu32_i8 1 st_xu32_i8 2 ld_xu32_u16 3
ld_xu32_i16 4 st_xu32_i16 5 ld_xu32_u32 6 ld_xu32_i32 7
st_xu32_i32 8 ld_xu32_u64 9 ld_xu32_i64 10 st_xu32_i64 11
ld_xu32_i128 12 st_xu32_i128 13 lda_xu32 14 reserved 15

The «fma» extended opcodes (74 from 128)
cmov_lsb 0 cmov_eq_i32 1 cmov_lt_i32 2 cmov_le_i32 3
reserved 4 cmov_eq_i64 5 cmov_lt_i64 6 cmov_le_i64 7
reserved 8 cmov_eq_i128 9 cmov_lt_i128 10 cmov_le_i128 11
mul_add 12 mul_sub 13 mul_subr 14 reserved 15
reserved 16 reserved 17 reserved 18 reserved 19
madd_f128 20 msub_f128 21 nmadd_f128 22 nmsub_f128 23
merge_f128 24 reserved 25 reserved 26 reserved 27
reserved 28 reserved 29 reserved 30 reserved 31
reserved 32 reserved 33 reserved 34 reserved 35
madd_f64 36 msub_f64 37 nmadd_f64 38 nmsub_f64 39
merge_f64 40 reserved 41 reserved 42 reserved 43
madd_vf64 44 msub_vf64 45 nmadd_vf64 46 nmsub_vf64 47
madd_alt_vf64 48 msub_alt_vf64 49 merge_vf64 50 reserved 51
reserved 52 reserved 53 reserved 54 reserved 55
reserved 56 reserved 57 reserved 58 reserved 59
madd_f32 60 msub_f32 61 nmadd_f32 62 nmsub_f32 63
merge_f32 64 reserved 65 reserved 66 reserved 67
madd_vf32 68 msub_vf32 69 nmadd_vf32 70 nmsub_vf32 71
madd_alt_vf32 72 msub_alt_vf32 73 merge_vf32 74 reserved 75
reserved 76 reserved 77 reserved 78 reserved 79
reserved 80 reserved 81 reserved 82 reserved 83
madd_f16 84 msub_f16 85 nmadd_f16 86 nmsub_f16 87
merge_f16 88 reserved 89 reserved 90 reserved 91
madd_vf16 92 msub_vf16 93 nmadd_vf16 94 nmsub_vf16 95
madd_alt_vf16 96 msub_alt_vf16 97 merge_vf16 98 reserved 99
reserved 100 reserved 101 reserved 102 reserved 103
reserved 104 reserved 105 reserved 106 reserved 107
amx_cas_i8 108 amq_cas_i8 109 amr_cas_i8 110 amz_cas_i8 111
amx_cas_i16 112 amq_cas_i16 113 amr_cas_i16 114 amz_cas_i16 115
amx_cas_i32 116 amq_cas_i32 117 amr_cas_i32 118 amz_cas_i32 119
amx_cas_i64 120 amq_cas_i64 121 amr_cas_i64 122 amz_cas_i64 123
amx_cas_i128 124 amq_cas_i128 125 amr_cas_i128 126 amz_cas_i128 127

The «raopx» extended opcodes (14 from 128)
nop 0 jmp 1 retf 2 alloc 3
alloc_sp 4 reserved 5 reserved 6 reserved 7
eh_adj 8 eh_throw 9 eh_catch 10 eh_next 11
reserved 12 reserved 13 reserved 14 reserved 15
dcbt 16 dcbf 17 dcbi 18 icbi 19
reserved 20 reserved 21 reserved 22 reserved 23
reserved 24 reserved 25 reserved 26 reserved 27
reserved 28 reserved 29 reserved 30 reserved 31
reserved 32 reserved 33 reserved 34 reserved 35
reserved 36 reserved 37 reserved 38 reserved 39
reserved 40 reserved 41 reserved 42 reserved 43
reserved 44 reserved 45 reserved 46 reserved 47
reserved 48 reserved 49 reserved 50 reserved 51
reserved 52 reserved 53 reserved 54 reserved 55
reserved 56 reserved 57 reserved 58 reserved 59
reserved 60 reserved 61 reserved 62 reserved 63
reserved 64 reserved 65 reserved 66 reserved 67
reserved 68 reserved 69 reserved 70 reserved 71
reserved 72 reserved 73 reserved 74 reserved 75
reserved 76 reserved 77 reserved 78 reserved 79
reserved 80 reserved 81 reserved 82 reserved 83
reserved 84 reserved 85 reserved 86 reserved 87
reserved 88 reserved 89 reserved 90 reserved 91
reserved 92 reserved 93 reserved 94 reserved 95
reserved 96 reserved 97 reserved 98 reserved 99
reserved 100 reserved 101 reserved 102 reserved 103
reserved 104 reserved 105 reserved 106 reserved 107
reserved 108 reserved 109 reserved 110 reserved 111
reserved 112 reserved 113 reserved 114 reserved 115
reserved 116 reserved 117 reserved 118 reserved 119
reserved 120 reserved 121 reserved 122 reserved 123
reserved 124 reserved 125 reserved 126 write 127

The «misc» extended opcodes (980 from 2048)
undef 0 mov 1 ret 2 not 3
and 4 or 5 xor 6 andn 7
nand 8 nor 9 xnor 10 orn 11
jmp_r 12 jmp_t 13 jmp_t_i32 14 jmp_t_u32 15
call_ri 16 reserved 17 reserved 18 reserved 19
mbgath 20 mbscat 21 gtb 22 reserved 23
cnt_pop 24 cnt_lz 25 cnt_tz 26 permb 27
bit_clear 28 bit_clear_imm 29 bit_set 30 bit_set_imm 31
bit_flip 32 bit_flip_imm 33 reserved 34 reserved 35
sext_i8 36 sext_i16 37 sext_i32 38 sext_i64 39
zext_i8 40 zext_i16 41 zext_i32 42 zext_i64 43
reserved 44 reserved 45 reserved 46 reserved 47
reserved 48 reserved 49 reserved 50 reserved 51
reserved 52 reserved 53 reserved 54 reserved 55
reserved 56 reserved 57 reserved 58 reserved 59
reserved 60 reserved 61 reserved 62 reserved 63
reserved 64 reserved 65 reserved 66 reserved 67
reserved 68 reserved 69 reserved 70 reserved 71
reserved 72 reserved 73 reserved 74 reserved 75
reserved 76 reserved 77 reserved 78 reserved 79
reserved 80 reserved 81 reserved 82 reserved 83
reserved 84 reserved 85 reserved 86 reserved 87
reserved 88 reserved 89 reserved 90 reserved 91
reserved 92 reserved 93 reserved 94 reserved 95
ld_mia_u8 96 ld_mia_i8 97 st_mia_i8 98 ld_mia_u16 99
ld_mia_i16 100 st_mia_i16 101 ld_mia_u32 102 ld_mia_i32 103
st_mia_i32 104 ld_mia_u64 105 ld_mia_i64 106 st_mia_i64 107
ld_mia_i128 108 st_mia_i128 109 reserved 110 reserved 111
ld_mib_u8 112 ld_mib_i8 113 st_mib_i8 114 ld_mib_u16 115
ld_mib_i16 116 st_mib_i16 117 ld_mib_u32 118 ld_mib_i32 119
st_mib_i32 120 ld_mib_u64 121 ld_mib_i64 122 st_mib_i64 123
ld_mib_i128 124 st_mib_i128 125 reserved 126 reserved 127
add_i32 128 sub_i32 129 mul_i32 130 neg_i32 131
sll_imm_u32 132 srl_imm_u32 133 sra_imm_i32 134 srd_imm_i32 135
sll_u32 136 srl_u32 137 sra_i32 138 srd_i32 139
cmp_eq_i32 140 cmp_ne_i32 141 cmp_lt_i32 142 cmp_ge_i32 143
cmp_lt_u32 144 cmp_ge_u32 145 abs_diff_i32 146 abs_i32 147
div_i32 148 div_u32 149 rem_i32 150 rem_u32 151
max_i32 152 max_u32 153 min_i32 154 min_u32 155
reserved 156 reserved 157 reserved 158 reserved 159
mul_u32 160 add_u32 161 sub_u32 162 reserved 163
reserved 164 reserved 165 reserved 166 reserved 167
reserved 168 reserved 169 reserved 170 reserved 171
reserved 172 reserved 173 reserved 174 reserved 175
reserved 176 reserved 177 reserved 178 reserved 179
reserved 180 reserved 181 reserved 182 reserved 183
reserved 184 reserved 185 reserved 186 reserved 187
reserved 188 reserved 189 reserved 190 reserved 191
reserved 192 reserved 193 reserved 194 reserved 195
reserved 196 reserved 197 reserved 198 reserved 199
reserved 200 reserved 201 reserved 202 reserved 203
reserved 204 reserved 205 reserved 206 reserved 207
add_i64 208 sub_i64 209 mul_i64 210 neg_i64 211
sll_imm_u64 212 srl_imm_u64 213 sra_imm_i64 214 srd_imm_i64 215
sll_u64 216 srl_u64 217 sra_i64 218 srd_i64 219
cmp_eq_i64 220 cmp_ne_i64 221 cmp_lt_i64 222 cmp_ge_i64 223
cmp_lt_u64 224 cmp_ge_u64 225 abs_diff_i64 226 abs_i64 227
div_i64 228 div_u64 229 rem_i64 230 rem_u64 231
max_i64 232 max_u64 233 min_i64 234 min_u64 235
reserved 236 reserved 237 reserved 238 reserved 239
xor_dec 240 and_dec 241 and_neg 242 addo_i64 243
subo_i64 244 addc_u64 245 subb_u64 246 mul_h 247
reserved 248 reserved 249 reserved 250 reserved 251
reserved 252 reserved 253 reserved 254 reserved 255
reserved 256 reserved 257 reserved 258 reserved 259
reserved 260 reserved 261 reserved 262 reserved 263
reserved 264 reserved 265 reserved 266 reserved 267
reserved 268 reserved 269 reserved 270 reserved 271
reserved 272 reserved 273 reserved 274 reserved 275
reserved 276 reserved 277 reserved 278 reserved 279
reserved 280 reserved 281 reserved 282 reserved 283
reserved 284 reserved 285 reserved 286 reserved 287
add_i128 288 sub_i128 289 mul_i128 290 neg_i128 291
sll_imm_u128 292 srl_imm_u128 293 sra_imm_i128 294 srd_imm_i128 295
sll_u128 296 srl_u128 297 sra_i128 298 srd_i128 299
cmp_eq_i128 300 cmp_ne_i128 301 cmp_lt_i128 302 cmp_ge_i128 303
cmp_lt_u128 304 cmp_ge_u128 305 abs_diff_i128 306 abs_i128 307
div_i128 308 div_u128 309 rem_i128 310 rem_u128 311
max_i128 312 max_u128 313 min_i128 314 min_u128 315
reserved 316 reserved 317 reserved 318 reserved 319
reserved 320 reserved 321 reserved 322 reserved 323
reserved 324 reserved 325 reserved 326 reserved 327
reserved 328 reserved 329 reserved 330 reserved 331
reserved 332 reserved 333 reserved 334 reserved 335
reserved 336 reserved 337 reserved 338 reserved 339
reserved 340 reserved 341 reserved 342 reserved 343
reserved 344 reserved 345 reserved 346 reserved 347
reserved 348 reserved 349 reserved 350 reserved 351
reserved 352 reserved 353 reserved 354 reserved 355
reserved 356 reserved 357 reserved 358 reserved 359
reserved 360 reserved 361 reserved 362 reserved 363
reserved 364 reserved 365 reserved 366 reserved 367
reserved 368 reserved 369 reserved 370 reserved 371
reserved 372 reserved 373 reserved 374 reserved 375
reserved 376 reserved 377 reserved 378 reserved 379
reserved 380 reserved 381 reserved 382 reserved 383
reserved 384 reserved 385 reserved 386 reserved 387
reserved 388 reserved 389 reserved 390 reserved 391
reserved 392 reserved 393 reserved 394 reserved 395
reserved 396 reserved 397 reserved 398 reserved 399
reserved 400 reserved 401 reserved 402 reserved 403
reserved 404 reserved 405 reserved 406 reserved 407
reserved 408 reserved 409 reserved 410 reserved 411
reserved 412 reserved 413 reserved 414 reserved 415
reserved 416 reserved 417 reserved 418 reserved 419
reserved 420 reserved 421 reserved 422 reserved 423
reserved 424 reserved 425 reserved 426 reserved 427
reserved 428 reserved 429 reserved 430 reserved 431
reserved 432 reserved 433 reserved 434 reserved 435
reserved 436 reserved 437 reserved 438 reserved 439
reserved 440 reserved 441 reserved 442 reserved 443
reserved 444 reserved 445 reserved 446 reserved 447
reserved 448 reserved 449 reserved 450 reserved 451
reserved 452 reserved 453 reserved 454 reserved 455
reserved 456 reserved 457 reserved 458 reserved 459
reserved 460 reserved 461 reserved 462 reserved 463
reserved 464 reserved 465 reserved 466 reserved 467
reserved 468 reserved 469 reserved 470 reserved 471
reserved 472 reserved 473 reserved 474 reserved 475
reserved 476 reserved 477 reserved 478 reserved 479
reserved 480 reserved 481 reserved 482 reserved 483
reserved 484 reserved 485 reserved 486 reserved 487
reserved 488 reserved 489 reserved 490 reserved 491
reserved 492 reserved 493 reserved 494 reserved 495
reserved 496 reserved 497 reserved 498 reserved 499
reserved 500 reserved 501 reserved 502 reserved 503
reserved 504 reserved 505 reserved 506 reserved 507
reserved 508 reserved 509 reserved 510 reserved 511
mprobe 512 cpuid 513 int 514 syscall 515
random 516 get_spr 517 set_spr 518 set_dbr 519
get_dbr 520 set_ibr 521 get_ibr 522 set_mr 523
get_mr 524 set_itr 525 set_dtr 526 rfi 527
halt 528 tpa 529 ptc 530 reserved 531
reserved 532 reserved 533 sysret 534 reserved 535
rscover 536 rsflush 537 rsload 538 reserved 539
reserved 540 reserved 541 reserved 542 reserved 543
clmul_ll 544 clmul_hl 545 clmul_hh 546 reserved 547
reserved 548 reserved 549 reserved 550 reserved 551
aes_enc 552 aes_enc_last 553 aes_dec 554 aes_dec_last 555
aes_imc 556 aes_keygen_assist 557 reserved 558 reserved 559
reserved 560 reserved 561 reserved 562 jmp_mi 563
fence_a 564 fence_r 565 fence_ar 566 fence_sc 567
reserved 568 reserved 569 reserved 570 reserved 571
reserved 572 reserved 573 reserved 574 reserved 575
reserved 576 reserved 577 reserved 578 reserved 579
reserved 580 reserved 581 reserved 582 reserved 583
reserved 584 reserved 585 reserved 586 reserved 587
reserved 588 reserved 589 reserved 590 reserved 591
reserved 592 reserved 593 reserved 594 reserved 595
reserved 596 reserved 597 reserved 598 reserved 599
reserved 600 reserved 601 reserved 602 reserved 603
reserved 604 reserved 605 reserved 606 reserved 607
reserved 608 reserved 609 reserved 610 reserved 611
reserved 612 reserved 613 reserved 614 reserved 615
reserved 616 reserved 617 reserved 618 reserved 619
reserved 620 reserved 621 reserved 622 reserved 623
reserved 624 reserved 625 reserved 626 reserved 627
reserved 628 reserved 629 reserved 630 reserved 631
reserved 632 reserved 633 reserved 634 reserved 635
reserved 636 reserved 637 reserved 638 reserved 639
reserved 640 reserved 641 reserved 642 reserved 643
reserved 644 reserved 645 reserved 646 reserved 647
reserved 648 reserved 649 reserved 650 reserved 651
reserved 652 reserved 653 reserved 654 reserved 655
reserved 656 reserved 657 reserved 658 reserved 659
reserved 660 reserved 661 reserved 662 reserved 663
reserved 664 reserved 665 reserved 666 reserved 667
reserved 668 reserved 669 reserved 670 reserved 671
reserved 672 reserved 673 reserved 674 reserved 675
reserved 676 reserved 677 reserved 678 reserved 679
reserved 680 reserved 681 reserved 682 reserved 683
reserved 684 reserved 685 reserved 686 reserved 687
reserved 688 reserved 689 reserved 690 reserved 691
reserved 692 reserved 693 reserved 694 reserved 695
reserved 696 reserved 697 reserved 698 reserved 699
reserved 700 reserved 701 reserved 702 reserved 703
reserved 704 reserved 705 reserved 706 reserved 707
reserved 708 reserved 709 reserved 710 reserved 711
reserved 712 reserved 713 reserved 714 reserved 715
reserved 716 reserved 717 reserved 718 reserved 719
reserved 720 reserved 721 reserved 722 reserved 723
reserved 724 reserved 725 reserved 726 reserved 727
reserved 728 reserved 729 reserved 730 reserved 731
reserved 732 reserved 733 reserved 734 reserved 735
reserved 736 reserved 737 reserved 738 reserved 739
reserved 740 reserved 741 reserved 742 reserved 743
reserved 744 reserved 745 reserved 746 reserved 747
reserved 748 reserved 749 reserved 750 reserved 751
reserved 752 reserved 753 reserved 754 reserved 755
reserved 756 reserved 757 reserved 758 reserved 759
reserved 760 reserved 761 reserved 762 reserved 763
reserved 764 reserved 765 reserved 766 reserved 767
amx_ld_i8 768 amq_ld_i8 769 amx_st_i8 770 amr_st_i8 771
amx_swap_u8 772 amq_swap_u8 773 amr_swap_u8 774 amz_swap_u8 775
reserved 776 reserved 777 reserved 778 reserved 779
amx_ld_add_u8 780 amq_ld_add_u8 781 amr_ld_add_u8 782 amz_ld_add_u8 783
amx_ld_and_u8 784 amq_ld_and_u8 785 amr_ld_and_u8 786 amz_ld_and_u8 787
amx_ld_or_u8 788 amq_ld_or_u8 789 amr_ld_or_u8 790 amz_ld_or_u8 791
amx_ld_xor_u8 792 amq_ld_xor_u8 793 amr_ld_xor_u8 794 amz_ld_xor_u8 795
amx_ld_min_i8 796 amq_ld_min_i8 797 amr_ld_min_i8 798 amz_ld_min_i8 799
amx_ld_max_i8 800 amq_ld_max_i8 801 amr_ld_max_i8 802 amz_ld_max_i8 803
amx_ld_min_u8 804 amq_ld_min_u8 805 amr_ld_min_u8 806 amz_ld_min_u8 807
amx_ld_max_u8 808 amq_ld_max_u8 809 amr_ld_max_u8 810 amz_ld_max_u8 811
amx_st_add_u8 812 amr_st_add_u8 813 amx_st_and_u8 814 amr_st_and_u8 815
amx_st_or_u8 816 amr_st_or_u8 817 amx_st_xor_u8 818 amr_st_xor_u8 819
amx_st_min_i8 820 amr_st_min_i8 821 amx_st_max_i8 822 amr_st_max_i8 823
amx_st_min_u8 824 amr_st_min_u8 825 amx_st_max_u8 826 amr_st_max_u8 827
reserved 828 reserved 829 reserved 830 reserved 831
reserved 832 reserved 833 reserved 834 reserved 835
reserved 836 reserved 837 reserved 838 reserved 839
reserved 840 reserved 841 reserved 842 reserved 843
reserved 844 reserved 845 reserved 846 reserved 847
amx_ld_i16 848 amq_ld_i16 849 amx_st_i16 850 amr_st_i16 851
amx_swap_u16 852 amq_swap_u16 853 amr_swap_u16 854 amz_swap_u16 855
reserved 856 reserved 857 reserved 858 reserved 859
amx_ld_add_u16 860 amq_ld_add_u16 861 amr_ld_add_u16 862 amz_ld_add_u16 863
amx_ld_and_u16 864 amq_ld_and_u16 865 amr_ld_and_u16 866 amz_ld_and_u16 867
amx_ld_or_u16 868 amq_ld_or_u16 869 amr_ld_or_u16 870 amz_ld_or_u16 871
amx_ld_xor_u16 872 amq_ld_xor_u16 873 amr_ld_xor_u16 874 amz_ld_xor_u16 875
amx_ld_min_i16 876 amq_ld_min_i16 877 amr_ld_min_i16 878 amz_ld_min_i16 879
amx_ld_max_i16 880 amq_ld_max_i16 881 amr_ld_max_i16 882 amz_ld_max_i16 883
amx_ld_min_u16 884 amq_ld_min_u16 885 amr_ld_min_u16 886 amz_ld_min_u16 887
amx_ld_max_u16 888 amq_ld_max_u16 889 amr_ld_max_u16 890 amz_ld_max_u16 891
amx_st_add_u16 892 amr_st_add_u16 893 amx_st_and_u16 894 amr_st_and_u16 895
amx_st_or_u16 896 amr_st_or_u16 897 amx_st_xor_u16 898 amr_st_xor_u16 899
amx_st_min_i16 900 amr_st_min_i16 901 amx_st_max_i16 902 amr_st_max_i16 903
amx_st_min_u16 904 amr_st_min_u16 905 amx_st_max_u16 906 amr_st_max_u16 907
reserved 908 reserved 909 reserved 910 reserved 911
reserved 912 reserved 913 reserved 914 reserved 915
reserved 916 reserved 917 reserved 918 reserved 919
reserved 920 reserved 921 reserved 922 reserved 923
reserved 924 reserved 925 reserved 926 reserved 927
amx_ld_i32 928 amq_ld_i32 929 amx_st_i32 930 amr_st_i32 931
amx_swap_u32 932 amq_swap_u32 933 amr_swap_u32 934 amz_swap_u32 935
reserved 936 reserved 937 reserved 938 reserved 939
amx_ld_add_u32 940 amq_ld_add_u32 941 amr_ld_add_u32 942 amz_ld_add_u32 943
amx_ld_and_u32 944 amq_ld_and_u32 945 amr_ld_and_u32 946 amz_ld_and_u32 947
amx_ld_or_u32 948 amq_ld_or_u32 949 amr_ld_or_u32 950 amz_ld_or_u32 951
amx_ld_xor_u32 952 amq_ld_xor_u32 953 amr_ld_xor_u32 954 amz_ld_xor_u32 955
amx_ld_min_i32 956 amq_ld_min_i32 957 amr_ld_min_i32 958 amz_ld_min_i32 959
amx_ld_max_i32 960 amq_ld_max_i32 961 amr_ld_max_i32 962 amz_ld_max_i32 963
amx_ld_min_u32 964 amq_ld_min_u32 965 amr_ld_min_u32 966 amz_ld_min_u32 967
amx_ld_max_u32 968 amq_ld_max_u32 969 amr_ld_max_u32 970 amz_ld_max_u32 971
amx_st_add_u32 972 amr_st_add_u32 973 amx_st_and_u32 974 amr_st_and_u32 975
amx_st_or_u32 976 amr_st_or_u32 977 amx_st_xor_u32 978 amr_st_xor_u32 979
amx_st_min_i32 980 amr_st_min_i32 981 amx_st_max_i32 982 amr_st_max_i32 983
amx_st_min_u32 984 amr_st_min_u32 985 amx_st_max_u32 986 amr_st_max_u32 987
reserved 988 reserved 989 reserved 990 reserved 991
reserved 992 reserved 993 reserved 994 reserved 995
reserved 996 reserved 997 reserved 998 reserved 999
reserved 1000 reserved 1001 reserved 1002 reserved 1003
reserved 1004 reserved 1005 reserved 1006 reserved 1007
amx_ld_i64 1008 amq_ld_i64 1009 amx_st_i64 1010 amr_st_i64 1011
amx_swap_u64 1012 amq_swap_u64 1013 amr_swap_u64 1014 amz_swap_u64 1015
reserved 1016 reserved 1017 reserved 1018 reserved 1019
amx_ld_add_u64 1020 amq_ld_add_u64 1021 amr_ld_add_u64 1022 amz_ld_add_u64 1023
amx_ld_and_u64 1024 amq_ld_and_u64 1025 amr_ld_and_u64 1026 amz_ld_and_u64 1027
amx_ld_or_u64 1028 amq_ld_or_u64 1029 amr_ld_or_u64 1030 amz_ld_or_u64 1031
amx_ld_xor_u64 1032 amq_ld_xor_u64 1033 amr_ld_xor_u64 1034 amz_ld_xor_u64 1035
amx_ld_min_i64 1036 amq_ld_min_i64 1037 amr_ld_min_i64 1038 amz_ld_min_i64 1039
amx_ld_max_i64 1040 amq_ld_max_i64 1041 amr_ld_max_i64 1042 amz_ld_max_i64 1043
amx_ld_min_u64 1044 amq_ld_min_u64 1045 amr_ld_min_u64 1046 amz_ld_min_u64 1047
amx_ld_max_u64 1048 amq_ld_max_u64 1049 amr_ld_max_u64 1050 amz_ld_max_u64 1051
amx_st_add_u64 1052 amr_st_add_u64 1053 amx_st_and_u64 1054 amr_st_and_u64 1055
amx_st_or_u64 1056 amr_st_or_u64 1057 amx_st_xor_u64 1058 amr_st_xor_u64 1059
amx_st_min_i64 1060 amr_st_min_i64 1061 amx_st_max_i64 1062 amr_st_max_i64 1063
amx_st_min_u64 1064 amr_st_min_u64 1065 amx_st_max_u64 1066 amr_st_max_u64 1067
reserved 1068 reserved 1069 reserved 1070 reserved 1071
reserved 1072 reserved 1073 reserved 1074 reserved 1075
reserved 1076 reserved 1077 reserved 1078 reserved 1079
reserved 1080 reserved 1081 reserved 1082 reserved 1083
reserved 1084 reserved 1085 reserved 1086 reserved 1087
amx_ld_i128 1088 amq_ld_i128 1089 amx_st_i128 1090 amr_st_i128 1091
amx_swap_u128 1092 amq_swap_u128 1093 amr_swap_u128 1094 amz_swap_u128 1095
reserved 1096 reserved 1097 reserved 1098 reserved 1099
amx_ld_add_u128 1100 amq_ld_add_u128 1101 amr_ld_add_u128 1102 amz_ld_add_u128 1103
amx_ld_and_u128 1104 amq_ld_and_u128 1105 amr_ld_and_u128 1106 amz_ld_and_u128 1107
amx_ld_or_u128 1108 amq_ld_or_u128 1109 amr_ld_or_u128 1110 amz_ld_or_u128 1111
amx_ld_xor_u128 1112 amq_ld_xor_u128 1113 amr_ld_xor_u128 1114 amz_ld_xor_u128 1115
amx_ld_min_i128 1116 amq_ld_min_i128 1117 amr_ld_min_i128 1118 amz_ld_min_i128 1119
amx_ld_max_i128 1120 amq_ld_max_i128 1121 amr_ld_max_i128 1122 amz_ld_max_i128 1123
amx_ld_min_u128 1124 amq_ld_min_u128 1125 amr_ld_min_u128 1126 amz_ld_min_u128 1127
amx_ld_max_u128 1128 amq_ld_max_u128 1129 amr_ld_max_u128 1130 amz_ld_max_u128 1131
amx_st_add_u128 1132 amr_st_add_u128 1133 amx_st_and_u128 1134 amr_st_and_u128 1135
amx_st_or_u128 1136 amr_st_or_u128 1137 amx_st_xor_u128 1138 amr_st_xor_u128 1139
amx_st_min_i128 1140 amr_st_min_i128 1141 amx_st_max_i128 1142 amr_st_max_i128 1143
amx_st_min_u128 1144 amr_st_min_u128 1145 amx_st_max_u128 1146 amr_st_max_u128 1147
reserved 1148 reserved 1149 reserved 1150 reserved 1151
reserved 1152 reserved 1153 reserved 1154 reserved 1155
reserved 1156 reserved 1157 reserved 1158 reserved 1159
reserved 1160 reserved 1161 reserved 1162 reserved 1163
reserved 1164 reserved 1165 reserved 1166 reserved 1167
reserved 1168 reserved 1169 reserved 1170 reserved 1171
reserved 1172 reserved 1173 reserved 1174 reserved 1175
reserved 1176 reserved 1177 reserved 1178 reserved 1179
reserved 1180 reserved 1181 reserved 1182 reserved 1183
reserved 1184 reserved 1185 reserved 1186 reserved 1187
reserved 1188 reserved 1189 reserved 1190 reserved 1191
reserved 1192 reserved 1193 reserved 1194 reserved 1195
reserved 1196 reserved 1197 reserved 1198 reserved 1199
reserved 1200 reserved 1201 reserved 1202 reserved 1203
reserved 1204 reserved 1205 reserved 1206 reserved 1207
reserved 1208 reserved 1209 reserved 1210 reserved 1211
reserved 1212 reserved 1213 reserved 1214 reserved 1215
class_f16 1216 sqrt_f16 1217 rsqrt_f16 1218 add_f16 1219
sub_f16 1220 nadd_f16 1221 mul_f16 1222 nmul_f16 1223
div_f16 1224 neg_f16 1225 abs_f16 1226 nabs_f16 1227
abs_diff_f16 1228 nabs_diff_f16 1229 min_f16 1230 max_f16 1231
minnum_f16 1232 maxnum_f16 1233 abs_min_f16 1234 abs_max_f16 1235
round_f16 1236 ceil_f16 1237 floor_f16 1238 trunk_f16 1239
roundeven_f16 1240 nearbyint_f16 1241 rint_f16 1242 reserved 1243
cmp_oeq_f16 1244 cmp_one_f16 1245 cmp_olt_f16 1246 cmp_oge_f16 1247
cmp_o_f16 1248 cmp_ueq_f16 1249 cmp_une_f16 1250 cmp_ult_f16 1251
cmp_uge_f16 1252 cmp_u_f16 1253 reserved 1254 reserved 1255
trunk_f16_i32 1256 trunk_f16_u32 1257 cvt_i32_f16 1258 cvt_u32_f16 1259
trunk_f16_i64 1260 trunk_f16_u64 1261 cvt_i64_f16 1262 cvt_u64_f16 1263
trunk_f16_i128 1264 trunk_f16_u128 1265 cvt_i128_f16 1266 cvt_u128_f16 1267
reserved 1268 reserved 1269 reserved 1270 reserved 1271
extend_f16_f32 1272 extend_f16_f64 1273 cvt_f32_f16 1274 cvt_f64_f16 1275
reserved 1276 reserved 1277 reserved 1278 reserved 1279
reserved 1280 reserved 1281 reserved 1282 reserved 1283
reserved 1284 reserved 1285 reserved 1286 reserved 1287
reserved 1288 reserved 1289 reserved 1290 reserved 1291
reserved 1292 reserved 1293 reserved 1294 reserved 1295
neg_vf16 1296 abs_vf16 1297 nabs_vf16 1298 abs_diff_vf16 1299
nabs_diff_vf16 1300 rsqrt_vf16 1301 sqrt_vf16 1302 add_vf16 1303
sub_vf16 1304 nadd_vf16 1305 mul_vf16 1306 nmul_vf16 1307
div_vf16 1308 min_vf16 1309 max_vf16 1310 minnum_vf16 1311
maxnum_vf16 1312 abs_min_vf16 1313 abs_max_vf16 1314 round_vf16 1315
ceil_vf16 1316 floor_vf16 1317 trunk_vf16 1318 roundeven_vf16 1319
nearbyint_vf16 1320 rint_vf16 1321 reserved 1322 reserved 1323
cmp_oeq_vf16 1324 cmp_one_vf16 1325 cmp_olt_vf16 1326 cmp_oge_vf16 1327
cmp_o_vf16 1328 cmp_ueq_vf16 1329 cmp_une_vf16 1330 cmp_ult_vf16 1331
cmp_uge_vf16 1332 cmp_u_vf16 1333 reserved 1334 reserved 1335
add_alt_vf16 1336 sub_alt_vf16 1337 add_horiz_vf16 1338 sub_horiz_vf16 1339
mul_horiz_vf16 1340 dot_vf16 1341 merge_low_vf16 1342 merge_high_vf16 1343
unpack_high_vf16 1344 unpack_low_vf16 1345 pack_vf16 1346 trunk_vf16_vi16 1347
trunk_vf16_vu16 1348 cvt_vi16_vf16 1349 cvt_vu16_vf16 1350 reserved 1351
reserved 1352 reserved 1353 reserved 1354 reserved 1355
reserved 1356 reserved 1357 reserved 1358 reserved 1359
reserved 1360 reserved 1361 reserved 1362 reserved 1363
reserved 1364 reserved 1365 reserved 1366 reserved 1367
reserved 1368 reserved 1369 reserved 1370 reserved 1371
reserved 1372 reserved 1373 reserved 1374 reserved 1375
class_f32 1376 sqrt_f32 1377 rsqrt_f32 1378 add_f32 1379
sub_f32 1380 nadd_f32 1381 mul_f32 1382 nmul_f32 1383
div_f32 1384 neg_f32 1385 abs_f32 1386 nabs_f32 1387
abs_diff_f32 1388 nabs_diff_f32 1389 min_f32 1390 max_f32 1391
minnum_f32 1392 maxnum_f32 1393 abs_min_f32 1394 abs_max_f32 1395
round_f32 1396 ceil_f32 1397 floor_f32 1398 trunk_f32 1399
roundeven_f32 1400 nearbyint_f32 1401 rint_f32 1402 reserved 1403
cmp_oeq_f32 1404 cmp_one_f32 1405 cmp_olt_f32 1406 cmp_oge_f32 1407
cmp_o_f32 1408 cmp_ueq_f32 1409 cmp_une_f32 1410 cmp_ult_f32 1411
cmp_uge_f32 1412 cmp_u_f32 1413 reserved 1414 reserved 1415
trunk_f32_i32 1416 trunk_f32_u32 1417 cvt_i32_f32 1418 cvt_u32_f32 1419
trunk_f32_i64 1420 trunk_f32_u64 1421 cvt_i64_f32 1422 cvt_u64_f32 1423
trunk_f32_i128 1424 trunk_f32_u128 1425 cvt_i128_f32 1426 cvt_u128_f32 1427
reserved 1428 reserved 1429 reserved 1430 reserved 1431
extend_f32_f64 1432 cvt_f64_f32 1433 reserved 1434 reserved 1435
reserved 1436 reserved 1437 reserved 1438 reserved 1439
reserved 1440 reserved 1441 reserved 1442 reserved 1443
reserved 1444 reserved 1445 reserved 1446 reserved 1447
reserved 1448 reserved 1449 reserved 1450 reserved 1451
reserved 1452 reserved 1453 reserved 1454 reserved 1455
neg_vf32 1456 abs_vf32 1457 nabs_vf32 1458 abs_diff_vf32 1459
nabs_diff_vf32 1460 rsqrt_vf32 1461 sqrt_vf32 1462 add_vf32 1463
sub_vf32 1464 nadd_vf32 1465 mul_vf32 1466 nmul_vf32 1467
div_vf32 1468 min_vf32 1469 max_vf32 1470 minnum_vf32 1471
maxnum_vf32 1472 abs_min_vf32 1473 abs_max_vf32 1474 round_vf32 1475
ceil_vf32 1476 floor_vf32 1477 trunk_vf32 1478 roundeven_vf32 1479
nearbyint_vf32 1480 rint_vf32 1481 reserved 1482 reserved 1483
cmp_oeq_vf32 1484 cmp_one_vf32 1485 cmp_olt_vf32 1486 cmp_oge_vf32 1487
cmp_o_vf32 1488 cmp_ueq_vf32 1489 cmp_une_vf32 1490 cmp_ult_vf32 1491
cmp_uge_vf32 1492 cmp_u_vf32 1493 reserved 1494 reserved 1495
add_alt_vf32 1496 sub_alt_vf32 1497 add_horiz_vf32 1498 sub_horiz_vf32 1499
mul_horiz_vf32 1500 dot_vf32 1501 merge_low_vf32 1502 merge_high_vf32 1503
unpack_high_vf32 1504 unpack_low_vf32 1505 pack_vf32 1506 trunk_vf32_vi32 1507
trunk_vf32_vu32 1508 cvt_vi32_vf32 1509 cvt_vu32_vf32 1510 reserved 1511
reserved 1512 reserved 1513 reserved 1514 reserved 1515
reserved 1516 reserved 1517 reserved 1518 reserved 1519
reserved 1520 reserved 1521 reserved 1522 reserved 1523
reserved 1524 reserved 1525 reserved 1526 reserved 1527
reserved 1528 reserved 1529 reserved 1530 reserved 1531
reserved 1532 reserved 1533 reserved 1534 reserved 1535
class_f64 1536 sqrt_f64 1537 rsqrt_f64 1538 add_f64 1539
sub_f64 1540 nadd_f64 1541 mul_f64 1542 nmul_f64 1543
div_f64 1544 neg_f64 1545 abs_f64 1546 nabs_f64 1547
abs_diff_f64 1548 nabs_diff_f64 1549 min_f64 1550 max_f64 1551
minnum_f64 1552 maxnum_f64 1553 abs_min_f64 1554 abs_max_f64 1555
round_f64 1556 ceil_f64 1557 floor_f64 1558 trunk_f64 1559
roundeven_f64 1560 nearbyint_f64 1561 rint_f64 1562 reserved 1563
cmp_oeq_f64 1564 cmp_one_f64 1565 cmp_olt_f64 1566 cmp_oge_f64 1567
cmp_o_f64 1568 cmp_ueq_f64 1569 cmp_une_f64 1570 cmp_ult_f64 1571
cmp_uge_f64 1572 cmp_u_f64 1573 reserved 1574 reserved 1575
trunk_f64_i32 1576 trunk_f64_u32 1577 cvt_i32_f64 1578 cvt_u32_f64 1579
trunk_f64_i64 1580 trunk_f64_u64 1581 cvt_i64_f64 1582 cvt_u64_f64 1583
trunk_f64_i128 1584 trunk_f64_u128 1585 cvt_i128_f64 1586 cvt_u128_f64 1587
reserved 1588 reserved 1589 reserved 1590 reserved 1591
reserved 1592 reserved 1593 reserved 1594 reserved 1595
reserved 1596 reserved 1597 reserved 1598 reserved 1599
reserved 1600 reserved 1601 reserved 1602 reserved 1603
reserved 1604 reserved 1605 reserved 1606 reserved 1607
reserved 1608 reserved 1609 reserved 1610 reserved 1611
reserved 1612 reserved 1613 reserved 1614 reserved 1615
neg_vf64 1616 abs_vf64 1617 nabs_vf64 1618 abs_diff_vf64 1619
nabs_diff_vf64 1620 rsqrt_vf64 1621 sqrt_vf64 1622 add_vf64 1623
sub_vf64 1624 nadd_vf64 1625 mul_vf64 1626 nmul_vf64 1627
div_vf64 1628 min_vf64 1629 max_vf64 1630 minnum_vf64 1631
maxnum_vf64 1632 abs_min_vf64 1633 abs_max_vf64 1634 round_vf64 1635
ceil_vf64 1636 floor_vf64 1637 trunk_vf64 1638 roundeven_vf64 1639
nearbyint_vf64 1640 rint_vf64 1641 reserved 1642 reserved 1643
cmp_oeq_vf64 1644 cmp_one_vf64 1645 cmp_olt_vf64 1646 cmp_oge_vf64 1647
cmp_o_vf64 1648 cmp_ueq_vf64 1649 cmp_une_vf64 1650 cmp_ult_vf64 1651
cmp_uge_vf64 1652 cmp_u_vf64 1653 reserved 1654 reserved 1655
add_alt_vf64 1656 sub_alt_vf64 1657 add_horiz_vf64 1658 sub_horiz_vf64 1659
mul_horiz_vf64 1660 dot_vf64 1661 merge_low_vf64 1662 merge_high_vf64 1663
unpack_high_vf64 1664 unpack_low_vf64 1665 pack_vf64 1666 trunk_vf64_vi64 1667
trunk_vf64_vu64 1668 cvt_vi64_vf64 1669 cvt_vu64_vf64 1670 reserved 1671
reserved 1672 reserved 1673 reserved 1674 reserved 1675
reserved 1676 reserved 1677 reserved 1678 reserved 1679
reserved 1680 reserved 1681 reserved 1682 reserved 1683
reserved 1684 reserved 1685 reserved 1686 reserved 1687
reserved 1688 reserved 1689 reserved 1690 reserved 1691
reserved 1692 reserved 1693 reserved 1694 reserved 1695
class_f128 1696 sqrt_f128 1697 rsqrt_f128 1698 add_f128 1699
sub_f128 1700 nadd_f128 1701 mul_f128 1702 nmul_f128 1703
div_f128 1704 neg_f128 1705 abs_f128 1706 nabs_f128 1707
abs_diff_f128 1708 nabs_diff_f128 1709 min_f128 1710 max_f128 1711
minnum_f128 1712 maxnum_f128 1713 abs_min_f128 1714 abs_max_f128 1715
round_f128 1716 ceil_f128 1717 floor_f128 1718 trunk_f128 1719
roundeven_f128 1720 nearbyint_f128 1721 rint_f128 1722 reserved 1723
cmp_oeq_f128 1724 cmp_one_f128 1725 cmp_olt_f128 1726 cmp_oge_f128 1727
cmp_o_f128 1728 cmp_ueq_f128 1729 cmp_une_f128 1730 cmp_ult_f128 1731
cmp_uge_f128 1732 cmp_u_f128 1733 reserved 1734 reserved 1735
trunk_f128_i32 1736 trunk_f128_u32 1737 cvt_i32_f128 1738 cvt_u32_f128 1739
trunk_f128_i64 1740 trunk_f128_u64 1741 cvt_i64_f128 1742 cvt_u64_f128 1743
trunk_f128_i128 1744 trunk_f128_u128 1745 cvt_i128_f128 1746 cvt_u128_f128 1747
reserved 1748 reserved 1749 reserved 1750 reserved 1751
extend_f32_f128 1752 extend_f64_f128 1753 extend_f16_f128 1754 cvt_f128_f64 1755
cvt_f128_f32 1756 cvt_f128_f16 1757 scale_f128 1758 reserved 1759
reserved 1760 reserved 1761 reserved 1762 reserved 1763
reserved 1764 reserved 1765 reserved 1766 reserved 1767
reserved 1768 reserved 1769 reserved 1770 reserved 1771
reserved 1772 reserved 1773 reserved 1774 reserved 1775
reserved 1776 reserved 1777 reserved 1778 reserved 1779
reserved 1780 reserved 1781 reserved 1782 reserved 1783
reserved 1784 reserved 1785 reserved 1786 reserved 1787
reserved 1788 reserved 1789 reserved 1790 reserved 1791
max_vi8 1792 max_vu8 1793 min_vi8 1794 min_vu8 1795
add_vu8 1796 sub_vu8 1797 addo_vi8 1798 subo_vi8 1799
addc_vu8 1800 subb_vu8 1801 add_sat_vu8 1802 add_sat_vi8 1803
sub_sat_vi8 1804 sub_sat_vu8 1805 avg_vi8 1806 avg_vu8 1807
cmp_eq_vi8 1808 cmp_lt_vi8 1809 cmp_lt_vu8 1810 sll_vu8 1811
sll_imm_vu8 1812 srl_vu8 1813 srl_imm_vu8 1814 sra_vi8 1815
sra_imm_vi8 1816 rol_vu8 1817 ror_vu8 1818 merge_high_vu8 1819
merge_low_vu8 1820 reserved 1821 reserved 1822 reserved 1823
unpack_low_vi8 1824 unpack_high_vi8 1825 unpack_low_vu8 1826 unpack_high_vu8 1827
reserved 1828 reserved 1829 reserved 1830 reserved 1831
reserved 1832 reserved 1833 reserved 1834 reserved 1835
reserved 1836 reserved 1837 reserved 1838 reserved 1839
reserved 1840 reserved 1841 reserved 1842 reserved 1843
reserved 1844 reserved 1845 reserved 1846 reserved 1847
reserved 1848 reserved 1849 reserved 1850 reserved 1851
reserved 1852 reserved 1853 reserved 1854 reserved 1855
max_vi16 1856 max_vu16 1857 min_vi16 1858 min_vu16 1859
add_vu16 1860 sub_vu16 1861 addo_vi16 1862 subo_vi16 1863
addc_vu16 1864 subb_vu16 1865 add_sat_vu16 1866 add_sat_vi16 1867
sub_sat_vi16 1868 sub_sat_vu16 1869 avg_vi16 1870 avg_vu16 1871
cmp_eq_vi16 1872 cmp_lt_vi16 1873 cmp_lt_vu16 1874 sll_vu16 1875
sll_imm_vu16 1876 srl_vu16 1877 srl_imm_vu16 1878 sra_vi16 1879
sra_imm_vi16 1880 rol_vu16 1881 ror_vu16 1882 merge_high_vu16 1883
merge_low_vu16 1884 reserved 1885 reserved 1886 reserved 1887
unpack_low_vi16 1888 unpack_high_vi16 1889 unpack_low_vu16 1890 unpack_high_vu16 1891
pack_sat_vi16 1892 pack_sat_vu16 1893 pack_mod_vu16 1894 pack_usat_vi16 1895
reserved 1896 reserved 1897 reserved 1898 reserved 1899
reserved 1900 reserved 1901 reserved 1902 reserved 1903
reserved 1904 reserved 1905 reserved 1906 reserved 1907
reserved 1908 reserved 1909 reserved 1910 reserved 1911
reserved 1912 reserved 1913 reserved 1914 reserved 1915
reserved 1916 reserved 1917 reserved 1918 reserved 1919
max_vi32 1920 max_vu32 1921 min_vi32 1922 min_vu32 1923
add_vu32 1924 sub_vu32 1925 addo_vi32 1926 subo_vi32 1927
addc_vu32 1928 subb_vu32 1929 add_sat_vu32 1930 add_sat_vi32 1931
sub_sat_vi32 1932 sub_sat_vu32 1933 avg_vi32 1934 avg_vu32 1935
cmp_eq_vi32 1936 cmp_lt_vi32 1937 cmp_lt_vu32 1938 sll_vu32 1939
sll_imm_vu32 1940 srl_vu32 1941 srl_imm_vu32 1942 sra_vi32 1943
sra_imm_vi32 1944 rol_vu32 1945 ror_vu32 1946 merge_high_vu32 1947
merge_low_vu32 1948 reserved 1949 reserved 1950 reserved 1951
unpack_low_vi32 1952 unpack_high_vi32 1953 unpack_low_vu32 1954 unpack_high_vu32 1955
pack_sat_vi32 1956 pack_sat_vu32 1957 pack_mod_vu32 1958 pack_usat_vi32 1959
reserved 1960 reserved 1961 reserved 1962 reserved 1963
reserved 1964 reserved 1965 reserved 1966 reserved 1967
reserved 1968 reserved 1969 reserved 1970 reserved 1971
reserved 1972 reserved 1973 reserved 1974 reserved 1975
reserved 1976 reserved 1977 reserved 1978 reserved 1979
reserved 1980 reserved 1981 reserved 1982 reserved 1983
max_vi64 1984 max_vu64 1985 min_vi64 1986 min_vu64 1987
add_vu64 1988 sub_vu64 1989 addo_vi64 1990 subo_vi64 1991
addc_vu64 1992 subb_vu64 1993 add_sat_vu64 1994 add_sat_vi64 1995
sub_sat_vi64 1996 sub_sat_vu64 1997 avg_vi64 1998 avg_vu64 1999
cmp_eq_vi64 2000 cmp_lt_vi64 2001 cmp_lt_vu64 2002 sll_vu64 2003
sll_imm_vu64 2004 srl_vu64 2005 srl_imm_vu64 2006 sra_vi64 2007
sra_imm_vi64 2008 rol_vu64 2009 ror_vu64 2010 merge_high_vu64 2011
merge_low_vu64 2012 reserved 2013 reserved 2014 reserved 2015
reserved 2016 reserved 2017 reserved 2018 reserved 2019
pack_sat_vi64 2020 pack_sat_vu64 2021 pack_mod_vu64 2022 pack_usat_vi64 2023
reserved 2024 reserved 2025 reserved 2026 reserved 2027
reserved 2028 reserved 2029 reserved 2030 reserved 2031
reserved 2032 reserved 2033 reserved 2034 reserved 2035
reserved 2036 reserved 2037 reserved 2038 reserved 2039
reserved 2040 reserved 2041 reserved 2042 reserved 2043
reserved 2044 reserved 2045 reserved 2046 reserved 2047

machine instruction statistics

statistics by instruction subsets:
instruction subset all hardwired pseudo-ops
sum: 1423 1418 5
base 180 180 0
memory 98 98 0
branch 74 74 0
jump 28 28 0
nullifying 74 74 0
bitmanip 14 14 0
i128 41 41 0
f128 62 61 1
f64 114 112 2
f32 116 114 2
f16 116 116 0
mmx 140 140 0
special 19 19 0
atomic 304 304 0
privileged 17 17 0
cipher 10 10 0
group 16 16 0

statistics by instruction opcodes (1418 codes, 16 groups):
opcode num
primary opcodes 118
fused 67
nul_misc 4
nul_32 16
nul_64 16
nul_128 16
br_misc 4
br_32 16
br_64 16
br_128 16
loop 16
mem_xi64 15
mem_xi32 15
mem_xu32 15
fma 74
raopx 14
misc 980