Reference signatures for the GCC MSA built-ins (generated from the GCC docs with https://gist.github.com/gnzlbg/3a27cca666f4a9f8a51d2eb008ce914c):
fn add_a_b(i8x16, i8x16) -> i8x16
fn add_a_h(i16x8, i16x8) -> i16x8
fn add_a_w(i32x4, i32x4) -> i32x4
fn add_a_d(i64x2, i64x2) -> i64x2
fn adds_a_b(i8x16, i8x16) -> i8x16
fn adds_a_h(i16x8, i16x8) -> i16x8
fn adds_a_w(i32x4, i32x4) -> i32x4
fn adds_a_d(i64x2, i64x2) -> i64x2
fn adds_s_b(i8x16, i8x16) -> i8x16
fn adds_s_h(i16x8, i16x8) -> i16x8
fn adds_s_w(i32x4, i32x4) -> i32x4
fn adds_s_d(i64x2, i64x2) -> i64x2
fn adds_u_b(u8x16, u8x16) -> u8x16
fn adds_u_h(u16x8, u16x8) -> u16x8
fn adds_u_w(u32x4, u32x4) -> u32x4
fn adds_u_d(u64x2, u64x2) -> u64x2
fn addv_b(i8x16, i8x16) -> i8x16
fn addv_h(i16x8, i16x8) -> i16x8
fn addv_w(i32x4, i32x4) -> i32x4
fn addv_d(i64x2, i64x2) -> i64x2
fn addvi_b(i8x16, imm0_31) -> i8x16
fn addvi_h(i16x8, imm0_31) -> i16x8
fn addvi_w(i32x4, imm0_31) -> i32x4
fn addvi_d(i64x2, imm0_31) -> i64x2
fn and_v(u8x16, u8x16) -> u8x16
fn andi_b(u8x16, imm0_255) -> u8x16
fn asub_s_b(i8x16, i8x16) -> i8x16
fn asub_s_h(i16x8, i16x8) -> i16x8
fn asub_s_w(i32x4, i32x4) -> i32x4
fn asub_s_d(i64x2, i64x2) -> i64x2
fn asub_u_b(u8x16, u8x16) -> u8x16
fn asub_u_h(u16x8, u16x8) -> u16x8
fn asub_u_w(u32x4, u32x4) -> u32x4
fn asub_u_d(u64x2, u64x2) -> u64x2
fn ave_s_b(i8x16, i8x16) -> i8x16
fn ave_s_h(i16x8, i16x8) -> i16x8
fn ave_s_w(i32x4, i32x4) -> i32x4
fn ave_s_d(i64x2, i64x2) -> i64x2
fn ave_u_b(u8x16, u8x16) -> u8x16
fn ave_u_h(u16x8, u16x8) -> u16x8
fn ave_u_w(u32x4, u32x4) -> u32x4
fn ave_u_d(u64x2, u64x2) -> u64x2
fn aver_s_b(i8x16, i8x16) -> i8x16
fn aver_s_h(i16x8, i16x8) -> i16x8
fn aver_s_w(i32x4, i32x4) -> i32x4
fn aver_s_d(i64x2, i64x2) -> i64x2
fn aver_u_b(u8x16, u8x16) -> u8x16
fn aver_u_h(u16x8, u16x8) -> u16x8
fn aver_u_w(u32x4, u32x4) -> u32x4
fn aver_u_d(u64x2, u64x2) -> u64x2
fn bclr_b(u8x16, u8x16) -> u8x16
fn bclr_h(u16x8, u16x8) -> u16x8
fn bclr_w(u32x4, u32x4) -> u32x4
fn bclr_d(u64x2, u64x2) -> u64x2
fn bclri_b(u8x16, imm0_7) -> u8x16
fn bclri_h(u16x8, imm0_15) -> u16x8
fn bclri_w(u32x4, imm0_31) -> u32x4
fn bclri_d(u64x2, imm0_63) -> u64x2
fn binsl_b(u8x16, u8x16, u8x16) -> u8x16
fn binsl_h(u16x8, u16x8, u16x8) -> u16x8
fn binsl_w(u32x4, u32x4, u32x4) -> u32x4
fn binsl_d(u64x2, u64x2, u64x2) -> u64x2
fn binsli_b(u8x16, u8x16, imm0_7) -> u8x16
fn binsli_h(u16x8, u16x8, imm0_15) -> u16x8
fn binsli_w(u32x4, u32x4, imm0_31) -> u32x4
fn binsli_d(u64x2, u64x2, imm0_63) -> u64x2
fn binsr_b(u8x16, u8x16, u8x16) -> u8x16
fn binsr_h(u16x8, u16x8, u16x8) -> u16x8
fn binsr_w(u32x4, u32x4, u32x4) -> u32x4
fn binsr_d(u64x2, u64x2, u64x2) -> u64x2
fn binsri_b(u8x16, u8x16, imm0_7) -> u8x16
fn binsri_h(u16x8, u16x8, imm0_15) -> u16x8
fn binsri_w(u32x4, u32x4, imm0_31) -> u32x4
fn binsri_d(u64x2, u64x2, imm0_63) -> u64x2
fn bmnz_v(u8x16, u8x16, u8x16) -> u8x16
fn bmnzi_b(u8x16, u8x16, imm0_255) -> u8x16
fn bmz_v(u8x16, u8x16, u8x16) -> u8x16
fn bmzi_b(u8x16, u8x16, imm0_255) -> u8x16
fn bneg_b(u8x16, u8x16) -> u8x16
fn bneg_h(u16x8, u16x8) -> u16x8
fn bneg_w(u32x4, u32x4) -> u32x4
fn bneg_d(u64x2, u64x2) -> u64x2
fn bnegi_b(u8x16, imm0_7) -> u8x16
fn bnegi_h(u16x8, imm0_15) -> u16x8
fn bnegi_w(u32x4, imm0_31) -> u32x4
fn bnegi_d(u64x2, imm0_63) -> u64x2
fn bnz_b(u8x16) -> i32
fn bnz_h(u16x8) -> i32
fn bnz_w(u32x4) -> i32
fn bnz_d(u64x2) -> i32
fn bnz_v(u8x16) -> i32
fn bsel_v(u8x16, u8x16, u8x16) -> u8x16
fn bseli_b(u8x16, u8x16, imm0_255) -> u8x16
fn bset_b(u8x16, u8x16) -> u8x16
fn bset_h(u16x8, u16x8) -> u16x8
fn bset_w(u32x4, u32x4) -> u32x4
fn bset_d(u64x2, u64x2) -> u64x2
fn bseti_b(u8x16, imm0_7) -> u8x16
fn bseti_h(u16x8, imm0_15) -> u16x8
fn bseti_w(u32x4, imm0_31) -> u32x4
fn bseti_d(u64x2, imm0_63) -> u64x2
fn bz_b(u8x16) -> i32
fn bz_h(u16x8) -> i32
fn bz_w(u32x4) -> i32
fn bz_d(u64x2) -> i32
fn bz_v(u8x16) -> i32
fn ceq_b(i8x16, i8x16) -> i8x16
fn ceq_h(i16x8, i16x8) -> i16x8
fn ceq_w(i32x4, i32x4) -> i32x4
fn ceq_d(i64x2, i64x2) -> i64x2
fn ceqi_b(i8x16, imm_n16_15) -> i8x16
fn ceqi_h(i16x8, imm_n16_15) -> i16x8
fn ceqi_w(i32x4, imm_n16_15) -> i32x4
fn ceqi_d(i64x2, imm_n16_15) -> i64x2
fn cfcmsa(imm0_31) -> i32
fn cle_s_b(i8x16, i8x16) -> i8x16
fn cle_s_h(i16x8, i16x8) -> i16x8
fn cle_s_w(i32x4, i32x4) -> i32x4
fn cle_s_d(i64x2, i64x2) -> i64x2
fn cle_u_b(u8x16, u8x16) -> i8x16
fn cle_u_h(u16x8, u16x8) -> i16x8
fn cle_u_w(u32x4, u32x4) -> i32x4
fn cle_u_d(u64x2, u64x2) -> i64x2
fn clei_s_b(i8x16, imm_n16_15) -> i8x16
fn clei_s_h(i16x8, imm_n16_15) -> i16x8
fn clei_s_w(i32x4, imm_n16_15) -> i32x4
fn clei_s_d(i64x2, imm_n16_15) -> i64x2
fn clei_u_b(u8x16, imm0_31) -> i8x16
fn clei_u_h(u16x8, imm0_31) -> i16x8
fn clei_u_w(u32x4, imm0_31) -> i32x4
fn clei_u_d(u64x2, imm0_31) -> i64x2
fn clt_s_b(i8x16, i8x16) -> i8x16
fn clt_s_h(i16x8, i16x8) -> i16x8
fn clt_s_w(i32x4, i32x4) -> i32x4
fn clt_s_d(i64x2, i64x2) -> i64x2
fn clt_u_b(u8x16, u8x16) -> i8x16
fn clt_u_h(u16x8, u16x8) -> i16x8
fn clt_u_w(u32x4, u32x4) -> i32x4
fn clt_u_d(u64x2, u64x2) -> i64x2
fn clti_s_b(i8x16, imm_n16_15) -> i8x16
fn clti_s_h(i16x8, imm_n16_15) -> i16x8
fn clti_s_w(i32x4, imm_n16_15) -> i32x4
fn clti_s_d(i64x2, imm_n16_15) -> i64x2
fn clti_u_b(u8x16, imm0_31) -> i8x16
fn clti_u_h(u16x8, imm0_31) -> i16x8
fn clti_u_w(u32x4, imm0_31) -> i32x4
fn clti_u_d(u64x2, imm0_31) -> i64x2
fn copy_s_b(i8x16, imm0_15) -> i32
fn copy_s_h(i16x8, imm0_7) -> i32
fn copy_s_w(i32x4, imm0_3) -> i32
fn copy_s_d(i64x2, imm0_1) -> i64
fn copy_u_b(i8x16, imm0_15) -> u32
fn copy_u_h(i16x8, imm0_7) -> u32
fn copy_u_w(i32x4, imm0_3) -> u32
fn copy_u_d(i64x2, imm0_1) -> u64
fn ctcmsa(imm0_31, i32) -> ()
fn div_s_b(i8x16, i8x16) -> i8x16
fn div_s_h(i16x8, i16x8) -> i16x8
fn div_s_w(i32x4, i32x4) -> i32x4
fn div_s_d(i64x2, i64x2) -> i64x2
fn div_u_b(u8x16, u8x16) -> u8x16
fn div_u_h(u16x8, u16x8) -> u16x8
fn div_u_w(u32x4, u32x4) -> u32x4
fn div_u_d(u64x2, u64x2) -> u64x2
fn dotp_s_h(i8x16, i8x16) -> i16x8
fn dotp_s_w(i16x8, i16x8) -> i32x4
fn dotp_s_d(i32x4, i32x4) -> i64x2
fn dotp_u_h(u8x16, u8x16) -> u16x8
fn dotp_u_w(u16x8, u16x8) -> u32x4
fn dotp_u_d(u32x4, u32x4) -> u64x2
fn dpadd_s_h(i16x8, i8x16, i8x16) -> i16x8
fn dpadd_s_w(i32x4, i16x8, i16x8) -> i32x4
fn dpadd_s_d(i64x2, i32x4, i32x4) -> i64x2
fn dpadd_u_h(u16x8, u8x16, u8x16) -> u16x8
fn dpadd_u_w(u32x4, u16x8, u16x8) -> u32x4
fn dpadd_u_d(u64x2, u32x4, u32x4) -> u64x2
fn dpsub_s_h(i16x8, i8x16, i8x16) -> i16x8
fn dpsub_s_w(i32x4, i16x8, i16x8) -> i32x4
fn dpsub_s_d(i64x2, i32x4, i32x4) -> i64x2
fn dpsub_u_h(i16x8, u8x16, u8x16) -> i16x8
fn dpsub_u_w(i32x4, u16x8, u16x8) -> i32x4
fn dpsub_u_d(i64x2, u32x4, u32x4) -> i64x2
fn fadd_w(f32x4, f32x4) -> f32x4
fn fadd_d(f64x2, f64x2) -> f64x2
fn fcaf_w(f32x4, f32x4) -> i32x4
fn fcaf_d(f64x2, f64x2) -> i64x2
fn fceq_w(f32x4, f32x4) -> i32x4
fn fceq_d(f64x2, f64x2) -> i64x2
fn fclass_w(f32x4) -> i32x4
fn fclass_d(f64x2) -> i64x2
fn fcle_w(f32x4, f32x4) -> i32x4
fn fcle_d(f64x2, f64x2) -> i64x2
fn fclt_w(f32x4, f32x4) -> i32x4
fn fclt_d(f64x2, f64x2) -> i64x2
fn fcne_w(f32x4, f32x4) -> i32x4
fn fcne_d(f64x2, f64x2) -> i64x2
fn fcor_w(f32x4, f32x4) -> i32x4
fn fcor_d(f64x2, f64x2) -> i64x2
fn fcueq_w(f32x4, f32x4) -> i32x4
fn fcueq_d(f64x2, f64x2) -> i64x2
fn fcule_w(f32x4, f32x4) -> i32x4
fn fcule_d(f64x2, f64x2) -> i64x2
fn fcult_w(f32x4, f32x4) -> i32x4
fn fcult_d(f64x2, f64x2) -> i64x2
fn fcun_w(f32x4, f32x4) -> i32x4
fn fcun_d(f64x2, f64x2) -> i64x2
fn fcune_w(f32x4, f32x4) -> i32x4
fn fcune_d(f64x2, f64x2) -> i64x2
fn fdiv_w(f32x4, f32x4) -> f32x4
fn fdiv_d(f64x2, f64x2) -> f64x2
fn fexdo_h(f32x4, f32x4) -> i16x8
fn fexdo_w(f64x2, f64x2) -> f32x4
fn fexp2_w(f32x4, i32x4) -> f32x4
fn fexp2_d(f64x2, i64x2) -> f64x2
fn fexupl_w(i16x8) -> f32x4
fn fexupl_d(f32x4) -> f64x2
fn fexupr_w(i16x8) -> f32x4
fn fexupr_d(f32x4) -> f64x2
fn ffint_s_w(i32x4) -> f32x4
fn ffint_s_d(i64x2) -> f64x2
fn ffint_u_w(u32x4) -> f32x4
fn ffint_u_d(u64x2) -> f64x2
fn ffql_w(i16x8) -> f32x4
fn ffql_d(i32x4) -> f64x2
fn ffqr_w(i16x8) -> f32x4
fn ffqr_d(i32x4) -> f64x2
fn fill_b(i32) -> i8x16
fn fill_h(i32) -> i16x8
fn fill_w(i32) -> i32x4
fn fill_d(i64) -> i64x2
fn flog2_w(f32x4) -> f32x4
fn flog2_d(f64x2) -> f64x2
fn fmadd_w(f32x4, f32x4, f32x4) -> f32x4
fn fmadd_d(f64x2, f64x2, f64x2) -> f64x2
fn fmax_w(f32x4, f32x4) -> f32x4
fn fmax_d(f64x2, f64x2) -> f64x2
fn fmax_a_w(f32x4, f32x4) -> f32x4
fn fmax_a_d(f64x2, f64x2) -> f64x2
fn fmin_w(f32x4, f32x4) -> f32x4
fn fmin_d(f64x2, f64x2) -> f64x2
fn fmin_a_w(f32x4, f32x4) -> f32x4
fn fmin_a_d(f64x2, f64x2) -> f64x2
fn fmsub_w(f32x4, f32x4, f32x4) -> f32x4
fn fmsub_d(f64x2, f64x2, f64x2) -> f64x2
fn fmul_w(f32x4, f32x4) -> f32x4
fn fmul_d(f64x2, f64x2) -> f64x2
fn frint_w(f32x4) -> f32x4
fn frint_d(f64x2) -> f64x2
fn frcp_w(f32x4) -> f32x4
fn frcp_d(f64x2) -> f64x2
fn frsqrt_w(f32x4) -> f32x4
fn frsqrt_d(f64x2) -> f64x2
fn fsaf_w(f32x4, f32x4) -> i32x4
fn fsaf_d(f64x2, f64x2) -> i64x2
fn fseq_w(f32x4, f32x4) -> i32x4
fn fseq_d(f64x2, f64x2) -> i64x2
fn fsle_w(f32x4, f32x4) -> i32x4
fn fsle_d(f64x2, f64x2) -> i64x2
fn fslt_w(f32x4, f32x4) -> i32x4
fn fslt_d(f64x2, f64x2) -> i64x2
fn fsne_w(f32x4, f32x4) -> i32x4
fn fsne_d(f64x2, f64x2) -> i64x2
fn fsor_w(f32x4, f32x4) -> i32x4
fn fsor_d(f64x2, f64x2) -> i64x2
fn fsqrt_w(f32x4) -> f32x4
fn fsqrt_d(f64x2) -> f64x2
fn fsub_w(f32x4, f32x4) -> f32x4
fn fsub_d(f64x2, f64x2) -> f64x2
fn fsueq_w(f32x4, f32x4) -> i32x4
fn fsueq_d(f64x2, f64x2) -> i64x2
fn fsule_w(f32x4, f32x4) -> i32x4
fn fsule_d(f64x2, f64x2) -> i64x2
fn fsult_w(f32x4, f32x4) -> i32x4
fn fsult_d(f64x2, f64x2) -> i64x2
fn fsun_w(f32x4, f32x4) -> i32x4
fn fsun_d(f64x2, f64x2) -> i64x2
fn fsune_w(f32x4, f32x4) -> i32x4
fn fsune_d(f64x2, f64x2) -> i64x2
fn ftint_s_w(f32x4) -> i32x4
fn ftint_s_d(f64x2) -> i64x2
fn ftint_u_w(f32x4) -> u32x4
fn ftint_u_d(f64x2) -> u64x2
fn ftq_h(f32x4, f32x4) -> i16x8
fn ftq_w(f64x2, f64x2) -> i32x4
fn ftrunc_s_w(f32x4) -> i32x4
fn ftrunc_s_d(f64x2) -> i64x2
fn ftrunc_u_w(f32x4) -> u32x4
fn ftrunc_u_d(f64x2) -> u64x2
fn hadd_s_h(i8x16, i8x16) -> i16x8
fn hadd_s_w(i16x8, i16x8) -> i32x4
fn hadd_s_d(i32x4, i32x4) -> i64x2
fn hadd_u_h(u8x16, u8x16) -> u16x8
fn hadd_u_w(u16x8, u16x8) -> u32x4
fn hadd_u_d(u32x4, u32x4) -> u64x2
fn hsub_s_h(i8x16, i8x16) -> i16x8
fn hsub_s_w(i16x8, i16x8) -> i32x4
fn hsub_s_d(i32x4, i32x4) -> i64x2
fn hsub_u_h(u8x16, u8x16) -> i16x8
fn hsub_u_w(u16x8, u16x8) -> i32x4
fn hsub_u_d(u32x4, u32x4) -> i64x2
fn ilvev_b(i8x16, i8x16) -> i8x16
fn ilvev_h(i16x8, i16x8) -> i16x8
fn ilvev_w(i32x4, i32x4) -> i32x4
fn ilvev_d(i64x2, i64x2) -> i64x2
fn ilvl_b(i8x16, i8x16) -> i8x16
fn ilvl_h(i16x8, i16x8) -> i16x8
fn ilvl_w(i32x4, i32x4) -> i32x4
fn ilvl_d(i64x2, i64x2) -> i64x2
fn ilvod_b(i8x16, i8x16) -> i8x16
fn ilvod_h(i16x8, i16x8) -> i16x8
fn ilvod_w(i32x4, i32x4) -> i32x4
fn ilvod_d(i64x2, i64x2) -> i64x2
fn ilvr_b(i8x16, i8x16) -> i8x16
fn ilvr_h(i16x8, i16x8) -> i16x8
fn ilvr_w(i32x4, i32x4) -> i32x4
fn ilvr_d(i64x2, i64x2) -> i64x2
fn insert_b(i8x16, imm0_15, i32) -> i8x16
fn insert_h(i16x8, imm0_7, i32) -> i16x8
fn insert_w(i32x4, imm0_3, i32) -> i32x4
fn insert_d(i64x2, imm0_1, i64) -> i64x2
fn insve_b(i8x16, imm0_15, i8x16) -> i8x16
fn insve_h(i16x8, imm0_7, i16x8) -> i16x8
fn insve_w(i32x4, imm0_3, i32x4) -> i32x4
fn insve_d(i64x2, imm0_1, i64x2) -> i64x2
fn ld_b(*mut c_void, imm_n512_511) -> i8x16
fn ld_h(*mut c_void, imm_n1024_1022) -> i16x8
fn ld_w(*mut c_void, imm_n2048_2044) -> i32x4
fn ld_d(*mut c_void, imm_n4096_4088) -> i64x2
fn ldi_b(imm_n512_511) -> i8x16
fn ldi_h(imm_n512_511) -> i16x8
fn ldi_w(imm_n512_511) -> i32x4
fn ldi_d(imm_n512_511) -> i64x2
fn madd_q_h(i16x8, i16x8, i16x8) -> i16x8
fn madd_q_w(i32x4, i32x4, i32x4) -> i32x4
fn maddr_q_h(i16x8, i16x8, i16x8) -> i16x8
fn maddr_q_w(i32x4, i32x4, i32x4) -> i32x4
fn maddv_b(i8x16, i8x16, i8x16) -> i8x16
fn maddv_h(i16x8, i16x8, i16x8) -> i16x8
fn maddv_w(i32x4, i32x4, i32x4) -> i32x4
fn maddv_d(i64x2, i64x2, i64x2) -> i64x2
fn max_a_b(i8x16, i8x16) -> i8x16
fn max_a_h(i16x8, i16x8) -> i16x8
fn max_a_w(i32x4, i32x4) -> i32x4
fn max_a_d(i64x2, i64x2) -> i64x2
fn max_s_b(i8x16, i8x16) -> i8x16
fn max_s_h(i16x8, i16x8) -> i16x8
fn max_s_w(i32x4, i32x4) -> i32x4
fn max_s_d(i64x2, i64x2) -> i64x2
fn max_u_b(u8x16, u8x16) -> u8x16
fn max_u_h(u16x8, u16x8) -> u16x8
fn max_u_w(u32x4, u32x4) -> u32x4
fn max_u_d(u64x2, u64x2) -> u64x2
fn maxi_s_b(i8x16, imm_n16_15) -> i8x16
fn maxi_s_h(i16x8, imm_n16_15) -> i16x8
fn maxi_s_w(i32x4, imm_n16_15) -> i32x4
fn maxi_s_d(i64x2, imm_n16_15) -> i64x2
fn maxi_u_b(u8x16, imm0_31) -> u8x16
fn maxi_u_h(u16x8, imm0_31) -> u16x8
fn maxi_u_w(u32x4, imm0_31) -> u32x4
fn maxi_u_d(u64x2, imm0_31) -> u64x2
fn min_a_b(i8x16, i8x16) -> i8x16
fn min_a_h(i16x8, i16x8) -> i16x8
fn min_a_w(i32x4, i32x4) -> i32x4
fn min_a_d(i64x2, i64x2) -> i64x2
fn min_s_b(i8x16, i8x16) -> i8x16
fn min_s_h(i16x8, i16x8) -> i16x8
fn min_s_w(i32x4, i32x4) -> i32x4
fn min_s_d(i64x2, i64x2) -> i64x2
fn min_u_b(u8x16, u8x16) -> u8x16
fn min_u_h(u16x8, u16x8) -> u16x8
fn min_u_w(u32x4, u32x4) -> u32x4
fn min_u_d(u64x2, u64x2) -> u64x2
fn mini_s_b(i8x16, imm_n16_15) -> i8x16
fn mini_s_h(i16x8, imm_n16_15) -> i16x8
fn mini_s_w(i32x4, imm_n16_15) -> i32x4
fn mini_s_d(i64x2, imm_n16_15) -> i64x2
fn mini_u_b(u8x16, imm0_31) -> u8x16
fn mini_u_h(u16x8, imm0_31) -> u16x8
fn mini_u_w(u32x4, imm0_31) -> u32x4
fn mini_u_d(u64x2, imm0_31) -> u64x2
fn mod_s_b(i8x16, i8x16) -> i8x16
fn mod_s_h(i16x8, i16x8) -> i16x8
fn mod_s_w(i32x4, i32x4) -> i32x4
fn mod_s_d(i64x2, i64x2) -> i64x2
fn mod_u_b(u8x16, u8x16) -> u8x16
fn mod_u_h(u16x8, u16x8) -> u16x8
fn mod_u_w(u32x4, u32x4) -> u32x4
fn mod_u_d(u64x2, u64x2) -> u64x2
fn move_v(i8x16) -> i8x16
fn msub_q_h(i16x8, i16x8, i16x8) -> i16x8
fn msub_q_w(i32x4, i32x4, i32x4) -> i32x4
fn msubr_q_h(i16x8, i16x8, i16x8) -> i16x8
fn msubr_q_w(i32x4, i32x4, i32x4) -> i32x4
fn msubv_b(i8x16, i8x16, i8x16) -> i8x16
fn msubv_h(i16x8, i16x8, i16x8) -> i16x8
fn msubv_w(i32x4, i32x4, i32x4) -> i32x4
fn msubv_d(i64x2, i64x2, i64x2) -> i64x2
fn mul_q_h(i16x8, i16x8) -> i16x8
fn mul_q_w(i32x4, i32x4) -> i32x4
fn mulr_q_h(i16x8, i16x8) -> i16x8
fn mulr_q_w(i32x4, i32x4) -> i32x4
fn mulv_b(i8x16, i8x16) -> i8x16
fn mulv_h(i16x8, i16x8) -> i16x8
fn mulv_w(i32x4, i32x4) -> i32x4
fn mulv_d(i64x2, i64x2) -> i64x2
fn nloc_b(i8x16) -> i8x16
fn nloc_h(i16x8) -> i16x8
fn nloc_w(i32x4) -> i32x4
fn nloc_d(i64x2) -> i64x2
fn nlzc_b(i8x16) -> i8x16
fn nlzc_h(i16x8) -> i16x8
fn nlzc_w(i32x4) -> i32x4
fn nlzc_d(i64x2) -> i64x2
fn nor_v(u8x16, u8x16) -> u8x16
fn nori_b(u8x16, imm0_255) -> u8x16
fn or_v(u8x16, u8x16) -> u8x16
fn ori_b(u8x16, imm0_255) -> u8x16
fn pckev_b(i8x16, i8x16) -> i8x16
fn pckev_h(i16x8, i16x8) -> i16x8
fn pckev_w(i32x4, i32x4) -> i32x4
fn pckev_d(i64x2, i64x2) -> i64x2
fn pckod_b(i8x16, i8x16) -> i8x16
fn pckod_h(i16x8, i16x8) -> i16x8
fn pckod_w(i32x4, i32x4) -> i32x4
fn pckod_d(i64x2, i64x2) -> i64x2
fn pcnt_b(i8x16) -> i8x16
fn pcnt_h(i16x8) -> i16x8
fn pcnt_w(i32x4) -> i32x4
fn pcnt_d(i64x2) -> i64x2
fn sat_s_b(i8x16, imm0_7) -> i8x16
fn sat_s_h(i16x8, imm0_15) -> i16x8
fn sat_s_w(i32x4, imm0_31) -> i32x4
fn sat_s_d(i64x2, imm0_63) -> i64x2
fn sat_u_b(u8x16, imm0_7) -> u8x16
fn sat_u_h(u16x8, imm0_15) -> u16x8
fn sat_u_w(u32x4, imm0_31) -> u32x4
fn sat_u_d(u64x2, imm0_63) -> u64x2
fn shf_b(i8x16, imm0_255) -> i8x16
fn shf_h(i16x8, imm0_255) -> i16x8
fn shf_w(i32x4, imm0_255) -> i32x4
fn sld_b(i8x16, i8x16, i32) -> i8x16
fn sld_h(i16x8, i16x8, i32) -> i16x8
fn sld_w(i32x4, i32x4, i32) -> i32x4
fn sld_d(i64x2, i64x2, i32) -> i64x2
fn sldi_b(i8x16, i8x16, imm0_15) -> i8x16
fn sldi_h(i16x8, i16x8, imm0_7) -> i16x8
fn sldi_w(i32x4, i32x4, imm0_3) -> i32x4
fn sldi_d(i64x2, i64x2, imm0_1) -> i64x2
fn sll_b(i8x16, i8x16) -> i8x16
fn sll_h(i16x8, i16x8) -> i16x8
fn sll_w(i32x4, i32x4) -> i32x4
fn sll_d(i64x2, i64x2) -> i64x2
fn slli_b(i8x16, imm0_7) -> i8x16
fn slli_h(i16x8, imm0_15) -> i16x8
fn slli_w(i32x4, imm0_31) -> i32x4
fn slli_d(i64x2, imm0_63) -> i64x2
fn splat_b(i8x16, i32) -> i8x16
fn splat_h(i16x8, i32) -> i16x8
fn splat_w(i32x4, i32) -> i32x4
fn splat_d(i64x2, i32) -> i64x2
fn splati_b(i8x16, imm0_15) -> i8x16
fn splati_h(i16x8, imm0_7) -> i16x8
fn splati_w(i32x4, imm0_3) -> i32x4
fn splati_d(i64x2, imm0_1) -> i64x2
fn sra_b(i8x16, i8x16) -> i8x16
fn sra_h(i16x8, i16x8) -> i16x8
fn sra_w(i32x4, i32x4) -> i32x4
fn sra_d(i64x2, i64x2) -> i64x2
fn srai_b(i8x16, imm0_7) -> i8x16
fn srai_h(i16x8, imm0_15) -> i16x8
fn srai_w(i32x4, imm0_31) -> i32x4
fn srai_d(i64x2, imm0_63) -> i64x2
fn srar_b(i8x16, i8x16) -> i8x16
fn srar_h(i16x8, i16x8) -> i16x8
fn srar_w(i32x4, i32x4) -> i32x4
fn srar_d(i64x2, i64x2) -> i64x2
fn srari_b(i8x16, imm0_7) -> i8x16
fn srari_h(i16x8, imm0_15) -> i16x8
fn srari_w(i32x4, imm0_31) -> i32x4
fn srari_d(i64x2, imm0_63) -> i64x2
fn srl_b(i8x16, i8x16) -> i8x16
fn srl_h(i16x8, i16x8) -> i16x8
fn srl_w(i32x4, i32x4) -> i32x4
fn srl_d(i64x2, i64x2) -> i64x2
fn srli_b(i8x16, imm0_7) -> i8x16
fn srli_h(i16x8, imm0_15) -> i16x8
fn srli_w(i32x4, imm0_31) -> i32x4
fn srli_d(i64x2, imm0_63) -> i64x2
fn srlr_b(i8x16, i8x16) -> i8x16
fn srlr_h(i16x8, i16x8) -> i16x8
fn srlr_w(i32x4, i32x4) -> i32x4
fn srlr_d(i64x2, i64x2) -> i64x2
fn srlri_b(i8x16, imm0_7) -> i8x16
fn srlri_h(i16x8, imm0_15) -> i16x8
fn srlri_w(i32x4, imm0_31) -> i32x4
fn srlri_d(i64x2, imm0_63) -> i64x2
fn st_b(i8x16, *mut c_void, imm_n512_511) -> ()
fn st_h(i16x8, *mut c_void, imm_n1024_1022) -> ()
fn st_w(i32x4, *mut c_void, imm_n2048_2044) -> ()
fn st_d(i64x2, *mut c_void, imm_n4096_4088) -> ()
fn subs_s_b(i8x16, i8x16) -> i8x16
fn subs_s_h(i16x8, i16x8) -> i16x8
fn subs_s_w(i32x4, i32x4) -> i32x4
fn subs_s_d(i64x2, i64x2) -> i64x2
fn subs_u_b(u8x16, u8x16) -> u8x16
fn subs_u_h(u16x8, u16x8) -> u16x8
fn subs_u_w(u32x4, u32x4) -> u32x4
fn subs_u_d(u64x2, u64x2) -> u64x2
fn subsus_u_b(u8x16, i8x16) -> u8x16
fn subsus_u_h(u16x8, i16x8) -> u16x8
fn subsus_u_w(u32x4, i32x4) -> u32x4
fn subsus_u_d(u64x2, i64x2) -> u64x2
fn subsuu_s_b(u8x16, u8x16) -> i8x16
fn subsuu_s_h(u16x8, u16x8) -> i16x8
fn subsuu_s_w(u32x4, u32x4) -> i32x4
fn subsuu_s_d(u64x2, u64x2) -> i64x2
fn subv_b(i8x16, i8x16) -> i8x16
fn subv_h(i16x8, i16x8) -> i16x8
fn subv_w(i32x4, i32x4) -> i32x4
fn subv_d(i64x2, i64x2) -> i64x2
fn subvi_b(i8x16, imm0_31) -> i8x16
fn subvi_h(i16x8, imm0_31) -> i16x8
fn subvi_w(i32x4, imm0_31) -> i32x4
fn subvi_d(i64x2, imm0_31) -> i64x2
fn vshf_b(i8x16, i8x16, i8x16) -> i8x16
fn vshf_h(i16x8, i16x8, i16x8) -> i16x8
fn vshf_w(i32x4, i32x4, i32x4) -> i32x4
fn vshf_d(i64x2, i64x2, i64x2) -> i64x2
fn xor_v(u8x16, u8x16) -> u8x16
fn xori_b(u8x16, imm0_255) -> u8x16
Reference signatures for the GCC MSA built-ins (generated from the GCC docs with https://gist.github.com/gnzlbg/3a27cca666f4a9f8a51d2eb008ce914c):
fn add_a_b(i8x16, i8x16) -> i8x16
fn add_a_h(i16x8, i16x8) -> i16x8
fn add_a_w(i32x4, i32x4) -> i32x4
fn add_a_d(i64x2, i64x2) -> i64x2
fn adds_a_b(i8x16, i8x16) -> i8x16
fn adds_a_h(i16x8, i16x8) -> i16x8
fn adds_a_w(i32x4, i32x4) -> i32x4
fn adds_a_d(i64x2, i64x2) -> i64x2
fn adds_s_b(i8x16, i8x16) -> i8x16
fn adds_s_h(i16x8, i16x8) -> i16x8
fn adds_s_w(i32x4, i32x4) -> i32x4
fn adds_s_d(i64x2, i64x2) -> i64x2
fn adds_u_b(u8x16, u8x16) -> u8x16
fn adds_u_h(u16x8, u16x8) -> u16x8
fn adds_u_w(u32x4, u32x4) -> u32x4
fn adds_u_d(u64x2, u64x2) -> u64x2
fn addv_b(i8x16, i8x16) -> i8x16
fn addv_h(i16x8, i16x8) -> i16x8
fn addv_w(i32x4, i32x4) -> i32x4
fn addv_d(i64x2, i64x2) -> i64x2
fn addvi_b(i8x16, imm0_31) -> i8x16
fn addvi_h(i16x8, imm0_31) -> i16x8
fn addvi_w(i32x4, imm0_31) -> i32x4
fn addvi_d(i64x2, imm0_31) -> i64x2
fn and_v(u8x16, u8x16) -> u8x16
fn andi_b(u8x16, imm0_255) -> u8x16
fn asub_s_b(i8x16, i8x16) -> i8x16
fn asub_s_h(i16x8, i16x8) -> i16x8
fn asub_s_w(i32x4, i32x4) -> i32x4
fn asub_s_d(i64x2, i64x2) -> i64x2
fn asub_u_b(u8x16, u8x16) -> u8x16
fn asub_u_h(u16x8, u16x8) -> u16x8
fn asub_u_w(u32x4, u32x4) -> u32x4
fn asub_u_d(u64x2, u64x2) -> u64x2
fn ave_s_b(i8x16, i8x16) -> i8x16
fn ave_s_h(i16x8, i16x8) -> i16x8
fn ave_s_w(i32x4, i32x4) -> i32x4
fn ave_s_d(i64x2, i64x2) -> i64x2
fn ave_u_b(u8x16, u8x16) -> u8x16
fn ave_u_h(u16x8, u16x8) -> u16x8
fn ave_u_w(u32x4, u32x4) -> u32x4
fn ave_u_d(u64x2, u64x2) -> u64x2
fn aver_s_b(i8x16, i8x16) -> i8x16
fn aver_s_h(i16x8, i16x8) -> i16x8
fn aver_s_w(i32x4, i32x4) -> i32x4
fn aver_s_d(i64x2, i64x2) -> i64x2
fn aver_u_b(u8x16, u8x16) -> u8x16
fn aver_u_h(u16x8, u16x8) -> u16x8
fn aver_u_w(u32x4, u32x4) -> u32x4
fn aver_u_d(u64x2, u64x2) -> u64x2
fn bclr_b(u8x16, u8x16) -> u8x16
fn bclr_h(u16x8, u16x8) -> u16x8
fn bclr_w(u32x4, u32x4) -> u32x4
fn bclr_d(u64x2, u64x2) -> u64x2
fn bclri_b(u8x16, imm0_7) -> u8x16
fn bclri_h(u16x8, imm0_15) -> u16x8
fn bclri_w(u32x4, imm0_31) -> u32x4
fn bclri_d(u64x2, imm0_63) -> u64x2
fn binsl_b(u8x16, u8x16, u8x16) -> u8x16
fn binsl_h(u16x8, u16x8, u16x8) -> u16x8
fn binsl_w(u32x4, u32x4, u32x4) -> u32x4
fn binsl_d(u64x2, u64x2, u64x2) -> u64x2
fn binsli_b(u8x16, u8x16, imm0_7) -> u8x16
fn binsli_h(u16x8, u16x8, imm0_15) -> u16x8
fn binsli_w(u32x4, u32x4, imm0_31) -> u32x4
fn binsli_d(u64x2, u64x2, imm0_63) -> u64x2
fn binsr_b(u8x16, u8x16, u8x16) -> u8x16
fn binsr_h(u16x8, u16x8, u16x8) -> u16x8
fn binsr_w(u32x4, u32x4, u32x4) -> u32x4
fn binsr_d(u64x2, u64x2, u64x2) -> u64x2
fn binsri_b(u8x16, u8x16, imm0_7) -> u8x16
fn binsri_h(u16x8, u16x8, imm0_15) -> u16x8
fn binsri_w(u32x4, u32x4, imm0_31) -> u32x4
fn binsri_d(u64x2, u64x2, imm0_63) -> u64x2
fn bmnz_v(u8x16, u8x16, u8x16) -> u8x16
fn bmnzi_b(u8x16, u8x16, imm0_255) -> u8x16
fn bmz_v(u8x16, u8x16, u8x16) -> u8x16
fn bmzi_b(u8x16, u8x16, imm0_255) -> u8x16
fn bneg_b(u8x16, u8x16) -> u8x16
fn bneg_h(u16x8, u16x8) -> u16x8
fn bneg_w(u32x4, u32x4) -> u32x4
fn bneg_d(u64x2, u64x2) -> u64x2
fn bnegi_b(u8x16, imm0_7) -> u8x16
fn bnegi_h(u16x8, imm0_15) -> u16x8
fn bnegi_w(u32x4, imm0_31) -> u32x4
fn bnegi_d(u64x2, imm0_63) -> u64x2
fn bnz_b(u8x16) -> i32
fn bnz_h(u16x8) -> i32
fn bnz_w(u32x4) -> i32
fn bnz_d(u64x2) -> i32
fn bnz_v(u8x16) -> i32
fn bsel_v(u8x16, u8x16, u8x16) -> u8x16
fn bseli_b(u8x16, u8x16, imm0_255) -> u8x16
fn bset_b(u8x16, u8x16) -> u8x16
fn bset_h(u16x8, u16x8) -> u16x8
fn bset_w(u32x4, u32x4) -> u32x4
fn bset_d(u64x2, u64x2) -> u64x2
fn bseti_b(u8x16, imm0_7) -> u8x16
fn bseti_h(u16x8, imm0_15) -> u16x8
fn bseti_w(u32x4, imm0_31) -> u32x4
fn bseti_d(u64x2, imm0_63) -> u64x2
fn bz_b(u8x16) -> i32
fn bz_h(u16x8) -> i32
fn bz_w(u32x4) -> i32
fn bz_d(u64x2) -> i32
fn bz_v(u8x16) -> i32
fn ceq_b(i8x16, i8x16) -> i8x16
fn ceq_h(i16x8, i16x8) -> i16x8
fn ceq_w(i32x4, i32x4) -> i32x4
fn ceq_d(i64x2, i64x2) -> i64x2
fn ceqi_b(i8x16, imm_n16_15) -> i8x16
fn ceqi_h(i16x8, imm_n16_15) -> i16x8
fn ceqi_w(i32x4, imm_n16_15) -> i32x4
fn ceqi_d(i64x2, imm_n16_15) -> i64x2
fn cfcmsa(imm0_31) -> i32
fn cle_s_b(i8x16, i8x16) -> i8x16
fn cle_s_h(i16x8, i16x8) -> i16x8
fn cle_s_w(i32x4, i32x4) -> i32x4
fn cle_s_d(i64x2, i64x2) -> i64x2
fn cle_u_b(u8x16, u8x16) -> i8x16
fn cle_u_h(u16x8, u16x8) -> i16x8
fn cle_u_w(u32x4, u32x4) -> i32x4
fn cle_u_d(u64x2, u64x2) -> i64x2
fn clei_s_b(i8x16, imm_n16_15) -> i8x16
fn clei_s_h(i16x8, imm_n16_15) -> i16x8
fn clei_s_w(i32x4, imm_n16_15) -> i32x4
fn clei_s_d(i64x2, imm_n16_15) -> i64x2
fn clei_u_b(u8x16, imm0_31) -> i8x16
fn clei_u_h(u16x8, imm0_31) -> i16x8
fn clei_u_w(u32x4, imm0_31) -> i32x4
fn clei_u_d(u64x2, imm0_31) -> i64x2
fn clt_s_b(i8x16, i8x16) -> i8x16
fn clt_s_h(i16x8, i16x8) -> i16x8
fn clt_s_w(i32x4, i32x4) -> i32x4
fn clt_s_d(i64x2, i64x2) -> i64x2
fn clt_u_b(u8x16, u8x16) -> i8x16
fn clt_u_h(u16x8, u16x8) -> i16x8
fn clt_u_w(u32x4, u32x4) -> i32x4
fn clt_u_d(u64x2, u64x2) -> i64x2
fn clti_s_b(i8x16, imm_n16_15) -> i8x16
fn clti_s_h(i16x8, imm_n16_15) -> i16x8
fn clti_s_w(i32x4, imm_n16_15) -> i32x4
fn clti_s_d(i64x2, imm_n16_15) -> i64x2
fn clti_u_b(u8x16, imm0_31) -> i8x16
fn clti_u_h(u16x8, imm0_31) -> i16x8
fn clti_u_w(u32x4, imm0_31) -> i32x4
fn clti_u_d(u64x2, imm0_31) -> i64x2
fn copy_s_b(i8x16, imm0_15) -> i32
fn copy_s_h(i16x8, imm0_7) -> i32
fn copy_s_w(i32x4, imm0_3) -> i32
fn copy_s_d(i64x2, imm0_1) -> i64
fn copy_u_b(i8x16, imm0_15) -> u32
fn copy_u_h(i16x8, imm0_7) -> u32
fn copy_u_w(i32x4, imm0_3) -> u32
fn copy_u_d(i64x2, imm0_1) -> u64
fn ctcmsa(imm0_31, i32) -> ()
fn div_s_b(i8x16, i8x16) -> i8x16
fn div_s_h(i16x8, i16x8) -> i16x8
fn div_s_w(i32x4, i32x4) -> i32x4
fn div_s_d(i64x2, i64x2) -> i64x2
fn div_u_b(u8x16, u8x16) -> u8x16
fn div_u_h(u16x8, u16x8) -> u16x8
fn div_u_w(u32x4, u32x4) -> u32x4
fn div_u_d(u64x2, u64x2) -> u64x2
fn dotp_s_h(i8x16, i8x16) -> i16x8
fn dotp_s_w(i16x8, i16x8) -> i32x4
fn dotp_s_d(i32x4, i32x4) -> i64x2
fn dotp_u_h(u8x16, u8x16) -> u16x8
fn dotp_u_w(u16x8, u16x8) -> u32x4
fn dotp_u_d(u32x4, u32x4) -> u64x2
fn dpadd_s_h(i16x8, i8x16, i8x16) -> i16x8
fn dpadd_s_w(i32x4, i16x8, i16x8) -> i32x4
fn dpadd_s_d(i64x2, i32x4, i32x4) -> i64x2
fn dpadd_u_h(u16x8, u8x16, u8x16) -> u16x8
fn dpadd_u_w(u32x4, u16x8, u16x8) -> u32x4
fn dpadd_u_d(u64x2, u32x4, u32x4) -> u64x2
fn dpsub_s_h(i16x8, i8x16, i8x16) -> i16x8
fn dpsub_s_w(i32x4, i16x8, i16x8) -> i32x4
fn dpsub_s_d(i64x2, i32x4, i32x4) -> i64x2
fn dpsub_u_h(i16x8, u8x16, u8x16) -> i16x8
fn dpsub_u_w(i32x4, u16x8, u16x8) -> i32x4
fn dpsub_u_d(i64x2, u32x4, u32x4) -> i64x2
fn fadd_w(f32x4, f32x4) -> f32x4
fn fadd_d(f64x2, f64x2) -> f64x2
fn fcaf_w(f32x4, f32x4) -> i32x4
fn fcaf_d(f64x2, f64x2) -> i64x2
fn fceq_w(f32x4, f32x4) -> i32x4
fn fceq_d(f64x2, f64x2) -> i64x2
fn fclass_w(f32x4) -> i32x4
fn fclass_d(f64x2) -> i64x2
fn fcle_w(f32x4, f32x4) -> i32x4
fn fcle_d(f64x2, f64x2) -> i64x2
fn fclt_w(f32x4, f32x4) -> i32x4
fn fclt_d(f64x2, f64x2) -> i64x2
fn fcne_w(f32x4, f32x4) -> i32x4
fn fcne_d(f64x2, f64x2) -> i64x2
fn fcor_w(f32x4, f32x4) -> i32x4
fn fcor_d(f64x2, f64x2) -> i64x2
fn fcueq_w(f32x4, f32x4) -> i32x4
fn fcueq_d(f64x2, f64x2) -> i64x2
fn fcule_w(f32x4, f32x4) -> i32x4
fn fcule_d(f64x2, f64x2) -> i64x2
fn fcult_w(f32x4, f32x4) -> i32x4
fn fcult_d(f64x2, f64x2) -> i64x2
fn fcun_w(f32x4, f32x4) -> i32x4
fn fcun_d(f64x2, f64x2) -> i64x2
fn fcune_w(f32x4, f32x4) -> i32x4
fn fcune_d(f64x2, f64x2) -> i64x2
fn fdiv_w(f32x4, f32x4) -> f32x4
fn fdiv_d(f64x2, f64x2) -> f64x2
fn fexdo_h(f32x4, f32x4) -> i16x8
fn fexdo_w(f64x2, f64x2) -> f32x4
fn fexp2_w(f32x4, i32x4) -> f32x4
fn fexp2_d(f64x2, i64x2) -> f64x2
fn fexupl_w(i16x8) -> f32x4
fn fexupl_d(f32x4) -> f64x2
fn fexupr_w(i16x8) -> f32x4
fn fexupr_d(f32x4) -> f64x2
fn ffint_s_w(i32x4) -> f32x4
fn ffint_s_d(i64x2) -> f64x2
fn ffint_u_w(u32x4) -> f32x4
fn ffint_u_d(u64x2) -> f64x2
fn ffql_w(i16x8) -> f32x4
fn ffql_d(i32x4) -> f64x2
fn ffqr_w(i16x8) -> f32x4
fn ffqr_d(i32x4) -> f64x2
fn fill_b(i32) -> i8x16
fn fill_h(i32) -> i16x8
fn fill_w(i32) -> i32x4
fn fill_d(i64) -> i64x2
fn flog2_w(f32x4) -> f32x4
fn flog2_d(f64x2) -> f64x2
fn fmadd_w(f32x4, f32x4, f32x4) -> f32x4
fn fmadd_d(f64x2, f64x2, f64x2) -> f64x2
fn fmax_w(f32x4, f32x4) -> f32x4
fn fmax_d(f64x2, f64x2) -> f64x2
fn fmax_a_w(f32x4, f32x4) -> f32x4
fn fmax_a_d(f64x2, f64x2) -> f64x2
fn fmin_w(f32x4, f32x4) -> f32x4
fn fmin_d(f64x2, f64x2) -> f64x2
fn fmin_a_w(f32x4, f32x4) -> f32x4
fn fmin_a_d(f64x2, f64x2) -> f64x2
fn fmsub_w(f32x4, f32x4, f32x4) -> f32x4
fn fmsub_d(f64x2, f64x2, f64x2) -> f64x2
fn fmul_w(f32x4, f32x4) -> f32x4
fn fmul_d(f64x2, f64x2) -> f64x2
fn frint_w(f32x4) -> f32x4
fn frint_d(f64x2) -> f64x2
fn frcp_w(f32x4) -> f32x4
fn frcp_d(f64x2) -> f64x2
fn frsqrt_w(f32x4) -> f32x4
fn frsqrt_d(f64x2) -> f64x2
fn fsaf_w(f32x4, f32x4) -> i32x4
fn fsaf_d(f64x2, f64x2) -> i64x2
fn fseq_w(f32x4, f32x4) -> i32x4
fn fseq_d(f64x2, f64x2) -> i64x2
fn fsle_w(f32x4, f32x4) -> i32x4
fn fsle_d(f64x2, f64x2) -> i64x2
fn fslt_w(f32x4, f32x4) -> i32x4
fn fslt_d(f64x2, f64x2) -> i64x2
fn fsne_w(f32x4, f32x4) -> i32x4
fn fsne_d(f64x2, f64x2) -> i64x2
fn fsor_w(f32x4, f32x4) -> i32x4
fn fsor_d(f64x2, f64x2) -> i64x2
fn fsqrt_w(f32x4) -> f32x4
fn fsqrt_d(f64x2) -> f64x2
fn fsub_w(f32x4, f32x4) -> f32x4
fn fsub_d(f64x2, f64x2) -> f64x2
fn fsueq_w(f32x4, f32x4) -> i32x4
fn fsueq_d(f64x2, f64x2) -> i64x2
fn fsule_w(f32x4, f32x4) -> i32x4
fn fsule_d(f64x2, f64x2) -> i64x2
fn fsult_w(f32x4, f32x4) -> i32x4
fn fsult_d(f64x2, f64x2) -> i64x2
fn fsun_w(f32x4, f32x4) -> i32x4
fn fsun_d(f64x2, f64x2) -> i64x2
fn fsune_w(f32x4, f32x4) -> i32x4
fn fsune_d(f64x2, f64x2) -> i64x2
fn ftint_s_w(f32x4) -> i32x4
fn ftint_s_d(f64x2) -> i64x2
fn ftint_u_w(f32x4) -> u32x4
fn ftint_u_d(f64x2) -> u64x2
fn ftq_h(f32x4, f32x4) -> i16x8
fn ftq_w(f64x2, f64x2) -> i32x4
fn ftrunc_s_w(f32x4) -> i32x4
fn ftrunc_s_d(f64x2) -> i64x2
fn ftrunc_u_w(f32x4) -> u32x4
fn ftrunc_u_d(f64x2) -> u64x2
fn hadd_s_h(i8x16, i8x16) -> i16x8
fn hadd_s_w(i16x8, i16x8) -> i32x4
fn hadd_s_d(i32x4, i32x4) -> i64x2
fn hadd_u_h(u8x16, u8x16) -> u16x8
fn hadd_u_w(u16x8, u16x8) -> u32x4
fn hadd_u_d(u32x4, u32x4) -> u64x2
fn hsub_s_h(i8x16, i8x16) -> i16x8
fn hsub_s_w(i16x8, i16x8) -> i32x4
fn hsub_s_d(i32x4, i32x4) -> i64x2
fn hsub_u_h(u8x16, u8x16) -> i16x8
fn hsub_u_w(u16x8, u16x8) -> i32x4
fn hsub_u_d(u32x4, u32x4) -> i64x2
fn ilvev_b(i8x16, i8x16) -> i8x16
fn ilvev_h(i16x8, i16x8) -> i16x8
fn ilvev_w(i32x4, i32x4) -> i32x4
fn ilvev_d(i64x2, i64x2) -> i64x2
fn ilvl_b(i8x16, i8x16) -> i8x16
fn ilvl_h(i16x8, i16x8) -> i16x8
fn ilvl_w(i32x4, i32x4) -> i32x4
fn ilvl_d(i64x2, i64x2) -> i64x2
fn ilvod_b(i8x16, i8x16) -> i8x16
fn ilvod_h(i16x8, i16x8) -> i16x8
fn ilvod_w(i32x4, i32x4) -> i32x4
fn ilvod_d(i64x2, i64x2) -> i64x2
fn ilvr_b(i8x16, i8x16) -> i8x16
fn ilvr_h(i16x8, i16x8) -> i16x8
fn ilvr_w(i32x4, i32x4) -> i32x4
fn ilvr_d(i64x2, i64x2) -> i64x2
fn insert_b(i8x16, imm0_15, i32) -> i8x16
fn insert_h(i16x8, imm0_7, i32) -> i16x8
fn insert_w(i32x4, imm0_3, i32) -> i32x4
fn insert_d(i64x2, imm0_1, i64) -> i64x2
fn insve_b(i8x16, imm0_15, i8x16) -> i8x16
fn insve_h(i16x8, imm0_7, i16x8) -> i16x8
fn insve_w(i32x4, imm0_3, i32x4) -> i32x4
fn insve_d(i64x2, imm0_1, i64x2) -> i64x2
fn ld_b(*mut c_void, imm_n512_511) -> i8x16
fn ld_h(*mut c_void, imm_n1024_1022) -> i16x8
fn ld_w(*mut c_void, imm_n2048_2044) -> i32x4
fn ld_d(*mut c_void, imm_n4096_4088) -> i64x2
fn ldi_b(imm_n512_511) -> i8x16
fn ldi_h(imm_n512_511) -> i16x8
fn ldi_w(imm_n512_511) -> i32x4
fn ldi_d(imm_n512_511) -> i64x2
fn madd_q_h(i16x8, i16x8, i16x8) -> i16x8
fn madd_q_w(i32x4, i32x4, i32x4) -> i32x4
fn maddr_q_h(i16x8, i16x8, i16x8) -> i16x8
fn maddr_q_w(i32x4, i32x4, i32x4) -> i32x4
fn maddv_b(i8x16, i8x16, i8x16) -> i8x16
fn maddv_h(i16x8, i16x8, i16x8) -> i16x8
fn maddv_w(i32x4, i32x4, i32x4) -> i32x4
fn maddv_d(i64x2, i64x2, i64x2) -> i64x2
fn max_a_b(i8x16, i8x16) -> i8x16
fn max_a_h(i16x8, i16x8) -> i16x8
fn max_a_w(i32x4, i32x4) -> i32x4
fn max_a_d(i64x2, i64x2) -> i64x2
fn max_s_b(i8x16, i8x16) -> i8x16
fn max_s_h(i16x8, i16x8) -> i16x8
fn max_s_w(i32x4, i32x4) -> i32x4
fn max_s_d(i64x2, i64x2) -> i64x2
fn max_u_b(u8x16, u8x16) -> u8x16
fn max_u_h(u16x8, u16x8) -> u16x8
fn max_u_w(u32x4, u32x4) -> u32x4
fn max_u_d(u64x2, u64x2) -> u64x2
fn maxi_s_b(i8x16, imm_n16_15) -> i8x16
fn maxi_s_h(i16x8, imm_n16_15) -> i16x8
fn maxi_s_w(i32x4, imm_n16_15) -> i32x4
fn maxi_s_d(i64x2, imm_n16_15) -> i64x2
fn maxi_u_b(u8x16, imm0_31) -> u8x16
fn maxi_u_h(u16x8, imm0_31) -> u16x8
fn maxi_u_w(u32x4, imm0_31) -> u32x4
fn maxi_u_d(u64x2, imm0_31) -> u64x2
fn min_a_b(i8x16, i8x16) -> i8x16
fn min_a_h(i16x8, i16x8) -> i16x8
fn min_a_w(i32x4, i32x4) -> i32x4
fn min_a_d(i64x2, i64x2) -> i64x2
fn min_s_b(i8x16, i8x16) -> i8x16
fn min_s_h(i16x8, i16x8) -> i16x8
fn min_s_w(i32x4, i32x4) -> i32x4
fn min_s_d(i64x2, i64x2) -> i64x2
fn min_u_b(u8x16, u8x16) -> u8x16
fn min_u_h(u16x8, u16x8) -> u16x8
fn min_u_w(u32x4, u32x4) -> u32x4
fn min_u_d(u64x2, u64x2) -> u64x2
fn mini_s_b(i8x16, imm_n16_15) -> i8x16
fn mini_s_h(i16x8, imm_n16_15) -> i16x8
fn mini_s_w(i32x4, imm_n16_15) -> i32x4
fn mini_s_d(i64x2, imm_n16_15) -> i64x2
fn mini_u_b(u8x16, imm0_31) -> u8x16
fn mini_u_h(u16x8, imm0_31) -> u16x8
fn mini_u_w(u32x4, imm0_31) -> u32x4
fn mini_u_d(u64x2, imm0_31) -> u64x2
fn mod_s_b(i8x16, i8x16) -> i8x16
fn mod_s_h(i16x8, i16x8) -> i16x8
fn mod_s_w(i32x4, i32x4) -> i32x4
fn mod_s_d(i64x2, i64x2) -> i64x2
fn mod_u_b(u8x16, u8x16) -> u8x16
fn mod_u_h(u16x8, u16x8) -> u16x8
fn mod_u_w(u32x4, u32x4) -> u32x4
fn mod_u_d(u64x2, u64x2) -> u64x2
fn move_v(i8x16) -> i8x16
fn msub_q_h(i16x8, i16x8, i16x8) -> i16x8
fn msub_q_w(i32x4, i32x4, i32x4) -> i32x4
fn msubr_q_h(i16x8, i16x8, i16x8) -> i16x8
fn msubr_q_w(i32x4, i32x4, i32x4) -> i32x4
fn msubv_b(i8x16, i8x16, i8x16) -> i8x16
fn msubv_h(i16x8, i16x8, i16x8) -> i16x8
fn msubv_w(i32x4, i32x4, i32x4) -> i32x4
fn msubv_d(i64x2, i64x2, i64x2) -> i64x2
fn mul_q_h(i16x8, i16x8) -> i16x8
fn mul_q_w(i32x4, i32x4) -> i32x4
fn mulr_q_h(i16x8, i16x8) -> i16x8
fn mulr_q_w(i32x4, i32x4) -> i32x4
fn mulv_b(i8x16, i8x16) -> i8x16
fn mulv_h(i16x8, i16x8) -> i16x8
fn mulv_w(i32x4, i32x4) -> i32x4
fn mulv_d(i64x2, i64x2) -> i64x2
fn nloc_b(i8x16) -> i8x16
fn nloc_h(i16x8) -> i16x8
fn nloc_w(i32x4) -> i32x4
fn nloc_d(i64x2) -> i64x2
fn nlzc_b(i8x16) -> i8x16
fn nlzc_h(i16x8) -> i16x8
fn nlzc_w(i32x4) -> i32x4
fn nlzc_d(i64x2) -> i64x2
fn nor_v(u8x16, u8x16) -> u8x16
fn nori_b(u8x16, imm0_255) -> u8x16
fn or_v(u8x16, u8x16) -> u8x16
fn ori_b(u8x16, imm0_255) -> u8x16
fn pckev_b(i8x16, i8x16) -> i8x16
fn pckev_h(i16x8, i16x8) -> i16x8
fn pckev_w(i32x4, i32x4) -> i32x4
fn pckev_d(i64x2, i64x2) -> i64x2
fn pckod_b(i8x16, i8x16) -> i8x16
fn pckod_h(i16x8, i16x8) -> i16x8
fn pckod_w(i32x4, i32x4) -> i32x4
fn pckod_d(i64x2, i64x2) -> i64x2
fn pcnt_b(i8x16) -> i8x16
fn pcnt_h(i16x8) -> i16x8
fn pcnt_w(i32x4) -> i32x4
fn pcnt_d(i64x2) -> i64x2
fn sat_s_b(i8x16, imm0_7) -> i8x16
fn sat_s_h(i16x8, imm0_15) -> i16x8
fn sat_s_w(i32x4, imm0_31) -> i32x4
fn sat_s_d(i64x2, imm0_63) -> i64x2
fn sat_u_b(u8x16, imm0_7) -> u8x16
fn sat_u_h(u16x8, imm0_15) -> u16x8
fn sat_u_w(u32x4, imm0_31) -> u32x4
fn sat_u_d(u64x2, imm0_63) -> u64x2
fn shf_b(i8x16, imm0_255) -> i8x16
fn shf_h(i16x8, imm0_255) -> i16x8
fn shf_w(i32x4, imm0_255) -> i32x4
fn sld_b(i8x16, i8x16, i32) -> i8x16
fn sld_h(i16x8, i16x8, i32) -> i16x8
fn sld_w(i32x4, i32x4, i32) -> i32x4
fn sld_d(i64x2, i64x2, i32) -> i64x2
fn sldi_b(i8x16, i8x16, imm0_15) -> i8x16
fn sldi_h(i16x8, i16x8, imm0_7) -> i16x8
fn sldi_w(i32x4, i32x4, imm0_3) -> i32x4
fn sldi_d(i64x2, i64x2, imm0_1) -> i64x2
fn sll_b(i8x16, i8x16) -> i8x16
fn sll_h(i16x8, i16x8) -> i16x8
fn sll_w(i32x4, i32x4) -> i32x4
fn sll_d(i64x2, i64x2) -> i64x2
fn slli_b(i8x16, imm0_7) -> i8x16
fn slli_h(i16x8, imm0_15) -> i16x8
fn slli_w(i32x4, imm0_31) -> i32x4
fn slli_d(i64x2, imm0_63) -> i64x2
fn splat_b(i8x16, i32) -> i8x16
fn splat_h(i16x8, i32) -> i16x8
fn splat_w(i32x4, i32) -> i32x4
fn splat_d(i64x2, i32) -> i64x2
fn splati_b(i8x16, imm0_15) -> i8x16
fn splati_h(i16x8, imm0_7) -> i16x8
fn splati_w(i32x4, imm0_3) -> i32x4
fn splati_d(i64x2, imm0_1) -> i64x2
fn sra_b(i8x16, i8x16) -> i8x16
fn sra_h(i16x8, i16x8) -> i16x8
fn sra_w(i32x4, i32x4) -> i32x4
fn sra_d(i64x2, i64x2) -> i64x2
fn srai_b(i8x16, imm0_7) -> i8x16
fn srai_h(i16x8, imm0_15) -> i16x8
fn srai_w(i32x4, imm0_31) -> i32x4
fn srai_d(i64x2, imm0_63) -> i64x2
fn srar_b(i8x16, i8x16) -> i8x16
fn srar_h(i16x8, i16x8) -> i16x8
fn srar_w(i32x4, i32x4) -> i32x4
fn srar_d(i64x2, i64x2) -> i64x2
fn srari_b(i8x16, imm0_7) -> i8x16
fn srari_h(i16x8, imm0_15) -> i16x8
fn srari_w(i32x4, imm0_31) -> i32x4
fn srari_d(i64x2, imm0_63) -> i64x2
fn srl_b(i8x16, i8x16) -> i8x16
fn srl_h(i16x8, i16x8) -> i16x8
fn srl_w(i32x4, i32x4) -> i32x4
fn srl_d(i64x2, i64x2) -> i64x2
fn srli_b(i8x16, imm0_7) -> i8x16
fn srli_h(i16x8, imm0_15) -> i16x8
fn srli_w(i32x4, imm0_31) -> i32x4
fn srli_d(i64x2, imm0_63) -> i64x2
fn srlr_b(i8x16, i8x16) -> i8x16
fn srlr_h(i16x8, i16x8) -> i16x8
fn srlr_w(i32x4, i32x4) -> i32x4
fn srlr_d(i64x2, i64x2) -> i64x2
fn srlri_b(i8x16, imm0_7) -> i8x16
fn srlri_h(i16x8, imm0_15) -> i16x8
fn srlri_w(i32x4, imm0_31) -> i32x4
fn srlri_d(i64x2, imm0_63) -> i64x2
fn st_b(i8x16, *mut c_void, imm_n512_511) -> ()
fn st_h(i16x8, *mut c_void, imm_n1024_1022) -> ()
fn st_w(i32x4, *mut c_void, imm_n2048_2044) -> ()
fn st_d(i64x2, *mut c_void, imm_n4096_4088) -> ()
fn subs_s_b(i8x16, i8x16) -> i8x16
fn subs_s_h(i16x8, i16x8) -> i16x8
fn subs_s_w(i32x4, i32x4) -> i32x4
fn subs_s_d(i64x2, i64x2) -> i64x2
fn subs_u_b(u8x16, u8x16) -> u8x16
fn subs_u_h(u16x8, u16x8) -> u16x8
fn subs_u_w(u32x4, u32x4) -> u32x4
fn subs_u_d(u64x2, u64x2) -> u64x2
fn subsus_u_b(u8x16, i8x16) -> u8x16
fn subsus_u_h(u16x8, i16x8) -> u16x8
fn subsus_u_w(u32x4, i32x4) -> u32x4
fn subsus_u_d(u64x2, i64x2) -> u64x2
fn subsuu_s_b(u8x16, u8x16) -> i8x16
fn subsuu_s_h(u16x8, u16x8) -> i16x8
fn subsuu_s_w(u32x4, u32x4) -> i32x4
fn subsuu_s_d(u64x2, u64x2) -> i64x2
fn subv_b(i8x16, i8x16) -> i8x16
fn subv_h(i16x8, i16x8) -> i16x8
fn subv_w(i32x4, i32x4) -> i32x4
fn subv_d(i64x2, i64x2) -> i64x2
fn subvi_b(i8x16, imm0_31) -> i8x16
fn subvi_h(i16x8, imm0_31) -> i16x8
fn subvi_w(i32x4, imm0_31) -> i32x4
fn subvi_d(i64x2, imm0_31) -> i64x2
fn vshf_b(i8x16, i8x16, i8x16) -> i8x16
fn vshf_h(i16x8, i16x8, i16x8) -> i16x8
fn vshf_w(i32x4, i32x4, i32x4) -> i32x4
fn vshf_d(i64x2, i64x2, i64x2) -> i64x2
fn xor_v(u8x16, u8x16) -> u8x16
fn xori_b(u8x16, imm0_255) -> u8x16