/* Generated automatically by the program `genflags'
from the machine description file `md'. */
#ifndef GCC_INSN_FLAGS_H
#define GCC_INSN_FLAGS_H
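/* Each HAVE_<pattern> macro expands to the C condition under which the
   corresponding instruction pattern from the machine description is
   available; patterns with no condition expand to 1.  */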
#define HAVE_indirect_jump 1
#define HAVE_jump 1
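/* Conditional-compare (CCMP/FCCMP) patterns; the floating-point variants
   are gated on TARGET_FLOAT.  */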
#define HAVE_ccmpccsi 1
#define HAVE_ccmpccdi 1
#define HAVE_ccmpccfpsf (TARGET_FLOAT)
#define HAVE_ccmpccfpdf (TARGET_FLOAT)
#define HAVE_ccmpccfpesf (TARGET_FLOAT)
#define HAVE_ccmpccfpedf (TARGET_FLOAT)
#define HAVE_ccmpccsi_rev 1
#define HAVE_ccmpccdi_rev 1
#define HAVE_ccmpccfpsf_rev (TARGET_FLOAT)
#define HAVE_ccmpccfpdf_rev (TARGET_FLOAT)
#define HAVE_ccmpccfpesf_rev (TARGET_FLOAT)
#define HAVE_ccmpccfpedf_rev (TARGET_FLOAT)
#define HAVE_condjump 1
#define HAVE_nop 1
#define HAVE_prefetch 1
#define HAVE_trap 1
#define HAVE_simple_return 1
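/* Bit-insert immediate (MOVK) patterns: the shift amount must be a
   multiple of 16 and lie within the mode.  */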
#define HAVE_insv_immsi (UINTVAL (operands[1]) < GET_MODE_BITSIZE (SImode) \
&& UINTVAL (operands[1]) % 16 == 0)
#define HAVE_insv_immdi (UINTVAL (operands[1]) < GET_MODE_BITSIZE (DImode) \
&& UINTVAL (operands[1]) % 16 == 0)
#define HAVE_aarch64_movksi (aarch64_movk_shift (rtx_mode_t (operands[2], SImode), \
rtx_mode_t (operands[3], SImode)) >= 0)
#define HAVE_aarch64_movkdi (aarch64_movk_shift (rtx_mode_t (operands[2], DImode), \
rtx_mode_t (operands[3], DImode)) >= 0)
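/* Scalar load/store pair (LDP/STP) patterns: the second address must
   equal the first plus one mode size, i.e. the two accesses are
   adjacent in memory.  */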
#define HAVE_load_pair_sw_sisi (rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (SImode))))
#define HAVE_load_pair_sw_sfsi (rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (SFmode))))
#define HAVE_load_pair_sw_sisf (rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (SImode))))
#define HAVE_load_pair_sw_sfsf (rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (SFmode))))
#define HAVE_load_pair_dw_didi (rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (DImode))))
#define HAVE_load_pair_dw_didf (rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (DImode))))
#define HAVE_load_pair_dw_dfdi (rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (DFmode))))
#define HAVE_load_pair_dw_dfdf (rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (DFmode))))
#define HAVE_load_pair_dw_tftf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (TFmode))))
#define HAVE_store_pair_sw_sisi (rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (SImode))))
#define HAVE_store_pair_sw_sfsi (rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (SFmode))))
#define HAVE_store_pair_sw_sisf (rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (SImode))))
#define HAVE_store_pair_sw_sfsf (rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (SFmode))))
#define HAVE_store_pair_dw_didi (rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (DImode))))
#define HAVE_store_pair_dw_didf (rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (DImode))))
#define HAVE_store_pair_dw_dfdi (rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (DFmode))))
#define HAVE_store_pair_dw_dfdf (rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (DFmode))))
#define HAVE_store_pair_dw_tftf (TARGET_SIMD && \
rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (TFmode))))
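/* Load/store pair patterns with base-register writeback (pre/post-indexed
   LDP/STP); the writeback offset is tied to the mode size, and the
   _si/_di suffix selects the pointer mode.  */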
#define HAVE_loadwb_pairsi_si ((INTVAL (operands[5]) == GET_MODE_SIZE (SImode)) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_loadwb_pairsi_di ((INTVAL (operands[5]) == GET_MODE_SIZE (SImode)) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_loadwb_pairdi_si ((INTVAL (operands[5]) == GET_MODE_SIZE (DImode)) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_loadwb_pairdi_di ((INTVAL (operands[5]) == GET_MODE_SIZE (DImode)) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_loadwb_pairsf_si ((INTVAL (operands[5]) == GET_MODE_SIZE (SFmode)) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_loadwb_pairdf_si ((INTVAL (operands[5]) == GET_MODE_SIZE (DFmode)) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_loadwb_pairsf_di ((INTVAL (operands[5]) == GET_MODE_SIZE (SFmode)) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_loadwb_pairdf_di ((INTVAL (operands[5]) == GET_MODE_SIZE (DFmode)) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_loadwb_pairti_si ((TARGET_SIMD && INTVAL (operands[5]) == GET_MODE_SIZE (TImode)) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_loadwb_pairtf_si ((TARGET_SIMD && INTVAL (operands[5]) == GET_MODE_SIZE (TFmode)) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_loadwb_pairti_di ((TARGET_SIMD && INTVAL (operands[5]) == GET_MODE_SIZE (TImode)) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_loadwb_pairtf_di ((TARGET_SIMD && INTVAL (operands[5]) == GET_MODE_SIZE (TFmode)) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_storewb_pairsi_si ((INTVAL (operands[5]) == INTVAL (operands[4]) + GET_MODE_SIZE (SImode)) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_storewb_pairsi_di ((INTVAL (operands[5]) == INTVAL (operands[4]) + GET_MODE_SIZE (SImode)) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_storewb_pairdi_si ((INTVAL (operands[5]) == INTVAL (operands[4]) + GET_MODE_SIZE (DImode)) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_storewb_pairdi_di ((INTVAL (operands[5]) == INTVAL (operands[4]) + GET_MODE_SIZE (DImode)) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_storewb_pairsf_si ((INTVAL (operands[5]) == INTVAL (operands[4]) + GET_MODE_SIZE (SFmode)) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_storewb_pairdf_si ((INTVAL (operands[5]) == INTVAL (operands[4]) + GET_MODE_SIZE (DFmode)) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_storewb_pairsf_di ((INTVAL (operands[5]) == INTVAL (operands[4]) + GET_MODE_SIZE (SFmode)) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_storewb_pairdf_di ((INTVAL (operands[5]) == INTVAL (operands[4]) + GET_MODE_SIZE (DFmode)) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_storewb_pairti_si ((TARGET_SIMD \
&& INTVAL (operands[5]) \
== INTVAL (operands[4]) + GET_MODE_SIZE (TImode)) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_storewb_pairtf_si ((TARGET_SIMD \
&& INTVAL (operands[5]) \
== INTVAL (operands[4]) + GET_MODE_SIZE (TFmode)) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_storewb_pairti_di ((TARGET_SIMD \
&& INTVAL (operands[5]) \
== INTVAL (operands[4]) + GET_MODE_SIZE (TImode)) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_storewb_pairtf_di ((TARGET_SIMD \
&& INTVAL (operands[5]) \
== INTVAL (operands[4]) + GET_MODE_SIZE (TFmode)) && (ptr_mode == DImode || Pmode == DImode))
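/* Integer arithmetic patterns that also set the condition flags
   (ADDS/SUBS/NEGS and their overflow-checking variants).  */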
#define HAVE_addsi3_compare0 1
#define HAVE_adddi3_compare0 1
#define HAVE_addsi3_compareC 1
#define HAVE_adddi3_compareC 1
#define HAVE_addsi3_compareV_imm 1
#define HAVE_adddi3_compareV_imm 1
#define HAVE_addsi3_compareV 1
#define HAVE_adddi3_compareV 1
#define HAVE_aarch64_subsi_compare0 1
#define HAVE_aarch64_subdi_compare0 1
#define HAVE_subsi3 1
#define HAVE_subdi3 1
#define HAVE_subvsi_insn 1
#define HAVE_subvdi_insn 1
#define HAVE_subvsi_imm 1
#define HAVE_subvdi_imm 1
#define HAVE_negvsi_insn 1
#define HAVE_negvdi_insn 1
#define HAVE_negvsi_cmp_only 1
#define HAVE_negvdi_cmp_only 1
#define HAVE_negdi_carryout 1
#define HAVE_negvdi_carryinV 1
#define HAVE_subsi3_compare1_imm (UINTVAL (operands[2]) == -UINTVAL (operands[3]))
#define HAVE_subdi3_compare1_imm (UINTVAL (operands[2]) == -UINTVAL (operands[3]))
#define HAVE_subsi3_compare1 1
#define HAVE_subdi3_compare1 1
#define HAVE_negsi2 1
#define HAVE_negdi2 1
#define HAVE_negsi2_compare0 1
#define HAVE_negdi2_compare0 1
#define HAVE_mulsi3 1
#define HAVE_muldi3 1
#define HAVE_maddsi 1
#define HAVE_madddi 1
#define HAVE_mulsidi3 1
#define HAVE_umulsidi3 1
#define HAVE_maddsidi4 1
#define HAVE_umaddsidi4 1
#define HAVE_msubsidi4 1
#define HAVE_umsubsidi4 1
#define HAVE_smuldi3_highpart 1
#define HAVE_umuldi3_highpart 1
#define HAVE_divsi3 1
#define HAVE_udivsi3 1
#define HAVE_divdi3 1
#define HAVE_udivdi3 1
#define HAVE_cmpsi 1
#define HAVE_cmpdi 1
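/* Floating-point compare (FCMP/FCMPE) patterns.  */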
#define HAVE_fcmpsf (TARGET_FLOAT)
#define HAVE_fcmpdf (TARGET_FLOAT)
#define HAVE_fcmpesf (TARGET_FLOAT)
#define HAVE_fcmpedf (TARGET_FLOAT)
#define HAVE_aarch64_cstoreqi 1
#define HAVE_aarch64_cstorehi 1
#define HAVE_aarch64_cstoresi 1
#define HAVE_aarch64_cstoredi 1
#define HAVE_cstoreqi_neg 1
#define HAVE_cstorehi_neg 1
#define HAVE_cstoresi_neg 1
#define HAVE_cstoredi_neg 1
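/* CRC32/CRC32C patterns, available only with the CRC extension.  */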
#define HAVE_aarch64_crc32b (TARGET_CRC32)
#define HAVE_aarch64_crc32h (TARGET_CRC32)
#define HAVE_aarch64_crc32w (TARGET_CRC32)
#define HAVE_aarch64_crc32x (TARGET_CRC32)
#define HAVE_aarch64_crc32cb (TARGET_CRC32)
#define HAVE_aarch64_crc32ch (TARGET_CRC32)
#define HAVE_aarch64_crc32cw (TARGET_CRC32)
#define HAVE_aarch64_crc32cx (TARGET_CRC32)
#define HAVE_csinc3si_insn 1
#define HAVE_csinc3di_insn 1
#define HAVE_csneg3_uxtw_insn 1
#define HAVE_csneg3si_insn 1
#define HAVE_csneg3di_insn 1
#define HAVE_aarch64_uqdecsi (TARGET_SVE)
#define HAVE_aarch64_uqdecdi (TARGET_SVE)
#define HAVE_andsi3 1
#define HAVE_iorsi3 1
#define HAVE_xorsi3 1
#define HAVE_anddi3 1
#define HAVE_iordi3 1
#define HAVE_xordi3 1
#define HAVE_one_cmplsi2 1
#define HAVE_one_cmpldi2 1
#define HAVE_and_one_cmpl_ashlsi3 1
#define HAVE_ior_one_cmpl_ashlsi3 1
#define HAVE_xor_one_cmpl_ashlsi3 1
#define HAVE_and_one_cmpl_ashrsi3 1
#define HAVE_ior_one_cmpl_ashrsi3 1
#define HAVE_xor_one_cmpl_ashrsi3 1
#define HAVE_and_one_cmpl_lshrsi3 1
#define HAVE_ior_one_cmpl_lshrsi3 1
#define HAVE_xor_one_cmpl_lshrsi3 1
#define HAVE_and_one_cmpl_rotrsi3 1
#define HAVE_ior_one_cmpl_rotrsi3 1
#define HAVE_xor_one_cmpl_rotrsi3 1
#define HAVE_and_one_cmpl_ashldi3 1
#define HAVE_ior_one_cmpl_ashldi3 1
#define HAVE_xor_one_cmpl_ashldi3 1
#define HAVE_and_one_cmpl_ashrdi3 1
#define HAVE_ior_one_cmpl_ashrdi3 1
#define HAVE_xor_one_cmpl_ashrdi3 1
#define HAVE_and_one_cmpl_lshrdi3 1
#define HAVE_ior_one_cmpl_lshrdi3 1
#define HAVE_xor_one_cmpl_lshrdi3 1
#define HAVE_and_one_cmpl_rotrdi3 1
#define HAVE_ior_one_cmpl_rotrdi3 1
#define HAVE_xor_one_cmpl_rotrdi3 1
#define HAVE_clzsi2 1
#define HAVE_clzdi2 1
#define HAVE_clrsbsi2 1
#define HAVE_clrsbdi2 1
#define HAVE_rbitsi2 1
#define HAVE_rbitdi2 1
#define HAVE_ctzsi2 1
#define HAVE_ctzdi2 1
#define HAVE_bswapsi2 1
#define HAVE_bswapdi2 1
#define HAVE_bswaphi2 1
#define HAVE_rev16si2 (aarch_rev16_shleft_mask_imm_p (operands[3], SImode) \
&& aarch_rev16_shright_mask_imm_p (operands[2], SImode))
#define HAVE_rev16di2 (aarch_rev16_shleft_mask_imm_p (operands[3], DImode) \
&& aarch_rev16_shright_mask_imm_p (operands[2], DImode))
#define HAVE_rev16si2_alt (aarch_rev16_shleft_mask_imm_p (operands[3], SImode) \
&& aarch_rev16_shright_mask_imm_p (operands[2], SImode))
#define HAVE_rev16di2_alt (aarch_rev16_shleft_mask_imm_p (operands[3], DImode) \
&& aarch_rev16_shright_mask_imm_p (operands[2], DImode))
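/* Floating-point rounding and FP<->integer conversion patterns; HFmode
   variants additionally require the FP16 extension.  */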
#define HAVE_btrunchf2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_ceilhf2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_floorhf2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_frintnhf2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_nearbyinthf2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_rinthf2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_roundhf2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_btruncsf2 (TARGET_FLOAT)
#define HAVE_ceilsf2 (TARGET_FLOAT)
#define HAVE_floorsf2 (TARGET_FLOAT)
#define HAVE_frintnsf2 (TARGET_FLOAT)
#define HAVE_nearbyintsf2 (TARGET_FLOAT)
#define HAVE_rintsf2 (TARGET_FLOAT)
#define HAVE_roundsf2 (TARGET_FLOAT)
#define HAVE_btruncdf2 (TARGET_FLOAT)
#define HAVE_ceildf2 (TARGET_FLOAT)
#define HAVE_floordf2 (TARGET_FLOAT)
#define HAVE_frintndf2 (TARGET_FLOAT)
#define HAVE_nearbyintdf2 (TARGET_FLOAT)
#define HAVE_rintdf2 (TARGET_FLOAT)
#define HAVE_rounddf2 (TARGET_FLOAT)
#define HAVE_lbtrunchfsi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_lceilhfsi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_lfloorhfsi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_lroundhfsi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_lfrintnhfsi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_lbtruncuhfsi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_lceiluhfsi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_lflooruhfsi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_lrounduhfsi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_lfrintnuhfsi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_lbtruncsfsi2 (TARGET_FLOAT)
#define HAVE_lceilsfsi2 (TARGET_FLOAT)
#define HAVE_lfloorsfsi2 (TARGET_FLOAT)
#define HAVE_lroundsfsi2 (TARGET_FLOAT)
#define HAVE_lfrintnsfsi2 (TARGET_FLOAT)
#define HAVE_lbtruncusfsi2 (TARGET_FLOAT)
#define HAVE_lceilusfsi2 (TARGET_FLOAT)
#define HAVE_lfloorusfsi2 (TARGET_FLOAT)
#define HAVE_lroundusfsi2 (TARGET_FLOAT)
#define HAVE_lfrintnusfsi2 (TARGET_FLOAT)
#define HAVE_lbtruncdfsi2 (TARGET_FLOAT)
#define HAVE_lceildfsi2 (TARGET_FLOAT)
#define HAVE_lfloordfsi2 (TARGET_FLOAT)
#define HAVE_lrounddfsi2 (TARGET_FLOAT)
#define HAVE_lfrintndfsi2 (TARGET_FLOAT)
#define HAVE_lbtruncudfsi2 (TARGET_FLOAT)
#define HAVE_lceiludfsi2 (TARGET_FLOAT)
#define HAVE_lfloorudfsi2 (TARGET_FLOAT)
#define HAVE_lroundudfsi2 (TARGET_FLOAT)
#define HAVE_lfrintnudfsi2 (TARGET_FLOAT)
#define HAVE_lbtrunchfdi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_lceilhfdi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_lfloorhfdi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_lroundhfdi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_lfrintnhfdi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_lbtruncuhfdi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_lceiluhfdi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_lflooruhfdi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_lrounduhfdi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_lfrintnuhfdi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_lbtruncsfdi2 (TARGET_FLOAT)
#define HAVE_lceilsfdi2 (TARGET_FLOAT)
#define HAVE_lfloorsfdi2 (TARGET_FLOAT)
#define HAVE_lroundsfdi2 (TARGET_FLOAT)
#define HAVE_lfrintnsfdi2 (TARGET_FLOAT)
#define HAVE_lbtruncusfdi2 (TARGET_FLOAT)
#define HAVE_lceilusfdi2 (TARGET_FLOAT)
#define HAVE_lfloorusfdi2 (TARGET_FLOAT)
#define HAVE_lroundusfdi2 (TARGET_FLOAT)
#define HAVE_lfrintnusfdi2 (TARGET_FLOAT)
#define HAVE_lbtruncdfdi2 (TARGET_FLOAT)
#define HAVE_lceildfdi2 (TARGET_FLOAT)
#define HAVE_lfloordfdi2 (TARGET_FLOAT)
#define HAVE_lrounddfdi2 (TARGET_FLOAT)
#define HAVE_lfrintndfdi2 (TARGET_FLOAT)
#define HAVE_lbtruncudfdi2 (TARGET_FLOAT)
#define HAVE_lceiludfdi2 (TARGET_FLOAT)
#define HAVE_lfloorudfdi2 (TARGET_FLOAT)
#define HAVE_lroundudfdi2 (TARGET_FLOAT)
#define HAVE_lfrintnudfdi2 (TARGET_FLOAT)
#define HAVE_extendsfdf2 (TARGET_FLOAT)
#define HAVE_extendhfsf2 (TARGET_FLOAT)
#define HAVE_extendhfdf2 (TARGET_FLOAT)
#define HAVE_truncdfsf2 (TARGET_FLOAT)
#define HAVE_truncsfhf2 (TARGET_FLOAT)
#define HAVE_truncdfhf2 (TARGET_FLOAT)
#define HAVE_fix_truncsfsi2 (TARGET_FLOAT)
#define HAVE_fixuns_truncsfsi2 (TARGET_FLOAT)
#define HAVE_fix_truncdfdi2 (TARGET_FLOAT)
#define HAVE_fixuns_truncdfdi2 (TARGET_FLOAT)
#define HAVE_fix_trunchfsi2 (TARGET_FP_F16INST)
#define HAVE_fixuns_trunchfsi2 (TARGET_FP_F16INST)
#define HAVE_fix_trunchfdi2 (TARGET_FP_F16INST)
#define HAVE_fixuns_trunchfdi2 (TARGET_FP_F16INST)
#define HAVE_fix_truncdfsi2 (TARGET_FLOAT)
#define HAVE_fixuns_truncdfsi2 (TARGET_FLOAT)
#define HAVE_fix_truncsfdi2 (TARGET_FLOAT)
#define HAVE_fixuns_truncsfdi2 (TARGET_FLOAT)
#define HAVE_floatsisf2 (TARGET_FLOAT)
#define HAVE_floatunssisf2 (TARGET_FLOAT)
#define HAVE_floatdidf2 (TARGET_FLOAT)
#define HAVE_floatunsdidf2 (TARGET_FLOAT)
#define HAVE_floatdisf2 (TARGET_FLOAT)
#define HAVE_floatunsdisf2 (TARGET_FLOAT)
#define HAVE_floatsidf2 (TARGET_FLOAT)
#define HAVE_floatunssidf2 (TARGET_FLOAT)
#define HAVE_aarch64_fp16_floatsihf2 (TARGET_FP_F16INST)
#define HAVE_aarch64_fp16_floatunssihf2 (TARGET_FP_F16INST)
#define HAVE_aarch64_fp16_floatdihf2 (TARGET_FP_F16INST)
#define HAVE_aarch64_fp16_floatunsdihf2 (TARGET_FP_F16INST)
#define HAVE_fcvtzssf3 1
#define HAVE_fcvtzusf3 1
#define HAVE_fcvtzsdf3 1
#define HAVE_fcvtzudf3 1
#define HAVE_scvtfsi3 1
#define HAVE_ucvtfsi3 1
#define HAVE_scvtfdi3 1
#define HAVE_ucvtfdi3 1
#define HAVE_fcvtzshfsi3 (TARGET_FP_F16INST)
#define HAVE_fcvtzuhfsi3 (TARGET_FP_F16INST)
#define HAVE_fcvtzshfdi3 (TARGET_FP_F16INST)
#define HAVE_fcvtzuhfdi3 (TARGET_FP_F16INST)
#define HAVE_scvtfsihf3 (TARGET_FP_F16INST)
#define HAVE_ucvtfsihf3 (TARGET_FP_F16INST)
#define HAVE_scvtfdihf3 (TARGET_FP_F16INST)
#define HAVE_ucvtfdihf3 (TARGET_FP_F16INST)
#define HAVE_fcvtzshf3 (TARGET_SIMD)
#define HAVE_fcvtzuhf3 (TARGET_SIMD)
#define HAVE_scvtfhi3 (TARGET_SIMD)
#define HAVE_ucvtfhi3 (TARGET_SIMD)
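/* Basic floating-point arithmetic, negation/absolute-value and
   min/max patterns.  */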
#define HAVE_addhf3 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_addsf3 (TARGET_FLOAT)
#define HAVE_adddf3 (TARGET_FLOAT)
#define HAVE_subhf3 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_subsf3 (TARGET_FLOAT)
#define HAVE_subdf3 (TARGET_FLOAT)
#define HAVE_mulhf3 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_mulsf3 (TARGET_FLOAT)
#define HAVE_muldf3 (TARGET_FLOAT)
#define HAVE_neghf2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_negsf2 (TARGET_FLOAT)
#define HAVE_negdf2 (TARGET_FLOAT)
#define HAVE_abshf2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_abssf2 (TARGET_FLOAT)
#define HAVE_absdf2 (TARGET_FLOAT)
#define HAVE_smaxsf3 (TARGET_FLOAT)
#define HAVE_smaxdf3 (TARGET_FLOAT)
#define HAVE_sminsf3 (TARGET_FLOAT)
#define HAVE_smindf3 (TARGET_FLOAT)
#define HAVE_smax_nanhf3 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_smin_nanhf3 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_fmaxhf3 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_fminhf3 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_smax_nansf3 (TARGET_FLOAT)
#define HAVE_smin_nansf3 (TARGET_FLOAT)
#define HAVE_fmaxsf3 (TARGET_FLOAT)
#define HAVE_fminsf3 (TARGET_FLOAT)
#define HAVE_smax_nandf3 (TARGET_FLOAT)
#define HAVE_smin_nandf3 (TARGET_FLOAT)
#define HAVE_fmaxdf3 (TARGET_FLOAT)
#define HAVE_fmindf3 (TARGET_FLOAT)
#define HAVE_copysignsf3_insn (TARGET_FLOAT && TARGET_SIMD)
#define HAVE_copysigndf3_insn (TARGET_FLOAT && TARGET_SIMD)
#define HAVE_aarch64_movdi_tilow (TARGET_FLOAT && (reload_completed || reload_in_progress))
#define HAVE_aarch64_movdi_tflow (TARGET_FLOAT && (reload_completed || reload_in_progress))
#define HAVE_aarch64_movdi_tihigh (TARGET_FLOAT && (reload_completed || reload_in_progress))
#define HAVE_aarch64_movdi_tfhigh (TARGET_FLOAT && (reload_completed || reload_in_progress))
#define HAVE_aarch64_movtihigh_di (TARGET_FLOAT && (reload_completed || reload_in_progress))
#define HAVE_aarch64_movtfhigh_di (TARGET_FLOAT && (reload_completed || reload_in_progress))
#define HAVE_aarch64_movtilow_di (TARGET_FLOAT && (reload_completed || reload_in_progress))
#define HAVE_aarch64_movtflow_di (TARGET_FLOAT && (reload_completed || reload_in_progress))
#define HAVE_aarch64_movtilow_tilow (TARGET_FLOAT && (reload_completed || reload_in_progress))
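/* Symbolic address, GOT and TLS access patterns; the _si/_di suffix
   follows ptr_mode (ILP32 vs. LP64).  */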
#define HAVE_add_losym_si (ptr_mode == SImode || Pmode == SImode)
#define HAVE_add_losym_di (ptr_mode == DImode || Pmode == DImode)
#define HAVE_ldr_got_small_si (ptr_mode == SImode)
#define HAVE_ldr_got_small_di (ptr_mode == DImode)
#define HAVE_ldr_got_small_sidi (TARGET_ILP32)
#define HAVE_ldr_got_small_28k_si (ptr_mode == SImode)
#define HAVE_ldr_got_small_28k_di (ptr_mode == DImode)
#define HAVE_ldr_got_small_28k_sidi (TARGET_ILP32)
#define HAVE_ldr_got_tiny_si (ptr_mode == SImode)
#define HAVE_ldr_got_tiny_di (ptr_mode == DImode)
#define HAVE_ldr_got_tiny_sidi (TARGET_ILP32)
#define HAVE_aarch64_load_tp_hard 1
#define HAVE_tlsie_small_si (ptr_mode == SImode)
#define HAVE_tlsie_small_di (ptr_mode == DImode)
#define HAVE_tlsie_small_sidi 1
#define HAVE_tlsie_tiny_si (ptr_mode == SImode)
#define HAVE_tlsie_tiny_di (ptr_mode == DImode)
#define HAVE_tlsie_tiny_sidi 1
#define HAVE_tlsle12_si (ptr_mode == SImode || Pmode == SImode)
#define HAVE_tlsle12_di (ptr_mode == DImode || Pmode == DImode)
#define HAVE_tlsle24_si (ptr_mode == SImode || Pmode == SImode)
#define HAVE_tlsle24_di (ptr_mode == DImode || Pmode == DImode)
#define HAVE_tlsle32_si (ptr_mode == SImode || Pmode == SImode)
#define HAVE_tlsle32_di (ptr_mode == DImode || Pmode == DImode)
#define HAVE_tlsle48_si (ptr_mode == SImode || Pmode == SImode)
#define HAVE_tlsle48_di (ptr_mode == DImode || Pmode == DImode)
#define HAVE_tlsdesc_small_advsimd_si ((TARGET_TLS_DESC && !TARGET_SVE) && (ptr_mode == SImode))
#define HAVE_tlsdesc_small_advsimd_di ((TARGET_TLS_DESC && !TARGET_SVE) && (ptr_mode == DImode))
#define HAVE_tlsdesc_small_sve_si ((TARGET_TLS_DESC && TARGET_SVE) && (ptr_mode == SImode))
#define HAVE_tlsdesc_small_sve_di ((TARGET_TLS_DESC && TARGET_SVE) && (ptr_mode == DImode))
#define HAVE_stack_tie 1
#define HAVE_aarch64_fjcvtzs (TARGET_JSCVT)
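/* Pointer-authentication (PACIASP/AUTIASP etc.) patterns; these live in
   the HINT space and execute as NOPs on cores without the extension.  */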
#define HAVE_paciasp 1
#define HAVE_autiasp 1
#define HAVE_pacibsp 1
#define HAVE_autibsp 1
#define HAVE_pacia1716 1
#define HAVE_autia1716 1
#define HAVE_pacib1716 1
#define HAVE_autib1716 1
#define HAVE_xpaclri 1
#define HAVE_blockage 1
#define HAVE_probe_stack_range 1
#define HAVE_probe_sve_stack_clash_si ((TARGET_SVE) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_probe_sve_stack_clash_di ((TARGET_SVE) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_reg_stack_protect_address_si ((aarch64_stack_protector_guard != SSP_GLOBAL) && (ptr_mode == SImode))
#define HAVE_reg_stack_protect_address_di ((aarch64_stack_protector_guard != SSP_GLOBAL) && (ptr_mode == DImode))
#define HAVE_stack_protect_set_si (ptr_mode == SImode)
#define HAVE_stack_protect_set_di (ptr_mode == DImode)
#define HAVE_stack_protect_test_si (ptr_mode == SImode)
#define HAVE_stack_protect_test_di (ptr_mode == DImode)
#define HAVE_set_fpcr 1
#define HAVE_get_fpcr 1
#define HAVE_set_fpsr 1
#define HAVE_get_fpsr 1
#define HAVE_speculation_tracker 1
#define HAVE_speculation_tracker_rev 1
#define HAVE_bti_noarg 1
#define HAVE_bti_c 1
#define HAVE_bti_j 1
#define HAVE_bti_jc 1
#define HAVE_speculation_barrier 1
#define HAVE_despeculate_simpleqi 1
#define HAVE_despeculate_simplehi 1
#define HAVE_despeculate_simplesi 1
#define HAVE_despeculate_simpledi 1
#define HAVE_despeculate_simpleti 1
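/* FRINT32Z/FRINT32X/FRINT64Z/FRINT64X patterns from the Armv8.5-A
   FRINT extension.  */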
#define HAVE_aarch64_frint32zv2sf (TARGET_FRINT && TARGET_FLOAT \
&& !(VECTOR_MODE_P (V2SFmode) && !TARGET_SIMD))
#define HAVE_aarch64_frint32xv2sf (TARGET_FRINT && TARGET_FLOAT \
&& !(VECTOR_MODE_P (V2SFmode) && !TARGET_SIMD))
#define HAVE_aarch64_frint64zv2sf (TARGET_FRINT && TARGET_FLOAT \
&& !(VECTOR_MODE_P (V2SFmode) && !TARGET_SIMD))
#define HAVE_aarch64_frint64xv2sf (TARGET_FRINT && TARGET_FLOAT \
&& !(VECTOR_MODE_P (V2SFmode) && !TARGET_SIMD))
#define HAVE_aarch64_frint32zv4sf (TARGET_FRINT && TARGET_FLOAT \
&& !(VECTOR_MODE_P (V4SFmode) && !TARGET_SIMD))
#define HAVE_aarch64_frint32xv4sf (TARGET_FRINT && TARGET_FLOAT \
&& !(VECTOR_MODE_P (V4SFmode) && !TARGET_SIMD))
#define HAVE_aarch64_frint64zv4sf (TARGET_FRINT && TARGET_FLOAT \
&& !(VECTOR_MODE_P (V4SFmode) && !TARGET_SIMD))
#define HAVE_aarch64_frint64xv4sf (TARGET_FRINT && TARGET_FLOAT \
&& !(VECTOR_MODE_P (V4SFmode) && !TARGET_SIMD))
#define HAVE_aarch64_frint32zv2df (TARGET_FRINT && TARGET_FLOAT \
&& !(VECTOR_MODE_P (V2DFmode) && !TARGET_SIMD))
#define HAVE_aarch64_frint32xv2df (TARGET_FRINT && TARGET_FLOAT \
&& !(VECTOR_MODE_P (V2DFmode) && !TARGET_SIMD))
#define HAVE_aarch64_frint64zv2df (TARGET_FRINT && TARGET_FLOAT \
&& !(VECTOR_MODE_P (V2DFmode) && !TARGET_SIMD))
#define HAVE_aarch64_frint64xv2df (TARGET_FRINT && TARGET_FLOAT \
&& !(VECTOR_MODE_P (V2DFmode) && !TARGET_SIMD))
#define HAVE_aarch64_frint32zdf (TARGET_FRINT && TARGET_FLOAT \
&& !(VECTOR_MODE_P (DFmode) && !TARGET_SIMD))
#define HAVE_aarch64_frint32xdf (TARGET_FRINT && TARGET_FLOAT \
&& !(VECTOR_MODE_P (DFmode) && !TARGET_SIMD))
#define HAVE_aarch64_frint64zdf (TARGET_FRINT && TARGET_FLOAT \
&& !(VECTOR_MODE_P (DFmode) && !TARGET_SIMD))
#define HAVE_aarch64_frint64xdf (TARGET_FRINT && TARGET_FLOAT \
&& !(VECTOR_MODE_P (DFmode) && !TARGET_SIMD))
#define HAVE_aarch64_frint32zsf (TARGET_FRINT && TARGET_FLOAT \
&& !(VECTOR_MODE_P (SFmode) && !TARGET_SIMD))
#define HAVE_aarch64_frint32xsf (TARGET_FRINT && TARGET_FLOAT \
&& !(VECTOR_MODE_P (SFmode) && !TARGET_SIMD))
#define HAVE_aarch64_frint64zsf (TARGET_FRINT && TARGET_FLOAT \
&& !(VECTOR_MODE_P (SFmode) && !TARGET_SIMD))
#define HAVE_aarch64_frint64xsf (TARGET_FRINT && TARGET_FLOAT \
&& !(VECTOR_MODE_P (SFmode) && !TARGET_SIMD))
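/* Transactional Memory Extension (TME) patterns; TCANCEL takes a 16-bit
   immediate status code, hence the UINTVAL bound.  */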
#define HAVE_tstart (TARGET_TME)
#define HAVE_ttest (TARGET_TME)
#define HAVE_tcommit (TARGET_TME)
#define HAVE_tcancel (TARGET_TME && (UINTVAL (operands[0]) <= 65535))
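/* RNDR/RNDRRS random-number patterns.  */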
#define HAVE_aarch64_rndr (TARGET_RNG)
#define HAVE_aarch64_rndrrs (TARGET_RNG)
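/* Memory Tagging Extension (MTE) patterns.  */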
#define HAVE_irg (TARGET_MEMTAG)
#define HAVE_gmi (TARGET_MEMTAG)
#define HAVE_addg (TARGET_MEMTAG)
#define HAVE_subp (TARGET_MEMTAG)
#define HAVE_ldg (TARGET_MEMTAG)
#define HAVE_stg (TARGET_MEMTAG)
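/* Advanced SIMD patterns from here on; all require TARGET_SIMD.  */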
#define HAVE_aarch64_simd_dupv8qi (TARGET_SIMD)
#define HAVE_aarch64_simd_dupv16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_dupv4hi (TARGET_SIMD)
#define HAVE_aarch64_simd_dupv8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_dupv2si (TARGET_SIMD)
#define HAVE_aarch64_simd_dupv4si (TARGET_SIMD)
#define HAVE_aarch64_simd_dupv2di (TARGET_SIMD)
#define HAVE_aarch64_simd_dupv4hf (TARGET_SIMD)
#define HAVE_aarch64_simd_dupv8hf (TARGET_SIMD)
#define HAVE_aarch64_simd_dupv2sf (TARGET_SIMD)
#define HAVE_aarch64_simd_dupv4sf (TARGET_SIMD)
#define HAVE_aarch64_simd_dupv2df (TARGET_SIMD)
#define HAVE_aarch64_simd_dupv4bf (TARGET_SIMD)
#define HAVE_aarch64_simd_dupv8bf (TARGET_SIMD)
#define HAVE_aarch64_dup_lanev8qi (TARGET_SIMD)
#define HAVE_aarch64_dup_lanev16qi (TARGET_SIMD)
#define HAVE_aarch64_dup_lanev4hi (TARGET_SIMD)
#define HAVE_aarch64_dup_lanev8hi (TARGET_SIMD)
#define HAVE_aarch64_dup_lanev2si (TARGET_SIMD)
#define HAVE_aarch64_dup_lanev4si (TARGET_SIMD)
#define HAVE_aarch64_dup_lanev2di (TARGET_SIMD)
#define HAVE_aarch64_dup_lanev4hf (TARGET_SIMD)
#define HAVE_aarch64_dup_lanev8hf (TARGET_SIMD)
#define HAVE_aarch64_dup_lanev4bf (TARGET_SIMD)
#define HAVE_aarch64_dup_lanev8bf (TARGET_SIMD)
#define HAVE_aarch64_dup_lanev2sf (TARGET_SIMD)
#define HAVE_aarch64_dup_lanev4sf (TARGET_SIMD)
#define HAVE_aarch64_dup_lanev2df (TARGET_SIMD)
#define HAVE_aarch64_dup_lane_to_128v8qi (TARGET_SIMD)
#define HAVE_aarch64_dup_lane_to_64v16qi (TARGET_SIMD)
#define HAVE_aarch64_dup_lane_to_128v4hi (TARGET_SIMD)
#define HAVE_aarch64_dup_lane_to_64v8hi (TARGET_SIMD)
#define HAVE_aarch64_dup_lane_to_128v2si (TARGET_SIMD)
#define HAVE_aarch64_dup_lane_to_64v4si (TARGET_SIMD)
#define HAVE_aarch64_dup_lane_to_128v4hf (TARGET_SIMD)
#define HAVE_aarch64_dup_lane_to_64v8hf (TARGET_SIMD)
#define HAVE_aarch64_dup_lane_to_128v2sf (TARGET_SIMD)
#define HAVE_aarch64_dup_lane_to_64v4sf (TARGET_SIMD)
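/* Storing lane 0 of a vector is a plain scalar store, but only when the
   lane index maps to the low lane under the current endianness.  */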
#define HAVE_aarch64_store_lane0v8qi (TARGET_SIMD \
&& ENDIAN_LANE_N (8, INTVAL (operands[2])) == 0)
#define HAVE_aarch64_store_lane0v16qi (TARGET_SIMD \
&& ENDIAN_LANE_N (16, INTVAL (operands[2])) == 0)
#define HAVE_aarch64_store_lane0v4hi (TARGET_SIMD \
&& ENDIAN_LANE_N (4, INTVAL (operands[2])) == 0)
#define HAVE_aarch64_store_lane0v8hi (TARGET_SIMD \
&& ENDIAN_LANE_N (8, INTVAL (operands[2])) == 0)
#define HAVE_aarch64_store_lane0v2si (TARGET_SIMD \
&& ENDIAN_LANE_N (2, INTVAL (operands[2])) == 0)
#define HAVE_aarch64_store_lane0v4si (TARGET_SIMD \
&& ENDIAN_LANE_N (4, INTVAL (operands[2])) == 0)
#define HAVE_aarch64_store_lane0v2di (TARGET_SIMD \
&& ENDIAN_LANE_N (2, INTVAL (operands[2])) == 0)
#define HAVE_aarch64_store_lane0v4hf (TARGET_SIMD \
&& ENDIAN_LANE_N (4, INTVAL (operands[2])) == 0)
#define HAVE_aarch64_store_lane0v8hf (TARGET_SIMD \
&& ENDIAN_LANE_N (8, INTVAL (operands[2])) == 0)
#define HAVE_aarch64_store_lane0v4bf (TARGET_SIMD \
&& ENDIAN_LANE_N (4, INTVAL (operands[2])) == 0)
#define HAVE_aarch64_store_lane0v8bf (TARGET_SIMD \
&& ENDIAN_LANE_N (8, INTVAL (operands[2])) == 0)
#define HAVE_aarch64_store_lane0v2sf (TARGET_SIMD \
&& ENDIAN_LANE_N (2, INTVAL (operands[2])) == 0)
#define HAVE_aarch64_store_lane0v4sf (TARGET_SIMD \
&& ENDIAN_LANE_N (4, INTVAL (operands[2])) == 0)
#define HAVE_aarch64_store_lane0v2df (TARGET_SIMD \
&& ENDIAN_LANE_N (2, INTVAL (operands[2])) == 0)
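/* Vector load/store pair patterns; as for the scalar pairs, the two
   addresses must be adjacent.  */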
#define HAVE_load_pairv8qiv8qi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8QImode))))
#define HAVE_load_pairv4hiv8qi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4HImode))))
#define HAVE_load_pairv4hfv8qi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4HFmode))))
#define HAVE_load_pairv2siv8qi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2SImode))))
#define HAVE_load_pairv2sfv8qi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2SFmode))))
#define HAVE_load_pairdfv8qi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (DFmode))))
#define HAVE_load_pairv8qiv4hi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8QImode))))
#define HAVE_load_pairv4hiv4hi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4HImode))))
#define HAVE_load_pairv4hfv4hi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4HFmode))))
#define HAVE_load_pairv2siv4hi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2SImode))))
#define HAVE_load_pairv2sfv4hi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2SFmode))))
#define HAVE_load_pairdfv4hi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (DFmode))))
#define HAVE_load_pairv8qiv4hf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8QImode))))
#define HAVE_load_pairv4hiv4hf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4HImode))))
#define HAVE_load_pairv4hfv4hf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4HFmode))))
#define HAVE_load_pairv2siv4hf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2SImode))))
#define HAVE_load_pairv2sfv4hf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2SFmode))))
#define HAVE_load_pairdfv4hf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (DFmode))))
#define HAVE_load_pairv8qiv2si (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8QImode))))
#define HAVE_load_pairv4hiv2si (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4HImode))))
#define HAVE_load_pairv4hfv2si (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4HFmode))))
#define HAVE_load_pairv2siv2si (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2SImode))))
#define HAVE_load_pairv2sfv2si (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2SFmode))))
#define HAVE_load_pairdfv2si (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (DFmode))))
#define HAVE_load_pairv8qiv2sf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8QImode))))
#define HAVE_load_pairv4hiv2sf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4HImode))))
#define HAVE_load_pairv4hfv2sf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4HFmode))))
#define HAVE_load_pairv2siv2sf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2SImode))))
#define HAVE_load_pairv2sfv2sf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2SFmode))))
#define HAVE_load_pairdfv2sf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (DFmode))))
#define HAVE_load_pairv8qidf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8QImode))))
#define HAVE_load_pairv4hidf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4HImode))))
#define HAVE_load_pairv4hfdf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4HFmode))))
#define HAVE_load_pairv2sidf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2SImode))))
#define HAVE_load_pairv2sfdf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2SFmode))))
#define HAVE_load_pairdfdf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (DFmode))))
#define HAVE_vec_store_pairv8qiv8qi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8QImode))))
#define HAVE_vec_store_pairv4hiv8qi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4HImode))))
#define HAVE_vec_store_pairv4hfv8qi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4HFmode))))
#define HAVE_vec_store_pairv2siv8qi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2SImode))))
#define HAVE_vec_store_pairv2sfv8qi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2SFmode))))
#define HAVE_vec_store_pairdfv8qi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (DFmode))))
#define HAVE_vec_store_pairv8qiv4hi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8QImode))))
#define HAVE_vec_store_pairv4hiv4hi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4HImode))))
#define HAVE_vec_store_pairv4hfv4hi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4HFmode))))
#define HAVE_vec_store_pairv2siv4hi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2SImode))))
#define HAVE_vec_store_pairv2sfv4hi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2SFmode))))
#define HAVE_vec_store_pairdfv4hi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (DFmode))))
#define HAVE_vec_store_pairv8qiv4hf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8QImode))))
#define HAVE_vec_store_pairv4hiv4hf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4HImode))))
#define HAVE_vec_store_pairv4hfv4hf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4HFmode))))
#define HAVE_vec_store_pairv2siv4hf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2SImode))))
#define HAVE_vec_store_pairv2sfv4hf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2SFmode))))
#define HAVE_vec_store_pairdfv4hf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (DFmode))))
#define HAVE_vec_store_pairv8qiv2si (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8QImode))))
#define HAVE_vec_store_pairv4hiv2si (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4HImode))))
#define HAVE_vec_store_pairv4hfv2si (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4HFmode))))
#define HAVE_vec_store_pairv2siv2si (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2SImode))))
#define HAVE_vec_store_pairv2sfv2si (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2SFmode))))
#define HAVE_vec_store_pairdfv2si (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (DFmode))))
#define HAVE_vec_store_pairv8qiv2sf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8QImode))))
#define HAVE_vec_store_pairv4hiv2sf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4HImode))))
#define HAVE_vec_store_pairv4hfv2sf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4HFmode))))
#define HAVE_vec_store_pairv2siv2sf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2SImode))))
#define HAVE_vec_store_pairv2sfv2sf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2SFmode))))
#define HAVE_vec_store_pairdfv2sf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (DFmode))))
#define HAVE_vec_store_pairv8qidf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8QImode))))
#define HAVE_vec_store_pairv4hidf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4HImode))))
#define HAVE_vec_store_pairv4hfdf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4HFmode))))
#define HAVE_vec_store_pairv2sidf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2SImode))))
#define HAVE_vec_store_pairv2sfdf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2SFmode))))
#define HAVE_vec_store_pairdfdf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (DFmode))))
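/* 128-bit (Q-register) load/store pair patterns.  */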
#define HAVE_load_pairv16qiv16qi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V16QImode))))
#define HAVE_load_pairv16qiv8hi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V16QImode))))
#define HAVE_load_pairv16qiv4si (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V16QImode))))
#define HAVE_load_pairv16qiv2di (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V16QImode))))
#define HAVE_load_pairv16qiv8hf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V16QImode))))
#define HAVE_load_pairv16qiv8bf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V16QImode))))
#define HAVE_load_pairv16qiv4sf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V16QImode))))
#define HAVE_load_pairv16qiv2df (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V16QImode))))
#define HAVE_load_pairv8hiv16qi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8HImode))))
#define HAVE_load_pairv8hiv8hi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8HImode))))
#define HAVE_load_pairv8hiv4si (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8HImode))))
#define HAVE_load_pairv8hiv2di (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8HImode))))
#define HAVE_load_pairv8hiv8hf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8HImode))))
#define HAVE_load_pairv8hiv8bf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8HImode))))
#define HAVE_load_pairv8hiv4sf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8HImode))))
#define HAVE_load_pairv8hiv2df (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8HImode))))
#define HAVE_load_pairv4siv16qi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4SImode))))
#define HAVE_load_pairv4siv8hi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4SImode))))
#define HAVE_load_pairv4siv4si (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4SImode))))
#define HAVE_load_pairv4siv2di (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4SImode))))
#define HAVE_load_pairv4siv8hf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4SImode))))
#define HAVE_load_pairv4siv8bf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4SImode))))
#define HAVE_load_pairv4siv4sf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4SImode))))
#define HAVE_load_pairv4siv2df (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4SImode))))
#define HAVE_load_pairv2div16qi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2DImode))))
#define HAVE_load_pairv2div8hi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2DImode))))
#define HAVE_load_pairv2div4si (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2DImode))))
#define HAVE_load_pairv2div2di (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2DImode))))
#define HAVE_load_pairv2div8hf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2DImode))))
#define HAVE_load_pairv2div8bf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2DImode))))
#define HAVE_load_pairv2div4sf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2DImode))))
#define HAVE_load_pairv2div2df (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2DImode))))
#define HAVE_load_pairv8hfv16qi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8HFmode))))
#define HAVE_load_pairv8hfv8hi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8HFmode))))
#define HAVE_load_pairv8hfv4si (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8HFmode))))
#define HAVE_load_pairv8hfv2di (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8HFmode))))
#define HAVE_load_pairv8hfv8hf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8HFmode))))
#define HAVE_load_pairv8hfv8bf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8HFmode))))
#define HAVE_load_pairv8hfv4sf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8HFmode))))
#define HAVE_load_pairv8hfv2df (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8HFmode))))
#define HAVE_load_pairv4sfv16qi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4SFmode))))
#define HAVE_load_pairv4sfv8hi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4SFmode))))
#define HAVE_load_pairv4sfv4si (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4SFmode))))
#define HAVE_load_pairv4sfv2di (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4SFmode))))
#define HAVE_load_pairv4sfv8hf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4SFmode))))
#define HAVE_load_pairv4sfv8bf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4SFmode))))
#define HAVE_load_pairv4sfv4sf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4SFmode))))
#define HAVE_load_pairv4sfv2df (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4SFmode))))
#define HAVE_load_pairv2dfv16qi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2DFmode))))
#define HAVE_load_pairv2dfv8hi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2DFmode))))
#define HAVE_load_pairv2dfv4si (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2DFmode))))
#define HAVE_load_pairv2dfv2di (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2DFmode))))
#define HAVE_load_pairv2dfv8hf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2DFmode))))
#define HAVE_load_pairv2dfv8bf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2DFmode))))
#define HAVE_load_pairv2dfv4sf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2DFmode))))
#define HAVE_load_pairv2dfv2df (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2DFmode))))
#define HAVE_load_pairv8bfv16qi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8BFmode))))
#define HAVE_load_pairv8bfv8hi (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8BFmode))))
#define HAVE_load_pairv8bfv4si (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8BFmode))))
#define HAVE_load_pairv8bfv2di (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8BFmode))))
#define HAVE_load_pairv8bfv8hf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8BFmode))))
#define HAVE_load_pairv8bfv8bf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8BFmode))))
#define HAVE_load_pairv8bfv4sf (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8BFmode))))
#define HAVE_load_pairv8bfv2df (TARGET_SIMD \
&& rtx_equal_p (XEXP (operands[3], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8BFmode))))
#define HAVE_vec_store_pairv16qiv16qi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V16QImode))))
#define HAVE_vec_store_pairv16qiv8hi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V16QImode))))
#define HAVE_vec_store_pairv16qiv4si (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V16QImode))))
#define HAVE_vec_store_pairv16qiv2di (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V16QImode))))
#define HAVE_vec_store_pairv16qiv8hf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V16QImode))))
#define HAVE_vec_store_pairv16qiv8bf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V16QImode))))
#define HAVE_vec_store_pairv16qiv4sf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V16QImode))))
#define HAVE_vec_store_pairv16qiv2df (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V16QImode))))
#define HAVE_vec_store_pairv8hiv16qi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8HImode))))
#define HAVE_vec_store_pairv8hiv8hi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8HImode))))
#define HAVE_vec_store_pairv8hiv4si (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8HImode))))
#define HAVE_vec_store_pairv8hiv2di (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8HImode))))
#define HAVE_vec_store_pairv8hiv8hf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8HImode))))
#define HAVE_vec_store_pairv8hiv8bf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8HImode))))
#define HAVE_vec_store_pairv8hiv4sf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8HImode))))
#define HAVE_vec_store_pairv8hiv2df (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8HImode))))
#define HAVE_vec_store_pairv4siv16qi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4SImode))))
#define HAVE_vec_store_pairv4siv8hi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4SImode))))
#define HAVE_vec_store_pairv4siv4si (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4SImode))))
#define HAVE_vec_store_pairv4siv2di (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4SImode))))
#define HAVE_vec_store_pairv4siv8hf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4SImode))))
#define HAVE_vec_store_pairv4siv8bf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4SImode))))
#define HAVE_vec_store_pairv4siv4sf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4SImode))))
#define HAVE_vec_store_pairv4siv2df (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4SImode))))
#define HAVE_vec_store_pairv2div16qi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2DImode))))
#define HAVE_vec_store_pairv2div8hi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2DImode))))
#define HAVE_vec_store_pairv2div4si (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2DImode))))
#define HAVE_vec_store_pairv2div2di (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2DImode))))
#define HAVE_vec_store_pairv2div8hf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2DImode))))
#define HAVE_vec_store_pairv2div8bf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2DImode))))
#define HAVE_vec_store_pairv2div4sf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2DImode))))
#define HAVE_vec_store_pairv2div2df (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2DImode))))
#define HAVE_vec_store_pairv8hfv16qi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8HFmode))))
#define HAVE_vec_store_pairv8hfv8hi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8HFmode))))
#define HAVE_vec_store_pairv8hfv4si (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8HFmode))))
#define HAVE_vec_store_pairv8hfv2di (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8HFmode))))
#define HAVE_vec_store_pairv8hfv8hf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8HFmode))))
#define HAVE_vec_store_pairv8hfv8bf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8HFmode))))
#define HAVE_vec_store_pairv8hfv4sf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8HFmode))))
#define HAVE_vec_store_pairv8hfv2df (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8HFmode))))
#define HAVE_vec_store_pairv4sfv16qi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4SFmode))))
#define HAVE_vec_store_pairv4sfv8hi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4SFmode))))
#define HAVE_vec_store_pairv4sfv4si (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4SFmode))))
#define HAVE_vec_store_pairv4sfv2di (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4SFmode))))
#define HAVE_vec_store_pairv4sfv8hf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4SFmode))))
#define HAVE_vec_store_pairv4sfv8bf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4SFmode))))
#define HAVE_vec_store_pairv4sfv4sf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4SFmode))))
#define HAVE_vec_store_pairv4sfv2df (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V4SFmode))))
#define HAVE_vec_store_pairv2dfv16qi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2DFmode))))
#define HAVE_vec_store_pairv2dfv8hi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2DFmode))))
#define HAVE_vec_store_pairv2dfv4si (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2DFmode))))
#define HAVE_vec_store_pairv2dfv2di (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2DFmode))))
#define HAVE_vec_store_pairv2dfv8hf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2DFmode))))
#define HAVE_vec_store_pairv2dfv8bf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2DFmode))))
#define HAVE_vec_store_pairv2dfv4sf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2DFmode))))
#define HAVE_vec_store_pairv2dfv2df (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V2DFmode))))
#define HAVE_vec_store_pairv8bfv16qi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8BFmode))))
#define HAVE_vec_store_pairv8bfv8hi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8BFmode))))
#define HAVE_vec_store_pairv8bfv4si (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8BFmode))))
#define HAVE_vec_store_pairv8bfv2di (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8BFmode))))
#define HAVE_vec_store_pairv8bfv8hf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8BFmode))))
#define HAVE_vec_store_pairv8bfv8bf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8BFmode))))
#define HAVE_vec_store_pairv8bfv4sf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8BFmode))))
#define HAVE_vec_store_pairv8bfv2df (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[0], 0), \
GET_MODE_SIZE (V8BFmode))))
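/* Extraction of the low or high 64-bit half of a 128-bit vector
   register (vget_low/vget_high-style patterns).  */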
#define HAVE_aarch64_simd_mov_from_v16qilow (TARGET_SIMD)
#define HAVE_aarch64_simd_mov_from_v8hilow (TARGET_SIMD)
#define HAVE_aarch64_simd_mov_from_v4silow (TARGET_SIMD)
#define HAVE_aarch64_simd_mov_from_v8hflow (TARGET_SIMD)
#define HAVE_aarch64_simd_mov_from_v8bflow (TARGET_SIMD)
#define HAVE_aarch64_simd_mov_from_v4sflow (TARGET_SIMD)
#define HAVE_aarch64_simd_mov_from_v16qihigh (TARGET_SIMD)
#define HAVE_aarch64_simd_mov_from_v8hihigh (TARGET_SIMD)
#define HAVE_aarch64_simd_mov_from_v4sihigh (TARGET_SIMD)
#define HAVE_aarch64_simd_mov_from_v8hfhigh (TARGET_SIMD)
#define HAVE_aarch64_simd_mov_from_v8bfhigh (TARGET_SIMD)
#define HAVE_aarch64_simd_mov_from_v4sfhigh (TARGET_SIMD)
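/* Bitwise ORN/BIC and basic integer vector arithmetic, plus byte- and
   bit-reverse patterns; all gated on Advanced SIMD.  */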
#define HAVE_ornv8qi3 (TARGET_SIMD)
#define HAVE_ornv16qi3 (TARGET_SIMD)
#define HAVE_ornv4hi3 (TARGET_SIMD)
#define HAVE_ornv8hi3 (TARGET_SIMD)
#define HAVE_ornv2si3 (TARGET_SIMD)
#define HAVE_ornv4si3 (TARGET_SIMD)
#define HAVE_ornv2di3 (TARGET_SIMD)
#define HAVE_bicv8qi3 (TARGET_SIMD)
#define HAVE_bicv16qi3 (TARGET_SIMD)
#define HAVE_bicv4hi3 (TARGET_SIMD)
#define HAVE_bicv8hi3 (TARGET_SIMD)
#define HAVE_bicv2si3 (TARGET_SIMD)
#define HAVE_bicv4si3 (TARGET_SIMD)
#define HAVE_bicv2di3 (TARGET_SIMD)
#define HAVE_addv8qi3 (TARGET_SIMD)
#define HAVE_addv16qi3 (TARGET_SIMD)
#define HAVE_addv4hi3 (TARGET_SIMD)
#define HAVE_addv8hi3 (TARGET_SIMD)
#define HAVE_addv2si3 (TARGET_SIMD)
#define HAVE_addv4si3 (TARGET_SIMD)
#define HAVE_addv2di3 (TARGET_SIMD)
#define HAVE_subv8qi3 (TARGET_SIMD)
#define HAVE_subv16qi3 (TARGET_SIMD)
#define HAVE_subv4hi3 (TARGET_SIMD)
#define HAVE_subv8hi3 (TARGET_SIMD)
#define HAVE_subv2si3 (TARGET_SIMD)
#define HAVE_subv4si3 (TARGET_SIMD)
#define HAVE_subv2di3 (TARGET_SIMD)
#define HAVE_mulv8qi3 (TARGET_SIMD)
#define HAVE_mulv16qi3 (TARGET_SIMD)
#define HAVE_mulv4hi3 (TARGET_SIMD)
#define HAVE_mulv8hi3 (TARGET_SIMD)
#define HAVE_mulv2si3 (TARGET_SIMD)
#define HAVE_mulv4si3 (TARGET_SIMD)
#define HAVE_bswapv4hi2 (TARGET_SIMD)
#define HAVE_bswapv8hi2 (TARGET_SIMD)
#define HAVE_bswapv2si2 (TARGET_SIMD)
#define HAVE_bswapv4si2 (TARGET_SIMD)
#define HAVE_bswapv2di2 (TARGET_SIMD)
#define HAVE_aarch64_rbitv8qi (TARGET_SIMD)
#define HAVE_aarch64_rbitv16qi (TARGET_SIMD)
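/* Complex-arithmetic FCADD/FCMLA patterns, including lane forms.
   TARGET_COMPLEX corresponds to the Armv8.3-A complex-number
   extension; half-precision variants also need TARGET_SIMD_F16INST.  */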
#define HAVE_aarch64_fcadd90v4hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fcadd270v4hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fcadd90v8hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fcadd270v8hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fcadd90v2sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcadd270v2sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcadd90v4sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcadd270v4sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcadd90v2df (TARGET_COMPLEX)
#define HAVE_aarch64_fcadd270v2df (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla0v4hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fcmla90v4hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fcmla180v4hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fcmla270v4hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fcmla0v8hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fcmla90v8hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fcmla180v8hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fcmla270v8hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fcmla0v2sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla90v2sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla180v2sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla270v2sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla0v4sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla90v4sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla180v4sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla270v4sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla0v2df (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla90v2df (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla180v2df (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla270v2df (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla_lane0v4hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fcmla_lane90v4hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fcmla_lane180v4hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fcmla_lane270v4hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fcmla_lane0v8hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fcmla_lane90v8hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fcmla_lane180v8hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fcmla_lane270v8hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fcmla_lane0v2sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla_lane90v2sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla_lane180v2sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla_lane270v2sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla_lane0v4sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla_lane90v4sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla_lane180v4sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla_lane270v4sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla_lane0v2df (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla_lane90v2df (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla_lane180v2df (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla_lane270v2df (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla_laneq0v4hf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla_laneq90v4hf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla_laneq180v4hf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmla_laneq270v4hf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmlaq_lane0v8hf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmlaq_lane90v8hf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmlaq_lane180v8hf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmlaq_lane270v8hf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmlaq_lane0v4sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmlaq_lane90v4sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmlaq_lane180v4sf (TARGET_COMPLEX)
#define HAVE_aarch64_fcmlaq_lane270v4sf (TARGET_COMPLEX)
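/* Dot-product patterns: SDOT/UDOT require TARGET_DOTPROD, while the
   mixed-signedness USDOT/SUDOT forms require the I8MM extension.  */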
#define HAVE_aarch64_sdotv8qi (TARGET_DOTPROD)
#define HAVE_aarch64_udotv8qi (TARGET_DOTPROD)
#define HAVE_aarch64_sdotv16qi (TARGET_DOTPROD)
#define HAVE_aarch64_udotv16qi (TARGET_DOTPROD)
#define HAVE_aarch64_usdotv8qi (TARGET_I8MM)
#define HAVE_aarch64_usdotv16qi (TARGET_I8MM)
#define HAVE_aarch64_sdot_lanev8qi (TARGET_DOTPROD)
#define HAVE_aarch64_udot_lanev8qi (TARGET_DOTPROD)
#define HAVE_aarch64_sdot_lanev16qi (TARGET_DOTPROD)
#define HAVE_aarch64_udot_lanev16qi (TARGET_DOTPROD)
#define HAVE_aarch64_sdot_laneqv8qi (TARGET_DOTPROD)
#define HAVE_aarch64_udot_laneqv8qi (TARGET_DOTPROD)
#define HAVE_aarch64_sdot_laneqv16qi (TARGET_DOTPROD)
#define HAVE_aarch64_udot_laneqv16qi (TARGET_DOTPROD)
#define HAVE_aarch64_usdot_lanev8qi (TARGET_I8MM)
#define HAVE_aarch64_sudot_lanev8qi (TARGET_I8MM)
#define HAVE_aarch64_usdot_lanev16qi (TARGET_I8MM)
#define HAVE_aarch64_sudot_lanev16qi (TARGET_I8MM)
#define HAVE_aarch64_usdot_laneqv8qi (TARGET_I8MM)
#define HAVE_aarch64_sudot_laneqv8qi (TARGET_I8MM)
#define HAVE_aarch64_usdot_laneqv16qi (TARGET_I8MM)
#define HAVE_aarch64_sudot_laneqv16qi (TARGET_I8MM)
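/* Reciprocal square-root estimate (FRSQRTE) and step (FRSQRTS)
   patterns, in vector and scalar forms.  */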
#define HAVE_aarch64_rsqrtev4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_rsqrtev8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_rsqrtev2sf (TARGET_SIMD)
#define HAVE_aarch64_rsqrtev4sf (TARGET_SIMD)
#define HAVE_aarch64_rsqrtev2df (TARGET_SIMD)
#define HAVE_aarch64_rsqrtehf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_rsqrtesf (TARGET_SIMD)
#define HAVE_aarch64_rsqrtedf (TARGET_SIMD)
#define HAVE_aarch64_rsqrtsv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_rsqrtsv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_rsqrtsv2sf (TARGET_SIMD)
#define HAVE_aarch64_rsqrtsv4sf (TARGET_SIMD)
#define HAVE_aarch64_rsqrtsv2df (TARGET_SIMD)
#define HAVE_aarch64_rsqrtshf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_rsqrtssf (TARGET_SIMD)
#define HAVE_aarch64_rsqrtsdf (TARGET_SIMD)
#define HAVE_negv8qi2 (TARGET_SIMD)
#define HAVE_negv16qi2 (TARGET_SIMD)
#define HAVE_negv4hi2 (TARGET_SIMD)
#define HAVE_negv8hi2 (TARGET_SIMD)
#define HAVE_negv2si2 (TARGET_SIMD)
#define HAVE_negv4si2 (TARGET_SIMD)
#define HAVE_negv2di2 (TARGET_SIMD)
#define HAVE_absv8qi2 (TARGET_SIMD)
#define HAVE_absv16qi2 (TARGET_SIMD)
#define HAVE_absv4hi2 (TARGET_SIMD)
#define HAVE_absv8hi2 (TARGET_SIMD)
#define HAVE_absv2si2 (TARGET_SIMD)
#define HAVE_absv4si2 (TARGET_SIMD)
#define HAVE_absv2di2 (TARGET_SIMD)
#define HAVE_aarch64_absv8qi (TARGET_SIMD)
#define HAVE_aarch64_absv16qi (TARGET_SIMD)
#define HAVE_aarch64_absv4hi (TARGET_SIMD)
#define HAVE_aarch64_absv8hi (TARGET_SIMD)
#define HAVE_aarch64_absv2si (TARGET_SIMD)
#define HAVE_aarch64_absv4si (TARGET_SIMD)
#define HAVE_aarch64_absv2di (TARGET_SIMD)
#define HAVE_aarch64_absdi (TARGET_SIMD)
#define HAVE_aarch64_sabdv8qi_3 (TARGET_SIMD)
#define HAVE_aarch64_uabdv8qi_3 (TARGET_SIMD)
#define HAVE_aarch64_sabdv16qi_3 (TARGET_SIMD)
#define HAVE_aarch64_uabdv16qi_3 (TARGET_SIMD)
#define HAVE_aarch64_sabdv4hi_3 (TARGET_SIMD)
#define HAVE_aarch64_uabdv4hi_3 (TARGET_SIMD)
#define HAVE_aarch64_sabdv8hi_3 (TARGET_SIMD)
#define HAVE_aarch64_uabdv8hi_3 (TARGET_SIMD)
#define HAVE_aarch64_sabdv2si_3 (TARGET_SIMD)
#define HAVE_aarch64_uabdv2si_3 (TARGET_SIMD)
#define HAVE_aarch64_sabdv4si_3 (TARGET_SIMD)
#define HAVE_aarch64_uabdv4si_3 (TARGET_SIMD)
#define HAVE_aarch64_sabdl2v8qi_3 (TARGET_SIMD)
#define HAVE_aarch64_uabdl2v8qi_3 (TARGET_SIMD)
#define HAVE_aarch64_sabdl2v16qi_3 (TARGET_SIMD)
#define HAVE_aarch64_uabdl2v16qi_3 (TARGET_SIMD)
#define HAVE_aarch64_sabdl2v4hi_3 (TARGET_SIMD)
#define HAVE_aarch64_uabdl2v4hi_3 (TARGET_SIMD)
#define HAVE_aarch64_sabdl2v8hi_3 (TARGET_SIMD)
#define HAVE_aarch64_uabdl2v8hi_3 (TARGET_SIMD)
#define HAVE_aarch64_sabdl2v4si_3 (TARGET_SIMD)
#define HAVE_aarch64_uabdl2v4si_3 (TARGET_SIMD)
#define HAVE_aarch64_sabalv8qi_4 (TARGET_SIMD)
#define HAVE_aarch64_uabalv8qi_4 (TARGET_SIMD)
#define HAVE_aarch64_sabalv16qi_4 (TARGET_SIMD)
#define HAVE_aarch64_uabalv16qi_4 (TARGET_SIMD)
#define HAVE_aarch64_sabalv4hi_4 (TARGET_SIMD)
#define HAVE_aarch64_uabalv4hi_4 (TARGET_SIMD)
#define HAVE_aarch64_sabalv8hi_4 (TARGET_SIMD)
#define HAVE_aarch64_uabalv8hi_4 (TARGET_SIMD)
#define HAVE_aarch64_sabalv4si_4 (TARGET_SIMD)
#define HAVE_aarch64_uabalv4si_4 (TARGET_SIMD)
#define HAVE_aarch64_sadalpv8qi_3 (TARGET_SIMD)
#define HAVE_aarch64_uadalpv8qi_3 (TARGET_SIMD)
#define HAVE_aarch64_sadalpv16qi_3 (TARGET_SIMD)
#define HAVE_aarch64_uadalpv16qi_3 (TARGET_SIMD)
#define HAVE_aarch64_sadalpv4hi_3 (TARGET_SIMD)
#define HAVE_aarch64_uadalpv4hi_3 (TARGET_SIMD)
#define HAVE_aarch64_sadalpv8hi_3 (TARGET_SIMD)
#define HAVE_aarch64_uadalpv8hi_3 (TARGET_SIMD)
#define HAVE_aarch64_sadalpv4si_3 (TARGET_SIMD)
#define HAVE_aarch64_uadalpv4si_3 (TARGET_SIMD)
#define HAVE_abav8qi_3 (TARGET_SIMD)
#define HAVE_abav16qi_3 (TARGET_SIMD)
#define HAVE_abav4hi_3 (TARGET_SIMD)
#define HAVE_abav8hi_3 (TARGET_SIMD)
#define HAVE_abav2si_3 (TARGET_SIMD)
#define HAVE_abav4si_3 (TARGET_SIMD)
#define HAVE_fabdv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fabdv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fabdv2sf3 (TARGET_SIMD)
#define HAVE_fabdv4sf3 (TARGET_SIMD)
#define HAVE_fabdv2df3 (TARGET_SIMD)
#define HAVE_fabdhf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fabdsf3 (TARGET_SIMD)
#define HAVE_fabddf3 (TARGET_SIMD)
#define HAVE_andv8qi3 (TARGET_SIMD)
#define HAVE_andv16qi3 (TARGET_SIMD)
#define HAVE_andv4hi3 (TARGET_SIMD)
#define HAVE_andv8hi3 (TARGET_SIMD)
#define HAVE_andv2si3 (TARGET_SIMD)
#define HAVE_andv4si3 (TARGET_SIMD)
#define HAVE_andv2di3 (TARGET_SIMD)
#define HAVE_iorv8qi3 (TARGET_SIMD)
#define HAVE_iorv16qi3 (TARGET_SIMD)
#define HAVE_iorv4hi3 (TARGET_SIMD)
#define HAVE_iorv8hi3 (TARGET_SIMD)
#define HAVE_iorv2si3 (TARGET_SIMD)
#define HAVE_iorv4si3 (TARGET_SIMD)
#define HAVE_iorv2di3 (TARGET_SIMD)
#define HAVE_xorv8qi3 (TARGET_SIMD)
#define HAVE_xorv16qi3 (TARGET_SIMD)
#define HAVE_xorv4hi3 (TARGET_SIMD)
#define HAVE_xorv8hi3 (TARGET_SIMD)
#define HAVE_xorv2si3 (TARGET_SIMD)
#define HAVE_xorv4si3 (TARGET_SIMD)
#define HAVE_xorv2di3 (TARGET_SIMD)
#define HAVE_one_cmplv8qi2 (TARGET_SIMD)
#define HAVE_one_cmplv16qi2 (TARGET_SIMD)
#define HAVE_one_cmplv4hi2 (TARGET_SIMD)
#define HAVE_one_cmplv8hi2 (TARGET_SIMD)
#define HAVE_one_cmplv2si2 (TARGET_SIMD)
#define HAVE_one_cmplv4si2 (TARGET_SIMD)
#define HAVE_one_cmplv2di2 (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_setv8qi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_setv16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_setv4hi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_setv8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_setv2si (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_setv4si (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_setv2di (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_setv4hf (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_setv8hf (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_setv4bf (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_setv8bf (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_setv2sf (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_setv4sf (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_setv2df (TARGET_SIMD)
#define HAVE_aarch64_simd_lshrv8qi (TARGET_SIMD)
#define HAVE_aarch64_simd_lshrv16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_lshrv4hi (TARGET_SIMD)
#define HAVE_aarch64_simd_lshrv8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_lshrv2si (TARGET_SIMD)
#define HAVE_aarch64_simd_lshrv4si (TARGET_SIMD)
#define HAVE_aarch64_simd_lshrv2di (TARGET_SIMD)
#define HAVE_aarch64_simd_ashrv8qi (TARGET_SIMD)
#define HAVE_aarch64_simd_ashrv16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_ashrv4hi (TARGET_SIMD)
#define HAVE_aarch64_simd_ashrv8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_ashrv2si (TARGET_SIMD)
#define HAVE_aarch64_simd_ashrv4si (TARGET_SIMD)
#define HAVE_aarch64_simd_ashrv2di (TARGET_SIMD)
#define HAVE_aarch64_simd_imm_shlv8qi (TARGET_SIMD)
#define HAVE_aarch64_simd_imm_shlv16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_imm_shlv4hi (TARGET_SIMD)
#define HAVE_aarch64_simd_imm_shlv8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_imm_shlv2si (TARGET_SIMD)
#define HAVE_aarch64_simd_imm_shlv4si (TARGET_SIMD)
#define HAVE_aarch64_simd_imm_shlv2di (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_sshlv8qi (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_sshlv16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_sshlv4hi (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_sshlv8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_sshlv2si (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_sshlv4si (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_sshlv2di (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_shlv8qi_unsigned (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_shlv16qi_unsigned (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_shlv4hi_unsigned (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_shlv8hi_unsigned (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_shlv2si_unsigned (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_shlv4si_unsigned (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_shlv2di_unsigned (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_shlv8qi_signed (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_shlv16qi_signed (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_shlv4hi_signed (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_shlv8hi_signed (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_shlv2si_signed (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_shlv4si_signed (TARGET_SIMD)
#define HAVE_aarch64_simd_reg_shlv2di_signed (TARGET_SIMD)
#define HAVE_vec_shr_v8qi (TARGET_SIMD)
#define HAVE_vec_shr_v4hi (TARGET_SIMD)
#define HAVE_vec_shr_v4hf (TARGET_SIMD)
#define HAVE_vec_shr_v2si (TARGET_SIMD)
#define HAVE_vec_shr_v2sf (TARGET_SIMD)
#define HAVE_vec_shr_v4bf (TARGET_SIMD)
#define HAVE_aarch64_mlav8qi (TARGET_SIMD)
#define HAVE_aarch64_mlav16qi (TARGET_SIMD)
#define HAVE_aarch64_mlav4hi (TARGET_SIMD)
#define HAVE_aarch64_mlav8hi (TARGET_SIMD)
#define HAVE_aarch64_mlav2si (TARGET_SIMD)
#define HAVE_aarch64_mlav4si (TARGET_SIMD)
#define HAVE_aarch64_mlsv8qi (TARGET_SIMD)
#define HAVE_aarch64_mlsv16qi (TARGET_SIMD)
#define HAVE_aarch64_mlsv4hi (TARGET_SIMD)
#define HAVE_aarch64_mlsv8hi (TARGET_SIMD)
#define HAVE_aarch64_mlsv2si (TARGET_SIMD)
#define HAVE_aarch64_mlsv4si (TARGET_SIMD)
#define HAVE_smaxv8qi3 (TARGET_SIMD)
#define HAVE_sminv8qi3 (TARGET_SIMD)
#define HAVE_umaxv8qi3 (TARGET_SIMD)
#define HAVE_uminv8qi3 (TARGET_SIMD)
#define HAVE_smaxv16qi3 (TARGET_SIMD)
#define HAVE_sminv16qi3 (TARGET_SIMD)
#define HAVE_umaxv16qi3 (TARGET_SIMD)
#define HAVE_uminv16qi3 (TARGET_SIMD)
#define HAVE_smaxv4hi3 (TARGET_SIMD)
#define HAVE_sminv4hi3 (TARGET_SIMD)
#define HAVE_umaxv4hi3 (TARGET_SIMD)
#define HAVE_uminv4hi3 (TARGET_SIMD)
#define HAVE_smaxv8hi3 (TARGET_SIMD)
#define HAVE_sminv8hi3 (TARGET_SIMD)
#define HAVE_umaxv8hi3 (TARGET_SIMD)
#define HAVE_uminv8hi3 (TARGET_SIMD)
#define HAVE_smaxv2si3 (TARGET_SIMD)
#define HAVE_sminv2si3 (TARGET_SIMD)
#define HAVE_umaxv2si3 (TARGET_SIMD)
#define HAVE_uminv2si3 (TARGET_SIMD)
#define HAVE_smaxv4si3 (TARGET_SIMD)
#define HAVE_sminv4si3 (TARGET_SIMD)
#define HAVE_umaxv4si3 (TARGET_SIMD)
#define HAVE_uminv4si3 (TARGET_SIMD)
#define HAVE_aarch64_umaxpv8qi (TARGET_SIMD)
#define HAVE_aarch64_uminpv8qi (TARGET_SIMD)
#define HAVE_aarch64_smaxpv8qi (TARGET_SIMD)
#define HAVE_aarch64_sminpv8qi (TARGET_SIMD)
#define HAVE_aarch64_umaxpv16qi (TARGET_SIMD)
#define HAVE_aarch64_uminpv16qi (TARGET_SIMD)
#define HAVE_aarch64_smaxpv16qi (TARGET_SIMD)
#define HAVE_aarch64_sminpv16qi (TARGET_SIMD)
#define HAVE_aarch64_umaxpv4hi (TARGET_SIMD)
#define HAVE_aarch64_uminpv4hi (TARGET_SIMD)
#define HAVE_aarch64_smaxpv4hi (TARGET_SIMD)
#define HAVE_aarch64_sminpv4hi (TARGET_SIMD)
#define HAVE_aarch64_umaxpv8hi (TARGET_SIMD)
#define HAVE_aarch64_uminpv8hi (TARGET_SIMD)
#define HAVE_aarch64_smaxpv8hi (TARGET_SIMD)
#define HAVE_aarch64_sminpv8hi (TARGET_SIMD)
#define HAVE_aarch64_umaxpv2si (TARGET_SIMD)
#define HAVE_aarch64_uminpv2si (TARGET_SIMD)
#define HAVE_aarch64_smaxpv2si (TARGET_SIMD)
#define HAVE_aarch64_sminpv2si (TARGET_SIMD)
#define HAVE_aarch64_umaxpv4si (TARGET_SIMD)
#define HAVE_aarch64_uminpv4si (TARGET_SIMD)
#define HAVE_aarch64_smaxpv4si (TARGET_SIMD)
#define HAVE_aarch64_sminpv4si (TARGET_SIMD)
#define HAVE_aarch64_smax_nanpv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_smin_nanpv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_smaxpv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_sminpv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_smax_nanpv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_smin_nanpv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_smaxpv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_sminpv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_smax_nanpv2sf (TARGET_SIMD)
#define HAVE_aarch64_smin_nanpv2sf (TARGET_SIMD)
#define HAVE_aarch64_smaxpv2sf (TARGET_SIMD)
#define HAVE_aarch64_sminpv2sf (TARGET_SIMD)
#define HAVE_aarch64_smax_nanpv4sf (TARGET_SIMD)
#define HAVE_aarch64_smin_nanpv4sf (TARGET_SIMD)
#define HAVE_aarch64_smaxpv4sf (TARGET_SIMD)
#define HAVE_aarch64_sminpv4sf (TARGET_SIMD)
#define HAVE_aarch64_smax_nanpv2df (TARGET_SIMD)
#define HAVE_aarch64_smin_nanpv2df (TARGET_SIMD)
#define HAVE_aarch64_smaxpv2df (TARGET_SIMD)
#define HAVE_aarch64_sminpv2df (TARGET_SIMD)
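/* Quad-word half moves are split into little- and big-endian
   variants, selected by BYTES_BIG_ENDIAN.  */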
#define HAVE_move_lo_quad_internal_v16qi (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_move_lo_quad_internal_v8hi (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_move_lo_quad_internal_v4si (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_move_lo_quad_internal_v8hf (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_move_lo_quad_internal_v8bf (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_move_lo_quad_internal_v4sf (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_move_lo_quad_internal_v2di (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_move_lo_quad_internal_v2df (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_move_lo_quad_internal_be_v16qi (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_move_lo_quad_internal_be_v8hi (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_move_lo_quad_internal_be_v4si (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_move_lo_quad_internal_be_v8hf (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_move_lo_quad_internal_be_v8bf (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_move_lo_quad_internal_be_v4sf (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_move_lo_quad_internal_be_v2di (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_move_lo_quad_internal_be_v2df (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_aarch64_simd_move_hi_quad_v16qi (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_simd_move_hi_quad_v8hi (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_simd_move_hi_quad_v4si (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_simd_move_hi_quad_v2di (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_simd_move_hi_quad_v8hf (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_simd_move_hi_quad_v8bf (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_simd_move_hi_quad_v4sf (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_simd_move_hi_quad_v2df (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_simd_move_hi_quad_be_v16qi (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_aarch64_simd_move_hi_quad_be_v8hi (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_aarch64_simd_move_hi_quad_be_v4si (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_aarch64_simd_move_hi_quad_be_v2di (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_aarch64_simd_move_hi_quad_be_v8hf (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_aarch64_simd_move_hi_quad_be_v8bf (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_aarch64_simd_move_hi_quad_be_v4sf (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_aarch64_simd_move_hi_quad_be_v2df (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_aarch64_simd_vec_pack_trunc_v8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_pack_trunc_v4si (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_pack_trunc_v2di (TARGET_SIMD)
#define HAVE_vec_pack_trunc_v8hi (TARGET_SIMD)
#define HAVE_vec_pack_trunc_v4si (TARGET_SIMD)
#define HAVE_vec_pack_trunc_v2di (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_unpacks_lo_v16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_unpacku_lo_v16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_unpacks_lo_v8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_unpacku_lo_v8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_unpacks_lo_v4si (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_unpacku_lo_v4si (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_unpacks_hi_v16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_unpacku_hi_v16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_unpacks_hi_v8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_unpacku_hi_v8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_unpacks_hi_v4si (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_unpacku_hi_v4si (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_smult_lo_v16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_umult_lo_v16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_smult_lo_v8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_umult_lo_v8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_smult_lo_v4si (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_umult_lo_v4si (TARGET_SIMD)
#define HAVE_aarch64_intrinsic_vec_smult_lo_v8qi (TARGET_SIMD)
#define HAVE_aarch64_intrinsic_vec_umult_lo_v8qi (TARGET_SIMD)
#define HAVE_aarch64_intrinsic_vec_smult_lo_v4hi (TARGET_SIMD)
#define HAVE_aarch64_intrinsic_vec_umult_lo_v4hi (TARGET_SIMD)
#define HAVE_aarch64_intrinsic_vec_smult_lo_v2si (TARGET_SIMD)
#define HAVE_aarch64_intrinsic_vec_umult_lo_v2si (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_smult_hi_v16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_umult_hi_v16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_smult_hi_v8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_umult_hi_v8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_smult_hi_v4si (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_umult_hi_v4si (TARGET_SIMD)
#define HAVE_aarch64_vec_smult_lane_v4hi (TARGET_SIMD)
#define HAVE_aarch64_vec_umult_lane_v4hi (TARGET_SIMD)
#define HAVE_aarch64_vec_smult_laneq_v4hi (TARGET_SIMD)
#define HAVE_aarch64_vec_umult_laneq_v4hi (TARGET_SIMD)
#define HAVE_aarch64_vec_smult_lane_v2si (TARGET_SIMD)
#define HAVE_aarch64_vec_umult_lane_v2si (TARGET_SIMD)
#define HAVE_aarch64_vec_smult_laneq_v2si (TARGET_SIMD)
#define HAVE_aarch64_vec_umult_laneq_v2si (TARGET_SIMD)
#define HAVE_aarch64_vec_smlal_lane_v4hi (TARGET_SIMD)
#define HAVE_aarch64_vec_umlal_lane_v4hi (TARGET_SIMD)
#define HAVE_aarch64_vec_smlal_laneq_v4hi (TARGET_SIMD)
#define HAVE_aarch64_vec_umlal_laneq_v4hi (TARGET_SIMD)
#define HAVE_aarch64_vec_smlal_lane_v2si (TARGET_SIMD)
#define HAVE_aarch64_vec_umlal_lane_v2si (TARGET_SIMD)
#define HAVE_aarch64_vec_smlal_laneq_v2si (TARGET_SIMD)
#define HAVE_aarch64_vec_umlal_laneq_v2si (TARGET_SIMD)
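/* Floating-point vector arithmetic; throughout this group the
   half-precision modes additionally require TARGET_SIMD_F16INST.  */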
#define HAVE_addv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_addv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_addv2sf3 (TARGET_SIMD)
#define HAVE_addv4sf3 (TARGET_SIMD)
#define HAVE_addv2df3 (TARGET_SIMD)
#define HAVE_subv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_subv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_subv2sf3 (TARGET_SIMD)
#define HAVE_subv4sf3 (TARGET_SIMD)
#define HAVE_subv2df3 (TARGET_SIMD)
#define HAVE_mulv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_mulv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_mulv2sf3 (TARGET_SIMD)
#define HAVE_mulv4sf3 (TARGET_SIMD)
#define HAVE_mulv2df3 (TARGET_SIMD)
#define HAVE_negv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_negv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_negv2sf2 (TARGET_SIMD)
#define HAVE_negv4sf2 (TARGET_SIMD)
#define HAVE_negv2df2 (TARGET_SIMD)
#define HAVE_absv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_absv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_absv2sf2 (TARGET_SIMD)
#define HAVE_absv4sf2 (TARGET_SIMD)
#define HAVE_absv2df2 (TARGET_SIMD)
#define HAVE_fmav4hf4 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fmav8hf4 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fmav2sf4 (TARGET_SIMD)
#define HAVE_fmav4sf4 (TARGET_SIMD)
#define HAVE_fmav2df4 (TARGET_SIMD)
#define HAVE_fnmav4hf4 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fnmav8hf4 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fnmav2sf4 (TARGET_SIMD)
#define HAVE_fnmav4sf4 (TARGET_SIMD)
#define HAVE_fnmav2df4 (TARGET_SIMD)
#define HAVE_btruncv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_ceilv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_floorv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_frintnv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_nearbyintv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_rintv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_roundv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_btruncv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_ceilv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_floorv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_frintnv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_nearbyintv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_rintv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_roundv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_btruncv2sf2 (TARGET_SIMD)
#define HAVE_ceilv2sf2 (TARGET_SIMD)
#define HAVE_floorv2sf2 (TARGET_SIMD)
#define HAVE_frintnv2sf2 (TARGET_SIMD)
#define HAVE_nearbyintv2sf2 (TARGET_SIMD)
#define HAVE_rintv2sf2 (TARGET_SIMD)
#define HAVE_roundv2sf2 (TARGET_SIMD)
#define HAVE_btruncv4sf2 (TARGET_SIMD)
#define HAVE_ceilv4sf2 (TARGET_SIMD)
#define HAVE_floorv4sf2 (TARGET_SIMD)
#define HAVE_frintnv4sf2 (TARGET_SIMD)
#define HAVE_nearbyintv4sf2 (TARGET_SIMD)
#define HAVE_rintv4sf2 (TARGET_SIMD)
#define HAVE_roundv4sf2 (TARGET_SIMD)
#define HAVE_btruncv2df2 (TARGET_SIMD)
#define HAVE_ceilv2df2 (TARGET_SIMD)
#define HAVE_floorv2df2 (TARGET_SIMD)
#define HAVE_frintnv2df2 (TARGET_SIMD)
#define HAVE_nearbyintv2df2 (TARGET_SIMD)
#define HAVE_rintv2df2 (TARGET_SIMD)
#define HAVE_roundv2df2 (TARGET_SIMD)
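/* Float-to-integer conversions with an explicit rounding mode
   (truncate, ceil, floor, round, round-to-nearest-even), in signed
   and unsigned variants.  */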
#define HAVE_lbtruncv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_lceilv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_lfloorv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_lroundv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_lfrintnv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_lbtruncuv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_lceiluv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_lflooruv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_lrounduv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_lfrintnuv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_lbtruncv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_lceilv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_lfloorv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_lroundv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_lfrintnv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_lbtruncuv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_lceiluv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_lflooruv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_lrounduv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_lfrintnuv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_lbtruncv2sfv2si2 (TARGET_SIMD)
#define HAVE_lceilv2sfv2si2 (TARGET_SIMD)
#define HAVE_lfloorv2sfv2si2 (TARGET_SIMD)
#define HAVE_lroundv2sfv2si2 (TARGET_SIMD)
#define HAVE_lfrintnv2sfv2si2 (TARGET_SIMD)
#define HAVE_lbtruncuv2sfv2si2 (TARGET_SIMD)
#define HAVE_lceiluv2sfv2si2 (TARGET_SIMD)
#define HAVE_lflooruv2sfv2si2 (TARGET_SIMD)
#define HAVE_lrounduv2sfv2si2 (TARGET_SIMD)
#define HAVE_lfrintnuv2sfv2si2 (TARGET_SIMD)
#define HAVE_lbtruncv4sfv4si2 (TARGET_SIMD)
#define HAVE_lceilv4sfv4si2 (TARGET_SIMD)
#define HAVE_lfloorv4sfv4si2 (TARGET_SIMD)
#define HAVE_lroundv4sfv4si2 (TARGET_SIMD)
#define HAVE_lfrintnv4sfv4si2 (TARGET_SIMD)
#define HAVE_lbtruncuv4sfv4si2 (TARGET_SIMD)
#define HAVE_lceiluv4sfv4si2 (TARGET_SIMD)
#define HAVE_lflooruv4sfv4si2 (TARGET_SIMD)
#define HAVE_lrounduv4sfv4si2 (TARGET_SIMD)
#define HAVE_lfrintnuv4sfv4si2 (TARGET_SIMD)
#define HAVE_lbtruncv2dfv2di2 (TARGET_SIMD)
#define HAVE_lceilv2dfv2di2 (TARGET_SIMD)
#define HAVE_lfloorv2dfv2di2 (TARGET_SIMD)
#define HAVE_lroundv2dfv2di2 (TARGET_SIMD)
#define HAVE_lfrintnv2dfv2di2 (TARGET_SIMD)
#define HAVE_lbtruncuv2dfv2di2 (TARGET_SIMD)
#define HAVE_lceiluv2dfv2di2 (TARGET_SIMD)
#define HAVE_lflooruv2dfv2di2 (TARGET_SIMD)
#define HAVE_lrounduv2dfv2di2 (TARGET_SIMD)
#define HAVE_lfrintnuv2dfv2di2 (TARGET_SIMD)
#define HAVE_lbtrunchfhi2 (TARGET_SIMD_F16INST)
#define HAVE_lceilhfhi2 (TARGET_SIMD_F16INST)
#define HAVE_lfloorhfhi2 (TARGET_SIMD_F16INST)
#define HAVE_lroundhfhi2 (TARGET_SIMD_F16INST)
#define HAVE_lfrintnhfhi2 (TARGET_SIMD_F16INST)
#define HAVE_lbtruncuhfhi2 (TARGET_SIMD_F16INST)
#define HAVE_lceiluhfhi2 (TARGET_SIMD_F16INST)
#define HAVE_lflooruhfhi2 (TARGET_SIMD_F16INST)
#define HAVE_lrounduhfhi2 (TARGET_SIMD_F16INST)
#define HAVE_lfrintnuhfhi2 (TARGET_SIMD_F16INST)
#define HAVE_fix_trunchfhi2 (TARGET_SIMD_F16INST)
#define HAVE_fixuns_trunchfhi2 (TARGET_SIMD_F16INST)
#define HAVE_floathihf2 (TARGET_SIMD_F16INST)
#define HAVE_floatunshihf2 (TARGET_SIMD_F16INST)
#define HAVE_floatv4hiv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_floatunsv4hiv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_floatv8hiv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_floatunsv8hiv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_floatv2siv2sf2 (TARGET_SIMD)
#define HAVE_floatunsv2siv2sf2 (TARGET_SIMD)
#define HAVE_floatv4siv4sf2 (TARGET_SIMD)
#define HAVE_floatunsv4siv4sf2 (TARGET_SIMD)
#define HAVE_floatv2div2df2 (TARGET_SIMD)
#define HAVE_floatunsv2div2df2 (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_unpacks_lo_v8hf (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_unpacks_lo_v4sf (TARGET_SIMD)
#define HAVE_fcvtzsv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fcvtzuv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fcvtzsv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fcvtzuv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fcvtzsv2sf3 (TARGET_SIMD)
#define HAVE_fcvtzuv2sf3 (TARGET_SIMD)
#define HAVE_fcvtzsv4sf3 (TARGET_SIMD)
#define HAVE_fcvtzuv4sf3 (TARGET_SIMD)
#define HAVE_fcvtzsv2df3 (TARGET_SIMD)
#define HAVE_fcvtzuv2df3 (TARGET_SIMD)
#define HAVE_scvtfv4hi3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_ucvtfv4hi3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_scvtfv8hi3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_ucvtfv8hi3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_scvtfv2si3 (TARGET_SIMD)
#define HAVE_ucvtfv2si3 (TARGET_SIMD)
#define HAVE_scvtfv4si3 (TARGET_SIMD)
#define HAVE_ucvtfv4si3 (TARGET_SIMD)
#define HAVE_scvtfv2di3 (TARGET_SIMD)
#define HAVE_ucvtfv2di3 (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_unpacks_hi_v8hf (TARGET_SIMD)
#define HAVE_aarch64_simd_vec_unpacks_hi_v4sf (TARGET_SIMD)
#define HAVE_aarch64_float_extend_lo_v2df (TARGET_SIMD)
#define HAVE_aarch64_float_extend_lo_v4sf (TARGET_SIMD)
#define HAVE_aarch64_float_truncate_lo_v2sf (TARGET_SIMD)
#define HAVE_aarch64_float_truncate_lo_v4hf (TARGET_SIMD)
#define HAVE_aarch64_float_truncate_hi_v4sf_le (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_float_truncate_hi_v8hf_le (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_float_truncate_hi_v4sf_be (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_aarch64_float_truncate_hi_v8hf_be (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_smaxv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_sminv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_smaxv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_sminv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_smaxv2sf3 (TARGET_SIMD)
#define HAVE_sminv2sf3 (TARGET_SIMD)
#define HAVE_smaxv4sf3 (TARGET_SIMD)
#define HAVE_sminv4sf3 (TARGET_SIMD)
#define HAVE_smaxv2df3 (TARGET_SIMD)
#define HAVE_sminv2df3 (TARGET_SIMD)
#define HAVE_smax_nanv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_smin_nanv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fmaxv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fminv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_smax_nanv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_smin_nanv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fmaxv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fminv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_smax_nanv2sf3 (TARGET_SIMD)
#define HAVE_smin_nanv2sf3 (TARGET_SIMD)
#define HAVE_fmaxv2sf3 (TARGET_SIMD)
#define HAVE_fminv2sf3 (TARGET_SIMD)
#define HAVE_smax_nanv4sf3 (TARGET_SIMD)
#define HAVE_smin_nanv4sf3 (TARGET_SIMD)
#define HAVE_fmaxv4sf3 (TARGET_SIMD)
#define HAVE_fminv4sf3 (TARGET_SIMD)
#define HAVE_smax_nanv2df3 (TARGET_SIMD)
#define HAVE_smin_nanv2df3 (TARGET_SIMD)
#define HAVE_fmaxv2df3 (TARGET_SIMD)
#define HAVE_fminv2df3 (TARGET_SIMD)
#define HAVE_aarch64_faddpv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_faddpv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_faddpv2sf (TARGET_SIMD)
#define HAVE_aarch64_faddpv4sf (TARGET_SIMD)
#define HAVE_aarch64_faddpv2df (TARGET_SIMD)
#define HAVE_aarch64_reduc_plus_internalv8qi (TARGET_SIMD)
#define HAVE_aarch64_reduc_plus_internalv16qi (TARGET_SIMD)
#define HAVE_aarch64_reduc_plus_internalv4hi (TARGET_SIMD)
#define HAVE_aarch64_reduc_plus_internalv8hi (TARGET_SIMD)
#define HAVE_aarch64_reduc_plus_internalv4si (TARGET_SIMD)
#define HAVE_aarch64_reduc_plus_internalv2di (TARGET_SIMD)
#define HAVE_aarch64_zero_extendsi_reduc_plus_v8qi (TARGET_SIMD)
#define HAVE_aarch64_zero_extenddi_reduc_plus_v8qi (TARGET_SIMD)
#define HAVE_aarch64_zero_extendsi_reduc_plus_v16qi (TARGET_SIMD)
#define HAVE_aarch64_zero_extenddi_reduc_plus_v16qi (TARGET_SIMD)
#define HAVE_aarch64_zero_extendsi_reduc_plus_v4hi (TARGET_SIMD)
#define HAVE_aarch64_zero_extenddi_reduc_plus_v4hi (TARGET_SIMD)
#define HAVE_aarch64_zero_extendsi_reduc_plus_v8hi (TARGET_SIMD)
#define HAVE_aarch64_zero_extenddi_reduc_plus_v8hi (TARGET_SIMD)
#define HAVE_aarch64_reduc_plus_internalv2si (TARGET_SIMD)
#define HAVE_reduc_plus_scal_v2sf (TARGET_SIMD)
#define HAVE_reduc_plus_scal_v2df (TARGET_SIMD)
#define HAVE_clrsbv8qi2 (TARGET_SIMD)
#define HAVE_clrsbv16qi2 (TARGET_SIMD)
#define HAVE_clrsbv4hi2 (TARGET_SIMD)
#define HAVE_clrsbv8hi2 (TARGET_SIMD)
#define HAVE_clrsbv2si2 (TARGET_SIMD)
#define HAVE_clrsbv4si2 (TARGET_SIMD)
#define HAVE_clzv8qi2 (TARGET_SIMD)
#define HAVE_clzv16qi2 (TARGET_SIMD)
#define HAVE_clzv4hi2 (TARGET_SIMD)
#define HAVE_clzv8hi2 (TARGET_SIMD)
#define HAVE_clzv2si2 (TARGET_SIMD)
#define HAVE_clzv4si2 (TARGET_SIMD)
#define HAVE_popcountv8qi2 (TARGET_SIMD)
#define HAVE_popcountv16qi2 (TARGET_SIMD)
#define HAVE_aarch64_reduc_umax_internalv8qi (TARGET_SIMD)
#define HAVE_aarch64_reduc_umin_internalv8qi (TARGET_SIMD)
#define HAVE_aarch64_reduc_smax_internalv8qi (TARGET_SIMD)
#define HAVE_aarch64_reduc_smin_internalv8qi (TARGET_SIMD)
#define HAVE_aarch64_reduc_umax_internalv16qi (TARGET_SIMD)
#define HAVE_aarch64_reduc_umin_internalv16qi (TARGET_SIMD)
#define HAVE_aarch64_reduc_smax_internalv16qi (TARGET_SIMD)
#define HAVE_aarch64_reduc_smin_internalv16qi (TARGET_SIMD)
#define HAVE_aarch64_reduc_umax_internalv4hi (TARGET_SIMD)
#define HAVE_aarch64_reduc_umin_internalv4hi (TARGET_SIMD)
#define HAVE_aarch64_reduc_smax_internalv4hi (TARGET_SIMD)
#define HAVE_aarch64_reduc_smin_internalv4hi (TARGET_SIMD)
#define HAVE_aarch64_reduc_umax_internalv8hi (TARGET_SIMD)
#define HAVE_aarch64_reduc_umin_internalv8hi (TARGET_SIMD)
#define HAVE_aarch64_reduc_smax_internalv8hi (TARGET_SIMD)
#define HAVE_aarch64_reduc_smin_internalv8hi (TARGET_SIMD)
#define HAVE_aarch64_reduc_umax_internalv4si (TARGET_SIMD)
#define HAVE_aarch64_reduc_umin_internalv4si (TARGET_SIMD)
#define HAVE_aarch64_reduc_smax_internalv4si (TARGET_SIMD)
#define HAVE_aarch64_reduc_smin_internalv4si (TARGET_SIMD)
#define HAVE_aarch64_reduc_umax_internalv2si (TARGET_SIMD)
#define HAVE_aarch64_reduc_umin_internalv2si (TARGET_SIMD)
#define HAVE_aarch64_reduc_smax_internalv2si (TARGET_SIMD)
#define HAVE_aarch64_reduc_smin_internalv2si (TARGET_SIMD)
#define HAVE_aarch64_reduc_smax_nan_internalv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_reduc_smin_nan_internalv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_reduc_smax_internalv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_reduc_smin_internalv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_reduc_smax_nan_internalv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_reduc_smin_nan_internalv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_reduc_smax_internalv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_reduc_smin_internalv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_reduc_smax_nan_internalv2sf (TARGET_SIMD)
#define HAVE_aarch64_reduc_smin_nan_internalv2sf (TARGET_SIMD)
#define HAVE_aarch64_reduc_smax_internalv2sf (TARGET_SIMD)
#define HAVE_aarch64_reduc_smin_internalv2sf (TARGET_SIMD)
#define HAVE_aarch64_reduc_smax_nan_internalv4sf (TARGET_SIMD)
#define HAVE_aarch64_reduc_smin_nan_internalv4sf (TARGET_SIMD)
#define HAVE_aarch64_reduc_smax_internalv4sf (TARGET_SIMD)
#define HAVE_aarch64_reduc_smin_internalv4sf (TARGET_SIMD)
#define HAVE_aarch64_reduc_smax_nan_internalv2df (TARGET_SIMD)
#define HAVE_aarch64_reduc_smin_nan_internalv2df (TARGET_SIMD)
#define HAVE_aarch64_reduc_smax_internalv2df (TARGET_SIMD)
#define HAVE_aarch64_reduc_smin_internalv2df (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv8qi_internal (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv16qi_internal (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv4hi_internal (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv2si_internal (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv4si_internal (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv2di_internal (TARGET_SIMD)
#define HAVE_aarch64_simd_bsldi_internal (TARGET_SIMD)
#define HAVE_aarch64_simd_bsldi_alt (TARGET_SIMD)
#define HAVE_aarch64_get_lanev8qi (TARGET_SIMD)
#define HAVE_aarch64_get_lanev16qi (TARGET_SIMD)
#define HAVE_aarch64_get_lanev4hi (TARGET_SIMD)
#define HAVE_aarch64_get_lanev8hi (TARGET_SIMD)
#define HAVE_aarch64_get_lanev2si (TARGET_SIMD)
#define HAVE_aarch64_get_lanev4si (TARGET_SIMD)
#define HAVE_aarch64_get_lanev2di (TARGET_SIMD)
#define HAVE_aarch64_get_lanev4hf (TARGET_SIMD)
#define HAVE_aarch64_get_lanev8hf (TARGET_SIMD)
#define HAVE_aarch64_get_lanev4bf (TARGET_SIMD)
#define HAVE_aarch64_get_lanev8bf (TARGET_SIMD)
#define HAVE_aarch64_get_lanev2sf (TARGET_SIMD)
#define HAVE_aarch64_get_lanev4sf (TARGET_SIMD)
#define HAVE_aarch64_get_lanev2df (TARGET_SIMD)
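/* Lane-pair loads: available only without strict alignment, and only
   when operand 2's address equals operand 1's plus the mode size.  */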
#define HAVE_load_pair_lanesv8qi (TARGET_SIMD && !STRICT_ALIGNMENT \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V8QImode))))
#define HAVE_load_pair_lanesv4hi (TARGET_SIMD && !STRICT_ALIGNMENT \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4HImode))))
#define HAVE_load_pair_lanesv4bf (TARGET_SIMD && !STRICT_ALIGNMENT \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4BFmode))))
#define HAVE_load_pair_lanesv4hf (TARGET_SIMD && !STRICT_ALIGNMENT \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V4HFmode))))
#define HAVE_load_pair_lanesv2si (TARGET_SIMD && !STRICT_ALIGNMENT \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2SImode))))
#define HAVE_load_pair_lanesv2sf (TARGET_SIMD && !STRICT_ALIGNMENT \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (V2SFmode))))
#define HAVE_load_pair_lanesdi (TARGET_SIMD && !STRICT_ALIGNMENT \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (DImode))))
#define HAVE_load_pair_lanesdf (TARGET_SIMD && !STRICT_ALIGNMENT \
&& rtx_equal_p (XEXP (operands[2], 0), \
plus_constant (Pmode, \
XEXP (operands[1], 0), \
GET_MODE_SIZE (DFmode))))
#define HAVE_store_pair_lanesv8qi (TARGET_SIMD)
#define HAVE_store_pair_lanesv4hi (TARGET_SIMD)
#define HAVE_store_pair_lanesv4bf (TARGET_SIMD)
#define HAVE_store_pair_lanesv4hf (TARGET_SIMD)
#define HAVE_store_pair_lanesv2si (TARGET_SIMD)
#define HAVE_store_pair_lanesv2sf (TARGET_SIMD)
#define HAVE_store_pair_lanesdi (TARGET_SIMD)
#define HAVE_store_pair_lanesdf (TARGET_SIMD)
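/* Combines of a 64-bit value with zero into a 128-bit vector, again
   split by endianness.  */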
#define HAVE_aarch64_combinezv8qi (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_combinezv4hi (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_combinezv4bf (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_combinezv4hf (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_combinezv2si (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_combinezv2sf (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_combinezdi (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_combinezdf (TARGET_SIMD && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_combinez_bev8qi (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_aarch64_combinez_bev4hi (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_aarch64_combinez_bev4bf (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_aarch64_combinez_bev4hf (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_aarch64_combinez_bev2si (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_aarch64_combinez_bev2sf (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_aarch64_combinez_bedi (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_aarch64_combinez_bedf (TARGET_SIMD && BYTES_BIG_ENDIAN)
#define HAVE_aarch64_saddlv16qi_hi_internal (TARGET_SIMD)
#define HAVE_aarch64_ssublv16qi_hi_internal (TARGET_SIMD)
#define HAVE_aarch64_uaddlv16qi_hi_internal (TARGET_SIMD)
#define HAVE_aarch64_usublv16qi_hi_internal (TARGET_SIMD)
#define HAVE_aarch64_saddlv8hi_hi_internal (TARGET_SIMD)
#define HAVE_aarch64_ssublv8hi_hi_internal (TARGET_SIMD)
#define HAVE_aarch64_uaddlv8hi_hi_internal (TARGET_SIMD)
#define HAVE_aarch64_usublv8hi_hi_internal (TARGET_SIMD)
#define HAVE_aarch64_saddlv4si_hi_internal (TARGET_SIMD)
#define HAVE_aarch64_ssublv4si_hi_internal (TARGET_SIMD)
#define HAVE_aarch64_uaddlv4si_hi_internal (TARGET_SIMD)
#define HAVE_aarch64_usublv4si_hi_internal (TARGET_SIMD)
#define HAVE_aarch64_saddlv16qi_lo_internal (TARGET_SIMD)
#define HAVE_aarch64_ssublv16qi_lo_internal (TARGET_SIMD)
#define HAVE_aarch64_uaddlv16qi_lo_internal (TARGET_SIMD)
#define HAVE_aarch64_usublv16qi_lo_internal (TARGET_SIMD)
#define HAVE_aarch64_saddlv8hi_lo_internal (TARGET_SIMD)
#define HAVE_aarch64_ssublv8hi_lo_internal (TARGET_SIMD)
#define HAVE_aarch64_uaddlv8hi_lo_internal (TARGET_SIMD)
#define HAVE_aarch64_usublv8hi_lo_internal (TARGET_SIMD)
#define HAVE_aarch64_saddlv4si_lo_internal (TARGET_SIMD)
#define HAVE_aarch64_ssublv4si_lo_internal (TARGET_SIMD)
#define HAVE_aarch64_uaddlv4si_lo_internal (TARGET_SIMD)
#define HAVE_aarch64_usublv4si_lo_internal (TARGET_SIMD)
#define HAVE_aarch64_saddlv8qi (TARGET_SIMD)
#define HAVE_aarch64_ssublv8qi (TARGET_SIMD)
#define HAVE_aarch64_uaddlv8qi (TARGET_SIMD)
#define HAVE_aarch64_usublv8qi (TARGET_SIMD)
#define HAVE_aarch64_saddlv4hi (TARGET_SIMD)
#define HAVE_aarch64_ssublv4hi (TARGET_SIMD)
#define HAVE_aarch64_uaddlv4hi (TARGET_SIMD)
#define HAVE_aarch64_usublv4hi (TARGET_SIMD)
#define HAVE_aarch64_saddlv2si (TARGET_SIMD)
#define HAVE_aarch64_ssublv2si (TARGET_SIMD)
#define HAVE_aarch64_uaddlv2si (TARGET_SIMD)
#define HAVE_aarch64_usublv2si (TARGET_SIMD)
#define HAVE_aarch64_ssubwv8qi (TARGET_SIMD)
#define HAVE_aarch64_usubwv8qi (TARGET_SIMD)
#define HAVE_aarch64_ssubwv4hi (TARGET_SIMD)
#define HAVE_aarch64_usubwv4hi (TARGET_SIMD)
#define HAVE_aarch64_ssubwv2si (TARGET_SIMD)
#define HAVE_aarch64_usubwv2si (TARGET_SIMD)
#define HAVE_aarch64_ssubwv16qi_internal (TARGET_SIMD)
#define HAVE_aarch64_usubwv16qi_internal (TARGET_SIMD)
#define HAVE_aarch64_ssubwv8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_usubwv8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_ssubwv4si_internal (TARGET_SIMD)
#define HAVE_aarch64_usubwv4si_internal (TARGET_SIMD)
#define HAVE_aarch64_ssubw2v16qi_internal (TARGET_SIMD)
#define HAVE_aarch64_usubw2v16qi_internal (TARGET_SIMD)
#define HAVE_aarch64_ssubw2v8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_usubw2v8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_ssubw2v4si_internal (TARGET_SIMD)
#define HAVE_aarch64_usubw2v4si_internal (TARGET_SIMD)
#define HAVE_aarch64_saddwv8qi (TARGET_SIMD)
#define HAVE_aarch64_uaddwv8qi (TARGET_SIMD)
#define HAVE_aarch64_saddwv4hi (TARGET_SIMD)
#define HAVE_aarch64_uaddwv4hi (TARGET_SIMD)
#define HAVE_aarch64_saddwv2si (TARGET_SIMD)
#define HAVE_aarch64_uaddwv2si (TARGET_SIMD)
#define HAVE_aarch64_saddwv16qi_internal (TARGET_SIMD)
#define HAVE_aarch64_uaddwv16qi_internal (TARGET_SIMD)
#define HAVE_aarch64_saddwv8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_uaddwv8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_saddwv4si_internal (TARGET_SIMD)
#define HAVE_aarch64_uaddwv4si_internal (TARGET_SIMD)
#define HAVE_aarch64_saddw2v16qi_internal (TARGET_SIMD)
#define HAVE_aarch64_uaddw2v16qi_internal (TARGET_SIMD)
#define HAVE_aarch64_saddw2v8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_uaddw2v8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_saddw2v4si_internal (TARGET_SIMD)
#define HAVE_aarch64_uaddw2v4si_internal (TARGET_SIMD)
#define HAVE_aarch64_shaddv8qi (TARGET_SIMD)
#define HAVE_aarch64_uhaddv8qi (TARGET_SIMD)
#define HAVE_aarch64_srhaddv8qi (TARGET_SIMD)
#define HAVE_aarch64_urhaddv8qi (TARGET_SIMD)
#define HAVE_aarch64_shsubv8qi (TARGET_SIMD)
#define HAVE_aarch64_uhsubv8qi (TARGET_SIMD)
#define HAVE_aarch64_shaddv16qi (TARGET_SIMD)
#define HAVE_aarch64_uhaddv16qi (TARGET_SIMD)
#define HAVE_aarch64_srhaddv16qi (TARGET_SIMD)
#define HAVE_aarch64_urhaddv16qi (TARGET_SIMD)
#define HAVE_aarch64_shsubv16qi (TARGET_SIMD)
#define HAVE_aarch64_uhsubv16qi (TARGET_SIMD)
#define HAVE_aarch64_shaddv4hi (TARGET_SIMD)
#define HAVE_aarch64_uhaddv4hi (TARGET_SIMD)
#define HAVE_aarch64_srhaddv4hi (TARGET_SIMD)
#define HAVE_aarch64_urhaddv4hi (TARGET_SIMD)
#define HAVE_aarch64_shsubv4hi (TARGET_SIMD)
#define HAVE_aarch64_uhsubv4hi (TARGET_SIMD)
#define HAVE_aarch64_shaddv8hi (TARGET_SIMD)
#define HAVE_aarch64_uhaddv8hi (TARGET_SIMD)
#define HAVE_aarch64_srhaddv8hi (TARGET_SIMD)
#define HAVE_aarch64_urhaddv8hi (TARGET_SIMD)
#define HAVE_aarch64_shsubv8hi (TARGET_SIMD)
#define HAVE_aarch64_uhsubv8hi (TARGET_SIMD)
#define HAVE_aarch64_shaddv2si (TARGET_SIMD)
#define HAVE_aarch64_uhaddv2si (TARGET_SIMD)
#define HAVE_aarch64_srhaddv2si (TARGET_SIMD)
#define HAVE_aarch64_urhaddv2si (TARGET_SIMD)
#define HAVE_aarch64_shsubv2si (TARGET_SIMD)
#define HAVE_aarch64_uhsubv2si (TARGET_SIMD)
#define HAVE_aarch64_shaddv4si (TARGET_SIMD)
#define HAVE_aarch64_uhaddv4si (TARGET_SIMD)
#define HAVE_aarch64_srhaddv4si (TARGET_SIMD)
#define HAVE_aarch64_urhaddv4si (TARGET_SIMD)
#define HAVE_aarch64_shsubv4si (TARGET_SIMD)
#define HAVE_aarch64_uhsubv4si (TARGET_SIMD)
#define HAVE_aarch64_addhnv8hi (TARGET_SIMD)
#define HAVE_aarch64_raddhnv8hi (TARGET_SIMD)
#define HAVE_aarch64_subhnv8hi (TARGET_SIMD)
#define HAVE_aarch64_rsubhnv8hi (TARGET_SIMD)
#define HAVE_aarch64_addhnv4si (TARGET_SIMD)
#define HAVE_aarch64_raddhnv4si (TARGET_SIMD)
#define HAVE_aarch64_subhnv4si (TARGET_SIMD)
#define HAVE_aarch64_rsubhnv4si (TARGET_SIMD)
#define HAVE_aarch64_addhnv2di (TARGET_SIMD)
#define HAVE_aarch64_raddhnv2di (TARGET_SIMD)
#define HAVE_aarch64_subhnv2di (TARGET_SIMD)
#define HAVE_aarch64_rsubhnv2di (TARGET_SIMD)
#define HAVE_aarch64_addhn2v8hi (TARGET_SIMD)
#define HAVE_aarch64_raddhn2v8hi (TARGET_SIMD)
#define HAVE_aarch64_subhn2v8hi (TARGET_SIMD)
#define HAVE_aarch64_rsubhn2v8hi (TARGET_SIMD)
#define HAVE_aarch64_addhn2v4si (TARGET_SIMD)
#define HAVE_aarch64_raddhn2v4si (TARGET_SIMD)
#define HAVE_aarch64_subhn2v4si (TARGET_SIMD)
#define HAVE_aarch64_rsubhn2v4si (TARGET_SIMD)
#define HAVE_aarch64_addhn2v2di (TARGET_SIMD)
#define HAVE_aarch64_raddhn2v2di (TARGET_SIMD)
#define HAVE_aarch64_subhn2v2di (TARGET_SIMD)
#define HAVE_aarch64_rsubhn2v2di (TARGET_SIMD)
#define HAVE_aarch64_pmulv8qi (TARGET_SIMD)
#define HAVE_aarch64_pmulv16qi (TARGET_SIMD)
#define HAVE_aarch64_fmulxv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fmulxv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fmulxv2sf (TARGET_SIMD)
#define HAVE_aarch64_fmulxv4sf (TARGET_SIMD)
#define HAVE_aarch64_fmulxv2df (TARGET_SIMD)
#define HAVE_aarch64_fmulxhf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_fmulxsf (TARGET_SIMD)
#define HAVE_aarch64_fmulxdf (TARGET_SIMD)
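/* Saturating arithmetic: SQADD/UQADD/SQSUB/UQSUB in vector and scalar
   forms, followed by SUQADD/USQADD and the saturating narrows.  */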
#define HAVE_aarch64_sqaddv8qi (TARGET_SIMD)
#define HAVE_aarch64_uqaddv8qi (TARGET_SIMD)
#define HAVE_aarch64_sqsubv8qi (TARGET_SIMD)
#define HAVE_aarch64_uqsubv8qi (TARGET_SIMD)
#define HAVE_aarch64_sqaddv16qi (TARGET_SIMD)
#define HAVE_aarch64_uqaddv16qi (TARGET_SIMD)
#define HAVE_aarch64_sqsubv16qi (TARGET_SIMD)
#define HAVE_aarch64_uqsubv16qi (TARGET_SIMD)
#define HAVE_aarch64_sqaddv4hi (TARGET_SIMD)
#define HAVE_aarch64_uqaddv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqsubv4hi (TARGET_SIMD)
#define HAVE_aarch64_uqsubv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqaddv8hi (TARGET_SIMD)
#define HAVE_aarch64_uqaddv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqsubv8hi (TARGET_SIMD)
#define HAVE_aarch64_uqsubv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqaddv2si (TARGET_SIMD)
#define HAVE_aarch64_uqaddv2si (TARGET_SIMD)
#define HAVE_aarch64_sqsubv2si (TARGET_SIMD)
#define HAVE_aarch64_uqsubv2si (TARGET_SIMD)
#define HAVE_aarch64_sqaddv4si (TARGET_SIMD)
#define HAVE_aarch64_uqaddv4si (TARGET_SIMD)
#define HAVE_aarch64_sqsubv4si (TARGET_SIMD)
#define HAVE_aarch64_uqsubv4si (TARGET_SIMD)
#define HAVE_aarch64_sqaddv2di (TARGET_SIMD)
#define HAVE_aarch64_uqaddv2di (TARGET_SIMD)
#define HAVE_aarch64_sqsubv2di (TARGET_SIMD)
#define HAVE_aarch64_uqsubv2di (TARGET_SIMD)
#define HAVE_aarch64_sqaddqi (TARGET_SIMD)
#define HAVE_aarch64_uqaddqi (TARGET_SIMD)
#define HAVE_aarch64_sqsubqi (TARGET_SIMD)
#define HAVE_aarch64_uqsubqi (TARGET_SIMD)
#define HAVE_aarch64_sqaddhi (TARGET_SIMD)
#define HAVE_aarch64_uqaddhi (TARGET_SIMD)
#define HAVE_aarch64_sqsubhi (TARGET_SIMD)
#define HAVE_aarch64_uqsubhi (TARGET_SIMD)
#define HAVE_aarch64_sqaddsi (TARGET_SIMD)
#define HAVE_aarch64_uqaddsi (TARGET_SIMD)
#define HAVE_aarch64_sqsubsi (TARGET_SIMD)
#define HAVE_aarch64_uqsubsi (TARGET_SIMD)
#define HAVE_aarch64_sqadddi (TARGET_SIMD)
#define HAVE_aarch64_uqadddi (TARGET_SIMD)
#define HAVE_aarch64_sqsubdi (TARGET_SIMD)
#define HAVE_aarch64_uqsubdi (TARGET_SIMD)
#define HAVE_aarch64_suqaddv8qi (TARGET_SIMD)
#define HAVE_aarch64_usqaddv8qi (TARGET_SIMD)
#define HAVE_aarch64_suqaddv16qi (TARGET_SIMD)
#define HAVE_aarch64_usqaddv16qi (TARGET_SIMD)
#define HAVE_aarch64_suqaddv4hi (TARGET_SIMD)
#define HAVE_aarch64_usqaddv4hi (TARGET_SIMD)
#define HAVE_aarch64_suqaddv8hi (TARGET_SIMD)
#define HAVE_aarch64_usqaddv8hi (TARGET_SIMD)
#define HAVE_aarch64_suqaddv2si (TARGET_SIMD)
#define HAVE_aarch64_usqaddv2si (TARGET_SIMD)
#define HAVE_aarch64_suqaddv4si (TARGET_SIMD)
#define HAVE_aarch64_usqaddv4si (TARGET_SIMD)
#define HAVE_aarch64_suqaddv2di (TARGET_SIMD)
#define HAVE_aarch64_usqaddv2di (TARGET_SIMD)
#define HAVE_aarch64_suqaddqi (TARGET_SIMD)
#define HAVE_aarch64_usqaddqi (TARGET_SIMD)
#define HAVE_aarch64_suqaddhi (TARGET_SIMD)
#define HAVE_aarch64_usqaddhi (TARGET_SIMD)
#define HAVE_aarch64_suqaddsi (TARGET_SIMD)
#define HAVE_aarch64_usqaddsi (TARGET_SIMD)
#define HAVE_aarch64_suqadddi (TARGET_SIMD)
#define HAVE_aarch64_usqadddi (TARGET_SIMD)
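/* Saturating narrows (SQXTUN, SQXTN, UQXTN) and saturating negate and
   absolute value (SQNEG, SQABS).  */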
#define HAVE_aarch64_sqmovunv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqmovunv4si (TARGET_SIMD)
#define HAVE_aarch64_sqmovunv2di (TARGET_SIMD)
#define HAVE_aarch64_sqmovunhi (TARGET_SIMD)
#define HAVE_aarch64_sqmovunsi (TARGET_SIMD)
#define HAVE_aarch64_sqmovundi (TARGET_SIMD)
#define HAVE_aarch64_sqmovnv8hi (TARGET_SIMD)
#define HAVE_aarch64_uqmovnv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqmovnv4si (TARGET_SIMD)
#define HAVE_aarch64_uqmovnv4si (TARGET_SIMD)
#define HAVE_aarch64_sqmovnv2di (TARGET_SIMD)
#define HAVE_aarch64_uqmovnv2di (TARGET_SIMD)
#define HAVE_aarch64_sqmovnhi (TARGET_SIMD)
#define HAVE_aarch64_uqmovnhi (TARGET_SIMD)
#define HAVE_aarch64_sqmovnsi (TARGET_SIMD)
#define HAVE_aarch64_uqmovnsi (TARGET_SIMD)
#define HAVE_aarch64_sqmovndi (TARGET_SIMD)
#define HAVE_aarch64_uqmovndi (TARGET_SIMD)
#define HAVE_aarch64_sqnegv8qi (TARGET_SIMD)
#define HAVE_aarch64_sqabsv8qi (TARGET_SIMD)
#define HAVE_aarch64_sqnegv16qi (TARGET_SIMD)
#define HAVE_aarch64_sqabsv16qi (TARGET_SIMD)
#define HAVE_aarch64_sqnegv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqabsv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqnegv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqabsv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqnegv2si (TARGET_SIMD)
#define HAVE_aarch64_sqabsv2si (TARGET_SIMD)
#define HAVE_aarch64_sqnegv4si (TARGET_SIMD)
#define HAVE_aarch64_sqabsv4si (TARGET_SIMD)
#define HAVE_aarch64_sqnegv2di (TARGET_SIMD)
#define HAVE_aarch64_sqabsv2di (TARGET_SIMD)
#define HAVE_aarch64_sqnegqi (TARGET_SIMD)
#define HAVE_aarch64_sqabsqi (TARGET_SIMD)
#define HAVE_aarch64_sqneghi (TARGET_SIMD)
#define HAVE_aarch64_sqabshi (TARGET_SIMD)
#define HAVE_aarch64_sqnegsi (TARGET_SIMD)
#define HAVE_aarch64_sqabssi (TARGET_SIMD)
#define HAVE_aarch64_sqnegdi (TARGET_SIMD)
#define HAVE_aarch64_sqabsdi (TARGET_SIMD)
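/* Saturating doubling multiply returning the high half: SQDMULH and
   the rounding SQRDMULH, including the _lane/_laneq forms that
   multiply by a single vector element.  */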
#define HAVE_aarch64_sqdmulhv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqrdmulhv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmulhv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqrdmulhv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmulhv2si (TARGET_SIMD)
#define HAVE_aarch64_sqrdmulhv2si (TARGET_SIMD)
#define HAVE_aarch64_sqdmulhv4si (TARGET_SIMD)
#define HAVE_aarch64_sqrdmulhv4si (TARGET_SIMD)
#define HAVE_aarch64_sqdmulhhi (TARGET_SIMD)
#define HAVE_aarch64_sqrdmulhhi (TARGET_SIMD)
#define HAVE_aarch64_sqdmulhsi (TARGET_SIMD)
#define HAVE_aarch64_sqrdmulhsi (TARGET_SIMD)
#define HAVE_aarch64_sqdmulh_lanev4hi (TARGET_SIMD)
#define HAVE_aarch64_sqrdmulh_lanev4hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmulh_lanev8hi (TARGET_SIMD)
#define HAVE_aarch64_sqrdmulh_lanev8hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmulh_lanev2si (TARGET_SIMD)
#define HAVE_aarch64_sqrdmulh_lanev2si (TARGET_SIMD)
#define HAVE_aarch64_sqdmulh_lanev4si (TARGET_SIMD)
#define HAVE_aarch64_sqrdmulh_lanev4si (TARGET_SIMD)
#define HAVE_aarch64_sqdmulh_laneqv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqrdmulh_laneqv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmulh_laneqv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqrdmulh_laneqv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmulh_laneqv2si (TARGET_SIMD)
#define HAVE_aarch64_sqrdmulh_laneqv2si (TARGET_SIMD)
#define HAVE_aarch64_sqdmulh_laneqv4si (TARGET_SIMD)
#define HAVE_aarch64_sqrdmulh_laneqv4si (TARGET_SIMD)
#define HAVE_aarch64_sqdmulh_lanehi (TARGET_SIMD)
#define HAVE_aarch64_sqrdmulh_lanehi (TARGET_SIMD)
#define HAVE_aarch64_sqdmulh_lanesi (TARGET_SIMD)
#define HAVE_aarch64_sqrdmulh_lanesi (TARGET_SIMD)
#define HAVE_aarch64_sqdmulh_laneqhi (TARGET_SIMD)
#define HAVE_aarch64_sqrdmulh_laneqhi (TARGET_SIMD)
#define HAVE_aarch64_sqdmulh_laneqsi (TARGET_SIMD)
#define HAVE_aarch64_sqrdmulh_laneqsi (TARGET_SIMD)
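/* SQRDMLAH/SQRDMLSH saturating rounding doubling multiply-accumulate,
   part of the ARMv8.1-A RDMA extension, hence the TARGET_SIMD_RDMA
   gate.  */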
#define HAVE_aarch64_sqrdmlahv4hi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlshv4hi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlahv8hi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlshv8hi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlahv2si (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlshv2si (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlahv4si (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlshv4si (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlahhi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlshhi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlahsi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlshsi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlah_lanev4hi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlsh_lanev4hi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlah_lanev8hi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlsh_lanev8hi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlah_lanev2si (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlsh_lanev2si (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlah_lanev4si (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlsh_lanev4si (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlah_lanehi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlsh_lanehi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlah_lanesi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlsh_lanesi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlah_laneqv4hi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlsh_laneqv4hi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlah_laneqv8hi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlsh_laneqv8hi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlah_laneqv2si (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlsh_laneqv2si (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlah_laneqv4si (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlsh_laneqv4si (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlah_laneqhi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlsh_laneqhi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlah_laneqsi (TARGET_SIMD_RDMA)
#define HAVE_aarch64_sqrdmlsh_laneqsi (TARGET_SIMD_RDMA)
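/* Saturating doubling widening operations: SQDMLAL/SQDMLSL
   multiply-accumulate long, then SQDMULL multiply long, with _lane,
   _laneq and _n (scalar operand) variants; the *2_internal patterns
   implement the high-half forms.  */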
#define HAVE_aarch64_sqdmlalv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlslv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlalv2si (TARGET_SIMD)
#define HAVE_aarch64_sqdmlslv2si (TARGET_SIMD)
#define HAVE_aarch64_sqdmlalhi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlslhi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlalsi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlslsi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal_lanev4hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl_lanev4hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal_lanev2si (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl_lanev2si (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal_laneqv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl_laneqv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal_laneqv2si (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl_laneqv2si (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal_lanehi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl_lanehi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal_lanesi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl_lanesi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal_laneqhi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl_laneqhi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal_laneqsi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl_laneqsi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal_nv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl_nv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal_nv2si (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl_nv2si (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal2v8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl2v8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal2v4si_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl2v4si_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal2_lanev8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl2_lanev8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal2_lanev4si_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl2_lanev4si_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal2_laneqv8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl2_laneqv8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal2_laneqv4si_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl2_laneqv4si_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal2_nv8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl2_nv8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal2_nv4si_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl2_nv4si_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmullv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmullv2si (TARGET_SIMD)
#define HAVE_aarch64_sqdmullhi (TARGET_SIMD)
#define HAVE_aarch64_sqdmullsi (TARGET_SIMD)
#define HAVE_aarch64_sqdmull_lanev4hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmull_lanev2si (TARGET_SIMD)
#define HAVE_aarch64_sqdmull_laneqv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmull_laneqv2si (TARGET_SIMD)
#define HAVE_aarch64_sqdmull_lanehi (TARGET_SIMD)
#define HAVE_aarch64_sqdmull_lanesi (TARGET_SIMD)
#define HAVE_aarch64_sqdmull_laneqhi (TARGET_SIMD)
#define HAVE_aarch64_sqdmull_laneqsi (TARGET_SIMD)
#define HAVE_aarch64_sqdmull_nv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmull_nv2si (TARGET_SIMD)
#define HAVE_aarch64_sqdmull2v8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmull2v4si_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmull2_lanev8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmull2_lanev4si_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmull2_laneqv8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmull2_laneqv4si_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmull2_nv8hi_internal (TARGET_SIMD)
#define HAVE_aarch64_sqdmull2_nv4si_internal (TARGET_SIMD)
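/* Vector shifts: SSHL/USHL and rounding SRSHL/URSHL shift by a
   register; SQSHL/UQSHL/SQRSHL/UQRSHL saturate as well.  Then come the
   immediate forms: SSHLL/USHLL shift left long, SRSHR/URSHR rounding
   shift right, SSRA/USRA/SRSRA/URSRA shift right and accumulate,
   SLI/SRI shift and insert, and SQSHLU/SQSHL/UQSHL saturating shift
   left.  */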
#define HAVE_aarch64_sshlv8qi (TARGET_SIMD)
#define HAVE_aarch64_ushlv8qi (TARGET_SIMD)
#define HAVE_aarch64_srshlv8qi (TARGET_SIMD)
#define HAVE_aarch64_urshlv8qi (TARGET_SIMD)
#define HAVE_aarch64_sshlv16qi (TARGET_SIMD)
#define HAVE_aarch64_ushlv16qi (TARGET_SIMD)
#define HAVE_aarch64_srshlv16qi (TARGET_SIMD)
#define HAVE_aarch64_urshlv16qi (TARGET_SIMD)
#define HAVE_aarch64_sshlv4hi (TARGET_SIMD)
#define HAVE_aarch64_ushlv4hi (TARGET_SIMD)
#define HAVE_aarch64_srshlv4hi (TARGET_SIMD)
#define HAVE_aarch64_urshlv4hi (TARGET_SIMD)
#define HAVE_aarch64_sshlv8hi (TARGET_SIMD)
#define HAVE_aarch64_ushlv8hi (TARGET_SIMD)
#define HAVE_aarch64_srshlv8hi (TARGET_SIMD)
#define HAVE_aarch64_urshlv8hi (TARGET_SIMD)
#define HAVE_aarch64_sshlv2si (TARGET_SIMD)
#define HAVE_aarch64_ushlv2si (TARGET_SIMD)
#define HAVE_aarch64_srshlv2si (TARGET_SIMD)
#define HAVE_aarch64_urshlv2si (TARGET_SIMD)
#define HAVE_aarch64_sshlv4si (TARGET_SIMD)
#define HAVE_aarch64_ushlv4si (TARGET_SIMD)
#define HAVE_aarch64_srshlv4si (TARGET_SIMD)
#define HAVE_aarch64_urshlv4si (TARGET_SIMD)
#define HAVE_aarch64_sshlv2di (TARGET_SIMD)
#define HAVE_aarch64_ushlv2di (TARGET_SIMD)
#define HAVE_aarch64_srshlv2di (TARGET_SIMD)
#define HAVE_aarch64_urshlv2di (TARGET_SIMD)
#define HAVE_aarch64_sshldi (TARGET_SIMD)
#define HAVE_aarch64_ushldi (TARGET_SIMD)
#define HAVE_aarch64_srshldi (TARGET_SIMD)
#define HAVE_aarch64_urshldi (TARGET_SIMD)
#define HAVE_aarch64_sqshlv8qi (TARGET_SIMD)
#define HAVE_aarch64_uqshlv8qi (TARGET_SIMD)
#define HAVE_aarch64_sqrshlv8qi (TARGET_SIMD)
#define HAVE_aarch64_uqrshlv8qi (TARGET_SIMD)
#define HAVE_aarch64_sqshlv16qi (TARGET_SIMD)
#define HAVE_aarch64_uqshlv16qi (TARGET_SIMD)
#define HAVE_aarch64_sqrshlv16qi (TARGET_SIMD)
#define HAVE_aarch64_uqrshlv16qi (TARGET_SIMD)
#define HAVE_aarch64_sqshlv4hi (TARGET_SIMD)
#define HAVE_aarch64_uqshlv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqrshlv4hi (TARGET_SIMD)
#define HAVE_aarch64_uqrshlv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqshlv8hi (TARGET_SIMD)
#define HAVE_aarch64_uqshlv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqrshlv8hi (TARGET_SIMD)
#define HAVE_aarch64_uqrshlv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqshlv2si (TARGET_SIMD)
#define HAVE_aarch64_uqshlv2si (TARGET_SIMD)
#define HAVE_aarch64_sqrshlv2si (TARGET_SIMD)
#define HAVE_aarch64_uqrshlv2si (TARGET_SIMD)
#define HAVE_aarch64_sqshlv4si (TARGET_SIMD)
#define HAVE_aarch64_uqshlv4si (TARGET_SIMD)
#define HAVE_aarch64_sqrshlv4si (TARGET_SIMD)
#define HAVE_aarch64_uqrshlv4si (TARGET_SIMD)
#define HAVE_aarch64_sqshlv2di (TARGET_SIMD)
#define HAVE_aarch64_uqshlv2di (TARGET_SIMD)
#define HAVE_aarch64_sqrshlv2di (TARGET_SIMD)
#define HAVE_aarch64_uqrshlv2di (TARGET_SIMD)
#define HAVE_aarch64_sqshlqi (TARGET_SIMD)
#define HAVE_aarch64_uqshlqi (TARGET_SIMD)
#define HAVE_aarch64_sqrshlqi (TARGET_SIMD)
#define HAVE_aarch64_uqrshlqi (TARGET_SIMD)
#define HAVE_aarch64_sqshlhi (TARGET_SIMD)
#define HAVE_aarch64_uqshlhi (TARGET_SIMD)
#define HAVE_aarch64_sqrshlhi (TARGET_SIMD)
#define HAVE_aarch64_uqrshlhi (TARGET_SIMD)
#define HAVE_aarch64_sqshlsi (TARGET_SIMD)
#define HAVE_aarch64_uqshlsi (TARGET_SIMD)
#define HAVE_aarch64_sqrshlsi (TARGET_SIMD)
#define HAVE_aarch64_uqrshlsi (TARGET_SIMD)
#define HAVE_aarch64_sqshldi (TARGET_SIMD)
#define HAVE_aarch64_uqshldi (TARGET_SIMD)
#define HAVE_aarch64_sqrshldi (TARGET_SIMD)
#define HAVE_aarch64_uqrshldi (TARGET_SIMD)
#define HAVE_aarch64_sshll_nv8qi (TARGET_SIMD)
#define HAVE_aarch64_ushll_nv8qi (TARGET_SIMD)
#define HAVE_aarch64_sshll_nv4hi (TARGET_SIMD)
#define HAVE_aarch64_ushll_nv4hi (TARGET_SIMD)
#define HAVE_aarch64_sshll_nv2si (TARGET_SIMD)
#define HAVE_aarch64_ushll_nv2si (TARGET_SIMD)
#define HAVE_aarch64_sshll2_nv16qi (TARGET_SIMD)
#define HAVE_aarch64_ushll2_nv16qi (TARGET_SIMD)
#define HAVE_aarch64_sshll2_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_ushll2_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_sshll2_nv4si (TARGET_SIMD)
#define HAVE_aarch64_ushll2_nv4si (TARGET_SIMD)
#define HAVE_aarch64_srshr_nv8qi (TARGET_SIMD)
#define HAVE_aarch64_urshr_nv8qi (TARGET_SIMD)
#define HAVE_aarch64_srshr_nv16qi (TARGET_SIMD)
#define HAVE_aarch64_urshr_nv16qi (TARGET_SIMD)
#define HAVE_aarch64_srshr_nv4hi (TARGET_SIMD)
#define HAVE_aarch64_urshr_nv4hi (TARGET_SIMD)
#define HAVE_aarch64_srshr_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_urshr_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_srshr_nv2si (TARGET_SIMD)
#define HAVE_aarch64_urshr_nv2si (TARGET_SIMD)
#define HAVE_aarch64_srshr_nv4si (TARGET_SIMD)
#define HAVE_aarch64_urshr_nv4si (TARGET_SIMD)
#define HAVE_aarch64_srshr_nv2di (TARGET_SIMD)
#define HAVE_aarch64_urshr_nv2di (TARGET_SIMD)
#define HAVE_aarch64_srshr_ndi (TARGET_SIMD)
#define HAVE_aarch64_urshr_ndi (TARGET_SIMD)
#define HAVE_aarch64_ssra_nv8qi (TARGET_SIMD)
#define HAVE_aarch64_usra_nv8qi (TARGET_SIMD)
#define HAVE_aarch64_srsra_nv8qi (TARGET_SIMD)
#define HAVE_aarch64_ursra_nv8qi (TARGET_SIMD)
#define HAVE_aarch64_ssra_nv16qi (TARGET_SIMD)
#define HAVE_aarch64_usra_nv16qi (TARGET_SIMD)
#define HAVE_aarch64_srsra_nv16qi (TARGET_SIMD)
#define HAVE_aarch64_ursra_nv16qi (TARGET_SIMD)
#define HAVE_aarch64_ssra_nv4hi (TARGET_SIMD)
#define HAVE_aarch64_usra_nv4hi (TARGET_SIMD)
#define HAVE_aarch64_srsra_nv4hi (TARGET_SIMD)
#define HAVE_aarch64_ursra_nv4hi (TARGET_SIMD)
#define HAVE_aarch64_ssra_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_usra_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_srsra_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_ursra_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_ssra_nv2si (TARGET_SIMD)
#define HAVE_aarch64_usra_nv2si (TARGET_SIMD)
#define HAVE_aarch64_srsra_nv2si (TARGET_SIMD)
#define HAVE_aarch64_ursra_nv2si (TARGET_SIMD)
#define HAVE_aarch64_ssra_nv4si (TARGET_SIMD)
#define HAVE_aarch64_usra_nv4si (TARGET_SIMD)
#define HAVE_aarch64_srsra_nv4si (TARGET_SIMD)
#define HAVE_aarch64_ursra_nv4si (TARGET_SIMD)
#define HAVE_aarch64_ssra_nv2di (TARGET_SIMD)
#define HAVE_aarch64_usra_nv2di (TARGET_SIMD)
#define HAVE_aarch64_srsra_nv2di (TARGET_SIMD)
#define HAVE_aarch64_ursra_nv2di (TARGET_SIMD)
#define HAVE_aarch64_ssra_ndi (TARGET_SIMD)
#define HAVE_aarch64_usra_ndi (TARGET_SIMD)
#define HAVE_aarch64_srsra_ndi (TARGET_SIMD)
#define HAVE_aarch64_ursra_ndi (TARGET_SIMD)
#define HAVE_aarch64_ssli_nv8qi (TARGET_SIMD)
#define HAVE_aarch64_usli_nv8qi (TARGET_SIMD)
#define HAVE_aarch64_ssri_nv8qi (TARGET_SIMD)
#define HAVE_aarch64_usri_nv8qi (TARGET_SIMD)
#define HAVE_aarch64_ssli_nv16qi (TARGET_SIMD)
#define HAVE_aarch64_usli_nv16qi (TARGET_SIMD)
#define HAVE_aarch64_ssri_nv16qi (TARGET_SIMD)
#define HAVE_aarch64_usri_nv16qi (TARGET_SIMD)
#define HAVE_aarch64_ssli_nv4hi (TARGET_SIMD)
#define HAVE_aarch64_usli_nv4hi (TARGET_SIMD)
#define HAVE_aarch64_ssri_nv4hi (TARGET_SIMD)
#define HAVE_aarch64_usri_nv4hi (TARGET_SIMD)
#define HAVE_aarch64_ssli_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_usli_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_ssri_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_usri_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_ssli_nv2si (TARGET_SIMD)
#define HAVE_aarch64_usli_nv2si (TARGET_SIMD)
#define HAVE_aarch64_ssri_nv2si (TARGET_SIMD)
#define HAVE_aarch64_usri_nv2si (TARGET_SIMD)
#define HAVE_aarch64_ssli_nv4si (TARGET_SIMD)
#define HAVE_aarch64_usli_nv4si (TARGET_SIMD)
#define HAVE_aarch64_ssri_nv4si (TARGET_SIMD)
#define HAVE_aarch64_usri_nv4si (TARGET_SIMD)
#define HAVE_aarch64_ssli_nv2di (TARGET_SIMD)
#define HAVE_aarch64_usli_nv2di (TARGET_SIMD)
#define HAVE_aarch64_ssri_nv2di (TARGET_SIMD)
#define HAVE_aarch64_usri_nv2di (TARGET_SIMD)
#define HAVE_aarch64_ssli_ndi (TARGET_SIMD)
#define HAVE_aarch64_usli_ndi (TARGET_SIMD)
#define HAVE_aarch64_ssri_ndi (TARGET_SIMD)
#define HAVE_aarch64_usri_ndi (TARGET_SIMD)
#define HAVE_aarch64_sqshlu_nv8qi (TARGET_SIMD)
#define HAVE_aarch64_sqshl_nv8qi (TARGET_SIMD)
#define HAVE_aarch64_uqshl_nv8qi (TARGET_SIMD)
#define HAVE_aarch64_sqshlu_nv16qi (TARGET_SIMD)
#define HAVE_aarch64_sqshl_nv16qi (TARGET_SIMD)
#define HAVE_aarch64_uqshl_nv16qi (TARGET_SIMD)
#define HAVE_aarch64_sqshlu_nv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqshl_nv4hi (TARGET_SIMD)
#define HAVE_aarch64_uqshl_nv4hi (TARGET_SIMD)
#define HAVE_aarch64_sqshlu_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqshl_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_uqshl_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqshlu_nv2si (TARGET_SIMD)
#define HAVE_aarch64_sqshl_nv2si (TARGET_SIMD)
#define HAVE_aarch64_uqshl_nv2si (TARGET_SIMD)
#define HAVE_aarch64_sqshlu_nv4si (TARGET_SIMD)
#define HAVE_aarch64_sqshl_nv4si (TARGET_SIMD)
#define HAVE_aarch64_uqshl_nv4si (TARGET_SIMD)
#define HAVE_aarch64_sqshlu_nv2di (TARGET_SIMD)
#define HAVE_aarch64_sqshl_nv2di (TARGET_SIMD)
#define HAVE_aarch64_uqshl_nv2di (TARGET_SIMD)
#define HAVE_aarch64_sqshlu_nqi (TARGET_SIMD)
#define HAVE_aarch64_sqshl_nqi (TARGET_SIMD)
#define HAVE_aarch64_uqshl_nqi (TARGET_SIMD)
#define HAVE_aarch64_sqshlu_nhi (TARGET_SIMD)
#define HAVE_aarch64_sqshl_nhi (TARGET_SIMD)
#define HAVE_aarch64_uqshl_nhi (TARGET_SIMD)
#define HAVE_aarch64_sqshlu_nsi (TARGET_SIMD)
#define HAVE_aarch64_sqshl_nsi (TARGET_SIMD)
#define HAVE_aarch64_uqshl_nsi (TARGET_SIMD)
#define HAVE_aarch64_sqshlu_ndi (TARGET_SIMD)
#define HAVE_aarch64_sqshl_ndi (TARGET_SIMD)
#define HAVE_aarch64_uqshl_ndi (TARGET_SIMD)
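/* Saturating shift right narrow: SQSHRUN/SQRSHRUN (unsigned result
   from signed input), SQSHRN/UQSHRN and the rounding
   SQRSHRN/UQRSHRN.  */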
#define HAVE_aarch64_sqshrun_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqrshrun_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqshrn_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_uqshrn_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqrshrn_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_uqrshrn_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqshrun_nv4si (TARGET_SIMD)
#define HAVE_aarch64_sqrshrun_nv4si (TARGET_SIMD)
#define HAVE_aarch64_sqshrn_nv4si (TARGET_SIMD)
#define HAVE_aarch64_uqshrn_nv4si (TARGET_SIMD)
#define HAVE_aarch64_sqrshrn_nv4si (TARGET_SIMD)
#define HAVE_aarch64_uqrshrn_nv4si (TARGET_SIMD)
#define HAVE_aarch64_sqshrun_nv2di (TARGET_SIMD)
#define HAVE_aarch64_sqrshrun_nv2di (TARGET_SIMD)
#define HAVE_aarch64_sqshrn_nv2di (TARGET_SIMD)
#define HAVE_aarch64_uqshrn_nv2di (TARGET_SIMD)
#define HAVE_aarch64_sqrshrn_nv2di (TARGET_SIMD)
#define HAVE_aarch64_uqrshrn_nv2di (TARGET_SIMD)
#define HAVE_aarch64_sqshrun_nhi (TARGET_SIMD)
#define HAVE_aarch64_sqrshrun_nhi (TARGET_SIMD)
#define HAVE_aarch64_sqshrn_nhi (TARGET_SIMD)
#define HAVE_aarch64_uqshrn_nhi (TARGET_SIMD)
#define HAVE_aarch64_sqrshrn_nhi (TARGET_SIMD)
#define HAVE_aarch64_uqrshrn_nhi (TARGET_SIMD)
#define HAVE_aarch64_sqshrun_nsi (TARGET_SIMD)
#define HAVE_aarch64_sqrshrun_nsi (TARGET_SIMD)
#define HAVE_aarch64_sqshrn_nsi (TARGET_SIMD)
#define HAVE_aarch64_uqshrn_nsi (TARGET_SIMD)
#define HAVE_aarch64_sqrshrn_nsi (TARGET_SIMD)
#define HAVE_aarch64_uqrshrn_nsi (TARGET_SIMD)
#define HAVE_aarch64_sqshrun_ndi (TARGET_SIMD)
#define HAVE_aarch64_sqrshrun_ndi (TARGET_SIMD)
#define HAVE_aarch64_sqshrn_ndi (TARGET_SIMD)
#define HAVE_aarch64_uqshrn_ndi (TARGET_SIMD)
#define HAVE_aarch64_sqrshrn_ndi (TARGET_SIMD)
#define HAVE_aarch64_uqrshrn_ndi (TARGET_SIMD)
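/* Integer compares producing all-ones/all-zeros element masks:
   CMLT/CMLE/CMEQ/CMGE/CMGT, their unsigned counterparts (CMHS/CMHI),
   and CMTST, which tests for any common set bits.  */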
#define HAVE_aarch64_cmltv8qi (TARGET_SIMD)
#define HAVE_aarch64_cmlev8qi (TARGET_SIMD)
#define HAVE_aarch64_cmeqv8qi (TARGET_SIMD)
#define HAVE_aarch64_cmgev8qi (TARGET_SIMD)
#define HAVE_aarch64_cmgtv8qi (TARGET_SIMD)
#define HAVE_aarch64_cmltv16qi (TARGET_SIMD)
#define HAVE_aarch64_cmlev16qi (TARGET_SIMD)
#define HAVE_aarch64_cmeqv16qi (TARGET_SIMD)
#define HAVE_aarch64_cmgev16qi (TARGET_SIMD)
#define HAVE_aarch64_cmgtv16qi (TARGET_SIMD)
#define HAVE_aarch64_cmltv4hi (TARGET_SIMD)
#define HAVE_aarch64_cmlev4hi (TARGET_SIMD)
#define HAVE_aarch64_cmeqv4hi (TARGET_SIMD)
#define HAVE_aarch64_cmgev4hi (TARGET_SIMD)
#define HAVE_aarch64_cmgtv4hi (TARGET_SIMD)
#define HAVE_aarch64_cmltv8hi (TARGET_SIMD)
#define HAVE_aarch64_cmlev8hi (TARGET_SIMD)
#define HAVE_aarch64_cmeqv8hi (TARGET_SIMD)
#define HAVE_aarch64_cmgev8hi (TARGET_SIMD)
#define HAVE_aarch64_cmgtv8hi (TARGET_SIMD)
#define HAVE_aarch64_cmltv2si (TARGET_SIMD)
#define HAVE_aarch64_cmlev2si (TARGET_SIMD)
#define HAVE_aarch64_cmeqv2si (TARGET_SIMD)
#define HAVE_aarch64_cmgev2si (TARGET_SIMD)
#define HAVE_aarch64_cmgtv2si (TARGET_SIMD)
#define HAVE_aarch64_cmltv4si (TARGET_SIMD)
#define HAVE_aarch64_cmlev4si (TARGET_SIMD)
#define HAVE_aarch64_cmeqv4si (TARGET_SIMD)
#define HAVE_aarch64_cmgev4si (TARGET_SIMD)
#define HAVE_aarch64_cmgtv4si (TARGET_SIMD)
#define HAVE_aarch64_cmltv2di (TARGET_SIMD)
#define HAVE_aarch64_cmlev2di (TARGET_SIMD)
#define HAVE_aarch64_cmeqv2di (TARGET_SIMD)
#define HAVE_aarch64_cmgev2di (TARGET_SIMD)
#define HAVE_aarch64_cmgtv2di (TARGET_SIMD)
#define HAVE_aarch64_cmltdi (TARGET_SIMD)
#define HAVE_aarch64_cmledi (TARGET_SIMD)
#define HAVE_aarch64_cmeqdi (TARGET_SIMD)
#define HAVE_aarch64_cmgedi (TARGET_SIMD)
#define HAVE_aarch64_cmgtdi (TARGET_SIMD)
#define HAVE_aarch64_cmltuv8qi (TARGET_SIMD)
#define HAVE_aarch64_cmleuv8qi (TARGET_SIMD)
#define HAVE_aarch64_cmgeuv8qi (TARGET_SIMD)
#define HAVE_aarch64_cmgtuv8qi (TARGET_SIMD)
#define HAVE_aarch64_cmltuv16qi (TARGET_SIMD)
#define HAVE_aarch64_cmleuv16qi (TARGET_SIMD)
#define HAVE_aarch64_cmgeuv16qi (TARGET_SIMD)
#define HAVE_aarch64_cmgtuv16qi (TARGET_SIMD)
#define HAVE_aarch64_cmltuv4hi (TARGET_SIMD)
#define HAVE_aarch64_cmleuv4hi (TARGET_SIMD)
#define HAVE_aarch64_cmgeuv4hi (TARGET_SIMD)
#define HAVE_aarch64_cmgtuv4hi (TARGET_SIMD)
#define HAVE_aarch64_cmltuv8hi (TARGET_SIMD)
#define HAVE_aarch64_cmleuv8hi (TARGET_SIMD)
#define HAVE_aarch64_cmgeuv8hi (TARGET_SIMD)
#define HAVE_aarch64_cmgtuv8hi (TARGET_SIMD)
#define HAVE_aarch64_cmltuv2si (TARGET_SIMD)
#define HAVE_aarch64_cmleuv2si (TARGET_SIMD)
#define HAVE_aarch64_cmgeuv2si (TARGET_SIMD)
#define HAVE_aarch64_cmgtuv2si (TARGET_SIMD)
#define HAVE_aarch64_cmltuv4si (TARGET_SIMD)
#define HAVE_aarch64_cmleuv4si (TARGET_SIMD)
#define HAVE_aarch64_cmgeuv4si (TARGET_SIMD)
#define HAVE_aarch64_cmgtuv4si (TARGET_SIMD)
#define HAVE_aarch64_cmltuv2di (TARGET_SIMD)
#define HAVE_aarch64_cmleuv2di (TARGET_SIMD)
#define HAVE_aarch64_cmgeuv2di (TARGET_SIMD)
#define HAVE_aarch64_cmgtuv2di (TARGET_SIMD)
#define HAVE_aarch64_cmltudi (TARGET_SIMD)
#define HAVE_aarch64_cmleudi (TARGET_SIMD)
#define HAVE_aarch64_cmgeudi (TARGET_SIMD)
#define HAVE_aarch64_cmgtudi (TARGET_SIMD)
#define HAVE_aarch64_cmtstv8qi (TARGET_SIMD)
#define HAVE_aarch64_cmtstv16qi (TARGET_SIMD)
#define HAVE_aarch64_cmtstv4hi (TARGET_SIMD)
#define HAVE_aarch64_cmtstv8hi (TARGET_SIMD)
#define HAVE_aarch64_cmtstv2si (TARGET_SIMD)
#define HAVE_aarch64_cmtstv4si (TARGET_SIMD)
#define HAVE_aarch64_cmtstv2di (TARGET_SIMD)
#define HAVE_aarch64_cmtstdi (TARGET_SIMD)
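/* Floating-point compares, with the half-precision variants gated on
   TARGET_SIMD_F16INST, followed by the absolute-value compares (the
   fac* patterns, implemented with FACGE/FACGT).  */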
#define HAVE_aarch64_cmltv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_cmlev4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_cmeqv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_cmgev4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_cmgtv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_cmltv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_cmlev8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_cmeqv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_cmgev8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_cmgtv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_cmltv2sf (TARGET_SIMD)
#define HAVE_aarch64_cmlev2sf (TARGET_SIMD)
#define HAVE_aarch64_cmeqv2sf (TARGET_SIMD)
#define HAVE_aarch64_cmgev2sf (TARGET_SIMD)
#define HAVE_aarch64_cmgtv2sf (TARGET_SIMD)
#define HAVE_aarch64_cmltv4sf (TARGET_SIMD)
#define HAVE_aarch64_cmlev4sf (TARGET_SIMD)
#define HAVE_aarch64_cmeqv4sf (TARGET_SIMD)
#define HAVE_aarch64_cmgev4sf (TARGET_SIMD)
#define HAVE_aarch64_cmgtv4sf (TARGET_SIMD)
#define HAVE_aarch64_cmltv2df (TARGET_SIMD)
#define HAVE_aarch64_cmlev2df (TARGET_SIMD)
#define HAVE_aarch64_cmeqv2df (TARGET_SIMD)
#define HAVE_aarch64_cmgev2df (TARGET_SIMD)
#define HAVE_aarch64_cmgtv2df (TARGET_SIMD)
#define HAVE_aarch64_cmlthf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_cmlehf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_cmeqhf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_cmgehf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_cmgthf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_cmltsf (TARGET_SIMD)
#define HAVE_aarch64_cmlesf (TARGET_SIMD)
#define HAVE_aarch64_cmeqsf (TARGET_SIMD)
#define HAVE_aarch64_cmgesf (TARGET_SIMD)
#define HAVE_aarch64_cmgtsf (TARGET_SIMD)
#define HAVE_aarch64_cmltdf (TARGET_SIMD)
#define HAVE_aarch64_cmledf (TARGET_SIMD)
#define HAVE_aarch64_cmeqdf (TARGET_SIMD)
#define HAVE_aarch64_cmgedf (TARGET_SIMD)
#define HAVE_aarch64_cmgtdf (TARGET_SIMD)
#define HAVE_aarch64_facltv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_faclev4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_facgev4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_facgtv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_facltv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_faclev8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_facgev8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_facgtv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_facltv2sf (TARGET_SIMD)
#define HAVE_aarch64_faclev2sf (TARGET_SIMD)
#define HAVE_aarch64_facgev2sf (TARGET_SIMD)
#define HAVE_aarch64_facgtv2sf (TARGET_SIMD)
#define HAVE_aarch64_facltv4sf (TARGET_SIMD)
#define HAVE_aarch64_faclev4sf (TARGET_SIMD)
#define HAVE_aarch64_facgev4sf (TARGET_SIMD)
#define HAVE_aarch64_facgtv4sf (TARGET_SIMD)
#define HAVE_aarch64_facltv2df (TARGET_SIMD)
#define HAVE_aarch64_faclev2df (TARGET_SIMD)
#define HAVE_aarch64_facgev2df (TARGET_SIMD)
#define HAVE_aarch64_facgtv2df (TARGET_SIMD)
#define HAVE_aarch64_faclthf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_faclehf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_facgehf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_facgthf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_facltsf (TARGET_SIMD)
#define HAVE_aarch64_faclesf (TARGET_SIMD)
#define HAVE_aarch64_facgesf (TARGET_SIMD)
#define HAVE_aarch64_facgtsf (TARGET_SIMD)
#define HAVE_aarch64_facltdf (TARGET_SIMD)
#define HAVE_aarch64_facledf (TARGET_SIMD)
#define HAVE_aarch64_facgedf (TARGET_SIMD)
#define HAVE_aarch64_facgtdf (TARGET_SIMD)
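/* ADDP pairwise add.  */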
#define HAVE_aarch64_addpv8qi (TARGET_SIMD)
#define HAVE_aarch64_addpv4hi (TARGET_SIMD)
#define HAVE_aarch64_addpv2si (TARGET_SIMD)
#define HAVE_aarch64_addpdi (TARGET_SIMD)
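/* Multi-structure loads and stores: LD2/LD3/LD4 and ST2/ST3/ST4,
   including the replicating LD*R forms, single-lane variants, the
   rev_reglist patterns used to fix up register-list element order on
   big-endian targets, multi-register LD1/ST1 (x2/x3/x4), the
   element-ordered big-endian LD1/ST1 patterns, and the _dreg forms
   operating on 64-bit vectors.  */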
#define HAVE_aarch64_simd_ld2v16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2v8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2v4si (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2v2di (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2v8hf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2v4sf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2v2df (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2v8bf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2rv8qi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2rv16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2rv4hi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2rv8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2rv2si (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2rv4si (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2rv4bf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2rv8bf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2rv2di (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2rv4hf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2rv8hf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2rv2sf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2rv4sf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2rv2df (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2rdi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld2rdf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesoi_lanev8qi (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesoi_lanev16qi (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesoi_lanev4hi (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesoi_lanev8hi (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesoi_lanev2si (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesoi_lanev4si (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesoi_lanev4bf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesoi_lanev8bf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesoi_lanev2di (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesoi_lanev4hf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesoi_lanev8hf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesoi_lanev2sf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesoi_lanev4sf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesoi_lanev2df (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesoi_lanedi (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesoi_lanedf (TARGET_SIMD)
#define HAVE_aarch64_simd_st2v16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_st2v8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_st2v4si (TARGET_SIMD)
#define HAVE_aarch64_simd_st2v2di (TARGET_SIMD)
#define HAVE_aarch64_simd_st2v8hf (TARGET_SIMD)
#define HAVE_aarch64_simd_st2v4sf (TARGET_SIMD)
#define HAVE_aarch64_simd_st2v2df (TARGET_SIMD)
#define HAVE_aarch64_simd_st2v8bf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesoi_lanev8qi (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesoi_lanev16qi (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesoi_lanev4hi (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesoi_lanev8hi (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesoi_lanev2si (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesoi_lanev4si (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesoi_lanev4bf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesoi_lanev8bf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesoi_lanev2di (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesoi_lanev4hf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesoi_lanev8hf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesoi_lanev2sf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesoi_lanev4sf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesoi_lanev2df (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesoi_lanedi (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesoi_lanedf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3v16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3v8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3v4si (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3v2di (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3v8hf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3v4sf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3v2df (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3v8bf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3rv8qi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3rv16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3rv4hi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3rv8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3rv2si (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3rv4si (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3rv4bf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3rv8bf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3rv2di (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3rv4hf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3rv8hf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3rv2sf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3rv4sf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3rv2df (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3rdi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld3rdf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesci_lanev8qi (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesci_lanev16qi (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesci_lanev4hi (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesci_lanev8hi (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesci_lanev2si (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesci_lanev4si (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesci_lanev4bf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesci_lanev8bf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesci_lanev2di (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesci_lanev4hf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesci_lanev8hf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesci_lanev2sf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesci_lanev4sf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesci_lanev2df (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesci_lanedi (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesci_lanedf (TARGET_SIMD)
#define HAVE_aarch64_simd_st3v16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_st3v8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_st3v4si (TARGET_SIMD)
#define HAVE_aarch64_simd_st3v2di (TARGET_SIMD)
#define HAVE_aarch64_simd_st3v8hf (TARGET_SIMD)
#define HAVE_aarch64_simd_st3v4sf (TARGET_SIMD)
#define HAVE_aarch64_simd_st3v2df (TARGET_SIMD)
#define HAVE_aarch64_simd_st3v8bf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesci_lanev8qi (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesci_lanev16qi (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesci_lanev4hi (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesci_lanev8hi (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesci_lanev2si (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesci_lanev4si (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesci_lanev4bf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesci_lanev8bf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesci_lanev2di (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesci_lanev4hf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesci_lanev8hf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesci_lanev2sf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesci_lanev4sf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesci_lanev2df (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesci_lanedi (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesci_lanedf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4v16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4v8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4v4si (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4v2di (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4v8hf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4v4sf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4v2df (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4v8bf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4rv8qi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4rv16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4rv4hi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4rv8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4rv2si (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4rv4si (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4rv4bf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4rv8bf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4rv2di (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4rv4hf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4rv8hf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4rv2sf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4rv4sf (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4rv2df (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4rdi (TARGET_SIMD)
#define HAVE_aarch64_simd_ld4rdf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesxi_lanev8qi (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesxi_lanev16qi (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesxi_lanev4hi (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesxi_lanev8hi (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesxi_lanev2si (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesxi_lanev4si (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesxi_lanev4bf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesxi_lanev8bf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesxi_lanev2di (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesxi_lanev4hf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesxi_lanev8hf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesxi_lanev2sf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesxi_lanev4sf (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesxi_lanev2df (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesxi_lanedi (TARGET_SIMD)
#define HAVE_aarch64_vec_load_lanesxi_lanedf (TARGET_SIMD)
#define HAVE_aarch64_simd_st4v16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_st4v8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_st4v4si (TARGET_SIMD)
#define HAVE_aarch64_simd_st4v2di (TARGET_SIMD)
#define HAVE_aarch64_simd_st4v8hf (TARGET_SIMD)
#define HAVE_aarch64_simd_st4v4sf (TARGET_SIMD)
#define HAVE_aarch64_simd_st4v2df (TARGET_SIMD)
#define HAVE_aarch64_simd_st4v8bf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesxi_lanev8qi (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesxi_lanev16qi (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesxi_lanev4hi (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesxi_lanev8hi (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesxi_lanev2si (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesxi_lanev4si (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesxi_lanev4bf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesxi_lanev8bf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesxi_lanev2di (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesxi_lanev4hf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesxi_lanev8hf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesxi_lanev2sf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesxi_lanev4sf (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesxi_lanev2df (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesxi_lanedi (TARGET_SIMD)
#define HAVE_aarch64_vec_store_lanesxi_lanedf (TARGET_SIMD)
#define HAVE_aarch64_rev_reglistoi (TARGET_SIMD)
#define HAVE_aarch64_rev_reglistci (TARGET_SIMD)
#define HAVE_aarch64_rev_reglistxi (TARGET_SIMD)
#define HAVE_aarch64_ld1_x3_v8qi (TARGET_SIMD)
#define HAVE_aarch64_ld1_x3_v16qi (TARGET_SIMD)
#define HAVE_aarch64_ld1_x3_v4hi (TARGET_SIMD)
#define HAVE_aarch64_ld1_x3_v8hi (TARGET_SIMD)
#define HAVE_aarch64_ld1_x3_v2si (TARGET_SIMD)
#define HAVE_aarch64_ld1_x3_v4si (TARGET_SIMD)
#define HAVE_aarch64_ld1_x3_v4bf (TARGET_SIMD)
#define HAVE_aarch64_ld1_x3_v8bf (TARGET_SIMD)
#define HAVE_aarch64_ld1_x3_v2di (TARGET_SIMD)
#define HAVE_aarch64_ld1_x3_v4hf (TARGET_SIMD)
#define HAVE_aarch64_ld1_x3_v8hf (TARGET_SIMD)
#define HAVE_aarch64_ld1_x3_v2sf (TARGET_SIMD)
#define HAVE_aarch64_ld1_x3_v4sf (TARGET_SIMD)
#define HAVE_aarch64_ld1_x3_v2df (TARGET_SIMD)
#define HAVE_aarch64_ld1_x3_di (TARGET_SIMD)
#define HAVE_aarch64_ld1_x3_df (TARGET_SIMD)
#define HAVE_aarch64_ld1_x4_v8qi (TARGET_SIMD)
#define HAVE_aarch64_ld1_x4_v16qi (TARGET_SIMD)
#define HAVE_aarch64_ld1_x4_v4hi (TARGET_SIMD)
#define HAVE_aarch64_ld1_x4_v8hi (TARGET_SIMD)
#define HAVE_aarch64_ld1_x4_v2si (TARGET_SIMD)
#define HAVE_aarch64_ld1_x4_v4si (TARGET_SIMD)
#define HAVE_aarch64_ld1_x4_v4bf (TARGET_SIMD)
#define HAVE_aarch64_ld1_x4_v8bf (TARGET_SIMD)
#define HAVE_aarch64_ld1_x4_v2di (TARGET_SIMD)
#define HAVE_aarch64_ld1_x4_v4hf (TARGET_SIMD)
#define HAVE_aarch64_ld1_x4_v8hf (TARGET_SIMD)
#define HAVE_aarch64_ld1_x4_v2sf (TARGET_SIMD)
#define HAVE_aarch64_ld1_x4_v4sf (TARGET_SIMD)
#define HAVE_aarch64_ld1_x4_v2df (TARGET_SIMD)
#define HAVE_aarch64_ld1_x4_di (TARGET_SIMD)
#define HAVE_aarch64_ld1_x4_df (TARGET_SIMD)
#define HAVE_aarch64_st1_x2_v8qi (TARGET_SIMD)
#define HAVE_aarch64_st1_x2_v16qi (TARGET_SIMD)
#define HAVE_aarch64_st1_x2_v4hi (TARGET_SIMD)
#define HAVE_aarch64_st1_x2_v8hi (TARGET_SIMD)
#define HAVE_aarch64_st1_x2_v2si (TARGET_SIMD)
#define HAVE_aarch64_st1_x2_v4si (TARGET_SIMD)
#define HAVE_aarch64_st1_x2_v4bf (TARGET_SIMD)
#define HAVE_aarch64_st1_x2_v8bf (TARGET_SIMD)
#define HAVE_aarch64_st1_x2_v2di (TARGET_SIMD)
#define HAVE_aarch64_st1_x2_v4hf (TARGET_SIMD)
#define HAVE_aarch64_st1_x2_v8hf (TARGET_SIMD)
#define HAVE_aarch64_st1_x2_v2sf (TARGET_SIMD)
#define HAVE_aarch64_st1_x2_v4sf (TARGET_SIMD)
#define HAVE_aarch64_st1_x2_v2df (TARGET_SIMD)
#define HAVE_aarch64_st1_x2_di (TARGET_SIMD)
#define HAVE_aarch64_st1_x2_df (TARGET_SIMD)
#define HAVE_aarch64_st1_x3_v8qi (TARGET_SIMD)
#define HAVE_aarch64_st1_x3_v16qi (TARGET_SIMD)
#define HAVE_aarch64_st1_x3_v4hi (TARGET_SIMD)
#define HAVE_aarch64_st1_x3_v8hi (TARGET_SIMD)
#define HAVE_aarch64_st1_x3_v2si (TARGET_SIMD)
#define HAVE_aarch64_st1_x3_v4si (TARGET_SIMD)
#define HAVE_aarch64_st1_x3_v4bf (TARGET_SIMD)
#define HAVE_aarch64_st1_x3_v8bf (TARGET_SIMD)
#define HAVE_aarch64_st1_x3_v2di (TARGET_SIMD)
#define HAVE_aarch64_st1_x3_v4hf (TARGET_SIMD)
#define HAVE_aarch64_st1_x3_v8hf (TARGET_SIMD)
#define HAVE_aarch64_st1_x3_v2sf (TARGET_SIMD)
#define HAVE_aarch64_st1_x3_v4sf (TARGET_SIMD)
#define HAVE_aarch64_st1_x3_v2df (TARGET_SIMD)
#define HAVE_aarch64_st1_x3_di (TARGET_SIMD)
#define HAVE_aarch64_st1_x3_df (TARGET_SIMD)
#define HAVE_aarch64_st1_x4_v8qi (TARGET_SIMD)
#define HAVE_aarch64_st1_x4_v16qi (TARGET_SIMD)
#define HAVE_aarch64_st1_x4_v4hi (TARGET_SIMD)
#define HAVE_aarch64_st1_x4_v8hi (TARGET_SIMD)
#define HAVE_aarch64_st1_x4_v2si (TARGET_SIMD)
#define HAVE_aarch64_st1_x4_v4si (TARGET_SIMD)
#define HAVE_aarch64_st1_x4_v4bf (TARGET_SIMD)
#define HAVE_aarch64_st1_x4_v8bf (TARGET_SIMD)
#define HAVE_aarch64_st1_x4_v2di (TARGET_SIMD)
#define HAVE_aarch64_st1_x4_v4hf (TARGET_SIMD)
#define HAVE_aarch64_st1_x4_v8hf (TARGET_SIMD)
#define HAVE_aarch64_st1_x4_v2sf (TARGET_SIMD)
#define HAVE_aarch64_st1_x4_v4sf (TARGET_SIMD)
#define HAVE_aarch64_st1_x4_v2df (TARGET_SIMD)
#define HAVE_aarch64_st1_x4_di (TARGET_SIMD)
#define HAVE_aarch64_st1_x4_df (TARGET_SIMD)
#define HAVE_aarch64_be_ld1v8qi (TARGET_SIMD)
#define HAVE_aarch64_be_ld1v16qi (TARGET_SIMD)
#define HAVE_aarch64_be_ld1v4hi (TARGET_SIMD)
#define HAVE_aarch64_be_ld1v8hi (TARGET_SIMD)
#define HAVE_aarch64_be_ld1v2si (TARGET_SIMD)
#define HAVE_aarch64_be_ld1v4si (TARGET_SIMD)
#define HAVE_aarch64_be_ld1v2di (TARGET_SIMD)
#define HAVE_aarch64_be_ld1v4hf (TARGET_SIMD)
#define HAVE_aarch64_be_ld1v8hf (TARGET_SIMD)
#define HAVE_aarch64_be_ld1v4bf (TARGET_SIMD)
#define HAVE_aarch64_be_ld1v8bf (TARGET_SIMD)
#define HAVE_aarch64_be_ld1v2sf (TARGET_SIMD)
#define HAVE_aarch64_be_ld1v4sf (TARGET_SIMD)
#define HAVE_aarch64_be_ld1v2df (TARGET_SIMD)
#define HAVE_aarch64_be_ld1di (TARGET_SIMD)
#define HAVE_aarch64_be_st1v8qi (TARGET_SIMD)
#define HAVE_aarch64_be_st1v16qi (TARGET_SIMD)
#define HAVE_aarch64_be_st1v4hi (TARGET_SIMD)
#define HAVE_aarch64_be_st1v8hi (TARGET_SIMD)
#define HAVE_aarch64_be_st1v2si (TARGET_SIMD)
#define HAVE_aarch64_be_st1v4si (TARGET_SIMD)
#define HAVE_aarch64_be_st1v2di (TARGET_SIMD)
#define HAVE_aarch64_be_st1v4hf (TARGET_SIMD)
#define HAVE_aarch64_be_st1v8hf (TARGET_SIMD)
#define HAVE_aarch64_be_st1v4bf (TARGET_SIMD)
#define HAVE_aarch64_be_st1v8bf (TARGET_SIMD)
#define HAVE_aarch64_be_st1v2sf (TARGET_SIMD)
#define HAVE_aarch64_be_st1v4sf (TARGET_SIMD)
#define HAVE_aarch64_be_st1v2df (TARGET_SIMD)
#define HAVE_aarch64_be_st1di (TARGET_SIMD)
#define HAVE_aarch64_ld2v8qi_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld2v4hi_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld2v4hf_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld2v2si_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld2v2sf_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld2v4bf_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld2di_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld2df_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld3v8qi_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld3v4hi_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld3v4hf_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld3v2si_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld3v2sf_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld3v4bf_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld3di_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld3df_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld4v8qi_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld4v4hi_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld4v4hf_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld4v2si_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld4v2sf_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld4v4bf_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld4di_dreg (TARGET_SIMD)
#define HAVE_aarch64_ld4df_dreg (TARGET_SIMD)
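/* TBL/TBX table lookups, with one- to four-register tables.  */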
#define HAVE_aarch64_tbl1v8qi (TARGET_SIMD)
#define HAVE_aarch64_tbl1v16qi (TARGET_SIMD)
#define HAVE_aarch64_tbl2v16qi (TARGET_SIMD)
#define HAVE_aarch64_tbl3v8qi (TARGET_SIMD)
#define HAVE_aarch64_tbl3v16qi (TARGET_SIMD)
#define HAVE_aarch64_tbx4v8qi (TARGET_SIMD)
#define HAVE_aarch64_tbx4v16qi (TARGET_SIMD)
#define HAVE_aarch64_qtbl3v8qi (TARGET_SIMD)
#define HAVE_aarch64_qtbl3v16qi (TARGET_SIMD)
#define HAVE_aarch64_qtbx3v8qi (TARGET_SIMD)
#define HAVE_aarch64_qtbx3v16qi (TARGET_SIMD)
#define HAVE_aarch64_qtbl4v8qi (TARGET_SIMD)
#define HAVE_aarch64_qtbl4v16qi (TARGET_SIMD)
#define HAVE_aarch64_qtbx4v8qi (TARGET_SIMD)
#define HAVE_aarch64_qtbx4v16qi (TARGET_SIMD)
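/* Permutes: combine two D-registers into a Q-register, ZIP1/ZIP2
   interleave, TRN1/TRN2 transpose, UZP1/UZP2 unzip, EXT extracts a
   vector from the concatenation of two sources, and REV64/REV32/REV16
   reverse elements within containers.  */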
#define HAVE_aarch64_combinev16qi (TARGET_SIMD)
#define HAVE_aarch64_zip1v8qi (TARGET_SIMD)
#define HAVE_aarch64_zip2v8qi (TARGET_SIMD)
#define HAVE_aarch64_trn1v8qi (TARGET_SIMD)
#define HAVE_aarch64_trn2v8qi (TARGET_SIMD)
#define HAVE_aarch64_uzp1v8qi (TARGET_SIMD)
#define HAVE_aarch64_uzp2v8qi (TARGET_SIMD)
#define HAVE_aarch64_zip1v16qi (TARGET_SIMD)
#define HAVE_aarch64_zip2v16qi (TARGET_SIMD)
#define HAVE_aarch64_trn1v16qi (TARGET_SIMD)
#define HAVE_aarch64_trn2v16qi (TARGET_SIMD)
#define HAVE_aarch64_uzp1v16qi (TARGET_SIMD)
#define HAVE_aarch64_uzp2v16qi (TARGET_SIMD)
#define HAVE_aarch64_zip1v4hi (TARGET_SIMD)
#define HAVE_aarch64_zip2v4hi (TARGET_SIMD)
#define HAVE_aarch64_trn1v4hi (TARGET_SIMD)
#define HAVE_aarch64_trn2v4hi (TARGET_SIMD)
#define HAVE_aarch64_uzp1v4hi (TARGET_SIMD)
#define HAVE_aarch64_uzp2v4hi (TARGET_SIMD)
#define HAVE_aarch64_zip1v8hi (TARGET_SIMD)
#define HAVE_aarch64_zip2v8hi (TARGET_SIMD)
#define HAVE_aarch64_trn1v8hi (TARGET_SIMD)
#define HAVE_aarch64_trn2v8hi (TARGET_SIMD)
#define HAVE_aarch64_uzp1v8hi (TARGET_SIMD)
#define HAVE_aarch64_uzp2v8hi (TARGET_SIMD)
#define HAVE_aarch64_zip1v2si (TARGET_SIMD)
#define HAVE_aarch64_zip2v2si (TARGET_SIMD)
#define HAVE_aarch64_trn1v2si (TARGET_SIMD)
#define HAVE_aarch64_trn2v2si (TARGET_SIMD)
#define HAVE_aarch64_uzp1v2si (TARGET_SIMD)
#define HAVE_aarch64_uzp2v2si (TARGET_SIMD)
#define HAVE_aarch64_zip1v4si (TARGET_SIMD)
#define HAVE_aarch64_zip2v4si (TARGET_SIMD)
#define HAVE_aarch64_trn1v4si (TARGET_SIMD)
#define HAVE_aarch64_trn2v4si (TARGET_SIMD)
#define HAVE_aarch64_uzp1v4si (TARGET_SIMD)
#define HAVE_aarch64_uzp2v4si (TARGET_SIMD)
#define HAVE_aarch64_zip1v2di (TARGET_SIMD)
#define HAVE_aarch64_zip2v2di (TARGET_SIMD)
#define HAVE_aarch64_trn1v2di (TARGET_SIMD)
#define HAVE_aarch64_trn2v2di (TARGET_SIMD)
#define HAVE_aarch64_uzp1v2di (TARGET_SIMD)
#define HAVE_aarch64_uzp2v2di (TARGET_SIMD)
#define HAVE_aarch64_zip1v4hf (TARGET_SIMD)
#define HAVE_aarch64_zip2v4hf (TARGET_SIMD)
#define HAVE_aarch64_trn1v4hf (TARGET_SIMD)
#define HAVE_aarch64_trn2v4hf (TARGET_SIMD)
#define HAVE_aarch64_uzp1v4hf (TARGET_SIMD)
#define HAVE_aarch64_uzp2v4hf (TARGET_SIMD)
#define HAVE_aarch64_zip1v8hf (TARGET_SIMD)
#define HAVE_aarch64_zip2v8hf (TARGET_SIMD)
#define HAVE_aarch64_trn1v8hf (TARGET_SIMD)
#define HAVE_aarch64_trn2v8hf (TARGET_SIMD)
#define HAVE_aarch64_uzp1v8hf (TARGET_SIMD)
#define HAVE_aarch64_uzp2v8hf (TARGET_SIMD)
#define HAVE_aarch64_zip1v4bf (TARGET_SIMD)
#define HAVE_aarch64_zip2v4bf (TARGET_SIMD)
#define HAVE_aarch64_trn1v4bf (TARGET_SIMD)
#define HAVE_aarch64_trn2v4bf (TARGET_SIMD)
#define HAVE_aarch64_uzp1v4bf (TARGET_SIMD)
#define HAVE_aarch64_uzp2v4bf (TARGET_SIMD)
#define HAVE_aarch64_zip1v8bf (TARGET_SIMD)
#define HAVE_aarch64_zip2v8bf (TARGET_SIMD)
#define HAVE_aarch64_trn1v8bf (TARGET_SIMD)
#define HAVE_aarch64_trn2v8bf (TARGET_SIMD)
#define HAVE_aarch64_uzp1v8bf (TARGET_SIMD)
#define HAVE_aarch64_uzp2v8bf (TARGET_SIMD)
#define HAVE_aarch64_zip1v2sf (TARGET_SIMD)
#define HAVE_aarch64_zip2v2sf (TARGET_SIMD)
#define HAVE_aarch64_trn1v2sf (TARGET_SIMD)
#define HAVE_aarch64_trn2v2sf (TARGET_SIMD)
#define HAVE_aarch64_uzp1v2sf (TARGET_SIMD)
#define HAVE_aarch64_uzp2v2sf (TARGET_SIMD)
#define HAVE_aarch64_zip1v4sf (TARGET_SIMD)
#define HAVE_aarch64_zip2v4sf (TARGET_SIMD)
#define HAVE_aarch64_trn1v4sf (TARGET_SIMD)
#define HAVE_aarch64_trn2v4sf (TARGET_SIMD)
#define HAVE_aarch64_uzp1v4sf (TARGET_SIMD)
#define HAVE_aarch64_uzp2v4sf (TARGET_SIMD)
#define HAVE_aarch64_zip1v2df (TARGET_SIMD)
#define HAVE_aarch64_zip2v2df (TARGET_SIMD)
#define HAVE_aarch64_trn1v2df (TARGET_SIMD)
#define HAVE_aarch64_trn2v2df (TARGET_SIMD)
#define HAVE_aarch64_uzp1v2df (TARGET_SIMD)
#define HAVE_aarch64_uzp2v2df (TARGET_SIMD)
#define HAVE_aarch64_extv8qi (TARGET_SIMD)
#define HAVE_aarch64_extv16qi (TARGET_SIMD)
#define HAVE_aarch64_extv4hi (TARGET_SIMD)
#define HAVE_aarch64_extv8hi (TARGET_SIMD)
#define HAVE_aarch64_extv2si (TARGET_SIMD)
#define HAVE_aarch64_extv4si (TARGET_SIMD)
#define HAVE_aarch64_extv2di (TARGET_SIMD)
#define HAVE_aarch64_extv4hf (TARGET_SIMD)
#define HAVE_aarch64_extv8hf (TARGET_SIMD)
#define HAVE_aarch64_extv4bf (TARGET_SIMD)
#define HAVE_aarch64_extv8bf (TARGET_SIMD)
#define HAVE_aarch64_extv2sf (TARGET_SIMD)
#define HAVE_aarch64_extv4sf (TARGET_SIMD)
#define HAVE_aarch64_extv2df (TARGET_SIMD)
#define HAVE_aarch64_rev64v8qi (TARGET_SIMD)
#define HAVE_aarch64_rev32v8qi (TARGET_SIMD)
#define HAVE_aarch64_rev16v8qi (TARGET_SIMD)
#define HAVE_aarch64_rev64v16qi (TARGET_SIMD)
#define HAVE_aarch64_rev32v16qi (TARGET_SIMD)
#define HAVE_aarch64_rev16v16qi (TARGET_SIMD)
#define HAVE_aarch64_rev64v4hi (TARGET_SIMD)
#define HAVE_aarch64_rev32v4hi (TARGET_SIMD)
#define HAVE_aarch64_rev16v4hi (TARGET_SIMD)
#define HAVE_aarch64_rev64v8hi (TARGET_SIMD)
#define HAVE_aarch64_rev32v8hi (TARGET_SIMD)
#define HAVE_aarch64_rev16v8hi (TARGET_SIMD)
#define HAVE_aarch64_rev64v2si (TARGET_SIMD)
#define HAVE_aarch64_rev32v2si (TARGET_SIMD)
#define HAVE_aarch64_rev16v2si (TARGET_SIMD)
#define HAVE_aarch64_rev64v4si (TARGET_SIMD)
#define HAVE_aarch64_rev32v4si (TARGET_SIMD)
#define HAVE_aarch64_rev16v4si (TARGET_SIMD)
#define HAVE_aarch64_rev64v2di (TARGET_SIMD)
#define HAVE_aarch64_rev32v2di (TARGET_SIMD)
#define HAVE_aarch64_rev16v2di (TARGET_SIMD)
#define HAVE_aarch64_rev64v4hf (TARGET_SIMD)
#define HAVE_aarch64_rev32v4hf (TARGET_SIMD)
#define HAVE_aarch64_rev16v4hf (TARGET_SIMD)
#define HAVE_aarch64_rev64v8hf (TARGET_SIMD)
#define HAVE_aarch64_rev32v8hf (TARGET_SIMD)
#define HAVE_aarch64_rev16v8hf (TARGET_SIMD)
#define HAVE_aarch64_rev64v4bf (TARGET_SIMD)
#define HAVE_aarch64_rev32v4bf (TARGET_SIMD)
#define HAVE_aarch64_rev16v4bf (TARGET_SIMD)
#define HAVE_aarch64_rev64v8bf (TARGET_SIMD)
#define HAVE_aarch64_rev32v8bf (TARGET_SIMD)
#define HAVE_aarch64_rev16v8bf (TARGET_SIMD)
#define HAVE_aarch64_rev64v2sf (TARGET_SIMD)
#define HAVE_aarch64_rev32v2sf (TARGET_SIMD)
#define HAVE_aarch64_rev16v2sf (TARGET_SIMD)
#define HAVE_aarch64_rev64v4sf (TARGET_SIMD)
#define HAVE_aarch64_rev32v4sf (TARGET_SIMD)
#define HAVE_aarch64_rev16v4sf (TARGET_SIMD)
#define HAVE_aarch64_rev64v2df (TARGET_SIMD)
#define HAVE_aarch64_rev32v2df (TARGET_SIMD)
#define HAVE_aarch64_rev16v2df (TARGET_SIMD)
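/* ST2/ST3/ST4 D-register (64-bit vector) forms.  */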
#define HAVE_aarch64_st2v8qi_dreg (TARGET_SIMD)
#define HAVE_aarch64_st2v4hi_dreg (TARGET_SIMD)
#define HAVE_aarch64_st2v4hf_dreg (TARGET_SIMD)
#define HAVE_aarch64_st2v2si_dreg (TARGET_SIMD)
#define HAVE_aarch64_st2v2sf_dreg (TARGET_SIMD)
#define HAVE_aarch64_st2v4bf_dreg (TARGET_SIMD)
#define HAVE_aarch64_st2di_dreg (TARGET_SIMD)
#define HAVE_aarch64_st2df_dreg (TARGET_SIMD)
#define HAVE_aarch64_st3v8qi_dreg (TARGET_SIMD)
#define HAVE_aarch64_st3v4hi_dreg (TARGET_SIMD)
#define HAVE_aarch64_st3v4hf_dreg (TARGET_SIMD)
#define HAVE_aarch64_st3v2si_dreg (TARGET_SIMD)
#define HAVE_aarch64_st3v2sf_dreg (TARGET_SIMD)
#define HAVE_aarch64_st3v4bf_dreg (TARGET_SIMD)
#define HAVE_aarch64_st3di_dreg (TARGET_SIMD)
#define HAVE_aarch64_st3df_dreg (TARGET_SIMD)
#define HAVE_aarch64_st4v8qi_dreg (TARGET_SIMD)
#define HAVE_aarch64_st4v4hi_dreg (TARGET_SIMD)
#define HAVE_aarch64_st4v4hf_dreg (TARGET_SIMD)
#define HAVE_aarch64_st4v2si_dreg (TARGET_SIMD)
#define HAVE_aarch64_st4v2sf_dreg (TARGET_SIMD)
#define HAVE_aarch64_st4v4bf_dreg (TARGET_SIMD)
#define HAVE_aarch64_st4di_dreg (TARGET_SIMD)
#define HAVE_aarch64_st4df_dreg (TARGET_SIMD)
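/* Two-register LD1 (x2) loads.  */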
#define HAVE_aarch64_simd_ld1v16qi_x2 (TARGET_SIMD)
#define HAVE_aarch64_simd_ld1v8hi_x2 (TARGET_SIMD)
#define HAVE_aarch64_simd_ld1v4si_x2 (TARGET_SIMD)
#define HAVE_aarch64_simd_ld1v2di_x2 (TARGET_SIMD)
#define HAVE_aarch64_simd_ld1v8hf_x2 (TARGET_SIMD)
#define HAVE_aarch64_simd_ld1v4sf_x2 (TARGET_SIMD)
#define HAVE_aarch64_simd_ld1v2df_x2 (TARGET_SIMD)
#define HAVE_aarch64_simd_ld1v8bf_x2 (TARGET_SIMD)
#define HAVE_aarch64_simd_ld1v8qi_x2 (TARGET_SIMD)
#define HAVE_aarch64_simd_ld1v4hi_x2 (TARGET_SIMD)
#define HAVE_aarch64_simd_ld1v4bf_x2 (TARGET_SIMD)
#define HAVE_aarch64_simd_ld1v4hf_x2 (TARGET_SIMD)
#define HAVE_aarch64_simd_ld1v2si_x2 (TARGET_SIMD)
#define HAVE_aarch64_simd_ld1v2sf_x2 (TARGET_SIMD)
#define HAVE_aarch64_simd_ld1di_x2 (TARGET_SIMD)
#define HAVE_aarch64_simd_ld1df_x2 (TARGET_SIMD)
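/* Reciprocal operations: FRECPE estimate, FRECPS step, FRECPX
   exponent, and the unsigned URECPE estimate.  */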
#define HAVE_aarch64_frecpev4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_frecpev8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_frecpev2sf (TARGET_SIMD)
#define HAVE_aarch64_frecpev4sf (TARGET_SIMD)
#define HAVE_aarch64_frecpev2df (TARGET_SIMD)
#define HAVE_aarch64_frecpehf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_frecpesf (TARGET_SIMD)
#define HAVE_aarch64_frecpedf (TARGET_SIMD)
#define HAVE_aarch64_frecpxhf ((TARGET_SIMD) && (AARCH64_ISA_F16))
#define HAVE_aarch64_frecpxsf (TARGET_SIMD)
#define HAVE_aarch64_frecpxdf (TARGET_SIMD)
#define HAVE_aarch64_frecpsv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_frecpsv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_frecpsv2sf (TARGET_SIMD)
#define HAVE_aarch64_frecpsv4sf (TARGET_SIMD)
#define HAVE_aarch64_frecpsv2df (TARGET_SIMD)
#define HAVE_aarch64_frecpshf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_aarch64_frecpssf (TARGET_SIMD)
#define HAVE_aarch64_frecpsdf (TARGET_SIMD)
#define HAVE_aarch64_urecpev2si (TARGET_SIMD)
#define HAVE_aarch64_urecpev4si (TARGET_SIMD)
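/* The crypto patterns below pair TARGET_SIMD with the ISA flag for
   the relevant extension (TARGET_AES, TARGET_SHA2, TARGET_SHA3,
   TARGET_SM4), so each flag is nonzero only when both are
   enabled.  */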
#define HAVE_aarch64_crypto_aesev16qi (TARGET_SIMD && TARGET_AES)
#define HAVE_aarch64_crypto_aesdv16qi (TARGET_SIMD && TARGET_AES)
#define HAVE_aarch64_crypto_aesmcv16qi (TARGET_SIMD && TARGET_AES)
#define HAVE_aarch64_crypto_aesimcv16qi (TARGET_SIMD && TARGET_AES)
#define HAVE_aarch64_crypto_sha1hsi (TARGET_SIMD && TARGET_SHA2)
#define HAVE_aarch64_crypto_sha1hv4si (TARGET_SIMD && TARGET_SHA2 && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_be_crypto_sha1hv4si (TARGET_SIMD && TARGET_SHA2 && BYTES_BIG_ENDIAN)
#define HAVE_aarch64_crypto_sha1su1v4si (TARGET_SIMD && TARGET_SHA2)
#define HAVE_aarch64_crypto_sha1cv4si (TARGET_SIMD && TARGET_SHA2)
#define HAVE_aarch64_crypto_sha1mv4si (TARGET_SIMD && TARGET_SHA2)
#define HAVE_aarch64_crypto_sha1pv4si (TARGET_SIMD && TARGET_SHA2)
#define HAVE_aarch64_crypto_sha1su0v4si (TARGET_SIMD && TARGET_SHA2)
#define HAVE_aarch64_crypto_sha256hv4si (TARGET_SIMD && TARGET_SHA2)
#define HAVE_aarch64_crypto_sha256h2v4si (TARGET_SIMD && TARGET_SHA2)
#define HAVE_aarch64_crypto_sha256su0v4si (TARGET_SIMD && TARGET_SHA2)
#define HAVE_aarch64_crypto_sha256su1v4si (TARGET_SIMD && TARGET_SHA2)
#define HAVE_aarch64_crypto_sha512hqv2di (TARGET_SIMD && TARGET_SHA3)
#define HAVE_aarch64_crypto_sha512h2qv2di (TARGET_SIMD && TARGET_SHA3)
#define HAVE_aarch64_crypto_sha512su0qv2di (TARGET_SIMD && TARGET_SHA3)
#define HAVE_aarch64_crypto_sha512su1qv2di (TARGET_SIMD && TARGET_SHA3)
#define HAVE_eor3qv16qi4 (TARGET_SIMD && TARGET_SHA3)
#define HAVE_eor3qv8hi4 (TARGET_SIMD && TARGET_SHA3)
#define HAVE_eor3qv4si4 (TARGET_SIMD && TARGET_SHA3)
#define HAVE_eor3qv2di4 (TARGET_SIMD && TARGET_SHA3)
#define HAVE_aarch64_rax1qv2di (TARGET_SIMD && TARGET_SHA3)
#define HAVE_aarch64_xarqv2di (TARGET_SIMD && TARGET_SHA3)
#define HAVE_bcaxqv16qi4 (TARGET_SIMD && TARGET_SHA3)
#define HAVE_bcaxqv8hi4 (TARGET_SIMD && TARGET_SHA3)
#define HAVE_bcaxqv4si4 (TARGET_SIMD && TARGET_SHA3)
#define HAVE_bcaxqv2di4 (TARGET_SIMD && TARGET_SHA3)
#define HAVE_aarch64_sm3ss1qv4si (TARGET_SIMD && TARGET_SM4)
#define HAVE_aarch64_sm3tt1aqv4si (TARGET_SIMD && TARGET_SM4)
#define HAVE_aarch64_sm3tt1bqv4si (TARGET_SIMD && TARGET_SM4)
#define HAVE_aarch64_sm3tt2aqv4si (TARGET_SIMD && TARGET_SM4)
#define HAVE_aarch64_sm3tt2bqv4si (TARGET_SIMD && TARGET_SM4)
#define HAVE_aarch64_sm3partw1qv4si (TARGET_SIMD && TARGET_SM4)
#define HAVE_aarch64_sm3partw2qv4si (TARGET_SIMD && TARGET_SM4)
#define HAVE_aarch64_sm4eqv4si (TARGET_SIMD && TARGET_SM4)
#define HAVE_aarch64_sm4ekeyqv4si (TARGET_SIMD && TARGET_SM4)
#define HAVE_aarch64_simd_fmlal_lowv2sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlalq_lowv4sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlsl_lowv2sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlslq_lowv4sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlal_highv2sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlalq_highv4sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlsl_highv2sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlslq_highv4sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlal_lane_lowv2sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlsl_lane_lowv2sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlal_lane_highv2sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlsl_lane_highv2sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlalq_laneq_lowv4sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlslq_laneq_lowv4sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlalq_laneq_highv4sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlslq_laneq_highv4sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlal_laneq_lowv2sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlsl_laneq_lowv2sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlal_laneq_highv2sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlsl_laneq_highv2sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlalq_lane_lowv4sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlslq_lane_lowv4sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlalq_lane_highv4sf (TARGET_F16FML)
#define HAVE_aarch64_simd_fmlslq_lane_highv4sf (TARGET_F16FML)
#define HAVE_aarch64_crypto_pmulldi (TARGET_SIMD && TARGET_AES)
#define HAVE_aarch64_crypto_pmullv2di (TARGET_SIMD && TARGET_AES)
#define HAVE_extendv8qiv8hi2 (TARGET_SIMD)
#define HAVE_zero_extendv8qiv8hi2 (TARGET_SIMD)
#define HAVE_extendv4hiv4si2 (TARGET_SIMD)
#define HAVE_zero_extendv4hiv4si2 (TARGET_SIMD)
#define HAVE_extendv2siv2di2 (TARGET_SIMD)
#define HAVE_zero_extendv2siv2di2 (TARGET_SIMD)
#define HAVE_truncv8hiv8qi2 (TARGET_SIMD)
#define HAVE_truncv4siv4hi2 (TARGET_SIMD)
#define HAVE_truncv2div2si2 (TARGET_SIMD)
#define HAVE_aarch64_bfdotv2sf (TARGET_BF16_SIMD)
#define HAVE_aarch64_bfdotv4sf (TARGET_BF16_SIMD)
#define HAVE_aarch64_bfdot_lanev2sf (TARGET_BF16_SIMD)
#define HAVE_aarch64_bfdot_laneqv2sf (TARGET_BF16_SIMD)
#define HAVE_aarch64_bfdot_lanev4sf (TARGET_BF16_SIMD)
#define HAVE_aarch64_bfdot_laneqv4sf (TARGET_BF16_SIMD)
#define HAVE_aarch64_bfmmlaqv4sf (TARGET_BF16_SIMD)
#define HAVE_aarch64_bfmlalbv4sf (TARGET_BF16_SIMD)
#define HAVE_aarch64_bfmlaltv4sf (TARGET_BF16_SIMD)
#define HAVE_aarch64_bfmlalb_lanev4sf (TARGET_BF16_SIMD)
#define HAVE_aarch64_bfmlalt_lanev4sf (TARGET_BF16_SIMD)
#define HAVE_aarch64_bfmlalb_lane_qv4sf (TARGET_BF16_SIMD)
#define HAVE_aarch64_bfmlalt_lane_qv4sf (TARGET_BF16_SIMD)
#define HAVE_aarch64_simd_smmlav16qi (TARGET_I8MM)
#define HAVE_aarch64_simd_ummlav16qi (TARGET_I8MM)
#define HAVE_aarch64_simd_usmmlav16qi (TARGET_I8MM)
#define HAVE_aarch64_bfcvtnv4bf (TARGET_BF16_SIMD)
#define HAVE_aarch64_bfcvtn_qv8bf (TARGET_BF16_SIMD)
#define HAVE_aarch64_bfcvtn2v8bf (TARGET_BF16_SIMD)
#define HAVE_aarch64_bfcvtbf (TARGET_BF16_FP)
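/* Atomic operation patterns.  The base forms are always available
   (condition 1); the _lse variants implement the Armv8.1-A Large
   System Extensions forms and are guarded by TARGET_LSE.  As with
   every HAVE_* flag in this file, a caller tests the flag before
   emitting the matching pattern; an illustrative sketch (not a call
   site from this file, with the operand list elided):

     if (HAVE_aarch64_compare_and_swapsi_lse)
       emit_insn (gen_aarch64_compare_and_swapsi_lse (...));  */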
#define HAVE_aarch64_compare_and_swapqi 1
#define HAVE_aarch64_compare_and_swaphi 1
#define HAVE_aarch64_compare_and_swapsi 1
#define HAVE_aarch64_compare_and_swapdi 1
#define HAVE_aarch64_compare_and_swapti 1
#define HAVE_aarch64_compare_and_swapqi_lse (TARGET_LSE)
#define HAVE_aarch64_compare_and_swaphi_lse (TARGET_LSE)
#define HAVE_aarch64_compare_and_swapsi_lse (TARGET_LSE)
#define HAVE_aarch64_compare_and_swapdi_lse (TARGET_LSE)
#define HAVE_aarch64_compare_and_swapti_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_exchangeqi 1
#define HAVE_aarch64_atomic_exchangehi 1
#define HAVE_aarch64_atomic_exchangesi 1
#define HAVE_aarch64_atomic_exchangedi 1
#define HAVE_aarch64_atomic_exchangeqi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_exchangehi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_exchangesi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_exchangedi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_addqi 1
#define HAVE_aarch64_atomic_subqi 1
#define HAVE_aarch64_atomic_orqi 1
#define HAVE_aarch64_atomic_xorqi 1
#define HAVE_aarch64_atomic_andqi 1
#define HAVE_aarch64_atomic_addhi 1
#define HAVE_aarch64_atomic_subhi 1
#define HAVE_aarch64_atomic_orhi 1
#define HAVE_aarch64_atomic_xorhi 1
#define HAVE_aarch64_atomic_andhi 1
#define HAVE_aarch64_atomic_addsi 1
#define HAVE_aarch64_atomic_subsi 1
#define HAVE_aarch64_atomic_orsi 1
#define HAVE_aarch64_atomic_xorsi 1
#define HAVE_aarch64_atomic_andsi 1
#define HAVE_aarch64_atomic_adddi 1
#define HAVE_aarch64_atomic_subdi 1
#define HAVE_aarch64_atomic_ordi 1
#define HAVE_aarch64_atomic_xordi 1
#define HAVE_aarch64_atomic_anddi 1
#define HAVE_aarch64_atomic_iorqi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_bicqi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_xorqi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_addqi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_iorhi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_bichi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_xorhi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_addhi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_iorsi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_bicsi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_xorsi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_addsi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_iordi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_bicdi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_xordi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_adddi_lse (TARGET_LSE)
#define HAVE_atomic_nandqi 1
#define HAVE_atomic_nandhi 1
#define HAVE_atomic_nandsi 1
#define HAVE_atomic_nanddi 1
#define HAVE_aarch64_atomic_fetch_addqi 1
#define HAVE_aarch64_atomic_fetch_subqi 1
#define HAVE_aarch64_atomic_fetch_orqi 1
#define HAVE_aarch64_atomic_fetch_xorqi 1
#define HAVE_aarch64_atomic_fetch_andqi 1
#define HAVE_aarch64_atomic_fetch_addhi 1
#define HAVE_aarch64_atomic_fetch_subhi 1
#define HAVE_aarch64_atomic_fetch_orhi 1
#define HAVE_aarch64_atomic_fetch_xorhi 1
#define HAVE_aarch64_atomic_fetch_andhi 1
#define HAVE_aarch64_atomic_fetch_addsi 1
#define HAVE_aarch64_atomic_fetch_subsi 1
#define HAVE_aarch64_atomic_fetch_orsi 1
#define HAVE_aarch64_atomic_fetch_xorsi 1
#define HAVE_aarch64_atomic_fetch_andsi 1
#define HAVE_aarch64_atomic_fetch_adddi 1
#define HAVE_aarch64_atomic_fetch_subdi 1
#define HAVE_aarch64_atomic_fetch_ordi 1
#define HAVE_aarch64_atomic_fetch_xordi 1
#define HAVE_aarch64_atomic_fetch_anddi 1
#define HAVE_aarch64_atomic_fetch_iorqi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_fetch_bicqi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_fetch_xorqi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_fetch_addqi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_fetch_iorhi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_fetch_bichi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_fetch_xorhi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_fetch_addhi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_fetch_iorsi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_fetch_bicsi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_fetch_xorsi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_fetch_addsi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_fetch_iordi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_fetch_bicdi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_fetch_xordi_lse (TARGET_LSE)
#define HAVE_aarch64_atomic_fetch_adddi_lse (TARGET_LSE)
#define HAVE_atomic_fetch_nandqi 1
#define HAVE_atomic_fetch_nandhi 1
#define HAVE_atomic_fetch_nandsi 1
#define HAVE_atomic_fetch_nanddi 1
#define HAVE_aarch64_atomic_add_fetchqi 1
#define HAVE_aarch64_atomic_sub_fetchqi 1
#define HAVE_aarch64_atomic_or_fetchqi 1
#define HAVE_aarch64_atomic_xor_fetchqi 1
#define HAVE_aarch64_atomic_and_fetchqi 1
#define HAVE_aarch64_atomic_add_fetchhi 1
#define HAVE_aarch64_atomic_sub_fetchhi 1
#define HAVE_aarch64_atomic_or_fetchhi 1
#define HAVE_aarch64_atomic_xor_fetchhi 1
#define HAVE_aarch64_atomic_and_fetchhi 1
#define HAVE_aarch64_atomic_add_fetchsi 1
#define HAVE_aarch64_atomic_sub_fetchsi 1
#define HAVE_aarch64_atomic_or_fetchsi 1
#define HAVE_aarch64_atomic_xor_fetchsi 1
#define HAVE_aarch64_atomic_and_fetchsi 1
#define HAVE_aarch64_atomic_add_fetchdi 1
#define HAVE_aarch64_atomic_sub_fetchdi 1
#define HAVE_aarch64_atomic_or_fetchdi 1
#define HAVE_aarch64_atomic_xor_fetchdi 1
#define HAVE_aarch64_atomic_and_fetchdi 1
#define HAVE_atomic_nand_fetchqi 1
#define HAVE_atomic_nand_fetchhi 1
#define HAVE_atomic_nand_fetchsi 1
#define HAVE_atomic_nand_fetchdi 1
#define HAVE_atomic_loadqi 1
#define HAVE_atomic_loadhi 1
#define HAVE_atomic_loadsi 1
#define HAVE_atomic_loaddi 1
#define HAVE_atomic_storeqi 1
#define HAVE_atomic_storehi 1
#define HAVE_atomic_storesi 1
#define HAVE_atomic_storedi 1
#define HAVE_aarch64_load_exclusiveqi 1
#define HAVE_aarch64_load_exclusivehi 1
#define HAVE_aarch64_load_exclusivesi 1
#define HAVE_aarch64_load_exclusivedi 1
#define HAVE_aarch64_load_exclusive_pair 1
#define HAVE_aarch64_store_exclusiveqi 1
#define HAVE_aarch64_store_exclusivehi 1
#define HAVE_aarch64_store_exclusivesi 1
#define HAVE_aarch64_store_exclusivedi 1
#define HAVE_aarch64_store_exclusive_pair 1
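/* SVE predicated move patterns.  Besides TARGET_SVE, each condition
   requires at least one of the destination (operands[0]) and the
   source (operands[2]) to be a register, which rules out direct
   memory-to-memory moves.  */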
#define HAVE_aarch64_pred_movvnx16qi (TARGET_SVE \
&& (register_operand (operands[0], VNx16QImode) \
|| register_operand (operands[2], VNx16QImode)))
#define HAVE_aarch64_pred_movvnx8qi (TARGET_SVE \
&& (register_operand (operands[0], VNx8QImode) \
|| register_operand (operands[2], VNx8QImode)))
#define HAVE_aarch64_pred_movvnx4qi (TARGET_SVE \
&& (register_operand (operands[0], VNx4QImode) \
|| register_operand (operands[2], VNx4QImode)))
#define HAVE_aarch64_pred_movvnx2qi (TARGET_SVE \
&& (register_operand (operands[0], VNx2QImode) \
|| register_operand (operands[2], VNx2QImode)))
#define HAVE_aarch64_pred_movvnx8hi (TARGET_SVE \
&& (register_operand (operands[0], VNx8HImode) \
|| register_operand (operands[2], VNx8HImode)))
#define HAVE_aarch64_pred_movvnx4hi (TARGET_SVE \
&& (register_operand (operands[0], VNx4HImode) \
|| register_operand (operands[2], VNx4HImode)))
#define HAVE_aarch64_pred_movvnx2hi (TARGET_SVE \
&& (register_operand (operands[0], VNx2HImode) \
|| register_operand (operands[2], VNx2HImode)))
#define HAVE_aarch64_pred_movvnx8hf (TARGET_SVE \
&& (register_operand (operands[0], VNx8HFmode) \
|| register_operand (operands[2], VNx8HFmode)))
#define HAVE_aarch64_pred_movvnx4hf (TARGET_SVE \
&& (register_operand (operands[0], VNx4HFmode) \
|| register_operand (operands[2], VNx4HFmode)))
#define HAVE_aarch64_pred_movvnx2hf (TARGET_SVE \
&& (register_operand (operands[0], VNx2HFmode) \
|| register_operand (operands[2], VNx2HFmode)))
#define HAVE_aarch64_pred_movvnx8bf (TARGET_SVE \
&& (register_operand (operands[0], VNx8BFmode) \
|| register_operand (operands[2], VNx8BFmode)))
#define HAVE_aarch64_pred_movvnx4si (TARGET_SVE \
&& (register_operand (operands[0], VNx4SImode) \
|| register_operand (operands[2], VNx4SImode)))
#define HAVE_aarch64_pred_movvnx2si (TARGET_SVE \
&& (register_operand (operands[0], VNx2SImode) \
|| register_operand (operands[2], VNx2SImode)))
#define HAVE_aarch64_pred_movvnx4sf (TARGET_SVE \
&& (register_operand (operands[0], VNx4SFmode) \
|| register_operand (operands[2], VNx4SFmode)))
#define HAVE_aarch64_pred_movvnx2sf (TARGET_SVE \
&& (register_operand (operands[0], VNx2SFmode) \
|| register_operand (operands[2], VNx2SFmode)))
#define HAVE_aarch64_pred_movvnx2di (TARGET_SVE \
&& (register_operand (operands[0], VNx2DImode) \
|| register_operand (operands[2], VNx2DImode)))
#define HAVE_aarch64_pred_movvnx2df (TARGET_SVE \
&& (register_operand (operands[0], VNx2DFmode) \
|| register_operand (operands[2], VNx2DFmode)))
#define HAVE_aarch64_pred_movvnx32qi (TARGET_SVE \
&& (register_operand (operands[0], VNx32QImode) \
|| register_operand (operands[2], VNx32QImode)))
#define HAVE_aarch64_pred_movvnx16hi (TARGET_SVE \
&& (register_operand (operands[0], VNx16HImode) \
|| register_operand (operands[2], VNx16HImode)))
#define HAVE_aarch64_pred_movvnx8si (TARGET_SVE \
&& (register_operand (operands[0], VNx8SImode) \
|| register_operand (operands[2], VNx8SImode)))
#define HAVE_aarch64_pred_movvnx4di (TARGET_SVE \
&& (register_operand (operands[0], VNx4DImode) \
|| register_operand (operands[2], VNx4DImode)))
#define HAVE_aarch64_pred_movvnx16bf (TARGET_SVE \
&& (register_operand (operands[0], VNx16BFmode) \
|| register_operand (operands[2], VNx16BFmode)))
#define HAVE_aarch64_pred_movvnx16hf (TARGET_SVE \
&& (register_operand (operands[0], VNx16HFmode) \
|| register_operand (operands[2], VNx16HFmode)))
#define HAVE_aarch64_pred_movvnx8sf (TARGET_SVE \
&& (register_operand (operands[0], VNx8SFmode) \
|| register_operand (operands[2], VNx8SFmode)))
#define HAVE_aarch64_pred_movvnx4df (TARGET_SVE \
&& (register_operand (operands[0], VNx4DFmode) \
|| register_operand (operands[2], VNx4DFmode)))
#define HAVE_aarch64_pred_movvnx48qi (TARGET_SVE \
&& (register_operand (operands[0], VNx48QImode) \
|| register_operand (operands[2], VNx48QImode)))
#define HAVE_aarch64_pred_movvnx24hi (TARGET_SVE \
&& (register_operand (operands[0], VNx24HImode) \
|| register_operand (operands[2], VNx24HImode)))
#define HAVE_aarch64_pred_movvnx12si (TARGET_SVE \
&& (register_operand (operands[0], VNx12SImode) \
|| register_operand (operands[2], VNx12SImode)))
#define HAVE_aarch64_pred_movvnx6di (TARGET_SVE \
&& (register_operand (operands[0], VNx6DImode) \
|| register_operand (operands[2], VNx6DImode)))
#define HAVE_aarch64_pred_movvnx24bf (TARGET_SVE \
&& (register_operand (operands[0], VNx24BFmode) \
|| register_operand (operands[2], VNx24BFmode)))
#define HAVE_aarch64_pred_movvnx24hf (TARGET_SVE \
&& (register_operand (operands[0], VNx24HFmode) \
|| register_operand (operands[2], VNx24HFmode)))
#define HAVE_aarch64_pred_movvnx12sf (TARGET_SVE \
&& (register_operand (operands[0], VNx12SFmode) \
|| register_operand (operands[2], VNx12SFmode)))
#define HAVE_aarch64_pred_movvnx6df (TARGET_SVE \
&& (register_operand (operands[0], VNx6DFmode) \
|| register_operand (operands[2], VNx6DFmode)))
#define HAVE_aarch64_pred_movvnx64qi (TARGET_SVE \
&& (register_operand (operands[0], VNx64QImode) \
|| register_operand (operands[2], VNx64QImode)))
#define HAVE_aarch64_pred_movvnx32hi (TARGET_SVE \
&& (register_operand (operands[0], VNx32HImode) \
|| register_operand (operands[2], VNx32HImode)))
#define HAVE_aarch64_pred_movvnx16si (TARGET_SVE \
&& (register_operand (operands[0], VNx16SImode) \
|| register_operand (operands[2], VNx16SImode)))
#define HAVE_aarch64_pred_movvnx8di (TARGET_SVE \
&& (register_operand (operands[0], VNx8DImode) \
|| register_operand (operands[2], VNx8DImode)))
#define HAVE_aarch64_pred_movvnx32bf (TARGET_SVE \
&& (register_operand (operands[0], VNx32BFmode) \
|| register_operand (operands[2], VNx32BFmode)))
#define HAVE_aarch64_pred_movvnx32hf (TARGET_SVE \
&& (register_operand (operands[0], VNx32HFmode) \
|| register_operand (operands[2], VNx32HFmode)))
#define HAVE_aarch64_pred_movvnx16sf (TARGET_SVE \
&& (register_operand (operands[0], VNx16SFmode) \
|| register_operand (operands[2], VNx16SFmode)))
#define HAVE_aarch64_pred_movvnx8df (TARGET_SVE \
&& (register_operand (operands[0], VNx8DFmode) \
|| register_operand (operands[2], VNx8DFmode)))
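/* Patterns for reading, writing and updating the SVE first-fault
   register (FFR), used by the first-faulting (LDFF1) and
   non-faulting (LDNF1) loads further below.  */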
#define HAVE_aarch64_wrffr (TARGET_SVE)
#define HAVE_aarch64_update_ffr_for_load (TARGET_SVE)
#define HAVE_aarch64_copy_ffr_to_ffrt (TARGET_SVE)
#define HAVE_aarch64_rdffr (TARGET_SVE)
#define HAVE_aarch64_rdffr_z (TARGET_SVE)
#define HAVE_aarch64_update_ffrt (TARGET_SVE)
#define HAVE_maskloadvnx16qivnx16bi (TARGET_SVE)
#define HAVE_maskloadvnx8qivnx8bi (TARGET_SVE)
#define HAVE_maskloadvnx4qivnx4bi (TARGET_SVE)
#define HAVE_maskloadvnx2qivnx2bi (TARGET_SVE)
#define HAVE_maskloadvnx8hivnx8bi (TARGET_SVE)
#define HAVE_maskloadvnx4hivnx4bi (TARGET_SVE)
#define HAVE_maskloadvnx2hivnx2bi (TARGET_SVE)
#define HAVE_maskloadvnx8hfvnx8bi (TARGET_SVE)
#define HAVE_maskloadvnx4hfvnx4bi (TARGET_SVE)
#define HAVE_maskloadvnx2hfvnx2bi (TARGET_SVE)
#define HAVE_maskloadvnx8bfvnx8bi (TARGET_SVE)
#define HAVE_maskloadvnx4sivnx4bi (TARGET_SVE)
#define HAVE_maskloadvnx2sivnx2bi (TARGET_SVE)
#define HAVE_maskloadvnx4sfvnx4bi (TARGET_SVE)
#define HAVE_maskloadvnx2sfvnx2bi (TARGET_SVE)
#define HAVE_maskloadvnx2divnx2bi (TARGET_SVE)
#define HAVE_maskloadvnx2dfvnx2bi (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx32qivnx16qi (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx16hivnx8hi (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx8sivnx4si (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx4divnx2di (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx16bfvnx8bf (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx16hfvnx8hf (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx8sfvnx4sf (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx4dfvnx2df (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx48qivnx16qi (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx24hivnx8hi (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx12sivnx4si (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx6divnx2di (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx24bfvnx8bf (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx24hfvnx8hf (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx12sfvnx4sf (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx6dfvnx2df (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx64qivnx16qi (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx32hivnx8hi (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx16sivnx4si (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx8divnx2di (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx32bfvnx8bf (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx32hfvnx8hf (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx16sfvnx4sf (TARGET_SVE)
#define HAVE_vec_mask_load_lanesvnx8dfvnx2df (TARGET_SVE)
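/* In the extending-load conditions below, the (~A & B) == 0 tests
   are compile-time constants left behind by iterator expansion of
   the machine description: mode-attribute masks have been
   substituted for A and B, leaving a subset check.  Every
   combination listed here folds to true, so each flag reduces to
   TARGET_SVE.  */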
#define HAVE_aarch64_load_extendvnx8hivnx8qi (TARGET_SVE && (~0x81 & 0x81) == 0)
#define HAVE_aarch64_load_zero_extendvnx8hivnx8qi (TARGET_SVE && (~0x81 & 0x81) == 0)
#define HAVE_aarch64_load_extendvnx4hivnx4qi (TARGET_SVE && (~0x41 & 0x41) == 0)
#define HAVE_aarch64_load_zero_extendvnx4hivnx4qi (TARGET_SVE && (~0x41 & 0x41) == 0)
#define HAVE_aarch64_load_extendvnx2hivnx2qi (TARGET_SVE && (~0x21 & 0x21) == 0)
#define HAVE_aarch64_load_zero_extendvnx2hivnx2qi (TARGET_SVE && (~0x21 & 0x21) == 0)
#define HAVE_aarch64_load_extendvnx4sivnx4qi (TARGET_SVE && (~0x43 & 0x41) == 0)
#define HAVE_aarch64_load_zero_extendvnx4sivnx4qi (TARGET_SVE && (~0x43 & 0x41) == 0)
#define HAVE_aarch64_load_extendvnx4sivnx4hi (TARGET_SVE && (~0x43 & 0x42) == 0)
#define HAVE_aarch64_load_zero_extendvnx4sivnx4hi (TARGET_SVE && (~0x43 & 0x42) == 0)
#define HAVE_aarch64_load_extendvnx2sivnx2qi (TARGET_SVE && (~0x23 & 0x21) == 0)
#define HAVE_aarch64_load_zero_extendvnx2sivnx2qi (TARGET_SVE && (~0x23 & 0x21) == 0)
#define HAVE_aarch64_load_extendvnx2sivnx2hi (TARGET_SVE && (~0x23 & 0x22) == 0)
#define HAVE_aarch64_load_zero_extendvnx2sivnx2hi (TARGET_SVE && (~0x23 & 0x22) == 0)
#define HAVE_aarch64_load_extendvnx2divnx2qi (TARGET_SVE && (~0x27 & 0x21) == 0)
#define HAVE_aarch64_load_zero_extendvnx2divnx2qi (TARGET_SVE && (~0x27 & 0x21) == 0)
#define HAVE_aarch64_load_extendvnx2divnx2hi (TARGET_SVE && (~0x27 & 0x22) == 0)
#define HAVE_aarch64_load_zero_extendvnx2divnx2hi (TARGET_SVE && (~0x27 & 0x22) == 0)
#define HAVE_aarch64_load_extendvnx2divnx2si (TARGET_SVE && (~0x27 & 0x24) == 0)
#define HAVE_aarch64_load_zero_extendvnx2divnx2si (TARGET_SVE && (~0x27 & 0x24) == 0)
#define HAVE_aarch64_ldff1vnx16qi (TARGET_SVE)
#define HAVE_aarch64_ldnf1vnx16qi (TARGET_SVE)
#define HAVE_aarch64_ldff1vnx8hi (TARGET_SVE)
#define HAVE_aarch64_ldnf1vnx8hi (TARGET_SVE)
#define HAVE_aarch64_ldff1vnx4si (TARGET_SVE)
#define HAVE_aarch64_ldnf1vnx4si (TARGET_SVE)
#define HAVE_aarch64_ldff1vnx2di (TARGET_SVE)
#define HAVE_aarch64_ldnf1vnx2di (TARGET_SVE)
#define HAVE_aarch64_ldff1vnx8bf (TARGET_SVE)
#define HAVE_aarch64_ldnf1vnx8bf (TARGET_SVE)
#define HAVE_aarch64_ldff1vnx8hf (TARGET_SVE)
#define HAVE_aarch64_ldnf1vnx8hf (TARGET_SVE)
#define HAVE_aarch64_ldff1vnx4sf (TARGET_SVE)
#define HAVE_aarch64_ldnf1vnx4sf (TARGET_SVE)
#define HAVE_aarch64_ldff1vnx2df (TARGET_SVE)
#define HAVE_aarch64_ldnf1vnx2df (TARGET_SVE)
#define HAVE_aarch64_ldff1_extendvnx8hivnx8qi (TARGET_SVE && (~0x81 & 0x81) == 0)
#define HAVE_aarch64_ldnf1_extendvnx8hivnx8qi (TARGET_SVE && (~0x81 & 0x81) == 0)
#define HAVE_aarch64_ldff1_zero_extendvnx8hivnx8qi (TARGET_SVE && (~0x81 & 0x81) == 0)
#define HAVE_aarch64_ldnf1_zero_extendvnx8hivnx8qi (TARGET_SVE && (~0x81 & 0x81) == 0)
#define HAVE_aarch64_ldff1_extendvnx4hivnx4qi (TARGET_SVE && (~0x41 & 0x41) == 0)
#define HAVE_aarch64_ldnf1_extendvnx4hivnx4qi (TARGET_SVE && (~0x41 & 0x41) == 0)
#define HAVE_aarch64_ldff1_zero_extendvnx4hivnx4qi (TARGET_SVE && (~0x41 & 0x41) == 0)
#define HAVE_aarch64_ldnf1_zero_extendvnx4hivnx4qi (TARGET_SVE && (~0x41 & 0x41) == 0)
#define HAVE_aarch64_ldff1_extendvnx2hivnx2qi (TARGET_SVE && (~0x21 & 0x21) == 0)
#define HAVE_aarch64_ldnf1_extendvnx2hivnx2qi (TARGET_SVE && (~0x21 & 0x21) == 0)
#define HAVE_aarch64_ldff1_zero_extendvnx2hivnx2qi (TARGET_SVE && (~0x21 & 0x21) == 0)
#define HAVE_aarch64_ldnf1_zero_extendvnx2hivnx2qi (TARGET_SVE && (~0x21 & 0x21) == 0)
#define HAVE_aarch64_ldff1_extendvnx4sivnx4qi (TARGET_SVE && (~0x43 & 0x41) == 0)
#define HAVE_aarch64_ldnf1_extendvnx4sivnx4qi (TARGET_SVE && (~0x43 & 0x41) == 0)
#define HAVE_aarch64_ldff1_zero_extendvnx4sivnx4qi (TARGET_SVE && (~0x43 & 0x41) == 0)
#define HAVE_aarch64_ldnf1_zero_extendvnx4sivnx4qi (TARGET_SVE && (~0x43 & 0x41) == 0)
#define HAVE_aarch64_ldff1_extendvnx4sivnx4hi (TARGET_SVE && (~0x43 & 0x42) == 0)
#define HAVE_aarch64_ldnf1_extendvnx4sivnx4hi (TARGET_SVE && (~0x43 & 0x42) == 0)
#define HAVE_aarch64_ldff1_zero_extendvnx4sivnx4hi (TARGET_SVE && (~0x43 & 0x42) == 0)
#define HAVE_aarch64_ldnf1_zero_extendvnx4sivnx4hi (TARGET_SVE && (~0x43 & 0x42) == 0)
#define HAVE_aarch64_ldff1_extendvnx2sivnx2qi (TARGET_SVE && (~0x23 & 0x21) == 0)
#define HAVE_aarch64_ldnf1_extendvnx2sivnx2qi (TARGET_SVE && (~0x23 & 0x21) == 0)
#define HAVE_aarch64_ldff1_zero_extendvnx2sivnx2qi (TARGET_SVE && (~0x23 & 0x21) == 0)
#define HAVE_aarch64_ldnf1_zero_extendvnx2sivnx2qi (TARGET_SVE && (~0x23 & 0x21) == 0)
#define HAVE_aarch64_ldff1_extendvnx2sivnx2hi (TARGET_SVE && (~0x23 & 0x22) == 0)
#define HAVE_aarch64_ldnf1_extendvnx2sivnx2hi (TARGET_SVE && (~0x23 & 0x22) == 0)
#define HAVE_aarch64_ldff1_zero_extendvnx2sivnx2hi (TARGET_SVE && (~0x23 & 0x22) == 0)
#define HAVE_aarch64_ldnf1_zero_extendvnx2sivnx2hi (TARGET_SVE && (~0x23 & 0x22) == 0)
#define HAVE_aarch64_ldff1_extendvnx2divnx2qi (TARGET_SVE && (~0x27 & 0x21) == 0)
#define HAVE_aarch64_ldnf1_extendvnx2divnx2qi (TARGET_SVE && (~0x27 & 0x21) == 0)
#define HAVE_aarch64_ldff1_zero_extendvnx2divnx2qi (TARGET_SVE && (~0x27 & 0x21) == 0)
#define HAVE_aarch64_ldnf1_zero_extendvnx2divnx2qi (TARGET_SVE && (~0x27 & 0x21) == 0)
#define HAVE_aarch64_ldff1_extendvnx2divnx2hi (TARGET_SVE && (~0x27 & 0x22) == 0)
#define HAVE_aarch64_ldnf1_extendvnx2divnx2hi (TARGET_SVE && (~0x27 & 0x22) == 0)
#define HAVE_aarch64_ldff1_zero_extendvnx2divnx2hi (TARGET_SVE && (~0x27 & 0x22) == 0)
#define HAVE_aarch64_ldnf1_zero_extendvnx2divnx2hi (TARGET_SVE && (~0x27 & 0x22) == 0)
#define HAVE_aarch64_ldff1_extendvnx2divnx2si (TARGET_SVE && (~0x27 & 0x24) == 0)
#define HAVE_aarch64_ldnf1_extendvnx2divnx2si (TARGET_SVE && (~0x27 & 0x24) == 0)
#define HAVE_aarch64_ldff1_zero_extendvnx2divnx2si (TARGET_SVE && (~0x27 & 0x24) == 0)
#define HAVE_aarch64_ldnf1_zero_extendvnx2divnx2si (TARGET_SVE && (~0x27 & 0x24) == 0)
#define HAVE_aarch64_ldnt1vnx16qi (TARGET_SVE)
#define HAVE_aarch64_ldnt1vnx8hi (TARGET_SVE)
#define HAVE_aarch64_ldnt1vnx4si (TARGET_SVE)
#define HAVE_aarch64_ldnt1vnx2di (TARGET_SVE)
#define HAVE_aarch64_ldnt1vnx8bf (TARGET_SVE)
#define HAVE_aarch64_ldnt1vnx8hf (TARGET_SVE)
#define HAVE_aarch64_ldnt1vnx4sf (TARGET_SVE)
#define HAVE_aarch64_ldnt1vnx2df (TARGET_SVE)
#define HAVE_mask_gather_loadvnx4qivnx4si (TARGET_SVE)
#define HAVE_mask_gather_loadvnx4hivnx4si (TARGET_SVE)
#define HAVE_mask_gather_loadvnx4hfvnx4si (TARGET_SVE)
#define HAVE_mask_gather_loadvnx4sivnx4si (TARGET_SVE)
#define HAVE_mask_gather_loadvnx4sfvnx4si (TARGET_SVE)
#define HAVE_mask_gather_loadvnx2qivnx2di (TARGET_SVE)
#define HAVE_mask_gather_loadvnx2hivnx2di (TARGET_SVE)
#define HAVE_mask_gather_loadvnx2hfvnx2di (TARGET_SVE)
#define HAVE_mask_gather_loadvnx2sivnx2di (TARGET_SVE)
#define HAVE_mask_gather_loadvnx2sfvnx2di (TARGET_SVE)
#define HAVE_mask_gather_loadvnx2divnx2di (TARGET_SVE)
#define HAVE_mask_gather_loadvnx2dfvnx2di (TARGET_SVE)
#define HAVE_aarch64_gather_load_extendvnx4hivnx4qi (TARGET_SVE && (~0x41 & 0x41) == 0)
#define HAVE_aarch64_gather_load_zero_extendvnx4hivnx4qi (TARGET_SVE && (~0x41 & 0x41) == 0)
#define HAVE_aarch64_gather_load_extendvnx4sivnx4qi (TARGET_SVE && (~0x43 & 0x41) == 0)
#define HAVE_aarch64_gather_load_zero_extendvnx4sivnx4qi (TARGET_SVE && (~0x43 & 0x41) == 0)
#define HAVE_aarch64_gather_load_extendvnx4sivnx4hi (TARGET_SVE && (~0x43 & 0x42) == 0)
#define HAVE_aarch64_gather_load_zero_extendvnx4sivnx4hi (TARGET_SVE && (~0x43 & 0x42) == 0)
#define HAVE_aarch64_gather_load_extendvnx2hivnx2qi (TARGET_SVE && (~0x21 & 0x21) == 0)
#define HAVE_aarch64_gather_load_zero_extendvnx2hivnx2qi (TARGET_SVE && (~0x21 & 0x21) == 0)
#define HAVE_aarch64_gather_load_extendvnx2sivnx2qi (TARGET_SVE && (~0x23 & 0x21) == 0)
#define HAVE_aarch64_gather_load_zero_extendvnx2sivnx2qi (TARGET_SVE && (~0x23 & 0x21) == 0)
#define HAVE_aarch64_gather_load_extendvnx2divnx2qi (TARGET_SVE && (~0x27 & 0x21) == 0)
#define HAVE_aarch64_gather_load_zero_extendvnx2divnx2qi (TARGET_SVE && (~0x27 & 0x21) == 0)
#define HAVE_aarch64_gather_load_extendvnx2sivnx2hi (TARGET_SVE && (~0x23 & 0x22) == 0)
#define HAVE_aarch64_gather_load_zero_extendvnx2sivnx2hi (TARGET_SVE && (~0x23 & 0x22) == 0)
#define HAVE_aarch64_gather_load_extendvnx2divnx2hi (TARGET_SVE && (~0x27 & 0x22) == 0)
#define HAVE_aarch64_gather_load_zero_extendvnx2divnx2hi (TARGET_SVE && (~0x27 & 0x22) == 0)
#define HAVE_aarch64_gather_load_extendvnx2divnx2si (TARGET_SVE && (~0x27 & 0x24) == 0)
#define HAVE_aarch64_gather_load_zero_extendvnx2divnx2si (TARGET_SVE && (~0x27 & 0x24) == 0)
#define HAVE_aarch64_ldff1_gathervnx4si (TARGET_SVE)
#define HAVE_aarch64_ldff1_gathervnx4sf (TARGET_SVE)
#define HAVE_aarch64_ldff1_gathervnx2di (TARGET_SVE)
#define HAVE_aarch64_ldff1_gathervnx2df (TARGET_SVE)
#define HAVE_aarch64_ldff1_gather_extendvnx4sivnx4qi (TARGET_SVE)
#define HAVE_aarch64_ldff1_gather_zero_extendvnx4sivnx4qi (TARGET_SVE)
#define HAVE_aarch64_ldff1_gather_extendvnx4sivnx4hi (TARGET_SVE)
#define HAVE_aarch64_ldff1_gather_zero_extendvnx4sivnx4hi (TARGET_SVE)
#define HAVE_aarch64_ldff1_gather_extendvnx2divnx2qi (TARGET_SVE)
#define HAVE_aarch64_ldff1_gather_zero_extendvnx2divnx2qi (TARGET_SVE)
#define HAVE_aarch64_ldff1_gather_extendvnx2divnx2hi (TARGET_SVE)
#define HAVE_aarch64_ldff1_gather_zero_extendvnx2divnx2hi (TARGET_SVE)
#define HAVE_aarch64_ldff1_gather_extendvnx2divnx2si (TARGET_SVE)
#define HAVE_aarch64_ldff1_gather_zero_extendvnx2divnx2si (TARGET_SVE)
#define HAVE_aarch64_sve_prefetchvnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_prefetchvnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_prefetchvnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_prefetchvnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_gather_prefetchvnx16qivnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_gather_prefetchvnx8hivnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_gather_prefetchvnx4sivnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_gather_prefetchvnx2divnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_gather_prefetchvnx16qivnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_gather_prefetchvnx8hivnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_gather_prefetchvnx4sivnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_gather_prefetchvnx2divnx2di (TARGET_SVE)
#define HAVE_maskstorevnx16qivnx16bi (TARGET_SVE)
#define HAVE_maskstorevnx8qivnx8bi (TARGET_SVE)
#define HAVE_maskstorevnx4qivnx4bi (TARGET_SVE)
#define HAVE_maskstorevnx2qivnx2bi (TARGET_SVE)
#define HAVE_maskstorevnx8hivnx8bi (TARGET_SVE)
#define HAVE_maskstorevnx4hivnx4bi (TARGET_SVE)
#define HAVE_maskstorevnx2hivnx2bi (TARGET_SVE)
#define HAVE_maskstorevnx8hfvnx8bi (TARGET_SVE)
#define HAVE_maskstorevnx4hfvnx4bi (TARGET_SVE)
#define HAVE_maskstorevnx2hfvnx2bi (TARGET_SVE)
#define HAVE_maskstorevnx8bfvnx8bi (TARGET_SVE)
#define HAVE_maskstorevnx4sivnx4bi (TARGET_SVE)
#define HAVE_maskstorevnx2sivnx2bi (TARGET_SVE)
#define HAVE_maskstorevnx4sfvnx4bi (TARGET_SVE)
#define HAVE_maskstorevnx2sfvnx2bi (TARGET_SVE)
#define HAVE_maskstorevnx2divnx2bi (TARGET_SVE)
#define HAVE_maskstorevnx2dfvnx2bi (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx32qivnx16qi (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx16hivnx8hi (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx8sivnx4si (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx4divnx2di (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx16bfvnx8bf (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx16hfvnx8hf (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx8sfvnx4sf (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx4dfvnx2df (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx48qivnx16qi (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx24hivnx8hi (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx12sivnx4si (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx6divnx2di (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx24bfvnx8bf (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx24hfvnx8hf (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx12sfvnx4sf (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx6dfvnx2df (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx64qivnx16qi (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx32hivnx8hi (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx16sivnx4si (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx8divnx2di (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx32bfvnx8bf (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx32hfvnx8hf (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx16sfvnx4sf (TARGET_SVE)
#define HAVE_vec_mask_store_lanesvnx8dfvnx2df (TARGET_SVE)
#define HAVE_aarch64_store_truncvnx8qivnx8hi (TARGET_SVE)
#define HAVE_aarch64_store_truncvnx4qivnx4si (TARGET_SVE)
#define HAVE_aarch64_store_truncvnx4hivnx4si (TARGET_SVE)
#define HAVE_aarch64_store_truncvnx2qivnx2di (TARGET_SVE)
#define HAVE_aarch64_store_truncvnx2hivnx2di (TARGET_SVE)
#define HAVE_aarch64_store_truncvnx2sivnx2di (TARGET_SVE)
#define HAVE_aarch64_stnt1vnx16qi (TARGET_SVE)
#define HAVE_aarch64_stnt1vnx8hi (TARGET_SVE)
#define HAVE_aarch64_stnt1vnx4si (TARGET_SVE)
#define HAVE_aarch64_stnt1vnx2di (TARGET_SVE)
#define HAVE_aarch64_stnt1vnx8bf (TARGET_SVE)
#define HAVE_aarch64_stnt1vnx8hf (TARGET_SVE)
#define HAVE_aarch64_stnt1vnx4sf (TARGET_SVE)
#define HAVE_aarch64_stnt1vnx2df (TARGET_SVE)
#define HAVE_mask_scatter_storevnx4qivnx4si (TARGET_SVE)
#define HAVE_mask_scatter_storevnx4hivnx4si (TARGET_SVE)
#define HAVE_mask_scatter_storevnx4hfvnx4si (TARGET_SVE)
#define HAVE_mask_scatter_storevnx4sivnx4si (TARGET_SVE)
#define HAVE_mask_scatter_storevnx4sfvnx4si (TARGET_SVE)
#define HAVE_mask_scatter_storevnx2qivnx2di (TARGET_SVE)
#define HAVE_mask_scatter_storevnx2hivnx2di (TARGET_SVE)
#define HAVE_mask_scatter_storevnx2hfvnx2di (TARGET_SVE)
#define HAVE_mask_scatter_storevnx2sivnx2di (TARGET_SVE)
#define HAVE_mask_scatter_storevnx2sfvnx2di (TARGET_SVE)
#define HAVE_mask_scatter_storevnx2divnx2di (TARGET_SVE)
#define HAVE_mask_scatter_storevnx2dfvnx2di (TARGET_SVE)
#define HAVE_aarch64_scatter_store_truncvnx4qivnx4si (TARGET_SVE)
#define HAVE_aarch64_scatter_store_truncvnx4hivnx4si (TARGET_SVE)
#define HAVE_aarch64_scatter_store_truncvnx2qivnx2di (TARGET_SVE)
#define HAVE_aarch64_scatter_store_truncvnx2hivnx2di (TARGET_SVE)
#define HAVE_aarch64_scatter_store_truncvnx2sivnx2di (TARGET_SVE)
#define HAVE_aarch64_vec_duplicate_vqvnx16qi_le (TARGET_SVE && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_vec_duplicate_vqvnx8hi_le (TARGET_SVE && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_vec_duplicate_vqvnx4si_le (TARGET_SVE && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_vec_duplicate_vqvnx2di_le (TARGET_SVE && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_vec_duplicate_vqvnx8bf_le (TARGET_SVE && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_vec_duplicate_vqvnx8hf_le (TARGET_SVE && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_vec_duplicate_vqvnx4sf_le (TARGET_SVE && !BYTES_BIG_ENDIAN)
#define HAVE_aarch64_vec_duplicate_vqvnx2df_le (TARGET_SVE && !BYTES_BIG_ENDIAN)
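/* The big-endian (_be) duplicate-quadword variants additionally
   check that the permute selector starts at the last element of the
   128-bit vector mode, evidently matching the reversed lane
   numbering SVE uses on big-endian targets.  */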
#define HAVE_aarch64_vec_duplicate_vqvnx16qi_be (TARGET_SVE \
&& BYTES_BIG_ENDIAN \
&& known_eq (INTVAL (XVECEXP (operands[2], 0, 0)), \
GET_MODE_NUNITS (V16QImode) - 1))
#define HAVE_aarch64_vec_duplicate_vqvnx8hi_be (TARGET_SVE \
&& BYTES_BIG_ENDIAN \
&& known_eq (INTVAL (XVECEXP (operands[2], 0, 0)), \
GET_MODE_NUNITS (V8HImode) - 1))
#define HAVE_aarch64_vec_duplicate_vqvnx4si_be (TARGET_SVE \
&& BYTES_BIG_ENDIAN \
&& known_eq (INTVAL (XVECEXP (operands[2], 0, 0)), \
GET_MODE_NUNITS (V4SImode) - 1))
#define HAVE_aarch64_vec_duplicate_vqvnx2di_be (TARGET_SVE \
&& BYTES_BIG_ENDIAN \
&& known_eq (INTVAL (XVECEXP (operands[2], 0, 0)), \
GET_MODE_NUNITS (V2DImode) - 1))
#define HAVE_aarch64_vec_duplicate_vqvnx8bf_be (TARGET_SVE \
&& BYTES_BIG_ENDIAN \
&& known_eq (INTVAL (XVECEXP (operands[2], 0, 0)), \
GET_MODE_NUNITS (V8BFmode) - 1))
#define HAVE_aarch64_vec_duplicate_vqvnx8hf_be (TARGET_SVE \
&& BYTES_BIG_ENDIAN \
&& known_eq (INTVAL (XVECEXP (operands[2], 0, 0)), \
GET_MODE_NUNITS (V8HFmode) - 1))
#define HAVE_aarch64_vec_duplicate_vqvnx4sf_be (TARGET_SVE \
&& BYTES_BIG_ENDIAN \
&& known_eq (INTVAL (XVECEXP (operands[2], 0, 0)), \
GET_MODE_NUNITS (V4SFmode) - 1))
#define HAVE_aarch64_vec_duplicate_vqvnx2df_be (TARGET_SVE \
&& BYTES_BIG_ENDIAN \
&& known_eq (INTVAL (XVECEXP (operands[2], 0, 0)), \
GET_MODE_NUNITS (V2DFmode) - 1))
#define HAVE_sve_ld1rvnx16qi (TARGET_SVE)
#define HAVE_sve_ld1rvnx8qi (TARGET_SVE)
#define HAVE_sve_ld1rvnx4qi (TARGET_SVE)
#define HAVE_sve_ld1rvnx2qi (TARGET_SVE)
#define HAVE_sve_ld1rvnx8hi (TARGET_SVE)
#define HAVE_sve_ld1rvnx4hi (TARGET_SVE)
#define HAVE_sve_ld1rvnx2hi (TARGET_SVE)
#define HAVE_sve_ld1rvnx8hf (TARGET_SVE)
#define HAVE_sve_ld1rvnx4hf (TARGET_SVE)
#define HAVE_sve_ld1rvnx2hf (TARGET_SVE)
#define HAVE_sve_ld1rvnx8bf (TARGET_SVE)
#define HAVE_sve_ld1rvnx4si (TARGET_SVE)
#define HAVE_sve_ld1rvnx2si (TARGET_SVE)
#define HAVE_sve_ld1rvnx4sf (TARGET_SVE)
#define HAVE_sve_ld1rvnx2sf (TARGET_SVE)
#define HAVE_sve_ld1rvnx2di (TARGET_SVE)
#define HAVE_sve_ld1rvnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_ld1rqvnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_ld1rqvnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_ld1rqvnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_ld1rqvnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_ld1rqvnx8bf (TARGET_SVE)
#define HAVE_aarch64_sve_ld1rqvnx8hf (TARGET_SVE)
#define HAVE_aarch64_sve_ld1rqvnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_ld1rqvnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_ld1rovnx16qi (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_ld1rovnx8hi (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_ld1rovnx4si (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_ld1rovnx2di (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_ld1rovnx8bf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_ld1rovnx8hf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_ld1rovnx4sf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_ld1rovnx2df (TARGET_SVE_F64MM)
#define HAVE_vec_shl_insert_vnx16qi (TARGET_SVE)
#define HAVE_vec_shl_insert_vnx8hi (TARGET_SVE)
#define HAVE_vec_shl_insert_vnx4si (TARGET_SVE)
#define HAVE_vec_shl_insert_vnx2di (TARGET_SVE)
#define HAVE_vec_shl_insert_vnx8bf (TARGET_SVE)
#define HAVE_vec_shl_insert_vnx8hf (TARGET_SVE)
#define HAVE_vec_shl_insert_vnx4sf (TARGET_SVE)
#define HAVE_vec_shl_insert_vnx2df (TARGET_SVE)
#define HAVE_vec_seriesvnx16qi (TARGET_SVE)
#define HAVE_vec_seriesvnx8qi (TARGET_SVE)
#define HAVE_vec_seriesvnx4qi (TARGET_SVE)
#define HAVE_vec_seriesvnx2qi (TARGET_SVE)
#define HAVE_vec_seriesvnx8hi (TARGET_SVE)
#define HAVE_vec_seriesvnx4hi (TARGET_SVE)
#define HAVE_vec_seriesvnx2hi (TARGET_SVE)
#define HAVE_vec_seriesvnx4si (TARGET_SVE)
#define HAVE_vec_seriesvnx2si (TARGET_SVE)
#define HAVE_vec_seriesvnx2di (TARGET_SVE)
#define HAVE_extract_after_last_vnx16qi (TARGET_SVE)
#define HAVE_extract_last_vnx16qi (TARGET_SVE)
#define HAVE_extract_after_last_vnx8hi (TARGET_SVE)
#define HAVE_extract_last_vnx8hi (TARGET_SVE)
#define HAVE_extract_after_last_vnx4si (TARGET_SVE)
#define HAVE_extract_last_vnx4si (TARGET_SVE)
#define HAVE_extract_after_last_vnx2di (TARGET_SVE)
#define HAVE_extract_last_vnx2di (TARGET_SVE)
#define HAVE_extract_after_last_vnx8bf (TARGET_SVE)
#define HAVE_extract_last_vnx8bf (TARGET_SVE)
#define HAVE_extract_after_last_vnx8hf (TARGET_SVE)
#define HAVE_extract_last_vnx8hf (TARGET_SVE)
#define HAVE_extract_after_last_vnx4sf (TARGET_SVE)
#define HAVE_extract_last_vnx4sf (TARGET_SVE)
#define HAVE_extract_after_last_vnx2df (TARGET_SVE)
#define HAVE_extract_last_vnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_absvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_negvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_one_cmplvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_clrsbvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_clzvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_popcountvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_qabsvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_aarch64_pred_qnegvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_aarch64_pred_absvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_negvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_one_cmplvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_clrsbvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_clzvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_popcountvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_qabsvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_aarch64_pred_qnegvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_aarch64_pred_absvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_negvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_one_cmplvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_clrsbvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_clzvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_popcountvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_qabsvnx4si ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_aarch64_pred_qnegvnx4si ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_aarch64_pred_absvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_negvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_one_cmplvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_clrsbvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_clzvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_popcountvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_qabsvnx2di ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_aarch64_pred_qnegvnx2di ((TARGET_SVE) && (TARGET_SVE2))
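/* The N >= M comparisons below are again compile-time constants:
   N is the element width in bits of the vector mode and M the
   minimum width at which RBIT/REVB/REVH/REVW is meaningful.  Every
   combination emitted here folds to true, so each flag reduces to
   TARGET_SVE.  */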
#define HAVE_aarch64_pred_rbitvnx16qi (TARGET_SVE && 8 >= 8)
#define HAVE_aarch64_pred_rbitvnx8hi (TARGET_SVE && 16 >= 8)
#define HAVE_aarch64_pred_revbvnx8hi (TARGET_SVE && 16 >= 16)
#define HAVE_aarch64_pred_rbitvnx4si (TARGET_SVE && 32 >= 8)
#define HAVE_aarch64_pred_revbvnx4si (TARGET_SVE && 32 >= 16)
#define HAVE_aarch64_pred_revhvnx4si (TARGET_SVE && 32 >= 32)
#define HAVE_aarch64_pred_rbitvnx2di (TARGET_SVE && 64 >= 8)
#define HAVE_aarch64_pred_revbvnx2di (TARGET_SVE && 64 >= 16)
#define HAVE_aarch64_pred_revhvnx2di (TARGET_SVE && 64 >= 32)
#define HAVE_aarch64_pred_revwvnx2di (TARGET_SVE && 64 >= 64)
#define HAVE_cond_rbitvnx16qi (TARGET_SVE && 8 >= 8)
#define HAVE_cond_rbitvnx8hi (TARGET_SVE && 16 >= 8)
#define HAVE_cond_revbvnx8hi (TARGET_SVE && 16 >= 16)
#define HAVE_cond_rbitvnx4si (TARGET_SVE && 32 >= 8)
#define HAVE_cond_revbvnx4si (TARGET_SVE && 32 >= 16)
#define HAVE_cond_revhvnx4si (TARGET_SVE && 32 >= 32)
#define HAVE_cond_rbitvnx2di (TARGET_SVE && 64 >= 8)
#define HAVE_cond_revbvnx2di (TARGET_SVE && 64 >= 16)
#define HAVE_cond_revhvnx2di (TARGET_SVE && 64 >= 32)
#define HAVE_cond_revwvnx2di (TARGET_SVE && 64 >= 64)
#define HAVE_aarch64_pred_sxtvnx8hivnx8qi (TARGET_SVE \
&& (~0x81 & 0x81) == 0)
#define HAVE_aarch64_pred_sxtvnx4sivnx4qi (TARGET_SVE \
&& (~0x43 & 0x41) == 0)
#define HAVE_aarch64_pred_sxtvnx4sivnx4hi (TARGET_SVE \
&& (~0x43 & 0x42) == 0)
#define HAVE_aarch64_pred_sxtvnx2divnx2qi (TARGET_SVE \
&& (~0x27 & 0x21) == 0)
#define HAVE_aarch64_pred_sxtvnx2divnx2hi (TARGET_SVE \
&& (~0x27 & 0x22) == 0)
#define HAVE_aarch64_pred_sxtvnx2divnx2si (TARGET_SVE \
&& (~0x27 & 0x24) == 0)
#define HAVE_aarch64_cond_sxtvnx8hivnx8qi (TARGET_SVE \
&& (~0x81 & 0x81) == 0)
#define HAVE_aarch64_cond_sxtvnx4sivnx4qi (TARGET_SVE \
&& (~0x43 & 0x41) == 0)
#define HAVE_aarch64_cond_sxtvnx4sivnx4hi (TARGET_SVE \
&& (~0x43 & 0x42) == 0)
#define HAVE_aarch64_cond_sxtvnx2divnx2qi (TARGET_SVE \
&& (~0x27 & 0x21) == 0)
#define HAVE_aarch64_cond_sxtvnx2divnx2hi (TARGET_SVE \
&& (~0x27 & 0x22) == 0)
#define HAVE_aarch64_cond_sxtvnx2divnx2si (TARGET_SVE \
&& (~0x27 & 0x24) == 0)
#define HAVE_truncvnx8hivnx8qi2 (TARGET_SVE && (~0x81 & 0x81) == 0)
#define HAVE_truncvnx4hivnx4qi2 (TARGET_SVE && (~0x41 & 0x41) == 0)
#define HAVE_truncvnx2hivnx2qi2 (TARGET_SVE && (~0x21 & 0x21) == 0)
#define HAVE_truncvnx4sivnx4qi2 (TARGET_SVE && (~0x43 & 0x41) == 0)
#define HAVE_truncvnx4sivnx4hi2 (TARGET_SVE && (~0x43 & 0x42) == 0)
#define HAVE_truncvnx2sivnx2qi2 (TARGET_SVE && (~0x23 & 0x21) == 0)
#define HAVE_truncvnx2sivnx2hi2 (TARGET_SVE && (~0x23 & 0x22) == 0)
#define HAVE_truncvnx2divnx2qi2 (TARGET_SVE && (~0x27 & 0x21) == 0)
#define HAVE_truncvnx2divnx2hi2 (TARGET_SVE && (~0x27 & 0x22) == 0)
#define HAVE_truncvnx2divnx2si2 (TARGET_SVE && (~0x27 & 0x24) == 0)
#define HAVE_aarch64_sve_fexpavnx8hf (TARGET_SVE)
#define HAVE_aarch64_sve_fexpavnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_fexpavnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_frecpevnx8hf (TARGET_SVE)
#define HAVE_aarch64_sve_frsqrtevnx8hf (TARGET_SVE)
#define HAVE_aarch64_sve_frecpevnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_frsqrtevnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_frecpevnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_frsqrtevnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_absvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_negvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_frecpxvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_roundvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_nearbyintvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_floorvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_frintnvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_ceilvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_rintvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_btruncvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_sqrtvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_absvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_negvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_frecpxvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_roundvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_nearbyintvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_floorvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_frintnvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_ceilvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_rintvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_btruncvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_sqrtvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_absvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_negvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_frecpxvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_roundvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_nearbyintvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_floorvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_frintnvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_ceilvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_rintvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_btruncvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_sqrtvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_mulvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_smaxvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_sminvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_umaxvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_uminvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_mulvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_smaxvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_sminvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_umaxvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_uminvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_mulvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_smaxvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_sminvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_umaxvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_uminvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_mulvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_smaxvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_sminvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_umaxvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_uminvnx2di (TARGET_SVE)
#define HAVE_addvnx16qi3 (TARGET_SVE)
#define HAVE_addvnx8qi3 (TARGET_SVE)
#define HAVE_addvnx4qi3 (TARGET_SVE)
#define HAVE_addvnx2qi3 (TARGET_SVE)
#define HAVE_addvnx8hi3 (TARGET_SVE)
#define HAVE_addvnx4hi3 (TARGET_SVE)
#define HAVE_addvnx2hi3 (TARGET_SVE)
#define HAVE_addvnx4si3 (TARGET_SVE)
#define HAVE_addvnx2si3 (TARGET_SVE)
#define HAVE_addvnx2di3 (TARGET_SVE)
#define HAVE_subvnx16qi3 (TARGET_SVE)
#define HAVE_subvnx8hi3 (TARGET_SVE)
#define HAVE_subvnx4si3 (TARGET_SVE)
#define HAVE_subvnx2di3 (TARGET_SVE)
#define HAVE_aarch64_adrvnx4si (TARGET_SVE)
#define HAVE_aarch64_adrvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_sabdvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_uabdvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_sabdvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_uabdvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_sabdvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_uabdvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_sabdvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_uabdvnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_ssaddvnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_sssubvnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_ssaddvnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_sssubvnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_ssaddvnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_sssubvnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_ssaddvnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_sssubvnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_usaddvnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_ussubvnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_usaddvnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_ussubvnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_usaddvnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_ussubvnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_usaddvnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_ussubvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_smulhvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_umulhvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_smulhvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_umulhvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_smulhvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_umulhvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_smulhvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_umulhvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_divvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_udivvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_divvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_udivvnx2di (TARGET_SVE)
#define HAVE_andvnx16qi3 (TARGET_SVE)
#define HAVE_iorvnx16qi3 (TARGET_SVE)
#define HAVE_xorvnx16qi3 (TARGET_SVE)
#define HAVE_andvnx8hi3 (TARGET_SVE)
#define HAVE_iorvnx8hi3 (TARGET_SVE)
#define HAVE_xorvnx8hi3 (TARGET_SVE)
#define HAVE_andvnx4si3 (TARGET_SVE)
#define HAVE_iorvnx4si3 (TARGET_SVE)
#define HAVE_xorvnx4si3 (TARGET_SVE)
#define HAVE_andvnx2di3 (TARGET_SVE)
#define HAVE_iorvnx2di3 (TARGET_SVE)
#define HAVE_xorvnx2di3 (TARGET_SVE)
#define HAVE_aarch64_pred_ashlvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_ashrvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_lshrvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_ashlvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_ashrvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_lshrvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_ashlvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_ashrvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_lshrvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_ashlvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_ashrvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_lshrvnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_lslvnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_asrvnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_lsrvnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_lslvnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_asrvnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_lsrvnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_lslvnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_asrvnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_lsrvnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_ftsmulvnx8hf (TARGET_SVE)
#define HAVE_aarch64_sve_ftsselvnx8hf (TARGET_SVE)
#define HAVE_aarch64_sve_ftsmulvnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_ftsselvnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_ftsmulvnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_ftsselvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_fscalevnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_fscalevnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_fscalevnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_frecpsvnx8hf (TARGET_SVE)
#define HAVE_aarch64_sve_frsqrtsvnx8hf (TARGET_SVE)
#define HAVE_aarch64_sve_frecpsvnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_frsqrtsvnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_frecpsvnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_frsqrtsvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_divvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_mulxvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_divvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_mulxvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_divvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_mulxvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_addvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_addvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_addvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_cadd90vnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_cadd270vnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_cadd90vnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_cadd270vnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_cadd90vnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_cadd270vnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_subvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_subvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_subvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_mulvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_mulvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_mulvnx2df (TARGET_SVE)
#define HAVE_aarch64_mul_lane_vnx8hf (TARGET_SVE)
#define HAVE_aarch64_mul_lane_vnx4sf (TARGET_SVE)
#define HAVE_aarch64_mul_lane_vnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_smax_nanvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_smaxvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_smin_nanvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_sminvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_smax_nanvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_smaxvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_smin_nanvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_sminvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_smax_nanvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_smaxvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_smin_nanvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_sminvnx2df (TARGET_SVE)
#define HAVE_andvnx16bi3 (TARGET_SVE)
#define HAVE_andvnx8bi3 (TARGET_SVE)
#define HAVE_andvnx4bi3 (TARGET_SVE)
#define HAVE_andvnx2bi3 (TARGET_SVE)
#define HAVE_aarch64_pred_andvnx16bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_iorvnx16bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_xorvnx16bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_andvnx8bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_iorvnx8bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_xorvnx8bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_andvnx4bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_iorvnx4bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_xorvnx4bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_andvnx2bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_iorvnx2bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_xorvnx2bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_bicvnx16bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_ornvnx16bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_bicvnx8bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_ornvnx8bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_bicvnx4bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_ornvnx4bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_bicvnx2bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_ornvnx2bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_norvnx16bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_nandvnx16bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_norvnx8bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_nandvnx8bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_norvnx4bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_nandvnx4bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_norvnx2bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_nandvnx2bi_z (TARGET_SVE)
#define HAVE_aarch64_pred_fmavnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_fmavnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_fmavnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_fmavnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_fnmavnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_fnmavnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_fnmavnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_fnmavnx2di (TARGET_SVE)
#define HAVE_sdot_prodvnx16qi (TARGET_SVE)
#define HAVE_udot_prodvnx16qi (TARGET_SVE)
#define HAVE_sdot_prodvnx8hi (TARGET_SVE)
#define HAVE_udot_prodvnx8hi (TARGET_SVE)
#define HAVE_aarch64_sdot_prod_lanevnx16qi (TARGET_SVE)
#define HAVE_aarch64_udot_prod_lanevnx16qi (TARGET_SVE)
#define HAVE_aarch64_sdot_prod_lanevnx8hi (TARGET_SVE)
#define HAVE_aarch64_udot_prod_lanevnx8hi (TARGET_SVE)
#define HAVE_aarch64_usdot_prodvnx16qi (TARGET_SVE_I8MM)
#define HAVE_aarch64_usdot_prod_lanevnx16qi (TARGET_SVE_I8MM)
#define HAVE_aarch64_sudot_prod_lanevnx16qi (TARGET_SVE_I8MM)
#define HAVE_aarch64_sve_add_smatmulvnx16qi (TARGET_SVE_I8MM)
#define HAVE_aarch64_sve_add_umatmulvnx16qi (TARGET_SVE_I8MM)
#define HAVE_aarch64_sve_add_usmatmulvnx16qi (TARGET_SVE_I8MM)
#define HAVE_aarch64_pred_fmavnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_fnmavnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_fnmsvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_fmsvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_fmavnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_fnmavnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_fnmsvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_fmsvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_fmavnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_fnmavnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_fnmsvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_fmsvnx2df (TARGET_SVE)
#define HAVE_aarch64_fma_lane_vnx8hf (TARGET_SVE)
#define HAVE_aarch64_fnma_lane_vnx8hf (TARGET_SVE)
#define HAVE_aarch64_fma_lane_vnx4sf (TARGET_SVE)
#define HAVE_aarch64_fnma_lane_vnx4sf (TARGET_SVE)
#define HAVE_aarch64_fma_lane_vnx2df (TARGET_SVE)
#define HAVE_aarch64_fnma_lane_vnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_fcmlavnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmla90vnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmla180vnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmla270vnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmlavnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmla90vnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmla180vnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmla270vnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmlavnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_fcmla90vnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_fcmla180vnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_fcmla270vnx2df (TARGET_SVE)
#define HAVE_aarch64_fcmla_lane_vnx8hf (TARGET_SVE)
#define HAVE_aarch64_fcmla90_lane_vnx8hf (TARGET_SVE)
#define HAVE_aarch64_fcmla180_lane_vnx8hf (TARGET_SVE)
#define HAVE_aarch64_fcmla270_lane_vnx8hf (TARGET_SVE)
#define HAVE_aarch64_fcmla_lane_vnx4sf (TARGET_SVE)
#define HAVE_aarch64_fcmla90_lane_vnx4sf (TARGET_SVE)
#define HAVE_aarch64_fcmla180_lane_vnx4sf (TARGET_SVE)
#define HAVE_aarch64_fcmla270_lane_vnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_tmadvnx8hf (TARGET_SVE)
#define HAVE_aarch64_sve_tmadvnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_tmadvnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_bfdotvnx4sf (TARGET_SVE_BF16)
#define HAVE_aarch64_sve_bfmlalbvnx4sf (TARGET_SVE_BF16)
#define HAVE_aarch64_sve_bfmlaltvnx4sf (TARGET_SVE_BF16)
#define HAVE_aarch64_sve_bfmmlavnx4sf (TARGET_SVE_BF16)
#define HAVE_aarch64_sve_bfdot_lanevnx4sf (TARGET_SVE_BF16)
#define HAVE_aarch64_sve_bfmlalb_lanevnx4sf (TARGET_SVE_BF16)
#define HAVE_aarch64_sve_bfmlalt_lanevnx4sf (TARGET_SVE_BF16)
#define HAVE_aarch64_sve_fmmlavnx4sf ((TARGET_SVE) && (TARGET_SVE_F32MM))
#define HAVE_aarch64_sve_fmmlavnx2df ((TARGET_SVE) && (TARGET_SVE_F64MM))
#define HAVE_aarch64_sel_dupvnx16qi (TARGET_SVE)
#define HAVE_aarch64_sel_dupvnx8hi (TARGET_SVE)
#define HAVE_aarch64_sel_dupvnx4si (TARGET_SVE)
#define HAVE_aarch64_sel_dupvnx2di (TARGET_SVE)
#define HAVE_aarch64_sel_dupvnx8bf (TARGET_SVE)
#define HAVE_aarch64_sel_dupvnx8hf (TARGET_SVE)
#define HAVE_aarch64_sel_dupvnx4sf (TARGET_SVE)
#define HAVE_aarch64_sel_dupvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_cmpltvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_cmplevnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_cmpeqvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_cmpnevnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_cmpgevnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_cmpgtvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_cmplovnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_cmplsvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_cmphsvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_cmphivnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_cmpltvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_cmplevnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_cmpeqvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_cmpnevnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_cmpgevnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_cmpgtvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_cmplovnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_cmplsvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_cmphsvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_cmphivnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_cmpltvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_cmplevnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_cmpeqvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_cmpnevnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_cmpgevnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_cmpgtvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_cmplovnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_cmplsvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_cmphsvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_cmphivnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_cmpltvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_cmplevnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_cmpeqvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_cmpnevnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_cmpgevnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_cmpgtvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_cmplovnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_cmplsvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_cmphsvnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_cmphivnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_cmpeqvnx16qi_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmpgevnx16qi_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmpgtvnx16qi_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmphivnx16qi_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmphsvnx16qi_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmplevnx16qi_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmplovnx16qi_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmplsvnx16qi_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmpltvnx16qi_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmpnevnx16qi_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmpeqvnx8hi_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmpgevnx8hi_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmpgtvnx8hi_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmphivnx8hi_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmphsvnx8hi_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmplevnx8hi_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmplovnx8hi_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmplsvnx8hi_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmpltvnx8hi_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmpnevnx8hi_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmpeqvnx4si_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmpgevnx4si_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmpgtvnx4si_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmphivnx4si_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmphsvnx4si_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmplevnx4si_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmplovnx4si_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmplsvnx4si_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmpltvnx4si_wide (TARGET_SVE)
#define HAVE_aarch64_pred_cmpnevnx4si_wide (TARGET_SVE)
#define HAVE_while_lesivnx16bi (TARGET_SVE)
#define HAVE_while_ultsivnx16bi (TARGET_SVE)
#define HAVE_while_ulesivnx16bi (TARGET_SVE)
#define HAVE_while_ltsivnx16bi (TARGET_SVE)
#define HAVE_while_gesivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_gtsivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugtsivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugesivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_rwsivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_wrsivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_lesivnx8bi (TARGET_SVE)
#define HAVE_while_ultsivnx8bi (TARGET_SVE)
#define HAVE_while_ulesivnx8bi (TARGET_SVE)
#define HAVE_while_ltsivnx8bi (TARGET_SVE)
#define HAVE_while_gesivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_gtsivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugtsivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugesivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_rwsivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_wrsivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_lesivnx4bi (TARGET_SVE)
#define HAVE_while_ultsivnx4bi (TARGET_SVE)
#define HAVE_while_ulesivnx4bi (TARGET_SVE)
#define HAVE_while_ltsivnx4bi (TARGET_SVE)
#define HAVE_while_gesivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_gtsivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugtsivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugesivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_rwsivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_wrsivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_lesivnx2bi (TARGET_SVE)
#define HAVE_while_ultsivnx2bi (TARGET_SVE)
#define HAVE_while_ulesivnx2bi (TARGET_SVE)
#define HAVE_while_ltsivnx2bi (TARGET_SVE)
#define HAVE_while_gesivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_gtsivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugtsivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugesivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_rwsivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_wrsivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ledivnx16bi (TARGET_SVE)
#define HAVE_while_ultdivnx16bi (TARGET_SVE)
#define HAVE_while_uledivnx16bi (TARGET_SVE)
#define HAVE_while_ltdivnx16bi (TARGET_SVE)
#define HAVE_while_gedivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_gtdivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugtdivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugedivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_rwdivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_wrdivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ledivnx8bi (TARGET_SVE)
#define HAVE_while_ultdivnx8bi (TARGET_SVE)
#define HAVE_while_uledivnx8bi (TARGET_SVE)
#define HAVE_while_ltdivnx8bi (TARGET_SVE)
#define HAVE_while_gedivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_gtdivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugtdivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugedivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_rwdivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_wrdivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ledivnx4bi (TARGET_SVE)
#define HAVE_while_ultdivnx4bi (TARGET_SVE)
#define HAVE_while_uledivnx4bi (TARGET_SVE)
#define HAVE_while_ltdivnx4bi (TARGET_SVE)
#define HAVE_while_gedivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_gtdivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugtdivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugedivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_rwdivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_wrdivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ledivnx2bi (TARGET_SVE)
#define HAVE_while_ultdivnx2bi (TARGET_SVE)
#define HAVE_while_uledivnx2bi (TARGET_SVE)
#define HAVE_while_ltdivnx2bi (TARGET_SVE)
#define HAVE_while_gedivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_gtdivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugtdivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugedivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_rwdivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_wrdivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
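/* The *_ptest variants that follow appear to be the flag-setting forms
   of the WHILE patterns above, i.e. the combination of a WHILE with a
   PTEST-style use of its condition-code result; that reading is an
   inference from the names, not something the generated text states.
   Conditions written as ((TARGET_SVE) && (TARGET_SVE2)) are the insn's
   own condition conjoined with its iterator's condition, which the
   generator emits as two separately parenthesised terms.  */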
#define HAVE_while_lesivnx16bi_ptest (TARGET_SVE)
#define HAVE_while_ultsivnx16bi_ptest (TARGET_SVE)
#define HAVE_while_ulesivnx16bi_ptest (TARGET_SVE)
#define HAVE_while_ltsivnx16bi_ptest (TARGET_SVE)
#define HAVE_while_gesivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_gtsivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugtsivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugesivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_rwsivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_wrsivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_lesivnx8bi_ptest (TARGET_SVE)
#define HAVE_while_ultsivnx8bi_ptest (TARGET_SVE)
#define HAVE_while_ulesivnx8bi_ptest (TARGET_SVE)
#define HAVE_while_ltsivnx8bi_ptest (TARGET_SVE)
#define HAVE_while_gesivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_gtsivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugtsivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugesivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_rwsivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_wrsivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_lesivnx4bi_ptest (TARGET_SVE)
#define HAVE_while_ultsivnx4bi_ptest (TARGET_SVE)
#define HAVE_while_ulesivnx4bi_ptest (TARGET_SVE)
#define HAVE_while_ltsivnx4bi_ptest (TARGET_SVE)
#define HAVE_while_gesivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_gtsivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugtsivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugesivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_rwsivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_wrsivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_lesivnx2bi_ptest (TARGET_SVE)
#define HAVE_while_ultsivnx2bi_ptest (TARGET_SVE)
#define HAVE_while_ulesivnx2bi_ptest (TARGET_SVE)
#define HAVE_while_ltsivnx2bi_ptest (TARGET_SVE)
#define HAVE_while_gesivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_gtsivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugtsivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugesivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_rwsivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_wrsivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ledivnx16bi_ptest (TARGET_SVE)
#define HAVE_while_ultdivnx16bi_ptest (TARGET_SVE)
#define HAVE_while_uledivnx16bi_ptest (TARGET_SVE)
#define HAVE_while_ltdivnx16bi_ptest (TARGET_SVE)
#define HAVE_while_gedivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_gtdivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugtdivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugedivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_rwdivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_wrdivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ledivnx8bi_ptest (TARGET_SVE)
#define HAVE_while_ultdivnx8bi_ptest (TARGET_SVE)
#define HAVE_while_uledivnx8bi_ptest (TARGET_SVE)
#define HAVE_while_ltdivnx8bi_ptest (TARGET_SVE)
#define HAVE_while_gedivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_gtdivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugtdivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugedivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_rwdivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_wrdivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ledivnx4bi_ptest (TARGET_SVE)
#define HAVE_while_ultdivnx4bi_ptest (TARGET_SVE)
#define HAVE_while_uledivnx4bi_ptest (TARGET_SVE)
#define HAVE_while_ltdivnx4bi_ptest (TARGET_SVE)
#define HAVE_while_gedivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_gtdivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugtdivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugedivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_rwdivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_wrdivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ledivnx2bi_ptest (TARGET_SVE)
#define HAVE_while_ultdivnx2bi_ptest (TARGET_SVE)
#define HAVE_while_uledivnx2bi_ptest (TARGET_SVE)
#define HAVE_while_ltdivnx2bi_ptest (TARGET_SVE)
#define HAVE_while_gedivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_gtdivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugtdivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_ugedivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_rwdivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_while_wrdivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_aarch64_pred_fcmeqvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmgevnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmgtvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmlevnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmltvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmnevnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmeqvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmgevnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmgtvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmlevnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmltvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmnevnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmeqvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_fcmgevnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_fcmgtvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_fcmlevnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_fcmltvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_fcmnevnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_fcmuovnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmuovnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_fcmuovnx2df (TARGET_SVE)
#define HAVE_vcond_mask_vnx16bivnx16bi (TARGET_SVE)
#define HAVE_vcond_mask_vnx8bivnx8bi (TARGET_SVE)
#define HAVE_vcond_mask_vnx4bivnx4bi (TARGET_SVE)
#define HAVE_vcond_mask_vnx2bivnx2bi (TARGET_SVE)
#define HAVE_aarch64_ptestvnx16bi (TARGET_SVE)
#define HAVE_aarch64_ptestvnx8bi (TARGET_SVE)
#define HAVE_aarch64_ptestvnx4bi (TARGET_SVE)
#define HAVE_aarch64_ptestvnx2bi (TARGET_SVE)
#define HAVE_fold_extract_after_last_vnx16qi (TARGET_SVE)
#define HAVE_fold_extract_last_vnx16qi (TARGET_SVE)
#define HAVE_fold_extract_after_last_vnx8hi (TARGET_SVE)
#define HAVE_fold_extract_last_vnx8hi (TARGET_SVE)
#define HAVE_fold_extract_after_last_vnx4si (TARGET_SVE)
#define HAVE_fold_extract_last_vnx4si (TARGET_SVE)
#define HAVE_fold_extract_after_last_vnx2di (TARGET_SVE)
#define HAVE_fold_extract_last_vnx2di (TARGET_SVE)
#define HAVE_fold_extract_after_last_vnx8bf (TARGET_SVE)
#define HAVE_fold_extract_last_vnx8bf (TARGET_SVE)
#define HAVE_fold_extract_after_last_vnx8hf (TARGET_SVE)
#define HAVE_fold_extract_last_vnx8hf (TARGET_SVE)
#define HAVE_fold_extract_after_last_vnx4sf (TARGET_SVE)
#define HAVE_fold_extract_last_vnx4sf (TARGET_SVE)
#define HAVE_fold_extract_after_last_vnx2df (TARGET_SVE)
#define HAVE_fold_extract_last_vnx2df (TARGET_SVE)
#define HAVE_aarch64_fold_extract_vector_after_last_vnx16qi (TARGET_SVE)
#define HAVE_aarch64_fold_extract_vector_last_vnx16qi (TARGET_SVE)
#define HAVE_aarch64_fold_extract_vector_after_last_vnx8hi (TARGET_SVE)
#define HAVE_aarch64_fold_extract_vector_last_vnx8hi (TARGET_SVE)
#define HAVE_aarch64_fold_extract_vector_after_last_vnx4si (TARGET_SVE)
#define HAVE_aarch64_fold_extract_vector_last_vnx4si (TARGET_SVE)
#define HAVE_aarch64_fold_extract_vector_after_last_vnx2di (TARGET_SVE)
#define HAVE_aarch64_fold_extract_vector_last_vnx2di (TARGET_SVE)
#define HAVE_aarch64_fold_extract_vector_after_last_vnx8bf (TARGET_SVE)
#define HAVE_aarch64_fold_extract_vector_last_vnx8bf (TARGET_SVE)
#define HAVE_aarch64_fold_extract_vector_after_last_vnx8hf (TARGET_SVE)
#define HAVE_aarch64_fold_extract_vector_last_vnx8hf (TARGET_SVE)
#define HAVE_aarch64_fold_extract_vector_after_last_vnx4sf (TARGET_SVE)
#define HAVE_aarch64_fold_extract_vector_last_vnx4sf (TARGET_SVE)
#define HAVE_aarch64_fold_extract_vector_after_last_vnx2df (TARGET_SVE)
#define HAVE_aarch64_fold_extract_vector_last_vnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_sadd_vnx16qi (TARGET_SVE && 32 >= 8)
#define HAVE_aarch64_pred_reduc_uadd_vnx16qi (TARGET_SVE && 64 >= 8)
#define HAVE_aarch64_pred_reduc_sadd_vnx8hi (TARGET_SVE && 32 >= 16)
#define HAVE_aarch64_pred_reduc_uadd_vnx8hi (TARGET_SVE && 64 >= 16)
#define HAVE_aarch64_pred_reduc_sadd_vnx4si (TARGET_SVE && 32 >= 32)
#define HAVE_aarch64_pred_reduc_uadd_vnx4si (TARGET_SVE && 64 >= 32)
#define HAVE_aarch64_pred_reduc_uadd_vnx2di (TARGET_SVE && 64 >= 64)
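/* Conditions of the form (TARGET_SVE && 32 >= 8) above are mode-size
   checks copied from the .md file with the iterator's element and
   accumulator widths already substituted, so each folds to a constant
   at compile time.  Reading the group above: the signed add reduction
   accumulates into at most 32 bits, so it stops at 32-bit elements
   (32 >= 32), while the unsigned form, with a 64-bit accumulator, also
   covers 64-bit elements (64 >= 64).  */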
#define HAVE_aarch64_pred_reduc_and_vnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_ior_vnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_smax_vnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_smin_vnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_umax_vnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_umin_vnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_xor_vnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_and_vnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_ior_vnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_smax_vnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_smin_vnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_umax_vnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_umin_vnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_xor_vnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_and_vnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_ior_vnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_smax_vnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_smin_vnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_umax_vnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_umin_vnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_xor_vnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_and_vnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_ior_vnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_smax_vnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_smin_vnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_umax_vnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_umin_vnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_xor_vnx2di (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_plus_vnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_smax_nan_vnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_smax_vnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_smin_nan_vnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_smin_vnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_plus_vnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_smax_nan_vnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_smax_vnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_smin_nan_vnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_smin_vnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_plus_vnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_smax_nan_vnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_smax_vnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_smin_nan_vnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_reduc_smin_vnx2df (TARGET_SVE)
#define HAVE_mask_fold_left_plus_vnx8hf (TARGET_SVE)
#define HAVE_mask_fold_left_plus_vnx4sf (TARGET_SVE)
#define HAVE_mask_fold_left_plus_vnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_tblvnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_tblvnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_tblvnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_tblvnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_tblvnx8bf (TARGET_SVE)
#define HAVE_aarch64_sve_tblvnx8hf (TARGET_SVE)
#define HAVE_aarch64_sve_tblvnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_tblvnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_compactvnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_compactvnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_compactvnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_compactvnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_dup_lanevnx16qi (TARGET_SVE \
&& IN_RANGE (INTVAL (operands[2]) * GET_MODE_SIZE (QImode), 0, 63))
#define HAVE_aarch64_sve_dup_lanevnx8hi (TARGET_SVE \
&& IN_RANGE (INTVAL (operands[2]) * GET_MODE_SIZE (HImode), 0, 63))
#define HAVE_aarch64_sve_dup_lanevnx4si (TARGET_SVE \
&& IN_RANGE (INTVAL (operands[2]) * GET_MODE_SIZE (SImode), 0, 63))
#define HAVE_aarch64_sve_dup_lanevnx2di (TARGET_SVE \
&& IN_RANGE (INTVAL (operands[2]) * GET_MODE_SIZE (DImode), 0, 63))
#define HAVE_aarch64_sve_dup_lanevnx8bf (TARGET_SVE \
&& IN_RANGE (INTVAL (operands[2]) * GET_MODE_SIZE (BFmode), 0, 63))
#define HAVE_aarch64_sve_dup_lanevnx8hf (TARGET_SVE \
&& IN_RANGE (INTVAL (operands[2]) * GET_MODE_SIZE (HFmode), 0, 63))
#define HAVE_aarch64_sve_dup_lanevnx4sf (TARGET_SVE \
&& IN_RANGE (INTVAL (operands[2]) * GET_MODE_SIZE (SFmode), 0, 63))
#define HAVE_aarch64_sve_dup_lanevnx2df (TARGET_SVE \
&& IN_RANGE (INTVAL (operands[2]) * GET_MODE_SIZE (DFmode), 0, 63))
#define HAVE_aarch64_sve_dupq_lanevnx16qi (TARGET_SVE \
&& (INTVAL (XVECEXP (operands[2], 0, 0)) \
* GET_MODE_SIZE (QImode)) % 16 == 0 \
&& IN_RANGE (INTVAL (XVECEXP (operands[2], 0, 0)) \
* GET_MODE_SIZE (QImode), 0, 63))
#define HAVE_aarch64_sve_dupq_lanevnx8hi (TARGET_SVE \
&& (INTVAL (XVECEXP (operands[2], 0, 0)) \
* GET_MODE_SIZE (HImode)) % 16 == 0 \
&& IN_RANGE (INTVAL (XVECEXP (operands[2], 0, 0)) \
* GET_MODE_SIZE (HImode), 0, 63))
#define HAVE_aarch64_sve_dupq_lanevnx4si (TARGET_SVE \
&& (INTVAL (XVECEXP (operands[2], 0, 0)) \
* GET_MODE_SIZE (SImode)) % 16 == 0 \
&& IN_RANGE (INTVAL (XVECEXP (operands[2], 0, 0)) \
* GET_MODE_SIZE (SImode), 0, 63))
#define HAVE_aarch64_sve_dupq_lanevnx2di (TARGET_SVE \
&& (INTVAL (XVECEXP (operands[2], 0, 0)) \
* GET_MODE_SIZE (DImode)) % 16 == 0 \
&& IN_RANGE (INTVAL (XVECEXP (operands[2], 0, 0)) \
* GET_MODE_SIZE (DImode), 0, 63))
#define HAVE_aarch64_sve_dupq_lanevnx8bf (TARGET_SVE \
&& (INTVAL (XVECEXP (operands[2], 0, 0)) \
* GET_MODE_SIZE (BFmode)) % 16 == 0 \
&& IN_RANGE (INTVAL (XVECEXP (operands[2], 0, 0)) \
* GET_MODE_SIZE (BFmode), 0, 63))
#define HAVE_aarch64_sve_dupq_lanevnx8hf (TARGET_SVE \
&& (INTVAL (XVECEXP (operands[2], 0, 0)) \
* GET_MODE_SIZE (HFmode)) % 16 == 0 \
&& IN_RANGE (INTVAL (XVECEXP (operands[2], 0, 0)) \
* GET_MODE_SIZE (HFmode), 0, 63))
#define HAVE_aarch64_sve_dupq_lanevnx4sf (TARGET_SVE \
&& (INTVAL (XVECEXP (operands[2], 0, 0)) \
* GET_MODE_SIZE (SFmode)) % 16 == 0 \
&& IN_RANGE (INTVAL (XVECEXP (operands[2], 0, 0)) \
* GET_MODE_SIZE (SFmode), 0, 63))
#define HAVE_aarch64_sve_dupq_lanevnx2df (TARGET_SVE \
&& (INTVAL (XVECEXP (operands[2], 0, 0)) \
* GET_MODE_SIZE (DFmode)) % 16 == 0 \
&& IN_RANGE (INTVAL (XVECEXP (operands[2], 0, 0)) \
* GET_MODE_SIZE (DFmode), 0, 63))
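/* Worked example for the dupq_lane conditions above: for
   HAVE_aarch64_sve_dupq_lanevnx4si, a lane index of 4 gives
   4 * GET_MODE_SIZE (SImode) = 16 bytes, which is a multiple of 16 and
   within [0, 63], so the pattern is available for that operand; an
   index whose byte offset is not quadword-aligned, or exceeds 63, fails
   the condition and the pattern is unavailable.  */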
#define HAVE_aarch64_sve_revvnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_revvnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_revvnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_revvnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_revvnx8bf (TARGET_SVE)
#define HAVE_aarch64_sve_revvnx8hf (TARGET_SVE)
#define HAVE_aarch64_sve_revvnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_revvnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_splicevnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_splicevnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_splicevnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_splicevnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_splicevnx8bf (TARGET_SVE)
#define HAVE_aarch64_sve_splicevnx8hf (TARGET_SVE)
#define HAVE_aarch64_sve_splicevnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_splicevnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_zip1vnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_zip2vnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_trn1vnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_trn2vnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_uzp1vnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_uzp2vnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_zip1vnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_zip2vnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_trn1vnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_trn2vnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_uzp1vnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_uzp2vnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_zip1vnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_zip2vnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_trn1vnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_trn2vnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_uzp1vnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_uzp2vnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_zip1vnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_zip2vnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_trn1vnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_trn2vnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_uzp1vnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_uzp2vnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_zip1vnx8bf (TARGET_SVE)
#define HAVE_aarch64_sve_zip2vnx8bf (TARGET_SVE)
#define HAVE_aarch64_sve_trn1vnx8bf (TARGET_SVE)
#define HAVE_aarch64_sve_trn2vnx8bf (TARGET_SVE)
#define HAVE_aarch64_sve_uzp1vnx8bf (TARGET_SVE)
#define HAVE_aarch64_sve_uzp2vnx8bf (TARGET_SVE)
#define HAVE_aarch64_sve_zip1vnx8hf (TARGET_SVE)
#define HAVE_aarch64_sve_zip2vnx8hf (TARGET_SVE)
#define HAVE_aarch64_sve_trn1vnx8hf (TARGET_SVE)
#define HAVE_aarch64_sve_trn2vnx8hf (TARGET_SVE)
#define HAVE_aarch64_sve_uzp1vnx8hf (TARGET_SVE)
#define HAVE_aarch64_sve_uzp2vnx8hf (TARGET_SVE)
#define HAVE_aarch64_sve_zip1vnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_zip2vnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_trn1vnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_trn2vnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_uzp1vnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_uzp2vnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_zip1vnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_zip2vnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_trn1vnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_trn2vnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_uzp1vnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_uzp2vnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_zip1qvnx16qi (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_zip2qvnx16qi (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_trn1qvnx16qi (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_trn2qvnx16qi (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_uzp1qvnx16qi (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_uzp2qvnx16qi (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_zip1qvnx8hi (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_zip2qvnx8hi (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_trn1qvnx8hi (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_trn2qvnx8hi (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_uzp1qvnx8hi (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_uzp2qvnx8hi (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_zip1qvnx4si (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_zip2qvnx4si (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_trn1qvnx4si (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_trn2qvnx4si (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_uzp1qvnx4si (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_uzp2qvnx4si (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_zip1qvnx2di (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_zip2qvnx2di (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_trn1qvnx2di (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_trn2qvnx2di (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_uzp1qvnx2di (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_uzp2qvnx2di (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_zip1qvnx8bf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_zip2qvnx8bf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_trn1qvnx8bf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_trn2qvnx8bf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_uzp1qvnx8bf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_uzp2qvnx8bf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_zip1qvnx8hf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_zip2qvnx8hf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_trn1qvnx8hf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_trn2qvnx8hf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_uzp1qvnx8hf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_uzp2qvnx8hf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_zip1qvnx4sf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_zip2qvnx4sf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_trn1qvnx4sf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_trn2qvnx4sf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_uzp1qvnx4sf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_uzp2qvnx4sf (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_zip1qvnx2df (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_zip2qvnx2df (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_trn1qvnx2df (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_trn2qvnx2df (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_uzp1qvnx2df (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_uzp2qvnx2df (TARGET_SVE_F64MM)
#define HAVE_aarch64_sve_extvnx16qi (TARGET_SVE \
&& IN_RANGE (INTVAL (operands[3]) * GET_MODE_SIZE (QImode), 0, 255))
#define HAVE_aarch64_sve_extvnx8hi (TARGET_SVE \
&& IN_RANGE (INTVAL (operands[3]) * GET_MODE_SIZE (HImode), 0, 255))
#define HAVE_aarch64_sve_extvnx4si (TARGET_SVE \
&& IN_RANGE (INTVAL (operands[3]) * GET_MODE_SIZE (SImode), 0, 255))
#define HAVE_aarch64_sve_extvnx2di (TARGET_SVE \
&& IN_RANGE (INTVAL (operands[3]) * GET_MODE_SIZE (DImode), 0, 255))
#define HAVE_aarch64_sve_extvnx8bf (TARGET_SVE \
&& IN_RANGE (INTVAL (operands[3]) * GET_MODE_SIZE (BFmode), 0, 255))
#define HAVE_aarch64_sve_extvnx8hf (TARGET_SVE \
&& IN_RANGE (INTVAL (operands[3]) * GET_MODE_SIZE (HFmode), 0, 255))
#define HAVE_aarch64_sve_extvnx4sf (TARGET_SVE \
&& IN_RANGE (INTVAL (operands[3]) * GET_MODE_SIZE (SFmode), 0, 255))
#define HAVE_aarch64_sve_extvnx2df (TARGET_SVE \
&& IN_RANGE (INTVAL (operands[3]) * GET_MODE_SIZE (DFmode), 0, 255))
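/* The conditions above scale the starting element index to a byte
   offset and require it to lie in [0, 255], matching an 8-bit byte
   immediate; that this corresponds to the EXT instruction is an
   inference from the pattern name, not stated in the generated text.  */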
#define HAVE_aarch64_sve_revvnx16bi (TARGET_SVE)
#define HAVE_aarch64_sve_revvnx8bi (TARGET_SVE)
#define HAVE_aarch64_sve_revvnx4bi (TARGET_SVE)
#define HAVE_aarch64_sve_revvnx2bi (TARGET_SVE)
#define HAVE_aarch64_sve_zip1vnx16bi (TARGET_SVE)
#define HAVE_aarch64_sve_zip2vnx16bi (TARGET_SVE)
#define HAVE_aarch64_sve_trn1vnx16bi (TARGET_SVE)
#define HAVE_aarch64_sve_trn2vnx16bi (TARGET_SVE)
#define HAVE_aarch64_sve_uzp1vnx16bi (TARGET_SVE)
#define HAVE_aarch64_sve_uzp2vnx16bi (TARGET_SVE)
#define HAVE_aarch64_sve_zip1vnx8bi (TARGET_SVE)
#define HAVE_aarch64_sve_zip2vnx8bi (TARGET_SVE)
#define HAVE_aarch64_sve_trn1vnx8bi (TARGET_SVE)
#define HAVE_aarch64_sve_trn2vnx8bi (TARGET_SVE)
#define HAVE_aarch64_sve_uzp1vnx8bi (TARGET_SVE)
#define HAVE_aarch64_sve_uzp2vnx8bi (TARGET_SVE)
#define HAVE_aarch64_sve_zip1vnx4bi (TARGET_SVE)
#define HAVE_aarch64_sve_zip2vnx4bi (TARGET_SVE)
#define HAVE_aarch64_sve_trn1vnx4bi (TARGET_SVE)
#define HAVE_aarch64_sve_trn2vnx4bi (TARGET_SVE)
#define HAVE_aarch64_sve_uzp1vnx4bi (TARGET_SVE)
#define HAVE_aarch64_sve_uzp2vnx4bi (TARGET_SVE)
#define HAVE_aarch64_sve_zip1vnx2bi (TARGET_SVE)
#define HAVE_aarch64_sve_zip2vnx2bi (TARGET_SVE)
#define HAVE_aarch64_sve_trn1vnx2bi (TARGET_SVE)
#define HAVE_aarch64_sve_trn2vnx2bi (TARGET_SVE)
#define HAVE_aarch64_sve_uzp1vnx2bi (TARGET_SVE)
#define HAVE_aarch64_sve_uzp2vnx2bi (TARGET_SVE)
#define HAVE_vec_pack_trunc_vnx8hi (TARGET_SVE)
#define HAVE_vec_pack_trunc_vnx4si (TARGET_SVE)
#define HAVE_vec_pack_trunc_vnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_sunpkhi_vnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_uunpkhi_vnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_sunpklo_vnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_uunpklo_vnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_sunpkhi_vnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_uunpkhi_vnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_sunpklo_vnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_uunpklo_vnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_sunpkhi_vnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_uunpkhi_vnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_sunpklo_vnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_uunpklo_vnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_fix_trunc_nontruncvnx8hfvnx8hi (TARGET_SVE && 16 >= 16)
#define HAVE_aarch64_sve_fixuns_trunc_nontruncvnx8hfvnx8hi (TARGET_SVE && 16 >= 16)
#define HAVE_aarch64_sve_fix_trunc_nontruncvnx8hfvnx4si (TARGET_SVE && 32 >= 16)
#define HAVE_aarch64_sve_fixuns_trunc_nontruncvnx8hfvnx4si (TARGET_SVE && 32 >= 16)
#define HAVE_aarch64_sve_fix_trunc_nontruncvnx4sfvnx4si (TARGET_SVE && 32 >= 32)
#define HAVE_aarch64_sve_fixuns_trunc_nontruncvnx4sfvnx4si (TARGET_SVE && 32 >= 32)
#define HAVE_aarch64_sve_fix_trunc_nontruncvnx8hfvnx2di (TARGET_SVE && 64 >= 16)
#define HAVE_aarch64_sve_fixuns_trunc_nontruncvnx8hfvnx2di (TARGET_SVE && 64 >= 16)
#define HAVE_aarch64_sve_fix_trunc_nontruncvnx4sfvnx2di (TARGET_SVE && 64 >= 32)
#define HAVE_aarch64_sve_fixuns_trunc_nontruncvnx4sfvnx2di (TARGET_SVE && 64 >= 32)
#define HAVE_aarch64_sve_fix_trunc_nontruncvnx2dfvnx2di (TARGET_SVE && 64 >= 64)
#define HAVE_aarch64_sve_fixuns_trunc_nontruncvnx2dfvnx2di (TARGET_SVE && 64 >= 64)
#define HAVE_aarch64_sve_fix_trunc_truncvnx2dfvnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_fixuns_trunc_truncvnx2dfvnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_float_nonextendvnx8hivnx8hf (TARGET_SVE && 16 >= 16)
#define HAVE_aarch64_sve_floatuns_nonextendvnx8hivnx8hf (TARGET_SVE && 16 >= 16)
#define HAVE_aarch64_sve_float_nonextendvnx4sivnx8hf (TARGET_SVE && 32 >= 16)
#define HAVE_aarch64_sve_floatuns_nonextendvnx4sivnx8hf (TARGET_SVE && 32 >= 16)
#define HAVE_aarch64_sve_float_nonextendvnx4sivnx4sf (TARGET_SVE && 32 >= 32)
#define HAVE_aarch64_sve_floatuns_nonextendvnx4sivnx4sf (TARGET_SVE && 32 >= 32)
#define HAVE_aarch64_sve_float_nonextendvnx2divnx8hf (TARGET_SVE && 64 >= 16)
#define HAVE_aarch64_sve_floatuns_nonextendvnx2divnx8hf (TARGET_SVE && 64 >= 16)
#define HAVE_aarch64_sve_float_nonextendvnx2divnx4sf (TARGET_SVE && 64 >= 32)
#define HAVE_aarch64_sve_floatuns_nonextendvnx2divnx4sf (TARGET_SVE && 64 >= 32)
#define HAVE_aarch64_sve_float_nonextendvnx2divnx2df (TARGET_SVE && 64 >= 64)
#define HAVE_aarch64_sve_floatuns_nonextendvnx2divnx2df (TARGET_SVE && 64 >= 64)
#define HAVE_aarch64_sve_float_extendvnx4sivnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_floatuns_extendvnx4sivnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_fcvt_truncvnx4sfvnx8hf (TARGET_SVE && 32 > 16)
#define HAVE_aarch64_sve_fcvt_truncvnx2dfvnx8hf (TARGET_SVE && 64 > 16)
#define HAVE_aarch64_sve_fcvt_truncvnx2dfvnx4sf (TARGET_SVE && 64 > 32)
#define HAVE_aarch64_sve_fcvt_truncvnx4sfvnx8bf (TARGET_SVE_BF16)
#define HAVE_aarch64_sve_cvtntvnx8bf (TARGET_SVE_BF16)
#define HAVE_aarch64_sve_fcvt_nontruncvnx8hfvnx4sf (TARGET_SVE && 32 > 16)
#define HAVE_aarch64_sve_fcvt_nontruncvnx8hfvnx2df (TARGET_SVE && 64 > 16)
#define HAVE_aarch64_sve_fcvt_nontruncvnx4sfvnx2df (TARGET_SVE && 64 > 32)
#define HAVE_vec_pack_trunc_vnx8bi (TARGET_SVE)
#define HAVE_vec_pack_trunc_vnx4bi (TARGET_SVE)
#define HAVE_vec_pack_trunc_vnx2bi (TARGET_SVE)
#define HAVE_aarch64_sve_punpklo_vnx16bi (TARGET_SVE)
#define HAVE_aarch64_sve_punpkhi_vnx16bi (TARGET_SVE)
#define HAVE_aarch64_sve_punpklo_vnx8bi (TARGET_SVE)
#define HAVE_aarch64_sve_punpkhi_vnx8bi (TARGET_SVE)
#define HAVE_aarch64_sve_punpklo_vnx4bi (TARGET_SVE)
#define HAVE_aarch64_sve_punpkhi_vnx4bi (TARGET_SVE)
#define HAVE_aarch64_brka (TARGET_SVE)
#define HAVE_aarch64_brkb (TARGET_SVE)
#define HAVE_aarch64_brkn (TARGET_SVE)
#define HAVE_aarch64_brkpa (TARGET_SVE)
#define HAVE_aarch64_brkpb (TARGET_SVE)
#define HAVE_aarch64_sve_pfirstvnx16bi (TARGET_SVE && 8 >= 8)
#define HAVE_aarch64_sve_pnextvnx16bi (TARGET_SVE && 64 >= 8)
#define HAVE_aarch64_sve_pnextvnx8bi (TARGET_SVE && 64 >= 16)
#define HAVE_aarch64_sve_pnextvnx4bi (TARGET_SVE && 64 >= 32)
#define HAVE_aarch64_sve_pnextvnx2bi (TARGET_SVE && 64 >= 64)
#define HAVE_aarch64_sve_cnt_pat (TARGET_SVE)
#define HAVE_aarch64_sve_incdi_pat (TARGET_SVE)
#define HAVE_aarch64_sve_sqincdi_pat (TARGET_SVE)
#define HAVE_aarch64_sve_uqincdi_pat (TARGET_SVE)
#define HAVE_aarch64_sve_sqincsi_pat (TARGET_SVE)
#define HAVE_aarch64_sve_uqincsi_pat (TARGET_SVE)
#define HAVE_aarch64_sve_incvnx2di_pat (TARGET_SVE)
#define HAVE_aarch64_sve_sqincvnx2di_pat (TARGET_SVE)
#define HAVE_aarch64_sve_uqincvnx2di_pat (TARGET_SVE)
#define HAVE_aarch64_sve_incvnx4si_pat (TARGET_SVE)
#define HAVE_aarch64_sve_sqincvnx4si_pat (TARGET_SVE)
#define HAVE_aarch64_sve_uqincvnx4si_pat (TARGET_SVE)
#define HAVE_aarch64_sve_decdi_pat (TARGET_SVE)
#define HAVE_aarch64_sve_sqdecdi_pat (TARGET_SVE)
#define HAVE_aarch64_sve_uqdecdi_pat (TARGET_SVE)
#define HAVE_aarch64_sve_sqdecsi_pat (TARGET_SVE)
#define HAVE_aarch64_sve_uqdecsi_pat (TARGET_SVE)
#define HAVE_aarch64_sve_decvnx2di_pat (TARGET_SVE)
#define HAVE_aarch64_sve_sqdecvnx2di_pat (TARGET_SVE)
#define HAVE_aarch64_sve_uqdecvnx2di_pat (TARGET_SVE)
#define HAVE_aarch64_sve_decvnx4si_pat (TARGET_SVE)
#define HAVE_aarch64_sve_sqdecvnx4si_pat (TARGET_SVE)
#define HAVE_aarch64_sve_uqdecvnx4si_pat (TARGET_SVE)
#define HAVE_aarch64_pred_cntpvnx16bi (TARGET_SVE)
#define HAVE_aarch64_pred_cntpvnx8bi (TARGET_SVE)
#define HAVE_aarch64_pred_cntpvnx4bi (TARGET_SVE)
#define HAVE_aarch64_pred_cntpvnx2bi (TARGET_SVE)
#define HAVE_aarch64_gather_ldntvnx4si (TARGET_SVE2)
#define HAVE_aarch64_gather_ldntvnx2di (TARGET_SVE2)
#define HAVE_aarch64_gather_ldntvnx4sf (TARGET_SVE2)
#define HAVE_aarch64_gather_ldntvnx2df (TARGET_SVE2)
#define HAVE_aarch64_gather_ldnt_extendvnx4sivnx4qi (TARGET_SVE2 \
&& (~0x43 & 0x41) == 0)
#define HAVE_aarch64_gather_ldnt_zero_extendvnx4sivnx4qi (TARGET_SVE2 \
&& (~0x43 & 0x41) == 0)
#define HAVE_aarch64_gather_ldnt_extendvnx2divnx2qi (TARGET_SVE2 \
&& (~0x27 & 0x21) == 0)
#define HAVE_aarch64_gather_ldnt_zero_extendvnx2divnx2qi (TARGET_SVE2 \
&& (~0x27 & 0x21) == 0)
#define HAVE_aarch64_gather_ldnt_extendvnx4sivnx4hi (TARGET_SVE2 \
&& (~0x43 & 0x42) == 0)
#define HAVE_aarch64_gather_ldnt_zero_extendvnx4sivnx4hi (TARGET_SVE2 \
&& (~0x43 & 0x42) == 0)
#define HAVE_aarch64_gather_ldnt_extendvnx2divnx2hi (TARGET_SVE2 \
&& (~0x27 & 0x22) == 0)
#define HAVE_aarch64_gather_ldnt_zero_extendvnx2divnx2hi (TARGET_SVE2 \
&& (~0x27 & 0x22) == 0)
#define HAVE_aarch64_gather_ldnt_extendvnx2divnx2si (TARGET_SVE2 \
&& (~0x27 & 0x24) == 0)
#define HAVE_aarch64_gather_ldnt_zero_extendvnx2divnx2si (TARGET_SVE2 \
&& (~0x27 & 0x24) == 0)
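/* Masks such as (~0x43 & 0x41) in the extending-gather conditions above
   look like mode masks substituted by the generator (plausibly the .md
   self_mask/narrower_mask attributes; that attribution is an
   assumption).  Each such expression evaluates to 0, e.g.
   ~0x43 & 0x41 = 0xbc & 0x41 = 0, so every definition in this group
   effectively reduces to TARGET_SVE2 alone.  */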
#define HAVE_aarch64_scatter_stntvnx4si (TARGET_SVE2)
#define HAVE_aarch64_scatter_stntvnx2di (TARGET_SVE2)
#define HAVE_aarch64_scatter_stntvnx4sf (TARGET_SVE2)
#define HAVE_aarch64_scatter_stntvnx2df (TARGET_SVE2)
#define HAVE_aarch64_scatter_stnt_vnx4sivnx4qi (TARGET_SVE2 \
&& (~0x43 & 0x41) == 0)
#define HAVE_aarch64_scatter_stnt_vnx2divnx2qi (TARGET_SVE2 \
&& (~0x27 & 0x21) == 0)
#define HAVE_aarch64_scatter_stnt_vnx4sivnx4hi (TARGET_SVE2 \
&& (~0x43 & 0x42) == 0)
#define HAVE_aarch64_scatter_stnt_vnx2divnx2hi (TARGET_SVE2 \
&& (~0x27 & 0x22) == 0)
#define HAVE_aarch64_scatter_stnt_vnx2divnx2si (TARGET_SVE2 \
&& (~0x27 & 0x24) == 0)
#define HAVE_aarch64_mul_lane_vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_mul_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_mul_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_suqaddvnx16qi_const (TARGET_SVE2)
#define HAVE_aarch64_sve_suqaddvnx8hi_const (TARGET_SVE2)
#define HAVE_aarch64_sve_suqaddvnx4si_const (TARGET_SVE2)
#define HAVE_aarch64_sve_suqaddvnx2di_const (TARGET_SVE2)
#define HAVE_aarch64_pred_shaddvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_pred_shsubvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_pred_sqrshlvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_pred_srhaddvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_pred_srshlvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_pred_uhaddvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_pred_uhsubvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_pred_uqrshlvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_pred_urhaddvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_pred_urshlvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_pred_shaddvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_pred_shsubvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_pred_sqrshlvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_pred_srhaddvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_pred_srshlvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_pred_uhaddvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_pred_uhsubvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_pred_uqrshlvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_pred_urhaddvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_pred_urshlvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_pred_shaddvnx4si (TARGET_SVE2)
#define HAVE_aarch64_pred_shsubvnx4si (TARGET_SVE2)
#define HAVE_aarch64_pred_sqrshlvnx4si (TARGET_SVE2)
#define HAVE_aarch64_pred_srhaddvnx4si (TARGET_SVE2)
#define HAVE_aarch64_pred_srshlvnx4si (TARGET_SVE2)
#define HAVE_aarch64_pred_uhaddvnx4si (TARGET_SVE2)
#define HAVE_aarch64_pred_uhsubvnx4si (TARGET_SVE2)
#define HAVE_aarch64_pred_uqrshlvnx4si (TARGET_SVE2)
#define HAVE_aarch64_pred_urhaddvnx4si (TARGET_SVE2)
#define HAVE_aarch64_pred_urshlvnx4si (TARGET_SVE2)
#define HAVE_aarch64_pred_shaddvnx2di (TARGET_SVE2)
#define HAVE_aarch64_pred_shsubvnx2di (TARGET_SVE2)
#define HAVE_aarch64_pred_sqrshlvnx2di (TARGET_SVE2)
#define HAVE_aarch64_pred_srhaddvnx2di (TARGET_SVE2)
#define HAVE_aarch64_pred_srshlvnx2di (TARGET_SVE2)
#define HAVE_aarch64_pred_uhaddvnx2di (TARGET_SVE2)
#define HAVE_aarch64_pred_uhsubvnx2di (TARGET_SVE2)
#define HAVE_aarch64_pred_uqrshlvnx2di (TARGET_SVE2)
#define HAVE_aarch64_pred_urhaddvnx2di (TARGET_SVE2)
#define HAVE_aarch64_pred_urshlvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqdmulhvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmulhvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqdmulhvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmulhvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqdmulhvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmulhvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqdmulhvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmulhvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqdmulh_lane_vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmulh_lane_vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqdmulh_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmulh_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqdmulh_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmulh_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_pred_sqshlvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_pred_uqshlvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_pred_sqshlvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_pred_uqshlvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_pred_sqshlvnx4si (TARGET_SVE2)
#define HAVE_aarch64_pred_uqshlvnx4si (TARGET_SVE2)
#define HAVE_aarch64_pred_sqshlvnx2di (TARGET_SVE2)
#define HAVE_aarch64_pred_uqshlvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_adclbvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_adcltvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_eorbtvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_eortbvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_sbclbvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_sbcltvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmlahvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmlshvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_adclbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_adcltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_eorbtvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_eortbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sbclbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sbcltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmlahvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmlshvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_adclbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_adcltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_eorbtvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_eortbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sbclbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sbcltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmlahvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmlshvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_adclbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_adcltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_eorbtvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_eortbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sbclbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sbcltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmlahvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmlshvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmlah_lane_vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmlsh_lane_vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmlah_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmlsh_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmlah_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdmlsh_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_add_mul_lane_vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_add_mul_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_add_mul_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_mul_lane_vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_mul_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_mul_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve2_xarvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve2_xarvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve2_xarvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve2_xarvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve2_bcaxvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve2_bcaxvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve2_bcaxvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve2_bcaxvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve2_eor3vnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve2_eor3vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve2_eor3vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve2_eor3vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_add_srshrvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_add_urshrvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_add_srshrvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_add_urshrvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_add_srshrvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_add_urshrvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_add_srshrvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_add_urshrvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_slivnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_srivnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_slivnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_srivnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_slivnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_srivnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_slivnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_srivnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_saddwbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_saddwtvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_ssubwbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_ssubwtvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_uaddwbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_uaddwtvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_usubwbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_usubwtvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_saddwbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_saddwtvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_ssubwbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_ssubwtvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_uaddwbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_uaddwtvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_usubwbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_usubwtvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_saddwbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_saddwtvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_ssubwbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_ssubwtvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_uaddwbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_uaddwtvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_usubwbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_usubwtvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sabdlbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sabdltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_saddlbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_saddlbtvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_saddltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_smullbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_smulltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqdmullbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqdmulltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_ssublbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_ssublbtvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_ssubltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_ssubltbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_uabdlbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_uabdltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_uaddlbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_uaddltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_umullbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_umulltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_usublbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_usubltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sabdlbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sabdltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_saddlbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_saddlbtvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_saddltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_smullbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_smulltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqdmullbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqdmulltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_ssublbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_ssublbtvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_ssubltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_ssubltbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_uabdlbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_uabdltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_uaddlbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_uaddltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_umullbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_umulltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_usublbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_usubltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sabdlbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sabdltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_saddlbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_saddlbtvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_saddltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_smullbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_smulltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqdmullbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqdmulltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_ssublbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_ssublbtvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_ssubltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_ssubltbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_uabdlbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_uabdltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_uaddlbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_uaddltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_umullbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_umulltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_usublbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_usubltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_smullb_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_smullt_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqdmullb_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqdmullt_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_umullb_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_umullt_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_smullb_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_smullt_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqdmullb_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqdmullt_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_umullb_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_umullt_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sshllbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sshlltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_ushllbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_ushlltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sshllbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sshlltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_ushllbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_ushlltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sshllbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sshlltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_ushllbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_ushlltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_add_sabdlbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_add_sabdltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_add_smullbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_add_smulltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_add_uabdlbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_add_uabdltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_add_umullbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_add_umulltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_add_sabdlbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_add_sabdltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_add_smullbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_add_smulltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_add_uabdlbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_add_uabdltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_add_umullbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_add_umulltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_add_sabdlbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_add_sabdltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_add_smullbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_add_smulltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_add_uabdlbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_add_uabdltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_add_umullbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_add_umulltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_add_smullb_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_add_smullt_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_add_umullb_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_add_umullt_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_add_smullb_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_add_smullt_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_add_umullb_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_add_umullt_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_qadd_sqdmullbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_qadd_sqdmullbtvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_qadd_sqdmulltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_qadd_sqdmullbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_qadd_sqdmullbtvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_qadd_sqdmulltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_qadd_sqdmullbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_qadd_sqdmullbtvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_qadd_sqdmulltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_qadd_sqdmullb_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_qadd_sqdmullt_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_qadd_sqdmullb_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_qadd_sqdmullt_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_smullbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_smulltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_umullbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_umulltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_smullbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_smulltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_umullbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_umulltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_smullbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_smulltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_umullbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_umulltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_smullb_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_smullt_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_umullb_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_umullt_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_smullb_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_smullt_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_umullb_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sub_umullt_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_qsub_sqdmullbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_qsub_sqdmullbtvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_qsub_sqdmulltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_qsub_sqdmullbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_qsub_sqdmullbtvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_qsub_sqdmulltvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_qsub_sqdmullbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_qsub_sqdmullbtvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_qsub_sqdmulltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_qsub_sqdmullb_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_qsub_sqdmullt_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_qsub_sqdmullb_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_qsub_sqdmullt_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_fmlalbvnx4sf (TARGET_SVE2)
#define HAVE_aarch64_sve_fmlaltvnx4sf (TARGET_SVE2)
#define HAVE_aarch64_sve_fmlslbvnx4sf (TARGET_SVE2)
#define HAVE_aarch64_sve_fmlsltvnx4sf (TARGET_SVE2)
#define HAVE_aarch64_fmlalb_lane_vnx4sf (TARGET_SVE2)
#define HAVE_aarch64_fmlalt_lane_vnx4sf (TARGET_SVE2)
#define HAVE_aarch64_fmlslb_lane_vnx4sf (TARGET_SVE2)
#define HAVE_aarch64_fmlslt_lane_vnx4sf (TARGET_SVE2)
#define HAVE_aarch64_sve_sqxtnbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqxtunbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_uqxtnbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqxtnbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqxtunbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_uqxtnbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqxtnbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqxtunbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_uqxtnbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqxtntvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqxtuntvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_uqxtntvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqxtntvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqxtuntvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_uqxtntvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqxtntvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqxtuntvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_uqxtntvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_addhnbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_raddhnbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_rsubhnbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_subhnbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_addhnbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_raddhnbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_rsubhnbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_subhnbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_addhnbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_raddhnbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_rsubhnbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_subhnbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_addhntvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_raddhntvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_rsubhntvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_subhntvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_addhntvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_raddhntvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_rsubhntvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_subhntvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_addhntvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_raddhntvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_rsubhntvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_subhntvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_rshrnbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_shrnbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrshrnbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrshrunbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqshrnbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqshrunbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_uqrshrnbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_uqshrnbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_rshrnbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_shrnbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrshrnbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrshrunbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqshrnbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqshrunbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_uqrshrnbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_uqshrnbvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_rshrnbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_shrnbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrshrnbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrshrunbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqshrnbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqshrunbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_uqrshrnbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_uqshrnbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_rshrntvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_shrntvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrshrntvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrshruntvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqshrntvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqshruntvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_uqrshrntvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_uqshrntvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_rshrntvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_shrntvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrshrntvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrshruntvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqshrntvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqshruntvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_uqrshrntvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_uqshrntvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_rshrntvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_shrntvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrshrntvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrshruntvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqshrntvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqshruntvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_uqrshrntvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_uqshrntvnx2di (TARGET_SVE2)
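/* The aarch64_pred_* patterns below take an explicit governing
   predicate operand; these pairwise operations map to the SVE2 ADDP,
   SMAXP, SMINP, UMAXP, UMINP and FADDP/FMAXP/FMAXNMP/FMINP/FMINNMP
   instructions.  */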
#define HAVE_aarch64_pred_addpvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_pred_smaxpvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_pred_sminpvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_pred_umaxpvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_pred_uminpvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_pred_addpvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_pred_smaxpvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_pred_sminpvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_pred_umaxpvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_pred_uminpvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_pred_addpvnx4si (TARGET_SVE2)
#define HAVE_aarch64_pred_smaxpvnx4si (TARGET_SVE2)
#define HAVE_aarch64_pred_sminpvnx4si (TARGET_SVE2)
#define HAVE_aarch64_pred_umaxpvnx4si (TARGET_SVE2)
#define HAVE_aarch64_pred_uminpvnx4si (TARGET_SVE2)
#define HAVE_aarch64_pred_addpvnx2di (TARGET_SVE2)
#define HAVE_aarch64_pred_smaxpvnx2di (TARGET_SVE2)
#define HAVE_aarch64_pred_sminpvnx2di (TARGET_SVE2)
#define HAVE_aarch64_pred_umaxpvnx2di (TARGET_SVE2)
#define HAVE_aarch64_pred_uminpvnx2di (TARGET_SVE2)
#define HAVE_aarch64_pred_faddpvnx8hf (TARGET_SVE2)
#define HAVE_aarch64_pred_fmaxpvnx8hf (TARGET_SVE2)
#define HAVE_aarch64_pred_fmaxnmpvnx8hf (TARGET_SVE2)
#define HAVE_aarch64_pred_fminpvnx8hf (TARGET_SVE2)
#define HAVE_aarch64_pred_fminnmpvnx8hf (TARGET_SVE2)
#define HAVE_aarch64_pred_faddpvnx4sf (TARGET_SVE2)
#define HAVE_aarch64_pred_fmaxpvnx4sf (TARGET_SVE2)
#define HAVE_aarch64_pred_fmaxnmpvnx4sf (TARGET_SVE2)
#define HAVE_aarch64_pred_fminpvnx4sf (TARGET_SVE2)
#define HAVE_aarch64_pred_fminnmpvnx4sf (TARGET_SVE2)
#define HAVE_aarch64_pred_faddpvnx2df (TARGET_SVE2)
#define HAVE_aarch64_pred_fmaxpvnx2df (TARGET_SVE2)
#define HAVE_aarch64_pred_fmaxnmpvnx2df (TARGET_SVE2)
#define HAVE_aarch64_pred_fminpvnx2df (TARGET_SVE2)
#define HAVE_aarch64_pred_fminnmpvnx2df (TARGET_SVE2)
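/* 90/180/270 in the complex-arithmetic names encode the rotation
   immediate of the underlying CADD, SQCADD, CMLA, SQRDCMLAH and CDOT
   instructions.  */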
#define HAVE_aarch64_sve_cadd90vnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_cadd270vnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqcadd90vnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqcadd270vnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_cadd90vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_cadd270vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqcadd90vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqcadd270vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_cadd90vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_cadd270vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqcadd90vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqcadd270vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_cadd90vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_cadd270vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqcadd90vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqcadd270vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_cmlavnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_cmla90vnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_cmla180vnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_cmla270vnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdcmlahvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdcmlah90vnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdcmlah180vnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdcmlah270vnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_cmlavnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_cmla90vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_cmla180vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_cmla270vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdcmlahvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdcmlah90vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdcmlah180vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdcmlah270vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_cmlavnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_cmla90vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_cmla180vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_cmla270vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdcmlahvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdcmlah90vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdcmlah180vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdcmlah270vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_cmlavnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_cmla90vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_cmla180vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_cmla270vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdcmlahvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdcmlah90vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdcmlah180vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_sqrdcmlah270vnx2di (TARGET_SVE2)
#define HAVE_aarch64_cmla_lane_vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_cmla90_lane_vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_cmla180_lane_vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_cmla270_lane_vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sqrdcmlah_lane_vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sqrdcmlah90_lane_vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sqrdcmlah180_lane_vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sqrdcmlah270_lane_vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_cmla_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_cmla90_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_cmla180_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_cmla270_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sqrdcmlah_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sqrdcmlah90_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sqrdcmlah180_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sqrdcmlah270_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_cdotvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_cdot90vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_cdot180vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_cdot270vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_cdotvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_cdot90vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_cdot180vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_cdot270vnx2di (TARGET_SVE2)
#define HAVE_aarch64_cdot_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_cdot90_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_cdot180_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_cdot270_lane_vnx4si (TARGET_SVE2)
#define HAVE_aarch64_cdot_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_cdot90_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_cdot180_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_cdot270_lane_vnx2di (TARGET_SVE2)
#define HAVE_aarch64_pred_fcvtltvnx4sf (TARGET_SVE2)
#define HAVE_aarch64_pred_fcvtltvnx2df (TARGET_SVE2)
#define HAVE_aarch64_sve_cvtntvnx8hf (TARGET_SVE2)
#define HAVE_aarch64_sve_cvtntvnx4sf (TARGET_SVE2)
#define HAVE_aarch64_pred_fcvtxvnx4sf (TARGET_SVE2)
#define HAVE_aarch64_sve2_cvtxntvnx2df (TARGET_SVE2)
#define HAVE_aarch64_pred_urecpevnx4si (TARGET_SVE2)
#define HAVE_aarch64_pred_ursqrtevnx4si (TARGET_SVE2)
#define HAVE_aarch64_pred_flogbvnx8hf (TARGET_SVE2)
#define HAVE_aarch64_pred_flogbvnx4sf (TARGET_SVE2)
#define HAVE_aarch64_pred_flogbvnx2df (TARGET_SVE2)
#define HAVE_aarch64_sve2_pmulvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_pmullbvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_pmulltvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_pmullbvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_pmulltvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_pmullb_pairvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_pmullt_pairvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_pmullb_pairvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_pmullt_pairvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_pmullb_pairvnx2di ((TARGET_SVE2) && (TARGET_SVE2_AES))
#define HAVE_aarch64_sve_pmullt_pairvnx2di ((TARGET_SVE2) && (TARGET_SVE2_AES))
#define HAVE_aarch64_sve2_tbl2vnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve2_tbl2vnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve2_tbl2vnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve2_tbl2vnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve2_tbl2vnx8bf (TARGET_SVE2)
#define HAVE_aarch64_sve2_tbl2vnx8hf (TARGET_SVE2)
#define HAVE_aarch64_sve2_tbl2vnx4sf (TARGET_SVE2)
#define HAVE_aarch64_sve2_tbl2vnx2df (TARGET_SVE2)
#define HAVE_aarch64_sve2_tbxvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve2_tbxvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve2_tbxvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve2_tbxvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve2_tbxvnx8bf (TARGET_SVE2)
#define HAVE_aarch64_sve2_tbxvnx8hf (TARGET_SVE2)
#define HAVE_aarch64_sve2_tbxvnx4sf (TARGET_SVE2)
#define HAVE_aarch64_sve2_tbxvnx2df (TARGET_SVE2)
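/* TARGET_SVE2_BITPERM gates the optional SVE2 bit-permute extension
   (+sve2-bitperm): BDEP, BEXT and BGRP.  */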
#define HAVE_aarch64_sve_bdepvnx16qi (TARGET_SVE2_BITPERM)
#define HAVE_aarch64_sve_bextvnx16qi (TARGET_SVE2_BITPERM)
#define HAVE_aarch64_sve_bgrpvnx16qi (TARGET_SVE2_BITPERM)
#define HAVE_aarch64_sve_bdepvnx8hi (TARGET_SVE2_BITPERM)
#define HAVE_aarch64_sve_bextvnx8hi (TARGET_SVE2_BITPERM)
#define HAVE_aarch64_sve_bgrpvnx8hi (TARGET_SVE2_BITPERM)
#define HAVE_aarch64_sve_bdepvnx4si (TARGET_SVE2_BITPERM)
#define HAVE_aarch64_sve_bextvnx4si (TARGET_SVE2_BITPERM)
#define HAVE_aarch64_sve_bgrpvnx4si (TARGET_SVE2_BITPERM)
#define HAVE_aarch64_sve_bdepvnx2di (TARGET_SVE2_BITPERM)
#define HAVE_aarch64_sve_bextvnx2di (TARGET_SVE2_BITPERM)
#define HAVE_aarch64_sve_bgrpvnx2di (TARGET_SVE2_BITPERM)
#define HAVE_aarch64_sve2_histcntvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve2_histcntvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve2_histsegvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_pred_matchvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_pred_nmatchvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_pred_matchvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_pred_nmatchvnx8hi (TARGET_SVE2)
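/* The remaining SVE2 entries belong to the optional crypto extensions:
   +sve2-aes (AESE/AESD/AESMC/AESIMC, plus the 64-bit PMULL pairs
   above), +sve2-sha3 (RAX1) and +sve2-sm4 (SM4E/SM4EKEY).  */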
#define HAVE_aarch64_sve2_aese (TARGET_SVE2_AES)
#define HAVE_aarch64_sve2_aesd (TARGET_SVE2_AES)
#define HAVE_aarch64_sve2_aesmc (TARGET_SVE2_AES)
#define HAVE_aarch64_sve2_aesimc (TARGET_SVE2_AES)
#define HAVE_aarch64_sve2_rax1 (TARGET_SVE2_SHA3)
#define HAVE_aarch64_sve2_sm4e (TARGET_SVE2_SM4)
#define HAVE_aarch64_sve2_sm4ekey (TARGET_SVE2_SM4)
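/* Everything from here on covers the base AArch64 ISA, Advanced SIMD
   and the target-independent optab expanders.  Each HAVE_<name> macro
   expands either to 1 or to the C condition of the named pattern in
   the machine description; callers test it before invoking the
   matching gen_<name> generator that genemit produces.  A minimal
   illustrative sketch (dest and src stand for caller-provided rtx
   operands):

     if (HAVE_popcountsi2)
       emit_insn (gen_popcountsi2 (dest, src));

   When the condition is false the generator must not be called and the
   caller falls back to another expansion strategy.  */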
#define HAVE_cbranchsi4 1
#define HAVE_cbranchdi4 1
#define HAVE_cbranchsf4 1
#define HAVE_cbranchdf4 1
#define HAVE_cbranchcc4 1
#define HAVE_modsi3 1
#define HAVE_moddi3 1
#define HAVE_casesi 1
#define HAVE_casesi_dispatch 1
#define HAVE_prologue 1
#define HAVE_epilogue 1
#define HAVE_sibcall_epilogue 1
#define HAVE_return (aarch64_use_return_insn_p ())
#define HAVE_call 1
#define HAVE_call_value 1
#define HAVE_sibcall 1
#define HAVE_sibcall_value 1
#define HAVE_untyped_call 1
#define HAVE_movqi 1
#define HAVE_movhi 1
#define HAVE_movsi 1
#define HAVE_movdi 1
#define HAVE_movti 1
#define HAVE_movhf 1
#define HAVE_movbf 1
#define HAVE_movsf 1
#define HAVE_movdf 1
#define HAVE_movtf 1
#define HAVE_cpymemdi (!STRICT_ALIGNMENT)
#define HAVE_extendsidi2 1
#define HAVE_zero_extendsidi2 1
#define HAVE_extendqisi2 1
#define HAVE_zero_extendqisi2 1
#define HAVE_extendhisi2 1
#define HAVE_zero_extendhisi2 1
#define HAVE_extendqidi2 1
#define HAVE_zero_extendqidi2 1
#define HAVE_extendhidi2 1
#define HAVE_zero_extendhidi2 1
#define HAVE_extendqihi2 1
#define HAVE_zero_extendqihi2 1
#define HAVE_addsi3 1
#define HAVE_adddi3 1
#define HAVE_addvsi4 1
#define HAVE_addvdi4 1
#define HAVE_uaddvsi4 1
#define HAVE_uaddvdi4 1
#define HAVE_addti3 1
#define HAVE_addvti4 1
#define HAVE_uaddvti4 1
#define HAVE_addsi3_carryin 1
#define HAVE_adddi3_carryin 1
#define HAVE_addsi3_carryinC 1
#define HAVE_adddi3_carryinC 1
#define HAVE_addsi3_carryinV 1
#define HAVE_adddi3_carryinV 1
#define HAVE_subvsi4 1
#define HAVE_subvdi4 1
#define HAVE_negvsi3 1
#define HAVE_negvdi3 1
#define HAVE_usubvsi4 1
#define HAVE_usubvdi4 1
#define HAVE_subti3 1
#define HAVE_subvti4 1
#define HAVE_usubvti4 1
#define HAVE_negvti3 1
#define HAVE_subsi3_carryin 1
#define HAVE_subdi3_carryin 1
#define HAVE_usubsi3_carryinC 1
#define HAVE_usubdi3_carryinC 1
#define HAVE_subsi3_carryinV 1
#define HAVE_subdi3_carryinV 1
#define HAVE_abssi2 1
#define HAVE_absdi2 1
#define HAVE_mulditi3 1
#define HAVE_umulditi3 1
#define HAVE_multi3 1
#define HAVE_cstoresi4 1
#define HAVE_cstoredi4 1
#define HAVE_cstorecc4 1
#define HAVE_cstoresf4 1
#define HAVE_cstoredf4 1
#define HAVE_cmovsi6 1
#define HAVE_cmovdi6 1
#define HAVE_cmovsf6 1
#define HAVE_cmovdf6 1
#define HAVE_movqicc 1
#define HAVE_movhicc 1
#define HAVE_movsicc 1
#define HAVE_movdicc 1
#define HAVE_movsfsicc 1
#define HAVE_movdfsicc 1
#define HAVE_movsfdicc 1
#define HAVE_movdfdicc 1
#define HAVE_movsfcc 1
#define HAVE_movdfcc 1
#define HAVE_negsicc 1
#define HAVE_notsicc 1
#define HAVE_negdicc 1
#define HAVE_notdicc 1
#define HAVE_umaxsi3 (TARGET_SVE)
#define HAVE_umaxdi3 (TARGET_SVE)
#define HAVE_ffssi2 1
#define HAVE_ffsdi2 1
#define HAVE_popcountsi2 (TARGET_SIMD)
#define HAVE_popcountdi2 (TARGET_SIMD)
#define HAVE_ashlsi3 1
#define HAVE_ashrsi3 1
#define HAVE_lshrsi3 1
#define HAVE_ashldi3 1
#define HAVE_ashrdi3 1
#define HAVE_lshrdi3 1
#define HAVE_ashlqi3 1
#define HAVE_ashlhi3 1
#define HAVE_rotrsi3 1
#define HAVE_rotrdi3 1
#define HAVE_rotlsi3 1
#define HAVE_rotldi3 1
#define HAVE_extv 1
#define HAVE_extzv 1
#define HAVE_insvsi 1
#define HAVE_insvdi 1
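/* AARCH64_ISA_F16 marks the ARMv8.2-A half-precision extension
   (+fp16); the scalar HFmode patterns (fmahf4, divhf3, sqrthf2, ...)
   are only available under it.  */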
#define HAVE_fmahf4 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_fmasf4 (TARGET_FLOAT)
#define HAVE_fmadf4 (TARGET_FLOAT)
#define HAVE_fnmahf4 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_fnmasf4 (TARGET_FLOAT)
#define HAVE_fnmadf4 (TARGET_FLOAT)
#define HAVE_fmssf4 (TARGET_FLOAT)
#define HAVE_fmsdf4 (TARGET_FLOAT)
#define HAVE_fnmssf4 (TARGET_FLOAT)
#define HAVE_fnmsdf4 (TARGET_FLOAT)
#define HAVE_floatsihf2 (TARGET_FLOAT)
#define HAVE_floatunssihf2 (TARGET_FLOAT)
#define HAVE_floatdihf2 (TARGET_FLOAT && (TARGET_FP_F16INST || TARGET_SIMD))
#define HAVE_floatunsdihf2 (TARGET_FLOAT && (TARGET_FP_F16INST || TARGET_SIMD))
#define HAVE_divhf3 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_divsf3 (TARGET_FLOAT)
#define HAVE_divdf3 (TARGET_FLOAT)
#define HAVE_sqrthf2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
#define HAVE_sqrtsf2 (TARGET_FLOAT)
#define HAVE_sqrtdf2 (TARGET_FLOAT)
#define HAVE_lrintsfsi2 (TARGET_FLOAT \
&& ((GET_MODE_BITSIZE (SFmode) <= LONG_TYPE_SIZE) \
|| !flag_trapping_math || flag_fp_int_builtin_inexact))
#define HAVE_lrintdfsi2 (TARGET_FLOAT \
&& ((GET_MODE_BITSIZE (DFmode) <= LONG_TYPE_SIZE) \
|| !flag_trapping_math || flag_fp_int_builtin_inexact))
#define HAVE_lrintsfdi2 (TARGET_FLOAT \
&& ((GET_MODE_BITSIZE (SFmode) <= LONG_TYPE_SIZE) \
|| !flag_trapping_math || flag_fp_int_builtin_inexact))
#define HAVE_lrintdfdi2 (TARGET_FLOAT \
&& ((GET_MODE_BITSIZE (DFmode) <= LONG_TYPE_SIZE) \
|| !flag_trapping_math || flag_fp_int_builtin_inexact))
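/* The lrint* guards appear intended to keep the expander unavailable
   whenever the FRINT/FCVTZ expansion could raise floating-point
   exceptions beyond those C99 lrint permits: they require the
   floating-point mode to be no wider than long, or -fno-trapping-math,
   or -ffp-int-builtin-inexact.  */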
#define HAVE_copysignsf3 (TARGET_FLOAT && TARGET_SIMD)
#define HAVE_copysigndf3 (TARGET_FLOAT && TARGET_SIMD)
#define HAVE_xorsignsf3 (TARGET_FLOAT && TARGET_SIMD)
#define HAVE_xorsigndf3 (TARGET_FLOAT && TARGET_SIMD)
#define HAVE_aarch64_reload_movcpsfsi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_aarch64_reload_movcpsfdi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_aarch64_reload_movcpdfsi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_aarch64_reload_movcpdfdi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_aarch64_reload_movcptfsi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_aarch64_reload_movcptfdi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_aarch64_reload_movcpv8qisi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_aarch64_reload_movcpv8qidi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_aarch64_reload_movcpv16qisi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_aarch64_reload_movcpv16qidi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_aarch64_reload_movcpv4hisi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_aarch64_reload_movcpv4hidi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_aarch64_reload_movcpv8hisi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_aarch64_reload_movcpv8hidi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_aarch64_reload_movcpv2sisi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_aarch64_reload_movcpv2sidi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_aarch64_reload_movcpv4sisi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_aarch64_reload_movcpv4sidi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_aarch64_reload_movcpv2disi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_aarch64_reload_movcpv2didi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_aarch64_reload_movcpv2sfsi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_aarch64_reload_movcpv2sfdi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_aarch64_reload_movcpv4sfsi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_aarch64_reload_movcpv4sfdi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_aarch64_reload_movcpv2dfsi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
#define HAVE_aarch64_reload_movcpv2dfdi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
#define HAVE_aarch64_reload_movti (TARGET_FLOAT)
#define HAVE_aarch64_reload_movtf (TARGET_FLOAT)
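/* The secondary-reload helpers above come in *si/*di flavours so that
   the scratch address arithmetic matches ptr_mode/Pmode, i.e. ILP32
   versus LP64.  */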
#define HAVE_add_losym 1
#define HAVE_tlsgd_small_si (ptr_mode == SImode)
#define HAVE_tlsgd_small_di (ptr_mode == DImode)
#define HAVE_tlsdesc_small_si ((TARGET_TLS_DESC) && (ptr_mode == SImode))
#define HAVE_tlsdesc_small_di ((TARGET_TLS_DESC) && (ptr_mode == DImode))
#define HAVE_get_thread_pointerdi 1
#define HAVE_stack_protect_set 1
#define HAVE_stack_protect_test 1
#define HAVE_doloop_end (optimize > 0 && flag_modulo_sched)
#define HAVE_despeculate_copyqi 1
#define HAVE_despeculate_copyhi 1
#define HAVE_despeculate_copysi 1
#define HAVE_despeculate_copydi 1
#define HAVE_despeculate_copyti 1
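/* The vector patterns from here on use the fixed-width Advanced SIMD
   modes (v8qi ... v2df, i.e. 64-bit and 128-bit vectors) gated by
   TARGET_SIMD, as opposed to the scalable vnx* SVE modes earlier.  */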
#define HAVE_movv8qi (TARGET_SIMD)
#define HAVE_movv16qi (TARGET_SIMD)
#define HAVE_movv4hi (TARGET_SIMD)
#define HAVE_movv8hi (TARGET_SIMD)
#define HAVE_movv2si (TARGET_SIMD)
#define HAVE_movv4si (TARGET_SIMD)
#define HAVE_movv2di (TARGET_SIMD)
#define HAVE_movv4hf (TARGET_SIMD)
#define HAVE_movv8hf (TARGET_SIMD)
#define HAVE_movv4bf (TARGET_SIMD)
#define HAVE_movv8bf (TARGET_SIMD)
#define HAVE_movv2sf (TARGET_SIMD)
#define HAVE_movv4sf (TARGET_SIMD)
#define HAVE_movv2df (TARGET_SIMD)
#define HAVE_movmisalignv8qi (TARGET_SIMD && !STRICT_ALIGNMENT)
#define HAVE_movmisalignv16qi (TARGET_SIMD && !STRICT_ALIGNMENT)
#define HAVE_movmisalignv4hi (TARGET_SIMD && !STRICT_ALIGNMENT)
#define HAVE_movmisalignv8hi (TARGET_SIMD && !STRICT_ALIGNMENT)
#define HAVE_movmisalignv2si (TARGET_SIMD && !STRICT_ALIGNMENT)
#define HAVE_movmisalignv4si (TARGET_SIMD && !STRICT_ALIGNMENT)
#define HAVE_movmisalignv2di (TARGET_SIMD && !STRICT_ALIGNMENT)
#define HAVE_movmisalignv2sf (TARGET_SIMD && !STRICT_ALIGNMENT)
#define HAVE_movmisalignv4sf (TARGET_SIMD && !STRICT_ALIGNMENT)
#define HAVE_movmisalignv2df (TARGET_SIMD && !STRICT_ALIGNMENT)
#define HAVE_aarch64_split_simd_movv16qi (TARGET_SIMD)
#define HAVE_aarch64_split_simd_movv8hi (TARGET_SIMD)
#define HAVE_aarch64_split_simd_movv4si (TARGET_SIMD)
#define HAVE_aarch64_split_simd_movv2di (TARGET_SIMD)
#define HAVE_aarch64_split_simd_movv8hf (TARGET_SIMD)
#define HAVE_aarch64_split_simd_movv8bf (TARGET_SIMD)
#define HAVE_aarch64_split_simd_movv4sf (TARGET_SIMD)
#define HAVE_aarch64_split_simd_movv2df (TARGET_SIMD)
#define HAVE_aarch64_get_halfv16qi (TARGET_SIMD)
#define HAVE_aarch64_get_halfv8hi (TARGET_SIMD)
#define HAVE_aarch64_get_halfv4si (TARGET_SIMD)
#define HAVE_aarch64_get_halfv2di (TARGET_SIMD)
#define HAVE_aarch64_get_halfv8hf (TARGET_SIMD)
#define HAVE_aarch64_get_halfv8bf (TARGET_SIMD)
#define HAVE_aarch64_get_halfv4sf (TARGET_SIMD)
#define HAVE_aarch64_get_halfv2df (TARGET_SIMD)
#define HAVE_ctzv2si2 (TARGET_SIMD)
#define HAVE_ctzv4si2 (TARGET_SIMD)
#define HAVE_xorsignv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_xorsignv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_xorsignv2sf3 (TARGET_SIMD)
#define HAVE_xorsignv4sf3 (TARGET_SIMD)
#define HAVE_xorsignv2df3 (TARGET_SIMD)
#define HAVE_sdot_prodv8qi (TARGET_DOTPROD)
#define HAVE_udot_prodv8qi (TARGET_DOTPROD)
#define HAVE_sdot_prodv16qi (TARGET_DOTPROD)
#define HAVE_udot_prodv16qi (TARGET_DOTPROD)
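/* TARGET_DOTPROD gates the ARMv8.2-A dot-product extension (+dotprod),
   i.e. the SDOT and UDOT instructions behind sdot_prod/udot_prod.  */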
#define HAVE_copysignv4hf3 ((TARGET_FLOAT && TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_copysignv8hf3 ((TARGET_FLOAT && TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_copysignv2sf3 (TARGET_FLOAT && TARGET_SIMD)
#define HAVE_copysignv4sf3 (TARGET_FLOAT && TARGET_SIMD)
#define HAVE_copysignv2df3 (TARGET_FLOAT && TARGET_SIMD)
#define HAVE_rsqrtv2sf2 (TARGET_SIMD)
#define HAVE_rsqrtv4sf2 (TARGET_SIMD)
#define HAVE_rsqrtv2df2 (TARGET_SIMD)
#define HAVE_rsqrtsf2 (TARGET_SIMD)
#define HAVE_rsqrtdf2 (TARGET_SIMD)
#define HAVE_ssadv16qi (TARGET_SIMD)
#define HAVE_usadv16qi (TARGET_SIMD)
#define HAVE_signbitv2sf2 (TARGET_SIMD)
#define HAVE_signbitv4sf2 (TARGET_SIMD)
#define HAVE_ashlv8qi3 (TARGET_SIMD)
#define HAVE_ashlv16qi3 (TARGET_SIMD)
#define HAVE_ashlv4hi3 (TARGET_SIMD)
#define HAVE_ashlv8hi3 (TARGET_SIMD)
#define HAVE_ashlv2si3 (TARGET_SIMD)
#define HAVE_ashlv4si3 (TARGET_SIMD)
#define HAVE_ashlv2di3 (TARGET_SIMD)
#define HAVE_lshrv8qi3 (TARGET_SIMD)
#define HAVE_lshrv16qi3 (TARGET_SIMD)
#define HAVE_lshrv4hi3 (TARGET_SIMD)
#define HAVE_lshrv8hi3 (TARGET_SIMD)
#define HAVE_lshrv2si3 (TARGET_SIMD)
#define HAVE_lshrv4si3 (TARGET_SIMD)
#define HAVE_lshrv2di3 (TARGET_SIMD)
#define HAVE_ashrv8qi3 (TARGET_SIMD)
#define HAVE_ashrv16qi3 (TARGET_SIMD)
#define HAVE_ashrv4hi3 (TARGET_SIMD)
#define HAVE_ashrv8hi3 (TARGET_SIMD)
#define HAVE_ashrv2si3 (TARGET_SIMD)
#define HAVE_ashrv4si3 (TARGET_SIMD)
#define HAVE_ashrv2di3 (TARGET_SIMD)
#define HAVE_vashlv8qi3 (TARGET_SIMD)
#define HAVE_vashlv16qi3 (TARGET_SIMD)
#define HAVE_vashlv4hi3 (TARGET_SIMD)
#define HAVE_vashlv8hi3 (TARGET_SIMD)
#define HAVE_vashlv2si3 (TARGET_SIMD)
#define HAVE_vashlv4si3 (TARGET_SIMD)
#define HAVE_vashlv2di3 (TARGET_SIMD)
#define HAVE_vashrv8qi3 (TARGET_SIMD)
#define HAVE_vashrv16qi3 (TARGET_SIMD)
#define HAVE_vashrv4hi3 (TARGET_SIMD)
#define HAVE_vashrv8hi3 (TARGET_SIMD)
#define HAVE_vashrv2si3 (TARGET_SIMD)
#define HAVE_vashrv4si3 (TARGET_SIMD)
#define HAVE_aarch64_ashr_simddi (TARGET_SIMD)
#define HAVE_vlshrv8qi3 (TARGET_SIMD)
#define HAVE_vlshrv16qi3 (TARGET_SIMD)
#define HAVE_vlshrv4hi3 (TARGET_SIMD)
#define HAVE_vlshrv8hi3 (TARGET_SIMD)
#define HAVE_vlshrv2si3 (TARGET_SIMD)
#define HAVE_vlshrv4si3 (TARGET_SIMD)
#define HAVE_aarch64_lshr_simddi (TARGET_SIMD)
#define HAVE_vec_setv8qi (TARGET_SIMD)
#define HAVE_vec_setv16qi (TARGET_SIMD)
#define HAVE_vec_setv4hi (TARGET_SIMD)
#define HAVE_vec_setv8hi (TARGET_SIMD)
#define HAVE_vec_setv2si (TARGET_SIMD)
#define HAVE_vec_setv4si (TARGET_SIMD)
#define HAVE_vec_setv2di (TARGET_SIMD)
#define HAVE_vec_setv4hf (TARGET_SIMD)
#define HAVE_vec_setv8hf (TARGET_SIMD)
#define HAVE_vec_setv4bf (TARGET_SIMD)
#define HAVE_vec_setv8bf (TARGET_SIMD)
#define HAVE_vec_setv2sf (TARGET_SIMD)
#define HAVE_vec_setv4sf (TARGET_SIMD)
#define HAVE_vec_setv2df (TARGET_SIMD)
#define HAVE_smaxv2di3 (TARGET_SIMD)
#define HAVE_sminv2di3 (TARGET_SIMD)
#define HAVE_umaxv2di3 (TARGET_SIMD)
#define HAVE_uminv2di3 (TARGET_SIMD)
#define HAVE_move_lo_quad_v16qi (TARGET_SIMD)
#define HAVE_move_lo_quad_v8hi (TARGET_SIMD)
#define HAVE_move_lo_quad_v4si (TARGET_SIMD)
#define HAVE_move_lo_quad_v2di (TARGET_SIMD)
#define HAVE_move_lo_quad_v8hf (TARGET_SIMD)
#define HAVE_move_lo_quad_v8bf (TARGET_SIMD)
#define HAVE_move_lo_quad_v4sf (TARGET_SIMD)
#define HAVE_move_lo_quad_v2df (TARGET_SIMD)
#define HAVE_move_hi_quad_v16qi (TARGET_SIMD)
#define HAVE_move_hi_quad_v8hi (TARGET_SIMD)
#define HAVE_move_hi_quad_v4si (TARGET_SIMD)
#define HAVE_move_hi_quad_v2di (TARGET_SIMD)
#define HAVE_move_hi_quad_v8hf (TARGET_SIMD)
#define HAVE_move_hi_quad_v8bf (TARGET_SIMD)
#define HAVE_move_hi_quad_v4sf (TARGET_SIMD)
#define HAVE_move_hi_quad_v2df (TARGET_SIMD)
#define HAVE_vec_pack_trunc_v4hi (TARGET_SIMD)
#define HAVE_vec_pack_trunc_v2si (TARGET_SIMD)
#define HAVE_vec_pack_trunc_di (TARGET_SIMD)
#define HAVE_vec_unpacks_hi_v16qi (TARGET_SIMD)
#define HAVE_vec_unpacku_hi_v16qi (TARGET_SIMD)
#define HAVE_vec_unpacks_hi_v8hi (TARGET_SIMD)
#define HAVE_vec_unpacku_hi_v8hi (TARGET_SIMD)
#define HAVE_vec_unpacks_hi_v4si (TARGET_SIMD)
#define HAVE_vec_unpacku_hi_v4si (TARGET_SIMD)
#define HAVE_vec_unpacks_lo_v16qi (TARGET_SIMD)
#define HAVE_vec_unpacku_lo_v16qi (TARGET_SIMD)
#define HAVE_vec_unpacks_lo_v8hi (TARGET_SIMD)
#define HAVE_vec_unpacku_lo_v8hi (TARGET_SIMD)
#define HAVE_vec_unpacks_lo_v4si (TARGET_SIMD)
#define HAVE_vec_unpacku_lo_v4si (TARGET_SIMD)
#define HAVE_vec_widen_smult_lo_v16qi (TARGET_SIMD)
#define HAVE_vec_widen_umult_lo_v16qi (TARGET_SIMD)
#define HAVE_vec_widen_smult_lo_v8hi (TARGET_SIMD)
#define HAVE_vec_widen_umult_lo_v8hi (TARGET_SIMD)
#define HAVE_vec_widen_smult_lo_v4si (TARGET_SIMD)
#define HAVE_vec_widen_umult_lo_v4si (TARGET_SIMD)
#define HAVE_vec_widen_smult_hi_v16qi (TARGET_SIMD)
#define HAVE_vec_widen_umult_hi_v16qi (TARGET_SIMD)
#define HAVE_vec_widen_smult_hi_v8hi (TARGET_SIMD)
#define HAVE_vec_widen_umult_hi_v8hi (TARGET_SIMD)
#define HAVE_vec_widen_smult_hi_v4si (TARGET_SIMD)
#define HAVE_vec_widen_umult_hi_v4si (TARGET_SIMD)
#define HAVE_divv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_divv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_divv2sf3 (TARGET_SIMD)
#define HAVE_divv4sf3 (TARGET_SIMD)
#define HAVE_divv2df3 (TARGET_SIMD)
#define HAVE_fixv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fixunsv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fixv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fixunsv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fixv2sfv2si2 (TARGET_SIMD)
#define HAVE_fixunsv2sfv2si2 (TARGET_SIMD)
#define HAVE_fixv4sfv4si2 (TARGET_SIMD)
#define HAVE_fixunsv4sfv4si2 (TARGET_SIMD)
#define HAVE_fixv2dfv2di2 (TARGET_SIMD)
#define HAVE_fixunsv2dfv2di2 (TARGET_SIMD)
#define HAVE_fix_truncv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fixuns_truncv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fix_truncv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fixuns_truncv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_fix_truncv2sfv2si2 (TARGET_SIMD)
#define HAVE_fixuns_truncv2sfv2si2 (TARGET_SIMD)
#define HAVE_fix_truncv4sfv4si2 (TARGET_SIMD)
#define HAVE_fixuns_truncv4sfv4si2 (TARGET_SIMD)
#define HAVE_fix_truncv2dfv2di2 (TARGET_SIMD)
#define HAVE_fixuns_truncv2dfv2di2 (TARGET_SIMD)
#define HAVE_ftruncv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_ftruncv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_ftruncv2sf2 (TARGET_SIMD)
#define HAVE_ftruncv4sf2 (TARGET_SIMD)
#define HAVE_ftruncv2df2 (TARGET_SIMD)
#define HAVE_vec_unpacks_lo_v8hf (TARGET_SIMD)
#define HAVE_vec_unpacks_lo_v4sf (TARGET_SIMD)
#define HAVE_vec_unpacks_hi_v8hf (TARGET_SIMD)
#define HAVE_vec_unpacks_hi_v4sf (TARGET_SIMD)
#define HAVE_aarch64_float_truncate_hi_v4sf (TARGET_SIMD)
#define HAVE_aarch64_float_truncate_hi_v8hf (TARGET_SIMD)
#define HAVE_vec_pack_trunc_v2df (TARGET_SIMD)
#define HAVE_vec_pack_trunc_df (TARGET_SIMD)
#define HAVE_reduc_plus_scal_v8qi (TARGET_SIMD)
#define HAVE_reduc_plus_scal_v16qi (TARGET_SIMD)
#define HAVE_reduc_plus_scal_v4hi (TARGET_SIMD)
#define HAVE_reduc_plus_scal_v8hi (TARGET_SIMD)
#define HAVE_reduc_plus_scal_v2si (TARGET_SIMD)
#define HAVE_reduc_plus_scal_v4si (TARGET_SIMD)
#define HAVE_reduc_plus_scal_v2di (TARGET_SIMD)
#define HAVE_reduc_plus_scal_v4sf (TARGET_SIMD)
#define HAVE_reduc_smax_nan_scal_v4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_reduc_smin_nan_scal_v4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_reduc_smax_scal_v4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_reduc_smin_scal_v4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_reduc_smax_nan_scal_v8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_reduc_smin_nan_scal_v8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_reduc_smax_scal_v8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_reduc_smin_scal_v8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_reduc_smax_nan_scal_v2sf (TARGET_SIMD)
#define HAVE_reduc_smin_nan_scal_v2sf (TARGET_SIMD)
#define HAVE_reduc_smax_scal_v2sf (TARGET_SIMD)
#define HAVE_reduc_smin_scal_v2sf (TARGET_SIMD)
#define HAVE_reduc_smax_nan_scal_v4sf (TARGET_SIMD)
#define HAVE_reduc_smin_nan_scal_v4sf (TARGET_SIMD)
#define HAVE_reduc_smax_scal_v4sf (TARGET_SIMD)
#define HAVE_reduc_smin_scal_v4sf (TARGET_SIMD)
#define HAVE_reduc_smax_nan_scal_v2df (TARGET_SIMD)
#define HAVE_reduc_smin_nan_scal_v2df (TARGET_SIMD)
#define HAVE_reduc_smax_scal_v2df (TARGET_SIMD)
#define HAVE_reduc_smin_scal_v2df (TARGET_SIMD)
#define HAVE_reduc_umax_scal_v8qi (TARGET_SIMD)
#define HAVE_reduc_umin_scal_v8qi (TARGET_SIMD)
#define HAVE_reduc_smax_scal_v8qi (TARGET_SIMD)
#define HAVE_reduc_smin_scal_v8qi (TARGET_SIMD)
#define HAVE_reduc_umax_scal_v16qi (TARGET_SIMD)
#define HAVE_reduc_umin_scal_v16qi (TARGET_SIMD)
#define HAVE_reduc_smax_scal_v16qi (TARGET_SIMD)
#define HAVE_reduc_smin_scal_v16qi (TARGET_SIMD)
#define HAVE_reduc_umax_scal_v4hi (TARGET_SIMD)
#define HAVE_reduc_umin_scal_v4hi (TARGET_SIMD)
#define HAVE_reduc_smax_scal_v4hi (TARGET_SIMD)
#define HAVE_reduc_smin_scal_v4hi (TARGET_SIMD)
#define HAVE_reduc_umax_scal_v8hi (TARGET_SIMD)
#define HAVE_reduc_umin_scal_v8hi (TARGET_SIMD)
#define HAVE_reduc_smax_scal_v8hi (TARGET_SIMD)
#define HAVE_reduc_smin_scal_v8hi (TARGET_SIMD)
#define HAVE_reduc_umax_scal_v2si (TARGET_SIMD)
#define HAVE_reduc_umin_scal_v2si (TARGET_SIMD)
#define HAVE_reduc_smax_scal_v2si (TARGET_SIMD)
#define HAVE_reduc_smin_scal_v2si (TARGET_SIMD)
#define HAVE_reduc_umax_scal_v4si (TARGET_SIMD)
#define HAVE_reduc_umin_scal_v4si (TARGET_SIMD)
#define HAVE_reduc_smax_scal_v4si (TARGET_SIMD)
#define HAVE_reduc_smin_scal_v4si (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv8qi (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv16qi (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv4hi (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv8hi (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv2si (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv4si (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv4bf (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv8bf (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv2di (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv4hf (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv8hf (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv2sf (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv4sf (TARGET_SIMD)
#define HAVE_aarch64_simd_bslv2df (TARGET_SIMD)
#define HAVE_aarch64_simd_bsldi (TARGET_SIMD)
#define HAVE_aarch64_simd_bsldf (TARGET_SIMD)
#define HAVE_vcond_mask_v8qiv8qi (TARGET_SIMD)
#define HAVE_vcond_mask_v16qiv16qi (TARGET_SIMD)
#define HAVE_vcond_mask_v4hiv4hi (TARGET_SIMD)
#define HAVE_vcond_mask_v8hiv8hi (TARGET_SIMD)
#define HAVE_vcond_mask_v2siv2si (TARGET_SIMD)
#define HAVE_vcond_mask_v4siv4si (TARGET_SIMD)
#define HAVE_vcond_mask_v2div2di (TARGET_SIMD)
#define HAVE_vcond_mask_v2sfv2si (TARGET_SIMD)
#define HAVE_vcond_mask_v4sfv4si (TARGET_SIMD)
#define HAVE_vcond_mask_v2dfv2di (TARGET_SIMD)
#define HAVE_vcond_mask_didi (TARGET_SIMD)
#define HAVE_vec_cmpv8qiv8qi (TARGET_SIMD)
#define HAVE_vec_cmpv16qiv16qi (TARGET_SIMD)
#define HAVE_vec_cmpv4hiv4hi (TARGET_SIMD)
#define HAVE_vec_cmpv8hiv8hi (TARGET_SIMD)
#define HAVE_vec_cmpv2siv2si (TARGET_SIMD)
#define HAVE_vec_cmpv4siv4si (TARGET_SIMD)
#define HAVE_vec_cmpv2div2di (TARGET_SIMD)
#define HAVE_vec_cmpdidi (TARGET_SIMD)
#define HAVE_vec_cmpv2sfv2si (TARGET_SIMD)
#define HAVE_vec_cmpv4sfv4si (TARGET_SIMD)
#define HAVE_vec_cmpv2dfv2di (TARGET_SIMD)
#define HAVE_vec_cmpuv8qiv8qi (TARGET_SIMD)
#define HAVE_vec_cmpuv16qiv16qi (TARGET_SIMD)
#define HAVE_vec_cmpuv4hiv4hi (TARGET_SIMD)
#define HAVE_vec_cmpuv8hiv8hi (TARGET_SIMD)
#define HAVE_vec_cmpuv2siv2si (TARGET_SIMD)
#define HAVE_vec_cmpuv4siv4si (TARGET_SIMD)
#define HAVE_vec_cmpuv2div2di (TARGET_SIMD)
#define HAVE_vec_cmpudidi (TARGET_SIMD)
#define HAVE_vcondv8qiv8qi (TARGET_SIMD)
#define HAVE_vcondv16qiv16qi (TARGET_SIMD)
#define HAVE_vcondv4hiv4hi (TARGET_SIMD)
#define HAVE_vcondv8hiv8hi (TARGET_SIMD)
#define HAVE_vcondv2siv2si (TARGET_SIMD)
#define HAVE_vcondv4siv4si (TARGET_SIMD)
#define HAVE_vcondv2div2di (TARGET_SIMD)
#define HAVE_vcondv2sfv2sf (TARGET_SIMD)
#define HAVE_vcondv4sfv4sf (TARGET_SIMD)
#define HAVE_vcondv2dfv2df (TARGET_SIMD)
#define HAVE_vconddidi (TARGET_SIMD)
#define HAVE_vcondv2siv2sf (TARGET_SIMD)
#define HAVE_vcondv2sfv2si (TARGET_SIMD)
#define HAVE_vcondv4siv4sf (TARGET_SIMD)
#define HAVE_vcondv4sfv4si (TARGET_SIMD)
#define HAVE_vcondv2div2df (TARGET_SIMD)
#define HAVE_vcondv2dfv2di (TARGET_SIMD)
#define HAVE_vconduv8qiv8qi (TARGET_SIMD)
#define HAVE_vconduv16qiv16qi (TARGET_SIMD)
#define HAVE_vconduv4hiv4hi (TARGET_SIMD)
#define HAVE_vconduv8hiv8hi (TARGET_SIMD)
#define HAVE_vconduv2siv2si (TARGET_SIMD)
#define HAVE_vconduv4siv4si (TARGET_SIMD)
#define HAVE_vconduv2div2di (TARGET_SIMD)
#define HAVE_vcondudidi (TARGET_SIMD)
#define HAVE_vconduv2sfv2si (TARGET_SIMD)
#define HAVE_vconduv4sfv4si (TARGET_SIMD)
#define HAVE_vconduv2dfv2di (TARGET_SIMD)
#define HAVE_aarch64_combinev8qi (TARGET_SIMD)
#define HAVE_aarch64_combinev4hi (TARGET_SIMD)
#define HAVE_aarch64_combinev4bf (TARGET_SIMD)
#define HAVE_aarch64_combinev4hf (TARGET_SIMD)
#define HAVE_aarch64_combinev2si (TARGET_SIMD)
#define HAVE_aarch64_combinev2sf (TARGET_SIMD)
#define HAVE_aarch64_combinedi (TARGET_SIMD)
#define HAVE_aarch64_combinedf (TARGET_SIMD)
#define HAVE_aarch64_simd_combinev8qi (TARGET_SIMD)
#define HAVE_aarch64_simd_combinev4hi (TARGET_SIMD)
#define HAVE_aarch64_simd_combinev4bf (TARGET_SIMD)
#define HAVE_aarch64_simd_combinev4hf (TARGET_SIMD)
#define HAVE_aarch64_simd_combinev2si (TARGET_SIMD)
#define HAVE_aarch64_simd_combinev2sf (TARGET_SIMD)
#define HAVE_aarch64_simd_combinedi (TARGET_SIMD)
#define HAVE_aarch64_simd_combinedf (TARGET_SIMD)
#define HAVE_aarch64_saddl2v16qi (TARGET_SIMD)
#define HAVE_aarch64_saddl2v8hi (TARGET_SIMD)
#define HAVE_aarch64_saddl2v4si (TARGET_SIMD)
#define HAVE_aarch64_uaddl2v16qi (TARGET_SIMD)
#define HAVE_aarch64_uaddl2v8hi (TARGET_SIMD)
#define HAVE_aarch64_uaddl2v4si (TARGET_SIMD)
#define HAVE_aarch64_ssubl2v16qi (TARGET_SIMD)
#define HAVE_aarch64_ssubl2v8hi (TARGET_SIMD)
#define HAVE_aarch64_ssubl2v4si (TARGET_SIMD)
#define HAVE_aarch64_usubl2v16qi (TARGET_SIMD)
#define HAVE_aarch64_usubl2v8hi (TARGET_SIMD)
#define HAVE_aarch64_usubl2v4si (TARGET_SIMD)
#define HAVE_widen_ssumv16qi3 (TARGET_SIMD)
#define HAVE_widen_ssumv8hi3 (TARGET_SIMD)
#define HAVE_widen_ssumv4si3 (TARGET_SIMD)
#define HAVE_widen_ssumv8qi3 (TARGET_SIMD)
#define HAVE_widen_ssumv4hi3 (TARGET_SIMD)
#define HAVE_widen_ssumv2si3 (TARGET_SIMD)
#define HAVE_widen_usumv16qi3 (TARGET_SIMD)
#define HAVE_widen_usumv8hi3 (TARGET_SIMD)
#define HAVE_widen_usumv4si3 (TARGET_SIMD)
#define HAVE_widen_usumv8qi3 (TARGET_SIMD)
#define HAVE_widen_usumv4hi3 (TARGET_SIMD)
#define HAVE_widen_usumv2si3 (TARGET_SIMD)
#define HAVE_aarch64_saddw2v16qi (TARGET_SIMD)
#define HAVE_aarch64_saddw2v8hi (TARGET_SIMD)
#define HAVE_aarch64_saddw2v4si (TARGET_SIMD)
#define HAVE_aarch64_uaddw2v16qi (TARGET_SIMD)
#define HAVE_aarch64_uaddw2v8hi (TARGET_SIMD)
#define HAVE_aarch64_uaddw2v4si (TARGET_SIMD)
#define HAVE_aarch64_ssubw2v16qi (TARGET_SIMD)
#define HAVE_aarch64_ssubw2v8hi (TARGET_SIMD)
#define HAVE_aarch64_ssubw2v4si (TARGET_SIMD)
#define HAVE_aarch64_usubw2v16qi (TARGET_SIMD)
#define HAVE_aarch64_usubw2v8hi (TARGET_SIMD)
#define HAVE_aarch64_usubw2v4si (TARGET_SIMD)
#define HAVE_avgv8qi3_floor (TARGET_SIMD)
#define HAVE_uavgv8qi3_floor (TARGET_SIMD)
#define HAVE_avgv16qi3_floor (TARGET_SIMD)
#define HAVE_uavgv16qi3_floor (TARGET_SIMD)
#define HAVE_avgv4hi3_floor (TARGET_SIMD)
#define HAVE_uavgv4hi3_floor (TARGET_SIMD)
#define HAVE_avgv8hi3_floor (TARGET_SIMD)
#define HAVE_uavgv8hi3_floor (TARGET_SIMD)
#define HAVE_avgv2si3_floor (TARGET_SIMD)
#define HAVE_uavgv2si3_floor (TARGET_SIMD)
#define HAVE_avgv4si3_floor (TARGET_SIMD)
#define HAVE_uavgv4si3_floor (TARGET_SIMD)
#define HAVE_avgv8qi3_ceil (TARGET_SIMD)
#define HAVE_uavgv8qi3_ceil (TARGET_SIMD)
#define HAVE_avgv16qi3_ceil (TARGET_SIMD)
#define HAVE_uavgv16qi3_ceil (TARGET_SIMD)
#define HAVE_avgv4hi3_ceil (TARGET_SIMD)
#define HAVE_uavgv4hi3_ceil (TARGET_SIMD)
#define HAVE_avgv8hi3_ceil (TARGET_SIMD)
#define HAVE_uavgv8hi3_ceil (TARGET_SIMD)
#define HAVE_avgv2si3_ceil (TARGET_SIMD)
#define HAVE_uavgv2si3_ceil (TARGET_SIMD)
#define HAVE_avgv4si3_ceil (TARGET_SIMD)
#define HAVE_uavgv4si3_ceil (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal2v8hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal2v4si (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl2v8hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl2v4si (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal2_lanev8hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal2_lanev4si (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal2_laneqv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal2_laneqv4si (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl2_lanev8hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl2_lanev4si (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl2_laneqv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl2_laneqv4si (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal2_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlal2_nv4si (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl2_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmlsl2_nv4si (TARGET_SIMD)
#define HAVE_aarch64_sqdmull2v8hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmull2v4si (TARGET_SIMD)
#define HAVE_aarch64_sqdmull2_lanev8hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmull2_lanev4si (TARGET_SIMD)
#define HAVE_aarch64_sqdmull2_laneqv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmull2_laneqv4si (TARGET_SIMD)
#define HAVE_aarch64_sqdmull2_nv8hi (TARGET_SIMD)
#define HAVE_aarch64_sqdmull2_nv4si (TARGET_SIMD)
#define HAVE_sqrtv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_sqrtv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
#define HAVE_sqrtv2sf2 (TARGET_SIMD)
#define HAVE_sqrtv4sf2 (TARGET_SIMD)
#define HAVE_sqrtv2df2 (TARGET_SIMD)
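/* Structure loads and stores: vec_load_lanes* and vec_store_lanes* are
   the target-independent optab names the vectorizer uses for
   interleaved accesses (LD2/LD3/LD4, ST2/ST3/ST4), and the OI, CI and
   XI modes are 256-, 384- and 512-bit opaque modes used to carry the
   2-, 3- and 4-register tuples.  */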
#define HAVE_vec_load_lanesoiv16qi (TARGET_SIMD)
#define HAVE_vec_load_lanesoiv8hi (TARGET_SIMD)
#define HAVE_vec_load_lanesoiv4si (TARGET_SIMD)
#define HAVE_vec_load_lanesoiv2di (TARGET_SIMD)
#define HAVE_vec_load_lanesoiv8hf (TARGET_SIMD)
#define HAVE_vec_load_lanesoiv4sf (TARGET_SIMD)
#define HAVE_vec_load_lanesoiv2df (TARGET_SIMD)
#define HAVE_vec_load_lanesoiv8bf (TARGET_SIMD)
#define HAVE_vec_store_lanesoiv16qi (TARGET_SIMD)
#define HAVE_vec_store_lanesoiv8hi (TARGET_SIMD)
#define HAVE_vec_store_lanesoiv4si (TARGET_SIMD)
#define HAVE_vec_store_lanesoiv2di (TARGET_SIMD)
#define HAVE_vec_store_lanesoiv8hf (TARGET_SIMD)
#define HAVE_vec_store_lanesoiv4sf (TARGET_SIMD)
#define HAVE_vec_store_lanesoiv2df (TARGET_SIMD)
#define HAVE_vec_store_lanesoiv8bf (TARGET_SIMD)
#define HAVE_vec_load_lanesciv16qi (TARGET_SIMD)
#define HAVE_vec_load_lanesciv8hi (TARGET_SIMD)
#define HAVE_vec_load_lanesciv4si (TARGET_SIMD)
#define HAVE_vec_load_lanesciv2di (TARGET_SIMD)
#define HAVE_vec_load_lanesciv8hf (TARGET_SIMD)
#define HAVE_vec_load_lanesciv4sf (TARGET_SIMD)
#define HAVE_vec_load_lanesciv2df (TARGET_SIMD)
#define HAVE_vec_load_lanesciv8bf (TARGET_SIMD)
#define HAVE_vec_store_lanesciv16qi (TARGET_SIMD)
#define HAVE_vec_store_lanesciv8hi (TARGET_SIMD)
#define HAVE_vec_store_lanesciv4si (TARGET_SIMD)
#define HAVE_vec_store_lanesciv2di (TARGET_SIMD)
#define HAVE_vec_store_lanesciv8hf (TARGET_SIMD)
#define HAVE_vec_store_lanesciv4sf (TARGET_SIMD)
#define HAVE_vec_store_lanesciv2df (TARGET_SIMD)
#define HAVE_vec_store_lanesciv8bf (TARGET_SIMD)
#define HAVE_vec_load_lanesxiv16qi (TARGET_SIMD)
#define HAVE_vec_load_lanesxiv8hi (TARGET_SIMD)
#define HAVE_vec_load_lanesxiv4si (TARGET_SIMD)
#define HAVE_vec_load_lanesxiv2di (TARGET_SIMD)
#define HAVE_vec_load_lanesxiv8hf (TARGET_SIMD)
#define HAVE_vec_load_lanesxiv4sf (TARGET_SIMD)
#define HAVE_vec_load_lanesxiv2df (TARGET_SIMD)
#define HAVE_vec_load_lanesxiv8bf (TARGET_SIMD)
#define HAVE_vec_store_lanesxiv16qi (TARGET_SIMD)
#define HAVE_vec_store_lanesxiv8hi (TARGET_SIMD)
#define HAVE_vec_store_lanesxiv4si (TARGET_SIMD)
#define HAVE_vec_store_lanesxiv2di (TARGET_SIMD)
#define HAVE_vec_store_lanesxiv8hf (TARGET_SIMD)
#define HAVE_vec_store_lanesxiv4sf (TARGET_SIMD)
#define HAVE_vec_store_lanesxiv2df (TARGET_SIMD)
#define HAVE_vec_store_lanesxiv8bf (TARGET_SIMD)
#define HAVE_movoi (TARGET_SIMD)
#define HAVE_movci (TARGET_SIMD)
#define HAVE_movxi (TARGET_SIMD)
#define HAVE_aarch64_ld1x3v8qi (TARGET_SIMD)
#define HAVE_aarch64_ld1x3v16qi (TARGET_SIMD)
#define HAVE_aarch64_ld1x3v4hi (TARGET_SIMD)
#define HAVE_aarch64_ld1x3v8hi (TARGET_SIMD)
#define HAVE_aarch64_ld1x3v2si (TARGET_SIMD)
#define HAVE_aarch64_ld1x3v4si (TARGET_SIMD)
#define HAVE_aarch64_ld1x3v4bf (TARGET_SIMD)
#define HAVE_aarch64_ld1x3v8bf (TARGET_SIMD)
#define HAVE_aarch64_ld1x3v2di (TARGET_SIMD)
#define HAVE_aarch64_ld1x3v4hf (TARGET_SIMD)
#define HAVE_aarch64_ld1x3v8hf (TARGET_SIMD)
#define HAVE_aarch64_ld1x3v2sf (TARGET_SIMD)
#define HAVE_aarch64_ld1x3v4sf (TARGET_SIMD)
#define HAVE_aarch64_ld1x3v2df (TARGET_SIMD)
#define HAVE_aarch64_ld1x3di (TARGET_SIMD)
#define HAVE_aarch64_ld1x3df (TARGET_SIMD)
#define HAVE_aarch64_ld1x4v8qi (TARGET_SIMD)
#define HAVE_aarch64_ld1x4v16qi (TARGET_SIMD)
#define HAVE_aarch64_ld1x4v4hi (TARGET_SIMD)
#define HAVE_aarch64_ld1x4v8hi (TARGET_SIMD)
#define HAVE_aarch64_ld1x4v2si (TARGET_SIMD)
#define HAVE_aarch64_ld1x4v4si (TARGET_SIMD)
#define HAVE_aarch64_ld1x4v4bf (TARGET_SIMD)
#define HAVE_aarch64_ld1x4v8bf (TARGET_SIMD)
#define HAVE_aarch64_ld1x4v2di (TARGET_SIMD)
#define HAVE_aarch64_ld1x4v4hf (TARGET_SIMD)
#define HAVE_aarch64_ld1x4v8hf (TARGET_SIMD)
#define HAVE_aarch64_ld1x4v2sf (TARGET_SIMD)
#define HAVE_aarch64_ld1x4v4sf (TARGET_SIMD)
#define HAVE_aarch64_ld1x4v2df (TARGET_SIMD)
#define HAVE_aarch64_ld1x4di (TARGET_SIMD)
#define HAVE_aarch64_ld1x4df (TARGET_SIMD)
#define HAVE_aarch64_st1x2v8qi (TARGET_SIMD)
#define HAVE_aarch64_st1x2v16qi (TARGET_SIMD)
#define HAVE_aarch64_st1x2v4hi (TARGET_SIMD)
#define HAVE_aarch64_st1x2v8hi (TARGET_SIMD)
#define HAVE_aarch64_st1x2v2si (TARGET_SIMD)
#define HAVE_aarch64_st1x2v4si (TARGET_SIMD)
#define HAVE_aarch64_st1x2v4bf (TARGET_SIMD)
#define HAVE_aarch64_st1x2v8bf (TARGET_SIMD)
#define HAVE_aarch64_st1x2v2di (TARGET_SIMD)
#define HAVE_aarch64_st1x2v4hf (TARGET_SIMD)
#define HAVE_aarch64_st1x2v8hf (TARGET_SIMD)
#define HAVE_aarch64_st1x2v2sf (TARGET_SIMD)
#define HAVE_aarch64_st1x2v4sf (TARGET_SIMD)
#define HAVE_aarch64_st1x2v2df (TARGET_SIMD)
#define HAVE_aarch64_st1x2di (TARGET_SIMD)
#define HAVE_aarch64_st1x2df (TARGET_SIMD)
#define HAVE_aarch64_st1x3v8qi (TARGET_SIMD)
#define HAVE_aarch64_st1x3v16qi (TARGET_SIMD)
#define HAVE_aarch64_st1x3v4hi (TARGET_SIMD)
#define HAVE_aarch64_st1x3v8hi (TARGET_SIMD)
#define HAVE_aarch64_st1x3v2si (TARGET_SIMD)
#define HAVE_aarch64_st1x3v4si (TARGET_SIMD)
#define HAVE_aarch64_st1x3v4bf (TARGET_SIMD)
#define HAVE_aarch64_st1x3v8bf (TARGET_SIMD)
#define HAVE_aarch64_st1x3v2di (TARGET_SIMD)
#define HAVE_aarch64_st1x3v4hf (TARGET_SIMD)
#define HAVE_aarch64_st1x3v8hf (TARGET_SIMD)
#define HAVE_aarch64_st1x3v2sf (TARGET_SIMD)
#define HAVE_aarch64_st1x3v4sf (TARGET_SIMD)
#define HAVE_aarch64_st1x3v2df (TARGET_SIMD)
#define HAVE_aarch64_st1x3di (TARGET_SIMD)
#define HAVE_aarch64_st1x3df (TARGET_SIMD)
#define HAVE_aarch64_st1x4v8qi (TARGET_SIMD)
#define HAVE_aarch64_st1x4v16qi (TARGET_SIMD)
#define HAVE_aarch64_st1x4v4hi (TARGET_SIMD)
#define HAVE_aarch64_st1x4v8hi (TARGET_SIMD)
#define HAVE_aarch64_st1x4v2si (TARGET_SIMD)
#define HAVE_aarch64_st1x4v4si (TARGET_SIMD)
#define HAVE_aarch64_st1x4v4bf (TARGET_SIMD)
#define HAVE_aarch64_st1x4v8bf (TARGET_SIMD)
#define HAVE_aarch64_st1x4v2di (TARGET_SIMD)
#define HAVE_aarch64_st1x4v4hf (TARGET_SIMD)
#define HAVE_aarch64_st1x4v8hf (TARGET_SIMD)
#define HAVE_aarch64_st1x4v2sf (TARGET_SIMD)
#define HAVE_aarch64_st1x4v4sf (TARGET_SIMD)
#define HAVE_aarch64_st1x4v2df (TARGET_SIMD)
#define HAVE_aarch64_st1x4di (TARGET_SIMD)
#define HAVE_aarch64_st1x4df (TARGET_SIMD)
#define HAVE_aarch64_ld2rv8qi (TARGET_SIMD)
#define HAVE_aarch64_ld2rv16qi (TARGET_SIMD)
#define HAVE_aarch64_ld2rv4hi (TARGET_SIMD)
#define HAVE_aarch64_ld2rv8hi (TARGET_SIMD)
#define HAVE_aarch64_ld2rv2si (TARGET_SIMD)
#define HAVE_aarch64_ld2rv4si (TARGET_SIMD)
#define HAVE_aarch64_ld2rv4bf (TARGET_SIMD)
#define HAVE_aarch64_ld2rv8bf (TARGET_SIMD)
#define HAVE_aarch64_ld2rv2di (TARGET_SIMD)
#define HAVE_aarch64_ld2rv4hf (TARGET_SIMD)
#define HAVE_aarch64_ld2rv8hf (TARGET_SIMD)
#define HAVE_aarch64_ld2rv2sf (TARGET_SIMD)
#define HAVE_aarch64_ld2rv4sf (TARGET_SIMD)
#define HAVE_aarch64_ld2rv2df (TARGET_SIMD)
#define HAVE_aarch64_ld2rdi (TARGET_SIMD)
#define HAVE_aarch64_ld2rdf (TARGET_SIMD)
#define HAVE_aarch64_ld3rv8qi (TARGET_SIMD)
#define HAVE_aarch64_ld3rv16qi (TARGET_SIMD)
#define HAVE_aarch64_ld3rv4hi (TARGET_SIMD)
#define HAVE_aarch64_ld3rv8hi (TARGET_SIMD)
#define HAVE_aarch64_ld3rv2si (TARGET_SIMD)
#define HAVE_aarch64_ld3rv4si (TARGET_SIMD)
#define HAVE_aarch64_ld3rv4bf (TARGET_SIMD)
#define HAVE_aarch64_ld3rv8bf (TARGET_SIMD)
#define HAVE_aarch64_ld3rv2di (TARGET_SIMD)
#define HAVE_aarch64_ld3rv4hf (TARGET_SIMD)
#define HAVE_aarch64_ld3rv8hf (TARGET_SIMD)
#define HAVE_aarch64_ld3rv2sf (TARGET_SIMD)
#define HAVE_aarch64_ld3rv4sf (TARGET_SIMD)
#define HAVE_aarch64_ld3rv2df (TARGET_SIMD)
#define HAVE_aarch64_ld3rdi (TARGET_SIMD)
#define HAVE_aarch64_ld3rdf (TARGET_SIMD)
#define HAVE_aarch64_ld4rv8qi (TARGET_SIMD)
#define HAVE_aarch64_ld4rv16qi (TARGET_SIMD)
#define HAVE_aarch64_ld4rv4hi (TARGET_SIMD)
#define HAVE_aarch64_ld4rv8hi (TARGET_SIMD)
#define HAVE_aarch64_ld4rv2si (TARGET_SIMD)
#define HAVE_aarch64_ld4rv4si (TARGET_SIMD)
#define HAVE_aarch64_ld4rv4bf (TARGET_SIMD)
#define HAVE_aarch64_ld4rv8bf (TARGET_SIMD)
#define HAVE_aarch64_ld4rv2di (TARGET_SIMD)
#define HAVE_aarch64_ld4rv4hf (TARGET_SIMD)
#define HAVE_aarch64_ld4rv8hf (TARGET_SIMD)
#define HAVE_aarch64_ld4rv2sf (TARGET_SIMD)
#define HAVE_aarch64_ld4rv4sf (TARGET_SIMD)
#define HAVE_aarch64_ld4rv2df (TARGET_SIMD)
#define HAVE_aarch64_ld4rdi (TARGET_SIMD)
#define HAVE_aarch64_ld4rdf (TARGET_SIMD)
#define HAVE_aarch64_ld2v8qi (TARGET_SIMD)
#define HAVE_aarch64_ld2v4hi (TARGET_SIMD)
#define HAVE_aarch64_ld2v4bf (TARGET_SIMD)
#define HAVE_aarch64_ld2v4hf (TARGET_SIMD)
#define HAVE_aarch64_ld2v2si (TARGET_SIMD)
#define HAVE_aarch64_ld2v2sf (TARGET_SIMD)
#define HAVE_aarch64_ld2di (TARGET_SIMD)
#define HAVE_aarch64_ld2df (TARGET_SIMD)
#define HAVE_aarch64_ld3v8qi (TARGET_SIMD)
#define HAVE_aarch64_ld3v4hi (TARGET_SIMD)
#define HAVE_aarch64_ld3v4bf (TARGET_SIMD)
#define HAVE_aarch64_ld3v4hf (TARGET_SIMD)
#define HAVE_aarch64_ld3v2si (TARGET_SIMD)
#define HAVE_aarch64_ld3v2sf (TARGET_SIMD)
#define HAVE_aarch64_ld3di (TARGET_SIMD)
#define HAVE_aarch64_ld3df (TARGET_SIMD)
#define HAVE_aarch64_ld4v8qi (TARGET_SIMD)
#define HAVE_aarch64_ld4v4hi (TARGET_SIMD)
#define HAVE_aarch64_ld4v4bf (TARGET_SIMD)
#define HAVE_aarch64_ld4v4hf (TARGET_SIMD)
#define HAVE_aarch64_ld4v2si (TARGET_SIMD)
#define HAVE_aarch64_ld4v2sf (TARGET_SIMD)
#define HAVE_aarch64_ld4di (TARGET_SIMD)
#define HAVE_aarch64_ld4df (TARGET_SIMD)
#define HAVE_aarch64_ld1v8qi (TARGET_SIMD)
#define HAVE_aarch64_ld1v16qi (TARGET_SIMD)
#define HAVE_aarch64_ld1v4hi (TARGET_SIMD)
#define HAVE_aarch64_ld1v8hi (TARGET_SIMD)
#define HAVE_aarch64_ld1v2si (TARGET_SIMD)
#define HAVE_aarch64_ld1v4si (TARGET_SIMD)
#define HAVE_aarch64_ld1v2di (TARGET_SIMD)
#define HAVE_aarch64_ld1v4hf (TARGET_SIMD)
#define HAVE_aarch64_ld1v8hf (TARGET_SIMD)
#define HAVE_aarch64_ld1v4bf (TARGET_SIMD)
#define HAVE_aarch64_ld1v8bf (TARGET_SIMD)
#define HAVE_aarch64_ld1v2sf (TARGET_SIMD)
#define HAVE_aarch64_ld1v4sf (TARGET_SIMD)
#define HAVE_aarch64_ld1v2df (TARGET_SIMD)
#define HAVE_aarch64_ld2v16qi (TARGET_SIMD)
#define HAVE_aarch64_ld3v16qi (TARGET_SIMD)
#define HAVE_aarch64_ld4v16qi (TARGET_SIMD)
#define HAVE_aarch64_ld2v8hi (TARGET_SIMD)
#define HAVE_aarch64_ld3v8hi (TARGET_SIMD)
#define HAVE_aarch64_ld4v8hi (TARGET_SIMD)
#define HAVE_aarch64_ld2v4si (TARGET_SIMD)
#define HAVE_aarch64_ld3v4si (TARGET_SIMD)
#define HAVE_aarch64_ld4v4si (TARGET_SIMD)
#define HAVE_aarch64_ld2v2di (TARGET_SIMD)
#define HAVE_aarch64_ld3v2di (TARGET_SIMD)
#define HAVE_aarch64_ld4v2di (TARGET_SIMD)
#define HAVE_aarch64_ld2v8hf (TARGET_SIMD)
#define HAVE_aarch64_ld3v8hf (TARGET_SIMD)
#define HAVE_aarch64_ld4v8hf (TARGET_SIMD)
#define HAVE_aarch64_ld2v4sf (TARGET_SIMD)
#define HAVE_aarch64_ld3v4sf (TARGET_SIMD)
#define HAVE_aarch64_ld4v4sf (TARGET_SIMD)
#define HAVE_aarch64_ld2v2df (TARGET_SIMD)
#define HAVE_aarch64_ld3v2df (TARGET_SIMD)
#define HAVE_aarch64_ld4v2df (TARGET_SIMD)
#define HAVE_aarch64_ld2v8bf (TARGET_SIMD)
#define HAVE_aarch64_ld3v8bf (TARGET_SIMD)
#define HAVE_aarch64_ld4v8bf (TARGET_SIMD)
#define HAVE_aarch64_ld1x2v16qi (TARGET_SIMD)
#define HAVE_aarch64_ld1x2v8hi (TARGET_SIMD)
#define HAVE_aarch64_ld1x2v4si (TARGET_SIMD)
#define HAVE_aarch64_ld1x2v2di (TARGET_SIMD)
#define HAVE_aarch64_ld1x2v8hf (TARGET_SIMD)
#define HAVE_aarch64_ld1x2v4sf (TARGET_SIMD)
#define HAVE_aarch64_ld1x2v2df (TARGET_SIMD)
#define HAVE_aarch64_ld1x2v8bf (TARGET_SIMD)
#define HAVE_aarch64_ld1x2v8qi (TARGET_SIMD)
#define HAVE_aarch64_ld1x2v4hi (TARGET_SIMD)
#define HAVE_aarch64_ld1x2v4bf (TARGET_SIMD)
#define HAVE_aarch64_ld1x2v4hf (TARGET_SIMD)
#define HAVE_aarch64_ld1x2v2si (TARGET_SIMD)
#define HAVE_aarch64_ld1x2v2sf (TARGET_SIMD)
#define HAVE_aarch64_ld1x2di (TARGET_SIMD)
#define HAVE_aarch64_ld1x2df (TARGET_SIMD)
#define HAVE_aarch64_ld2_lanev8qi (TARGET_SIMD)
#define HAVE_aarch64_ld2_lanev16qi (TARGET_SIMD)
#define HAVE_aarch64_ld2_lanev4hi (TARGET_SIMD)
#define HAVE_aarch64_ld2_lanev8hi (TARGET_SIMD)
#define HAVE_aarch64_ld2_lanev2si (TARGET_SIMD)
#define HAVE_aarch64_ld2_lanev4si (TARGET_SIMD)
#define HAVE_aarch64_ld2_lanev4bf (TARGET_SIMD)
#define HAVE_aarch64_ld2_lanev8bf (TARGET_SIMD)
#define HAVE_aarch64_ld2_lanev2di (TARGET_SIMD)
#define HAVE_aarch64_ld2_lanev4hf (TARGET_SIMD)
#define HAVE_aarch64_ld2_lanev8hf (TARGET_SIMD)
#define HAVE_aarch64_ld2_lanev2sf (TARGET_SIMD)
#define HAVE_aarch64_ld2_lanev4sf (TARGET_SIMD)
#define HAVE_aarch64_ld2_lanev2df (TARGET_SIMD)
#define HAVE_aarch64_ld2_lanedi (TARGET_SIMD)
#define HAVE_aarch64_ld2_lanedf (TARGET_SIMD)
#define HAVE_aarch64_ld3_lanev8qi (TARGET_SIMD)
#define HAVE_aarch64_ld3_lanev16qi (TARGET_SIMD)
#define HAVE_aarch64_ld3_lanev4hi (TARGET_SIMD)
#define HAVE_aarch64_ld3_lanev8hi (TARGET_SIMD)
#define HAVE_aarch64_ld3_lanev2si (TARGET_SIMD)
#define HAVE_aarch64_ld3_lanev4si (TARGET_SIMD)
#define HAVE_aarch64_ld3_lanev4bf (TARGET_SIMD)
#define HAVE_aarch64_ld3_lanev8bf (TARGET_SIMD)
#define HAVE_aarch64_ld3_lanev2di (TARGET_SIMD)
#define HAVE_aarch64_ld3_lanev4hf (TARGET_SIMD)
#define HAVE_aarch64_ld3_lanev8hf (TARGET_SIMD)
#define HAVE_aarch64_ld3_lanev2sf (TARGET_SIMD)
#define HAVE_aarch64_ld3_lanev4sf (TARGET_SIMD)
#define HAVE_aarch64_ld3_lanev2df (TARGET_SIMD)
#define HAVE_aarch64_ld3_lanedi (TARGET_SIMD)
#define HAVE_aarch64_ld3_lanedf (TARGET_SIMD)
#define HAVE_aarch64_ld4_lanev8qi (TARGET_SIMD)
#define HAVE_aarch64_ld4_lanev16qi (TARGET_SIMD)
#define HAVE_aarch64_ld4_lanev4hi (TARGET_SIMD)
#define HAVE_aarch64_ld4_lanev8hi (TARGET_SIMD)
#define HAVE_aarch64_ld4_lanev2si (TARGET_SIMD)
#define HAVE_aarch64_ld4_lanev4si (TARGET_SIMD)
#define HAVE_aarch64_ld4_lanev4bf (TARGET_SIMD)
#define HAVE_aarch64_ld4_lanev8bf (TARGET_SIMD)
#define HAVE_aarch64_ld4_lanev2di (TARGET_SIMD)
#define HAVE_aarch64_ld4_lanev4hf (TARGET_SIMD)
#define HAVE_aarch64_ld4_lanev8hf (TARGET_SIMD)
#define HAVE_aarch64_ld4_lanev2sf (TARGET_SIMD)
#define HAVE_aarch64_ld4_lanev4sf (TARGET_SIMD)
#define HAVE_aarch64_ld4_lanev2df (TARGET_SIMD)
#define HAVE_aarch64_ld4_lanedi (TARGET_SIMD)
#define HAVE_aarch64_ld4_lanedf (TARGET_SIMD)
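/* Accessors that extract a single 64-bit (D) or 128-bit (Q) vector
   from the opaque structure modes OI (two vectors), CI (three) and
   XI (four), as used when lowering the structure load/store
   intrinsics.  */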
#define HAVE_aarch64_get_dregoiv8qi (TARGET_SIMD)
#define HAVE_aarch64_get_dregoiv4hi (TARGET_SIMD)
#define HAVE_aarch64_get_dregoiv4bf (TARGET_SIMD)
#define HAVE_aarch64_get_dregoiv4hf (TARGET_SIMD)
#define HAVE_aarch64_get_dregoiv2si (TARGET_SIMD)
#define HAVE_aarch64_get_dregoiv2sf (TARGET_SIMD)
#define HAVE_aarch64_get_dregoidi (TARGET_SIMD)
#define HAVE_aarch64_get_dregoidf (TARGET_SIMD)
#define HAVE_aarch64_get_dregciv8qi (TARGET_SIMD)
#define HAVE_aarch64_get_dregciv4hi (TARGET_SIMD)
#define HAVE_aarch64_get_dregciv4bf (TARGET_SIMD)
#define HAVE_aarch64_get_dregciv4hf (TARGET_SIMD)
#define HAVE_aarch64_get_dregciv2si (TARGET_SIMD)
#define HAVE_aarch64_get_dregciv2sf (TARGET_SIMD)
#define HAVE_aarch64_get_dregcidi (TARGET_SIMD)
#define HAVE_aarch64_get_dregcidf (TARGET_SIMD)
#define HAVE_aarch64_get_dregxiv8qi (TARGET_SIMD)
#define HAVE_aarch64_get_dregxiv4hi (TARGET_SIMD)
#define HAVE_aarch64_get_dregxiv4bf (TARGET_SIMD)
#define HAVE_aarch64_get_dregxiv4hf (TARGET_SIMD)
#define HAVE_aarch64_get_dregxiv2si (TARGET_SIMD)
#define HAVE_aarch64_get_dregxiv2sf (TARGET_SIMD)
#define HAVE_aarch64_get_dregxidi (TARGET_SIMD)
#define HAVE_aarch64_get_dregxidf (TARGET_SIMD)
#define HAVE_aarch64_get_qregoiv16qi (TARGET_SIMD)
#define HAVE_aarch64_get_qregciv16qi (TARGET_SIMD)
#define HAVE_aarch64_get_qregxiv16qi (TARGET_SIMD)
#define HAVE_aarch64_get_qregoiv8hi (TARGET_SIMD)
#define HAVE_aarch64_get_qregciv8hi (TARGET_SIMD)
#define HAVE_aarch64_get_qregxiv8hi (TARGET_SIMD)
#define HAVE_aarch64_get_qregoiv4si (TARGET_SIMD)
#define HAVE_aarch64_get_qregciv4si (TARGET_SIMD)
#define HAVE_aarch64_get_qregxiv4si (TARGET_SIMD)
#define HAVE_aarch64_get_qregoiv2di (TARGET_SIMD)
#define HAVE_aarch64_get_qregciv2di (TARGET_SIMD)
#define HAVE_aarch64_get_qregxiv2di (TARGET_SIMD)
#define HAVE_aarch64_get_qregoiv8hf (TARGET_SIMD)
#define HAVE_aarch64_get_qregciv8hf (TARGET_SIMD)
#define HAVE_aarch64_get_qregxiv8hf (TARGET_SIMD)
#define HAVE_aarch64_get_qregoiv4sf (TARGET_SIMD)
#define HAVE_aarch64_get_qregciv4sf (TARGET_SIMD)
#define HAVE_aarch64_get_qregxiv4sf (TARGET_SIMD)
#define HAVE_aarch64_get_qregoiv2df (TARGET_SIMD)
#define HAVE_aarch64_get_qregciv2df (TARGET_SIMD)
#define HAVE_aarch64_get_qregxiv2df (TARGET_SIMD)
#define HAVE_aarch64_get_qregoiv8bf (TARGET_SIMD)
#define HAVE_aarch64_get_qregciv8bf (TARGET_SIMD)
#define HAVE_aarch64_get_qregxiv8bf (TARGET_SIMD)
#define HAVE_vec_permv8qi (TARGET_SIMD)
#define HAVE_vec_permv16qi (TARGET_SIMD)
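/* Advanced SIMD structure stores: whole-vector ST2/ST3/ST4, the
   single-lane st[234]_lane forms and plain ST1, mirroring the load
   patterns above.  */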
#define HAVE_aarch64_st2v8qi (TARGET_SIMD)
#define HAVE_aarch64_st2v4hi (TARGET_SIMD)
#define HAVE_aarch64_st2v4bf (TARGET_SIMD)
#define HAVE_aarch64_st2v4hf (TARGET_SIMD)
#define HAVE_aarch64_st2v2si (TARGET_SIMD)
#define HAVE_aarch64_st2v2sf (TARGET_SIMD)
#define HAVE_aarch64_st2di (TARGET_SIMD)
#define HAVE_aarch64_st2df (TARGET_SIMD)
#define HAVE_aarch64_st3v8qi (TARGET_SIMD)
#define HAVE_aarch64_st3v4hi (TARGET_SIMD)
#define HAVE_aarch64_st3v4bf (TARGET_SIMD)
#define HAVE_aarch64_st3v4hf (TARGET_SIMD)
#define HAVE_aarch64_st3v2si (TARGET_SIMD)
#define HAVE_aarch64_st3v2sf (TARGET_SIMD)
#define HAVE_aarch64_st3di (TARGET_SIMD)
#define HAVE_aarch64_st3df (TARGET_SIMD)
#define HAVE_aarch64_st4v8qi (TARGET_SIMD)
#define HAVE_aarch64_st4v4hi (TARGET_SIMD)
#define HAVE_aarch64_st4v4bf (TARGET_SIMD)
#define HAVE_aarch64_st4v4hf (TARGET_SIMD)
#define HAVE_aarch64_st4v2si (TARGET_SIMD)
#define HAVE_aarch64_st4v2sf (TARGET_SIMD)
#define HAVE_aarch64_st4di (TARGET_SIMD)
#define HAVE_aarch64_st4df (TARGET_SIMD)
#define HAVE_aarch64_st2v16qi (TARGET_SIMD)
#define HAVE_aarch64_st3v16qi (TARGET_SIMD)
#define HAVE_aarch64_st4v16qi (TARGET_SIMD)
#define HAVE_aarch64_st2v8hi (TARGET_SIMD)
#define HAVE_aarch64_st3v8hi (TARGET_SIMD)
#define HAVE_aarch64_st4v8hi (TARGET_SIMD)
#define HAVE_aarch64_st2v4si (TARGET_SIMD)
#define HAVE_aarch64_st3v4si (TARGET_SIMD)
#define HAVE_aarch64_st4v4si (TARGET_SIMD)
#define HAVE_aarch64_st2v2di (TARGET_SIMD)
#define HAVE_aarch64_st3v2di (TARGET_SIMD)
#define HAVE_aarch64_st4v2di (TARGET_SIMD)
#define HAVE_aarch64_st2v8hf (TARGET_SIMD)
#define HAVE_aarch64_st3v8hf (TARGET_SIMD)
#define HAVE_aarch64_st4v8hf (TARGET_SIMD)
#define HAVE_aarch64_st2v4sf (TARGET_SIMD)
#define HAVE_aarch64_st3v4sf (TARGET_SIMD)
#define HAVE_aarch64_st4v4sf (TARGET_SIMD)
#define HAVE_aarch64_st2v2df (TARGET_SIMD)
#define HAVE_aarch64_st3v2df (TARGET_SIMD)
#define HAVE_aarch64_st4v2df (TARGET_SIMD)
#define HAVE_aarch64_st2v8bf (TARGET_SIMD)
#define HAVE_aarch64_st3v8bf (TARGET_SIMD)
#define HAVE_aarch64_st4v8bf (TARGET_SIMD)
#define HAVE_aarch64_st2_lanev8qi (TARGET_SIMD)
#define HAVE_aarch64_st2_lanev16qi (TARGET_SIMD)
#define HAVE_aarch64_st2_lanev4hi (TARGET_SIMD)
#define HAVE_aarch64_st2_lanev8hi (TARGET_SIMD)
#define HAVE_aarch64_st2_lanev2si (TARGET_SIMD)
#define HAVE_aarch64_st2_lanev4si (TARGET_SIMD)
#define HAVE_aarch64_st2_lanev4bf (TARGET_SIMD)
#define HAVE_aarch64_st2_lanev8bf (TARGET_SIMD)
#define HAVE_aarch64_st2_lanev2di (TARGET_SIMD)
#define HAVE_aarch64_st2_lanev4hf (TARGET_SIMD)
#define HAVE_aarch64_st2_lanev8hf (TARGET_SIMD)
#define HAVE_aarch64_st2_lanev2sf (TARGET_SIMD)
#define HAVE_aarch64_st2_lanev4sf (TARGET_SIMD)
#define HAVE_aarch64_st2_lanev2df (TARGET_SIMD)
#define HAVE_aarch64_st2_lanedi (TARGET_SIMD)
#define HAVE_aarch64_st2_lanedf (TARGET_SIMD)
#define HAVE_aarch64_st3_lanev8qi (TARGET_SIMD)
#define HAVE_aarch64_st3_lanev16qi (TARGET_SIMD)
#define HAVE_aarch64_st3_lanev4hi (TARGET_SIMD)
#define HAVE_aarch64_st3_lanev8hi (TARGET_SIMD)
#define HAVE_aarch64_st3_lanev2si (TARGET_SIMD)
#define HAVE_aarch64_st3_lanev4si (TARGET_SIMD)
#define HAVE_aarch64_st3_lanev4bf (TARGET_SIMD)
#define HAVE_aarch64_st3_lanev8bf (TARGET_SIMD)
#define HAVE_aarch64_st3_lanev2di (TARGET_SIMD)
#define HAVE_aarch64_st3_lanev4hf (TARGET_SIMD)
#define HAVE_aarch64_st3_lanev8hf (TARGET_SIMD)
#define HAVE_aarch64_st3_lanev2sf (TARGET_SIMD)
#define HAVE_aarch64_st3_lanev4sf (TARGET_SIMD)
#define HAVE_aarch64_st3_lanev2df (TARGET_SIMD)
#define HAVE_aarch64_st3_lanedi (TARGET_SIMD)
#define HAVE_aarch64_st3_lanedf (TARGET_SIMD)
#define HAVE_aarch64_st4_lanev8qi (TARGET_SIMD)
#define HAVE_aarch64_st4_lanev16qi (TARGET_SIMD)
#define HAVE_aarch64_st4_lanev4hi (TARGET_SIMD)
#define HAVE_aarch64_st4_lanev8hi (TARGET_SIMD)
#define HAVE_aarch64_st4_lanev2si (TARGET_SIMD)
#define HAVE_aarch64_st4_lanev4si (TARGET_SIMD)
#define HAVE_aarch64_st4_lanev4bf (TARGET_SIMD)
#define HAVE_aarch64_st4_lanev8bf (TARGET_SIMD)
#define HAVE_aarch64_st4_lanev2di (TARGET_SIMD)
#define HAVE_aarch64_st4_lanev4hf (TARGET_SIMD)
#define HAVE_aarch64_st4_lanev8hf (TARGET_SIMD)
#define HAVE_aarch64_st4_lanev2sf (TARGET_SIMD)
#define HAVE_aarch64_st4_lanev4sf (TARGET_SIMD)
#define HAVE_aarch64_st4_lanev2df (TARGET_SIMD)
#define HAVE_aarch64_st4_lanedi (TARGET_SIMD)
#define HAVE_aarch64_st4_lanedf (TARGET_SIMD)
#define HAVE_aarch64_st1v8qi (TARGET_SIMD)
#define HAVE_aarch64_st1v16qi (TARGET_SIMD)
#define HAVE_aarch64_st1v4hi (TARGET_SIMD)
#define HAVE_aarch64_st1v8hi (TARGET_SIMD)
#define HAVE_aarch64_st1v2si (TARGET_SIMD)
#define HAVE_aarch64_st1v4si (TARGET_SIMD)
#define HAVE_aarch64_st1v2di (TARGET_SIMD)
#define HAVE_aarch64_st1v4hf (TARGET_SIMD)
#define HAVE_aarch64_st1v8hf (TARGET_SIMD)
#define HAVE_aarch64_st1v4bf (TARGET_SIMD)
#define HAVE_aarch64_st1v8bf (TARGET_SIMD)
#define HAVE_aarch64_st1v2sf (TARGET_SIMD)
#define HAVE_aarch64_st1v4sf (TARGET_SIMD)
#define HAVE_aarch64_st1v2df (TARGET_SIMD)
#define HAVE_aarch64_set_qregoiv16qi (TARGET_SIMD)
#define HAVE_aarch64_set_qregciv16qi (TARGET_SIMD)
#define HAVE_aarch64_set_qregxiv16qi (TARGET_SIMD)
#define HAVE_aarch64_set_qregoiv8hi (TARGET_SIMD)
#define HAVE_aarch64_set_qregciv8hi (TARGET_SIMD)
#define HAVE_aarch64_set_qregxiv8hi (TARGET_SIMD)
#define HAVE_aarch64_set_qregoiv4si (TARGET_SIMD)
#define HAVE_aarch64_set_qregciv4si (TARGET_SIMD)
#define HAVE_aarch64_set_qregxiv4si (TARGET_SIMD)
#define HAVE_aarch64_set_qregoiv2di (TARGET_SIMD)
#define HAVE_aarch64_set_qregciv2di (TARGET_SIMD)
#define HAVE_aarch64_set_qregxiv2di (TARGET_SIMD)
#define HAVE_aarch64_set_qregoiv8hf (TARGET_SIMD)
#define HAVE_aarch64_set_qregciv8hf (TARGET_SIMD)
#define HAVE_aarch64_set_qregxiv8hf (TARGET_SIMD)
#define HAVE_aarch64_set_qregoiv4sf (TARGET_SIMD)
#define HAVE_aarch64_set_qregciv4sf (TARGET_SIMD)
#define HAVE_aarch64_set_qregxiv4sf (TARGET_SIMD)
#define HAVE_aarch64_set_qregoiv2df (TARGET_SIMD)
#define HAVE_aarch64_set_qregciv2df (TARGET_SIMD)
#define HAVE_aarch64_set_qregxiv2df (TARGET_SIMD)
#define HAVE_aarch64_set_qregoiv8bf (TARGET_SIMD)
#define HAVE_aarch64_set_qregciv8bf (TARGET_SIMD)
#define HAVE_aarch64_set_qregxiv8bf (TARGET_SIMD)
#define HAVE_vec_initv8qiqi (TARGET_SIMD)
#define HAVE_vec_initv16qiqi (TARGET_SIMD)
#define HAVE_vec_initv4hihi (TARGET_SIMD)
#define HAVE_vec_initv8hihi (TARGET_SIMD)
#define HAVE_vec_initv2sisi (TARGET_SIMD)
#define HAVE_vec_initv4sisi (TARGET_SIMD)
#define HAVE_vec_initv2didi (TARGET_SIMD)
#define HAVE_vec_initv4hfhf (TARGET_SIMD)
#define HAVE_vec_initv8hfhf (TARGET_SIMD)
#define HAVE_vec_initv4bfbf (TARGET_SIMD)
#define HAVE_vec_initv8bfbf (TARGET_SIMD)
#define HAVE_vec_initv2sfsf (TARGET_SIMD)
#define HAVE_vec_initv4sfsf (TARGET_SIMD)
#define HAVE_vec_initv2dfdf (TARGET_SIMD)
#define HAVE_vec_initv16qiv8qi (TARGET_SIMD)
#define HAVE_vec_initv8hiv4hi (TARGET_SIMD)
#define HAVE_vec_initv4siv2si (TARGET_SIMD)
#define HAVE_vec_initv8hfv4hf (TARGET_SIMD)
#define HAVE_vec_initv4sfv2sf (TARGET_SIMD)
#define HAVE_vec_initv8bfv4bf (TARGET_SIMD)
#define HAVE_vec_extractv8qiqi (TARGET_SIMD)
#define HAVE_vec_extractv16qiqi (TARGET_SIMD)
#define HAVE_vec_extractv4hihi (TARGET_SIMD)
#define HAVE_vec_extractv8hihi (TARGET_SIMD)
#define HAVE_vec_extractv2sisi (TARGET_SIMD)
#define HAVE_vec_extractv4sisi (TARGET_SIMD)
#define HAVE_vec_extractv2didi (TARGET_SIMD)
#define HAVE_vec_extractv4hfhf (TARGET_SIMD)
#define HAVE_vec_extractv8hfhf (TARGET_SIMD)
#define HAVE_vec_extractv4bfbf (TARGET_SIMD)
#define HAVE_vec_extractv8bfbf (TARGET_SIMD)
#define HAVE_vec_extractv2sfsf (TARGET_SIMD)
#define HAVE_vec_extractv4sfsf (TARGET_SIMD)
#define HAVE_vec_extractv2dfdf (TARGET_SIMD)
#define HAVE_vec_extractv16qiv8qi (TARGET_SIMD)
#define HAVE_vec_extractv8hiv4hi (TARGET_SIMD)
#define HAVE_vec_extractv4siv2si (TARGET_SIMD)
#define HAVE_vec_extractv8hfv4hf (TARGET_SIMD)
#define HAVE_vec_extractv8bfv4bf (TARGET_SIMD)
#define HAVE_vec_extractv4sfv2sf (TARGET_SIMD)
#define HAVE_vec_extractv2dfv1df (TARGET_SIMD)
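/* FP16 multiply-accumulate-long patterns (FMLAL/FMLSL and their lane
   forms), gated on the fp16fml architecture extension rather than
   plain TARGET_SIMD.  */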
#define HAVE_aarch64_fmlal_lowv2sf (TARGET_F16FML)
#define HAVE_aarch64_fmlsl_lowv2sf (TARGET_F16FML)
#define HAVE_aarch64_fmlalq_lowv4sf (TARGET_F16FML)
#define HAVE_aarch64_fmlslq_lowv4sf (TARGET_F16FML)
#define HAVE_aarch64_fmlal_highv2sf (TARGET_F16FML)
#define HAVE_aarch64_fmlsl_highv2sf (TARGET_F16FML)
#define HAVE_aarch64_fmlalq_highv4sf (TARGET_F16FML)
#define HAVE_aarch64_fmlslq_highv4sf (TARGET_F16FML)
#define HAVE_aarch64_fmlal_lane_lowv2sf (TARGET_F16FML)
#define HAVE_aarch64_fmlsl_lane_lowv2sf (TARGET_F16FML)
#define HAVE_aarch64_fmlal_lane_highv2sf (TARGET_F16FML)
#define HAVE_aarch64_fmlsl_lane_highv2sf (TARGET_F16FML)
#define HAVE_aarch64_fmlalq_laneq_lowv4sf (TARGET_F16FML)
#define HAVE_aarch64_fmlslq_laneq_lowv4sf (TARGET_F16FML)
#define HAVE_aarch64_fmlalq_laneq_highv4sf (TARGET_F16FML)
#define HAVE_aarch64_fmlslq_laneq_highv4sf (TARGET_F16FML)
#define HAVE_aarch64_fmlal_laneq_lowv2sf (TARGET_F16FML)
#define HAVE_aarch64_fmlsl_laneq_lowv2sf (TARGET_F16FML)
#define HAVE_aarch64_fmlal_laneq_highv2sf (TARGET_F16FML)
#define HAVE_aarch64_fmlsl_laneq_highv2sf (TARGET_F16FML)
#define HAVE_aarch64_fmlalq_lane_lowv4sf (TARGET_F16FML)
#define HAVE_aarch64_fmlslq_lane_lowv4sf (TARGET_F16FML)
#define HAVE_aarch64_fmlalq_lane_highv4sf (TARGET_F16FML)
#define HAVE_aarch64_fmlslq_lane_highv4sf (TARGET_F16FML)
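/* Atomic operations.  These conditions are simply 1 because the
   expanders are always available; the choice between LSE instructions
   and load-exclusive/store-exclusive loops is made at expansion time.

   As an illustrative sketch of the usual consumer idiom (not part of
   the generated output; dst/mem/val/model stand for rtx values built
   by the caller), each HAVE_<name> macro pairs with the gen_<name>
   function that genemit produces from the same pattern:

     if (HAVE_atomic_exchangesi)
       emit_insn (gen_atomic_exchangesi (dst, mem, val, model));  */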
#define HAVE_atomic_compare_and_swapqi 1
#define HAVE_atomic_compare_and_swaphi 1
#define HAVE_atomic_compare_and_swapsi 1
#define HAVE_atomic_compare_and_swapdi 1
#define HAVE_atomic_compare_and_swapti 1
#define HAVE_atomic_exchangeqi 1
#define HAVE_atomic_exchangehi 1
#define HAVE_atomic_exchangesi 1
#define HAVE_atomic_exchangedi 1
#define HAVE_atomic_addqi 1
#define HAVE_atomic_subqi 1
#define HAVE_atomic_orqi 1
#define HAVE_atomic_xorqi 1
#define HAVE_atomic_andqi 1
#define HAVE_atomic_addhi 1
#define HAVE_atomic_subhi 1
#define HAVE_atomic_orhi 1
#define HAVE_atomic_xorhi 1
#define HAVE_atomic_andhi 1
#define HAVE_atomic_addsi 1
#define HAVE_atomic_subsi 1
#define HAVE_atomic_orsi 1
#define HAVE_atomic_xorsi 1
#define HAVE_atomic_andsi 1
#define HAVE_atomic_adddi 1
#define HAVE_atomic_subdi 1
#define HAVE_atomic_ordi 1
#define HAVE_atomic_xordi 1
#define HAVE_atomic_anddi 1
#define HAVE_atomic_fetch_addqi 1
#define HAVE_atomic_fetch_subqi 1
#define HAVE_atomic_fetch_orqi 1
#define HAVE_atomic_fetch_xorqi 1
#define HAVE_atomic_fetch_andqi 1
#define HAVE_atomic_fetch_addhi 1
#define HAVE_atomic_fetch_subhi 1
#define HAVE_atomic_fetch_orhi 1
#define HAVE_atomic_fetch_xorhi 1
#define HAVE_atomic_fetch_andhi 1
#define HAVE_atomic_fetch_addsi 1
#define HAVE_atomic_fetch_subsi 1
#define HAVE_atomic_fetch_orsi 1
#define HAVE_atomic_fetch_xorsi 1
#define HAVE_atomic_fetch_andsi 1
#define HAVE_atomic_fetch_adddi 1
#define HAVE_atomic_fetch_subdi 1
#define HAVE_atomic_fetch_ordi 1
#define HAVE_atomic_fetch_xordi 1
#define HAVE_atomic_fetch_anddi 1
#define HAVE_atomic_add_fetchqi 1
#define HAVE_atomic_sub_fetchqi 1
#define HAVE_atomic_or_fetchqi 1
#define HAVE_atomic_xor_fetchqi 1
#define HAVE_atomic_and_fetchqi 1
#define HAVE_atomic_add_fetchhi 1
#define HAVE_atomic_sub_fetchhi 1
#define HAVE_atomic_or_fetchhi 1
#define HAVE_atomic_xor_fetchhi 1
#define HAVE_atomic_and_fetchhi 1
#define HAVE_atomic_add_fetchsi 1
#define HAVE_atomic_sub_fetchsi 1
#define HAVE_atomic_or_fetchsi 1
#define HAVE_atomic_xor_fetchsi 1
#define HAVE_atomic_and_fetchsi 1
#define HAVE_atomic_add_fetchdi 1
#define HAVE_atomic_sub_fetchdi 1
#define HAVE_atomic_or_fetchdi 1
#define HAVE_atomic_xor_fetchdi 1
#define HAVE_atomic_and_fetchdi 1
#define HAVE_mem_thread_fence 1
#define HAVE_dmb 1
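/* SVE (Scalable Vector Extension) patterns begin here.  A mode such
   as VNx16QI is a variable-length vector holding a runtime multiple
   of 16 QImode elements (16 per 128-bit granule).  */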
#define HAVE_movvnx16qi (TARGET_SVE)
#define HAVE_movvnx8qi (TARGET_SVE)
#define HAVE_movvnx4qi (TARGET_SVE)
#define HAVE_movvnx2qi (TARGET_SVE)
#define HAVE_movvnx8hi (TARGET_SVE)
#define HAVE_movvnx4hi (TARGET_SVE)
#define HAVE_movvnx2hi (TARGET_SVE)
#define HAVE_movvnx8hf (TARGET_SVE)
#define HAVE_movvnx4hf (TARGET_SVE)
#define HAVE_movvnx2hf (TARGET_SVE)
#define HAVE_movvnx8bf (TARGET_SVE)
#define HAVE_movvnx4si (TARGET_SVE)
#define HAVE_movvnx2si (TARGET_SVE)
#define HAVE_movvnx4sf (TARGET_SVE)
#define HAVE_movvnx2sf (TARGET_SVE)
#define HAVE_movvnx2di (TARGET_SVE)
#define HAVE_movvnx2df (TARGET_SVE)
#define HAVE_movmisalignvnx16qi (TARGET_SVE)
#define HAVE_movmisalignvnx8qi (TARGET_SVE)
#define HAVE_movmisalignvnx4qi (TARGET_SVE)
#define HAVE_movmisalignvnx2qi (TARGET_SVE)
#define HAVE_movmisalignvnx8hi (TARGET_SVE)
#define HAVE_movmisalignvnx4hi (TARGET_SVE)
#define HAVE_movmisalignvnx2hi (TARGET_SVE)
#define HAVE_movmisalignvnx8hf (TARGET_SVE)
#define HAVE_movmisalignvnx4hf (TARGET_SVE)
#define HAVE_movmisalignvnx2hf (TARGET_SVE)
#define HAVE_movmisalignvnx8bf (TARGET_SVE)
#define HAVE_movmisalignvnx4si (TARGET_SVE)
#define HAVE_movmisalignvnx2si (TARGET_SVE)
#define HAVE_movmisalignvnx4sf (TARGET_SVE)
#define HAVE_movmisalignvnx2sf (TARGET_SVE)
#define HAVE_movmisalignvnx2di (TARGET_SVE)
#define HAVE_movmisalignvnx2df (TARGET_SVE)
#define HAVE_aarch64_sve_reload_mem (TARGET_SVE)
#define HAVE_aarch64_sve_reinterpretvnx16qi (TARGET_SVE)
#define HAVE_aarch64_sve_reinterpretvnx8qi (TARGET_SVE)
#define HAVE_aarch64_sve_reinterpretvnx4qi (TARGET_SVE)
#define HAVE_aarch64_sve_reinterpretvnx2qi (TARGET_SVE)
#define HAVE_aarch64_sve_reinterpretvnx8hi (TARGET_SVE)
#define HAVE_aarch64_sve_reinterpretvnx4hi (TARGET_SVE)
#define HAVE_aarch64_sve_reinterpretvnx2hi (TARGET_SVE)
#define HAVE_aarch64_sve_reinterpretvnx8hf (TARGET_SVE)
#define HAVE_aarch64_sve_reinterpretvnx4hf (TARGET_SVE)
#define HAVE_aarch64_sve_reinterpretvnx2hf (TARGET_SVE)
#define HAVE_aarch64_sve_reinterpretvnx8bf (TARGET_SVE)
#define HAVE_aarch64_sve_reinterpretvnx4si (TARGET_SVE)
#define HAVE_aarch64_sve_reinterpretvnx2si (TARGET_SVE)
#define HAVE_aarch64_sve_reinterpretvnx4sf (TARGET_SVE)
#define HAVE_aarch64_sve_reinterpretvnx2sf (TARGET_SVE)
#define HAVE_aarch64_sve_reinterpretvnx2di (TARGET_SVE)
#define HAVE_aarch64_sve_reinterpretvnx2df (TARGET_SVE)
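/* Moves of SVE tuple modes: VNx32QI, VNx48QI and VNx64QI are pairs,
   triples and quadruples of VNx16QI, and likewise for the other
   element types.  */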
#define HAVE_movvnx32qi (TARGET_SVE)
#define HAVE_movvnx16hi (TARGET_SVE)
#define HAVE_movvnx8si (TARGET_SVE)
#define HAVE_movvnx4di (TARGET_SVE)
#define HAVE_movvnx16bf (TARGET_SVE)
#define HAVE_movvnx16hf (TARGET_SVE)
#define HAVE_movvnx8sf (TARGET_SVE)
#define HAVE_movvnx4df (TARGET_SVE)
#define HAVE_movvnx48qi (TARGET_SVE)
#define HAVE_movvnx24hi (TARGET_SVE)
#define HAVE_movvnx12si (TARGET_SVE)
#define HAVE_movvnx6di (TARGET_SVE)
#define HAVE_movvnx24bf (TARGET_SVE)
#define HAVE_movvnx24hf (TARGET_SVE)
#define HAVE_movvnx12sf (TARGET_SVE)
#define HAVE_movvnx6df (TARGET_SVE)
#define HAVE_movvnx64qi (TARGET_SVE)
#define HAVE_movvnx32hi (TARGET_SVE)
#define HAVE_movvnx16si (TARGET_SVE)
#define HAVE_movvnx8di (TARGET_SVE)
#define HAVE_movvnx32bf (TARGET_SVE)
#define HAVE_movvnx32hf (TARGET_SVE)
#define HAVE_movvnx16sf (TARGET_SVE)
#define HAVE_movvnx8df (TARGET_SVE)
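/* Moves of SVE predicate registers; VNx16BI holds one boolean per
   byte lane, VNx2BI one per doubleword lane.  */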
#define HAVE_movvnx16bi (TARGET_SVE)
#define HAVE_movvnx8bi (TARGET_SVE)
#define HAVE_movvnx4bi (TARGET_SVE)
#define HAVE_movvnx2bi (TARGET_SVE)
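/* Multi-register structure loads (LD2/LD3/LD4): the first mode in the
   name is the tuple, the second the single-vector mode of each
   member.  */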
#define HAVE_vec_load_lanesvnx32qivnx16qi (TARGET_SVE)
#define HAVE_vec_load_lanesvnx16hivnx8hi (TARGET_SVE)
#define HAVE_vec_load_lanesvnx8sivnx4si (TARGET_SVE)
#define HAVE_vec_load_lanesvnx4divnx2di (TARGET_SVE)
#define HAVE_vec_load_lanesvnx16bfvnx8bf (TARGET_SVE)
#define HAVE_vec_load_lanesvnx16hfvnx8hf (TARGET_SVE)
#define HAVE_vec_load_lanesvnx8sfvnx4sf (TARGET_SVE)
#define HAVE_vec_load_lanesvnx4dfvnx2df (TARGET_SVE)
#define HAVE_vec_load_lanesvnx48qivnx16qi (TARGET_SVE)
#define HAVE_vec_load_lanesvnx24hivnx8hi (TARGET_SVE)
#define HAVE_vec_load_lanesvnx12sivnx4si (TARGET_SVE)
#define HAVE_vec_load_lanesvnx6divnx2di (TARGET_SVE)
#define HAVE_vec_load_lanesvnx24bfvnx8bf (TARGET_SVE)
#define HAVE_vec_load_lanesvnx24hfvnx8hf (TARGET_SVE)
#define HAVE_vec_load_lanesvnx12sfvnx4sf (TARGET_SVE)
#define HAVE_vec_load_lanesvnx6dfvnx2df (TARGET_SVE)
#define HAVE_vec_load_lanesvnx64qivnx16qi (TARGET_SVE)
#define HAVE_vec_load_lanesvnx32hivnx8hi (TARGET_SVE)
#define HAVE_vec_load_lanesvnx16sivnx4si (TARGET_SVE)
#define HAVE_vec_load_lanesvnx8divnx2di (TARGET_SVE)
#define HAVE_vec_load_lanesvnx32bfvnx8bf (TARGET_SVE)
#define HAVE_vec_load_lanesvnx32hfvnx8hf (TARGET_SVE)
#define HAVE_vec_load_lanesvnx16sfvnx4sf (TARGET_SVE)
#define HAVE_vec_load_lanesvnx8dfvnx2df (TARGET_SVE)
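/* Gather loads and, further down, scatter stores.  The trailing mode
   names the offset vector: DImode offsets for the 2-element forms,
   SImode offsets for the 4-element forms.  */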
#define HAVE_gather_loadvnx2qivnx2di (TARGET_SVE)
#define HAVE_gather_loadvnx2hivnx2di (TARGET_SVE)
#define HAVE_gather_loadvnx2hfvnx2di (TARGET_SVE)
#define HAVE_gather_loadvnx2sivnx2di (TARGET_SVE)
#define HAVE_gather_loadvnx2sfvnx2di (TARGET_SVE)
#define HAVE_gather_loadvnx2divnx2di (TARGET_SVE)
#define HAVE_gather_loadvnx2dfvnx2di (TARGET_SVE)
#define HAVE_gather_loadvnx4qivnx4si (TARGET_SVE)
#define HAVE_gather_loadvnx4hivnx4si (TARGET_SVE)
#define HAVE_gather_loadvnx4hfvnx4si (TARGET_SVE)
#define HAVE_gather_loadvnx4sivnx4si (TARGET_SVE)
#define HAVE_gather_loadvnx4sfvnx4si (TARGET_SVE)
#define HAVE_vec_store_lanesvnx32qivnx16qi (TARGET_SVE)
#define HAVE_vec_store_lanesvnx16hivnx8hi (TARGET_SVE)
#define HAVE_vec_store_lanesvnx8sivnx4si (TARGET_SVE)
#define HAVE_vec_store_lanesvnx4divnx2di (TARGET_SVE)
#define HAVE_vec_store_lanesvnx16bfvnx8bf (TARGET_SVE)
#define HAVE_vec_store_lanesvnx16hfvnx8hf (TARGET_SVE)
#define HAVE_vec_store_lanesvnx8sfvnx4sf (TARGET_SVE)
#define HAVE_vec_store_lanesvnx4dfvnx2df (TARGET_SVE)
#define HAVE_vec_store_lanesvnx48qivnx16qi (TARGET_SVE)
#define HAVE_vec_store_lanesvnx24hivnx8hi (TARGET_SVE)
#define HAVE_vec_store_lanesvnx12sivnx4si (TARGET_SVE)
#define HAVE_vec_store_lanesvnx6divnx2di (TARGET_SVE)
#define HAVE_vec_store_lanesvnx24bfvnx8bf (TARGET_SVE)
#define HAVE_vec_store_lanesvnx24hfvnx8hf (TARGET_SVE)
#define HAVE_vec_store_lanesvnx12sfvnx4sf (TARGET_SVE)
#define HAVE_vec_store_lanesvnx6dfvnx2df (TARGET_SVE)
#define HAVE_vec_store_lanesvnx64qivnx16qi (TARGET_SVE)
#define HAVE_vec_store_lanesvnx32hivnx8hi (TARGET_SVE)
#define HAVE_vec_store_lanesvnx16sivnx4si (TARGET_SVE)
#define HAVE_vec_store_lanesvnx8divnx2di (TARGET_SVE)
#define HAVE_vec_store_lanesvnx32bfvnx8bf (TARGET_SVE)
#define HAVE_vec_store_lanesvnx32hfvnx8hf (TARGET_SVE)
#define HAVE_vec_store_lanesvnx16sfvnx4sf (TARGET_SVE)
#define HAVE_vec_store_lanesvnx8dfvnx2df (TARGET_SVE)
#define HAVE_scatter_storevnx2qivnx2di (TARGET_SVE)
#define HAVE_scatter_storevnx2hivnx2di (TARGET_SVE)
#define HAVE_scatter_storevnx2hfvnx2di (TARGET_SVE)
#define HAVE_scatter_storevnx2sivnx2di (TARGET_SVE)
#define HAVE_scatter_storevnx2sfvnx2di (TARGET_SVE)
#define HAVE_scatter_storevnx2divnx2di (TARGET_SVE)
#define HAVE_scatter_storevnx2dfvnx2di (TARGET_SVE)
#define HAVE_scatter_storevnx4qivnx4si (TARGET_SVE)
#define HAVE_scatter_storevnx4hivnx4si (TARGET_SVE)
#define HAVE_scatter_storevnx4hfvnx4si (TARGET_SVE)
#define HAVE_scatter_storevnx4sivnx4si (TARGET_SVE)
#define HAVE_scatter_storevnx4sfvnx4si (TARGET_SVE)
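/* Broadcast, initialization and element extraction for SVE data and
   predicate modes.  */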
#define HAVE_vec_duplicatevnx16qi (TARGET_SVE)
#define HAVE_vec_duplicatevnx8qi (TARGET_SVE)
#define HAVE_vec_duplicatevnx4qi (TARGET_SVE)
#define HAVE_vec_duplicatevnx2qi (TARGET_SVE)
#define HAVE_vec_duplicatevnx8hi (TARGET_SVE)
#define HAVE_vec_duplicatevnx4hi (TARGET_SVE)
#define HAVE_vec_duplicatevnx2hi (TARGET_SVE)
#define HAVE_vec_duplicatevnx8hf (TARGET_SVE)
#define HAVE_vec_duplicatevnx4hf (TARGET_SVE)
#define HAVE_vec_duplicatevnx2hf (TARGET_SVE)
#define HAVE_vec_duplicatevnx8bf (TARGET_SVE)
#define HAVE_vec_duplicatevnx4si (TARGET_SVE)
#define HAVE_vec_duplicatevnx2si (TARGET_SVE)
#define HAVE_vec_duplicatevnx4sf (TARGET_SVE)
#define HAVE_vec_duplicatevnx2sf (TARGET_SVE)
#define HAVE_vec_duplicatevnx2di (TARGET_SVE)
#define HAVE_vec_duplicatevnx2df (TARGET_SVE)
#define HAVE_vec_initvnx16qiqi (TARGET_SVE)
#define HAVE_vec_initvnx8hihi (TARGET_SVE)
#define HAVE_vec_initvnx4sisi (TARGET_SVE)
#define HAVE_vec_initvnx2didi (TARGET_SVE)
#define HAVE_vec_initvnx8bfbf (TARGET_SVE)
#define HAVE_vec_initvnx8hfhf (TARGET_SVE)
#define HAVE_vec_initvnx4sfsf (TARGET_SVE)
#define HAVE_vec_initvnx2dfdf (TARGET_SVE)
#define HAVE_vec_duplicatevnx16bi (TARGET_SVE)
#define HAVE_vec_duplicatevnx8bi (TARGET_SVE)
#define HAVE_vec_duplicatevnx4bi (TARGET_SVE)
#define HAVE_vec_duplicatevnx2bi (TARGET_SVE)
#define HAVE_vec_extractvnx16qiqi (TARGET_SVE)
#define HAVE_vec_extractvnx8hihi (TARGET_SVE)
#define HAVE_vec_extractvnx4sisi (TARGET_SVE)
#define HAVE_vec_extractvnx2didi (TARGET_SVE)
#define HAVE_vec_extractvnx8bfbf (TARGET_SVE)
#define HAVE_vec_extractvnx8hfhf (TARGET_SVE)
#define HAVE_vec_extractvnx4sfsf (TARGET_SVE)
#define HAVE_vec_extractvnx2dfdf (TARGET_SVE)
#define HAVE_vec_extractvnx16biqi (TARGET_SVE)
#define HAVE_vec_extractvnx8bihi (TARGET_SVE)
#define HAVE_vec_extractvnx4bisi (TARGET_SVE)
#define HAVE_vec_extractvnx2bidi (TARGET_SVE)
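/* Integer unary operations.  The cond_* expanders additionally take a
   governing predicate and a fallback operand; the saturating
   qabs/qneg variants also require SVE2, hence the doubled
   conditions.  */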
#define HAVE_absvnx16qi2 (TARGET_SVE)
#define HAVE_negvnx16qi2 (TARGET_SVE)
#define HAVE_one_cmplvnx16qi2 (TARGET_SVE)
#define HAVE_clrsbvnx16qi2 (TARGET_SVE)
#define HAVE_clzvnx16qi2 (TARGET_SVE)
#define HAVE_popcountvnx16qi2 (TARGET_SVE)
#define HAVE_qabsvnx16qi2 ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_qnegvnx16qi2 ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_absvnx8hi2 (TARGET_SVE)
#define HAVE_negvnx8hi2 (TARGET_SVE)
#define HAVE_one_cmplvnx8hi2 (TARGET_SVE)
#define HAVE_clrsbvnx8hi2 (TARGET_SVE)
#define HAVE_clzvnx8hi2 (TARGET_SVE)
#define HAVE_popcountvnx8hi2 (TARGET_SVE)
#define HAVE_qabsvnx8hi2 ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_qnegvnx8hi2 ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_absvnx4si2 (TARGET_SVE)
#define HAVE_negvnx4si2 (TARGET_SVE)
#define HAVE_one_cmplvnx4si2 (TARGET_SVE)
#define HAVE_clrsbvnx4si2 (TARGET_SVE)
#define HAVE_clzvnx4si2 (TARGET_SVE)
#define HAVE_popcountvnx4si2 (TARGET_SVE)
#define HAVE_qabsvnx4si2 ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_qnegvnx4si2 ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_absvnx2di2 (TARGET_SVE)
#define HAVE_negvnx2di2 (TARGET_SVE)
#define HAVE_one_cmplvnx2di2 (TARGET_SVE)
#define HAVE_clrsbvnx2di2 (TARGET_SVE)
#define HAVE_clzvnx2di2 (TARGET_SVE)
#define HAVE_popcountvnx2di2 (TARGET_SVE)
#define HAVE_qabsvnx2di2 ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_qnegvnx2di2 ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_absvnx16qi (TARGET_SVE)
#define HAVE_cond_negvnx16qi (TARGET_SVE)
#define HAVE_cond_one_cmplvnx16qi (TARGET_SVE)
#define HAVE_cond_clrsbvnx16qi (TARGET_SVE)
#define HAVE_cond_clzvnx16qi (TARGET_SVE)
#define HAVE_cond_popcountvnx16qi (TARGET_SVE)
#define HAVE_cond_qabsvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_qnegvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_absvnx8hi (TARGET_SVE)
#define HAVE_cond_negvnx8hi (TARGET_SVE)
#define HAVE_cond_one_cmplvnx8hi (TARGET_SVE)
#define HAVE_cond_clrsbvnx8hi (TARGET_SVE)
#define HAVE_cond_clzvnx8hi (TARGET_SVE)
#define HAVE_cond_popcountvnx8hi (TARGET_SVE)
#define HAVE_cond_qabsvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_qnegvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_absvnx4si (TARGET_SVE)
#define HAVE_cond_negvnx4si (TARGET_SVE)
#define HAVE_cond_one_cmplvnx4si (TARGET_SVE)
#define HAVE_cond_clrsbvnx4si (TARGET_SVE)
#define HAVE_cond_clzvnx4si (TARGET_SVE)
#define HAVE_cond_popcountvnx4si (TARGET_SVE)
#define HAVE_cond_qabsvnx4si ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_qnegvnx4si ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_absvnx2di (TARGET_SVE)
#define HAVE_cond_negvnx2di (TARGET_SVE)
#define HAVE_cond_one_cmplvnx2di (TARGET_SVE)
#define HAVE_cond_clrsbvnx2di (TARGET_SVE)
#define HAVE_cond_clzvnx2di (TARGET_SVE)
#define HAVE_cond_popcountvnx2di (TARGET_SVE)
#define HAVE_cond_qabsvnx2di ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_qnegvnx2di ((TARGET_SVE) && (TARGET_SVE2))
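/* Sign/zero extensions between SVE element widths.  The hexadecimal
   expressions are constant mode masks substituted from the machine
   description; every test below folds to true at compile time,
   leaving TARGET_SVE as the only real condition.  */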
#define HAVE_extendvnx8qivnx8hi2 (TARGET_SVE && (~0x81 & 0x81) == 0)
#define HAVE_zero_extendvnx8qivnx8hi2 (TARGET_SVE && (~0x81 & 0x81) == 0)
#define HAVE_extendvnx4qivnx4hi2 (TARGET_SVE && (~0x41 & 0x41) == 0)
#define HAVE_zero_extendvnx4qivnx4hi2 (TARGET_SVE && (~0x41 & 0x41) == 0)
#define HAVE_extendvnx2qivnx2hi2 (TARGET_SVE && (~0x21 & 0x21) == 0)
#define HAVE_zero_extendvnx2qivnx2hi2 (TARGET_SVE && (~0x21 & 0x21) == 0)
#define HAVE_extendvnx4qivnx4si2 (TARGET_SVE && (~0x43 & 0x41) == 0)
#define HAVE_zero_extendvnx4qivnx4si2 (TARGET_SVE && (~0x43 & 0x41) == 0)
#define HAVE_extendvnx4hivnx4si2 (TARGET_SVE && (~0x43 & 0x42) == 0)
#define HAVE_zero_extendvnx4hivnx4si2 (TARGET_SVE && (~0x43 & 0x42) == 0)
#define HAVE_extendvnx2qivnx2si2 (TARGET_SVE && (~0x23 & 0x21) == 0)
#define HAVE_zero_extendvnx2qivnx2si2 (TARGET_SVE && (~0x23 & 0x21) == 0)
#define HAVE_extendvnx2hivnx2si2 (TARGET_SVE && (~0x23 & 0x22) == 0)
#define HAVE_zero_extendvnx2hivnx2si2 (TARGET_SVE && (~0x23 & 0x22) == 0)
#define HAVE_extendvnx2qivnx2di2 (TARGET_SVE && (~0x27 & 0x21) == 0)
#define HAVE_zero_extendvnx2qivnx2di2 (TARGET_SVE && (~0x27 & 0x21) == 0)
#define HAVE_extendvnx2hivnx2di2 (TARGET_SVE && (~0x27 & 0x22) == 0)
#define HAVE_zero_extendvnx2hivnx2di2 (TARGET_SVE && (~0x27 & 0x22) == 0)
#define HAVE_extendvnx2sivnx2di2 (TARGET_SVE && (~0x27 & 0x24) == 0)
#define HAVE_zero_extendvnx2sivnx2di2 (TARGET_SVE && (~0x27 & 0x24) == 0)
#define HAVE_aarch64_pred_cnotvnx16qi (TARGET_SVE)
#define HAVE_aarch64_pred_cnotvnx8hi (TARGET_SVE)
#define HAVE_aarch64_pred_cnotvnx4si (TARGET_SVE)
#define HAVE_aarch64_pred_cnotvnx2di (TARGET_SVE)
#define HAVE_cond_cnotvnx16qi (TARGET_SVE)
#define HAVE_cond_cnotvnx8hi (TARGET_SVE)
#define HAVE_cond_cnotvnx4si (TARGET_SVE)
#define HAVE_cond_cnotvnx2di (TARGET_SVE)
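/* Floating-point unary operations, including the FRINT* rounding
   forms, for each SVE floating-point mode, plus their conditional
   variants.  */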
#define HAVE_absvnx8hf2 (TARGET_SVE)
#define HAVE_negvnx8hf2 (TARGET_SVE)
#define HAVE_frecpxvnx8hf2 (TARGET_SVE)
#define HAVE_roundvnx8hf2 (TARGET_SVE)
#define HAVE_nearbyintvnx8hf2 (TARGET_SVE)
#define HAVE_floorvnx8hf2 (TARGET_SVE)
#define HAVE_frintnvnx8hf2 (TARGET_SVE)
#define HAVE_ceilvnx8hf2 (TARGET_SVE)
#define HAVE_rintvnx8hf2 (TARGET_SVE)
#define HAVE_btruncvnx8hf2 (TARGET_SVE)
#define HAVE_absvnx4sf2 (TARGET_SVE)
#define HAVE_negvnx4sf2 (TARGET_SVE)
#define HAVE_frecpxvnx4sf2 (TARGET_SVE)
#define HAVE_roundvnx4sf2 (TARGET_SVE)
#define HAVE_nearbyintvnx4sf2 (TARGET_SVE)
#define HAVE_floorvnx4sf2 (TARGET_SVE)
#define HAVE_frintnvnx4sf2 (TARGET_SVE)
#define HAVE_ceilvnx4sf2 (TARGET_SVE)
#define HAVE_rintvnx4sf2 (TARGET_SVE)
#define HAVE_btruncvnx4sf2 (TARGET_SVE)
#define HAVE_absvnx2df2 (TARGET_SVE)
#define HAVE_negvnx2df2 (TARGET_SVE)
#define HAVE_frecpxvnx2df2 (TARGET_SVE)
#define HAVE_roundvnx2df2 (TARGET_SVE)
#define HAVE_nearbyintvnx2df2 (TARGET_SVE)
#define HAVE_floorvnx2df2 (TARGET_SVE)
#define HAVE_frintnvnx2df2 (TARGET_SVE)
#define HAVE_ceilvnx2df2 (TARGET_SVE)
#define HAVE_rintvnx2df2 (TARGET_SVE)
#define HAVE_btruncvnx2df2 (TARGET_SVE)
#define HAVE_cond_absvnx8hf (TARGET_SVE)
#define HAVE_cond_negvnx8hf (TARGET_SVE)
#define HAVE_cond_frecpxvnx8hf (TARGET_SVE)
#define HAVE_cond_roundvnx8hf (TARGET_SVE)
#define HAVE_cond_nearbyintvnx8hf (TARGET_SVE)
#define HAVE_cond_floorvnx8hf (TARGET_SVE)
#define HAVE_cond_frintnvnx8hf (TARGET_SVE)
#define HAVE_cond_ceilvnx8hf (TARGET_SVE)
#define HAVE_cond_rintvnx8hf (TARGET_SVE)
#define HAVE_cond_btruncvnx8hf (TARGET_SVE)
#define HAVE_cond_sqrtvnx8hf (TARGET_SVE)
#define HAVE_cond_absvnx4sf (TARGET_SVE)
#define HAVE_cond_negvnx4sf (TARGET_SVE)
#define HAVE_cond_frecpxvnx4sf (TARGET_SVE)
#define HAVE_cond_roundvnx4sf (TARGET_SVE)
#define HAVE_cond_nearbyintvnx4sf (TARGET_SVE)
#define HAVE_cond_floorvnx4sf (TARGET_SVE)
#define HAVE_cond_frintnvnx4sf (TARGET_SVE)
#define HAVE_cond_ceilvnx4sf (TARGET_SVE)
#define HAVE_cond_rintvnx4sf (TARGET_SVE)
#define HAVE_cond_btruncvnx4sf (TARGET_SVE)
#define HAVE_cond_sqrtvnx4sf (TARGET_SVE)
#define HAVE_cond_absvnx2df (TARGET_SVE)
#define HAVE_cond_negvnx2df (TARGET_SVE)
#define HAVE_cond_frecpxvnx2df (TARGET_SVE)
#define HAVE_cond_roundvnx2df (TARGET_SVE)
#define HAVE_cond_nearbyintvnx2df (TARGET_SVE)
#define HAVE_cond_floorvnx2df (TARGET_SVE)
#define HAVE_cond_frintnvnx2df (TARGET_SVE)
#define HAVE_cond_ceilvnx2df (TARGET_SVE)
#define HAVE_cond_rintvnx2df (TARGET_SVE)
#define HAVE_cond_btruncvnx2df (TARGET_SVE)
#define HAVE_cond_sqrtvnx2df (TARGET_SVE)
#define HAVE_sqrtvnx8hf2 (TARGET_SVE)
#define HAVE_sqrtvnx4sf2 (TARGET_SVE)
#define HAVE_sqrtvnx2df2 (TARGET_SVE)
#define HAVE_rsqrtvnx4sf2 (TARGET_SVE)
#define HAVE_rsqrtvnx2df2 (TARGET_SVE)
#define HAVE_aarch64_rsqrtevnx4sf (TARGET_SVE)
#define HAVE_aarch64_rsqrtevnx2df (TARGET_SVE)
#define HAVE_aarch64_rsqrtsvnx4sf (TARGET_SVE)
#define HAVE_aarch64_rsqrtsvnx2df (TARGET_SVE)
#define HAVE_one_cmplvnx16bi2 (TARGET_SVE)
#define HAVE_one_cmplvnx8bi2 (TARGET_SVE)
#define HAVE_one_cmplvnx4bi2 (TARGET_SVE)
#define HAVE_one_cmplvnx2bi2 (TARGET_SVE)
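/* Integer binary operations and their predicated cond_* forms.  */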
#define HAVE_mulvnx16qi3 (TARGET_SVE)
#define HAVE_smaxvnx16qi3 (TARGET_SVE)
#define HAVE_sminvnx16qi3 (TARGET_SVE)
#define HAVE_umaxvnx16qi3 (TARGET_SVE)
#define HAVE_uminvnx16qi3 (TARGET_SVE)
#define HAVE_mulvnx8hi3 (TARGET_SVE)
#define HAVE_smaxvnx8hi3 (TARGET_SVE)
#define HAVE_sminvnx8hi3 (TARGET_SVE)
#define HAVE_umaxvnx8hi3 (TARGET_SVE)
#define HAVE_uminvnx8hi3 (TARGET_SVE)
#define HAVE_mulvnx4si3 (TARGET_SVE)
#define HAVE_smaxvnx4si3 (TARGET_SVE)
#define HAVE_sminvnx4si3 (TARGET_SVE)
#define HAVE_umaxvnx4si3 (TARGET_SVE)
#define HAVE_uminvnx4si3 (TARGET_SVE)
#define HAVE_mulvnx2di3 (TARGET_SVE)
#define HAVE_smaxvnx2di3 (TARGET_SVE)
#define HAVE_sminvnx2di3 (TARGET_SVE)
#define HAVE_umaxvnx2di3 (TARGET_SVE)
#define HAVE_uminvnx2di3 (TARGET_SVE)
#define HAVE_cond_addvnx16qi (TARGET_SVE)
#define HAVE_cond_subvnx16qi (TARGET_SVE)
#define HAVE_cond_mulvnx16qi (TARGET_SVE)
#define HAVE_cond_smaxvnx16qi (TARGET_SVE)
#define HAVE_cond_umaxvnx16qi (TARGET_SVE)
#define HAVE_cond_sminvnx16qi (TARGET_SVE)
#define HAVE_cond_uminvnx16qi (TARGET_SVE)
#define HAVE_cond_ashlvnx16qi (TARGET_SVE)
#define HAVE_cond_ashrvnx16qi (TARGET_SVE)
#define HAVE_cond_lshrvnx16qi (TARGET_SVE)
#define HAVE_cond_andvnx16qi (TARGET_SVE)
#define HAVE_cond_iorvnx16qi (TARGET_SVE)
#define HAVE_cond_xorvnx16qi (TARGET_SVE)
#define HAVE_cond_ssaddvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_usaddvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_sssubvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_ussubvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_addvnx8hi (TARGET_SVE)
#define HAVE_cond_subvnx8hi (TARGET_SVE)
#define HAVE_cond_mulvnx8hi (TARGET_SVE)
#define HAVE_cond_smaxvnx8hi (TARGET_SVE)
#define HAVE_cond_umaxvnx8hi (TARGET_SVE)
#define HAVE_cond_sminvnx8hi (TARGET_SVE)
#define HAVE_cond_uminvnx8hi (TARGET_SVE)
#define HAVE_cond_ashlvnx8hi (TARGET_SVE)
#define HAVE_cond_ashrvnx8hi (TARGET_SVE)
#define HAVE_cond_lshrvnx8hi (TARGET_SVE)
#define HAVE_cond_andvnx8hi (TARGET_SVE)
#define HAVE_cond_iorvnx8hi (TARGET_SVE)
#define HAVE_cond_xorvnx8hi (TARGET_SVE)
#define HAVE_cond_ssaddvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_usaddvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_sssubvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_ussubvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_addvnx4si (TARGET_SVE)
#define HAVE_cond_subvnx4si (TARGET_SVE)
#define HAVE_cond_mulvnx4si (TARGET_SVE)
#define HAVE_cond_smaxvnx4si (TARGET_SVE)
#define HAVE_cond_umaxvnx4si (TARGET_SVE)
#define HAVE_cond_sminvnx4si (TARGET_SVE)
#define HAVE_cond_uminvnx4si (TARGET_SVE)
#define HAVE_cond_ashlvnx4si (TARGET_SVE)
#define HAVE_cond_ashrvnx4si (TARGET_SVE)
#define HAVE_cond_lshrvnx4si (TARGET_SVE)
#define HAVE_cond_andvnx4si (TARGET_SVE)
#define HAVE_cond_iorvnx4si (TARGET_SVE)
#define HAVE_cond_xorvnx4si (TARGET_SVE)
#define HAVE_cond_ssaddvnx4si ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_usaddvnx4si ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_sssubvnx4si ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_ussubvnx4si ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_addvnx2di (TARGET_SVE)
#define HAVE_cond_subvnx2di (TARGET_SVE)
#define HAVE_cond_mulvnx2di (TARGET_SVE)
#define HAVE_cond_smaxvnx2di (TARGET_SVE)
#define HAVE_cond_umaxvnx2di (TARGET_SVE)
#define HAVE_cond_sminvnx2di (TARGET_SVE)
#define HAVE_cond_uminvnx2di (TARGET_SVE)
#define HAVE_cond_ashlvnx2di (TARGET_SVE)
#define HAVE_cond_ashrvnx2di (TARGET_SVE)
#define HAVE_cond_lshrvnx2di (TARGET_SVE)
#define HAVE_cond_andvnx2di (TARGET_SVE)
#define HAVE_cond_iorvnx2di (TARGET_SVE)
#define HAVE_cond_xorvnx2di (TARGET_SVE)
#define HAVE_cond_ssaddvnx2di ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_usaddvnx2di ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_sssubvnx2di ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_ussubvnx2di ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_aarch64_adrvnx4si_shift (TARGET_SVE)
#define HAVE_aarch64_adrvnx2di_shift (TARGET_SVE)
#define HAVE_sabdvnx16qi_3 (TARGET_SVE)
#define HAVE_uabdvnx16qi_3 (TARGET_SVE)
#define HAVE_sabdvnx8hi_3 (TARGET_SVE)
#define HAVE_uabdvnx8hi_3 (TARGET_SVE)
#define HAVE_sabdvnx4si_3 (TARGET_SVE)
#define HAVE_uabdvnx4si_3 (TARGET_SVE)
#define HAVE_sabdvnx2di_3 (TARGET_SVE)
#define HAVE_uabdvnx2di_3 (TARGET_SVE)
#define HAVE_aarch64_cond_sabdvnx16qi (TARGET_SVE)
#define HAVE_aarch64_cond_uabdvnx16qi (TARGET_SVE)
#define HAVE_aarch64_cond_sabdvnx8hi (TARGET_SVE)
#define HAVE_aarch64_cond_uabdvnx8hi (TARGET_SVE)
#define HAVE_aarch64_cond_sabdvnx4si (TARGET_SVE)
#define HAVE_aarch64_cond_uabdvnx4si (TARGET_SVE)
#define HAVE_aarch64_cond_sabdvnx2di (TARGET_SVE)
#define HAVE_aarch64_cond_uabdvnx2di (TARGET_SVE)
#define HAVE_smulvnx16qi3_highpart (TARGET_SVE)
#define HAVE_umulvnx16qi3_highpart (TARGET_SVE)
#define HAVE_smulvnx8hi3_highpart (TARGET_SVE)
#define HAVE_umulvnx8hi3_highpart (TARGET_SVE)
#define HAVE_smulvnx4si3_highpart (TARGET_SVE)
#define HAVE_umulvnx4si3_highpart (TARGET_SVE)
#define HAVE_smulvnx2di3_highpart (TARGET_SVE)
#define HAVE_umulvnx2di3_highpart (TARGET_SVE)
#define HAVE_cond_smulhvnx16qi (TARGET_SVE)
#define HAVE_cond_umulhvnx16qi (TARGET_SVE)
#define HAVE_cond_smulhvnx8hi (TARGET_SVE)
#define HAVE_cond_umulhvnx8hi (TARGET_SVE)
#define HAVE_cond_smulhvnx4si (TARGET_SVE)
#define HAVE_cond_umulhvnx4si (TARGET_SVE)
#define HAVE_cond_smulhvnx2di (TARGET_SVE)
#define HAVE_cond_umulhvnx2di (TARGET_SVE)
#define HAVE_divvnx4si3 (TARGET_SVE)
#define HAVE_udivvnx4si3 (TARGET_SVE)
#define HAVE_divvnx2di3 (TARGET_SVE)
#define HAVE_udivvnx2di3 (TARGET_SVE)
#define HAVE_cond_divvnx4si (TARGET_SVE)
#define HAVE_cond_udivvnx4si (TARGET_SVE)
#define HAVE_cond_divvnx2di (TARGET_SVE)
#define HAVE_cond_udivvnx2di (TARGET_SVE)
#define HAVE_aarch64_bicvnx16qi (TARGET_SVE)
#define HAVE_aarch64_bicvnx8hi (TARGET_SVE)
#define HAVE_aarch64_bicvnx4si (TARGET_SVE)
#define HAVE_aarch64_bicvnx2di (TARGET_SVE)
#define HAVE_cond_bicvnx16qi (TARGET_SVE)
#define HAVE_cond_bicvnx8hi (TARGET_SVE)
#define HAVE_cond_bicvnx4si (TARGET_SVE)
#define HAVE_cond_bicvnx2di (TARGET_SVE)
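/* Shifts: ashl/ashr/lshr take a scalar (or immediate) shift amount,
   the vashl/vashr/vlshr forms a vector of per-element amounts, and
   sdiv_pow2 maps to the predicated ASRD (shift right for divide)
   instruction.  */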
#define HAVE_ashlvnx16qi3 (TARGET_SVE)
#define HAVE_ashrvnx16qi3 (TARGET_SVE)
#define HAVE_lshrvnx16qi3 (TARGET_SVE)
#define HAVE_ashlvnx8hi3 (TARGET_SVE)
#define HAVE_ashrvnx8hi3 (TARGET_SVE)
#define HAVE_lshrvnx8hi3 (TARGET_SVE)
#define HAVE_ashlvnx4si3 (TARGET_SVE)
#define HAVE_ashrvnx4si3 (TARGET_SVE)
#define HAVE_lshrvnx4si3 (TARGET_SVE)
#define HAVE_ashlvnx2di3 (TARGET_SVE)
#define HAVE_ashrvnx2di3 (TARGET_SVE)
#define HAVE_lshrvnx2di3 (TARGET_SVE)
#define HAVE_vashlvnx16qi3 (TARGET_SVE)
#define HAVE_vashrvnx16qi3 (TARGET_SVE)
#define HAVE_vlshrvnx16qi3 (TARGET_SVE)
#define HAVE_vashlvnx8hi3 (TARGET_SVE)
#define HAVE_vashrvnx8hi3 (TARGET_SVE)
#define HAVE_vlshrvnx8hi3 (TARGET_SVE)
#define HAVE_vashlvnx4si3 (TARGET_SVE)
#define HAVE_vashrvnx4si3 (TARGET_SVE)
#define HAVE_vlshrvnx4si3 (TARGET_SVE)
#define HAVE_vashlvnx2di3 (TARGET_SVE)
#define HAVE_vashrvnx2di3 (TARGET_SVE)
#define HAVE_vlshrvnx2di3 (TARGET_SVE)
#define HAVE_cond_lslvnx16qi (TARGET_SVE)
#define HAVE_cond_asrvnx16qi (TARGET_SVE)
#define HAVE_cond_lsrvnx16qi (TARGET_SVE)
#define HAVE_cond_lslvnx8hi (TARGET_SVE)
#define HAVE_cond_asrvnx8hi (TARGET_SVE)
#define HAVE_cond_lsrvnx8hi (TARGET_SVE)
#define HAVE_cond_lslvnx4si (TARGET_SVE)
#define HAVE_cond_asrvnx4si (TARGET_SVE)
#define HAVE_cond_lsrvnx4si (TARGET_SVE)
#define HAVE_sdiv_pow2vnx16qi3 (TARGET_SVE)
#define HAVE_sdiv_pow2vnx8hi3 (TARGET_SVE)
#define HAVE_sdiv_pow2vnx4si3 (TARGET_SVE)
#define HAVE_sdiv_pow2vnx2di3 (TARGET_SVE)
#define HAVE_cond_asrdvnx16qi (TARGET_SVE)
#define HAVE_cond_sqshluvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_srshrvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_urshrvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_asrdvnx8hi (TARGET_SVE)
#define HAVE_cond_sqshluvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_srshrvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_urshrvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_asrdvnx4si (TARGET_SVE)
#define HAVE_cond_sqshluvnx4si ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_srshrvnx4si ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_urshrvnx4si ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_asrdvnx2di (TARGET_SVE)
#define HAVE_cond_sqshluvnx2di ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_srshrvnx2di ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_urshrvnx2di ((TARGET_SVE) && (TARGET_SVE2))
#define HAVE_cond_fscalevnx8hf (TARGET_SVE)
#define HAVE_cond_fscalevnx4sf (TARGET_SVE)
#define HAVE_cond_fscalevnx2df (TARGET_SVE)
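/* Floating-point binary operations.  The _nan variants are the
   NaN-propagating FMAX/FMIN forms; the plain smax/smin expanders
   correspond to FMAXNM/FMINNM semantics.  */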
#define HAVE_addvnx8hf3 (TARGET_SVE)
#define HAVE_smax_nanvnx8hf3 (TARGET_SVE)
#define HAVE_smaxvnx8hf3 (TARGET_SVE)
#define HAVE_smin_nanvnx8hf3 (TARGET_SVE)
#define HAVE_sminvnx8hf3 (TARGET_SVE)
#define HAVE_mulvnx8hf3 (TARGET_SVE)
#define HAVE_mulxvnx8hf3 (TARGET_SVE)
#define HAVE_subvnx8hf3 (TARGET_SVE)
#define HAVE_addvnx4sf3 (TARGET_SVE)
#define HAVE_smax_nanvnx4sf3 (TARGET_SVE)
#define HAVE_smaxvnx4sf3 (TARGET_SVE)
#define HAVE_smin_nanvnx4sf3 (TARGET_SVE)
#define HAVE_sminvnx4sf3 (TARGET_SVE)
#define HAVE_mulvnx4sf3 (TARGET_SVE)
#define HAVE_mulxvnx4sf3 (TARGET_SVE)
#define HAVE_subvnx4sf3 (TARGET_SVE)
#define HAVE_addvnx2df3 (TARGET_SVE)
#define HAVE_smax_nanvnx2df3 (TARGET_SVE)
#define HAVE_smaxvnx2df3 (TARGET_SVE)
#define HAVE_smin_nanvnx2df3 (TARGET_SVE)
#define HAVE_sminvnx2df3 (TARGET_SVE)
#define HAVE_mulvnx2df3 (TARGET_SVE)
#define HAVE_mulxvnx2df3 (TARGET_SVE)
#define HAVE_subvnx2df3 (TARGET_SVE)
#define HAVE_cond_addvnx8hf (TARGET_SVE)
#define HAVE_cond_divvnx8hf (TARGET_SVE)
#define HAVE_cond_smax_nanvnx8hf (TARGET_SVE)
#define HAVE_cond_smaxvnx8hf (TARGET_SVE)
#define HAVE_cond_smin_nanvnx8hf (TARGET_SVE)
#define HAVE_cond_sminvnx8hf (TARGET_SVE)
#define HAVE_cond_mulvnx8hf (TARGET_SVE)
#define HAVE_cond_mulxvnx8hf (TARGET_SVE)
#define HAVE_cond_subvnx8hf (TARGET_SVE)
#define HAVE_cond_addvnx4sf (TARGET_SVE)
#define HAVE_cond_divvnx4sf (TARGET_SVE)
#define HAVE_cond_smax_nanvnx4sf (TARGET_SVE)
#define HAVE_cond_smaxvnx4sf (TARGET_SVE)
#define HAVE_cond_smin_nanvnx4sf (TARGET_SVE)
#define HAVE_cond_sminvnx4sf (TARGET_SVE)
#define HAVE_cond_mulvnx4sf (TARGET_SVE)
#define HAVE_cond_mulxvnx4sf (TARGET_SVE)
#define HAVE_cond_subvnx4sf (TARGET_SVE)
#define HAVE_cond_addvnx2df (TARGET_SVE)
#define HAVE_cond_divvnx2df (TARGET_SVE)
#define HAVE_cond_smax_nanvnx2df (TARGET_SVE)
#define HAVE_cond_smaxvnx2df (TARGET_SVE)
#define HAVE_cond_smin_nanvnx2df (TARGET_SVE)
#define HAVE_cond_sminvnx2df (TARGET_SVE)
#define HAVE_cond_mulvnx2df (TARGET_SVE)
#define HAVE_cond_mulxvnx2df (TARGET_SVE)
#define HAVE_cond_subvnx2df (TARGET_SVE)
#define HAVE_cond_cadd90vnx8hf (TARGET_SVE)
#define HAVE_cond_cadd270vnx8hf (TARGET_SVE)
#define HAVE_cond_cadd90vnx4sf (TARGET_SVE)
#define HAVE_cond_cadd270vnx4sf (TARGET_SVE)
#define HAVE_cond_cadd90vnx2df (TARGET_SVE)
#define HAVE_cond_cadd270vnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_abdvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_abdvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_abdvnx2df (TARGET_SVE)
#define HAVE_aarch64_cond_abdvnx8hf (TARGET_SVE)
#define HAVE_aarch64_cond_abdvnx4sf (TARGET_SVE)
#define HAVE_aarch64_cond_abdvnx2df (TARGET_SVE)
#define HAVE_divvnx8hf3 (TARGET_SVE)
#define HAVE_divvnx4sf3 (TARGET_SVE)
#define HAVE_divvnx2df3 (TARGET_SVE)
#define HAVE_aarch64_frecpevnx8hf (TARGET_SVE)
#define HAVE_aarch64_frecpevnx4sf (TARGET_SVE)
#define HAVE_aarch64_frecpevnx2df (TARGET_SVE)
#define HAVE_aarch64_frecpsvnx8hf (TARGET_SVE)
#define HAVE_aarch64_frecpsvnx4sf (TARGET_SVE)
#define HAVE_aarch64_frecpsvnx2df (TARGET_SVE)
#define HAVE_copysignvnx8hf3 (TARGET_SVE)
#define HAVE_copysignvnx4sf3 (TARGET_SVE)
#define HAVE_copysignvnx2df3 (TARGET_SVE)
#define HAVE_xorsignvnx8hf3 (TARGET_SVE)
#define HAVE_xorsignvnx4sf3 (TARGET_SVE)
#define HAVE_xorsignvnx2df3 (TARGET_SVE)
#define HAVE_fmaxvnx8hf3 (TARGET_SVE)
#define HAVE_fminvnx8hf3 (TARGET_SVE)
#define HAVE_fmaxvnx4sf3 (TARGET_SVE)
#define HAVE_fminvnx4sf3 (TARGET_SVE)
#define HAVE_fmaxvnx2df3 (TARGET_SVE)
#define HAVE_fminvnx2df3 (TARGET_SVE)
#define HAVE_iorvnx16bi3 (TARGET_SVE)
#define HAVE_xorvnx16bi3 (TARGET_SVE)
#define HAVE_iorvnx8bi3 (TARGET_SVE)
#define HAVE_xorvnx8bi3 (TARGET_SVE)
#define HAVE_iorvnx4bi3 (TARGET_SVE)
#define HAVE_xorvnx4bi3 (TARGET_SVE)
#define HAVE_iorvnx2bi3 (TARGET_SVE)
#define HAVE_xorvnx2bi3 (TARGET_SVE)
#define HAVE_fmavnx16qi4 (TARGET_SVE)
#define HAVE_fmavnx8hi4 (TARGET_SVE)
#define HAVE_fmavnx4si4 (TARGET_SVE)
#define HAVE_fmavnx2di4 (TARGET_SVE)
#define HAVE_cond_fmavnx16qi (TARGET_SVE)
#define HAVE_cond_fmavnx8hi (TARGET_SVE)
#define HAVE_cond_fmavnx4si (TARGET_SVE)
#define HAVE_cond_fmavnx2di (TARGET_SVE)
#define HAVE_fnmavnx16qi4 (TARGET_SVE)
#define HAVE_fnmavnx8hi4 (TARGET_SVE)
#define HAVE_fnmavnx4si4 (TARGET_SVE)
#define HAVE_fnmavnx2di4 (TARGET_SVE)
#define HAVE_cond_fnmavnx16qi (TARGET_SVE)
#define HAVE_cond_fnmavnx8hi (TARGET_SVE)
#define HAVE_cond_fnmavnx4si (TARGET_SVE)
#define HAVE_cond_fnmavnx2di (TARGET_SVE)
#define HAVE_ssadvnx16qi (TARGET_SVE)
#define HAVE_usadvnx16qi (TARGET_SVE)
#define HAVE_ssadvnx8hi (TARGET_SVE)
#define HAVE_usadvnx8hi (TARGET_SVE)
#define HAVE_fmavnx8hf4 (TARGET_SVE)
#define HAVE_fnmavnx8hf4 (TARGET_SVE)
#define HAVE_fnmsvnx8hf4 (TARGET_SVE)
#define HAVE_fmsvnx8hf4 (TARGET_SVE)
#define HAVE_fmavnx4sf4 (TARGET_SVE)
#define HAVE_fnmavnx4sf4 (TARGET_SVE)
#define HAVE_fnmsvnx4sf4 (TARGET_SVE)
#define HAVE_fmsvnx4sf4 (TARGET_SVE)
#define HAVE_fmavnx2df4 (TARGET_SVE)
#define HAVE_fnmavnx2df4 (TARGET_SVE)
#define HAVE_fnmsvnx2df4 (TARGET_SVE)
#define HAVE_fmsvnx2df4 (TARGET_SVE)
#define HAVE_cond_fmavnx8hf (TARGET_SVE)
#define HAVE_cond_fnmavnx8hf (TARGET_SVE)
#define HAVE_cond_fnmsvnx8hf (TARGET_SVE)
#define HAVE_cond_fmsvnx8hf (TARGET_SVE)
#define HAVE_cond_fmavnx4sf (TARGET_SVE)
#define HAVE_cond_fnmavnx4sf (TARGET_SVE)
#define HAVE_cond_fnmsvnx4sf (TARGET_SVE)
#define HAVE_cond_fmsvnx4sf (TARGET_SVE)
#define HAVE_cond_fmavnx2df (TARGET_SVE)
#define HAVE_cond_fnmavnx2df (TARGET_SVE)
#define HAVE_cond_fnmsvnx2df (TARGET_SVE)
#define HAVE_cond_fmsvnx2df (TARGET_SVE)
#define HAVE_cond_fcmlavnx8hf (TARGET_SVE)
#define HAVE_cond_fcmla90vnx8hf (TARGET_SVE)
#define HAVE_cond_fcmla180vnx8hf (TARGET_SVE)
#define HAVE_cond_fcmla270vnx8hf (TARGET_SVE)
#define HAVE_cond_fcmlavnx4sf (TARGET_SVE)
#define HAVE_cond_fcmla90vnx4sf (TARGET_SVE)
#define HAVE_cond_fcmla180vnx4sf (TARGET_SVE)
#define HAVE_cond_fcmla270vnx4sf (TARGET_SVE)
#define HAVE_cond_fcmlavnx2df (TARGET_SVE)
#define HAVE_cond_fcmla90vnx2df (TARGET_SVE)
#define HAVE_cond_fcmla180vnx2df (TARGET_SVE)
#define HAVE_cond_fcmla270vnx2df (TARGET_SVE)
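/* Vector selects (vcond*) and full-vector comparisons (vec_cmp*,
   facge/facgt and friends), producing or consuming SVE predicate
   modes.  */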
#define HAVE_vcond_mask_vnx16qivnx16bi (TARGET_SVE)
#define HAVE_vcond_mask_vnx8hivnx8bi (TARGET_SVE)
#define HAVE_vcond_mask_vnx4sivnx4bi (TARGET_SVE)
#define HAVE_vcond_mask_vnx2divnx2bi (TARGET_SVE)
#define HAVE_vcond_mask_vnx8bfvnx8bi (TARGET_SVE)
#define HAVE_vcond_mask_vnx8hfvnx8bi (TARGET_SVE)
#define HAVE_vcond_mask_vnx4sfvnx4bi (TARGET_SVE)
#define HAVE_vcond_mask_vnx2dfvnx2bi (TARGET_SVE)
#define HAVE_vcondvnx16qivnx16qi (TARGET_SVE)
#define HAVE_vcondvnx8hivnx8hi (TARGET_SVE)
#define HAVE_vcondvnx4sivnx4si (TARGET_SVE)
#define HAVE_vcondvnx2divnx2di (TARGET_SVE)
#define HAVE_vcondvnx8bfvnx8hi (TARGET_SVE)
#define HAVE_vcondvnx8hfvnx8hi (TARGET_SVE)
#define HAVE_vcondvnx4sfvnx4si (TARGET_SVE)
#define HAVE_vcondvnx2dfvnx2di (TARGET_SVE)
#define HAVE_vconduvnx16qivnx16qi (TARGET_SVE)
#define HAVE_vconduvnx8hivnx8hi (TARGET_SVE)
#define HAVE_vconduvnx4sivnx4si (TARGET_SVE)
#define HAVE_vconduvnx2divnx2di (TARGET_SVE)
#define HAVE_vconduvnx8bfvnx8hi (TARGET_SVE)
#define HAVE_vconduvnx8hfvnx8hi (TARGET_SVE)
#define HAVE_vconduvnx4sfvnx4si (TARGET_SVE)
#define HAVE_vconduvnx2dfvnx2di (TARGET_SVE)
#define HAVE_vcondvnx8hivnx8hf (TARGET_SVE)
#define HAVE_vcondvnx4sivnx4sf (TARGET_SVE)
#define HAVE_vcondvnx2divnx2df (TARGET_SVE)
#define HAVE_vcondvnx8bfvnx8hf (TARGET_SVE)
#define HAVE_vcondvnx8hfvnx8hf (TARGET_SVE)
#define HAVE_vcondvnx4sfvnx4sf (TARGET_SVE)
#define HAVE_vcondvnx2dfvnx2df (TARGET_SVE)
#define HAVE_vec_cmpvnx16qivnx16bi (TARGET_SVE)
#define HAVE_vec_cmpvnx8hivnx8bi (TARGET_SVE)
#define HAVE_vec_cmpvnx4sivnx4bi (TARGET_SVE)
#define HAVE_vec_cmpvnx2divnx2bi (TARGET_SVE)
#define HAVE_vec_cmpuvnx16qivnx16bi (TARGET_SVE)
#define HAVE_vec_cmpuvnx8hivnx8bi (TARGET_SVE)
#define HAVE_vec_cmpuvnx4sivnx4bi (TARGET_SVE)
#define HAVE_vec_cmpuvnx2divnx2bi (TARGET_SVE)
#define HAVE_vec_cmpvnx8hfvnx8bi (TARGET_SVE)
#define HAVE_vec_cmpvnx4sfvnx4bi (TARGET_SVE)
#define HAVE_vec_cmpvnx2dfvnx2bi (TARGET_SVE)
#define HAVE_aarch64_pred_facgevnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_facgtvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_faclevnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_facltvnx8hf (TARGET_SVE)
#define HAVE_aarch64_pred_facgevnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_facgtvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_faclevnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_facltvnx4sf (TARGET_SVE)
#define HAVE_aarch64_pred_facgevnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_facgtvnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_faclevnx2df (TARGET_SVE)
#define HAVE_aarch64_pred_facltvnx2df (TARGET_SVE)
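/* Compare-and-branch on SVE predicate modes.  */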
#define HAVE_cbranchvnx16bi4 1
#define HAVE_cbranchvnx8bi4 1
#define HAVE_cbranchvnx4bi4 1
#define HAVE_cbranchvnx2bi4 1
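/* Reductions of a whole vector to a scalar.  */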
#define HAVE_reduc_plus_scal_vnx16qi (TARGET_SVE)
#define HAVE_reduc_plus_scal_vnx8hi (TARGET_SVE)
#define HAVE_reduc_plus_scal_vnx4si (TARGET_SVE)
#define HAVE_reduc_plus_scal_vnx2di (TARGET_SVE)
#define HAVE_reduc_and_scal_vnx16qi (TARGET_SVE)
#define HAVE_reduc_ior_scal_vnx16qi (TARGET_SVE)
#define HAVE_reduc_smax_scal_vnx16qi (TARGET_SVE)
#define HAVE_reduc_smin_scal_vnx16qi (TARGET_SVE)
#define HAVE_reduc_umax_scal_vnx16qi (TARGET_SVE)
#define HAVE_reduc_umin_scal_vnx16qi (TARGET_SVE)
#define HAVE_reduc_xor_scal_vnx16qi (TARGET_SVE)
#define HAVE_reduc_and_scal_vnx8hi (TARGET_SVE)
#define HAVE_reduc_ior_scal_vnx8hi (TARGET_SVE)
#define HAVE_reduc_smax_scal_vnx8hi (TARGET_SVE)
#define HAVE_reduc_smin_scal_vnx8hi (TARGET_SVE)
#define HAVE_reduc_umax_scal_vnx8hi (TARGET_SVE)
#define HAVE_reduc_umin_scal_vnx8hi (TARGET_SVE)
#define HAVE_reduc_xor_scal_vnx8hi (TARGET_SVE)
#define HAVE_reduc_and_scal_vnx4si (TARGET_SVE)
#define HAVE_reduc_ior_scal_vnx4si (TARGET_SVE)
#define HAVE_reduc_smax_scal_vnx4si (TARGET_SVE)
#define HAVE_reduc_smin_scal_vnx4si (TARGET_SVE)
#define HAVE_reduc_umax_scal_vnx4si (TARGET_SVE)
#define HAVE_reduc_umin_scal_vnx4si (TARGET_SVE)
#define HAVE_reduc_xor_scal_vnx4si (TARGET_SVE)
#define HAVE_reduc_and_scal_vnx2di (TARGET_SVE)
#define HAVE_reduc_ior_scal_vnx2di (TARGET_SVE)
#define HAVE_reduc_smax_scal_vnx2di (TARGET_SVE)
#define HAVE_reduc_smin_scal_vnx2di (TARGET_SVE)
#define HAVE_reduc_umax_scal_vnx2di (TARGET_SVE)
#define HAVE_reduc_umin_scal_vnx2di (TARGET_SVE)
#define HAVE_reduc_xor_scal_vnx2di (TARGET_SVE)
#define HAVE_reduc_plus_scal_vnx8hf (TARGET_SVE)
#define HAVE_reduc_smax_nan_scal_vnx8hf (TARGET_SVE)
#define HAVE_reduc_smax_scal_vnx8hf (TARGET_SVE)
#define HAVE_reduc_smin_nan_scal_vnx8hf (TARGET_SVE)
#define HAVE_reduc_smin_scal_vnx8hf (TARGET_SVE)
#define HAVE_reduc_plus_scal_vnx4sf (TARGET_SVE)
#define HAVE_reduc_smax_nan_scal_vnx4sf (TARGET_SVE)
#define HAVE_reduc_smax_scal_vnx4sf (TARGET_SVE)
#define HAVE_reduc_smin_nan_scal_vnx4sf (TARGET_SVE)
#define HAVE_reduc_smin_scal_vnx4sf (TARGET_SVE)
#define HAVE_reduc_plus_scal_vnx2df (TARGET_SVE)
#define HAVE_reduc_smax_nan_scal_vnx2df (TARGET_SVE)
#define HAVE_reduc_smax_scal_vnx2df (TARGET_SVE)
#define HAVE_reduc_smin_nan_scal_vnx2df (TARGET_SVE)
#define HAVE_reduc_smin_scal_vnx2df (TARGET_SVE)
#define HAVE_fold_left_plus_vnx8hf (TARGET_SVE)
#define HAVE_fold_left_plus_vnx4sf (TARGET_SVE)
#define HAVE_fold_left_plus_vnx2df (TARGET_SVE)
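/* General permutes are provided only when the element count is a
   compile-time constant, i.e. when a fixed SVE vector length is known
   (for example via -msve-vector-bits).  */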
#define HAVE_vec_permvnx16qi (TARGET_SVE && GET_MODE_NUNITS (VNx16QImode).is_constant ())
#define HAVE_vec_permvnx8hi (TARGET_SVE && GET_MODE_NUNITS (VNx8HImode).is_constant ())
#define HAVE_vec_permvnx4si (TARGET_SVE && GET_MODE_NUNITS (VNx4SImode).is_constant ())
#define HAVE_vec_permvnx2di (TARGET_SVE && GET_MODE_NUNITS (VNx2DImode).is_constant ())
#define HAVE_vec_permvnx8bf (TARGET_SVE && GET_MODE_NUNITS (VNx8BFmode).is_constant ())
#define HAVE_vec_permvnx8hf (TARGET_SVE && GET_MODE_NUNITS (VNx8HFmode).is_constant ())
#define HAVE_vec_permvnx4sf (TARGET_SVE && GET_MODE_NUNITS (VNx4SFmode).is_constant ())
#define HAVE_vec_permvnx2df (TARGET_SVE && GET_MODE_NUNITS (VNx2DFmode).is_constant ())
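/* Widening unpacks and the int<->FP conversion expanders.  */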
#define HAVE_vec_unpacks_hi_vnx16qi (TARGET_SVE)
#define HAVE_vec_unpacku_hi_vnx16qi (TARGET_SVE)
#define HAVE_vec_unpacks_lo_vnx16qi (TARGET_SVE)
#define HAVE_vec_unpacku_lo_vnx16qi (TARGET_SVE)
#define HAVE_vec_unpacks_hi_vnx8hi (TARGET_SVE)
#define HAVE_vec_unpacku_hi_vnx8hi (TARGET_SVE)
#define HAVE_vec_unpacks_lo_vnx8hi (TARGET_SVE)
#define HAVE_vec_unpacku_lo_vnx8hi (TARGET_SVE)
#define HAVE_vec_unpacks_hi_vnx4si (TARGET_SVE)
#define HAVE_vec_unpacku_hi_vnx4si (TARGET_SVE)
#define HAVE_vec_unpacks_lo_vnx4si (TARGET_SVE)
#define HAVE_vec_unpacku_lo_vnx4si (TARGET_SVE)
#define HAVE_fix_truncvnx8hfvnx8hi2 (TARGET_SVE)
#define HAVE_fixuns_truncvnx8hfvnx8hi2 (TARGET_SVE)
#define HAVE_fix_truncvnx4sfvnx4si2 (TARGET_SVE)
#define HAVE_fixuns_truncvnx4sfvnx4si2 (TARGET_SVE)
#define HAVE_fix_truncvnx2dfvnx2di2 (TARGET_SVE)
#define HAVE_fixuns_truncvnx2dfvnx2di2 (TARGET_SVE)
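/* Conditional conversions.  The `N >= M' and `N > M' tests compare
   the two element widths in bits and, like the extension masks above,
   are compile-time constants after iterator substitution.  */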
#define HAVE_cond_fix_trunc_nontruncvnx8hfvnx8hi (TARGET_SVE && 16 >= 16)
#define HAVE_cond_fixuns_trunc_nontruncvnx8hfvnx8hi (TARGET_SVE && 16 >= 16)
#define HAVE_cond_fix_trunc_nontruncvnx8hfvnx4si (TARGET_SVE && 32 >= 16)
#define HAVE_cond_fixuns_trunc_nontruncvnx8hfvnx4si (TARGET_SVE && 32 >= 16)
#define HAVE_cond_fix_trunc_nontruncvnx4sfvnx4si (TARGET_SVE && 32 >= 32)
#define HAVE_cond_fixuns_trunc_nontruncvnx4sfvnx4si (TARGET_SVE && 32 >= 32)
#define HAVE_cond_fix_trunc_nontruncvnx8hfvnx2di (TARGET_SVE && 64 >= 16)
#define HAVE_cond_fixuns_trunc_nontruncvnx8hfvnx2di (TARGET_SVE && 64 >= 16)
#define HAVE_cond_fix_trunc_nontruncvnx4sfvnx2di (TARGET_SVE && 64 >= 32)
#define HAVE_cond_fixuns_trunc_nontruncvnx4sfvnx2di (TARGET_SVE && 64 >= 32)
#define HAVE_cond_fix_trunc_nontruncvnx2dfvnx2di (TARGET_SVE && 64 >= 64)
#define HAVE_cond_fixuns_trunc_nontruncvnx2dfvnx2di (TARGET_SVE && 64 >= 64)
#define HAVE_cond_fix_trunc_truncvnx2dfvnx4si (TARGET_SVE)
#define HAVE_cond_fixuns_trunc_truncvnx2dfvnx4si (TARGET_SVE)
#define HAVE_vec_pack_sfix_trunc_vnx2df (TARGET_SVE)
#define HAVE_vec_pack_ufix_trunc_vnx2df (TARGET_SVE)
#define HAVE_floatvnx8hivnx8hf2 (TARGET_SVE)
#define HAVE_floatunsvnx8hivnx8hf2 (TARGET_SVE)
#define HAVE_floatvnx4sivnx4sf2 (TARGET_SVE)
#define HAVE_floatunsvnx4sivnx4sf2 (TARGET_SVE)
#define HAVE_floatvnx2divnx2df2 (TARGET_SVE)
#define HAVE_floatunsvnx2divnx2df2 (TARGET_SVE)
#define HAVE_cond_float_nonextendvnx8hivnx8hf (TARGET_SVE && 16 >= 16)
#define HAVE_cond_floatuns_nonextendvnx8hivnx8hf (TARGET_SVE && 16 >= 16)
#define HAVE_cond_float_nonextendvnx4sivnx8hf (TARGET_SVE && 32 >= 16)
#define HAVE_cond_floatuns_nonextendvnx4sivnx8hf (TARGET_SVE && 32 >= 16)
#define HAVE_cond_float_nonextendvnx4sivnx4sf (TARGET_SVE && 32 >= 32)
#define HAVE_cond_floatuns_nonextendvnx4sivnx4sf (TARGET_SVE && 32 >= 32)
#define HAVE_cond_float_nonextendvnx2divnx8hf (TARGET_SVE && 64 >= 16)
#define HAVE_cond_floatuns_nonextendvnx2divnx8hf (TARGET_SVE && 64 >= 16)
#define HAVE_cond_float_nonextendvnx2divnx4sf (TARGET_SVE && 64 >= 32)
#define HAVE_cond_floatuns_nonextendvnx2divnx4sf (TARGET_SVE && 64 >= 32)
#define HAVE_cond_float_nonextendvnx2divnx2df (TARGET_SVE && 64 >= 64)
#define HAVE_cond_floatuns_nonextendvnx2divnx2df (TARGET_SVE && 64 >= 64)
#define HAVE_cond_float_extendvnx4sivnx2df (TARGET_SVE)
#define HAVE_cond_floatuns_extendvnx4sivnx2df (TARGET_SVE)
#define HAVE_vec_unpacks_float_lo_vnx4si (TARGET_SVE)
#define HAVE_vec_unpacks_float_hi_vnx4si (TARGET_SVE)
#define HAVE_vec_unpacku_float_lo_vnx4si (TARGET_SVE)
#define HAVE_vec_unpacku_float_hi_vnx4si (TARGET_SVE)
#define HAVE_vec_pack_trunc_vnx4sf (TARGET_SVE)
#define HAVE_vec_pack_trunc_vnx2df (TARGET_SVE)
#define HAVE_cond_fcvt_truncvnx4sfvnx8hf (TARGET_SVE && 32 > 16)
#define HAVE_cond_fcvt_truncvnx2dfvnx8hf (TARGET_SVE && 64 > 16)
#define HAVE_cond_fcvt_truncvnx2dfvnx4sf (TARGET_SVE && 64 > 32)
#define HAVE_cond_fcvt_truncvnx4sfvnx8bf (TARGET_SVE_BF16)
#define HAVE_vec_unpacks_lo_vnx8hf (TARGET_SVE)
#define HAVE_vec_unpacks_hi_vnx8hf (TARGET_SVE)
#define HAVE_vec_unpacks_lo_vnx4sf (TARGET_SVE)
#define HAVE_vec_unpacks_hi_vnx4sf (TARGET_SVE)
#define HAVE_cond_fcvt_nontruncvnx8hfvnx4sf (TARGET_SVE && 32 > 16)
#define HAVE_cond_fcvt_nontruncvnx8hfvnx2df (TARGET_SVE && 64 > 16)
#define HAVE_cond_fcvt_nontruncvnx4sfvnx2df (TARGET_SVE && 64 > 32)
#define HAVE_vec_unpacks_hi_vnx16bi (TARGET_SVE)
#define HAVE_vec_unpacku_hi_vnx16bi (TARGET_SVE)
#define HAVE_vec_unpacks_lo_vnx16bi (TARGET_SVE)
#define HAVE_vec_unpacku_lo_vnx16bi (TARGET_SVE)
#define HAVE_vec_unpacks_hi_vnx8bi (TARGET_SVE)
#define HAVE_vec_unpacku_hi_vnx8bi (TARGET_SVE)
#define HAVE_vec_unpacks_lo_vnx8bi (TARGET_SVE)
#define HAVE_vec_unpacku_lo_vnx8bi (TARGET_SVE)
#define HAVE_vec_unpacks_hi_vnx4bi (TARGET_SVE)
#define HAVE_vec_unpacku_hi_vnx4bi (TARGET_SVE)
#define HAVE_vec_unpacks_lo_vnx4bi (TARGET_SVE)
#define HAVE_vec_unpacku_lo_vnx4bi (TARGET_SVE)
#define HAVE_aarch64_sve_incvnx8hi_pat (TARGET_SVE)
#define HAVE_aarch64_sve_sqincvnx8hi_pat (TARGET_SVE)
#define HAVE_aarch64_sve_uqincvnx8hi_pat (TARGET_SVE)
#define HAVE_aarch64_sve_decvnx8hi_pat (TARGET_SVE)
#define HAVE_aarch64_sve_sqdecvnx8hi_pat (TARGET_SVE)
#define HAVE_aarch64_sve_uqdecvnx8hi_pat (TARGET_SVE)
#define HAVE_aarch64_sve_incdivnx16bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqincdivnx16bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqincdivnx16bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_incdivnx8bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqincdivnx8bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqincdivnx8bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_incdivnx4bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqincdivnx4bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqincdivnx4bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_incdivnx2bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqincdivnx2bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqincdivnx2bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqincsivnx16bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqincsivnx16bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqincsivnx8bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqincsivnx8bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqincsivnx4bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqincsivnx4bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqincsivnx2bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqincsivnx2bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_incvnx2di_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqincvnx2di_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqincvnx2di_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_incvnx4si_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqincvnx4si_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqincvnx4si_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_incvnx8hi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqincvnx8hi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqincvnx8hi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_decdivnx16bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqdecdivnx16bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqdecdivnx16bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_decdivnx8bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqdecdivnx8bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqdecdivnx8bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_decdivnx4bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqdecdivnx4bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqdecdivnx4bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_decdivnx2bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqdecdivnx2bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqdecdivnx2bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqdecsivnx16bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqdecsivnx16bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqdecsivnx8bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqdecsivnx8bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqdecsivnx4bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqdecsivnx4bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqdecsivnx2bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqdecsivnx2bi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_decvnx2di_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqdecvnx2di_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqdecvnx2di_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_decvnx4si_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqdecvnx4si_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqdecvnx4si_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_decvnx8hi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_sqdecvnx8hi_cntp (TARGET_SVE)
#define HAVE_aarch64_sve_uqdecvnx8hi_cntp (TARGET_SVE)
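/* From here to the end of the HAVE_ block, the patterns require the
   SVE2 extension (TARGET_SVE2) rather than base SVE.  */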
#define HAVE_smulhsvnx16qi3 (TARGET_SVE2)
#define HAVE_umulhsvnx16qi3 (TARGET_SVE2)
#define HAVE_smulhrsvnx16qi3 (TARGET_SVE2)
#define HAVE_umulhrsvnx16qi3 (TARGET_SVE2)
#define HAVE_smulhsvnx8hi3 (TARGET_SVE2)
#define HAVE_umulhsvnx8hi3 (TARGET_SVE2)
#define HAVE_smulhrsvnx8hi3 (TARGET_SVE2)
#define HAVE_umulhrsvnx8hi3 (TARGET_SVE2)
#define HAVE_smulhsvnx4si3 (TARGET_SVE2)
#define HAVE_umulhsvnx4si3 (TARGET_SVE2)
#define HAVE_smulhrsvnx4si3 (TARGET_SVE2)
#define HAVE_umulhrsvnx4si3 (TARGET_SVE2)
#define HAVE_avgvnx16qi3_floor (TARGET_SVE2)
#define HAVE_uavgvnx16qi3_floor (TARGET_SVE2)
#define HAVE_avgvnx8hi3_floor (TARGET_SVE2)
#define HAVE_uavgvnx8hi3_floor (TARGET_SVE2)
#define HAVE_avgvnx4si3_floor (TARGET_SVE2)
#define HAVE_uavgvnx4si3_floor (TARGET_SVE2)
#define HAVE_avgvnx2di3_floor (TARGET_SVE2)
#define HAVE_uavgvnx2di3_floor (TARGET_SVE2)
#define HAVE_avgvnx16qi3_ceil (TARGET_SVE2)
#define HAVE_uavgvnx16qi3_ceil (TARGET_SVE2)
#define HAVE_avgvnx8hi3_ceil (TARGET_SVE2)
#define HAVE_uavgvnx8hi3_ceil (TARGET_SVE2)
#define HAVE_avgvnx4si3_ceil (TARGET_SVE2)
#define HAVE_uavgvnx4si3_ceil (TARGET_SVE2)
#define HAVE_avgvnx2di3_ceil (TARGET_SVE2)
#define HAVE_uavgvnx2di3_ceil (TARGET_SVE2)
#define HAVE_cond_shaddvnx16qi (TARGET_SVE2)
#define HAVE_cond_shsubvnx16qi (TARGET_SVE2)
#define HAVE_cond_sqrshlvnx16qi (TARGET_SVE2)
#define HAVE_cond_srhaddvnx16qi (TARGET_SVE2)
#define HAVE_cond_srshlvnx16qi (TARGET_SVE2)
#define HAVE_cond_suqaddvnx16qi (TARGET_SVE2)
#define HAVE_cond_uhaddvnx16qi (TARGET_SVE2)
#define HAVE_cond_uhsubvnx16qi (TARGET_SVE2)
#define HAVE_cond_uqrshlvnx16qi (TARGET_SVE2)
#define HAVE_cond_urhaddvnx16qi (TARGET_SVE2)
#define HAVE_cond_urshlvnx16qi (TARGET_SVE2)
#define HAVE_cond_usqaddvnx16qi (TARGET_SVE2)
#define HAVE_cond_shaddvnx8hi (TARGET_SVE2)
#define HAVE_cond_shsubvnx8hi (TARGET_SVE2)
#define HAVE_cond_sqrshlvnx8hi (TARGET_SVE2)
#define HAVE_cond_srhaddvnx8hi (TARGET_SVE2)
#define HAVE_cond_srshlvnx8hi (TARGET_SVE2)
#define HAVE_cond_suqaddvnx8hi (TARGET_SVE2)
#define HAVE_cond_uhaddvnx8hi (TARGET_SVE2)
#define HAVE_cond_uhsubvnx8hi (TARGET_SVE2)
#define HAVE_cond_uqrshlvnx8hi (TARGET_SVE2)
#define HAVE_cond_urhaddvnx8hi (TARGET_SVE2)
#define HAVE_cond_urshlvnx8hi (TARGET_SVE2)
#define HAVE_cond_usqaddvnx8hi (TARGET_SVE2)
#define HAVE_cond_shaddvnx4si (TARGET_SVE2)
#define HAVE_cond_shsubvnx4si (TARGET_SVE2)
#define HAVE_cond_sqrshlvnx4si (TARGET_SVE2)
#define HAVE_cond_srhaddvnx4si (TARGET_SVE2)
#define HAVE_cond_srshlvnx4si (TARGET_SVE2)
#define HAVE_cond_suqaddvnx4si (TARGET_SVE2)
#define HAVE_cond_uhaddvnx4si (TARGET_SVE2)
#define HAVE_cond_uhsubvnx4si (TARGET_SVE2)
#define HAVE_cond_uqrshlvnx4si (TARGET_SVE2)
#define HAVE_cond_urhaddvnx4si (TARGET_SVE2)
#define HAVE_cond_urshlvnx4si (TARGET_SVE2)
#define HAVE_cond_usqaddvnx4si (TARGET_SVE2)
#define HAVE_cond_shaddvnx2di (TARGET_SVE2)
#define HAVE_cond_shsubvnx2di (TARGET_SVE2)
#define HAVE_cond_sqrshlvnx2di (TARGET_SVE2)
#define HAVE_cond_srhaddvnx2di (TARGET_SVE2)
#define HAVE_cond_srshlvnx2di (TARGET_SVE2)
#define HAVE_cond_suqaddvnx2di (TARGET_SVE2)
#define HAVE_cond_uhaddvnx2di (TARGET_SVE2)
#define HAVE_cond_uhsubvnx2di (TARGET_SVE2)
#define HAVE_cond_uqrshlvnx2di (TARGET_SVE2)
#define HAVE_cond_urhaddvnx2di (TARGET_SVE2)
#define HAVE_cond_urshlvnx2di (TARGET_SVE2)
#define HAVE_cond_usqaddvnx2di (TARGET_SVE2)
#define HAVE_cond_sqshlvnx16qi (TARGET_SVE2)
#define HAVE_cond_uqshlvnx16qi (TARGET_SVE2)
#define HAVE_cond_sqshlvnx8hi (TARGET_SVE2)
#define HAVE_cond_uqshlvnx8hi (TARGET_SVE2)
#define HAVE_cond_sqshlvnx4si (TARGET_SVE2)
#define HAVE_cond_uqshlvnx4si (TARGET_SVE2)
#define HAVE_cond_sqshlvnx2di (TARGET_SVE2)
#define HAVE_cond_uqshlvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve2_bslvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve2_bslvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve2_bslvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve2_bslvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve2_nbslvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve2_nbslvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve2_nbslvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve2_nbslvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve2_bsl1nvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve2_bsl1nvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve2_bsl1nvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve2_bsl1nvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve2_bsl2nvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve2_bsl2nvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve2_bsl2nvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve2_bsl2nvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_add_asrvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_add_lsrvnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve_add_asrvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_add_lsrvnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve_add_asrvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_add_lsrvnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve_add_asrvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve_add_lsrvnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve2_sabavnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve2_uabavnx16qi (TARGET_SVE2)
#define HAVE_aarch64_sve2_sabavnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve2_uabavnx8hi (TARGET_SVE2)
#define HAVE_aarch64_sve2_sabavnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve2_uabavnx4si (TARGET_SVE2)
#define HAVE_aarch64_sve2_sabavnx2di (TARGET_SVE2)
#define HAVE_aarch64_sve2_uabavnx2di (TARGET_SVE2)
#define HAVE_cond_sadalpvnx8hi (TARGET_SVE2)
#define HAVE_cond_uadalpvnx8hi (TARGET_SVE2)
#define HAVE_cond_sadalpvnx4si (TARGET_SVE2)
#define HAVE_cond_uadalpvnx4si (TARGET_SVE2)
#define HAVE_cond_sadalpvnx2di (TARGET_SVE2)
#define HAVE_cond_uadalpvnx2di (TARGET_SVE2)
#define HAVE_cond_fcvtltvnx4sf (TARGET_SVE2)
#define HAVE_cond_fcvtltvnx2df (TARGET_SVE2)
#define HAVE_cond_fcvtxvnx4sf (TARGET_SVE2)
#define HAVE_cond_urecpevnx4si (TARGET_SVE2)
#define HAVE_cond_ursqrtevnx4si (TARGET_SVE2)
#define HAVE_cond_flogbvnx8hf (TARGET_SVE2)
#define HAVE_cond_flogbvnx4sf (TARGET_SVE2)
#define HAVE_cond_flogbvnx2df (TARGET_SVE2)
#define HAVE_check_raw_ptrssi (TARGET_SVE2)
#define HAVE_check_war_ptrssi (TARGET_SVE2)
#define HAVE_check_raw_ptrsdi (TARGET_SVE2)
#define HAVE_check_war_ptrsdi (TARGET_SVE2)
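/* Prototypes for the RTL construction functions follow.  Each
   gen_<name> builds the insn matched by the pattern whose enabling
   condition is given by the corresponding HAVE_<name> macro.  A
   backend typically guards emission on the macro, e.g. (illustrative
   sketch, not part of the generated file):

     if (HAVE_<name>)
       emit_insn (gen_<name> (op0, op1, ...));

   with as many rtx operands as the prototype declares.  */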
extern rtx gen_indirect_jump (rtx);
extern rtx gen_jump (rtx);
extern rtx gen_ccmpccsi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ccmpccdi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ccmpccfpsf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ccmpccfpdf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ccmpccfpesf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ccmpccfpedf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ccmpccsi_rev (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ccmpccdi_rev (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ccmpccfpsf_rev (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ccmpccfpdf_rev (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ccmpccfpesf_rev (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ccmpccfpedf_rev (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_condjump (rtx, rtx, rtx);
extern rtx gen_nop (void);
extern rtx gen_prefetch (rtx, rtx, rtx);
extern rtx gen_trap (void);
extern rtx gen_simple_return (void);
extern rtx gen_insv_immsi (rtx, rtx, rtx);
extern rtx gen_insv_immdi (rtx, rtx, rtx);
extern rtx gen_aarch64_movksi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_movkdi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pair_sw_sisi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pair_sw_sfsi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pair_sw_sisf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pair_sw_sfsf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pair_dw_didi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pair_dw_didf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pair_dw_dfdi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pair_dw_dfdf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pair_dw_tftf (rtx, rtx, rtx, rtx);
extern rtx gen_store_pair_sw_sisi (rtx, rtx, rtx, rtx);
extern rtx gen_store_pair_sw_sfsi (rtx, rtx, rtx, rtx);
extern rtx gen_store_pair_sw_sisf (rtx, rtx, rtx, rtx);
extern rtx gen_store_pair_sw_sfsf (rtx, rtx, rtx, rtx);
extern rtx gen_store_pair_dw_didi (rtx, rtx, rtx, rtx);
extern rtx gen_store_pair_dw_didf (rtx, rtx, rtx, rtx);
extern rtx gen_store_pair_dw_dfdi (rtx, rtx, rtx, rtx);
extern rtx gen_store_pair_dw_dfdf (rtx, rtx, rtx, rtx);
extern rtx gen_store_pair_dw_tftf (rtx, rtx, rtx, rtx);
extern rtx gen_loadwb_pairsi_si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_loadwb_pairsi_di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_loadwb_pairdi_si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_loadwb_pairdi_di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_loadwb_pairsf_si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_loadwb_pairdf_si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_loadwb_pairsf_di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_loadwb_pairdf_di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_loadwb_pairti_si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_loadwb_pairtf_si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_loadwb_pairti_di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_loadwb_pairtf_di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_storewb_pairsi_si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_storewb_pairsi_di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_storewb_pairdi_si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_storewb_pairdi_di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_storewb_pairsf_si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_storewb_pairdf_si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_storewb_pairsf_di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_storewb_pairdf_di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_storewb_pairti_si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_storewb_pairtf_si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_storewb_pairti_di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_storewb_pairtf_di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addsi3_compare0 (rtx, rtx, rtx);
extern rtx gen_adddi3_compare0 (rtx, rtx, rtx);
extern rtx gen_addsi3_compareC (rtx, rtx, rtx);
extern rtx gen_adddi3_compareC (rtx, rtx, rtx);
extern rtx gen_addsi3_compareV_imm (rtx, rtx, rtx);
extern rtx gen_adddi3_compareV_imm (rtx, rtx, rtx);
extern rtx gen_addsi3_compareV (rtx, rtx, rtx);
extern rtx gen_adddi3_compareV (rtx, rtx, rtx);
extern rtx gen_aarch64_subsi_compare0 (rtx, rtx);
extern rtx gen_aarch64_subdi_compare0 (rtx, rtx);
extern rtx gen_subsi3 (rtx, rtx, rtx);
extern rtx gen_subdi3 (rtx, rtx, rtx);
extern rtx gen_subvsi_insn (rtx, rtx, rtx);
extern rtx gen_subvdi_insn (rtx, rtx, rtx);
extern rtx gen_subvsi_imm (rtx, rtx, rtx);
extern rtx gen_subvdi_imm (rtx, rtx, rtx);
extern rtx gen_negvsi_insn (rtx, rtx);
extern rtx gen_negvdi_insn (rtx, rtx);
extern rtx gen_negvsi_cmp_only (rtx);
extern rtx gen_negvdi_cmp_only (rtx);
extern rtx gen_negdi_carryout (rtx, rtx);
extern rtx gen_negvdi_carryinV (rtx, rtx);
extern rtx gen_subsi3_compare1_imm (rtx, rtx, rtx, rtx);
extern rtx gen_subdi3_compare1_imm (rtx, rtx, rtx, rtx);
extern rtx gen_subsi3_compare1 (rtx, rtx, rtx);
extern rtx gen_subdi3_compare1 (rtx, rtx, rtx);
extern rtx gen_negsi2 (rtx, rtx);
extern rtx gen_negdi2 (rtx, rtx);
extern rtx gen_negsi2_compare0 (rtx, rtx);
extern rtx gen_negdi2_compare0 (rtx, rtx);
extern rtx gen_mulsi3 (rtx, rtx, rtx);
extern rtx gen_muldi3 (rtx, rtx, rtx);
extern rtx gen_maddsi (rtx, rtx, rtx, rtx);
extern rtx gen_madddi (rtx, rtx, rtx, rtx);
extern rtx gen_mulsidi3 (rtx, rtx, rtx);
extern rtx gen_umulsidi3 (rtx, rtx, rtx);
extern rtx gen_maddsidi4 (rtx, rtx, rtx, rtx);
extern rtx gen_umaddsidi4 (rtx, rtx, rtx, rtx);
extern rtx gen_msubsidi4 (rtx, rtx, rtx, rtx);
extern rtx gen_umsubsidi4 (rtx, rtx, rtx, rtx);
extern rtx gen_smuldi3_highpart (rtx, rtx, rtx);
extern rtx gen_umuldi3_highpart (rtx, rtx, rtx);
extern rtx gen_divsi3 (rtx, rtx, rtx);
extern rtx gen_udivsi3 (rtx, rtx, rtx);
extern rtx gen_divdi3 (rtx, rtx, rtx);
extern rtx gen_udivdi3 (rtx, rtx, rtx);
extern rtx gen_cmpsi (rtx, rtx);
extern rtx gen_cmpdi (rtx, rtx);
extern rtx gen_fcmpsf (rtx, rtx);
extern rtx gen_fcmpdf (rtx, rtx);
extern rtx gen_fcmpesf (rtx, rtx);
extern rtx gen_fcmpedf (rtx, rtx);
extern rtx gen_aarch64_cstoreqi (rtx, rtx, rtx);
extern rtx gen_aarch64_cstorehi (rtx, rtx, rtx);
extern rtx gen_aarch64_cstoresi (rtx, rtx, rtx);
extern rtx gen_aarch64_cstoredi (rtx, rtx, rtx);
extern rtx gen_cstoreqi_neg (rtx, rtx, rtx);
extern rtx gen_cstorehi_neg (rtx, rtx, rtx);
extern rtx gen_cstoresi_neg (rtx, rtx, rtx);
extern rtx gen_cstoredi_neg (rtx, rtx, rtx);
extern rtx gen_aarch64_crc32b (rtx, rtx, rtx);
extern rtx gen_aarch64_crc32h (rtx, rtx, rtx);
extern rtx gen_aarch64_crc32w (rtx, rtx, rtx);
extern rtx gen_aarch64_crc32x (rtx, rtx, rtx);
extern rtx gen_aarch64_crc32cb (rtx, rtx, rtx);
extern rtx gen_aarch64_crc32ch (rtx, rtx, rtx);
extern rtx gen_aarch64_crc32cw (rtx, rtx, rtx);
extern rtx gen_aarch64_crc32cx (rtx, rtx, rtx);
extern rtx gen_csinc3si_insn (rtx, rtx, rtx, rtx);
extern rtx gen_csinc3di_insn (rtx, rtx, rtx, rtx);
extern rtx gen_csneg3_uxtw_insn (rtx, rtx, rtx, rtx);
extern rtx gen_csneg3si_insn (rtx, rtx, rtx, rtx);
extern rtx gen_csneg3di_insn (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_uqdecsi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqdecdi (rtx, rtx, rtx);
extern rtx gen_andsi3 (rtx, rtx, rtx);
extern rtx gen_iorsi3 (rtx, rtx, rtx);
extern rtx gen_xorsi3 (rtx, rtx, rtx);
extern rtx gen_anddi3 (rtx, rtx, rtx);
extern rtx gen_iordi3 (rtx, rtx, rtx);
extern rtx gen_xordi3 (rtx, rtx, rtx);
extern rtx gen_one_cmplsi2 (rtx, rtx);
extern rtx gen_one_cmpldi2 (rtx, rtx);
extern rtx gen_and_one_cmpl_ashlsi3 (rtx, rtx, rtx, rtx);
extern rtx gen_ior_one_cmpl_ashlsi3 (rtx, rtx, rtx, rtx);
extern rtx gen_xor_one_cmpl_ashlsi3 (rtx, rtx, rtx, rtx);
extern rtx gen_and_one_cmpl_ashrsi3 (rtx, rtx, rtx, rtx);
extern rtx gen_ior_one_cmpl_ashrsi3 (rtx, rtx, rtx, rtx);
extern rtx gen_xor_one_cmpl_ashrsi3 (rtx, rtx, rtx, rtx);
extern rtx gen_and_one_cmpl_lshrsi3 (rtx, rtx, rtx, rtx);
extern rtx gen_ior_one_cmpl_lshrsi3 (rtx, rtx, rtx, rtx);
extern rtx gen_xor_one_cmpl_lshrsi3 (rtx, rtx, rtx, rtx);
extern rtx gen_and_one_cmpl_rotrsi3 (rtx, rtx, rtx, rtx);
extern rtx gen_ior_one_cmpl_rotrsi3 (rtx, rtx, rtx, rtx);
extern rtx gen_xor_one_cmpl_rotrsi3 (rtx, rtx, rtx, rtx);
extern rtx gen_and_one_cmpl_ashldi3 (rtx, rtx, rtx, rtx);
extern rtx gen_ior_one_cmpl_ashldi3 (rtx, rtx, rtx, rtx);
extern rtx gen_xor_one_cmpl_ashldi3 (rtx, rtx, rtx, rtx);
extern rtx gen_and_one_cmpl_ashrdi3 (rtx, rtx, rtx, rtx);
extern rtx gen_ior_one_cmpl_ashrdi3 (rtx, rtx, rtx, rtx);
extern rtx gen_xor_one_cmpl_ashrdi3 (rtx, rtx, rtx, rtx);
extern rtx gen_and_one_cmpl_lshrdi3 (rtx, rtx, rtx, rtx);
extern rtx gen_ior_one_cmpl_lshrdi3 (rtx, rtx, rtx, rtx);
extern rtx gen_xor_one_cmpl_lshrdi3 (rtx, rtx, rtx, rtx);
extern rtx gen_and_one_cmpl_rotrdi3 (rtx, rtx, rtx, rtx);
extern rtx gen_ior_one_cmpl_rotrdi3 (rtx, rtx, rtx, rtx);
extern rtx gen_xor_one_cmpl_rotrdi3 (rtx, rtx, rtx, rtx);
extern rtx gen_clzsi2 (rtx, rtx);
extern rtx gen_clzdi2 (rtx, rtx);
extern rtx gen_clrsbsi2 (rtx, rtx);
extern rtx gen_clrsbdi2 (rtx, rtx);
extern rtx gen_rbitsi2 (rtx, rtx);
extern rtx gen_rbitdi2 (rtx, rtx);
extern rtx gen_ctzsi2 (rtx, rtx);
extern rtx gen_ctzdi2 (rtx, rtx);
extern rtx gen_bswapsi2 (rtx, rtx);
extern rtx gen_bswapdi2 (rtx, rtx);
extern rtx gen_bswaphi2 (rtx, rtx);
extern rtx gen_rev16si2 (rtx, rtx, rtx, rtx);
extern rtx gen_rev16di2 (rtx, rtx, rtx, rtx);
extern rtx gen_rev16si2_alt (rtx, rtx, rtx, rtx);
extern rtx gen_rev16di2_alt (rtx, rtx, rtx, rtx);
extern rtx gen_btrunchf2 (rtx, rtx);
extern rtx gen_ceilhf2 (rtx, rtx);
extern rtx gen_floorhf2 (rtx, rtx);
extern rtx gen_frintnhf2 (rtx, rtx);
extern rtx gen_nearbyinthf2 (rtx, rtx);
extern rtx gen_rinthf2 (rtx, rtx);
extern rtx gen_roundhf2 (rtx, rtx);
extern rtx gen_btruncsf2 (rtx, rtx);
extern rtx gen_ceilsf2 (rtx, rtx);
extern rtx gen_floorsf2 (rtx, rtx);
extern rtx gen_frintnsf2 (rtx, rtx);
extern rtx gen_nearbyintsf2 (rtx, rtx);
extern rtx gen_rintsf2 (rtx, rtx);
extern rtx gen_roundsf2 (rtx, rtx);
extern rtx gen_btruncdf2 (rtx, rtx);
extern rtx gen_ceildf2 (rtx, rtx);
extern rtx gen_floordf2 (rtx, rtx);
extern rtx gen_frintndf2 (rtx, rtx);
extern rtx gen_nearbyintdf2 (rtx, rtx);
extern rtx gen_rintdf2 (rtx, rtx);
extern rtx gen_rounddf2 (rtx, rtx);
extern rtx gen_lbtrunchfsi2 (rtx, rtx);
extern rtx gen_lceilhfsi2 (rtx, rtx);
extern rtx gen_lfloorhfsi2 (rtx, rtx);
extern rtx gen_lroundhfsi2 (rtx, rtx);
extern rtx gen_lfrintnhfsi2 (rtx, rtx);
extern rtx gen_lbtruncuhfsi2 (rtx, rtx);
extern rtx gen_lceiluhfsi2 (rtx, rtx);
extern rtx gen_lflooruhfsi2 (rtx, rtx);
extern rtx gen_lrounduhfsi2 (rtx, rtx);
extern rtx gen_lfrintnuhfsi2 (rtx, rtx);
extern rtx gen_lbtruncsfsi2 (rtx, rtx);
extern rtx gen_lceilsfsi2 (rtx, rtx);
extern rtx gen_lfloorsfsi2 (rtx, rtx);
extern rtx gen_lroundsfsi2 (rtx, rtx);
extern rtx gen_lfrintnsfsi2 (rtx, rtx);
extern rtx gen_lbtruncusfsi2 (rtx, rtx);
extern rtx gen_lceilusfsi2 (rtx, rtx);
extern rtx gen_lfloorusfsi2 (rtx, rtx);
extern rtx gen_lroundusfsi2 (rtx, rtx);
extern rtx gen_lfrintnusfsi2 (rtx, rtx);
extern rtx gen_lbtruncdfsi2 (rtx, rtx);
extern rtx gen_lceildfsi2 (rtx, rtx);
extern rtx gen_lfloordfsi2 (rtx, rtx);
extern rtx gen_lrounddfsi2 (rtx, rtx);
extern rtx gen_lfrintndfsi2 (rtx, rtx);
extern rtx gen_lbtruncudfsi2 (rtx, rtx);
extern rtx gen_lceiludfsi2 (rtx, rtx);
extern rtx gen_lfloorudfsi2 (rtx, rtx);
extern rtx gen_lroundudfsi2 (rtx, rtx);
extern rtx gen_lfrintnudfsi2 (rtx, rtx);
extern rtx gen_lbtrunchfdi2 (rtx, rtx);
extern rtx gen_lceilhfdi2 (rtx, rtx);
extern rtx gen_lfloorhfdi2 (rtx, rtx);
extern rtx gen_lroundhfdi2 (rtx, rtx);
extern rtx gen_lfrintnhfdi2 (rtx, rtx);
extern rtx gen_lbtruncuhfdi2 (rtx, rtx);
extern rtx gen_lceiluhfdi2 (rtx, rtx);
extern rtx gen_lflooruhfdi2 (rtx, rtx);
extern rtx gen_lrounduhfdi2 (rtx, rtx);
extern rtx gen_lfrintnuhfdi2 (rtx, rtx);
extern rtx gen_lbtruncsfdi2 (rtx, rtx);
extern rtx gen_lceilsfdi2 (rtx, rtx);
extern rtx gen_lfloorsfdi2 (rtx, rtx);
extern rtx gen_lroundsfdi2 (rtx, rtx);
extern rtx gen_lfrintnsfdi2 (rtx, rtx);
extern rtx gen_lbtruncusfdi2 (rtx, rtx);
extern rtx gen_lceilusfdi2 (rtx, rtx);
extern rtx gen_lfloorusfdi2 (rtx, rtx);
extern rtx gen_lroundusfdi2 (rtx, rtx);
extern rtx gen_lfrintnusfdi2 (rtx, rtx);
extern rtx gen_lbtruncdfdi2 (rtx, rtx);
extern rtx gen_lceildfdi2 (rtx, rtx);
extern rtx gen_lfloordfdi2 (rtx, rtx);
extern rtx gen_lrounddfdi2 (rtx, rtx);
extern rtx gen_lfrintndfdi2 (rtx, rtx);
extern rtx gen_lbtruncudfdi2 (rtx, rtx);
extern rtx gen_lceiludfdi2 (rtx, rtx);
extern rtx gen_lfloorudfdi2 (rtx, rtx);
extern rtx gen_lroundudfdi2 (rtx, rtx);
extern rtx gen_lfrintnudfdi2 (rtx, rtx);
extern rtx gen_extendsfdf2 (rtx, rtx);
extern rtx gen_extendhfsf2 (rtx, rtx);
extern rtx gen_extendhfdf2 (rtx, rtx);
extern rtx gen_truncdfsf2 (rtx, rtx);
extern rtx gen_truncsfhf2 (rtx, rtx);
extern rtx gen_truncdfhf2 (rtx, rtx);
extern rtx gen_fix_truncsfsi2 (rtx, rtx);
extern rtx gen_fixuns_truncsfsi2 (rtx, rtx);
extern rtx gen_fix_truncdfdi2 (rtx, rtx);
extern rtx gen_fixuns_truncdfdi2 (rtx, rtx);
extern rtx gen_fix_trunchfsi2 (rtx, rtx);
extern rtx gen_fixuns_trunchfsi2 (rtx, rtx);
extern rtx gen_fix_trunchfdi2 (rtx, rtx);
extern rtx gen_fixuns_trunchfdi2 (rtx, rtx);
extern rtx gen_fix_truncdfsi2 (rtx, rtx);
extern rtx gen_fixuns_truncdfsi2 (rtx, rtx);
extern rtx gen_fix_truncsfdi2 (rtx, rtx);
extern rtx gen_fixuns_truncsfdi2 (rtx, rtx);
extern rtx gen_floatsisf2 (rtx, rtx);
extern rtx gen_floatunssisf2 (rtx, rtx);
extern rtx gen_floatdidf2 (rtx, rtx);
extern rtx gen_floatunsdidf2 (rtx, rtx);
extern rtx gen_floatdisf2 (rtx, rtx);
extern rtx gen_floatunsdisf2 (rtx, rtx);
extern rtx gen_floatsidf2 (rtx, rtx);
extern rtx gen_floatunssidf2 (rtx, rtx);
extern rtx gen_aarch64_fp16_floatsihf2 (rtx, rtx);
extern rtx gen_aarch64_fp16_floatunssihf2 (rtx, rtx);
extern rtx gen_aarch64_fp16_floatdihf2 (rtx, rtx);
extern rtx gen_aarch64_fp16_floatunsdihf2 (rtx, rtx);
extern rtx gen_fcvtzssf3 (rtx, rtx, rtx);
extern rtx gen_fcvtzusf3 (rtx, rtx, rtx);
extern rtx gen_fcvtzsdf3 (rtx, rtx, rtx);
extern rtx gen_fcvtzudf3 (rtx, rtx, rtx);
extern rtx gen_scvtfsi3 (rtx, rtx, rtx);
extern rtx gen_ucvtfsi3 (rtx, rtx, rtx);
extern rtx gen_scvtfdi3 (rtx, rtx, rtx);
extern rtx gen_ucvtfdi3 (rtx, rtx, rtx);
extern rtx gen_fcvtzshfsi3 (rtx, rtx, rtx);
extern rtx gen_fcvtzuhfsi3 (rtx, rtx, rtx);
extern rtx gen_fcvtzshfdi3 (rtx, rtx, rtx);
extern rtx gen_fcvtzuhfdi3 (rtx, rtx, rtx);
extern rtx gen_scvtfsihf3 (rtx, rtx, rtx);
extern rtx gen_ucvtfsihf3 (rtx, rtx, rtx);
extern rtx gen_scvtfdihf3 (rtx, rtx, rtx);
extern rtx gen_ucvtfdihf3 (rtx, rtx, rtx);
extern rtx gen_fcvtzshf3 (rtx, rtx, rtx);
extern rtx gen_fcvtzuhf3 (rtx, rtx, rtx);
extern rtx gen_scvtfhi3 (rtx, rtx, rtx);
extern rtx gen_ucvtfhi3 (rtx, rtx, rtx);
extern rtx gen_addhf3 (rtx, rtx, rtx);
extern rtx gen_addsf3 (rtx, rtx, rtx);
extern rtx gen_adddf3 (rtx, rtx, rtx);
extern rtx gen_subhf3 (rtx, rtx, rtx);
extern rtx gen_subsf3 (rtx, rtx, rtx);
extern rtx gen_subdf3 (rtx, rtx, rtx);
extern rtx gen_mulhf3 (rtx, rtx, rtx);
extern rtx gen_mulsf3 (rtx, rtx, rtx);
extern rtx gen_muldf3 (rtx, rtx, rtx);
extern rtx gen_neghf2 (rtx, rtx);
extern rtx gen_negsf2 (rtx, rtx);
extern rtx gen_negdf2 (rtx, rtx);
extern rtx gen_abshf2 (rtx, rtx);
extern rtx gen_abssf2 (rtx, rtx);
extern rtx gen_absdf2 (rtx, rtx);
extern rtx gen_smaxsf3 (rtx, rtx, rtx);
extern rtx gen_smaxdf3 (rtx, rtx, rtx);
extern rtx gen_sminsf3 (rtx, rtx, rtx);
extern rtx gen_smindf3 (rtx, rtx, rtx);
extern rtx gen_smax_nanhf3 (rtx, rtx, rtx);
extern rtx gen_smin_nanhf3 (rtx, rtx, rtx);
extern rtx gen_fmaxhf3 (rtx, rtx, rtx);
extern rtx gen_fminhf3 (rtx, rtx, rtx);
extern rtx gen_smax_nansf3 (rtx, rtx, rtx);
extern rtx gen_smin_nansf3 (rtx, rtx, rtx);
extern rtx gen_fmaxsf3 (rtx, rtx, rtx);
extern rtx gen_fminsf3 (rtx, rtx, rtx);
extern rtx gen_smax_nandf3 (rtx, rtx, rtx);
extern rtx gen_smin_nandf3 (rtx, rtx, rtx);
extern rtx gen_fmaxdf3 (rtx, rtx, rtx);
extern rtx gen_fmindf3 (rtx, rtx, rtx);
extern rtx gen_copysignsf3_insn (rtx, rtx, rtx, rtx);
extern rtx gen_copysigndf3_insn (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_movdi_tilow (rtx, rtx);
extern rtx gen_aarch64_movdi_tflow (rtx, rtx);
extern rtx gen_aarch64_movdi_tihigh (rtx, rtx);
extern rtx gen_aarch64_movdi_tfhigh (rtx, rtx);
extern rtx gen_aarch64_movtihigh_di (rtx, rtx);
extern rtx gen_aarch64_movtfhigh_di (rtx, rtx);
extern rtx gen_aarch64_movtilow_di (rtx, rtx);
extern rtx gen_aarch64_movtflow_di (rtx, rtx);
extern rtx gen_aarch64_movtilow_tilow (rtx, rtx);
extern rtx gen_add_losym_si (rtx, rtx, rtx);
extern rtx gen_add_losym_di (rtx, rtx, rtx);
extern rtx gen_ldr_got_small_si (rtx, rtx, rtx);
extern rtx gen_ldr_got_small_di (rtx, rtx, rtx);
extern rtx gen_ldr_got_small_sidi (rtx, rtx, rtx);
extern rtx gen_ldr_got_small_28k_si (rtx, rtx, rtx);
extern rtx gen_ldr_got_small_28k_di (rtx, rtx, rtx);
extern rtx gen_ldr_got_small_28k_sidi (rtx, rtx, rtx);
extern rtx gen_ldr_got_tiny_si (rtx, rtx);
extern rtx gen_ldr_got_tiny_di (rtx, rtx);
extern rtx gen_ldr_got_tiny_sidi (rtx, rtx);
extern rtx gen_aarch64_load_tp_hard (rtx);
extern rtx gen_tlsie_small_si (rtx, rtx);
extern rtx gen_tlsie_small_di (rtx, rtx);
extern rtx gen_tlsie_small_sidi (rtx, rtx);
extern rtx gen_tlsie_tiny_si (rtx, rtx, rtx);
extern rtx gen_tlsie_tiny_di (rtx, rtx, rtx);
extern rtx gen_tlsie_tiny_sidi (rtx, rtx, rtx);
extern rtx gen_tlsle12_si (rtx, rtx, rtx);
extern rtx gen_tlsle12_di (rtx, rtx, rtx);
extern rtx gen_tlsle24_si (rtx, rtx, rtx);
extern rtx gen_tlsle24_di (rtx, rtx, rtx);
extern rtx gen_tlsle32_si (rtx, rtx);
extern rtx gen_tlsle32_di (rtx, rtx);
extern rtx gen_tlsle48_si (rtx, rtx);
extern rtx gen_tlsle48_di (rtx, rtx);
extern rtx gen_tlsdesc_small_advsimd_si (rtx);
extern rtx gen_tlsdesc_small_advsimd_di (rtx);
extern rtx gen_tlsdesc_small_sve_si (rtx, rtx);
extern rtx gen_tlsdesc_small_sve_di (rtx, rtx);
extern rtx gen_stack_tie (rtx, rtx);
extern rtx gen_aarch64_fjcvtzs (rtx, rtx);
extern rtx gen_paciasp (void);
extern rtx gen_autiasp (void);
extern rtx gen_pacibsp (void);
extern rtx gen_autibsp (void);
extern rtx gen_pacia1716 (void);
extern rtx gen_autia1716 (void);
extern rtx gen_pacib1716 (void);
extern rtx gen_autib1716 (void);
extern rtx gen_xpaclri (void);
extern rtx gen_blockage (void);
extern rtx gen_probe_stack_range (rtx, rtx, rtx);
extern rtx gen_probe_sve_stack_clash_si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_probe_sve_stack_clash_di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_reg_stack_protect_address_si (rtx);
extern rtx gen_reg_stack_protect_address_di (rtx);
extern rtx gen_stack_protect_set_si (rtx, rtx);
extern rtx gen_stack_protect_set_di (rtx, rtx);
extern rtx gen_stack_protect_test_si (rtx, rtx, rtx);
extern rtx gen_stack_protect_test_di (rtx, rtx, rtx);
extern rtx gen_set_fpcr (rtx);
extern rtx gen_get_fpcr (rtx);
extern rtx gen_set_fpsr (rtx);
extern rtx gen_get_fpsr (rtx);
extern rtx gen_speculation_tracker (rtx);
extern rtx gen_speculation_tracker_rev (rtx);
extern rtx gen_bti_noarg (void);
extern rtx gen_bti_c (void);
extern rtx gen_bti_j (void);
extern rtx gen_bti_jc (void);
extern rtx gen_speculation_barrier (void);
extern rtx gen_despeculate_simpleqi (rtx, rtx, rtx);
extern rtx gen_despeculate_simplehi (rtx, rtx, rtx);
extern rtx gen_despeculate_simplesi (rtx, rtx, rtx);
extern rtx gen_despeculate_simpledi (rtx, rtx, rtx);
extern rtx gen_despeculate_simpleti (rtx, rtx, rtx);
extern rtx gen_aarch64_frint32zv2sf (rtx, rtx);
extern rtx gen_aarch64_frint32xv2sf (rtx, rtx);
extern rtx gen_aarch64_frint64zv2sf (rtx, rtx);
extern rtx gen_aarch64_frint64xv2sf (rtx, rtx);
extern rtx gen_aarch64_frint32zv4sf (rtx, rtx);
extern rtx gen_aarch64_frint32xv4sf (rtx, rtx);
extern rtx gen_aarch64_frint64zv4sf (rtx, rtx);
extern rtx gen_aarch64_frint64xv4sf (rtx, rtx);
extern rtx gen_aarch64_frint32zv2df (rtx, rtx);
extern rtx gen_aarch64_frint32xv2df (rtx, rtx);
extern rtx gen_aarch64_frint64zv2df (rtx, rtx);
extern rtx gen_aarch64_frint64xv2df (rtx, rtx);
extern rtx gen_aarch64_frint32zdf (rtx, rtx);
extern rtx gen_aarch64_frint32xdf (rtx, rtx);
extern rtx gen_aarch64_frint64zdf (rtx, rtx);
extern rtx gen_aarch64_frint64xdf (rtx, rtx);
extern rtx gen_aarch64_frint32zsf (rtx, rtx);
extern rtx gen_aarch64_frint32xsf (rtx, rtx);
extern rtx gen_aarch64_frint64zsf (rtx, rtx);
extern rtx gen_aarch64_frint64xsf (rtx, rtx);
extern rtx gen_tstart (rtx);
extern rtx gen_ttest (rtx);
extern rtx gen_tcommit (void);
extern rtx gen_tcancel (rtx);
extern rtx gen_aarch64_rndr (rtx);
extern rtx gen_aarch64_rndrrs (rtx);
extern rtx gen_irg (rtx, rtx, rtx);
extern rtx gen_gmi (rtx, rtx, rtx);
extern rtx gen_addg (rtx, rtx, rtx, rtx);
extern rtx gen_subp (rtx, rtx, rtx);
extern rtx gen_ldg (rtx, rtx, rtx);
extern rtx gen_stg (rtx, rtx, rtx);
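/* Advanced SIMD pattern emitters begin here, starting with the
   element-duplication (DUP) patterns.  */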
extern rtx gen_aarch64_simd_dupv8qi (rtx, rtx);
extern rtx gen_aarch64_simd_dupv16qi (rtx, rtx);
extern rtx gen_aarch64_simd_dupv4hi (rtx, rtx);
extern rtx gen_aarch64_simd_dupv8hi (rtx, rtx);
extern rtx gen_aarch64_simd_dupv2si (rtx, rtx);
extern rtx gen_aarch64_simd_dupv4si (rtx, rtx);
extern rtx gen_aarch64_simd_dupv2di (rtx, rtx);
extern rtx gen_aarch64_simd_dupv4hf (rtx, rtx);
extern rtx gen_aarch64_simd_dupv8hf (rtx, rtx);
extern rtx gen_aarch64_simd_dupv2sf (rtx, rtx);
extern rtx gen_aarch64_simd_dupv4sf (rtx, rtx);
extern rtx gen_aarch64_simd_dupv2df (rtx, rtx);
extern rtx gen_aarch64_simd_dupv4bf (rtx, rtx);
extern rtx gen_aarch64_simd_dupv8bf (rtx, rtx);
extern rtx gen_aarch64_dup_lanev8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lanev16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lanev4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lanev8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lanev2si (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lanev4si (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lanev2di (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lanev4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lanev8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lanev4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lanev8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lanev2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lanev4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lanev2df (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lane_to_128v8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lane_to_64v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lane_to_128v4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lane_to_64v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lane_to_128v2si (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lane_to_64v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lane_to_128v4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lane_to_64v8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lane_to_128v2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_dup_lane_to_64v4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_store_lane0v8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_store_lane0v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_store_lane0v4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_store_lane0v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_store_lane0v2si (rtx, rtx, rtx);
extern rtx gen_aarch64_store_lane0v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_store_lane0v2di (rtx, rtx, rtx);
extern rtx gen_aarch64_store_lane0v4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_store_lane0v8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_store_lane0v4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_store_lane0v8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_store_lane0v2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_store_lane0v4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_store_lane0v2df (rtx, rtx, rtx);
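/* Load/store pair emitters for the 64-bit vector modes follow; each
   mode combination of the two transfers gets its own function, with
   the 128-bit (Q-register) variants after them.  */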
extern rtx gen_load_pairv8qiv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4hiv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4hfv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2siv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2sfv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairdfv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8qiv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4hiv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4hfv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2siv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2sfv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairdfv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8qiv4hf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4hiv4hf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4hfv4hf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2siv4hf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2sfv4hf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairdfv4hf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8qiv2si (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4hiv2si (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4hfv2si (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2siv2si (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2sfv2si (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairdfv2si (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8qiv2sf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4hiv2sf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4hfv2sf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2siv2sf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2sfv2sf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairdfv2sf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8qidf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4hidf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4hfdf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2sidf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2sfdf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairdfdf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8qiv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4hiv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4hfv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2siv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2sfv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairdfv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8qiv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4hiv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4hfv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2siv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2sfv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairdfv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8qiv4hf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4hiv4hf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4hfv4hf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2siv4hf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2sfv4hf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairdfv4hf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8qiv2si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4hiv2si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4hfv2si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2siv2si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2sfv2si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairdfv2si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8qiv2sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4hiv2sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4hfv2sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2siv2sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2sfv2sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairdfv2sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8qidf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4hidf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4hfdf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2sidf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2sfdf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairdfdf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv16qiv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv16qiv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv16qiv4si (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv16qiv2di (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv16qiv8hf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv16qiv8bf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv16qiv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv16qiv2df (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8hiv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8hiv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8hiv4si (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8hiv2di (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8hiv8hf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8hiv8bf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8hiv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8hiv2df (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4siv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4siv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4siv4si (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4siv2di (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4siv8hf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4siv8bf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4siv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4siv2df (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2div16qi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2div8hi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2div4si (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2div2di (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2div8hf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2div8bf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2div4sf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2div2df (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8hfv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8hfv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8hfv4si (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8hfv2di (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8hfv8hf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8hfv8bf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8hfv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8hfv2df (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4sfv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4sfv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4sfv4si (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4sfv2di (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4sfv8hf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4sfv8bf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4sfv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv4sfv2df (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2dfv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2dfv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2dfv4si (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2dfv2di (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2dfv8hf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2dfv8bf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2dfv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv2dfv2df (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8bfv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8bfv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8bfv4si (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8bfv2di (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8bfv8hf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8bfv8bf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8bfv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_load_pairv8bfv2df (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv16qiv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv16qiv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv16qiv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv16qiv2di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv16qiv8hf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv16qiv8bf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv16qiv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv16qiv2df (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8hiv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8hiv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8hiv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8hiv2di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8hiv8hf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8hiv8bf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8hiv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8hiv2df (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4siv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4siv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4siv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4siv2di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4siv8hf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4siv8bf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4siv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4siv2df (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2div16qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2div8hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2div4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2div2di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2div8hf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2div8bf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2div4sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2div2df (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8hfv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8hfv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8hfv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8hfv2di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8hfv8hf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8hfv8bf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8hfv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8hfv2df (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4sfv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4sfv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4sfv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4sfv2di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4sfv8hf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4sfv8bf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4sfv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv4sfv2df (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2dfv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2dfv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2dfv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2dfv2di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2dfv8hf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2dfv8bf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2dfv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv2dfv2df (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8bfv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8bfv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8bfv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8bfv2di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8bfv8hf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8bfv8bf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8bfv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_store_pairv8bfv2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_mov_from_v16qilow (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_mov_from_v8hilow (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_mov_from_v4silow (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_mov_from_v8hflow (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_mov_from_v8bflow (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_mov_from_v4sflow (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_mov_from_v16qihigh (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_mov_from_v8hihigh (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_mov_from_v4sihigh (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_mov_from_v8hfhigh (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_mov_from_v8bfhigh (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_mov_from_v4sfhigh (rtx, rtx, rtx);
extern rtx gen_ornv8qi3 (rtx, rtx, rtx);
extern rtx gen_ornv16qi3 (rtx, rtx, rtx);
extern rtx gen_ornv4hi3 (rtx, rtx, rtx);
extern rtx gen_ornv8hi3 (rtx, rtx, rtx);
extern rtx gen_ornv2si3 (rtx, rtx, rtx);
extern rtx gen_ornv4si3 (rtx, rtx, rtx);
extern rtx gen_ornv2di3 (rtx, rtx, rtx);
extern rtx gen_bicv8qi3 (rtx, rtx, rtx);
extern rtx gen_bicv16qi3 (rtx, rtx, rtx);
extern rtx gen_bicv4hi3 (rtx, rtx, rtx);
extern rtx gen_bicv8hi3 (rtx, rtx, rtx);
extern rtx gen_bicv2si3 (rtx, rtx, rtx);
extern rtx gen_bicv4si3 (rtx, rtx, rtx);
extern rtx gen_bicv2di3 (rtx, rtx, rtx);
extern rtx gen_addv8qi3 (rtx, rtx, rtx);
extern rtx gen_addv16qi3 (rtx, rtx, rtx);
extern rtx gen_addv4hi3 (rtx, rtx, rtx);
extern rtx gen_addv8hi3 (rtx, rtx, rtx);
extern rtx gen_addv2si3 (rtx, rtx, rtx);
extern rtx gen_addv4si3 (rtx, rtx, rtx);
extern rtx gen_addv2di3 (rtx, rtx, rtx);
extern rtx gen_subv8qi3 (rtx, rtx, rtx);
extern rtx gen_subv16qi3 (rtx, rtx, rtx);
extern rtx gen_subv4hi3 (rtx, rtx, rtx);
extern rtx gen_subv8hi3 (rtx, rtx, rtx);
extern rtx gen_subv2si3 (rtx, rtx, rtx);
extern rtx gen_subv4si3 (rtx, rtx, rtx);
extern rtx gen_subv2di3 (rtx, rtx, rtx);
extern rtx gen_mulv8qi3 (rtx, rtx, rtx);
extern rtx gen_mulv16qi3 (rtx, rtx, rtx);
extern rtx gen_mulv4hi3 (rtx, rtx, rtx);
extern rtx gen_mulv8hi3 (rtx, rtx, rtx);
extern rtx gen_mulv2si3 (rtx, rtx, rtx);
extern rtx gen_mulv4si3 (rtx, rtx, rtx);
extern rtx gen_bswapv4hi2 (rtx, rtx);
extern rtx gen_bswapv8hi2 (rtx, rtx);
extern rtx gen_bswapv2si2 (rtx, rtx);
extern rtx gen_bswapv4si2 (rtx, rtx);
extern rtx gen_bswapv2di2 (rtx, rtx);
extern rtx gen_aarch64_rbitv8qi (rtx, rtx);
extern rtx gen_aarch64_rbitv16qi (rtx, rtx);
extern rtx gen_aarch64_fcadd90v4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_fcadd270v4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_fcadd90v8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_fcadd270v8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_fcadd90v2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_fcadd270v2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_fcadd90v4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_fcadd270v4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_fcadd90v2df (rtx, rtx, rtx);
extern rtx gen_aarch64_fcadd270v2df (rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla0v4hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla90v4hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla180v4hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla270v4hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla0v8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla90v8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla180v8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla270v8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla0v2sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla90v2sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla180v2sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla270v2sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla0v4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla90v4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla180v4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla270v4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla0v2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla90v2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla180v2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla270v2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane0v4hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane90v4hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane180v4hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane270v4hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane0v8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane90v8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane180v8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane270v8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane0v2sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane90v2sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane180v2sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane270v2sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane0v4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane90v4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane180v4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane270v4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane0v2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane90v2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane180v2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane270v2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_laneq0v4hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_laneq90v4hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_laneq180v4hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_laneq270v4hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmlaq_lane0v8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmlaq_lane90v8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmlaq_lane180v8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmlaq_lane270v8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmlaq_lane0v4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmlaq_lane90v4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmlaq_lane180v4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmlaq_lane270v4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sdotv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_udotv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sdotv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_udotv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usdotv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usdotv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sdot_lanev8qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_udot_lanev8qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sdot_lanev16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_udot_lanev16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sdot_laneqv8qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_udot_laneqv8qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sdot_laneqv16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_udot_laneqv16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usdot_lanev8qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sudot_lanev8qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usdot_lanev16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sudot_lanev16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usdot_laneqv8qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sudot_laneqv8qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usdot_laneqv16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sudot_laneqv16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_rsqrtev4hf (rtx, rtx);
extern rtx gen_aarch64_rsqrtev8hf (rtx, rtx);
extern rtx gen_aarch64_rsqrtev2sf (rtx, rtx);
extern rtx gen_aarch64_rsqrtev4sf (rtx, rtx);
extern rtx gen_aarch64_rsqrtev2df (rtx, rtx);
extern rtx gen_aarch64_rsqrtehf (rtx, rtx);
extern rtx gen_aarch64_rsqrtesf (rtx, rtx);
extern rtx gen_aarch64_rsqrtedf (rtx, rtx);
extern rtx gen_aarch64_rsqrtsv4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_rsqrtsv8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_rsqrtsv2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_rsqrtsv4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_rsqrtsv2df (rtx, rtx, rtx);
extern rtx gen_aarch64_rsqrtshf (rtx, rtx, rtx);
extern rtx gen_aarch64_rsqrtssf (rtx, rtx, rtx);
extern rtx gen_aarch64_rsqrtsdf (rtx, rtx, rtx);
extern rtx gen_negv8qi2 (rtx, rtx);
extern rtx gen_negv16qi2 (rtx, rtx);
extern rtx gen_negv4hi2 (rtx, rtx);
extern rtx gen_negv8hi2 (rtx, rtx);
extern rtx gen_negv2si2 (rtx, rtx);
extern rtx gen_negv4si2 (rtx, rtx);
extern rtx gen_negv2di2 (rtx, rtx);
extern rtx gen_absv8qi2 (rtx, rtx);
extern rtx gen_absv16qi2 (rtx, rtx);
extern rtx gen_absv4hi2 (rtx, rtx);
extern rtx gen_absv8hi2 (rtx, rtx);
extern rtx gen_absv2si2 (rtx, rtx);
extern rtx gen_absv4si2 (rtx, rtx);
extern rtx gen_absv2di2 (rtx, rtx);
extern rtx gen_aarch64_absv8qi (rtx, rtx);
extern rtx gen_aarch64_absv16qi (rtx, rtx);
extern rtx gen_aarch64_absv4hi (rtx, rtx);
extern rtx gen_aarch64_absv8hi (rtx, rtx);
extern rtx gen_aarch64_absv2si (rtx, rtx);
extern rtx gen_aarch64_absv4si (rtx, rtx);
extern rtx gen_aarch64_absv2di (rtx, rtx);
extern rtx gen_aarch64_absdi (rtx, rtx);
extern rtx gen_aarch64_sabdv8qi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_uabdv8qi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_sabdv16qi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_uabdv16qi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_sabdv4hi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_uabdv4hi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_sabdv8hi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_uabdv8hi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_sabdv2si_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_uabdv2si_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_sabdv4si_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_uabdv4si_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_sabdl2v8qi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_uabdl2v8qi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_sabdl2v16qi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_uabdl2v16qi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_sabdl2v4hi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_uabdl2v4hi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_sabdl2v8hi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_uabdl2v8hi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_sabdl2v4si_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_uabdl2v4si_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_sabalv8qi_4 (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_uabalv8qi_4 (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sabalv16qi_4 (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_uabalv16qi_4 (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sabalv4hi_4 (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_uabalv4hi_4 (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sabalv8hi_4 (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_uabalv8hi_4 (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sabalv4si_4 (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_uabalv4si_4 (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sadalpv8qi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_uadalpv8qi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_sadalpv16qi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_uadalpv16qi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_sadalpv4hi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_uadalpv4hi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_sadalpv8hi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_uadalpv8hi_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_sadalpv4si_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_uadalpv4si_3 (rtx, rtx, rtx);
extern rtx gen_abav8qi_3 (rtx, rtx, rtx, rtx);
extern rtx gen_abav16qi_3 (rtx, rtx, rtx, rtx);
extern rtx gen_abav4hi_3 (rtx, rtx, rtx, rtx);
extern rtx gen_abav8hi_3 (rtx, rtx, rtx, rtx);
extern rtx gen_abav2si_3 (rtx, rtx, rtx, rtx);
extern rtx gen_abav4si_3 (rtx, rtx, rtx, rtx);
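/* FABD: floating-point absolute difference, vector and scalar modes.  */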
extern rtx gen_fabdv4hf3 (rtx, rtx, rtx);
extern rtx gen_fabdv8hf3 (rtx, rtx, rtx);
extern rtx gen_fabdv2sf3 (rtx, rtx, rtx);
extern rtx gen_fabdv4sf3 (rtx, rtx, rtx);
extern rtx gen_fabdv2df3 (rtx, rtx, rtx);
extern rtx gen_fabdhf3 (rtx, rtx, rtx);
extern rtx gen_fabdsf3 (rtx, rtx, rtx);
extern rtx gen_fabddf3 (rtx, rtx, rtx);
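/* Standard bitwise patterns (andM3/iorM3/xorM3/one_cmplM2) over the
   64-bit and 128-bit integer vector modes.  */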
extern rtx gen_andv8qi3 (rtx, rtx, rtx);
extern rtx gen_andv16qi3 (rtx, rtx, rtx);
extern rtx gen_andv4hi3 (rtx, rtx, rtx);
extern rtx gen_andv8hi3 (rtx, rtx, rtx);
extern rtx gen_andv2si3 (rtx, rtx, rtx);
extern rtx gen_andv4si3 (rtx, rtx, rtx);
extern rtx gen_andv2di3 (rtx, rtx, rtx);
extern rtx gen_iorv8qi3 (rtx, rtx, rtx);
extern rtx gen_iorv16qi3 (rtx, rtx, rtx);
extern rtx gen_iorv4hi3 (rtx, rtx, rtx);
extern rtx gen_iorv8hi3 (rtx, rtx, rtx);
extern rtx gen_iorv2si3 (rtx, rtx, rtx);
extern rtx gen_iorv4si3 (rtx, rtx, rtx);
extern rtx gen_iorv2di3 (rtx, rtx, rtx);
extern rtx gen_xorv8qi3 (rtx, rtx, rtx);
extern rtx gen_xorv16qi3 (rtx, rtx, rtx);
extern rtx gen_xorv4hi3 (rtx, rtx, rtx);
extern rtx gen_xorv8hi3 (rtx, rtx, rtx);
extern rtx gen_xorv2si3 (rtx, rtx, rtx);
extern rtx gen_xorv4si3 (rtx, rtx, rtx);
extern rtx gen_xorv2di3 (rtx, rtx, rtx);
extern rtx gen_one_cmplv8qi2 (rtx, rtx);
extern rtx gen_one_cmplv16qi2 (rtx, rtx);
extern rtx gen_one_cmplv4hi2 (rtx, rtx);
extern rtx gen_one_cmplv8hi2 (rtx, rtx);
extern rtx gen_one_cmplv2si2 (rtx, rtx);
extern rtx gen_one_cmplv4si2 (rtx, rtx);
extern rtx gen_one_cmplv2di2 (rtx, rtx);
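/* Lane insertion (vec_set): build an insert of one scalar element into a
   selected lane of a vector.  */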
extern rtx gen_aarch64_simd_vec_setv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_setv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_setv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_setv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_setv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_setv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_setv2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_setv4hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_setv8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_setv4bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_setv8bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_setv2sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_setv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_setv2df (rtx, rtx, rtx, rtx);
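/* Shift builders: logical/arithmetic right and left shifts by immediate,
   SSHL/USHL-style register shifts (with explicit signed/unsigned forms),
   and vec_shr_*, the whole-register shift used when extracting across
   lanes.  */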
extern rtx gen_aarch64_simd_lshrv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_lshrv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_lshrv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_lshrv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_lshrv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_lshrv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_lshrv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_ashrv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_ashrv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_ashrv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_ashrv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_ashrv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_ashrv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_ashrv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_imm_shlv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_imm_shlv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_imm_shlv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_imm_shlv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_imm_shlv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_imm_shlv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_imm_shlv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_sshlv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_sshlv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_sshlv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_sshlv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_sshlv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_sshlv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_sshlv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_shlv8qi_unsigned (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_shlv16qi_unsigned (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_shlv4hi_unsigned (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_shlv8hi_unsigned (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_shlv2si_unsigned (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_shlv4si_unsigned (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_shlv2di_unsigned (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_shlv8qi_signed (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_shlv16qi_signed (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_shlv4hi_signed (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_shlv8hi_signed (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_shlv2si_signed (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_shlv4si_signed (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_reg_shlv2di_signed (rtx, rtx, rtx);
extern rtx gen_vec_shr_v8qi (rtx, rtx, rtx);
extern rtx gen_vec_shr_v4hi (rtx, rtx, rtx);
extern rtx gen_vec_shr_v4hf (rtx, rtx, rtx);
extern rtx gen_vec_shr_v2si (rtx, rtx, rtx);
extern rtx gen_vec_shr_v2sf (rtx, rtx, rtx);
extern rtx gen_vec_shr_v4bf (rtx, rtx, rtx);
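/* MLA/MLS multiply-accumulate builders: destination plus accumulator and
   two multiplicand operands.  */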
extern rtx gen_aarch64_mlav8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_mlav16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_mlav4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_mlav8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_mlav2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_mlav4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_mlsv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_mlsv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_mlsv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_mlsv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_mlsv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_mlsv4si (rtx, rtx, rtx, rtx);
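/* Integer min/max (standard smax/smin/umax/umin patterns), then the
   pairwise SMAXP/SMINP/UMAXP/UMINP builders; the floating-point pairwise
   set distinguishes _nan (NaN-propagating FMAXP/FMINP) from the
   number-returning FMAXNMP/FMINNMP forms.  */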
extern rtx gen_smaxv8qi3 (rtx, rtx, rtx);
extern rtx gen_sminv8qi3 (rtx, rtx, rtx);
extern rtx gen_umaxv8qi3 (rtx, rtx, rtx);
extern rtx gen_uminv8qi3 (rtx, rtx, rtx);
extern rtx gen_smaxv16qi3 (rtx, rtx, rtx);
extern rtx gen_sminv16qi3 (rtx, rtx, rtx);
extern rtx gen_umaxv16qi3 (rtx, rtx, rtx);
extern rtx gen_uminv16qi3 (rtx, rtx, rtx);
extern rtx gen_smaxv4hi3 (rtx, rtx, rtx);
extern rtx gen_sminv4hi3 (rtx, rtx, rtx);
extern rtx gen_umaxv4hi3 (rtx, rtx, rtx);
extern rtx gen_uminv4hi3 (rtx, rtx, rtx);
extern rtx gen_smaxv8hi3 (rtx, rtx, rtx);
extern rtx gen_sminv8hi3 (rtx, rtx, rtx);
extern rtx gen_umaxv8hi3 (rtx, rtx, rtx);
extern rtx gen_uminv8hi3 (rtx, rtx, rtx);
extern rtx gen_smaxv2si3 (rtx, rtx, rtx);
extern rtx gen_sminv2si3 (rtx, rtx, rtx);
extern rtx gen_umaxv2si3 (rtx, rtx, rtx);
extern rtx gen_uminv2si3 (rtx, rtx, rtx);
extern rtx gen_smaxv4si3 (rtx, rtx, rtx);
extern rtx gen_sminv4si3 (rtx, rtx, rtx);
extern rtx gen_umaxv4si3 (rtx, rtx, rtx);
extern rtx gen_uminv4si3 (rtx, rtx, rtx);
extern rtx gen_aarch64_umaxpv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uminpv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_smaxpv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sminpv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_umaxpv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uminpv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_smaxpv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sminpv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_umaxpv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uminpv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_smaxpv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sminpv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_umaxpv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uminpv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_smaxpv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sminpv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_umaxpv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_uminpv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_smaxpv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_sminpv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_umaxpv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_uminpv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_smaxpv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sminpv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_smax_nanpv4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_smin_nanpv4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_smaxpv4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sminpv4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_smax_nanpv8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_smin_nanpv8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_smaxpv8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sminpv8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_smax_nanpv2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_smin_nanpv2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_smaxpv2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sminpv2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_smax_nanpv4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_smin_nanpv4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_smaxpv4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sminpv4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_smax_nanpv2df (rtx, rtx, rtx);
extern rtx gen_aarch64_smin_nanpv2df (rtx, rtx, rtx);
extern rtx gen_aarch64_smaxpv2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sminpv2df (rtx, rtx, rtx);
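/* move_lo_quad/move_hi_quad: write one 64-bit half of a 128-bit vector
   register, with separate big-endian (_be) pattern variants.  */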
extern rtx gen_move_lo_quad_internal_v16qi (rtx, rtx);
extern rtx gen_move_lo_quad_internal_v8hi (rtx, rtx);
extern rtx gen_move_lo_quad_internal_v4si (rtx, rtx);
extern rtx gen_move_lo_quad_internal_v8hf (rtx, rtx);
extern rtx gen_move_lo_quad_internal_v8bf (rtx, rtx);
extern rtx gen_move_lo_quad_internal_v4sf (rtx, rtx);
extern rtx gen_move_lo_quad_internal_v2di (rtx, rtx);
extern rtx gen_move_lo_quad_internal_v2df (rtx, rtx);
extern rtx gen_move_lo_quad_internal_be_v16qi (rtx, rtx);
extern rtx gen_move_lo_quad_internal_be_v8hi (rtx, rtx);
extern rtx gen_move_lo_quad_internal_be_v4si (rtx, rtx);
extern rtx gen_move_lo_quad_internal_be_v8hf (rtx, rtx);
extern rtx gen_move_lo_quad_internal_be_v8bf (rtx, rtx);
extern rtx gen_move_lo_quad_internal_be_v4sf (rtx, rtx);
extern rtx gen_move_lo_quad_internal_be_v2di (rtx, rtx);
extern rtx gen_move_lo_quad_internal_be_v2df (rtx, rtx);
extern rtx gen_aarch64_simd_move_hi_quad_v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_move_hi_quad_v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_move_hi_quad_v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_move_hi_quad_v2di (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_move_hi_quad_v8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_move_hi_quad_v8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_move_hi_quad_v4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_move_hi_quad_v2df (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_move_hi_quad_be_v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_move_hi_quad_be_v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_move_hi_quad_be_v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_move_hi_quad_be_v2di (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_move_hi_quad_be_v8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_move_hi_quad_be_v8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_move_hi_quad_be_v4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_move_hi_quad_be_v2df (rtx, rtx, rtx);
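/* Narrowing packs (XTN/XTN2) and sign/zero-extending unpacks of the low
   and high halves, plus the widening multiplies (SMULL/UMULL, their
   high-half and _lane/_laneq forms) built on the same halves.  */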
extern rtx gen_aarch64_simd_vec_pack_trunc_v8hi (rtx, rtx);
extern rtx gen_aarch64_simd_vec_pack_trunc_v4si (rtx, rtx);
extern rtx gen_aarch64_simd_vec_pack_trunc_v2di (rtx, rtx);
extern rtx gen_vec_pack_trunc_v8hi (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_v4si (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_v2di (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_unpacks_lo_v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_unpacku_lo_v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_unpacks_lo_v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_unpacku_lo_v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_unpacks_lo_v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_unpacku_lo_v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_unpacks_hi_v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_unpacku_hi_v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_unpacks_hi_v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_unpacku_hi_v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_unpacks_hi_v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_unpacku_hi_v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_smult_lo_v16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_umult_lo_v16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_smult_lo_v8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_umult_lo_v8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_smult_lo_v4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_umult_lo_v4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_intrinsic_vec_smult_lo_v8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_intrinsic_vec_umult_lo_v8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_intrinsic_vec_smult_lo_v4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_intrinsic_vec_umult_lo_v4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_intrinsic_vec_smult_lo_v2si (rtx, rtx, rtx);
extern rtx gen_aarch64_intrinsic_vec_umult_lo_v2si (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_smult_hi_v16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_umult_hi_v16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_smult_hi_v8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_umult_hi_v8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_smult_hi_v4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_umult_hi_v4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_smult_lane_v4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_umult_lane_v4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_smult_laneq_v4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_umult_laneq_v4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_smult_lane_v2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_umult_lane_v2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_smult_laneq_v2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_umult_laneq_v2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_smlal_lane_v4hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_umlal_lane_v4hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_smlal_laneq_v4hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_umlal_laneq_v4hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_smlal_lane_v2si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_umlal_lane_v2si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_smlal_laneq_v2si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_umlal_laneq_v2si (rtx, rtx, rtx, rtx, rtx);
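/* Vector floating-point arithmetic: add/sub/mul, negate/abs, fused
   multiply-add (fmaM4 -> FMLA) and fused negate-multiply-add (fnmaM4 ->
   FMLS), followed by the FRINT* rounding patterns (btrunc, ceil, floor,
   frintn, nearbyint, rint, round).  A hypothetical a*b+c expansion,
   assuming the standard fmaM4 operand order op0 = op1*op2 + op3:

     rtx dst = gen_reg_rtx (V2DFmode);
     emit_insn (gen_fmav2df4 (dst, a, b, c));  /* dst = a * b + c  */
   */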
extern rtx gen_addv4hf3 (rtx, rtx, rtx);
extern rtx gen_addv8hf3 (rtx, rtx, rtx);
extern rtx gen_addv2sf3 (rtx, rtx, rtx);
extern rtx gen_addv4sf3 (rtx, rtx, rtx);
extern rtx gen_addv2df3 (rtx, rtx, rtx);
extern rtx gen_subv4hf3 (rtx, rtx, rtx);
extern rtx gen_subv8hf3 (rtx, rtx, rtx);
extern rtx gen_subv2sf3 (rtx, rtx, rtx);
extern rtx gen_subv4sf3 (rtx, rtx, rtx);
extern rtx gen_subv2df3 (rtx, rtx, rtx);
extern rtx gen_mulv4hf3 (rtx, rtx, rtx);
extern rtx gen_mulv8hf3 (rtx, rtx, rtx);
extern rtx gen_mulv2sf3 (rtx, rtx, rtx);
extern rtx gen_mulv4sf3 (rtx, rtx, rtx);
extern rtx gen_mulv2df3 (rtx, rtx, rtx);
extern rtx gen_negv4hf2 (rtx, rtx);
extern rtx gen_negv8hf2 (rtx, rtx);
extern rtx gen_negv2sf2 (rtx, rtx);
extern rtx gen_negv4sf2 (rtx, rtx);
extern rtx gen_negv2df2 (rtx, rtx);
extern rtx gen_absv4hf2 (rtx, rtx);
extern rtx gen_absv8hf2 (rtx, rtx);
extern rtx gen_absv2sf2 (rtx, rtx);
extern rtx gen_absv4sf2 (rtx, rtx);
extern rtx gen_absv2df2 (rtx, rtx);
extern rtx gen_fmav4hf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmav8hf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmav2sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmav4sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmav2df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmav4hf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmav8hf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmav2sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmav4sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmav2df4 (rtx, rtx, rtx, rtx);
extern rtx gen_btruncv4hf2 (rtx, rtx);
extern rtx gen_ceilv4hf2 (rtx, rtx);
extern rtx gen_floorv4hf2 (rtx, rtx);
extern rtx gen_frintnv4hf2 (rtx, rtx);
extern rtx gen_nearbyintv4hf2 (rtx, rtx);
extern rtx gen_rintv4hf2 (rtx, rtx);
extern rtx gen_roundv4hf2 (rtx, rtx);
extern rtx gen_btruncv8hf2 (rtx, rtx);
extern rtx gen_ceilv8hf2 (rtx, rtx);
extern rtx gen_floorv8hf2 (rtx, rtx);
extern rtx gen_frintnv8hf2 (rtx, rtx);
extern rtx gen_nearbyintv8hf2 (rtx, rtx);
extern rtx gen_rintv8hf2 (rtx, rtx);
extern rtx gen_roundv8hf2 (rtx, rtx);
extern rtx gen_btruncv2sf2 (rtx, rtx);
extern rtx gen_ceilv2sf2 (rtx, rtx);
extern rtx gen_floorv2sf2 (rtx, rtx);
extern rtx gen_frintnv2sf2 (rtx, rtx);
extern rtx gen_nearbyintv2sf2 (rtx, rtx);
extern rtx gen_rintv2sf2 (rtx, rtx);
extern rtx gen_roundv2sf2 (rtx, rtx);
extern rtx gen_btruncv4sf2 (rtx, rtx);
extern rtx gen_ceilv4sf2 (rtx, rtx);
extern rtx gen_floorv4sf2 (rtx, rtx);
extern rtx gen_frintnv4sf2 (rtx, rtx);
extern rtx gen_nearbyintv4sf2 (rtx, rtx);
extern rtx gen_rintv4sf2 (rtx, rtx);
extern rtx gen_roundv4sf2 (rtx, rtx);
extern rtx gen_btruncv2df2 (rtx, rtx);
extern rtx gen_ceilv2df2 (rtx, rtx);
extern rtx gen_floorv2df2 (rtx, rtx);
extern rtx gen_frintnv2df2 (rtx, rtx);
extern rtx gen_nearbyintv2df2 (rtx, rtx);
extern rtx gen_rintv2df2 (rtx, rtx);
extern rtx gen_roundv2df2 (rtx, rtx);
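/* Float-to-integer conversions with an explicit rounding mode encoded in
   the name: lbtrunc (toward zero), lceil, lfloor, lround (to nearest,
   ties away), lfrintn (to nearest, ties even); a 'u' infix marks an
   unsigned result.  Plain fix_trunc/float standard patterns follow.  */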
extern rtx gen_lbtruncv4hfv4hi2 (rtx, rtx);
extern rtx gen_lceilv4hfv4hi2 (rtx, rtx);
extern rtx gen_lfloorv4hfv4hi2 (rtx, rtx);
extern rtx gen_lroundv4hfv4hi2 (rtx, rtx);
extern rtx gen_lfrintnv4hfv4hi2 (rtx, rtx);
extern rtx gen_lbtruncuv4hfv4hi2 (rtx, rtx);
extern rtx gen_lceiluv4hfv4hi2 (rtx, rtx);
extern rtx gen_lflooruv4hfv4hi2 (rtx, rtx);
extern rtx gen_lrounduv4hfv4hi2 (rtx, rtx);
extern rtx gen_lfrintnuv4hfv4hi2 (rtx, rtx);
extern rtx gen_lbtruncv8hfv8hi2 (rtx, rtx);
extern rtx gen_lceilv8hfv8hi2 (rtx, rtx);
extern rtx gen_lfloorv8hfv8hi2 (rtx, rtx);
extern rtx gen_lroundv8hfv8hi2 (rtx, rtx);
extern rtx gen_lfrintnv8hfv8hi2 (rtx, rtx);
extern rtx gen_lbtruncuv8hfv8hi2 (rtx, rtx);
extern rtx gen_lceiluv8hfv8hi2 (rtx, rtx);
extern rtx gen_lflooruv8hfv8hi2 (rtx, rtx);
extern rtx gen_lrounduv8hfv8hi2 (rtx, rtx);
extern rtx gen_lfrintnuv8hfv8hi2 (rtx, rtx);
extern rtx gen_lbtruncv2sfv2si2 (rtx, rtx);
extern rtx gen_lceilv2sfv2si2 (rtx, rtx);
extern rtx gen_lfloorv2sfv2si2 (rtx, rtx);
extern rtx gen_lroundv2sfv2si2 (rtx, rtx);
extern rtx gen_lfrintnv2sfv2si2 (rtx, rtx);
extern rtx gen_lbtruncuv2sfv2si2 (rtx, rtx);
extern rtx gen_lceiluv2sfv2si2 (rtx, rtx);
extern rtx gen_lflooruv2sfv2si2 (rtx, rtx);
extern rtx gen_lrounduv2sfv2si2 (rtx, rtx);
extern rtx gen_lfrintnuv2sfv2si2 (rtx, rtx);
extern rtx gen_lbtruncv4sfv4si2 (rtx, rtx);
extern rtx gen_lceilv4sfv4si2 (rtx, rtx);
extern rtx gen_lfloorv4sfv4si2 (rtx, rtx);
extern rtx gen_lroundv4sfv4si2 (rtx, rtx);
extern rtx gen_lfrintnv4sfv4si2 (rtx, rtx);
extern rtx gen_lbtruncuv4sfv4si2 (rtx, rtx);
extern rtx gen_lceiluv4sfv4si2 (rtx, rtx);
extern rtx gen_lflooruv4sfv4si2 (rtx, rtx);
extern rtx gen_lrounduv4sfv4si2 (rtx, rtx);
extern rtx gen_lfrintnuv4sfv4si2 (rtx, rtx);
extern rtx gen_lbtruncv2dfv2di2 (rtx, rtx);
extern rtx gen_lceilv2dfv2di2 (rtx, rtx);
extern rtx gen_lfloorv2dfv2di2 (rtx, rtx);
extern rtx gen_lroundv2dfv2di2 (rtx, rtx);
extern rtx gen_lfrintnv2dfv2di2 (rtx, rtx);
extern rtx gen_lbtruncuv2dfv2di2 (rtx, rtx);
extern rtx gen_lceiluv2dfv2di2 (rtx, rtx);
extern rtx gen_lflooruv2dfv2di2 (rtx, rtx);
extern rtx gen_lrounduv2dfv2di2 (rtx, rtx);
extern rtx gen_lfrintnuv2dfv2di2 (rtx, rtx);
extern rtx gen_lbtrunchfhi2 (rtx, rtx);
extern rtx gen_lceilhfhi2 (rtx, rtx);
extern rtx gen_lfloorhfhi2 (rtx, rtx);
extern rtx gen_lroundhfhi2 (rtx, rtx);
extern rtx gen_lfrintnhfhi2 (rtx, rtx);
extern rtx gen_lbtruncuhfhi2 (rtx, rtx);
extern rtx gen_lceiluhfhi2 (rtx, rtx);
extern rtx gen_lflooruhfhi2 (rtx, rtx);
extern rtx gen_lrounduhfhi2 (rtx, rtx);
extern rtx gen_lfrintnuhfhi2 (rtx, rtx);
extern rtx gen_fix_trunchfhi2 (rtx, rtx);
extern rtx gen_fixuns_trunchfhi2 (rtx, rtx);
extern rtx gen_floathihf2 (rtx, rtx);
extern rtx gen_floatunshihf2 (rtx, rtx);
extern rtx gen_floatv4hiv4hf2 (rtx, rtx);
extern rtx gen_floatunsv4hiv4hf2 (rtx, rtx);
extern rtx gen_floatv8hiv8hf2 (rtx, rtx);
extern rtx gen_floatunsv8hiv8hf2 (rtx, rtx);
extern rtx gen_floatv2siv2sf2 (rtx, rtx);
extern rtx gen_floatunsv2siv2sf2 (rtx, rtx);
extern rtx gen_floatv4siv4sf2 (rtx, rtx);
extern rtx gen_floatunsv4siv4sf2 (rtx, rtx);
extern rtx gen_floatv2div2df2 (rtx, rtx);
extern rtx gen_floatunsv2div2df2 (rtx, rtx);
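/* Floating-point width conversions (FCVTL/FCVTL2 via the unpacks
   builders, FCVTN/FCVTN2 via float_truncate with _le/_be high-half
   variants) and fixed-point FCVTZS/FCVTZU and SCVTF/UCVTF builders whose
   third operand gives the number of fractional bits.  */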
extern rtx gen_aarch64_simd_vec_unpacks_lo_v8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_unpacks_lo_v4sf (rtx, rtx, rtx);
extern rtx gen_fcvtzsv4hf3 (rtx, rtx, rtx);
extern rtx gen_fcvtzuv4hf3 (rtx, rtx, rtx);
extern rtx gen_fcvtzsv8hf3 (rtx, rtx, rtx);
extern rtx gen_fcvtzuv8hf3 (rtx, rtx, rtx);
extern rtx gen_fcvtzsv2sf3 (rtx, rtx, rtx);
extern rtx gen_fcvtzuv2sf3 (rtx, rtx, rtx);
extern rtx gen_fcvtzsv4sf3 (rtx, rtx, rtx);
extern rtx gen_fcvtzuv4sf3 (rtx, rtx, rtx);
extern rtx gen_fcvtzsv2df3 (rtx, rtx, rtx);
extern rtx gen_fcvtzuv2df3 (rtx, rtx, rtx);
extern rtx gen_scvtfv4hi3 (rtx, rtx, rtx);
extern rtx gen_ucvtfv4hi3 (rtx, rtx, rtx);
extern rtx gen_scvtfv8hi3 (rtx, rtx, rtx);
extern rtx gen_ucvtfv8hi3 (rtx, rtx, rtx);
extern rtx gen_scvtfv2si3 (rtx, rtx, rtx);
extern rtx gen_ucvtfv2si3 (rtx, rtx, rtx);
extern rtx gen_scvtfv4si3 (rtx, rtx, rtx);
extern rtx gen_ucvtfv4si3 (rtx, rtx, rtx);
extern rtx gen_scvtfv2di3 (rtx, rtx, rtx);
extern rtx gen_ucvtfv2di3 (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_unpacks_hi_v8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_vec_unpacks_hi_v4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_float_extend_lo_v2df (rtx, rtx);
extern rtx gen_aarch64_float_extend_lo_v4sf (rtx, rtx);
extern rtx gen_aarch64_float_truncate_lo_v2sf (rtx, rtx);
extern rtx gen_aarch64_float_truncate_lo_v4hf (rtx, rtx);
extern rtx gen_aarch64_float_truncate_hi_v4sf_le (rtx, rtx, rtx);
extern rtx gen_aarch64_float_truncate_hi_v8hf_le (rtx, rtx, rtx);
extern rtx gen_aarch64_float_truncate_hi_v4sf_be (rtx, rtx, rtx);
extern rtx gen_aarch64_float_truncate_hi_v8hf_be (rtx, rtx, rtx);
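/* Floating-point min/max: the plain smax/smin patterns use the
   number-returning FMAXNM/FMINNM instructions, while smax_nan/smin_nan
   map to the NaN-propagating FMAX/FMIN.  */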
extern rtx gen_smaxv4hf3 (rtx, rtx, rtx);
extern rtx gen_sminv4hf3 (rtx, rtx, rtx);
extern rtx gen_smaxv8hf3 (rtx, rtx, rtx);
extern rtx gen_sminv8hf3 (rtx, rtx, rtx);
extern rtx gen_smaxv2sf3 (rtx, rtx, rtx);
extern rtx gen_sminv2sf3 (rtx, rtx, rtx);
extern rtx gen_smaxv4sf3 (rtx, rtx, rtx);
extern rtx gen_sminv4sf3 (rtx, rtx, rtx);
extern rtx gen_smaxv2df3 (rtx, rtx, rtx);
extern rtx gen_sminv2df3 (rtx, rtx, rtx);
extern rtx gen_smax_nanv4hf3 (rtx, rtx, rtx);
extern rtx gen_smin_nanv4hf3 (rtx, rtx, rtx);
extern rtx gen_fmaxv4hf3 (rtx, rtx, rtx);
extern rtx gen_fminv4hf3 (rtx, rtx, rtx);
extern rtx gen_smax_nanv8hf3 (rtx, rtx, rtx);
extern rtx gen_smin_nanv8hf3 (rtx, rtx, rtx);
extern rtx gen_fmaxv8hf3 (rtx, rtx, rtx);
extern rtx gen_fminv8hf3 (rtx, rtx, rtx);
extern rtx gen_smax_nanv2sf3 (rtx, rtx, rtx);
extern rtx gen_smin_nanv2sf3 (rtx, rtx, rtx);
extern rtx gen_fmaxv2sf3 (rtx, rtx, rtx);
extern rtx gen_fminv2sf3 (rtx, rtx, rtx);
extern rtx gen_smax_nanv4sf3 (rtx, rtx, rtx);
extern rtx gen_smin_nanv4sf3 (rtx, rtx, rtx);
extern rtx gen_fmaxv4sf3 (rtx, rtx, rtx);
extern rtx gen_fminv4sf3 (rtx, rtx, rtx);
extern rtx gen_smax_nanv2df3 (rtx, rtx, rtx);
extern rtx gen_smin_nanv2df3 (rtx, rtx, rtx);
extern rtx gen_fmaxv2df3 (rtx, rtx, rtx);
extern rtx gen_fminv2df3 (rtx, rtx, rtx);
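/* Pairwise FADDP and across-lanes reductions (ADDV-style internals,
   zero-extending byte/halfword sum forms, and the SMAXV/UMAXV-style
   min/max internals), plus bit counting: clrsb (CLS), clz (CLZ) and
   popcount (CNT).  */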
extern rtx gen_aarch64_faddpv4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_faddpv8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_faddpv2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_faddpv4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_faddpv2df (rtx, rtx, rtx);
extern rtx gen_aarch64_reduc_plus_internalv8qi (rtx, rtx);
extern rtx gen_aarch64_reduc_plus_internalv16qi (rtx, rtx);
extern rtx gen_aarch64_reduc_plus_internalv4hi (rtx, rtx);
extern rtx gen_aarch64_reduc_plus_internalv8hi (rtx, rtx);
extern rtx gen_aarch64_reduc_plus_internalv4si (rtx, rtx);
extern rtx gen_aarch64_reduc_plus_internalv2di (rtx, rtx);
extern rtx gen_aarch64_zero_extendsi_reduc_plus_v8qi (rtx, rtx);
extern rtx gen_aarch64_zero_extenddi_reduc_plus_v8qi (rtx, rtx);
extern rtx gen_aarch64_zero_extendsi_reduc_plus_v16qi (rtx, rtx);
extern rtx gen_aarch64_zero_extenddi_reduc_plus_v16qi (rtx, rtx);
extern rtx gen_aarch64_zero_extendsi_reduc_plus_v4hi (rtx, rtx);
extern rtx gen_aarch64_zero_extenddi_reduc_plus_v4hi (rtx, rtx);
extern rtx gen_aarch64_zero_extendsi_reduc_plus_v8hi (rtx, rtx);
extern rtx gen_aarch64_zero_extenddi_reduc_plus_v8hi (rtx, rtx);
extern rtx gen_aarch64_reduc_plus_internalv2si (rtx, rtx);
extern rtx gen_reduc_plus_scal_v2sf (rtx, rtx);
extern rtx gen_reduc_plus_scal_v2df (rtx, rtx);
extern rtx gen_clrsbv8qi2 (rtx, rtx);
extern rtx gen_clrsbv16qi2 (rtx, rtx);
extern rtx gen_clrsbv4hi2 (rtx, rtx);
extern rtx gen_clrsbv8hi2 (rtx, rtx);
extern rtx gen_clrsbv2si2 (rtx, rtx);
extern rtx gen_clrsbv4si2 (rtx, rtx);
extern rtx gen_clzv8qi2 (rtx, rtx);
extern rtx gen_clzv16qi2 (rtx, rtx);
extern rtx gen_clzv4hi2 (rtx, rtx);
extern rtx gen_clzv8hi2 (rtx, rtx);
extern rtx gen_clzv2si2 (rtx, rtx);
extern rtx gen_clzv4si2 (rtx, rtx);
extern rtx gen_popcountv8qi2 (rtx, rtx);
extern rtx gen_popcountv16qi2 (rtx, rtx);
extern rtx gen_aarch64_reduc_umax_internalv8qi (rtx, rtx);
extern rtx gen_aarch64_reduc_umin_internalv8qi (rtx, rtx);
extern rtx gen_aarch64_reduc_smax_internalv8qi (rtx, rtx);
extern rtx gen_aarch64_reduc_smin_internalv8qi (rtx, rtx);
extern rtx gen_aarch64_reduc_umax_internalv16qi (rtx, rtx);
extern rtx gen_aarch64_reduc_umin_internalv16qi (rtx, rtx);
extern rtx gen_aarch64_reduc_smax_internalv16qi (rtx, rtx);
extern rtx gen_aarch64_reduc_smin_internalv16qi (rtx, rtx);
extern rtx gen_aarch64_reduc_umax_internalv4hi (rtx, rtx);
extern rtx gen_aarch64_reduc_umin_internalv4hi (rtx, rtx);
extern rtx gen_aarch64_reduc_smax_internalv4hi (rtx, rtx);
extern rtx gen_aarch64_reduc_smin_internalv4hi (rtx, rtx);
extern rtx gen_aarch64_reduc_umax_internalv8hi (rtx, rtx);
extern rtx gen_aarch64_reduc_umin_internalv8hi (rtx, rtx);
extern rtx gen_aarch64_reduc_smax_internalv8hi (rtx, rtx);
extern rtx gen_aarch64_reduc_smin_internalv8hi (rtx, rtx);
extern rtx gen_aarch64_reduc_umax_internalv4si (rtx, rtx);
extern rtx gen_aarch64_reduc_umin_internalv4si (rtx, rtx);
extern rtx gen_aarch64_reduc_smax_internalv4si (rtx, rtx);
extern rtx gen_aarch64_reduc_smin_internalv4si (rtx, rtx);
extern rtx gen_aarch64_reduc_umax_internalv2si (rtx, rtx);
extern rtx gen_aarch64_reduc_umin_internalv2si (rtx, rtx);
extern rtx gen_aarch64_reduc_smax_internalv2si (rtx, rtx);
extern rtx gen_aarch64_reduc_smin_internalv2si (rtx, rtx);
extern rtx gen_aarch64_reduc_smax_nan_internalv4hf (rtx, rtx);
extern rtx gen_aarch64_reduc_smin_nan_internalv4hf (rtx, rtx);
extern rtx gen_aarch64_reduc_smax_internalv4hf (rtx, rtx);
extern rtx gen_aarch64_reduc_smin_internalv4hf (rtx, rtx);
extern rtx gen_aarch64_reduc_smax_nan_internalv8hf (rtx, rtx);
extern rtx gen_aarch64_reduc_smin_nan_internalv8hf (rtx, rtx);
extern rtx gen_aarch64_reduc_smax_internalv8hf (rtx, rtx);
extern rtx gen_aarch64_reduc_smin_internalv8hf (rtx, rtx);
extern rtx gen_aarch64_reduc_smax_nan_internalv2sf (rtx, rtx);
extern rtx gen_aarch64_reduc_smin_nan_internalv2sf (rtx, rtx);
extern rtx gen_aarch64_reduc_smax_internalv2sf (rtx, rtx);
extern rtx gen_aarch64_reduc_smin_internalv2sf (rtx, rtx);
extern rtx gen_aarch64_reduc_smax_nan_internalv4sf (rtx, rtx);
extern rtx gen_aarch64_reduc_smin_nan_internalv4sf (rtx, rtx);
extern rtx gen_aarch64_reduc_smax_internalv4sf (rtx, rtx);
extern rtx gen_aarch64_reduc_smin_internalv4sf (rtx, rtx);
extern rtx gen_aarch64_reduc_smax_nan_internalv2df (rtx, rtx);
extern rtx gen_aarch64_reduc_smin_nan_internalv2df (rtx, rtx);
extern rtx gen_aarch64_reduc_smax_internalv2df (rtx, rtx);
extern rtx gen_aarch64_reduc_smin_internalv2df (rtx, rtx);
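/* BSL (bitwise select) builders: each bit of the first source selects
   between the corresponding bits of the other two sources.  */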
extern rtx gen_aarch64_simd_bslv8qi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bslv16qi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bslv4hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bslv8hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bslv2si_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bslv4si_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bslv2di_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bsldi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bsldi_alt (rtx, rtx, rtx, rtx);
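/* Lane extraction (get_lane), LDP/STP-based paired-lane load/store
   builders, and combinez, which pairs a 64-bit value with a zeroed upper
   half (with _be big-endian variants).  */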
extern rtx gen_aarch64_get_lanev8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_lanev16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_lanev4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_lanev8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_lanev2si (rtx, rtx, rtx);
extern rtx gen_aarch64_get_lanev4si (rtx, rtx, rtx);
extern rtx gen_aarch64_get_lanev2di (rtx, rtx, rtx);
extern rtx gen_aarch64_get_lanev4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_lanev8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_lanev4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_lanev8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_lanev2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_lanev4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_lanev2df (rtx, rtx, rtx);
extern rtx gen_load_pair_lanesv8qi (rtx, rtx, rtx);
extern rtx gen_load_pair_lanesv4hi (rtx, rtx, rtx);
extern rtx gen_load_pair_lanesv4bf (rtx, rtx, rtx);
extern rtx gen_load_pair_lanesv4hf (rtx, rtx, rtx);
extern rtx gen_load_pair_lanesv2si (rtx, rtx, rtx);
extern rtx gen_load_pair_lanesv2sf (rtx, rtx, rtx);
extern rtx gen_load_pair_lanesdi (rtx, rtx, rtx);
extern rtx gen_load_pair_lanesdf (rtx, rtx, rtx);
extern rtx gen_store_pair_lanesv8qi (rtx, rtx, rtx);
extern rtx gen_store_pair_lanesv4hi (rtx, rtx, rtx);
extern rtx gen_store_pair_lanesv4bf (rtx, rtx, rtx);
extern rtx gen_store_pair_lanesv4hf (rtx, rtx, rtx);
extern rtx gen_store_pair_lanesv2si (rtx, rtx, rtx);
extern rtx gen_store_pair_lanesv2sf (rtx, rtx, rtx);
extern rtx gen_store_pair_lanesdi (rtx, rtx, rtx);
extern rtx gen_store_pair_lanesdf (rtx, rtx, rtx);
extern rtx gen_aarch64_combinezv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_combinezv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_combinezv4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_combinezv4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_combinezv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_combinezv2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_combinezdi (rtx, rtx, rtx);
extern rtx gen_aarch64_combinezdf (rtx, rtx, rtx);
extern rtx gen_aarch64_combinez_bev8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_combinez_bev4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_combinez_bev4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_combinez_bev4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_combinez_bev2si (rtx, rtx, rtx);
extern rtx gen_aarch64_combinez_bev2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_combinez_bedi (rtx, rtx, rtx);
extern rtx gen_aarch64_combinez_bedf (rtx, rtx, rtx);
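/* Widening adds and subtracts: SADDL/UADDL/SSUBL/USUBL over the low or
   high input half, and the SADDW/UADDW/SSUBW/USUBW forms whose first
   source is already at the wide element size.  */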
extern rtx gen_aarch64_saddlv16qi_hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssublv16qi_hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_uaddlv16qi_hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usublv16qi_hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_saddlv8hi_hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssublv8hi_hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_uaddlv8hi_hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usublv8hi_hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_saddlv4si_hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssublv4si_hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_uaddlv4si_hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usublv4si_hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_saddlv16qi_lo_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssublv16qi_lo_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_uaddlv16qi_lo_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usublv16qi_lo_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_saddlv8hi_lo_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssublv8hi_lo_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_uaddlv8hi_lo_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usublv8hi_lo_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_saddlv4si_lo_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssublv4si_lo_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_uaddlv4si_lo_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usublv4si_lo_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_saddlv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_ssublv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uaddlv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_usublv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_saddlv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_ssublv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uaddlv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_usublv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_saddlv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_ssublv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_uaddlv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_usublv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_ssubwv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_usubwv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_ssubwv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_usubwv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_ssubwv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_usubwv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_ssubwv16qi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usubwv16qi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssubwv8hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usubwv8hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssubwv4si_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usubwv4si_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssubw2v16qi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usubw2v16qi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssubw2v8hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usubw2v8hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssubw2v4si_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usubw2v4si_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_saddwv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uaddwv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_saddwv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uaddwv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_saddwv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_uaddwv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_saddwv16qi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_uaddwv16qi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_saddwv8hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_uaddwv8hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_saddwv4si_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_uaddwv4si_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_saddw2v16qi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_uaddw2v16qi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_saddw2v8hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_uaddw2v8hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_saddw2v4si_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_uaddw2v4si_internal (rtx, rtx, rtx, rtx);
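/* Halving arithmetic: SHADD/UHADD, rounding SRHADD/URHADD, and
   SHSUB/UHSUB; results stay at the input element width.  */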
extern rtx gen_aarch64_shaddv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uhaddv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_srhaddv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_urhaddv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_shsubv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uhsubv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_shaddv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uhaddv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_srhaddv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_urhaddv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_shsubv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uhsubv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_shaddv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uhaddv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_srhaddv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_urhaddv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_shsubv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uhsubv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_shaddv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uhaddv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_srhaddv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_urhaddv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_shsubv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uhsubv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_shaddv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_uhaddv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_srhaddv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_urhaddv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_shsubv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_uhsubv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_shaddv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_uhaddv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_srhaddv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_urhaddv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_shsubv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_uhsubv4si (rtx, rtx, rtx);
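/* Narrowing high-half arithmetic: ADDHN/SUBHN and the rounding
   RADDHN/RSUBHN forms, plus the _2 variants that write the upper half of
   a 128-bit destination.  */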
extern rtx gen_aarch64_addhnv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_raddhnv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_subhnv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_rsubhnv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_addhnv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_raddhnv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_subhnv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_rsubhnv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_addhnv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_raddhnv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_subhnv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_rsubhnv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_addhn2v8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_raddhn2v8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_subhn2v8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_rsubhn2v8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_addhn2v4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_raddhn2v4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_subhn2v4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_rsubhn2v4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_addhn2v2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_raddhn2v2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_subhn2v2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_rsubhn2v2di (rtx, rtx, rtx, rtx);
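/* Polynomial multiply (PMUL) and FMULX, the multiply whose 0 * infinity
   case returns +/-2.0, as used by the vmulx intrinsics.  */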
extern rtx gen_aarch64_pmulv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pmulv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_fmulxv4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_fmulxv8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_fmulxv2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_fmulxv4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_fmulxv2df (rtx, rtx, rtx);
extern rtx gen_aarch64_fmulxhf (rtx, rtx, rtx);
extern rtx gen_aarch64_fmulxsf (rtx, rtx, rtx);
extern rtx gen_aarch64_fmulxdf (rtx, rtx, rtx);
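/* Saturating add/sub (SQADD/UQADD/SQSUB/UQSUB) for vector and scalar
   modes, then SUQADD/USQADD, which saturate while accumulating a value
   of the opposite signedness.  */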
extern rtx gen_aarch64_sqaddv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqaddv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqsubv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqsubv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqaddv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqaddv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqsubv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqsubv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqaddv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqaddv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqsubv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqsubv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqaddv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqaddv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqsubv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqsubv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqaddv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_uqaddv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqsubv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_uqsubv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqaddv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_uqaddv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqsubv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_uqsubv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqaddv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_uqaddv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sqsubv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_uqsubv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sqaddqi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqaddqi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqsubqi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqsubqi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqaddhi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqaddhi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqsubhi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqsubhi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqaddsi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqaddsi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqsubsi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqsubsi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqadddi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqadddi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqsubdi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqsubdi (rtx, rtx, rtx);
extern rtx gen_aarch64_suqaddv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_usqaddv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_suqaddv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_usqaddv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_suqaddv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_usqaddv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_suqaddv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_usqaddv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_suqaddv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_usqaddv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_suqaddv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_usqaddv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_suqaddv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_usqaddv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_suqaddqi (rtx, rtx, rtx);
extern rtx gen_aarch64_usqaddqi (rtx, rtx, rtx);
extern rtx gen_aarch64_suqaddhi (rtx, rtx, rtx);
extern rtx gen_aarch64_usqaddhi (rtx, rtx, rtx);
extern rtx gen_aarch64_suqaddsi (rtx, rtx, rtx);
extern rtx gen_aarch64_usqaddsi (rtx, rtx, rtx);
extern rtx gen_aarch64_suqadddi (rtx, rtx, rtx);
extern rtx gen_aarch64_usqadddi (rtx, rtx, rtx);
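/* Saturating narrows: SQXTUN (signed input, unsigned result; the
   sqmovun builders) and SQXTN/UQXTN (sqmovn/uqmovn), followed by
   saturating negate/abs (SQNEG/SQABS).  */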
extern rtx gen_aarch64_sqmovunv8hi (rtx, rtx);
extern rtx gen_aarch64_sqmovunv4si (rtx, rtx);
extern rtx gen_aarch64_sqmovunv2di (rtx, rtx);
extern rtx gen_aarch64_sqmovunhi (rtx, rtx);
extern rtx gen_aarch64_sqmovunsi (rtx, rtx);
extern rtx gen_aarch64_sqmovundi (rtx, rtx);
extern rtx gen_aarch64_sqmovnv8hi (rtx, rtx);
extern rtx gen_aarch64_uqmovnv8hi (rtx, rtx);
extern rtx gen_aarch64_sqmovnv4si (rtx, rtx);
extern rtx gen_aarch64_uqmovnv4si (rtx, rtx);
extern rtx gen_aarch64_sqmovnv2di (rtx, rtx);
extern rtx gen_aarch64_uqmovnv2di (rtx, rtx);
extern rtx gen_aarch64_sqmovnhi (rtx, rtx);
extern rtx gen_aarch64_uqmovnhi (rtx, rtx);
extern rtx gen_aarch64_sqmovnsi (rtx, rtx);
extern rtx gen_aarch64_uqmovnsi (rtx, rtx);
extern rtx gen_aarch64_sqmovndi (rtx, rtx);
extern rtx gen_aarch64_uqmovndi (rtx, rtx);
extern rtx gen_aarch64_sqnegv8qi (rtx, rtx);
extern rtx gen_aarch64_sqabsv8qi (rtx, rtx);
extern rtx gen_aarch64_sqnegv16qi (rtx, rtx);
extern rtx gen_aarch64_sqabsv16qi (rtx, rtx);
extern rtx gen_aarch64_sqnegv4hi (rtx, rtx);
extern rtx gen_aarch64_sqabsv4hi (rtx, rtx);
extern rtx gen_aarch64_sqnegv8hi (rtx, rtx);
extern rtx gen_aarch64_sqabsv8hi (rtx, rtx);
extern rtx gen_aarch64_sqnegv2si (rtx, rtx);
extern rtx gen_aarch64_sqabsv2si (rtx, rtx);
extern rtx gen_aarch64_sqnegv4si (rtx, rtx);
extern rtx gen_aarch64_sqabsv4si (rtx, rtx);
extern rtx gen_aarch64_sqnegv2di (rtx, rtx);
extern rtx gen_aarch64_sqabsv2di (rtx, rtx);
extern rtx gen_aarch64_sqnegqi (rtx, rtx);
extern rtx gen_aarch64_sqabsqi (rtx, rtx);
extern rtx gen_aarch64_sqneghi (rtx, rtx);
extern rtx gen_aarch64_sqabshi (rtx, rtx);
extern rtx gen_aarch64_sqnegsi (rtx, rtx);
extern rtx gen_aarch64_sqabssi (rtx, rtx);
extern rtx gen_aarch64_sqnegdi (rtx, rtx);
extern rtx gen_aarch64_sqabsdi (rtx, rtx);
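/* Saturating doubling multiply-high: SQDMULH, rounding SQRDMULH, and the
   SQRDMLAH/SQRDMLSH accumulate forms, each with _lane/_laneq variants
   taking an extra lane-index operand.  */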
extern rtx gen_aarch64_sqdmulhv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmulhv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmulhv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmulhv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmulhv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmulhv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmulhv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmulhv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmulhhi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmulhhi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmulhsi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmulhsi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmulh_lanev4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmulh_lanev4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmulh_lanev8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmulh_lanev8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmulh_lanev2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmulh_lanev2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmulh_lanev4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmulh_lanev4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmulh_laneqv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmulh_laneqv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmulh_laneqv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmulh_laneqv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmulh_laneqv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmulh_laneqv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmulh_laneqv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmulh_laneqv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmulh_lanehi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmulh_lanehi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmulh_lanesi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmulh_lanesi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmulh_laneqhi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmulh_laneqhi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmulh_laneqsi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmulh_laneqsi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlahv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlshv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlahv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlshv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlahv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlshv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlahv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlshv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlahhi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlshhi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlahsi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlshsi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlah_lanev4hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlsh_lanev4hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlah_lanev8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlsh_lanev8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlah_lanev2si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlsh_lanev2si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlah_lanev4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlsh_lanev4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlah_lanehi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlsh_lanehi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlah_lanesi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlsh_lanesi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlah_laneqv4hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlsh_laneqv4hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlah_laneqv8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlsh_laneqv8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlah_laneqv2si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlsh_laneqv2si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlah_laneqv4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlsh_laneqv4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlah_laneqhi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlsh_laneqhi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlah_laneqsi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdmlsh_laneqsi (rtx, rtx, rtx, rtx, rtx);
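/* Saturating doubling multiply-accumulate long (SQDMLAL/SQDMLSL) and
   multiply long (SQDMULL), including _lane/_laneq, _n (duplicated
   scalar) and high-half (2) internal variants.  */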
extern rtx gen_aarch64_sqdmlalv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlslv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlalv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlslv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlalhi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlslhi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlalsi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlslsi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal_lanev4hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl_lanev4hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal_lanev2si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl_lanev2si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal_laneqv4hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl_laneqv4hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal_laneqv2si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl_laneqv2si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal_lanehi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl_lanehi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal_lanesi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl_lanesi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal_laneqhi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl_laneqhi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal_laneqsi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl_laneqsi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal_nv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl_nv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal_nv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl_nv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal2v8hi_internal (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl2v8hi_internal (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal2v4si_internal (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl2v4si_internal (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal2_lanev8hi_internal (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl2_lanev8hi_internal (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal2_lanev4si_internal (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl2_lanev4si_internal (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal2_laneqv8hi_internal (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl2_laneqv8hi_internal (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal2_laneqv4si_internal (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl2_laneqv4si_internal (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal2_nv8hi_internal (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl2_nv8hi_internal (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal2_nv4si_internal (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl2_nv4si_internal (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmullv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmullv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmullhi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmullsi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull_lanev4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull_lanev2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull_laneqv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull_laneqv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull_lanehi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull_lanesi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull_laneqhi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull_laneqsi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull_nv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull_nv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull2v8hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull2v4si_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull2_lanev8hi_internal (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull2_lanev4si_internal (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull2_laneqv8hi_internal (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull2_laneqv4si_internal (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull2_nv8hi_internal (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull2_nv4si_internal (rtx, rtx, rtx, rtx);
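/* Register-controlled shifts: SSHL/USHL, rounding SRSHL/URSHL, and the
   saturating SQSHL/UQSHL/SQRSHL/UQRSHL forms; a negative per-element
   count in the shift register shifts right.  */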
extern rtx gen_aarch64_sshlv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_ushlv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_srshlv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_urshlv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sshlv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_ushlv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_srshlv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_urshlv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sshlv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_ushlv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_srshlv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_urshlv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sshlv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_ushlv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_srshlv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_urshlv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sshlv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_ushlv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_srshlv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_urshlv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_sshlv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_ushlv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_srshlv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_urshlv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sshlv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_ushlv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_srshlv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_urshlv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sshldi (rtx, rtx, rtx);
extern rtx gen_aarch64_ushldi (rtx, rtx, rtx);
extern rtx gen_aarch64_srshldi (rtx, rtx, rtx);
extern rtx gen_aarch64_urshldi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshlv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshlv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshlv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqrshlv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshlv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshlv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshlv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqrshlv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshlv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshlv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshlv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqrshlv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshlv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshlv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshlv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqrshlv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshlv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshlv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshlv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_uqrshlv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshlv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshlv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshlv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_uqrshlv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshlv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshlv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshlv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_uqrshlv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshlqi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshlqi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshlqi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqrshlqi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshlhi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshlhi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshlhi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqrshlhi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshlsi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshlsi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshlsi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqrshlsi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshldi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshldi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshldi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqrshldi (rtx, rtx, rtx);
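/* Widening shift left by immediate: SSHLL/USHLL, plus the
   SSHLL2/USHLL2 high-half forms.  */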
extern rtx gen_aarch64_sshll_nv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_ushll_nv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sshll_nv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_ushll_nv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sshll_nv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_ushll_nv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_sshll2_nv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_ushll2_nv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sshll2_nv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_ushll2_nv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sshll2_nv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_ushll2_nv4si (rtx, rtx, rtx);
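/* Rounding shift right by immediate: SRSHR/URSHR.  */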
extern rtx gen_aarch64_srshr_nv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_urshr_nv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_srshr_nv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_urshr_nv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_srshr_nv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_urshr_nv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_srshr_nv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_urshr_nv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_srshr_nv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_urshr_nv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_srshr_nv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_urshr_nv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_srshr_nv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_urshr_nv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_srshr_ndi (rtx, rtx, rtx);
extern rtx gen_aarch64_urshr_ndi (rtx, rtx, rtx);
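/* Shift right and accumulate: SSRA/USRA, with rounding variants
   SRSRA/URSRA.  */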
extern rtx gen_aarch64_ssra_nv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usra_nv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_srsra_nv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ursra_nv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssra_nv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usra_nv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_srsra_nv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ursra_nv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssra_nv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usra_nv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_srsra_nv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ursra_nv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssra_nv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usra_nv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_srsra_nv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ursra_nv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssra_nv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usra_nv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_srsra_nv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ursra_nv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssra_nv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usra_nv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_srsra_nv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ursra_nv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssra_nv2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usra_nv2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_srsra_nv2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ursra_nv2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssra_ndi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usra_ndi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_srsra_ndi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ursra_ndi (rtx, rtx, rtx, rtx);
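/* Shift and insert: SLI/SRI.  */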
extern rtx gen_aarch64_ssli_nv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usli_nv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssri_nv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usri_nv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssli_nv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usli_nv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssri_nv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usri_nv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssli_nv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usli_nv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssri_nv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usri_nv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssli_nv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usli_nv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssri_nv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usri_nv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssli_nv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usli_nv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssri_nv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usri_nv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssli_nv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usli_nv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssri_nv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usri_nv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssli_nv2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usli_nv2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssri_nv2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usri_nv2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssli_ndi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usli_ndi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ssri_ndi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usri_ndi (rtx, rtx, rtx, rtx);
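/* Saturating shift left by immediate: SQSHLU/SQSHL/UQSHL.  */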
extern rtx gen_aarch64_sqshlu_nv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshl_nv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshl_nv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshlu_nv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshl_nv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshl_nv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshlu_nv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshl_nv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshl_nv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshlu_nv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshl_nv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshl_nv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshlu_nv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshl_nv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshl_nv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshlu_nv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshl_nv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshl_nv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshlu_nv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshl_nv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshl_nv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshlu_nqi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshl_nqi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshl_nqi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshlu_nhi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshl_nhi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshl_nhi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshlu_nsi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshl_nsi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshl_nsi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshlu_ndi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshl_ndi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshl_ndi (rtx, rtx, rtx);
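/* Saturating narrowing shift right by immediate: SQSHRUN/SQSHRN/UQSHRN
   and their rounding forms.  */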
extern rtx gen_aarch64_sqshrun_nv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshrun_nv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshrn_nv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshrn_nv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshrn_nv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqrshrn_nv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshrun_nv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshrun_nv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshrn_nv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshrn_nv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshrn_nv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_uqrshrn_nv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshrun_nv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshrun_nv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshrn_nv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshrn_nv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshrn_nv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_uqrshrn_nv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshrun_nhi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshrun_nhi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshrn_nhi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshrn_nhi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshrn_nhi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqrshrn_nhi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshrun_nsi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshrun_nsi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshrn_nsi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshrn_nsi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshrn_nsi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqrshrn_nsi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshrun_ndi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshrun_ndi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqshrn_ndi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqshrn_ndi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqrshrn_ndi (rtx, rtx, rtx);
extern rtx gen_aarch64_uqrshrn_ndi (rtx, rtx, rtx);
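/* Integer compares, yielding an all-ones element for true and
   all-zeros for false: CMLT/CMLE/CMEQ/CMGE/CMGT.  */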
extern rtx gen_aarch64_cmltv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmlev8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmeqv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgev8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmltv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmlev16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmeqv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgev16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmltv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmlev4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmeqv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgev4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmltv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmlev8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmeqv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgev8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmltv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_cmlev2si (rtx, rtx, rtx);
extern rtx gen_aarch64_cmeqv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgev2si (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_cmltv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_cmlev4si (rtx, rtx, rtx);
extern rtx gen_aarch64_cmeqv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgev4si (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_cmltv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_cmlev2di (rtx, rtx, rtx);
extern rtx gen_aarch64_cmeqv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgev2di (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_cmltdi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmledi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmeqdi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgedi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtdi (rtx, rtx, rtx);
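/* Unsigned integer compares; equality needs no unsigned variant.  */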
extern rtx gen_aarch64_cmltuv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmleuv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgeuv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtuv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmltuv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmleuv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgeuv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtuv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmltuv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmleuv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgeuv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtuv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmltuv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmleuv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgeuv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtuv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmltuv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_cmleuv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgeuv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtuv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_cmltuv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_cmleuv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgeuv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtuv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_cmltuv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_cmleuv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgeuv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtuv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_cmltudi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmleudi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgeudi (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtudi (rtx, rtx, rtx);
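/* Bitwise test compare: CMTST (true where the AND of the operands is
   nonzero).  */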
extern rtx gen_aarch64_cmtstv8qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cmtstv16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cmtstv4hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cmtstv8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cmtstv2si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cmtstv4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cmtstv2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cmtstdi (rtx, rtx, rtx);
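/* Floating-point compares: FCMLT/FCMLE/FCMEQ/FCMGE/FCMGT.  */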
extern rtx gen_aarch64_cmltv4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmlev4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmeqv4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgev4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtv4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmltv8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmlev8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmeqv8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgev8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtv8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmltv2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmlev2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmeqv2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgev2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtv2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmltv4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmlev4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmeqv4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgev4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtv4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmltv2df (rtx, rtx, rtx);
extern rtx gen_aarch64_cmlev2df (rtx, rtx, rtx);
extern rtx gen_aarch64_cmeqv2df (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgev2df (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtv2df (rtx, rtx, rtx);
extern rtx gen_aarch64_cmlthf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmlehf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmeqhf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgehf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgthf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmltsf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmlesf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmeqsf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgesf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtsf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmltdf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmledf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmeqdf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgedf (rtx, rtx, rtx);
extern rtx gen_aarch64_cmgtdf (rtx, rtx, rtx);
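/* Floating-point absolute-value compares: FACLT/FACLE/FACGE/FACGT.  */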
extern rtx gen_aarch64_facltv4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_faclev4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_facgev4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_facgtv4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_facltv8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_faclev8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_facgev8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_facgtv8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_facltv2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_faclev2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_facgev2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_facgtv2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_facltv4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_faclev4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_facgev4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_facgtv4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_facltv2df (rtx, rtx, rtx);
extern rtx gen_aarch64_faclev2df (rtx, rtx, rtx);
extern rtx gen_aarch64_facgev2df (rtx, rtx, rtx);
extern rtx gen_aarch64_facgtv2df (rtx, rtx, rtx);
extern rtx gen_aarch64_faclthf (rtx, rtx, rtx);
extern rtx gen_aarch64_faclehf (rtx, rtx, rtx);
extern rtx gen_aarch64_facgehf (rtx, rtx, rtx);
extern rtx gen_aarch64_facgthf (rtx, rtx, rtx);
extern rtx gen_aarch64_facltsf (rtx, rtx, rtx);
extern rtx gen_aarch64_faclesf (rtx, rtx, rtx);
extern rtx gen_aarch64_facgesf (rtx, rtx, rtx);
extern rtx gen_aarch64_facgtsf (rtx, rtx, rtx);
extern rtx gen_aarch64_facltdf (rtx, rtx, rtx);
extern rtx gen_aarch64_facledf (rtx, rtx, rtx);
extern rtx gen_aarch64_facgedf (rtx, rtx, rtx);
extern rtx gen_aarch64_facgtdf (rtx, rtx, rtx);
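/* Pairwise add: ADDP.  */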
extern rtx gen_aarch64_addpv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_addpv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_addpv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_addpdi (rtx, rtx);
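/* Multi-register structure loads and stores: LD2/LD3/LD4 and
   ST2/ST3/ST4, including the replicate (LDnR), single-lane,
   LD1/ST1 multi-register and big-endian forms.  */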
extern rtx gen_aarch64_simd_ld2v16qi (rtx, rtx);
extern rtx gen_aarch64_simd_ld2v8hi (rtx, rtx);
extern rtx gen_aarch64_simd_ld2v4si (rtx, rtx);
extern rtx gen_aarch64_simd_ld2v2di (rtx, rtx);
extern rtx gen_aarch64_simd_ld2v8hf (rtx, rtx);
extern rtx gen_aarch64_simd_ld2v4sf (rtx, rtx);
extern rtx gen_aarch64_simd_ld2v2df (rtx, rtx);
extern rtx gen_aarch64_simd_ld2v8bf (rtx, rtx);
extern rtx gen_aarch64_simd_ld2rv8qi (rtx, rtx);
extern rtx gen_aarch64_simd_ld2rv16qi (rtx, rtx);
extern rtx gen_aarch64_simd_ld2rv4hi (rtx, rtx);
extern rtx gen_aarch64_simd_ld2rv8hi (rtx, rtx);
extern rtx gen_aarch64_simd_ld2rv2si (rtx, rtx);
extern rtx gen_aarch64_simd_ld2rv4si (rtx, rtx);
extern rtx gen_aarch64_simd_ld2rv4bf (rtx, rtx);
extern rtx gen_aarch64_simd_ld2rv8bf (rtx, rtx);
extern rtx gen_aarch64_simd_ld2rv2di (rtx, rtx);
extern rtx gen_aarch64_simd_ld2rv4hf (rtx, rtx);
extern rtx gen_aarch64_simd_ld2rv8hf (rtx, rtx);
extern rtx gen_aarch64_simd_ld2rv2sf (rtx, rtx);
extern rtx gen_aarch64_simd_ld2rv4sf (rtx, rtx);
extern rtx gen_aarch64_simd_ld2rv2df (rtx, rtx);
extern rtx gen_aarch64_simd_ld2rdi (rtx, rtx);
extern rtx gen_aarch64_simd_ld2rdf (rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesoi_lanev8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesoi_lanev16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesoi_lanev4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesoi_lanev8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesoi_lanev2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesoi_lanev4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesoi_lanev4bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesoi_lanev8bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesoi_lanev2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesoi_lanev4hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesoi_lanev8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesoi_lanev2sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesoi_lanev4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesoi_lanev2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesoi_lanedi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesoi_lanedf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_st2v16qi (rtx, rtx);
extern rtx gen_aarch64_simd_st2v8hi (rtx, rtx);
extern rtx gen_aarch64_simd_st2v4si (rtx, rtx);
extern rtx gen_aarch64_simd_st2v2di (rtx, rtx);
extern rtx gen_aarch64_simd_st2v8hf (rtx, rtx);
extern rtx gen_aarch64_simd_st2v4sf (rtx, rtx);
extern rtx gen_aarch64_simd_st2v2df (rtx, rtx);
extern rtx gen_aarch64_simd_st2v8bf (rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesoi_lanev8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesoi_lanev16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesoi_lanev4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesoi_lanev8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesoi_lanev2si (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesoi_lanev4si (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesoi_lanev4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesoi_lanev8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesoi_lanev2di (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesoi_lanev4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesoi_lanev8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesoi_lanev2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesoi_lanev4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesoi_lanev2df (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesoi_lanedi (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesoi_lanedf (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_ld3v16qi (rtx, rtx);
extern rtx gen_aarch64_simd_ld3v8hi (rtx, rtx);
extern rtx gen_aarch64_simd_ld3v4si (rtx, rtx);
extern rtx gen_aarch64_simd_ld3v2di (rtx, rtx);
extern rtx gen_aarch64_simd_ld3v8hf (rtx, rtx);
extern rtx gen_aarch64_simd_ld3v4sf (rtx, rtx);
extern rtx gen_aarch64_simd_ld3v2df (rtx, rtx);
extern rtx gen_aarch64_simd_ld3v8bf (rtx, rtx);
extern rtx gen_aarch64_simd_ld3rv8qi (rtx, rtx);
extern rtx gen_aarch64_simd_ld3rv16qi (rtx, rtx);
extern rtx gen_aarch64_simd_ld3rv4hi (rtx, rtx);
extern rtx gen_aarch64_simd_ld3rv8hi (rtx, rtx);
extern rtx gen_aarch64_simd_ld3rv2si (rtx, rtx);
extern rtx gen_aarch64_simd_ld3rv4si (rtx, rtx);
extern rtx gen_aarch64_simd_ld3rv4bf (rtx, rtx);
extern rtx gen_aarch64_simd_ld3rv8bf (rtx, rtx);
extern rtx gen_aarch64_simd_ld3rv2di (rtx, rtx);
extern rtx gen_aarch64_simd_ld3rv4hf (rtx, rtx);
extern rtx gen_aarch64_simd_ld3rv8hf (rtx, rtx);
extern rtx gen_aarch64_simd_ld3rv2sf (rtx, rtx);
extern rtx gen_aarch64_simd_ld3rv4sf (rtx, rtx);
extern rtx gen_aarch64_simd_ld3rv2df (rtx, rtx);
extern rtx gen_aarch64_simd_ld3rdi (rtx, rtx);
extern rtx gen_aarch64_simd_ld3rdf (rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesci_lanev8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesci_lanev16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesci_lanev4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesci_lanev8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesci_lanev2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesci_lanev4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesci_lanev4bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesci_lanev8bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesci_lanev2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesci_lanev4hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesci_lanev8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesci_lanev2sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesci_lanev4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesci_lanev2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesci_lanedi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesci_lanedf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_st3v16qi (rtx, rtx);
extern rtx gen_aarch64_simd_st3v8hi (rtx, rtx);
extern rtx gen_aarch64_simd_st3v4si (rtx, rtx);
extern rtx gen_aarch64_simd_st3v2di (rtx, rtx);
extern rtx gen_aarch64_simd_st3v8hf (rtx, rtx);
extern rtx gen_aarch64_simd_st3v4sf (rtx, rtx);
extern rtx gen_aarch64_simd_st3v2df (rtx, rtx);
extern rtx gen_aarch64_simd_st3v8bf (rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesci_lanev8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesci_lanev16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesci_lanev4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesci_lanev8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesci_lanev2si (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesci_lanev4si (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesci_lanev4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesci_lanev8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesci_lanev2di (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesci_lanev4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesci_lanev8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesci_lanev2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesci_lanev4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesci_lanev2df (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesci_lanedi (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesci_lanedf (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_ld4v16qi (rtx, rtx);
extern rtx gen_aarch64_simd_ld4v8hi (rtx, rtx);
extern rtx gen_aarch64_simd_ld4v4si (rtx, rtx);
extern rtx gen_aarch64_simd_ld4v2di (rtx, rtx);
extern rtx gen_aarch64_simd_ld4v8hf (rtx, rtx);
extern rtx gen_aarch64_simd_ld4v4sf (rtx, rtx);
extern rtx gen_aarch64_simd_ld4v2df (rtx, rtx);
extern rtx gen_aarch64_simd_ld4v8bf (rtx, rtx);
extern rtx gen_aarch64_simd_ld4rv8qi (rtx, rtx);
extern rtx gen_aarch64_simd_ld4rv16qi (rtx, rtx);
extern rtx gen_aarch64_simd_ld4rv4hi (rtx, rtx);
extern rtx gen_aarch64_simd_ld4rv8hi (rtx, rtx);
extern rtx gen_aarch64_simd_ld4rv2si (rtx, rtx);
extern rtx gen_aarch64_simd_ld4rv4si (rtx, rtx);
extern rtx gen_aarch64_simd_ld4rv4bf (rtx, rtx);
extern rtx gen_aarch64_simd_ld4rv8bf (rtx, rtx);
extern rtx gen_aarch64_simd_ld4rv2di (rtx, rtx);
extern rtx gen_aarch64_simd_ld4rv4hf (rtx, rtx);
extern rtx gen_aarch64_simd_ld4rv8hf (rtx, rtx);
extern rtx gen_aarch64_simd_ld4rv2sf (rtx, rtx);
extern rtx gen_aarch64_simd_ld4rv4sf (rtx, rtx);
extern rtx gen_aarch64_simd_ld4rv2df (rtx, rtx);
extern rtx gen_aarch64_simd_ld4rdi (rtx, rtx);
extern rtx gen_aarch64_simd_ld4rdf (rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesxi_lanev8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesxi_lanev16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesxi_lanev4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesxi_lanev8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesxi_lanev2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesxi_lanev4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesxi_lanev4bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesxi_lanev8bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesxi_lanev2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesxi_lanev4hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesxi_lanev8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesxi_lanev2sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesxi_lanev4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesxi_lanev2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesxi_lanedi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_vec_load_lanesxi_lanedf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_st4v16qi (rtx, rtx);
extern rtx gen_aarch64_simd_st4v8hi (rtx, rtx);
extern rtx gen_aarch64_simd_st4v4si (rtx, rtx);
extern rtx gen_aarch64_simd_st4v2di (rtx, rtx);
extern rtx gen_aarch64_simd_st4v8hf (rtx, rtx);
extern rtx gen_aarch64_simd_st4v4sf (rtx, rtx);
extern rtx gen_aarch64_simd_st4v2df (rtx, rtx);
extern rtx gen_aarch64_simd_st4v8bf (rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesxi_lanev8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesxi_lanev16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesxi_lanev4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesxi_lanev8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesxi_lanev2si (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesxi_lanev4si (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesxi_lanev4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesxi_lanev8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesxi_lanev2di (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesxi_lanev4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesxi_lanev8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesxi_lanev2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesxi_lanev4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesxi_lanev2df (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesxi_lanedi (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_store_lanesxi_lanedf (rtx, rtx, rtx);
extern rtx gen_aarch64_rev_reglistoi (rtx, rtx, rtx);
extern rtx gen_aarch64_rev_reglistci (rtx, rtx, rtx);
extern rtx gen_aarch64_rev_reglistxi (rtx, rtx, rtx);
extern rtx gen_aarch64_ld1_x3_v8qi (rtx, rtx);
extern rtx gen_aarch64_ld1_x3_v16qi (rtx, rtx);
extern rtx gen_aarch64_ld1_x3_v4hi (rtx, rtx);
extern rtx gen_aarch64_ld1_x3_v8hi (rtx, rtx);
extern rtx gen_aarch64_ld1_x3_v2si (rtx, rtx);
extern rtx gen_aarch64_ld1_x3_v4si (rtx, rtx);
extern rtx gen_aarch64_ld1_x3_v4bf (rtx, rtx);
extern rtx gen_aarch64_ld1_x3_v8bf (rtx, rtx);
extern rtx gen_aarch64_ld1_x3_v2di (rtx, rtx);
extern rtx gen_aarch64_ld1_x3_v4hf (rtx, rtx);
extern rtx gen_aarch64_ld1_x3_v8hf (rtx, rtx);
extern rtx gen_aarch64_ld1_x3_v2sf (rtx, rtx);
extern rtx gen_aarch64_ld1_x3_v4sf (rtx, rtx);
extern rtx gen_aarch64_ld1_x3_v2df (rtx, rtx);
extern rtx gen_aarch64_ld1_x3_di (rtx, rtx);
extern rtx gen_aarch64_ld1_x3_df (rtx, rtx);
extern rtx gen_aarch64_ld1_x4_v8qi (rtx, rtx);
extern rtx gen_aarch64_ld1_x4_v16qi (rtx, rtx);
extern rtx gen_aarch64_ld1_x4_v4hi (rtx, rtx);
extern rtx gen_aarch64_ld1_x4_v8hi (rtx, rtx);
extern rtx gen_aarch64_ld1_x4_v2si (rtx, rtx);
extern rtx gen_aarch64_ld1_x4_v4si (rtx, rtx);
extern rtx gen_aarch64_ld1_x4_v4bf (rtx, rtx);
extern rtx gen_aarch64_ld1_x4_v8bf (rtx, rtx);
extern rtx gen_aarch64_ld1_x4_v2di (rtx, rtx);
extern rtx gen_aarch64_ld1_x4_v4hf (rtx, rtx);
extern rtx gen_aarch64_ld1_x4_v8hf (rtx, rtx);
extern rtx gen_aarch64_ld1_x4_v2sf (rtx, rtx);
extern rtx gen_aarch64_ld1_x4_v4sf (rtx, rtx);
extern rtx gen_aarch64_ld1_x4_v2df (rtx, rtx);
extern rtx gen_aarch64_ld1_x4_di (rtx, rtx);
extern rtx gen_aarch64_ld1_x4_df (rtx, rtx);
extern rtx gen_aarch64_st1_x2_v8qi (rtx, rtx);
extern rtx gen_aarch64_st1_x2_v16qi (rtx, rtx);
extern rtx gen_aarch64_st1_x2_v4hi (rtx, rtx);
extern rtx gen_aarch64_st1_x2_v8hi (rtx, rtx);
extern rtx gen_aarch64_st1_x2_v2si (rtx, rtx);
extern rtx gen_aarch64_st1_x2_v4si (rtx, rtx);
extern rtx gen_aarch64_st1_x2_v4bf (rtx, rtx);
extern rtx gen_aarch64_st1_x2_v8bf (rtx, rtx);
extern rtx gen_aarch64_st1_x2_v2di (rtx, rtx);
extern rtx gen_aarch64_st1_x2_v4hf (rtx, rtx);
extern rtx gen_aarch64_st1_x2_v8hf (rtx, rtx);
extern rtx gen_aarch64_st1_x2_v2sf (rtx, rtx);
extern rtx gen_aarch64_st1_x2_v4sf (rtx, rtx);
extern rtx gen_aarch64_st1_x2_v2df (rtx, rtx);
extern rtx gen_aarch64_st1_x2_di (rtx, rtx);
extern rtx gen_aarch64_st1_x2_df (rtx, rtx);
extern rtx gen_aarch64_st1_x3_v8qi (rtx, rtx);
extern rtx gen_aarch64_st1_x3_v16qi (rtx, rtx);
extern rtx gen_aarch64_st1_x3_v4hi (rtx, rtx);
extern rtx gen_aarch64_st1_x3_v8hi (rtx, rtx);
extern rtx gen_aarch64_st1_x3_v2si (rtx, rtx);
extern rtx gen_aarch64_st1_x3_v4si (rtx, rtx);
extern rtx gen_aarch64_st1_x3_v4bf (rtx, rtx);
extern rtx gen_aarch64_st1_x3_v8bf (rtx, rtx);
extern rtx gen_aarch64_st1_x3_v2di (rtx, rtx);
extern rtx gen_aarch64_st1_x3_v4hf (rtx, rtx);
extern rtx gen_aarch64_st1_x3_v8hf (rtx, rtx);
extern rtx gen_aarch64_st1_x3_v2sf (rtx, rtx);
extern rtx gen_aarch64_st1_x3_v4sf (rtx, rtx);
extern rtx gen_aarch64_st1_x3_v2df (rtx, rtx);
extern rtx gen_aarch64_st1_x3_di (rtx, rtx);
extern rtx gen_aarch64_st1_x3_df (rtx, rtx);
extern rtx gen_aarch64_st1_x4_v8qi (rtx, rtx);
extern rtx gen_aarch64_st1_x4_v16qi (rtx, rtx);
extern rtx gen_aarch64_st1_x4_v4hi (rtx, rtx);
extern rtx gen_aarch64_st1_x4_v8hi (rtx, rtx);
extern rtx gen_aarch64_st1_x4_v2si (rtx, rtx);
extern rtx gen_aarch64_st1_x4_v4si (rtx, rtx);
extern rtx gen_aarch64_st1_x4_v4bf (rtx, rtx);
extern rtx gen_aarch64_st1_x4_v8bf (rtx, rtx);
extern rtx gen_aarch64_st1_x4_v2di (rtx, rtx);
extern rtx gen_aarch64_st1_x4_v4hf (rtx, rtx);
extern rtx gen_aarch64_st1_x4_v8hf (rtx, rtx);
extern rtx gen_aarch64_st1_x4_v2sf (rtx, rtx);
extern rtx gen_aarch64_st1_x4_v4sf (rtx, rtx);
extern rtx gen_aarch64_st1_x4_v2df (rtx, rtx);
extern rtx gen_aarch64_st1_x4_di (rtx, rtx);
extern rtx gen_aarch64_st1_x4_df (rtx, rtx);
extern rtx gen_aarch64_be_ld1v8qi (rtx, rtx);
extern rtx gen_aarch64_be_ld1v16qi (rtx, rtx);
extern rtx gen_aarch64_be_ld1v4hi (rtx, rtx);
extern rtx gen_aarch64_be_ld1v8hi (rtx, rtx);
extern rtx gen_aarch64_be_ld1v2si (rtx, rtx);
extern rtx gen_aarch64_be_ld1v4si (rtx, rtx);
extern rtx gen_aarch64_be_ld1v2di (rtx, rtx);
extern rtx gen_aarch64_be_ld1v4hf (rtx, rtx);
extern rtx gen_aarch64_be_ld1v8hf (rtx, rtx);
extern rtx gen_aarch64_be_ld1v4bf (rtx, rtx);
extern rtx gen_aarch64_be_ld1v8bf (rtx, rtx);
extern rtx gen_aarch64_be_ld1v2sf (rtx, rtx);
extern rtx gen_aarch64_be_ld1v4sf (rtx, rtx);
extern rtx gen_aarch64_be_ld1v2df (rtx, rtx);
extern rtx gen_aarch64_be_ld1di (rtx, rtx);
extern rtx gen_aarch64_be_st1v8qi (rtx, rtx);
extern rtx gen_aarch64_be_st1v16qi (rtx, rtx);
extern rtx gen_aarch64_be_st1v4hi (rtx, rtx);
extern rtx gen_aarch64_be_st1v8hi (rtx, rtx);
extern rtx gen_aarch64_be_st1v2si (rtx, rtx);
extern rtx gen_aarch64_be_st1v4si (rtx, rtx);
extern rtx gen_aarch64_be_st1v2di (rtx, rtx);
extern rtx gen_aarch64_be_st1v4hf (rtx, rtx);
extern rtx gen_aarch64_be_st1v8hf (rtx, rtx);
extern rtx gen_aarch64_be_st1v4bf (rtx, rtx);
extern rtx gen_aarch64_be_st1v8bf (rtx, rtx);
extern rtx gen_aarch64_be_st1v2sf (rtx, rtx);
extern rtx gen_aarch64_be_st1v4sf (rtx, rtx);
extern rtx gen_aarch64_be_st1v2df (rtx, rtx);
extern rtx gen_aarch64_be_st1di (rtx, rtx);
extern rtx gen_aarch64_ld2v8qi_dreg (rtx, rtx);
extern rtx gen_aarch64_ld2v4hi_dreg (rtx, rtx);
extern rtx gen_aarch64_ld2v4hf_dreg (rtx, rtx);
extern rtx gen_aarch64_ld2v2si_dreg (rtx, rtx);
extern rtx gen_aarch64_ld2v2sf_dreg (rtx, rtx);
extern rtx gen_aarch64_ld2v4bf_dreg (rtx, rtx);
extern rtx gen_aarch64_ld2di_dreg (rtx, rtx);
extern rtx gen_aarch64_ld2df_dreg (rtx, rtx);
extern rtx gen_aarch64_ld3v8qi_dreg (rtx, rtx);
extern rtx gen_aarch64_ld3v4hi_dreg (rtx, rtx);
extern rtx gen_aarch64_ld3v4hf_dreg (rtx, rtx);
extern rtx gen_aarch64_ld3v2si_dreg (rtx, rtx);
extern rtx gen_aarch64_ld3v2sf_dreg (rtx, rtx);
extern rtx gen_aarch64_ld3v4bf_dreg (rtx, rtx);
extern rtx gen_aarch64_ld3di_dreg (rtx, rtx);
extern rtx gen_aarch64_ld3df_dreg (rtx, rtx);
extern rtx gen_aarch64_ld4v8qi_dreg (rtx, rtx);
extern rtx gen_aarch64_ld4v4hi_dreg (rtx, rtx);
extern rtx gen_aarch64_ld4v4hf_dreg (rtx, rtx);
extern rtx gen_aarch64_ld4v2si_dreg (rtx, rtx);
extern rtx gen_aarch64_ld4v2sf_dreg (rtx, rtx);
extern rtx gen_aarch64_ld4v4bf_dreg (rtx, rtx);
extern rtx gen_aarch64_ld4di_dreg (rtx, rtx);
extern rtx gen_aarch64_ld4df_dreg (rtx, rtx);
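/* Table lookups: TBL/TBX (TBX leaves out-of-range lanes unchanged).  */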
extern rtx gen_aarch64_tbl1v8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_tbl1v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_tbl2v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_tbl3v8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_tbl3v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_tbx4v8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_tbx4v16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_qtbl3v8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_qtbl3v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_qtbx3v8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_qtbx3v16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_qtbl4v8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_qtbl4v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_qtbx4v8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_qtbx4v16qi (rtx, rtx, rtx, rtx);
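/* Combining two 64-bit vectors into one 128-bit register, followed by
   the interleave (ZIP1/ZIP2), transpose (TRN1/TRN2) and unzip
   (UZP1/UZP2) permutes.  */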
extern rtx gen_aarch64_combinev16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_zip1v8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_zip2v8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_trn1v8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_trn2v8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp1v8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp2v8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_zip1v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_zip2v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_trn1v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_trn2v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp1v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp2v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_zip1v4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_zip2v4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_trn1v4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_trn2v4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp1v4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp2v4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_zip1v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_zip2v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_trn1v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_trn2v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp1v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp2v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_zip1v2si (rtx, rtx, rtx);
extern rtx gen_aarch64_zip2v2si (rtx, rtx, rtx);
extern rtx gen_aarch64_trn1v2si (rtx, rtx, rtx);
extern rtx gen_aarch64_trn2v2si (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp1v2si (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp2v2si (rtx, rtx, rtx);
extern rtx gen_aarch64_zip1v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_zip2v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_trn1v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_trn2v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp1v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp2v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_zip1v2di (rtx, rtx, rtx);
extern rtx gen_aarch64_zip2v2di (rtx, rtx, rtx);
extern rtx gen_aarch64_trn1v2di (rtx, rtx, rtx);
extern rtx gen_aarch64_trn2v2di (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp1v2di (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp2v2di (rtx, rtx, rtx);
extern rtx gen_aarch64_zip1v4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_zip2v4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_trn1v4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_trn2v4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp1v4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp2v4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_zip1v8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_zip2v8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_trn1v8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_trn2v8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp1v8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp2v8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_zip1v4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_zip2v4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_trn1v4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_trn2v4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp1v4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp2v4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_zip1v8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_zip2v8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_trn1v8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_trn2v8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp1v8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp2v8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_zip1v2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_zip2v2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_trn1v2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_trn2v2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp1v2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp2v2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_zip1v4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_zip2v4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_trn1v4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_trn2v4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp1v4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp2v4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_zip1v2df (rtx, rtx, rtx);
extern rtx gen_aarch64_zip2v2df (rtx, rtx, rtx);
extern rtx gen_aarch64_trn1v2df (rtx, rtx, rtx);
extern rtx gen_aarch64_trn2v2df (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp1v2df (rtx, rtx, rtx);
extern rtx gen_aarch64_uzp2v2df (rtx, rtx, rtx);
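/* Byte extraction from a concatenated vector pair: EXT.  */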
extern rtx gen_aarch64_extv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_extv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_extv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_extv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_extv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_extv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_extv2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_extv4hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_extv8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_extv4bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_extv8bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_extv2sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_extv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_extv2df (rtx, rtx, rtx, rtx);
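/* Element reversal within 64-, 32- and 16-bit containers:
   REV64/REV32/REV16.  */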
extern rtx gen_aarch64_rev64v8qi (rtx, rtx);
extern rtx gen_aarch64_rev32v8qi (rtx, rtx);
extern rtx gen_aarch64_rev16v8qi (rtx, rtx);
extern rtx gen_aarch64_rev64v16qi (rtx, rtx);
extern rtx gen_aarch64_rev32v16qi (rtx, rtx);
extern rtx gen_aarch64_rev16v16qi (rtx, rtx);
extern rtx gen_aarch64_rev64v4hi (rtx, rtx);
extern rtx gen_aarch64_rev32v4hi (rtx, rtx);
extern rtx gen_aarch64_rev16v4hi (rtx, rtx);
extern rtx gen_aarch64_rev64v8hi (rtx, rtx);
extern rtx gen_aarch64_rev32v8hi (rtx, rtx);
extern rtx gen_aarch64_rev16v8hi (rtx, rtx);
extern rtx gen_aarch64_rev64v2si (rtx, rtx);
extern rtx gen_aarch64_rev32v2si (rtx, rtx);
extern rtx gen_aarch64_rev16v2si (rtx, rtx);
extern rtx gen_aarch64_rev64v4si (rtx, rtx);
extern rtx gen_aarch64_rev32v4si (rtx, rtx);
extern rtx gen_aarch64_rev16v4si (rtx, rtx);
extern rtx gen_aarch64_rev64v2di (rtx, rtx);
extern rtx gen_aarch64_rev32v2di (rtx, rtx);
extern rtx gen_aarch64_rev16v2di (rtx, rtx);
extern rtx gen_aarch64_rev64v4hf (rtx, rtx);
extern rtx gen_aarch64_rev32v4hf (rtx, rtx);
extern rtx gen_aarch64_rev16v4hf (rtx, rtx);
extern rtx gen_aarch64_rev64v8hf (rtx, rtx);
extern rtx gen_aarch64_rev32v8hf (rtx, rtx);
extern rtx gen_aarch64_rev16v8hf (rtx, rtx);
extern rtx gen_aarch64_rev64v4bf (rtx, rtx);
extern rtx gen_aarch64_rev32v4bf (rtx, rtx);
extern rtx gen_aarch64_rev16v4bf (rtx, rtx);
extern rtx gen_aarch64_rev64v8bf (rtx, rtx);
extern rtx gen_aarch64_rev32v8bf (rtx, rtx);
extern rtx gen_aarch64_rev16v8bf (rtx, rtx);
extern rtx gen_aarch64_rev64v2sf (rtx, rtx);
extern rtx gen_aarch64_rev32v2sf (rtx, rtx);
extern rtx gen_aarch64_rev16v2sf (rtx, rtx);
extern rtx gen_aarch64_rev64v4sf (rtx, rtx);
extern rtx gen_aarch64_rev32v4sf (rtx, rtx);
extern rtx gen_aarch64_rev16v4sf (rtx, rtx);
extern rtx gen_aarch64_rev64v2df (rtx, rtx);
extern rtx gen_aarch64_rev32v2df (rtx, rtx);
extern rtx gen_aarch64_rev16v2df (rtx, rtx);
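/* D-register (64-bit) forms of the structure stores: ST2/ST3/ST4.  */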
extern rtx gen_aarch64_st2v8qi_dreg (rtx, rtx);
extern rtx gen_aarch64_st2v4hi_dreg (rtx, rtx);
extern rtx gen_aarch64_st2v4hf_dreg (rtx, rtx);
extern rtx gen_aarch64_st2v2si_dreg (rtx, rtx);
extern rtx gen_aarch64_st2v2sf_dreg (rtx, rtx);
extern rtx gen_aarch64_st2v4bf_dreg (rtx, rtx);
extern rtx gen_aarch64_st2di_dreg (rtx, rtx);
extern rtx gen_aarch64_st2df_dreg (rtx, rtx);
extern rtx gen_aarch64_st3v8qi_dreg (rtx, rtx);
extern rtx gen_aarch64_st3v4hi_dreg (rtx, rtx);
extern rtx gen_aarch64_st3v4hf_dreg (rtx, rtx);
extern rtx gen_aarch64_st3v2si_dreg (rtx, rtx);
extern rtx gen_aarch64_st3v2sf_dreg (rtx, rtx);
extern rtx gen_aarch64_st3v4bf_dreg (rtx, rtx);
extern rtx gen_aarch64_st3di_dreg (rtx, rtx);
extern rtx gen_aarch64_st3df_dreg (rtx, rtx);
extern rtx gen_aarch64_st4v8qi_dreg (rtx, rtx);
extern rtx gen_aarch64_st4v4hi_dreg (rtx, rtx);
extern rtx gen_aarch64_st4v4hf_dreg (rtx, rtx);
extern rtx gen_aarch64_st4v2si_dreg (rtx, rtx);
extern rtx gen_aarch64_st4v2sf_dreg (rtx, rtx);
extern rtx gen_aarch64_st4v4bf_dreg (rtx, rtx);
extern rtx gen_aarch64_st4di_dreg (rtx, rtx);
extern rtx gen_aarch64_st4df_dreg (rtx, rtx);
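/* LD1 two-register (x2) forms.  */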
extern rtx gen_aarch64_simd_ld1v16qi_x2 (rtx, rtx);
extern rtx gen_aarch64_simd_ld1v8hi_x2 (rtx, rtx);
extern rtx gen_aarch64_simd_ld1v4si_x2 (rtx, rtx);
extern rtx gen_aarch64_simd_ld1v2di_x2 (rtx, rtx);
extern rtx gen_aarch64_simd_ld1v8hf_x2 (rtx, rtx);
extern rtx gen_aarch64_simd_ld1v4sf_x2 (rtx, rtx);
extern rtx gen_aarch64_simd_ld1v2df_x2 (rtx, rtx);
extern rtx gen_aarch64_simd_ld1v8bf_x2 (rtx, rtx);
extern rtx gen_aarch64_simd_ld1v8qi_x2 (rtx, rtx);
extern rtx gen_aarch64_simd_ld1v4hi_x2 (rtx, rtx);
extern rtx gen_aarch64_simd_ld1v4bf_x2 (rtx, rtx);
extern rtx gen_aarch64_simd_ld1v4hf_x2 (rtx, rtx);
extern rtx gen_aarch64_simd_ld1v2si_x2 (rtx, rtx);
extern rtx gen_aarch64_simd_ld1v2sf_x2 (rtx, rtx);
extern rtx gen_aarch64_simd_ld1di_x2 (rtx, rtx);
extern rtx gen_aarch64_simd_ld1df_x2 (rtx, rtx);
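/* Reciprocal estimates and steps: FRECPE/FRECPX/FRECPS and unsigned
   URECPE.  */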
extern rtx gen_aarch64_frecpev4hf (rtx, rtx);
extern rtx gen_aarch64_frecpev8hf (rtx, rtx);
extern rtx gen_aarch64_frecpev2sf (rtx, rtx);
extern rtx gen_aarch64_frecpev4sf (rtx, rtx);
extern rtx gen_aarch64_frecpev2df (rtx, rtx);
extern rtx gen_aarch64_frecpehf (rtx, rtx);
extern rtx gen_aarch64_frecpesf (rtx, rtx);
extern rtx gen_aarch64_frecpedf (rtx, rtx);
extern rtx gen_aarch64_frecpxhf (rtx, rtx);
extern rtx gen_aarch64_frecpxsf (rtx, rtx);
extern rtx gen_aarch64_frecpxdf (rtx, rtx);
extern rtx gen_aarch64_frecpsv4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_frecpsv8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_frecpsv2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_frecpsv4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_frecpsv2df (rtx, rtx, rtx);
extern rtx gen_aarch64_frecpshf (rtx, rtx, rtx);
extern rtx gen_aarch64_frecpssf (rtx, rtx, rtx);
extern rtx gen_aarch64_frecpsdf (rtx, rtx, rtx);
extern rtx gen_aarch64_urecpev2si (rtx, rtx);
extern rtx gen_aarch64_urecpev4si (rtx, rtx);
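/* Crypto extensions: AES, SHA-1, SHA-256 and SHA-512.  */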
extern rtx gen_aarch64_crypto_aesev16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_crypto_aesdv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_crypto_aesmcv16qi (rtx, rtx);
extern rtx gen_aarch64_crypto_aesimcv16qi (rtx, rtx);
extern rtx gen_aarch64_crypto_sha1hsi (rtx, rtx);
extern rtx gen_aarch64_crypto_sha1hv4si (rtx, rtx);
extern rtx gen_aarch64_be_crypto_sha1hv4si (rtx, rtx);
extern rtx gen_aarch64_crypto_sha1su1v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_crypto_sha1cv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_crypto_sha1mv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_crypto_sha1pv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_crypto_sha1su0v4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_crypto_sha256hv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_crypto_sha256h2v4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_crypto_sha256su0v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_crypto_sha256su1v4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_crypto_sha512hqv2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_crypto_sha512h2qv2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_crypto_sha512su0qv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_crypto_sha512su1qv2di (rtx, rtx, rtx, rtx);
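/* SHA-3 extension: EOR3/RAX1/XAR/BCAX.  */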
extern rtx gen_eor3qv16qi4 (rtx, rtx, rtx, rtx);
extern rtx gen_eor3qv8hi4 (rtx, rtx, rtx, rtx);
extern rtx gen_eor3qv4si4 (rtx, rtx, rtx, rtx);
extern rtx gen_eor3qv2di4 (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_rax1qv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_xarqv2di (rtx, rtx, rtx, rtx);
extern rtx gen_bcaxqv16qi4 (rtx, rtx, rtx, rtx);
extern rtx gen_bcaxqv8hi4 (rtx, rtx, rtx, rtx);
extern rtx gen_bcaxqv4si4 (rtx, rtx, rtx, rtx);
extern rtx gen_bcaxqv2di4 (rtx, rtx, rtx, rtx);
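/* SM3 and SM4 extensions.  */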
extern rtx gen_aarch64_sm3ss1qv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sm3tt1aqv4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sm3tt1bqv4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sm3tt2aqv4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sm3tt2bqv4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sm3partw1qv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sm3partw2qv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sm4eqv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sm4ekeyqv4si (rtx, rtx, rtx);
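/* Half-precision widening multiply-accumulate long: FMLAL/FMLSL,
   low/high halves and lane forms.  */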
extern rtx gen_aarch64_simd_fmlal_lowv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlalq_lowv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlsl_lowv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlslq_lowv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlal_highv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlalq_highv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlsl_highv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlslq_highv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlal_lane_lowv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlsl_lane_lowv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlal_lane_highv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlsl_lane_highv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlalq_laneq_lowv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlslq_laneq_lowv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlalq_laneq_highv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlslq_laneq_highv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlal_laneq_lowv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlsl_laneq_lowv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlal_laneq_highv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlsl_laneq_highv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlalq_lane_lowv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlslq_lane_lowv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlalq_lane_highv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_fmlslq_lane_highv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
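/* Polynomial multiply long: PMULL.  */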
extern rtx gen_aarch64_crypto_pmulldi (rtx, rtx, rtx);
extern rtx gen_aarch64_crypto_pmullv2di (rtx, rtx, rtx);
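/* Vector widening and narrowing conversions (SXTL/UXTL, XTN).  */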
extern rtx gen_extendv8qiv8hi2 (rtx, rtx);
extern rtx gen_zero_extendv8qiv8hi2 (rtx, rtx);
extern rtx gen_extendv4hiv4si2 (rtx, rtx);
extern rtx gen_zero_extendv4hiv4si2 (rtx, rtx);
extern rtx gen_extendv2siv2di2 (rtx, rtx);
extern rtx gen_zero_extendv2siv2di2 (rtx, rtx);
extern rtx gen_truncv8hiv8qi2 (rtx, rtx);
extern rtx gen_truncv4siv4hi2 (rtx, rtx);
extern rtx gen_truncv2div2si2 (rtx, rtx);
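/* BFloat16 dot product, matrix multiply and multiply-accumulate long:
   BFDOT/BFMMLA/BFMLALB/BFMLALT.  */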
extern rtx gen_aarch64_bfdotv2sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_bfdotv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_bfdot_lanev2sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_bfdot_laneqv2sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_bfdot_lanev4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_bfdot_laneqv4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_bfmmlaqv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_bfmlalbv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_bfmlaltv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_bfmlalb_lanev4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_bfmlalt_lanev4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_bfmlalb_lane_qv4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_bfmlalt_lane_qv4sf (rtx, rtx, rtx, rtx, rtx);
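/* 8-bit integer matrix multiply-accumulate: SMMLA/UMMLA/USMMLA.  */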
extern rtx gen_aarch64_simd_smmlav16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_ummlav16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_usmmlav16qi (rtx, rtx, rtx, rtx);
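/* BFloat16 conversions: BFCVTN/BFCVTN2/BFCVT.  */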
extern rtx gen_aarch64_bfcvtnv4bf (rtx, rtx);
extern rtx gen_aarch64_bfcvtn_qv8bf (rtx, rtx);
extern rtx gen_aarch64_bfcvtn2v8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_bfcvtbf (rtx, rtx);
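/* Atomic operations: compare-and-swap, exchange, read-modify-write and
   fetch variants, plus atomic load/store and the load/store-exclusive
   primitives.  The _lse variants use the LSE atomic instructions
   (CAS, SWP, LD<OP>) where available.  A minimal sketch (illustration
   only; `mem' is assumed to be an existing SImode MEM rtx) of a
   relaxed 32-bit atomic load:

     rtx val = gen_reg_rtx (SImode);
     emit_insn (gen_atomic_loadsi (val, mem,
				   GEN_INT (MEMMODEL_RELAXED)));  */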
extern rtx gen_aarch64_compare_and_swapqi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_compare_and_swaphi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_compare_and_swapsi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_compare_and_swapdi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_compare_and_swapti (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_compare_and_swapqi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_compare_and_swaphi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_compare_and_swapsi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_compare_and_swapdi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_compare_and_swapti_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_exchangeqi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_exchangehi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_exchangesi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_exchangedi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_exchangeqi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_exchangehi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_exchangesi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_exchangedi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_addqi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_subqi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_orqi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_xorqi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_andqi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_addhi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_subhi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_orhi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_xorhi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_andhi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_addsi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_subsi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_orsi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_xorsi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_andsi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_adddi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_subdi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_ordi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_xordi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_anddi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_iorqi_lse (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_bicqi_lse (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_xorqi_lse (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_addqi_lse (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_iorhi_lse (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_bichi_lse (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_xorhi_lse (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_addhi_lse (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_iorsi_lse (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_bicsi_lse (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_xorsi_lse (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_addsi_lse (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_iordi_lse (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_bicdi_lse (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_xordi_lse (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_adddi_lse (rtx, rtx, rtx);
extern rtx gen_atomic_nandqi (rtx, rtx, rtx);
extern rtx gen_atomic_nandhi (rtx, rtx, rtx);
extern rtx gen_atomic_nandsi (rtx, rtx, rtx);
extern rtx gen_atomic_nanddi (rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_addqi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_subqi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_orqi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_xorqi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_andqi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_addhi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_subhi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_orhi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_xorhi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_andhi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_addsi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_subsi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_orsi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_xorsi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_andsi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_adddi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_subdi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_ordi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_xordi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_anddi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_iorqi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_bicqi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_xorqi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_addqi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_iorhi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_bichi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_xorhi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_addhi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_iorsi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_bicsi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_xorsi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_addsi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_iordi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_bicdi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_xordi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_fetch_adddi_lse (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_nandqi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_nandhi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_nandsi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_nanddi (rtx, rtx, rtx, rtx);
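/* atomic_<op>_fetch patterns: perform <op> atomically and return the
   value the memory location holds after the operation.  */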
extern rtx gen_aarch64_atomic_add_fetchqi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_sub_fetchqi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_or_fetchqi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_xor_fetchqi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_and_fetchqi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_add_fetchhi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_sub_fetchhi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_or_fetchhi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_xor_fetchhi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_and_fetchhi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_add_fetchsi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_sub_fetchsi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_or_fetchsi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_xor_fetchsi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_and_fetchsi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_add_fetchdi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_sub_fetchdi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_or_fetchdi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_xor_fetchdi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_atomic_and_fetchdi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_nand_fetchqi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_nand_fetchhi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_nand_fetchsi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_nand_fetchdi (rtx, rtx, rtx, rtx);
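/* Plain atomic loads and stores; the memory-model operand selects
   between relaxed and acquire/release forms.  As with every gen_*
   function in this header, a caller builds the operands and wraps the
   result in emit_insn.  A minimal sketch, assuming `mem' and `model'
   are already-valid rtx values:

     rtx tmp = gen_reg_rtx (SImode);
     emit_insn (gen_atomic_loadsi (tmp, mem, model));  */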
extern rtx gen_atomic_loadqi (rtx, rtx, rtx);
extern rtx gen_atomic_loadhi (rtx, rtx, rtx);
extern rtx gen_atomic_loadsi (rtx, rtx, rtx);
extern rtx gen_atomic_loaddi (rtx, rtx, rtx);
extern rtx gen_atomic_storeqi (rtx, rtx, rtx);
extern rtx gen_atomic_storehi (rtx, rtx, rtx);
extern rtx gen_atomic_storesi (rtx, rtx, rtx);
extern rtx gen_atomic_storedi (rtx, rtx, rtx);
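/* Load-exclusive/store-exclusive building blocks (LDXR/LDAXR and
   STXR/STLXR), including the paired forms used for 128-bit
   accesses.  */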
extern rtx gen_aarch64_load_exclusiveqi (rtx, rtx, rtx);
extern rtx gen_aarch64_load_exclusivehi (rtx, rtx, rtx);
extern rtx gen_aarch64_load_exclusivesi (rtx, rtx, rtx);
extern rtx gen_aarch64_load_exclusivedi (rtx, rtx, rtx);
extern rtx gen_aarch64_load_exclusive_pair (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_store_exclusiveqi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_store_exclusivehi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_store_exclusivesi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_store_exclusivedi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_store_exclusive_pair (rtx, rtx, rtx, rtx, rtx);
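/* SVE predicated moves.  The vnx<N><elt> suffixes name the scalable
   vector modes: vnx16qi is one SVE vector of QImode elements, vnx32qi
   a two-vector tuple, vnx48qi three vectors, and so on.  */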
extern rtx gen_aarch64_pred_movvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx4qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx2qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx2hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx2hf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx2si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx32qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx16hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx8si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx4di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx16bf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx16hf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx8sf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx4df (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx48qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx24hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx12si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx6di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx24bf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx24hf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx12sf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx6df (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx64qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx32hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx16si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx8di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx32bf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx32hf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx16sf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_movvnx8df (rtx, rtx, rtx);
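/* First-fault register (FFR) manipulation for SVE first-faulting
   loads: write, read (optionally zeroed under a predicate), and the
   FFRT copies GCC appears to use internally to model dependencies
   while a first-faulting sequence is being built.  */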
extern rtx gen_aarch64_wrffr (rtx);
extern rtx gen_aarch64_update_ffr_for_load (void);
extern rtx gen_aarch64_copy_ffr_to_ffrt (void);
extern rtx gen_aarch64_rdffr (rtx);
extern rtx gen_aarch64_rdffr_z (rtx, rtx);
extern rtx gen_aarch64_update_ffrt (void);
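/* Predicated contiguous loads: maskload<data-mode><pred-mode> loads a
   vector under the given governing predicate.  */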
extern rtx gen_maskloadvnx16qivnx16bi (rtx, rtx, rtx);
extern rtx gen_maskloadvnx8qivnx8bi (rtx, rtx, rtx);
extern rtx gen_maskloadvnx4qivnx4bi (rtx, rtx, rtx);
extern rtx gen_maskloadvnx2qivnx2bi (rtx, rtx, rtx);
extern rtx gen_maskloadvnx8hivnx8bi (rtx, rtx, rtx);
extern rtx gen_maskloadvnx4hivnx4bi (rtx, rtx, rtx);
extern rtx gen_maskloadvnx2hivnx2bi (rtx, rtx, rtx);
extern rtx gen_maskloadvnx8hfvnx8bi (rtx, rtx, rtx);
extern rtx gen_maskloadvnx4hfvnx4bi (rtx, rtx, rtx);
extern rtx gen_maskloadvnx2hfvnx2bi (rtx, rtx, rtx);
extern rtx gen_maskloadvnx8bfvnx8bi (rtx, rtx, rtx);
extern rtx gen_maskloadvnx4sivnx4bi (rtx, rtx, rtx);
extern rtx gen_maskloadvnx2sivnx2bi (rtx, rtx, rtx);
extern rtx gen_maskloadvnx4sfvnx4bi (rtx, rtx, rtx);
extern rtx gen_maskloadvnx2sfvnx2bi (rtx, rtx, rtx);
extern rtx gen_maskloadvnx2divnx2bi (rtx, rtx, rtx);
extern rtx gen_maskloadvnx2dfvnx2bi (rtx, rtx, rtx);
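/* Structure loads: vec_mask_load_lanes for two-, three- and
   four-vector tuples (LD2/LD3/LD4).  */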
extern rtx gen_vec_mask_load_lanesvnx32qivnx16qi (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx16hivnx8hi (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx8sivnx4si (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx4divnx2di (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx16bfvnx8bf (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx16hfvnx8hf (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx8sfvnx4sf (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx4dfvnx2df (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx48qivnx16qi (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx24hivnx8hi (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx12sivnx4si (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx6divnx2di (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx24bfvnx8bf (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx24hfvnx8hf (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx12sfvnx4sf (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx6dfvnx2df (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx64qivnx16qi (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx32hivnx8hi (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx16sivnx4si (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx8divnx2di (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx32bfvnx8bf (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx32hfvnx8hf (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx16sfvnx4sf (rtx, rtx, rtx);
extern rtx gen_vec_mask_load_lanesvnx8dfvnx2df (rtx, rtx, rtx);
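/* Predicated extending loads from partial SVE modes.  The mode
   iterators expand to every (destination, source) combination, but
   only a subset is meaningful; a combination whose pattern condition
   is known to be false at build time appears below as a static inline
   stub returning 0 (NULL_RTX) instead of an extern prototype, so
   callers can name any combination uniformly and simply test the
   result.  */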
extern rtx gen_aarch64_load_extendvnx8hivnx8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_load_zero_extendvnx8hivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_load_extendvnx8hivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx8hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx8hivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx8hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_extendvnx8hivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx8hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx8hivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx8hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_extendvnx8hivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx8hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx8hivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx8hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_extendvnx8hivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx8hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx8hivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx8hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_extendvnx8hivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx8hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx8hivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx8hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_extendvnx4hivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx4hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx4hivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx4hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_load_extendvnx4hivnx4qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_load_zero_extendvnx4hivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_load_extendvnx4hivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx4hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx4hivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx4hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_extendvnx4hivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx4hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx4hivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx4hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_extendvnx4hivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx4hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx4hivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx4hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_extendvnx4hivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx4hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx4hivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx4hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_extendvnx2hivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx2hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx2hivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx2hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_extendvnx2hivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx2hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx2hivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx2hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_load_extendvnx2hivnx2qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_load_zero_extendvnx2hivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_load_extendvnx2hivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx2hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx2hivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx2hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_extendvnx2hivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx2hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx2hivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx2hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_extendvnx2hivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx2hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx2hivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx2hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_extendvnx4sivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx4sivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_load_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_load_zero_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_load_extendvnx4sivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx4sivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_load_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_load_zero_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_load_extendvnx4sivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx4sivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_extendvnx4sivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx4sivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_extendvnx2sivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx2sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx2sivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx2sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_extendvnx2sivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx2sivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx2sivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx2sivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_load_extendvnx2sivnx2qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_load_zero_extendvnx2sivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_load_extendvnx2sivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx2sivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx2sivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx2sivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_load_extendvnx2sivnx2hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_load_zero_extendvnx2sivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_load_extendvnx2sivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx2sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx2sivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx2sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_extendvnx2divnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx2divnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_extendvnx2divnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx2divnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_load_extendvnx2divnx2qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_load_zero_extendvnx2divnx2qi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_load_extendvnx2divnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_extendvnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_load_zero_extendvnx2divnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_load_zero_extendvnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_load_extendvnx2divnx2hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_load_zero_extendvnx2divnx2hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_load_extendvnx2divnx2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_load_zero_extendvnx2divnx2si (rtx, rtx, rtx, rtx);
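/* First-faulting (LDFF1) and non-faulting (LDNF1) loads, plain and
   extending; the same stub convention applies to invalid mode
   combinations.  */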
extern rtx gen_aarch64_ldff1vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1vnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1vnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1vnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1vnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1vnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1vnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1vnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1vnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_extendvnx8hivnx8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1_extendvnx8hivnx8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_zero_extendvnx8hivnx8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1_zero_extendvnx8hivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_ldff1_extendvnx8hivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx8hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx8hivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx8hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx8hivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx8hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx8hivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx8hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_extendvnx8hivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx8hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx8hivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx8hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx8hivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx8hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx8hivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx8hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_extendvnx8hivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx8hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx8hivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx8hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx8hivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx8hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx8hivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx8hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_extendvnx8hivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx8hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx8hivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx8hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx8hivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx8hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx8hivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx8hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_extendvnx8hivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx8hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx8hivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx8hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx8hivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx8hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx8hivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx8hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_extendvnx4hivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx4hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx4hivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx4hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx4hivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx4hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx4hivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx4hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_ldff1_extendvnx4hivnx4qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1_extendvnx4hivnx4qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_zero_extendvnx4hivnx4qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1_zero_extendvnx4hivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_ldff1_extendvnx4hivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx4hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx4hivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx4hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx4hivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx4hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx4hivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx4hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_extendvnx4hivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx4hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx4hivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx4hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx4hivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx4hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx4hivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx4hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_extendvnx4hivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx4hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx4hivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx4hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx4hivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx4hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx4hivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx4hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_extendvnx4hivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx4hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx4hivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx4hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx4hivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx4hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx4hivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx4hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_extendvnx2hivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx2hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx2hivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx2hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx2hivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx2hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx2hivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx2hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_extendvnx2hivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx2hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx2hivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx2hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx2hivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx2hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx2hivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx2hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_ldff1_extendvnx2hivnx2qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1_extendvnx2hivnx2qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_zero_extendvnx2hivnx2qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1_zero_extendvnx2hivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_ldff1_extendvnx2hivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx2hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx2hivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx2hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx2hivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx2hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx2hivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx2hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_extendvnx2hivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx2hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx2hivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx2hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx2hivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx2hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx2hivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx2hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_extendvnx2hivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx2hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx2hivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx2hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx2hivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx2hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx2hivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx2hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_extendvnx4sivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx4sivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx4sivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx4sivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_ldff1_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_zero_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1_zero_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_ldff1_extendvnx4sivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx4sivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx4sivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx4sivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_ldff1_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_zero_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1_zero_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_ldff1_extendvnx4sivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx4sivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx4sivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx4sivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_extendvnx4sivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx4sivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx4sivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx4sivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_extendvnx2sivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx2sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx2sivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx2sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx2sivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx2sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx2sivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx2sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_extendvnx2sivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx2sivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx2sivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx2sivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx2sivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx2sivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx2sivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx2sivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_ldff1_extendvnx2sivnx2qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1_extendvnx2sivnx2qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_zero_extendvnx2sivnx2qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1_zero_extendvnx2sivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_ldff1_extendvnx2sivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx2sivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx2sivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx2sivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx2sivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx2sivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx2sivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx2sivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_ldff1_extendvnx2sivnx2hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1_extendvnx2sivnx2hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_zero_extendvnx2sivnx2hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1_zero_extendvnx2sivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_ldff1_extendvnx2sivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx2sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx2sivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx2sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx2sivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx2sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx2sivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx2sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_extendvnx2divnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx2divnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx2divnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx2divnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_extendvnx2divnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx2divnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx2divnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx2divnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_ldff1_extendvnx2divnx2qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1_extendvnx2divnx2qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_zero_extendvnx2divnx2qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1_zero_extendvnx2divnx2qi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_ldff1_extendvnx2divnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_extendvnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_extendvnx2divnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_extendvnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldff1_zero_extendvnx2divnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldff1_zero_extendvnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_ldnf1_zero_extendvnx2divnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_ldnf1_zero_extendvnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_ldff1_extendvnx2divnx2hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1_extendvnx2divnx2hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_zero_extendvnx2divnx2hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1_zero_extendvnx2divnx2hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_extendvnx2divnx2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1_extendvnx2divnx2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_zero_extendvnx2divnx2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldnf1_zero_extendvnx2divnx2si (rtx, rtx, rtx, rtx);
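/* Non-temporal contiguous loads (LDNT1).  */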
extern rtx gen_aarch64_ldnt1vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_ldnt1vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_ldnt1vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_ldnt1vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_ldnt1vnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_ldnt1vnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_ldnt1vnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_ldnt1vnx2df (rtx, rtx, rtx);
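/* Gather loads: mask_gather_load<data><offset> takes a destination, a
   base, a vector of offsets, extension/scale selectors and a
   governing predicate, plus the extending variants below.  */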
extern rtx gen_mask_gather_loadvnx4qivnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_gather_loadvnx4hivnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_gather_loadvnx4hfvnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_gather_loadvnx4sivnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_gather_loadvnx4sfvnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_gather_loadvnx2qivnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_gather_loadvnx2hivnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_gather_loadvnx2hfvnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_gather_loadvnx2sivnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_gather_loadvnx2sfvnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_gather_loadvnx2divnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_gather_loadvnx2dfvnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_load_extendvnx4hivnx4qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_load_zero_extendvnx4hivnx4qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_gather_load_extendvnx4hivnx4hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_load_extendvnx4hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
static inline rtx gen_aarch64_gather_load_zero_extendvnx4hivnx4hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_load_zero_extendvnx4hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_aarch64_gather_load_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_load_zero_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_load_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_load_zero_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_load_extendvnx2hivnx2qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_load_zero_extendvnx2hivnx2qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_load_extendvnx2sivnx2qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_load_zero_extendvnx2sivnx2qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_load_extendvnx2divnx2qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_load_zero_extendvnx2divnx2qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_gather_load_extendvnx2hivnx2hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_load_extendvnx2hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
static inline rtx gen_aarch64_gather_load_zero_extendvnx2hivnx2hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_load_zero_extendvnx2hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_aarch64_gather_load_extendvnx2sivnx2hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_load_zero_extendvnx2sivnx2hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_load_extendvnx2divnx2hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_load_zero_extendvnx2divnx2hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_gather_load_extendvnx2hivnx2si (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_load_extendvnx2hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
static inline rtx gen_aarch64_gather_load_zero_extendvnx2hivnx2si (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_load_zero_extendvnx2hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
static inline rtx gen_aarch64_gather_load_extendvnx2sivnx2si (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_load_extendvnx2sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
static inline rtx gen_aarch64_gather_load_zero_extendvnx2sivnx2si (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_load_zero_extendvnx2sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_aarch64_gather_load_extendvnx2divnx2si (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_load_zero_extendvnx2divnx2si (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
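/* First-faulting gather loads (LDFF1 gather), plain and extending.  */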
extern rtx gen_aarch64_ldff1_gathervnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_gathervnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_gathervnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_gathervnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_gather_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_gather_zero_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_gather_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_gather_zero_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_gather_extendvnx2divnx2qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_gather_zero_extendvnx2divnx2qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_gather_extendvnx2divnx2hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_gather_zero_extendvnx2divnx2hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_gather_extendvnx2divnx2si (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ldff1_gather_zero_extendvnx2divnx2si (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
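/* SVE prefetches, contiguous (presumably PRFB/PRFH/PRFW/PRFD) and
   gather forms.  */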
extern rtx gen_aarch64_sve_prefetchvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_prefetchvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_prefetchvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_prefetchvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_gather_prefetchvnx16qivnx4si (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_gather_prefetchvnx8hivnx4si (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_gather_prefetchvnx4sivnx4si (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_gather_prefetchvnx2divnx4si (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_gather_prefetchvnx16qivnx2di (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_gather_prefetchvnx8hivnx2di (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_gather_prefetchvnx4sivnx2di (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_gather_prefetchvnx2divnx2di (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
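/* Predicated contiguous stores (ST1).  */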
extern rtx gen_maskstorevnx16qivnx16bi (rtx, rtx, rtx);
extern rtx gen_maskstorevnx8qivnx8bi (rtx, rtx, rtx);
extern rtx gen_maskstorevnx4qivnx4bi (rtx, rtx, rtx);
extern rtx gen_maskstorevnx2qivnx2bi (rtx, rtx, rtx);
extern rtx gen_maskstorevnx8hivnx8bi (rtx, rtx, rtx);
extern rtx gen_maskstorevnx4hivnx4bi (rtx, rtx, rtx);
extern rtx gen_maskstorevnx2hivnx2bi (rtx, rtx, rtx);
extern rtx gen_maskstorevnx8hfvnx8bi (rtx, rtx, rtx);
extern rtx gen_maskstorevnx4hfvnx4bi (rtx, rtx, rtx);
extern rtx gen_maskstorevnx2hfvnx2bi (rtx, rtx, rtx);
extern rtx gen_maskstorevnx8bfvnx8bi (rtx, rtx, rtx);
extern rtx gen_maskstorevnx4sivnx4bi (rtx, rtx, rtx);
extern rtx gen_maskstorevnx2sivnx2bi (rtx, rtx, rtx);
extern rtx gen_maskstorevnx4sfvnx4bi (rtx, rtx, rtx);
extern rtx gen_maskstorevnx2sfvnx2bi (rtx, rtx, rtx);
extern rtx gen_maskstorevnx2divnx2bi (rtx, rtx, rtx);
extern rtx gen_maskstorevnx2dfvnx2bi (rtx, rtx, rtx);
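/* Predicated structure stores of two, three and four vectors
   (ST2/ST3/ST4).  */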
extern rtx gen_vec_mask_store_lanesvnx32qivnx16qi (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx16hivnx8hi (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx8sivnx4si (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx4divnx2di (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx16bfvnx8bf (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx16hfvnx8hf (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx8sfvnx4sf (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx4dfvnx2df (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx48qivnx16qi (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx24hivnx8hi (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx12sivnx4si (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx6divnx2di (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx24bfvnx8bf (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx24hfvnx8hf (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx12sfvnx4sf (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx6dfvnx2df (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx64qivnx16qi (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx32hivnx8hi (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx16sivnx4si (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx8divnx2di (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx32bfvnx8bf (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx32hfvnx8hf (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx16sfvnx4sf (rtx, rtx, rtx);
extern rtx gen_vec_mask_store_lanesvnx8dfvnx2df (rtx, rtx, rtx);
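/* Stores that truncate a wider element to a narrower one.  */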
extern rtx gen_aarch64_store_truncvnx8qivnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_store_truncvnx4qivnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_store_truncvnx4hivnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_store_truncvnx2qivnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_store_truncvnx2hivnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_store_truncvnx2sivnx2di (rtx, rtx, rtx);
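/* Non-temporal contiguous stores (STNT1).  */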
extern rtx gen_aarch64_stnt1vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_stnt1vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_stnt1vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_stnt1vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_stnt1vnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_stnt1vnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_stnt1vnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_stnt1vnx2df (rtx, rtx, rtx);
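/* Scatter stores, including truncating forms.  */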
extern rtx gen_mask_scatter_storevnx4qivnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_scatter_storevnx4hivnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_scatter_storevnx4hfvnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_scatter_storevnx4sivnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_scatter_storevnx4sfvnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_scatter_storevnx2qivnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_scatter_storevnx2hivnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_scatter_storevnx2hfvnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_scatter_storevnx2sivnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_scatter_storevnx2sfvnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_scatter_storevnx2divnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mask_scatter_storevnx2dfvnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_scatter_store_truncvnx4qivnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_scatter_store_truncvnx4hivnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_scatter_store_truncvnx2qivnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_scatter_store_truncvnx2hivnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_scatter_store_truncvnx2sivnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
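/* Duplication of a 128-bit quadword across a vector, in little-endian
   (_le) and big-endian (_be) variants.  */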
extern rtx gen_aarch64_vec_duplicate_vqvnx16qi_le (rtx, rtx);
extern rtx gen_aarch64_vec_duplicate_vqvnx8hi_le (rtx, rtx);
extern rtx gen_aarch64_vec_duplicate_vqvnx4si_le (rtx, rtx);
extern rtx gen_aarch64_vec_duplicate_vqvnx2di_le (rtx, rtx);
extern rtx gen_aarch64_vec_duplicate_vqvnx8bf_le (rtx, rtx);
extern rtx gen_aarch64_vec_duplicate_vqvnx8hf_le (rtx, rtx);
extern rtx gen_aarch64_vec_duplicate_vqvnx4sf_le (rtx, rtx);
extern rtx gen_aarch64_vec_duplicate_vqvnx2df_le (rtx, rtx);
extern rtx gen_aarch64_vec_duplicate_vqvnx16qi_be (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_duplicate_vqvnx8hi_be (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_duplicate_vqvnx4si_be (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_duplicate_vqvnx2di_be (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_duplicate_vqvnx8bf_be (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_duplicate_vqvnx8hf_be (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_duplicate_vqvnx4sf_be (rtx, rtx, rtx);
extern rtx gen_aarch64_vec_duplicate_vqvnx2df_be (rtx, rtx, rtx);
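/* Load-and-replicate: LD1R (scalar), LD1RQ (128-bit quadword) and
   LD1RO (256-bit octaword).  */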
extern rtx gen_sve_ld1rvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_sve_ld1rvnx8qi (rtx, rtx, rtx, rtx);
extern rtx gen_sve_ld1rvnx4qi (rtx, rtx, rtx, rtx);
extern rtx gen_sve_ld1rvnx2qi (rtx, rtx, rtx, rtx);
extern rtx gen_sve_ld1rvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_sve_ld1rvnx4hi (rtx, rtx, rtx, rtx);
extern rtx gen_sve_ld1rvnx2hi (rtx, rtx, rtx, rtx);
extern rtx gen_sve_ld1rvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_sve_ld1rvnx4hf (rtx, rtx, rtx, rtx);
extern rtx gen_sve_ld1rvnx2hf (rtx, rtx, rtx, rtx);
extern rtx gen_sve_ld1rvnx8bf (rtx, rtx, rtx, rtx);
extern rtx gen_sve_ld1rvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_sve_ld1rvnx2si (rtx, rtx, rtx, rtx);
extern rtx gen_sve_ld1rvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_sve_ld1rvnx2sf (rtx, rtx, rtx, rtx);
extern rtx gen_sve_ld1rvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_sve_ld1rvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ld1rqvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ld1rqvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ld1rqvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ld1rqvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ld1rqvnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ld1rqvnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ld1rqvnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ld1rqvnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ld1rovnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ld1rovnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ld1rovnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ld1rovnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ld1rovnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ld1rovnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ld1rovnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ld1rovnx2df (rtx, rtx, rtx);
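/* INSR (shift a scalar into a vector) and INDEX (linear series).  */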
extern rtx gen_vec_shl_insert_vnx16qi (rtx, rtx, rtx);
extern rtx gen_vec_shl_insert_vnx8hi (rtx, rtx, rtx);
extern rtx gen_vec_shl_insert_vnx4si (rtx, rtx, rtx);
extern rtx gen_vec_shl_insert_vnx2di (rtx, rtx, rtx);
extern rtx gen_vec_shl_insert_vnx8bf (rtx, rtx, rtx);
extern rtx gen_vec_shl_insert_vnx8hf (rtx, rtx, rtx);
extern rtx gen_vec_shl_insert_vnx4sf (rtx, rtx, rtx);
extern rtx gen_vec_shl_insert_vnx2df (rtx, rtx, rtx);
extern rtx gen_vec_seriesvnx16qi (rtx, rtx, rtx);
extern rtx gen_vec_seriesvnx8qi (rtx, rtx, rtx);
extern rtx gen_vec_seriesvnx4qi (rtx, rtx, rtx);
extern rtx gen_vec_seriesvnx2qi (rtx, rtx, rtx);
extern rtx gen_vec_seriesvnx8hi (rtx, rtx, rtx);
extern rtx gen_vec_seriesvnx4hi (rtx, rtx, rtx);
extern rtx gen_vec_seriesvnx2hi (rtx, rtx, rtx);
extern rtx gen_vec_seriesvnx4si (rtx, rtx, rtx);
extern rtx gen_vec_seriesvnx2si (rtx, rtx, rtx);
extern rtx gen_vec_seriesvnx2di (rtx, rtx, rtx);
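/* Extraction of the last active element (LASTB) and of the element
   after it (LASTA).  */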
extern rtx gen_extract_after_last_vnx16qi (rtx, rtx, rtx);
extern rtx gen_extract_last_vnx16qi (rtx, rtx, rtx);
extern rtx gen_extract_after_last_vnx8hi (rtx, rtx, rtx);
extern rtx gen_extract_last_vnx8hi (rtx, rtx, rtx);
extern rtx gen_extract_after_last_vnx4si (rtx, rtx, rtx);
extern rtx gen_extract_last_vnx4si (rtx, rtx, rtx);
extern rtx gen_extract_after_last_vnx2di (rtx, rtx, rtx);
extern rtx gen_extract_last_vnx2di (rtx, rtx, rtx);
extern rtx gen_extract_after_last_vnx8bf (rtx, rtx, rtx);
extern rtx gen_extract_last_vnx8bf (rtx, rtx, rtx);
extern rtx gen_extract_after_last_vnx8hf (rtx, rtx, rtx);
extern rtx gen_extract_last_vnx8hf (rtx, rtx, rtx);
extern rtx gen_extract_after_last_vnx4sf (rtx, rtx, rtx);
extern rtx gen_extract_last_vnx4sf (rtx, rtx, rtx);
extern rtx gen_extract_after_last_vnx2df (rtx, rtx, rtx);
extern rtx gen_extract_last_vnx2df (rtx, rtx, rtx);
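/* Predicated integer unary operations: ABS, NEG, NOT, CLS, CLZ, CNT,
   SQABS and SQNEG.  */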
extern rtx gen_aarch64_pred_absvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_negvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_one_cmplvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_clrsbvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_clzvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_popcountvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_qabsvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_qnegvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_absvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_negvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_one_cmplvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_clrsbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_clzvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_popcountvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_qabsvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_qnegvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_absvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_negvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_one_cmplvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_clrsbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_clzvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_popcountvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_qabsvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_qnegvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_absvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_negvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_one_cmplvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_clrsbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_clzvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_popcountvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_qabsvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_qnegvnx2di (rtx, rtx, rtx);
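/* Bit and byte reversal (RBIT, REVB, REVH, REVW), predicated and
   conditional; the stubs mark element sizes the instruction cannot
   operate on.  */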
extern rtx gen_aarch64_pred_rbitvnx16qi (rtx, rtx, rtx);
static inline rtx gen_aarch64_pred_revbvnx16qi (rtx, rtx, rtx);
static inline rtx
gen_aarch64_pred_revbvnx16qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
static inline rtx gen_aarch64_pred_revhvnx16qi (rtx, rtx, rtx);
static inline rtx
gen_aarch64_pred_revhvnx16qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
static inline rtx gen_aarch64_pred_revwvnx16qi (rtx, rtx, rtx);
static inline rtx
gen_aarch64_pred_revwvnx16qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_aarch64_pred_rbitvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_revbvnx8hi (rtx, rtx, rtx);
static inline rtx gen_aarch64_pred_revhvnx8hi (rtx, rtx, rtx);
static inline rtx
gen_aarch64_pred_revhvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
static inline rtx gen_aarch64_pred_revwvnx8hi (rtx, rtx, rtx);
static inline rtx
gen_aarch64_pred_revwvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_aarch64_pred_rbitvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_revbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_revhvnx4si (rtx, rtx, rtx);
static inline rtx gen_aarch64_pred_revwvnx4si (rtx, rtx, rtx);
static inline rtx
gen_aarch64_pred_revwvnx4si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_aarch64_pred_rbitvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_revbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_revhvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_revwvnx2di (rtx, rtx, rtx);
extern rtx gen_cond_rbitvnx16qi (rtx, rtx, rtx, rtx);
static inline rtx gen_cond_revbvnx16qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_cond_revbvnx16qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_cond_revhvnx16qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_cond_revhvnx16qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_cond_revwvnx16qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_cond_revwvnx16qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_cond_rbitvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_cond_revbvnx8hi (rtx, rtx, rtx, rtx);
static inline rtx gen_cond_revhvnx8hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_cond_revhvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_cond_revwvnx8hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_cond_revwvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_cond_rbitvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_cond_revbvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_cond_revhvnx4si (rtx, rtx, rtx, rtx);
static inline rtx gen_cond_revwvnx4si (rtx, rtx, rtx, rtx);
static inline rtx
gen_cond_revwvnx4si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_cond_rbitvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_cond_revbvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_cond_revhvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_cond_revwvnx2di (rtx, rtx, rtx, rtx);
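/* Predicated and conditional sign extensions (SXTB, SXTH, SXTW).  */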
extern rtx gen_aarch64_pred_sxtvnx8hivnx8qi (rtx, rtx, rtx);
static inline rtx gen_aarch64_pred_sxtvnx8hivnx4qi (rtx, rtx, rtx);
static inline rtx
gen_aarch64_pred_sxtvnx8hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
static inline rtx gen_aarch64_pred_sxtvnx8hivnx2qi (rtx, rtx, rtx);
static inline rtx
gen_aarch64_pred_sxtvnx8hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
static inline rtx gen_aarch64_pred_sxtvnx8hivnx4hi (rtx, rtx, rtx);
static inline rtx
gen_aarch64_pred_sxtvnx8hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
static inline rtx gen_aarch64_pred_sxtvnx8hivnx2hi (rtx, rtx, rtx);
static inline rtx
gen_aarch64_pred_sxtvnx8hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
static inline rtx gen_aarch64_pred_sxtvnx8hivnx2si (rtx, rtx, rtx);
static inline rtx
gen_aarch64_pred_sxtvnx8hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
static inline rtx gen_aarch64_pred_sxtvnx4sivnx8qi (rtx, rtx, rtx);
static inline rtx
gen_aarch64_pred_sxtvnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_aarch64_pred_sxtvnx4sivnx4qi (rtx, rtx, rtx);
static inline rtx gen_aarch64_pred_sxtvnx4sivnx2qi (rtx, rtx, rtx);
static inline rtx
gen_aarch64_pred_sxtvnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_aarch64_pred_sxtvnx4sivnx4hi (rtx, rtx, rtx);
static inline rtx gen_aarch64_pred_sxtvnx4sivnx2hi (rtx, rtx, rtx);
static inline rtx
gen_aarch64_pred_sxtvnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
static inline rtx gen_aarch64_pred_sxtvnx4sivnx2si (rtx, rtx, rtx);
static inline rtx
gen_aarch64_pred_sxtvnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
static inline rtx gen_aarch64_pred_sxtvnx2divnx8qi (rtx, rtx, rtx);
static inline rtx
gen_aarch64_pred_sxtvnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
static inline rtx gen_aarch64_pred_sxtvnx2divnx4qi (rtx, rtx, rtx);
static inline rtx
gen_aarch64_pred_sxtvnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_aarch64_pred_sxtvnx2divnx2qi (rtx, rtx, rtx);
static inline rtx gen_aarch64_pred_sxtvnx2divnx4hi (rtx, rtx, rtx);
static inline rtx
gen_aarch64_pred_sxtvnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_aarch64_pred_sxtvnx2divnx2hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sxtvnx2divnx2si (rtx, rtx, rtx);
extern rtx gen_aarch64_cond_sxtvnx8hivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_cond_sxtvnx8hivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_cond_sxtvnx8hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_cond_sxtvnx8hivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_cond_sxtvnx8hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_cond_sxtvnx8hivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_cond_sxtvnx8hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_cond_sxtvnx8hivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_cond_sxtvnx8hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_cond_sxtvnx8hivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_cond_sxtvnx8hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_cond_sxtvnx4sivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_cond_sxtvnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_cond_sxtvnx4sivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_cond_sxtvnx4sivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_cond_sxtvnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_cond_sxtvnx4sivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_cond_sxtvnx4sivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_cond_sxtvnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_cond_sxtvnx4sivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_cond_sxtvnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_cond_sxtvnx2divnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_cond_sxtvnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_cond_sxtvnx2divnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_cond_sxtvnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_cond_sxtvnx2divnx2qi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_cond_sxtvnx2divnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_cond_sxtvnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_cond_sxtvnx2divnx2hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cond_sxtvnx2divnx2si (rtx, rtx, rtx, rtx);
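/* Truncations between SVE integer modes.  */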
extern rtx gen_truncvnx8hivnx8qi2 (rtx, rtx);
static inline rtx gen_truncvnx8hivnx4qi2 (rtx, rtx);
static inline rtx
gen_truncvnx8hivnx4qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_truncvnx8hivnx2qi2 (rtx, rtx);
static inline rtx
gen_truncvnx8hivnx2qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_truncvnx8hivnx4hi2 (rtx, rtx);
static inline rtx
gen_truncvnx8hivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_truncvnx8hivnx2hi2 (rtx, rtx);
static inline rtx
gen_truncvnx8hivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_truncvnx8hivnx2si2 (rtx, rtx);
static inline rtx
gen_truncvnx8hivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_truncvnx4hivnx8qi2 (rtx, rtx);
static inline rtx
gen_truncvnx4hivnx8qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
extern rtx gen_truncvnx4hivnx4qi2 (rtx, rtx);
static inline rtx gen_truncvnx4hivnx2qi2 (rtx, rtx);
static inline rtx
gen_truncvnx4hivnx2qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_truncvnx4hivnx4hi2 (rtx, rtx);
static inline rtx
gen_truncvnx4hivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_truncvnx4hivnx2hi2 (rtx, rtx);
static inline rtx
gen_truncvnx4hivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_truncvnx4hivnx2si2 (rtx, rtx);
static inline rtx
gen_truncvnx4hivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_truncvnx2hivnx8qi2 (rtx, rtx);
static inline rtx
gen_truncvnx2hivnx8qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_truncvnx2hivnx4qi2 (rtx, rtx);
static inline rtx
gen_truncvnx2hivnx4qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
extern rtx gen_truncvnx2hivnx2qi2 (rtx, rtx);
static inline rtx gen_truncvnx2hivnx4hi2 (rtx, rtx);
static inline rtx
gen_truncvnx2hivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_truncvnx2hivnx2hi2 (rtx, rtx);
static inline rtx
gen_truncvnx2hivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_truncvnx2hivnx2si2 (rtx, rtx);
static inline rtx
gen_truncvnx2hivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_truncvnx4sivnx8qi2 (rtx, rtx);
static inline rtx
gen_truncvnx4sivnx8qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
extern rtx gen_truncvnx4sivnx4qi2 (rtx, rtx);
static inline rtx gen_truncvnx4sivnx2qi2 (rtx, rtx);
static inline rtx
gen_truncvnx4sivnx2qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
extern rtx gen_truncvnx4sivnx4hi2 (rtx, rtx);
static inline rtx gen_truncvnx4sivnx2hi2 (rtx, rtx);
static inline rtx
gen_truncvnx4sivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_truncvnx4sivnx2si2 (rtx, rtx);
static inline rtx
gen_truncvnx4sivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_truncvnx2sivnx8qi2 (rtx, rtx);
static inline rtx
gen_truncvnx2sivnx8qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_truncvnx2sivnx4qi2 (rtx, rtx);
static inline rtx
gen_truncvnx2sivnx4qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
extern rtx gen_truncvnx2sivnx2qi2 (rtx, rtx);
static inline rtx gen_truncvnx2sivnx4hi2 (rtx, rtx);
static inline rtx
gen_truncvnx2sivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
extern rtx gen_truncvnx2sivnx2hi2 (rtx, rtx);
static inline rtx gen_truncvnx2sivnx2si2 (rtx, rtx);
static inline rtx
gen_truncvnx2sivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_truncvnx2divnx8qi2 (rtx, rtx);
static inline rtx
gen_truncvnx2divnx8qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_truncvnx2divnx4qi2 (rtx, rtx);
static inline rtx
gen_truncvnx2divnx4qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
extern rtx gen_truncvnx2divnx2qi2 (rtx, rtx);
static inline rtx gen_truncvnx2divnx4hi2 (rtx, rtx);
static inline rtx
gen_truncvnx2divnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
extern rtx gen_truncvnx2divnx2hi2 (rtx, rtx);
extern rtx gen_truncvnx2divnx2si2 (rtx, rtx);
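/* Floating-point unary operations: FEXPA, FRECPE, FRSQRTE, FRECPX,
   and predicated abs/neg/rounding/sqrt.  */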
extern rtx gen_aarch64_sve_fexpavnx8hf (rtx, rtx);
extern rtx gen_aarch64_sve_fexpavnx4sf (rtx, rtx);
extern rtx gen_aarch64_sve_fexpavnx2df (rtx, rtx);
extern rtx gen_aarch64_sve_frecpevnx8hf (rtx, rtx);
extern rtx gen_aarch64_sve_frsqrtevnx8hf (rtx, rtx);
extern rtx gen_aarch64_sve_frecpevnx4sf (rtx, rtx);
extern rtx gen_aarch64_sve_frsqrtevnx4sf (rtx, rtx);
extern rtx gen_aarch64_sve_frecpevnx2df (rtx, rtx);
extern rtx gen_aarch64_sve_frsqrtevnx2df (rtx, rtx);
extern rtx gen_aarch64_pred_absvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_negvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_frecpxvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_roundvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_nearbyintvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_floorvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_frintnvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_ceilvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_rintvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_btruncvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sqrtvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_absvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_negvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_frecpxvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_roundvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_nearbyintvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_floorvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_frintnvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_ceilvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_rintvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_btruncvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sqrtvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_absvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_negvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_frecpxvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_roundvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_nearbyintvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_floorvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_frintnvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_ceilvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_rintvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_btruncvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sqrtvnx2df (rtx, rtx, rtx, rtx);
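/* Predicated integer multiply and min/max.  */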
extern rtx gen_aarch64_pred_mulvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smaxvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sminvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_umaxvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uminvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_mulvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smaxvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sminvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_umaxvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uminvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_mulvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smaxvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sminvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_umaxvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uminvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_mulvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smaxvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sminvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_umaxvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uminvnx2di (rtx, rtx, rtx, rtx);
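/* Integer arithmetic: add/sub, ADR address calculations, absolute
   difference, saturating add/sub, high-part multiply, division,
   bitwise logic and shifts.  */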
extern rtx gen_addvnx16qi3 (rtx, rtx, rtx);
extern rtx gen_addvnx8qi3 (rtx, rtx, rtx);
extern rtx gen_addvnx4qi3 (rtx, rtx, rtx);
extern rtx gen_addvnx2qi3 (rtx, rtx, rtx);
extern rtx gen_addvnx8hi3 (rtx, rtx, rtx);
extern rtx gen_addvnx4hi3 (rtx, rtx, rtx);
extern rtx gen_addvnx2hi3 (rtx, rtx, rtx);
extern rtx gen_addvnx4si3 (rtx, rtx, rtx);
extern rtx gen_addvnx2si3 (rtx, rtx, rtx);
extern rtx gen_addvnx2di3 (rtx, rtx, rtx);
extern rtx gen_subvnx16qi3 (rtx, rtx, rtx);
extern rtx gen_subvnx8hi3 (rtx, rtx, rtx);
extern rtx gen_subvnx4si3 (rtx, rtx, rtx);
extern rtx gen_subvnx2di3 (rtx, rtx, rtx);
extern rtx gen_aarch64_adrvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_adrvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sabdvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uabdvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sabdvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uabdvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sabdvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uabdvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sabdvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uabdvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssaddvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sssubvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssaddvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sssubvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssaddvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sssubvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssaddvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sssubvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_usaddvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ussubvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_usaddvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ussubvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_usaddvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ussubvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_usaddvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ussubvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smulhvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_umulhvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smulhvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_umulhvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smulhvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_umulhvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smulhvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_umulhvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_divvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_udivvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_divvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_udivvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_andvnx16qi3 (rtx, rtx, rtx);
extern rtx gen_iorvnx16qi3 (rtx, rtx, rtx);
extern rtx gen_xorvnx16qi3 (rtx, rtx, rtx);
extern rtx gen_andvnx8hi3 (rtx, rtx, rtx);
extern rtx gen_iorvnx8hi3 (rtx, rtx, rtx);
extern rtx gen_xorvnx8hi3 (rtx, rtx, rtx);
extern rtx gen_andvnx4si3 (rtx, rtx, rtx);
extern rtx gen_iorvnx4si3 (rtx, rtx, rtx);
extern rtx gen_xorvnx4si3 (rtx, rtx, rtx);
extern rtx gen_andvnx2di3 (rtx, rtx, rtx);
extern rtx gen_iorvnx2di3 (rtx, rtx, rtx);
extern rtx gen_xorvnx2di3 (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_ashlvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_ashrvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_lshrvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_ashlvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_ashrvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_lshrvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_ashlvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_ashrvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_lshrvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_ashlvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_ashrvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_lshrvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_lslvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_asrvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_lsrvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_lslvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_asrvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_lsrvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_lslvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_asrvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_lsrvnx4si (rtx, rtx, rtx);
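/* Floating-point binary operations: FTSMUL/FTSSEL, FSCALE,
   FRECPS/FRSQRTS, and predicated division, FMULX, add/sub, FCADD,
   multiply (including lane forms) and min/max.  */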
extern rtx gen_aarch64_sve_ftsmulvnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ftsselvnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ftsmulvnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ftsselvnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ftsmulvnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ftsselvnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fscalevnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fscalevnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fscalevnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_frecpsvnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_frsqrtsvnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_frecpsvnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_frsqrtsvnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_frecpsvnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_frsqrtsvnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_divvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_mulxvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_divvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_mulxvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_divvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_mulxvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_addvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_addvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_addvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cadd90vnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cadd270vnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cadd90vnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cadd270vnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cadd90vnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cadd270vnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_subvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_subvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_subvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_mulvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_mulvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_mulvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_mul_lane_vnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_mul_lane_vnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_mul_lane_vnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smax_nanvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smaxvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smin_nanvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sminvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smax_nanvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smaxvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smin_nanvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sminvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smax_nanvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smaxvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smin_nanvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sminvnx2df (rtx, rtx, rtx, rtx, rtx);
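/* Logical operations on predicates, including zeroing (_z) forms.  */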
extern rtx gen_andvnx16bi3 (rtx, rtx, rtx);
extern rtx gen_andvnx8bi3 (rtx, rtx, rtx);
extern rtx gen_andvnx4bi3 (rtx, rtx, rtx);
extern rtx gen_andvnx2bi3 (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_andvnx16bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_iorvnx16bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_xorvnx16bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_andvnx8bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_iorvnx8bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_xorvnx8bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_andvnx4bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_iorvnx4bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_xorvnx4bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_andvnx2bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_iorvnx2bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_xorvnx2bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_bicvnx16bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_ornvnx16bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_bicvnx8bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_ornvnx8bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_bicvnx4bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_ornvnx4bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_bicvnx2bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_ornvnx2bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_norvnx16bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_nandvnx16bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_norvnx8bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_nandvnx8bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_norvnx4bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_nandvnx4bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_norvnx2bi_z (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_nandvnx2bi_z (rtx, rtx, rtx, rtx);
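/* Integer multiply-accumulate, dot products (SDOT/UDOT/USDOT) and
   8-bit integer matrix multiplication.  */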
extern rtx gen_aarch64_pred_fmavnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fmavnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fmavnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fmavnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fnmavnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fnmavnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fnmavnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fnmavnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sdot_prodvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_udot_prodvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_sdot_prodvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_udot_prodvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sdot_prod_lanevnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_udot_prod_lanevnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sdot_prod_lanevnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_udot_prod_lanevnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usdot_prodvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_usdot_prod_lanevnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sudot_prod_lanevnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_smatmulvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_umatmulvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_usmatmulvnx16qi (rtx, rtx, rtx, rtx);
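/* Floating-point multiply-accumulate (FMLA/FMLS/FNMLA/FNMLS), complex
   FCMLA, FTMAD, the BF16 operations and FMMLA, with lane variants.  */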
extern rtx gen_aarch64_pred_fmavnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fnmavnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fnmsvnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fmsvnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fmavnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fnmavnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fnmsvnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fmsvnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fmavnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fnmavnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fnmsvnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fmsvnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fma_lane_vnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fnma_lane_vnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fma_lane_vnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fnma_lane_vnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fma_lane_vnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fnma_lane_vnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmlavnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmla90vnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmla180vnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmla270vnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmlavnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmla90vnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmla180vnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmla270vnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmlavnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmla90vnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmla180vnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmla270vnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane_vnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla90_lane_vnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla180_lane_vnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla270_lane_vnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla_lane_vnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla90_lane_vnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla180_lane_vnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fcmla270_lane_vnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_tmadvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_tmadvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_tmadvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_bfdotvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_bfmlalbvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_bfmlaltvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_bfmmlavnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_bfdot_lanevnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_bfmlalb_lanevnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_bfmlalt_lanevnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fmmlavnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fmmlavnx2df (rtx, rtx, rtx, rtx);
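/* Vector selects in which one operand is a duplicated scalar (SEL).  */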
extern rtx gen_aarch64_sel_dupvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sel_dupvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sel_dupvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sel_dupvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sel_dupvnx8bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sel_dupvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sel_dupvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sel_dupvnx2df (rtx, rtx, rtx, rtx);
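/* Predicated integer comparisons, including the _wide forms that
   compare against 64-bit elements.  */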
extern rtx gen_aarch64_pred_cmpltvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplevnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpeqvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpnevnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpgevnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpgtvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplovnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplsvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmphsvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmphivnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpltvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplevnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpeqvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpnevnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpgevnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpgtvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplovnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplsvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmphsvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmphivnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpltvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplevnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpeqvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpnevnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpgevnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpgtvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplovnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplsvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmphsvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmphivnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpltvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplevnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpeqvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpnevnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpgevnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpgtvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplovnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplsvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmphsvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmphivnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpeqvnx16qi_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpgevnx16qi_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpgtvnx16qi_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmphivnx16qi_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmphsvnx16qi_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplevnx16qi_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplovnx16qi_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplsvnx16qi_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpltvnx16qi_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpnevnx16qi_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpeqvnx8hi_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpgevnx8hi_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpgtvnx8hi_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmphivnx8hi_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmphsvnx8hi_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplevnx8hi_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplovnx8hi_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplsvnx8hi_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpltvnx8hi_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpnevnx8hi_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpeqvnx4si_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpgevnx4si_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpgtvnx4si_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmphivnx4si_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmphsvnx4si_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplevnx4si_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplovnx4si_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmplsvnx4si_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpltvnx4si_wide (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cmpnevnx4si_wide (rtx, rtx, rtx, rtx, rtx);
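/* WHILE* predicate initializers (WHILELE, WHILELT, and so on; the rw/wr
   names are the pointer-conflict forms WHILERW/WHILEWR), with si/di
   marking 32- and 64-bit scalar operands.  The _ptest variants also
   set the condition flags.  */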
extern rtx gen_while_lesivnx16bi (rtx, rtx, rtx);
extern rtx gen_while_ultsivnx16bi (rtx, rtx, rtx);
extern rtx gen_while_ulesivnx16bi (rtx, rtx, rtx);
extern rtx gen_while_ltsivnx16bi (rtx, rtx, rtx);
extern rtx gen_while_gesivnx16bi (rtx, rtx, rtx);
extern rtx gen_while_gtsivnx16bi (rtx, rtx, rtx);
extern rtx gen_while_ugtsivnx16bi (rtx, rtx, rtx);
extern rtx gen_while_ugesivnx16bi (rtx, rtx, rtx);
extern rtx gen_while_rwsivnx16bi (rtx, rtx, rtx);
extern rtx gen_while_wrsivnx16bi (rtx, rtx, rtx);
extern rtx gen_while_lesivnx8bi (rtx, rtx, rtx);
extern rtx gen_while_ultsivnx8bi (rtx, rtx, rtx);
extern rtx gen_while_ulesivnx8bi (rtx, rtx, rtx);
extern rtx gen_while_ltsivnx8bi (rtx, rtx, rtx);
extern rtx gen_while_gesivnx8bi (rtx, rtx, rtx);
extern rtx gen_while_gtsivnx8bi (rtx, rtx, rtx);
extern rtx gen_while_ugtsivnx8bi (rtx, rtx, rtx);
extern rtx gen_while_ugesivnx8bi (rtx, rtx, rtx);
extern rtx gen_while_rwsivnx8bi (rtx, rtx, rtx);
extern rtx gen_while_wrsivnx8bi (rtx, rtx, rtx);
extern rtx gen_while_lesivnx4bi (rtx, rtx, rtx);
extern rtx gen_while_ultsivnx4bi (rtx, rtx, rtx);
extern rtx gen_while_ulesivnx4bi (rtx, rtx, rtx);
extern rtx gen_while_ltsivnx4bi (rtx, rtx, rtx);
extern rtx gen_while_gesivnx4bi (rtx, rtx, rtx);
extern rtx gen_while_gtsivnx4bi (rtx, rtx, rtx);
extern rtx gen_while_ugtsivnx4bi (rtx, rtx, rtx);
extern rtx gen_while_ugesivnx4bi (rtx, rtx, rtx);
extern rtx gen_while_rwsivnx4bi (rtx, rtx, rtx);
extern rtx gen_while_wrsivnx4bi (rtx, rtx, rtx);
extern rtx gen_while_lesivnx2bi (rtx, rtx, rtx);
extern rtx gen_while_ultsivnx2bi (rtx, rtx, rtx);
extern rtx gen_while_ulesivnx2bi (rtx, rtx, rtx);
extern rtx gen_while_ltsivnx2bi (rtx, rtx, rtx);
extern rtx gen_while_gesivnx2bi (rtx, rtx, rtx);
extern rtx gen_while_gtsivnx2bi (rtx, rtx, rtx);
extern rtx gen_while_ugtsivnx2bi (rtx, rtx, rtx);
extern rtx gen_while_ugesivnx2bi (rtx, rtx, rtx);
extern rtx gen_while_rwsivnx2bi (rtx, rtx, rtx);
extern rtx gen_while_wrsivnx2bi (rtx, rtx, rtx);
extern rtx gen_while_ledivnx16bi (rtx, rtx, rtx);
extern rtx gen_while_ultdivnx16bi (rtx, rtx, rtx);
extern rtx gen_while_uledivnx16bi (rtx, rtx, rtx);
extern rtx gen_while_ltdivnx16bi (rtx, rtx, rtx);
extern rtx gen_while_gedivnx16bi (rtx, rtx, rtx);
extern rtx gen_while_gtdivnx16bi (rtx, rtx, rtx);
extern rtx gen_while_ugtdivnx16bi (rtx, rtx, rtx);
extern rtx gen_while_ugedivnx16bi (rtx, rtx, rtx);
extern rtx gen_while_rwdivnx16bi (rtx, rtx, rtx);
extern rtx gen_while_wrdivnx16bi (rtx, rtx, rtx);
extern rtx gen_while_ledivnx8bi (rtx, rtx, rtx);
extern rtx gen_while_ultdivnx8bi (rtx, rtx, rtx);
extern rtx gen_while_uledivnx8bi (rtx, rtx, rtx);
extern rtx gen_while_ltdivnx8bi (rtx, rtx, rtx);
extern rtx gen_while_gedivnx8bi (rtx, rtx, rtx);
extern rtx gen_while_gtdivnx8bi (rtx, rtx, rtx);
extern rtx gen_while_ugtdivnx8bi (rtx, rtx, rtx);
extern rtx gen_while_ugedivnx8bi (rtx, rtx, rtx);
extern rtx gen_while_rwdivnx8bi (rtx, rtx, rtx);
extern rtx gen_while_wrdivnx8bi (rtx, rtx, rtx);
extern rtx gen_while_ledivnx4bi (rtx, rtx, rtx);
extern rtx gen_while_ultdivnx4bi (rtx, rtx, rtx);
extern rtx gen_while_uledivnx4bi (rtx, rtx, rtx);
extern rtx gen_while_ltdivnx4bi (rtx, rtx, rtx);
extern rtx gen_while_gedivnx4bi (rtx, rtx, rtx);
extern rtx gen_while_gtdivnx4bi (rtx, rtx, rtx);
extern rtx gen_while_ugtdivnx4bi (rtx, rtx, rtx);
extern rtx gen_while_ugedivnx4bi (rtx, rtx, rtx);
extern rtx gen_while_rwdivnx4bi (rtx, rtx, rtx);
extern rtx gen_while_wrdivnx4bi (rtx, rtx, rtx);
extern rtx gen_while_ledivnx2bi (rtx, rtx, rtx);
extern rtx gen_while_ultdivnx2bi (rtx, rtx, rtx);
extern rtx gen_while_uledivnx2bi (rtx, rtx, rtx);
extern rtx gen_while_ltdivnx2bi (rtx, rtx, rtx);
extern rtx gen_while_gedivnx2bi (rtx, rtx, rtx);
extern rtx gen_while_gtdivnx2bi (rtx, rtx, rtx);
extern rtx gen_while_ugtdivnx2bi (rtx, rtx, rtx);
extern rtx gen_while_ugedivnx2bi (rtx, rtx, rtx);
extern rtx gen_while_rwdivnx2bi (rtx, rtx, rtx);
extern rtx gen_while_wrdivnx2bi (rtx, rtx, rtx);
extern rtx gen_while_lesivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ultsivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ulesivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ltsivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_gesivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_gtsivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ugtsivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ugesivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_rwsivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_wrsivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_lesivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ultsivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ulesivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ltsivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_gesivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_gtsivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ugtsivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ugesivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_rwsivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_wrsivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_lesivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ultsivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ulesivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ltsivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_gesivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_gtsivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ugtsivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ugesivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_rwsivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_wrsivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_lesivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ultsivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ulesivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ltsivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_gesivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_gtsivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ugtsivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ugesivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_rwsivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_wrsivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ledivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ultdivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_uledivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ltdivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_gedivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_gtdivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ugtdivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ugedivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_rwdivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_wrdivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ledivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ultdivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_uledivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ltdivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_gedivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_gtdivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ugtdivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ugedivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_rwdivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_wrdivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ledivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ultdivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_uledivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ltdivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_gedivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_gtdivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ugtdivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ugedivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_rwdivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_wrdivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ledivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ultdivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_uledivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ltdivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_gedivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_gtdivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ugtdivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_ugedivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_rwdivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_while_wrdivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmeqvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmgevnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmgtvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmlevnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmltvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmnevnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmeqvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmgevnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmgtvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmlevnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmltvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmnevnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmeqvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmgevnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmgtvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmlevnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmltvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmnevnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmuovnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmuovnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcmuovnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_vnx16bivnx16bi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_vnx8bivnx8bi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_vnx4bivnx4bi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_vnx2bivnx2bi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ptestvnx16bi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ptestvnx8bi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ptestvnx4bi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ptestvnx2bi (rtx, rtx, rtx, rtx);
extern rtx gen_fold_extract_after_last_vnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_fold_extract_last_vnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_fold_extract_after_last_vnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_fold_extract_last_vnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_fold_extract_after_last_vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_fold_extract_last_vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_fold_extract_after_last_vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_fold_extract_last_vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_fold_extract_after_last_vnx8bf (rtx, rtx, rtx, rtx);
extern rtx gen_fold_extract_last_vnx8bf (rtx, rtx, rtx, rtx);
extern rtx gen_fold_extract_after_last_vnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_fold_extract_last_vnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_fold_extract_after_last_vnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_fold_extract_last_vnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_fold_extract_after_last_vnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_fold_extract_last_vnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fold_extract_vector_after_last_vnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fold_extract_vector_last_vnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fold_extract_vector_after_last_vnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fold_extract_vector_last_vnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fold_extract_vector_after_last_vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fold_extract_vector_last_vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fold_extract_vector_after_last_vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fold_extract_vector_last_vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fold_extract_vector_after_last_vnx8bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fold_extract_vector_last_vnx8bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fold_extract_vector_after_last_vnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fold_extract_vector_last_vnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fold_extract_vector_after_last_vnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fold_extract_vector_last_vnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fold_extract_vector_after_last_vnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fold_extract_vector_last_vnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_sadd_vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_uadd_vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_sadd_vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_uadd_vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_sadd_vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_uadd_vnx4si (rtx, rtx, rtx);
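/* Patterns whose condition is false for every configuration are not
   emitted as real expanders; the generator instead provides a dummy
   inline definition returning NULL_RTX, so callers guarded by the
   corresponding HAVE_* macro still compile.  The stub below presumably
   exists because SVE's SADDV widens its result to 64 bits and so has
   no .D form; only the unsigned UADDV reduction covers VNx2DI.  */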
static inline rtx gen_aarch64_pred_reduc_sadd_vnx2di (rtx, rtx, rtx);
static inline rtx
gen_aarch64_pred_reduc_sadd_vnx2di(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_aarch64_pred_reduc_uadd_vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_and_vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_ior_vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_smax_vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_smin_vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_umax_vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_umin_vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_xor_vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_and_vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_ior_vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_smax_vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_smin_vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_umax_vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_umin_vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_xor_vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_and_vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_ior_vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_smax_vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_smin_vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_umax_vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_umin_vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_xor_vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_and_vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_ior_vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_smax_vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_smin_vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_umax_vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_umin_vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_xor_vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_plus_vnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_smax_nan_vnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_smax_vnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_smin_nan_vnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_smin_vnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_plus_vnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_smax_nan_vnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_smax_vnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_smin_nan_vnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_smin_vnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_plus_vnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_smax_nan_vnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_smax_vnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_smin_nan_vnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_reduc_smin_vnx2df (rtx, rtx, rtx);
extern rtx gen_mask_fold_left_plus_vnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_mask_fold_left_plus_vnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_mask_fold_left_plus_vnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_tblvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_tblvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_tblvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_tblvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_tblvnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_tblvnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_tblvnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_tblvnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_compactvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_compactvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_compactvnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_compactvnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_dup_lanevnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_dup_lanevnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_dup_lanevnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_dup_lanevnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_dup_lanevnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_dup_lanevnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_dup_lanevnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_dup_lanevnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_dupq_lanevnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_dupq_lanevnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_dupq_lanevnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_dupq_lanevnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_dupq_lanevnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_dupq_lanevnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_dupq_lanevnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_dupq_lanevnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_revvnx16qi (rtx, rtx);
extern rtx gen_aarch64_sve_revvnx8hi (rtx, rtx);
extern rtx gen_aarch64_sve_revvnx4si (rtx, rtx);
extern rtx gen_aarch64_sve_revvnx2di (rtx, rtx);
extern rtx gen_aarch64_sve_revvnx8bf (rtx, rtx);
extern rtx gen_aarch64_sve_revvnx8hf (rtx, rtx);
extern rtx gen_aarch64_sve_revvnx4sf (rtx, rtx);
extern rtx gen_aarch64_sve_revvnx2df (rtx, rtx);
extern rtx gen_aarch64_sve_splicevnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_splicevnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_splicevnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_splicevnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_splicevnx8bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_splicevnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_splicevnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_splicevnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip1vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip2vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn1vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn2vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp1vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp2vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip1vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip2vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn1vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn2vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp1vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp2vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip1vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip2vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn1vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn2vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp1vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp2vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip1vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip2vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn1vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn2vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp1vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp2vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip1vnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip2vnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn1vnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn2vnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp1vnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp2vnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip1vnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip2vnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn1vnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn2vnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp1vnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp2vnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip1vnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip2vnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn1vnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn2vnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp1vnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp2vnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip1vnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip2vnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn1vnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn2vnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp1vnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp2vnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip1qvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip2qvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn1qvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn2qvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp1qvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp2qvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip1qvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip2qvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn1qvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn2qvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp1qvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp2qvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip1qvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip2qvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn1qvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn2qvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp1qvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp2qvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip1qvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip2qvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn1qvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn2qvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp1qvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp2qvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip1qvnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip2qvnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn1qvnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn2qvnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp1qvnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp2qvnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip1qvnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip2qvnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn1qvnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn2qvnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp1qvnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp2qvnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip1qvnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip2qvnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn1qvnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn2qvnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp1qvnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp2qvnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip1qvnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip2qvnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn1qvnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn2qvnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp1qvnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp2qvnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_extvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_extvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_extvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_extvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_extvnx8bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_extvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_extvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_extvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_revvnx16bi (rtx, rtx);
extern rtx gen_aarch64_sve_revvnx8bi (rtx, rtx);
extern rtx gen_aarch64_sve_revvnx4bi (rtx, rtx);
extern rtx gen_aarch64_sve_revvnx2bi (rtx, rtx);
extern rtx gen_aarch64_sve_zip1vnx16bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip2vnx16bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn1vnx16bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn2vnx16bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp1vnx16bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp2vnx16bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip1vnx8bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip2vnx8bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn1vnx8bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn2vnx8bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp1vnx8bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp2vnx8bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip1vnx4bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip2vnx4bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn1vnx4bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn2vnx4bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp1vnx4bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp2vnx4bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip1vnx2bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_zip2vnx2bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn1vnx2bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_trn2vnx2bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp1vnx2bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uzp2vnx2bi (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_vnx8hi (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_vnx4si (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sunpkhi_vnx16qi (rtx, rtx);
extern rtx gen_aarch64_sve_uunpkhi_vnx16qi (rtx, rtx);
extern rtx gen_aarch64_sve_sunpklo_vnx16qi (rtx, rtx);
extern rtx gen_aarch64_sve_uunpklo_vnx16qi (rtx, rtx);
extern rtx gen_aarch64_sve_sunpkhi_vnx8hi (rtx, rtx);
extern rtx gen_aarch64_sve_uunpkhi_vnx8hi (rtx, rtx);
extern rtx gen_aarch64_sve_sunpklo_vnx8hi (rtx, rtx);
extern rtx gen_aarch64_sve_uunpklo_vnx8hi (rtx, rtx);
extern rtx gen_aarch64_sve_sunpkhi_vnx4si (rtx, rtx);
extern rtx gen_aarch64_sve_uunpkhi_vnx4si (rtx, rtx);
extern rtx gen_aarch64_sve_sunpklo_vnx4si (rtx, rtx);
extern rtx gen_aarch64_sve_uunpklo_vnx4si (rtx, rtx);
extern rtx gen_aarch64_sve_fix_trunc_nontruncvnx8hfvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fixuns_trunc_nontruncvnx8hfvnx8hi (rtx, rtx, rtx, rtx);
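/* For the predicated FP<->integer conversions below, only mode pairs
   that can share an element container are real patterns; impossible
   pairings (e.g. VNx4SF to VNx8HI, whose element counts differ) are
   presumably unreachable and get dummy NULL_RTX stubs instead.  */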
static inline rtx gen_aarch64_sve_fix_trunc_nontruncvnx4sfvnx8hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_sve_fix_trunc_nontruncvnx4sfvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_sve_fixuns_trunc_nontruncvnx4sfvnx8hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_sve_fixuns_trunc_nontruncvnx4sfvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_sve_fix_trunc_nontruncvnx2dfvnx8hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_sve_fix_trunc_nontruncvnx2dfvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_sve_fixuns_trunc_nontruncvnx2dfvnx8hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_sve_fixuns_trunc_nontruncvnx2dfvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_sve_fix_trunc_nontruncvnx8hfvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fixuns_trunc_nontruncvnx8hfvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fix_trunc_nontruncvnx4sfvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fixuns_trunc_nontruncvnx4sfvnx4si (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_sve_fix_trunc_nontruncvnx2dfvnx4si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_sve_fix_trunc_nontruncvnx2dfvnx4si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_sve_fixuns_trunc_nontruncvnx2dfvnx4si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_sve_fixuns_trunc_nontruncvnx2dfvnx4si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_sve_fix_trunc_nontruncvnx8hfvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fixuns_trunc_nontruncvnx8hfvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fix_trunc_nontruncvnx4sfvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fixuns_trunc_nontruncvnx4sfvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fix_trunc_nontruncvnx2dfvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fixuns_trunc_nontruncvnx2dfvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fix_trunc_truncvnx2dfvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fixuns_trunc_truncvnx2dfvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_float_nonextendvnx8hivnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_floatuns_nonextendvnx8hivnx8hf (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_sve_float_nonextendvnx8hivnx4sf (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_sve_float_nonextendvnx8hivnx4sf(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_sve_floatuns_nonextendvnx8hivnx4sf (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_sve_floatuns_nonextendvnx8hivnx4sf(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_sve_float_nonextendvnx8hivnx2df (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_sve_float_nonextendvnx8hivnx2df(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_sve_floatuns_nonextendvnx8hivnx2df (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_sve_floatuns_nonextendvnx8hivnx2df(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_sve_float_nonextendvnx4sivnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_floatuns_nonextendvnx4sivnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_float_nonextendvnx4sivnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_floatuns_nonextendvnx4sivnx4sf (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_sve_float_nonextendvnx4sivnx2df (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_sve_float_nonextendvnx4sivnx2df(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_sve_floatuns_nonextendvnx4sivnx2df (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_sve_floatuns_nonextendvnx4sivnx2df(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_sve_float_nonextendvnx2divnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_floatuns_nonextendvnx2divnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_float_nonextendvnx2divnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_floatuns_nonextendvnx2divnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_float_nonextendvnx2divnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_floatuns_nonextendvnx2divnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_float_extendvnx4sivnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_floatuns_extendvnx4sivnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fcvt_truncvnx4sfvnx8hf (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_sve_fcvt_truncvnx4sfvnx4sf (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_sve_fcvt_truncvnx4sfvnx4sf(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_sve_fcvt_truncvnx2dfvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fcvt_truncvnx2dfvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fcvt_truncvnx4sfvnx8bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cvtntvnx8bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fcvt_nontruncvnx8hfvnx4sf (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_sve_fcvt_nontruncvnx4sfvnx4sf (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_sve_fcvt_nontruncvnx4sfvnx4sf(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_sve_fcvt_nontruncvnx8hfvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fcvt_nontruncvnx4sfvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_vnx8bi (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_vnx4bi (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_vnx2bi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_punpklo_vnx16bi (rtx, rtx);
extern rtx gen_aarch64_sve_punpkhi_vnx16bi (rtx, rtx);
extern rtx gen_aarch64_sve_punpklo_vnx8bi (rtx, rtx);
extern rtx gen_aarch64_sve_punpkhi_vnx8bi (rtx, rtx);
extern rtx gen_aarch64_sve_punpklo_vnx4bi (rtx, rtx);
extern rtx gen_aarch64_sve_punpkhi_vnx4bi (rtx, rtx);
extern rtx gen_aarch64_brka (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_brkb (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_brkn (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_brkpa (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_brkpb (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_pfirstvnx16bi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_pnextvnx16bi (rtx, rtx, rtx, rtx);
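/* PFIRST only has a .B (VNx16BI) form in SVE, so the narrower
   predicate modes below are stubbed out; PNEXT, by contrast, exists
   for all four predicate element widths.  */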
static inline rtx gen_aarch64_sve_pfirstvnx8bi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_sve_pfirstvnx8bi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_sve_pnextvnx8bi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_sve_pfirstvnx4bi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_sve_pfirstvnx4bi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_sve_pnextvnx4bi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_sve_pfirstvnx2bi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_sve_pfirstvnx2bi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_sve_pnextvnx2bi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cnt_pat (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_incdi_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqincdi_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqincdi_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqincsi_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqincsi_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_incvnx2di_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqincvnx2di_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqincvnx2di_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_incvnx4si_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqincvnx4si_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqincvnx4si_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_decdi_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdecdi_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqdecdi_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdecsi_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqdecsi_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_decvnx2di_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdecvnx2di_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqdecvnx2di_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_decvnx4si_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdecvnx4si_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqdecvnx4si_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cntpvnx16bi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cntpvnx8bi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cntpvnx4bi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cntpvnx2bi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_ldntvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_ldntvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_ldntvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_ldntvnx2df (rtx, rtx, rtx, rtx);
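/* Extending non-temporal gather loads: judging by which variants are
   real below, only pairings where the payload mode has the same number
   of elements as the vector container (e.g. VNx4QI with VNx4SI) are
   supported; the mismatched pairs are dummy stubs.  */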
static inline rtx gen_aarch64_gather_ldnt_extendvnx4sivnx8qi (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_ldnt_extendvnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
static inline rtx gen_aarch64_gather_ldnt_zero_extendvnx4sivnx8qi (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_ldnt_zero_extendvnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
static inline rtx gen_aarch64_gather_ldnt_extendvnx2divnx8qi (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_ldnt_extendvnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
static inline rtx gen_aarch64_gather_ldnt_zero_extendvnx2divnx8qi (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_ldnt_zero_extendvnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_aarch64_gather_ldnt_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_ldnt_zero_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_gather_ldnt_extendvnx2divnx4qi (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_ldnt_extendvnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
static inline rtx gen_aarch64_gather_ldnt_zero_extendvnx2divnx4qi (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_ldnt_zero_extendvnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
static inline rtx gen_aarch64_gather_ldnt_extendvnx4sivnx2qi (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_ldnt_extendvnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
static inline rtx gen_aarch64_gather_ldnt_zero_extendvnx4sivnx2qi (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_ldnt_zero_extendvnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_aarch64_gather_ldnt_extendvnx2divnx2qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_ldnt_zero_extendvnx2divnx2qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_ldnt_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_ldnt_zero_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_gather_ldnt_extendvnx2divnx4hi (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_ldnt_extendvnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
static inline rtx gen_aarch64_gather_ldnt_zero_extendvnx2divnx4hi (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_ldnt_zero_extendvnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
static inline rtx gen_aarch64_gather_ldnt_extendvnx4sivnx2hi (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_ldnt_extendvnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
static inline rtx gen_aarch64_gather_ldnt_zero_extendvnx4sivnx2hi (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_ldnt_zero_extendvnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_aarch64_gather_ldnt_extendvnx2divnx2hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_ldnt_zero_extendvnx2divnx2hi (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_gather_ldnt_extendvnx4sivnx2si (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_ldnt_extendvnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
static inline rtx gen_aarch64_gather_ldnt_zero_extendvnx4sivnx2si (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_gather_ldnt_zero_extendvnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_aarch64_gather_ldnt_extendvnx2divnx2si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_gather_ldnt_zero_extendvnx2divnx2si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_scatter_stntvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_scatter_stntvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_scatter_stntvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_scatter_stntvnx2df (rtx, rtx, rtx, rtx);
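/* Truncating non-temporal scatter stores appear to follow the same
   rule as the gather loads above: the element counts of the stored
   mode and the vector mode must match, and impossible combinations
   are stubbed out.  */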
static inline rtx gen_aarch64_scatter_stnt_vnx4sivnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_scatter_stnt_vnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_scatter_stnt_vnx2divnx8qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_scatter_stnt_vnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_scatter_stnt_vnx4sivnx4qi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_scatter_stnt_vnx2divnx4qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_scatter_stnt_vnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_scatter_stnt_vnx4sivnx2qi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_scatter_stnt_vnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_scatter_stnt_vnx2divnx2qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_scatter_stnt_vnx4sivnx4hi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_scatter_stnt_vnx2divnx4hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_scatter_stnt_vnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_aarch64_scatter_stnt_vnx4sivnx2hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_scatter_stnt_vnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_scatter_stnt_vnx2divnx2hi (rtx, rtx, rtx, rtx);
static inline rtx gen_aarch64_scatter_stnt_vnx4sivnx2si (rtx, rtx, rtx, rtx);
static inline rtx
gen_aarch64_scatter_stnt_vnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_aarch64_scatter_stnt_vnx2divnx2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_mul_lane_vnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_mul_lane_vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_mul_lane_vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_suqaddvnx16qi_const (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_suqaddvnx8hi_const (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_suqaddvnx4si_const (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_suqaddvnx2di_const (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_shaddvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_shsubvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sqrshlvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_srhaddvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_srshlvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uhaddvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uhsubvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uqrshlvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_urhaddvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_urshlvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_shaddvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_shsubvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sqrshlvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_srhaddvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_srshlvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uhaddvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uhsubvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uqrshlvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_urhaddvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_urshlvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_shaddvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_shsubvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sqrshlvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_srhaddvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_srshlvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uhaddvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uhsubvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uqrshlvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_urhaddvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_urshlvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_shaddvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_shsubvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sqrshlvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_srhaddvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_srshlvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uhaddvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uhsubvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uqrshlvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_urhaddvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_urshlvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdmulhvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmulhvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdmulhvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmulhvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdmulhvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmulhvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdmulhvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmulhvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdmulh_lane_vnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmulh_lane_vnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdmulh_lane_vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmulh_lane_vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdmulh_lane_vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmulh_lane_vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sqshlvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uqshlvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sqshlvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uqshlvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sqshlvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uqshlvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sqshlvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uqshlvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_adclbvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_adcltvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_eorbtvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_eortbvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sbclbvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sbcltvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmlahvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmlshvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_adclbvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_adcltvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_eorbtvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_eortbvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sbclbvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sbcltvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmlahvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmlshvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_adclbvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_adcltvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_eorbtvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_eortbvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sbclbvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sbcltvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmlahvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmlshvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_adclbvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_adcltvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_eorbtvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_eortbvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sbclbvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sbcltvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmlahvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmlshvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmlah_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmlsh_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmlah_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmlsh_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmlah_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdmlsh_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_mul_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_mul_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_mul_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_mul_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_mul_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_mul_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_xarvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_xarvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_xarvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_xarvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_bcaxvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_bcaxvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_bcaxvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_bcaxvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_eor3vnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_eor3vnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_eor3vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_eor3vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_srshrvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_urshrvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_srshrvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_urshrvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_srshrvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_urshrvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_srshrvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_urshrvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_slivnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_srivnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_slivnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_srivnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_slivnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_srivnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_slivnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_srivnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_saddwbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_saddwtvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssubwbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssubwtvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uaddwbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uaddwtvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_usubwbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_usubwtvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_saddwbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_saddwtvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssubwbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssubwtvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uaddwbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uaddwtvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_usubwbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_usubwtvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_saddwbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_saddwtvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssubwbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssubwtvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uaddwbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uaddwtvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_usubwbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_usubwtvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sabdlbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sabdltvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_saddlbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_saddlbtvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_saddltvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_smullbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_smulltvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdmullbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdmulltvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssublbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssublbtvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssubltvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssubltbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uabdlbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uabdltvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uaddlbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uaddltvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_umullbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_umulltvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_usublbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_usubltvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sabdlbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sabdltvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_saddlbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_saddlbtvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_saddltvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_smullbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_smulltvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdmullbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdmulltvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssublbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssublbtvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssubltvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssubltbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uabdlbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uabdltvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uaddlbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uaddltvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_umullbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_umulltvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_usublbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_usubltvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sabdlbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sabdltvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_saddlbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_saddlbtvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_saddltvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_smullbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_smulltvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdmullbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdmulltvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssublbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssublbtvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssubltvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ssubltbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uabdlbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uabdltvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uaddlbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uaddltvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_umullbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_umulltvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_usublbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_usubltvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_smullb_lane_vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_smullt_lane_vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdmullb_lane_vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdmullt_lane_vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_umullb_lane_vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_umullt_lane_vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_smullb_lane_vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_smullt_lane_vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdmullb_lane_vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdmullt_lane_vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_umullb_lane_vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_umullt_lane_vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sshllbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sshlltvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ushllbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ushlltvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sshllbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sshlltvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ushllbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ushlltvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sshllbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sshlltvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ushllbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_ushlltvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_sabdlbvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_sabdltvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_smullbvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_smulltvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_uabdlbvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_uabdltvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_umullbvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_umulltvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_sabdlbvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_sabdltvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_smullbvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_smulltvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_uabdlbvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_uabdltvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_umullbvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_umulltvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_sabdlbvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_sabdltvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_smullbvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_smulltvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_uabdlbvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_uabdltvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_umullbvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_umulltvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_smullb_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_smullt_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_umullb_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_umullt_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_smullb_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_smullt_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_umullb_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_umullt_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qadd_sqdmullbvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qadd_sqdmullbtvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qadd_sqdmulltvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qadd_sqdmullbvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qadd_sqdmullbtvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qadd_sqdmulltvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qadd_sqdmullbvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qadd_sqdmullbtvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qadd_sqdmulltvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qadd_sqdmullb_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qadd_sqdmullt_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qadd_sqdmullb_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qadd_sqdmullt_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_smullbvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_smulltvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_umullbvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_umulltvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_smullbvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_smulltvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_umullbvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_umulltvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_smullbvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_smulltvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_umullbvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_umulltvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_smullb_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_smullt_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_umullb_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_umullt_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_smullb_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_smullt_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_umullb_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sub_umullt_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qsub_sqdmullbvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qsub_sqdmullbtvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qsub_sqdmulltvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qsub_sqdmullbvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qsub_sqdmullbtvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qsub_sqdmulltvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qsub_sqdmullbvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qsub_sqdmullbtvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qsub_sqdmulltvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qsub_sqdmullb_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qsub_sqdmullt_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qsub_sqdmullb_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_qsub_sqdmullt_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
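/* SVE2 widening half-precision floating-point multiply-add/subtract
   (FMLALB/FMLALT/FMLSLB/FMLSLT), including lane forms.  */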
extern rtx gen_aarch64_sve_fmlalbvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fmlaltvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fmlslbvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_fmlsltvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlalb_lane_vnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlalt_lane_vnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlslb_lane_vnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlslt_lane_vnx4sf (rtx, rtx, rtx, rtx, rtx);
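/* SVE2 saturating extract-narrow, bottom (SQXTNB/SQXTUNB/UQXTNB) and the
   corresponding top (SQXTNT/SQXTUNT/UQXTNT) forms.  */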
extern rtx gen_aarch64_sve_sqxtnbvnx8hi (rtx, rtx);
extern rtx gen_aarch64_sve_sqxtunbvnx8hi (rtx, rtx);
extern rtx gen_aarch64_sve_uqxtnbvnx8hi (rtx, rtx);
extern rtx gen_aarch64_sve_sqxtnbvnx4si (rtx, rtx);
extern rtx gen_aarch64_sve_sqxtunbvnx4si (rtx, rtx);
extern rtx gen_aarch64_sve_uqxtnbvnx4si (rtx, rtx);
extern rtx gen_aarch64_sve_sqxtnbvnx2di (rtx, rtx);
extern rtx gen_aarch64_sve_sqxtunbvnx2di (rtx, rtx);
extern rtx gen_aarch64_sve_uqxtnbvnx2di (rtx, rtx);
extern rtx gen_aarch64_sve_sqxtntvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqxtuntvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqxtntvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqxtntvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqxtuntvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqxtntvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqxtntvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqxtuntvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqxtntvnx2di (rtx, rtx, rtx);
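/* SVE2 (rounding) add/subtract returning the narrowed high half
   (ADDHNB/RADDHNB/SUBHNB/RSUBHNB plus the top forms).  */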
extern rtx gen_aarch64_sve_addhnbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_raddhnbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_rsubhnbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_subhnbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_addhnbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_raddhnbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_rsubhnbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_subhnbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_addhnbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_raddhnbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_rsubhnbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_subhnbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_addhntvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_raddhntvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_rsubhntvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_subhntvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_addhntvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_raddhntvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_rsubhntvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_subhntvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_addhntvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_raddhntvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_rsubhntvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_subhntvnx2di (rtx, rtx, rtx, rtx);
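/* SVE2 narrowing right shifts, bottom and top forms (SHRNB/RSHRNB and the
   saturating SQSHRNB/SQRSHRNB/SQSHRUNB/SQRSHRUNB/UQSHRNB/UQRSHRNB
   variants).  */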
extern rtx gen_aarch64_sve_rshrnbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_shrnbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrshrnbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrshrunbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqshrnbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqshrunbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqrshrnbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqshrnbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_rshrnbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_shrnbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrshrnbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrshrunbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqshrnbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqshrunbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqrshrnbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqshrnbvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_rshrnbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_shrnbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrshrnbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrshrunbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqshrnbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqshrunbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqrshrnbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqshrnbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_rshrntvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_shrntvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrshrntvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrshruntvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqshrntvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqshruntvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqrshrntvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqshrntvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_rshrntvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_shrntvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrshrntvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrshruntvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqshrntvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqshruntvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqrshrntvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqshrntvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_rshrntvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_shrntvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrshrntvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrshruntvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqshrntvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqshruntvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqrshrntvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqshrntvnx2di (rtx, rtx, rtx, rtx);
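/* SVE2 predicated pairwise arithmetic: integer ADDP/SMAXP/SMINP/UMAXP/UMINP
   and floating-point FADDP/FMAXP/FMAXNMP/FMINP/FMINNMP.  */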
extern rtx gen_aarch64_pred_addpvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smaxpvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sminpvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_umaxpvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uminpvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_addpvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smaxpvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sminpvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_umaxpvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uminpvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_addpvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smaxpvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sminpvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_umaxpvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uminpvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_addpvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_smaxpvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_sminpvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_umaxpvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_uminpvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_faddpvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fmaxpvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fmaxnmpvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fminpvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fminnmpvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_faddpvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fmaxpvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fmaxnmpvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fminpvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fminnmpvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_faddpvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fmaxpvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fmaxnmpvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fminpvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fminnmpvnx2df (rtx, rtx, rtx, rtx);
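/* SVE2 complex integer add with 90/270-degree rotation (CADD/SQCADD).  */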
extern rtx gen_aarch64_sve_cadd90vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cadd270vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqcadd90vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqcadd270vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cadd90vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cadd270vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqcadd90vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqcadd270vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cadd90vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cadd270vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqcadd90vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqcadd270vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cadd90vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cadd270vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqcadd90vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqcadd270vnx2di (rtx, rtx, rtx);
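/* SVE2 complex integer multiply-add with rotation (CMLA/SQRDCMLAH),
   including lane forms.  */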
extern rtx gen_aarch64_sve_cmlavnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cmla90vnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cmla180vnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cmla270vnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdcmlahvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdcmlah90vnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdcmlah180vnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdcmlah270vnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cmlavnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cmla90vnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cmla180vnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cmla270vnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdcmlahvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdcmlah90vnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdcmlah180vnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdcmlah270vnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cmlavnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cmla90vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cmla180vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cmla270vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdcmlahvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdcmlah90vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdcmlah180vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdcmlah270vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cmlavnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cmla90vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cmla180vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cmla270vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdcmlahvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdcmlah90vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdcmlah180vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqrdcmlah270vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cmla_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cmla90_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cmla180_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cmla270_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdcmlah_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdcmlah90_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdcmlah180_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdcmlah270_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cmla_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cmla90_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cmla180_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cmla270_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdcmlah_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdcmlah90_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdcmlah180_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqrdcmlah270_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
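/* SVE2 complex integer dot product with rotation (CDOT), including lane
   forms.  */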
extern rtx gen_aarch64_sve_cdotvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cdot90vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cdot180vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cdot270vnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cdotvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cdot90vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cdot180vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cdot270vnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cdot_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cdot90_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cdot180_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cdot270_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cdot_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cdot90_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cdot180_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cdot270_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
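/* SVE2 floating-point conversions (FCVTLT/FCVTNT/FCVTX/FCVTXNT).  */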
extern rtx gen_aarch64_pred_fcvtltvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcvtltvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cvtntvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_cvtntvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_fcvtxvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_cvtxntvnx2df (rtx, rtx, rtx, rtx);
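/* SVE2 predicated unary operations: URECPE/URSQRTE estimates and FLOGB.  */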
extern rtx gen_aarch64_pred_urecpevnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_ursqrtevnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_flogbvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_flogbvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_flogbvnx2df (rtx, rtx, rtx, rtx);
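/* SVE2 polynomial multiplies (PMUL, PMULLB/PMULLT and the pair forms).  */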
extern rtx gen_aarch64_sve2_pmulvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_pmullbvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_pmulltvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_pmullbvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_pmulltvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_pmullb_pairvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_pmullt_pairvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_pmullb_pairvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_pmullt_pairvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_pmullb_pairvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_pmullt_pairvnx2di (rtx, rtx, rtx);
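/* SVE2 two-register table lookup (TBL) and table lookup extension (TBX).  */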
extern rtx gen_aarch64_sve2_tbl2vnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_tbl2vnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_tbl2vnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_tbl2vnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_tbl2vnx8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_tbl2vnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_tbl2vnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_tbl2vnx2df (rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_tbxvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_tbxvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_tbxvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_tbxvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_tbxvnx8bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_tbxvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_tbxvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_tbxvnx2df (rtx, rtx, rtx, rtx);
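/* SVE2 bit permutation (BDEP/BEXT/BGRP).  */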
extern rtx gen_aarch64_sve_bdepvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_bextvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_bgrpvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_bdepvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_bextvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_bgrpvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_bdepvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_bextvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_bgrpvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_bdepvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_bextvnx2di (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_bgrpvnx2di (rtx, rtx, rtx);
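/* SVE2 histogram (HISTCNT/HISTSEG) and character-match (MATCH/NMATCH)
   operations.  */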
extern rtx gen_aarch64_sve2_histcntvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_histcntvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_histsegvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_pred_matchvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_nmatchvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_matchvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_nmatchvnx8hi (rtx, rtx, rtx, rtx, rtx);
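/* SVE2 cryptographic extensions (AESE/AESD/AESMC/AESIMC, RAX1,
   SM4E/SM4EKEY).  */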
extern rtx gen_aarch64_sve2_aese (rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_aesd (rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_aesmc (rtx, rtx);
extern rtx gen_aarch64_sve2_aesimc (rtx, rtx);
extern rtx gen_aarch64_sve2_rax1 (rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_sm4e (rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_sm4ekey (rtx, rtx, rtx);
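/* Core (non-SIMD) named patterns: conditional branches, modulo, switch
   dispatch, prologue/epilogue, calls, scalar moves, extensions, and
   integer/floating-point arithmetic.  */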
extern rtx gen_cbranchsi4 (rtx, rtx, rtx, rtx);
extern rtx gen_cbranchdi4 (rtx, rtx, rtx, rtx);
extern rtx gen_cbranchsf4 (rtx, rtx, rtx, rtx);
extern rtx gen_cbranchdf4 (rtx, rtx, rtx, rtx);
extern rtx gen_cbranchcc4 (rtx, rtx, rtx, rtx);
extern rtx gen_modsi3 (rtx, rtx, rtx);
extern rtx gen_moddi3 (rtx, rtx, rtx);
extern rtx gen_casesi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_casesi_dispatch (rtx, rtx);
extern rtx gen_prologue (void);
extern rtx gen_epilogue (void);
extern rtx gen_sibcall_epilogue (void);
extern rtx gen_return (void);
extern rtx gen_call (rtx, rtx, rtx);
extern rtx gen_call_value (rtx, rtx, rtx, rtx);
extern rtx gen_sibcall (rtx, rtx, rtx);
extern rtx gen_sibcall_value (rtx, rtx, rtx, rtx);
extern rtx gen_untyped_call (rtx, rtx, rtx);
extern rtx gen_movqi (rtx, rtx);
extern rtx gen_movhi (rtx, rtx);
extern rtx gen_movsi (rtx, rtx);
extern rtx gen_movdi (rtx, rtx);
extern rtx gen_movti (rtx, rtx);
extern rtx gen_movhf (rtx, rtx);
extern rtx gen_movbf (rtx, rtx);
extern rtx gen_movsf (rtx, rtx);
extern rtx gen_movdf (rtx, rtx);
extern rtx gen_movtf (rtx, rtx);
extern rtx gen_cpymemdi (rtx, rtx, rtx, rtx);
extern rtx gen_extendsidi2 (rtx, rtx);
extern rtx gen_zero_extendsidi2 (rtx, rtx);
extern rtx gen_extendqisi2 (rtx, rtx);
extern rtx gen_zero_extendqisi2 (rtx, rtx);
extern rtx gen_extendhisi2 (rtx, rtx);
extern rtx gen_zero_extendhisi2 (rtx, rtx);
extern rtx gen_extendqidi2 (rtx, rtx);
extern rtx gen_zero_extendqidi2 (rtx, rtx);
extern rtx gen_extendhidi2 (rtx, rtx);
extern rtx gen_zero_extendhidi2 (rtx, rtx);
extern rtx gen_extendqihi2 (rtx, rtx);
extern rtx gen_zero_extendqihi2 (rtx, rtx);
extern rtx gen_addsi3 (rtx, rtx, rtx);
extern rtx gen_adddi3 (rtx, rtx, rtx);
extern rtx gen_addvsi4 (rtx, rtx, rtx, rtx);
extern rtx gen_addvdi4 (rtx, rtx, rtx, rtx);
extern rtx gen_uaddvsi4 (rtx, rtx, rtx, rtx);
extern rtx gen_uaddvdi4 (rtx, rtx, rtx, rtx);
extern rtx gen_addti3 (rtx, rtx, rtx);
extern rtx gen_addvti4 (rtx, rtx, rtx, rtx);
extern rtx gen_uaddvti4 (rtx, rtx, rtx, rtx);
extern rtx gen_addsi3_carryin (rtx, rtx, rtx);
extern rtx gen_adddi3_carryin (rtx, rtx, rtx);
extern rtx gen_addsi3_carryinC (rtx, rtx, rtx);
extern rtx gen_adddi3_carryinC (rtx, rtx, rtx);
extern rtx gen_addsi3_carryinV (rtx, rtx, rtx);
extern rtx gen_adddi3_carryinV (rtx, rtx, rtx);
extern rtx gen_subvsi4 (rtx, rtx, rtx, rtx);
extern rtx gen_subvdi4 (rtx, rtx, rtx, rtx);
extern rtx gen_negvsi3 (rtx, rtx, rtx);
extern rtx gen_negvdi3 (rtx, rtx, rtx);
extern rtx gen_usubvsi4 (rtx, rtx, rtx, rtx);
extern rtx gen_usubvdi4 (rtx, rtx, rtx, rtx);
extern rtx gen_subti3 (rtx, rtx, rtx);
extern rtx gen_subvti4 (rtx, rtx, rtx, rtx);
extern rtx gen_usubvti4 (rtx, rtx, rtx, rtx);
extern rtx gen_negvti3 (rtx, rtx, rtx);
extern rtx gen_subsi3_carryin (rtx, rtx, rtx);
extern rtx gen_subdi3_carryin (rtx, rtx, rtx);
extern rtx gen_usubsi3_carryinC (rtx, rtx, rtx);
extern rtx gen_usubdi3_carryinC (rtx, rtx, rtx);
extern rtx gen_subsi3_carryinV (rtx, rtx, rtx);
extern rtx gen_subdi3_carryinV (rtx, rtx, rtx);
extern rtx gen_abssi2 (rtx, rtx);
extern rtx gen_absdi2 (rtx, rtx);
extern rtx gen_mulditi3 (rtx, rtx, rtx);
extern rtx gen_umulditi3 (rtx, rtx, rtx);
extern rtx gen_multi3 (rtx, rtx, rtx);
extern rtx gen_cstoresi4 (rtx, rtx, rtx, rtx);
extern rtx gen_cstoredi4 (rtx, rtx, rtx, rtx);
extern rtx gen_cstorecc4 (rtx, rtx, rtx, rtx);
extern rtx gen_cstoresf4 (rtx, rtx, rtx, rtx);
extern rtx gen_cstoredf4 (rtx, rtx, rtx, rtx);
extern rtx gen_cmovsi6 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cmovdi6 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cmovsf6 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cmovdf6 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_movqicc (rtx, rtx, rtx, rtx);
extern rtx gen_movhicc (rtx, rtx, rtx, rtx);
extern rtx gen_movsicc (rtx, rtx, rtx, rtx);
extern rtx gen_movdicc (rtx, rtx, rtx, rtx);
extern rtx gen_movsfsicc (rtx, rtx, rtx, rtx);
extern rtx gen_movdfsicc (rtx, rtx, rtx, rtx);
extern rtx gen_movsfdicc (rtx, rtx, rtx, rtx);
extern rtx gen_movdfdicc (rtx, rtx, rtx, rtx);
extern rtx gen_movsfcc (rtx, rtx, rtx, rtx);
extern rtx gen_movdfcc (rtx, rtx, rtx, rtx);
extern rtx gen_negsicc (rtx, rtx, rtx, rtx);
extern rtx gen_notsicc (rtx, rtx, rtx, rtx);
extern rtx gen_negdicc (rtx, rtx, rtx, rtx);
extern rtx gen_notdicc (rtx, rtx, rtx, rtx);
extern rtx gen_umaxsi3 (rtx, rtx, rtx);
extern rtx gen_umaxdi3 (rtx, rtx, rtx);
extern rtx gen_ffssi2 (rtx, rtx);
extern rtx gen_ffsdi2 (rtx, rtx);
extern rtx gen_popcountsi2 (rtx, rtx);
extern rtx gen_popcountdi2 (rtx, rtx);
extern rtx gen_ashlsi3 (rtx, rtx, rtx);
extern rtx gen_ashrsi3 (rtx, rtx, rtx);
extern rtx gen_lshrsi3 (rtx, rtx, rtx);
extern rtx gen_ashldi3 (rtx, rtx, rtx);
extern rtx gen_ashrdi3 (rtx, rtx, rtx);
extern rtx gen_lshrdi3 (rtx, rtx, rtx);
extern rtx gen_ashlqi3 (rtx, rtx, rtx);
extern rtx gen_ashlhi3 (rtx, rtx, rtx);
extern rtx gen_rotrsi3 (rtx, rtx, rtx);
extern rtx gen_rotrdi3 (rtx, rtx, rtx);
extern rtx gen_rotlsi3 (rtx, rtx, rtx);
extern rtx gen_rotldi3 (rtx, rtx, rtx);
extern rtx gen_extv (rtx, rtx, rtx, rtx);
extern rtx gen_extzv (rtx, rtx, rtx, rtx);
extern rtx gen_insvsi (rtx, rtx, rtx, rtx);
extern rtx gen_insvdi (rtx, rtx, rtx, rtx);
extern rtx gen_fmahf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmasf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmadf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmahf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmasf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmadf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmssf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmsdf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmssf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmsdf4 (rtx, rtx, rtx, rtx);
extern rtx gen_floatsihf2 (rtx, rtx);
extern rtx gen_floatunssihf2 (rtx, rtx);
extern rtx gen_floatdihf2 (rtx, rtx);
extern rtx gen_floatunsdihf2 (rtx, rtx);
extern rtx gen_divhf3 (rtx, rtx, rtx);
extern rtx gen_divsf3 (rtx, rtx, rtx);
extern rtx gen_divdf3 (rtx, rtx, rtx);
extern rtx gen_sqrthf2 (rtx, rtx);
extern rtx gen_sqrtsf2 (rtx, rtx);
extern rtx gen_sqrtdf2 (rtx, rtx);
extern rtx gen_lrintsfsi2 (rtx, rtx);
extern rtx gen_lrintdfsi2 (rtx, rtx);
extern rtx gen_lrintsfdi2 (rtx, rtx);
extern rtx gen_lrintdfdi2 (rtx, rtx);
extern rtx gen_copysignsf3 (rtx, rtx, rtx);
extern rtx gen_copysigndf3 (rtx, rtx, rtx);
extern rtx gen_xorsignsf3 (rtx, rtx, rtx);
extern rtx gen_xorsigndf3 (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpsfsi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpsfdi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpdfsi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpdfdi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcptfsi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcptfdi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpv8qisi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpv8qidi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpv16qisi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpv16qidi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpv4hisi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpv4hidi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpv8hisi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpv8hidi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpv2sisi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpv2sidi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpv4sisi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpv4sidi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpv2disi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpv2didi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpv2sfsi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpv2sfdi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpv4sfsi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpv4sfdi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpv2dfsi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movcpv2dfdi (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movti (rtx, rtx, rtx);
extern rtx gen_aarch64_reload_movtf (rtx, rtx, rtx);
extern rtx gen_add_losym (rtx, rtx, rtx);
extern rtx gen_tlsgd_small_si (rtx, rtx);
extern rtx gen_tlsgd_small_di (rtx, rtx);
extern rtx gen_tlsdesc_small_si (rtx);
extern rtx gen_tlsdesc_small_di (rtx);
extern rtx gen_get_thread_pointerdi (rtx);
extern rtx gen_stack_protect_set (rtx, rtx);
extern rtx gen_stack_protect_test (rtx, rtx, rtx);
extern rtx gen_doloop_end (rtx, rtx);
extern rtx gen_despeculate_copyqi (rtx, rtx, rtx);
extern rtx gen_despeculate_copyhi (rtx, rtx, rtx);
extern rtx gen_despeculate_copysi (rtx, rtx, rtx);
extern rtx gen_despeculate_copydi (rtx, rtx, rtx);
extern rtx gen_despeculate_copyti (rtx, rtx, rtx);
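/* Advanced SIMD patterns: vector moves (including misaligned forms) and
   the vector arithmetic, comparison, and memory expanders that follow.  */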
extern rtx gen_movv8qi (rtx, rtx);
extern rtx gen_movv16qi (rtx, rtx);
extern rtx gen_movv4hi (rtx, rtx);
extern rtx gen_movv8hi (rtx, rtx);
extern rtx gen_movv2si (rtx, rtx);
extern rtx gen_movv4si (rtx, rtx);
extern rtx gen_movv2di (rtx, rtx);
extern rtx gen_movv4hf (rtx, rtx);
extern rtx gen_movv8hf (rtx, rtx);
extern rtx gen_movv4bf (rtx, rtx);
extern rtx gen_movv8bf (rtx, rtx);
extern rtx gen_movv2sf (rtx, rtx);
extern rtx gen_movv4sf (rtx, rtx);
extern rtx gen_movv2df (rtx, rtx);
extern rtx gen_movmisalignv8qi (rtx, rtx);
extern rtx gen_movmisalignv16qi (rtx, rtx);
extern rtx gen_movmisalignv4hi (rtx, rtx);
extern rtx gen_movmisalignv8hi (rtx, rtx);
extern rtx gen_movmisalignv2si (rtx, rtx);
extern rtx gen_movmisalignv4si (rtx, rtx);
extern rtx gen_movmisalignv2di (rtx, rtx);
extern rtx gen_movmisalignv2sf (rtx, rtx);
extern rtx gen_movmisalignv4sf (rtx, rtx);
extern rtx gen_movmisalignv2df (rtx, rtx);
extern rtx gen_aarch64_split_simd_movv16qi (rtx, rtx);
extern rtx gen_aarch64_split_simd_movv8hi (rtx, rtx);
extern rtx gen_aarch64_split_simd_movv4si (rtx, rtx);
extern rtx gen_aarch64_split_simd_movv2di (rtx, rtx);
extern rtx gen_aarch64_split_simd_movv8hf (rtx, rtx);
extern rtx gen_aarch64_split_simd_movv8bf (rtx, rtx);
extern rtx gen_aarch64_split_simd_movv4sf (rtx, rtx);
extern rtx gen_aarch64_split_simd_movv2df (rtx, rtx);
extern rtx gen_aarch64_get_halfv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_halfv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_halfv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_get_halfv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_get_halfv8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_halfv8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_halfv4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_halfv2df (rtx, rtx, rtx);
extern rtx gen_ctzv2si2 (rtx, rtx);
extern rtx gen_ctzv4si2 (rtx, rtx);
extern rtx gen_xorsignv4hf3 (rtx, rtx, rtx);
extern rtx gen_xorsignv8hf3 (rtx, rtx, rtx);
extern rtx gen_xorsignv2sf3 (rtx, rtx, rtx);
extern rtx gen_xorsignv4sf3 (rtx, rtx, rtx);
extern rtx gen_xorsignv2df3 (rtx, rtx, rtx);
extern rtx gen_sdot_prodv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_udot_prodv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_sdot_prodv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_udot_prodv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_copysignv4hf3 (rtx, rtx, rtx);
extern rtx gen_copysignv8hf3 (rtx, rtx, rtx);
extern rtx gen_copysignv2sf3 (rtx, rtx, rtx);
extern rtx gen_copysignv4sf3 (rtx, rtx, rtx);
extern rtx gen_copysignv2df3 (rtx, rtx, rtx);
extern rtx gen_rsqrtv2sf2 (rtx, rtx);
extern rtx gen_rsqrtv4sf2 (rtx, rtx);
extern rtx gen_rsqrtv2df2 (rtx, rtx);
extern rtx gen_rsqrtsf2 (rtx, rtx);
extern rtx gen_rsqrtdf2 (rtx, rtx);
extern rtx gen_ssadv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_usadv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_signbitv2sf2 (rtx, rtx);
extern rtx gen_signbitv4sf2 (rtx, rtx);
extern rtx gen_ashlv8qi3 (rtx, rtx, rtx);
extern rtx gen_ashlv16qi3 (rtx, rtx, rtx);
extern rtx gen_ashlv4hi3 (rtx, rtx, rtx);
extern rtx gen_ashlv8hi3 (rtx, rtx, rtx);
extern rtx gen_ashlv2si3 (rtx, rtx, rtx);
extern rtx gen_ashlv4si3 (rtx, rtx, rtx);
extern rtx gen_ashlv2di3 (rtx, rtx, rtx);
extern rtx gen_lshrv8qi3 (rtx, rtx, rtx);
extern rtx gen_lshrv16qi3 (rtx, rtx, rtx);
extern rtx gen_lshrv4hi3 (rtx, rtx, rtx);
extern rtx gen_lshrv8hi3 (rtx, rtx, rtx);
extern rtx gen_lshrv2si3 (rtx, rtx, rtx);
extern rtx gen_lshrv4si3 (rtx, rtx, rtx);
extern rtx gen_lshrv2di3 (rtx, rtx, rtx);
extern rtx gen_ashrv8qi3 (rtx, rtx, rtx);
extern rtx gen_ashrv16qi3 (rtx, rtx, rtx);
extern rtx gen_ashrv4hi3 (rtx, rtx, rtx);
extern rtx gen_ashrv8hi3 (rtx, rtx, rtx);
extern rtx gen_ashrv2si3 (rtx, rtx, rtx);
extern rtx gen_ashrv4si3 (rtx, rtx, rtx);
extern rtx gen_ashrv2di3 (rtx, rtx, rtx);
extern rtx gen_vashlv8qi3 (rtx, rtx, rtx);
extern rtx gen_vashlv16qi3 (rtx, rtx, rtx);
extern rtx gen_vashlv4hi3 (rtx, rtx, rtx);
extern rtx gen_vashlv8hi3 (rtx, rtx, rtx);
extern rtx gen_vashlv2si3 (rtx, rtx, rtx);
extern rtx gen_vashlv4si3 (rtx, rtx, rtx);
extern rtx gen_vashlv2di3 (rtx, rtx, rtx);
extern rtx gen_vashrv8qi3 (rtx, rtx, rtx);
extern rtx gen_vashrv16qi3 (rtx, rtx, rtx);
extern rtx gen_vashrv4hi3 (rtx, rtx, rtx);
extern rtx gen_vashrv8hi3 (rtx, rtx, rtx);
extern rtx gen_vashrv2si3 (rtx, rtx, rtx);
extern rtx gen_vashrv4si3 (rtx, rtx, rtx);
extern rtx gen_aarch64_ashr_simddi (rtx, rtx, rtx);
extern rtx gen_vlshrv8qi3 (rtx, rtx, rtx);
extern rtx gen_vlshrv16qi3 (rtx, rtx, rtx);
extern rtx gen_vlshrv4hi3 (rtx, rtx, rtx);
extern rtx gen_vlshrv8hi3 (rtx, rtx, rtx);
extern rtx gen_vlshrv2si3 (rtx, rtx, rtx);
extern rtx gen_vlshrv4si3 (rtx, rtx, rtx);
extern rtx gen_aarch64_lshr_simddi (rtx, rtx, rtx);
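/* Vector element insertion (vec_set) patterns.  */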
extern rtx gen_vec_setv8qi (rtx, rtx, rtx);
extern rtx gen_vec_setv16qi (rtx, rtx, rtx);
extern rtx gen_vec_setv4hi (rtx, rtx, rtx);
extern rtx gen_vec_setv8hi (rtx, rtx, rtx);
extern rtx gen_vec_setv2si (rtx, rtx, rtx);
extern rtx gen_vec_setv4si (rtx, rtx, rtx);
extern rtx gen_vec_setv2di (rtx, rtx, rtx);
extern rtx gen_vec_setv4hf (rtx, rtx, rtx);
extern rtx gen_vec_setv8hf (rtx, rtx, rtx);
extern rtx gen_vec_setv4bf (rtx, rtx, rtx);
extern rtx gen_vec_setv8bf (rtx, rtx, rtx);
extern rtx gen_vec_setv2sf (rtx, rtx, rtx);
extern rtx gen_vec_setv4sf (rtx, rtx, rtx);
extern rtx gen_vec_setv2df (rtx, rtx, rtx);
extern rtx gen_smaxv2di3 (rtx, rtx, rtx);
extern rtx gen_sminv2di3 (rtx, rtx, rtx);
extern rtx gen_umaxv2di3 (rtx, rtx, rtx);
extern rtx gen_uminv2di3 (rtx, rtx, rtx);
extern rtx gen_move_lo_quad_v16qi (rtx, rtx);
extern rtx gen_move_lo_quad_v8hi (rtx, rtx);
extern rtx gen_move_lo_quad_v4si (rtx, rtx);
extern rtx gen_move_lo_quad_v2di (rtx, rtx);
extern rtx gen_move_lo_quad_v8hf (rtx, rtx);
extern rtx gen_move_lo_quad_v8bf (rtx, rtx);
extern rtx gen_move_lo_quad_v4sf (rtx, rtx);
extern rtx gen_move_lo_quad_v2df (rtx, rtx);
extern rtx gen_move_hi_quad_v16qi (rtx, rtx);
extern rtx gen_move_hi_quad_v8hi (rtx, rtx);
extern rtx gen_move_hi_quad_v4si (rtx, rtx);
extern rtx gen_move_hi_quad_v2di (rtx, rtx);
extern rtx gen_move_hi_quad_v8hf (rtx, rtx);
extern rtx gen_move_hi_quad_v8bf (rtx, rtx);
extern rtx gen_move_hi_quad_v4sf (rtx, rtx);
extern rtx gen_move_hi_quad_v2df (rtx, rtx);
extern rtx gen_vec_pack_trunc_v4hi (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_v2si (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_di (rtx, rtx, rtx);
extern rtx gen_vec_unpacks_hi_v16qi (rtx, rtx);
extern rtx gen_vec_unpacku_hi_v16qi (rtx, rtx);
extern rtx gen_vec_unpacks_hi_v8hi (rtx, rtx);
extern rtx gen_vec_unpacku_hi_v8hi (rtx, rtx);
extern rtx gen_vec_unpacks_hi_v4si (rtx, rtx);
extern rtx gen_vec_unpacku_hi_v4si (rtx, rtx);
extern rtx gen_vec_unpacks_lo_v16qi (rtx, rtx);
extern rtx gen_vec_unpacku_lo_v16qi (rtx, rtx);
extern rtx gen_vec_unpacks_lo_v8hi (rtx, rtx);
extern rtx gen_vec_unpacku_lo_v8hi (rtx, rtx);
extern rtx gen_vec_unpacks_lo_v4si (rtx, rtx);
extern rtx gen_vec_unpacku_lo_v4si (rtx, rtx);
extern rtx gen_vec_widen_smult_lo_v16qi (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_lo_v16qi (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_lo_v8hi (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_lo_v8hi (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_lo_v4si (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_lo_v4si (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_hi_v16qi (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_hi_v16qi (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_hi_v8hi (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_hi_v8hi (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_hi_v4si (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_hi_v4si (rtx, rtx, rtx);
extern rtx gen_divv4hf3 (rtx, rtx, rtx);
extern rtx gen_divv8hf3 (rtx, rtx, rtx);
extern rtx gen_divv2sf3 (rtx, rtx, rtx);
extern rtx gen_divv4sf3 (rtx, rtx, rtx);
extern rtx gen_divv2df3 (rtx, rtx, rtx);
extern rtx gen_fixv4hfv4hi2 (rtx, rtx);
extern rtx gen_fixunsv4hfv4hi2 (rtx, rtx);
extern rtx gen_fixv8hfv8hi2 (rtx, rtx);
extern rtx gen_fixunsv8hfv8hi2 (rtx, rtx);
extern rtx gen_fixv2sfv2si2 (rtx, rtx);
extern rtx gen_fixunsv2sfv2si2 (rtx, rtx);
extern rtx gen_fixv4sfv4si2 (rtx, rtx);
extern rtx gen_fixunsv4sfv4si2 (rtx, rtx);
extern rtx gen_fixv2dfv2di2 (rtx, rtx);
extern rtx gen_fixunsv2dfv2di2 (rtx, rtx);
extern rtx gen_fix_truncv4hfv4hi2 (rtx, rtx);
extern rtx gen_fixuns_truncv4hfv4hi2 (rtx, rtx);
extern rtx gen_fix_truncv8hfv8hi2 (rtx, rtx);
extern rtx gen_fixuns_truncv8hfv8hi2 (rtx, rtx);
extern rtx gen_fix_truncv2sfv2si2 (rtx, rtx);
extern rtx gen_fixuns_truncv2sfv2si2 (rtx, rtx);
extern rtx gen_fix_truncv4sfv4si2 (rtx, rtx);
extern rtx gen_fixuns_truncv4sfv4si2 (rtx, rtx);
extern rtx gen_fix_truncv2dfv2di2 (rtx, rtx);
extern rtx gen_fixuns_truncv2dfv2di2 (rtx, rtx);
extern rtx gen_ftruncv4hf2 (rtx, rtx);
extern rtx gen_ftruncv8hf2 (rtx, rtx);
extern rtx gen_ftruncv2sf2 (rtx, rtx);
extern rtx gen_ftruncv4sf2 (rtx, rtx);
extern rtx gen_ftruncv2df2 (rtx, rtx);
extern rtx gen_vec_unpacks_lo_v8hf (rtx, rtx);
extern rtx gen_vec_unpacks_lo_v4sf (rtx, rtx);
extern rtx gen_vec_unpacks_hi_v8hf (rtx, rtx);
extern rtx gen_vec_unpacks_hi_v4sf (rtx, rtx);
extern rtx gen_aarch64_float_truncate_hi_v4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_float_truncate_hi_v8hf (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_v2df (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_df (rtx, rtx, rtx);
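/* Vector-to-scalar reductions: plus, and signed/unsigned max/min,
   including NaN-propagating floating-point variants.  */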
extern rtx gen_reduc_plus_scal_v8qi (rtx, rtx);
extern rtx gen_reduc_plus_scal_v16qi (rtx, rtx);
extern rtx gen_reduc_plus_scal_v4hi (rtx, rtx);
extern rtx gen_reduc_plus_scal_v8hi (rtx, rtx);
extern rtx gen_reduc_plus_scal_v2si (rtx, rtx);
extern rtx gen_reduc_plus_scal_v4si (rtx, rtx);
extern rtx gen_reduc_plus_scal_v2di (rtx, rtx);
extern rtx gen_reduc_plus_scal_v4sf (rtx, rtx);
extern rtx gen_reduc_smax_nan_scal_v4hf (rtx, rtx);
extern rtx gen_reduc_smin_nan_scal_v4hf (rtx, rtx);
extern rtx gen_reduc_smax_scal_v4hf (rtx, rtx);
extern rtx gen_reduc_smin_scal_v4hf (rtx, rtx);
extern rtx gen_reduc_smax_nan_scal_v8hf (rtx, rtx);
extern rtx gen_reduc_smin_nan_scal_v8hf (rtx, rtx);
extern rtx gen_reduc_smax_scal_v8hf (rtx, rtx);
extern rtx gen_reduc_smin_scal_v8hf (rtx, rtx);
extern rtx gen_reduc_smax_nan_scal_v2sf (rtx, rtx);
extern rtx gen_reduc_smin_nan_scal_v2sf (rtx, rtx);
extern rtx gen_reduc_smax_scal_v2sf (rtx, rtx);
extern rtx gen_reduc_smin_scal_v2sf (rtx, rtx);
extern rtx gen_reduc_smax_nan_scal_v4sf (rtx, rtx);
extern rtx gen_reduc_smin_nan_scal_v4sf (rtx, rtx);
extern rtx gen_reduc_smax_scal_v4sf (rtx, rtx);
extern rtx gen_reduc_smin_scal_v4sf (rtx, rtx);
extern rtx gen_reduc_smax_nan_scal_v2df (rtx, rtx);
extern rtx gen_reduc_smin_nan_scal_v2df (rtx, rtx);
extern rtx gen_reduc_smax_scal_v2df (rtx, rtx);
extern rtx gen_reduc_smin_scal_v2df (rtx, rtx);
extern rtx gen_reduc_umax_scal_v8qi (rtx, rtx);
extern rtx gen_reduc_umin_scal_v8qi (rtx, rtx);
extern rtx gen_reduc_smax_scal_v8qi (rtx, rtx);
extern rtx gen_reduc_smin_scal_v8qi (rtx, rtx);
extern rtx gen_reduc_umax_scal_v16qi (rtx, rtx);
extern rtx gen_reduc_umin_scal_v16qi (rtx, rtx);
extern rtx gen_reduc_smax_scal_v16qi (rtx, rtx);
extern rtx gen_reduc_smin_scal_v16qi (rtx, rtx);
extern rtx gen_reduc_umax_scal_v4hi (rtx, rtx);
extern rtx gen_reduc_umin_scal_v4hi (rtx, rtx);
extern rtx gen_reduc_smax_scal_v4hi (rtx, rtx);
extern rtx gen_reduc_smin_scal_v4hi (rtx, rtx);
extern rtx gen_reduc_umax_scal_v8hi (rtx, rtx);
extern rtx gen_reduc_umin_scal_v8hi (rtx, rtx);
extern rtx gen_reduc_smax_scal_v8hi (rtx, rtx);
extern rtx gen_reduc_smin_scal_v8hi (rtx, rtx);
extern rtx gen_reduc_umax_scal_v2si (rtx, rtx);
extern rtx gen_reduc_umin_scal_v2si (rtx, rtx);
extern rtx gen_reduc_smax_scal_v2si (rtx, rtx);
extern rtx gen_reduc_smin_scal_v2si (rtx, rtx);
extern rtx gen_reduc_umax_scal_v4si (rtx, rtx);
extern rtx gen_reduc_umin_scal_v4si (rtx, rtx);
extern rtx gen_reduc_smax_scal_v4si (rtx, rtx);
extern rtx gen_reduc_smin_scal_v4si (rtx, rtx);
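/* Bitwise select (BSL) and the vcond_mask/vec_cmp/vcond/vcondu
   conditional-select and comparison expanders.  */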
extern rtx gen_aarch64_simd_bslv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bslv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bslv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bslv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bslv2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bslv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bslv4bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bslv8bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bslv2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bslv4hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bslv8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bslv2sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bslv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bslv2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bsldi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_simd_bsldf (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v8qiv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v16qiv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v4hiv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v8hiv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v2siv2si (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v4siv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v2div2di (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v2sfv2si (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v4sfv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v2dfv2di (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_didi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv8qiv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv16qiv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv4hiv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv8hiv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv2siv2si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv4siv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv2div2di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpdidi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv2sfv2si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv4sfv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv2dfv2di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv8qiv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv16qiv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv4hiv4hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv8hiv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv2siv2si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv4siv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv2div2di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpudidi (rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8qiv8qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16qiv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4hiv4hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8hiv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2siv2si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4siv4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2div2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2sfv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4sfv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2dfv2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconddidi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2siv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2sfv2si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4siv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4sfv4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2div2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2dfv2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8qiv8qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv16qiv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv4hiv4hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8hiv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv2siv2si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv4siv4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv2div2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondudidi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv2sfv2si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv4sfv4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv2dfv2di (rtx, rtx, rtx, rtx, rtx, rtx);
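/* Combining two 64-bit halves into a single 128-bit vector.  */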
extern rtx gen_aarch64_combinev8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_combinev4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_combinev4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_combinev4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_combinev2si (rtx, rtx, rtx);
extern rtx gen_aarch64_combinev2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_combinedi (rtx, rtx, rtx);
extern rtx gen_aarch64_combinedf (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_combinev8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_combinev4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_combinev4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_combinev4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_combinev2si (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_combinev2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_combinedi (rtx, rtx, rtx);
extern rtx gen_aarch64_simd_combinedf (rtx, rtx, rtx);
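/* Widening add/subtract (long and wide, high-half forms) and widening-sum
   patterns.  */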
extern rtx gen_aarch64_saddl2v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_saddl2v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_saddl2v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_uaddl2v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uaddl2v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uaddl2v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_ssubl2v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_ssubl2v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_ssubl2v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_usubl2v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_usubl2v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_usubl2v4si (rtx, rtx, rtx);
extern rtx gen_widen_ssumv16qi3 (rtx, rtx, rtx);
extern rtx gen_widen_ssumv8hi3 (rtx, rtx, rtx);
extern rtx gen_widen_ssumv4si3 (rtx, rtx, rtx);
extern rtx gen_widen_ssumv8qi3 (rtx, rtx, rtx);
extern rtx gen_widen_ssumv4hi3 (rtx, rtx, rtx);
extern rtx gen_widen_ssumv2si3 (rtx, rtx, rtx);
extern rtx gen_widen_usumv16qi3 (rtx, rtx, rtx);
extern rtx gen_widen_usumv8hi3 (rtx, rtx, rtx);
extern rtx gen_widen_usumv4si3 (rtx, rtx, rtx);
extern rtx gen_widen_usumv8qi3 (rtx, rtx, rtx);
extern rtx gen_widen_usumv4hi3 (rtx, rtx, rtx);
extern rtx gen_widen_usumv2si3 (rtx, rtx, rtx);
extern rtx gen_aarch64_saddw2v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_saddw2v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_saddw2v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_uaddw2v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_uaddw2v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_uaddw2v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_ssubw2v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_ssubw2v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_ssubw2v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_usubw2v16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_usubw2v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_usubw2v4si (rtx, rtx, rtx);
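/* Halving add patterns with floor (truncating) or ceil (rounding)
   behaviour.  */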
extern rtx gen_avgv8qi3_floor (rtx, rtx, rtx);
extern rtx gen_uavgv8qi3_floor (rtx, rtx, rtx);
extern rtx gen_avgv16qi3_floor (rtx, rtx, rtx);
extern rtx gen_uavgv16qi3_floor (rtx, rtx, rtx);
extern rtx gen_avgv4hi3_floor (rtx, rtx, rtx);
extern rtx gen_uavgv4hi3_floor (rtx, rtx, rtx);
extern rtx gen_avgv8hi3_floor (rtx, rtx, rtx);
extern rtx gen_uavgv8hi3_floor (rtx, rtx, rtx);
extern rtx gen_avgv2si3_floor (rtx, rtx, rtx);
extern rtx gen_uavgv2si3_floor (rtx, rtx, rtx);
extern rtx gen_avgv4si3_floor (rtx, rtx, rtx);
extern rtx gen_uavgv4si3_floor (rtx, rtx, rtx);
extern rtx gen_avgv8qi3_ceil (rtx, rtx, rtx);
extern rtx gen_uavgv8qi3_ceil (rtx, rtx, rtx);
extern rtx gen_avgv16qi3_ceil (rtx, rtx, rtx);
extern rtx gen_uavgv16qi3_ceil (rtx, rtx, rtx);
extern rtx gen_avgv4hi3_ceil (rtx, rtx, rtx);
extern rtx gen_uavgv4hi3_ceil (rtx, rtx, rtx);
extern rtx gen_avgv8hi3_ceil (rtx, rtx, rtx);
extern rtx gen_uavgv8hi3_ceil (rtx, rtx, rtx);
extern rtx gen_avgv2si3_ceil (rtx, rtx, rtx);
extern rtx gen_uavgv2si3_ceil (rtx, rtx, rtx);
extern rtx gen_avgv4si3_ceil (rtx, rtx, rtx);
extern rtx gen_uavgv4si3_ceil (rtx, rtx, rtx);
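/* Saturating doubling multiply-accumulate/subtract and multiply long,
   high-half (SQDMLAL2/SQDMLSL2/SQDMULL2), including lane and scalar (_n)
   forms.  */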
extern rtx gen_aarch64_sqdmlal2v8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal2v4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl2v8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl2v4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal2_lanev8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal2_lanev4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal2_laneqv8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal2_laneqv4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl2_lanev8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl2_lanev4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl2_laneqv8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl2_laneqv4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal2_nv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlal2_nv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl2_nv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmlsl2_nv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull2v8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull2v4si (rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull2_lanev8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull2_lanev4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull2_laneqv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull2_laneqv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull2_nv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_sqdmull2_nv4si (rtx, rtx, rtx);
extern rtx gen_sqrtv4hf2 (rtx, rtx);
extern rtx gen_sqrtv8hf2 (rtx, rtx);
extern rtx gen_sqrtv2sf2 (rtx, rtx);
extern rtx gen_sqrtv4sf2 (rtx, rtx);
extern rtx gen_sqrtv2df2 (rtx, rtx);
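/* Structure load/store expanders (LD2/LD3/LD4, ST2/ST3/ST4) over the
   opaque OI (2-register), CI (3-register) and XI (4-register) modes.  */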
extern rtx gen_vec_load_lanesoiv16qi (rtx, rtx);
extern rtx gen_vec_load_lanesoiv8hi (rtx, rtx);
extern rtx gen_vec_load_lanesoiv4si (rtx, rtx);
extern rtx gen_vec_load_lanesoiv2di (rtx, rtx);
extern rtx gen_vec_load_lanesoiv8hf (rtx, rtx);
extern rtx gen_vec_load_lanesoiv4sf (rtx, rtx);
extern rtx gen_vec_load_lanesoiv2df (rtx, rtx);
extern rtx gen_vec_load_lanesoiv8bf (rtx, rtx);
extern rtx gen_vec_store_lanesoiv16qi (rtx, rtx);
extern rtx gen_vec_store_lanesoiv8hi (rtx, rtx);
extern rtx gen_vec_store_lanesoiv4si (rtx, rtx);
extern rtx gen_vec_store_lanesoiv2di (rtx, rtx);
extern rtx gen_vec_store_lanesoiv8hf (rtx, rtx);
extern rtx gen_vec_store_lanesoiv4sf (rtx, rtx);
extern rtx gen_vec_store_lanesoiv2df (rtx, rtx);
extern rtx gen_vec_store_lanesoiv8bf (rtx, rtx);
extern rtx gen_vec_load_lanesciv16qi (rtx, rtx);
extern rtx gen_vec_load_lanesciv8hi (rtx, rtx);
extern rtx gen_vec_load_lanesciv4si (rtx, rtx);
extern rtx gen_vec_load_lanesciv2di (rtx, rtx);
extern rtx gen_vec_load_lanesciv8hf (rtx, rtx);
extern rtx gen_vec_load_lanesciv4sf (rtx, rtx);
extern rtx gen_vec_load_lanesciv2df (rtx, rtx);
extern rtx gen_vec_load_lanesciv8bf (rtx, rtx);
extern rtx gen_vec_store_lanesciv16qi (rtx, rtx);
extern rtx gen_vec_store_lanesciv8hi (rtx, rtx);
extern rtx gen_vec_store_lanesciv4si (rtx, rtx);
extern rtx gen_vec_store_lanesciv2di (rtx, rtx);
extern rtx gen_vec_store_lanesciv8hf (rtx, rtx);
extern rtx gen_vec_store_lanesciv4sf (rtx, rtx);
extern rtx gen_vec_store_lanesciv2df (rtx, rtx);
extern rtx gen_vec_store_lanesciv8bf (rtx, rtx);
extern rtx gen_vec_load_lanesxiv16qi (rtx, rtx);
extern rtx gen_vec_load_lanesxiv8hi (rtx, rtx);
extern rtx gen_vec_load_lanesxiv4si (rtx, rtx);
extern rtx gen_vec_load_lanesxiv2di (rtx, rtx);
extern rtx gen_vec_load_lanesxiv8hf (rtx, rtx);
extern rtx gen_vec_load_lanesxiv4sf (rtx, rtx);
extern rtx gen_vec_load_lanesxiv2df (rtx, rtx);
extern rtx gen_vec_load_lanesxiv8bf (rtx, rtx);
extern rtx gen_vec_store_lanesxiv16qi (rtx, rtx);
extern rtx gen_vec_store_lanesxiv8hi (rtx, rtx);
extern rtx gen_vec_store_lanesxiv4si (rtx, rtx);
extern rtx gen_vec_store_lanesxiv2di (rtx, rtx);
extern rtx gen_vec_store_lanesxiv8hf (rtx, rtx);
extern rtx gen_vec_store_lanesxiv4sf (rtx, rtx);
extern rtx gen_vec_store_lanesxiv2df (rtx, rtx);
extern rtx gen_vec_store_lanesxiv8bf (rtx, rtx);
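/* Moves for the opaque structure modes OI, CI and XI.  */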
extern rtx gen_movoi (rtx, rtx);
extern rtx gen_movci (rtx, rtx);
extern rtx gen_movxi (rtx, rtx);
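/* Multi-register LD1/ST1 patterns (ld1x3/ld1x4 and st1x2/st1x3/st1x4).  */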
extern rtx gen_aarch64_ld1x3v8qi (rtx, rtx);
extern rtx gen_aarch64_ld1x3v16qi (rtx, rtx);
extern rtx gen_aarch64_ld1x3v4hi (rtx, rtx);
extern rtx gen_aarch64_ld1x3v8hi (rtx, rtx);
extern rtx gen_aarch64_ld1x3v2si (rtx, rtx);
extern rtx gen_aarch64_ld1x3v4si (rtx, rtx);
extern rtx gen_aarch64_ld1x3v4bf (rtx, rtx);
extern rtx gen_aarch64_ld1x3v8bf (rtx, rtx);
extern rtx gen_aarch64_ld1x3v2di (rtx, rtx);
extern rtx gen_aarch64_ld1x3v4hf (rtx, rtx);
extern rtx gen_aarch64_ld1x3v8hf (rtx, rtx);
extern rtx gen_aarch64_ld1x3v2sf (rtx, rtx);
extern rtx gen_aarch64_ld1x3v4sf (rtx, rtx);
extern rtx gen_aarch64_ld1x3v2df (rtx, rtx);
extern rtx gen_aarch64_ld1x3di (rtx, rtx);
extern rtx gen_aarch64_ld1x3df (rtx, rtx);
extern rtx gen_aarch64_ld1x4v8qi (rtx, rtx);
extern rtx gen_aarch64_ld1x4v16qi (rtx, rtx);
extern rtx gen_aarch64_ld1x4v4hi (rtx, rtx);
extern rtx gen_aarch64_ld1x4v8hi (rtx, rtx);
extern rtx gen_aarch64_ld1x4v2si (rtx, rtx);
extern rtx gen_aarch64_ld1x4v4si (rtx, rtx);
extern rtx gen_aarch64_ld1x4v4bf (rtx, rtx);
extern rtx gen_aarch64_ld1x4v8bf (rtx, rtx);
extern rtx gen_aarch64_ld1x4v2di (rtx, rtx);
extern rtx gen_aarch64_ld1x4v4hf (rtx, rtx);
extern rtx gen_aarch64_ld1x4v8hf (rtx, rtx);
extern rtx gen_aarch64_ld1x4v2sf (rtx, rtx);
extern rtx gen_aarch64_ld1x4v4sf (rtx, rtx);
extern rtx gen_aarch64_ld1x4v2df (rtx, rtx);
extern rtx gen_aarch64_ld1x4di (rtx, rtx);
extern rtx gen_aarch64_ld1x4df (rtx, rtx);
extern rtx gen_aarch64_st1x2v8qi (rtx, rtx);
extern rtx gen_aarch64_st1x2v16qi (rtx, rtx);
extern rtx gen_aarch64_st1x2v4hi (rtx, rtx);
extern rtx gen_aarch64_st1x2v8hi (rtx, rtx);
extern rtx gen_aarch64_st1x2v2si (rtx, rtx);
extern rtx gen_aarch64_st1x2v4si (rtx, rtx);
extern rtx gen_aarch64_st1x2v4bf (rtx, rtx);
extern rtx gen_aarch64_st1x2v8bf (rtx, rtx);
extern rtx gen_aarch64_st1x2v2di (rtx, rtx);
extern rtx gen_aarch64_st1x2v4hf (rtx, rtx);
extern rtx gen_aarch64_st1x2v8hf (rtx, rtx);
extern rtx gen_aarch64_st1x2v2sf (rtx, rtx);
extern rtx gen_aarch64_st1x2v4sf (rtx, rtx);
extern rtx gen_aarch64_st1x2v2df (rtx, rtx);
extern rtx gen_aarch64_st1x2di (rtx, rtx);
extern rtx gen_aarch64_st1x2df (rtx, rtx);
extern rtx gen_aarch64_st1x3v8qi (rtx, rtx);
extern rtx gen_aarch64_st1x3v16qi (rtx, rtx);
extern rtx gen_aarch64_st1x3v4hi (rtx, rtx);
extern rtx gen_aarch64_st1x3v8hi (rtx, rtx);
extern rtx gen_aarch64_st1x3v2si (rtx, rtx);
extern rtx gen_aarch64_st1x3v4si (rtx, rtx);
extern rtx gen_aarch64_st1x3v4bf (rtx, rtx);
extern rtx gen_aarch64_st1x3v8bf (rtx, rtx);
extern rtx gen_aarch64_st1x3v2di (rtx, rtx);
extern rtx gen_aarch64_st1x3v4hf (rtx, rtx);
extern rtx gen_aarch64_st1x3v8hf (rtx, rtx);
extern rtx gen_aarch64_st1x3v2sf (rtx, rtx);
extern rtx gen_aarch64_st1x3v4sf (rtx, rtx);
extern rtx gen_aarch64_st1x3v2df (rtx, rtx);
extern rtx gen_aarch64_st1x3di (rtx, rtx);
extern rtx gen_aarch64_st1x3df (rtx, rtx);
extern rtx gen_aarch64_st1x4v8qi (rtx, rtx);
extern rtx gen_aarch64_st1x4v16qi (rtx, rtx);
extern rtx gen_aarch64_st1x4v4hi (rtx, rtx);
extern rtx gen_aarch64_st1x4v8hi (rtx, rtx);
extern rtx gen_aarch64_st1x4v2si (rtx, rtx);
extern rtx gen_aarch64_st1x4v4si (rtx, rtx);
extern rtx gen_aarch64_st1x4v4bf (rtx, rtx);
extern rtx gen_aarch64_st1x4v8bf (rtx, rtx);
extern rtx gen_aarch64_st1x4v2di (rtx, rtx);
extern rtx gen_aarch64_st1x4v4hf (rtx, rtx);
extern rtx gen_aarch64_st1x4v8hf (rtx, rtx);
extern rtx gen_aarch64_st1x4v2sf (rtx, rtx);
extern rtx gen_aarch64_st1x4v4sf (rtx, rtx);
extern rtx gen_aarch64_st1x4v2df (rtx, rtx);
extern rtx gen_aarch64_st1x4di (rtx, rtx);
extern rtx gen_aarch64_st1x4df (rtx, rtx);
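/* Load-and-replicate patterns (LD2R/LD3R/LD4R).  */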
extern rtx gen_aarch64_ld2rv8qi (rtx, rtx);
extern rtx gen_aarch64_ld2rv16qi (rtx, rtx);
extern rtx gen_aarch64_ld2rv4hi (rtx, rtx);
extern rtx gen_aarch64_ld2rv8hi (rtx, rtx);
extern rtx gen_aarch64_ld2rv2si (rtx, rtx);
extern rtx gen_aarch64_ld2rv4si (rtx, rtx);
extern rtx gen_aarch64_ld2rv4bf (rtx, rtx);
extern rtx gen_aarch64_ld2rv8bf (rtx, rtx);
extern rtx gen_aarch64_ld2rv2di (rtx, rtx);
extern rtx gen_aarch64_ld2rv4hf (rtx, rtx);
extern rtx gen_aarch64_ld2rv8hf (rtx, rtx);
extern rtx gen_aarch64_ld2rv2sf (rtx, rtx);
extern rtx gen_aarch64_ld2rv4sf (rtx, rtx);
extern rtx gen_aarch64_ld2rv2df (rtx, rtx);
extern rtx gen_aarch64_ld2rdi (rtx, rtx);
extern rtx gen_aarch64_ld2rdf (rtx, rtx);
extern rtx gen_aarch64_ld3rv8qi (rtx, rtx);
extern rtx gen_aarch64_ld3rv16qi (rtx, rtx);
extern rtx gen_aarch64_ld3rv4hi (rtx, rtx);
extern rtx gen_aarch64_ld3rv8hi (rtx, rtx);
extern rtx gen_aarch64_ld3rv2si (rtx, rtx);
extern rtx gen_aarch64_ld3rv4si (rtx, rtx);
extern rtx gen_aarch64_ld3rv4bf (rtx, rtx);
extern rtx gen_aarch64_ld3rv8bf (rtx, rtx);
extern rtx gen_aarch64_ld3rv2di (rtx, rtx);
extern rtx gen_aarch64_ld3rv4hf (rtx, rtx);
extern rtx gen_aarch64_ld3rv8hf (rtx, rtx);
extern rtx gen_aarch64_ld3rv2sf (rtx, rtx);
extern rtx gen_aarch64_ld3rv4sf (rtx, rtx);
extern rtx gen_aarch64_ld3rv2df (rtx, rtx);
extern rtx gen_aarch64_ld3rdi (rtx, rtx);
extern rtx gen_aarch64_ld3rdf (rtx, rtx);
extern rtx gen_aarch64_ld4rv8qi (rtx, rtx);
extern rtx gen_aarch64_ld4rv16qi (rtx, rtx);
extern rtx gen_aarch64_ld4rv4hi (rtx, rtx);
extern rtx gen_aarch64_ld4rv8hi (rtx, rtx);
extern rtx gen_aarch64_ld4rv2si (rtx, rtx);
extern rtx gen_aarch64_ld4rv4si (rtx, rtx);
extern rtx gen_aarch64_ld4rv4bf (rtx, rtx);
extern rtx gen_aarch64_ld4rv8bf (rtx, rtx);
extern rtx gen_aarch64_ld4rv2di (rtx, rtx);
extern rtx gen_aarch64_ld4rv4hf (rtx, rtx);
extern rtx gen_aarch64_ld4rv8hf (rtx, rtx);
extern rtx gen_aarch64_ld4rv2sf (rtx, rtx);
extern rtx gen_aarch64_ld4rv4sf (rtx, rtx);
extern rtx gen_aarch64_ld4rv2df (rtx, rtx);
extern rtx gen_aarch64_ld4rdi (rtx, rtx);
extern rtx gen_aarch64_ld4rdf (rtx, rtx);
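/* Structure loads (LD2/LD3/LD4) for the remaining vector modes, plus
   whole-register LD1 and LD1x2 loads.  */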
extern rtx gen_aarch64_ld2v8qi (rtx, rtx);
extern rtx gen_aarch64_ld2v4hi (rtx, rtx);
extern rtx gen_aarch64_ld2v4bf (rtx, rtx);
extern rtx gen_aarch64_ld2v4hf (rtx, rtx);
extern rtx gen_aarch64_ld2v2si (rtx, rtx);
extern rtx gen_aarch64_ld2v2sf (rtx, rtx);
extern rtx gen_aarch64_ld2di (rtx, rtx);
extern rtx gen_aarch64_ld2df (rtx, rtx);
extern rtx gen_aarch64_ld3v8qi (rtx, rtx);
extern rtx gen_aarch64_ld3v4hi (rtx, rtx);
extern rtx gen_aarch64_ld3v4bf (rtx, rtx);
extern rtx gen_aarch64_ld3v4hf (rtx, rtx);
extern rtx gen_aarch64_ld3v2si (rtx, rtx);
extern rtx gen_aarch64_ld3v2sf (rtx, rtx);
extern rtx gen_aarch64_ld3di (rtx, rtx);
extern rtx gen_aarch64_ld3df (rtx, rtx);
extern rtx gen_aarch64_ld4v8qi (rtx, rtx);
extern rtx gen_aarch64_ld4v4hi (rtx, rtx);
extern rtx gen_aarch64_ld4v4bf (rtx, rtx);
extern rtx gen_aarch64_ld4v4hf (rtx, rtx);
extern rtx gen_aarch64_ld4v2si (rtx, rtx);
extern rtx gen_aarch64_ld4v2sf (rtx, rtx);
extern rtx gen_aarch64_ld4di (rtx, rtx);
extern rtx gen_aarch64_ld4df (rtx, rtx);
extern rtx gen_aarch64_ld1v8qi (rtx, rtx);
extern rtx gen_aarch64_ld1v16qi (rtx, rtx);
extern rtx gen_aarch64_ld1v4hi (rtx, rtx);
extern rtx gen_aarch64_ld1v8hi (rtx, rtx);
extern rtx gen_aarch64_ld1v2si (rtx, rtx);
extern rtx gen_aarch64_ld1v4si (rtx, rtx);
extern rtx gen_aarch64_ld1v2di (rtx, rtx);
extern rtx gen_aarch64_ld1v4hf (rtx, rtx);
extern rtx gen_aarch64_ld1v8hf (rtx, rtx);
extern rtx gen_aarch64_ld1v4bf (rtx, rtx);
extern rtx gen_aarch64_ld1v8bf (rtx, rtx);
extern rtx gen_aarch64_ld1v2sf (rtx, rtx);
extern rtx gen_aarch64_ld1v4sf (rtx, rtx);
extern rtx gen_aarch64_ld1v2df (rtx, rtx);
extern rtx gen_aarch64_ld2v16qi (rtx, rtx);
extern rtx gen_aarch64_ld3v16qi (rtx, rtx);
extern rtx gen_aarch64_ld4v16qi (rtx, rtx);
extern rtx gen_aarch64_ld2v8hi (rtx, rtx);
extern rtx gen_aarch64_ld3v8hi (rtx, rtx);
extern rtx gen_aarch64_ld4v8hi (rtx, rtx);
extern rtx gen_aarch64_ld2v4si (rtx, rtx);
extern rtx gen_aarch64_ld3v4si (rtx, rtx);
extern rtx gen_aarch64_ld4v4si (rtx, rtx);
extern rtx gen_aarch64_ld2v2di (rtx, rtx);
extern rtx gen_aarch64_ld3v2di (rtx, rtx);
extern rtx gen_aarch64_ld4v2di (rtx, rtx);
extern rtx gen_aarch64_ld2v8hf (rtx, rtx);
extern rtx gen_aarch64_ld3v8hf (rtx, rtx);
extern rtx gen_aarch64_ld4v8hf (rtx, rtx);
extern rtx gen_aarch64_ld2v4sf (rtx, rtx);
extern rtx gen_aarch64_ld3v4sf (rtx, rtx);
extern rtx gen_aarch64_ld4v4sf (rtx, rtx);
extern rtx gen_aarch64_ld2v2df (rtx, rtx);
extern rtx gen_aarch64_ld3v2df (rtx, rtx);
extern rtx gen_aarch64_ld4v2df (rtx, rtx);
extern rtx gen_aarch64_ld2v8bf (rtx, rtx);
extern rtx gen_aarch64_ld3v8bf (rtx, rtx);
extern rtx gen_aarch64_ld4v8bf (rtx, rtx);
extern rtx gen_aarch64_ld1x2v16qi (rtx, rtx);
extern rtx gen_aarch64_ld1x2v8hi (rtx, rtx);
extern rtx gen_aarch64_ld1x2v4si (rtx, rtx);
extern rtx gen_aarch64_ld1x2v2di (rtx, rtx);
extern rtx gen_aarch64_ld1x2v8hf (rtx, rtx);
extern rtx gen_aarch64_ld1x2v4sf (rtx, rtx);
extern rtx gen_aarch64_ld1x2v2df (rtx, rtx);
extern rtx gen_aarch64_ld1x2v8bf (rtx, rtx);
extern rtx gen_aarch64_ld1x2v8qi (rtx, rtx);
extern rtx gen_aarch64_ld1x2v4hi (rtx, rtx);
extern rtx gen_aarch64_ld1x2v4bf (rtx, rtx);
extern rtx gen_aarch64_ld1x2v4hf (rtx, rtx);
extern rtx gen_aarch64_ld1x2v2si (rtx, rtx);
extern rtx gen_aarch64_ld1x2v2sf (rtx, rtx);
extern rtx gen_aarch64_ld1x2di (rtx, rtx);
extern rtx gen_aarch64_ld1x2df (rtx, rtx);
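/* Single-lane structure loads (LD2/LD3/LD4, lane forms): load one
   element per vector at a given lane index, leaving the other lanes
   untouched.  */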
extern rtx gen_aarch64_ld2_lanev8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld2_lanev16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld2_lanev4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld2_lanev8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld2_lanev2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld2_lanev4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld2_lanev4bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld2_lanev8bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld2_lanev2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld2_lanev4hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld2_lanev8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld2_lanev2sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld2_lanev4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld2_lanev2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld2_lanedi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld2_lanedf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld3_lanev8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld3_lanev16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld3_lanev4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld3_lanev8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld3_lanev2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld3_lanev4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld3_lanev4bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld3_lanev8bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld3_lanev2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld3_lanev4hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld3_lanev8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld3_lanev2sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld3_lanev4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld3_lanev2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld3_lanedi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld3_lanedf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld4_lanev8qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld4_lanev16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld4_lanev4hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld4_lanev8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld4_lanev2si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld4_lanev4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld4_lanev4bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld4_lanev8bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld4_lanev2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld4_lanev4hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld4_lanev8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld4_lanev2sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld4_lanev4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld4_lanev2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld4_lanedi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_ld4_lanedf (rtx, rtx, rtx, rtx);
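/* Extract one 64-bit (D) or 128-bit (Q) vector from a register tuple:
   OImode holds two vectors, CImode three and XImode four.  */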
extern rtx gen_aarch64_get_dregoiv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregoiv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregoiv4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregoiv4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregoiv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregoiv2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregoidi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregoidf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregciv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregciv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregciv4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregciv4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregciv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregciv2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregcidi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregcidf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregxiv8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregxiv4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregxiv4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregxiv4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregxiv2si (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregxiv2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregxidi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_dregxidf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregoiv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregciv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregxiv16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregoiv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregciv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregxiv8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregoiv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregciv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregxiv4si (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregoiv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregciv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregxiv2di (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregoiv8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregciv8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregxiv8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregoiv4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregciv4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregxiv4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregoiv2df (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregciv2df (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregxiv2df (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregoiv8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregciv8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_get_qregxiv8bf (rtx, rtx, rtx);
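/* General two-input permutes with a variable selector (vec_perm
   optab).  */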
extern rtx gen_vec_permv8qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permv16qi (rtx, rtx, rtx, rtx);
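/* Structure stores: ST2/ST3/ST4 in whole-register and lane forms, plus
   ST1, mirroring the loads above.  */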
extern rtx gen_aarch64_st2v8qi (rtx, rtx);
extern rtx gen_aarch64_st2v4hi (rtx, rtx);
extern rtx gen_aarch64_st2v4bf (rtx, rtx);
extern rtx gen_aarch64_st2v4hf (rtx, rtx);
extern rtx gen_aarch64_st2v2si (rtx, rtx);
extern rtx gen_aarch64_st2v2sf (rtx, rtx);
extern rtx gen_aarch64_st2di (rtx, rtx);
extern rtx gen_aarch64_st2df (rtx, rtx);
extern rtx gen_aarch64_st3v8qi (rtx, rtx);
extern rtx gen_aarch64_st3v4hi (rtx, rtx);
extern rtx gen_aarch64_st3v4bf (rtx, rtx);
extern rtx gen_aarch64_st3v4hf (rtx, rtx);
extern rtx gen_aarch64_st3v2si (rtx, rtx);
extern rtx gen_aarch64_st3v2sf (rtx, rtx);
extern rtx gen_aarch64_st3di (rtx, rtx);
extern rtx gen_aarch64_st3df (rtx, rtx);
extern rtx gen_aarch64_st4v8qi (rtx, rtx);
extern rtx gen_aarch64_st4v4hi (rtx, rtx);
extern rtx gen_aarch64_st4v4bf (rtx, rtx);
extern rtx gen_aarch64_st4v4hf (rtx, rtx);
extern rtx gen_aarch64_st4v2si (rtx, rtx);
extern rtx gen_aarch64_st4v2sf (rtx, rtx);
extern rtx gen_aarch64_st4di (rtx, rtx);
extern rtx gen_aarch64_st4df (rtx, rtx);
extern rtx gen_aarch64_st2v16qi (rtx, rtx);
extern rtx gen_aarch64_st3v16qi (rtx, rtx);
extern rtx gen_aarch64_st4v16qi (rtx, rtx);
extern rtx gen_aarch64_st2v8hi (rtx, rtx);
extern rtx gen_aarch64_st3v8hi (rtx, rtx);
extern rtx gen_aarch64_st4v8hi (rtx, rtx);
extern rtx gen_aarch64_st2v4si (rtx, rtx);
extern rtx gen_aarch64_st3v4si (rtx, rtx);
extern rtx gen_aarch64_st4v4si (rtx, rtx);
extern rtx gen_aarch64_st2v2di (rtx, rtx);
extern rtx gen_aarch64_st3v2di (rtx, rtx);
extern rtx gen_aarch64_st4v2di (rtx, rtx);
extern rtx gen_aarch64_st2v8hf (rtx, rtx);
extern rtx gen_aarch64_st3v8hf (rtx, rtx);
extern rtx gen_aarch64_st4v8hf (rtx, rtx);
extern rtx gen_aarch64_st2v4sf (rtx, rtx);
extern rtx gen_aarch64_st3v4sf (rtx, rtx);
extern rtx gen_aarch64_st4v4sf (rtx, rtx);
extern rtx gen_aarch64_st2v2df (rtx, rtx);
extern rtx gen_aarch64_st3v2df (rtx, rtx);
extern rtx gen_aarch64_st4v2df (rtx, rtx);
extern rtx gen_aarch64_st2v8bf (rtx, rtx);
extern rtx gen_aarch64_st3v8bf (rtx, rtx);
extern rtx gen_aarch64_st4v8bf (rtx, rtx);
extern rtx gen_aarch64_st2_lanev8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_st2_lanev16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_st2_lanev4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_st2_lanev8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_st2_lanev2si (rtx, rtx, rtx);
extern rtx gen_aarch64_st2_lanev4si (rtx, rtx, rtx);
extern rtx gen_aarch64_st2_lanev4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_st2_lanev8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_st2_lanev2di (rtx, rtx, rtx);
extern rtx gen_aarch64_st2_lanev4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_st2_lanev8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_st2_lanev2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_st2_lanev4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_st2_lanev2df (rtx, rtx, rtx);
extern rtx gen_aarch64_st2_lanedi (rtx, rtx, rtx);
extern rtx gen_aarch64_st2_lanedf (rtx, rtx, rtx);
extern rtx gen_aarch64_st3_lanev8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_st3_lanev16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_st3_lanev4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_st3_lanev8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_st3_lanev2si (rtx, rtx, rtx);
extern rtx gen_aarch64_st3_lanev4si (rtx, rtx, rtx);
extern rtx gen_aarch64_st3_lanev4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_st3_lanev8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_st3_lanev2di (rtx, rtx, rtx);
extern rtx gen_aarch64_st3_lanev4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_st3_lanev8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_st3_lanev2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_st3_lanev4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_st3_lanev2df (rtx, rtx, rtx);
extern rtx gen_aarch64_st3_lanedi (rtx, rtx, rtx);
extern rtx gen_aarch64_st3_lanedf (rtx, rtx, rtx);
extern rtx gen_aarch64_st4_lanev8qi (rtx, rtx, rtx);
extern rtx gen_aarch64_st4_lanev16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_st4_lanev4hi (rtx, rtx, rtx);
extern rtx gen_aarch64_st4_lanev8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_st4_lanev2si (rtx, rtx, rtx);
extern rtx gen_aarch64_st4_lanev4si (rtx, rtx, rtx);
extern rtx gen_aarch64_st4_lanev4bf (rtx, rtx, rtx);
extern rtx gen_aarch64_st4_lanev8bf (rtx, rtx, rtx);
extern rtx gen_aarch64_st4_lanev2di (rtx, rtx, rtx);
extern rtx gen_aarch64_st4_lanev4hf (rtx, rtx, rtx);
extern rtx gen_aarch64_st4_lanev8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_st4_lanev2sf (rtx, rtx, rtx);
extern rtx gen_aarch64_st4_lanev4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_st4_lanev2df (rtx, rtx, rtx);
extern rtx gen_aarch64_st4_lanedi (rtx, rtx, rtx);
extern rtx gen_aarch64_st4_lanedf (rtx, rtx, rtx);
extern rtx gen_aarch64_st1v8qi (rtx, rtx);
extern rtx gen_aarch64_st1v16qi (rtx, rtx);
extern rtx gen_aarch64_st1v4hi (rtx, rtx);
extern rtx gen_aarch64_st1v8hi (rtx, rtx);
extern rtx gen_aarch64_st1v2si (rtx, rtx);
extern rtx gen_aarch64_st1v4si (rtx, rtx);
extern rtx gen_aarch64_st1v2di (rtx, rtx);
extern rtx gen_aarch64_st1v4hf (rtx, rtx);
extern rtx gen_aarch64_st1v8hf (rtx, rtx);
extern rtx gen_aarch64_st1v4bf (rtx, rtx);
extern rtx gen_aarch64_st1v8bf (rtx, rtx);
extern rtx gen_aarch64_st1v2sf (rtx, rtx);
extern rtx gen_aarch64_st1v4sf (rtx, rtx);
extern rtx gen_aarch64_st1v2df (rtx, rtx);
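/* Insert one 128-bit (Q) vector into an OImode/CImode/XImode register
   tuple.  */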
extern rtx gen_aarch64_set_qregoiv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregciv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregxiv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregoiv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregciv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregxiv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregoiv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregciv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregxiv4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregoiv2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregciv2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregxiv2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregoiv8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregciv8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregxiv8hf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregoiv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregciv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregxiv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregoiv2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregciv2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregxiv2df (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregoiv8bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregciv8bf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_set_qregxiv8bf (rtx, rtx, rtx, rtx);
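/* Vector construction and element extraction (vec_init/vec_extract
   optabs), including the forms that build a 128-bit vector from 64-bit
   halves or extract a 64-bit half from it.  */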
extern rtx gen_vec_initv8qiqi (rtx, rtx);
extern rtx gen_vec_initv16qiqi (rtx, rtx);
extern rtx gen_vec_initv4hihi (rtx, rtx);
extern rtx gen_vec_initv8hihi (rtx, rtx);
extern rtx gen_vec_initv2sisi (rtx, rtx);
extern rtx gen_vec_initv4sisi (rtx, rtx);
extern rtx gen_vec_initv2didi (rtx, rtx);
extern rtx gen_vec_initv4hfhf (rtx, rtx);
extern rtx gen_vec_initv8hfhf (rtx, rtx);
extern rtx gen_vec_initv4bfbf (rtx, rtx);
extern rtx gen_vec_initv8bfbf (rtx, rtx);
extern rtx gen_vec_initv2sfsf (rtx, rtx);
extern rtx gen_vec_initv4sfsf (rtx, rtx);
extern rtx gen_vec_initv2dfdf (rtx, rtx);
extern rtx gen_vec_initv16qiv8qi (rtx, rtx);
extern rtx gen_vec_initv8hiv4hi (rtx, rtx);
extern rtx gen_vec_initv4siv2si (rtx, rtx);
extern rtx gen_vec_initv8hfv4hf (rtx, rtx);
extern rtx gen_vec_initv4sfv2sf (rtx, rtx);
extern rtx gen_vec_initv8bfv4bf (rtx, rtx);
extern rtx gen_vec_extractv8qiqi (rtx, rtx, rtx);
extern rtx gen_vec_extractv16qiqi (rtx, rtx, rtx);
extern rtx gen_vec_extractv4hihi (rtx, rtx, rtx);
extern rtx gen_vec_extractv8hihi (rtx, rtx, rtx);
extern rtx gen_vec_extractv2sisi (rtx, rtx, rtx);
extern rtx gen_vec_extractv4sisi (rtx, rtx, rtx);
extern rtx gen_vec_extractv2didi (rtx, rtx, rtx);
extern rtx gen_vec_extractv4hfhf (rtx, rtx, rtx);
extern rtx gen_vec_extractv8hfhf (rtx, rtx, rtx);
extern rtx gen_vec_extractv4bfbf (rtx, rtx, rtx);
extern rtx gen_vec_extractv8bfbf (rtx, rtx, rtx);
extern rtx gen_vec_extractv2sfsf (rtx, rtx, rtx);
extern rtx gen_vec_extractv4sfsf (rtx, rtx, rtx);
extern rtx gen_vec_extractv2dfdf (rtx, rtx, rtx);
extern rtx gen_vec_extractv16qiv8qi (rtx, rtx, rtx);
extern rtx gen_vec_extractv8hiv4hi (rtx, rtx, rtx);
extern rtx gen_vec_extractv4siv2si (rtx, rtx, rtx);
extern rtx gen_vec_extractv8hfv4hf (rtx, rtx, rtx);
extern rtx gen_vec_extractv8bfv4bf (rtx, rtx, rtx);
extern rtx gen_vec_extractv4sfv2sf (rtx, rtx, rtx);
extern rtx gen_vec_extractv2dfv1df (rtx, rtx, rtx);
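/* Widening half-precision fused multiply-add/subtract (FMLAL/FMLSL),
   in low/high-half and lane-indexed variants.  */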
extern rtx gen_aarch64_fmlal_lowv2sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlsl_lowv2sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlalq_lowv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlslq_lowv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlal_highv2sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlsl_highv2sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlalq_highv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlslq_highv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlal_lane_lowv2sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlsl_lane_lowv2sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlal_lane_highv2sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlsl_lane_highv2sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlalq_laneq_lowv4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlslq_laneq_lowv4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlalq_laneq_highv4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlslq_laneq_highv4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlal_laneq_lowv2sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlsl_laneq_lowv2sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlal_laneq_highv2sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlsl_laneq_highv2sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlalq_lane_lowv4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlslq_lane_lowv4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlalq_lane_highv4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_fmlslq_lane_highv4sf (rtx, rtx, rtx, rtx, rtx);
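/* Atomic operations: compare-and-swap, exchange, plain
   read-modify-write, and the fetch_<op>/<op>_fetch forms that also
   return the old or new value.  The trailing operand(s) of each
   expander carry the memory-model constant(s).  */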
extern rtx gen_atomic_compare_and_swapqi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_atomic_compare_and_swaphi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_atomic_compare_and_swapsi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_atomic_compare_and_swapdi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_atomic_compare_and_swapti (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_atomic_exchangeqi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_exchangehi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_exchangesi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_exchangedi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_addqi (rtx, rtx, rtx);
extern rtx gen_atomic_subqi (rtx, rtx, rtx);
extern rtx gen_atomic_orqi (rtx, rtx, rtx);
extern rtx gen_atomic_xorqi (rtx, rtx, rtx);
extern rtx gen_atomic_andqi (rtx, rtx, rtx);
extern rtx gen_atomic_addhi (rtx, rtx, rtx);
extern rtx gen_atomic_subhi (rtx, rtx, rtx);
extern rtx gen_atomic_orhi (rtx, rtx, rtx);
extern rtx gen_atomic_xorhi (rtx, rtx, rtx);
extern rtx gen_atomic_andhi (rtx, rtx, rtx);
extern rtx gen_atomic_addsi (rtx, rtx, rtx);
extern rtx gen_atomic_subsi (rtx, rtx, rtx);
extern rtx gen_atomic_orsi (rtx, rtx, rtx);
extern rtx gen_atomic_xorsi (rtx, rtx, rtx);
extern rtx gen_atomic_andsi (rtx, rtx, rtx);
extern rtx gen_atomic_adddi (rtx, rtx, rtx);
extern rtx gen_atomic_subdi (rtx, rtx, rtx);
extern rtx gen_atomic_ordi (rtx, rtx, rtx);
extern rtx gen_atomic_xordi (rtx, rtx, rtx);
extern rtx gen_atomic_anddi (rtx, rtx, rtx);
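/* A minimal usage sketch (illustrative only, not part of the generated
   interface): expanding a sequentially consistent atomic increment of
   an SImode memory location looks roughly like

     rtx model = GEN_INT (MEMMODEL_SEQ_CST);
     emit_insn (gen_atomic_addsi (mem, val, model));

   where MEM and VAL are SImode rtxes supplied by the caller.  */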
extern rtx gen_atomic_fetch_addqi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_subqi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_orqi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_xorqi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_andqi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_addhi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_subhi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_orhi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_xorhi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_andhi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_addsi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_subsi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_orsi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_xorsi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_andsi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_adddi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_subdi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_ordi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_xordi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_anddi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_add_fetchqi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_sub_fetchqi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_or_fetchqi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_xor_fetchqi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_and_fetchqi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_add_fetchhi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_sub_fetchhi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_or_fetchhi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_xor_fetchhi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_and_fetchhi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_add_fetchsi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_sub_fetchsi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_or_fetchsi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_xor_fetchsi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_and_fetchsi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_add_fetchdi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_sub_fetchdi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_or_fetchdi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_xor_fetchdi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_and_fetchdi (rtx, rtx, rtx, rtx);
extern rtx gen_mem_thread_fence (rtx);
extern rtx gen_dmb (rtx);
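/* SVE patterns.  The VNx modes are scalable: VNx16QI is 16 QI elements
   per 128 bits of vector length, VNx32QI/VNx48QI/VNx64QI are the 2-,
   3- and 4-register tuples of it, and so on; VNx16BI..VNx2BI are the
   predicate modes.  The block below covers moves, misaligned moves,
   reloads and mode reinterpretation.  */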
extern rtx gen_movvnx16qi (rtx, rtx);
extern rtx gen_movvnx8qi (rtx, rtx);
extern rtx gen_movvnx4qi (rtx, rtx);
extern rtx gen_movvnx2qi (rtx, rtx);
extern rtx gen_movvnx8hi (rtx, rtx);
extern rtx gen_movvnx4hi (rtx, rtx);
extern rtx gen_movvnx2hi (rtx, rtx);
extern rtx gen_movvnx8hf (rtx, rtx);
extern rtx gen_movvnx4hf (rtx, rtx);
extern rtx gen_movvnx2hf (rtx, rtx);
extern rtx gen_movvnx8bf (rtx, rtx);
extern rtx gen_movvnx4si (rtx, rtx);
extern rtx gen_movvnx2si (rtx, rtx);
extern rtx gen_movvnx4sf (rtx, rtx);
extern rtx gen_movvnx2sf (rtx, rtx);
extern rtx gen_movvnx2di (rtx, rtx);
extern rtx gen_movvnx2df (rtx, rtx);
extern rtx gen_movmisalignvnx16qi (rtx, rtx);
extern rtx gen_movmisalignvnx8qi (rtx, rtx);
extern rtx gen_movmisalignvnx4qi (rtx, rtx);
extern rtx gen_movmisalignvnx2qi (rtx, rtx);
extern rtx gen_movmisalignvnx8hi (rtx, rtx);
extern rtx gen_movmisalignvnx4hi (rtx, rtx);
extern rtx gen_movmisalignvnx2hi (rtx, rtx);
extern rtx gen_movmisalignvnx8hf (rtx, rtx);
extern rtx gen_movmisalignvnx4hf (rtx, rtx);
extern rtx gen_movmisalignvnx2hf (rtx, rtx);
extern rtx gen_movmisalignvnx8bf (rtx, rtx);
extern rtx gen_movmisalignvnx4si (rtx, rtx);
extern rtx gen_movmisalignvnx2si (rtx, rtx);
extern rtx gen_movmisalignvnx4sf (rtx, rtx);
extern rtx gen_movmisalignvnx2sf (rtx, rtx);
extern rtx gen_movmisalignvnx2di (rtx, rtx);
extern rtx gen_movmisalignvnx2df (rtx, rtx);
extern rtx gen_aarch64_sve_reload_mem (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_reinterpretvnx16qi (rtx, rtx);
extern rtx gen_aarch64_sve_reinterpretvnx8qi (rtx, rtx);
extern rtx gen_aarch64_sve_reinterpretvnx4qi (rtx, rtx);
extern rtx gen_aarch64_sve_reinterpretvnx2qi (rtx, rtx);
extern rtx gen_aarch64_sve_reinterpretvnx8hi (rtx, rtx);
extern rtx gen_aarch64_sve_reinterpretvnx4hi (rtx, rtx);
extern rtx gen_aarch64_sve_reinterpretvnx2hi (rtx, rtx);
extern rtx gen_aarch64_sve_reinterpretvnx8hf (rtx, rtx);
extern rtx gen_aarch64_sve_reinterpretvnx4hf (rtx, rtx);
extern rtx gen_aarch64_sve_reinterpretvnx2hf (rtx, rtx);
extern rtx gen_aarch64_sve_reinterpretvnx8bf (rtx, rtx);
extern rtx gen_aarch64_sve_reinterpretvnx4si (rtx, rtx);
extern rtx gen_aarch64_sve_reinterpretvnx2si (rtx, rtx);
extern rtx gen_aarch64_sve_reinterpretvnx4sf (rtx, rtx);
extern rtx gen_aarch64_sve_reinterpretvnx2sf (rtx, rtx);
extern rtx gen_aarch64_sve_reinterpretvnx2di (rtx, rtx);
extern rtx gen_aarch64_sve_reinterpretvnx2df (rtx, rtx);
extern rtx gen_movvnx32qi (rtx, rtx);
extern rtx gen_movvnx16hi (rtx, rtx);
extern rtx gen_movvnx8si (rtx, rtx);
extern rtx gen_movvnx4di (rtx, rtx);
extern rtx gen_movvnx16bf (rtx, rtx);
extern rtx gen_movvnx16hf (rtx, rtx);
extern rtx gen_movvnx8sf (rtx, rtx);
extern rtx gen_movvnx4df (rtx, rtx);
extern rtx gen_movvnx48qi (rtx, rtx);
extern rtx gen_movvnx24hi (rtx, rtx);
extern rtx gen_movvnx12si (rtx, rtx);
extern rtx gen_movvnx6di (rtx, rtx);
extern rtx gen_movvnx24bf (rtx, rtx);
extern rtx gen_movvnx24hf (rtx, rtx);
extern rtx gen_movvnx12sf (rtx, rtx);
extern rtx gen_movvnx6df (rtx, rtx);
extern rtx gen_movvnx64qi (rtx, rtx);
extern rtx gen_movvnx32hi (rtx, rtx);
extern rtx gen_movvnx16si (rtx, rtx);
extern rtx gen_movvnx8di (rtx, rtx);
extern rtx gen_movvnx32bf (rtx, rtx);
extern rtx gen_movvnx32hf (rtx, rtx);
extern rtx gen_movvnx16sf (rtx, rtx);
extern rtx gen_movvnx8df (rtx, rtx);
extern rtx gen_movvnx16bi (rtx, rtx);
extern rtx gen_movvnx8bi (rtx, rtx);
extern rtx gen_movvnx4bi (rtx, rtx);
extern rtx gen_movvnx2bi (rtx, rtx);
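/* SVE structure loads via the vec_load_lanes optab; the first mode in
   the name is the tuple mode, the second the single-vector mode.  */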
extern rtx gen_vec_load_lanesvnx32qivnx16qi (rtx, rtx);
extern rtx gen_vec_load_lanesvnx16hivnx8hi (rtx, rtx);
extern rtx gen_vec_load_lanesvnx8sivnx4si (rtx, rtx);
extern rtx gen_vec_load_lanesvnx4divnx2di (rtx, rtx);
extern rtx gen_vec_load_lanesvnx16bfvnx8bf (rtx, rtx);
extern rtx gen_vec_load_lanesvnx16hfvnx8hf (rtx, rtx);
extern rtx gen_vec_load_lanesvnx8sfvnx4sf (rtx, rtx);
extern rtx gen_vec_load_lanesvnx4dfvnx2df (rtx, rtx);
extern rtx gen_vec_load_lanesvnx48qivnx16qi (rtx, rtx);
extern rtx gen_vec_load_lanesvnx24hivnx8hi (rtx, rtx);
extern rtx gen_vec_load_lanesvnx12sivnx4si (rtx, rtx);
extern rtx gen_vec_load_lanesvnx6divnx2di (rtx, rtx);
extern rtx gen_vec_load_lanesvnx24bfvnx8bf (rtx, rtx);
extern rtx gen_vec_load_lanesvnx24hfvnx8hf (rtx, rtx);
extern rtx gen_vec_load_lanesvnx12sfvnx4sf (rtx, rtx);
extern rtx gen_vec_load_lanesvnx6dfvnx2df (rtx, rtx);
extern rtx gen_vec_load_lanesvnx64qivnx16qi (rtx, rtx);
extern rtx gen_vec_load_lanesvnx32hivnx8hi (rtx, rtx);
extern rtx gen_vec_load_lanesvnx16sivnx4si (rtx, rtx);
extern rtx gen_vec_load_lanesvnx8divnx2di (rtx, rtx);
extern rtx gen_vec_load_lanesvnx32bfvnx8bf (rtx, rtx);
extern rtx gen_vec_load_lanesvnx32hfvnx8hf (rtx, rtx);
extern rtx gen_vec_load_lanesvnx16sfvnx4sf (rtx, rtx);
extern rtx gen_vec_load_lanesvnx8dfvnx2df (rtx, rtx);
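/* Gather loads: destination, scalar base and offset vector, with
   trailing operands selecting sign- vs zero-extension of the offsets
   and the scale factor.  */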
extern rtx gen_gather_loadvnx2qivnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_gather_loadvnx2hivnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_gather_loadvnx2hfvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_gather_loadvnx2sivnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_gather_loadvnx2sfvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_gather_loadvnx2divnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_gather_loadvnx2dfvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_gather_loadvnx4qivnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_gather_loadvnx4hivnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_gather_loadvnx4hfvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_gather_loadvnx4sivnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_gather_loadvnx4sfvnx4si (rtx, rtx, rtx, rtx, rtx);
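/* SVE structure stores via the vec_store_lanes optab.  */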
extern rtx gen_vec_store_lanesvnx32qivnx16qi (rtx, rtx);
extern rtx gen_vec_store_lanesvnx16hivnx8hi (rtx, rtx);
extern rtx gen_vec_store_lanesvnx8sivnx4si (rtx, rtx);
extern rtx gen_vec_store_lanesvnx4divnx2di (rtx, rtx);
extern rtx gen_vec_store_lanesvnx16bfvnx8bf (rtx, rtx);
extern rtx gen_vec_store_lanesvnx16hfvnx8hf (rtx, rtx);
extern rtx gen_vec_store_lanesvnx8sfvnx4sf (rtx, rtx);
extern rtx gen_vec_store_lanesvnx4dfvnx2df (rtx, rtx);
extern rtx gen_vec_store_lanesvnx48qivnx16qi (rtx, rtx);
extern rtx gen_vec_store_lanesvnx24hivnx8hi (rtx, rtx);
extern rtx gen_vec_store_lanesvnx12sivnx4si (rtx, rtx);
extern rtx gen_vec_store_lanesvnx6divnx2di (rtx, rtx);
extern rtx gen_vec_store_lanesvnx24bfvnx8bf (rtx, rtx);
extern rtx gen_vec_store_lanesvnx24hfvnx8hf (rtx, rtx);
extern rtx gen_vec_store_lanesvnx12sfvnx4sf (rtx, rtx);
extern rtx gen_vec_store_lanesvnx6dfvnx2df (rtx, rtx);
extern rtx gen_vec_store_lanesvnx64qivnx16qi (rtx, rtx);
extern rtx gen_vec_store_lanesvnx32hivnx8hi (rtx, rtx);
extern rtx gen_vec_store_lanesvnx16sivnx4si (rtx, rtx);
extern rtx gen_vec_store_lanesvnx8divnx2di (rtx, rtx);
extern rtx gen_vec_store_lanesvnx32bfvnx8bf (rtx, rtx);
extern rtx gen_vec_store_lanesvnx32hfvnx8hf (rtx, rtx);
extern rtx gen_vec_store_lanesvnx16sfvnx4sf (rtx, rtx);
extern rtx gen_vec_store_lanesvnx8dfvnx2df (rtx, rtx);
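/* Scatter stores; the operands mirror the gather loads above, with the
   stored value last.  */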
extern rtx gen_scatter_storevnx2qivnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_scatter_storevnx2hivnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_scatter_storevnx2hfvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_scatter_storevnx2sivnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_scatter_storevnx2sfvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_scatter_storevnx2divnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_scatter_storevnx2dfvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_scatter_storevnx4qivnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_scatter_storevnx4hivnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_scatter_storevnx4hfvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_scatter_storevnx4sivnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_scatter_storevnx4sfvnx4si (rtx, rtx, rtx, rtx, rtx);
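/* Broadcasts, vector construction and element extraction for SVE,
   including vec_duplicate on predicate modes and extraction from
   predicates.  */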
extern rtx gen_vec_duplicatevnx16qi (rtx, rtx);
extern rtx gen_vec_duplicatevnx8qi (rtx, rtx);
extern rtx gen_vec_duplicatevnx4qi (rtx, rtx);
extern rtx gen_vec_duplicatevnx2qi (rtx, rtx);
extern rtx gen_vec_duplicatevnx8hi (rtx, rtx);
extern rtx gen_vec_duplicatevnx4hi (rtx, rtx);
extern rtx gen_vec_duplicatevnx2hi (rtx, rtx);
extern rtx gen_vec_duplicatevnx8hf (rtx, rtx);
extern rtx gen_vec_duplicatevnx4hf (rtx, rtx);
extern rtx gen_vec_duplicatevnx2hf (rtx, rtx);
extern rtx gen_vec_duplicatevnx8bf (rtx, rtx);
extern rtx gen_vec_duplicatevnx4si (rtx, rtx);
extern rtx gen_vec_duplicatevnx2si (rtx, rtx);
extern rtx gen_vec_duplicatevnx4sf (rtx, rtx);
extern rtx gen_vec_duplicatevnx2sf (rtx, rtx);
extern rtx gen_vec_duplicatevnx2di (rtx, rtx);
extern rtx gen_vec_duplicatevnx2df (rtx, rtx);
extern rtx gen_vec_initvnx16qiqi (rtx, rtx);
extern rtx gen_vec_initvnx8hihi (rtx, rtx);
extern rtx gen_vec_initvnx4sisi (rtx, rtx);
extern rtx gen_vec_initvnx2didi (rtx, rtx);
extern rtx gen_vec_initvnx8bfbf (rtx, rtx);
extern rtx gen_vec_initvnx8hfhf (rtx, rtx);
extern rtx gen_vec_initvnx4sfsf (rtx, rtx);
extern rtx gen_vec_initvnx2dfdf (rtx, rtx);
extern rtx gen_vec_duplicatevnx16bi (rtx, rtx);
extern rtx gen_vec_duplicatevnx8bi (rtx, rtx);
extern rtx gen_vec_duplicatevnx4bi (rtx, rtx);
extern rtx gen_vec_duplicatevnx2bi (rtx, rtx);
extern rtx gen_vec_extractvnx16qiqi (rtx, rtx, rtx);
extern rtx gen_vec_extractvnx8hihi (rtx, rtx, rtx);
extern rtx gen_vec_extractvnx4sisi (rtx, rtx, rtx);
extern rtx gen_vec_extractvnx2didi (rtx, rtx, rtx);
extern rtx gen_vec_extractvnx8bfbf (rtx, rtx, rtx);
extern rtx gen_vec_extractvnx8hfhf (rtx, rtx, rtx);
extern rtx gen_vec_extractvnx4sfsf (rtx, rtx, rtx);
extern rtx gen_vec_extractvnx2dfdf (rtx, rtx, rtx);
extern rtx gen_vec_extractvnx16biqi (rtx, rtx, rtx);
extern rtx gen_vec_extractvnx8bihi (rtx, rtx, rtx);
extern rtx gen_vec_extractvnx4bisi (rtx, rtx, rtx);
extern rtx gen_vec_extractvnx2bidi (rtx, rtx, rtx);
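/* Integer unary operations and their cond_* variants.  A cond_<op>
   expander takes a governing predicate, the source operand(s) and a
   fallback value used for inactive lanes.  */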
extern rtx gen_absvnx16qi2 (rtx, rtx);
extern rtx gen_negvnx16qi2 (rtx, rtx);
extern rtx gen_one_cmplvnx16qi2 (rtx, rtx);
extern rtx gen_clrsbvnx16qi2 (rtx, rtx);
extern rtx gen_clzvnx16qi2 (rtx, rtx);
extern rtx gen_popcountvnx16qi2 (rtx, rtx);
extern rtx gen_qabsvnx16qi2 (rtx, rtx);
extern rtx gen_qnegvnx16qi2 (rtx, rtx);
extern rtx gen_absvnx8hi2 (rtx, rtx);
extern rtx gen_negvnx8hi2 (rtx, rtx);
extern rtx gen_one_cmplvnx8hi2 (rtx, rtx);
extern rtx gen_clrsbvnx8hi2 (rtx, rtx);
extern rtx gen_clzvnx8hi2 (rtx, rtx);
extern rtx gen_popcountvnx8hi2 (rtx, rtx);
extern rtx gen_qabsvnx8hi2 (rtx, rtx);
extern rtx gen_qnegvnx8hi2 (rtx, rtx);
extern rtx gen_absvnx4si2 (rtx, rtx);
extern rtx gen_negvnx4si2 (rtx, rtx);
extern rtx gen_one_cmplvnx4si2 (rtx, rtx);
extern rtx gen_clrsbvnx4si2 (rtx, rtx);
extern rtx gen_clzvnx4si2 (rtx, rtx);
extern rtx gen_popcountvnx4si2 (rtx, rtx);
extern rtx gen_qabsvnx4si2 (rtx, rtx);
extern rtx gen_qnegvnx4si2 (rtx, rtx);
extern rtx gen_absvnx2di2 (rtx, rtx);
extern rtx gen_negvnx2di2 (rtx, rtx);
extern rtx gen_one_cmplvnx2di2 (rtx, rtx);
extern rtx gen_clrsbvnx2di2 (rtx, rtx);
extern rtx gen_clzvnx2di2 (rtx, rtx);
extern rtx gen_popcountvnx2di2 (rtx, rtx);
extern rtx gen_qabsvnx2di2 (rtx, rtx);
extern rtx gen_qnegvnx2di2 (rtx, rtx);
extern rtx gen_cond_absvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_cond_negvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_cond_one_cmplvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_cond_clrsbvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_cond_clzvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_cond_popcountvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_cond_qabsvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_cond_qnegvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_cond_absvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_cond_negvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_cond_one_cmplvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_cond_clrsbvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_cond_clzvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_cond_popcountvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_cond_qabsvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_cond_qnegvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_cond_absvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_cond_negvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_cond_one_cmplvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_cond_clrsbvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_cond_clzvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_cond_popcountvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_cond_qabsvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_cond_qnegvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_cond_absvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_cond_negvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_cond_one_cmplvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_cond_clrsbvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_cond_clzvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_cond_popcountvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_cond_qabsvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_cond_qnegvnx2di (rtx, rtx, rtx, rtx);
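/* Sign- and zero-extension between SVE integer modes.  Only the
   combinations declared extern have real patterns; the static inline
   stubs that follow return 0 (no insn) so that generic code can call
   every combination uniformly.  Each call is expected to be guarded by
   the corresponding HAVE_* macro defined earlier in this header, e.g.
   (illustrative sketch only):

     if (HAVE_extendvnx8qivnx8hi2)
       emit_insn (gen_extendvnx8qivnx8hi2 (dest, src));
 */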
extern rtx gen_extendvnx8qivnx8hi2 (rtx, rtx);
extern rtx gen_zero_extendvnx8qivnx8hi2 (rtx, rtx);
static inline rtx gen_extendvnx4qivnx8hi2 (rtx, rtx);
static inline rtx
gen_extendvnx4qivnx8hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx4qivnx8hi2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx4qivnx8hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_extendvnx2qivnx8hi2 (rtx, rtx);
static inline rtx
gen_extendvnx2qivnx8hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx2qivnx8hi2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx2qivnx8hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_extendvnx4hivnx8hi2 (rtx, rtx);
static inline rtx
gen_extendvnx4hivnx8hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx4hivnx8hi2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx4hivnx8hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_extendvnx2hivnx8hi2 (rtx, rtx);
static inline rtx
gen_extendvnx2hivnx8hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx2hivnx8hi2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx2hivnx8hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_extendvnx2sivnx8hi2 (rtx, rtx);
static inline rtx
gen_extendvnx2sivnx8hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx2sivnx8hi2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx2sivnx8hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_extendvnx8qivnx4hi2 (rtx, rtx);
static inline rtx
gen_extendvnx8qivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx8qivnx4hi2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx8qivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
extern rtx gen_extendvnx4qivnx4hi2 (rtx, rtx);
extern rtx gen_zero_extendvnx4qivnx4hi2 (rtx, rtx);
static inline rtx gen_extendvnx2qivnx4hi2 (rtx, rtx);
static inline rtx
gen_extendvnx2qivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx2qivnx4hi2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx2qivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_extendvnx4hivnx4hi2 (rtx, rtx);
static inline rtx
gen_extendvnx4hivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx4hivnx4hi2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx4hivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_extendvnx2hivnx4hi2 (rtx, rtx);
static inline rtx
gen_extendvnx2hivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx2hivnx4hi2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx2hivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_extendvnx2sivnx4hi2 (rtx, rtx);
static inline rtx
gen_extendvnx2sivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx2sivnx4hi2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx2sivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_extendvnx8qivnx2hi2 (rtx, rtx);
static inline rtx
gen_extendvnx8qivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx8qivnx2hi2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx8qivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_extendvnx4qivnx2hi2 (rtx, rtx);
static inline rtx
gen_extendvnx4qivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx4qivnx2hi2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx4qivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
extern rtx gen_extendvnx2qivnx2hi2 (rtx, rtx);
extern rtx gen_zero_extendvnx2qivnx2hi2 (rtx, rtx);
static inline rtx gen_extendvnx4hivnx2hi2 (rtx, rtx);
static inline rtx
gen_extendvnx4hivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx4hivnx2hi2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx4hivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_extendvnx2hivnx2hi2 (rtx, rtx);
static inline rtx
gen_extendvnx2hivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx2hivnx2hi2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx2hivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_extendvnx2sivnx2hi2 (rtx, rtx);
static inline rtx
gen_extendvnx2sivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx2sivnx2hi2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx2sivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_extendvnx8qivnx4si2 (rtx, rtx);
static inline rtx
gen_extendvnx8qivnx4si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx8qivnx4si2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx8qivnx4si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
extern rtx gen_extendvnx4qivnx4si2 (rtx, rtx);
extern rtx gen_zero_extendvnx4qivnx4si2 (rtx, rtx);
static inline rtx gen_extendvnx2qivnx4si2 (rtx, rtx);
static inline rtx
gen_extendvnx2qivnx4si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx2qivnx4si2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx2qivnx4si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
extern rtx gen_extendvnx4hivnx4si2 (rtx, rtx);
extern rtx gen_zero_extendvnx4hivnx4si2 (rtx, rtx);
static inline rtx gen_extendvnx2hivnx4si2 (rtx, rtx);
static inline rtx
gen_extendvnx2hivnx4si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx2hivnx4si2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx2hivnx4si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_extendvnx2sivnx4si2 (rtx, rtx);
static inline rtx
gen_extendvnx2sivnx4si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx2sivnx4si2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx2sivnx4si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_extendvnx8qivnx2si2 (rtx, rtx);
static inline rtx
gen_extendvnx8qivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx8qivnx2si2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx8qivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_extendvnx4qivnx2si2 (rtx, rtx);
static inline rtx
gen_extendvnx4qivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx4qivnx2si2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx4qivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
extern rtx gen_extendvnx2qivnx2si2 (rtx, rtx);
extern rtx gen_zero_extendvnx2qivnx2si2 (rtx, rtx);
static inline rtx gen_extendvnx4hivnx2si2 (rtx, rtx);
static inline rtx
gen_extendvnx4hivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx4hivnx2si2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx4hivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
extern rtx gen_extendvnx2hivnx2si2 (rtx, rtx);
extern rtx gen_zero_extendvnx2hivnx2si2 (rtx, rtx);
static inline rtx gen_extendvnx2sivnx2si2 (rtx, rtx);
static inline rtx
gen_extendvnx2sivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx2sivnx2si2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx2sivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_extendvnx8qivnx2di2 (rtx, rtx);
static inline rtx
gen_extendvnx8qivnx2di2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx8qivnx2di2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx8qivnx2di2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_extendvnx4qivnx2di2 (rtx, rtx);
static inline rtx
gen_extendvnx4qivnx2di2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx4qivnx2di2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx4qivnx2di2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
extern rtx gen_extendvnx2qivnx2di2 (rtx, rtx);
extern rtx gen_zero_extendvnx2qivnx2di2 (rtx, rtx);
static inline rtx gen_extendvnx4hivnx2di2 (rtx, rtx);
static inline rtx
gen_extendvnx4hivnx2di2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
static inline rtx gen_zero_extendvnx4hivnx2di2 (rtx, rtx);
static inline rtx
gen_zero_extendvnx4hivnx2di2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
extern rtx gen_extendvnx2hivnx2di2 (rtx, rtx);
extern rtx gen_zero_extendvnx2hivnx2di2 (rtx, rtx);
extern rtx gen_extendvnx2sivnx2di2 (rtx, rtx);
extern rtx gen_zero_extendvnx2sivnx2di2 (rtx, rtx);
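/* Predicated CNOT: for each active element, produce 1 if the source
   element is zero and 0 otherwise.  */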
extern rtx gen_aarch64_pred_cnotvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cnotvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cnotvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_cnotvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_cond_cnotvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_cond_cnotvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_cond_cnotvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_cond_cnotvnx2di (rtx, rtx, rtx, rtx);
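/* Floating-point unary operations (absolute value, negation, FRECPX
   and the rounding forms) with predicated cond_* variants, followed by
   sqrt/rsqrt support.  */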
extern rtx gen_absvnx8hf2 (rtx, rtx);
extern rtx gen_negvnx8hf2 (rtx, rtx);
extern rtx gen_frecpxvnx8hf2 (rtx, rtx);
extern rtx gen_roundvnx8hf2 (rtx, rtx);
extern rtx gen_nearbyintvnx8hf2 (rtx, rtx);
extern rtx gen_floorvnx8hf2 (rtx, rtx);
extern rtx gen_frintnvnx8hf2 (rtx, rtx);
extern rtx gen_ceilvnx8hf2 (rtx, rtx);
extern rtx gen_rintvnx8hf2 (rtx, rtx);
extern rtx gen_btruncvnx8hf2 (rtx, rtx);
extern rtx gen_absvnx4sf2 (rtx, rtx);
extern rtx gen_negvnx4sf2 (rtx, rtx);
extern rtx gen_frecpxvnx4sf2 (rtx, rtx);
extern rtx gen_roundvnx4sf2 (rtx, rtx);
extern rtx gen_nearbyintvnx4sf2 (rtx, rtx);
extern rtx gen_floorvnx4sf2 (rtx, rtx);
extern rtx gen_frintnvnx4sf2 (rtx, rtx);
extern rtx gen_ceilvnx4sf2 (rtx, rtx);
extern rtx gen_rintvnx4sf2 (rtx, rtx);
extern rtx gen_btruncvnx4sf2 (rtx, rtx);
extern rtx gen_absvnx2df2 (rtx, rtx);
extern rtx gen_negvnx2df2 (rtx, rtx);
extern rtx gen_frecpxvnx2df2 (rtx, rtx);
extern rtx gen_roundvnx2df2 (rtx, rtx);
extern rtx gen_nearbyintvnx2df2 (rtx, rtx);
extern rtx gen_floorvnx2df2 (rtx, rtx);
extern rtx gen_frintnvnx2df2 (rtx, rtx);
extern rtx gen_ceilvnx2df2 (rtx, rtx);
extern rtx gen_rintvnx2df2 (rtx, rtx);
extern rtx gen_btruncvnx2df2 (rtx, rtx);
extern rtx gen_cond_absvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_negvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_frecpxvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_roundvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_nearbyintvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_floorvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_frintnvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_ceilvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_rintvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_btruncvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_sqrtvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_absvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_negvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_frecpxvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_roundvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_nearbyintvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_floorvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_frintnvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_ceilvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_rintvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_btruncvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_sqrtvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_absvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_cond_negvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_cond_frecpxvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_cond_roundvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_cond_nearbyintvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_cond_floorvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_cond_frintnvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_cond_ceilvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_cond_rintvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_cond_btruncvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_cond_sqrtvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_sqrtvnx8hf2 (rtx, rtx);
extern rtx gen_sqrtvnx4sf2 (rtx, rtx);
extern rtx gen_sqrtvnx2df2 (rtx, rtx);
extern rtx gen_rsqrtvnx4sf2 (rtx, rtx);
extern rtx gen_rsqrtvnx2df2 (rtx, rtx);
extern rtx gen_aarch64_rsqrtevnx4sf (rtx, rtx);
extern rtx gen_aarch64_rsqrtevnx2df (rtx, rtx);
extern rtx gen_aarch64_rsqrtsvnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_rsqrtsvnx2df (rtx, rtx, rtx);
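/* Logical NOT on SVE predicate registers.  */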
extern rtx gen_one_cmplvnx16bi2 (rtx, rtx);
extern rtx gen_one_cmplvnx8bi2 (rtx, rtx);
extern rtx gen_one_cmplvnx4bi2 (rtx, rtx);
extern rtx gen_one_cmplvnx2bi2 (rtx, rtx);
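/* Integer binary operations and their predicated cond_* forms,
   followed by ADR-based address arithmetic, absolute difference,
   high-part multiplies, division, BIC, shifts by scalar or vector
   amounts, and the rounding/saturating shift-by-immediate patterns.  */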
extern rtx gen_mulvnx16qi3 (rtx, rtx, rtx);
extern rtx gen_smaxvnx16qi3 (rtx, rtx, rtx);
extern rtx gen_sminvnx16qi3 (rtx, rtx, rtx);
extern rtx gen_umaxvnx16qi3 (rtx, rtx, rtx);
extern rtx gen_uminvnx16qi3 (rtx, rtx, rtx);
extern rtx gen_mulvnx8hi3 (rtx, rtx, rtx);
extern rtx gen_smaxvnx8hi3 (rtx, rtx, rtx);
extern rtx gen_sminvnx8hi3 (rtx, rtx, rtx);
extern rtx gen_umaxvnx8hi3 (rtx, rtx, rtx);
extern rtx gen_uminvnx8hi3 (rtx, rtx, rtx);
extern rtx gen_mulvnx4si3 (rtx, rtx, rtx);
extern rtx gen_smaxvnx4si3 (rtx, rtx, rtx);
extern rtx gen_sminvnx4si3 (rtx, rtx, rtx);
extern rtx gen_umaxvnx4si3 (rtx, rtx, rtx);
extern rtx gen_uminvnx4si3 (rtx, rtx, rtx);
extern rtx gen_mulvnx2di3 (rtx, rtx, rtx);
extern rtx gen_smaxvnx2di3 (rtx, rtx, rtx);
extern rtx gen_sminvnx2di3 (rtx, rtx, rtx);
extern rtx gen_umaxvnx2di3 (rtx, rtx, rtx);
extern rtx gen_uminvnx2di3 (rtx, rtx, rtx);
extern rtx gen_cond_addvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_subvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_mulvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_smaxvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_umaxvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sminvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uminvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_ashlvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_ashrvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_lshrvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_andvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_iorvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_xorvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_ssaddvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_usaddvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sssubvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_ussubvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_addvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_subvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_mulvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_smaxvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_umaxvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sminvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uminvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_ashlvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_ashrvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_lshrvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_andvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_iorvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_xorvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_ssaddvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_usaddvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sssubvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_ussubvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_addvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_subvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_mulvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_smaxvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_umaxvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sminvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uminvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_ashlvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_ashrvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_lshrvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_andvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_iorvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_xorvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_ssaddvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_usaddvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sssubvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_ussubvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_addvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_subvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_mulvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_smaxvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_umaxvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sminvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uminvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_ashlvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_ashrvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_lshrvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_andvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_iorvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_xorvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_ssaddvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_usaddvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sssubvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_ussubvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_adrvnx4si_shift (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_adrvnx2di_shift (rtx, rtx, rtx, rtx);
extern rtx gen_sabdvnx16qi_3 (rtx, rtx, rtx);
extern rtx gen_uabdvnx16qi_3 (rtx, rtx, rtx);
extern rtx gen_sabdvnx8hi_3 (rtx, rtx, rtx);
extern rtx gen_uabdvnx8hi_3 (rtx, rtx, rtx);
extern rtx gen_sabdvnx4si_3 (rtx, rtx, rtx);
extern rtx gen_uabdvnx4si_3 (rtx, rtx, rtx);
extern rtx gen_sabdvnx2di_3 (rtx, rtx, rtx);
extern rtx gen_uabdvnx2di_3 (rtx, rtx, rtx);
extern rtx gen_aarch64_cond_sabdvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cond_uabdvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cond_sabdvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cond_uabdvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cond_sabdvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cond_uabdvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cond_sabdvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cond_uabdvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smulvnx16qi3_highpart (rtx, rtx, rtx);
extern rtx gen_umulvnx16qi3_highpart (rtx, rtx, rtx);
extern rtx gen_smulvnx8hi3_highpart (rtx, rtx, rtx);
extern rtx gen_umulvnx8hi3_highpart (rtx, rtx, rtx);
extern rtx gen_smulvnx4si3_highpart (rtx, rtx, rtx);
extern rtx gen_umulvnx4si3_highpart (rtx, rtx, rtx);
extern rtx gen_smulvnx2di3_highpart (rtx, rtx, rtx);
extern rtx gen_umulvnx2di3_highpart (rtx, rtx, rtx);
extern rtx gen_cond_smulhvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_umulhvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_smulhvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_umulhvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_smulhvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_umulhvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_smulhvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_umulhvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_divvnx4si3 (rtx, rtx, rtx);
extern rtx gen_udivvnx4si3 (rtx, rtx, rtx);
extern rtx gen_divvnx2di3 (rtx, rtx, rtx);
extern rtx gen_udivvnx2di3 (rtx, rtx, rtx);
extern rtx gen_cond_divvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_udivvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_divvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_udivvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_bicvnx16qi (rtx, rtx, rtx);
extern rtx gen_aarch64_bicvnx8hi (rtx, rtx, rtx);
extern rtx gen_aarch64_bicvnx4si (rtx, rtx, rtx);
extern rtx gen_aarch64_bicvnx2di (rtx, rtx, rtx);
extern rtx gen_cond_bicvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_bicvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_bicvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_bicvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ashlvnx16qi3 (rtx, rtx, rtx);
extern rtx gen_ashrvnx16qi3 (rtx, rtx, rtx);
extern rtx gen_lshrvnx16qi3 (rtx, rtx, rtx);
extern rtx gen_ashlvnx8hi3 (rtx, rtx, rtx);
extern rtx gen_ashrvnx8hi3 (rtx, rtx, rtx);
extern rtx gen_lshrvnx8hi3 (rtx, rtx, rtx);
extern rtx gen_ashlvnx4si3 (rtx, rtx, rtx);
extern rtx gen_ashrvnx4si3 (rtx, rtx, rtx);
extern rtx gen_lshrvnx4si3 (rtx, rtx, rtx);
extern rtx gen_ashlvnx2di3 (rtx, rtx, rtx);
extern rtx gen_ashrvnx2di3 (rtx, rtx, rtx);
extern rtx gen_lshrvnx2di3 (rtx, rtx, rtx);
extern rtx gen_vashlvnx16qi3 (rtx, rtx, rtx);
extern rtx gen_vashrvnx16qi3 (rtx, rtx, rtx);
extern rtx gen_vlshrvnx16qi3 (rtx, rtx, rtx);
extern rtx gen_vashlvnx8hi3 (rtx, rtx, rtx);
extern rtx gen_vashrvnx8hi3 (rtx, rtx, rtx);
extern rtx gen_vlshrvnx8hi3 (rtx, rtx, rtx);
extern rtx gen_vashlvnx4si3 (rtx, rtx, rtx);
extern rtx gen_vashrvnx4si3 (rtx, rtx, rtx);
extern rtx gen_vlshrvnx4si3 (rtx, rtx, rtx);
extern rtx gen_vashlvnx2di3 (rtx, rtx, rtx);
extern rtx gen_vashrvnx2di3 (rtx, rtx, rtx);
extern rtx gen_vlshrvnx2di3 (rtx, rtx, rtx);
extern rtx gen_cond_lslvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_asrvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_lsrvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_lslvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_asrvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_lsrvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_lslvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_asrvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_lsrvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sdiv_pow2vnx16qi3 (rtx, rtx, rtx);
extern rtx gen_sdiv_pow2vnx8hi3 (rtx, rtx, rtx);
extern rtx gen_sdiv_pow2vnx4si3 (rtx, rtx, rtx);
extern rtx gen_sdiv_pow2vnx2di3 (rtx, rtx, rtx);
extern rtx gen_cond_asrdvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sqshluvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_srshrvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_urshrvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_asrdvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sqshluvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_srshrvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_urshrvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_asrdvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sqshluvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_srshrvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_urshrvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_asrdvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sqshluvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_srshrvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_urshrvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fscalevnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fscalevnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fscalevnx2df (rtx, rtx, rtx, rtx, rtx);
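/* Floating-point arithmetic, in both unconditional and predicated
   (cond_*) forms.  */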
extern rtx gen_addvnx8hf3 (rtx, rtx, rtx);
extern rtx gen_smax_nanvnx8hf3 (rtx, rtx, rtx);
extern rtx gen_smaxvnx8hf3 (rtx, rtx, rtx);
extern rtx gen_smin_nanvnx8hf3 (rtx, rtx, rtx);
extern rtx gen_sminvnx8hf3 (rtx, rtx, rtx);
extern rtx gen_mulvnx8hf3 (rtx, rtx, rtx);
extern rtx gen_mulxvnx8hf3 (rtx, rtx, rtx);
extern rtx gen_subvnx8hf3 (rtx, rtx, rtx);
extern rtx gen_addvnx4sf3 (rtx, rtx, rtx);
extern rtx gen_smax_nanvnx4sf3 (rtx, rtx, rtx);
extern rtx gen_smaxvnx4sf3 (rtx, rtx, rtx);
extern rtx gen_smin_nanvnx4sf3 (rtx, rtx, rtx);
extern rtx gen_sminvnx4sf3 (rtx, rtx, rtx);
extern rtx gen_mulvnx4sf3 (rtx, rtx, rtx);
extern rtx gen_mulxvnx4sf3 (rtx, rtx, rtx);
extern rtx gen_subvnx4sf3 (rtx, rtx, rtx);
extern rtx gen_addvnx2df3 (rtx, rtx, rtx);
extern rtx gen_smax_nanvnx2df3 (rtx, rtx, rtx);
extern rtx gen_smaxvnx2df3 (rtx, rtx, rtx);
extern rtx gen_smin_nanvnx2df3 (rtx, rtx, rtx);
extern rtx gen_sminvnx2df3 (rtx, rtx, rtx);
extern rtx gen_mulvnx2df3 (rtx, rtx, rtx);
extern rtx gen_mulxvnx2df3 (rtx, rtx, rtx);
extern rtx gen_subvnx2df3 (rtx, rtx, rtx);
extern rtx gen_cond_addvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_divvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_smax_nanvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_smaxvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_smin_nanvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sminvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_mulvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_mulxvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_subvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_addvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_divvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_smax_nanvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_smaxvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_smin_nanvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sminvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_mulvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_mulxvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_subvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_addvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_divvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_smax_nanvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_smaxvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_smin_nanvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sminvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_mulvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_mulxvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_subvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_cadd90vnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_cadd270vnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_cadd90vnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_cadd270vnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_cadd90vnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_cadd270vnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_abdvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_abdvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_abdvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cond_abdvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cond_abdvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_cond_abdvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_divvnx8hf3 (rtx, rtx, rtx);
extern rtx gen_divvnx4sf3 (rtx, rtx, rtx);
extern rtx gen_divvnx2df3 (rtx, rtx, rtx);
extern rtx gen_aarch64_frecpevnx8hf (rtx, rtx);
extern rtx gen_aarch64_frecpevnx4sf (rtx, rtx);
extern rtx gen_aarch64_frecpevnx2df (rtx, rtx);
extern rtx gen_aarch64_frecpsvnx8hf (rtx, rtx, rtx);
extern rtx gen_aarch64_frecpsvnx4sf (rtx, rtx, rtx);
extern rtx gen_aarch64_frecpsvnx2df (rtx, rtx, rtx);
extern rtx gen_copysignvnx8hf3 (rtx, rtx, rtx);
extern rtx gen_copysignvnx4sf3 (rtx, rtx, rtx);
extern rtx gen_copysignvnx2df3 (rtx, rtx, rtx);
extern rtx gen_xorsignvnx8hf3 (rtx, rtx, rtx);
extern rtx gen_xorsignvnx4sf3 (rtx, rtx, rtx);
extern rtx gen_xorsignvnx2df3 (rtx, rtx, rtx);
extern rtx gen_fmaxvnx8hf3 (rtx, rtx, rtx);
extern rtx gen_fminvnx8hf3 (rtx, rtx, rtx);
extern rtx gen_fmaxvnx4sf3 (rtx, rtx, rtx);
extern rtx gen_fminvnx4sf3 (rtx, rtx, rtx);
extern rtx gen_fmaxvnx2df3 (rtx, rtx, rtx);
extern rtx gen_fminvnx2df3 (rtx, rtx, rtx);
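/* Logical operations on predicate (VNxBI) modes.  */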
extern rtx gen_iorvnx16bi3 (rtx, rtx, rtx);
extern rtx gen_xorvnx16bi3 (rtx, rtx, rtx);
extern rtx gen_iorvnx8bi3 (rtx, rtx, rtx);
extern rtx gen_xorvnx8bi3 (rtx, rtx, rtx);
extern rtx gen_iorvnx4bi3 (rtx, rtx, rtx);
extern rtx gen_xorvnx4bi3 (rtx, rtx, rtx);
extern rtx gen_iorvnx2bi3 (rtx, rtx, rtx);
extern rtx gen_xorvnx2bi3 (rtx, rtx, rtx);
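/* Integer multiply-accumulate: fma/fnma on integer vector modes
   correspond to MLA/MLS-style operations.  */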
extern rtx gen_fmavnx16qi4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmavnx8hi4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmavnx4si4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmavnx2di4 (rtx, rtx, rtx, rtx);
extern rtx gen_cond_fmavnx16qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fmavnx8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fmavnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fmavnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fnmavnx16qi4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmavnx8hi4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmavnx4si4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmavnx2di4 (rtx, rtx, rtx, rtx);
extern rtx gen_cond_fnmavnx16qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fnmavnx8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fnmavnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fnmavnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ssadvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_usadvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_ssadvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_usadvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_fmavnx8hf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmavnx8hf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmsvnx8hf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmsvnx8hf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmavnx4sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmavnx4sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmsvnx4sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmsvnx4sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmavnx2df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmavnx2df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmsvnx2df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmsvnx2df4 (rtx, rtx, rtx, rtx);
extern rtx gen_cond_fmavnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fnmavnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fnmsvnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fmsvnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fmavnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fnmavnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fnmsvnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fmsvnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fmavnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fnmavnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fnmsvnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fmsvnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fcmlavnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fcmla90vnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fcmla180vnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fcmla270vnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fcmlavnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fcmla90vnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fcmla180vnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fcmla270vnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fcmlavnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fcmla90vnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fcmla180vnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fcmla270vnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
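/* Vector select (vcond*) and comparison (vec_cmp*) expanders; the
   comparisons produce predicate-mode results.  */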
extern rtx gen_vcond_mask_vnx16qivnx16bi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_vnx8hivnx8bi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_vnx4sivnx4bi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_vnx2divnx2bi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_vnx8bfvnx8bi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_vnx8hfvnx8bi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_vnx4sfvnx4bi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_vnx2dfvnx2bi (rtx, rtx, rtx, rtx);
extern rtx gen_vcondvnx16qivnx16qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondvnx8hivnx8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondvnx4sivnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondvnx2divnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondvnx8bfvnx8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondvnx8hfvnx8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondvnx4sfvnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondvnx2dfvnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduvnx16qivnx16qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduvnx8hivnx8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduvnx4sivnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduvnx2divnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduvnx8bfvnx8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduvnx8hfvnx8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduvnx4sfvnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduvnx2dfvnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondvnx8hivnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondvnx4sivnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondvnx2divnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondvnx8bfvnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondvnx8hfvnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondvnx4sfvnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondvnx2dfvnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpvnx16qivnx16bi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpvnx8hivnx8bi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpvnx4sivnx4bi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpvnx2divnx2bi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuvnx16qivnx16bi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuvnx8hivnx8bi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuvnx4sivnx4bi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuvnx2divnx2bi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpvnx8hfvnx8bi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpvnx4sfvnx4bi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpvnx2dfvnx2bi (rtx, rtx, rtx, rtx);
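/* FACGE/FACGT and friends: predicated absolute-value floating-point
   comparisons.  */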
extern rtx gen_aarch64_pred_facgevnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_facgtvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_faclevnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_facltvnx8hf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_facgevnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_facgtvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_faclevnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_facltvnx4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_facgevnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_facgtvnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_faclevnx2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_pred_facltvnx2df (rtx, rtx, rtx, rtx, rtx);
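/* Conditional branch on a predicate, typically implemented via PTEST.  */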
extern rtx gen_cbranchvnx16bi4 (rtx, rtx, rtx, rtx);
extern rtx gen_cbranchvnx8bi4 (rtx, rtx, rtx, rtx);
extern rtx gen_cbranchvnx4bi4 (rtx, rtx, rtx, rtx);
extern rtx gen_cbranchvnx2bi4 (rtx, rtx, rtx, rtx);
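/* Reductions of a whole vector to a scalar; fold_left_plus is the
   strictly in-order floating-point sum (FADDA).  */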
extern rtx gen_reduc_plus_scal_vnx16qi (rtx, rtx);
extern rtx gen_reduc_plus_scal_vnx8hi (rtx, rtx);
extern rtx gen_reduc_plus_scal_vnx4si (rtx, rtx);
extern rtx gen_reduc_plus_scal_vnx2di (rtx, rtx);
extern rtx gen_reduc_and_scal_vnx16qi (rtx, rtx);
extern rtx gen_reduc_ior_scal_vnx16qi (rtx, rtx);
extern rtx gen_reduc_smax_scal_vnx16qi (rtx, rtx);
extern rtx gen_reduc_smin_scal_vnx16qi (rtx, rtx);
extern rtx gen_reduc_umax_scal_vnx16qi (rtx, rtx);
extern rtx gen_reduc_umin_scal_vnx16qi (rtx, rtx);
extern rtx gen_reduc_xor_scal_vnx16qi (rtx, rtx);
extern rtx gen_reduc_and_scal_vnx8hi (rtx, rtx);
extern rtx gen_reduc_ior_scal_vnx8hi (rtx, rtx);
extern rtx gen_reduc_smax_scal_vnx8hi (rtx, rtx);
extern rtx gen_reduc_smin_scal_vnx8hi (rtx, rtx);
extern rtx gen_reduc_umax_scal_vnx8hi (rtx, rtx);
extern rtx gen_reduc_umin_scal_vnx8hi (rtx, rtx);
extern rtx gen_reduc_xor_scal_vnx8hi (rtx, rtx);
extern rtx gen_reduc_and_scal_vnx4si (rtx, rtx);
extern rtx gen_reduc_ior_scal_vnx4si (rtx, rtx);
extern rtx gen_reduc_smax_scal_vnx4si (rtx, rtx);
extern rtx gen_reduc_smin_scal_vnx4si (rtx, rtx);
extern rtx gen_reduc_umax_scal_vnx4si (rtx, rtx);
extern rtx gen_reduc_umin_scal_vnx4si (rtx, rtx);
extern rtx gen_reduc_xor_scal_vnx4si (rtx, rtx);
extern rtx gen_reduc_and_scal_vnx2di (rtx, rtx);
extern rtx gen_reduc_ior_scal_vnx2di (rtx, rtx);
extern rtx gen_reduc_smax_scal_vnx2di (rtx, rtx);
extern rtx gen_reduc_smin_scal_vnx2di (rtx, rtx);
extern rtx gen_reduc_umax_scal_vnx2di (rtx, rtx);
extern rtx gen_reduc_umin_scal_vnx2di (rtx, rtx);
extern rtx gen_reduc_xor_scal_vnx2di (rtx, rtx);
extern rtx gen_reduc_plus_scal_vnx8hf (rtx, rtx);
extern rtx gen_reduc_smax_nan_scal_vnx8hf (rtx, rtx);
extern rtx gen_reduc_smax_scal_vnx8hf (rtx, rtx);
extern rtx gen_reduc_smin_nan_scal_vnx8hf (rtx, rtx);
extern rtx gen_reduc_smin_scal_vnx8hf (rtx, rtx);
extern rtx gen_reduc_plus_scal_vnx4sf (rtx, rtx);
extern rtx gen_reduc_smax_nan_scal_vnx4sf (rtx, rtx);
extern rtx gen_reduc_smax_scal_vnx4sf (rtx, rtx);
extern rtx gen_reduc_smin_nan_scal_vnx4sf (rtx, rtx);
extern rtx gen_reduc_smin_scal_vnx4sf (rtx, rtx);
extern rtx gen_reduc_plus_scal_vnx2df (rtx, rtx);
extern rtx gen_reduc_smax_nan_scal_vnx2df (rtx, rtx);
extern rtx gen_reduc_smax_scal_vnx2df (rtx, rtx);
extern rtx gen_reduc_smin_nan_scal_vnx2df (rtx, rtx);
extern rtx gen_reduc_smin_scal_vnx2df (rtx, rtx);
extern rtx gen_fold_left_plus_vnx8hf (rtx, rtx, rtx);
extern rtx gen_fold_left_plus_vnx4sf (rtx, rtx, rtx);
extern rtx gen_fold_left_plus_vnx2df (rtx, rtx, rtx);
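/* General permutes (TBL) and widening pack/unpack operations.  */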
extern rtx gen_vec_permvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permvnx8bf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_vec_unpacks_hi_vnx16qi (rtx, rtx);
extern rtx gen_vec_unpacku_hi_vnx16qi (rtx, rtx);
extern rtx gen_vec_unpacks_lo_vnx16qi (rtx, rtx);
extern rtx gen_vec_unpacku_lo_vnx16qi (rtx, rtx);
extern rtx gen_vec_unpacks_hi_vnx8hi (rtx, rtx);
extern rtx gen_vec_unpacku_hi_vnx8hi (rtx, rtx);
extern rtx gen_vec_unpacks_lo_vnx8hi (rtx, rtx);
extern rtx gen_vec_unpacku_lo_vnx8hi (rtx, rtx);
extern rtx gen_vec_unpacks_hi_vnx4si (rtx, rtx);
extern rtx gen_vec_unpacku_hi_vnx4si (rtx, rtx);
extern rtx gen_vec_unpacks_lo_vnx4si (rtx, rtx);
extern rtx gen_vec_unpacku_lo_vnx4si (rtx, rtx);
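/* Conversions between floating-point and integer elements, and between
   floating-point precisions.  */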
extern rtx gen_fix_truncvnx8hfvnx8hi2 (rtx, rtx);
extern rtx gen_fixuns_truncvnx8hfvnx8hi2 (rtx, rtx);
extern rtx gen_fix_truncvnx4sfvnx4si2 (rtx, rtx);
extern rtx gen_fixuns_truncvnx4sfvnx4si2 (rtx, rtx);
extern rtx gen_fix_truncvnx2dfvnx2di2 (rtx, rtx);
extern rtx gen_fixuns_truncvnx2dfvnx2di2 (rtx, rtx);
extern rtx gen_cond_fix_trunc_nontruncvnx8hfvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_cond_fixuns_trunc_nontruncvnx8hfvnx8hi (rtx, rtx, rtx, rtx);
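/* Iterator combinations with no matching pattern in the machine
   description are given inert inline stubs that simply return NULL (0).  */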
static inline rtx gen_cond_fix_trunc_nontruncvnx4sfvnx8hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_cond_fix_trunc_nontruncvnx4sfvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_cond_fixuns_trunc_nontruncvnx4sfvnx8hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_cond_fixuns_trunc_nontruncvnx4sfvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_cond_fix_trunc_nontruncvnx2dfvnx8hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_cond_fix_trunc_nontruncvnx2dfvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_cond_fixuns_trunc_nontruncvnx2dfvnx8hi (rtx, rtx, rtx, rtx);
static inline rtx
gen_cond_fixuns_trunc_nontruncvnx2dfvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_cond_fix_trunc_nontruncvnx8hfvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_cond_fixuns_trunc_nontruncvnx8hfvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_cond_fix_trunc_nontruncvnx4sfvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_cond_fixuns_trunc_nontruncvnx4sfvnx4si (rtx, rtx, rtx, rtx);
static inline rtx gen_cond_fix_trunc_nontruncvnx2dfvnx4si (rtx, rtx, rtx, rtx);
static inline rtx
gen_cond_fix_trunc_nontruncvnx2dfvnx4si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_cond_fixuns_trunc_nontruncvnx2dfvnx4si (rtx, rtx, rtx, rtx);
static inline rtx
gen_cond_fixuns_trunc_nontruncvnx2dfvnx4si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_cond_fix_trunc_nontruncvnx8hfvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_cond_fixuns_trunc_nontruncvnx8hfvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_cond_fix_trunc_nontruncvnx4sfvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_cond_fixuns_trunc_nontruncvnx4sfvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_cond_fix_trunc_nontruncvnx2dfvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_cond_fixuns_trunc_nontruncvnx2dfvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_cond_fix_trunc_truncvnx2dfvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_cond_fixuns_trunc_truncvnx2dfvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_pack_sfix_trunc_vnx2df (rtx, rtx, rtx);
extern rtx gen_vec_pack_ufix_trunc_vnx2df (rtx, rtx, rtx);
extern rtx gen_floatvnx8hivnx8hf2 (rtx, rtx);
extern rtx gen_floatunsvnx8hivnx8hf2 (rtx, rtx);
extern rtx gen_floatvnx4sivnx4sf2 (rtx, rtx);
extern rtx gen_floatunsvnx4sivnx4sf2 (rtx, rtx);
extern rtx gen_floatvnx2divnx2df2 (rtx, rtx);
extern rtx gen_floatunsvnx2divnx2df2 (rtx, rtx);
extern rtx gen_cond_float_nonextendvnx8hivnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_floatuns_nonextendvnx8hivnx8hf (rtx, rtx, rtx, rtx);
static inline rtx gen_cond_float_nonextendvnx8hivnx4sf (rtx, rtx, rtx, rtx);
static inline rtx
gen_cond_float_nonextendvnx8hivnx4sf(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_cond_floatuns_nonextendvnx8hivnx4sf (rtx, rtx, rtx, rtx);
static inline rtx
gen_cond_floatuns_nonextendvnx8hivnx4sf(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_cond_float_nonextendvnx8hivnx2df (rtx, rtx, rtx, rtx);
static inline rtx
gen_cond_float_nonextendvnx8hivnx2df(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_cond_floatuns_nonextendvnx8hivnx2df (rtx, rtx, rtx, rtx);
static inline rtx
gen_cond_floatuns_nonextendvnx8hivnx2df(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_cond_float_nonextendvnx4sivnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_floatuns_nonextendvnx4sivnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_float_nonextendvnx4sivnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_floatuns_nonextendvnx4sivnx4sf (rtx, rtx, rtx, rtx);
static inline rtx gen_cond_float_nonextendvnx4sivnx2df (rtx, rtx, rtx, rtx);
static inline rtx
gen_cond_float_nonextendvnx4sivnx2df(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
static inline rtx gen_cond_floatuns_nonextendvnx4sivnx2df (rtx, rtx, rtx, rtx);
static inline rtx
gen_cond_floatuns_nonextendvnx4sivnx2df(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_cond_float_nonextendvnx2divnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_floatuns_nonextendvnx2divnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_float_nonextendvnx2divnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_floatuns_nonextendvnx2divnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_float_nonextendvnx2divnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_cond_floatuns_nonextendvnx2divnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_cond_float_extendvnx4sivnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_cond_floatuns_extendvnx4sivnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_vec_unpacks_float_lo_vnx4si (rtx, rtx);
extern rtx gen_vec_unpacks_float_hi_vnx4si (rtx, rtx);
extern rtx gen_vec_unpacku_float_lo_vnx4si (rtx, rtx);
extern rtx gen_vec_unpacku_float_hi_vnx4si (rtx, rtx);
extern rtx gen_vec_pack_trunc_vnx4sf (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_vnx2df (rtx, rtx, rtx);
extern rtx gen_cond_fcvt_truncvnx4sfvnx8hf (rtx, rtx, rtx, rtx);
static inline rtx gen_cond_fcvt_truncvnx4sfvnx4sf (rtx, rtx, rtx, rtx);
static inline rtx
gen_cond_fcvt_truncvnx4sfvnx4sf(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_cond_fcvt_truncvnx2dfvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_fcvt_truncvnx2dfvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_fcvt_truncvnx4sfvnx8bf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_unpacks_lo_vnx8hf (rtx, rtx);
extern rtx gen_vec_unpacks_hi_vnx8hf (rtx, rtx);
extern rtx gen_vec_unpacks_lo_vnx4sf (rtx, rtx);
extern rtx gen_vec_unpacks_hi_vnx4sf (rtx, rtx);
extern rtx gen_cond_fcvt_nontruncvnx8hfvnx4sf (rtx, rtx, rtx, rtx);
static inline rtx gen_cond_fcvt_nontruncvnx4sfvnx4sf (rtx, rtx, rtx, rtx);
static inline rtx
gen_cond_fcvt_nontruncvnx4sfvnx4sf(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_cond_fcvt_nontruncvnx8hfvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_cond_fcvt_nontruncvnx4sfvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_vec_unpacks_hi_vnx16bi (rtx, rtx);
extern rtx gen_vec_unpacku_hi_vnx16bi (rtx, rtx);
extern rtx gen_vec_unpacks_lo_vnx16bi (rtx, rtx);
extern rtx gen_vec_unpacku_lo_vnx16bi (rtx, rtx);
extern rtx gen_vec_unpacks_hi_vnx8bi (rtx, rtx);
extern rtx gen_vec_unpacku_hi_vnx8bi (rtx, rtx);
extern rtx gen_vec_unpacks_lo_vnx8bi (rtx, rtx);
extern rtx gen_vec_unpacku_lo_vnx8bi (rtx, rtx);
extern rtx gen_vec_unpacks_hi_vnx4bi (rtx, rtx);
extern rtx gen_vec_unpacku_hi_vnx4bi (rtx, rtx);
extern rtx gen_vec_unpacks_lo_vnx4bi (rtx, rtx);
extern rtx gen_vec_unpacku_lo_vnx4bi (rtx, rtx);
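/* Increment/decrement a value by an element count taken from a predicate
   pattern (the *_pat forms) or from the number of active lanes in a
   predicate (the *_cntp forms, i.e. CNTP), including the saturating
   sqinc/uqinc/sqdec/uqdec variants.  */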
extern rtx gen_aarch64_sve_incvnx8hi_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqincvnx8hi_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqincvnx8hi_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_decvnx8hi_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdecvnx8hi_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqdecvnx8hi_pat (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_incdivnx16bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqincdivnx16bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqincdivnx16bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_incdivnx8bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqincdivnx8bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqincdivnx8bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_incdivnx4bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqincdivnx4bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqincdivnx4bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_incdivnx2bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqincdivnx2bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqincdivnx2bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqincsivnx16bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqincsivnx16bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqincsivnx8bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqincsivnx8bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqincsivnx4bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqincsivnx4bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqincsivnx2bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqincsivnx2bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_incvnx2di_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqincvnx2di_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqincvnx2di_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_incvnx4si_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqincvnx4si_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqincvnx4si_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_incvnx8hi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqincvnx8hi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqincvnx8hi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_decdivnx16bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdecdivnx16bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqdecdivnx16bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_decdivnx8bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdecdivnx8bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqdecdivnx8bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_decdivnx4bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdecdivnx4bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqdecdivnx4bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_decdivnx2bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdecdivnx2bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqdecdivnx2bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdecsivnx16bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqdecsivnx16bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdecsivnx8bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqdecsivnx8bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdecsivnx4bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqdecsivnx4bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdecsivnx2bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqdecsivnx2bi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_decvnx2di_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdecvnx2di_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqdecvnx2di_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_decvnx4si_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdecvnx4si_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqdecvnx4si_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_decvnx8hi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_sqdecvnx8hi_cntp (rtx, rtx, rtx);
extern rtx gen_aarch64_sve_uqdecvnx8hi_cntp (rtx, rtx, rtx);
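/* SVE2: high-part multiplies with scaling/rounding, averaging adds,
   predicated saturating and rounding arithmetic, bitwise selects
   (BSL/NBSL), shift-and-accumulate, absolute-difference-and-accumulate,
   pairwise widening accumulation, and related conversions.  */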
extern rtx gen_smulhsvnx16qi3 (rtx, rtx, rtx);
extern rtx gen_umulhsvnx16qi3 (rtx, rtx, rtx);
extern rtx gen_smulhrsvnx16qi3 (rtx, rtx, rtx);
extern rtx gen_umulhrsvnx16qi3 (rtx, rtx, rtx);
extern rtx gen_smulhsvnx8hi3 (rtx, rtx, rtx);
extern rtx gen_umulhsvnx8hi3 (rtx, rtx, rtx);
extern rtx gen_smulhrsvnx8hi3 (rtx, rtx, rtx);
extern rtx gen_umulhrsvnx8hi3 (rtx, rtx, rtx);
extern rtx gen_smulhsvnx4si3 (rtx, rtx, rtx);
extern rtx gen_umulhsvnx4si3 (rtx, rtx, rtx);
extern rtx gen_smulhrsvnx4si3 (rtx, rtx, rtx);
extern rtx gen_umulhrsvnx4si3 (rtx, rtx, rtx);
extern rtx gen_avgvnx16qi3_floor (rtx, rtx, rtx);
extern rtx gen_uavgvnx16qi3_floor (rtx, rtx, rtx);
extern rtx gen_avgvnx8hi3_floor (rtx, rtx, rtx);
extern rtx gen_uavgvnx8hi3_floor (rtx, rtx, rtx);
extern rtx gen_avgvnx4si3_floor (rtx, rtx, rtx);
extern rtx gen_uavgvnx4si3_floor (rtx, rtx, rtx);
extern rtx gen_avgvnx2di3_floor (rtx, rtx, rtx);
extern rtx gen_uavgvnx2di3_floor (rtx, rtx, rtx);
extern rtx gen_avgvnx16qi3_ceil (rtx, rtx, rtx);
extern rtx gen_uavgvnx16qi3_ceil (rtx, rtx, rtx);
extern rtx gen_avgvnx8hi3_ceil (rtx, rtx, rtx);
extern rtx gen_uavgvnx8hi3_ceil (rtx, rtx, rtx);
extern rtx gen_avgvnx4si3_ceil (rtx, rtx, rtx);
extern rtx gen_uavgvnx4si3_ceil (rtx, rtx, rtx);
extern rtx gen_avgvnx2di3_ceil (rtx, rtx, rtx);
extern rtx gen_uavgvnx2di3_ceil (rtx, rtx, rtx);
extern rtx gen_cond_shaddvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_shsubvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sqrshlvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_srhaddvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_srshlvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_suqaddvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uhaddvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uhsubvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uqrshlvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_urhaddvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_urshlvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_usqaddvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_shaddvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_shsubvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sqrshlvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_srhaddvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_srshlvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_suqaddvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uhaddvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uhsubvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uqrshlvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_urhaddvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_urshlvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_usqaddvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_shaddvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_shsubvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sqrshlvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_srhaddvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_srshlvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_suqaddvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uhaddvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uhsubvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uqrshlvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_urhaddvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_urshlvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_usqaddvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_shaddvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_shsubvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sqrshlvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_srhaddvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_srshlvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_suqaddvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uhaddvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uhsubvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uqrshlvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_urhaddvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_urshlvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_usqaddvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sqshlvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uqshlvnx16qi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sqshlvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uqshlvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sqshlvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uqshlvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sqshlvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uqshlvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_bslvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_bslvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_bslvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_bslvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_nbslvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_nbslvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_nbslvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_nbslvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_bsl1nvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_bsl1nvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_bsl1nvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_bsl1nvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_bsl2nvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_bsl2nvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_bsl2nvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_bsl2nvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_asrvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_lsrvnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_asrvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_lsrvnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_asrvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_lsrvnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_asrvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve_add_lsrvnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_sabavnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_uabavnx16qi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_sabavnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_uabavnx8hi (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_sabavnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_uabavnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_sabavnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_aarch64_sve2_uabavnx2di (rtx, rtx, rtx, rtx);
extern rtx gen_cond_sadalpvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uadalpvnx8hi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sadalpvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uadalpvnx4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_sadalpvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_uadalpvnx2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cond_fcvtltvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_fcvtltvnx2df (rtx, rtx, rtx, rtx);
extern rtx gen_cond_fcvtxvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_urecpevnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_cond_ursqrtevnx4si (rtx, rtx, rtx, rtx);
extern rtx gen_cond_flogbvnx8hf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_flogbvnx4sf (rtx, rtx, rtx, rtx);
extern rtx gen_cond_flogbvnx2df (rtx, rtx, rtx, rtx);
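/* Runtime alias checks between pointer ranges (WHILERW/WHILEWR).  */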
extern rtx gen_check_raw_ptrssi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_check_war_ptrssi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_check_raw_ptrsdi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_check_war_ptrsdi (rtx, rtx, rtx, rtx, rtx);
#endif /* GCC_INSN_FLAGS_H */