aarch64: Fix type qualifiers for qtbl1 and qtbx1 Neon builtins

Fix type qualifiers for qtbl1 and qtbx1 Neon builtins and remove
casts from the Neon intrinsic function bodies that use these
builtins.
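
As an illustration (taken from the arm_neon.h hunks below), vqtbl1_p8 goes
from casting every operand to and from the signed-only builtin:

    return (poly8x8_t) __builtin_aarch64_qtbl1v8qi ((int8x16_t) __tab,
                                                    (int8x8_t) __idx);

to calling a builtin whose prototype already matches the intrinsic's types:

    return __builtin_aarch64_qtbl1v8qi_ppu (__tab, __idx);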

gcc/ChangeLog:

2021-09-23  Jonathan Wright  <jonathan.wright@arm.com>

	* config/aarch64/aarch64-builtins.c (TYPES_BINOP_PPU): Define
	new type qualifier enum.
	(TYPES_TERNOP_SSSU): Likewise.
	(TYPES_TERNOP_PPPU): Likewise.
	* config/aarch64/aarch64-simd-builtins.def: Define PPU, SSU,
	PPPU and SSSU builtin generator macros for qtbl1 and qtbx1
	Neon builtins.
	* config/aarch64/arm_neon.h (vqtbl1_p8): Use type-qualified
	builtin and remove casts.
	(vqtbl1_s8): Likewise.
	(vqtbl1q_p8): Likewise.
	(vqtbl1q_s8): Likewise.
	(vqtbx1_s8): Likewise.
	(vqtbx1_p8): Likewise.
	(vqtbx1q_s8): Likewise.
	(vqtbx1q_p8): Likewise.
	(vtbl1_p8): Likewise.
	(vtbl2_p8): Likewise.
	(vtbx2_p8): Likewise.
Author: Jonathan Wright <jonathan.wright@arm.com>
Date:   2021-09-23 14:27:22 +01:00
Commit: 3caf7f87b1
Parent: f38cd3bdb4

3 changed files with 27 additions and 21 deletions

gcc/config/aarch64/aarch64-builtins.c

@@ -182,6 +182,10 @@ static enum aarch64_type_qualifiers
 aarch64_types_binopp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
   = { qualifier_poly, qualifier_poly, qualifier_poly };
 #define TYPES_BINOPP (aarch64_types_binopp_qualifiers)
+static enum aarch64_type_qualifiers
+aarch64_types_binop_ppu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
+  = { qualifier_poly, qualifier_poly, qualifier_unsigned };
+#define TYPES_BINOP_PPU (aarch64_types_binop_ppu_qualifiers)
 static enum aarch64_type_qualifiers
 aarch64_types_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
@@ -207,6 +211,10 @@ aarch64_types_ternopu_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
       qualifier_unsigned, qualifier_immediate };
 #define TYPES_TERNOPUI (aarch64_types_ternopu_imm_qualifiers)
+static enum aarch64_type_qualifiers
+aarch64_types_ternop_sssu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
+  = { qualifier_none, qualifier_none, qualifier_none, qualifier_unsigned };
+#define TYPES_TERNOP_SSSU (aarch64_types_ternop_sssu_qualifiers)
 static enum aarch64_type_qualifiers
 aarch64_types_ternop_ssus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
   = { qualifier_none, qualifier_none, qualifier_unsigned, qualifier_none };
 #define TYPES_TERNOP_SSUS (aarch64_types_ternop_ssus_qualifiers)
@@ -214,6 +222,10 @@ static enum aarch64_type_qualifiers
 aarch64_types_ternop_suss_qualifiers[SIMD_MAX_BUILTIN_ARGS]
   = { qualifier_none, qualifier_unsigned, qualifier_none, qualifier_none };
 #define TYPES_TERNOP_SUSS (aarch64_types_ternop_suss_qualifiers)
+static enum aarch64_type_qualifiers
+aarch64_types_binop_pppu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
+  = { qualifier_poly, qualifier_poly, qualifier_poly, qualifier_unsigned };
+#define TYPES_TERNOP_PPPU (aarch64_types_binop_pppu_qualifiers)
 static enum aarch64_type_qualifiers
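
Each qualifier array gives the builtin's return type first, then its
argument types, so the new sets let the table-lookup builtins carry the
same poly/unsigned types as the intrinsics that call them. A rough sketch
of the prototype the BINOP_PPU set implies for the v8qi qtbl1 builtin,
inferred from its use in arm_neon.h below (the table operand of qtbl1 is
always a full 128-bit register):

    poly8x8_t
    __builtin_aarch64_qtbl1v8qi_ppu (poly8x16_t __tab, uint8x8_t __idx);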

gcc/config/aarch64/aarch64-simd-builtins.def

@@ -721,6 +721,8 @@
   /* Implemented by aarch64_qtbl1<mode>. */
   VAR2 (BINOP, qtbl1, 0, NONE, v8qi, v16qi)
   VAR2 (BINOPU, qtbl1, 0, NONE, v8qi, v16qi)
+  VAR2 (BINOP_PPU, qtbl1, 0, NONE, v8qi, v16qi)
+  VAR2 (BINOP_SSU, qtbl1, 0, NONE, v8qi, v16qi)
   /* Implemented by aarch64_qtbl2<mode>. */
   VAR2 (BINOP, qtbl2, 0, NONE, v8qi, v16qi)
@@ -734,6 +736,8 @@
   /* Implemented by aarch64_qtbx1<mode>. */
   VAR2 (TERNOP, qtbx1, 0, NONE, v8qi, v16qi)
   VAR2 (TERNOPU, qtbx1, 0, NONE, v8qi, v16qi)
+  VAR2 (TERNOP_PPPU, qtbx1, 0, NONE, v8qi, v16qi)
+  VAR2 (TERNOP_SSSU, qtbx1, 0, NONE, v8qi, v16qi)
   /* Implemented by aarch64_qtbx2<mode>. */
   VAR2 (TERNOP, qtbx2, 0, NONE, v8qi, v16qi)
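
Each VAR2 entry registers the builtin for both the v8qi and v16qi modes, so
the two new qtbx1 lines provide the four type-qualified builtins that the
arm_neon.h hunks below call. A sketch of their signatures, inferred from
those uses:

    int8x8_t   __builtin_aarch64_qtbx1v8qi_sssu  (int8x8_t, int8x16_t,
                                                  uint8x8_t);
    int8x16_t  __builtin_aarch64_qtbx1v16qi_sssu (int8x16_t, int8x16_t,
                                                  uint8x16_t);
    poly8x8_t  __builtin_aarch64_qtbx1v8qi_pppu  (poly8x8_t, poly8x16_t,
                                                  uint8x8_t);
    poly8x16_t __builtin_aarch64_qtbx1v16qi_pppu (poly8x16_t, poly8x16_t,
                                                  uint8x16_t);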

gcc/config/aarch64/arm_neon.h

@@ -10416,15 +10416,14 @@ __extension__ extern __inline poly8x8_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 vqtbl1_p8 (poly8x16_t __tab, uint8x8_t __idx)
 {
-  return (poly8x8_t) __builtin_aarch64_qtbl1v8qi ((int8x16_t) __tab,
-                                                  (int8x8_t) __idx);
+  return __builtin_aarch64_qtbl1v8qi_ppu (__tab, __idx);
 }

 __extension__ extern __inline int8x8_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 vqtbl1_s8 (int8x16_t __tab, uint8x8_t __idx)
 {
-  return __builtin_aarch64_qtbl1v8qi (__tab, (int8x8_t) __idx);
+  return __builtin_aarch64_qtbl1v8qi_ssu (__tab, __idx);
 }

 __extension__ extern __inline uint8x8_t
@@ -10438,15 +10437,14 @@ __extension__ extern __inline poly8x16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 vqtbl1q_p8 (poly8x16_t __tab, uint8x16_t __idx)
 {
-  return (poly8x16_t) __builtin_aarch64_qtbl1v16qi ((int8x16_t) __tab,
-                                                    (int8x16_t) __idx);
+  return __builtin_aarch64_qtbl1v16qi_ppu (__tab, __idx);
 }

 __extension__ extern __inline int8x16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 vqtbl1q_s8 (int8x16_t __tab, uint8x16_t __idx)
 {
-  return __builtin_aarch64_qtbl1v16qi (__tab, (int8x16_t) __idx);
+  return __builtin_aarch64_qtbl1v16qi_ssu (__tab, __idx);
 }

 __extension__ extern __inline uint8x16_t
@@ -10460,7 +10458,7 @@ __extension__ extern __inline int8x8_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 vqtbx1_s8 (int8x8_t __r, int8x16_t __tab, uint8x8_t __idx)
 {
-  return __builtin_aarch64_qtbx1v8qi (__r, __tab, (int8x8_t) __idx);
+  return __builtin_aarch64_qtbx1v8qi_sssu (__r, __tab, __idx);
 }

 __extension__ extern __inline uint8x8_t
@@ -10474,16 +10472,14 @@ __extension__ extern __inline poly8x8_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 vqtbx1_p8 (poly8x8_t __r, poly8x16_t __tab, uint8x8_t __idx)
 {
-  return (poly8x8_t) __builtin_aarch64_qtbx1v8qi ((int8x8_t) __r,
-                                                  (int8x16_t) __tab,
-                                                  (int8x8_t) __idx);
+  return __builtin_aarch64_qtbx1v8qi_pppu (__r, __tab, __idx);
 }

 __extension__ extern __inline int8x16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 vqtbx1q_s8 (int8x16_t __r, int8x16_t __tab, uint8x16_t __idx)
 {
-  return __builtin_aarch64_qtbx1v16qi (__r, __tab, (int8x16_t) __idx);
+  return __builtin_aarch64_qtbx1v16qi_sssu (__r, __tab, __idx);
 }

 __extension__ extern __inline uint8x16_t
@@ -10497,9 +10493,7 @@ __extension__ extern __inline poly8x16_t
 __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 vqtbx1q_p8 (poly8x16_t __r, poly8x16_t __tab, uint8x16_t __idx)
 {
-  return (poly8x16_t) __builtin_aarch64_qtbx1v16qi ((int8x16_t) __r,
-                                                    (int8x16_t) __tab,
-                                                    (int8x16_t) __idx);
+  return __builtin_aarch64_qtbx1v16qi_pppu (__r, __tab, __idx);
 }

 /* V7 legacy table intrinsics. */
@@ -10528,8 +10522,7 @@ vtbl1_p8 (poly8x8_t __tab, uint8x8_t __idx)
 {
   poly8x16_t __temp = vcombine_p8 (__tab,
                                    vcreate_p8 (__AARCH64_UINT64_C (0x0)));
-  return (poly8x8_t) __builtin_aarch64_qtbl1v8qi ((int8x16_t) __temp,
-                                                  (int8x8_t) __idx);
+  return __builtin_aarch64_qtbl1v8qi_ppu (__temp, __idx);
 }

 __extension__ extern __inline int8x8_t
@@ -10553,8 +10546,7 @@ __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 vtbl2_p8 (poly8x8x2_t __tab, uint8x8_t __idx)
 {
   poly8x16_t __temp = vcombine_p8 (__tab.val[0], __tab.val[1]);
-  return (poly8x8_t) __builtin_aarch64_qtbl1v8qi ((int8x16_t) __temp,
-                                                  (int8x8_t) __idx);
+  return __builtin_aarch64_qtbl1v8qi_ppu (__temp, __idx);
 }

 __extension__ extern __inline int8x8_t
@@ -10653,9 +10645,7 @@ __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
 vtbx2_p8 (poly8x8_t __r, poly8x8x2_t __tab, uint8x8_t __idx)
 {
   poly8x16_t __temp = vcombine_p8 (__tab.val[0], __tab.val[1]);
-  return (poly8x8_t) __builtin_aarch64_qtbx1v8qi ((int8x8_t) __r,
-                                                  (int8x16_t) __temp,
-                                                  (int8x8_t) __idx);
+  return __builtin_aarch64_qtbx1v8qi_pppu (__r, __temp, __idx);
 }

 /* End of temporary inline asm. */
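
The user-visible intrinsic signatures are unchanged by this patch; only the
internal builtin calls lose their casts. A minimal, illustrative usage
example of one affected intrinsic (function and parameter names here are
illustrative only, not part of the patch):

    #include <arm_neon.h>

    /* Permute a 64-bit poly vector using a 128-bit lookup table;
       index bytes outside the table select 0.  */
    poly8x8_t
    lookup_p8 (poly8x16_t tab, uint8x8_t idx)
    {
      return vqtbl1_p8 (tab, idx);
    }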