arm: Consistently use crypto_mode attribute in crypto patterns

A couple of patterns in the crypto support code were hard-coding the mode
rather than using the iterators.  While not incorrect, it was slightly
confusing, so adapt those patterns to the style of the rest of the file.
Also fix some white space issues.

gcc/ChangeLog:

        * config/arm/crypto.md (crypto_<CRYPTO_AES:crypto_pattern>): Use
        <crypto_mode> rather than hard-coding the mode.
        (crypto_<CRYPTO_AESMC:crypto_pattern>): Fix white space.
        (crypto_<CRYPTO_AES:crypto_pattern>): Likewise.
        (*aarch32_crypto_aese_fused): Likewise.
        (*aarch32_crypto_aesd_fused): Likewise.
        (crypto_<CRYPTO_BINARY:crypto_pattern>): Likewise.
        (crypto_<CRYPTO_TERNARY:crypto_pattern>): Likewise.
        (crypto_sha1h_lb): Likewise.
        (crypto_vmullp64): Likewise.
        (crypto_<CRYPTO_SELECTING:crypto_pattern>): Likewise.
        (crypto_<CRYPTO_SELECTING:crypto_pattern>_lb): Likewise.
parent 0042ca7b77
commit c471ee0f05

1 changed file with 47 additions and 47 deletions
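For readers unfamiliar with the iterator machinery the message refers to:
CRYPTO_AES and friends are int iterators, and <crypto_mode>, <crypto_pattern>
and <crypto_size_sfx> are int attributes keyed off the same unspec values,
defined in gcc/config/arm/iterators.md.  The sketch below shows only the
general mechanism; the iterator members and attribute values are illustrative
assumptions, not copied from iterators.md.

        ;; Illustrative sketch of the int iterator/attribute mechanism.
        ;; The real definitions live in gcc/config/arm/iterators.md and
        ;; may differ in detail.
        (define_int_iterator CRYPTO_AES [UNSPEC_AESD UNSPEC_AESE])
        (define_int_attr crypto_pattern [(UNSPEC_AESD "aesd")
                                         (UNSPEC_AESE "aese")])
        (define_int_attr crypto_mode [(UNSPEC_AESD "V16QI")
                                      (UNSPEC_AESE "V16QI")])

        ;; A single template written against the iterator ...
        (define_insn "crypto_<CRYPTO_AES:crypto_pattern>"
          [(set (match_operand:<crypto_mode> 0 "register_operand" "=w")
                (unspec:<crypto_mode>
                  [(xor:<crypto_mode>
                    (match_operand:<crypto_mode> 1 "register_operand" "%0")
                    (match_operand:<crypto_mode> 2 "register_operand" "w"))]
                 CRYPTO_AES))]
          "TARGET_CRYPTO"
          "<crypto_pattern>.<crypto_size_sfx>\\t%q0, %q2"
          [(set_attr "type" "<crypto_type>")]
        )
        ;; ... is expanded once per unspec in CRYPTO_AES, with every
        ;; <crypto_mode> replaced by the mode tied to that unspec (V16QI
        ;; here).  That substitution is why hard-coding V16QI inside the
        ;; template body was redundant.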
gcc/config/arm/crypto.md

@@ -22,7 +22,7 @@
 (define_insn "crypto_<CRYPTO_AESMC:crypto_pattern>"
   [(set (match_operand:<crypto_mode> 0 "register_operand" "=w")
        (unspec:<crypto_mode>
-               [(match_operand:<crypto_mode> 1 "register_operand" "w")]
+        [(match_operand:<crypto_mode> 1 "register_operand" "w")]
         CRYPTO_AESMC))]
   "TARGET_CRYPTO"
   "<crypto_pattern>.<crypto_size_sfx>\\t%q0, %q1"
@@ -30,12 +30,12 @@
 )

 (define_insn "crypto_<CRYPTO_AES:crypto_pattern>"
-  [(set (match_operand:V16QI 0 "register_operand" "=w")
-       (unspec:V16QI
-               [(xor:V16QI
-                    (match_operand:V16QI 1 "register_operand" "%0")
-                    (match_operand:V16QI 2 "register_operand" "w"))]
-       CRYPTO_AES))]
+  [(set (match_operand:<crypto_mode> 0 "register_operand" "=w")
+       (unspec:<crypto_mode>
+         [(xor:<crypto_mode>
+           (match_operand:<crypto_mode> 1 "register_operand" "%0")
+           (match_operand:<crypto_mode> 2 "register_operand" "w"))]
+        CRYPTO_AES))]
   "TARGET_CRYPTO"
   "<crypto_pattern>.<crypto_size_sfx>\\t%q0, %q2"
   [(set_attr "type" "<crypto_type>")]
@@ -44,17 +44,16 @@
 ;; When AESE/AESMC fusion is enabled we really want to keep the two together
 ;; and enforce the register dependency without scheduling or register
 ;; allocation messing up the order or introducing moves inbetween.
-;;  Mash the two together during combine.
+;; Mash the two together during combine.

 (define_insn "*aarch32_crypto_aese_fused"
   [(set (match_operand:V16QI 0 "register_operand" "=w")
        (unspec:V16QI
-               [(unspec:V16QI
-                   [(xor:V16QI
-                       (match_operand:V16QI 1 "register_operand" "%0")
-                       (match_operand:V16QI 2 "register_operand" "w"))]
-               UNSPEC_AESE)]
-       UNSPEC_AESMC))]
+        [(unspec:V16QI [(xor:V16QI
+                         (match_operand:V16QI 1 "register_operand" "%0")
+                         (match_operand:V16QI 2 "register_operand" "w"))]
+          UNSPEC_AESE)]
+        UNSPEC_AESMC))]
   "TARGET_CRYPTO
    && arm_fusion_enabled_p (tune_params::FUSE_AES_AESMC)"
   "aese.8\\t%q0, %q2\;aesmc.8\\t%q0, %q0"
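For context, the fused pattern above exists so that, on cores whose tuning
enables FUSE_AES_AESMC, the AESE/AESMC pair produced for source code like the
sketch below stays back to back.  The intrinsics are the ACLE ones from
arm_neon.h; the function name and the build options mentioned in the comment
are illustrative assumptions rather than anything taken from the tree.  The
AESD/AESIMC pattern in the next hunk works the same way with vaesdq_u8 and
vaesimcq_u8.

        #include <arm_neon.h>

        /* One AES encryption round (AESE) followed by MixColumns (AESMC).
           With crypto support enabled (e.g. -mfpu=crypto-neon-fp-armv8 on
           32-bit ARM) this becomes an aese.8/aesmc.8 pair, and on tunings
           that set FUSE_AES_AESMC, combine folds the pair into the single
           *aarch32_crypto_aese_fused pattern so that later passes cannot
           reorder the two or insert moves between them.  */
        uint8x16_t
        aes_round (uint8x16_t state, uint8x16_t roundkey)
        {
          return vaesmcq_u8 (vaeseq_u8 (state, roundkey));
        }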
@@ -65,17 +64,16 @@
 ;; When AESD/AESIMC fusion is enabled we really want to keep the two together
 ;; and enforce the register dependency without scheduling or register
 ;; allocation messing up the order or introducing moves inbetween.
-;;  Mash the two together during combine.
+;; Mash the two together during combine.

 (define_insn "*aarch32_crypto_aesd_fused"
   [(set (match_operand:V16QI 0 "register_operand" "=w")
        (unspec:V16QI
-               [(unspec:V16QI
-                   [(xor:V16QI
-                       (match_operand:V16QI 1 "register_operand" "%0")
-                       (match_operand:V16QI 2 "register_operand" "w"))]
-               UNSPEC_AESD)]
-       UNSPEC_AESIMC))]
+        [(unspec:V16QI [(xor:V16QI
+                         (match_operand:V16QI 1 "register_operand" "%0")
+                         (match_operand:V16QI 2 "register_operand" "w"))]
+          UNSPEC_AESD)]
+        UNSPEC_AESIMC))]
   "TARGET_CRYPTO
    && arm_fusion_enabled_p (tune_params::FUSE_AES_AESMC)"
   "aesd.8\\t%q0, %q2\;aesimc.8\\t%q0, %q0"
@@ -86,9 +84,9 @@
 (define_insn "crypto_<CRYPTO_BINARY:crypto_pattern>"
   [(set (match_operand:<crypto_mode> 0 "register_operand" "=w")
        (unspec:<crypto_mode>
-               [(match_operand:<crypto_mode> 1 "register_operand" "0")
-               (match_operand:<crypto_mode> 2 "register_operand" "w")]
-       CRYPTO_BINARY))]
+         [(match_operand:<crypto_mode> 1 "register_operand" "0")
+          (match_operand:<crypto_mode> 2 "register_operand" "w")]
+        CRYPTO_BINARY))]
   "TARGET_CRYPTO"
   "<crypto_pattern>.<crypto_size_sfx>\\t%q0, %q2"
   [(set_attr "type" "<crypto_type>")]
@@ -96,18 +94,20 @@

 (define_insn "crypto_<CRYPTO_TERNARY:crypto_pattern>"
   [(set (match_operand:<crypto_mode> 0 "register_operand" "=w")
-       (unspec:<crypto_mode> [(match_operand:<crypto_mode> 1 "register_operand" "0")
-                             (match_operand:<crypto_mode> 2 "register_operand" "w")
-                             (match_operand:<crypto_mode> 3 "register_operand" "w")]
-        CRYPTO_TERNARY))]
+       (unspec:<crypto_mode>
+         [(match_operand:<crypto_mode> 1 "register_operand" "0")
+          (match_operand:<crypto_mode> 2 "register_operand" "w")
+          (match_operand:<crypto_mode> 3 "register_operand" "w")]
+        CRYPTO_TERNARY))]
   "TARGET_CRYPTO"
   "<crypto_pattern>.<crypto_size_sfx>\\t%q0, %q2, %q3"
   [(set_attr "type" "<crypto_type>")]
 )

-/* The vec_select operation always selects index 0 from the lower V2SI subreg
-   of the V4SI, adjusted for endianness. Required due to neon_vget_lane and
-   neon_set_lane that change the element ordering in memory for big-endian. */
+;; The vec_select operation always selects index 0 from the lower V2SI
+;; subreg of the V4SI, adjusted for endianness. Required due to
+;; neon_vget_lane and neon_set_lane that change the element ordering
+;; in memory for big-endian.

 (define_expand "crypto_sha1h"
   [(set (match_operand:V4SI 0 "register_operand")
@@ -122,10 +122,10 @@
 (define_insn "crypto_sha1h_lb"
   [(set (match_operand:V4SI 0 "register_operand" "=w")
        (unspec:V4SI
-          [(vec_select:SI
+        [(vec_select:SI
           (match_operand:V4SI 1 "register_operand" "w")
           (parallel [(match_operand:SI 2 "immediate_operand" "i")]))]
-       UNSPEC_SHA1H))]
+        UNSPEC_SHA1H))]
   "TARGET_CRYPTO && INTVAL (operands[2]) == NEON_ENDIAN_LANE_N (V2SImode, 0)"
   "sha1h.32\\t%q0, %q1"
   [(set_attr "type" "crypto_sha1_fast")]
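The comment above and the sha1h_lb insn condition use NEON_ENDIAN_LANE_N so
that the hard-wired "lane 0" is expressed in the lane numbering that the RTL
actually sees on both endiannesses; the CRYPTO_SELECTING expander further
down passes GEN_INT (NEON_ENDIAN_LANE_N (V2SImode, 0)) for the same reason.
The helper below is only a sketch of the idea, written under the assumption
that the remapping is a simple index reversal within the vector mode; it is
not the backend's actual definition, and the function name is made up.

        /* Sketch of the lane remapping behind NEON_ENDIAN_LANE_N: on
           big-endian the in-register element order is reversed relative to
           the lane numbering used in the RTL, so architectural lane 0 of a
           2-element mode such as V2SI corresponds to index 1 there, while on
           little-endian the index is used unchanged.  */
        static int
        endian_lane_n (int nunits, int lane, int big_endian)
        {
          return big_endian ? nunits - 1 - lane : lane;
        }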
@@ -133,9 +133,9 @@

 (define_insn "crypto_vmullp64"
   [(set (match_operand:TI 0 "register_operand" "=w")
-       (unspec:TI [(match_operand:DI 1 "register_operand" "w")
-                   (match_operand:DI 2 "register_operand" "w")]
-       UNSPEC_VMULLP64))]
+       (unspec:TI [(match_operand:DI 1 "register_operand" "w")
+                   (match_operand:DI 2 "register_operand" "w")]
+        UNSPEC_VMULLP64))]
   "TARGET_CRYPTO"
   "vmull.p64\\t%q0, %P1, %P2"
   [(set_attr "type" "crypto_pmull")]
@@ -148,10 +148,10 @@
 (define_expand "crypto_<CRYPTO_SELECTING:crypto_pattern>"
   [(set (match_operand:V4SI 0 "register_operand")
        (unspec:<crypto_mode>
-               [(match_operand:<crypto_mode> 1 "register_operand")
-               (match_operand:<crypto_mode> 2 "register_operand")
-               (match_operand:<crypto_mode> 3 "register_operand")]
-       CRYPTO_SELECTING))]
+         [(match_operand:<crypto_mode> 1 "register_operand")
+          (match_operand:<crypto_mode> 2 "register_operand")
+          (match_operand:<crypto_mode> 3 "register_operand")]
+        CRYPTO_SELECTING))]
   "TARGET_CRYPTO"
 {
   rtx op4 = GEN_INT (NEON_ENDIAN_LANE_N (V2SImode, 0));
@@ -162,13 +162,13 @@

 (define_insn "crypto_<CRYPTO_SELECTING:crypto_pattern>_lb"
   [(set (match_operand:V4SI 0 "register_operand" "=w")
-       (unspec:<crypto_mode>
-        [(match_operand:<crypto_mode> 1 "register_operand" "0")
-         (vec_select:SI
-          (match_operand:<crypto_mode> 2 "register_operand" "w")
-          (parallel [(match_operand:SI 4 "immediate_operand" "i")]))
-         (match_operand:<crypto_mode> 3 "register_operand" "w")]
-       CRYPTO_SELECTING))]
+       (unspec:<crypto_mode>
+         [(match_operand:<crypto_mode> 1 "register_operand" "0")
+          (vec_select:SI
+           (match_operand:<crypto_mode> 2 "register_operand" "w")
+           (parallel [(match_operand:SI 4 "immediate_operand" "i")]))
+          (match_operand:<crypto_mode> 3 "register_operand" "w")]
+        CRYPTO_SELECTING))]
   "TARGET_CRYPTO && INTVAL (operands[4]) == NEON_ENDIAN_LANE_N (V2SImode, 0)"
   "<crypto_pattern>.<crypto_size_sfx>\\t%q0, %q2, %q3"
   [(set_attr "type" "<crypto_type>")]