[PATCH 3/4] Avoid unaligned accesses with ARM ldm/stm instructions

Jussi Kivilinna jussi.kivilinna at iki.fi
Thu Jun 30 23:55:33 CEST 2016


* cipher/rijndael-arm.S: Remove __ARM_FEATURE_UNALIGNED ifdefs so that the
unaligned load/store code paths are always compiled in.
* cipher/sha512-arm.S: Ditto.
--

Reported-by: Michael Plass <mfpnb at plass-family.net>
Signed-off-by: Jussi Kivilinna <jussi.kivilinna at iki.fi>
---
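
As background (not part of the patch itself): __ARM_FEATURE_UNALIGNED only
indicates that single ldr/str/ldrh/strh accesses may be unaligned, while
ldm/stm still require word-aligned addresses and fault otherwise, which is
why the runtime alignment test can no longer be compiled out. The sketch
below only illustrates the pattern the now-unconditional code follows; the
register numbers, labels and little-endian byte order are assumptions for
the example, and only the first word of the block is shown.

	/* check source address alignment before using a multi-register load */
	tst	r2, #3			/* low two address bits set? */
	beq	1f			/* no: word-aligned, ldm is safe */
	/* unaligned: build the word from byte loads (always legal);
	 * the real code repeats this for every word of the block */
	ldrb	r4, [r2, #0]
	ldrb	r5, [r2, #1]
	ldrb	r6, [r2, #2]
	ldrb	r7, [r2, #3]
	orr	r4, r4, r5, lsl #8
	orr	r4, r4, r6, lsl #16
	orr	r4, r4, r7, lsl #24
	b	2f
1:
	/* aligned: one multi-register load covers the whole block */
	ldm	r2, {r4-r7}
2:
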
 cipher/rijndael-arm.S |   12 ++++--------
 cipher/sha512-arm.S   |    3 +--
 2 files changed, 5 insertions(+), 10 deletions(-)

diff --git a/cipher/rijndael-arm.S b/cipher/rijndael-arm.S
index 694369d..e3a91c2 100644
--- a/cipher/rijndael-arm.S
+++ b/cipher/rijndael-arm.S
@@ -225,7 +225,7 @@ _gcry_aes_arm_encrypt_block:
 	push {%r4-%r11, %ip, %lr};
 
 	/* read input block */
-#ifndef __ARM_FEATURE_UNALIGNED
+
 	/* test if src is unaligned */
 	tst	%r2, #3;
 	beq	1f;
@@ -238,7 +238,6 @@ _gcry_aes_arm_encrypt_block:
 	b	2f;
 .ltorg
 1:
-#endif
 	/* aligned load */
 	ldm	%r2, {RA, RB, RC, RD};
 #ifndef __ARMEL__
@@ -277,7 +276,7 @@ _gcry_aes_arm_encrypt_block:
 	add	%sp, #16;
 
 	/* store output block */
-#ifndef __ARM_FEATURE_UNALIGNED
+
 	/* test if dst is unaligned */
 	tst	RT0, #3;
 	beq	1f;
@@ -290,7 +289,6 @@ _gcry_aes_arm_encrypt_block:
 	b	2f;
 .ltorg
 1:
-#endif
 	/* aligned store */
 #ifndef __ARMEL__
 	rev	RA, RA;
@@ -484,7 +482,7 @@ _gcry_aes_arm_decrypt_block:
 	push {%r4-%r11, %ip, %lr};
 
 	/* read input block */
-#ifndef __ARM_FEATURE_UNALIGNED
+
 	/* test if src is unaligned */
 	tst	%r2, #3;
 	beq	1f;
@@ -497,7 +495,6 @@ _gcry_aes_arm_decrypt_block:
 	b	2f;
 .ltorg
 1:
-#endif
 	/* aligned load */
 	ldm	%r2, {RA, RB, RC, RD};
 #ifndef __ARMEL__
@@ -533,7 +530,7 @@ _gcry_aes_arm_decrypt_block:
 	add	%sp, #16;
 
 	/* store output block */
-#ifndef __ARM_FEATURE_UNALIGNED
+
 	/* test if dst is unaligned */
 	tst	RT0, #3;
 	beq	1f;
@@ -546,7 +543,6 @@ _gcry_aes_arm_decrypt_block:
 	b	2f;
 .ltorg
 1:
-#endif
 	/* aligned store */
 #ifndef __ARMEL__
 	rev	RA, RA;
diff --git a/cipher/sha512-arm.S b/cipher/sha512-arm.S
index 28f156e..94ec014 100644
--- a/cipher/sha512-arm.S
+++ b/cipher/sha512-arm.S
@@ -323,7 +323,7 @@ _gcry_sha512_transform_arm:
 	stm RWhi, {RT1lo,RT1hi,RT2lo,RT2hi,RT3lo,RT3hi,RT4lo,RT4hi}
 
 	/* Load input to w[16] */
-#ifndef __ARM_FEATURE_UNALIGNED
+
 	/* test if data is unaligned */
 	tst %r1, #3;
 	beq 1f;
@@ -341,7 +341,6 @@ _gcry_sha512_transform_arm:
 
 	read_be64_unaligned_4(%r1, 12 * 8, RT1lo, RT1hi, RT2lo, RT2hi, RT3lo, RT3hi, RT4lo, RT4hi, RWlo);
 	b 2f;
-#endif
 1:
 	/* aligned load */
 	add RWhi, %sp, #(w(0));



