[PATCH 3/5] Move data in AMD64 assembly to text section
Jussi Kivilinna
jussi.kivilinna@iki.fi
Thu May 18 19:27:39 CEST 2017
* cipher/camellia-aesni-avx-amd64.S: Move data to .text section to
ensure that RIP relative addressing of data will work.
* cipher/camellia-aesni-avx2-amd64.S: Ditto.
* cipher/chacha20-avx2-amd64.S: Ditto.
* cipher/chacha20-ssse3-amd64.S: Ditto.
* cipher/des-amd64.S: Ditto.
* cipher/serpent-avx2-amd64.S: Ditto.
* cipher/sha1-avx-amd64.S: Ditto.
* cipher/sha1-avx-bmi2-amd64.S: Ditto.
* cipher/sha1-ssse3-amd64.S: Ditto.
* cipher/sha256-avx-amd64.S: Ditto.
* cipher/sha256-avx2-bmi2-amd64.S: Ditto.
* cipher/sha256-ssse3-amd64.S: Ditto.
* cipher/sha512-avx-amd64.S: Ditto.
* cipher/sha512-avx2-bmi2-amd64.S: Ditto.
* cipher/sha512-ssse3-amd64.S: Ditto.
--
Signed-off-by: Jussi Kivilinna <jussi.kivilinna@iki.fi>
---
cipher/camellia-aesni-avx-amd64.S | 5 +----
cipher/camellia-aesni-avx2-amd64.S | 3 +--
cipher/chacha20-avx2-amd64.S | 1 -
cipher/chacha20-ssse3-amd64.S | 1 -
cipher/des-amd64.S | 1 -
cipher/serpent-avx2-amd64.S | 1 -
cipher/sha1-avx-amd64.S | 3 +--
cipher/sha1-avx-bmi2-amd64.S | 3 +--
cipher/sha1-ssse3-amd64.S | 3 +--
cipher/sha256-avx-amd64.S | 1 -
cipher/sha256-avx2-bmi2-amd64.S | 1 -
cipher/sha256-ssse3-amd64.S | 1 -
cipher/sha512-avx-amd64.S | 2 --
cipher/sha512-avx2-bmi2-amd64.S | 2 --
cipher/sha512-ssse3-amd64.S | 2 --
15 files changed, 5 insertions(+), 25 deletions(-)
diff --git a/cipher/camellia-aesni-avx-amd64.S b/cipher/camellia-aesni-avx-amd64.S
index 5a3a3cbc..8022934f 100644
--- a/cipher/camellia-aesni-avx-amd64.S
+++ b/cipher/camellia-aesni-avx-amd64.S
@@ -629,7 +629,7 @@
vmovdqu y6, 14 * 16(rio); \
vmovdqu y7, 15 * 16(rio);
-.data
+.text
.align 16
#define SHUFB_BYTES(idx) \
@@ -773,7 +773,6 @@
.L0f0f0f0f:
.long 0x0f0f0f0f
-.text
.align 8
ELF(.type __camellia_enc_blk16,@function;)
@@ -1702,7 +1701,6 @@ ELF(.size _gcry_camellia_aesni_avx_ocb_auth,.-_gcry_camellia_aesni_avx_ocb_auth;
vpsllq $(64-(nror)), out, out; \
vpaddd t0, out, out;
-.data
.align 16
.Linv_shift_row_and_unpcklbw:
@@ -1735,7 +1733,6 @@ ELF(.size _gcry_camellia_aesni_avx_ocb_auth,.-_gcry_camellia_aesni_avx_ocb_auth;
.Lsigma6:
.long 0xB3E6C1FD, 0xB05688C2;
-.text
.align 8
ELF(.type __camellia_avx_setup128,@function;)
diff --git a/cipher/camellia-aesni-avx2-amd64.S b/cipher/camellia-aesni-avx2-amd64.S
index 26381df0..897e4aee 100644
--- a/cipher/camellia-aesni-avx2-amd64.S
+++ b/cipher/camellia-aesni-avx2-amd64.S
@@ -613,7 +613,7 @@
vmovdqu y6, 14 * 32(rio); \
vmovdqu y7, 15 * 32(rio);
-.data
+.text
.align 32
#define SHUFB_BYTES(idx) \
@@ -752,7 +752,6 @@
.L0f0f0f0f:
.long 0x0f0f0f0f
-.text
.align 8
ELF(.type __camellia_enc_blk32,@function;)
diff --git a/cipher/chacha20-avx2-amd64.S b/cipher/chacha20-avx2-amd64.S
index 12bed35b..8c085bad 100644
--- a/cipher/chacha20-avx2-amd64.S
+++ b/cipher/chacha20-avx2-amd64.S
@@ -947,7 +947,6 @@ _gcry_chacha20_amd64_avx2_blocks:
ret
ELF(.size _gcry_chacha20_amd64_avx2_blocks,.-_gcry_chacha20_amd64_avx2_blocks;)
-.data
.align 16
.LC:
.byte 2,3,0,1,6,7,4,5,10,11,8,9,14,15,12,13 /* pshufb rotate by 16 */
diff --git a/cipher/chacha20-ssse3-amd64.S b/cipher/chacha20-ssse3-amd64.S
index a1a843fa..c04010e7 100644
--- a/cipher/chacha20-ssse3-amd64.S
+++ b/cipher/chacha20-ssse3-amd64.S
@@ -623,7 +623,6 @@ _gcry_chacha20_amd64_ssse3_blocks:
ret
ELF(.size _gcry_chacha20_amd64_ssse3_blocks,.-_gcry_chacha20_amd64_ssse3_blocks;)
-.data
.align 16;
.LC:
.byte 2,3,0,1,6,7,4,5,10,11,8,9,14,15,12,13 /* pshufb rotate by 16 */
diff --git a/cipher/des-amd64.S b/cipher/des-amd64.S
index 307d2112..1b7cfba8 100644
--- a/cipher/des-amd64.S
+++ b/cipher/des-amd64.S
@@ -766,7 +766,6 @@ _gcry_3des_amd64_cfb_dec:
ret;
ELF(.size _gcry_3des_amd64_cfb_dec,.-_gcry_3des_amd64_cfb_dec;)
-.data
.align 16
.L_s1:
.quad 0x0010100001010400, 0x0000000000000000
diff --git a/cipher/serpent-avx2-amd64.S b/cipher/serpent-avx2-amd64.S
index 2902dab5..8d60a159 100644
--- a/cipher/serpent-avx2-amd64.S
+++ b/cipher/serpent-avx2-amd64.S
@@ -1113,7 +1113,6 @@ _gcry_serpent_avx2_ocb_auth:
ret;
ELF(.size _gcry_serpent_avx2_ocb_auth,.-_gcry_serpent_avx2_ocb_auth;)
-.data
.align 16
/* For CTR-mode IV byteswap */
diff --git a/cipher/sha1-avx-amd64.S b/cipher/sha1-avx-amd64.S
index 3b3a6d11..b14603bf 100644
--- a/cipher/sha1-avx-amd64.S
+++ b/cipher/sha1-avx-amd64.S
@@ -58,7 +58,7 @@
/* Constants */
-.data
+.text
#define K1 0x5A827999
#define K2 0x6ED9EBA1
#define K3 0x8F1BBCDC
@@ -214,7 +214,6 @@
* _gcry_sha1_transform_amd64_avx (void *ctx, const unsigned char *data,
* size_t nblks)
*/
-.text
.globl _gcry_sha1_transform_amd64_avx
ELF(.type _gcry_sha1_transform_amd64_avx,@function)
.align 16
diff --git a/cipher/sha1-avx-bmi2-amd64.S b/cipher/sha1-avx-bmi2-amd64.S
index 22bcbb3c..b267693f 100644
--- a/cipher/sha1-avx-bmi2-amd64.S
+++ b/cipher/sha1-avx-bmi2-amd64.S
@@ -59,7 +59,7 @@
/* Constants */
-.data
+.text
#define K1 0x5A827999
#define K2 0x6ED9EBA1
#define K3 0x8F1BBCDC
@@ -212,7 +212,6 @@
* _gcry_sha1_transform_amd64_avx_bmi2 (void *ctx, const unsigned char *data,
* size_t nblks)
*/
-.text
.globl _gcry_sha1_transform_amd64_avx_bmi2
ELF(.type _gcry_sha1_transform_amd64_avx_bmi2,@function)
.align 16
diff --git a/cipher/sha1-ssse3-amd64.S b/cipher/sha1-ssse3-amd64.S
index 98a19e60..2b439476 100644
--- a/cipher/sha1-ssse3-amd64.S
+++ b/cipher/sha1-ssse3-amd64.S
@@ -58,7 +58,7 @@
/* Constants */
-.data
+.text
#define K1 0x5A827999
#define K2 0x6ED9EBA1
#define K3 0x8F1BBCDC
@@ -226,7 +226,6 @@
* _gcry_sha1_transform_amd64_ssse3 (void *ctx, const unsigned char *data,
* size_t nblks)
*/
-.text
.globl _gcry_sha1_transform_amd64_ssse3
ELF(.type _gcry_sha1_transform_amd64_ssse3,@function)
.align 16
diff --git a/cipher/sha256-avx-amd64.S b/cipher/sha256-avx-amd64.S
index 8bf26bd7..6953855b 100644
--- a/cipher/sha256-avx-amd64.S
+++ b/cipher/sha256-avx-amd64.S
@@ -496,7 +496,6 @@ _gcry_sha256_transform_amd64_avx:
ret
-.data
.align 16
.LK256:
.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
diff --git a/cipher/sha256-avx2-bmi2-amd64.S b/cipher/sha256-avx2-bmi2-amd64.S
index 74b60631..85e663fe 100644
--- a/cipher/sha256-avx2-bmi2-amd64.S
+++ b/cipher/sha256-avx2-bmi2-amd64.S
@@ -763,7 +763,6 @@ _gcry_sha256_transform_amd64_avx2:
ret
-.data
.align 64
.LK256:
.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
diff --git a/cipher/sha256-ssse3-amd64.S b/cipher/sha256-ssse3-amd64.S
index 9ec87e46..a9213e41 100644
--- a/cipher/sha256-ssse3-amd64.S
+++ b/cipher/sha256-ssse3-amd64.S
@@ -516,7 +516,6 @@ _gcry_sha256_transform_amd64_ssse3:
ret
-.data
.align 16
.LK256:
.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
diff --git a/cipher/sha512-avx-amd64.S b/cipher/sha512-avx-amd64.S
index 699c271b..446a8b4e 100644
--- a/cipher/sha512-avx-amd64.S
+++ b/cipher/sha512-avx-amd64.S
@@ -368,8 +368,6 @@ _gcry_sha512_transform_amd64_avx:
;;; Binary Data
*/
-.data
-
.align 16
/* Mask for byte-swapping a couple of qwords in an XMM register using (v)pshufb. */
diff --git a/cipher/sha512-avx2-bmi2-amd64.S b/cipher/sha512-avx2-bmi2-amd64.S
index 02f95af6..05bef64c 100644
--- a/cipher/sha512-avx2-bmi2-amd64.S
+++ b/cipher/sha512-avx2-bmi2-amd64.S
@@ -735,8 +735,6 @@ _gcry_sha512_transform_amd64_avx2:
/*;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; */
/*;; Binary Data */
-.data
-
.align 64
/* K[t] used in SHA512 hashing */
.LK512:
diff --git a/cipher/sha512-ssse3-amd64.S b/cipher/sha512-ssse3-amd64.S
index c721bcf2..51193b36 100644
--- a/cipher/sha512-ssse3-amd64.S
+++ b/cipher/sha512-ssse3-amd64.S
@@ -373,8 +373,6 @@ _gcry_sha512_transform_amd64_ssse3:
;;; Binary Data
*/
-.data
-
.align 16
/* Mask for byte-swapping a couple of qwords in an XMM register using (v)pshufb. */
More information about the Gcrypt-devel
mailing list