[git] GCRYPT - branch, LIBGCRYPT-1-7-BRANCH, updated. libgcrypt-1.7.0-2-gcaa9d14

by Werner Koch cvs at cvs.gnupg.org
Tue Apr 19 20:13:11 CEST 2016


This is an automated email from the git hooks/post-receive script. It was
generated because a ref change was pushed to the repository containing
the project "The GNU crypto library".

The branch LIBGCRYPT-1-7-BRANCH has been updated
       via  caa9d14c914bf6116ec3f773a322a94e2be0c0fb (commit)
      from  eecc081f8ae02c43454abaee4a4f72efaee42745 (commit)

The revisions listed above that are new to this repository have not
appeared in any other notification email, so we list them in full
below.

- Log -----------------------------------------------------------------
commit caa9d14c914bf6116ec3f773a322a94e2be0c0fb
Author: Werner Koch <wk at gnupg.org>
Date:   Tue Apr 19 20:05:07 2016 +0200

    asm fix for older gcc versions.
    
    * cipher/crc-intel-pclmul.c: Remove extra trailing colon from
    asm statements.
    --
    
    gcc 4.2 is not able to grok a third colon without clobber
    expressions.  Reported for FreeBSD 9.
    
    GnuPG-bug-id: 2326
    Signed-off-by: Werner Koch <wk at gnupg.org>
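
    For readers not familiar with GNU extended asm, here is a minimal
    sketch of the syntax difference the commit addresses.  It is not
    part of the patch; the function and variable names are made up for
    illustration.  Extended asm has the form
    asm(template : outputs : inputs : clobbers), and trailing empty
    sections may simply be omitted:

    /* Illustrative only -- not from the patch.  Shows the syntax
       change applied throughout cipher/crc-intel-pclmul.c.  */
    static unsigned int dummy;

    static void
    asm_colon_example (void)
    {
      /* Form used after the patch: the clobber section is omitted
         entirely, which all gcc versions accept.  */
      asm volatile (""                     /* empty template          */
                    :                      /* no outputs              */
                    : [in] "m" (dummy));   /* one dummy memory input  */

      /* Form used before the patch: a third colon introducing an
         empty clobber list.  Per the commit message, gcc 4.2 cannot
         parse this, so it is shown here only as a comment:

         asm volatile (""
                       :
                       : [in] "m" (dummy)
                       : );
      */
    }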

diff --git a/cipher/crc-intel-pclmul.c b/cipher/crc-intel-pclmul.c
index 5002f80..c034e2e 100644
--- a/cipher/crc-intel-pclmul.c
+++ b/cipher/crc-intel-pclmul.c
@@ -143,7 +143,7 @@ crc32_reflected_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
 		      [inbuf_2] "m" (inbuf[2 * 16]),
 		      [inbuf_3] "m" (inbuf[3 * 16]),
 		      [crc] "m" (*pcrc)
-		    : );
+		    );
 
       inbuf += 4 * 16;
       inlen -= 4 * 16;
@@ -151,7 +151,7 @@ crc32_reflected_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
       asm volatile ("movdqa %[k1k2], %%xmm4\n\t"
 		    :
 		    : [k1k2] "m" (consts->k[1 - 1])
-		    : );
+		    );
 
       /* Fold by 4. */
       while (inlen >= 4 * 16)
@@ -188,7 +188,7 @@ crc32_reflected_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
 			  [inbuf_1] "m" (inbuf[1 * 16]),
 			  [inbuf_2] "m" (inbuf[2 * 16]),
 			  [inbuf_3] "m" (inbuf[3 * 16])
-			: );
+			);
 
 	  inbuf += 4 * 16;
 	  inlen -= 4 * 16;
@@ -199,7 +199,7 @@ crc32_reflected_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
 		    :
 		    : [k3k4] "m" (consts->k[3 - 1]),
 		      [my_p] "m" (consts->my_p[0])
-		    : );
+		    );
 
       /* Fold 4 to 1. */
 
@@ -222,7 +222,7 @@ crc32_reflected_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
 		    "pxor %%xmm4, %%xmm0\n\t"
 		    :
 		    :
-		    : );
+		    );
     }
   else
     {
@@ -236,7 +236,7 @@ crc32_reflected_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
 		      [crc] "m" (*pcrc),
 		      [k3k4] "m" (consts->k[3 - 1]),
 		      [my_p] "m" (consts->my_p[0])
-		    : );
+		    );
 
       inbuf += 16;
       inlen -= 16;
@@ -256,7 +256,7 @@ crc32_reflected_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
 			"pxor %%xmm1, %%xmm0\n\t"
 			:
 			: [inbuf] "m" (*inbuf)
-			: );
+			);
 
 	  inbuf += 16;
 	  inlen -= 16;
@@ -288,7 +288,7 @@ crc32_reflected_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
 		      [mask] "m" (crc32_partial_fold_input_mask[inlen]),
 		      [shl_shuf] "m" (crc32_refl_shuf_shift[inlen]),
 		      [shr_shuf] "m" (crc32_refl_shuf_shift[inlen + 16])
-		    : );
+		    );
 
       inbuf += inlen;
       inlen -= inlen;
@@ -318,7 +318,7 @@ crc32_reflected_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
 		"pextrd $2, %%xmm0, %[out]\n\t"
 		: [out] "=m" (*pcrc)
 		: [k5] "m" (consts->k[5 - 1])
-	        : );
+	        );
 }
 
 static inline void
@@ -333,7 +333,7 @@ crc32_reflected_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
       asm volatile ("movdqa %[my_p], %%xmm5\n\t"
 		    :
 		    : [my_p] "m" (consts->my_p[0])
-		    : );
+		    );
 
       if (inlen == 1)
 	{
@@ -372,7 +372,7 @@ crc32_reflected_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
 		    : [out] "=m" (*pcrc)
 		    : [in] "rm" (data),
 		      [crc] "rm" (crc)
-		    : );
+		    );
     }
   else if (inlen == 4)
     {
@@ -391,7 +391,7 @@ crc32_reflected_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
 		    : [in] "m" (*inbuf),
 		      [crc] "m" (*pcrc),
 		      [my_p] "m" (consts->my_p[0])
-		    : );
+		    );
     }
   else
     {
@@ -404,14 +404,14 @@ crc32_reflected_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
 		      [crc] "m" (*pcrc),
 		      [my_p] "m" (consts->my_p[0]),
 		      [k3k4] "m" (consts->k[3 - 1])
-		    : );
+		    );
 
       if (inlen >= 8)
 	{
 	  asm volatile ("movq %[inbuf], %%xmm0\n\t"
 			:
 			: [inbuf] "m" (*inbuf)
-			: );
+			);
 	  if (inlen > 8)
 	    {
 	      asm volatile (/*"pinsrq $1, %[inbuf_tail], %%xmm0\n\t"*/
@@ -422,7 +422,7 @@ crc32_reflected_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
 			    : [inbuf_tail] "m" (inbuf[inlen - 8]),
 			      [merge_shuf] "m"
 				(*crc32_merge9to15_shuf[inlen - 9])
-			    : );
+			    );
 	    }
 	}
       else
@@ -435,7 +435,7 @@ crc32_reflected_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
 			  [inbuf_tail] "m" (inbuf[inlen - 4]),
 			  [merge_shuf] "m"
 			    (*crc32_merge5to7_shuf[inlen - 5])
-			: );
+			);
 	}
 
       /* Final fold. */
@@ -465,7 +465,7 @@ crc32_reflected_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
 		    "pextrd $2, %%xmm0, %[out]\n\t"
 		    : [out] "=m" (*pcrc)
 		    : [k5] "m" (consts->k[5 - 1])
-		    : );
+		    );
     }
 }
 
@@ -477,7 +477,7 @@ crc32_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
   asm volatile ("movdqa %[bswap], %%xmm7\n\t"
 		:
 		: [bswap] "m" (*crc32_bswap_shuf)
-		: );
+		);
 
   if (inlen >= 8 * 16)
     {
@@ -497,7 +497,7 @@ crc32_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
 		      [inbuf_2] "m" (inbuf[2 * 16]),
 		      [inbuf_3] "m" (inbuf[3 * 16]),
 		      [crc] "m" (*pcrc)
-		    : );
+		    );
 
       inbuf += 4 * 16;
       inlen -= 4 * 16;
@@ -505,7 +505,7 @@ crc32_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
       asm volatile ("movdqa %[k1k2], %%xmm4\n\t"
 		    :
 		    : [k1k2] "m" (consts->k[1 - 1])
-		    : );
+		    );
 
       /* Fold by 4. */
       while (inlen >= 4 * 16)
@@ -546,7 +546,7 @@ crc32_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
 			  [inbuf_1] "m" (inbuf[1 * 16]),
 			  [inbuf_2] "m" (inbuf[2 * 16]),
 			  [inbuf_3] "m" (inbuf[3 * 16])
-			: );
+			);
 
 	  inbuf += 4 * 16;
 	  inlen -= 4 * 16;
@@ -557,7 +557,7 @@ crc32_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
 		    :
 		    : [k3k4] "m" (consts->k[3 - 1]),
 		      [my_p] "m" (consts->my_p[0])
-		    : );
+		    );
 
       /* Fold 4 to 1. */
 
@@ -580,7 +580,7 @@ crc32_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
 		    "pxor %%xmm4, %%xmm0\n\t"
 		    :
 		    :
-		    : );
+		    );
     }
   else
     {
@@ -595,7 +595,7 @@ crc32_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
 		      [crc] "m" (*pcrc),
 		      [k3k4] "m" (consts->k[3 - 1]),
 		      [my_p] "m" (consts->my_p[0])
-		    : );
+		    );
 
       inbuf += 16;
       inlen -= 16;
@@ -616,7 +616,7 @@ crc32_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
 			"pxor %%xmm1, %%xmm0\n\t"
 			:
 			: [inbuf] "m" (*inbuf)
-			: );
+			);
 
 	  inbuf += 16;
 	  inlen -= 16;
@@ -650,7 +650,7 @@ crc32_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
 		      [mask] "m" (crc32_partial_fold_input_mask[inlen]),
 		      [shl_shuf] "m" (crc32_refl_shuf_shift[32 - inlen]),
 		      [shr_shuf] "m" (crc32_shuf_shift[inlen + 16])
-		    : );
+		    );
 
       inbuf += inlen;
       inlen -= inlen;
@@ -697,7 +697,7 @@ crc32_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
       asm volatile ("movdqa %[my_p], %%xmm5\n\t"
 		    :
 		    : [my_p] "m" (consts->my_p[0])
-		    : );
+		    );
 
       if (inlen == 1)
 	{
@@ -774,14 +774,14 @@ crc32_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
 		      [crc] "m" (*pcrc),
 		      [my_p] "m" (consts->my_p[0]),
 		      [k3k4] "m" (consts->k[3 - 1])
-		    : );
+		    );
 
       if (inlen >= 8)
 	{
 	  asm volatile ("movq %[inbuf], %%xmm0\n\t"
 			:
 			: [inbuf] "m" (*inbuf)
-			: );
+			);
 	  if (inlen > 8)
 	    {
 	      asm volatile (/*"pinsrq $1, %[inbuf_tail], %%xmm0\n\t"*/
@@ -792,7 +792,7 @@ crc32_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
 			    : [inbuf_tail] "m" (inbuf[inlen - 8]),
 			      [merge_shuf] "m"
 				(*crc32_merge9to15_shuf[inlen - 9])
-			    : );
+			    );
 	    }
 	}
       else
@@ -805,7 +805,7 @@ crc32_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
 			  [inbuf_tail] "m" (inbuf[inlen - 4]),
 			  [merge_shuf] "m"
 			    (*crc32_merge5to7_shuf[inlen - 5])
-			: );
+			);
 	}
 
       /* Final fold. */

-----------------------------------------------------------------------

Summary of changes:
 cipher/crc-intel-pclmul.c | 62 +++++++++++++++++++++++------------------------
 1 file changed, 31 insertions(+), 31 deletions(-)


hooks/post-receive
-- 
The GNU crypto library
http://git.gnupg.org