author     Alexey Dobriyan <adobriyan@gmail.com>            2012-01-14 21:44:49 +0300
committer  Greg Kroah-Hartman <gregkh@linuxfoundation.org>  2012-03-04 09:49:20 -0800
commit     ca24f59e757dda99b2544b908d3ec67e09167c91 (patch)
tree       6cdd0494a2bc7e2ede4bd4df5184663676f2d6dc
parent     98886b542b96243a07623139075be7193eca2ea2 (diff)
download   lwn-ca24f59e757dda99b2544b908d3ec67e09167c91.tar.gz
           lwn-ca24f59e757dda99b2544b908d3ec67e09167c91.zip
crypto: sha512 - use standard ror64()
commit f2ea0f5f04c97b48c88edccba52b0682fbe45087 upstream.

Use a standard ror64() instead of the hand-written rotate. There is no standard ror64() yet, so create it. The only difference is that the shift value is "unsigned int" instead of uint64_t (for which there is no reason). With this change gcc starts to emit native ROR instructions, which it currently does not do for some reason, so this should make the code faster.

The patch survives the in-tree crypto tests and a ping flood with hmac(sha512) enabled.

Signed-off-by: Alexey Dobriyan <adobriyan@gmail.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
-rw-r--r--  crypto/sha512_generic.c | 13
-rw-r--r--  include/linux/bitops.h  | 20
2 files changed, 24 insertions(+), 9 deletions(-)
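
For anyone who wants to try the new helpers outside the kernel tree, below is a minimal userspace sketch of rol64()/ror64() as the patch adds them to include/linux/bitops.h, together with the e0() round macro from crypto/sha512_generic.c. The stdint.h types and the small main() harness are illustrative substitutions only, not part of the patch.

/* Userspace sketch of the rotate helpers added to include/linux/bitops.h.
 * The kernel uses __u64; uint64_t is substituted here so the snippet builds
 * outside the kernel tree. The main() harness is for illustration only.
 */
#include <stdint.h>
#include <stdio.h>

/* rotate a 64-bit value left; the shift is "unsigned int", as in the patch */
static inline uint64_t rol64(uint64_t word, unsigned int shift)
{
	return (word << shift) | (word >> (64 - shift));
}

/* rotate a 64-bit value right */
static inline uint64_t ror64(uint64_t word, unsigned int shift)
{
	return (word >> shift) | (word << (64 - shift));
}

/* one of the SHA-512 round functions from crypto/sha512_generic.c */
#define e0(x) (ror64(x, 28) ^ ror64(x, 34) ^ ror64(x, 39))

int main(void)
{
	uint64_t x = 0x0123456789abcdefULL;

	printf("ror64(x, 8) = %016llx\n", (unsigned long long)ror64(x, 8));
	printf("rol64(x, 8) = %016llx\n", (unsigned long long)rol64(x, 8));
	printf("e0(x)       = %016llx\n", (unsigned long long)e0(x));
	return 0;
}

On x86-64, a recent gcc at -O2 typically recognizes this shift-or pattern and collapses each rotate into a single ror/rol instruction, which is the behaviour the commit message refers to when it says switching the shift to "unsigned int" lets gcc emit native ROR.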
diff --git a/crypto/sha512_generic.c b/crypto/sha512_generic.c
index f04af931a682..107f6f7be5e1 100644
--- a/crypto/sha512_generic.c
+++ b/crypto/sha512_generic.c
@@ -31,11 +31,6 @@ static inline u64 Maj(u64 x, u64 y, u64 z)
return (x & y) | (z & (x | y));
}
-static inline u64 RORu64(u64 x, u64 y)
-{
- return (x >> y) | (x << (64 - y));
-}
-
static const u64 sha512_K[80] = {
0x428a2f98d728ae22ULL, 0x7137449123ef65cdULL, 0xb5c0fbcfec4d3b2fULL,
0xe9b5dba58189dbbcULL, 0x3956c25bf348b538ULL, 0x59f111f1b605d019ULL,
@@ -66,10 +61,10 @@ static const u64 sha512_K[80] = {
0x5fcb6fab3ad6faecULL, 0x6c44198c4a475817ULL,
};
-#define e0(x) (RORu64(x,28) ^ RORu64(x,34) ^ RORu64(x,39))
-#define e1(x) (RORu64(x,14) ^ RORu64(x,18) ^ RORu64(x,41))
-#define s0(x) (RORu64(x, 1) ^ RORu64(x, 8) ^ (x >> 7))
-#define s1(x) (RORu64(x,19) ^ RORu64(x,61) ^ (x >> 6))
+#define e0(x) (ror64(x,28) ^ ror64(x,34) ^ ror64(x,39))
+#define e1(x) (ror64(x,14) ^ ror64(x,18) ^ ror64(x,41))
+#define s0(x) (ror64(x, 1) ^ ror64(x, 8) ^ (x >> 7))
+#define s1(x) (ror64(x,19) ^ ror64(x,61) ^ (x >> 6))
static inline void LOAD_OP(int I, u64 *W, const u8 *input)
{
diff --git a/include/linux/bitops.h b/include/linux/bitops.h
index c05a29cb9bb2..56835a7a3507 100644
--- a/include/linux/bitops.h
+++ b/include/linux/bitops.h
@@ -46,6 +46,26 @@ static inline unsigned long hweight_long(unsigned long w)
}
/**
+ * rol64 - rotate a 64-bit value left
+ * @word: value to rotate
+ * @shift: bits to roll
+ */
+static inline __u64 rol64(__u64 word, unsigned int shift)
+{
+ return (word << shift) | (word >> (64 - shift));
+}
+
+/**
+ * ror64 - rotate a 64-bit value right
+ * @word: value to rotate
+ * @shift: bits to roll
+ */
+static inline __u64 ror64(__u64 word, unsigned int shift)
+{
+ return (word >> shift) | (word << (64 - shift));
+}
+
+/**
* rol32 - rotate a 32-bit value left
* @word: value to rotate
* @shift: bits to roll