LKML Archive mirror
 help / color / mirror / Atom feed
* [PATCH] crypto: alignment fixes
@ 2006-03-08  7:05 Atsushi Nemoto
  2006-03-08 10:27 ` Herbert Xu
  0 siblings, 1 reply; 3+ messages in thread
From: Atsushi Nemoto @ 2006-03-08  7:05 UTC (permalink / raw)
  To: linux-kernel; +Cc: herbert, akpm

This patch fixes some alignment problem on crypto modules.

1. Many cipher setkey functions load key words directly but the key
   words might not be aligned.  Enforce correct alignment in the
   setkey wrapper.
2. Some cipher modules lack cra_alignmask.
3. Some hash modules (and sha_transform() library function) load/store
   data words directly.  Use get_unaligned()/put_unaligned() for them.

Signed-off-by: Atsushi Nemoto <anemo@mba.ocn.ne.jp>

 crypto/cipher.c      |    7 +++++++
 crypto/des.c         |    1 +
 crypto/michael_mic.c |   11 ++++++-----
 crypto/serpent.c     |    1 +
 crypto/sha1.c        |    3 ++-
 crypto/sha256.c      |    3 ++-
 crypto/sha512.c      |    3 ++-
 crypto/tgr192.c      |   12 ++++++++----
 lib/sha1.c           |    3 ++-
 9 files changed, 31 insertions(+), 13 deletions(-)

diff --git a/crypto/cipher.c b/crypto/cipher.c
index 65bcea0..5b72d91 100644
--- a/crypto/cipher.c
+++ b/crypto/cipher.c
@@ -263,10 +263,17 @@ static unsigned int ecb_process(const st
 static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
 {
 	struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;
+	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
 	
 	if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) {
 		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
 		return -EINVAL;
+	} else if ((unsigned long)key & alignmask) {
+		u8 stack[keylen + alignmask];
+		u8 *buf = (u8 *)ALIGN((unsigned long)stack, alignmask + 1);
+		memcpy(buf, key, keylen);
+		return cia->cia_setkey(crypto_tfm_ctx(tfm), buf, keylen,
+				       &tfm->crt_flags);
 	} else
 		return cia->cia_setkey(crypto_tfm_ctx(tfm), key, keylen,
 		                       &tfm->crt_flags);
diff --git a/crypto/des.c b/crypto/des.c
index 7bb5486..2d74cab 100644
--- a/crypto/des.c
+++ b/crypto/des.c
@@ -965,6 +965,7 @@ static struct crypto_alg des3_ede_alg = 
 	.cra_blocksize		=	DES3_EDE_BLOCK_SIZE,
 	.cra_ctxsize		=	sizeof(struct des3_ede_ctx),
 	.cra_module		=	THIS_MODULE,
+	.cra_alignmask		=	3,
 	.cra_list		=	LIST_HEAD_INIT(des3_ede_alg.cra_list),
 	.cra_u			=	{ .cipher = {
 	.cia_min_keysize	=	DES3_EDE_KEY_SIZE,
diff --git a/crypto/michael_mic.c b/crypto/michael_mic.c
index 4f6ab23..80dbe87 100644
--- a/crypto/michael_mic.c
+++ b/crypto/michael_mic.c
@@ -16,6 +16,7 @@
 #include <linux/string.h>
 #include <linux/crypto.h>
 #include <linux/types.h>
+#include <asm/unaligned.h>
 
 
 struct michael_mic_ctx {
@@ -78,7 +79,7 @@ static void michael_update(void *ctx, co
 	src = (const __le32 *)data;
 
 	while (len >= 4) {
-		mctx->l ^= le32_to_cpup(src++);
+		mctx->l ^= le32_to_cpu(get_unaligned(src++));
 		michael_block(mctx->l, mctx->r);
 		len -= 4;
 	}
@@ -116,8 +117,8 @@ static void michael_final(void *ctx, u8 
 	/* l ^= 0; */
 	michael_block(mctx->l, mctx->r);
 
-	dst[0] = cpu_to_le32(mctx->l);
-	dst[1] = cpu_to_le32(mctx->r);
+	put_unaligned(cpu_to_le32(mctx->l), &dst[0]);
+	put_unaligned(cpu_to_le32(mctx->r), &dst[1]);
 }
 
 
@@ -133,8 +134,8 @@ static int michael_setkey(void *ctx, con
 		return -EINVAL;
 	}
 
-	mctx->l = le32_to_cpu(data[0]);
-	mctx->r = le32_to_cpu(data[1]);
+	mctx->l = le32_to_cpu(get_unaligned(&data[0]));
+	mctx->r = le32_to_cpu(get_unaligned(&data[1]));
 	return 0;
 }
 
diff --git a/crypto/serpent.c b/crypto/serpent.c
index 52ad1a4..e366406 100644
--- a/crypto/serpent.c
+++ b/crypto/serpent.c
@@ -481,6 +481,7 @@ static struct crypto_alg serpent_alg = {
 	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
 	.cra_blocksize		=	SERPENT_BLOCK_SIZE,
 	.cra_ctxsize		=	sizeof(struct serpent_ctx),
+	.cra_alignmask		=	3,
 	.cra_module		=	THIS_MODULE,
 	.cra_list		=	LIST_HEAD_INIT(serpent_alg.cra_list),
 	.cra_u			=	{ .cipher = {
diff --git a/crypto/sha1.c b/crypto/sha1.c
index 21571ed..d84a006 100644
--- a/crypto/sha1.c
+++ b/crypto/sha1.c
@@ -24,6 +24,7 @@
 #include <linux/types.h>
 #include <asm/scatterlist.h>
 #include <asm/byteorder.h>
+#include <asm/unaligned.h>
 
 #define SHA1_DIGEST_SIZE	20
 #define SHA1_HMAC_BLOCK_SIZE	64
@@ -100,7 +101,7 @@ static void sha1_final(void* ctx, u8 *ou
 
 	/* Store state in digest */
 	for (i = 0; i < 5; i++)
-		dst[i] = cpu_to_be32(sctx->state[i]);
+		put_unaligned(cpu_to_be32(sctx->state[i]), &dst[i]);
 
 	/* Wipe context */
 	memset(sctx, 0, sizeof *sctx);
diff --git a/crypto/sha256.c b/crypto/sha256.c
index 9d5ef67..f497dde 100644
--- a/crypto/sha256.c
+++ b/crypto/sha256.c
@@ -23,6 +23,7 @@
 #include <linux/types.h>
 #include <asm/scatterlist.h>
 #include <asm/byteorder.h>
+#include <asm/unaligned.h>
 
 #define SHA256_DIGEST_SIZE	32
 #define SHA256_HMAC_BLOCK_SIZE	64
@@ -300,7 +301,7 @@ static void sha256_final(void* ctx, u8 *
 
 	/* Store state in digest */
 	for (i = 0; i < 8; i++)
-		dst[i] = cpu_to_be32(sctx->state[i]);
+		put_unaligned(cpu_to_be32(sctx->state[i]), &dst[i]);
 
 	/* Zeroize sensitive information. */
 	memset(sctx, 0, sizeof(*sctx));
diff --git a/crypto/sha512.c b/crypto/sha512.c
index 3e6e939..0208d2d 100644
--- a/crypto/sha512.c
+++ b/crypto/sha512.c
@@ -21,6 +21,7 @@
 
 #include <asm/scatterlist.h>
 #include <asm/byteorder.h>
+#include <asm/unaligned.h>
 
 #define SHA384_DIGEST_SIZE 48
 #define SHA512_DIGEST_SIZE 64
@@ -258,7 +259,7 @@ sha512_final(void *ctx, u8 *hash)
 
 	/* Store state in digest */
 	for (i = 0; i < 8; i++)
-		dst[i] = cpu_to_be64(sctx->state[i]);
+		put_unaligned(cpu_to_be64(sctx->state[i]), &dst[i]);
 
 	/* Zeroize sensitive information. */
 	memset(sctx, 0, sizeof(struct sha512_ctx));
diff --git a/crypto/tgr192.c b/crypto/tgr192.c
index 2d8e44f..bc2f314 100644
--- a/crypto/tgr192.c
+++ b/crypto/tgr192.c
@@ -28,6 +28,7 @@
 #include <asm/scatterlist.h>
 #include <linux/crypto.h>
 #include <linux/types.h>
+#include <asm/unaligned.h>
 
 #define TGR192_DIGEST_SIZE 24
 #define TGR160_DIGEST_SIZE 20
@@ -472,7 +473,7 @@ static void tgr192_transform(struct tgr1
 	const __le64 *ptr = (const __le64 *)data;
 
 	for (i = 0; i < 8; i++)
-		x[i] = le64_to_cpu(ptr[i]);
+		x[i] = le64_to_cpu(get_unaligned(ptr + i));
 
 	/* save */
 	a = aa = tctx->a;
@@ -596,9 +597,12 @@ static void tgr192_final(void *ctx, u8 *
 	tgr192_transform(tctx, tctx->hash);
 
 	be64p = (__be64 *)tctx->hash;
-	dst[0] = be64p[0] = cpu_to_be64(tctx->a);
-	dst[1] = be64p[1] = cpu_to_be64(tctx->b);
-	dst[2] = be64p[2] = cpu_to_be64(tctx->c);
+	be64p[0] = cpu_to_be64(tctx->a);
+	be64p[1] = cpu_to_be64(tctx->b);
+	be64p[2] = cpu_to_be64(tctx->c);
+	put_unaligned(be64p[0], &dst[0]);
+	put_unaligned(be64p[1], &dst[1]);
+	put_unaligned(be64p[2], &dst[2]);
 }
 
 static void tgr160_final(void *ctx, u8 * out)
diff --git a/lib/sha1.c b/lib/sha1.c
index 1cdabe3..4997950 100644
--- a/lib/sha1.c
+++ b/lib/sha1.c
@@ -6,6 +6,7 @@
 #include <linux/kernel.h>
 #include <linux/module.h>
 #include <linux/cryptohash.h>
+#include <asm/unaligned.h>
 
 /* The SHA f()-functions.  */
 
@@ -41,7 +42,7 @@ void sha_transform(__u32 *digest, const 
 	__u32 a, b, c, d, e, t, i;
 
 	for (i = 0; i < 16; i++)
-		W[i] = be32_to_cpu(((const __be32 *)in)[i]);
+		W[i] = be32_to_cpu(get_unaligned((const __be32 *)in + i));
 
 	for (i = 0; i < 64; i++)
 		W[i+16] = rol32(W[i+13] ^ W[i+8] ^ W[i+2] ^ W[i], 1);

^ permalink raw reply related	[flat|nested] 3+ messages in thread

* Re: [PATCH] crypto: alignment fixes
  2006-03-08  7:05 [PATCH] crypto: alignment fixes Atsushi Nemoto
@ 2006-03-08 10:27 ` Herbert Xu
  2006-03-08 14:09   ` Atsushi Nemoto
  0 siblings, 1 reply; 3+ messages in thread
From: Herbert Xu @ 2006-03-08 10:27 UTC (permalink / raw)
  To: Atsushi Nemoto; +Cc: linux-kernel, akpm

On Wed, Mar 08, 2006 at 04:05:29PM +0900, Atsushi Nemoto wrote:
> This patch fixes some alignment problem on crypto modules.

Thanks for the patch.  Please split this up and cc
linux-crypto@vger.kernel.org.

> 1. Many cipher setkey functions load key words directly but the key
>    words might not be aligned.  Enforce correct alignment in the
>    setkey wrapper.

This isn't right.  The alignmask applies to source/destination buffers
only.  The only requirement on the key is that it must always be
32-bit aligned.

> 2. Some cipher modules lack cra_alignmask.

Good catch.

> 3. Some hash modules (and sha_transform() library function) load/store
>    data words directly.  Use get_unaligned()/put_unaligned() for them.

We should extend alignmask to cover this and handle it in the digest
layer.

Cheers,
-- 
Visit Openswan at http://www.openswan.org/
Email: Herbert Xu ~{PmV>HI~} <herbert@gondor.apana.org.au>
Home Page: http://gondor.apana.org.au/~herbert/
PGP Key: http://gondor.apana.org.au/~herbert/pubkey.txt

^ permalink raw reply	[flat|nested] 3+ messages in thread

* Re: [PATCH] crypto: alignment fixes
  2006-03-08 10:27 ` Herbert Xu
@ 2006-03-08 14:09   ` Atsushi Nemoto
  0 siblings, 0 replies; 3+ messages in thread
From: Atsushi Nemoto @ 2006-03-08 14:09 UTC (permalink / raw)
  To: herbert; +Cc: linux-kernel, akpm

>>>>> On Wed, 8 Mar 2006 21:27:31 +1100, Herbert Xu <herbert@gondor.apana.org.au> said:
>> This patch fixes some alignment problem on crypto modules.

herbert> Thanks for the patch.  Please split this up and cc
herbert> linux-crypto@vger.kernel.org.

OK, I'll send soon.

>> 1. Many cipher setkey functions load key words directly but the key
>> words might not be aligned.  Enforce correct alignment in the
>> setkey wrapper.

herbert> This isn't right.  The alignmask applies to
herbert> source/destination buffers only.  The only requirement on the
herbert> key is that it must always be 32-bit aligned.

Thank you for clarification.  The tcrypt module breaks this
requirement currently.  I'll fix tcrypt.

>> 3. Some hash modules (and sha_transform() library function)
>> load/store data words directly.  Use
>> get_unaligned()/put_unaligned() for them.

herbert> We should extend alignmask to cover this and handle it in the
herbert> digest layer.

OK, I'll try it.  Thank you.

---
Atsushi Nemoto

^ permalink raw reply	[flat|nested] 3+ messages in thread

end of thread, other threads:[~2006-03-08 14:09 UTC | newest]

Thread overview: 3+ messages (download: mbox.gz / follow: Atom feed)
-- links below jump to the message on this page --
2006-03-08  7:05 [PATCH] crypto: alignment fixes Atsushi Nemoto
2006-03-08 10:27 ` Herbert Xu
2006-03-08 14:09   ` Atsushi Nemoto

This is a public inbox, see mirroring instructions
for how to clone and mirror all data and code used for this inbox;
as well as URLs for read-only IMAP folder(s) and NNTP newsgroup(s).