| author | Linus Torvalds <torvalds@linux-foundation.org> | 2008-01-25 08:38:25 -0800 |
|---|---|---|
| committer | Linus Torvalds <torvalds@linux-foundation.org> | 2008-01-25 08:38:25 -0800 |
| commit | eba0e319c12fb098d66316a8eafbaaa9174a07c3 (patch) | |
| tree | b2703117db9e36bb3510654efd55361f61c54742 /include | |
| parent | df8dc74e8a383eaf2d9b44b80a71ec6f0e52b42e (diff) | |
| parent | 15e7b4452b72ae890f2fcb027b4c4fa63a1c9a7a (diff) | |
Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (125 commits)
[CRYPTO] twofish: Merge common glue code
[CRYPTO] hifn_795x: Fixup container_of() usage
[CRYPTO] cast6: inline bloat--
[CRYPTO] api: Set default CRYPTO_MINALIGN to unsigned long long
[CRYPTO] tcrypt: Make xcbc available as a standalone test
[CRYPTO] xcbc: Remove bogus hash/cipher test
[CRYPTO] xcbc: Fix algorithm leak when block size check fails
[CRYPTO] tcrypt: Zero axbuf in the right function
[CRYPTO] padlock: Only reset the key once for each CBC and ECB operation
[CRYPTO] api: Include sched.h for cond_resched in scatterwalk.h
[CRYPTO] salsa20-asm: Remove unnecessary dependency on CRYPTO_SALSA20
[CRYPTO] tcrypt: Add select of AEAD
[CRYPTO] salsa20: Add x86-64 assembly version
[CRYPTO] salsa20_i586: Salsa20 stream cipher algorithm (i586 version)
[CRYPTO] gcm: Introduce rfc4106
[CRYPTO] api: Show async type
[CRYPTO] chainiv: Avoid lock spinning where possible
[CRYPTO] seqiv: Add select AEAD in Kconfig
[CRYPTO] scatterwalk: Handle zero nbytes in scatterwalk_map_and_copy
[CRYPTO] null: Allow setkey on digest_null
...
Diffstat (limited to 'include')
| -rw-r--r-- | include/crypto/aead.h | 105 |
| -rw-r--r-- | include/crypto/aes.h | 31 |
| -rw-r--r-- | include/crypto/algapi.h | 31 |
| -rw-r--r-- | include/crypto/authenc.h | 27 |
| -rw-r--r-- | include/crypto/ctr.h | 20 |
| -rw-r--r-- | include/crypto/des.h | 19 |
| -rw-r--r-- | include/crypto/internal/aead.h | 80 |
| -rw-r--r-- | include/crypto/internal/skcipher.h | 110 |
| -rw-r--r-- | include/crypto/scatterwalk.h | 119 |
| -rw-r--r-- | include/crypto/sha.h | 12 |
| -rw-r--r-- | include/crypto/skcipher.h | 110 |
| -rw-r--r-- | include/linux/crypto.h | 103 |
| -rw-r--r-- | include/linux/hw_random.h | 2 |
13 files changed, 723 insertions(+), 46 deletions(-)
diff --git a/include/crypto/aead.h b/include/crypto/aead.h
new file mode 100644
index 000000000000..0edf949f6369
--- /dev/null
+++ b/include/crypto/aead.h
@@ -0,0 +1,105 @@
+/*
+ * AEAD: Authenticated Encryption with Associated Data
+ *
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#ifndef _CRYPTO_AEAD_H
+#define _CRYPTO_AEAD_H
+
+#include <linux/crypto.h>
+#include <linux/kernel.h>
+#include <linux/slab.h>
+
+/**
+ * struct aead_givcrypt_request - AEAD request with IV generation
+ * @seq: Sequence number for IV generation
+ * @giv: Space for generated IV
+ * @areq: The AEAD request itself
+ */
+struct aead_givcrypt_request {
+	u64 seq;
+	u8 *giv;
+
+	struct aead_request areq;
+};
+
+static inline struct crypto_aead *aead_givcrypt_reqtfm(
+	struct aead_givcrypt_request *req)
+{
+	return crypto_aead_reqtfm(&req->areq);
+}
+
+static inline int crypto_aead_givencrypt(struct aead_givcrypt_request *req)
+{
+	struct aead_tfm *crt = crypto_aead_crt(aead_givcrypt_reqtfm(req));
+	return crt->givencrypt(req);
+};
+
+static inline int crypto_aead_givdecrypt(struct aead_givcrypt_request *req)
+{
+	struct aead_tfm *crt = crypto_aead_crt(aead_givcrypt_reqtfm(req));
+	return crt->givdecrypt(req);
+};
+
+static inline void aead_givcrypt_set_tfm(struct aead_givcrypt_request *req,
+					 struct crypto_aead *tfm)
+{
+	req->areq.base.tfm = crypto_aead_tfm(tfm);
+}
+
+static inline struct aead_givcrypt_request *aead_givcrypt_alloc(
+	struct crypto_aead *tfm, gfp_t gfp)
+{
+	struct aead_givcrypt_request *req;
+
+	req = kmalloc(sizeof(struct aead_givcrypt_request) +
+		      crypto_aead_reqsize(tfm), gfp);
+
+	if (likely(req))
+		aead_givcrypt_set_tfm(req, tfm);
+
+	return req;
+}
+
+static inline void aead_givcrypt_free(struct aead_givcrypt_request *req)
+{
+	kfree(req);
+}
+
+static inline void aead_givcrypt_set_callback(
+	struct aead_givcrypt_request *req, u32 flags,
+	crypto_completion_t complete, void *data)
+{
+	aead_request_set_callback(&req->areq, flags, complete, data);
+}
+
+static inline void aead_givcrypt_set_crypt(struct aead_givcrypt_request *req,
+					   struct scatterlist *src,
+					   struct scatterlist *dst,
+					   unsigned int nbytes, void *iv)
+{
+	aead_request_set_crypt(&req->areq, src, dst, nbytes, iv);
+}
+
+static inline void aead_givcrypt_set_assoc(struct aead_givcrypt_request *req,
+					   struct scatterlist *assoc,
+					   unsigned int assoclen)
+{
+	aead_request_set_assoc(&req->areq, assoc, assoclen);
+}
+
+static inline void aead_givcrypt_set_giv(struct aead_givcrypt_request *req,
+					 u8 *giv, u64 seq)
+{
+	req->giv = giv;
+	req->seq = seq;
+}
+
+#endif	/* _CRYPTO_AEAD_H */
diff --git a/include/crypto/aes.h b/include/crypto/aes.h
new file mode 100644
index 000000000000..d480b76715a8
--- /dev/null
+++ b/include/crypto/aes.h
@@ -0,0 +1,31 @@
+/*
+ * Common values for AES algorithms
+ */
+
+#ifndef _CRYPTO_AES_H
+#define _CRYPTO_AES_H
+
+#include <linux/types.h>
+#include <linux/crypto.h>
+
+#define AES_MIN_KEY_SIZE	16
+#define AES_MAX_KEY_SIZE	32
+#define AES_KEYSIZE_128		16
+#define AES_KEYSIZE_192		24
+#define AES_KEYSIZE_256		32
+#define AES_BLOCK_SIZE		16
+
+struct crypto_aes_ctx {
+	u32 key_length;
+	u32 key_enc[60];
+	u32 key_dec[60];
+};
+
+extern u32 crypto_ft_tab[4][256];
+extern u32 crypto_fl_tab[4][256];
+extern u32 crypto_it_tab[4][256];
+extern u32 crypto_il_tab[4][256];
+
+int crypto_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+		unsigned int key_len);
+#endif
diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h
index b9b05d399d2b..60d06e784be3 100644
--- a/include/crypto/algapi.h
+++ b/include/crypto/algapi.h
@@ -111,8 +111,15 @@ void crypto_drop_spawn(struct crypto_spawn *spawn);
 struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				     u32 mask);
 
+static inline void crypto_set_spawn(struct crypto_spawn *spawn,
+				    struct crypto_instance *inst)
+{
+	spawn->inst = inst;
+}
+
 struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
 int crypto_check_attr_type(struct rtattr **tb, u32 type);
+const char *crypto_attr_alg_name(struct rtattr *rta);
 struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask);
 int crypto_attr_u32(struct rtattr *rta, u32 *num);
 struct crypto_instance *crypto_alloc_instance(const char *name,
@@ -124,6 +131,10 @@ int crypto_enqueue_request(struct crypto_queue *queue,
 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
 int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);
 
+/* These functions require the input/output to be aligned as u32. */
+void crypto_inc(u8 *a, unsigned int size);
+void crypto_xor(u8 *dst, const u8 *src, unsigned int size);
+
 int blkcipher_walk_done(struct blkcipher_desc *desc,
			 struct blkcipher_walk *walk, int err);
 int blkcipher_walk_virt(struct blkcipher_desc *desc,
@@ -187,20 +198,11 @@ static inline struct crypto_instance *crypto_aead_alg_instance(
	return crypto_tfm_alg_instance(&aead->base);
 }
 
-static inline struct crypto_ablkcipher *crypto_spawn_ablkcipher(
-	struct crypto_spawn *spawn)
-{
-	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
-	u32 mask = CRYPTO_ALG_TYPE_MASK;
-
-	return __crypto_ablkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
-}
-
 static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
	struct crypto_spawn *spawn)
 {
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
-	u32 mask = CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC;
+	u32 mask = CRYPTO_ALG_TYPE_MASK;
 
	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
 }
@@ -303,5 +305,14 @@ static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
	return crypto_attr_alg(tb[1], type, mask);
 }
 
+/*
+ * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
+ * Otherwise returns zero.
+ */
+static inline int crypto_requires_sync(u32 type, u32 mask)
+{
+	return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
+}
+
 #endif	/* _CRYPTO_ALGAPI_H */
diff --git a/include/crypto/authenc.h b/include/crypto/authenc.h
new file mode 100644
index 000000000000..e47b044929a8
--- /dev/null
+++ b/include/crypto/authenc.h
@@ -0,0 +1,27 @@
+/*
+ * Authenc: Simple AEAD wrapper for IPsec
+ *
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+#ifndef _CRYPTO_AUTHENC_H
+#define _CRYPTO_AUTHENC_H
+
+#include <linux/types.h>
+
+enum {
+	CRYPTO_AUTHENC_KEYA_UNSPEC,
+	CRYPTO_AUTHENC_KEYA_PARAM,
+};
+
+struct crypto_authenc_key_param {
+	__be32 enckeylen;
+};
+
+#endif	/* _CRYPTO_AUTHENC_H */
+
diff --git a/include/crypto/ctr.h b/include/crypto/ctr.h
new file mode 100644
index 000000000000..4180fc080e3b
--- /dev/null
+++ b/include/crypto/ctr.h
@@ -0,0 +1,20 @@
+/*
+ * CTR: Counter mode
+ *
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#ifndef _CRYPTO_CTR_H
+#define _CRYPTO_CTR_H
+
+#define CTR_RFC3686_NONCE_SIZE 4
+#define CTR_RFC3686_IV_SIZE 8
+#define CTR_RFC3686_BLOCK_SIZE 16
+
+#endif /* _CRYPTO_CTR_H */
diff --git a/include/crypto/des.h b/include/crypto/des.h
new file mode 100644
index 000000000000..2971c6304ade
--- /dev/null
+++ b/include/crypto/des.h
@@ -0,0 +1,19 @@
+/*
+ * DES & Triple DES EDE Cipher Algorithms.
+ */
+
+#ifndef __CRYPTO_DES_H
+#define __CRYPTO_DES_H
+
+#define DES_KEY_SIZE		8
+#define DES_EXPKEY_WORDS	32
+#define DES_BLOCK_SIZE		8
+
+#define DES3_EDE_KEY_SIZE	(3 * DES_KEY_SIZE)
+#define DES3_EDE_EXPKEY_WORDS	(3 * DES_EXPKEY_WORDS)
+#define DES3_EDE_BLOCK_SIZE	DES_BLOCK_SIZE
+
+
+extern unsigned long des_ekey(u32 *pe, const u8 *k);
+
+#endif /* __CRYPTO_DES_H */
diff --git a/include/crypto/internal/aead.h b/include/crypto/internal/aead.h
new file mode 100644
index 000000000000..d838c945575a
--- /dev/null
+++ b/include/crypto/internal/aead.h
@@ -0,0 +1,80 @@
+/*
+ * AEAD: Authenticated Encryption with Associated Data
+ *
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#ifndef _CRYPTO_INTERNAL_AEAD_H
+#define _CRYPTO_INTERNAL_AEAD_H
+
+#include <crypto/aead.h>
+#include <crypto/algapi.h>
+#include <linux/types.h>
+
+struct rtattr;
+
+struct crypto_aead_spawn {
+	struct crypto_spawn base;
+};
+
+extern const struct crypto_type crypto_nivaead_type;
+
+static inline void crypto_set_aead_spawn(
+	struct crypto_aead_spawn *spawn, struct crypto_instance *inst)
+{
+	crypto_set_spawn(&spawn->base, inst);
+}
+
+int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
+		     u32 type, u32 mask);
+
+static inline void crypto_drop_aead(struct crypto_aead_spawn *spawn)
+{
+	crypto_drop_spawn(&spawn->base);
+}
+
+static inline struct crypto_alg *crypto_aead_spawn_alg(
+	struct crypto_aead_spawn *spawn)
+{
+	return spawn->base.alg;
+}
+
+static inline struct crypto_aead *crypto_spawn_aead(
+	struct crypto_aead_spawn *spawn)
+{
+	return __crypto_aead_cast(
+		crypto_spawn_tfm(&spawn->base, CRYPTO_ALG_TYPE_AEAD,
+				 CRYPTO_ALG_TYPE_MASK));
+}
+
+struct crypto_instance *aead_geniv_alloc(struct crypto_template *tmpl,
+					 struct rtattr **tb, u32 type,
+					 u32 mask);
+void aead_geniv_free(struct crypto_instance *inst);
+int aead_geniv_init(struct crypto_tfm *tfm);
+void aead_geniv_exit(struct crypto_tfm *tfm);
+
+static inline struct crypto_aead *aead_geniv_base(struct crypto_aead *geniv)
+{
+	return crypto_aead_crt(geniv)->base;
+}
+
+static inline void *aead_givcrypt_reqctx(struct aead_givcrypt_request *req)
+{
+	return aead_request_ctx(&req->areq);
+}
+
+static inline void aead_givcrypt_complete(struct aead_givcrypt_request *req,
+					  int err)
+{
+	aead_request_complete(&req->areq, err);
+}
+
+#endif	/* _CRYPTO_INTERNAL_AEAD_H */
+
diff --git a/include/crypto/internal/skcipher.h b/include/crypto/internal/skcipher.h
new file mode 100644
index 000000000000..2ba42cd7d6aa
--- /dev/null
+++ b/include/crypto/internal/skcipher.h
@@ -0,0 +1,110 @@
+/*
+ * Symmetric key ciphers.
+ *
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#ifndef _CRYPTO_INTERNAL_SKCIPHER_H
+#define _CRYPTO_INTERNAL_SKCIPHER_H
+
+#include <crypto/algapi.h>
+#include <crypto/skcipher.h>
+#include <linux/types.h>
+
+struct rtattr;
+
+struct crypto_skcipher_spawn {
+	struct crypto_spawn base;
+};
+
+extern const struct crypto_type crypto_givcipher_type;
+
+static inline void crypto_set_skcipher_spawn(
+	struct crypto_skcipher_spawn *spawn, struct crypto_instance *inst)
+{
+	crypto_set_spawn(&spawn->base, inst);
+}
+
+int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
+			 u32 type, u32 mask);
+
+static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
+{
+	crypto_drop_spawn(&spawn->base);
+}
+
+static inline struct crypto_alg *crypto_skcipher_spawn_alg(
+	struct crypto_skcipher_spawn *spawn)
+{
+	return spawn->base.alg;
+}
+
+static inline struct crypto_ablkcipher *crypto_spawn_skcipher(
+	struct crypto_skcipher_spawn *spawn)
+{
+	return __crypto_ablkcipher_cast(
+		crypto_spawn_tfm(&spawn->base, crypto_skcipher_type(0),
+				 crypto_skcipher_mask(0)));
+}
+
+int skcipher_null_givencrypt(struct skcipher_givcrypt_request *req);
+int skcipher_null_givdecrypt(struct skcipher_givcrypt_request *req);
+const char *crypto_default_geniv(const struct crypto_alg *alg);
+
+struct crypto_instance *skcipher_geniv_alloc(struct crypto_template *tmpl,
+					     struct rtattr **tb, u32 type,
+					     u32 mask);
+void skcipher_geniv_free(struct crypto_instance *inst);
+int skcipher_geniv_init(struct crypto_tfm *tfm);
+void skcipher_geniv_exit(struct crypto_tfm *tfm);
+
+static inline struct crypto_ablkcipher *skcipher_geniv_cipher(
+	struct crypto_ablkcipher *geniv)
+{
+	return crypto_ablkcipher_crt(geniv)->base;
+}
+
+static inline int skcipher_enqueue_givcrypt(
+	struct crypto_queue *queue, struct skcipher_givcrypt_request *request)
+{
+	return ablkcipher_enqueue_request(queue, &request->creq);
+}
+
+static inline struct skcipher_givcrypt_request *skcipher_dequeue_givcrypt(
+	struct crypto_queue *queue)
+{
+	return container_of(ablkcipher_dequeue_request(queue),
+			    struct skcipher_givcrypt_request, creq);
+}
+
+static inline void *skcipher_givcrypt_reqctx(
+	struct skcipher_givcrypt_request *req)
+{
+	return ablkcipher_request_ctx(&req->creq);
+}
+
+static inline void ablkcipher_request_complete(struct ablkcipher_request *req,
+					       int err)
+{
+	req->base.complete(&req->base, err);
+}
+
+static inline void skcipher_givcrypt_complete(
+	struct skcipher_givcrypt_request *req, int err)
+{
+	ablkcipher_request_complete(&req->creq, err);
+}
+
+static inline u32 ablkcipher_request_flags(struct ablkcipher_request *req)
+{
+	return req->base.flags;
+}
+
+#endif	/* _CRYPTO_INTERNAL_SKCIPHER_H */
+
diff --git a/include/crypto/scatterwalk.h b/include/crypto/scatterwalk.h
new file mode 100644
index 000000000000..224658b8d806
--- /dev/null
+++ b/include/crypto/scatterwalk.h
@@ -0,0 +1,119 @@
+/*
+ * Cryptographic scatter and gather helpers.
+ *
+ * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
+ * Copyright (c) 2002 Adam J. Richter <adam@yggdrasil.com>
+ * Copyright (c) 2004 Jean-Luc Cooke <jlcooke@certainkey.com>
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#ifndef _CRYPTO_SCATTERWALK_H
+#define _CRYPTO_SCATTERWALK_H
+
+#include <asm/kmap_types.h>
+#include <crypto/algapi.h>
+#include <linux/hardirq.h>
+#include <linux/highmem.h>
+#include <linux/kernel.h>
+#include <linux/mm.h>
+#include <linux/scatterlist.h>
+#include <linux/sched.h>
+
+static inline enum km_type crypto_kmap_type(int out)
+{
+	enum km_type type;
+
+	if (in_softirq())
+		type = out * (KM_SOFTIRQ1 - KM_SOFTIRQ0) + KM_SOFTIRQ0;
+	else
+		type = out * (KM_USER1 - KM_USER0) + KM_USER0;
+
+	return type;
+}
+
+static inline void *crypto_kmap(struct page *page, int out)
+{
+	return kmap_atomic(page, crypto_kmap_type(out));
+}
+
+static inline void crypto_kunmap(void *vaddr, int out)
+{
+	kunmap_atomic(vaddr, crypto_kmap_type(out));
+}
+
+static inline void crypto_yield(u32 flags)
+{
+	if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
+		cond_resched();
+}
+
+static inline void scatterwalk_sg_chain(struct scatterlist *sg1, int num,
+					struct scatterlist *sg2)
+{
+	sg_set_page(&sg1[num - 1], (void *)sg2, 0, 0);
+}
+
+static inline struct scatterlist *scatterwalk_sg_next(struct scatterlist *sg)
+{
+	return (++sg)->length ? sg : (void *)sg_page(sg);
+}
+
+static inline unsigned long scatterwalk_samebuf(struct scatter_walk *walk_in,
+						struct scatter_walk *walk_out)
+{
+	return !(((sg_page(walk_in->sg) - sg_page(walk_out->sg)) << PAGE_SHIFT) +
+		 (int)(walk_in->offset - walk_out->offset));
+}
+
+static inline unsigned int scatterwalk_pagelen(struct scatter_walk *walk)
+{
+	unsigned int len = walk->sg->offset + walk->sg->length - walk->offset;
+	unsigned int len_this_page = offset_in_page(~walk->offset) + 1;
+	return len_this_page > len ? len : len_this_page;
+}
+
+static inline unsigned int scatterwalk_clamp(struct scatter_walk *walk,
+					     unsigned int nbytes)
+{
+	unsigned int len_this_page = scatterwalk_pagelen(walk);
+	return nbytes > len_this_page ? len_this_page : nbytes;
+}
+
+static inline void scatterwalk_advance(struct scatter_walk *walk,
+				       unsigned int nbytes)
+{
+	walk->offset += nbytes;
+}
+
+static inline unsigned int scatterwalk_aligned(struct scatter_walk *walk,
+					       unsigned int alignmask)
+{
+	return !(walk->offset & alignmask);
+}
+
+static inline struct page *scatterwalk_page(struct scatter_walk *walk)
+{
+	return sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
+}
+
+static inline void scatterwalk_unmap(void *vaddr, int out)
+{
+	crypto_kunmap(vaddr, out);
+}
+
+void scatterwalk_start(struct scatter_walk *walk, struct scatterlist *sg);
+void scatterwalk_copychunks(void *buf, struct scatter_walk *walk,
+			    size_t nbytes, int out);
+void *scatterwalk_map(struct scatter_walk *walk, int out);
+void scatterwalk_done(struct scatter_walk *walk, int out, int more);
+
+void scatterwalk_map_and_copy(void *buf, struct scatterlist *sg,
+			      unsigned int start, unsigned int nbytes, int out);
+
+#endif	/* _CRYPTO_SCATTERWALK_H */
diff --git a/include/crypto/sha.h b/include/crypto/sha.h
index 0686e1f7a24b..c0ccc2b1a2d8 100644
--- a/include/crypto/sha.h
+++ b/include/crypto/sha.h
@@ -8,6 +8,9 @@
 #define SHA1_DIGEST_SIZE	20
 #define SHA1_BLOCK_SIZE		64
 
+#define SHA224_DIGEST_SIZE	28
+#define SHA224_BLOCK_SIZE	64
+
 #define SHA256_DIGEST_SIZE	32
 #define SHA256_BLOCK_SIZE	64
 
@@ -23,6 +26,15 @@
 #define SHA1_H3		0x10325476UL
 #define SHA1_H4		0xc3d2e1f0UL
 
+#define SHA224_H0	0xc1059ed8UL
+#define SHA224_H1	0x367cd507UL
+#define SHA224_H2	0x3070dd17UL
+#define SHA224_H3	0xf70e5939UL
+#define SHA224_H4	0xffc00b31UL
+#define SHA224_H5	0x68581511UL
+#define SHA224_H6	0x64f98fa7UL
+#define SHA224_H7	0xbefa4fa4UL
+
 #define SHA256_H0	0x6a09e667UL
 #define SHA256_H1	0xbb67ae85UL
 #define SHA256_H2	0x3c6ef372UL
diff --git a/include/crypto/skcipher.h b/include/crypto/skcipher.h
new file mode 100644
index 000000000000..25fd6126522d
--- /dev/null
+++ b/include/crypto/skcipher.h
@@ -0,0 +1,110 @@
+/*
+ * Symmetric key ciphers.
+ *
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#ifndef _CRYPTO_SKCIPHER_H
+#define _CRYPTO_SKCIPHER_H
+
+#include <linux/crypto.h>
+#include <linux/kernel.h>
+#include <linux/slab.h>
+
+/**
+ * struct skcipher_givcrypt_request - Crypto request with IV generation
+ * @seq: Sequence number for IV generation
+ * @giv: Space for generated IV
+ * @creq: The crypto request itself
+ */
+struct skcipher_givcrypt_request {
+	u64 seq;
+	u8 *giv;
+
+	struct ablkcipher_request creq;
+};
+
+static inline struct crypto_ablkcipher *skcipher_givcrypt_reqtfm(
+	struct skcipher_givcrypt_request *req)
+{
+	return crypto_ablkcipher_reqtfm(&req->creq);
+}
+
+static inline int crypto_skcipher_givencrypt(
+	struct skcipher_givcrypt_request *req)
+{
+	struct ablkcipher_tfm *crt =
+		crypto_ablkcipher_crt(skcipher_givcrypt_reqtfm(req));
+	return crt->givencrypt(req);
+};
+
+static inline int crypto_skcipher_givdecrypt(
+	struct skcipher_givcrypt_request *req)
+{
+	struct ablkcipher_tfm *crt =
+		crypto_ablkcipher_crt(skcipher_givcrypt_reqtfm(req));
+	return crt->givdecrypt(req);
+};
+
+static inline void skcipher_givcrypt_set_tfm(
+	struct skcipher_givcrypt_request *req, struct crypto_ablkcipher *tfm)
+{
+	req->creq.base.tfm = crypto_ablkcipher_tfm(tfm);
+}
+
+static inline struct skcipher_givcrypt_request *skcipher_givcrypt_cast(
+	struct crypto_async_request *req)
+{
+	return container_of(ablkcipher_request_cast(req),
+			    struct skcipher_givcrypt_request, creq);
+}
+
+static inline struct skcipher_givcrypt_request *skcipher_givcrypt_alloc(
+	struct crypto_ablkcipher *tfm, gfp_t gfp)
+{
+	struct skcipher_givcrypt_request *req;
+
+	req = kmalloc(sizeof(struct skcipher_givcrypt_request) +
+		      crypto_ablkcipher_reqsize(tfm), gfp);
+
+	if (likely(req))
+		skcipher_givcrypt_set_tfm(req, tfm);
+
+	return req;
+}
+
+static inline void skcipher_givcrypt_free(struct skcipher_givcrypt_request *req)
+{
+	kfree(req);
+}
+
+static inline void skcipher_givcrypt_set_callback(
+	struct skcipher_givcrypt_request *req, u32 flags,
+	crypto_completion_t complete, void *data)
+{
+	ablkcipher_request_set_callback(&req->creq, flags, complete, data);
+}
+
+static inline void skcipher_givcrypt_set_crypt(
+	struct skcipher_givcrypt_request *req,
+	struct scatterlist *src, struct scatterlist *dst,
+	unsigned int nbytes, void *iv)
+{
+	ablkcipher_request_set_crypt(&req->creq, src, dst, nbytes, iv);
+}
+
+static inline void skcipher_givcrypt_set_giv(
+	struct skcipher_givcrypt_request *req, u8 *giv, u64 seq)
+{
+	req->giv = giv;
+	req->seq = seq;
+}
+
+#endif	/* _CRYPTO_SKCIPHER_H */
+
diff --git a/include/linux/crypto.h b/include/linux/crypto.h
index f3110ebe894a..5e02d1b46370 100644
--- a/include/linux/crypto.h
+++ b/include/linux/crypto.h
@@ -33,10 +33,13 @@
 #define CRYPTO_ALG_TYPE_DIGEST		0x00000002
 #define CRYPTO_ALG_TYPE_HASH		0x00000003
 #define CRYPTO_ALG_TYPE_BLKCIPHER	0x00000004
-#define CRYPTO_ALG_TYPE_COMPRESS	0x00000005
-#define CRYPTO_ALG_TYPE_AEAD		0x00000006
+#define CRYPTO_ALG_TYPE_ABLKCIPHER	0x00000005
+#define CRYPTO_ALG_TYPE_GIVCIPHER	0x00000006
+#define CRYPTO_ALG_TYPE_COMPRESS	0x00000008
+#define CRYPTO_ALG_TYPE_AEAD		0x00000009
 
 #define CRYPTO_ALG_TYPE_HASH_MASK	0x0000000e
+#define CRYPTO_ALG_TYPE_BLKCIPHER_MASK	0x0000000c
 
 #define CRYPTO_ALG_LARVAL		0x00000010
 #define CRYPTO_ALG_DEAD			0x00000020
@@ -50,6 +53,12 @@
 #define CRYPTO_ALG_NEED_FALLBACK	0x00000100
 
 /*
+ * This bit is set for symmetric key ciphers that have already been wrapped
+ * with a generic IV generator to prevent them from being wrapped again.
+ */
+#define CRYPTO_ALG_GENIV		0x00000200
+
+/*
  * Transform masks and values (for crt_flags).
  */
 #define CRYPTO_TFM_REQ_MASK		0x000fff00
@@ -81,13 +90,11 @@
 #define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN
 #elif defined(ARCH_SLAB_MINALIGN)
 #define CRYPTO_MINALIGN ARCH_SLAB_MINALIGN
+#else
+#define CRYPTO_MINALIGN __alignof__(unsigned long long)
 #endif
 
-#ifdef CRYPTO_MINALIGN
 #define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
-#else
-#define CRYPTO_MINALIGN_ATTR
-#endif
 
 struct scatterlist;
 struct crypto_ablkcipher;
@@ -97,6 +104,8 @@ struct crypto_blkcipher;
 struct crypto_hash;
 struct crypto_tfm;
 struct crypto_type;
+struct aead_givcrypt_request;
+struct skcipher_givcrypt_request;
 
 typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);
@@ -176,6 +185,10 @@ struct ablkcipher_alg {
	              unsigned int keylen);
	int (*encrypt)(struct ablkcipher_request *req);
	int (*decrypt)(struct ablkcipher_request *req);
+	int (*givencrypt)(struct skcipher_givcrypt_request *req);
+	int (*givdecrypt)(struct skcipher_givcrypt_request *req);
+
+	const char *geniv;
 
	unsigned int min_keysize;
	unsigned int max_keysize;
@@ -185,11 +198,16 @@ struct ablkcipher_alg {
 struct aead_alg {
	int (*setkey)(struct crypto_aead *tfm, const u8 *key,
	              unsigned int keylen);
+	int (*setauthsize)(struct crypto_aead *tfm, unsigned int authsize);
	int (*encrypt)(struct aead_request *req);
	int (*decrypt)(struct aead_request *req);
+	int (*givencrypt)(struct aead_givcrypt_request *req);
+	int (*givdecrypt)(struct aead_givcrypt_request *req);
+
+	const char *geniv;
 
	unsigned int ivsize;
-	unsigned int authsize;
+	unsigned int maxauthsize;
 };
@@ -202,6 +220,8 @@ struct blkcipher_alg {
	                  struct scatterlist *dst, struct scatterlist *src,
	                  unsigned int nbytes);
 
+	const char *geniv;
+
	unsigned int min_keysize;
	unsigned int max_keysize;
	unsigned int ivsize;
@@ -317,6 +337,11 @@ struct ablkcipher_tfm {
	              unsigned int keylen);
	int (*encrypt)(struct ablkcipher_request *req);
	int (*decrypt)(struct ablkcipher_request *req);
+	int (*givencrypt)(struct skcipher_givcrypt_request *req);
+	int (*givdecrypt)(struct skcipher_givcrypt_request *req);
+
+	struct crypto_ablkcipher *base;
+
	unsigned int ivsize;
	unsigned int reqsize;
 };
@@ -326,6 +351,11 @@ struct aead_tfm {
	              unsigned int keylen);
	int (*encrypt)(struct aead_request *req);
	int (*decrypt)(struct aead_request *req);
+	int (*givencrypt)(struct aead_givcrypt_request *req);
+	int (*givdecrypt)(struct aead_givcrypt_request *req);
+
+	struct crypto_aead *base;
+
	unsigned int ivsize;
	unsigned int authsize;
	unsigned int reqsize;
@@ -525,17 +555,23 @@ static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast(
	return (struct crypto_ablkcipher *)tfm;
 }
 
-static inline struct crypto_ablkcipher *crypto_alloc_ablkcipher(
-	const char *alg_name, u32 type, u32 mask)
+static inline u32 crypto_skcipher_type(u32 type)
 {
-	type &= ~CRYPTO_ALG_TYPE_MASK;
+	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
-	mask |= CRYPTO_ALG_TYPE_MASK;
+	return type;
+}
 
-	return __crypto_ablkcipher_cast(
-		crypto_alloc_base(alg_name, type, mask));
+static inline u32 crypto_skcipher_mask(u32 mask)
+{
+	mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
+	mask |= CRYPTO_ALG_TYPE_BLKCIPHER_MASK;
+	return mask;
 }
 
+struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name,
+						  u32 type, u32 mask);
+
 static inline struct crypto_tfm *crypto_ablkcipher_tfm(
	struct crypto_ablkcipher *tfm)
 {
@@ -550,11 +586,8 @@ static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm)
 static inline int crypto_has_ablkcipher(const char *alg_name, u32 type,
					 u32 mask)
 {
-	type &= ~CRYPTO_ALG_TYPE_MASK;
-	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
-	mask |= CRYPTO_ALG_TYPE_MASK;
-
-	return crypto_has_alg(alg_name, type, mask);
+	return crypto_has_alg(alg_name, crypto_skcipher_type(type),
+			      crypto_skcipher_mask(mask));
 }
 
 static inline struct ablkcipher_tfm *crypto_ablkcipher_crt(
@@ -601,7 +634,9 @@ static inline void crypto_ablkcipher_clear_flags(struct crypto_ablkcipher *tfm,
 static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
					    const u8 *key, unsigned int keylen)
 {
-	return crypto_ablkcipher_crt(tfm)->setkey(tfm, key, keylen);
+	struct ablkcipher_tfm *crt = crypto_ablkcipher_crt(tfm);
+
+	return crt->setkey(crt->base, key, keylen);
 }
 
 static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm(
@@ -633,7 +668,7 @@ static inline unsigned int crypto_ablkcipher_reqsize(
 static inline void ablkcipher_request_set_tfm(
	struct ablkcipher_request *req, struct crypto_ablkcipher *tfm)
 {
-	req->base.tfm = crypto_ablkcipher_tfm(tfm);
+	req->base.tfm = crypto_ablkcipher_tfm(crypto_ablkcipher_crt(tfm)->base);
 }
 
 static inline struct ablkcipher_request *ablkcipher_request_cast(
@@ -686,15 +721,7 @@ static inline struct crypto_aead *__crypto_aead_cast(struct crypto_tfm *tfm)
	return (struct crypto_aead *)tfm;
 }
 
-static inline struct crypto_aead *crypto_alloc_aead(const char *alg_name,
-						    u32 type, u32 mask)
-{
-	type &= ~CRYPTO_ALG_TYPE_MASK;
-	type |= CRYPTO_ALG_TYPE_AEAD;
-	mask |= CRYPTO_ALG_TYPE_MASK;
-
-	return __crypto_aead_cast(crypto_alloc_base(alg_name, type, mask));
-}
+struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask);
 
 static inline struct crypto_tfm *crypto_aead_tfm(struct crypto_aead *tfm)
 {
@@ -749,9 +776,13 @@ static inline void crypto_aead_clear_flags(struct crypto_aead *tfm, u32 flags)
 static inline int crypto_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				      unsigned int keylen)
 {
-	return crypto_aead_crt(tfm)->setkey(tfm, key, keylen);
+	struct aead_tfm *crt = crypto_aead_crt(tfm);
+
+	return crt->setkey(crt->base, key, keylen);
 }
 
+int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize);
+
 static inline struct crypto_aead *crypto_aead_reqtfm(struct aead_request *req)
 {
	return __crypto_aead_cast(req->base.tfm);
@@ -775,7 +806,7 @@ static inline unsigned int crypto_aead_reqsize(struct crypto_aead *tfm)
 static inline void aead_request_set_tfm(struct aead_request *req,
					 struct crypto_aead *tfm)
 {
-	req->base.tfm = crypto_aead_tfm(tfm);
+	req->base.tfm = crypto_aead_tfm(crypto_aead_crt(tfm)->base);
 }
 
 static inline struct aead_request *aead_request_alloc(struct crypto_aead *tfm,
@@ -841,9 +872,9 @@ static inline struct crypto_blkcipher *crypto_blkcipher_cast(
 static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
	const char *alg_name, u32 type, u32 mask)
 {
-	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
+	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
-	mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC;
+	mask |= CRYPTO_ALG_TYPE_MASK;
 
	return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
 }
@@ -861,9 +892,9 @@ static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
 static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask)
 {
-	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
+	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
-	mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC;
+	mask |= CRYPTO_ALG_TYPE_MASK;
 
	return crypto_has_alg(alg_name, type, mask);
 }
@@ -1081,6 +1112,7 @@ static inline struct crypto_hash *crypto_alloc_hash(const char *alg_name,
						     u32 type, u32 mask)
 {
	type &= ~CRYPTO_ALG_TYPE_MASK;
+	mask &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_HASH;
	mask |= CRYPTO_ALG_TYPE_HASH_MASK;
 
@@ -1100,6 +1132,7 @@ static inline void crypto_free_hash(struct crypto_hash *tfm)
 static inline int crypto_has_hash(const char *alg_name, u32 type, u32 mask)
 {
	type &= ~CRYPTO_ALG_TYPE_MASK;
+	mask &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_HASH;
	mask |= CRYPTO_ALG_TYPE_HASH_MASK;
 
diff --git a/include/linux/hw_random.h b/include/linux/hw_random.h
index 21ea7610e177..85d11916e9ea 100644
--- a/include/linux/hw_random.h
+++ b/include/linux/hw_random.h
@@ -33,7 +33,7 @@ struct hwrng {
	const char *name;
	int (*init)(struct hwrng *rng);
	void (*cleanup)(struct hwrng *rng);
-	int (*data_present)(struct hwrng *rng);
+	int (*data_present)(struct hwrng *rng, int wait);
	int (*data_read)(struct hwrng *rng, u32 *data);
	unsigned long priv;
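
The headers above introduce a "givcrypt" (IV-generating) request interface for symmetric ciphers. The sketch below is a rough illustration of how a caller might drive it, using only functions declared in include/crypto/skcipher.h and include/linux/crypto.h from this merge; the algorithm name "cbc(aes)", the 16-byte IV buffers, the function name example_givencrypt, and the error handling are illustrative assumptions, not part of the commit.

```c
/* Hypothetical caller of the new geniv interface; "cbc(aes)" and the
 * buffer sizes are assumptions for illustration only. */
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

static int example_givencrypt(const u8 *key, unsigned int keylen,
			      void *data, unsigned int len, u64 seq)
{
	struct crypto_ablkcipher *tfm;
	struct skcipher_givcrypt_request *req;
	struct scatterlist sg;
	u8 giv[16];	/* receives the generated IV (AES block size assumed) */
	u8 iv[16];	/* scratch IV for the underlying request */
	int err;

	/* The crypto core wraps the cipher with its default IV generator. */
	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_ablkcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_givcrypt_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* Encrypt len bytes in place; len is assumed block-aligned. */
	sg_init_one(&sg, data, len);
	skcipher_givcrypt_set_callback(req, 0, NULL, NULL);
	skcipher_givcrypt_set_crypt(req, &sg, &sg, len, iv);
	skcipher_givcrypt_set_giv(req, giv, seq);

	/* Dispatches to the geniv template's ->givencrypt(). */
	err = crypto_skcipher_givencrypt(req);

	skcipher_givcrypt_free(req);
out_free_tfm:
	crypto_free_ablkcipher(tfm);
	return err;
}
```

Note that asynchronous implementations may return -EINPROGRESS from crypto_skcipher_givencrypt(); a real caller would register a completion callback via skcipher_givcrypt_set_callback() and wait for it rather than passing NULL.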