mirror of
https://github.com/Fishwaldo/Star64_linux.git
synced 2025-04-02 12:34:06 +00:00
crypto: keywrap - simplify code
The code is simplified by using two __be64 values for the operation instead of using two arrays of u8. This allows to get rid of the memory alignment code. In addition, the crypto_xor can be replaced with a native XOR operation. Finally, the definition of the variables is re-arranged such that the data structures come before simple variables to potentially reduce memory space. Signed-off-by: Stephan Mueller <smueller@chronox.de> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
This commit is contained in:
parent
5b3f3a8bed
commit
9e49451d7a
1 changed file with 26 additions and 58 deletions
|
@ -93,18 +93,10 @@ struct crypto_kw_ctx {
|
||||||
|
|
||||||
struct crypto_kw_block {
|
struct crypto_kw_block {
|
||||||
#define SEMIBSIZE 8
|
#define SEMIBSIZE 8
|
||||||
u8 A[SEMIBSIZE];
|
__be64 A;
|
||||||
u8 R[SEMIBSIZE];
|
__be64 R;
|
||||||
};
|
};
|
||||||
|
|
||||||
/* convert 64 bit integer into its string representation */
|
|
||||||
static inline void crypto_kw_cpu_to_be64(u64 val, u8 *buf)
|
|
||||||
{
|
|
||||||
__be64 *a = (__be64 *)buf;
|
|
||||||
|
|
||||||
*a = cpu_to_be64(val);
|
|
||||||
}
|
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Fast forward the SGL to the "end" length minus SEMIBSIZE.
|
* Fast forward the SGL to the "end" length minus SEMIBSIZE.
|
||||||
* The start in the SGL defined by the fast-forward is returned with
|
* The start in the SGL defined by the fast-forward is returned with
|
||||||
|
@ -139,17 +131,10 @@ static int crypto_kw_decrypt(struct blkcipher_desc *desc,
|
||||||
struct crypto_blkcipher *tfm = desc->tfm;
|
struct crypto_blkcipher *tfm = desc->tfm;
|
||||||
struct crypto_kw_ctx *ctx = crypto_blkcipher_ctx(tfm);
|
struct crypto_kw_ctx *ctx = crypto_blkcipher_ctx(tfm);
|
||||||
struct crypto_cipher *child = ctx->child;
|
struct crypto_cipher *child = ctx->child;
|
||||||
|
struct crypto_kw_block block;
|
||||||
unsigned long alignmask = max_t(unsigned long, SEMIBSIZE,
|
|
||||||
crypto_cipher_alignmask(child));
|
|
||||||
unsigned int i;
|
|
||||||
|
|
||||||
u8 blockbuf[sizeof(struct crypto_kw_block) + alignmask];
|
|
||||||
struct crypto_kw_block *block = (struct crypto_kw_block *)
|
|
||||||
PTR_ALIGN(blockbuf + 0, alignmask + 1);
|
|
||||||
|
|
||||||
u64 t = 6 * ((nbytes) >> 3);
|
|
||||||
struct scatterlist *lsrc, *ldst;
|
struct scatterlist *lsrc, *ldst;
|
||||||
|
u64 t = 6 * ((nbytes) >> 3);
|
||||||
|
unsigned int i;
|
||||||
int ret = 0;
|
int ret = 0;
|
||||||
|
|
||||||
/*
|
/*
|
||||||
|
@ -160,7 +145,7 @@ static int crypto_kw_decrypt(struct blkcipher_desc *desc,
|
||||||
return -EINVAL;
|
return -EINVAL;
|
||||||
|
|
||||||
/* Place the IV into block A */
|
/* Place the IV into block A */
|
||||||
memcpy(block->A, desc->info, SEMIBSIZE);
|
memcpy(&block.A, desc->info, SEMIBSIZE);
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* src scatterlist is read-only. dst scatterlist is r/w. During the
|
* src scatterlist is read-only. dst scatterlist is r/w. During the
|
||||||
|
@ -171,32 +156,27 @@ static int crypto_kw_decrypt(struct blkcipher_desc *desc,
|
||||||
ldst = dst;
|
ldst = dst;
|
||||||
|
|
||||||
for (i = 0; i < 6; i++) {
|
for (i = 0; i < 6; i++) {
|
||||||
u8 tbe_buffer[SEMIBSIZE + alignmask];
|
|
||||||
/* alignment for the crypto_xor and the _to_be64 operation */
|
|
||||||
u8 *tbe = PTR_ALIGN(tbe_buffer + 0, alignmask + 1);
|
|
||||||
unsigned int tmp_nbytes = nbytes;
|
|
||||||
struct scatter_walk src_walk, dst_walk;
|
struct scatter_walk src_walk, dst_walk;
|
||||||
|
unsigned int tmp_nbytes = nbytes;
|
||||||
|
|
||||||
while (tmp_nbytes) {
|
while (tmp_nbytes) {
|
||||||
/* move pointer by tmp_nbytes in the SGL */
|
/* move pointer by tmp_nbytes in the SGL */
|
||||||
crypto_kw_scatterlist_ff(&src_walk, lsrc, tmp_nbytes);
|
crypto_kw_scatterlist_ff(&src_walk, lsrc, tmp_nbytes);
|
||||||
/* get the source block */
|
/* get the source block */
|
||||||
scatterwalk_copychunks(block->R, &src_walk, SEMIBSIZE,
|
scatterwalk_copychunks(&block.R, &src_walk, SEMIBSIZE,
|
||||||
false);
|
false);
|
||||||
|
|
||||||
/* perform KW operation: get counter as byte string */
|
|
||||||
crypto_kw_cpu_to_be64(t, tbe);
|
|
||||||
/* perform KW operation: modify IV with counter */
|
/* perform KW operation: modify IV with counter */
|
||||||
crypto_xor(block->A, tbe, SEMIBSIZE);
|
block.A ^= cpu_to_be64(t);
|
||||||
t--;
|
t--;
|
||||||
/* perform KW operation: decrypt block */
|
/* perform KW operation: decrypt block */
|
||||||
crypto_cipher_decrypt_one(child, (u8*)block,
|
crypto_cipher_decrypt_one(child, (u8*)&block,
|
||||||
(u8*)block);
|
(u8*)&block);
|
||||||
|
|
||||||
/* move pointer by tmp_nbytes in the SGL */
|
/* move pointer by tmp_nbytes in the SGL */
|
||||||
crypto_kw_scatterlist_ff(&dst_walk, ldst, tmp_nbytes);
|
crypto_kw_scatterlist_ff(&dst_walk, ldst, tmp_nbytes);
|
||||||
/* Copy block->R into place */
|
/* Copy block->R into place */
|
||||||
scatterwalk_copychunks(block->R, &dst_walk, SEMIBSIZE,
|
scatterwalk_copychunks(&block.R, &dst_walk, SEMIBSIZE,
|
||||||
true);
|
true);
|
||||||
|
|
||||||
tmp_nbytes -= SEMIBSIZE;
|
tmp_nbytes -= SEMIBSIZE;
|
||||||
|
@ -208,11 +188,10 @@ static int crypto_kw_decrypt(struct blkcipher_desc *desc,
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Perform authentication check */
|
/* Perform authentication check */
|
||||||
if (crypto_memneq("\xA6\xA6\xA6\xA6\xA6\xA6\xA6\xA6", block->A,
|
if (block.A != cpu_to_be64(0xa6a6a6a6a6a6a6a6))
|
||||||
SEMIBSIZE))
|
|
||||||
ret = -EBADMSG;
|
ret = -EBADMSG;
|
||||||
|
|
||||||
memzero_explicit(block, sizeof(struct crypto_kw_block));
|
memzero_explicit(&block, sizeof(struct crypto_kw_block));
|
||||||
|
|
||||||
return ret;
|
return ret;
|
||||||
}
|
}
|
||||||
|
@ -224,17 +203,10 @@ static int crypto_kw_encrypt(struct blkcipher_desc *desc,
|
||||||
struct crypto_blkcipher *tfm = desc->tfm;
|
struct crypto_blkcipher *tfm = desc->tfm;
|
||||||
struct crypto_kw_ctx *ctx = crypto_blkcipher_ctx(tfm);
|
struct crypto_kw_ctx *ctx = crypto_blkcipher_ctx(tfm);
|
||||||
struct crypto_cipher *child = ctx->child;
|
struct crypto_cipher *child = ctx->child;
|
||||||
|
struct crypto_kw_block block;
|
||||||
unsigned long alignmask = max_t(unsigned long, SEMIBSIZE,
|
|
||||||
crypto_cipher_alignmask(child));
|
|
||||||
unsigned int i;
|
|
||||||
|
|
||||||
u8 blockbuf[sizeof(struct crypto_kw_block) + alignmask];
|
|
||||||
struct crypto_kw_block *block = (struct crypto_kw_block *)
|
|
||||||
PTR_ALIGN(blockbuf + 0, alignmask + 1);
|
|
||||||
|
|
||||||
u64 t = 1;
|
|
||||||
struct scatterlist *lsrc, *ldst;
|
struct scatterlist *lsrc, *ldst;
|
||||||
|
u64 t = 1;
|
||||||
|
unsigned int i;
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Require at least 2 semiblocks (note, the 3rd semiblock that is
|
* Require at least 2 semiblocks (note, the 3rd semiblock that is
|
||||||
|
@ -249,7 +221,7 @@ static int crypto_kw_encrypt(struct blkcipher_desc *desc,
|
||||||
* Place the predefined IV into block A -- for encrypt, the caller
|
* Place the predefined IV into block A -- for encrypt, the caller
|
||||||
* does not need to provide an IV, but he needs to fetch the final IV.
|
* does not need to provide an IV, but he needs to fetch the final IV.
|
||||||
*/
|
*/
|
||||||
memcpy(block->A, "\xA6\xA6\xA6\xA6\xA6\xA6\xA6\xA6", SEMIBSIZE);
|
block.A = cpu_to_be64(0xa6a6a6a6a6a6a6a6);
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* src scatterlist is read-only. dst scatterlist is r/w. During the
|
* src scatterlist is read-only. dst scatterlist is r/w. During the
|
||||||
|
@ -260,30 +232,26 @@ static int crypto_kw_encrypt(struct blkcipher_desc *desc,
|
||||||
ldst = dst;
|
ldst = dst;
|
||||||
|
|
||||||
for (i = 0; i < 6; i++) {
|
for (i = 0; i < 6; i++) {
|
||||||
u8 tbe_buffer[SEMIBSIZE + alignmask];
|
|
||||||
u8 *tbe = PTR_ALIGN(tbe_buffer + 0, alignmask + 1);
|
|
||||||
unsigned int tmp_nbytes = nbytes;
|
|
||||||
struct scatter_walk src_walk, dst_walk;
|
struct scatter_walk src_walk, dst_walk;
|
||||||
|
unsigned int tmp_nbytes = nbytes;
|
||||||
|
|
||||||
scatterwalk_start(&src_walk, lsrc);
|
scatterwalk_start(&src_walk, lsrc);
|
||||||
scatterwalk_start(&dst_walk, ldst);
|
scatterwalk_start(&dst_walk, ldst);
|
||||||
|
|
||||||
while (tmp_nbytes) {
|
while (tmp_nbytes) {
|
||||||
/* get the source block */
|
/* get the source block */
|
||||||
scatterwalk_copychunks(block->R, &src_walk, SEMIBSIZE,
|
scatterwalk_copychunks(&block.R, &src_walk, SEMIBSIZE,
|
||||||
false);
|
false);
|
||||||
|
|
||||||
/* perform KW operation: encrypt block */
|
/* perform KW operation: encrypt block */
|
||||||
crypto_cipher_encrypt_one(child, (u8 *)block,
|
crypto_cipher_encrypt_one(child, (u8 *)&block,
|
||||||
(u8 *)block);
|
(u8 *)&block);
|
||||||
/* perform KW operation: get counter as byte string */
|
|
||||||
crypto_kw_cpu_to_be64(t, tbe);
|
|
||||||
/* perform KW operation: modify IV with counter */
|
/* perform KW operation: modify IV with counter */
|
||||||
crypto_xor(block->A, tbe, SEMIBSIZE);
|
block.A ^= cpu_to_be64(t);
|
||||||
t++;
|
t++;
|
||||||
|
|
||||||
/* Copy block->R into place */
|
/* Copy block->R into place */
|
||||||
scatterwalk_copychunks(block->R, &dst_walk, SEMIBSIZE,
|
scatterwalk_copychunks(&block.R, &dst_walk, SEMIBSIZE,
|
||||||
true);
|
true);
|
||||||
|
|
||||||
tmp_nbytes -= SEMIBSIZE;
|
tmp_nbytes -= SEMIBSIZE;
|
||||||
|
@ -295,9 +263,9 @@ static int crypto_kw_encrypt(struct blkcipher_desc *desc,
|
||||||
}
|
}
|
||||||
|
|
||||||
/* establish the IV for the caller to pick up */
|
/* establish the IV for the caller to pick up */
|
||||||
memcpy(desc->info, block->A, SEMIBSIZE);
|
memcpy(desc->info, &block.A, SEMIBSIZE);
|
||||||
|
|
||||||
memzero_explicit(block, sizeof(struct crypto_kw_block));
|
memzero_explicit(&block, sizeof(struct crypto_kw_block));
|
||||||
|
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
Loading…
Add table
Reference in a new issue