crypto: arm/ghash - provide a synchronous version
GHASH is used by the GCM mode, which is often used in contexts where
only synchronous ciphers are permitted. So provide a synchronous version
of GHASH based on the existing code. This requires a non-SIMD fallback
to deal with invocations occurring from a context where SIMD
instructions may not be used.

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent e5f050402f
commit 0a5dff9882

1 changed file with 52 additions and 26 deletions
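For orientation, the non-SIMD fallback introduced below computes the standard GHASH recurrence of NIST SP 800-38D over 16-byte blocks $X_1, \dots, X_n$ with hash key $H$, all multiplications in $\mathrm{GF}(2^{128})$:

$$Y_0 = 0, \qquad Y_i = (Y_{i-1} \oplus X_i) \cdot H, \qquad \mathrm{GHASH}_H(X) = Y_n$$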
--- a/arch/arm/crypto/ghash-ce-glue.c
+++ b/arch/arm/crypto/ghash-ce-glue.c
@@ -9,6 +9,7 @@
 #include <asm/neon.h>
 #include <asm/simd.h>
 #include <asm/unaligned.h>
+#include <crypto/b128ops.h>
 #include <crypto/cryptd.h>
 #include <crypto/internal/hash.h>
 #include <crypto/internal/simd.h>
@@ -30,6 +31,8 @@ struct ghash_key {
 	u64	h2[2];
 	u64	h3[2];
 	u64	h4[2];
+
+	be128	k;
 };
 
 struct ghash_desc_ctx {
@@ -62,6 +65,36 @@ static int ghash_init(struct shash_desc *desc)
 	return 0;
 }
 
+static void ghash_do_update(int blocks, u64 dg[], const char *src,
+			    struct ghash_key *key, const char *head)
+{
+	if (likely(crypto_simd_usable())) {
+		kernel_neon_begin();
+		pmull_ghash_update(blocks, dg, src, key, head);
+		kernel_neon_end();
+	} else {
+		be128 dst = { cpu_to_be64(dg[1]), cpu_to_be64(dg[0]) };
+
+		do {
+			const u8 *in = src;
+
+			if (head) {
+				in = head;
+				blocks++;
+				head = NULL;
+			} else {
+				src += GHASH_BLOCK_SIZE;
+			}
+
+			crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);
+			gf128mul_lle(&dst, &key->k);
+		} while (--blocks);
+
+		dg[0] = be64_to_cpu(dst.b);
+		dg[1] = be64_to_cpu(dst.a);
+	}
+}
+
 static int ghash_update(struct shash_desc *desc, const u8 *src,
 			unsigned int len)
 {
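A standalone userspace sketch of what the scalar branch above computes, assuming only the C standard headers. The bit-serial multiply is the textbook SP 800-38D algorithm standing in for the kernel's gf128mul_lle(), and all names here (u128be, gf128_mul, ghash_block) are illustrative, not from the patch:

#include <stdint.h>

typedef struct { uint64_t hi, lo; } u128be;	/* 128-bit value, big-endian halves */

static u128be load_be(const uint8_t p[16])
{
	u128be v = { 0, 0 };

	for (int i = 0; i < 8; i++) {
		v.hi = (v.hi << 8) | p[i];
		v.lo = (v.lo << 8) | p[i + 8];
	}
	return v;
}

/* GF(2^128) multiply with GCM's conventions; 0xe1 << 120 is the
 * reduction polynomial x^128 + x^7 + x^2 + x + 1 in GCM bit order. */
static u128be gf128_mul(u128be x, u128be h)
{
	u128be z = { 0, 0 };

	for (int i = 0; i < 128; i++) {
		/* bit i of x, MSB first */
		uint64_t bit = (i < 64) ? (x.hi >> (63 - i)) & 1
					: (x.lo >> (127 - i)) & 1;
		if (bit) {
			z.hi ^= h.hi;
			z.lo ^= h.lo;
		}
		/* h = h * x mod the field polynomial */
		uint64_t carry = h.lo & 1;

		h.lo = (h.lo >> 1) | (h.hi << 63);
		h.hi >>= 1;
		if (carry)
			h.hi ^= 0xe100000000000000ULL;
	}
	return z;
}

/* One step of the do/while loop above: dg = (dg XOR block) * H */
static void ghash_block(u128be *dg, const uint8_t block[16], u128be h)
{
	u128be x = load_be(block);

	dg->hi ^= x.hi;
	dg->lo ^= x.lo;
	*dg = gf128_mul(*dg, h);
}

Each iteration mirrors one pass of the fallback loop: XOR the next block (or the buffered head block) into the running digest, then multiply by the hash key.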
@@ -85,10 +118,8 @@ static int ghash_update(struct shash_desc *desc, const u8 *src,
 		blocks = len / GHASH_BLOCK_SIZE;
 		len %= GHASH_BLOCK_SIZE;
 
-		kernel_neon_begin();
-		pmull_ghash_update(blocks, ctx->digest, src, key,
-				   partial ? ctx->buf : NULL);
-		kernel_neon_end();
+		ghash_do_update(blocks, ctx->digest, src, key,
+				partial ? ctx->buf : NULL);
 		src += blocks * GHASH_BLOCK_SIZE;
 		partial = 0;
 	}
@@ -106,9 +137,7 @@ static int ghash_final(struct shash_desc *desc, u8 *dst)
 		struct ghash_key *key = crypto_shash_ctx(desc->tfm);
 
 		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
-		kernel_neon_begin();
-		pmull_ghash_update(1, ctx->digest, ctx->buf, key, NULL);
-		kernel_neon_end();
+		ghash_do_update(1, ctx->digest, ctx->buf, key, NULL);
 	}
 	put_unaligned_be64(ctx->digest[1], dst);
 	put_unaligned_be64(ctx->digest[0], dst + 8);
@@ -132,24 +161,25 @@ static int ghash_setkey(struct crypto_shash *tfm,
 			const u8 *inkey, unsigned int keylen)
 {
 	struct ghash_key *key = crypto_shash_ctx(tfm);
-	be128 h, k;
+	be128 h;
 
 	if (keylen != GHASH_BLOCK_SIZE) {
 		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
 		return -EINVAL;
 	}
 
-	memcpy(&k, inkey, GHASH_BLOCK_SIZE);
-	ghash_reflect(key->h, &k);
+	/* needed for the fallback */
+	memcpy(&key->k, inkey, GHASH_BLOCK_SIZE);
+	ghash_reflect(key->h, &key->k);
 
-	h = k;
-	gf128mul_lle(&h, &k);
+	h = key->k;
+	gf128mul_lle(&h, &key->k);
 	ghash_reflect(key->h2, &h);
 
-	gf128mul_lle(&h, &k);
+	gf128mul_lle(&h, &key->k);
 	ghash_reflect(key->h3, &h);
 
-	gf128mul_lle(&h, &k);
+	gf128mul_lle(&h, &key->k);
 	ghash_reflect(key->h4, &h);
 
 	return 0;
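A note on the key handling above: the PMULL code consumes only the pre-reflected powers h, h2, h3 and h4, while the scalar path multiplies by the key in its original byte order via gf128mul_lle(). The raw key therefore has to be kept in struct ghash_key as well, which is what the "needed for the fallback" comment refers to.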
@@ -162,15 +192,13 @@ static struct shash_alg ghash_alg = {
 	.final			= ghash_final,
 	.setkey			= ghash_setkey,
 	.descsize		= sizeof(struct ghash_desc_ctx),
-	.base			= {
-		.cra_name	= "__ghash",
-		.cra_driver_name = "__driver-ghash-ce",
-		.cra_priority	= 0,
-		.cra_flags	= CRYPTO_ALG_INTERNAL,
-		.cra_blocksize	= GHASH_BLOCK_SIZE,
-		.cra_ctxsize	= sizeof(struct ghash_key),
-		.cra_module	= THIS_MODULE,
-	},
+
+	.base.cra_name		= "ghash",
+	.base.cra_driver_name	= "ghash-ce-sync",
+	.base.cra_priority	= 300 - 1,
+	.base.cra_blocksize	= GHASH_BLOCK_SIZE,
+	.base.cra_ctxsize	= sizeof(struct ghash_key),
+	.base.cra_module	= THIS_MODULE,
 };
 
 static int ghash_async_init(struct ahash_request *req)
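The cra_priority of 300 - 1 is presumably written that way to slot the synchronous algorithm directly below the cryptd-backed async wrapper, so generic lookups keep preferring the async version while synchronous-only users can still resolve "ghash" to an accelerated implementation.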
@@ -285,9 +313,7 @@ static int ghash_async_init_tfm(struct crypto_tfm *tfm)
 	struct cryptd_ahash *cryptd_tfm;
 	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	cryptd_tfm = cryptd_alloc_ahash("__driver-ghash-ce",
-					CRYPTO_ALG_INTERNAL,
-					CRYPTO_ALG_INTERNAL);
+	cryptd_tfm = cryptd_alloc_ahash("ghash-ce-sync", 0, 0);
 	if (IS_ERR(cryptd_tfm))
 		return PTR_ERR(cryptd_tfm);
 	ctx->cryptd_tfm = cryptd_tfm;
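A minimal usage sketch, assuming a kernel context; the helper name ghash_digest_once is hypothetical, not from the patch. With "ghash-ce-sync" registered as a plain shash, a caller restricted to synchronous operation can reach it through the regular shash API:

#include <crypto/hash.h>
#include <linux/err.h>

static int ghash_digest_once(const u8 *key, const u8 *data,
			     unsigned int len, u8 out[16])
{
	struct crypto_shash *tfm;
	int err;

	/* resolves to a synchronous "ghash" implementation */
	tfm = crypto_alloc_shash("ghash", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_shash_setkey(tfm, key, 16);	/* GHASH_BLOCK_SIZE */
	if (!err) {
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		err = crypto_shash_digest(desc, data, len, out);
	}
	crypto_free_shash(tfm);
	return err;
}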