[PATCH v3 3/8] crypto: hctr2 - Add HCTR2 support

Eric Biggers ebiggers at kernel.org
Tue Mar 22 00:00:45 PDT 2022


On Tue, Mar 15, 2022 at 11:00:30PM +0000, Nathan Huckleberry wrote:
> +struct hctr2_tfm_ctx {
> +	struct crypto_cipher *blockcipher;
> +	struct crypto_skcipher *xctr;
> +	struct crypto_shash *polyval;
> +	u8 L[BLOCKCIPHER_BLOCK_SIZE];
> +};

How about adding a comment at the end of the struct above that says that the
struct is followed by the two exported_length_digests?  (Or hashed_tweaklen,
which is the name used in the helper functions I suggest below?)

> +
> +struct hctr2_request_ctx {
> +	u8 first_block[BLOCKCIPHER_BLOCK_SIZE];
> +	u8 xctr_iv[BLOCKCIPHER_BLOCK_SIZE];
> +	struct scatterlist *bulk_part_dst;
> +	struct scatterlist *bulk_part_src;
> +	struct scatterlist sg_src[2];
> +	struct scatterlist sg_dst[2];
> +	/* Sub-requests, must be last */
> +	union {
> +		struct shash_desc hash_desc;
> +		struct skcipher_request xctr_req;
> +	} u;
> +};

Likewise above for the hashed tweak.

Also how about adding inline functions or macros that return these new fields,
so that the arithmetic to find them doesn't have to be duplicated in the code?
The 'exported_length_digests' local array variables are a bit weird.  Maybe
just use some helper functions directly to get at the fields?

How about:

/*
 * Return a pointer to one of the two pre-hashed tweak-length states that are
 * stored directly after the tfm context.  The 'odd' state (for messages whose
 * length is not a multiple of the block length) follows the even one.
 */
static inline u8 *hctr2_hashed_tweaklen(const struct hctr2_tfm_ctx *tctx,
                                        bool odd)
{
        u8 *base = (u8 *)tctx + sizeof(*tctx);

        return odd ? base + crypto_shash_statesize(tctx->polyval) : base;
}

static inline u8 *hctr2_hashed_tweak(const struct hctr2_tfm_ctx *tctx,
                                     struct hctr2_request_ctx *rctx)
{
        return (u8 *)rctx + tctx->hashed_tweak_offset;
}

> +static int hctr2_setkey(struct crypto_skcipher *tfm, const u8 *key,
> +			unsigned int keylen)
> +{
> +	struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
> +	u8 hbar[BLOCKCIPHER_BLOCK_SIZE];
> +	__le64 tweak_length_block[2];
> +	void *exported_length_digests[2];
> +	SHASH_DESC_ON_STACK(shash, tfm->polyval);
> +	int err;
> +
> +	exported_length_digests[0] = (u8 *)tctx + sizeof(*tctx);
> +	exported_length_digests[1] = (u8 *)tctx + sizeof(*tctx) +
> +				     crypto_shash_descsize(tctx->polyval);

The size needed by crypto_shash_export() is crypto_shash_statesize(), not
crypto_shash_descsize().  They happen to be the same with all polyval
implementations you've proposed, but it's not guaranteed for shash algorithms in
general.

> +	crypto_cipher_clear_flags(tctx->blockcipher, CRYPTO_TFM_REQ_MASK);
> +	crypto_cipher_set_flags(tctx->blockcipher,
> +				crypto_skcipher_get_flags(tfm) &
> +				CRYPTO_TFM_REQ_MASK);
> +	err = crypto_cipher_setkey(tctx->blockcipher, key, keylen);
> +	if (err)
> +		return err;
> +
> +	crypto_skcipher_clear_flags(tctx->xctr, CRYPTO_TFM_REQ_MASK);
> +	crypto_skcipher_set_flags(tctx->xctr,
> +				  crypto_skcipher_get_flags(tfm) &
> +				  CRYPTO_TFM_REQ_MASK);
> +	err = crypto_skcipher_setkey(tctx->xctr, key, keylen);
> +	if (err)
> +		return err;
> +
> +	memset(tctx->L, 0, sizeof(tctx->L));
> +	memset(hbar, 0, sizeof(hbar));
> +	tctx->L[0] = 0x01;
> +	crypto_cipher_encrypt_one(tctx->blockcipher, tctx->L, tctx->L);
> +	crypto_cipher_encrypt_one(tctx->blockcipher, hbar, hbar);
> +
> +	crypto_shash_clear_flags(tctx->polyval, CRYPTO_TFM_REQ_MASK);
> +	crypto_shash_set_flags(tctx->polyval, crypto_skcipher_get_flags(tfm) &
> +			       CRYPTO_TFM_REQ_MASK);
> +	err = crypto_shash_setkey(tctx->polyval, hbar, BLOCKCIPHER_BLOCK_SIZE);
> +	if (err)
> +		return err;
> +	memzero_explicit(hbar, sizeof(hbar));
> +
> +	shash->tfm = tctx->polyval;
> +	memset(tweak_length_block, 0, sizeof(tweak_length_block));
> +
> +	tweak_length_block[0] = cpu_to_le64(TWEAK_SIZE * 8 * 2 + 2);
> +	err = crypto_shash_init(shash);
> +	if (err)
> +		return err;
> +	err = crypto_shash_update(shash, (u8 *)tweak_length_block,
> +				  POLYVAL_BLOCK_SIZE);
> +	if (err)
> +		return err;
> +	err = crypto_shash_export(shash, exported_length_digests[0]);
> +	if (err)
> +		return err;
> +
> +	tweak_length_block[0] = cpu_to_le64(TWEAK_SIZE * 8 * 2 + 3);
> +	err = crypto_shash_init(shash);
> +	if (err)
> +		return err;
> +	err = crypto_shash_update(shash, (u8 *)tweak_length_block,
> +				  POLYVAL_BLOCK_SIZE);
> +	if (err)
> +		return err;
> +	return crypto_shash_export(shash, exported_length_digests[1]);
> +}

hctr2_setkey() is getting pretty long.  How about splitting the tweak length
pre-hashing into a helper function?

Also, a comment that explains why the tweak length is being pre-hashed, and why
it *can* be pre-hashed, would be helpful.  Note that it is only possible because
this implementation only supports one tweak length.

- Eric



More information about the linux-arm-kernel mailing list