crypto: crypto4xx - add aes-ctr support
author     Christian Lamparter <chunkeey@gmail.com>
           Thu, 19 Apr 2018 16:41:54 +0000 (18:41 +0200)
committer  Herbert Xu <herbert@gondor.apana.org.au>
           Sat, 28 Apr 2018 08:09:43 +0000 (16:09 +0800)
This patch adds support for the aes-ctr skcipher.

name         : ctr(aes)
driver       : ctr-aes-ppc4xx
module       : crypto4xx
priority     : 300
refcnt       : 1
selftest     : passed
internal     : no
type         : skcipher
async        : yes
blocksize    : 16
min keysize  : 16
max keysize  : 32
ivsize       : 16
chunksize    : 16
walksize     : 16
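
The listing above is the /proc/crypto entry for the new transform. Below is
a minimal usage sketch, not part of this patch: the helper name
example_ctr_aes and its error handling are made up for illustration. Kernel
code reaches the driver through the generic skcipher API, and allocating
"ctr(aes)" selects ctr-aes-ppc4xx whenever it is the highest-priority
implementation:

/*
 * Illustrative sketch only, not part of this patch.  One-shot CTR
 * encryption through the generic skcipher API; the driver is async,
 * so completion is awaited with the crypto_wait_req() helper.
 */
#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static int example_ctr_aes(const u8 *key, unsigned int keylen,
                           u8 iv[AES_BLOCK_SIZE],
                           struct scatterlist *src, struct scatterlist *dst,
                           unsigned int len)
{
        struct crypto_skcipher *tfm;
        struct skcipher_request *req;
        DECLARE_CRYPTO_WAIT(wait);
        int ret;

        /* Resolves to ctr-aes-ppc4xx when it has the highest priority. */
        tfm = crypto_alloc_skcipher("ctr(aes)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        ret = crypto_skcipher_setkey(tfm, key, keylen);
        if (ret)
                goto out_free_tfm;

        req = skcipher_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                ret = -ENOMEM;
                goto out_free_tfm;
        }

        skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
                                      CRYPTO_TFM_REQ_MAY_SLEEP,
                                      crypto_req_done, &wait);
        skcipher_request_set_crypt(req, src, dst, len, iv);
        ret = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

        skcipher_request_free(req);
out_free_tfm:
        crypto_free_skcipher(tfm);
        return ret;
}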

The hardware uses only the last 32 bits of the IV as the counter, while
the kernel tests (aes_ctr_enc_tv_template[4], for example) expect the
whole IV to act as the counter. To make this work, the driver falls
back to software if the counter is going to overflow.
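
A concrete example of the condition, again only for illustration and not
taken from the patch: if the low 32 bits of the IV already hold 0xffffffff,
even a two-block request wraps the hardware's counter, so the request must
take the software path. The hypothetical helper below mirrors the check the
driver performs:

/* Illustration only: does a request wrap the 32-bit hardware counter? */
#include <crypto/aes.h>
#include <linux/kernel.h>
#include <asm/byteorder.h>

static bool ctr_needs_fallback(const u8 iv[AES_BLOCK_SIZE],
                               unsigned int cryptlen)
{
        /* The hardware's counter is the last 32 bits of the IV. */
        u32 counter = be32_to_cpup((const __be32 *)(iv + AES_BLOCK_SIZE - 4));
        u32 nblks = ALIGN(cryptlen, AES_BLOCK_SIZE) / AES_BLOCK_SIZE;

        /* counter == 0xffffffff, nblks == 2: the sum wraps to 1 < counter. */
        return counter + nblks < counter;
}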

The AEAD's crypto4xx_setup_fallback() function is renamed to
crypto4xx_aead_setup_fallback().

Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
drivers/crypto/Kconfig
drivers/crypto/amcc/crypto4xx_alg.c
drivers/crypto/amcc/crypto4xx_core.c
drivers/crypto/amcc/crypto4xx_core.h

diff --git a/drivers/crypto/Kconfig b/drivers/crypto/Kconfig
index 3dbc47528667b77cfa01ab5a4fb4c972d8b3ef38..1fa263adbcc6e741887888ddf707d98112246c05 100644
--- a/drivers/crypto/Kconfig
+++ b/drivers/crypto/Kconfig
@@ -302,6 +302,7 @@ config CRYPTO_DEV_PPC4XX
        select CRYPTO_AEAD
        select CRYPTO_AES
        select CRYPTO_CCM
+       select CRYPTO_CTR
        select CRYPTO_GCM
        select CRYPTO_BLKCIPHER
        help
diff --git a/drivers/crypto/amcc/crypto4xx_alg.c b/drivers/crypto/amcc/crypto4xx_alg.c
index 2dfeb71deca96674417d644e2d17830cd2d21308..8a352ddefd5231eebd3f809467fcecfcc486716a 100644
--- a/drivers/crypto/amcc/crypto4xx_alg.c
+++ b/drivers/crypto/amcc/crypto4xx_alg.c
@@ -240,6 +240,85 @@ int crypto4xx_rfc3686_decrypt(struct skcipher_request *req)
                                  ctx->sa_out, ctx->sa_len, 0);
 }
 
+static int
+crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
+{
+       struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
+       struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
+       size_t iv_len = crypto_skcipher_ivsize(cipher);
+       unsigned int counter = be32_to_cpup((__be32 *)(req->iv + iv_len - 4));
+       unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
+                       AES_BLOCK_SIZE;
+
+       /*
+        * The hardware uses only the last 32-bits as the counter while the
+        * kernel tests (aes_ctr_enc_tv_template[4] for example) expect that
+        * the whole IV is a counter.  So fallback if the counter is going to
+        * overflow.
+        */
+       if (counter + nblks < counter) {
+               struct skcipher_request *subreq = skcipher_request_ctx(req);
+               int ret;
+
+               skcipher_request_set_tfm(subreq, ctx->sw_cipher.cipher);
+               skcipher_request_set_callback(subreq, req->base.flags,
+                       NULL, NULL);
+               skcipher_request_set_crypt(subreq, req->src, req->dst,
+                       req->cryptlen, req->iv);
+               ret = encrypt ? crypto_skcipher_encrypt(subreq)
+                       : crypto_skcipher_decrypt(subreq);
+               skcipher_request_zero(subreq);
+               return ret;
+       }
+
+       return encrypt ? crypto4xx_encrypt_iv(req)
+                      : crypto4xx_decrypt_iv(req);
+}
+
+static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
+                                      struct crypto_skcipher *cipher,
+                                      const u8 *key,
+                                      unsigned int keylen)
+{
+       int rc;
+
+       crypto_skcipher_clear_flags(ctx->sw_cipher.cipher,
+                                   CRYPTO_TFM_REQ_MASK);
+       crypto_skcipher_set_flags(ctx->sw_cipher.cipher,
+               crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
+       rc = crypto_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
+       crypto_skcipher_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
+       crypto_skcipher_set_flags(cipher,
+               crypto_skcipher_get_flags(ctx->sw_cipher.cipher) &
+                       CRYPTO_TFM_RES_MASK);
+
+       return rc;
+}
+
+int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
+                            const u8 *key, unsigned int keylen)
+{
+       struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
+       int rc;
+
+       rc = crypto4xx_sk_setup_fallback(ctx, cipher, key, keylen);
+       if (rc)
+               return rc;
+
+       return crypto4xx_setkey_aes(cipher, key, keylen,
+               CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
+}
+
+int crypto4xx_encrypt_ctr(struct skcipher_request *req)
+{
+       return crypto4xx_ctr_crypt(req, true);
+}
+
+int crypto4xx_decrypt_ctr(struct skcipher_request *req)
+{
+       return crypto4xx_ctr_crypt(req, false);
+}
+
 static inline bool crypto4xx_aead_need_fallback(struct aead_request *req,
                                                bool is_ccm, bool decrypt)
 {
@@ -282,10 +361,10 @@ static int crypto4xx_aead_fallback(struct aead_request *req,
                            crypto_aead_encrypt(subreq);
 }
 
-static int crypto4xx_setup_fallback(struct crypto4xx_ctx *ctx,
-                                   struct crypto_aead *cipher,
-                                   const u8 *key,
-                                   unsigned int keylen)
+static int crypto4xx_aead_setup_fallback(struct crypto4xx_ctx *ctx,
+                                        struct crypto_aead *cipher,
+                                        const u8 *key,
+                                        unsigned int keylen)
 {
        int rc;
 
@@ -313,7 +392,7 @@ int crypto4xx_setkey_aes_ccm(struct crypto_aead *cipher, const u8 *key,
        struct dynamic_sa_ctl *sa;
        int rc = 0;
 
-       rc = crypto4xx_setup_fallback(ctx, cipher, key, keylen);
+       rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
        if (rc)
                return rc;
 
@@ -472,7 +551,7 @@ int crypto4xx_setkey_aes_gcm(struct crypto_aead *cipher,
                return -EINVAL;
        }
 
-       rc = crypto4xx_setup_fallback(ctx, cipher, key, keylen);
+       rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
        if (rc)
                return rc;
 
diff --git a/drivers/crypto/amcc/crypto4xx_core.c b/drivers/crypto/amcc/crypto4xx_core.c
index 7d0629626d15f82dcd9a871b4564b7c06bc29783..73963928d91b54d81eb1feebd93b43cb79b746e9 100644
--- a/drivers/crypto/amcc/crypto4xx_core.c
+++ b/drivers/crypto/amcc/crypto4xx_core.c
@@ -941,6 +941,19 @@ static int crypto4xx_sk_init(struct crypto_skcipher *sk)
        struct crypto4xx_alg *amcc_alg;
        struct crypto4xx_ctx *ctx =  crypto_skcipher_ctx(sk);
 
+       if (alg->base.cra_flags & CRYPTO_ALG_NEED_FALLBACK) {
+               ctx->sw_cipher.cipher =
+                       crypto_alloc_skcipher(alg->base.cra_name, 0,
+                                             CRYPTO_ALG_NEED_FALLBACK |
+                                             CRYPTO_ALG_ASYNC);
+               if (IS_ERR(ctx->sw_cipher.cipher))
+                       return PTR_ERR(ctx->sw_cipher.cipher);
+
+               crypto_skcipher_set_reqsize(sk,
+                       sizeof(struct skcipher_request) + 32 +
+                       crypto_skcipher_reqsize(ctx->sw_cipher.cipher));
+       }
+
        amcc_alg = container_of(alg, struct crypto4xx_alg, alg.u.cipher);
        crypto4xx_ctx_init(amcc_alg, ctx);
        return 0;
@@ -956,6 +969,8 @@ static void crypto4xx_sk_exit(struct crypto_skcipher *sk)
        struct crypto4xx_ctx *ctx =  crypto_skcipher_ctx(sk);
 
        crypto4xx_common_exit(ctx);
+       if (ctx->sw_cipher.cipher)
+               crypto_free_skcipher(ctx->sw_cipher.cipher);
 }
 
 static int crypto4xx_aead_init(struct crypto_aead *tfm)
@@ -1145,6 +1160,28 @@ static struct crypto4xx_alg_common crypto4xx_alg[] = {
                .init = crypto4xx_sk_init,
                .exit = crypto4xx_sk_exit,
        } },
+       { .type = CRYPTO_ALG_TYPE_SKCIPHER, .u.cipher = {
+               .base = {
+                       .cra_name = "ctr(aes)",
+                       .cra_driver_name = "ctr-aes-ppc4xx",
+                       .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
+                       .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
+                               CRYPTO_ALG_NEED_FALLBACK |
+                               CRYPTO_ALG_ASYNC |
+                               CRYPTO_ALG_KERN_DRIVER_ONLY,
+                       .cra_blocksize = AES_BLOCK_SIZE,
+                       .cra_ctxsize = sizeof(struct crypto4xx_ctx),
+                       .cra_module = THIS_MODULE,
+               },
+               .min_keysize = AES_MIN_KEY_SIZE,
+               .max_keysize = AES_MAX_KEY_SIZE,
+               .ivsize = AES_IV_SIZE,
+               .setkey = crypto4xx_setkey_aes_ctr,
+               .encrypt = crypto4xx_encrypt_ctr,
+               .decrypt = crypto4xx_decrypt_ctr,
+               .init = crypto4xx_sk_init,
+               .exit = crypto4xx_sk_exit,
+       } },
        { .type = CRYPTO_ALG_TYPE_SKCIPHER, .u.cipher = {
                .base = {
                        .cra_name = "rfc3686(ctr(aes))",
diff --git a/drivers/crypto/amcc/crypto4xx_core.h b/drivers/crypto/amcc/crypto4xx_core.h
index 863cc558bc29423e04817507e32e647dd222bfaf..279f650c4f1cf9c9989c4b972e5018f8ff1b1822 100644
--- a/drivers/crypto/amcc/crypto4xx_core.h
+++ b/drivers/crypto/amcc/crypto4xx_core.h
@@ -128,6 +128,7 @@ struct crypto4xx_ctx {
        __le32 iv_nonce;
        u32 sa_len;
        union {
+               struct crypto_skcipher *cipher;
                struct crypto_aead *aead;
        } sw_cipher;
 };
@@ -163,12 +164,16 @@ int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen);
 int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen);
+int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
+                            const u8 *key, unsigned int keylen);
 int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen);
 int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen);
 int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen);
+int crypto4xx_encrypt_ctr(struct skcipher_request *req);
+int crypto4xx_decrypt_ctr(struct skcipher_request *req);
 int crypto4xx_encrypt_iv(struct skcipher_request *req);
 int crypto4xx_decrypt_iv(struct skcipher_request *req);
 int crypto4xx_encrypt_noiv(struct skcipher_request *req);