From e3a53a6d11b2c1770545a2820a58c117799bcb70 Mon Sep 17 00:00:00 2001
From: Ansuel Smith <ansuelsmth@gmail.com>
Date: Tue, 16 Jun 2020 18:12:34 +0200
Subject: [PATCH 3/3] Convert ablkcipher to skcipher

---
 cryptoapi/v1.1/nss_cryptoapi.c         | 149 +++++++++++--------------
 cryptoapi/v1.1/nss_cryptoapi_ablk.c    | 136 +++++++++++-----------
 cryptoapi/v1.1/nss_cryptoapi_debugfs.c |   1 +
 cryptoapi/v1.1/nss_cryptoapi_private.h |  16 +--
 4 files changed, 145 insertions(+), 157 deletions(-)

diff --git a/cryptoapi/v1.1/nss_cryptoapi.c b/cryptoapi/v1.1/nss_cryptoapi.c
index a10590e..3a835dc 100644
--- a/cryptoapi/v1.1/nss_cryptoapi.c
+++ b/cryptoapi/v1.1/nss_cryptoapi.c
@@ -66,7 +66,7 @@ struct aead_alg cryptoapi_aead_algs[] = {
.cra_name = "echainiv(authenc(hmac(sha1),cbc(aes)))",
.cra_driver_name = "nss-hmac-sha1-cbc-aes",
.cra_priority = 10000,
- .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NOSUPP_SG | CRYPTO_ALG_NEED_FALLBACK,
+ .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NOSUPP_SG | CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct nss_cryptoapi_ctx),
.cra_alignmask = 0,
@@ -87,7 +87,7 @@ struct aead_alg cryptoapi_aead_algs[] = {
.cra_name = "seqiv(authenc(hmac(sha1),rfc3686(ctr(aes))))",
.cra_driver_name = "nss-hmac-sha1-rfc3686-ctr-aes",
.cra_priority = 10000,
- .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NOSUPP_SG | CRYPTO_ALG_NEED_FALLBACK,
+ .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NOSUPP_SG | CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct nss_cryptoapi_ctx),
.cra_alignmask = 0,
@@ -108,7 +108,7 @@ struct aead_alg cryptoapi_aead_algs[] = {
.cra_name = "echainiv(authenc(hmac(sha1),cbc(des3_ede)))",
.cra_driver_name = "nss-hmac-sha1-cbc-3des",
.cra_priority = 300,
- .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NOSUPP_SG,
+ .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NOSUPP_SG | CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES3_EDE_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct nss_cryptoapi_ctx),
.cra_alignmask = 0,
@@ -129,7 +129,7 @@ struct aead_alg cryptoapi_aead_algs[] = {
.cra_name = "echainiv(authenc(hmac(sha256),cbc(aes)))",
.cra_driver_name = "nss-hmac-sha256-cbc-aes",
.cra_priority = 10000,
- .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NOSUPP_SG | CRYPTO_ALG_NEED_FALLBACK,
+ .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NOSUPP_SG | CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct nss_cryptoapi_ctx),
.cra_alignmask = 0,
@@ -150,7 +150,7 @@ struct aead_alg cryptoapi_aead_algs[] = {
.cra_name = "seqiv(authenc(hmac(sha256),rfc3686(ctr(aes))))",
.cra_driver_name = "nss-hmac-sha256-rfc3686-ctr-aes",
.cra_priority = 10000,
- .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NOSUPP_SG | CRYPTO_ALG_NEED_FALLBACK,
+ .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NOSUPP_SG | CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct nss_cryptoapi_ctx),
.cra_alignmask = 0,
@@ -171,7 +171,7 @@ struct aead_alg cryptoapi_aead_algs[] = {
.cra_name = "echainiv(authenc(hmac(sha256),cbc(des3_ede)))",
.cra_driver_name = "nss-hmac-sha256-cbc-3des",
.cra_priority = 300,
- .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NOSUPP_SG,
+ .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NOSUPP_SG | CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES3_EDE_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct nss_cryptoapi_ctx),
.cra_alignmask = 0,
@@ -192,75 +192,66 @@ struct aead_alg cryptoapi_aead_algs[] = {
/*
* ABLK cipher algorithms
*/
-static struct crypto_alg cryptoapi_ablkcipher_algs[] = {
+static struct skcipher_alg cryptoapi_skcipher_algs[] = {
{
- .cra_name = "cbc(aes)",
- .cra_driver_name = "nss-cbc-aes",
- .cra_priority = 10000,
- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_NOSUPP_SG | CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
- .cra_blocksize = AES_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct nss_cryptoapi_ctx),
- .cra_alignmask = 0,
- .cra_type = &crypto_ablkcipher_type,
- .cra_module = THIS_MODULE,
- .cra_init = nss_cryptoapi_ablkcipher_init,
- .cra_exit = nss_cryptoapi_ablkcipher_exit,
- .cra_u = {
- .ablkcipher = {
- .ivsize = AES_BLOCK_SIZE,
- .min_keysize = AES_MIN_KEY_SIZE,
- .max_keysize = AES_MAX_KEY_SIZE,
- .setkey = nss_cryptoapi_ablk_aes_setkey,
- .encrypt = nss_cryptoapi_ablk_aes_encrypt,
- .decrypt = nss_cryptoapi_ablk_aes_decrypt,
- },
+ .base = {
+ .cra_name = "cbc(aes)",
+ .cra_driver_name = "nss-cbc-aes",
+ .cra_priority = 10000,
+ .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_NOSUPP_SG | CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct nss_cryptoapi_ctx),
+ .cra_alignmask = 0,
+ .cra_module = THIS_MODULE,
},
+ .init = nss_cryptoapi_skcipher_init,
+ .exit = nss_cryptoapi_skcipher_exit,
+ .ivsize = AES_BLOCK_SIZE,
+ .min_keysize = AES_MIN_KEY_SIZE,
+ .max_keysize = AES_MAX_KEY_SIZE,
+ .setkey = nss_cryptoapi_ablk_aes_setkey,
+ .encrypt = nss_cryptoapi_ablk_aes_encrypt,
+ .decrypt = nss_cryptoapi_ablk_aes_decrypt,
},
{
- .cra_name = "rfc3686(ctr(aes))",
- .cra_driver_name = "nss-rfc3686-ctr-aes",
- .cra_priority = 30000,
- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_NOSUPP_SG | CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
- .cra_blocksize = AES_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct nss_cryptoapi_ctx),
- .cra_alignmask = 0,
- .cra_type = &crypto_ablkcipher_type,
- .cra_module = THIS_MODULE,
- .cra_init = nss_cryptoapi_ablkcipher_init,
- .cra_exit = nss_cryptoapi_ablkcipher_exit,
- .cra_u = {
- .ablkcipher = {
- .ivsize = CTR_RFC3686_IV_SIZE,
- .min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
- .max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
- .setkey = nss_cryptoapi_ablk_aes_setkey,
- .encrypt = nss_cryptoapi_ablk_aes_encrypt,
- .decrypt = nss_cryptoapi_ablk_aes_decrypt,
- },
+ .base = {
+ .cra_name = "rfc3686(ctr(aes))",
+ .cra_driver_name = "nss-rfc3686-ctr-aes",
+ .cra_priority = 30000,
+ .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_NOSUPP_SG | CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct nss_cryptoapi_ctx),
+ .cra_alignmask = 0,
+ .cra_module = THIS_MODULE,
},
+ .init = nss_cryptoapi_skcipher_init,
+ .exit = nss_cryptoapi_skcipher_exit,
+ .ivsize = CTR_RFC3686_IV_SIZE,
+ .min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
+ .max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
+ .setkey = nss_cryptoapi_ablk_aes_setkey,
+ .encrypt = nss_cryptoapi_ablk_aes_encrypt,
+ .decrypt = nss_cryptoapi_ablk_aes_decrypt,
},
{
- .cra_name = "cbc(des3_ede)",
- .cra_driver_name = "nss-cbc-3des",
- .cra_priority = 1000,
- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_NOSUPP_SG | CRYPTO_ALG_ASYNC,
- .cra_blocksize = DES3_EDE_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct nss_cryptoapi_ctx),
- .cra_alignmask = 0,
- .cra_type = &crypto_ablkcipher_type,
- .cra_module = THIS_MODULE,
- .cra_init = nss_cryptoapi_ablkcipher_init,
- .cra_exit = nss_cryptoapi_ablkcipher_exit,
- .cra_u = {
- .ablkcipher = {
- .ivsize = DES3_EDE_BLOCK_SIZE,
- .min_keysize = DES3_EDE_KEY_SIZE,
- .max_keysize = DES3_EDE_KEY_SIZE,
- .setkey = nss_cryptoapi_3des_cbc_setkey,
- .encrypt = nss_cryptoapi_3des_cbc_encrypt,
- .decrypt = nss_cryptoapi_3des_cbc_decrypt,
- },
+ .base = {
+ .cra_name = "cbc(des3_ede)",
+ .cra_driver_name = "nss-cbc-3des",
+ .cra_priority = 1000,
+ .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_NOSUPP_SG | CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct nss_cryptoapi_ctx),
+ .cra_alignmask = 0,
+ .cra_module = THIS_MODULE,
},
+ .init = nss_cryptoapi_skcipher_init,
+ .exit = nss_cryptoapi_skcipher_exit,
+ .ivsize = DES3_EDE_BLOCK_SIZE,
+ .min_keysize = DES3_EDE_KEY_SIZE,
+ .max_keysize = DES3_EDE_KEY_SIZE,
+ .setkey = nss_cryptoapi_3des_cbc_setkey,
+ .encrypt = nss_cryptoapi_3des_cbc_encrypt,
+ .decrypt = nss_cryptoapi_3des_cbc_decrypt,
},
};

@@ -277,14 +268,14 @@ static nss_crypto_user_ctx_t nss_cryptoapi_register(nss_crypto_handle_t crypto)

sc->crypto = crypto;

- for (i = 0; i < ARRAY_SIZE(cryptoapi_ablkcipher_algs); i++) {
- rc = crypto_register_alg(&cryptoapi_ablkcipher_algs[i]);
+ for (i = 0; i < ARRAY_SIZE(cryptoapi_skcipher_algs); i++) {
+ rc = crypto_register_skcipher(&cryptoapi_skcipher_algs[i]);
if (rc) {
- nss_cfi_trace("Ablk registration failed, algo: %s\n", cryptoapi_ablkcipher_algs[i].cra_name);
- cryptoapi_ablkcipher_algs[i].cra_flags = 0;
+ nss_cfi_trace("Ablk registration failed, algo: %s\n", cryptoapi_skcipher_algs[i].base.cra_name);
+ cryptoapi_skcipher_algs[i].base.cra_flags = 0;
continue;
}
- nss_cfi_info("Ablk registration succeeded, algo: %s\n", cryptoapi_ablkcipher_algs[i].cra_name);
+ nss_cfi_info("Ablk registration succeeded, algo: %s\n", cryptoapi_skcipher_algs[i].base.cra_name);
}

for (i = 0; i < ARRAY_SIZE(cryptoapi_aead_algs); i++) {
@@ -317,7 +308,7 @@ static nss_crypto_user_ctx_t nss_cryptoapi_register(nss_crypto_handle_t crypto)
static void nss_cryptoapi_unregister(nss_crypto_user_ctx_t cfi)
{
struct nss_cryptoapi *sc = &gbl_ctx;
- int i, ret = 0;
+ int i;

nss_cfi_info("unregister nss_cryptoapi\n");

@@ -326,16 +317,12 @@ static void nss_cryptoapi_unregister(nss_crypto_user_ctx_t cfi)
*/
atomic_set(&gbl_ctx.registered, 0);

- for (i = 0; i < ARRAY_SIZE(cryptoapi_ablkcipher_algs); i++) {
- if (!cryptoapi_ablkcipher_algs[i].cra_flags) {
- continue;
- }
- ret = crypto_unregister_alg(&cryptoapi_ablkcipher_algs[i]);
- if (ret) {
- nss_cfi_err("Ablk unregister failed, algo: %s\n", cryptoapi_ablkcipher_algs[i].cra_name);
+ for (i = 0; i < ARRAY_SIZE(cryptoapi_skcipher_algs); i++) {
+ if (!cryptoapi_skcipher_algs[i].base.cra_flags) {
continue;
}
- nss_cfi_info("Ablk unregister succeeded, algo: %s\n", cryptoapi_ablkcipher_algs[i].cra_name);
+ crypto_unregister_skcipher(&cryptoapi_skcipher_algs[i]);
+ nss_cfi_info("Ablk unregister succeeded, algo: %s\n", cryptoapi_skcipher_algs[i].base.cra_name);
}

for (i = 0; i < ARRAY_SIZE(cryptoapi_aead_algs); i++) {
|
|
index 9b6c65e..913e9cc 100644
|
|
--- a/cryptoapi/v1.1/nss_cryptoapi_ablk.c
|
|
+++ b/cryptoapi/v1.1/nss_cryptoapi_ablk.c
|
|
@@ -102,12 +102,12 @@ int nss_cryptoapi_skcipher_ctx2session(struct crypto_skcipher *sk, uint32_t *sid
|
|
EXPORT_SYMBOL(nss_cryptoapi_skcipher_ctx2session);
|
|
|
|
/*
|
|
- * nss_cryptoapi_ablkcipher_init()
|
|
- * Cryptoapi ablkcipher init function.
|
|
+ * nss_cryptoapi_skcipher_init()
|
|
+ * Cryptoapi skcipher init function.
|
|
*/
|
|
-int nss_cryptoapi_ablkcipher_init(struct crypto_tfm *tfm)
|
|
+int nss_cryptoapi_skcipher_init(struct crypto_skcipher *tfm)
|
|
{
|
|
- struct nss_cryptoapi_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
+ struct nss_cryptoapi_ctx *ctx = crypto_skcipher_ctx(tfm);
|
|
struct crypto_cipher *sw_tfm;
|
|
|
|
nss_cfi_assert(ctx);
|
|
@@ -122,31 +122,31 @@ int nss_cryptoapi_ablkcipher_init(struct crypto_tfm *tfm)
|
|
|
|
nss_cryptoapi_set_magic(ctx);
|
|
|
|
- if (!(crypto_tfm_alg_type(tfm) & CRYPTO_ALG_NEED_FALLBACK))
|
|
+ if (!(crypto_tfm_alg_type(&tfm->base) & CRYPTO_ALG_NEED_FALLBACK))
|
|
return 0;
|
|
|
|
/* Alloc fallback transform for future use */
|
|
- sw_tfm = crypto_alloc_cipher(crypto_tfm_alg_name(tfm), 0, CRYPTO_ALG_ASYNC |
|
|
+ sw_tfm = crypto_alloc_cipher(crypto_tfm_alg_name(&tfm->base), 0, CRYPTO_ALG_ASYNC |
|
|
CRYPTO_ALG_NEED_FALLBACK);
|
|
if (IS_ERR(sw_tfm)) {
|
|
- nss_cfi_err("unable to alloc software crypto for %s\n", crypto_tfm_alg_name(tfm));
|
|
+ nss_cfi_err("unable to alloc software crypto for %s\n", crypto_tfm_alg_name(&tfm->base));
|
|
return -EINVAL;
|
|
}
|
|
|
|
/* set this tfm reqsize same to fallback tfm */
|
|
- tfm->crt_ablkcipher.reqsize = sizeof(struct nss_cryptoapi_ctx);
|
|
+ crypto_skcipher_set_reqsize(tfm, sizeof(struct nss_cryptoapi_ctx));
|
|
ctx->sw_tfm = crypto_cipher_tfm(sw_tfm);
|
|
|
|
return 0;
|
|
}
|
|
|
|
/*
|
|
- * nss_cryptoapi_ablkcipher_exit()
|
|
- * Cryptoapi ablkcipher exit function.
|
|
+ * nss_cryptoapi_skcipher_exit()
|
|
+ * Cryptoapi skcipher exit function.
|
|
*/
|
|
-void nss_cryptoapi_ablkcipher_exit(struct crypto_tfm *tfm)
|
|
+void nss_cryptoapi_skcipher_exit(struct crypto_skcipher *tfm)
|
|
{
|
|
- struct nss_cryptoapi_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
+ struct nss_cryptoapi_ctx *ctx = crypto_skcipher_ctx(tfm);
|
|
struct nss_cryptoapi *sc = &gbl_ctx;
|
|
nss_crypto_status_t status;
|
|
|
|
@@ -158,7 +158,7 @@ void nss_cryptoapi_ablkcipher_exit(struct crypto_tfm *tfm)
|
|
}
|
|
|
|
if (ctx->sw_tfm) {
|
|
- crypto_free_ablkcipher(__crypto_ablkcipher_cast(ctx->sw_tfm));
|
|
+ crypto_free_skcipher(__crypto_skcipher_cast(ctx->sw_tfm));
|
|
ctx->sw_tfm = NULL;
|
|
}
|
|
|
|
@@ -183,9 +183,9 @@ void nss_cryptoapi_ablkcipher_exit(struct crypto_tfm *tfm)
|
|
* nss_cryptoapi_ablk_aes_setkey()
|
|
* Cryptoapi setkey routine for aes.
|
|
*/
|
|
-int nss_cryptoapi_ablk_aes_setkey(struct crypto_ablkcipher *cipher, const u8 *key, unsigned int keylen)
|
|
+int nss_cryptoapi_ablk_aes_setkey(struct crypto_skcipher *cipher, const u8 *key, unsigned int keylen)
|
|
{
|
|
- struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
|
|
+ struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
|
|
struct nss_cryptoapi_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
struct nss_cryptoapi *sc = &gbl_ctx;
|
|
struct nss_crypto_key cip;
|
|
@@ -255,10 +255,10 @@ int nss_cryptoapi_ablk_aes_setkey(struct crypto_ablkcipher *cipher, const u8 *ke
|
|
|
|
/* set flag to fallback tfm */
|
|
crypto_tfm_clear_flags(ctx->sw_tfm, CRYPTO_TFM_REQ_MASK);
|
|
- crypto_tfm_set_flags(ctx->sw_tfm, crypto_ablkcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
|
|
+ crypto_tfm_set_flags(ctx->sw_tfm, crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
|
|
|
|
/* Set key to the fallback tfm */
|
|
- ret = crypto_ablkcipher_setkey(__crypto_ablkcipher_cast(ctx->sw_tfm), key, keylen);
|
|
+ ret = crypto_skcipher_setkey(__crypto_skcipher_cast(ctx->sw_tfm), key, keylen);
|
|
if (ret) {
|
|
nss_cfi_err("Failed to set key to the sw crypto");
|
|
|
|
@@ -266,7 +266,7 @@ int nss_cryptoapi_ablk_aes_setkey(struct crypto_ablkcipher *cipher, const u8 *ke
|
|
* Set back the fallback tfm flag to the original flag one after
|
|
* doing setkey
|
|
*/
|
|
- crypto_ablkcipher_set_flags(cipher, crypto_tfm_get_flags(ctx->sw_tfm));
|
|
+ crypto_skcipher_set_flags(cipher, crypto_tfm_get_flags(ctx->sw_tfm));
|
|
}
|
|
return ret;
|
|
default:
|
|
@@ -289,23 +289,23 @@ int nss_cryptoapi_ablk_aes_setkey(struct crypto_ablkcipher *cipher, const u8 *ke
|
|
return 0;
|
|
|
|
fail:
|
|
- crypto_ablkcipher_set_flags(cipher, flag);
|
|
+ crypto_skcipher_set_flags(cipher, flag);
|
|
return -EINVAL;
|
|
}
|
|
|
|
/*
|
|
- * nss_cryptoapi_ablkcipher_done()
|
|
+ * nss_cryptoapi_skcipher_done()
|
|
* Cipher operation completion callback function
|
|
*/
|
|
-void nss_cryptoapi_ablkcipher_done(struct nss_crypto_buf *buf)
|
|
+void nss_cryptoapi_skcipher_done(struct nss_crypto_buf *buf)
|
|
{
|
|
struct nss_cryptoapi_ctx *ctx;
|
|
- struct ablkcipher_request *req;
|
|
+ struct skcipher_request *req;
|
|
int err = 0;
|
|
|
|
nss_cfi_assert(buf);
|
|
|
|
- req = (struct ablkcipher_request *)nss_crypto_get_cb_ctx(buf);
|
|
+ req = (struct skcipher_request *)nss_crypto_get_cb_ctx(buf);
|
|
|
|
/*
|
|
* check cryptoapi context magic number.
|
|
@@ -319,7 +319,7 @@ void nss_cryptoapi_ablkcipher_done(struct nss_crypto_buf *buf)
|
|
nss_crypto_buf_free(gbl_ctx.crypto, buf);
|
|
|
|
nss_cfi_dbg("after transformation\n");
|
|
- nss_cfi_dbg_data(sg_virt(req->dst), req->nbytes, ' ');
|
|
+ nss_cfi_dbg_data(sg_virt(req->dst), req->cryptlen, ' ');
|
|
|
|
/*
|
|
* Passing always pass in case of encrypt.
|
|
@@ -337,7 +337,7 @@ void nss_cryptoapi_ablkcipher_done(struct nss_crypto_buf *buf)
|
|
* Cryptoapi: obtain sg to virtual address mapping.
|
|
* Check for multiple sg in src and dst
|
|
*/
|
|
-int nss_cryptoapi_ablk_checkaddr(struct ablkcipher_request *req)
|
|
+int nss_cryptoapi_ablk_checkaddr(struct skcipher_request *req)
|
|
{
|
|
/*
|
|
* Currently only single sg is supported
|
|
@@ -356,7 +356,7 @@ int nss_cryptoapi_ablk_checkaddr(struct ablkcipher_request *req)
|
|
/*
|
|
* If the size of data is more than 65K reject transformation
|
|
*/
|
|
- if (req->nbytes > NSS_CRYPTOAPI_MAX_DATA_LEN) {
|
|
+ if (req->cryptlen > NSS_CRYPTOAPI_MAX_DATA_LEN) {
|
|
nss_cfi_err("Buffer length exceeded limit\n");
|
|
return -EINVAL;
|
|
}
|
|
@@ -368,10 +368,10 @@ int nss_cryptoapi_ablk_checkaddr(struct ablkcipher_request *req)
|
|
* nss_cryptoapi_ablk_transform()
|
|
* Crytoapi common routine for encryption and decryption operations.
|
|
*/
|
|
-struct nss_crypto_buf *nss_cryptoapi_ablk_transform(struct ablkcipher_request *req, struct nss_cryptoapi_ablk_info *info)
|
|
+struct nss_crypto_buf *nss_cryptoapi_ablk_transform(struct skcipher_request *req, struct nss_cryptoapi_ablk_info *info)
|
|
{
|
|
- struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(req);
|
|
- struct nss_cryptoapi_ctx *ctx = crypto_ablkcipher_ctx(cipher);
|
|
+ struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
|
|
+ struct nss_cryptoapi_ctx *ctx = crypto_skcipher_ctx(cipher);
|
|
struct nss_crypto_buf *buf;
|
|
struct nss_cryptoapi *sc = &gbl_ctx;
|
|
nss_crypto_status_t status;
|
|
@@ -382,7 +382,7 @@ struct nss_crypto_buf *nss_cryptoapi_ablk_transform(struct ablkcipher_request *r
|
|
nss_cfi_assert(ctx);
|
|
|
|
nss_cfi_dbg("src_vaddr: 0x%p, dst_vaddr: 0x%p, iv: 0x%p\n",
|
|
- sg_virt(req->src), sg_virt(req->dst), req->info);
|
|
+ sg_virt(req->src), sg_virt(req->dst), req->iv);
|
|
|
|
info->params->cipher_skip = 0;
|
|
info->params->auth_skip = 0;
|
|
@@ -419,19 +419,19 @@ struct nss_crypto_buf *nss_cryptoapi_ablk_transform(struct ablkcipher_request *r
|
|
/*
|
|
* Get IV location and memcpy the IV
|
|
*/
|
|
- iv_size = crypto_ablkcipher_ivsize(cipher);
|
|
+ iv_size = crypto_skcipher_ivsize(cipher);
|
|
iv_addr = nss_crypto_get_ivaddr(buf);
|
|
|
|
switch (ctx->cip_alg) {
|
|
case NSS_CRYPTO_CIPHER_AES_CBC:
|
|
case NSS_CRYPTO_CIPHER_DES:
|
|
- memcpy(iv_addr, req->info, iv_size);
|
|
+ memcpy(iv_addr, req->iv, iv_size);
|
|
break;
|
|
|
|
case NSS_CRYPTO_CIPHER_AES_CTR:
|
|
((uint32_t *)iv_addr)[0] = ctx->ctx_iv[0];
|
|
- ((uint32_t *)iv_addr)[1] = ((uint32_t *)req->info)[0];
|
|
- ((uint32_t *)iv_addr)[2] = ((uint32_t *)req->info)[1];
|
|
+ ((uint32_t *)iv_addr)[1] = ((uint32_t *)req->iv)[0];
|
|
+ ((uint32_t *)iv_addr)[2] = ((uint32_t *)req->iv)[1];
|
|
((uint32_t *)iv_addr)[3] = ctx->ctx_iv[3];
|
|
break;
|
|
|
|
@@ -446,7 +446,7 @@ struct nss_crypto_buf *nss_cryptoapi_ablk_transform(struct ablkcipher_request *r
|
|
/*
|
|
* Fill Cipher and Auth len
|
|
*/
|
|
- cipher_len = req->nbytes;
|
|
+ cipher_len = req->cryptlen;
|
|
auth_len = 0;
|
|
|
|
nss_crypto_set_data(buf, sg_virt(req->src), sg_virt(req->dst), cipher_len);
|
|
@@ -463,12 +463,12 @@ struct nss_crypto_buf *nss_cryptoapi_ablk_transform(struct ablkcipher_request *r
|
|
}
|
|
|
|
/*
|
|
- * nss_cryptoapi_ablkcipher_fallback()
|
|
- * Cryptoapi fallback for ablkcipher algorithm.
|
|
+ * nss_cryptoapi_skcipher_fallback()
|
|
+ * Cryptoapi fallback for skcipher algorithm.
|
|
*/
|
|
-int nss_cryptoapi_ablkcipher_fallback(struct nss_cryptoapi_ctx *ctx, struct ablkcipher_request *req, int type)
|
|
+int nss_cryptoapi_skcipher_fallback(struct nss_cryptoapi_ctx *ctx, struct skcipher_request *req, int type)
|
|
{
|
|
- struct crypto_ablkcipher *orig_tfm = crypto_ablkcipher_reqtfm(req);
|
|
+ struct crypto_skcipher *orig_tfm = crypto_skcipher_reqtfm(req);
|
|
int err;
|
|
|
|
if (!ctx->sw_tfm) {
|
|
@@ -476,16 +476,16 @@ int nss_cryptoapi_ablkcipher_fallback(struct nss_cryptoapi_ctx *ctx, struct ablk
|
|
}
|
|
|
|
/* Set new fallback tfm to the request */
|
|
- ablkcipher_request_set_tfm(req, __crypto_ablkcipher_cast(ctx->sw_tfm));
|
|
+ skcipher_request_set_tfm(req, __crypto_skcipher_cast(ctx->sw_tfm));
|
|
|
|
ctx->queued++;
|
|
|
|
switch (type) {
|
|
case NSS_CRYPTOAPI_ENCRYPT:
|
|
- err = crypto_ablkcipher_encrypt(req);
|
|
+ err = crypto_skcipher_encrypt(req);
|
|
break;
|
|
case NSS_CRYPTOAPI_DECRYPT:
|
|
- err = crypto_ablkcipher_decrypt(req);
|
|
+ err = crypto_skcipher_decrypt(req);
|
|
break;
|
|
default:
|
|
err = -EINVAL;
|
|
@@ -495,7 +495,7 @@ int nss_cryptoapi_ablkcipher_fallback(struct nss_cryptoapi_ctx *ctx, struct ablk
|
|
ctx->completed++;
|
|
|
|
/* Set original tfm to the request */
|
|
- ablkcipher_request_set_tfm(req, orig_tfm);
|
|
+ skcipher_request_set_tfm(req, orig_tfm);
|
|
|
|
return err;
|
|
}
|
|
@@ -504,13 +504,13 @@ int nss_cryptoapi_ablkcipher_fallback(struct nss_cryptoapi_ctx *ctx, struct ablk
|
|
* nss_cryptoapi_ablk_aes_encrypt()
|
|
* Crytoapi encrypt for aes(aes-cbc/rfc3686-aes-ctr) algorithms.
|
|
*/
|
|
-int nss_cryptoapi_ablk_aes_encrypt(struct ablkcipher_request *req)
|
|
+int nss_cryptoapi_ablk_aes_encrypt(struct skcipher_request *req)
|
|
{
|
|
struct nss_crypto_params params = { .req_type = NSS_CRYPTO_REQ_TYPE_ENCRYPT };
|
|
- struct nss_cryptoapi_ablk_info info = {.cb_fn = nss_cryptoapi_ablkcipher_done,
|
|
+ struct nss_cryptoapi_ablk_info info = {.cb_fn = nss_cryptoapi_skcipher_done,
|
|
.params = ¶ms};
|
|
- struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(req);
|
|
- struct nss_cryptoapi_ctx *ctx = crypto_ablkcipher_ctx(cipher);
|
|
+ struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
|
|
+ struct nss_cryptoapi_ctx *ctx = crypto_skcipher_ctx(cipher);
|
|
struct nss_cryptoapi *sc = &gbl_ctx;
|
|
struct nss_crypto_buf *buf;
|
|
|
|
@@ -520,7 +520,7 @@ int nss_cryptoapi_ablk_aes_encrypt(struct ablkcipher_request *req)
|
|
nss_cryptoapi_verify_magic(ctx);
|
|
|
|
if (ctx->fallback_req)
|
|
- return nss_cryptoapi_ablkcipher_fallback(ctx, req, NSS_CRYPTOAPI_ENCRYPT);
|
|
+ return nss_cryptoapi_skcipher_fallback(ctx, req, NSS_CRYPTOAPI_ENCRYPT);
|
|
|
|
/*
|
|
* Check if previous call to setkey couldn't allocate session with core crypto.
|
|
@@ -539,9 +539,9 @@ int nss_cryptoapi_ablk_aes_encrypt(struct ablkcipher_request *req)
|
|
* According to RFC3686, AES-CTR algo need not be padded if the
|
|
* plaintext or ciphertext is unaligned to block size boundary.
|
|
*/
|
|
- if (nss_cryptoapi_check_unalign(req->nbytes, AES_BLOCK_SIZE) && (ctx->cip_alg != NSS_CRYPTO_CIPHER_AES_CTR)) {
|
|
+ if (nss_cryptoapi_check_unalign(req->cryptlen, AES_BLOCK_SIZE) && (ctx->cip_alg != NSS_CRYPTO_CIPHER_AES_CTR)) {
|
|
nss_cfi_err("Invalid cipher len - Not aligned to algo blocksize\n");
|
|
- crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
|
|
+ crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
|
|
return -EINVAL;
|
|
}
|
|
|
|
@@ -571,13 +571,13 @@ int nss_cryptoapi_ablk_aes_encrypt(struct ablkcipher_request *req)
|
|
* nss_cryptoapi_ablk_aes_decrypt()
|
|
* Crytoapi decrypt for aes(aes-cbc/rfc3686-aes-ctr) algorithms.
|
|
*/
|
|
-int nss_cryptoapi_ablk_aes_decrypt(struct ablkcipher_request *req)
|
|
+int nss_cryptoapi_ablk_aes_decrypt(struct skcipher_request *req)
|
|
{
|
|
struct nss_crypto_params params = { .req_type = NSS_CRYPTO_REQ_TYPE_DECRYPT };
|
|
- struct nss_cryptoapi_ablk_info info = {.cb_fn = nss_cryptoapi_ablkcipher_done,
|
|
+ struct nss_cryptoapi_ablk_info info = {.cb_fn = nss_cryptoapi_skcipher_done,
|
|
.params = ¶ms};
|
|
- struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(req);
|
|
- struct nss_cryptoapi_ctx *ctx = crypto_ablkcipher_ctx(cipher);
|
|
+ struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
|
|
+ struct nss_cryptoapi_ctx *ctx = crypto_skcipher_ctx(cipher);
|
|
struct nss_cryptoapi *sc = &gbl_ctx;
|
|
struct nss_crypto_buf *buf;
|
|
|
|
@@ -587,7 +587,7 @@ int nss_cryptoapi_ablk_aes_decrypt(struct ablkcipher_request *req)
|
|
nss_cryptoapi_verify_magic(ctx);
|
|
|
|
if (ctx->fallback_req)
|
|
- return nss_cryptoapi_ablkcipher_fallback(ctx, req, NSS_CRYPTOAPI_DECRYPT);
|
|
+ return nss_cryptoapi_skcipher_fallback(ctx, req, NSS_CRYPTOAPI_DECRYPT);
|
|
|
|
/*
|
|
* Check if previous call to setkey couldn't allocate session with core crypto.
|
|
@@ -606,9 +606,9 @@ int nss_cryptoapi_ablk_aes_decrypt(struct ablkcipher_request *req)
|
|
* According to RFC3686, AES-CTR algo need not be padded if the
|
|
* plaintext or ciphertext is unaligned to block size boundary.
|
|
*/
|
|
- if (nss_cryptoapi_check_unalign(req->nbytes, AES_BLOCK_SIZE) && (ctx->cip_alg != NSS_CRYPTO_CIPHER_AES_CTR)) {
|
|
+ if (nss_cryptoapi_check_unalign(req->cryptlen, AES_BLOCK_SIZE) && (ctx->cip_alg != NSS_CRYPTO_CIPHER_AES_CTR)) {
|
|
nss_cfi_err("Invalid cipher len - Not aligned to algo blocksize\n");
|
|
- crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
|
|
+ crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
|
|
return -EINVAL;
|
|
}
|
|
|
|
@@ -638,9 +638,9 @@ int nss_cryptoapi_ablk_aes_decrypt(struct ablkcipher_request *req)
|
|
* nss_cryptoapi_3des_cbc_setkey()
|
|
* Cryptoapi DES3 CBC setkey function.
|
|
*/
|
|
-int nss_cryptoapi_3des_cbc_setkey(struct crypto_ablkcipher *cipher, const u8 *key, unsigned int keylen)
|
|
+int nss_cryptoapi_3des_cbc_setkey(struct crypto_skcipher *cipher, const u8 *key, unsigned int keylen)
|
|
{
|
|
- struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
|
|
+ struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
|
|
struct nss_cryptoapi_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
struct nss_cryptoapi *sc = &gbl_ctx;
|
|
struct nss_crypto_key cip = { .algo = NSS_CRYPTO_CIPHER_DES };
|
|
@@ -693,7 +693,7 @@ int nss_cryptoapi_3des_cbc_setkey(struct crypto_ablkcipher *cipher, const u8 *ke
|
|
return 0;
|
|
|
|
fail:
|
|
- crypto_ablkcipher_set_flags(cipher, flag);
|
|
+ crypto_skcipher_set_flags(cipher, flag);
|
|
return -EINVAL;
|
|
}
|
|
|
|
@@ -701,7 +701,7 @@ fail:
|
|
* nss_cryptoapi_3des_cbc_encrypt()
|
|
* Cryptoapi DES3 CBC encrypt function.
|
|
*/
|
|
-int nss_cryptoapi_3des_cbc_encrypt(struct ablkcipher_request *req)
|
|
+int nss_cryptoapi_3des_cbc_encrypt(struct skcipher_request *req)
|
|
{
|
|
struct nss_cryptoapi *sc = &gbl_ctx;
|
|
struct nss_cryptoapi_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
|
|
@@ -727,14 +727,14 @@ int nss_cryptoapi_3des_cbc_encrypt(struct ablkcipher_request *req)
|
|
return -EINVAL;
|
|
}
|
|
|
|
- if (nss_cryptoapi_check_unalign(req->nbytes, DES3_EDE_BLOCK_SIZE)) {
|
|
+ if (nss_cryptoapi_check_unalign(req->cryptlen, DES3_EDE_BLOCK_SIZE)) {
|
|
nss_cfi_err("Invalid cipher len - Not aligned to algo blocksize\n");
|
|
- crypto_ablkcipher_set_flags(crypto_ablkcipher_reqtfm(req), CRYPTO_TFM_RES_BAD_BLOCK_LEN);
|
|
+ crypto_skcipher_set_flags(crypto_skcipher_reqtfm(req), CRYPTO_TFM_RES_BAD_BLOCK_LEN);
|
|
return -EINVAL;
|
|
}
|
|
|
|
info.params = ¶ms;
|
|
- info.cb_fn = nss_cryptoapi_ablkcipher_done;
|
|
+ info.cb_fn = nss_cryptoapi_skcipher_done;
|
|
|
|
buf = nss_cryptoapi_ablk_transform(req, &info);
|
|
if (!buf) {
|
|
@@ -762,7 +762,7 @@ int nss_cryptoapi_3des_cbc_encrypt(struct ablkcipher_request *req)
|
|
* nss_cryptoapi_3des_cbc_decrypt()
|
|
* Cryptoapi DES3 CBC decrypt function.
|
|
*/
|
|
-int nss_cryptoapi_3des_cbc_decrypt(struct ablkcipher_request *req)
|
|
+int nss_cryptoapi_3des_cbc_decrypt(struct skcipher_request *req)
|
|
{
|
|
struct nss_cryptoapi *sc = &gbl_ctx;
|
|
struct nss_cryptoapi_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
|
|
@@ -788,14 +788,14 @@ int nss_cryptoapi_3des_cbc_decrypt(struct ablkcipher_request *req)
|
|
return -EINVAL;
|
|
}
|
|
|
|
- if (nss_cryptoapi_check_unalign(req->nbytes, DES3_EDE_BLOCK_SIZE)) {
|
|
+ if (nss_cryptoapi_check_unalign(req->cryptlen, DES3_EDE_BLOCK_SIZE)) {
|
|
nss_cfi_err("Invalid cipher len - Not aligned to algo blocksize\n");
|
|
- crypto_ablkcipher_set_flags(crypto_ablkcipher_reqtfm(req), CRYPTO_TFM_RES_BAD_BLOCK_LEN);
|
|
+ crypto_skcipher_set_flags(crypto_skcipher_reqtfm(req), CRYPTO_TFM_RES_BAD_BLOCK_LEN);
|
|
return -EINVAL;
|
|
}
|
|
|
|
info.params = ¶ms;
|
|
- info.cb_fn = nss_cryptoapi_ablkcipher_done;
|
|
+ info.cb_fn = nss_cryptoapi_skcipher_done;
|
|
|
|
buf = nss_cryptoapi_ablk_transform(req, &info);
|
|
if (!buf) {
|
|
diff --git a/cryptoapi/v1.1/nss_cryptoapi_debugfs.c b/cryptoapi/v1.1/nss_cryptoapi_debugfs.c
|
|
index dff774c..cf4bc70 100644
|
|
--- a/cryptoapi/v1.1/nss_cryptoapi_debugfs.c
|
|
+++ b/cryptoapi/v1.1/nss_cryptoapi_debugfs.c
|
|
@@ -55,6 +55,7 @@
|
|
*/
|
|
void nss_cryptoapi_debugfs_add_stats(struct dentry *parent, struct nss_cryptoapi_ctx *session_ctx)
|
|
{
|
|
+ pr_info("add stats");
|
|
debugfs_create_u64("queued", S_IRUGO, parent, &session_ctx->queued);
|
|
debugfs_create_u64("completed", S_IRUGO, parent, &session_ctx->completed);
|
|
debugfs_create_u64("queue_failed", S_IRUGO, parent, &session_ctx->queue_failed);
|
|
diff --git a/cryptoapi/v1.1/nss_cryptoapi_private.h b/cryptoapi/v1.1/nss_cryptoapi_private.h
|
|
index 5feb9e3..70c6714 100644
|
|
--- a/cryptoapi/v1.1/nss_cryptoapi_private.h
|
|
+++ b/cryptoapi/v1.1/nss_cryptoapi_private.h
|
|
@@ -141,16 +141,16 @@ int nss_cryptoapi_sha256_3des_encrypt(struct aead_request *req);
|
|
int nss_cryptoapi_sha256_3des_decrypt(struct aead_request *req);
|
|
|
|
/* ABLKCIPHER */
|
|
-int nss_cryptoapi_ablkcipher_init(struct crypto_tfm *tfm);
|
|
-void nss_cryptoapi_ablkcipher_exit(struct crypto_tfm *tfm);
|
|
-int nss_cryptoapi_ablk_aes_setkey(struct crypto_ablkcipher *cipher, const u8 *key, unsigned int len);
|
|
-int nss_cryptoapi_3des_cbc_setkey(struct crypto_ablkcipher *cipher, const u8 *key, unsigned int len);
|
|
+int nss_cryptoapi_skcipher_init(struct crypto_skcipher *tfm);
|
|
+void nss_cryptoapi_skcipher_exit(struct crypto_skcipher *tfm);
|
|
+int nss_cryptoapi_ablk_aes_setkey(struct crypto_skcipher *cipher, const u8 *key, unsigned int len);
|
|
+int nss_cryptoapi_3des_cbc_setkey(struct crypto_skcipher *cipher, const u8 *key, unsigned int len);
|
|
|
|
-int nss_cryptoapi_ablk_aes_encrypt(struct ablkcipher_request *req);
|
|
-int nss_cryptoapi_ablk_aes_decrypt(struct ablkcipher_request *req);
|
|
+int nss_cryptoapi_ablk_aes_encrypt(struct skcipher_request *req);
|
|
+int nss_cryptoapi_ablk_aes_decrypt(struct skcipher_request *req);
|
|
|
|
-int nss_cryptoapi_3des_cbc_encrypt(struct ablkcipher_request *req);
|
|
-int nss_cryptoapi_3des_cbc_decrypt(struct ablkcipher_request *req);
|
|
+int nss_cryptoapi_3des_cbc_encrypt(struct skcipher_request *req);
|
|
+int nss_cryptoapi_3des_cbc_decrypt(struct skcipher_request *req);
|
|
|
|
#endif /* __NSS_CRYPTOAPI_PRIVATE_H */
|
|
|
|
--
2.27.0.rc0