OpenWrt – Blame information for rev 1
?pathlinks?
Rev | Author | Line No. | Line |
---|---|---|---|
1 | office | 1 | From 98e87e3d933b8e504ea41b8857c038d2cd06cddc Mon Sep 17 00:00:00 2001 |
2 | From: Christian Lamparter <chunkeey@gmail.com> |
||
3 | Date: Thu, 19 Apr 2018 18:41:54 +0200 |
||
4 | Subject: [PATCH 5/8] crypto: crypto4xx - add aes-ctr support |
||
5 | |||
6 | This patch adds support for the aes-ctr skcipher. |
||
7 | |||
8 | name : ctr(aes) |
||
9 | driver : ctr-aes-ppc4xx |
||
10 | module : crypto4xx |
||
11 | priority : 300 |
||
12 | refcnt : 1 |
||
13 | selftest : passed |
||
14 | internal : no |
||
15 | type : skcipher |
||
16 | async : yes |
||
17 | blocksize : 16 |
||
18 | min keysize : 16 |
||
19 | max keysize : 32 |
||
20 | ivsize : 16 |
||
21 | chunksize : 16 |
||
22 | walksize : 16 |
||
23 | |||
24 | The hardware uses only the last 32-bits as the counter while the |
||
25 | kernel tests (aes_ctr_enc_tv_template[4] for example) expect that |
||
26 | the whole IV is a counter. To make this work, the driver will |
||
27 | fallback if the counter is going to overflow. |
||
28 | |||
29 | The aead's crypto4xx_setup_fallback() function is renamed to |
||
30 | crypto4xx_aead_setup_fallback. |
||
31 | |||
32 | Signed-off-by: Christian Lamparter <chunkeey@gmail.com> |
||
33 | Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au> |
||
34 | --- |
||
35 | drivers/crypto/amcc/crypto4xx_alg.c | 91 ++++++++++++++++++++++++++-- |
||
36 | drivers/crypto/amcc/crypto4xx_core.c | 37 +++++++++++ |
||
37 | drivers/crypto/amcc/crypto4xx_core.h | 5 ++ |
||
38 | 3 files changed, 127 insertions(+), 6 deletions(-) |
||
39 | |||
40 | --- a/drivers/crypto/amcc/crypto4xx_alg.c |
||
41 | +++ b/drivers/crypto/amcc/crypto4xx_alg.c |
||
42 | @@ -240,6 +240,85 @@ int crypto4xx_rfc3686_decrypt(struct skc |
||
43 | ctx->sa_out, ctx->sa_len, 0); |
||
44 | } |
||
45 | |||
46 | +static int |
||
47 | +crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt) |
||
48 | +{ |
||
49 | + struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req); |
||
50 | + struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher); |
||
51 | + size_t iv_len = crypto_skcipher_ivsize(cipher); |
||
52 | + unsigned int counter = be32_to_cpup((__be32 *)(req->iv + iv_len - 4)); |
||
53 | + unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) / |
||
54 | + AES_BLOCK_SIZE; |
||
55 | + |
||
56 | + /* |
||
57 | + * The hardware uses only the last 32-bits as the counter while the |
||
58 | + * kernel tests (aes_ctr_enc_tv_template[4] for example) expect that |
||
59 | + * the whole IV is a counter. So fallback if the counter is going to |
||
60 | + * overflow. |
||
61 | + */ |
||
62 | + if (counter + nblks < counter) { |
||
63 | + struct skcipher_request *subreq = skcipher_request_ctx(req); |
||
64 | + int ret; |
||
65 | + |
||
66 | + skcipher_request_set_tfm(subreq, ctx->sw_cipher.cipher); |
||
67 | + skcipher_request_set_callback(subreq, req->base.flags, |
||
68 | + NULL, NULL); |
||
69 | + skcipher_request_set_crypt(subreq, req->src, req->dst, |
||
70 | + req->cryptlen, req->iv); |
||
71 | + ret = encrypt ? crypto_skcipher_encrypt(subreq) |
||
72 | + : crypto_skcipher_decrypt(subreq); |
||
73 | + skcipher_request_zero(subreq); |
||
74 | + return ret; |
||
75 | + } |
||
76 | + |
||
77 | + return encrypt ? crypto4xx_encrypt_iv(req) |
||
78 | + : crypto4xx_decrypt_iv(req); |
||
79 | +} |
||
80 | + |
||
81 | +static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx, |
||
82 | + struct crypto_skcipher *cipher, |
||
83 | + const u8 *key, |
||
84 | + unsigned int keylen) |
||
85 | +{ |
||
86 | + int rc; |
||
87 | + |
||
88 | + crypto_skcipher_clear_flags(ctx->sw_cipher.cipher, |
||
89 | + CRYPTO_TFM_REQ_MASK); |
||
90 | + crypto_skcipher_set_flags(ctx->sw_cipher.cipher, |
||
91 | + crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK); |
||
92 | + rc = crypto_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen); |
||
93 | + crypto_skcipher_clear_flags(cipher, CRYPTO_TFM_RES_MASK); |
||
94 | + crypto_skcipher_set_flags(cipher, |
||
95 | + crypto_skcipher_get_flags(ctx->sw_cipher.cipher) & |
||
96 | + CRYPTO_TFM_RES_MASK); |
||
97 | + |
||
98 | + return rc; |
||
99 | +} |
||
100 | + |
||
101 | +int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher, |
||
102 | + const u8 *key, unsigned int keylen) |
||
103 | +{ |
||
104 | + struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher); |
||
105 | + int rc; |
||
106 | + |
||
107 | + rc = crypto4xx_sk_setup_fallback(ctx, cipher, key, keylen); |
||
108 | + if (rc) |
||
109 | + return rc; |
||
110 | + |
||
111 | + return crypto4xx_setkey_aes(cipher, key, keylen, |
||
112 | + CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB); |
||
113 | +} |
||
114 | + |
||
115 | +int crypto4xx_encrypt_ctr(struct skcipher_request *req) |
||
116 | +{ |
||
117 | + return crypto4xx_ctr_crypt(req, true); |
||
118 | +} |
||
119 | + |
||
120 | +int crypto4xx_decrypt_ctr(struct skcipher_request *req) |
||
121 | +{ |
||
122 | + return crypto4xx_ctr_crypt(req, false); |
||
123 | +} |
||
124 | + |
||
125 | static inline bool crypto4xx_aead_need_fallback(struct aead_request *req, |
||
126 | bool is_ccm, bool decrypt) |
||
127 | { |
||
128 | @@ -282,10 +361,10 @@ static int crypto4xx_aead_fallback(struc |
||
129 | crypto_aead_encrypt(subreq); |
||
130 | } |
||
131 | |||
132 | -static int crypto4xx_setup_fallback(struct crypto4xx_ctx *ctx, |
||
133 | - struct crypto_aead *cipher, |
||
134 | - const u8 *key, |
||
135 | - unsigned int keylen) |
||
136 | +static int crypto4xx_aead_setup_fallback(struct crypto4xx_ctx *ctx, |
||
137 | + struct crypto_aead *cipher, |
||
138 | + const u8 *key, |
||
139 | + unsigned int keylen) |
||
140 | { |
||
141 | int rc; |
||
142 | |||
143 | @@ -313,7 +392,7 @@ int crypto4xx_setkey_aes_ccm(struct cryp |
||
144 | struct dynamic_sa_ctl *sa; |
||
145 | int rc = 0; |
||
146 | |||
147 | - rc = crypto4xx_setup_fallback(ctx, cipher, key, keylen); |
||
148 | + rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen); |
||
149 | if (rc) |
||
150 | return rc; |
||
151 | |||
152 | @@ -472,7 +551,7 @@ int crypto4xx_setkey_aes_gcm(struct cryp |
||
153 | return -EINVAL; |
||
154 | } |
||
155 | |||
156 | - rc = crypto4xx_setup_fallback(ctx, cipher, key, keylen); |
||
157 | + rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen); |
||
158 | if (rc) |
||
159 | return rc; |
||
160 | |||
161 | --- a/drivers/crypto/amcc/crypto4xx_core.c |
||
162 | +++ b/drivers/crypto/amcc/crypto4xx_core.c |
||
163 | @@ -941,6 +941,19 @@ static int crypto4xx_sk_init(struct cryp |
||
164 | struct crypto4xx_alg *amcc_alg; |
||
165 | struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(sk); |
||
166 | |||
167 | + if (alg->base.cra_flags & CRYPTO_ALG_NEED_FALLBACK) { |
||
168 | + ctx->sw_cipher.cipher = |
||
169 | + crypto_alloc_skcipher(alg->base.cra_name, 0, |
||
170 | + CRYPTO_ALG_NEED_FALLBACK | |
||
171 | + CRYPTO_ALG_ASYNC); |
||
172 | + if (IS_ERR(ctx->sw_cipher.cipher)) |
||
173 | + return PTR_ERR(ctx->sw_cipher.cipher); |
||
174 | + |
||
175 | + crypto_skcipher_set_reqsize(sk, |
||
176 | + sizeof(struct skcipher_request) + 32 + |
||
177 | + crypto_skcipher_reqsize(ctx->sw_cipher.cipher)); |
||
178 | + } |
||
179 | + |
||
180 | amcc_alg = container_of(alg, struct crypto4xx_alg, alg.u.cipher); |
||
181 | crypto4xx_ctx_init(amcc_alg, ctx); |
||
182 | return 0; |
||
183 | @@ -956,6 +969,8 @@ static void crypto4xx_sk_exit(struct cry |
||
184 | struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(sk); |
||
185 | |||
186 | crypto4xx_common_exit(ctx); |
||
187 | + if (ctx->sw_cipher.cipher) |
||
188 | + crypto_free_skcipher(ctx->sw_cipher.cipher); |
||
189 | } |
||
190 | |||
191 | static int crypto4xx_aead_init(struct crypto_aead *tfm) |
||
192 | @@ -1145,6 +1160,28 @@ static struct crypto4xx_alg_common crypt |
||
193 | .init = crypto4xx_sk_init, |
||
194 | .exit = crypto4xx_sk_exit, |
||
195 | } }, |
||
196 | + { .type = CRYPTO_ALG_TYPE_SKCIPHER, .u.cipher = { |
||
197 | + .base = { |
||
198 | + .cra_name = "ctr(aes)", |
||
199 | + .cra_driver_name = "ctr-aes-ppc4xx", |
||
200 | + .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY, |
||
201 | + .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | |
||
202 | + CRYPTO_ALG_NEED_FALLBACK | |
||
203 | + CRYPTO_ALG_ASYNC | |
||
204 | + CRYPTO_ALG_KERN_DRIVER_ONLY, |
||
205 | + .cra_blocksize = AES_BLOCK_SIZE, |
||
206 | + .cra_ctxsize = sizeof(struct crypto4xx_ctx), |
||
207 | + .cra_module = THIS_MODULE, |
||
208 | + }, |
||
209 | + .min_keysize = AES_MIN_KEY_SIZE, |
||
210 | + .max_keysize = AES_MAX_KEY_SIZE, |
||
211 | + .ivsize = AES_IV_SIZE, |
||
212 | + .setkey = crypto4xx_setkey_aes_ctr, |
||
213 | + .encrypt = crypto4xx_encrypt_ctr, |
||
214 | + .decrypt = crypto4xx_decrypt_ctr, |
||
215 | + .init = crypto4xx_sk_init, |
||
216 | + .exit = crypto4xx_sk_exit, |
||
217 | + } }, |
||
218 | { .type = CRYPTO_ALG_TYPE_SKCIPHER, .u.cipher = { |
||
219 | .base = { |
||
220 | .cra_name = "rfc3686(ctr(aes))", |
||
221 | --- a/drivers/crypto/amcc/crypto4xx_core.h |
||
222 | +++ b/drivers/crypto/amcc/crypto4xx_core.h |
||
223 | @@ -128,6 +128,7 @@ struct crypto4xx_ctx { |
||
224 | __le32 iv_nonce; |
||
225 | u32 sa_len; |
||
226 | union { |
||
227 | + struct crypto_skcipher *cipher; |
||
228 | struct crypto_aead *aead; |
||
229 | } sw_cipher; |
||
230 | }; |
||
231 | @@ -163,12 +164,16 @@ int crypto4xx_setkey_aes_cbc(struct cryp |
||
232 | const u8 *key, unsigned int keylen); |
||
233 | int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher, |
||
234 | const u8 *key, unsigned int keylen); |
||
235 | +int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher, |
||
236 | + const u8 *key, unsigned int keylen); |
||
237 | int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher, |
||
238 | const u8 *key, unsigned int keylen); |
||
239 | int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher, |
||
240 | const u8 *key, unsigned int keylen); |
||
241 | int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher, |
||
242 | const u8 *key, unsigned int keylen); |
||
243 | +int crypto4xx_encrypt_ctr(struct skcipher_request *req); |
||
244 | +int crypto4xx_decrypt_ctr(struct skcipher_request *req); |
||
245 | int crypto4xx_encrypt_iv(struct skcipher_request *req); |
||
246 | int crypto4xx_decrypt_iv(struct skcipher_request *req); |
||
247 | int crypto4xx_encrypt_noiv(struct skcipher_request *req); |