/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/list.h>
#include <linux/string.h>
#include <asm/page.h>
#include <asm/errno.h>

/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK		0x000000ff
#define CRYPTO_ALG_TYPE_CIPHER		0x00000001
#define CRYPTO_ALG_TYPE_DIGEST		0x00000002
#define CRYPTO_ALG_TYPE_COMPRESS	0x00000004

/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_MODE_MASK		0x000000ff
#define CRYPTO_TFM_REQ_MASK		0x000fff00
#define CRYPTO_TFM_RES_MASK		0xfff00000

#define CRYPTO_TFM_MODE_ECB		0x00000001
#define CRYPTO_TFM_MODE_CBC		0x00000002
#define CRYPTO_TFM_MODE_CFB		0x00000004
#define CRYPTO_TFM_MODE_CTR		0x00000008

#define CRYPTO_TFM_REQ_WEAK_KEY		0x00000100
#define CRYPTO_TFM_RES_WEAK_KEY		0x00100000
#define CRYPTO_TFM_RES_BAD_KEY_LEN	0x00200000
#define CRYPTO_TFM_RES_BAD_KEY_SCHED	0x00400000
#define CRYPTO_TFM_RES_BAD_BLOCK_LEN	0x00800000
#define CRYPTO_TFM_RES_BAD_FLAGS	0x01000000

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_UNSPEC			0
#define CRYPTO_MAX_ALG_NAME		64

struct scatterlist;

/*
 * Algorithms: modular crypto algorithm implementations, managed
 * via crypto_register_alg() and crypto_unregister_alg().
 */
struct cipher_alg {
	unsigned int cia_min_keysize;
	unsigned int cia_max_keysize;
	int (*cia_setkey)(void *ctx, const u8 *key,
	                  unsigned int keylen, u32 *flags);
	void (*cia_encrypt)(void *ctx, u8 *dst, const u8 *src);
	void (*cia_decrypt)(void *ctx, u8 *dst, const u8 *src);
};

struct digest_alg {
	unsigned int dia_digestsize;
	void (*dia_init)(void *ctx);
	void (*dia_update)(void *ctx, const u8 *data, unsigned int len);
	void (*dia_final)(void *ctx, u8 *out);
	int (*dia_setkey)(void *ctx, const u8 *key,
	                  unsigned int keylen, u32 *flags);
};

struct compress_alg {
	int (*coa_init)(void *ctx);
	void (*coa_exit)(void *ctx);
	int (*coa_compress)(void *ctx, const u8 *src, unsigned int slen,
	                    u8 *dst, unsigned int *dlen);
	int (*coa_decompress)(void *ctx, const u8 *src, unsigned int slen,
	                      u8 *dst, unsigned int *dlen);
};

#define cra_cipher	cra_u.cipher
#define cra_digest	cra_u.digest
#define cra_compress	cra_u.compress

struct crypto_alg {
	struct list_head cra_list;
	u32 cra_flags;
	unsigned int cra_blocksize;
	unsigned int cra_ctxsize;
	const char cra_name[CRYPTO_MAX_ALG_NAME];

	union {
		struct cipher_alg cipher;
		struct digest_alg digest;
		struct compress_alg compress;
	} cra_u;

	struct module *cra_module;
};

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
int crypto_unregister_alg(struct crypto_alg *alg);
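
/*
 * Registration sketch (illustrative only, not part of this interface):
 * a hypothetical cipher fills in struct crypto_alg and registers it from
 * module init.  The example_* names, context structure, key sizes and
 * block size below are assumptions made up for the example.
 *
 *	static struct crypto_alg example_alg = {
 *		.cra_name	= "example",
 *		.cra_flags	= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize	= 16,
 *		.cra_ctxsize	= sizeof(struct example_ctx),
 *		.cra_module	= THIS_MODULE,
 *		.cra_list	= LIST_HEAD_INIT(example_alg.cra_list),
 *		.cra_u		= { .cipher = {
 *			.cia_min_keysize	= 16,
 *			.cia_max_keysize	= 32,
 *			.cia_setkey		= example_setkey,
 *			.cia_encrypt		= example_encrypt,
 *			.cia_decrypt		= example_decrypt } }
 *	};
 *
 *	static int __init example_init(void)
 *	{
 *		return crypto_register_alg(&example_alg);
 *	}
 *
 *	static void __exit example_exit(void)
 *	{
 *		crypto_unregister_alg(&example_alg);
 *	}
 */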

/*
 * Algorithm query interface.
 */
int crypto_alg_available(const char *name, u32 flags);

/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic.  Managed via crypto_alloc_tfm() and
 * crypto_free_tfm(), as well as the various helpers below.
 */
struct crypto_tfm;

struct cipher_tfm {
	void *cit_iv;
	unsigned int cit_ivsize;
	u32 cit_mode;
	int (*cit_setkey)(struct crypto_tfm *tfm,
	                  const u8 *key, unsigned int keylen);
	int (*cit_encrypt)(struct crypto_tfm *tfm,
	                   struct scatterlist *dst,
	                   struct scatterlist *src,
	                   unsigned int nbytes);
	int (*cit_encrypt_iv)(struct crypto_tfm *tfm,
	                      struct scatterlist *dst,
	                      struct scatterlist *src,
	                      unsigned int nbytes, u8 *iv);
	int (*cit_decrypt)(struct crypto_tfm *tfm,
	                   struct scatterlist *dst,
	                   struct scatterlist *src,
	                   unsigned int nbytes);
	int (*cit_decrypt_iv)(struct crypto_tfm *tfm,
	                      struct scatterlist *dst,
	                      struct scatterlist *src,
	                      unsigned int nbytes, u8 *iv);
	void (*cit_xor_block)(u8 *dst, const u8 *src);
};

struct digest_tfm {
	void (*dit_init)(struct crypto_tfm *tfm);
	void (*dit_update)(struct crypto_tfm *tfm,
	                   struct scatterlist *sg, unsigned int nsg);
	void (*dit_final)(struct crypto_tfm *tfm, u8 *out);
	void (*dit_digest)(struct crypto_tfm *tfm, struct scatterlist *sg,
	                   unsigned int nsg, u8 *out);
	int (*dit_setkey)(struct crypto_tfm *tfm,
	                  const u8 *key, unsigned int keylen);
#ifdef CONFIG_CRYPTO_HMAC
	void *dit_hmac_block;
#endif
};

struct compress_tfm {
	int (*cot_compress)(struct crypto_tfm *tfm,
	                    const u8 *src, unsigned int slen,
	                    u8 *dst, unsigned int *dlen);
	int (*cot_decompress)(struct crypto_tfm *tfm,
	                      const u8 *src, unsigned int slen,
	                      u8 *dst, unsigned int *dlen);
};

#define crt_cipher	crt_u.cipher
#define crt_digest	crt_u.digest
#define crt_compress	crt_u.compress

struct crypto_tfm {

	u32 crt_flags;

	union {
		struct cipher_tfm cipher;
		struct digest_tfm digest;
		struct compress_tfm compress;
	} crt_u;

	struct crypto_alg *__crt_alg;
};

/*
 * Transform user interface.
 */

/*
 * crypto_alloc_tfm() will first attempt to locate an already loaded algorithm.
 * If that fails and the kernel supports dynamically loadable modules, it
 * will then attempt to load a module of the same name or alias.  A refcount
 * is grabbed on the algorithm which is then associated with the new transform.
 *
 * crypto_free_tfm() frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
void crypto_free_tfm(struct crypto_tfm *tfm);
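
/*
 * Allocation sketch (illustrative): look a transform up by algorithm
 * name, check the result, and release it when done.  The "des" name and
 * CBC mode are assumptions; any registered algorithm name may be used.
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_tfm("des", CRYPTO_TFM_MODE_CBC);
 *	if (tfm == NULL)
 *		return -ENOENT;
 *
 *	... use the cipher helpers below ...
 *
 *	crypto_free_tfm(tfm);
 */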

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_modname(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;

	if (alg->cra_module)
		return alg->cra_module->name;
	else
		return NULL;
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_min_keysize(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->__crt_alg->cra_cipher.cia_min_keysize;
}

static inline unsigned int crypto_tfm_alg_max_keysize(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->__crt_alg->cra_cipher.cia_max_keysize;
}

static inline unsigned int crypto_tfm_alg_ivsize(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->crt_cipher.cit_ivsize;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_digestsize(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	return tfm->__crt_alg->cra_digest.dia_digestsize;
}

/*
 * API wrappers.
 */
static inline void crypto_digest_init(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	tfm->crt_digest.dit_init(tfm);
}

static inline void crypto_digest_update(struct crypto_tfm *tfm,
                                        struct scatterlist *sg,
                                        unsigned int nsg)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	tfm->crt_digest.dit_update(tfm, sg, nsg);
}

static inline void crypto_digest_final(struct crypto_tfm *tfm, u8 *out)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	tfm->crt_digest.dit_final(tfm, out);
}

static inline void crypto_digest_digest(struct crypto_tfm *tfm,
                                        struct scatterlist *sg,
                                        unsigned int nsg, u8 *out)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	tfm->crt_digest.dit_digest(tfm, sg, nsg, out);
}

static inline int crypto_digest_setkey(struct crypto_tfm *tfm,
                                       const u8 *key, unsigned int keylen)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
	if (tfm->crt_digest.dit_setkey == NULL)
		return -ENOSYS;
	return tfm->crt_digest.dit_setkey(tfm, key, keylen);
}
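
/*
 * Digest sketch (illustrative): hash one linear kernel buffer through a
 * digest transform.  The "md5" name, the buf/len variables and the manual
 * scatterlist field setup are assumptions; scatterlist initialisation is
 * kernel-version dependent, and out must hold crypto_tfm_alg_digestsize()
 * bytes (16 for md5).
 *
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("md5", 0);
 *	struct scatterlist sg;
 *	u8 out[16];
 *
 *	if (tfm == NULL)
 *		return -ENOENT;
 *	sg.page   = virt_to_page(buf);
 *	sg.offset = offset_in_page(buf);
 *	sg.length = len;
 *	crypto_digest_digest(tfm, &sg, 1, out);
 *	crypto_free_tfm(tfm);
 *
 * The same result can be produced incrementally with crypto_digest_init(),
 * repeated crypto_digest_update() calls and a final crypto_digest_final().
 */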

static inline int crypto_cipher_setkey(struct crypto_tfm *tfm,
                                       const u8 *key, unsigned int keylen)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->crt_cipher.cit_setkey(tfm, key, keylen);
}

static inline int crypto_cipher_encrypt(struct crypto_tfm *tfm,
                                        struct scatterlist *dst,
                                        struct scatterlist *src,
                                        unsigned int nbytes)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->crt_cipher.cit_encrypt(tfm, dst, src, nbytes);
}

static inline int crypto_cipher_encrypt_iv(struct crypto_tfm *tfm,
                                           struct scatterlist *dst,
                                           struct scatterlist *src,
                                           unsigned int nbytes, u8 *iv)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	BUG_ON(tfm->crt_cipher.cit_mode == CRYPTO_TFM_MODE_ECB);
	return tfm->crt_cipher.cit_encrypt_iv(tfm, dst, src, nbytes, iv);
}

static inline int crypto_cipher_decrypt(struct crypto_tfm *tfm,
                                        struct scatterlist *dst,
                                        struct scatterlist *src,
                                        unsigned int nbytes)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return tfm->crt_cipher.cit_decrypt(tfm, dst, src, nbytes);
}

static inline int crypto_cipher_decrypt_iv(struct crypto_tfm *tfm,
                                           struct scatterlist *dst,
                                           struct scatterlist *src,
                                           unsigned int nbytes, u8 *iv)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	BUG_ON(tfm->crt_cipher.cit_mode == CRYPTO_TFM_MODE_ECB);
	return tfm->crt_cipher.cit_decrypt_iv(tfm, dst, src, nbytes, iv);
}

static inline void crypto_cipher_set_iv(struct crypto_tfm *tfm,
                                        const u8 *src, unsigned int len)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	memcpy(tfm->crt_cipher.cit_iv, src, len);
}

static inline void crypto_cipher_get_iv(struct crypto_tfm *tfm,
                                        u8 *dst, unsigned int len)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	memcpy(dst, tfm->crt_cipher.cit_iv, len);
}
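
/*
 * Cipher sketch (illustrative): encrypt a block-aligned kernel buffer in
 * place.  The "des" name, CBC mode, and the key/keylen/iv/buf/nbytes
 * variables are assumptions; nbytes must be a multiple of
 * crypto_tfm_alg_blocksize(tfm), and scatterlist setup is version
 * dependent.
 *
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("des", CRYPTO_TFM_MODE_CBC);
 *	struct scatterlist sg;
 *
 *	if (tfm == NULL)
 *		return -ENOENT;
 *	if (crypto_cipher_setkey(tfm, key, keylen))
 *		goto out;
 *	sg.page   = virt_to_page(buf);
 *	sg.offset = offset_in_page(buf);
 *	sg.length = nbytes;
 *	crypto_cipher_set_iv(tfm, iv, crypto_tfm_alg_ivsize(tfm));
 *	crypto_cipher_encrypt(tfm, &sg, &sg, nbytes);
 * out:
 *	crypto_free_tfm(tfm);
 *
 * Alternatively, crypto_cipher_encrypt_iv() passes an explicit iv instead
 * of using the one stored in the transform.
 */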

static inline int crypto_comp_compress(struct crypto_tfm *tfm,
                                       const u8 *src, unsigned int slen,
                                       u8 *dst, unsigned int *dlen)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_COMPRESS);
	return tfm->crt_compress.cot_compress(tfm, src, slen, dst, dlen);
}

static inline int crypto_comp_decompress(struct crypto_tfm *tfm,
                                         const u8 *src, unsigned int slen,
                                         u8 *dst, unsigned int *dlen)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_COMPRESS);
	return tfm->crt_compress.cot_decompress(tfm, src, slen, dst, dlen);
}
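
/*
 * Compression sketch (illustrative): compress one linear buffer with a
 * compression transform.  The "deflate" name and the src/slen/dst/dst_size
 * variables are assumptions; dlen is typically passed in as the destination
 * capacity and updated with the produced length on success.
 *
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("deflate", 0);
 *	unsigned int dlen = dst_size;
 *	int err;
 *
 *	if (tfm == NULL)
 *		return -ENOENT;
 *	err = crypto_comp_compress(tfm, src, slen, dst, &dlen);
 *	crypto_free_tfm(tfm);
 */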

/*
 * HMAC support.
 */
#ifdef CONFIG_CRYPTO_HMAC
void crypto_hmac_init(struct crypto_tfm *tfm, u8 *key, unsigned int *keylen);
void crypto_hmac_update(struct crypto_tfm *tfm,
                        struct scatterlist *sg, unsigned int nsg);
void crypto_hmac_final(struct crypto_tfm *tfm, u8 *key,
                       unsigned int *keylen, u8 *out);
void crypto_hmac(struct crypto_tfm *tfm, u8 *key, unsigned int *keylen,
                 struct scatterlist *sg, unsigned int nsg, u8 *out);
#endif	/* CONFIG_CRYPTO_HMAC */
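
/*
 * HMAC sketch (illustrative): authenticate one buffer with a keyed hash.
 * The "sha1" name and the key/buf/len variables are assumptions; the
 * digest transform is allocated exactly as for a plain digest, and out
 * must hold the digest size (20 bytes for sha1).
 *
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("sha1", 0);
 *	struct scatterlist sg;
 *	unsigned int keylen = sizeof(key);
 *	u8 out[20];
 *
 *	if (tfm == NULL)
 *		return -ENOENT;
 *	sg.page   = virt_to_page(buf);
 *	sg.offset = offset_in_page(buf);
 *	sg.length = len;
 *	crypto_hmac(tfm, key, &keylen, &sg, 1, out);
 *	crypto_free_tfm(tfm);
 */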

#endif	/* _LINUX_CRYPTO_H */