source: EcnlProtoTool/trunk/openssl-1.1.0e/crypto/evp/e_aes.c@ 331

Last change on this file since 331 was 331, checked in by coas-nagasima, 6 years ago

prototoolに関連するプロジェクトをnewlibからmuslを使うよう変更・更新
ntshellをnewlibの下位の実装から、muslのsyscallの実装に変更・更新
以下のOSSをアップデート
・mruby-1.3.0
・musl-1.1.18
・onigmo-6.1.3
・tcc-0.9.27
以下のOSSを追加
・openssl-1.1.0e
・curl-7.57.0
・zlib-1.2.11
以下のmrbgemsを追加
・iij/mruby-digest
・iij/mruby-env
・iij/mruby-errno
・iij/mruby-iijson
・iij/mruby-ipaddr
・iij/mruby-mock
・iij/mruby-require
・iij/mruby-tls-openssl

  • Property svn:eol-style set to native
  • Property svn:mime-type set to text/x-csrc
File size: 96.1 KB
Line 
1/*
2 * Copyright 2001-2016 The OpenSSL Project Authors. All Rights Reserved.
3 *
4 * Licensed under the OpenSSL license (the "License"). You may not use
5 * this file except in compliance with the License. You can obtain a copy
6 * in the file LICENSE in the source distribution or at
7 * https://www.openssl.org/source/license.html
8 */
9
10#include <openssl/opensslconf.h>
11#include <openssl/crypto.h>
12#include <openssl/evp.h>
13#include <openssl/err.h>
14#include <string.h>
15#include <assert.h>
16#include <openssl/aes.h>
17#include "internal/evp_int.h"
18#include "../modes/modes_lcl.h"
19#include <openssl/rand.h>
20#include "evp_locl.h"
#include <stdint.h>
/*
 * NOTE(review): a local "typedef unsigned short uint16_t;" used to follow
 * here.  <stdint.h> (included above) already provides uint16_t, and
 * redefining a typedef is a constraint violation before C11 — and wrong on
 * any platform whose uint16_t is not unsigned short — so the duplicate
 * typedef has been removed.
 */
23
/* Per-context state for the plain (non-AEAD) AES modes. */
typedef struct {
    union {
        double align;           /* present only to align the key schedule */
        AES_KEY ks;
    } ks;                       /* AES key schedule */
    block128_f block;           /* single-block encrypt/decrypt routine */
    union {
        cbc128_f cbc;           /* accelerated CBC bulk routine, or NULL */
        ctr128_f ctr;           /* accelerated CTR bulk routine */
    } stream;                   /* optional bulk cipher for the active mode */
} EVP_AES_KEY;
35
/* Per-context state for AES-GCM. */
typedef struct {
    union {
        double align;           /* present only to align the key schedule */
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;         /* low-level GCM state */
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;                 /* authentication tag length */
    int iv_gen;                 /* It is OK to generate IVs */
    int tls_aad_len;            /* TLS AAD length */
    ctr128_f ctr;               /* accelerated CTR routine, or NULL */
} EVP_AES_GCM_CTX;
51
/* Per-context state for AES-XTS (two independent key schedules). */
typedef struct {
    union {
        double align;           /* present only to align the key schedules */
        AES_KEY ks;
    } ks1, ks2;                 /* AES key schedules to use */
    XTS128_CONTEXT xts;         /* low-level XTS state */
    /* Optional accelerated bulk XTS routine, or NULL. */
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);
} EVP_AES_XTS_CTX;
63
/* Per-context state for AES-CCM. */
typedef struct {
    union {
        double align;           /* present only to align the key schedule */
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    int tls_aad_len;            /* TLS AAD length */
    CCM128_CONTEXT ccm;         /* low-level CCM state */
    ccm128_f str;               /* accelerated bulk CCM routine, or NULL */
} EVP_AES_CCM_CTX;
78
#ifndef OPENSSL_NO_OCB
/* Per-context state for AES-OCB (needs both key schedules). */
typedef struct {
    union {
        double align;           /* present only to align the key schedule */
        AES_KEY ks;
    } ksenc;                    /* AES key schedule to use for encryption */
    union {
        double align;
        AES_KEY ks;
    } ksdec;                    /* AES key schedule to use for decryption */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    OCB128_CONTEXT ocb;         /* low-level OCB state */
    unsigned char *iv;          /* Temporary IV store */
    unsigned char tag[16];      /* authentication tag buffer */
    unsigned char data_buf[16]; /* Store partial data blocks */
    unsigned char aad_buf[16];  /* Store partial AAD blocks */
    int data_buf_len;           /* bytes currently buffered in data_buf */
    int aad_buf_len;            /* bytes currently buffered in aad_buf */
    int ivlen;                  /* IV length */
    int taglen;                 /* tag length */
} EVP_AES_OCB_CTX;
#endif
102
/*
 * 1/16th of the size_t range.  NOTE(review): presumably an upper bound on
 * the chunk processed per call in the bit-oriented CFB1 path, keeping the
 * bit<->byte length conversions from overflowing — confirm against
 * aes_cfb1_cipher (not visible in this chunk).
 */
#define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
104
105#ifdef VPAES_ASM
106int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
107 AES_KEY *key);
108int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
109 AES_KEY *key);
110
111void vpaes_encrypt(const unsigned char *in, unsigned char *out,
112 const AES_KEY *key);
113void vpaes_decrypt(const unsigned char *in, unsigned char *out,
114 const AES_KEY *key);
115
116void vpaes_cbc_encrypt(const unsigned char *in,
117 unsigned char *out,
118 size_t length,
119 const AES_KEY *key, unsigned char *ivec, int enc);
120#endif
121#ifdef BSAES_ASM
122void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
123 size_t length, const AES_KEY *key,
124 unsigned char ivec[16], int enc);
125void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
126 size_t len, const AES_KEY *key,
127 const unsigned char ivec[16]);
128void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
129 size_t len, const AES_KEY *key1,
130 const AES_KEY *key2, const unsigned char iv[16]);
131void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
132 size_t len, const AES_KEY *key1,
133 const AES_KEY *key2, const unsigned char iv[16]);
134#endif
135#ifdef AES_CTR_ASM
136void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
137 size_t blocks, const AES_KEY *key,
138 const unsigned char ivec[AES_BLOCK_SIZE]);
139#endif
140#ifdef AES_XTS_ASM
141void AES_xts_encrypt(const char *inp, char *out, size_t len,
142 const AES_KEY *key1, const AES_KEY *key2,
143 const unsigned char iv[16]);
144void AES_xts_decrypt(const char *inp, char *out, size_t len,
145 const AES_KEY *key1, const AES_KEY *key2,
146 const unsigned char iv[16]);
147#endif
148
149#if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
150# include "ppc_arch.h"
151# ifdef VPAES_ASM
152# define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
153# endif
154# define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207)
155# define HWAES_set_encrypt_key aes_p8_set_encrypt_key
156# define HWAES_set_decrypt_key aes_p8_set_decrypt_key
157# define HWAES_encrypt aes_p8_encrypt
158# define HWAES_decrypt aes_p8_decrypt
159# define HWAES_cbc_encrypt aes_p8_cbc_encrypt
160# define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
161# define HWAES_xts_encrypt aes_p8_xts_encrypt
162# define HWAES_xts_decrypt aes_p8_xts_decrypt
163#endif
164
165#if defined(AES_ASM) && !defined(I386_ONLY) && ( \
166 ((defined(__i386) || defined(__i386__) || \
167 defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2))|| \
168 defined(__x86_64) || defined(__x86_64__) || \
169 defined(_M_AMD64) || defined(_M_X64) )
170
171extern unsigned int OPENSSL_ia32cap_P[];
172
173# ifdef VPAES_ASM
174# define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
175# endif
176# ifdef BSAES_ASM
177# define BSAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
178# endif
179/*
180 * AES-NI section
181 */
182# define AESNI_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(57-32)))
183
184int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
185 AES_KEY *key);
186int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
187 AES_KEY *key);
188
189void aesni_encrypt(const unsigned char *in, unsigned char *out,
190 const AES_KEY *key);
191void aesni_decrypt(const unsigned char *in, unsigned char *out,
192 const AES_KEY *key);
193
194void aesni_ecb_encrypt(const unsigned char *in,
195 unsigned char *out,
196 size_t length, const AES_KEY *key, int enc);
197void aesni_cbc_encrypt(const unsigned char *in,
198 unsigned char *out,
199 size_t length,
200 const AES_KEY *key, unsigned char *ivec, int enc);
201
202void aesni_ctr32_encrypt_blocks(const unsigned char *in,
203 unsigned char *out,
204 size_t blocks,
205 const void *key, const unsigned char *ivec);
206
207void aesni_xts_encrypt(const unsigned char *in,
208 unsigned char *out,
209 size_t length,
210 const AES_KEY *key1, const AES_KEY *key2,
211 const unsigned char iv[16]);
212
213void aesni_xts_decrypt(const unsigned char *in,
214 unsigned char *out,
215 size_t length,
216 const AES_KEY *key1, const AES_KEY *key2,
217 const unsigned char iv[16]);
218
219void aesni_ccm64_encrypt_blocks(const unsigned char *in,
220 unsigned char *out,
221 size_t blocks,
222 const void *key,
223 const unsigned char ivec[16],
224 unsigned char cmac[16]);
225
226void aesni_ccm64_decrypt_blocks(const unsigned char *in,
227 unsigned char *out,
228 size_t blocks,
229 const void *key,
230 const unsigned char ivec[16],
231 unsigned char cmac[16]);
232
233# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
234size_t aesni_gcm_encrypt(const unsigned char *in,
235 unsigned char *out,
236 size_t len,
237 const void *key, unsigned char ivec[16], u64 *Xi);
238# define AES_gcm_encrypt aesni_gcm_encrypt
239size_t aesni_gcm_decrypt(const unsigned char *in,
240 unsigned char *out,
241 size_t len,
242 const void *key, unsigned char ivec[16], u64 *Xi);
243# define AES_gcm_decrypt aesni_gcm_decrypt
244void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
245 size_t len);
246# define AES_GCM_ASM(gctx) (gctx->ctr==aesni_ctr32_encrypt_blocks && \
247 gctx->gcm.ghash==gcm_ghash_avx)
248# define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
249 gctx->gcm.ghash==gcm_ghash_avx)
250# undef AES_GCM_ASM2 /* minor size optimization */
251# endif
252
253static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
254 const unsigned char *iv, int enc)
255{
256 int ret, mode;
257 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
258
259 mode = EVP_CIPHER_CTX_mode(ctx);
260 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
261 && !enc) {
262 ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
263 &dat->ks.ks);
264 dat->block = (block128_f) aesni_decrypt;
265 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
266 (cbc128_f) aesni_cbc_encrypt : NULL;
267 } else {
268 ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
269 &dat->ks.ks);
270 dat->block = (block128_f) aesni_encrypt;
271 if (mode == EVP_CIPH_CBC_MODE)
272 dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
273 else if (mode == EVP_CIPH_CTR_MODE)
274 dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
275 else
276 dat->stream.cbc = NULL;
277 }
278
279 if (ret < 0) {
280 EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
281 return 0;
282 }
283
284 return 1;
285}
286
287static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
288 const unsigned char *in, size_t len)
289{
290 aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
291 EVP_CIPHER_CTX_iv_noconst(ctx),
292 EVP_CIPHER_CTX_encrypting(ctx));
293
294 return 1;
295}
296
297static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
298 const unsigned char *in, size_t len)
299{
300 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
301
302 if (len < bl)
303 return 1;
304
305 aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
306 EVP_CIPHER_CTX_encrypting(ctx));
307
308 return 1;
309}
310
311# define aesni_ofb_cipher aes_ofb_cipher
312static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
313 const unsigned char *in, size_t len);
314
315# define aesni_cfb_cipher aes_cfb_cipher
316static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
317 const unsigned char *in, size_t len);
318
319# define aesni_cfb8_cipher aes_cfb8_cipher
320static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
321 const unsigned char *in, size_t len);
322
323# define aesni_cfb1_cipher aes_cfb1_cipher
324static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
325 const unsigned char *in, size_t len);
326
327# define aesni_ctr_cipher aes_ctr_cipher
328static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
329 const unsigned char *in, size_t len);
330
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    /*
     * Initialise the AES-NI GCM context.  key and/or iv may be NULL so
     * that they can be supplied across separate EVP_CipherInit calls;
     * key_set/iv_set in gctx record what has arrived so far.  Always
     * returns 1.
     */
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;               /* nothing to do yet */
    if (key) {
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        /*
         * If we have an iv can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);  /* stash until the key arrives */
        gctx->iv_set = 1;
        gctx->iv_gen = 0;       /* an explicit IV disables IV generation */
    }
    return 1;
}
363
364# define aesni_gcm_cipher aes_gcm_cipher
365static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
366 const unsigned char *in, size_t len);
367
static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    /*
     * Initialise the AES-NI XTS context.  The EVP key is two AES keys
     * concatenated, so each half is key_length*4 bits.  key and/or iv may
     * be NULL and supplied in separate calls.  Always returns 1.
     */
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
    if (!iv && !key)
        return 1;

    if (key) {
        /* key_len is two AES keys */
        if (enc) {
            aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                  &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_encrypt;
            xctx->stream = aesni_xts_encrypt;
        } else {
            aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                  &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_decrypt;
            xctx->stream = aesni_xts_decrypt;
        }

        /* Second half of the key: tweak schedule, always forward. */
        aesni_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                              EVP_CIPHER_CTX_key_length(ctx) * 4,
                              &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aesni_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}
404
405# define aesni_xts_cipher aes_xts_cipher
406static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
407 const unsigned char *in, size_t len);
408
409static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
410 const unsigned char *iv, int enc)
411{
412 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
413 if (!iv && !key)
414 return 1;
415 if (key) {
416 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
417 &cctx->ks.ks);
418 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
419 &cctx->ks, (block128_f) aesni_encrypt);
420 cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
421 (ccm128_f) aesni_ccm64_decrypt_blocks;
422 cctx->key_set = 1;
423 }
424 if (iv) {
425 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
426 cctx->iv_set = 1;
427 }
428 return 1;
429}
430
431# define aesni_ccm_cipher aes_ccm_cipher
432static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
433 const unsigned char *in, size_t len);
434
435# ifndef OPENSSL_NO_OCB
436void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
437 size_t blocks, const void *key,
438 size_t start_block_num,
439 unsigned char offset_i[16],
440 const unsigned char L_[][16],
441 unsigned char checksum[16]);
442void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out,
443 size_t blocks, const void *key,
444 size_t start_block_num,
445 unsigned char offset_i[16],
446 const unsigned char L_[][16],
447 unsigned char checksum[16]);
448
static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    /*
     * Initialise the AES-NI OCB context.  key and/or iv may be NULL and
     * supplied across separate calls; key_set/iv_set track progress.
     * Returns 1 on success, 0 if the low-level OCB init or IV setup fails.
     */
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &octx->ksenc.ks);
            aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aesni_encrypt,
                                    (block128_f) aesni_decrypt,
                                    enc ? aesni_ocb_encrypt
                                        : aesni_ocb_decrypt))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);  /* stash until the key arrives */
        octx->iv_set = 1;
    }
    return 1;
}
498
499# define aesni_ocb_cipher aes_ocb_cipher
500static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
501 const unsigned char *in, size_t len);
502# endif /* OPENSSL_NO_OCB */
503
/*
 * Emit, for one key length / mode pair, an AESNI-accelerated EVP_CIPHER
 * table, a generic software one, and the EVP_aes_KEYLEN_mode() accessor
 * that selects between them at run time via AESNI_CAPABLE.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aesni_init_key, \
        aesni_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }

/*
 * Same idea for the AEAD/custom modes (GCM, CCM, XTS, OCB, ...): these
 * carry their own init/cleanup/ctrl handlers and mode-specific context.
 * XTS keys are double length, hence the 2x key-length factor.
 */
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aesni_##mode##_init_key, \
        aesni_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
546
547#elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))
548
549# include "sparc_arch.h"
550
551extern unsigned int OPENSSL_sparcv9cap_P[];
552
553/*
554 * Initial Fujitsu SPARC64 X support
555 */
556# define HWAES_CAPABLE (OPENSSL_sparcv9cap_P[0] & SPARCV9_FJAESX)
557# define HWAES_set_encrypt_key aes_fx_set_encrypt_key
558# define HWAES_set_decrypt_key aes_fx_set_decrypt_key
559# define HWAES_encrypt aes_fx_encrypt
560# define HWAES_decrypt aes_fx_decrypt
561# define HWAES_cbc_encrypt aes_fx_cbc_encrypt
562# define HWAES_ctr32_encrypt_blocks aes_fx_ctr32_encrypt_blocks
563
564# define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES)
565
566void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
567void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
568void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
569 const AES_KEY *key);
570void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
571 const AES_KEY *key);
572/*
573 * Key-length specific subroutines were chosen for following reason.
574 * Each SPARC T4 core can execute up to 8 threads which share core's
575 * resources. Loading as much key material to registers allows to
576 * minimize references to shared memory interface, as well as amount
577 * of instructions in inner loops [much needed on T4]. But then having
578 * non-key-length specific routines would require conditional branches
579 * either in inner loops or on subroutines' entries. Former is hardly
580 * acceptable, while latter means code size increase to size occupied
581 * by multiple key-length specific subroutines, so why fight?
582 */
583void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
584 size_t len, const AES_KEY *key,
585 unsigned char *ivec);
586void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
587 size_t len, const AES_KEY *key,
588 unsigned char *ivec);
589void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
590 size_t len, const AES_KEY *key,
591 unsigned char *ivec);
592void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
593 size_t len, const AES_KEY *key,
594 unsigned char *ivec);
595void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
596 size_t len, const AES_KEY *key,
597 unsigned char *ivec);
598void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
599 size_t len, const AES_KEY *key,
600 unsigned char *ivec);
601void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
602 size_t blocks, const AES_KEY *key,
603 unsigned char *ivec);
604void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
605 size_t blocks, const AES_KEY *key,
606 unsigned char *ivec);
607void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
608 size_t blocks, const AES_KEY *key,
609 unsigned char *ivec);
610void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
611 size_t blocks, const AES_KEY *key1,
612 const AES_KEY *key2, const unsigned char *ivec);
613void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
614 size_t blocks, const AES_KEY *key1,
615 const AES_KEY *key2, const unsigned char *ivec);
616void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
617 size_t blocks, const AES_KEY *key1,
618 const AES_KEY *key2, const unsigned char *ivec);
619void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
620 size_t blocks, const AES_KEY *key1,
621 const AES_KEY *key2, const unsigned char *ivec);
622
static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                           const unsigned char *iv, int enc)
{
    /*
     * Install a SPARC T4 key schedule and select the key-length specific
     * bulk routine for the current mode (the T4 routines are specialised
     * per key length).  Returns 1 on success, 0 (with an EVP error) if
     * the key length is not 128/192/256 bits.
     */
    int ret, mode, bits;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
    /* ECB/CBC decryption needs the inverse key schedule. */
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = 0;
        aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_decrypt;
        switch (bits) {
        case 128:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes128_t4_cbc_decrypt : NULL;
            break;
        case 192:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes192_t4_cbc_decrypt : NULL;
            break;
        case 256:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes256_t4_cbc_decrypt : NULL;
            break;
        default:
            ret = -1;           /* unsupported key length */
        }
    } else {
        /* All other cases run on the forward schedule. */
        ret = 0;
        aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_encrypt;
        switch (bits) {
        case 128:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 192:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 256:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        default:
            ret = -1;           /* unsupported key length */
        }
    }

    if (ret < 0) {
        EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
693
694# define aes_t4_cbc_cipher aes_cbc_cipher
695static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
696 const unsigned char *in, size_t len);
697
698# define aes_t4_ecb_cipher aes_ecb_cipher
699static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
700 const unsigned char *in, size_t len);
701
702# define aes_t4_ofb_cipher aes_ofb_cipher
703static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
704 const unsigned char *in, size_t len);
705
706# define aes_t4_cfb_cipher aes_cfb_cipher
707static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
708 const unsigned char *in, size_t len);
709
710# define aes_t4_cfb8_cipher aes_cfb8_cipher
711static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
712 const unsigned char *in, size_t len);
713
714# define aes_t4_cfb1_cipher aes_cfb1_cipher
715static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
716 const unsigned char *in, size_t len);
717
718# define aes_t4_ctr_cipher aes_ctr_cipher
719static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
720 const unsigned char *in, size_t len);
721
static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    /*
     * Initialise the SPARC T4 GCM context.  key and/or iv may be NULL so
     * they can be supplied in separate calls; key_set/iv_set track what
     * has been seen.  Returns 1 on success, 0 for an unsupported key size.
     */
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) aes_t4_encrypt);
        /* Pick the key-length specific CTR routine. */
        switch (bits) {
        case 128:
            gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            break;
        case 192:
            gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            break;
        case 256:
            gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            break;
        default:
            return 0;
        }
        /*
         * If we have an iv can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);  /* stash until the key arrives */
        gctx->iv_set = 1;
        gctx->iv_gen = 0;       /* an explicit IV disables IV generation */
    }
    return 1;
}
767
768# define aes_t4_gcm_cipher aes_gcm_cipher
769static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
770 const unsigned char *in, size_t len);
771
772static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
773 const unsigned char *iv, int enc)
774{
775 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
776 if (!iv && !key)
777 return 1;
778
779 if (key) {
780 int bits = EVP_CIPHER_CTX_key_length(ctx) * 4;
781 xctx->stream = NULL;
782 /* key_len is two AES keys */
783 if (enc) {
784 aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
785 xctx->xts.block1 = (block128_f) aes_t4_encrypt;
786 switch (bits) {
787 case 128:
788 xctx->stream = aes128_t4_xts_encrypt;
789 break;
790 case 256:
791 xctx->stream = aes256_t4_xts_encrypt;
792 break;
793 default:
794 return 0;
795 }
796 } else {
797 aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
798 &xctx->ks1.ks);
799 xctx->xts.block1 = (block128_f) aes_t4_decrypt;
800 switch (bits) {
801 case 128:
802 xctx->stream = aes128_t4_xts_decrypt;
803 break;
804 case 256:
805 xctx->stream = aes256_t4_xts_decrypt;
806 break;
807 default:
808 return 0;
809 }
810 }
811
812 aes_t4_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
813 EVP_CIPHER_CTX_key_length(ctx) * 4,
814 &xctx->ks2.ks);
815 xctx->xts.block2 = (block128_f) aes_t4_encrypt;
816
817 xctx->xts.key1 = &xctx->ks1;
818 }
819
820 if (iv) {
821 xctx->xts.key2 = &xctx->ks2;
822 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
823 }
824
825 return 1;
826}
827
828# define aes_t4_xts_cipher aes_xts_cipher
829static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
830 const unsigned char *in, size_t len);
831
832static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
833 const unsigned char *iv, int enc)
834{
835 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
836 if (!iv && !key)
837 return 1;
838 if (key) {
839 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
840 aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
841 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
842 &cctx->ks, (block128_f) aes_t4_encrypt);
843 cctx->str = NULL;
844 cctx->key_set = 1;
845 }
846 if (iv) {
847 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
848 cctx->iv_set = 1;
849 }
850 return 1;
851}
852
853# define aes_t4_ccm_cipher aes_ccm_cipher
854static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
855 const unsigned char *in, size_t len);
856
857# ifndef OPENSSL_NO_OCB
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    /*
     * Initialise the SPARC T4 OCB context.  key and/or iv may be NULL and
     * supplied across separate calls; key_set/iv_set track progress.  No
     * bulk OCB routine exists for T4 (NULL is passed to ocb128_init).
     * Returns 1 on success, 0 if low-level OCB init or IV setup fails.
     */
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   &octx->ksenc.ks);
            aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aes_t4_encrypt,
                                    (block128_f) aes_t4_decrypt,
                                    NULL))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);  /* stash until the key arrives */
        octx->iv_set = 1;
    }
    return 1;
}
906
907# define aes_t4_ocb_cipher aes_ocb_cipher
908static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
909 const unsigned char *in, size_t len);
910# endif /* OPENSSL_NO_OCB */
911
/*
 * Emit, for one key length / mode pair, a SPARC T4 EVP_CIPHER table, a
 * generic software one, and the EVP_aes_KEYLEN_mode() accessor that
 * selects between them at run time via SPARC_AES_CAPABLE.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_init_key, \
        aes_t4_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }

/*
 * Same idea for the AEAD/custom modes; XTS keys are double length, hence
 * the 2x key-length factor.
 */
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_##mode##_init_key, \
        aes_t4_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
954
955#else
956
/*
 * Fallback (no hardware acceleration): emit only the generic software
 * EVP_CIPHER table and its EVP_aes_KEYLEN_mode() accessor.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }

/*
 * Fallback for the AEAD/custom modes; XTS keys are double length, hence
 * the 2x key-length factor.
 */
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }
981
982#endif
983
/*
 * ARM/AArch64: map the generic BSAES/VPAES/HWAES capability macros onto
 * run-time NEON / ARMv8 Crypto Extension detection (OPENSSL_armcap_P),
 * and route HWAES entry points to the aes_v8_* assembler routines.
 */
#if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
# include "arm_arch.h"
# if __ARM_MAX_ARCH__>=7
#  if defined(BSAES_ASM)
#   define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
#  endif
#  if defined(VPAES_ASM)
#   define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
#  endif
#  define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
#  define HWAES_set_encrypt_key aes_v8_set_encrypt_key
#  define HWAES_set_decrypt_key aes_v8_set_decrypt_key
#  define HWAES_encrypt aes_v8_encrypt
#  define HWAES_decrypt aes_v8_decrypt
#  define HWAES_cbc_encrypt aes_v8_cbc_encrypt
#  define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
# endif
#endif
1002
#if defined(HWAES_CAPABLE)
/*
 * Prototypes for the platform's hardware-AES assembler routines; the
 * actual symbols are selected by the HWAES_* macros above.  The key
 * setup functions mirror AES_set_*_key and return <0 on failure.
 */
int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
                          AES_KEY *key);
int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
                          AES_KEY *key);
void HWAES_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void HWAES_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char *ivec, const int enc);
void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
void HWAES_xts_encrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void HWAES_xts_decrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
#endif
1025
/*
 * Instantiate all seven classic AES modes (CBC/ECB/OFB/CFB128/CFB1/
 * CFB8/CTR) for one key length in a single macro invocation.
 */
#define BLOCK_CIPHER_generic_pack(nid,keylen,flags)             \
        BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)     \
        BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)      \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)   \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)   \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags)       \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags)       \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
1034
/*
 * Key schedule setup for the classic AES modes.  Picks the fastest
 * implementation compiled in and detected at run time, in priority
 * order HWAES > BSAES > VPAES > portable C, and records the matching
 * block function and (where available) a whole-stream CBC/CTR routine.
 * ECB/CBC decryption needs the inverse key schedule; every other
 * mode/direction uses the forward (encrypt) schedule.
 * Returns 1 on success, 0 (with EVPerr) if key expansion failed.
 */
static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                        const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
#ifdef HWAES_CAPABLE
        if (HWAES_CAPABLE) {
            ret = HWAES_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) HWAES_decrypt;
            dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
# endif
        } else
#endif
#ifdef BSAES_CAPABLE
        /* bit-sliced AES only accelerates CBC decrypt, not ECB */
        if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
            ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
        } else
#endif
#ifdef VPAES_CAPABLE
        if (VPAES_CAPABLE) {
            ret = vpaes_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) vpaes_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) vpaes_cbc_encrypt : NULL;
        } else
#endif
        {
            ret = AES_set_decrypt_key(key,
                                      EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) AES_cbc_encrypt : NULL;
        }
    } else
#ifdef HWAES_CAPABLE
    if (HWAES_CAPABLE) {
        ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) HWAES_encrypt;
        dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
        else
# endif
# ifdef HWAES_ctr32_encrypt_blocks
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
        else
# endif
            (void)0;            /* terminate potentially open 'else' */
    } else
#endif
#ifdef BSAES_CAPABLE
    /* bit-sliced AES only accelerates CTR in the encrypt path */
    if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
        ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
    } else
#endif
#ifdef VPAES_CAPABLE
    if (VPAES_CAPABLE) {
        ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) vpaes_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) vpaes_cbc_encrypt : NULL;
    } else
#endif
    {
        ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) AES_cbc_encrypt : NULL;
#ifdef AES_CTR_ASM
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
#endif
    }

    if (ret < 0) {
        EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
1139
1140static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1141 const unsigned char *in, size_t len)
1142{
1143 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
1144
1145 if (dat->stream.cbc)
1146 (*dat->stream.cbc) (in, out, len, &dat->ks,
1147 EVP_CIPHER_CTX_iv_noconst(ctx),
1148 EVP_CIPHER_CTX_encrypting(ctx));
1149 else if (EVP_CIPHER_CTX_encrypting(ctx))
1150 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
1151 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
1152 else
1153 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
1154 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
1155
1156 return 1;
1157}
1158
1159static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1160 const unsigned char *in, size_t len)
1161{
1162 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
1163 size_t i;
1164 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
1165
1166 if (len < bl)
1167 return 1;
1168
1169 for (i = 0, len -= bl; i <= len; i += bl)
1170 (*dat->block) (in + i, out + i, &dat->ks);
1171
1172 return 1;
1173}
1174
1175static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1176 const unsigned char *in, size_t len)
1177{
1178 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
1179
1180 int num = EVP_CIPHER_CTX_num(ctx);
1181 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
1182 EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
1183 EVP_CIPHER_CTX_set_num(ctx, num);
1184 return 1;
1185}
1186
1187static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1188 const unsigned char *in, size_t len)
1189{
1190 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
1191
1192 int num = EVP_CIPHER_CTX_num(ctx);
1193 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
1194 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
1195 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
1196 EVP_CIPHER_CTX_set_num(ctx, num);
1197 return 1;
1198}
1199
1200static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1201 const unsigned char *in, size_t len)
1202{
1203 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
1204
1205 int num = EVP_CIPHER_CTX_num(ctx);
1206 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
1207 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
1208 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
1209 EVP_CIPHER_CTX_set_num(ctx, num);
1210 return 1;
1211}
1212
static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    /*
     * In bit-length mode 'len' is already a count of bits, so the whole
     * request goes to the 1-bit CFB routine in a single call.
     */
    if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
        int num = EVP_CIPHER_CTX_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
                                EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                                EVP_CIPHER_CTX_encrypting(ctx), dat->block);
        EVP_CIPHER_CTX_set_num(ctx, num);
        return 1;
    }

    /*
     * Otherwise 'len' is bytes; process in MAXBITCHUNK-byte slices so
     * the byte-to-bit conversion (len * 8) cannot overflow size_t.
     */
    while (len >= MAXBITCHUNK) {
        int num = EVP_CIPHER_CTX_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
                                EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                                EVP_CIPHER_CTX_encrypting(ctx), dat->block);
        EVP_CIPHER_CTX_set_num(ctx, num);
        len -= MAXBITCHUNK;
    }
    /* remaining tail, if any */
    if (len) {
        int num = EVP_CIPHER_CTX_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
                                EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                                EVP_CIPHER_CTX_encrypting(ctx), dat->block);
        EVP_CIPHER_CTX_set_num(ctx, num);
    }

    return 1;
}
1245
1246static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1247 const unsigned char *in, size_t len)
1248{
1249 unsigned int num = EVP_CIPHER_CTX_num(ctx);
1250 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
1251
1252 if (dat->stream.ctr)
1253 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
1254 EVP_CIPHER_CTX_iv_noconst(ctx),
1255 EVP_CIPHER_CTX_buf_noconst(ctx),
1256 &num, dat->stream.ctr);
1257 else
1258 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
1259 EVP_CIPHER_CTX_iv_noconst(ctx),
1260 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
1261 dat->block);
1262 EVP_CIPHER_CTX_set_num(ctx, num);
1263 return 1;
1264}
1265
/* Instantiate the classic AES modes for 128-, 192- and 256-bit keys. */
BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
    BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
    BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
1269
1270static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
1271{
1272 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
1273 if (gctx == NULL)
1274 return 0;
1275 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
1276 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
1277 OPENSSL_free(gctx->iv);
1278 return 1;
1279}
1280
/*
 * Increment a big-endian 64-bit counter in place.  Wraps to zero after
 * 0xffffffffffffffff, mirroring unsigned 64-bit arithmetic.
 */
static void ctr64_inc(unsigned char *counter)
{
    int i;

    for (i = 7; i >= 0; i--) {
        /* stop as soon as a byte does not wrap to zero */
        if (++counter[i] != 0)
            break;
    }
}
1296
/*
 * Control operations for AES-GCM: IV/tag management, TLS AAD handling
 * and context copying.  Returns 1 on success, 0 on failure and -1 for
 * unrecognised control codes (EVP_CTRL_AEAD_TLS1_AAD returns the tag
 * length instead, as the extra-padding convention requires).
 */
static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
    switch (type) {
    case EVP_CTRL_INIT:
        /* reset all GCM state; IV initially points into the ctx */
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = EVP_CIPHER_CTX_iv_length(c);
        gctx->iv = EVP_CIPHER_CTX_iv_noconst(c);
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
                OPENSSL_free(gctx->iv);
            gctx->iv = OPENSSL_malloc(arg);
            if (gctx->iv == NULL)
                return 0;
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        /* expected tag may only be supplied when decrypting */
        if (arg <= 0 || arg > 16 || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* tag is only available after an encrypt has been finalised */
        if (arg <= 0 || arg > 16 || !EVP_CIPHER_CTX_encrypting(c)
            || gctx->taglen < 0)
            return 0;
        memcpy(ptr, EVP_CIPHER_CTX_buf_noconst(c), arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        if (EVP_CIPHER_CTX_encrypting(c)
            && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        /* produce the next IV and hand the trailing 'arg' bytes back */
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        /* decrypt-only: install the peer's explicit IV bytes */
        if (gctx->iv_gen == 0 || gctx->key_set == 0
            || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        gctx->tls_aad_len = arg;
        {
            /* last two AAD bytes carry the TLS record length */
            unsigned int len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_encrypting(c)) {
                if (len < EVP_GCM_TLS_TAG_LEN)
                    return 0;
                len -= EVP_GCM_TLS_TAG_LEN;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
            /* re-point internal pointers at the destination's storage */
            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            if (gctx->iv == EVP_CIPHER_CTX_iv_noconst(c))
                gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
            else {
                gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
                if (gctx_out->iv == NULL)
                    return 0;
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;

    }
}
1435
/*
 * Key/IV setup for AES-GCM.  GCM only ever needs the forward (encrypt)
 * key schedule; the fastest available implementation is chosen in
 * priority order HWAES > BSAES > VPAES > portable C and registered with
 * the GCM128 context, together with a ctr32 stream routine when one
 * exists.  Either argument may be NULL to set only the other.
 */
static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) HWAES_encrypt);
# ifdef HWAES_ctr32_encrypt_blocks
                gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
# else
                gctx->ctr = NULL;
# endif
                break;
            } else
#endif
#ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE) {
                AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) AES_encrypt);
                gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
                break;
            } else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) vpaes_encrypt);
                gctx->ctr = NULL;
                break;
            } else
#endif
                (void)0;        /* terminate potentially open 'else' */

            /* portable C fallback */
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &gctx->ks.ks);
            CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                               (block128_f) AES_encrypt);
#ifdef AES_CTR_ASM
            gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
#else
            gctx->ctr = NULL;
#endif
        } while (0);

        /*
         * If we have an iv can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}
1512
/*
 * Handle TLS GCM packet format. This consists of the last portion of the IV
 * followed by the payload and finally the tag. On encrypt generate IV,
 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
 * and verify tag.
 *
 * Returns the number of bytes written (record length on encrypt, payload
 * length on decrypt) or -1 on any failure, including tag mismatch.
 */

static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    int rv = -1;
    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;
    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CIPHER_CTX_encrypting(ctx) ?
                            EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;
    /* Use saved AAD */
    if (CRYPTO_gcm128_aad(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
                          gctx->tls_aad_len))
        goto err;
    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        /* Encrypt payload */
        if (gctx->ctr) {
            size_t bulk = 0;
#if defined(AES_GCM_ASM)
            /* do the bulk of the work in the stitched assembler path */
            if (len >= 32 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                            in + bulk,
                                            out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
#if defined(AES_GCM_ASM2)
            if (len >= 32 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        out += len;
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (gctx->ctr) {
            size_t bulk = 0;
#if defined(AES_GCM_ASM)
            if (len >= 16 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                            in + bulk,
                                            out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
#if defined(AES_GCM_ASM2)
            if (len >= 16 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
                          EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer */
        if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx), in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }

 err:
    /* IV is single-use; require a fresh one for the next record */
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}
1640
/*
 * Main AES-GCM update/final entry point.  With in != NULL and out ==
 * NULL the data is absorbed as AAD; otherwise it is encrypted or
 * decrypted (via the TLS fast path when TLS AAD was supplied).  With
 * in == NULL the operation is finalised: on encrypt the tag is
 * computed, on decrypt it is verified against the one set earlier.
 * Returns bytes processed, 0 on successful finalisation, -1 on error.
 */
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return aes_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;
    if (in) {
        if (out == NULL) {
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
            if (gctx->ctr) {
                size_t bulk = 0;
#if defined(AES_GCM_ASM)
                if (len >= 32 && AES_GCM_ASM(gctx)) {
                    /* first bring the counter to a 16-byte boundary */
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
#if defined(AES_GCM_ASM2)
                if (len >= 32 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        } else {
            if (gctx->ctr) {
                size_t bulk = 0;
#if defined(AES_GCM_ASM)
                if (len >= 16 && AES_GCM_ASM(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
#if defined(AES_GCM_ASM2)
                if (len >= 16 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        }
        return len;
    } else {
        if (!EVP_CIPHER_CTX_encrypting(ctx)) {
            if (gctx->taglen < 0)
                return -1;
            if (CRYPTO_gcm128_finish(&gctx->gcm,
                                     EVP_CIPHER_CTX_buf_noconst(ctx),
                                     gctx->taglen) != 0)
                return -1;
            gctx->iv_set = 0;
            return 0;
        }
        CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), 16);
        gctx->taglen = 16;
        /* Don't reuse the IV */
        gctx->iv_set = 0;
        return 0;
    }

}
1767
/*
 * Flags common to the AEAD/custom AES modes: custom IV handling, a
 * custom cipher entry point, init always invoked and copy via ctrl.
 */
#define CUSTOM_FLAGS    (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                | EVP_CIPH_CUSTOM_COPY)

/* AES-GCM for all three key sizes; default IV length is 12 bytes. */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
    BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
                        EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
    BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
                        EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
1779
1780static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
1781{
1782 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,c);
1783 if (type == EVP_CTRL_COPY) {
1784 EVP_CIPHER_CTX *out = ptr;
1785 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
1786 if (xctx->xts.key1) {
1787 if (xctx->xts.key1 != &xctx->ks1)
1788 return 0;
1789 xctx_out->xts.key1 = &xctx_out->ks1;
1790 }
1791 if (xctx->xts.key2) {
1792 if (xctx->xts.key2 != &xctx->ks2)
1793 return 0;
1794 xctx_out->xts.key2 = &xctx_out->ks2;
1795 }
1796 return 1;
1797 } else if (type != EVP_CTRL_INIT)
1798 return -1;
1799 /* key1 and key2 are used as an indicator both key and IV are set */
1800 xctx->xts.key1 = NULL;
1801 xctx->xts.key2 = NULL;
1802 return 1;
1803}
1804
/*
 * Key/IV setup for AES-XTS.  The supplied key holds both halves: key1
 * (data encryption/decryption, direction-dependent schedule) followed
 * by key2 (tweak, always an encrypt schedule) — hence "* 4" rather
 * than "* 8" when converting the combined key length to bits.  Picks
 * HWAES > BSAES > VPAES > portable C and, where available, a whole-
 * stream XTS routine.  Either argument may be NULL.
 */
static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
    if (!iv && !key)
        return 1;

    if (key)
        do {
#ifdef AES_XTS_ASM
            xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
#else
            xctx->stream = NULL;
#endif
            /* key_len is two AES keys */
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                if (enc) {
                    HWAES_set_encrypt_key(key,
                                          EVP_CIPHER_CTX_key_length(ctx) * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_encrypt;
# ifdef HWAES_xts_encrypt
                    xctx->stream = HWAES_xts_encrypt;
# endif
                } else {
                    HWAES_set_decrypt_key(key,
                                          EVP_CIPHER_CTX_key_length(ctx) * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_decrypt;
# ifdef HWAES_xts_decrypt
                    xctx->stream = HWAES_xts_decrypt;
#endif
                }

                /* second half of the buffer is the tweak key */
                HWAES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                                      EVP_CIPHER_CTX_key_length(ctx) * 4,
                                      &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) HWAES_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
#endif
#ifdef BSAES_CAPABLE
            /* BSAES supplies only the stream routine; fall through for
             * the key schedules themselves */
            if (BSAES_CAPABLE)
                xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
            else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                if (enc) {
                    vpaes_set_encrypt_key(key,
                                          EVP_CIPHER_CTX_key_length(ctx) * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_encrypt;
                } else {
                    vpaes_set_decrypt_key(key,
                                          EVP_CIPHER_CTX_key_length(ctx) * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_decrypt;
                }

                vpaes_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                                      EVP_CIPHER_CTX_key_length(ctx) * 4,
                                      &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) vpaes_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
#endif
                (void)0;        /* terminate potentially open 'else' */

            if (enc) {
                AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                    &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_encrypt;
            } else {
                AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                    &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_decrypt;
            }

            AES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                                EVP_CIPHER_CTX_key_length(ctx) * 4,
                                &xctx->ks2.ks);
            xctx->xts.block2 = (block128_f) AES_encrypt;

            xctx->xts.key1 = &xctx->ks1;
        } while (0);

    if (iv) {
        /* key2 set last: it signals that both key and IV are in place */
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}
1904
1905static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1906 const unsigned char *in, size_t len)
1907{
1908 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
1909 if (!xctx->xts.key1 || !xctx->xts.key2)
1910 return 0;
1911 if (!out || !in || len < AES_BLOCK_SIZE)
1912 return 0;
1913 if (xctx->stream)
1914 (*xctx->stream) (in, out, len,
1915 xctx->xts.key1, xctx->xts.key2,
1916 EVP_CIPHER_CTX_iv_noconst(ctx));
1917 else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
1918 in, out, len,
1919 EVP_CIPHER_CTX_encrypting(ctx)))
1920 return 0;
1921 return 1;
1922}
1923
/* XTS keeps no secrets outside the ctx, so no cleanup handler needed */
#define aes_xts_cleanup NULL

/* Like CUSTOM_FLAGS but without EVP_CIPH_FLAG_CUSTOM_CIPHER: XTS is
 * not an AEAD mode and uses the standard cipher framework. */
#define XTS_FLAGS       (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY)

/* XTS only exists for 128- and 256-bit AES (the key buffer is doubled) */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
    BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
1932
/*
 * Control operations for AES-CCM.  L is the length-field width in
 * bytes (IV length is 15 - L) and M is the tag length.  Returns 1 on
 * success, 0 on failure, -1 for unknown controls; EVP_CTRL_AEAD_TLS1_AAD
 * returns the tag length per the extra-padding convention.
 */
static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
    switch (type) {
    case EVP_CTRL_INIT:
        /* defaults: L = 8 (so 7-byte IV), 12-byte tag */
        cctx->key_set = 0;
        cctx->iv_set = 0;
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        cctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        cctx->tls_aad_len = arg;
        {
            /* last two AAD bytes carry the TLS record length */
            uint16_t len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_encrypting(c)) {
                if (len < cctx->M)
                    return 0;
                len -= cctx->M;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return cctx->M;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        /* Sanity check length */
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;
        /* Just copy to first part of IV */
        memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* IV length and L are two views of the same quantity */
        arg = 15 - arg;
        /* fall thru */
    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        /* tag length must be even, 4..16; expected tag only on decrypt */
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        if (EVP_CIPHER_CTX_encrypting(c) && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        }
        cctx->M = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        /* tag is single-use; force fresh IV/length for next operation */
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
            /* re-point the key schedule at the destination's storage */
            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }

    default:
        return -1;

    }
}
2028
/*
 * Initialise the CCM key schedule and/or IV.
 *
 * Either |key| or |iv| (or both) may be NULL: EVP calls init incrementally,
 * so each part can be supplied on a separate call.  The fastest available
 * AES backend is chosen at run time (HWAES, then VPAES, then the portable
 * C implementation); CCM only ever uses the encrypt direction of the block
 * cipher.  Always returns 1.
 */
static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key)
        /* do/while(0) lets each backend branch bail out with "break" */
        do {
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &cctx->ks.ks);

                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) HWAES_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            } else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &cctx->ks.ks);
                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) vpaes_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            }
#endif
            /* Portable C fallback */
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &cctx->ks.ks);
            CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                               &cctx->ks, (block128_f) AES_encrypt);
            cctx->str = NULL;
            cctx->key_set = 1;
        } while (0);
    if (iv) {
        /* A CCM nonce is 15 - L bytes long */
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}
2073
/*
 * One-shot CCM processing of a complete TLS record.
 *
 * The record layout is: 8-byte explicit IV || payload || M-byte tag, and
 * the operation must be in place (out == in).  The TLS AAD saved by the
 * EVP_CTRL_AEAD_TLS1_AAD control is used as the additional data.
 * Returns the number of bytes written, or -1 on any failure (including
 * tag mismatch, in which case the output is cleansed).
 */
static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* Encrypt/decrypt must be performed in place */
    if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
        return -1;
    /* If encrypting set explicit IV from sequence number (start of AAD) */
    if (EVP_CIPHER_CTX_encrypting(ctx))
        memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
               EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Get rest of IV from explicit IV */
    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
           EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Correct length value: strip explicit IV and tag */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
                            len))
        return -1;
    /* Use saved AAD */
    CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
    /* Fix buffer to point to payload */
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        /* cctx->str, when set, is a faster CCM64 stream implementation */
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        /* Append the tag to the record */
        if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
            return -1;
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    } else {
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                /* Constant-time tag comparison */
                if (!CRYPTO_memcmp(tag, in + len, cctx->M))
                    return len;
            }
        }
        /* Tag mismatch or decrypt failure: don't leak the plaintext */
        OPENSSL_cleanse(out, len);
        return -1;
    }
}
2121
/*
 * Incremental CCM EVP cipher entry point.
 *
 * Because CCM must know the total message length before processing data,
 * callers follow the usual EVP CCM protocol:
 *   - out == NULL, in == NULL: |len| is the total plaintext length
 *     (sets up the IV/length via CRYPTO_ccm128_setiv);
 *   - out == NULL, in != NULL: |in|/|len| is AAD;
 *   - out != NULL, in != NULL: encrypt/decrypt payload.
 * Returns the number of bytes processed, 0 for the final call, or -1 on
 * error.  TLS records (tls_aad_len >= 0) are delegated to
 * aes_ccm_tls_cipher().
 */
static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* If not set up, return error */
    if (!cctx->key_set)
        return -1;

    if (cctx->tls_aad_len >= 0)
        return aes_ccm_tls_cipher(ctx, out, in, len);

    if (!cctx->iv_set)
        return -1;

    /* Decryption requires the expected tag to have been set first */
    if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
        return -1;
    if (!out) {
        if (!in) {
            /* Neither buffer: |len| is the total message length */
            if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                    15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If have AAD need message length */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }
    /* EVP_*Final() doesn't return any data */
    if (!in)
        return 0;
    /* If not set length yet do it */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        /* cctx->str, when set, is a faster CCM64 stream implementation */
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                /* Compare against the tag saved in the ctx buffer */
                if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
                                   cctx->M))
                    rv = len;
            }
        }
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        /* Single-use state: force a fresh IV/tag/length for the next op */
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }
}
2190
/* CCM holds no heap state, so no cleanup handler is needed */
#define aes_ccm_cleanup NULL

/*
 * Register AES-128/192/256-CCM: stream-style block size 1 and a default
 * 12-byte nonce (so L = 15 - 12 = 3).
 */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
    BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
                        EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
    BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
                        EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
2199
/* Per-context state for the AES key-wrap ciphers */
typedef struct {
    union {
        double align;           /* forces alignment of the key schedule */
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    /*
     * IV pointer: NULL while no IV has been set, otherwise points at the
     * EVP_CIPHER_CTX IV buffer (see aes_wrap_init_key).  NOTE(review):
     * with a NULL IV the wrap routines presumably fall back to their
     * default ICV — confirm in crypto/modes/wrap128.c.
     */
    unsigned char *iv;
} EVP_AES_WRAP_CTX;
2208
2209static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2210 const unsigned char *iv, int enc)
2211{
2212 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
2213 if (!iv && !key)
2214 return 1;
2215 if (key) {
2216 if (EVP_CIPHER_CTX_encrypting(ctx))
2217 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2218 &wctx->ks.ks);
2219 else
2220 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2221 &wctx->ks.ks);
2222 if (!iv)
2223 wctx->iv = NULL;
2224 }
2225 if (iv) {
2226 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx));
2227 wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
2228 }
2229 return 1;
2230}
2231
2232static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2233 const unsigned char *in, size_t inlen)
2234{
2235 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
2236 size_t rv;
2237 /* AES wrap with padding has IV length of 4, without padding 8 */
2238 int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
2239 /* No final operation so always return zero length */
2240 if (!in)
2241 return 0;
2242 /* Input length must always be non-zero */
2243 if (!inlen)
2244 return -1;
2245 /* If decrypting need at least 16 bytes and multiple of 8 */
2246 if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
2247 return -1;
2248 /* If not padding input must be multiple of 8 */
2249 if (!pad && inlen & 0x7)
2250 return -1;
2251 if (is_partially_overlapping(out, in, inlen)) {
2252 EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
2253 return 0;
2254 }
2255 if (!out) {
2256 if (EVP_CIPHER_CTX_encrypting(ctx)) {
2257 /* If padding round up to multiple of 8 */
2258 if (pad)
2259 inlen = (inlen + 7) / 8 * 8;
2260 /* 8 byte prefix */
2261 return inlen + 8;
2262 } else {
2263 /*
2264 * If not padding output will be exactly 8 bytes smaller than
2265 * input. If padding it will be at least 8 bytes smaller but we
2266 * don't know how much.
2267 */
2268 return inlen - 8;
2269 }
2270 }
2271 if (pad) {
2272 if (EVP_CIPHER_CTX_encrypting(ctx))
2273 rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
2274 out, in, inlen,
2275 (block128_f) AES_encrypt);
2276 else
2277 rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
2278 out, in, inlen,
2279 (block128_f) AES_decrypt);
2280 } else {
2281 if (EVP_CIPHER_CTX_encrypting(ctx))
2282 rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
2283 out, in, inlen, (block128_f) AES_encrypt);
2284 else
2285 rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
2286 out, in, inlen, (block128_f) AES_decrypt);
2287 }
2288 return rv ? (int)rv : -1;
2289}
2290
2291#define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \
2292 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
2293 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
2294
/* AES-128 key wrap: 8-byte block unit, 16-byte key, 8-byte IV (unpadded) */
static const EVP_CIPHER aes_128_wrap = {
    NID_id_aes128_wrap,
    8, 16, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

/* Accessor for the AES-128 key-wrap method table */
const EVP_CIPHER *EVP_aes_128_wrap(void)
{
    return &aes_128_wrap;
}
2308
/* AES-192 key wrap: 8-byte block unit, 24-byte key, 8-byte IV (unpadded) */
static const EVP_CIPHER aes_192_wrap = {
    NID_id_aes192_wrap,
    8, 24, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

/* Accessor for the AES-192 key-wrap method table */
const EVP_CIPHER *EVP_aes_192_wrap(void)
{
    return &aes_192_wrap;
}
2322
/* AES-256 key wrap: 8-byte block unit, 32-byte key, 8-byte IV (unpadded) */
static const EVP_CIPHER aes_256_wrap = {
    NID_id_aes256_wrap,
    8, 32, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

/* Accessor for the AES-256 key-wrap method table */
const EVP_CIPHER *EVP_aes_256_wrap(void)
{
    return &aes_256_wrap;
}
2336
/*
 * AES-128 key wrap with padding: the 4-byte IV length is what selects the
 * padded code path in aes_wrap_cipher().
 */
static const EVP_CIPHER aes_128_wrap_pad = {
    NID_id_aes128_wrap_pad,
    8, 16, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

/* Accessor for the AES-128 padded key-wrap method table */
const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
{
    return &aes_128_wrap_pad;
}
2350
/*
 * AES-192 key wrap with padding: the 4-byte IV length is what selects the
 * padded code path in aes_wrap_cipher().
 */
static const EVP_CIPHER aes_192_wrap_pad = {
    NID_id_aes192_wrap_pad,
    8, 24, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

/* Accessor for the AES-192 padded key-wrap method table */
const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
{
    return &aes_192_wrap_pad;
}
2364
/*
 * AES-256 key wrap with padding: the 4-byte IV length is what selects the
 * padded code path in aes_wrap_cipher().
 */
static const EVP_CIPHER aes_256_wrap_pad = {
    NID_id_aes256_wrap_pad,
    8, 32, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

/* Accessor for the AES-256 padded key-wrap method table */
const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
{
    return &aes_256_wrap_pad;
}
2378
2379#ifndef OPENSSL_NO_OCB
2380static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2381{
2382 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
2383 EVP_CIPHER_CTX *newc;
2384 EVP_AES_OCB_CTX *new_octx;
2385
2386 switch (type) {
2387 case EVP_CTRL_INIT:
2388 octx->key_set = 0;
2389 octx->iv_set = 0;
2390 octx->ivlen = EVP_CIPHER_CTX_iv_length(c);
2391 octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
2392 octx->taglen = 16;
2393 octx->data_buf_len = 0;
2394 octx->aad_buf_len = 0;
2395 return 1;
2396
2397 case EVP_CTRL_AEAD_SET_IVLEN:
2398 /* IV len must be 1 to 15 */
2399 if (arg <= 0 || arg > 15)
2400 return 0;
2401
2402 octx->ivlen = arg;
2403 return 1;
2404
2405 case EVP_CTRL_AEAD_SET_TAG:
2406 if (!ptr) {
2407 /* Tag len must be 0 to 16 */
2408 if (arg < 0 || arg > 16)
2409 return 0;
2410
2411 octx->taglen = arg;
2412 return 1;
2413 }
2414 if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
2415 return 0;
2416 memcpy(octx->tag, ptr, arg);
2417 return 1;
2418
2419 case EVP_CTRL_AEAD_GET_TAG:
2420 if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
2421 return 0;
2422
2423 memcpy(ptr, octx->tag, arg);
2424 return 1;
2425
2426 case EVP_CTRL_COPY:
2427 newc = (EVP_CIPHER_CTX *)ptr;
2428 new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
2429 return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
2430 &new_octx->ksenc.ks,
2431 &new_octx->ksdec.ks);
2432
2433 default:
2434 return -1;
2435
2436 }
2437}
2438
2439# ifdef HWAES_CAPABLE
2440# ifdef HWAES_ocb_encrypt
2441void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out,
2442 size_t blocks, const void *key,
2443 size_t start_block_num,
2444 unsigned char offset_i[16],
2445 const unsigned char L_[][16],
2446 unsigned char checksum[16]);
2447# else
2448# define HWAES_ocb_encrypt ((ocb128_f)NULL)
2449# endif
2450# ifdef HWAES_ocb_decrypt
2451void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out,
2452 size_t blocks, const void *key,
2453 size_t start_block_num,
2454 unsigned char offset_i[16],
2455 const unsigned char L_[][16],
2456 unsigned char checksum[16]);
2457# else
2458# define HWAES_ocb_decrypt ((ocb128_f)NULL)
2459# endif
2460# endif
2461
/*
 * Initialise the OCB key schedules and/or IV.
 *
 * |key| and |iv| may each be NULL; EVP supplies them incrementally.  Both
 * encrypt and decrypt schedules are always set (decryption needs both).
 * The fastest backend available at run time is used (HWAES with optional
 * dedicated OCB stream routines, then VPAES, then the C implementation).
 * Returns 1 on success, 0 on failure.
 */
static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) HWAES_encrypt,
                                        (block128_f) HWAES_decrypt,
                                        enc ? HWAES_ocb_encrypt
                                            : HWAES_ocb_decrypt))
                    return 0;
                break;
            }
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) vpaes_encrypt,
                                        (block128_f) vpaes_decrypt,
                                        NULL))
                    return 0;
                break;
            }
# endif
            /* Portable C fallback */
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksenc.ks);
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) AES_encrypt,
                                    (block128_f) AES_decrypt,
                                    NULL))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}
2541
/*
 * Incremental OCB EVP cipher entry point.
 *
 * With in != NULL: processes AAD (out == NULL) or payload (out != NULL),
 * buffering partial blocks in the context so that only whole AES blocks
 * reach the low-level OCB routines.  With in == NULL (the "final" call):
 * flushes buffered data/AAD, then verifies the tag (decrypt) or computes
 * it (encrypt).  Returns the number of bytes written or -1 on error.
 */
static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned char *buf;
    int *buf_len;
    int written_len = 0;
    size_t trailing_len;
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    /* If IV or Key not set then return error */
    if (!octx->iv_set)
        return -1;

    if (!octx->key_set)
        return -1;

    if (in != NULL) {
        /*
         * Need to ensure we are only passing full blocks to low level OCB
         * routines. We do it here rather than in EVP_EncryptUpdate/
         * EVP_DecryptUpdate because we need to pass full blocks of AAD too
         * and those routines don't support that
         */

        /* Are we dealing with AAD or normal data here? */
        if (out == NULL) {
            buf = octx->aad_buf;
            buf_len = &(octx->aad_buf_len);
        } else {
            buf = octx->data_buf;
            buf_len = &(octx->data_buf_len);

            if (is_partially_overlapping(out + *buf_len, in, len)) {
                EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
                return 0;
            }
        }

        /*
         * If we've got a partially filled buffer from a previous call then
         * use that data first
         */
        if (*buf_len > 0) {
            unsigned int remaining;

            remaining = AES_BLOCK_SIZE - (*buf_len);
            if (remaining > len) {
                /* Still not a full block: just accumulate and return */
                memcpy(buf + (*buf_len), in, len);
                *(buf_len) += len;
                return 0;
            }
            memcpy(buf + (*buf_len), in, remaining);

            /*
             * If we get here we've filled the buffer, so process it
             */
            len -= remaining;
            in += remaining;
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            }
            written_len = AES_BLOCK_SIZE;
            *buf_len = 0;
            if (out != NULL)
                out += AES_BLOCK_SIZE;
        }

        /* Do we have a partial block to handle at the end? */
        trailing_len = len % AES_BLOCK_SIZE;

        /*
         * If we've got some full blocks to handle, then process these first
         */
        if (len != trailing_len) {
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            }
            written_len += len - trailing_len;
            in += len - trailing_len;
        }

        /* Handle any trailing partial block */
        if (trailing_len > 0) {
            memcpy(buf, in, trailing_len);
            *buf_len = trailing_len;
        }

        return written_len;
    } else {
        /*
         * First of all empty the buffer of any partial block that we might
         * have been provided - both for data and AAD
         */
        if (octx->data_buf_len > 0) {
            if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            }
            written_len = octx->data_buf_len;
            octx->data_buf_len = 0;
        }
        if (octx->aad_buf_len > 0) {
            if (!CRYPTO_ocb128_aad
                (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
                return -1;
            octx->aad_buf_len = 0;
        }
        /* If decrypting then verify */
        if (!EVP_CIPHER_CTX_encrypting(ctx)) {
            if (octx->taglen < 0)
                return -1;
            if (CRYPTO_ocb128_finish(&octx->ocb,
                                     octx->tag, octx->taglen) != 0)
                return -1;
            octx->iv_set = 0;
            return written_len;
        }
        /* If encrypting then just get the tag */
        if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
            return -1;
        /* Don't reuse the IV */
        octx->iv_set = 0;
        return written_len;
    }
}
2690
2691static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
2692{
2693 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
2694 CRYPTO_ocb128_cleanup(&octx->ocb);
2695 return 1;
2696}
2697
/* Register AES-128/192/256-OCB: 16-byte block, default 12-byte nonce */
BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
#endif                          /* OPENSSL_NO_OCB */
Note: See TracBrowser for help on using the repository browser.