/*
 * Copyright 2001-2021 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the OpenSSL license (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */
#include <openssl/opensslconf.h>
#include <openssl/crypto.h>
#include <openssl/evp.h>
#include <openssl/err.h>
#include <string.h>
#include <assert.h>
#include <openssl/aes.h>
#include "crypto/evp.h"
#include "modes_local.h"
#include <openssl/rand.h>
#include "evp_local.h"
typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;
    block128_f block;
    union {
        cbc128_f cbc;
        ctr128_f ctr;
    } stream;
} EVP_AES_KEY;

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;
    int iv_gen;                 /* It is OK to generate IVs */
    int tls_aad_len;            /* TLS AAD length */
    ctr128_f ctr;
} EVP_AES_GCM_CTX;
typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks1, ks2;                 /* AES key schedules to use */
    XTS128_CONTEXT xts;
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);
} EVP_AES_XTS_CTX;
typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    int tls_aad_len;            /* TLS AAD length */
    CCM128_CONTEXT ccm;
    ccm128_f str;
} EVP_AES_CCM_CTX;
#ifndef OPENSSL_NO_OCB
typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ksenc;                    /* AES key schedule to use for encryption */
    union {
        double align;
        AES_KEY ks;
    } ksdec;                    /* AES key schedule to use for decryption */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    OCB128_CONTEXT ocb;
    unsigned char *iv;          /* Temporary IV store */
    unsigned char tag[16];
    unsigned char data_buf[16]; /* Store partial data blocks */
    unsigned char aad_buf[16];  /* Store partial AAD blocks */
    int data_buf_len;
    int aad_buf_len;
    int ivlen;                  /* IV length */
    int taglen;
} EVP_AES_OCB_CTX;
#endif
#define MAXBITCHUNK     ((size_t)1 << (sizeof(size_t) * 8 - 4))
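/*
 * On an LP64 platform MAXBITCHUNK is (size_t)1 << 60, on a 32-bit one it is
 * 1 << 28: a byte count kept below this bound can safely be expressed as a
 * bit count (x * 8) without overflowing size_t, which is presumably why the
 * bit-oriented CFB1 code path processes its input in MAXBITCHUNK slices.
 */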
int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void vpaes_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void vpaes_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void vpaes_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);
void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);

void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const AES_KEY *key,
                       const unsigned char ivec[AES_BLOCK_SIZE]);

void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;
    } while (n);
}
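/*
 * Example: a big-endian counter ...00 00 ff ff increments to ...00 01 00 00;
 * the loop above walks from the last byte towards the first and stops as
 * soon as a byte no longer wraps to zero.
 */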
#if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
# include "ppc_arch.h"
# ifdef VPAES_ASM
#  define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
# endif
# define HWAES_CAPABLE  (OPENSSL_ppccap_P & PPC_CRYPTO207)
# define HWAES_set_encrypt_key aes_p8_set_encrypt_key
# define HWAES_set_decrypt_key aes_p8_set_decrypt_key
# define HWAES_encrypt aes_p8_encrypt
# define HWAES_decrypt aes_p8_decrypt
# define HWAES_cbc_encrypt aes_p8_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
# define HWAES_xts_encrypt aes_p8_xts_encrypt
# define HWAES_xts_decrypt aes_p8_xts_decrypt
#endif
#if defined(OPENSSL_CPUID_OBJ) && ( \
        ((defined(__i386) || defined(__i386__) || \
          defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2)) || \
        defined(__x86_64) || defined(__x86_64__) || \
        defined(_M_AMD64) || defined(_M_X64) )

extern unsigned int OPENSSL_ia32cap_P[];

# ifdef VPAES_ASM
#  define VPAES_CAPABLE (OPENSSL_ia32cap_P[1] & (1 << (41 - 32)))
# endif
# ifdef BSAES_ASM
#  define BSAES_CAPABLE (OPENSSL_ia32cap_P[1] & (1 << (41 - 32)))
# endif
/*
 * AES-NI section
 */
# define AESNI_CAPABLE  (OPENSSL_ia32cap_P[1] & (1 << (57 - 32)))
int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void aesni_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void aesni_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void aesni_ecb_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length, const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);
void aesni_ctr32_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key, const unsigned char *ivec);
void aesni_xts_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);
void aesni_xts_decrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);
void aesni_ccm64_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);
void aesni_ccm64_decrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);
# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
size_t aesni_gcm_encrypt(const unsigned char *in,
                         unsigned char *out,
                         size_t len,
                         const void *key, unsigned char ivec[16], u64 *Xi);
#  define AES_gcm_encrypt aesni_gcm_encrypt
size_t aesni_gcm_decrypt(const unsigned char *in,
                         unsigned char *out,
                         size_t len,
                         const void *key, unsigned char ivec[16], u64 *Xi);
#  define AES_gcm_decrypt aesni_gcm_decrypt
void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
                   size_t len);
#  define AES_GCM_ASM(gctx)  (gctx->ctr == aesni_ctr32_encrypt_blocks && \
                              gctx->gcm.ghash == gcm_ghash_avx)
#  define AES_GCM_ASM2(gctx) (gctx->gcm.block == (block128_f)aesni_encrypt && \
                              gctx->gcm.ghash == gcm_ghash_avx)
#  undef AES_GCM_ASM2          /* minor size optimization */
# endif
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                          const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_decrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) aesni_cbc_encrypt : NULL;
    } else {
        ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_encrypt;
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
        else if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        else
            dat->stream.cbc = NULL;
    }

    if (ret < 0) {
        EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      EVP_CIPHER_CTX_iv_noconst(ctx),
                      EVP_CIPHER_CTX_encrypting(ctx));

    return 1;
}
static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    size_t bl = EVP_CIPHER_CTX_block_size(ctx);

    if (len < bl)
        return 1;

    aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      EVP_CIPHER_CTX_encrypting(ctx));

    return 1;
}
# define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);

    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}
# define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        /* The key is two half length keys in reality */
        const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;

        /*
         * Verify that the two keys are different.
         *
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
         */
        if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
            EVPerr(EVP_F_AESNI_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
            return 0;
        }

        /* key_len is two AES keys */
        if (enc) {
            aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                  &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_encrypt;
            xctx->stream = aesni_xts_encrypt;
        } else {
            aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                  &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_decrypt;
            xctx->stream = aesni_xts_decrypt;
        }

        aesni_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                              EVP_CIPHER_CTX_key_length(ctx) * 4,
                              &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aesni_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}
# define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);

    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aesni_encrypt);
        cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
            (ccm128_f) aesni_ccm64_decrypt_blocks;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}
# define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# ifndef OPENSSL_NO_OCB
void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        /*
         * We set both the encrypt and decrypt key here because decrypt
         * needs both. We could possibly optimise to remove setting the
         * decrypt for an encryption operation.
         */
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &octx->ksenc.ks);
        aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &octx->ksdec.ks);
        if (!CRYPTO_ocb128_init(&octx->ocb,
                                &octx->ksenc.ks, &octx->ksdec.ks,
                                (block128_f) aesni_encrypt,
                                (block128_f) aesni_decrypt,
                                enc ? aesni_ocb_encrypt
                                    : aesni_ocb_decrypt))
            return 0;

        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}
# define aesni_ocb_cipher aes_ocb_cipher
static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# endif                        /* OPENSSL_NO_OCB */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aesni_init_key, \
        aesni_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
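/*
 * For example, a (hypothetical) invocation
 *
 *     BLOCK_CIPHER_generic(NID_aes, 128, 16, 16, cbc, cbc, CBC, flags)
 *
 * expands to the two cipher tables aesni_128_cbc and aes_128_cbc plus an
 * EVP_aes_128_cbc() accessor that picks one of them at run time based on
 * AESNI_CAPABLE, i.e. on the CPUID bit probed above.
 */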
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aesni_##mode##_init_key, \
        aesni_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
#elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))

# include "sparc_arch.h"

extern unsigned int OPENSSL_sparcv9cap_P[];

/*
 * Initial Fujitsu SPARC64 X support
 */
# define HWAES_CAPABLE (OPENSSL_sparcv9cap_P[0] & SPARCV9_FJAESX)
# define HWAES_set_encrypt_key aes_fx_set_encrypt_key
# define HWAES_set_decrypt_key aes_fx_set_decrypt_key
# define HWAES_encrypt aes_fx_encrypt
# define HWAES_decrypt aes_fx_decrypt
# define HWAES_cbc_encrypt aes_fx_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_fx_ctr32_encrypt_blocks

# define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES)

void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
                    const AES_KEY *key);
void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
                    const AES_KEY *key);
/*
 * Key-length specific subroutines were chosen for the following reason.
 * Each SPARC T4 core can execute up to 8 threads which share the core's
 * resources. Loading as much key material as possible into registers
 * minimizes references to the shared memory interface, as well as the
 * number of instructions in inner loops [much needed on T4]. But then
 * non-key-length-specific routines would require conditional branches
 * either in inner loops or on subroutine entry. The former is hardly
 * acceptable, while the latter grows the code to the size occupied by
 * multiple key-length specific subroutines, so why fight?
 */
void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec, int /*unused*/);
void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec, int /*unused*/);
void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec, int /*unused*/);
void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec, int /*unused*/);
void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec, int /*unused*/);
void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec, int /*unused*/);
void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                           const unsigned char *iv, int enc)
{
    int ret, mode, bits;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = 0;
        aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_decrypt;
        switch (bits) {
        case 128:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes128_t4_cbc_decrypt : NULL;
            break;
        case 192:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes192_t4_cbc_decrypt : NULL;
            break;
        case 256:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes256_t4_cbc_decrypt : NULL;
            break;
        default:
            ret = -1;
        }
    } else {
        ret = 0;
        aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_encrypt;
        switch (bits) {
        case 128:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 192:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 256:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        default:
            ret = -1;
        }
    }

    if (ret < 0) {
        EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
# define aes_t4_cbc_cipher aes_cbc_cipher
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ecb_cipher aes_ecb_cipher
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ofb_cipher aes_ofb_cipher
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb_cipher aes_cfb_cipher
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb8_cipher aes_cfb8_cipher
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_cfb1_cipher aes_cfb1_cipher
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_ctr_cipher aes_ctr_cipher
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);

    if (!iv && !key)
        return 1;
    if (key) {
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) aes_t4_encrypt);
        switch (bits) {
        case 128:
            gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            break;
        case 192:
            gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            break;
        case 256:
            gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            break;
        default:
            return 0;
        }
        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}
# define aes_t4_gcm_cipher aes_gcm_cipher
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        /* The key is two half length keys in reality */
        const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
        const int bits = bytes * 8;

        /*
         * Verify that the two keys are different.
         *
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
         */
        if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
            EVPerr(EVP_F_AES_T4_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
            return 0;
        }

        /* key_len is two AES keys */
        if (enc) {
            aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_encrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_encrypt;
                break;
            case 256:
                xctx->stream = aes256_t4_xts_encrypt;
                break;
            default:
                return 0;
            }
        } else {
            aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                   &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_decrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_decrypt;
                break;
            case 256:
                xctx->stream = aes256_t4_xts_decrypt;
                break;
            default:
                return 0;
            }
        }

        aes_t4_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                               EVP_CIPHER_CTX_key_length(ctx) * 4,
                               &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aes_t4_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}
# define aes_t4_xts_cipher aes_xts_cipher
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);

    if (!iv && !key)
        return 1;
    if (key) {
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aes_t4_encrypt);
        cctx->str = NULL;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}
# define aes_t4_ccm_cipher aes_ccm_cipher
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# ifndef OPENSSL_NO_OCB
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        /*
         * We set both the encrypt and decrypt key here because decrypt
         * needs both. We could possibly optimise to remove setting the
         * decrypt for an encryption operation.
         */
        aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                               &octx->ksenc.ks);
        aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                               &octx->ksdec.ks);
        if (!CRYPTO_ocb128_init(&octx->ocb,
                                &octx->ksenc.ks, &octx->ksdec.ks,
                                (block128_f) aes_t4_encrypt,
                                (block128_f) aes_t4_decrypt,
                                NULL))
            return 0;

        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

# define aes_t4_ocb_cipher aes_ocb_cipher
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# endif                        /* OPENSSL_NO_OCB */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_init_key, \
        aes_t4_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_##mode##_init_key, \
        aes_t4_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
#elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__)

# include "s390x_arch.h"

typedef struct {
    union {
        double align;
        /*-
         * KM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-06)
         */
        struct {
            unsigned char k[32];
        } param;
        /* KM-AES parameter block - end */
    } km;
    unsigned int fc;
} S390X_AES_ECB_CTX;
typedef struct {
    union {
        double align;
        /*-
         * KMO-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];
            unsigned char k[32];
        } param;
        /* KMO-AES parameter block - end */
    } kmo;
    unsigned int fc;

    int res;
} S390X_AES_OFB_CTX;
typedef struct {
    union {
        double align;
        /*-
         * KMF-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];
            unsigned char k[32];
        } param;
        /* KMF-AES parameter block - end */
    } kmf;
    unsigned int fc;

    int res;
} S390X_AES_CFB_CTX;
typedef struct {
    union {
        double align;
        /*-
         * KMA-GCM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-11)
         */
        struct {
            unsigned char reserved[12];
            union {
                unsigned int w;
                unsigned char b[4];
            } cv;
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } t;
            unsigned char h[16];
            unsigned long long taadl;
            unsigned long long tpcl;
            union {
                unsigned long long g[2];
                unsigned int w[4];
            } j0;
            unsigned char k[32];
        } param;
        /* KMA-GCM-AES parameter block - end */
    } kma;
    unsigned int fc;
    int key_set;

    unsigned char *iv;
    int ivlen;
    int iv_set;
    int iv_gen;

    int taglen;

    unsigned char ares[16];
    unsigned char mres[16];
    unsigned char kres[16];
    int areslen;
    int mreslen;
    int kreslen;

    int tls_aad_len;
} S390X_AES_GCM_CTX;
typedef struct {
    union {
        double align;
        /*-
         * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
         * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
         * rounds field is used to store the function code and that the key
         * schedule is not stored (if aes hardware support is detected).
         */
        struct {
            unsigned char pad[16];
            AES_KEY k;
        } key;

        struct {
            /*-
             * KMAC-AES parameter block - begin
             * (see z/Architecture Principles of Operation >= SA22-7832-08)
             */
            struct {
                union {
                    unsigned long long g[2];
                    unsigned char b[16];
                } icv;
                unsigned char k[32];
            } kmac_param;
            /* KMAC-AES parameter block - end */

            union {
                unsigned long long g[2];
                unsigned char b[16];
            } nonce;
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } buf;

            unsigned long long blocks;
            int l;
            int m;
            int tls_aad_len;
            int iv_set;
            int tag_set;
            int len_set;
            int key_set;

            unsigned char pad[140];
            unsigned int fc;
        } ccm;
    } aes;
} S390X_AES_CCM_CTX;
/* Convert key size to function code: [16,24,32] -> [18,19,20]. */
# define S390X_AES_FC(keylen)  (S390X_AES_128 + ((((keylen) << 3) - 128) >> 6))
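/*
 * E.g. keylen = 24: ((24 << 3) - 128) >> 6 = (192 - 128) >> 6 = 1, so the
 * macro yields S390X_AES_128 + 1 = S390X_AES_192; key lengths 16 and 32 map
 * to S390X_AES_128 and S390X_AES_256 the same way.
 */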
/* Most modes of operation need km for partial block processing. */
# define S390X_aes_128_CAPABLE (OPENSSL_s390xcap_P.km[0] &      \
                                S390X_CAPBIT(S390X_AES_128))
# define S390X_aes_192_CAPABLE (OPENSSL_s390xcap_P.km[0] &      \
                                S390X_CAPBIT(S390X_AES_192))
# define S390X_aes_256_CAPABLE (OPENSSL_s390xcap_P.km[0] &      \
                                S390X_CAPBIT(S390X_AES_256))
# define s390x_aes_init_key aes_init_key
static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc);

# define S390X_aes_128_cbc_CAPABLE      0       /* checked by callee */
# define S390X_aes_192_cbc_CAPABLE      0
# define S390X_aes_256_cbc_CAPABLE      0
# define S390X_AES_CBC_CTX              EVP_AES_KEY

# define s390x_aes_cbc_init_key aes_init_key

# define s390x_aes_cbc_cipher aes_cbc_cipher
static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define S390X_aes_128_ecb_CAPABLE      S390X_aes_128_CAPABLE
# define S390X_aes_192_ecb_CAPABLE      S390X_aes_192_CAPABLE
# define S390X_aes_256_ecb_CAPABLE      S390X_aes_256_CAPABLE

static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);

    cctx->fc = S390X_AES_FC(keylen) | (enc ? 0 : S390X_DECRYPT);
    memcpy(cctx->km.param.k, key, keylen);
    return 1;
}

static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);

    s390x_km(in, len, out, cctx->fc, &cctx->km.param);
    return 1;
}
# define S390X_aes_128_ofb_CAPABLE (S390X_aes_128_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmo[0] &        \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_ofb_CAPABLE (S390X_aes_192_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmo[0] &        \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_ofb_CAPABLE (S390X_aes_256_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmo[0] &        \
                                     S390X_CAPBIT(S390X_AES_256)))
static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
{
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    const unsigned char *oiv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
    memcpy(cctx->kmo.param.k, key, keylen);
    memcpy(cctx->kmo.param.cv, oiv, ivlen);
    cctx->res = 0;
    return 1;
}
static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
    int n = cctx->res;
    int rem;

    memcpy(cctx->kmo.param.cv, iv, ivlen);
    while (n && len) {
        *out = *in ^ cctx->kmo.param.cv[n];
        n = (n + 1) & 0xf;
        --len;
        ++out;
        ++in;
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);

        out += len;
        in += len;
    }

    if (rem) {
        s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
                 cctx->kmo.param.k);

        while (rem--) {
            out[n] = in[n] ^ cctx->kmo.param.cv[n];
            ++n;
        }
    }

    memcpy(iv, cctx->kmo.param.cv, ivlen);
    cctx->res = n;
    return 1;
}
# define S390X_aes_128_cfb_CAPABLE (S390X_aes_128_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_cfb_CAPABLE (S390X_aes_192_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_cfb_CAPABLE (S390X_aes_256_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_256)))
static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *oiv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    cctx->fc = S390X_AES_FC(keylen) | (enc ? 0 : S390X_DECRYPT)
               | (16 << 24);   /* 16 bytes cipher feedback */
    memcpy(cctx->kmf.param.k, key, keylen);
    memcpy(cctx->kmf.param.cv, oiv, ivlen);
    cctx->res = 0;
    return 1;
}
static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
    int n = cctx->res;
    int rem;
    unsigned char tmp;

    memcpy(cctx->kmf.param.cv, iv, ivlen);
    while (n && len) {
        tmp = *in;
        *out = cctx->kmf.param.cv[n] ^ tmp;
        cctx->kmf.param.cv[n] = enc ? *out : tmp;
        n = (n + 1) & 0xf;
        --len;
        ++out;
        ++in;
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);

        out += len;
        in += len;
    }

    if (rem) {
        s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
                 S390X_AES_FC(keylen), cctx->kmf.param.k);

        while (rem--) {
            tmp = in[n];
            out[n] = cctx->kmf.param.cv[n] ^ tmp;
            cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
            ++n;
        }
    }

    memcpy(iv, cctx->kmf.param.cv, ivlen);
    cctx->res = n;
    return 1;
}
# define S390X_aes_128_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_128))
# define S390X_aes_192_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_192))
# define S390X_aes_256_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_256))
static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
                                   const unsigned char *key,
                                   const unsigned char *ivec, int enc)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *oiv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    cctx->fc = S390X_AES_FC(keylen) | (enc ? 0 : S390X_DECRYPT)
               | (1 << 24);   /* 1 byte cipher feedback flag */
    memcpy(cctx->kmf.param.k, key, keylen);
    memcpy(cctx->kmf.param.cv, oiv, ivlen);
    return 1;
}
static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);

    memcpy(cctx->kmf.param.cv, iv, ivlen);
    s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
    memcpy(iv, cctx->kmf.param.cv, ivlen);
    return 1;
}
# define S390X_aes_128_cfb1_CAPABLE     0
# define S390X_aes_192_cfb1_CAPABLE     0
# define S390X_aes_256_cfb1_CAPABLE     0

# define s390x_aes_cfb1_init_key aes_init_key

# define s390x_aes_cfb1_cipher aes_cfb1_cipher
static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len);

# define S390X_aes_128_ctr_CAPABLE      0       /* checked by callee */
# define S390X_aes_192_ctr_CAPABLE      0
# define S390X_aes_256_ctr_CAPABLE      0
# define S390X_AES_CTR_CTX              EVP_AES_KEY

# define s390x_aes_ctr_init_key aes_init_key

# define s390x_aes_ctr_cipher aes_ctr_cipher
static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kma[0] &        \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kma[0] &        \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kma[0] &        \
                                     S390X_CAPBIT(S390X_AES_256)))
/* iv + padding length for iv lengths != 12 */
# define S390X_gcm_ivpadlen(i)  ((((i) + 15) >> 4 << 4) + 16)
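/*
 * E.g. S390X_gcm_ivpadlen(13) = ((13 + 15) >> 4 << 4) + 16 = 16 + 16 = 32:
 * the iv is rounded up to a whole 16-byte block and followed by one more
 * block carrying the 64-bit iv bit-length, matching the padding written in
 * the EVP_CTRL_AEAD_SET_IVLEN handler below.
 */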
/*-
 * Process additional authenticated data. Returns 0 on success. Code is
 * big-endian.
 */
static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
                             size_t len)
{
    unsigned long long alen;
    int n, rem;

    if (ctx->kma.param.tpcl)
        return -2;

    alen = ctx->kma.param.taadl + len;
    if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
        return -1;
    ctx->kma.param.taadl = alen;

    n = ctx->areslen;
    while (n && len) {
        ctx->ares[n] = *aad;
        n = (n + 1) & 0xf;
        ++aad;
        --len;
    }
    /* ctx->ares contains a complete block if offset has wrapped around */
    if (!n) {
        s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;
    }
    ctx->areslen = n;

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        aad += len;
        ctx->fc |= S390X_KMA_HS;
    }

    if (rem) {
        ctx->areslen = rem;

        do {
            --rem;
            ctx->ares[rem] = aad[rem];
        } while (rem);
    }

    return 0;
}
/*-
 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
 * success. Code is big-endian.
 */
static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len)
{
    const unsigned char *inptr;
    unsigned long long mlen;
    union {
        unsigned int w[4];
        unsigned char b[16];
    } buf;
    size_t inlen;
    int n, rem, i;

    mlen = ctx->kma.param.tpcl + len;
    if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
        return -1;
    ctx->kma.param.tpcl = mlen;

    n = ctx->mreslen;
    if (n) {
        inptr = in;
        inlen = len;
        while (n && inlen) {
            ctx->mres[n] = *inptr;
            n = (n + 1) & 0xf;
            ++inptr;
            --inlen;
        }
        /* ctx->mres contains a complete block if offset has wrapped around */
        if (!n) {
            s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
                      ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
            ctx->areslen = 0;

            /* previous call already encrypted/decrypted its remainder,
             * see comment below */
            n = ctx->mreslen;
            while (n) {
                *out = buf.b[n];
                n = (n + 1) & 0xf;
                ++out;
                ++in;
                --len;
            }
            ctx->mreslen = 0;
        }
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(ctx->ares, ctx->areslen, in, len, out,
                  ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
        in += len;
        out += len;
        ctx->fc |= S390X_KMA_HS;
        ctx->areslen = 0;
    }

    /*-
     * If there is a remainder, it has to be saved such that it can be
     * processed by kma later. However, we also have to do the for-now
     * unauthenticated encryption/decryption part here and now...
     */
    if (rem) {
        if (!ctx->mreslen) {
            buf.w[0] = ctx->kma.param.j0.w[0];
            buf.w[1] = ctx->kma.param.j0.w[1];
            buf.w[2] = ctx->kma.param.j0.w[2];
            buf.w[3] = ctx->kma.param.cv.w + 1;
            s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
        }

        n = ctx->mreslen;
        for (i = 0; i < rem; i++) {
            ctx->mres[n + i] = in[i];
            out[i] = in[i] ^ ctx->kres[n + i];
        }

        ctx->mreslen += rem;
    }
    return 0;
}
/*
 * Initialize context structure. Code is big-endian.
 */
static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx)
{
    ctx->kma.param.t.g[0] = 0;
    ctx->kma.param.t.g[1] = 0;
    ctx->kma.param.tpcl = 0;
    ctx->kma.param.taadl = 0;
    ctx->mreslen = 0;
    ctx->areslen = 0;
    ctx->kreslen = 0;

    if (ctx->ivlen == 12) {
        memcpy(&ctx->kma.param.j0, ctx->iv, ctx->ivlen);
        ctx->kma.param.j0.w[3] = 1;
        ctx->kma.param.cv.w = 1;
    } else {
        /* ctx->iv has the right size and is already padded. */
        s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
                  ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;

        ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
        ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
        ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
        ctx->kma.param.t.g[0] = 0;
        ctx->kma.param.t.g[1] = 0;
    }
}
/*-
 * Performs various operations on the context structure depending on control
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 * Code is big-endian.
 */
static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    S390X_AES_GCM_CTX *gctx_out;
    EVP_CIPHER_CTX *out;
    unsigned char *buf, *iv;
    int ivlen, enc, len;

    switch (type) {
    case EVP_CTRL_INIT:
        ivlen = EVP_CIPHER_iv_length(c->cipher);
        iv = EVP_CIPHER_CTX_iv_noconst(c);
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = ivlen;
        gctx->iv = iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = gctx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;

        if (arg != 12) {
            iv = EVP_CIPHER_CTX_iv_noconst(c);
            len = S390X_gcm_ivpadlen(arg);

            /* Allocate memory for iv if needed. */
            if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
                if (gctx->iv != iv)
                    OPENSSL_free(gctx->iv);

                if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
                    EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
            }
            /* Add padding. */
            memset(gctx->iv + arg, 0, len - arg - 8);
            *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (arg <= 0 || arg > 16 || enc)
            return 0;

        memcpy(buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
            return 0;

        memcpy(ptr, gctx->kma.param.t.b, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole iv */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;

        if (arg)
            memcpy(gctx->iv, ptr, arg);

        enc = EVP_CIPHER_CTX_encrypting(c);
        if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;

        gctx->iv_gen = 1;
        return 1;
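        /*
         * For the usual TLS case ivlen is 12: a 4-byte fixed field (the salt
         * from the key block) followed by an 8-byte invocation field (the
         * per-record explicit nonce), which is why the checks above demand
         * at least 4 and 8 bytes respectively.
         */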
    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;

        s390x_aes_gcm_setiv(gctx);

        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;

        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
            return 0;

        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        s390x_aes_gcm_setiv(gctx);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the aad for later use. */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        gctx->tls_aad_len = arg;

        len = buf[arg - 2] << 8 | buf[arg - 1];
        /* Correct length for explicit iv. */
        if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
            return 0;
        len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;

        /* If decrypting correct for tag too. */
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (!enc) {
            if (len < EVP_GCM_TLS_TAG_LEN)
                return 0;
            len -= EVP_GCM_TLS_TAG_LEN;
        }
        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;
        /* Extra padding: tag appended to record. */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        out = ptr;
        gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
        iv = EVP_CIPHER_CTX_iv_noconst(c);

        if (gctx->iv == iv) {
            gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
        } else {
            len = S390X_gcm_ivpadlen(gctx->ivlen);

            if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
                EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                return 0;
            }

            memcpy(gctx_out->iv, gctx->iv, len);
        }
        return 1;

    default:
        return -1;
    }
}
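/*
 * Hypothetical caller-side sketch (not part of this file) showing how the
 * EVP layer drives the ctrl handler above; the identifiers key, iv, aad, pt
 * and gcm_seal_example are placeholders.
 */
#if 0
static int gcm_seal_example(const unsigned char *key, const unsigned char *iv,
                            const unsigned char *aad, int aadlen,
                            const unsigned char *pt, int ptlen,
                            unsigned char *ct, unsigned char tag[16])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int len, ok = 0;

    if (c == NULL)
        return 0;
    if (EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, NULL, NULL) == 1
        && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL) == 1
        && EVP_EncryptInit_ex(c, NULL, NULL, key, iv) == 1
        && EVP_EncryptUpdate(c, NULL, &len, aad, aadlen) == 1   /* AAD pass */
        && EVP_EncryptUpdate(c, ct, &len, pt, ptlen) == 1
        && EVP_EncryptFinal_ex(c, ct + len, &len) == 1
        && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag) == 1)
        ok = 1;
    EVP_CIPHER_CTX_free(c);
    return ok;
}
#endif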
/*
 * Set key or iv or enc/dec. Returns 1 on success. Otherwise 0 is returned.
 */
static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);

    gctx->fc = S390X_AES_FC(keylen) | (enc ? 0 : S390X_DECRYPT);

    if (key != NULL) {
        gctx->fc &= ~S390X_KMA_HS;
        memcpy(&gctx->kma.param.k, key, keylen);
        gctx->key_set = 1;
    }

    if (iv != NULL) {
        memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_gen = 0;
        gctx->iv_set = 1;
    }

    if (gctx->key_set && gctx->iv_set)
        s390x_aes_gcm_setiv(gctx);

    gctx->fc &= ~(S390X_KMA_LPC | S390X_KMA_LAAD);
    return 1;
}
/*-
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
 * if successful. Otherwise -1 is returned. Code is big-endian.
 */
static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    int rv = -1;

    if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;

    if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
                                     : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;

    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;

    gctx->kma.param.taadl = gctx->tls_aad_len << 3;
    gctx->kma.param.tpcl = len << 3;
    s390x_kma(buf, gctx->tls_aad_len, in, len, out,
              gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);

    if (enc) {
        memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }
err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}
/*-
 * Called from EVP layer to initialize context, process additional
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
 * ciphertext or process a TLS packet, depending on context. Returns bytes
 * written on success. Otherwise -1 is returned. Code is big-endian.
 */
static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    unsigned char *buf, tmp[16];
    int enc;

    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return s390x_aes_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;

    if (in != NULL) {
        if (out == NULL) {
            if (s390x_aes_gcm_aad(gctx, in, len))
                return -1;
        } else {
            if (s390x_aes_gcm(gctx, in, out, len))
                return -1;
        }
        return len;
    } else {
        gctx->kma.param.taadl <<= 3;
        gctx->kma.param.tpcl <<= 3;
        s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
                  gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
        /*-
         * Recall that we already en-/decrypted gctx->mres and returned it to
         * the caller, so only clear the temporary copy here.
         */
        OPENSSL_cleanse(tmp, gctx->mreslen);
        gctx->iv_set = 0;

        enc = EVP_CIPHER_CTX_encrypting(ctx);
        if (enc) {
            gctx->taglen = 16;
        } else {
            if (gctx->taglen < 0)
                return -1;

            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
            if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
                return -1;
        }
        return 0;
    }
}
static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    const unsigned char *iv;

    if (gctx == NULL)
        return 0;

    iv = EVP_CIPHER_CTX_iv(c);
    if (iv != gctx->iv)
        OPENSSL_free(gctx->iv);

    OPENSSL_cleanse(gctx, sizeof(*gctx));
    return 1;
}
# define S390X_AES_XTS_CTX              EVP_AES_XTS_CTX
# define S390X_aes_128_xts_CAPABLE      0       /* checked by callee */
# define S390X_aes_256_xts_CAPABLE      0

# define s390x_aes_xts_init_key aes_xts_init_key
static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc);
# define s390x_aes_xts_cipher aes_xts_cipher
static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define s390x_aes_xts_ctrl aes_xts_ctrl
static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# define s390x_aes_xts_cleanup aes_xts_cleanup
# define S390X_aes_128_ccm_CAPABLE (S390X_aes_128_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmac[0] &       \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_ccm_CAPABLE (S390X_aes_192_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmac[0] &       \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_ccm_CAPABLE (S390X_aes_256_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmac[0] &       \
                                     S390X_CAPBIT(S390X_AES_256)))

# define S390X_CCM_AAD_FLAG     0x40
/*-
 * Set nonce and length fields. Code is big-endian.
 */
static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
                                       const unsigned char *nonce,
                                       size_t mlen)
{
    ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
    ctx->aes.ccm.nonce.g[1] = mlen;
    memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
}
/*-
 * Process additional authenticated data. Code is big-endian.
 */
static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
                              size_t alen)
{
    unsigned char *ptr;
    int i, rem;

    if (!alen)
        return;

    ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;

    /* Suppress 'type-punned pointer dereference' warning. */
    ptr = ctx->aes.ccm.buf.b;

    if (alen < ((1 << 16) - (1 << 8))) {
        *(uint16_t *)ptr = alen;
        i = 2;
    } else if (sizeof(alen) == 8
               && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
        *(uint16_t *)ptr = 0xffff;
        *(uint64_t *)(ptr + 2) = alen;
        i = 10;
    } else {
        *(uint16_t *)ptr = 0xfffe;
        *(uint32_t *)(ptr + 2) = alen;
        i = 6;
    }

    while (i < 16 && alen) {
        ctx->aes.ccm.buf.b[i] = *aad;
        ++aad;
        --alen;
        ++i;
    }
    while (i < 16) {
        ctx->aes.ccm.buf.b[i] = 0;
        ++i;
    }

    ctx->aes.ccm.kmac_param.icv.g[0] = 0;
    ctx->aes.ccm.kmac_param.icv.g[1] = 0;
    s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
               &ctx->aes.ccm.kmac_param);
    ctx->aes.ccm.blocks += 2;

    rem = alen & 0xf;
    alen &= ~(size_t)0xf;
    if (alen) {
        s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        ctx->aes.ccm.blocks += alen >> 4;
        aad += alen;
    }
    if (rem) {
        for (i = 0; i < rem; i++)
            ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];

        s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                 ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;
    }
}
/*-
 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
 * success.
 */
static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len, int enc)
{
    size_t n, rem;
    unsigned int i, l, num;
    unsigned char flags;

    flags = ctx->aes.ccm.nonce.b[0];
    if (!(flags & S390X_CCM_AAD_FLAG)) {
        s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
                 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;
    }
    l = flags & 0x7;
    ctx->aes.ccm.nonce.b[0] = l;

    /*-
     * Reconstruct length from encoded length field
     * and initialize it with counter value.
     */
    n = 0;
    for (i = 15 - l; i < 15; i++) {
        n |= ctx->aes.ccm.nonce.b[i];
        ctx->aes.ccm.nonce.b[i] = 0;
        n <<= 8;
    }
    n |= ctx->aes.ccm.nonce.b[15];
    ctx->aes.ccm.nonce.b[15] = 1;

    if (n != len)
        return -1;              /* length mismatch */

    if (enc) {
        /* Two operations per block plus one for tag encryption */
        ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
        if (ctx->aes.ccm.blocks > (1ULL << 61))
            return -2;          /* too much data */
    }

    num = 0;
    rem = len & 0xf;
    len &= ~(size_t)0xf;

    if (enc) {
        /* mac-then-encrypt */
        if (len)
            s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        if (rem) {
            for (i = 0; i < rem; i++)
                ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];

            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                     ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                     ctx->aes.ccm.kmac_param.k);
        }

        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
                                    ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
                                    &num, (ctr128_f)AES_ctr32_encrypt);
    } else {
        /* decrypt-then-mac */
        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
                                    ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
                                    &num, (ctr128_f)AES_ctr32_encrypt);

        if (len)
            s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        if (rem) {
            for (i = 0; i < rem; i++)
                ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];

            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                     ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                     ctx->aes.ccm.kmac_param.k);
        }
    }

    /* encrypt tag */
    for (i = 15 - l; i < 16; i++)
        ctx->aes.ccm.nonce.b[i] = 0;

    s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
             ctx->aes.ccm.kmac_param.k);
    ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
    ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];

    ctx->aes.ccm.nonce.b[0] = flags;    /* restore flags field */
    return 0;
}
/*-
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
 * if successful. Otherwise -1 is returned.
 */
static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    const unsigned char *ivec = EVP_CIPHER_CTX_iv(ctx);
    unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    unsigned char iv[EVP_MAX_IV_LENGTH];

    if (out != in
            || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
        return -1;

    if (enc) {
        /* Set explicit iv (sequence number). */
        memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
    }

    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
    /*-
     * Get explicit iv (sequence number). We already have fixed iv
     * (server/client_write_iv) here.
     */
    memcpy(iv, ivec, sizeof(iv));
    memcpy(iv + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
    s390x_aes_ccm_setiv(cctx, iv, len);

    /* Process aad (sequence number|type|version|length) */
    s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);

    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;

    if (enc) {
        if (s390x_aes_ccm(cctx, in, out, len, enc))
            return -1;

        memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
    } else {
        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
                               cctx->aes.ccm.m))
                return len;
        }

        OPENSSL_cleanse(out, len);
        return -1;
    }
}
/*-
 * Set key or iv or enc/dec. Returns 1 if successful.
 * Otherwise 0 is returned.
 */
static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx);

    cctx->aes.ccm.fc = S390X_AES_FC(keylen);

    if (key != NULL) {
        memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
        cctx->aes.ccm.key_set = 1;
    }
    if (iv != NULL) {
        memcpy(ivec, iv, 15 - cctx->aes.ccm.l);
        cctx->aes.ccm.iv_set = 1;
    }

    /* Store encoded m and l. */
    cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
                             | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
    memset(cctx->aes.ccm.nonce.b + 1, 0, sizeof(cctx->aes.ccm.nonce.b) - 1);

    cctx->aes.ccm.blocks = 0;
    cctx->aes.ccm.len_set = 0;
    return 1;
}
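/*
 * With the defaults l = 8 and m = 12 set in s390x_aes_ccm_ctrl() below, the
 * encoding above gives nonce.b[0] = ((8 - 1) & 0x7) | (((12 - 2) >> 1) << 3)
 * = 7 | (5 << 3) = 0x2f, the B0 flags octet defined by RFC 3610 (the AAD
 * flag bit is OR-ed in later once AAD is actually processed).
 */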
/*-
 * Called from EVP layer to initialize context, process additional
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
 * plaintext or process a TLS packet, depending on context. Returns bytes
 * written on success. Otherwise -1 is returned.
 */
static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    const unsigned char *ivec = EVP_CIPHER_CTX_iv(ctx);
    unsigned char *buf;
    int rv;

    if (!cctx->aes.ccm.key_set)
        return -1;

    if (cctx->aes.ccm.tls_aad_len >= 0)
        return s390x_aes_ccm_tls_cipher(ctx, out, in, len);

    /*-
     * Final(): Does not return any data. Recall that ccm is mac-then-encrypt
     * so integrity must be checked already at Update() i.e., before
     * potentially corrupted data is output.
     */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->aes.ccm.iv_set)
        return -1;

    if (out == NULL) {
        /* Update(): Pass message length. */
        if (in == NULL) {
            s390x_aes_ccm_setiv(cctx, ivec, len);

            cctx->aes.ccm.len_set = 1;
            return len;
        }

        /* Update(): Process aad. */
        if (!cctx->aes.ccm.len_set && len)
            return -1;

        s390x_aes_ccm_aad(cctx, in, len);
        return len;
    }

    /* The tag must be set before actually decrypting data */
    if (!enc && !cctx->aes.ccm.tag_set)
        return -1;

    /* Update(): Process message. */
    if (!cctx->aes.ccm.len_set) {
        /*-
         * In case message length was not previously set explicitly via
         * Update(), set it now.
         */
        s390x_aes_ccm_setiv(cctx, ivec, len);

        cctx->aes.ccm.len_set = 1;
    }

    if (enc) {
        if (s390x_aes_ccm(cctx, in, out, len, enc))
            return -1;

        cctx->aes.ccm.tag_set = 1;
        return len;
    } else {
        rv = -1;

        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
                               cctx->aes.ccm.m))
                rv = len;
        }

        if (rv == -1)
            OPENSSL_cleanse(out, len);

        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.len_set = 0;
        return rv;
    }
}
/*-
 * Performs various operations on the context structure depending on control
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 * Code is big-endian.
 */
static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
    unsigned char *buf, *iv;
    int enc, len;

    switch (type) {
    case EVP_CTRL_INIT:
        cctx->aes.ccm.key_set = 0;
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.l = 8;
        cctx->aes.ccm.m = 12;
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.len_set = 0;
        cctx->aes.ccm.tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = 15 - cctx->aes.ccm.l;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        /* Save the aad for later use. */
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        cctx->aes.ccm.tls_aad_len = arg;

        len = buf[arg - 2] << 8 | buf[arg - 1];
        if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
            return 0;

        /* Correct length for explicit iv. */
        len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;

        enc = EVP_CIPHER_CTX_encrypting(c);
        if (!enc) {
            if (len < cctx->aes.ccm.m)
                return 0;

            /* Correct length for tag. */
            len -= cctx->aes.ccm.m;
        }

        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;

        /* Extra padding: tag appended to record. */
        return cctx->aes.ccm.m;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;

        /* Copy to first part of the iv. */
        iv = EVP_CIPHER_CTX_iv_noconst(c);
        memcpy(iv, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        arg = 15 - arg;
        /* fall-through */

    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;

        cctx->aes.ccm.l = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;

        enc = EVP_CIPHER_CTX_encrypting(c);
        if (enc && ptr != NULL)
            return 0;

        if (ptr != NULL) {
            cctx->aes.ccm.tag_set = 1;
            buf = EVP_CIPHER_CTX_buf_noconst(c);
            memcpy(buf, ptr, arg);
        }

        cctx->aes.ccm.m = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (!enc || !cctx->aes.ccm.tag_set)
            return 0;

        if (arg < cctx->aes.ccm.m)
            return 0;

        memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        return 1;

    default:
        return -1;
    }
}
2424 # define s390x_aes_ccm_cleanup aes_ccm_cleanup
2426 # ifndef OPENSSL_NO_OCB
2427 # define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
2428 # define S390X_aes_128_ocb_CAPABLE 0
2429 # define S390X_aes_192_ocb_CAPABLE 0
2430 # define S390X_aes_256_ocb_CAPABLE 0
2432 # define s390x_aes_ocb_init_key aes_ocb_init_key
2433 static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2434 const unsigned char *iv, int enc);
2435 # define s390x_aes_ocb_cipher aes_ocb_cipher
2436 static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2437 const unsigned char *in, size_t len);
2438 # define s390x_aes_ocb_cleanup aes_ocb_cleanup
2439 static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2440 # define s390x_aes_ocb_ctrl aes_ocb_ctrl
2441 static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
2444 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
2446 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2447 nid##_##keylen##_##nmode,blocksize, \
2450 flags | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_##MODE##_MODE, \
2451 s390x_aes_##mode##_init_key, \
2452 s390x_aes_##mode##_cipher, \
2454 sizeof(S390X_AES_##MODE##_CTX), \
2460 static const EVP_CIPHER aes_##keylen##_##mode = { \
2461 nid##_##keylen##_##nmode, \
2465 flags | EVP_CIPH_##MODE##_MODE, \
2467 aes_##mode##_cipher, \
2469 sizeof(EVP_AES_KEY), \
2475 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2477 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2478 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
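
/*
 * Note (added for clarity): the EVP_aes_*_* accessors generated by these
 * macros select between the s390x CPACF implementation and the generic
 * software implementation at run time via the
 * S390X_aes_<keylen>_<mode>_CAPABLE checks; callers never reference either
 * EVP_CIPHER table directly.
 */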
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
    nid##_##keylen##_##mode, \
    (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
    flags | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_##MODE##_MODE, \
    s390x_aes_##mode##_init_key, \
    s390x_aes_##mode##_cipher, \
    s390x_aes_##mode##_cleanup, \
    sizeof(S390X_AES_##MODE##_CTX), \
    s390x_aes_##mode##_ctrl, \
static const EVP_CIPHER aes_##keylen##_##mode = { \
    nid##_##keylen##_##mode,blocksize, \
    (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
    flags | EVP_CIPH_##MODE##_MODE, \
    aes_##mode##_init_key, \
    aes_##mode##_cipher, \
    aes_##mode##_cleanup, \
    sizeof(EVP_AES_##MODE##_CTX), \
    aes_##mode##_ctrl, \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ \
    return S390X_aes_##keylen##_##mode##_CAPABLE ? \
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
}

#else

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
    nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
    flags|EVP_CIPH_##MODE##_MODE, \
    aes_##mode##_init_key, \
    aes_##mode##_cipher, \
    NULL, \
    sizeof(EVP_AES_KEY), \
    NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
    nid##_##keylen##_##mode,blocksize, \
    (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
    flags|EVP_CIPH_##MODE##_MODE, \
    aes_##mode##_init_key, \
    aes_##mode##_cipher, \
    aes_##mode##_cleanup, \
    sizeof(EVP_AES_##MODE##_CTX), \
    NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }

#endif

#if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
# include "arm_arch.h"
# if __ARM_MAX_ARCH__>=7
#  if defined(BSAES_ASM)
#   define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
#  endif
#  if defined(VPAES_ASM)
#   define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
#  endif
#  define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
#  define HWAES_set_encrypt_key aes_v8_set_encrypt_key
#  define HWAES_set_decrypt_key aes_v8_set_decrypt_key
#  define HWAES_encrypt aes_v8_encrypt
#  define HWAES_decrypt aes_v8_decrypt
#  define HWAES_cbc_encrypt aes_v8_cbc_encrypt
#  define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
# endif
#endif

#if defined(HWAES_CAPABLE)
int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
                          AES_KEY *key);
int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
                          AES_KEY *key);
void HWAES_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void HWAES_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char *ivec, const int enc);
void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
void HWAES_xts_encrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void HWAES_xts_decrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
#endif

#define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
        BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
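
/*
 * For illustration: a single invocation such as
 * BLOCK_CIPHER_generic_pack(NID_aes, 128, 0) therefore defines the
 * EVP_CIPHER tables and EVP_aes_128_* accessors for all seven "generic"
 * modes at once: CBC, ECB, OFB, CFB128, CFB1, CFB8 and CTR.
 */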
static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                        const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
#ifdef HWAES_CAPABLE
        if (HWAES_CAPABLE) {
            ret = HWAES_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) HWAES_decrypt;
            dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
# endif
        } else
#endif
#ifdef BSAES_CAPABLE
        if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
            ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
        } else
#endif
#ifdef VPAES_CAPABLE
        if (VPAES_CAPABLE) {
            ret = vpaes_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) vpaes_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) vpaes_cbc_encrypt : NULL;
        } else
#endif
        {
            ret = AES_set_decrypt_key(key,
                                      EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) AES_cbc_encrypt : NULL;
        }
    } else
#ifdef HWAES_CAPABLE
    if (HWAES_CAPABLE) {
        ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) HWAES_encrypt;
        dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
        else
# endif
# ifdef HWAES_ctr32_encrypt_blocks
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
        else
# endif
            (void)0;            /* terminate potentially open 'else' */
    } else
#endif
#ifdef BSAES_CAPABLE
    if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
        ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
    } else
#endif
#ifdef VPAES_CAPABLE
    if (VPAES_CAPABLE) {
        ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) vpaes_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) vpaes_cbc_encrypt : NULL;
    } else
#endif
    {
        ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) AES_cbc_encrypt : NULL;
#ifdef AES_CTR_ASM
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
#endif
    }

    if (ret < 0) {
        EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
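
/*
 * The cascade of 'else if's above picks the best implementation the CPU
 * supports: hardware AES (HWAES) first, then bit-sliced NEON AES (BSAES,
 * which only accelerates CBC decrypt and CTR and reuses the generic key
 * schedule), then vector-permutation AES (VPAES), and finally the portable
 * C implementation.
 */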
static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    if (dat->stream.cbc)
        (*dat->stream.cbc) (in, out, len, &dat->ks,
                            EVP_CIPHER_CTX_iv_noconst(ctx),
                            EVP_CIPHER_CTX_encrypting(ctx));
    else if (EVP_CIPHER_CTX_encrypting(ctx))
        CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
                              EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
    else
        CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
                              EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);

    return 1;
}

static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    size_t bl = EVP_CIPHER_CTX_block_size(ctx);
    size_t i;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    if (len < bl)
        return 1;

    for (i = 0, len -= bl; i <= len; i += bl)
        (*dat->block) (in + i, out + i, &dat->ks);

    return 1;
}

static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    int num = EVP_CIPHER_CTX_num(ctx);
    CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
                          EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    int num = EVP_CIPHER_CTX_num(ctx);
    CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
                          EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                          EVP_CIPHER_CTX_encrypting(ctx), dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    int num = EVP_CIPHER_CTX_num(ctx);
    CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
                            EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                            EVP_CIPHER_CTX_encrypting(ctx), dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
        int num = EVP_CIPHER_CTX_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
                                EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                                EVP_CIPHER_CTX_encrypting(ctx), dat->block);
        EVP_CIPHER_CTX_set_num(ctx, num);
        return 1;
    }

    while (len >= MAXBITCHUNK) {
        int num = EVP_CIPHER_CTX_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
                                EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                                EVP_CIPHER_CTX_encrypting(ctx), dat->block);
        EVP_CIPHER_CTX_set_num(ctx, num);
        len -= MAXBITCHUNK;
        in += MAXBITCHUNK;
        out += MAXBITCHUNK;
    }
    if (len) {
        int num = EVP_CIPHER_CTX_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
                                EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                                EVP_CIPHER_CTX_encrypting(ctx), dat->block);
        EVP_CIPHER_CTX_set_num(ctx, num);
    }

    return 1;
}

static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned int num = EVP_CIPHER_CTX_num(ctx);
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    if (dat->stream.ctr)
        CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
                                    EVP_CIPHER_CTX_iv_noconst(ctx),
                                    EVP_CIPHER_CTX_buf_noconst(ctx),
                                    &num, dat->stream.ctr);
    else
        CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
                              EVP_CIPHER_CTX_iv_noconst(ctx),
                              EVP_CIPHER_CTX_buf_noconst(ctx), &num,
                              dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
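
/*
 * Usage sketch (illustrative only, not part of the library): the ciphers
 * defined by the packs above are driven through the ordinary EVP calls.
 * The key and IV below are placeholders and error checking is omitted.
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     unsigned char key[16] = {0}, iv[16] = {0};
 *     unsigned char pt[32] = {0}, ct[48];
 *     int outl, tmpl;
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_128_cbc(), NULL, key, iv);
 *     EVP_EncryptUpdate(c, ct, &outl, pt, sizeof(pt));
 *     EVP_EncryptFinal_ex(c, ct + outl, &tmpl);
 *     EVP_CIPHER_CTX_free(c);
 */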
static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);

    if (gctx == NULL)
        return 0;
    OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
    if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
        OPENSSL_free(gctx->iv);
    return 1;
}

static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);

    switch (type) {
    case EVP_CTRL_INIT:
        gctx->ivlen = EVP_CIPHER_iv_length(c->cipher);
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = gctx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != c->iv)
                OPENSSL_free(gctx->iv);
            if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
                EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                return 0;
            }
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (arg <= 0 || arg > 16 || c->encrypt)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (arg <= 0 || arg > 16 || !c->encrypt
            || gctx->taglen < 0)
            return 0;
        memcpy(ptr, c->buf, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8 bytes.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size, so there is no
         * need to check for wrap-around or to increment more than the last
         * 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->tls_aad_len = arg;
        {
            unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!c->encrypt) {
                if (len < EVP_GCM_TLS_TAG_LEN)
                    return 0;
                len -= EVP_GCM_TLS_TAG_LEN;
            }
            c->buf[arg - 2] = len >> 8;
            c->buf[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            if (gctx->iv == c->iv)
                gctx_out->iv = out->iv;
            else {
                if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
                    EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;
    }
}

static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);

    if (!iv && !key)
        return 1;
    if (key) {
        do {
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) HWAES_encrypt);
# ifdef HWAES_ctr32_encrypt_blocks
                gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
# endif
                break;
            } else
#endif
#ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE) {
                AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) AES_encrypt);
                gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
                break;
            } else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) vpaes_encrypt);
                break;
            } else
#endif
                (void)0;        /* terminate potentially open 'else' */

            AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
            CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                               (block128_f) AES_encrypt);
#ifdef AES_CTR_ASM
            gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
#endif
        } while (0);

        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
    }
    return 1;
}

/*
 * Handle TLS GCM packet format. This consists of the last portion of the IV
 * followed by the payload and finally the tag. On encrypt generate IV,
 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
 * and verify tag.
 */
static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    int rv = -1;

    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;
    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
                                              : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;
    /* Use saved AAD */
    if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
        goto err;
    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;

    if (ctx->encrypt) {
        /* Encrypt payload */
        if (gctx->ctr) {
            size_t bulk = 0;
#if defined(AES_GCM_ASM)
            if (len >= 32 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                            in + bulk,
                                            out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
#if defined(AES_GCM_ASM2)
            if (len >= 32 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        out += len;
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (gctx->ctr) {
            size_t bulk = 0;
#if defined(AES_GCM_ASM)
            if (len >= 16 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                            in + bulk,
                                            out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
#if defined(AES_GCM_ASM2)
            if (len >= 16 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer */
        if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }

 err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}

static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);

    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return aes_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;
    if (in) {
        if (out == NULL) {
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (ctx->encrypt) {
            if (gctx->ctr) {
                size_t bulk = 0;
#if defined(AES_GCM_ASM)
                if (len >= 32 && AES_GCM_ASM(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
#if defined(AES_GCM_ASM2)
                if (len >= 32 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        } else {
            if (gctx->ctr) {
                size_t bulk = 0;
#if defined(AES_GCM_ASM)
                if (len >= 16 && AES_GCM_ASM(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
#if defined(AES_GCM_ASM2)
                if (len >= 16 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        }
        return len;
    } else {
        if (!ctx->encrypt) {
            if (gctx->taglen < 0)
                return -1;
            if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
                return -1;
            gctx->iv_set = 0;
            return 0;
        }
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
        gctx->taglen = 16;
        /* Don't reuse the IV */
        gctx->iv_set = 0;
        return 0;
    }
}

#define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_CUSTOM_IV_LENGTH)
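
/*
 * For reference, the custom flags mean: EVP_CIPH_FLAG_CUSTOM_CIPHER routes
 * all data (including the zero-length Final() call) through the mode's own
 * do_cipher handler; EVP_CIPH_CUSTOM_IV leaves IV handling entirely to the
 * implementation; EVP_CIPH_ALWAYS_CALL_INIT runs init_key even with a NULL
 * key; EVP_CIPH_CTRL_INIT issues EVP_CTRL_INIT when the context is set up;
 * EVP_CIPH_CUSTOM_COPY sends EVP_CTRL_COPY on EVP_CIPHER_CTX_copy(); and
 * EVP_CIPH_CUSTOM_IV_LENGTH makes EVP_CTRL_GET_IVLEN report the per-context
 * IV length.
 */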
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
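
/*
 * Usage sketch (illustrative only): AES-GCM through EVP, exercising the
 * ctrl calls handled by aes_gcm_ctrl() above. Placeholder key/IV, no error
 * checks.
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     unsigned char key[32] = {0}, iv[12] = {0}, tag[16];
 *     unsigned char aad[8] = {0}, pt[16] = {0}, ct[16];
 *     int outl;
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, sizeof(iv), NULL);
 *     EVP_EncryptInit_ex(c, NULL, NULL, key, iv);
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, sizeof(aad));  |+ AAD pass +|
 *     EVP_EncryptUpdate(c, ct, &outl, pt, sizeof(pt));
 *     EVP_EncryptFinal_ex(c, ct + outl, &outl);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 *     EVP_CIPHER_CTX_free(c);
 *
 * (|+ ... +| marks an inline remark; nested comments are not legal C.)
 */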
static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c);

    if (type == EVP_CTRL_COPY) {
        EVP_CIPHER_CTX *out = ptr;
        EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);

        if (xctx->xts.key1) {
            if (xctx->xts.key1 != &xctx->ks1)
                return 0;
            xctx_out->xts.key1 = &xctx_out->ks1;
        }
        if (xctx->xts.key2) {
            if (xctx->xts.key2 != &xctx->ks2)
                return 0;
            xctx_out->xts.key2 = &xctx_out->ks2;
        }
        return 1;
    } else if (type != EVP_CTRL_INIT)
        return -1;
    /* key1 and key2 are used as an indicator that both key and IV are set */
    xctx->xts.key1 = NULL;
    xctx->xts.key2 = NULL;
    return 1;
}

static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key)
        do {
            /* The key is in reality two half-length keys */
            const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;

            /*
             * Verify that the two keys are different.
             *
             * This addresses the vulnerability described in Rogaway's
             * September 2004 paper:
             *
             *      "Efficient Instantiations of Tweakable Blockciphers and
             *       Refinements to Modes OCB and PMAC".
             *      (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
             *
             * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states
             * that:
             *      "The check for Key_1 != Key_2 shall be done at any place
             *       BEFORE using the keys in the XTS-AES algorithm to process
             *       data with them."
             */
            if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
                EVPerr(EVP_F_AES_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
                return 0;
            }

#ifdef AES_XTS_ASM
            xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
#else
            xctx->stream = NULL;
#endif
            /* key_len is two AES keys */
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                if (enc) {
                    HWAES_set_encrypt_key(key,
                                          EVP_CIPHER_CTX_key_length(ctx) * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_encrypt;
# ifdef HWAES_xts_encrypt
                    xctx->stream = HWAES_xts_encrypt;
# endif
                } else {
                    HWAES_set_decrypt_key(key,
                                          EVP_CIPHER_CTX_key_length(ctx) * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_decrypt;
# ifdef HWAES_xts_decrypt
                    xctx->stream = HWAES_xts_decrypt;
# endif
                }

                HWAES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                                      EVP_CIPHER_CTX_key_length(ctx) * 4,
                                      &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) HWAES_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
#endif
#ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE)
                xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
            else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                if (enc) {
                    vpaes_set_encrypt_key(key,
                                          EVP_CIPHER_CTX_key_length(ctx) * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_encrypt;
                } else {
                    vpaes_set_decrypt_key(key,
                                          EVP_CIPHER_CTX_key_length(ctx) * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_decrypt;
                }

                vpaes_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                                      EVP_CIPHER_CTX_key_length(ctx) * 4,
                                      &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) vpaes_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
#endif
                (void)0;        /* terminate potentially open 'else' */

            if (enc) {
                AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                    &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_encrypt;
            } else {
                AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                    &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_decrypt;
            }

            AES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                                EVP_CIPHER_CTX_key_length(ctx) * 4,
                                &xctx->ks2.ks);
            xctx->xts.block2 = (block128_f) AES_encrypt;

            xctx->xts.key1 = &xctx->ks1;
        } while (0);

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}

static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (!xctx->xts.key1 || !xctx->xts.key2)
        return 0;
    if (!out || !in || len < AES_BLOCK_SIZE)
        return 0;

    if (xctx->stream)
        (*xctx->stream) (in, out, len,
                         xctx->xts.key1, xctx->xts.key2,
                         EVP_CIPHER_CTX_iv_noconst(ctx));
    else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
                                   in, out, len,
                                   EVP_CIPHER_CTX_encrypting(ctx)))
        return 0;
    return 1;
}

#define aes_xts_cleanup NULL

#define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                | EVP_CIPH_CUSTOM_COPY)

BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
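
/*
 * Usage sketch (illustrative only): XTS takes a double-length key (the two
 * halves must differ, as enforced in aes_xts_init_key() above) and a
 * 16-byte tweak passed through the IV slot, typically a sector number.
 * Buffers below are placeholders and error checking is omitted.
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     unsigned char key[64];                 |+ key1 || key2, AES-256-XTS +|
 *     unsigned char tweak[16] = {0};         |+ e.g. sector number +|
 *     unsigned char in[512] = {0}, out[512];
 *     int outl;
 *
 *     RAND_bytes(key, sizeof(key));          |+ halves must not be equal +|
 *     EVP_EncryptInit_ex(c, EVP_aes_256_xts(), NULL, key, tweak);
 *     EVP_EncryptUpdate(c, out, &outl, in, sizeof(in));
 *     EVP_CIPHER_CTX_free(c);
 */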
static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);

    switch (type) {
    case EVP_CTRL_INIT:
        cctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = 15 - cctx->L;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        cctx->tls_aad_len = arg;
        {
            uint16_t len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_encrypting(c)) {
                if (len < cctx->M)
                    return 0;
                len -= cctx->M;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return cctx->M;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        /* Sanity check length */
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;
        /* Just copy to first part of IV */
        memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        arg = 15 - arg;
        /* fall-through */

    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        if (EVP_CIPHER_CTX_encrypting(c) && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        }
        cctx->M = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        return 1;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }

    default:
        return -1;
    }
}

static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);

    if (!iv && !key)
        return 1;
    if (key)
        do {
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &cctx->ks.ks);

                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) HWAES_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            } else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &cctx->ks.ks);
                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) vpaes_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            }
#endif
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &cctx->ks.ks);
            CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                               &cctx->ks, (block128_f) AES_encrypt);
            cctx->str = NULL;
            cctx->key_set = 1;
        } while (0);
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;

    /* Encrypt/decrypt must be performed in place */
    if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
        return -1;
    /* If encrypting set explicit IV from sequence number (start of AAD) */
    if (EVP_CIPHER_CTX_encrypting(ctx))
        memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
               EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Get rest of IV from explicit IV */
    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
           EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Correct length value */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
                            len))
        return -1;
    /* Use saved AAD */
    CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
    /* Fix buffer to point to payload */
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
            return -1;
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    } else {
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, in + len, cctx->M))
                    return len;
            }
        }
        OPENSSL_cleanse(out, len);
        return -1;
    }
}

static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;

    /* If not set up, return error */
    if (!cctx->key_set)
        return -1;

    if (cctx->tls_aad_len >= 0)
        return aes_ccm_tls_cipher(ctx, out, in, len);

    /* EVP_*Final() doesn't return any data */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->iv_set)
        return -1;

    if (!out) {
        if (!in) {
            if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                    15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If we have AAD, we need the message length first */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }

    /* The tag must be set before actually decrypting data */
    if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
        return -1;

    /* If the length has not been set yet, do it now */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
                                   cctx->M))
                    return len;
            }
        }
        OPENSSL_cleanse(out, len);
        return -1;
    }
}

#define aes_ccm_cleanup NULL

BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
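
/*
 * Usage sketch (illustrative only): CCM is more rigid than GCM. The total
 * plaintext length must be supplied (via a NULL-output, NULL-input Update)
 * before any AAD, which is what the len_set logic in aes_ccm_cipher()
 * implements. Placeholder key/nonce, no error checks.
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     unsigned char key[16] = {0}, nonce[12] = {0}, tag[16];
 *     unsigned char aad[8] = {0}, pt[24] = {0}, ct[24];
 *     int outl;
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_128_ccm(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, sizeof(nonce), NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, 16, NULL);
 *     EVP_EncryptInit_ex(c, NULL, NULL, key, nonce);
 *     EVP_EncryptUpdate(c, NULL, &outl, NULL, sizeof(pt));
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, sizeof(aad));
 *     EVP_EncryptUpdate(c, ct, &outl, pt, sizeof(pt));
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 *     EVP_CIPHER_CTX_free(c);
 */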
/* Indicates if IV has been set */

static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);

    if (key) {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &wctx->ks.ks);
        else
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &wctx->ks.ks);
    }
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx));
        wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
    }
    return 1;
}

static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t inlen)
{
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
    size_t rv;
    /* AES wrap with padding has IV length of 4, without padding 8 */
    int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;

    /* No final operation so always return zero length */
    if (!in)
        return 0;
    /* Input length must always be non-zero */
    if (!inlen)
        return -1;
    /* If decrypting need at least 16 bytes and multiple of 8 */
    if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
        return -1;
    /* If not padding input must be multiple of 8 */
    if (!pad && inlen & 0x7)
        return -1;
    if (is_partially_overlapping(out, in, inlen)) {
        EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
        return 0;
    }
    if (!out) {
        if (EVP_CIPHER_CTX_encrypting(ctx)) {
            /* If padding round up to multiple of 8 */
            if (pad)
                inlen = (inlen + 7) / 8 * 8;
            /* 8 byte prefix */
            return inlen + 8;
        } else {
            /*
             * If not padding, the output will be exactly 8 bytes smaller
             * than the input. If padding, it will be at least 8 bytes
             * smaller, but we don't know by how much.
             */
            return inlen - 8;
        }
    }
    if (pad) {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
                                     out, in, inlen,
                                     (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
                                       out, in, inlen,
                                       (block128_f) AES_decrypt);
    } else {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
                                 out, in, inlen, (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
                                   out, in, inlen, (block128_f) AES_decrypt);
    }
    return rv ? (int)rv : -1;
}

#define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)

static const EVP_CIPHER aes_128_wrap = {
    NID_id_aes128_wrap,
    8, 16, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap(void)
{
    return &aes_128_wrap;
}

static const EVP_CIPHER aes_192_wrap = {
    NID_id_aes192_wrap,
    8, 24, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap(void)
{
    return &aes_192_wrap;
}

static const EVP_CIPHER aes_256_wrap = {
    NID_id_aes256_wrap,
    8, 32, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap(void)
{
    return &aes_256_wrap;
}

static const EVP_CIPHER aes_128_wrap_pad = {
    NID_id_aes128_wrap_pad,
    8, 16, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
{
    return &aes_128_wrap_pad;
}

static const EVP_CIPHER aes_192_wrap_pad = {
    NID_id_aes192_wrap_pad,
    8, 24, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
{
    return &aes_192_wrap_pad;
}

static const EVP_CIPHER aes_256_wrap_pad = {
    NID_id_aes256_wrap_pad,
    8, 32, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
{
    return &aes_256_wrap_pad;
}
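
/*
 * Usage sketch (illustrative only): the wrap ciphers refuse to run unless
 * the caller explicitly opts in with EVP_CIPHER_CTX_FLAG_WRAP_ALLOW.
 * RFC 3394 wrapping grows the payload by the 8-byte integrity prefix.
 * Placeholder keys, no error checks.
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     unsigned char kek[16] = {0};
 *     unsigned char cek[16] = {0};
 *     unsigned char wrapped[24];
 *     int outl;
 *
 *     EVP_CIPHER_CTX_set_flags(c, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
 *     EVP_EncryptInit_ex(c, EVP_aes_128_wrap(), NULL, kek, NULL);
 *     EVP_EncryptUpdate(c, wrapped, &outl, cek, sizeof(cek));
 *     EVP_CIPHER_CTX_free(c);
 */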
#ifndef OPENSSL_NO_OCB
static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    EVP_CIPHER_CTX *newc;
    EVP_AES_OCB_CTX *new_octx;

    switch (type) {
    case EVP_CTRL_INIT:
        octx->ivlen = EVP_CIPHER_iv_length(c->cipher);
        octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
        octx->data_buf_len = 0;
        octx->aad_buf_len = 0;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = octx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* IV len must be 1 to 15 */
        if (arg <= 0 || arg > 15)
            return 0;
        octx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (ptr == NULL) {
            /* Tag len must be 0 to 16 */
            if (arg < 0 || arg > 16)
                return 0;
            octx->taglen = arg;
            return 1;
        }
        if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(octx->tag, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(ptr, octx->tag, arg);
        return 1;

    case EVP_CTRL_COPY:
        newc = (EVP_CIPHER_CTX *)ptr;
        new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
        return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
                                      &new_octx->ksenc.ks,
                                      &new_octx->ksdec.ks);

    default:
        return -1;
    }
}

# ifdef HWAES_CAPABLE
#  ifdef HWAES_ocb_encrypt
void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
#  else
#   define HWAES_ocb_encrypt ((ocb128_f)NULL)
#  endif
#  ifdef HWAES_ocb_decrypt
void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
#  else
#   define HWAES_ocb_decrypt ((ocb128_f)NULL)
#  endif
# endif

static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) HWAES_encrypt,
                                        (block128_f) HWAES_decrypt,
                                        enc ? HWAES_ocb_encrypt
                                            : HWAES_ocb_decrypt))
                    return 0;
                break;
            }
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) vpaes_encrypt,
                                        (block128_f) vpaes_decrypt,
                                        NULL))
                    return 0;
                break;
            }
# endif
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksenc.ks);
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) AES_encrypt,
                                    (block128_f) AES_decrypt,
                                    NULL))
                return 0;
        } while (0);

        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned char *buf;
    int *buf_len;
    int written_len = 0;
    size_t trailing_len;
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    /* If IV or Key not set then return error */
    if (!octx->iv_set || !octx->key_set)
        return -1;

    if (in != NULL) {
        /*
         * Need to ensure we are only passing full blocks to low-level OCB
         * routines. We do it here rather than in EVP_EncryptUpdate/
         * EVP_DecryptUpdate because we need to pass full blocks of AAD too
         * and those routines don't support that.
         */

        /* Are we dealing with AAD or normal data here? */
        if (out == NULL) {
            buf = octx->aad_buf;
            buf_len = &(octx->aad_buf_len);
        } else {
            buf = octx->data_buf;
            buf_len = &(octx->data_buf_len);

            if (is_partially_overlapping(out + *buf_len, in, len)) {
                EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
                return 0;
            }
        }

        /*
         * If we've got a partially filled buffer from a previous call then
         * use that data first.
         */
        if (*buf_len > 0) {
            unsigned int remaining;

            remaining = AES_BLOCK_SIZE - (*buf_len);
            if (remaining > len) {
                memcpy(buf + (*buf_len), in, len);
                *buf_len += len;
                return 0;
            }
            memcpy(buf + (*buf_len), in, remaining);

            /* If we get here we've filled the buffer, so process it. */
            len -= remaining;
            in += remaining;
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            }
            written_len = AES_BLOCK_SIZE;
            *buf_len = 0;
            if (out != NULL)
                out += AES_BLOCK_SIZE;
        }

        /* Do we have a partial block to handle at the end? */
        trailing_len = len % AES_BLOCK_SIZE;

        /* If we've got some full blocks to handle, then process these first. */
        if (len != trailing_len) {
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            }
            written_len += len - trailing_len;
            in += len - trailing_len;
        }

        /* Handle any trailing partial block */
        if (trailing_len > 0) {
            memcpy(buf, in, trailing_len);
            *buf_len = trailing_len;
        }

        return written_len;
    } else {
        /*
         * First of all empty the buffer of any partial block that we might
         * have been provided - both for data and AAD.
         */
        if (octx->data_buf_len > 0) {
            if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            }
            written_len = octx->data_buf_len;
            octx->data_buf_len = 0;
        }
        if (octx->aad_buf_len > 0) {
            if (!CRYPTO_ocb128_aad
                (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
                return -1;
            octx->aad_buf_len = 0;
        }
        /* If decrypting then verify */
        if (!EVP_CIPHER_CTX_encrypting(ctx)) {
            if (octx->taglen < 0)
                return -1;
            if (CRYPTO_ocb128_finish(&octx->ocb,
                                     octx->tag, octx->taglen) != 0)
                return -1;
            octx->iv_set = 0;
            return written_len;
        }
        /* If encrypting then just get the tag */
        if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
            return -1;
        /* Don't reuse the IV */
        octx->iv_set = 0;
        return written_len;
    }
}

static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);

    CRYPTO_ocb128_cleanup(&octx->ocb);
    return 1;
}

BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
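
/*
 * Usage sketch (illustrative only): OCB follows the same AEAD ctrl pattern
 * as GCM, with the buffering in aes_ocb_cipher() hiding the full-block
 * restriction of the low-level routines. Placeholder key/IV, no error
 * checks; the default tag length is 16.
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     unsigned char key[16] = {0}, iv[12] = {0}, tag[16];
 *     unsigned char pt[20] = {0}, ct[20];
 *     int outl;
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_128_ocb(), NULL, key, iv);
 *     EVP_EncryptUpdate(c, ct, &outl, pt, sizeof(pt));
 *     EVP_EncryptFinal_ex(c, ct + outl, &outl);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 *     EVP_CIPHER_CTX_free(c);
 */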
#endif                          /* OPENSSL_NO_OCB */