/* ====================================================================
* Copyright (c) 2001-2014 The OpenSSL Project. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. All advertising materials mentioning features or use of this
* software must display the following acknowledgment:
* "This product includes software developed by the OpenSSL Project
* for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
*
* 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
* endorse or promote products derived from this software without
* prior written permission. For written permission, please contact
* openssl-core@openssl.org.
*
* 5. Products derived from this software may not be called "OpenSSL"
* nor may "OpenSSL" appear in their names without prior written
* permission of the OpenSSL Project.
*
* 6. Redistributions of any form whatsoever must retain the following
* acknowledgment:
* "This product includes software developed by the OpenSSL Project
* for use in the OpenSSL Toolkit (http://www.openssl.org/)"
*
* THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
* EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
* ====================================================================
*
*/
#define OPENSSL_FIPSAPI
#include <openssl/opensslconf.h>
#ifndef OPENSSL_NO_AES
#include <openssl/evp.h>
#include <openssl/err.h>
#include <string.h>
#include <assert.h>
#include <openssl/aes.h>
#include "evp_locl.h"
#include "modes_lcl.h"
#include <openssl/rand.h>
typedef struct
{
union { double align; AES_KEY ks; } ks; /* AES key schedule to use */
block128_f block;
union {
cbc128_f cbc;
ctr128_f ctr;
} stream;
} EVP_AES_KEY;
typedef struct
{
union { double align; AES_KEY ks; } ks; /* AES key schedule to use */
int key_set; /* Set if key initialised */
int iv_set; /* Set if an iv is set */
GCM128_CONTEXT gcm;
unsigned char *iv; /* Temporary IV store */
int ivlen; /* IV length */
int taglen;
int iv_gen; /* It is OK to generate IVs */
int tls_aad_len; /* TLS AAD length */
ctr128_f ctr;
} EVP_AES_GCM_CTX;
typedef struct
{
union { double align; AES_KEY ks; } ks1, ks2; /* AES key schedules to use */
XTS128_CONTEXT xts;
void (*stream)(const unsigned char *in,
unsigned char *out, size_t length,
const AES_KEY *key1, const AES_KEY *key2,
const unsigned char iv[16]);
} EVP_AES_XTS_CTX;
typedef struct
{
union { double align; AES_KEY ks; } ks; /* AES key schedule to use */
int key_set; /* Set if key initialised */
int iv_set; /* Set if an iv is set */
int tag_set; /* Set if tag is valid */
int len_set; /* Set if message length set */
int L, M; /* L and M parameters from RFC3610 */
CCM128_CONTEXT ccm;
} EVP_AES_CCM_CTX;
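/*
 * In RFC 3610 terms M is the tag (CBC-MAC) length in bytes and L is the
 * size of the length field, so the nonce is 15 - L bytes; that is the
 * amount the CCM init_key routines below copy into ctx->iv.
 */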
#define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
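/*
 * MAXBITCHUNK is 1/16th of the size_t range: 2^60 bytes when sizeof(size_t)
 * is 8, 2^28 on a 32-bit target. It is used to chunk input in the
 * bit-oriented CFB1 path so that a byte count converted to bits cannot
 * overflow a size_t.
 */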
#ifdef VPAES_ASM
int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
AES_KEY *key);
int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
AES_KEY *key);
void vpaes_encrypt(const unsigned char *in, unsigned char *out,
const AES_KEY *key);
void vpaes_decrypt(const unsigned char *in, unsigned char *out,
const AES_KEY *key);
void vpaes_cbc_encrypt(const unsigned char *in,
unsigned char *out,
size_t length,
const AES_KEY *key,
unsigned char *ivec, int enc);
#endif
void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
size_t length, const AES_KEY *key,
unsigned char ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
size_t len, const AES_KEY *key,
const unsigned char ivec[16]);
void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
size_t len, const AES_KEY *key1,
const AES_KEY *key2, const unsigned char iv[16]);
void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
size_t len, const AES_KEY *key1,
const AES_KEY *key2, const unsigned char iv[16]);
#ifdef AES_CTR_ASM
void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
size_t blocks, const AES_KEY *key,
const unsigned char ivec[AES_BLOCK_SIZE]);
#endif
#ifdef AES_XTS_ASM
void AES_xts_encrypt(const char *inp,char *out,size_t len,
const AES_KEY *key1, const AES_KEY *key2,
const unsigned char iv[16]);
void AES_xts_decrypt(const char *inp,char *out,size_t len,
const AES_KEY *key1, const AES_KEY *key2,
const unsigned char iv[16]);
#endif
#if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
# define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
# define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207)
# define HWAES_set_encrypt_key aes_p8_set_encrypt_key
# define HWAES_set_decrypt_key aes_p8_set_decrypt_key
# define HWAES_encrypt aes_p8_encrypt
# define HWAES_decrypt aes_p8_decrypt
# define HWAES_cbc_encrypt aes_p8_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
#endif
#if defined(AES_ASM) && !defined(I386_ONLY) && ( \
((defined(__i386) || defined(__i386__) || \
defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2))|| \
defined(__x86_64) || defined(__x86_64__) || \
defined(_M_AMD64) || defined(_M_X64) || \
defined(__INTEL__) )
#ifdef VPAES_ASM
#define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
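/*
 * OPENSSL_ia32cap_P[1] mirrors CPUID(1).ECX; bit 41-32 = 9 is the SSSE3
 * flag, the minimum ISA level the vector-permute (vpaes) code requires.
 */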
#endif
#ifdef BSAES_ASM
#define BSAES_CAPABLE VPAES_CAPABLE
#endif
/*
* AES-NI section
*/
#define AESNI_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(57-32)))
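/*
 * Bit 57-32 = 25 of CPUID(1).ECX is the AES-NI feature flag, so
 * AESNI_CAPABLE is set exactly when the aesni_* routines below can be used.
 */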
int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
AES_KEY *key);
int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
AES_KEY *key);
void aesni_encrypt(const unsigned char *in, unsigned char *out,
const AES_KEY *key);
void aesni_decrypt(const unsigned char *in, unsigned char *out,
const AES_KEY *key);
void aesni_ecb_encrypt(const unsigned char *in,
unsigned char *out,
size_t length,
const AES_KEY *key,
int enc);
void aesni_cbc_encrypt(const unsigned char *in,
unsigned char *out,
size_t length,
const AES_KEY *key,
unsigned char *ivec, int enc);
void aesni_ctr32_encrypt_blocks(const unsigned char *in,
unsigned char *out,
size_t blocks,
const void *key,
const unsigned char *ivec);
void aesni_xts_encrypt(const unsigned char *in,
unsigned char *out,
size_t length,
const AES_KEY *key1, const AES_KEY *key2,
const unsigned char iv[16]);
void aesni_xts_decrypt(const unsigned char *in,
unsigned char *out,
size_t length,
const AES_KEY *key1, const AES_KEY *key2,
const unsigned char iv[16]);
void aesni_ccm64_encrypt_blocks (const unsigned char *in,
unsigned char *out,
size_t blocks,
const void *key,
const unsigned char ivec[16],
unsigned char cmac[16]);
void aesni_ccm64_decrypt_blocks (const unsigned char *in,
unsigned char *out,
size_t blocks,
const void *key,
const unsigned char ivec[16],
unsigned char cmac[16]);
#if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
size_t aesni_gcm_encrypt(const unsigned char *in,
unsigned char *out,
size_t len,
const void *key,
unsigned char ivec[16],
u64 *Xi);
#define AES_gcm_encrypt aesni_gcm_encrypt
size_t aesni_gcm_decrypt(const unsigned char *in,
unsigned char *out,
size_t len,
const void *key,
unsigned char ivec[16],
u64 *Xi);
#define AES_gcm_decrypt aesni_gcm_decrypt
void gcm_ghash_avx(u64 Xi[2],const u128 Htable[16],const u8 *in,size_t len);
#define AES_GCM_ASM(gctx) (gctx->ctr==aesni_ctr32_encrypt_blocks && \
gctx->gcm.ghash==gcm_ghash_avx)
#define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
gctx->gcm.ghash==gcm_ghash_avx)
#undef AES_GCM_ASM2 /* minor size optimization */
#endif
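/*
 * AES_GCM_ASM() is true only when both the CTR and GHASH function pointers
 * resolved to the AVX-capable routines above, i.e. when the interleaved
 * ("stitched") AES-CTR/GHASH assembly can replace the generic GCM loop.
 */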
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
{
int ret, mode;
EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
mode = ctx->cipher->flags & EVP_CIPH_MODE;
if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
&& !enc)
{
ret = aesni_set_decrypt_key(key, ctx->key_len*8, ctx->cipher_data);
dat->block = (block128_f)aesni_decrypt;
dat->stream.cbc = mode==EVP_CIPH_CBC_MODE ?
(cbc128_f)aesni_cbc_encrypt :
NULL;
}
else {
ret = aesni_set_encrypt_key(key, ctx->key_len*8, ctx->cipher_data);
dat->block = (block128_f)aesni_encrypt;
if (mode==EVP_CIPH_CBC_MODE)
dat->stream.cbc = (cbc128_f)aesni_cbc_encrypt;
else if (mode==EVP_CIPH_CTR_MODE)
dat->stream.ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
else
dat->stream.cbc = NULL;
}
if(ret < 0)
	{
	EVPerr(EVP_F_AESNI_INIT_KEY,EVP_R_AES_KEY_SETUP_FAILED);
	return 0;
	}
return 1;
}
static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx,unsigned char *out,
const unsigned char *in, size_t len)
{
aesni_cbc_encrypt(in,out,len,ctx->cipher_data,ctx->iv,ctx->encrypt);
return 1;
}
static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx,unsigned char *out,
const unsigned char *in, size_t len)
{
size_t bl = ctx->cipher->block_size;
if (len<bl) return 1;
aesni_ecb_encrypt(in,out,len,ctx->cipher_data,ctx->encrypt);
return 1;
}
#define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx,unsigned char *out,
const unsigned char *in,size_t len);
#define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx,unsigned char *out,
const unsigned char *in,size_t len);
#define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx,unsigned char *out,
const unsigned char *in,size_t len);
#define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx,unsigned char *out,
const unsigned char *in,size_t len);
#define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
{
EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
if (!iv && !key)
return 1;
if (key)
{
aesni_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
(block128_f)aesni_encrypt);
gctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
/* If we have an IV, we can set it directly; otherwise use the
 * saved IV.
 */
if (iv == NULL && gctx->iv_set)
iv = gctx->iv;
if (iv)
{
CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
gctx->iv_set = 1;
}
gctx->key_set = 1;
}
else
{
/* If key set use IV, otherwise copy */
if (gctx->key_set)
CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
else
memcpy(gctx->iv, iv, gctx->ivlen);
gctx->iv_set = 1;
gctx->iv_gen = 0;
}
return 1;
}
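/*
 * A sketch (not part of this file's build) of how an application typically
 * drives the GCM ciphers defined here through the public EVP interface;
 * key, iv, aad, in, out, tag and the length variables are caller-supplied:
 *
 *	EVP_CIPHER_CTX c;
 *	EVP_CIPHER_CTX_init(&c);
 *	EVP_EncryptInit_ex(&c, EVP_aes_128_gcm(), NULL, NULL, NULL);
 *	EVP_CIPHER_CTX_ctrl(&c, EVP_CTRL_GCM_SET_IVLEN, 12, NULL);
 *	EVP_EncryptInit_ex(&c, NULL, NULL, key, iv);      (aesni_gcm_init_key on AES-NI CPUs)
 *	EVP_EncryptUpdate(&c, NULL, &outl, aad, aadlen);  (AAD-only pass)
 *	EVP_EncryptUpdate(&c, out, &outl, in, inlen);
 *	EVP_EncryptFinal_ex(&c, out + outl, &tmplen);
 *	EVP_CIPHER_CTX_ctrl(&c, EVP_CTRL_GCM_GET_TAG, 16, tag);
 *	EVP_CIPHER_CTX_cleanup(&c);
 */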
#define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
{
EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
if (!iv && !key)
return 1;
if (key)
{
/* key_len is two AES keys */
if (enc)
{
aesni_set_encrypt_key(key, ctx->key_len * 4, &xctx->ks1.ks);
xctx->xts.block1 = (block128_f)aesni_encrypt;
xctx->stream = aesni_xts_encrypt;
}
else
{
aesni_set_decrypt_key(key, ctx->key_len * 4, &xctx->ks1.ks);
xctx->xts.block1 = (block128_f)aesni_decrypt;
xctx->stream = aesni_xts_decrypt;
}
aesni_set_encrypt_key(key + ctx->key_len/2,
			ctx->key_len * 4, &xctx->ks2.ks);
xctx->xts.block2 = (block128_f)aesni_encrypt;
xctx->xts.key1 = &xctx->ks1;
}
if (iv)
{
xctx->xts.key2 = &xctx->ks2;
memcpy(ctx->iv, iv, 16);
}
return 1;
}
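/*
 * Note that for XTS the EVP key is twice the AES key size: the first half
 * keys ks1 (the data-unit cipher, scheduled above with ctx->key_len * 4
 * bits) and the second half keys ks2 (the tweak cipher), which is why ks2
 * is scheduled from key + ctx->key_len/2.
 */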
#define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
{
EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
if (!iv && !key)
return 1;
if (key)
{
aesni_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks.ks);
CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
&cctx->ks, (block128_f)aesni_encrypt);
cctx->str = enc?(ccm128_f)aesni_ccm64_encrypt_blocks :
(ccm128_f)aesni_ccm64_decrypt_blocks;
cctx->key_set = 1;
}
if (iv)
{
memcpy(ctx->iv, iv, 15 - cctx->L);
cctx->iv_set = 1;
}
return 1;
}
#define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
#define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
flags|EVP_CIPH_##MODE##_MODE, \
aesni_init_key, \
aesni_##mode##_cipher, \
NULL, \
sizeof(EVP_AES_KEY), \
NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
nid##_##keylen##_##nmode,blocksize, \
keylen/8,ivlen, \
flags|EVP_CIPH_##MODE##_MODE, \
aes_init_key, \
aes_##mode##_cipher, \
NULL, \
sizeof(EVP_AES_KEY), \
NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
#define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
nid##_##keylen##_##mode,blocksize, \
(EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
flags|EVP_CIPH_##MODE##_MODE, \
aesni_##mode##_init_key, \
aesni_##mode##_cipher, \
aes_##mode##_cleanup, \
sizeof(EVP_AES_##MODE##_CTX), \
NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
nid##_##keylen##_##mode,blocksize, \
(EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
flags|EVP_CIPH_##MODE##_MODE, \
aes_##mode##_init_key, \
aes_##mode##_cipher, \
aes_##mode##_cleanup, \
sizeof(EVP_AES_##MODE##_CTX), \
NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
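/*
 * For illustration, BLOCK_CIPHER_custom(NID_aes,128,1,12,gcm,GCM,flags)
 * expands (roughly) to two cipher tables, aesni_128_gcm and aes_128_gcm,
 * each carrying sizeof(EVP_AES_GCM_CTX) of cipher_data, plus an accessor
 *
 *	const EVP_CIPHER *EVP_aes_128_gcm(void)
 *	{ return AESNI_CAPABLE?&aesni_128_gcm:&aes_128_gcm; }
 *
 * so the AES-NI implementation is chosen at run time from the CPUID
 * capability bit with no change to callers.
 */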
#elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))
#include "sparc_arch.h"
extern unsigned int OPENSSL_sparcv9cap_P[];
#define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES)
void aes_t4_set_encrypt_key (const unsigned char *key, int bits,
AES_KEY *ks);
void aes_t4_set_decrypt_key (const unsigned char *key, int bits,
AES_KEY *ks);
void aes_t4_encrypt (const unsigned char *in, unsigned char *out,
const AES_KEY *key);
void aes_t4_decrypt (const unsigned char *in, unsigned char *out,
const AES_KEY *key);
/*
 * Key-length-specific subroutines were chosen for the following reason.
 * Each SPARC T4 core can execute up to 8 threads which share the core's
 * resources. Loading as much key material as possible into registers
 * minimizes references to the shared memory interface, as well as the
 * number of instructions in inner loops [much needed on T4]. But then
 * non-key-length-specific routines would require conditional branches
 * either in inner loops or on subroutine entry. The former is hardly
 * acceptable, while the latter means the code grows to the size occupied
 * by multiple key-length-specific subroutines anyway, so why fight it?
 */
void aes128_t4_cbc_encrypt (const unsigned char *in, unsigned char *out,
size_t len, const AES_KEY *key,
unsigned char *ivec);
void aes128_t4_cbc_decrypt (const unsigned char *in, unsigned char *out,
size_t len, const AES_KEY *key,
unsigned char *ivec);
void aes192_t4_cbc_encrypt (const unsigned char *in, unsigned char *out,
size_t len, const AES_KEY *key,
unsigned char *ivec);
void aes192_t4_cbc_decrypt (const unsigned char *in, unsigned char *out,
size_t len, const AES_KEY *key,
unsigned char *ivec);
void aes256_t4_cbc_encrypt (const unsigned char *in, unsigned char *out,
size_t len, const AES_KEY *key,
unsigned char *ivec);
void aes256_t4_cbc_decrypt (const unsigned char *in, unsigned char *out,
size_t len, const AES_KEY *key,
unsigned char *ivec);
void aes128_t4_ctr32_encrypt (const unsigned char *in, unsigned char *out,
size_t blocks, const AES_KEY *key,
unsigned char *ivec);
void aes192_t4_ctr32_encrypt (const unsigned char *in, unsigned char *out,
size_t blocks, const AES_KEY *key,
unsigned char *ivec);
void aes256_t4_ctr32_encrypt (const unsigned char *in, unsigned char *out,
size_t blocks, const AES_KEY *key,
unsigned char *ivec);
void aes128_t4_xts_encrypt (const unsigned char *in, unsigned char *out,
size_t blocks, const AES_KEY *key1,
const AES_KEY *key2, const unsigned char *ivec);
void aes128_t4_xts_decrypt (const unsigned char *in, unsigned char *out,
size_t blocks, const AES_KEY *key1,
const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_encrypt (const unsigned char *in, unsigned char *out,
size_t blocks, const AES_KEY *key1,
const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_decrypt (const unsigned char *in, unsigned char *out,
size_t blocks, const AES_KEY *key1,
const AES_KEY *key2, const unsigned char *ivec);
static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
{
int ret, mode, bits;
EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
mode = ctx->cipher->flags & EVP_CIPH_MODE;
bits = ctx->key_len*8;
if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
&& !enc)
{
ret = 0;
aes_t4_set_decrypt_key(key, bits, ctx->cipher_data);
dat->block = (block128_f)aes_t4_decrypt;
switch (bits) {
case 128:
dat->stream.cbc = mode==EVP_CIPH_CBC_MODE ?
(cbc128_f)aes128_t4_cbc_decrypt :
NULL;
break;
case 192:
dat->stream.cbc = mode==EVP_CIPH_CBC_MODE ?
(cbc128_f)aes192_t4_cbc_decrypt :
NULL;
break;
case 256:
dat->stream.cbc = mode==EVP_CIPH_CBC_MODE ?
(cbc128_f)aes256_t4_cbc_decrypt :
NULL;
break;
default:
ret = -1;
}
}
else {
ret = 0;
aes_t4_set_encrypt_key(key, bits, ctx->cipher_data);
dat->block = (block128_f)aes_t4_encrypt;
switch (bits) {
case 128:
if (mode==EVP_CIPH_CBC_MODE)
dat->stream.cbc = (cbc128_f)aes128_t4_cbc_encrypt;
else if (mode==EVP_CIPH_CTR_MODE)
dat->stream.ctr = (ctr128_f)aes128_t4_ctr32_encrypt;
else
dat->stream.cbc = NULL;
break;
case 192:
if (mode==EVP_CIPH_CBC_MODE)
dat->stream.cbc = (cbc128_f)aes192_t4_cbc_encrypt;
else if (mode==EVP_CIPH_CTR_MODE)
dat->stream.ctr = (ctr128_f)aes192_t4_ctr32_encrypt;
else
dat->stream.cbc = NULL;
break;
case 256:
if (mode==EVP_CIPH_CBC_MODE)
dat->stream.cbc = (cbc128_f)aes256_t4_cbc_encrypt;
else if (mode==EVP_CIPH_CTR_MODE)
dat->stream.ctr = (ctr128_f)aes256_t4_ctr32_encrypt;
else
dat->stream.cbc = NULL;
break;
default:
ret = -1;
}
}
if(ret < 0)
{
EVPerr(EVP_F_AES_T4_INIT_KEY,EVP_R_AES_KEY_SETUP_FAILED);
return 0;
}
return 1;
}
#define aes_t4_cbc_cipher aes_cbc_cipher
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx,unsigned char *out,
const unsigned char *in, size_t len);
#define aes_t4_ecb_cipher aes_ecb_cipher
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx,unsigned char *out,
const unsigned char *in, size_t len);
#define aes_t4_ofb_cipher aes_ofb_cipher
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx,unsigned char *out,
const unsigned char *in,size_t len);
#define aes_t4_cfb_cipher aes_cfb_cipher
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx,unsigned char *out,
const unsigned char *in,size_t len);
#define aes_t4_cfb8_cipher aes_cfb8_cipher
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx,unsigned char *out,
const unsigned char *in,size_t len);
#define aes_t4_cfb1_cipher aes_cfb1_cipher
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx,unsigned char *out,
const unsigned char *in,size_t len);
#define aes_t4_ctr_cipher aes_ctr_cipher
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
{
EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
if (!iv && !key)
return 1;
if (key)
{
int bits = ctx->key_len * 8;
aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
(block128_f)aes_t4_encrypt);
switch (bits) {
case 128:
gctx->ctr = (ctr128_f)aes128_t4_ctr32_encrypt;
break;
case 192:
gctx->ctr = (ctr128_f)aes192_t4_ctr32_encrypt;
break;
case 256:
gctx->ctr = (ctr128_f)aes256_t4_ctr32_encrypt;
break;
default:
return 0;
}
/* If we have an IV, we can set it directly; otherwise use the
 * saved IV.
 */
if (iv == NULL && gctx->iv_set)
iv = gctx->iv;
if (iv)
{
CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
gctx->iv_set = 1;
}
gctx->key_set = 1;
}
else
{
/* If key set use IV, otherwise copy */
if (gctx->key_set)
CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
else
memcpy(gctx->iv, iv, gctx->ivlen);
gctx->iv_set = 1;
gctx->iv_gen = 0;
}
return 1;
}
#define aes_t4_gcm_cipher aes_gcm_cipher
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
{
EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
if (!iv && !key)
return 1;
if (key)
{
int bits = ctx->key_len * 4;
xctx->stream = NULL;
/* key_len is two AES keys */
if (enc)
{
aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
xctx->xts.block1 = (block128_f)aes_t4_encrypt;
switch (bits) {
case 128:
xctx->stream = aes128_t4_xts_encrypt;
break;
#if 0 /* not yet */
case 192:
xctx->stream = aes192_t4_xts_encrypt;
break;
#endif
case 256:
xctx->stream = aes256_t4_xts_encrypt;
break;
default:
return 0;
}
}
else
{
aes_t4_set_decrypt_key(key, ctx->key_len * 4, &xctx->ks1.ks);
xctx->xts.block1 = (block128_f)aes_t4_decrypt;
switch (bits) {
case 128:
xctx->stream = aes128_t4_xts_decrypt;
break;
#if 0 /* not yet */
case 192:
xctx->stream = aes192_t4_xts_decrypt;
break;
#endif
case 256:
xctx->stream = aes256_t4_xts_decrypt;
break;
default:
return 0;
}
}
aes_t4_set_encrypt_key(key + ctx->key_len/2,
ctx->key_len * 4, &xctx->ks2.ks);
xctx->xts.block2 = (block128_f)aes_t4_encrypt;
xctx->xts.key1 = &xctx->ks1;
}
if (iv)
{
xctx->xts.key2 = &xctx->ks2;
memcpy(ctx->iv, iv, 16);
}
return 1;
}
#define aes_t4_xts_cipher aes_xts_cipher
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
{
EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
if (!iv && !key)
return 1;
if (key)
{
int bits = ctx->key_len * 8;
aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
&cctx->ks, (block128_f)aes_t4_encrypt);
#if 0 /* not yet */
switch (bits) {
case 128:
cctx->str = enc?(ccm128_f)aes128_t4_ccm64_encrypt :
	(ccm128_f)aes128_t4_ccm64_decrypt;
break;
case 192:
cctx->str = enc?(ccm128_f)aes192_t4_ccm64_encrypt :
	(ccm128_f)aes192_t4_ccm64_decrypt;
break;
case 256:
cctx->str = enc?(ccm128_f)aes256_t4_ccm64_encrypt :
	(ccm128_f)aes256_t4_ccm64_decrypt;
break;
default:
return 0;
}
#endif
cctx->key_set = 1;
}
if (iv)
{
memcpy(ctx->iv, iv, 15 - cctx->L);
cctx->iv_set = 1;
}
return 1;
}
#define aes_t4_ccm_cipher aes_ccm_cipher
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
#define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
flags|EVP_CIPH_##MODE##_MODE, \
aes_t4_init_key, \
aes_t4_##mode##_cipher, \
NULL, \
sizeof(EVP_AES_KEY), \
NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
nid##_##keylen##_##nmode,blocksize, \
keylen/8,ivlen, \
flags|EVP_CIPH_##MODE##_MODE, \
aes_init_key, \
aes_##mode##_cipher, \
NULL, \
sizeof(EVP_AES_KEY), \
NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
#define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
nid##_##keylen##_##mode,blocksize, \
(EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
flags|EVP_CIPH_##MODE##_MODE, \
aes_t4_##mode##_init_key, \
aes_t4_##mode##_cipher, \
aes_##mode##_cleanup, \
sizeof(EVP_AES_##MODE##_CTX), \
NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
nid##_##keylen##_##mode,blocksize, \
(EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
flags|EVP_CIPH_##MODE##_MODE, \
aes_##mode##_init_key, \
aes_##mode##_cipher, \
aes_##mode##_cleanup, \
sizeof(EVP_AES_##MODE##_CTX), \
NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
#else
#define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
flags|EVP_CIPH_##MODE##_MODE, \
aes_init_key, \
aes_##mode##_cipher, \
NULL, \
sizeof(EVP_AES_KEY), \
NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }
#define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
nid##_##keylen##_##mode,blocksize, \
(EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
flags|EVP_CIPH_##MODE##_MODE, \
aes_##mode##_init_key, \
aes_##mode##_cipher, \
aes_##mode##_cleanup, \
sizeof(EVP_AES_##MODE##_CTX), \
NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }
#endif
#if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
#include "arm_arch.h"
#if __ARM_ARCH__>=7
# if defined(BSAES_ASM)
# define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
# endif
# define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
# define HWAES_set_encrypt_key aes_v8_set_encrypt_key
# define HWAES_set_decrypt_key aes_v8_set_decrypt_key
# define HWAES_encrypt aes_v8_encrypt
# define HWAES_decrypt aes_v8_decrypt
# define HWAES_cbc_encrypt aes_v8_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
#endif
#endif
#if defined(HWAES_CAPABLE)
int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
AES_KEY *key);
int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
AES_KEY *key);
void HWAES_encrypt(const unsigned char *in, unsigned char *out,
const AES_KEY *key);
void HWAES_decrypt(const unsigned char *in, unsigned char *out,
const AES_KEY *key);
void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
size_t length, const AES_KEY *key,
unsigned char *ivec, const int enc);
void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
size_t len, const AES_KEY *key, const unsigned char ivec[16]);
#endif
#define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
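/*
 * BLOCK_CIPHER_generic_pack() instantiates the seven generic modes (CBC,
 * ECB, OFB, CFB128, CFB1, CFB8 and CTR) for one key length, e.g.
 * EVP_aes_128_cbc() ... EVP_aes_128_ctr() for keylen 128. All of them
 * share aes_init_key() below and an EVP_AES_KEY as cipher_data.
 */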
static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
{
int ret, mode;
EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
mode = ctx->cipher->flags & EVP_CIPH_MODE;
if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
    && !enc)
	{
#ifdef HWAES_CAPABLE
if (HWAES_CAPABLE)
{
ret = HWAES_set_decrypt_key(key,ctx->key_len*8,&dat->ks.ks);
dat->block = (block128_f)HWAES_decrypt;
dat->stream.cbc = NULL;
#ifdef HWAES_cbc_encrypt
if (mode==EVP_CIPH_CBC_MODE)
dat->stream.cbc = (cbc128_f)HWAES_cbc_encrypt;
#endif
}
else
#endif
#ifdef BSAES_CAPABLE
if (BSAES_CAPABLE && mode==EVP_CIPH_CBC_MODE)
{
ret = AES_set_decrypt_key(key,ctx->key_len*8,&dat->ks.ks);
dat->block = (block128_f)AES_decrypt;
dat->stream.cbc = (cbc128_f)bsaes_cbc_encrypt;
}
else
#endif
#ifdef VPAES_CAPABLE
if (VPAES_CAPABLE)
{
ret = vpaes_set_decrypt_key(key,ctx->key_len*8,&dat->ks.ks);
dat->block = (block128_f)vpaes_decrypt;
dat->stream.cbc = mode==EVP_CIPH_CBC_MODE ?
(cbc128_f)vpaes_cbc_encrypt :
NULL;
}
else
#endif
{
ret = AES_set_decrypt_key(key,ctx->key_len*8,&dat->ks.ks);
dat->block = (block128_f)AES_decrypt;
dat->stream.cbc = mode==EVP_CIPH_CBC_MODE ?
(cbc128_f)AES_cbc_encrypt :
NULL;