Changeset View
Changeset View
Standalone View
Standalone View
cipher/rijndael.c
Show First 20 Lines • Show All 193 Lines • ▼ Show 20 Lines | |||||
extern size_t _gcry_aes_armv8_ce_ocb_auth (gcry_cipher_hd_t c, | extern size_t _gcry_aes_armv8_ce_ocb_auth (gcry_cipher_hd_t c, | ||||
const void *abuf_arg, size_t nblocks); | const void *abuf_arg, size_t nblocks); | ||||
extern void _gcry_aes_armv8_ce_xts_crypt (void *context, unsigned char *tweak, | extern void _gcry_aes_armv8_ce_xts_crypt (void *context, unsigned char *tweak, | ||||
void *outbuf_arg, | void *outbuf_arg, | ||||
const void *inbuf_arg, | const void *inbuf_arg, | ||||
size_t nblocks, int encrypt); | size_t nblocks, int encrypt); | ||||
#endif /*USE_ARM_ASM*/ | #endif /*USE_ARM_ASM*/ | ||||
/* forward declaration */ | |||||
static int _gcry_aes_generic_cbc_enc (const void *context, unsigned char *iv, | |||||
void *outbuf_arg, const void *inbuf_arg, | |||||
size_t nblocks, | |||||
int cbc_mac); | |||||
#ifdef USE_PPC_ASM | |||||
/* POWER 8 AES extensions */ | |||||
extern void aes_p8_encrypt (const unsigned char *in, | |||||
unsigned char *out, | |||||
const RIJNDAEL_context *ctx); | |||||
/* rijndael_cryptfn_t adapter: encrypt one 16-byte block IN to OUT using
 * the POWER8 vcrypto assembly.  The return value is the stack burn
 * depth expected by the generic dispatch code.  */
static unsigned int _gcry_aes_ppc8_encrypt (const RIJNDAEL_context *ctx,
                                            unsigned char *out,
                                            const unsigned char *in)
{
  /* NOTE: the asm entry point takes (in, out, key) in exactly this
   * order; swapping the arguments to match the libgcrypt convention
   * broke the assembly when it was tried.  */
  aes_p8_encrypt (in, out, ctx);
  return 0; /* does not use stack */
}
/* The asm decrypt entry point takes the decryption key schedule
 * (ctx->u2), not the whole context -- see the wrapper below. */
extern void aes_p8_decrypt (const unsigned char *in, | |||||
unsigned char *out, | |||||
const void *sboxes); | |||||
/* rijndael_cryptfn_t adapter: decrypt one 16-byte block IN to OUT using
 * the POWER8 vcrypto assembly.  Unlike the encrypt wrapper, this passes
 * the decryption key schedule (ctx->u2) rather than the whole context,
 * because that is where aes_p8_set_decrypt_key stores its output.  */
static unsigned int _gcry_aes_ppc8_decrypt (const RIJNDAEL_context *ctx,
                                            unsigned char *out,
                                            const unsigned char *in)
{
  aes_p8_decrypt (in, out, &ctx->u2);
  return 0; /* does not use stack */
}
extern int aes_p8_set_encrypt_key (const unsigned char *userKey, const int bits, | |||||
RIJNDAEL_context *key); | |||||
extern int aes_p8_set_decrypt_key (const unsigned char *userKey, const int bits, | |||||
/* this is the decryption key part of context */ | |||||
const unsigned (*)[15][4]); | |||||
/* No performance benefit observed. */
#if 0 | |||||
/* or decrypt */ | |||||
extern void aes_p8_cbc_encrypt (const unsigned char *in, | |||||
unsigned char *out, | |||||
size_t length, | |||||
const RIJNDAEL_context *key, unsigned char *ivec, int is_enc); | |||||
static void _gcry_aes_ppc8_cbc_enc (void *context, unsigned char *iv, | |||||
void *outbuf_arg, const void *inbuf_arg, | |||||
size_t nblocks, | |||||
int is_cbc_mac) | |||||
{ | |||||
const RIJNDAEL_context *ctx = context; | |||||
#ifdef __builtin_expect | |||||
__builtin_expect (is_cbc_mac, 0); | |||||
#endif | |||||
if (is_cbc_mac) { | |||||
_gcry_aes_generic_cbc_enc (ctx, iv, outbuf_arg, inbuf_arg, nblocks, is_cbc_mac); | |||||
return; | |||||
} | |||||
aes_p8_cbc_encrypt (inbuf_arg, outbuf_arg, nblocks, ctx, iv, 1); | |||||
} | |||||
extern void _gcry_aes_ppc8_cbc_dec (void *context, unsigned char *iv, | |||||
void *outbuf_arg, const void *inbuf_arg, | |||||
size_t nblocks) | |||||
{ | |||||
aes_p8_cbc_encrypt (inbuf_arg, outbuf_arg, nblocks, context, iv, 0); | |||||
} | |||||
extern void aes_p8_ctr32_encrypt_blocks (const unsigned char *in, unsigned char *out, | |||||
size_t len, const void *key, | |||||
const unsigned char ivec[16]); | |||||
/* Bulk CTR encryption via the POWER8 assembly.
 *
 * Made static for consistency with the other wrappers (it is only
 * referenced from this file).
 *
 * The asm routine declares its counter as `const unsigned char
 * ivec[16]' and therefore cannot advance the caller's counter, but the
 * libgcrypt bulk-CTR contract requires CTR to be advanced by NBLOCKS on
 * return.  Do that here with a 128-bit big-endian addition.
 *
 * NOTE(review): the "ctr32" asm increments only the low 32 bits of the
 * counter internally; if the low word wraps within one call the
 * keystream will diverge from libgcrypt's full 128-bit counter --
 * confirm callers never cross that boundary, or split the call at the
 * wrap point.  */
static void _gcry_aes_ppc8_ctr_enc (void *context, unsigned char *ctr,
                                    void *outbuf, const void *inbuf,
                                    size_t nblocks)
{
  size_t carry = nblocks;
  int i;

  aes_p8_ctr32_encrypt_blocks (inbuf, outbuf, nblocks, context, ctr);

  /* Advance the 128-bit big-endian counter by NBLOCKS.  */
  for (i = 15; i >= 0 && carry; i--)
    {
      carry += ctr[i];
      ctr[i] = carry & 0xff;
      carry >>= 8;
    }
}
extern void aes_p8_xts_encrypt (const unsigned char *in, | |||||
unsigned char *out, | |||||
size_t length, | |||||
const void *key1, const void *key2, | |||||
const unsigned char iv[16]); | |||||
extern void aes_p8_xts_decrypt (const unsigned char *in, | |||||
unsigned char *out, | |||||
size_t length, | |||||
const void *key1, const void *key2, | |||||
const unsigned char iv[16]); | |||||
static void _gcry_aes_ppc8_xts_crypt_wrap (void *context, unsigned char *tweak, | |||||
void *outbuf, const void *inbuf, | |||||
size_t nblocks, int encrypt) | |||||
{ | |||||
const RIJNDAEL_context *ctx = context; | |||||
if (encrypt) | |||||
aes_p8_xts_encrypt (inbuf, outbuf, nblocks, &ctx->u1, &ctx->u2, tweak); | |||||
else | |||||
aes_p8_xts_decrypt (inbuf, outbuf, nblocks, &ctx->u1, &ctx->u2, tweak); | |||||
} | |||||
#endif | |||||
#endif /*USE_PPC_ASM*/ | |||||
static unsigned int do_encrypt (const RIJNDAEL_context *ctx, unsigned char *bx, | static unsigned int do_encrypt (const RIJNDAEL_context *ctx, unsigned char *bx, | ||||
const unsigned char *ax); | const unsigned char *ax); | ||||
static unsigned int do_decrypt (const RIJNDAEL_context *ctx, unsigned char *bx, | static unsigned int do_decrypt (const RIJNDAEL_context *ctx, unsigned char *bx, | ||||
const unsigned char *ax); | const unsigned char *ax); | ||||
␌ | ␌ | ||||
/* All the numbers. */ | /* All the numbers. */ | ||||
▲ Show 20 Lines • Show All 45 Lines • ▼ Show 20 Lines | do_setkey (RIJNDAEL_context *ctx, const byte *key, const unsigned keylen, | ||||
gcry_cipher_hd_t hd) | gcry_cipher_hd_t hd) | ||||
{ | { | ||||
static int initialized = 0; | static int initialized = 0; | ||||
static const char *selftest_failed = 0; | static const char *selftest_failed = 0; | ||||
int rounds; | int rounds; | ||||
int i,j, r, t, rconpointer = 0; | int i,j, r, t, rconpointer = 0; | ||||
int KC; | int KC; | ||||
#if defined(USE_AESNI) || defined(USE_PADLOCK) || defined(USE_SSSE3) \ | #if defined(USE_AESNI) || defined(USE_PADLOCK) || defined(USE_SSSE3) \ | ||||
|| defined(USE_ARM_CE) | || defined(USE_ARM_CE) || defined(USE_PPC_ASM) | ||||
unsigned int hwfeatures; | unsigned int hwfeatures; | ||||
#endif | #endif | ||||
(void)hd; | (void)hd; | ||||
/* The on-the-fly self tests are only run in non-fips mode. In fips | /* The on-the-fly self tests are only run in non-fips mode. In fips | ||||
mode explicit self-tests are required. Actually the on-the-fly | mode explicit self-tests are required. Actually the on-the-fly | ||||
self-tests are not fully thread-safe and it might happen that a | self-tests are not fully thread-safe and it might happen that a | ||||
Show All 27 Lines | else if ( keylen == 256/8 ) | ||||
KC = 8; | KC = 8; | ||||
} | } | ||||
else | else | ||||
return GPG_ERR_INV_KEYLEN; | return GPG_ERR_INV_KEYLEN; | ||||
ctx->rounds = rounds; | ctx->rounds = rounds; | ||||
#if defined(USE_AESNI) || defined(USE_PADLOCK) || defined(USE_SSSE3) \ | #if defined(USE_AESNI) || defined(USE_PADLOCK) || defined(USE_SSSE3) \ | ||||
|| defined(USE_ARM_CE) | || defined(USE_ARM_CE) || defined(USE_PPC_ASM) | ||||
hwfeatures = _gcry_get_hw_features (); | hwfeatures = _gcry_get_hw_features (); | ||||
#endif | #endif | ||||
ctx->decryption_prepared = 0; | ctx->decryption_prepared = 0; | ||||
#ifdef USE_PADLOCK | #ifdef USE_PADLOCK | ||||
ctx->use_padlock = 0; | ctx->use_padlock = 0; | ||||
#endif | #endif | ||||
#ifdef USE_AESNI | #ifdef USE_AESNI | ||||
ctx->use_aesni = 0; | ctx->use_aesni = 0; | ||||
#endif | #endif | ||||
#ifdef USE_SSSE3 | #ifdef USE_SSSE3 | ||||
ctx->use_ssse3 = 0; | ctx->use_ssse3 = 0; | ||||
#endif | #endif | ||||
#ifdef USE_ARM_CE | #ifdef USE_ARM_CE | ||||
ctx->use_arm_ce = 0; | ctx->use_arm_ce = 0; | ||||
#endif | #endif | ||||
#ifdef USE_PPC_ASM | |||||
ctx->use_ppc_asm = 0; | |||||
#endif | |||||
if (0) | if (0) | ||||
{ | { | ||||
; | ; | ||||
} | } | ||||
#ifdef USE_AESNI | #ifdef USE_AESNI | ||||
else if (hwfeatures & HWF_INTEL_AESNI) | else if (hwfeatures & HWF_INTEL_AESNI) | ||||
{ | { | ||||
▲ Show 20 Lines • Show All 64 Lines • ▼ Show 20 Lines | else if (hwfeatures & HWF_ARM_AES) | ||||
hd->bulk.cbc_dec = _gcry_aes_armv8_ce_cbc_dec; | hd->bulk.cbc_dec = _gcry_aes_armv8_ce_cbc_dec; | ||||
hd->bulk.ctr_enc = _gcry_aes_armv8_ce_ctr_enc; | hd->bulk.ctr_enc = _gcry_aes_armv8_ce_ctr_enc; | ||||
hd->bulk.ocb_crypt = _gcry_aes_armv8_ce_ocb_crypt; | hd->bulk.ocb_crypt = _gcry_aes_armv8_ce_ocb_crypt; | ||||
hd->bulk.ocb_auth = _gcry_aes_armv8_ce_ocb_auth; | hd->bulk.ocb_auth = _gcry_aes_armv8_ce_ocb_auth; | ||||
hd->bulk.xts_crypt = _gcry_aes_armv8_ce_xts_crypt; | hd->bulk.xts_crypt = _gcry_aes_armv8_ce_xts_crypt; | ||||
} | } | ||||
} | } | ||||
#endif | #endif | ||||
#ifdef USE_PPC_ASM | |||||
else if (hwfeatures & HWF_PPC_VCRYPTO) | |||||
{ | |||||
ctx->encrypt_fn = _gcry_aes_ppc8_encrypt; | |||||
ctx->decrypt_fn = _gcry_aes_ppc8_decrypt; | |||||
ctx->prefetch_enc_fn = NULL; | |||||
ctx->prefetch_dec_fn = NULL; | |||||
ctx->use_ppc_asm = 1; | |||||
/* No performance benefit was observed. */
#if 0 | |||||
if (hd) | |||||
{ | |||||
hd->bulk.cbc_enc = _gcry_aes_ppc8_cbc_enc; | |||||
hd->bulk.cbc_dec = _gcry_aes_ppc8_cbc_dec; | |||||
hd->bulk.ctr_enc = _gcry_aes_ppc8_ctr_enc; | |||||
hd->bulk.xts_crypt = _gcry_aes_ppc8_xts_crypt_wrap; | |||||
} | |||||
#endif | |||||
} | |||||
#endif | |||||
else | else | ||||
{ | { | ||||
ctx->encrypt_fn = do_encrypt; | ctx->encrypt_fn = do_encrypt; | ||||
ctx->decrypt_fn = do_decrypt; | ctx->decrypt_fn = do_decrypt; | ||||
ctx->prefetch_enc_fn = prefetch_enc; | ctx->prefetch_enc_fn = prefetch_enc; | ||||
ctx->prefetch_dec_fn = prefetch_dec; | ctx->prefetch_dec_fn = prefetch_dec; | ||||
} | } | ||||
Show All 10 Lines | |||||
#ifdef USE_SSSE3 | #ifdef USE_SSSE3 | ||||
else if (ctx->use_ssse3) | else if (ctx->use_ssse3) | ||||
_gcry_aes_ssse3_do_setkey (ctx, key); | _gcry_aes_ssse3_do_setkey (ctx, key); | ||||
#endif | #endif | ||||
#ifdef USE_ARM_CE | #ifdef USE_ARM_CE | ||||
else if (ctx->use_arm_ce) | else if (ctx->use_arm_ce) | ||||
_gcry_aes_armv8_ce_setkey (ctx, key); | _gcry_aes_armv8_ce_setkey (ctx, key); | ||||
#endif | #endif | ||||
#ifdef USE_PPC_ASM | |||||
else if (ctx->use_ppc_asm) { | |||||
/* These are both done here to avoid having to store the key. | |||||
* These S-boxes are generated on-the-fly. */ | |||||
aes_p8_set_encrypt_key (key, keylen * 8, ctx); | |||||
aes_p8_set_decrypt_key (key, keylen * 8, &ctx->keyschdec32); | |||||
} | |||||
#endif | |||||
else | else | ||||
{ | { | ||||
const byte *sbox = ((const byte *)encT) + 1; | const byte *sbox = ((const byte *)encT) + 1; | ||||
union | union | ||||
{ | { | ||||
PROPERLY_ALIGNED_TYPE dummy; | PROPERLY_ALIGNED_TYPE dummy; | ||||
byte data[MAXKC][4]; | byte data[MAXKC][4]; | ||||
u32 data32[MAXKC]; | u32 data32[MAXKC]; | ||||
▲ Show 20 Lines • Show All 128 Lines • ▼ Show 20 Lines | else if (ctx->use_arm_ce) | ||||
} | } | ||||
#endif /*USE_SSSE3*/ | #endif /*USE_SSSE3*/ | ||||
#ifdef USE_PADLOCK | #ifdef USE_PADLOCK | ||||
else if (ctx->use_padlock) | else if (ctx->use_padlock) | ||||
{ | { | ||||
/* Padlock does not need decryption subkeys. */ | /* Padlock does not need decryption subkeys. */ | ||||
} | } | ||||
#endif /*USE_PADLOCK*/ | #endif /*USE_PADLOCK*/ | ||||
#ifdef USE_PPC_ASM | |||||
else if (ctx->use_ppc_asm) | |||||
{ | |||||
/* done during encryption key setup, as then we have the actual | |||||
* key available */ | |||||
} | |||||
#endif /*USE_PPC_ASM*/ | |||||
else | else | ||||
{ | { | ||||
const byte *sbox = ((const byte *)encT) + 1; | const byte *sbox = ((const byte *)encT) + 1; | ||||
prefetch_enc(); | prefetch_enc(); | ||||
prefetch_dec(); | prefetch_dec(); | ||||
ctx->keyschdec32[0][0] = ctx->keyschenc32[0][0]; | ctx->keyschdec32[0][0] = ctx->keyschenc32[0][0]; | ||||
Show All 35 Lines | else | ||||
ctx->keyschdec32[r][0] = ctx->keyschenc32[r][0]; | ctx->keyschdec32[r][0] = ctx->keyschenc32[r][0]; | ||||
ctx->keyschdec32[r][1] = ctx->keyschenc32[r][1]; | ctx->keyschdec32[r][1] = ctx->keyschenc32[r][1]; | ||||
ctx->keyschdec32[r][2] = ctx->keyschenc32[r][2]; | ctx->keyschdec32[r][2] = ctx->keyschenc32[r][2]; | ||||
ctx->keyschdec32[r][3] = ctx->keyschenc32[r][3]; | ctx->keyschdec32[r][3] = ctx->keyschenc32[r][3]; | ||||
} | } | ||||
} | } | ||||
␌ | ␌ | ||||
#if !defined(USE_ARM_ASM) && !defined(USE_AMD64_ASM) | #if !defined(USE_ARM_ASM) && !defined(USE_AMD64_ASM) && !defined(USE_PPC_ASM) | ||||
/* Encrypt one block. A and B may be the same. */ | /* Encrypt one block. A and B may be the same. */ | ||||
static unsigned int | static unsigned int | ||||
do_encrypt_fn (const RIJNDAEL_context *ctx, unsigned char *b, | do_encrypt_fn (const RIJNDAEL_context *ctx, unsigned char *b, | ||||
const unsigned char *a) | const unsigned char *a) | ||||
{ | { | ||||
#define rk (ctx->keyschenc32) | #define rk (ctx->keyschenc32) | ||||
const byte *sbox = ((const byte *)encT) + 1; | const byte *sbox = ((const byte *)encT) + 1; | ||||
int rounds = ctx->rounds; | int rounds = ctx->rounds; | ||||
▲ Show 20 Lines • Show All 117 Lines • ▼ Show 20 Lines | #define rk (ctx->keyschenc32) | ||||
buf_put_le32(b + 0, sa[0]); | buf_put_le32(b + 0, sa[0]); | ||||
buf_put_le32(b + 4, sa[1]); | buf_put_le32(b + 4, sa[1]); | ||||
buf_put_le32(b + 8, sa[2]); | buf_put_le32(b + 8, sa[2]); | ||||
buf_put_le32(b + 12, sa[3]); | buf_put_le32(b + 12, sa[3]); | ||||
#undef rk | #undef rk | ||||
return (56 + 2*sizeof(int)); | return (56 + 2*sizeof(int)); | ||||
} | } | ||||
#endif /*!USE_ARM_ASM && !USE_AMD64_ASM*/ | #endif /*!USE_ARM_ASM && !USE_AMD64_ASM && !USE_PPC_ASM*/ | ||||
/* Encrypt one block (AX -> BX; they may overlap) with the best
 * implementation selected at build time.  Returns the stack burn
 * depth.  */
static unsigned int
do_encrypt (const RIJNDAEL_context *ctx,
            unsigned char *bx, const unsigned char *ax)
{
#ifdef USE_AMD64_ASM
  return _gcry_aes_amd64_encrypt_block(ctx->keyschenc, bx, ax, ctx->rounds,
                                       encT);
#elif defined(USE_ARM_ASM)
  return _gcry_aes_arm_encrypt_block(ctx->keyschenc, bx, ax, ctx->rounds, encT);
#elif defined(USE_PPC_ASM)
  return _gcry_aes_ppc8_encrypt(ctx, bx, ax);
#else
  return do_encrypt_fn (ctx, bx, ax);
#endif /* !USE_ARM_ASM && !USE_AMD64_ASM && !USE_PPC_ASM */
}
static unsigned int | static unsigned int | ||||
rijndael_encrypt (void *context, byte *b, const byte *a) | rijndael_encrypt (void *context, byte *b, const byte *a) | ||||
▲ Show 20 Lines • Show All 61 Lines • ▼ Show 20 Lines | else | ||||
inbuf += BLOCKSIZE; | inbuf += BLOCKSIZE; | ||||
} | } | ||||
} | } | ||||
if (burn_depth) | if (burn_depth) | ||||
_gcry_burn_stack (burn_depth + 4 * sizeof(void *)); | _gcry_burn_stack (burn_depth + 4 * sizeof(void *)); | ||||
} | } | ||||
static int _gcry_aes_generic_cbc_enc (const void *context, unsigned char *iv, | |||||
void *outbuf_arg, const void *inbuf_arg, | |||||
size_t nblocks, | |||||
int cbc_mac) | |||||
{ | |||||
const RIJNDAEL_context *ctx = context; | |||||
unsigned char *outbuf = outbuf_arg; | |||||
const unsigned char *inbuf = inbuf_arg; | |||||
rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn; | |||||
int burn_depth = 0; | |||||
if (ctx->prefetch_enc_fn) | |||||
ctx->prefetch_enc_fn(); | |||||
unsigned char *last_iv = iv; | |||||
for ( ;nblocks; nblocks-- ) | |||||
{ | |||||
cipher_block_xor(outbuf, inbuf, last_iv, BLOCKSIZE); | |||||
burn_depth = encrypt_fn (ctx, outbuf, outbuf); | |||||
last_iv = outbuf; | |||||
inbuf += BLOCKSIZE; | |||||
if (!cbc_mac) | |||||
outbuf += BLOCKSIZE; | |||||
} | |||||
if (last_iv != iv) | |||||
cipher_block_cpy (iv, last_iv, BLOCKSIZE); | |||||
if (burn_depth) | |||||
_gcry_burn_stack (burn_depth + 4 * sizeof(void *)); | |||||
return 0; | |||||
} | |||||
/* Bulk encryption of complete blocks in CBC mode. Caller needs to | /* Bulk encryption of complete blocks in CBC mode. Caller needs to | ||||
make sure that IV is aligned on an unsigned long boundary. This | make sure that IV is aligned on an unsigned long boundary. This | ||||
function is only intended for the bulk encryption feature of | function is only intended for the bulk encryption feature of | ||||
cipher.c. */ | cipher.c. */ | ||||
void | void | ||||
_gcry_aes_cbc_enc (void *context, unsigned char *iv, | _gcry_aes_cbc_enc (void *context, unsigned char *iv, | ||||
void *outbuf_arg, const void *inbuf_arg, | void *outbuf_arg, const void *inbuf_arg, | ||||
size_t nblocks, int cbc_mac) | size_t nblocks, int cbc_mac) | ||||
{ | { | ||||
RIJNDAEL_context *ctx = context; | RIJNDAEL_context *ctx = context; | ||||
unsigned char *outbuf = outbuf_arg; | unsigned char *outbuf = outbuf_arg; | ||||
const unsigned char *inbuf = inbuf_arg; | const unsigned char *inbuf = inbuf_arg; | ||||
unsigned char *last_iv; | |||||
unsigned int burn_depth = 0; | unsigned int burn_depth = 0; | ||||
if (0) | if (0) | ||||
; | ; | ||||
#ifdef USE_AESNI | #ifdef USE_AESNI | ||||
else if (ctx->use_aesni) | else if (ctx->use_aesni) | ||||
{ | { | ||||
_gcry_aes_aesni_cbc_enc (ctx, iv, outbuf, inbuf, nblocks, cbc_mac); | _gcry_aes_aesni_cbc_enc (ctx, iv, outbuf, inbuf, nblocks, cbc_mac); | ||||
Show All 11 Lines | #ifdef USE_ARM_CE | ||||
else if (ctx->use_arm_ce) | else if (ctx->use_arm_ce) | ||||
{ | { | ||||
_gcry_aes_armv8_ce_cbc_enc (ctx, iv, outbuf, inbuf, nblocks, cbc_mac); | _gcry_aes_armv8_ce_cbc_enc (ctx, iv, outbuf, inbuf, nblocks, cbc_mac); | ||||
return; | return; | ||||
} | } | ||||
#endif /*USE_ARM_CE*/ | #endif /*USE_ARM_CE*/ | ||||
else | else | ||||
{ | { | ||||
rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn; | _gcry_aes_generic_cbc_enc (ctx, iv, outbuf, inbuf, nblocks, cbc_mac); | ||||
return; | |||||
if (ctx->prefetch_enc_fn) | |||||
ctx->prefetch_enc_fn(); | |||||
last_iv = iv; | |||||
for ( ;nblocks; nblocks-- ) | |||||
{ | |||||
cipher_block_xor(outbuf, inbuf, last_iv, BLOCKSIZE); | |||||
burn_depth = encrypt_fn (ctx, outbuf, outbuf); | |||||
last_iv = outbuf; | |||||
inbuf += BLOCKSIZE; | |||||
if (!cbc_mac) | |||||
outbuf += BLOCKSIZE; | |||||
} | |||||
if (last_iv != iv) | |||||
cipher_block_cpy (iv, last_iv, BLOCKSIZE); | |||||
} | } | ||||
if (burn_depth) | if (burn_depth) | ||||
_gcry_burn_stack (burn_depth + 4 * sizeof(void *)); | _gcry_burn_stack (burn_depth + 4 * sizeof(void *)); | ||||
} | } | ||||
/* Bulk encryption of complete blocks in CTR mode. Caller needs to | /* Bulk encryption of complete blocks in CTR mode. Caller needs to | ||||
▲ Show 20 Lines • Show All 58 Lines • ▼ Show 20 Lines | else | ||||
} | } | ||||
if (burn_depth) | if (burn_depth) | ||||
_gcry_burn_stack (burn_depth + 4 * sizeof(void *)); | _gcry_burn_stack (burn_depth + 4 * sizeof(void *)); | ||||
} | } | ||||
␌ | ␌ | ||||
#if !defined(USE_ARM_ASM) && !defined(USE_AMD64_ASM) | #if !defined(USE_ARM_ASM) && !defined(USE_AMD64_ASM) && !defined(USE_PPC_ASM) | ||||
/* Decrypt one block. A and B may be the same. */ | /* Decrypt one block. A and B may be the same. */ | ||||
static unsigned int | static unsigned int | ||||
do_decrypt_fn (const RIJNDAEL_context *ctx, unsigned char *b, | do_decrypt_fn (const RIJNDAEL_context *ctx, unsigned char *b, | ||||
const unsigned char *a) | const unsigned char *a) | ||||
{ | { | ||||
#define rk (ctx->keyschdec32) | #define rk (ctx->keyschdec32) | ||||
int rounds = ctx->rounds; | int rounds = ctx->rounds; | ||||
int r; | int r; | ||||
▲ Show 20 Lines • Show All 115 Lines • ▼ Show 20 Lines | #define rk (ctx->keyschdec32) | ||||
buf_put_le32(b + 0, sa[0]); | buf_put_le32(b + 0, sa[0]); | ||||
buf_put_le32(b + 4, sa[1]); | buf_put_le32(b + 4, sa[1]); | ||||
buf_put_le32(b + 8, sa[2]); | buf_put_le32(b + 8, sa[2]); | ||||
buf_put_le32(b + 12, sa[3]); | buf_put_le32(b + 12, sa[3]); | ||||
#undef rk | #undef rk | ||||
return (56+2*sizeof(int)); | return (56+2*sizeof(int)); | ||||
} | } | ||||
#endif /*!USE_ARM_ASM && !USE_AMD64_ASM*/ | #endif /*!USE_ARM_ASM && !USE_AMD64_ASM && !USE_PPC_ASM*/ | ||||
/* Decrypt one block (AX -> BX; they may overlap) with the best
 * implementation selected at build time.  Returns the stack burn
 * depth.  */
static unsigned int
do_decrypt (const RIJNDAEL_context *ctx, unsigned char *bx,
            const unsigned char *ax)
{
#ifdef USE_AMD64_ASM
  return _gcry_aes_amd64_decrypt_block(ctx->keyschdec, bx, ax, ctx->rounds,
                                       &dec_tables);
#elif defined(USE_ARM_ASM)
  return _gcry_aes_arm_decrypt_block(ctx->keyschdec, bx, ax, ctx->rounds,
                                     &dec_tables);
#elif defined(USE_PPC_ASM)
  return _gcry_aes_ppc8_decrypt(ctx, bx, ax);
#else
  return do_decrypt_fn (ctx, bx, ax);
#endif /* !USE_ARM_ASM && !USE_AMD64_ASM && !USE_PPC_ASM */
}
static inline void | static inline void | ||||
check_decryption_preparation (RIJNDAEL_context *ctx) | check_decryption_preparation (RIJNDAEL_context *ctx) | ||||
{ | { | ||||
if ( !ctx->decryption_prepared ) | if ( !ctx->decryption_prepared ) | ||||
{ | { | ||||
▲ Show 20 Lines • Show All 421 Lines • ▼ Show 20 Lines | static const unsigned char plaintext_128[16] = | ||||
{ | { | ||||
0x00,0x11,0x22,0x33,0x44,0x55,0x66,0x77, | 0x00,0x11,0x22,0x33,0x44,0x55,0x66,0x77, | ||||
0x88,0x99,0xaa,0xbb,0xcc,0xdd,0xee,0xff | 0x88,0x99,0xaa,0xbb,0xcc,0xdd,0xee,0xff | ||||
}; | }; | ||||
static const unsigned char key_128[16] = | static const unsigned char key_128[16] = | ||||
{ | { | ||||
0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07, | 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07, | ||||
0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f | 0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f | ||||
/* 0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6, */ | |||||
/* 0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c */ | |||||
}; | }; | ||||
static const unsigned char ciphertext_128[16] = | static const unsigned char ciphertext_128[16] = | ||||
{ | { | ||||
0x69,0xc4,0xe0,0xd8,0x6a,0x7b,0x04,0x30, | 0x69,0xc4,0xe0,0xd8,0x6a,0x7b,0x04,0x30, | ||||
0xd8,0xcd,0xb7,0x80,0x70,0xb4,0xc5,0x5a | 0xd8,0xcd,0xb7,0x80,0x70,0xb4,0xc5,0x5a | ||||
}; | }; | ||||
static const unsigned char key_test_expansion_128[16] = | |||||
{ | |||||
0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6, | |||||
0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c | |||||
}; | |||||
RIJNDAEL_context exp_ctx; | |||||
rijndael_setkey (&exp_ctx, key_test_expansion_128, sizeof (key_128), NULL); | |||||
#endif | #endif | ||||
/* Because gcc/ld can only align the CTX struct on 8 bytes on the | /* Because gcc/ld can only align the CTX struct on 8 bytes on the | ||||
stack, we need to allocate that context on the heap. */ | stack, we need to allocate that context on the heap. */ | ||||
ctx = _gcry_cipher_selftest_alloc_ctx (sizeof *ctx, &ctxmem); | ctx = _gcry_cipher_selftest_alloc_ctx (sizeof *ctx, &ctxmem); | ||||
if (!ctx) | if (!ctx) | ||||
return "failed to allocate memory"; | return "failed to allocate memory"; | ||||
rijndael_setkey (ctx, key_128, sizeof (key_128), NULL); | rijndael_setkey (ctx, key_128, sizeof (key_128), NULL); | ||||
rijndael_encrypt (ctx, scratch, plaintext_128); | rijndael_encrypt (ctx, scratch, plaintext_128); | ||||
if (memcmp (scratch, ciphertext_128, sizeof (ciphertext_128))) | if (memcmp (scratch, ciphertext_128, sizeof (ciphertext_128))) | ||||
{ | { | ||||
xfree (ctxmem); | xfree (ctxmem); | ||||
return "AES-128 test encryption failed."; | return "AES-128 test encryption failed."; | ||||
} | } | ||||
rijndael_decrypt (ctx, scratch, scratch); | rijndael_decrypt (ctx, scratch, ciphertext_128); | ||||
xfree (ctxmem); | xfree (ctxmem); | ||||
if (memcmp (scratch, plaintext_128, sizeof (plaintext_128))) | if (memcmp (scratch, plaintext_128, sizeof (plaintext_128))) | ||||
return "AES-128 test decryption failed."; | return "AES-128 test decryption failed."; | ||||
return NULL; | return NULL; | ||||
} | } | ||||
/* Run the self-tests for AES 192. Returns NULL on success. */ | /* Run the self-tests for AES 192. Returns NULL on success. */ | ||||
▲ Show 20 Lines • Show All 482 Lines • Show Last 20 Lines |