Changeset View
Changeset View
Standalone View
Standalone View
cipher/rijndael.c
| Context not available. | |||||
| extern int aes_p8_set_encrypt_key (const unsigned char *userKey, const int bits, | extern int aes_p8_set_encrypt_key (const unsigned char *userKey, const int bits, | ||||
| RIJNDAEL_context *key); | RIJNDAEL_context *key); | ||||
| extern int aes_p8_set_decrypt_key (const unsigned char *userKey, const int bits, | extern int aes_p8_set_decrypt_key (const unsigned char *userKey, const int bits, | ||||
| /* this is the decryption key part of context */ | /* this is the decryption key part of context */ | ||||
| const unsigned (*)[15][4]); | const unsigned (*)[15][4]); | ||||
| extern void aes_p8_cbc_encrypt (const unsigned char *in, unsigned char *out, | |||||
| size_t length, const void *key, | |||||
| unsigned char *ivec, const int enc, int rounds); | |||||
| static void _gcry_aes_ppc8_cbc_dec (void *context, unsigned char *iv, | |||||
| void *outbuf_arg, const void *inbuf_arg, | |||||
| size_t nblocks) { | |||||
| const RIJNDAEL_context *ctx = context; | |||||
| aes_p8_cbc_encrypt (inbuf_arg, outbuf_arg, nblocks * 16, &ctx->u2, iv, 0, ctx->rounds); | |||||
| return; | |||||
| } | |||||
| /* forward declaration */ | |||||
| static int _gcry_aes_generic_cbc_enc (const void *context, unsigned char *iv, | |||||
| void *outbuf_arg, const void *inbuf_arg, | |||||
| size_t nblocks, | |||||
| int cbc_mac); | |||||
| static void _gcry_aes_ppc8_cbc_enc (void *context, unsigned char *iv, | |||||
| void *outbuf_arg, const void *inbuf_arg, | |||||
| size_t nblocks, int cbc_mac) { | |||||
| const RIJNDAEL_context *ctx = context; | |||||
| #ifdef __builtin_expect | |||||
| __builtin_expect (cbc_mac, 0); | |||||
| #endif | |||||
| if (cbc_mac) { | |||||
| _gcry_aes_generic_cbc_enc (context, iv, outbuf_arg, inbuf_arg, nblocks, cbc_mac); | |||||
| return; | |||||
| } | |||||
| aes_p8_cbc_encrypt (inbuf_arg, outbuf_arg, nblocks * 16, &ctx->u1, iv, 1, ctx->rounds); | |||||
| _gcry_burn_stack (16 * 8); | |||||
| return; | |||||
| } | |||||
| extern void aes_p8_xts_encrypt(const unsigned char *inp, unsigned char *out, | |||||
| size_t len, const void *key1, | |||||
| const void *key2, const void *iv); | |||||
| extern void aes_p8_xts_decrypt(const unsigned char *inp, unsigned char *out, | |||||
| size_t len, const void *key1, | |||||
| const void *key2, const void *iv); | |||||
| void _gcry_aes_ppc8_xts_crypt (void *context, unsigned char *tweak, | |||||
| void *outbuf_arg, | |||||
| const void *inbuf_arg, | |||||
| size_t nblocks, int encrypt) { | |||||
| const RIJNDAEL_context *ctx = context; | |||||
| if (encrypt) | |||||
| aes_p8_xts_encrypt (inbuf_arg, outbuf_arg, nblocks * 16, &ctx->u1, NULL, tweak); | |||||
| else | |||||
| aes_p8_xts_decrypt (inbuf_arg, outbuf_arg, nblocks * 16, &ctx->u2, NULL, tweak); | |||||
| _gcry_burn_stack (16 * 6); | |||||
| } | |||||
| extern void aes_p8_ctr32_encrypt_blocks (const unsigned char *in, unsigned char *out, | |||||
| size_t len, const void *key, | |||||
| const void *ivec, int unused, int rounds); | |||||
/* Add ADD to the 128-bit big-endian counter stored at CTR,
 * propagating a carry from the low 64-bit half into the high half.
 * The high half is only rewritten when the carry actually occurs,
 * matching the original behavior. */
static inline void
add_be128 (void *ctr, uint64_t add)
{
  unsigned char *p = ctr;
  uint64_t lo = buf_get_be64 (p + 8);
  uint64_t hi = buf_get_be64 (p + 0);

  lo += add;
  if (lo < add)                 /* Low half wrapped -> carry out.  */
    buf_put_be64 (p + 0, hi + 1);
  buf_put_be64 (p + 8, lo);
}
| static void _gcry_aes_ppc8_ctr_enc (void *context, unsigned char *ctr, | |||||
| void *outbuf_arg, const void *inbuf_arg, | |||||
| size_t nblocks) { | |||||
| const unsigned char *inbuf = inbuf_arg; | |||||
| unsigned char *outbuf = outbuf_arg; | |||||
| const RIJNDAEL_context *ctx = context; | |||||
| const uint64_t two32 = 1ULL << 32; | |||||
| int overflow; | |||||
| u64 s[2], e[2]; | |||||
| s[0] = buf_get_be64(ctr + 8); | |||||
| overflow = two32 - (s[0] % two32) < nblocks; | |||||
| #ifdef __builtin_expect | |||||
| __builtin_expect(overflow, 0); | |||||
| #endif | |||||
| if (overflow) { | |||||
| uint32_t first_set = (two32 - (s[0] % two32)) % two32; | |||||
| aes_p8_ctr32_encrypt_blocks (inbuf, outbuf, first_set, &ctx->u1, ctr, /*unused*/0, ctx->rounds); | |||||
| inbuf += first_set * BLOCKSIZE; | |||||
| outbuf += first_set * BLOCKSIZE; | |||||
| nblocks -= first_set; | |||||
| add_be128(ctr, first_set); | |||||
| while (nblocks > UINT32_MAX) { | |||||
| aes_p8_ctr32_encrypt_blocks (inbuf, outbuf, two32, &ctx->u1, ctr, /*unused*/0, ctx->rounds); | |||||
| inbuf += two32 * BLOCKSIZE; | |||||
| outbuf += two32 * BLOCKSIZE; | |||||
| nblocks -= two32; | |||||
| add_be128(ctr, two32); | |||||
| } | |||||
| aes_p8_ctr32_encrypt_blocks (inbuf, outbuf, nblocks, &ctx->u1, ctr, /*unused*/0, ctx->rounds); | |||||
| } else { | |||||
| aes_p8_ctr32_encrypt_blocks (inbuf, outbuf, nblocks, &ctx->u1, ctr, /*unused*/0, ctx->rounds); | |||||
| } | |||||
| add_be128(ctr, nblocks); | |||||
| _gcry_burn_stack (16 * 8); | |||||
| return; | |||||
| } | |||||
| #endif /*USE_PPC_ASM*/ | #endif /*USE_PPC_ASM*/ | ||||
| static unsigned int do_encrypt (const RIJNDAEL_context *ctx, unsigned char *bx, | static unsigned int do_encrypt (const RIJNDAEL_context *ctx, unsigned char *bx, | ||||
| const unsigned char *ax); | const unsigned char *ax); | ||||
| static unsigned int do_decrypt (const RIJNDAEL_context *ctx, unsigned char *bx, | static unsigned int do_decrypt (const RIJNDAEL_context *ctx, unsigned char *bx, | ||||
| Context not available. | |||||
| ctx->encrypt_fn = _gcry_aes_ppc8_encrypt; | ctx->encrypt_fn = _gcry_aes_ppc8_encrypt; | ||||
| ctx->decrypt_fn = _gcry_aes_ppc8_decrypt; | ctx->decrypt_fn = _gcry_aes_ppc8_decrypt; | ||||
| ctx->prefetch_enc_fn = NULL; | ctx->prefetch_enc_fn = NULL; | ||||
| ctx->prefetch_dec_fn = NULL; | ctx->prefetch_dec_fn = NULL; | ||||
| ctx->use_ppc_asm = 1; | ctx->use_ppc_asm = 1; | ||||
| if (hd) { | |||||
| hd->bulk.cbc_dec = _gcry_aes_ppc8_cbc_dec; | |||||
| hd->bulk.cbc_enc = _gcry_aes_ppc8_cbc_enc; | |||||
| hd->bulk.xts_crypt = _gcry_aes_ppc8_xts_crypt; | |||||
| hd->bulk.ctr_enc = _gcry_aes_ppc8_ctr_enc; | |||||
| } | |||||
| } | } | ||||
| #endif | #endif | ||||
| else | else | ||||
| { | { | ||||
| ctx->encrypt_fn = do_encrypt; | ctx->encrypt_fn = do_encrypt; | ||||
| Context not available. | |||||
| else if (ctx->use_arm_ce) | else if (ctx->use_arm_ce) | ||||
| _gcry_aes_armv8_ce_setkey (ctx, key); | _gcry_aes_armv8_ce_setkey (ctx, key); | ||||
| #endif | #endif | ||||
| #ifdef USE_PPC_ASM | #ifdef USE_PPC_ASM | ||||
| else if (ctx->use_ppc_asm) { | else if (ctx->use_ppc_asm) { | ||||
| /* These are both done here to avoid having to store the key. | /* These are both done here to avoid having to store the key. */ | ||||
| * These S-boxes are generated on-the-fly. */ | |||||
| aes_p8_set_encrypt_key (key, keylen * 8, ctx); | aes_p8_set_encrypt_key (key, keylen * 8, ctx); | ||||
| aes_p8_set_decrypt_key (key, keylen * 8, &ctx->keyschdec32); | aes_p8_set_decrypt_key (key, keylen * 8, &ctx->keyschdec32); | ||||
| } | } | ||||
| #endif | #endif | ||||
| else | else | ||||
| Context not available. | |||||
| if (burn_depth) | if (burn_depth) | ||||
| _gcry_burn_stack (burn_depth + 4 * sizeof(void *)); | _gcry_burn_stack (burn_depth + 4 * sizeof(void *)); | ||||
| } | } | ||||
| static int _gcry_aes_generic_cbc_enc (const void *context, unsigned char *iv, | |||||
| void *outbuf_arg, const void *inbuf_arg, | |||||
| size_t nblocks, | |||||
| int cbc_mac) | |||||
| { | |||||
| const RIJNDAEL_context *ctx = context; | |||||
| unsigned char *outbuf = outbuf_arg; | |||||
| const unsigned char *inbuf = inbuf_arg; | |||||
| rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn; | |||||
| int burn_depth = 0; | |||||
| unsigned char *last_iv = iv; | |||||
| if (ctx->prefetch_enc_fn) | |||||
| ctx->prefetch_enc_fn(); | |||||
| for ( ;nblocks; nblocks-- ) | |||||
| { | |||||
| cipher_block_xor(outbuf, inbuf, last_iv, BLOCKSIZE); | |||||
| burn_depth = encrypt_fn (ctx, outbuf, outbuf); | |||||
| last_iv = outbuf; | |||||
| inbuf += BLOCKSIZE; | |||||
| if (!cbc_mac) | |||||
| outbuf += BLOCKSIZE; | |||||
| } | |||||
| if (last_iv != iv) | |||||
| cipher_block_cpy (iv, last_iv, BLOCKSIZE); | |||||
| if (burn_depth) | |||||
| _gcry_burn_stack (burn_depth + 4 * sizeof(void *)); | |||||
| return 0; | |||||
| } | |||||
| /* Bulk encryption of complete blocks in CBC mode. Caller needs to | /* Bulk encryption of complete blocks in CBC mode. Caller needs to | ||||
| make sure that IV is aligned on an unsigned long boundary. This | make sure that IV is aligned on an unsigned long boundary. This | ||||
| function is only intended for the bulk encryption feature of | function is only intended for the bulk encryption feature of | ||||
| cipher.c. */ | cipher.c. */ | ||||
| void | void | ||||
| Context not available. | |||||
| size_t nblocks, int cbc_mac) | size_t nblocks, int cbc_mac) | ||||
| { | { | ||||
| RIJNDAEL_context *ctx = context; | RIJNDAEL_context *ctx = context; | ||||
| unsigned char *outbuf = outbuf_arg; | unsigned char *outbuf = outbuf_arg; | ||||
| const unsigned char *inbuf = inbuf_arg; | const unsigned char *inbuf = inbuf_arg; | ||||
| unsigned char *last_iv; | |||||
| unsigned int burn_depth = 0; | unsigned int burn_depth = 0; | ||||
| if (0) | if (0) | ||||
| ; | ; | ||||
| #ifdef USE_AESNI | #ifdef USE_AESNI | ||||
| Context not available. | |||||
| else if (ctx->use_arm_ce) | else if (ctx->use_arm_ce) | ||||
| { | { | ||||
| _gcry_aes_armv8_ce_cbc_enc (ctx, iv, outbuf, inbuf, nblocks, cbc_mac); | _gcry_aes_armv8_ce_cbc_enc (ctx, iv, outbuf, inbuf, nblocks, cbc_mac); | ||||
| return; | return; | ||||
| } | } | ||||
| #endif /*USE_ARM_CE*/ | |||||
| #ifdef USE_PPC_ASM | |||||
| else if (ctx->use_ppc_asm) | |||||
| { | |||||
| _gcry_aes_ppc8_cbc_enc (ctx, iv, outbuf, inbuf, nblocks, cbc_mac); | |||||
| return; | |||||
| } | |||||
| #endif /*USE_ARM_CE*/ | #endif /*USE_ARM_CE*/ | ||||
| else | else | ||||
| { | { | ||||
| rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn; | _gcry_aes_generic_cbc_enc (ctx, iv, outbuf, inbuf, nblocks, cbc_mac); | ||||
| return; | |||||
| if (ctx->prefetch_enc_fn) | |||||
| ctx->prefetch_enc_fn(); | |||||
| last_iv = iv; | |||||
| for ( ;nblocks; nblocks-- ) | |||||
| { | |||||
| cipher_block_xor(outbuf, inbuf, last_iv, BLOCKSIZE); | |||||
| burn_depth = encrypt_fn (ctx, outbuf, outbuf); | |||||
| last_iv = outbuf; | |||||
| inbuf += BLOCKSIZE; | |||||
| if (!cbc_mac) | |||||
| outbuf += BLOCKSIZE; | |||||
| } | |||||
| if (last_iv != iv) | |||||
| cipher_block_cpy (iv, last_iv, BLOCKSIZE); | |||||
| } | } | ||||
| if (burn_depth) | if (burn_depth) | ||||
| _gcry_burn_stack (burn_depth + 4 * sizeof(void *)); | _gcry_burn_stack (burn_depth + 4 * sizeof(void *)); | ||||
| } | } | ||||
| Context not available. | |||||
| { | { | ||||
| _gcry_aes_armv8_ce_ctr_enc (ctx, ctr, outbuf, inbuf, nblocks); | _gcry_aes_armv8_ce_ctr_enc (ctx, ctr, outbuf, inbuf, nblocks); | ||||
| return; | return; | ||||
| } | } | ||||
| #endif /*USE_ARM_CE*/ | #endif /*USE_ARM_CE*/ | ||||
| #ifdef USE_PPC_ASM | |||||
| else if (ctx->use_ppc_asm) | |||||
| { | |||||
| _gcry_aes_ppc8_ctr_enc (ctx, ctr, outbuf, inbuf, nblocks); | |||||
| return; | |||||
| } | |||||
| #endif /*USE_PPC_ASM*/ | |||||
| else | else | ||||
| { | { | ||||
| union { unsigned char x1[16] ATTR_ALIGNED_16; u32 x32[4]; } tmp; | union { unsigned char x1[16] ATTR_ALIGNED_16; u32 x32[4]; } tmp; | ||||
| rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn; | rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn; | ||||
| Context not available. | |||||
| if (burn_depth) | if (burn_depth) | ||||
| _gcry_burn_stack (burn_depth + 4 * sizeof(void *)); | _gcry_burn_stack (burn_depth + 4 * sizeof(void *)); | ||||
| } | } | ||||
| static void | |||||
| _gcry_aes_generic_cbc_dec (void *context, unsigned char *iv, | |||||
| void *outbuf_arg, const void *inbuf_arg, | |||||
| size_t nblocks) | |||||
| { | |||||
| RIJNDAEL_context *ctx = context; | |||||
| unsigned char *outbuf = outbuf_arg; | |||||
| const unsigned char *inbuf = inbuf_arg; | |||||
| unsigned char savebuf[BLOCKSIZE] ATTR_ALIGNED_16; | |||||
| unsigned burn_depth = 0; | |||||
| rijndael_cryptfn_t decrypt_fn = ctx->decrypt_fn; | |||||
| check_decryption_preparation (ctx); | |||||
| if (ctx->prefetch_dec_fn) | |||||
| ctx->prefetch_dec_fn(); | |||||
| for ( ;nblocks; nblocks-- ) | |||||
| { | |||||
| /* INBUF is needed later and it may be identical to OUTBUF, so store | |||||
| the intermediate result to SAVEBUF. */ | |||||
| burn_depth = decrypt_fn (ctx, savebuf, inbuf); | |||||
| cipher_block_xor_n_copy_2(outbuf, savebuf, iv, inbuf, BLOCKSIZE); | |||||
| inbuf += BLOCKSIZE; | |||||
| outbuf += BLOCKSIZE; | |||||
| } | |||||
| wipememory(savebuf, sizeof(savebuf)); | |||||
| if (burn_depth) | |||||
| _gcry_burn_stack (burn_depth + 4 * sizeof(void *)); | |||||
| } | |||||
| /* Bulk decryption of complete blocks in CBC mode. Caller needs to | /* Bulk decryption of complete blocks in CBC mode. Caller needs to | ||||
| make sure that IV is aligned on an unsigned long boundary. This | make sure that IV is aligned on an unsigned long boundary. This | ||||
| function is only intended for the bulk encryption feature of | function is only intended for the bulk encryption feature of | ||||
| cipher.c. */ | cipher.c. */ | ||||
| Context not available. | |||||
| size_t nblocks) | size_t nblocks) | ||||
| { | { | ||||
| RIJNDAEL_context *ctx = context; | RIJNDAEL_context *ctx = context; | ||||
| unsigned char *outbuf = outbuf_arg; | unsigned char *outbuf = outbuf_arg; | ||||
| const unsigned char *inbuf = inbuf_arg; | const unsigned char *inbuf = inbuf_arg; | ||||
| unsigned int burn_depth = 0; | |||||
| if (0) | if (0) | ||||
| ; | ; | ||||
| #ifdef USE_AESNI | #ifdef USE_AESNI | ||||
| else if (ctx->use_aesni) | else if (ctx->use_aesni) | ||||
| Context not available. | |||||
| return; | return; | ||||
| } | } | ||||
| #endif /*USE_ARM_CE*/ | #endif /*USE_ARM_CE*/ | ||||
| else | else | ||||
| { | { | ||||
| unsigned char savebuf[BLOCKSIZE] ATTR_ALIGNED_16; | _gcry_aes_generic_cbc_dec (ctx, iv, outbuf, inbuf, nblocks); | ||||
| rijndael_cryptfn_t decrypt_fn = ctx->decrypt_fn; | return; | ||||
| check_decryption_preparation (ctx); | |||||
| if (ctx->prefetch_dec_fn) | |||||
| ctx->prefetch_dec_fn(); | |||||
| for ( ;nblocks; nblocks-- ) | |||||
| { | |||||
| /* INBUF is needed later and it may be identical to OUTBUF, so store | |||||
| the intermediate result to SAVEBUF. */ | |||||
| burn_depth = decrypt_fn (ctx, savebuf, inbuf); | |||||
| cipher_block_xor_n_copy_2(outbuf, savebuf, iv, inbuf, BLOCKSIZE); | |||||
| inbuf += BLOCKSIZE; | |||||
| outbuf += BLOCKSIZE; | |||||
| } | |||||
| wipememory(savebuf, sizeof(savebuf)); | |||||
| } | } | ||||
| if (burn_depth) | |||||
| _gcry_burn_stack (burn_depth + 4 * sizeof(void *)); | |||||
| } | } | ||||
| ␌ | ␌ | ||||
| /* Bulk encryption/decryption of complete blocks in OCB mode. */ | /* Bulk encryption/decryption of complete blocks in OCB mode. */ | ||||
| Context not available. | |||||
| { | { | ||||
| _gcry_aes_armv8_ce_xts_crypt (ctx, tweak, outbuf, inbuf, nblocks, encrypt); | _gcry_aes_armv8_ce_xts_crypt (ctx, tweak, outbuf, inbuf, nblocks, encrypt); | ||||
| return; | return; | ||||
| } | } | ||||
| #endif /*USE_ARM_CE*/ | #endif /*USE_ARM_CE*/ | ||||
| #ifdef USE_PPC_ASM | |||||
| else if (ctx->use_ppc_asm) | |||||
| { | |||||
| _gcry_aes_ppc8_xts_crypt (ctx, tweak, outbuf, inbuf, nblocks, encrypt); | |||||
| return; | |||||
| } | |||||
| #endif /*USE_PPC_ASM*/ | |||||
| else | else | ||||
| { | { | ||||
| if (encrypt) | if (encrypt) | ||||
| { | { | ||||
| if (ctx->prefetch_enc_fn) | if (ctx->prefetch_enc_fn) | ||||
| Context not available. | |||||