13#include <botan/cpuid.h>
// Fragment of a byte-reversal helper (interior lines are elided from this
// chunk; the stray leading numbers look like embedded original line numbers
// from the extraction -- NOTE(review): confirm against the upstream source).
// The permute mask {15,...,0} selects the 16 bytes of src in reverse order,
// so vec_perm(src, zero, mask) returns src with its byte order flipped
// (a full 128-bit endian swap; `zero` is unused as a data source here).
31 const Altivec8x16 mask = {15,14,13,12, 11,10,9,8, 7,6,5,4, 3,2,1,0};
33 return vec_perm(src, zero, mask);
// Fragment of a block-load helper: reads 16 bytes at src with vec_vsx_ld
// (a VSX load) and byte-reverses them via reverse_vec before casting to the
// 2x64-bit vector type. The enclosing signature is elided from this chunk;
// presumably this normalizes the in-memory byte order for the vcipher
// intrinsics -- TODO confirm against the full file.
48 return (
Altivec64x2)reverse_vec(vec_vsx_ld(0, src));
// store_block: writes one 128-bit block (src) out to dest[]. Only the
// signature is visible in this chunk; the body is elided.
51inline void store_block(
Altivec64x2 src, uint8_t dest[])
// Fragment of a four-block store helper: B1..B3 are written to out at byte
// offsets 16, 32 and 48 respectively (one 16-byte AES block each). The
// store of B0 at offset 0 is elided from this chunk.
61 store_block(B1, out+16);
62 store_block(B2, out+16*2);
63 store_block(B3, out+16*3);
// XOR round key K into four independent blocks at once (AddRoundKey for a
// 4-way pipelined bulk path). NOTE(review): the closing `} while(0)` of
// this do/while macro is not visible in this chunk -- lines appear elided.
66#define AES_XOR_4(B0, B1, B2, B3, K) do { \
67 B0 = vec_xor(B0, K); \
68 B1 = vec_xor(B1, K); \
69 B2 = vec_xor(B2, K); \
70 B3 = vec_xor(B3, K); \
// One middle AES encryption round (vcipher: SubBytes+ShiftRows+MixColumns+
// AddRoundKey) applied to four independent blocks with the same round key K.
// NOTE(review): the closing `} while(0)` line is not visible in this chunk.
73#define AES_ENCRYPT_4(B0, B1, B2, B3, K) do { \
74 B0 = __builtin_crypto_vcipher(B0, K); \
75 B1 = __builtin_crypto_vcipher(B1, K); \
76 B2 = __builtin_crypto_vcipher(B2, K); \
77 B3 = __builtin_crypto_vcipher(B3, K); \
// Final AES encryption round (vcipherlast: no MixColumns) on four blocks
// sharing round key K. NOTE(review): the closing `} while(0)` line is not
// visible in this chunk.
80#define AES_ENCRYPT_4_LAST(B0, B1, B2, B3, K) do { \
81 B0 = __builtin_crypto_vcipherlast(B0, K); \
82 B1 = __builtin_crypto_vcipherlast(B1, K); \
83 B2 = __builtin_crypto_vcipherlast(B2, K); \
84 B3 = __builtin_crypto_vcipherlast(B3, K); \
// One middle AES decryption round (vncipher, the inverse cipher round) on
// four blocks sharing round key K. NOTE(review): the closing `} while(0)`
// line is not visible in this chunk.
87#define AES_DECRYPT_4(B0, B1, B2, B3, K) do { \
88 B0 = __builtin_crypto_vncipher(B0, K); \
89 B1 = __builtin_crypto_vncipher(B1, K); \
90 B2 = __builtin_crypto_vncipher(B2, K); \
91 B3 = __builtin_crypto_vncipher(B3, K); \
// Final AES decryption round (vncipherlast) on four blocks sharing round
// key K. NOTE(review): the closing `} while(0)` line is not visible in
// this chunk.
94#define AES_DECRYPT_4_LAST(B0, B1, B2, B3, K) do { \
95 B0 = __builtin_crypto_vncipherlast(B0, K); \
96 B1 = __builtin_crypto_vncipherlast(B1, K); \
97 B2 = __builtin_crypto_vncipherlast(B2, K); \
98 B3 = __builtin_crypto_vncipherlast(B3, K); \
// AES-128 bulk encryption using POWER8 crypto intrinsics. Only fragments
// are visible in this chunk: the key-schedule loads, the 4-way block loop
// body, and the loop bookkeeping are elided, and the stray leading numbers
// appear to be embedded original line numbers from the extraction.
104void AES_128::hw_aes_encrypt_n(const uint8_t in[], uint8_t out[],
size_t blocks)
const
// 4-way path: write the four processed blocks back to out.
137 store_blocks(B0, B1, B2, B3, out);
// Scalar tail: process remaining blocks one at a time.
144 for(
size_t i = 0; i != blocks; ++i)
// The 10 rounds of AES-128: 9 middle rounds (vcipher) with K1..K9, then
// the final round (vcipherlast) with K10. Presumably an initial XOR with
// the first round key precedes this -- elided from view, TODO confirm.
149 B = __builtin_crypto_vcipher(B, K1);
150 B = __builtin_crypto_vcipher(B, K2);
151 B = __builtin_crypto_vcipher(B, K3);
152 B = __builtin_crypto_vcipher(B, K4);
153 B = __builtin_crypto_vcipher(B, K5);
154 B = __builtin_crypto_vcipher(B, K6);
155 B = __builtin_crypto_vcipher(B, K7);
156 B = __builtin_crypto_vcipher(B, K8);
157 B = __builtin_crypto_vcipher(B, K9);
158 B = __builtin_crypto_vcipherlast(B, K10);
// AES-128 bulk decryption using POWER8 crypto intrinsics. Only fragments
// are visible in this chunk; key loads, the 4-way loop body, and loop
// bookkeeping are elided (stray leading numbers look like embedded
// original line numbers from the extraction).
168void AES_128::hw_aes_decrypt_n(const uint8_t in[], uint8_t out[],
size_t blocks)
const
// 4-way path: write the four processed blocks back to out.
201 store_blocks(B0, B1, B2, B3, out);
// Scalar tail: process remaining blocks one at a time.
208 for(
size_t i = 0; i != blocks; ++i)
// The 10 rounds of AES-128 decryption: 9 inverse rounds (vncipher) with
// K1..K9, then the final inverse round (vncipherlast) with K10.
213 B = __builtin_crypto_vncipher(B, K1);
214 B = __builtin_crypto_vncipher(B, K2);
215 B = __builtin_crypto_vncipher(B, K3);
216 B = __builtin_crypto_vncipher(B, K4);
217 B = __builtin_crypto_vncipher(B, K5);
218 B = __builtin_crypto_vncipher(B, K6);
219 B = __builtin_crypto_vncipher(B, K7);
220 B = __builtin_crypto_vncipher(B, K8);
221 B = __builtin_crypto_vncipher(B, K9);
222 B = __builtin_crypto_vncipherlast(B, K10);
// AES-192 bulk encryption using POWER8 crypto intrinsics. Only fragments
// are visible in this chunk; key loads, the 4-way loop body, and loop
// bookkeeping are elided.
232void AES_192::hw_aes_encrypt_n(const uint8_t in[], uint8_t out[],
size_t blocks)
const
// 4-way path: write the four processed blocks back to out.
269 store_blocks(B0, B1, B2, B3, out);
// Scalar tail: process remaining blocks one at a time.
276 for(
size_t i = 0; i != blocks; ++i)
// The 12 rounds of AES-192: 11 middle rounds (vcipher) with K1..K11,
// then the final round (vcipherlast) with K12.
281 B = __builtin_crypto_vcipher(B, K1);
282 B = __builtin_crypto_vcipher(B, K2);
283 B = __builtin_crypto_vcipher(B, K3);
284 B = __builtin_crypto_vcipher(B, K4);
285 B = __builtin_crypto_vcipher(B, K5);
286 B = __builtin_crypto_vcipher(B, K6);
287 B = __builtin_crypto_vcipher(B, K7);
288 B = __builtin_crypto_vcipher(B, K8);
289 B = __builtin_crypto_vcipher(B, K9);
290 B = __builtin_crypto_vcipher(B, K10);
291 B = __builtin_crypto_vcipher(B, K11);
292 B = __builtin_crypto_vcipherlast(B, K12);
// AES-192 bulk decryption using POWER8 crypto intrinsics. Only fragments
// are visible in this chunk; key loads, the 4-way loop body, and loop
// bookkeeping are elided.
302void AES_192::hw_aes_decrypt_n(const uint8_t in[], uint8_t out[],
size_t blocks)
const
// 4-way path: write the four processed blocks back to out.
339 store_blocks(B0, B1, B2, B3, out);
// Scalar tail: process remaining blocks one at a time.
346 for(
size_t i = 0; i != blocks; ++i)
// The 12 rounds of AES-192 decryption: 11 inverse rounds (vncipher)
// with K1..K11, then the final inverse round (vncipherlast) with K12.
351 B = __builtin_crypto_vncipher(B, K1);
352 B = __builtin_crypto_vncipher(B, K2);
353 B = __builtin_crypto_vncipher(B, K3);
354 B = __builtin_crypto_vncipher(B, K4);
355 B = __builtin_crypto_vncipher(B, K5);
356 B = __builtin_crypto_vncipher(B, K6);
357 B = __builtin_crypto_vncipher(B, K7);
358 B = __builtin_crypto_vncipher(B, K8);
359 B = __builtin_crypto_vncipher(B, K9);
360 B = __builtin_crypto_vncipher(B, K10);
361 B = __builtin_crypto_vncipher(B, K11);
362 B = __builtin_crypto_vncipherlast(B, K12);
// AES-256 bulk encryption using POWER8 crypto intrinsics. Only fragments
// are visible in this chunk; key loads, the 4-way loop body, and loop
// bookkeeping are elided.
372void AES_256::hw_aes_encrypt_n(const uint8_t in[], uint8_t out[],
size_t blocks)
const
// 4-way path: write the four processed blocks back to out.
413 store_blocks(B0, B1, B2, B3, out);
// Scalar tail: process remaining blocks one at a time.
420 for(
size_t i = 0; i != blocks; ++i)
// The 14 rounds of AES-256: 13 middle rounds (vcipher) with K1..K13,
// then the final round (vcipherlast) with K14.
425 B = __builtin_crypto_vcipher(B, K1);
426 B = __builtin_crypto_vcipher(B, K2);
427 B = __builtin_crypto_vcipher(B, K3);
428 B = __builtin_crypto_vcipher(B, K4);
429 B = __builtin_crypto_vcipher(B, K5);
430 B = __builtin_crypto_vcipher(B, K6);
431 B = __builtin_crypto_vcipher(B, K7);
432 B = __builtin_crypto_vcipher(B, K8);
433 B = __builtin_crypto_vcipher(B, K9);
434 B = __builtin_crypto_vcipher(B, K10);
435 B = __builtin_crypto_vcipher(B, K11);
436 B = __builtin_crypto_vcipher(B, K12);
437 B = __builtin_crypto_vcipher(B, K13);
438 B = __builtin_crypto_vcipherlast(B, K14);
// AES-256 bulk decryption using POWER8 crypto intrinsics. Only fragments
// are visible in this chunk; key loads, the 4-way loop body, and loop
// bookkeeping are elided.
448void AES_256::hw_aes_decrypt_n(const uint8_t in[], uint8_t out[],
size_t blocks)
const
// 4-way path: write the four processed blocks back to out.
489 store_blocks(B0, B1, B2, B3, out);
// Scalar tail: process remaining blocks one at a time.
496 for(
size_t i = 0; i != blocks; ++i)
// The 14 rounds of AES-256 decryption: 13 inverse rounds (vncipher)
// with K1..K13, then the final inverse round (vncipherlast) with K14.
501 B = __builtin_crypto_vncipher(B, K1);
502 B = __builtin_crypto_vncipher(B, K2);
503 B = __builtin_crypto_vncipher(B, K3);
504 B = __builtin_crypto_vncipher(B, K4);
505 B = __builtin_crypto_vncipher(B, K5);
506 B = __builtin_crypto_vncipher(B, K6);
507 B = __builtin_crypto_vncipher(B, K7);
508 B = __builtin_crypto_vncipher(B, K8);
509 B = __builtin_crypto_vncipher(B, K9);
510 B = __builtin_crypto_vncipher(B, K10);
511 B = __builtin_crypto_vncipher(B, K11);
512 B = __builtin_crypto_vncipher(B, K12);
513 B = __builtin_crypto_vncipher(B, K13);
514 B = __builtin_crypto_vncipherlast(B, K14);
// Cleanup of the file-local 4-way macros after use. Only two of the #undef
// lines are visible here; the empty-bodied #define lines below redefine the
// macros to nothing, which reads like a documentation/index extraction
// artifact rather than intentional code -- NOTE(review): confirm against
// the upstream file (which likely #undefs all five macros and defines none).
525#undef AES_ENCRYPT_4_LAST
527#undef AES_DECRYPT_4_LAST
#define AES_ENCRYPT_4_LAST(B0, B1, B2, B3, K)
#define AES_ENCRYPT_4(B0, B1, B2, B3, K)
#define AES_XOR_4(B0, B1, B2, B3, K)
#define AES_DECRYPT_4_LAST(B0, B1, B2, B3, K)
#define AES_DECRYPT_4(B0, B1, B2, B3, K)
// NOTE(review): the lines below read as a symbol index appended by the
// extraction, not compilable code: function declarations without bodies or
// semicolons, a macro name with no definition, and vector type aliases
// written without `typedef`/`using`. They should be reconciled against the
// upstream source rather than compiled as-is.
static bool is_little_endian()
#define BOTAN_FUNC_ISA(isa)
std::unique_ptr< Private_Key > load_key(DataSource &source, std::function< std::string()> get_pass)
__vector unsigned long long Altivec64x2
__vector unsigned char Altivec8x16
__vector unsigned int Altivec32x4