56 /* |
56 /* |
57 * Copyright (c) 2011, 2013, Oracle and/or its affiliates. All rights reserved. |
57 * Copyright (c) 2011, 2013, Oracle and/or its affiliates. All rights reserved. |
58 */ |
58 */ |
59 |
59 |
/*
 * This engine supports SPARC microprocessors that provide AES, DES, and
 * other cipher and hash instructions, such as the T4 microprocessor.
 */
64 |
64 |
65 #include <openssl/opensslconf.h> |
65 #include <openssl/opensslconf.h> |
66 |
66 |
67 #if !defined(OPENSSL_NO_HW) && !defined(OPENSSL_NO_HW_AES_T4) && \ |
67 #if !defined(OPENSSL_NO_HW) |
68 !defined(OPENSSL_NO_AES) |
|
69 #include <sys/types.h> |
68 #include <sys/types.h> |
70 #include <sys/auxv.h> /* getisax() */ |
69 #include <sys/auxv.h> /* getisax() */ |
71 #include <stdlib.h> |
70 #include <stdlib.h> |
72 #include <stdbool.h> |
71 #include <stdbool.h> |
73 #include <string.h> |
72 #include <string.h> |
74 #include <errno.h> |
73 #include <errno.h> |
75 #include <openssl/aes.h> |
|
76 #include <openssl/engine.h> |
74 #include <openssl/engine.h> |
77 #include "eng_t4_aes_asm.h" |
|
78 |
75 |
79 #define T4_LIB_NAME "SPARC T4 engine" |
76 #define T4_LIB_NAME "SPARC T4 engine" |
80 #include "eng_t4_err.c" |
77 #include "eng_t4_err.c" |
81 |
|
/* Copied from Solaris aes_impl.h */
#ifndef	MAX_AES_NR
#define	MAX_AES_NR		14	/* Maximum number of rounds */
#endif
#ifndef	MAX_AES_NB
#define	MAX_AES_NB		4	/* Number of columns comprising a state */
#endif

/*
 * Index for the supported ciphers.
 * Order must match the rows of t4_cipher_table[].
 */
typedef enum {
	T4_AES_128_CBC,
	T4_AES_192_CBC,
	T4_AES_256_CBC,
#ifndef	SOLARIS_NO_AES_CFB128
	T4_AES_128_CFB128,
	T4_AES_192_CFB128,
	T4_AES_256_CFB128,
#endif	/* !SOLARIS_NO_AES_CFB128 */
	T4_AES_128_CTR,
	T4_AES_192_CTR,
	T4_AES_256_CTR,
	T4_AES_128_ECB,
	T4_AES_192_ECB,
	T4_AES_256_ECB,
	T4_CIPHER_MAX		/* sentinel: number of supported ciphers */
} t4_cipher_id;

/* T4 cipher context; must be 8-byte aligned (last field must be uint64_t) */
typedef struct t4_cipher_ctx {
	t4_cipher_id	index;		/* row in t4_cipher_table[] */
	uint64_t	*iv;		/* points at ctx IV or aligned copy */
	uint64_t	aligned_iv_buffer[2]; /* use if original IV unaligned */
	/* Encryption and decryption key schedule are the same: */
	uint64_t	t4_ks[((MAX_AES_NR) + 1) * (MAX_AES_NB)];
} t4_cipher_ctx_t;

/* Per-cipher parameters; one row per t4_cipher_id */
typedef struct t4_cipher {
	t4_cipher_id	id;
	int		nid;		/* OpenSSL NID */
	int		iv_len;		/* bytes; 0 for ECB (no IV) */
	int		min_key_len;	/* bytes */
	int		max_key_len;	/* bytes */
	unsigned long	flags;		/* extra EVP_CIPH_* flags */
} t4_cipher_t;
|
126 |
78 |
/* Constants used when creating the ENGINE */
static const char *ENGINE_T4_ID = "t4";			/* engine identifier */
static const char *ENGINE_T4_NAME = "SPARC T4 engine support";
static const char *ENGINE_NO_T4_NAME = "SPARC T4 engine support (no T4)";
181 extern DSA_METHOD *t4_DSA(void); |
130 extern DSA_METHOD *t4_DSA(void); |
182 |
131 |
183 /* Static variables */ |
132 /* Static variables */ |
184 /* This can't be const as NID*ctr is inserted when the engine is initialized */ |
133 /* This can't be const as NID*ctr is inserted when the engine is initialized */ |
185 static int t4_cipher_nids[] = { |
134 static int t4_cipher_nids[] = { |
186 NID_aes_128_cbc, NID_aes_192_cbc, NID_aes_256_cbc, |
|
187 #ifndef SOLARIS_NO_AES_CFB128 |
|
188 NID_aes_128_cfb128, NID_aes_192_cfb128, NID_aes_256_cfb128, |
|
189 #endif |
|
190 NID_aes_128_ctr, NID_aes_192_ctr, NID_aes_256_ctr, |
|
191 NID_aes_128_ecb, NID_aes_192_ecb, NID_aes_256_ecb, |
|
192 #ifndef OPENSSL_NO_DES |
135 #ifndef OPENSSL_NO_DES |
193 /* Must be at end of list (see t4_des_cipher_count in t4_bind() */ |
136 /* Must be at end of list (see t4_des_cipher_count in t4_bind() */ |
194 NID_des_cbc, NID_des_ede3_cbc, NID_des_ecb, NID_des_ede3_ecb, |
137 NID_des_cbc, NID_des_ede3_cbc, NID_des_ecb, NID_des_ede3_ecb, |
195 #endif |
138 #endif |
196 }; |
139 }; |
197 static const int t4_des_cipher_count = 4; |
140 static const int t4_des_cipher_count = 4; |
198 static int t4_cipher_count = |
141 static int t4_cipher_count = |
199 (sizeof (t4_cipher_nids) / sizeof (t4_cipher_nids[0])); |
142 (sizeof (t4_cipher_nids) / sizeof (t4_cipher_nids[0])); |
200 |
|
201 /* |
|
202 * Cipher Table for all supported symmetric ciphers. |
|
203 * Must be in same order as t4_cipher_id. |
|
204 */ |
|
205 static t4_cipher_t t4_cipher_table[] = { |
|
206 /* ID NID IV min- max-key flags */ |
|
207 {T4_AES_128_CBC, NID_aes_128_cbc, 16, 16, 16, 0}, |
|
208 {T4_AES_192_CBC, NID_aes_192_cbc, 16, 24, 24, 0}, |
|
209 {T4_AES_256_CBC, NID_aes_256_cbc, 16, 32, 32, 0}, |
|
210 #ifndef SOLARIS_NO_AES_CFB128 |
|
211 {T4_AES_128_CFB128, NID_aes_128_cfb128, 16, 16, 16, |
|
212 EVP_CIPH_NO_PADDING}, |
|
213 {T4_AES_192_CFB128, NID_aes_192_cfb128, 16, 24, 24, |
|
214 EVP_CIPH_NO_PADDING}, |
|
215 {T4_AES_256_CFB128, NID_aes_256_cfb128, 16, 32, 32, |
|
216 EVP_CIPH_NO_PADDING}, |
|
217 #endif |
|
218 {T4_AES_128_CTR, NID_aes_128_ctr, 16, 16, 16, |
|
219 EVP_CIPH_NO_PADDING}, |
|
220 {T4_AES_192_CTR, NID_aes_192_ctr, 16, 24, 24, |
|
221 EVP_CIPH_NO_PADDING}, |
|
222 {T4_AES_256_CTR, NID_aes_256_ctr, 16, 32, 32, |
|
223 EVP_CIPH_NO_PADDING}, |
|
224 {T4_AES_128_ECB, NID_aes_128_ecb, 0, 16, 16, 0}, |
|
225 {T4_AES_192_ECB, NID_aes_192_ecb, 0, 24, 24, 0}, |
|
226 {T4_AES_256_ECB, NID_aes_256_ecb, 0, 32, 32, 0}, |
|
227 }; |
|
228 |
|
229 |
|
230 /* Formal declaration for functions in EVP_CIPHER structure */ |
|
231 static int t4_cipher_init_aes(EVP_CIPHER_CTX *ctx, const unsigned char *key, |
|
232 const unsigned char *iv, int enc); |
|
233 |
|
234 static int t4_cipher_do_aes_128_cbc(EVP_CIPHER_CTX *ctx, unsigned char *out, |
|
235 const unsigned char *in, size_t inl); |
|
236 static int t4_cipher_do_aes_192_cbc(EVP_CIPHER_CTX *ctx, unsigned char *out, |
|
237 const unsigned char *in, size_t inl); |
|
238 static int t4_cipher_do_aes_256_cbc(EVP_CIPHER_CTX *ctx, unsigned char *out, |
|
239 const unsigned char *in, size_t inl); |
|
240 #ifndef SOLARIS_NO_AES_CFB128 |
|
241 static int t4_cipher_do_aes_128_cfb128(EVP_CIPHER_CTX *ctx, unsigned char *out, |
|
242 const unsigned char *in, size_t inl); |
|
243 static int t4_cipher_do_aes_192_cfb128(EVP_CIPHER_CTX *ctx, unsigned char *out, |
|
244 const unsigned char *in, size_t inl); |
|
245 static int t4_cipher_do_aes_256_cfb128(EVP_CIPHER_CTX *ctx, unsigned char *out, |
|
246 const unsigned char *in, size_t inl); |
|
247 #endif |
|
248 static int t4_cipher_do_aes_128_ctr(EVP_CIPHER_CTX *ctx, unsigned char *out, |
|
249 const unsigned char *in, size_t inl); |
|
250 static int t4_cipher_do_aes_192_ctr(EVP_CIPHER_CTX *ctx, unsigned char *out, |
|
251 const unsigned char *in, size_t inl); |
|
252 static int t4_cipher_do_aes_256_ctr(EVP_CIPHER_CTX *ctx, unsigned char *out, |
|
253 const unsigned char *in, size_t inl); |
|
254 static int t4_cipher_do_aes_128_ecb(EVP_CIPHER_CTX *ctx, unsigned char *out, |
|
255 const unsigned char *in, size_t inl); |
|
256 static int t4_cipher_do_aes_192_ecb(EVP_CIPHER_CTX *ctx, unsigned char *out, |
|
257 const unsigned char *in, size_t inl); |
|
258 static int t4_cipher_do_aes_256_ecb(EVP_CIPHER_CTX *ctx, unsigned char *out, |
|
259 const unsigned char *in, size_t inl); |
|
260 |
143 |
261 |
144 |
/*
 * Cipher Algorithms
 *
 * OpenSSL's EVP_CIPHER initializer fields, in order:
 * nid, block_size, key_len, iv_len, flags,
 * init(), do_cipher(), cleanup(),
 * ctx_size,
 * set_asn1_parameters(), get_asn1_parameters(), ctrl(), app_data
 */
276 |
159 |
277 static const EVP_CIPHER t4_aes_128_cbc = { |
|
278 NID_aes_128_cbc, |
|
279 16, 16, 16, |
|
280 EVP_CIPH_CBC_MODE, |
|
281 t4_cipher_init_aes, t4_cipher_do_aes_128_cbc, NULL, |
|
282 sizeof (t4_cipher_ctx_t), |
|
283 EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv, |
|
284 NULL, NULL |
|
285 }; |
|
286 static const EVP_CIPHER t4_aes_192_cbc = { |
|
287 NID_aes_192_cbc, |
|
288 16, 24, 16, |
|
289 EVP_CIPH_CBC_MODE, |
|
290 t4_cipher_init_aes, t4_cipher_do_aes_192_cbc, NULL, |
|
291 sizeof (t4_cipher_ctx_t), |
|
292 EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv, |
|
293 NULL, NULL |
|
294 }; |
|
295 static const EVP_CIPHER t4_aes_256_cbc = { |
|
296 NID_aes_256_cbc, |
|
297 16, 32, 16, |
|
298 EVP_CIPH_CBC_MODE, |
|
299 t4_cipher_init_aes, t4_cipher_do_aes_256_cbc, NULL, |
|
300 sizeof (t4_cipher_ctx_t), |
|
301 EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv, |
|
302 NULL, NULL |
|
303 }; |
|
304 |
|
305 #ifndef SOLARIS_NO_AES_CFB128 |
|
306 static const EVP_CIPHER t4_aes_128_cfb128 = { |
|
307 NID_aes_128_cfb128, |
|
308 16, 16, 16, |
|
309 EVP_CIPH_CFB_MODE, |
|
310 t4_cipher_init_aes, t4_cipher_do_aes_128_cfb128, NULL, |
|
311 sizeof (t4_cipher_ctx_t), |
|
312 EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv, |
|
313 NULL, NULL |
|
314 }; |
|
315 static const EVP_CIPHER t4_aes_192_cfb128 = { |
|
316 NID_aes_192_cfb128, |
|
317 16, 24, 16, |
|
318 EVP_CIPH_CFB_MODE, |
|
319 t4_cipher_init_aes, t4_cipher_do_aes_192_cfb128, NULL, |
|
320 sizeof (t4_cipher_ctx_t), |
|
321 EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv, |
|
322 NULL, NULL |
|
323 }; |
|
324 static const EVP_CIPHER t4_aes_256_cfb128 = { |
|
325 NID_aes_256_cfb128, |
|
326 16, 32, 16, |
|
327 EVP_CIPH_CFB_MODE, |
|
328 t4_cipher_init_aes, t4_cipher_do_aes_256_cfb128, NULL, |
|
329 sizeof (t4_cipher_ctx_t), |
|
330 EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv, |
|
331 NULL, NULL |
|
332 }; |
|
333 #endif /* !SOLARIS_NO_AES_CFB128 */ |
|
334 |
|
335 static EVP_CIPHER t4_aes_128_ctr = { |
|
336 NID_aes_128_ctr, |
|
337 16, 16, 16, |
|
338 EVP_CIPH_CTR_MODE, |
|
339 t4_cipher_init_aes, t4_cipher_do_aes_128_ctr, NULL, |
|
340 sizeof (t4_cipher_ctx_t), |
|
341 EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv, |
|
342 NULL, NULL |
|
343 }; |
|
344 static EVP_CIPHER t4_aes_192_ctr = { |
|
345 NID_aes_192_ctr, |
|
346 16, 24, 16, |
|
347 EVP_CIPH_CTR_MODE, |
|
348 t4_cipher_init_aes, t4_cipher_do_aes_192_ctr, NULL, |
|
349 sizeof (t4_cipher_ctx_t), |
|
350 EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv, |
|
351 NULL, NULL |
|
352 }; |
|
353 static EVP_CIPHER t4_aes_256_ctr = { |
|
354 NID_aes_256_ctr, |
|
355 16, 32, 16, |
|
356 EVP_CIPH_CTR_MODE, |
|
357 t4_cipher_init_aes, t4_cipher_do_aes_256_ctr, NULL, |
|
358 sizeof (t4_cipher_ctx_t), |
|
359 EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv, |
|
360 NULL, NULL |
|
361 }; |
|
362 |
|
363 /* |
|
364 * ECB modes don't use an Initial Vector, so that's why set_asn1_parameters, |
|
365 * get_asn1_parameters, and cleanup fields are set to NULL. |
|
366 */ |
|
367 static const EVP_CIPHER t4_aes_128_ecb = { |
|
368 NID_aes_128_ecb, |
|
369 16, 16, 0, |
|
370 EVP_CIPH_ECB_MODE, |
|
371 t4_cipher_init_aes, t4_cipher_do_aes_128_ecb, NULL, |
|
372 sizeof (t4_cipher_ctx_t), |
|
373 NULL, NULL, NULL, NULL |
|
374 }; |
|
375 static const EVP_CIPHER t4_aes_192_ecb = { |
|
376 NID_aes_192_ecb, |
|
377 16, 24, 0, |
|
378 EVP_CIPH_ECB_MODE, |
|
379 t4_cipher_init_aes, t4_cipher_do_aes_192_ecb, NULL, |
|
380 sizeof (t4_cipher_ctx_t), |
|
381 NULL, NULL, NULL, NULL |
|
382 }; |
|
383 static const EVP_CIPHER t4_aes_256_ecb = { |
|
384 NID_aes_256_ecb, |
|
385 16, 32, 0, |
|
386 EVP_CIPH_ECB_MODE, |
|
387 t4_cipher_init_aes, t4_cipher_do_aes_256_ecb, NULL, |
|
388 sizeof (t4_cipher_ctx_t), |
|
389 NULL, NULL, NULL, NULL |
|
390 }; |
|
391 |
160 |
392 #ifndef OPENSSL_NO_DES |
161 #ifndef OPENSSL_NO_DES |
393 extern const EVP_CIPHER t4_des_cbc; |
162 extern const EVP_CIPHER t4_des_cbc; |
394 extern const EVP_CIPHER t4_des3_cbc; |
163 extern const EVP_CIPHER t4_des3_cbc; |
395 extern const EVP_CIPHER t4_des_ecb; |
164 extern const EVP_CIPHER t4_des_ecb; |
484 break; |
222 break; |
485 case NID_des_ede3_ecb: |
223 case NID_des_ede3_ecb: |
486 *cipher = &t4_des3_ecb; |
224 *cipher = &t4_des3_ecb; |
487 break; |
225 break; |
488 #endif /* !OPENSSL_NO_DES */ |
226 #endif /* !OPENSSL_NO_DES */ |
489 case NID_aes_128_ctr: |
|
490 *cipher = &t4_aes_128_ctr; |
|
491 break; |
|
492 case NID_aes_192_ctr: |
|
493 *cipher = &t4_aes_192_ctr; |
|
494 break; |
|
495 case NID_aes_256_ctr: |
|
496 *cipher = &t4_aes_256_ctr; |
|
497 break; |
|
498 default: |
227 default: |
499 /* cipher not supported */ |
228 /* cipher not supported */ |
500 *cipher = NULL; |
229 *cipher = NULL; |
501 return (0); |
230 return (0); |
502 } |
231 } |
503 |
232 |
504 return (1); |
233 return (1); |
505 } |
234 } |
506 |
235 |
507 |
236 |
508 /* Called by t4_cipher_init_aes() */ |
|
509 static t4_cipher_id |
|
510 get_cipher_index_by_nid(int nid) |
|
511 { |
|
512 t4_cipher_id i; |
|
513 |
|
514 for (i = (t4_cipher_id)0; i < T4_CIPHER_MAX; ++i) |
|
515 if (t4_cipher_table[i].nid == nid) |
|
516 return (i); |
|
517 return (T4_CIPHER_MAX); |
|
518 } |
|
519 |
|
520 |
|
521 /* ARGSUSED2 */ |
|
522 static int |
|
523 t4_cipher_init_aes(EVP_CIPHER_CTX *ctx, const unsigned char *key, |
|
524 const unsigned char *iv, int enc) |
|
525 { |
|
526 t4_cipher_ctx_t *tctx = ctx->cipher_data; |
|
527 uint64_t *t4_ks = tctx->t4_ks; |
|
528 t4_cipher_t *t4_cipher; |
|
529 t4_cipher_id index; |
|
530 int key_len = ctx->key_len; |
|
531 uint64_t aligned_key_buffer[4]; /* 16, 24, or 32 bytes long */ |
|
532 uint64_t *aligned_key; |
|
533 |
|
534 if (key == NULL) { |
|
535 T4err(T4_F_CIPHER_INIT_AES, T4_R_CIPHER_KEY); |
|
536 return (0); |
|
537 } |
|
538 |
|
539 /* Get the cipher entry index in t4_cipher_table from nid */ |
|
540 index = get_cipher_index_by_nid(ctx->cipher->nid); |
|
541 if (index >= T4_CIPHER_MAX) { |
|
542 T4err(T4_F_CIPHER_INIT_AES, T4_R_CIPHER_NID); |
|
543 return (0); /* Error */ |
|
544 } |
|
545 t4_cipher = &t4_cipher_table[index]; |
|
546 |
|
547 /* Check key size and iv size */ |
|
548 if (ctx->cipher->iv_len < t4_cipher->iv_len) { |
|
549 T4err(T4_F_CIPHER_INIT_AES, T4_R_IV_LEN_INCORRECT); |
|
550 return (0); /* Error */ |
|
551 } |
|
552 if ((key_len < t4_cipher->min_key_len) || |
|
553 (key_len > t4_cipher->max_key_len)) { |
|
554 T4err(T4_F_CIPHER_INIT_AES, T4_R_KEY_LEN_INCORRECT); |
|
555 return (0); /* Error */ |
|
556 } |
|
557 |
|
558 /* Set cipher flags, if any */ |
|
559 ctx->flags |= t4_cipher->flags; |
|
560 |
|
561 /* Align the key */ |
|
562 if (((unsigned long)key & 0x7) == 0) /* already aligned */ |
|
563 aligned_key = (uint64_t *)key; |
|
564 else { /* key is not 8-byte aligned */ |
|
565 #ifdef DEBUG_T4 |
|
566 (void) fprintf(stderr, "T4: key is not 8 byte aligned\n"); |
|
567 #endif |
|
568 (void) memcpy(aligned_key_buffer, key, key_len); |
|
569 aligned_key = aligned_key_buffer; |
|
570 } |
|
571 |
|
572 |
|
573 /* |
|
574 * Expand the key schedule. |
|
575 * Copy original key to start of t4_ks key schedule. Note that the |
|
576 * encryption and decryption key schedule are the same for T4. |
|
577 */ |
|
578 switch (key_len) { |
|
579 case 16: |
|
580 t4_aes_expand128(&t4_ks[2], |
|
581 (const uint32_t *)aligned_key); |
|
582 t4_ks[0] = aligned_key[0]; |
|
583 t4_ks[1] = aligned_key[1]; |
|
584 break; |
|
585 case 24: |
|
586 t4_aes_expand192(&t4_ks[3], |
|
587 (const uint32_t *)aligned_key); |
|
588 t4_ks[0] = aligned_key[0]; |
|
589 t4_ks[1] = aligned_key[1]; |
|
590 t4_ks[2] = aligned_key[2]; |
|
591 break; |
|
592 case 32: |
|
593 t4_aes_expand256(&t4_ks[4], |
|
594 (const uint32_t *)aligned_key); |
|
595 t4_ks[0] = aligned_key[0]; |
|
596 t4_ks[1] = aligned_key[1]; |
|
597 t4_ks[2] = aligned_key[2]; |
|
598 t4_ks[3] = aligned_key[3]; |
|
599 break; |
|
600 default: |
|
601 T4err(T4_F_CIPHER_INIT_AES, T4_R_CIPHER_KEY); |
|
602 return (0); |
|
603 } |
|
604 |
|
605 /* Save index to cipher */ |
|
606 tctx->index = index; |
|
607 |
|
608 /* Align IV, if needed */ |
|
609 if (t4_cipher->iv_len <= 0) { /* no IV (such as with ECB mode) */ |
|
610 tctx->iv = NULL; |
|
611 } else if (((unsigned long)ctx->iv & 0x7) == 0) { /* already aligned */ |
|
612 tctx->iv = (uint64_t *)ctx->iv; |
|
613 } else { |
|
614 /* IV is not 8 byte aligned */ |
|
615 (void) memcpy(tctx->aligned_iv_buffer, ctx->iv, |
|
616 ctx->cipher->iv_len); |
|
617 tctx->iv = tctx->aligned_iv_buffer; |
|
618 #ifdef DEBUG_T4 |
|
619 (void) fprintf(stderr, |
|
620 "t4_cipher_init_aes: IV is not 8 byte aligned\n"); |
|
621 (void) fprintf(stderr, |
|
622 "t4_cipher_init_aes: ctx->cipher->iv_len =%d\n", |
|
623 ctx->cipher->iv_len); |
|
624 (void) fprintf(stderr, "t4_cipher_init_aes: after " |
|
625 "re-alignment, tctx->iv = %p\n", (void *)tctx->iv); |
|
626 #endif /* DEBUG_T4 */ |
|
627 } |
|
628 |
|
629 return (1); |
|
630 } |
|
631 |
|
632 |
|
/*
 * ENCRYPT_UPDATE or DECRYPT_UPDATE.
 * Generates a static do_cipher() routine: the T4 assembly routines require
 * 8-byte-aligned, block-multiple input/output, so unaligned buffers are
 * bounced through the aligned output buffer or a temporary allocation.
 */
#define	T4_CIPHER_DO_AES(t4_cipher_do_aes, t4_aes_load_keys_for_encrypt, \
    t4_aes_encrypt, t4_aes_load_keys_for_decrypt, t4_aes_decrypt, iv)	\
static int								\
t4_cipher_do_aes(EVP_CIPHER_CTX *ctx, unsigned char *out,		\
    const unsigned char *in, size_t inl)				\
{									\
	t4_cipher_ctx_t	*tctx = ctx->cipher_data;			\
	uint64_t	*t4_ks = tctx->t4_ks;				\
	unsigned long	outl = inl;					\
	unsigned char	*bufin_alloc = NULL, *bufout_alloc = NULL;	\
	unsigned char	*bufin, *bufout;				\
									\
	/* "in" and "out" must be 8 byte aligned */			\
	if (((unsigned long)in & 0x7) == 0) { /* already aligned */	\
		bufin = (unsigned char *)in;				\
	} else { /* "in" is not 8 byte aligned */			\
		if (((unsigned long)out & 0x7) == 0) { /* aligned */	\
			/* use output buffer for input */		\
			bufin = out;					\
		} else {						\
			bufin = bufin_alloc = OPENSSL_malloc(inl);	\
			if (bufin_alloc == NULL)			\
				return (0); /* error */			\
		}							\
		(void) memcpy(bufin, in, inl);				\
	}								\
									\
	if (((unsigned long)out & 0x7) == 0) { /* already aligned */	\
		bufout = out;						\
	} else { /* "out" is not 8 byte aligned */			\
		if (bufin_alloc != NULL) {				\
			/* use allocated input buffer for output */	\
			bufout = bufin_alloc;				\
		} else {						\
			bufout = bufout_alloc = OPENSSL_malloc(outl);	\
			if (bufout_alloc == NULL) {			\
				OPENSSL_free(bufin_alloc);		\
				return (0); /* error */			\
			}						\
		}							\
	}								\
									\
	/* Data length must be an even multiple of block size. */	\
	if ((inl & 0xf) != 0) {						\
		OPENSSL_free(bufout_alloc);				\
		OPENSSL_free(bufin_alloc);				\
		T4err(T4_F_CIPHER_DO_AES, T4_R_NOT_BLOCKSIZE_LENGTH);	\
		return (0);						\
	}								\
									\
	if (ctx->encrypt) {						\
		t4_aes_load_keys_for_encrypt(t4_ks);			\
		t4_aes_encrypt(t4_ks, (uint64_t *)bufin,		\
		    (uint64_t *)bufout, (size_t)inl, iv);		\
	} else { /* decrypt */						\
		t4_aes_load_keys_for_decrypt(t4_ks);			\
		t4_aes_decrypt(t4_ks, (uint64_t *)bufin,		\
		    (uint64_t *)bufout, (size_t)inl, iv);		\
	}								\
									\
	/* Copy result back to the caller's buffer and clean up */	\
	if (bufin_alloc != NULL) {					\
		if (bufout == bufin_alloc)				\
			(void) memcpy(out, bufout, outl);		\
		OPENSSL_free(bufin_alloc);				\
	}								\
	if (bufout_alloc != NULL) {					\
		(void) memcpy(out, bufout_alloc, outl);			\
		OPENSSL_free(bufout_alloc);				\
	}								\
									\
	return (1);							\
}
|
709 |
|
710 |
|
711 /* AES CBC mode. */ |
|
712 T4_CIPHER_DO_AES(t4_cipher_do_aes_128_cbc, |
|
713 t4_aes128_load_keys_for_encrypt, t4_aes128_cbc_encrypt, |
|
714 t4_aes128_load_keys_for_decrypt, t4_aes128_cbc_decrypt, tctx->iv) |
|
715 T4_CIPHER_DO_AES(t4_cipher_do_aes_192_cbc, |
|
716 t4_aes192_load_keys_for_encrypt, t4_aes192_cbc_encrypt, |
|
717 t4_aes192_load_keys_for_decrypt, t4_aes192_cbc_decrypt, tctx->iv) |
|
718 T4_CIPHER_DO_AES(t4_cipher_do_aes_256_cbc, |
|
719 t4_aes256_load_keys_for_encrypt, t4_aes256_cbc_encrypt, |
|
720 t4_aes256_load_keys_for_decrypt, t4_aes256_cbc_decrypt, tctx->iv) |
|
721 |
|
722 /* |
|
723 * AES CFB128 mode. |
|
724 * CFB128 decrypt uses load_keys_for_encrypt() as the mode uses |
|
725 * the raw AES encrypt operation for the decryption, too. |
|
726 */ |
|
727 #ifndef SOLARIS_NO_AES_CFB128 |
|
728 T4_CIPHER_DO_AES(t4_cipher_do_aes_128_cfb128, |
|
729 t4_aes128_load_keys_for_encrypt, t4_aes128_cfb128_encrypt, |
|
730 t4_aes128_load_keys_for_encrypt, t4_aes128_cfb128_decrypt, tctx->iv) |
|
731 T4_CIPHER_DO_AES(t4_cipher_do_aes_192_cfb128, |
|
732 t4_aes192_load_keys_for_encrypt, t4_aes192_cfb128_encrypt, |
|
733 t4_aes192_load_keys_for_encrypt, t4_aes192_cfb128_decrypt, tctx->iv) |
|
734 T4_CIPHER_DO_AES(t4_cipher_do_aes_256_cfb128, |
|
735 t4_aes256_load_keys_for_encrypt, t4_aes256_cfb128_encrypt, |
|
736 t4_aes256_load_keys_for_encrypt, t4_aes256_cfb128_decrypt, tctx->iv) |
|
737 #endif /* !SOLARIS_NO_AES_CFB128 */ |
|
738 |
|
739 /* AES CTR mode. */ |
|
740 T4_CIPHER_DO_AES(t4_cipher_do_aes_128_ctr, |
|
741 t4_aes128_load_keys_for_encrypt, t4_aes128_ctr_crypt, |
|
742 t4_aes128_load_keys_for_decrypt, t4_aes128_ctr_crypt, tctx->iv) |
|
743 T4_CIPHER_DO_AES(t4_cipher_do_aes_192_ctr, |
|
744 t4_aes192_load_keys_for_encrypt, t4_aes192_ctr_crypt, |
|
745 t4_aes192_load_keys_for_decrypt, t4_aes192_ctr_crypt, tctx->iv) |
|
746 T4_CIPHER_DO_AES(t4_cipher_do_aes_256_ctr, |
|
747 t4_aes256_load_keys_for_encrypt, t4_aes256_ctr_crypt, |
|
748 t4_aes256_load_keys_for_decrypt, t4_aes256_ctr_crypt, tctx->iv) |
|
749 |
|
750 /* AES ECB mode. */ |
|
751 T4_CIPHER_DO_AES(t4_cipher_do_aes_128_ecb, |
|
752 t4_aes128_load_keys_for_encrypt, t4_aes128_ecb_encrypt, |
|
753 t4_aes128_load_keys_for_decrypt, t4_aes128_ecb_decrypt, NULL) |
|
754 T4_CIPHER_DO_AES(t4_cipher_do_aes_192_ecb, |
|
755 t4_aes192_load_keys_for_encrypt, t4_aes192_ecb_encrypt, |
|
756 t4_aes192_load_keys_for_decrypt, t4_aes192_ecb_decrypt, NULL) |
|
757 T4_CIPHER_DO_AES(t4_cipher_do_aes_256_ecb, |
|
758 t4_aes256_load_keys_for_encrypt, t4_aes256_ecb_encrypt, |
|
759 t4_aes256_load_keys_for_decrypt, t4_aes256_ecb_decrypt, NULL) |
|
760 |
|
761 |
|
762 /* |
237 /* |
763 * Is the t4 engine available? |
238 * Is the t4 engine available? |
764 * Passed to ENGINE_set_init_function(). |
239 * Passed to ENGINE_set_init_function(). |
765 */ |
240 */ |
766 /* ARGSUSED */ |
241 /* ARGSUSED */ |