Print this page
first pass
Split |
Close |
Expand all |
Collapse all |
--- old/usr/src/uts/common/crypto/io/aes.c
+++ new/usr/src/uts/common/crypto/io/aes.c
1 1 /*
2 2 * CDDL HEADER START
3 3 *
4 4 * The contents of this file are subject to the terms of the
5 5 * Common Development and Distribution License (the "License").
6 6 * You may not use this file except in compliance with the License.
7 7 *
8 8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 9 * or http://www.opensolaris.org/os/licensing.
10 10 * See the License for the specific language governing permissions
11 11 * and limitations under the License.
12 12 *
13 13 * When distributing Covered Code, include this CDDL HEADER in each
14 14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 15 * If applicable, add the following below this CDDL HEADER, with the
16 16 * fields enclosed by brackets "[]" replaced with your own identifying
17 17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 18 *
19 19 * CDDL HEADER END
20 20 */
21 21 /*
22 22 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
23 23 */
24 24
25 25 /*
26 26 * AES provider for the Kernel Cryptographic Framework (KCF)
27 27 */
28 28
29 29 #include <sys/types.h>
30 30 #include <sys/systm.h>
31 31 #include <sys/modctl.h>
32 32 #include <sys/cmn_err.h>
33 33 #include <sys/ddi.h>
34 34 #include <sys/crypto/common.h>
35 35 #include <sys/crypto/impl.h>
36 36 #include <sys/crypto/spi.h>
37 37 #include <sys/sysmacros.h>
38 38 #include <sys/strsun.h>
39 39 #include <modes/modes.h>
40 40 #define _AES_IMPL
41 41 #include <aes/aes_impl.h>
42 42
/* Crypto-module operations vector supplied by the kernel. */
extern struct mod_ops mod_cryptoops;

/*
 * Module linkage information for the kernel.
 */
static struct modlcrypto modlcrypto = {
	&mod_cryptoops,
	"AES Kernel SW Provider"	/* string shown by modinfo(1M) */
};

static struct modlinkage modlinkage = {
	MODREV_1,
	(void *)&modlcrypto,
	NULL
};
58 58
/*
 * The following definitions are to keep EXPORT_SRC happy.
 * (Fallback zero values used only when aes_impl.h did not define them.)
 */
#ifndef AES_MIN_KEY_BYTES
#define	AES_MIN_KEY_BYTES		0
#endif

#ifndef AES_MAX_KEY_BYTES
#define	AES_MAX_KEY_BYTES		0
#endif
69 69
/*
 * Mechanism info structure passed to KCF during registration.
 * One entry per supported AES mechanism; key sizes are expressed in
 * bytes (CRYPTO_KEYSIZE_UNIT_IN_BYTES).
 */
static crypto_mech_info_t aes_mech_info_tab[] = {
	/* AES_ECB */
	{SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CBC */
	{SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CTR */
	{SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CCM */
	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_GCM */
	{SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_GMAC: also usable as a MAC / sign / verify mechanism */
	{SUN_CKM_AES_GMAC, AES_GMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC |
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC |
	    CRYPTO_FG_SIGN | CRYPTO_FG_SIGN_ATOMIC |
	    CRYPTO_FG_VERIFY | CRYPTO_FG_VERIFY_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
};
108 108
/*
 * Operations are in-place if the output buffer is NULL: point the output
 * argument at the input buffer.  Wrapped in do/while (0) so the macro is
 * a single statement and composes safely with if/else at call sites
 * (the original bare-if form left a dangling-else hazard and an extra
 * empty statement from the caller's trailing semicolon).
 */
#define	AES_ARG_INPLACE(input, output)				\
	do {							\
		if ((output) == NULL)				\
			(output) = (input);			\
	} while (0)
113 113
/* Provider-status entry point (definition below). */
static void aes_provider_status(crypto_provider_handle_t, uint_t *);

/* Control entry points vector handed to KCF at registration time. */
static crypto_control_ops_t aes_control_ops = {
	aes_provider_status
};
119 119
/* Forward declarations of the cipher (encrypt/decrypt) entry points. */
static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
    crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);

static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

/* Cipher entry points vector handed to KCF. */
static crypto_cipher_ops_t aes_cipher_ops = {
	aes_encrypt_init,
	aes_encrypt,
	aes_encrypt_update,
	aes_encrypt_final,
	aes_encrypt_atomic,
	aes_decrypt_init,
	aes_decrypt,
	aes_decrypt_update,
	aes_decrypt_final,
	aes_decrypt_atomic
};
161 161
/* Forward declarations of the MAC (GMAC) atomic entry points. */
static int aes_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);

/*
 * MAC entry points vector: only the atomic variants are supported;
 * multi-part MAC (init/update/final and single) slots are NULL.
 */
static crypto_mac_ops_t aes_mac_ops = {
	NULL,
	NULL,
	NULL,
	NULL,
	aes_mac_atomic,
	aes_mac_verify_atomic
};
177 177
/* Forward declarations of the context-management entry points. */
static int aes_create_ctx_template(crypto_provider_handle_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
    size_t *, crypto_req_handle_t);
static int aes_free_context(crypto_ctx_t *);

/* Context-template management entry points vector handed to KCF. */
static crypto_ctx_ops_t aes_ctx_ops = {
	aes_create_ctx_template,
	aes_free_context
};
187 187
/*
 * Top-level operations vector for this provider.  Only the control,
 * cipher, MAC, and context-template op groups are implemented; every
 * other slot is unused (NULL).
 */
static crypto_ops_t aes_crypto_ops = {
	&aes_control_ops,
	NULL,
	&aes_cipher_ops,
	&aes_mac_ops,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	&aes_ctx_ops,
	NULL,
	NULL,
	NULL,
};
207 207
/*
 * Provider descriptor passed to crypto_register_provider(): a software
 * (CRYPTO_SW_PROVIDER) provider advertising the mechanisms in
 * aes_mech_info_tab via the aes_crypto_ops vectors.
 */
static crypto_provider_info_t aes_prov_info = {
	CRYPTO_SPI_VERSION_4,
	"AES Software Provider",
	CRYPTO_SW_PROVIDER,
	{&modlinkage},
	NULL,
	&aes_crypto_ops,
	sizeof (aes_mech_info_tab)/sizeof (crypto_mech_info_t),
	aes_mech_info_tab
};

/* KCF handle for this provider; non-NULL once registration succeeds. */
static crypto_kcf_provider_handle_t aes_prov_handle = NULL;
/* Zero-length RAW-format crypto_data_t placeholder. */
static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW };
221 221
222 222 int
223 223 _init(void)
224 224 {
225 225 int ret;
226 226
227 227 if ((ret = mod_install(&modlinkage)) != 0)
228 228 return (ret);
229 229
230 230 /* Register with KCF. If the registration fails, remove the module. */
231 231 if (crypto_register_provider(&aes_prov_info, &aes_prov_handle)) {
232 232 (void) mod_remove(&modlinkage);
233 233 return (EACCES);
234 234 }
235 235
236 236 return (0);
237 237 }
238 238
239 239 int
240 240 _fini(void)
241 241 {
242 242 /* Unregister from KCF if module is registered */
243 243 if (aes_prov_handle != NULL) {
244 244 if (crypto_unregister_provider(aes_prov_handle))
245 245 return (EBUSY);
246 246
247 247 aes_prov_handle = NULL;
248 248 }
249 249
250 250 return (mod_remove(&modlinkage));
251 251 }
252 252
/*
 * Loadable-module entry point: report module information (for
 * modinfo(1M)) via the standard mod_info() helper.
 */
int
_info(struct modinfo *modinfop)
{
	return (mod_info(&modlinkage, modinfop));
}
258 258
259 259
/*
 * Validate the mechanism parameter length for the given AES mechanism
 * type, and, when 'ctx' is non-NULL, allocate the matching mode-specific
 * context with the mode's *_alloc_ctx() routine using 'kmflag'.
 *
 * Returns CRYPTO_SUCCESS, CRYPTO_MECHANISM_INVALID for an unknown
 * mechanism type, or CRYPTO_MECHANISM_PARAM_INVALID for a wrong-sized
 * parameter.
 */
static int
aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag)
{
	void *p = NULL;
	boolean_t param_required = B_TRUE;
	size_t param_len;
	void *(*alloc_fun)(int);
	int rv = CRYPTO_SUCCESS;

	/* Select the expected parameter length and context allocator. */
	switch (mechanism->cm_type) {
	case AES_ECB_MECH_INFO_TYPE:
		param_required = B_FALSE;	/* ECB takes no parameter */
		alloc_fun = ecb_alloc_ctx;
		break;
	case AES_CBC_MECH_INFO_TYPE:
		param_len = AES_BLOCK_LEN;	/* the IV */
		alloc_fun = cbc_alloc_ctx;
		break;
	case AES_CTR_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CTR_PARAMS);
		alloc_fun = ctr_alloc_ctx;
		break;
	case AES_CCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CCM_PARAMS);
		alloc_fun = ccm_alloc_ctx;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GCM_PARAMS);
		alloc_fun = gcm_alloc_ctx;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GMAC_PARAMS);
		alloc_fun = gmac_alloc_ctx;
		break;
	default:
		rv = CRYPTO_MECHANISM_INVALID;
		return (rv);
	}
	/*
	 * Only a *wrong-sized* supplied parameter is rejected here; a NULL
	 * cm_param passes this check even when param_required is set.
	 */
	if (param_required && mechanism->cm_param != NULL &&
	    mechanism->cm_param_len != param_len) {
		rv = CRYPTO_MECHANISM_PARAM_INVALID;
	}
	if (ctx != NULL) {
		/*
		 * NOTE(review): the allocator result is stored without a
		 * NULL check — presumably callers handle a NULL *ctx or the
		 * allocator sleeps under KM_SLEEP; confirm in *_alloc_ctx().
		 */
		p = (alloc_fun)(kmflag);
		*ctx = p;
	}
	return (rv);
}
308 308
309 -/* EXPORT DELETE START */
310 -
/*
 * Initialize key schedules for AES.
 *
 * Validates that the key is given by value (CRYPTO_KEY_RAW) and that its
 * bit length lies in [AES_MINBITS, AES_MAXBITS] and is a multiple of 64
 * (i.e. 128, 192, or 256 within that range), then expands it into the
 * caller-supplied schedule buffer 'newbie'.
 */
static int
init_keysched(crypto_key_t *key, void *newbie)
{
	/*
	 * Only keys by value are supported by this module.
	 */
	switch (key->ck_format) {
	case CRYPTO_KEY_RAW:
		if (key->ck_length < AES_MINBITS ||
		    key->ck_length > AES_MAXBITS) {
			return (CRYPTO_KEY_SIZE_RANGE);
		}

		/* key length must be either 128, 192, or 256 */
		if ((key->ck_length & 63) != 0)
			return (CRYPTO_KEY_SIZE_RANGE);
		break;
	default:
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	aes_init_keysched(key->ck_data, key->ck_length, newbie);
	return (CRYPTO_SUCCESS);
}
338 336
339 -/* EXPORT DELETE END */
340 -
/*
 * KCF software provider control entry points.
 */
/* ARGSUSED */
static void
aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
	/* This software provider is always ready. */
	*status = CRYPTO_PROVIDER_READY;
}
350 346
351 347 static int
352 348 aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
353 349 crypto_key_t *key, crypto_spi_ctx_template_t template,
354 350 crypto_req_handle_t req) {
355 351 return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
356 352 }
357 353
358 354 static int
359 355 aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
360 356 crypto_key_t *key, crypto_spi_ctx_template_t template,
361 357 crypto_req_handle_t req) {
362 358 return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
363 359 }
364 360
↓ open down ↓ |
14 lines elided |
↑ open up ↑ |
365 361
366 362
/*
 * KCF software provider encrypt entry points.
 */
/*
 * Shared init path for aes_encrypt_init() and aes_decrypt_init():
 * validates the key format, allocates a mode context via
 * aes_check_mech_param(), initializes it, and stashes it in the KCF
 * context.  On initialization failure the mode context is freed.
 */
static int
aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req, boolean_t is_encrypt_init)
{
	aes_ctx_t *aes_ctx;
	int rv;
	int kmflag;

	/*
	 * Only keys by value are supported by this module.
	 */
	if (key->ck_format != CRYPTO_KEY_RAW) {
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	/* sleep/nosleep allocation behavior follows the request handle */
	kmflag = crypto_kmflag(req);
	if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
	    != CRYPTO_SUCCESS)
		return (rv);

	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
	    is_encrypt_init);
	if (rv != CRYPTO_SUCCESS) {
		crypto_free_mode_ctx(aes_ctx);
		return (rv);
	}

	ctx->cc_provider_private = aes_ctx;

	return (CRYPTO_SUCCESS);
}
407 398
408 399 static void
409 400 aes_copy_block64(uint8_t *in, uint64_t *out)
410 401 {
411 402 if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
412 403 /* LINTED: pointer alignment */
413 404 out[0] = *(uint64_t *)&in[0];
414 405 /* LINTED: pointer alignment */
415 406 out[1] = *(uint64_t *)&in[8];
416 407 } else {
417 408 uint8_t *iv8 = (uint8_t *)&out[0];
418 409
419 410 AES_COPY_BLOCK(in, iv8);
↓ open down ↓ |
5 lines elided |
↑ open up ↑ |
420 411 }
421 412 }
422 413
423 414
/*
 * KCF single-part encrypt entry point: encrypts 'plaintext' into
 * 'ciphertext' (in place when ciphertext is NULL) using the context
 * set up by aes_encrypt_init(), then frees the context.  For CCM/GCM
 * the MAC/tag is appended to the ciphertext; for GMAC only a tag is
 * produced and plaintext must be empty.
 */
static int
aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	size_t saved_length, saved_offset, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
		return (CRYPTO_DATA_LEN_RANGE);

	AES_ARG_INPLACE(plaintext, ciphertext);

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following case.
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		/* CCM appends a MAC after the ciphertext */
		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
		break;
	case GCM_MODE:
		/* GCM appends an authentication tag after the ciphertext */
		length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		/* GMAC authenticates AAD only; no plaintext is permitted */
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = aes_ctx->ac_tag_len;
		break;
	default:
		length_needed = plaintext->cd_length;
	}

	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_length = ciphertext->cd_length;
	saved_offset = ciphertext->cd_offset;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
	if (ret != CRYPTO_SUCCESS) {
		return (ret);
	}

	/*
	 * For CCM mode, aes_ccm_encrypt_final() will take care of any
	 * left-over unprocessed data, and compute the MAC
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * ccm_encrypt_final() will compute the MAC and append
		 * it to existing ciphertext. So, need to adjust the left over
		 * length value accordingly
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			/* report total bytes produced, not bytes remaining */
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * gcm_encrypt_final() will compute the MAC and append
		 * it to existing ciphertext. So, need to adjust the left over
		 * length value accordingly
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			/* report total bytes produced, not bytes remaining */
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);
	(void) aes_free_context(ctx);

	return (ret);
}
541 528
542 529
/*
 * KCF single-part decrypt entry point: decrypts 'ciphertext' into
 * 'plaintext' (in place when plaintext is NULL) using the context set
 * up by aes_decrypt_init(), then frees the context.  For CCM/GCM the
 * *_decrypt_final() routines also verify the MAC/tag.
 */
static int
aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	off_t saved_offset;
	size_t saved_length, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, the ciphertext must be a multiple of the AES
	 * block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Return length needed to store the output.
	 * Do not destroy context when plaintext buffer is too small.
	 *
	 * CCM:  plaintext is MAC len smaller than cipher text
	 * GCM:  plaintext is TAG len smaller than cipher text
	 * GMAC: plaintext length must be zero
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = aes_ctx->ac_processed_data_len;
		break;
	case GCM_MODE:
		length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
	if (ret != CRYPTO_SUCCESS) {
		goto cleanup;
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);

		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		/* releases buffered plaintext and verifies the MAC */
		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		/* releases buffered plaintext and verifies the tag */
		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);

cleanup:
	(void) aes_free_context(ctx);

	return (ret);
}
659 642
660 643
/*
 * KCF multi-part encrypt update: encrypts as many whole AES blocks of
 * 'plaintext' as possible into 'ciphertext', buffering any partial
 * block in the context.  For CTR mode, remaining bytes are flushed via
 * ctr_mode_final() since CTR is a stream cipher.  The context is NOT
 * destroyed here.
 */
/* ARGSUSED */
static int
aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(plaintext, ciphertext);

	/* compute number of bytes that will hold the ciphertext */
	out_len = aes_ctx->ac_remainder_len;
	out_len += plaintext->cd_length;
	out_len &= ~(AES_BLOCK_LEN - 1);	/* whole blocks only */

	/* return length needed to store the output */
	if (ciphertext->cd_length < out_len) {
		ciphertext->cd_length = out_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
		    ciphertext, aes_encrypt_block);
	}

	if (ret == CRYPTO_SUCCESS) {
		if (plaintext != ciphertext)
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
	} else {
		/* on failure, restore the caller-visible length */
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

	return (ret);
}
735 718
736 719
/*
 * KCF multi-part decrypt update: decrypts as many whole AES blocks of
 * 'ciphertext' as possible into 'plaintext', buffering any partial
 * block in the context.  CCM/GCM/GMAC buffer everything internally and
 * produce no plaintext until final; CTR flushes remaining stream bytes
 * via ctr_mode_final().  The context is NOT destroyed here.
 */
static int
aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Compute number of bytes that will hold the plaintext.
	 * This is not necessary for CCM, GCM, and GMAC since these
	 * mechanisms never return plaintext for update operations.
	 */
	if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		out_len = aes_ctx->ac_remainder_len;
		out_len += ciphertext->cd_length;
		out_len &= ~(AES_BLOCK_LEN - 1);	/* whole blocks only */

		/* return length needed to store the output */
		if (plaintext->cd_length < out_len) {
			plaintext->cd_length = out_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/* propagate sleep/nosleep allocation policy to the GCM layer */
	if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE))
		gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req));

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
		    aes_encrypt_block);
		/* report length errors in decrypt terms */
		if (ret == CRYPTO_DATA_LEN_RANGE)
			ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (ciphertext != plaintext)
			plaintext->cd_length =
			    plaintext->cd_offset - saved_offset;
	} else {
		/* on failure, restore the caller-visible length */
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;


	return (ret);
}
822 805
/*
 * KCF multi-part encrypt final: flushes any buffered data according to
 * the active mode (CTR remainder, CCM MAC, GCM/GMAC tag), rejects a
 * dangling partial block for plain block modes, and frees the context.
 */
/* ARGSUSED */
static int
aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
	aes_ctx_t *aes_ctx;
	int ret;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO &&
	    data->cd_format != CRYPTO_DATA_MBLK) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	if (aes_ctx->ac_flags & CTR_MODE) {
		/* flush any leftover stream bytes */
		if (aes_ctx->ac_remainder_len > 0) {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	} else if (aes_ctx->ac_flags & CCM_MODE) {
		/* emit remaining data plus the CCM MAC */
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		size_t saved_offset = data->cd_offset;

		/* emit remaining data plus the GCM/GMAC tag */
		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
		data->cd_length = data->cd_offset - saved_offset;
		data->cd_offset = saved_offset;
	} else {
		/*
		 * There must be no unprocessed plaintext.
		 * This happens if the length of the last data is
		 * not a multiple of the AES block length.
		 */
		if (aes_ctx->ac_remainder_len > 0) {
			return (CRYPTO_DATA_LEN_RANGE);
		}
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}
885 863
886 864 /* ARGSUSED */
887 865 static int
888 866 aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
889 867 crypto_req_handle_t req)
890 868 {
891 -
892 -/* EXPORT DELETE START */
893 -
894 869 aes_ctx_t *aes_ctx;
895 870 int ret;
896 871 off_t saved_offset;
897 872 size_t saved_length;
898 873
899 874 ASSERT(ctx->cc_provider_private != NULL);
900 875 aes_ctx = ctx->cc_provider_private;
901 876
902 877 if (data->cd_format != CRYPTO_DATA_RAW &&
903 878 data->cd_format != CRYPTO_DATA_UIO &&
904 879 data->cd_format != CRYPTO_DATA_MBLK) {
905 880 return (CRYPTO_ARGUMENTS_BAD);
906 881 }
907 882
908 883 /*
909 884 * There must be no unprocessed ciphertext.
910 885 * This happens if the length of the last ciphertext is
911 886 * not a multiple of the AES block length.
912 887 */
913 888 if (aes_ctx->ac_remainder_len > 0) {
914 889 if ((aes_ctx->ac_flags & CTR_MODE) == 0)
915 890 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
916 891 else {
917 892 ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
918 893 aes_encrypt_block);
919 894 if (ret == CRYPTO_DATA_LEN_RANGE)
920 895 ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
921 896 if (ret != CRYPTO_SUCCESS)
922 897 return (ret);
923 898 }
924 899 }
925 900
926 901 if (aes_ctx->ac_flags & CCM_MODE) {
927 902 /*
928 903 * This is where all the plaintext is returned, make sure
929 904 * the plaintext buffer is big enough
930 905 */
931 906 size_t pt_len = aes_ctx->ac_data_len;
932 907 if (data->cd_length < pt_len) {
933 908 data->cd_length = pt_len;
934 909 return (CRYPTO_BUFFER_TOO_SMALL);
935 910 }
936 911
937 912 ASSERT(aes_ctx->ac_processed_data_len == pt_len);
938 913 ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
939 914 saved_offset = data->cd_offset;
940 915 saved_length = data->cd_length;
941 916 ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
942 917 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
943 918 aes_xor_block);
944 919 if (ret == CRYPTO_SUCCESS) {
945 920 data->cd_length = data->cd_offset - saved_offset;
946 921 } else {
947 922 data->cd_length = saved_length;
948 923 }
949 924
950 925 data->cd_offset = saved_offset;
951 926 if (ret != CRYPTO_SUCCESS) {
952 927 return (ret);
953 928 }
954 929 } else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
955 930 /*
956 931 * This is where all the plaintext is returned, make sure
957 932 * the plaintext buffer is big enough
958 933 */
959 934 gcm_ctx_t *ctx = (gcm_ctx_t *)aes_ctx;
960 935 size_t pt_len = ctx->gcm_processed_data_len - ctx->gcm_tag_len;
961 936
962 937 if (data->cd_length < pt_len) {
963 938 data->cd_length = pt_len;
964 939 return (CRYPTO_BUFFER_TOO_SMALL);
965 940 }
966 941
967 942 saved_offset = data->cd_offset;
968 943 saved_length = data->cd_length;
969 944 ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, data,
970 945 AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
971 946 if (ret == CRYPTO_SUCCESS) {
972 947 data->cd_length = data->cd_offset - saved_offset;
973 948 } else {
974 949 data->cd_length = saved_length;
975 950 }
976 951
977 952 data->cd_offset = saved_offset;
978 953 if (ret != CRYPTO_SUCCESS) {
979 954 return (ret);
↓ open down ↓ |
76 lines elided |
↑ open up ↑ |
980 955 }
981 956 }
982 957
983 958
984 959 if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
985 960 data->cd_length = 0;
986 961 }
987 962
988 963 (void) aes_free_context(ctx);
989 964
990 -/* EXPORT DELETE END */
991 -
992 965 return (CRYPTO_SUCCESS);
993 966 }
994 967
995 968 /* ARGSUSED */
996 969 static int
997 970 aes_encrypt_atomic(crypto_provider_handle_t provider,
998 971 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
999 972 crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
1000 973 crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1001 974 {
1002 975 aes_ctx_t aes_ctx; /* on the stack */
1003 976 off_t saved_offset;
1004 977 size_t saved_length;
1005 978 size_t length_needed;
1006 979 int ret;
1007 980
1008 981 AES_ARG_INPLACE(plaintext, ciphertext);
1009 982
1010 983 /*
1011 984 * CTR, CCM, GCM, and GMAC modes do not require that plaintext
1012 985 * be a multiple of AES block size.
1013 986 */
1014 987 switch (mechanism->cm_type) {
1015 988 case AES_CTR_MECH_INFO_TYPE:
1016 989 case AES_CCM_MECH_INFO_TYPE:
1017 990 case AES_GCM_MECH_INFO_TYPE:
1018 991 case AES_GMAC_MECH_INFO_TYPE:
1019 992 break;
1020 993 default:
1021 994 if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
1022 995 return (CRYPTO_DATA_LEN_RANGE);
1023 996 }
1024 997
1025 998 if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
1026 999 return (ret);
1027 1000
1028 1001 bzero(&aes_ctx, sizeof (aes_ctx_t));
1029 1002
1030 1003 ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1031 1004 crypto_kmflag(req), B_TRUE);
1032 1005 if (ret != CRYPTO_SUCCESS)
1033 1006 return (ret);
1034 1007
1035 1008 switch (mechanism->cm_type) {
1036 1009 case AES_CCM_MECH_INFO_TYPE:
1037 1010 length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
1038 1011 break;
1039 1012 case AES_GMAC_MECH_INFO_TYPE:
1040 1013 if (plaintext->cd_length != 0)
1041 1014 return (CRYPTO_ARGUMENTS_BAD);
1042 1015 /* FALLTHRU */
1043 1016 case AES_GCM_MECH_INFO_TYPE:
1044 1017 length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
1045 1018 break;
1046 1019 default:
1047 1020 length_needed = plaintext->cd_length;
1048 1021 }
1049 1022
1050 1023 /* return size of buffer needed to store output */
1051 1024 if (ciphertext->cd_length < length_needed) {
1052 1025 ciphertext->cd_length = length_needed;
1053 1026 ret = CRYPTO_BUFFER_TOO_SMALL;
1054 1027 goto out;
1055 1028 }
1056 1029
1057 1030 saved_offset = ciphertext->cd_offset;
1058 1031 saved_length = ciphertext->cd_length;
1059 1032
1060 1033 /*
1061 1034 * Do an update on the specified input data.
1062 1035 */
1063 1036 switch (plaintext->cd_format) {
1064 1037 case CRYPTO_DATA_RAW:
1065 1038 ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
1066 1039 aes_encrypt_contiguous_blocks, aes_copy_block64);
1067 1040 break;
1068 1041 case CRYPTO_DATA_UIO:
1069 1042 ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
1070 1043 aes_encrypt_contiguous_blocks, aes_copy_block64);
1071 1044 break;
1072 1045 case CRYPTO_DATA_MBLK:
1073 1046 ret = crypto_update_mp(&aes_ctx, plaintext, ciphertext,
1074 1047 aes_encrypt_contiguous_blocks, aes_copy_block64);
1075 1048 break;
1076 1049 default:
1077 1050 ret = CRYPTO_ARGUMENTS_BAD;
1078 1051 }
1079 1052
1080 1053 if (ret == CRYPTO_SUCCESS) {
1081 1054 if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1082 1055 ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
1083 1056 ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
1084 1057 aes_xor_block);
1085 1058 if (ret != CRYPTO_SUCCESS)
1086 1059 goto out;
1087 1060 ASSERT(aes_ctx.ac_remainder_len == 0);
1088 1061 } else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1089 1062 mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1090 1063 ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
1091 1064 ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
1092 1065 aes_copy_block, aes_xor_block);
1093 1066 if (ret != CRYPTO_SUCCESS)
1094 1067 goto out;
1095 1068 ASSERT(aes_ctx.ac_remainder_len == 0);
1096 1069 } else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
1097 1070 if (aes_ctx.ac_remainder_len > 0) {
1098 1071 ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
1099 1072 ciphertext, aes_encrypt_block);
1100 1073 if (ret != CRYPTO_SUCCESS)
1101 1074 goto out;
1102 1075 }
1103 1076 } else {
1104 1077 ASSERT(aes_ctx.ac_remainder_len == 0);
1105 1078 }
1106 1079
1107 1080 if (plaintext != ciphertext) {
1108 1081 ciphertext->cd_length =
1109 1082 ciphertext->cd_offset - saved_offset;
1110 1083 }
1111 1084 } else {
1112 1085 ciphertext->cd_length = saved_length;
1113 1086 }
1114 1087 ciphertext->cd_offset = saved_offset;
1115 1088
1116 1089 out:
1117 1090 if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1118 1091 bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1119 1092 kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1120 1093 }
1121 1094
1122 1095 return (ret);
1123 1096 }
1124 1097
1125 1098 /* ARGSUSED */
1126 1099 static int
1127 1100 aes_decrypt_atomic(crypto_provider_handle_t provider,
1128 1101 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1129 1102 crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
1130 1103 crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1131 1104 {
1132 1105 aes_ctx_t aes_ctx; /* on the stack */
1133 1106 off_t saved_offset;
1134 1107 size_t saved_length;
1135 1108 size_t length_needed;
1136 1109 int ret;
1137 1110
1138 1111 AES_ARG_INPLACE(ciphertext, plaintext);
1139 1112
1140 1113 /*
1141 1114 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
1142 1115 * be a multiple of AES block size.
1143 1116 */
1144 1117 switch (mechanism->cm_type) {
1145 1118 case AES_CTR_MECH_INFO_TYPE:
1146 1119 case AES_CCM_MECH_INFO_TYPE:
1147 1120 case AES_GCM_MECH_INFO_TYPE:
1148 1121 case AES_GMAC_MECH_INFO_TYPE:
1149 1122 break;
1150 1123 default:
1151 1124 if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
1152 1125 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
1153 1126 }
1154 1127
1155 1128 if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
1156 1129 return (ret);
1157 1130
1158 1131 bzero(&aes_ctx, sizeof (aes_ctx_t));
1159 1132
1160 1133 ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1161 1134 crypto_kmflag(req), B_FALSE);
1162 1135 if (ret != CRYPTO_SUCCESS)
1163 1136 return (ret);
1164 1137
1165 1138 switch (mechanism->cm_type) {
1166 1139 case AES_CCM_MECH_INFO_TYPE:
1167 1140 length_needed = aes_ctx.ac_data_len;
1168 1141 break;
1169 1142 case AES_GCM_MECH_INFO_TYPE:
1170 1143 length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
1171 1144 break;
1172 1145 case AES_GMAC_MECH_INFO_TYPE:
1173 1146 if (plaintext->cd_length != 0)
1174 1147 return (CRYPTO_ARGUMENTS_BAD);
1175 1148 length_needed = 0;
1176 1149 break;
1177 1150 default:
1178 1151 length_needed = ciphertext->cd_length;
1179 1152 }
1180 1153
1181 1154 /* return size of buffer needed to store output */
1182 1155 if (plaintext->cd_length < length_needed) {
1183 1156 plaintext->cd_length = length_needed;
1184 1157 ret = CRYPTO_BUFFER_TOO_SMALL;
1185 1158 goto out;
1186 1159 }
1187 1160
1188 1161 saved_offset = plaintext->cd_offset;
1189 1162 saved_length = plaintext->cd_length;
1190 1163
1191 1164 if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1192 1165 mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE)
1193 1166 gcm_set_kmflag((gcm_ctx_t *)&aes_ctx, crypto_kmflag(req));
1194 1167
1195 1168 /*
1196 1169 * Do an update on the specified input data.
1197 1170 */
1198 1171 switch (ciphertext->cd_format) {
1199 1172 case CRYPTO_DATA_RAW:
1200 1173 ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
1201 1174 aes_decrypt_contiguous_blocks, aes_copy_block64);
1202 1175 break;
1203 1176 case CRYPTO_DATA_UIO:
1204 1177 ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
1205 1178 aes_decrypt_contiguous_blocks, aes_copy_block64);
1206 1179 break;
1207 1180 case CRYPTO_DATA_MBLK:
1208 1181 ret = crypto_update_mp(&aes_ctx, ciphertext, plaintext,
1209 1182 aes_decrypt_contiguous_blocks, aes_copy_block64);
1210 1183 break;
1211 1184 default:
1212 1185 ret = CRYPTO_ARGUMENTS_BAD;
1213 1186 }
1214 1187
1215 1188 if (ret == CRYPTO_SUCCESS) {
1216 1189 if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1217 1190 ASSERT(aes_ctx.ac_processed_data_len
1218 1191 == aes_ctx.ac_data_len);
1219 1192 ASSERT(aes_ctx.ac_processed_mac_len
1220 1193 == aes_ctx.ac_mac_len);
1221 1194 ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
1222 1195 plaintext, AES_BLOCK_LEN, aes_encrypt_block,
1223 1196 aes_copy_block, aes_xor_block);
1224 1197 ASSERT(aes_ctx.ac_remainder_len == 0);
1225 1198 if ((ret == CRYPTO_SUCCESS) &&
1226 1199 (ciphertext != plaintext)) {
1227 1200 plaintext->cd_length =
1228 1201 plaintext->cd_offset - saved_offset;
1229 1202 } else {
1230 1203 plaintext->cd_length = saved_length;
1231 1204 }
1232 1205 } else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1233 1206 mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1234 1207 ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
1235 1208 plaintext, AES_BLOCK_LEN, aes_encrypt_block,
1236 1209 aes_xor_block);
1237 1210 ASSERT(aes_ctx.ac_remainder_len == 0);
1238 1211 if ((ret == CRYPTO_SUCCESS) &&
1239 1212 (ciphertext != plaintext)) {
1240 1213 plaintext->cd_length =
1241 1214 plaintext->cd_offset - saved_offset;
1242 1215 } else {
1243 1216 plaintext->cd_length = saved_length;
1244 1217 }
1245 1218 } else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
1246 1219 ASSERT(aes_ctx.ac_remainder_len == 0);
1247 1220 if (ciphertext != plaintext)
1248 1221 plaintext->cd_length =
1249 1222 plaintext->cd_offset - saved_offset;
1250 1223 } else {
1251 1224 if (aes_ctx.ac_remainder_len > 0) {
1252 1225 ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
1253 1226 plaintext, aes_encrypt_block);
1254 1227 if (ret == CRYPTO_DATA_LEN_RANGE)
1255 1228 ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
1256 1229 if (ret != CRYPTO_SUCCESS)
1257 1230 goto out;
1258 1231 }
1259 1232 if (ciphertext != plaintext)
1260 1233 plaintext->cd_length =
1261 1234 plaintext->cd_offset - saved_offset;
1262 1235 }
1263 1236 } else {
1264 1237 plaintext->cd_length = saved_length;
1265 1238 }
1266 1239 plaintext->cd_offset = saved_offset;
1267 1240
1268 1241 out:
1269 1242 if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1270 1243 bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1271 1244 kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1272 1245 }
1273 1246
1274 1247 if (aes_ctx.ac_flags & CCM_MODE) {
1275 1248 if (aes_ctx.ac_pt_buf != NULL) {
1276 1249 kmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
1277 1250 }
1278 1251 } else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
1279 1252 if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
1280 1253 kmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
1281 1254 ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
1282 1255 }
1283 1256 }
1284 1257
1285 1258 return (ret);
1286 1259 }
↓ open down ↓ |
285 lines elided |
↑ open up ↑ |
1287 1260
1288 1261 /*
1289 1262 * KCF software provider context template entry points.
1290 1263 */
1291 1264 /* ARGSUSED */
1292 1265 static int
1293 1266 aes_create_ctx_template(crypto_provider_handle_t provider,
1294 1267 crypto_mechanism_t *mechanism, crypto_key_t *key,
1295 1268 crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
1296 1269 {
1297 -
1298 -/* EXPORT DELETE START */
1299 -
1300 1270 void *keysched;
1301 1271 size_t size;
1302 1272 int rv;
1303 1273
1304 1274 if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
1305 1275 mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
1306 1276 mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
1307 1277 mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
1308 1278 mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
1309 1279 mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
1310 1280 return (CRYPTO_MECHANISM_INVALID);
1311 1281
1312 1282 if ((keysched = aes_alloc_keysched(&size,
1313 1283 crypto_kmflag(req))) == NULL) {
1314 1284 return (CRYPTO_HOST_MEMORY);
1315 1285 }
1316 1286
1317 1287 /*
1318 1288 * Initialize key schedule. Key length information is stored
1319 1289 * in the key.
↓ open down ↓ |
10 lines elided |
↑ open up ↑ |
1320 1290 */
1321 1291 if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1322 1292 bzero(keysched, size);
1323 1293 kmem_free(keysched, size);
1324 1294 return (rv);
1325 1295 }
1326 1296
1327 1297 *tmpl = keysched;
1328 1298 *tmpl_size = size;
1329 1299
1330 -/* EXPORT DELETE END */
1331 -
1332 1300 return (CRYPTO_SUCCESS);
1333 1301 }
1334 1302
1335 1303
1336 1304 static int
1337 1305 aes_free_context(crypto_ctx_t *ctx)
1338 1306 {
1339 -
1340 -/* EXPORT DELETE START */
1341 -
1342 1307 aes_ctx_t *aes_ctx = ctx->cc_provider_private;
1343 1308
1344 1309 if (aes_ctx != NULL) {
1345 1310 if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1346 1311 ASSERT(aes_ctx->ac_keysched_len != 0);
1347 1312 bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
1348 1313 kmem_free(aes_ctx->ac_keysched,
1349 1314 aes_ctx->ac_keysched_len);
1350 1315 }
1351 1316 crypto_free_mode_ctx(aes_ctx);
1352 1317 ctx->cc_provider_private = NULL;
1353 1318 }
1354 1319
1355 -/* EXPORT DELETE END */
1356 -
1357 1320 return (CRYPTO_SUCCESS);
1358 1321 }
1359 1322
1360 1323
1361 1324 static int
1362 1325 aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
1363 1326 crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
1364 1327 boolean_t is_encrypt_init)
1365 1328 {
1366 1329 int rv = CRYPTO_SUCCESS;
1367 -
1368 -/* EXPORT DELETE START */
1369 -
1370 1330 void *keysched;
1371 1331 size_t size;
1372 1332
1373 1333 if (template == NULL) {
1374 1334 if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
1375 1335 return (CRYPTO_HOST_MEMORY);
1376 1336 /*
1377 1337 * Initialize key schedule.
1378 1338 * Key length is stored in the key.
1379 1339 */
1380 1340 if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1381 1341 kmem_free(keysched, size);
1382 1342 return (rv);
1383 1343 }
1384 1344
1385 1345 aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
1386 1346 aes_ctx->ac_keysched_len = size;
1387 1347 } else {
1388 1348 keysched = template;
1389 1349 }
1390 1350 aes_ctx->ac_keysched = keysched;
1391 1351
1392 1352 switch (mechanism->cm_type) {
1393 1353 case AES_CBC_MECH_INFO_TYPE:
1394 1354 rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
1395 1355 mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
1396 1356 break;
1397 1357 case AES_CTR_MECH_INFO_TYPE: {
1398 1358 CK_AES_CTR_PARAMS *pp;
1399 1359
1400 1360 if (mechanism->cm_param == NULL ||
1401 1361 mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
1402 1362 return (CRYPTO_MECHANISM_PARAM_INVALID);
1403 1363 }
1404 1364 pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param;
1405 1365 rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
1406 1366 pp->cb, aes_copy_block);
1407 1367 break;
1408 1368 }
1409 1369 case AES_CCM_MECH_INFO_TYPE:
1410 1370 if (mechanism->cm_param == NULL ||
1411 1371 mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
1412 1372 return (CRYPTO_MECHANISM_PARAM_INVALID);
1413 1373 }
1414 1374 rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
1415 1375 kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
1416 1376 aes_xor_block);
1417 1377 break;
1418 1378 case AES_GCM_MECH_INFO_TYPE:
1419 1379 if (mechanism->cm_param == NULL ||
1420 1380 mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
1421 1381 return (CRYPTO_MECHANISM_PARAM_INVALID);
1422 1382 }
1423 1383 rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1424 1384 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1425 1385 aes_xor_block);
1426 1386 break;
1427 1387 case AES_GMAC_MECH_INFO_TYPE:
1428 1388 if (mechanism->cm_param == NULL ||
1429 1389 mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
1430 1390 return (CRYPTO_MECHANISM_PARAM_INVALID);
1431 1391 }
1432 1392 rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1433 1393 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1434 1394 aes_xor_block);
1435 1395 break;
1436 1396 case AES_ECB_MECH_INFO_TYPE:
↓ open down ↓ |
57 lines elided |
↑ open up ↑ |
1437 1397 aes_ctx->ac_flags |= ECB_MODE;
1438 1398 }
1439 1399
1440 1400 if (rv != CRYPTO_SUCCESS) {
1441 1401 if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1442 1402 bzero(keysched, size);
1443 1403 kmem_free(keysched, size);
1444 1404 }
1445 1405 }
1446 1406
1447 -/* EXPORT DELETE END */
1448 -
1449 1407 return (rv);
1450 1408 }
1451 1409
1452 1410 static int
1453 1411 process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
1454 1412 CK_AES_GCM_PARAMS *gcm_params)
1455 1413 {
1456 1414 /* LINTED: pointer alignment */
1457 1415 CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;
1458 1416
1459 1417 if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
1460 1418 return (CRYPTO_MECHANISM_INVALID);
1461 1419
1462 1420 if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
1463 1421 return (CRYPTO_MECHANISM_PARAM_INVALID);
1464 1422
1465 1423 if (params->pIv == NULL)
1466 1424 return (CRYPTO_MECHANISM_PARAM_INVALID);
1467 1425
1468 1426 gcm_params->pIv = params->pIv;
1469 1427 gcm_params->ulIvLen = AES_GMAC_IV_LEN;
1470 1428 gcm_params->ulTagBits = AES_GMAC_TAG_BITS;
1471 1429
1472 1430 if (data == NULL)
1473 1431 return (CRYPTO_SUCCESS);
1474 1432
1475 1433 if (data->cd_format != CRYPTO_DATA_RAW)
1476 1434 return (CRYPTO_ARGUMENTS_BAD);
1477 1435
1478 1436 gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base;
1479 1437 gcm_params->ulAADLen = data->cd_length;
1480 1438 return (CRYPTO_SUCCESS);
1481 1439 }
1482 1440
1483 1441 static int
1484 1442 aes_mac_atomic(crypto_provider_handle_t provider,
1485 1443 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1486 1444 crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1487 1445 crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1488 1446 {
1489 1447 CK_AES_GCM_PARAMS gcm_params;
1490 1448 crypto_mechanism_t gcm_mech;
1491 1449 int rv;
1492 1450
1493 1451 if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
1494 1452 != CRYPTO_SUCCESS)
1495 1453 return (rv);
1496 1454
1497 1455 gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
1498 1456 gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
1499 1457 gcm_mech.cm_param = (char *)&gcm_params;
1500 1458
1501 1459 return (aes_encrypt_atomic(provider, session_id, &gcm_mech,
1502 1460 key, &null_crypto_data, mac, template, req));
1503 1461 }
1504 1462
1505 1463 static int
1506 1464 aes_mac_verify_atomic(crypto_provider_handle_t provider,
1507 1465 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1508 1466 crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1509 1467 crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1510 1468 {
1511 1469 CK_AES_GCM_PARAMS gcm_params;
1512 1470 crypto_mechanism_t gcm_mech;
1513 1471 int rv;
1514 1472
1515 1473 if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
1516 1474 != CRYPTO_SUCCESS)
1517 1475 return (rv);
1518 1476
1519 1477 gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
1520 1478 gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
1521 1479 gcm_mech.cm_param = (char *)&gcm_params;
1522 1480
1523 1481 return (aes_decrypt_atomic(provider, session_id, &gcm_mech,
1524 1482 key, mac, &null_crypto_data, template, req));
1525 1483 }
↓ open down ↓ |
67 lines elided |
↑ open up ↑ |
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX