Lines Matching +full:inline-crypto-engine

/*-
 * Copyright (c) 2017-2019 Chelsio Communications, Inc.
 */

#include "crypto/t4_crypto.h"
/*
 * Crypto operations use a key context to store cipher keys and
 * partial hash digests.  They can either be passed inline as part of
 * a work request using crypto or they can be stored in card RAM.  For
 * the latter case, work requests must replace the inline key context
 * with a request to read the context from card RAM.
 *
 * The format of a key context:
 *
 * +-------------------------------+
 * | key context header            |
 * +-------------------------------+
 * | AES key                       |  ----- For requests with AES
 * +-------------------------------+
 * | Hash state                    |  ----- For hash-only requests
 * +-------------------------------+ -
 * | IPAD (16-byte aligned)        |  \
 * +-------------------------------+   +---- For requests with HMAC
 * | OPAD (16-byte aligned)        |  /
 * +-------------------------------+ -
 * | GMAC H                        |  ----- For AES-GCM
 * +-------------------------------+ -
 */
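/*
 * A sketch of the layout above as a C struct.  Everything here is
 * illustrative: the member names and the 16-byte header size are
 * assumptions, and the driver's real definitions live in
 * crypto/t4_crypto.h.  Sizes are worst-case (AES-256, SHA-512).
 */
#include <sys/types.h>

struct example_key_ctx {
	uint8_t	hdr[16];			/* key context header */
	uint8_t	aes_key[32];			/* cipher key, if any */
	union {
		uint8_t	hash_state[64];		/* hash-only requests */
		struct {
			uint8_t	ipad[64];	/* 16-byte-aligned partial hash */
			uint8_t	opad[64];
		} hmac;				/* HMAC requests */
		uint8_t	gmac_h[16];		/* AES-GCM: H = E_K(0^128) */
	} u;
};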
/*
 * Generate the initial GMAC hash state for an AES-GCM key.
 */
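/*
 * The GMAC hash state ("H") is simply the AES encryption of an
 * all-zero block under the session key.  A minimal sketch using
 * FreeBSD's reference Rijndael API (sys/crypto/rijndael);
 * example_gmac_h is a hypothetical name, not the driver's.
 */
#include <sys/types.h>
#include <crypto/rijndael/rijndael.h>

static void
example_gmac_h(const uint8_t *key, int klen_bytes, uint8_t ghash[16])
{
	uint32_t keysched[4 * (RIJNDAEL_MAXNR + 1)];
	static const uint8_t zeroes[16];
	int rounds;

	rounds = rijndaelKeySetupEnc(keysched, key, klen_bytes * 8);
	rijndaelEncrypt(keysched, rounds, zeroes, ghash);
}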
/*
 * From t4_copy_partial_hash(): the per-algorithm stores that swap the
 * raw hash state words to big-endian for the hardware.
 */
	u32[i] = htobe32(auth_ctx->sha1ctx.h.b32[i]);	/* SHA-1 */
	u32[i] = htobe32(auth_ctx->sha224ctx.state[i]);	/* SHA-224 */
	u32[i] = htobe32(auth_ctx->sha256ctx.state[i]);	/* SHA-256 */
	u64[i] = htobe64(auth_ctx->sha384ctx.state[i]);	/* SHA-384 */
	u64[i] = htobe64(auth_ctx->sha512ctx.state[i]);	/* SHA-512 */
/* From t4_init_hmac_digest(): store the ipad then the opad partial hash. */
	t4_copy_partial_hash(axf->type, &auth_ctx, dst);	/* ipad state */
	/* ... advance dst to the 16-byte-aligned opad slot ... */
	t4_copy_partial_hash(axf->type, &auth_ctx, dst);	/* opad state */
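/*
 * A sketch of the ipad/opad precomputation behind those two calls,
 * using SHA-256 and FreeBSD's sha2 API; example_hmac_partial is a
 * hypothetical name, and the real driver works generically over an
 * auth_hash descriptor.  HMAC pads the key to one block, XORs it with
 * 0x36 (ipad) or 0x5c (opad), and runs a single compression; the two
 * intermediate states are what the key context stores.
 */
#include <sys/types.h>
#include <string.h>
#include <crypto/sha2/sha256.h>

static void
example_hmac_partial(const uint8_t *key, size_t klen,
    SHA256_CTX *ipad_ctx, SHA256_CTX *opad_ctx)
{
	uint8_t block[SHA256_BLOCK_LENGTH];
	u_int i;

	/* Assumes klen <= block size; HMAC hashes longer keys first. */
	memset(block, 0, sizeof(block));
	memcpy(block, key, klen);
	for (i = 0; i < sizeof(block); i++)
		block[i] ^= 0x36;		/* ipad */
	SHA256_Init(ipad_ctx);
	SHA256_Update(ipad_ctx, block, sizeof(block));

	for (i = 0; i < sizeof(block); i++)
		block[i] ^= 0x36 ^ 0x5c;	/* convert ipad to opad */
	SHA256_Init(opad_ctx);
	SHA256_Update(opad_ctx, block, sizeof(block));
}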
/*
 * NB: The crypto engine wants the words in the decryption key in reverse
 * order.
 */
/*
 * From t4_aes_getdeckey(): dkey starts one past the end of the output
 * buffer, so the final round keys of the encryption schedule are
 * stored in reverse.
 */
	*--dkey = htobe32(ek[4 * 10 + i]);	/* AES-128: round-10 keys */
	*--dkey = htobe32(ek[4 * 11 + 2 + i]);	/* AES-192: tail of round 11 */
	*--dkey = htobe32(ek[4 * 12 + i]);	/* AES-192: round-12 keys */
	*--dkey = htobe32(ek[4 * 13 + i]);	/* AES-256: round-13 keys */
	*--dkey = htobe32(ek[4 * 14 + i]);	/* AES-256: round-14 keys */
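/*
 * A minimal sketch of the AES-128 case, assuming the same reference
 * Rijndael API as the GMAC sketch above.  The equivalent-inverse
 * decryption key is built from the last round keys of the encryption
 * schedule, word-reversed and big-endian for the engine.
 * example_aes128_deckey is a hypothetical name.
 */
#include <sys/types.h>
#include <sys/endian.h>
#include <crypto/rijndael/rijndael.h>

static void
example_aes128_deckey(uint32_t dec_key[4], const uint8_t *enc_key)
{
	uint32_t ek[4 * (RIJNDAEL_MAXNR + 1)];
	uint32_t *dkey = dec_key + 4;		/* one past the end */
	int i;

	(void)rijndaelKeySetupEnc(ek, enc_key, 128);
	for (i = 0; i < 4; i++)
		*--dkey = htobe32(ek[4 * 10 + i]);	/* round 10, reversed */
}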
/*
 * - keyid management
 * - request to program key?
 */
/* From t4_tls_key_info_size(): */
	    tls->params.cipher_key_len;
	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16) {
		switch (tls->params.auth_algorithm) {

/* From t4_tls_proto_ver(): */
	if (tls->params.tls_vminor == TLS_MINOR_VER_ONE)

/* From t4_tls_cipher_mode(): */
	switch (tls->params.cipher_algorithm) {

/* From t4_tls_auth_mode(): */
	switch (tls->params.cipher_algorithm) {
		switch (tls->params.auth_algorithm) {

/* From t4_tls_hmac_ctrl(): */
	switch (tls->params.cipher_algorithm) {

/* From tls_cipher_key_size(): */
	switch (tls->params.cipher_key_len) {

/* From tls_mac_key_size(): */
	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16)
		switch (tls->params.auth_algorithm) {
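/*
 * A sketch of the MAC-key sizing these helpers implement, assuming it
 * follows the standard TLS cipher suites: GCM is an AEAD with no
 * separate MAC key, while CBC suites carry an HMAC key sized by the
 * digest.  example_tls_mac_key_size is hypothetical; the driver's real
 * tls_mac_key_size() may differ in details.
 */
#include <sys/types.h>
#include <sys/ktls.h>
#include <opencrypto/cryptodev.h>

static u_int
example_tls_mac_key_size(const struct ktls_session *tls)
{
	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16)
		return (0);		/* AEAD: no separate MAC key */
	switch (tls->params.auth_algorithm) {
	case CRYPTO_SHA1_HMAC:
		return (20);		/* SHA-1 digest */
	case CRYPTO_SHA2_256_HMAC:
		return (32);		/* SHA-256 digest */
	case CRYPTO_SHA2_384_HMAC:
		return (48);		/* SHA-384 digest */
	default:
		return (0);
	}
}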
/* From t4_tls_key_ctx(): fill out the transmit key context header. */
	kctx->u.txhdr.ctxlen = t4_tls_key_info_size(tls) / 16;
	kctx->u.txhdr.dualck_to_txvalid = /* ... */
	if (tls->params.cipher_algorithm == CRYPTO_AES_CBC)
		kctx->u.txhdr.dualck_to_txvalid |= /* ... */
	kctx->u.txhdr.dualck_to_txvalid =
	    htobe16(kctx->u.txhdr.dualck_to_txvalid);
	/* Fill out the receive key context header. */
	kctx->u.rxhdr.flitcnt_hmacctrl = /* ... */
	kctx->u.rxhdr.protover_ciphmode = /* ... */
	kctx->u.rxhdr.authmode_to_rxvalid = /* ... */
	kctx->u.rxhdr.ivpresent_to_rxmk_size = /* ... */
	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16) {
		kctx->u.rxhdr.ivinsert_to_authinsrt = /* ... */
	/* ...else, for CBC/HMAC sessions: */
		kctx->u.rxhdr.authmode_to_rxvalid |= /* ... */
		kctx->u.rxhdr.ivpresent_to_rxmk_size |= /* ... */
		kctx->u.rxhdr.ivinsert_to_authinsrt = /* ... */
	/* RX AES-CBC keys are stored as decryption keys; others as-is. */
	if (direction == KTLS_RX &&
	    tls->params.cipher_algorithm == CRYPTO_AES_CBC)
		t4_aes_getdeckey(kctx->keys.edkey, tls->params.cipher_key,
		    tls->params.cipher_key_len * 8);
	else
		memcpy(kctx->keys.edkey, tls->params.cipher_key,
		    tls->params.cipher_key_len);
	/* Hash material is stored immediately after the cipher key. */
	hash = kctx->keys.edkey + tls->params.cipher_key_len;
	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16) {
		memcpy(kctx->u.txhdr.txsalt, tls->params.iv, SALT_SIZE);
		t4_init_gmac_hash(tls->params.cipher_key,
		    tls->params.cipher_key_len, hash);
	} else {
		switch (tls->params.auth_algorithm) {
		/* ... select the auth_hash descriptor (axf) ... */
		}
		t4_init_hmac_digest(axf, mac_key_size, tls->params.auth_key,
		    tls->params.auth_key_len, hash);
	}
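/*
 * For concreteness, a worked example of the key-material sizing for
 * AES-128-CBC with HMAC-SHA1, assuming the 16-byte alignment shown in
 * the layout diagram above; example_cbc_sha1_key_material is
 * illustrative only, not a driver function.
 */
#include <sys/param.h>

static u_int
example_cbc_sha1_key_material(void)
{
	u_int len;

	len = 16;			/* AES-128 cipher key */
	len += roundup2(20, 16);	/* ipad partial SHA-1 -> 32 bytes */
	len += roundup2(20, 16);	/* opad partial SHA-1 -> 32 bytes */
	return (len);			/* 80 bytes, plus the context header */
}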
/* From t4_alloc_tls_keyid(): carve a key context slot out of card RAM. */
	if (sc->vres.key.size == 0)
		return (-1);
	if (vmem_alloc(sc->key_map, TLS_KEY_CONTEXT_SZ, M_NOWAIT | M_FIRSTFIT,
	    &keyid) != 0)
		return (-1);

/* From t4_free_tls_keyid(): */
	vmem_free(sc->key_map, keyid, TLS_KEY_CONTEXT_SZ);
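/*
 * sc->key_map above is a vmem(9) arena covering the adapter's key RAM
 * region.  A sketch of creating such an arena; the arena name and the
 * use of TLS_KEY_CONTEXT_SZ as the quantum are assumptions, not
 * necessarily the driver's actual choices.
 */
#include <sys/param.h>
#include <sys/malloc.h>
#include <sys/vmem.h>

static vmem_t *
example_create_key_map(vmem_addr_t start, vmem_size_t size)
{
	/* Quantum of one key context keeps all allocations aligned. */
	return (vmem_create("example key map", start, size,
	    TLS_KEY_CONTEXT_SZ, 0, M_NOWAIT));
}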
/*
 * From t4_write_tlskey_wr(): build the FW_ULPTX_WR work request that
 * writes a key context into card RAM at keyid.
 */
	kwr->wr_hi = htobe32(V_FW_WR_OP(FW_ULPTX_WR) | F_FW_WR_ATOMIC | flags);
	kwr->wr_mid = htobe32(V_FW_WR_LEN16(DIV_ROUND_UP(TLS_KEY_WR_SZ, 16)) |
	    V_FW_WR_FLOWID(tid));
	kwr->protocol = t4_tls_proto_ver(tls);
	kwr->mfs = htobe16(tls->params.max_frame_len);
	kwr->reneg_to_write_rx = V_KEY_GET_LOC(direction == KTLS_TX ?
	    KEY_WRITE_TX : KEY_WRITE_RX);

	/* Master command: a ULP_TX memory write covering one key context. */
	kwr->cmd = htobe32(V_ULPTX_CMD(ULP_TX_MEM_WRITE) | /* ... */
	kwr->dlen = htobe32(V_ULP_MEMIO_DATA_LEN(TLS_KEY_CONTEXT_SZ >> 5));
	kwr->len16 = htobe32((tid << 8) |
	    DIV_ROUND_UP(TLS_KEY_WR_SZ - sizeof(struct work_request_hdr), 16));
	kwr->kaddr = htobe32(V_ULP_MEMIO_ADDR(keyid >> 5));

	/* Sub-command: the key context itself follows as immediate data. */
	kwr->sc_more = htobe32(V_ULPTX_CMD(ULP_TX_SC_IMM));
	kwr->sc_len = htobe32(TLS_KEY_CONTEXT_SZ);
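/*
 * Putting it together: a hypothetical sequence for programming a TLS
 * key into card RAM.  example_program_key is illustrative; the
 * parameter order of t4_tls_key_ctx() and t4_write_tlskey_wr() is
 * assumed from the fragments above, and queueing the work request
 * (with the key context as its immediate data) is elided.
 */
static int
example_program_key(struct adapter *sc, struct ktls_session *tls,
    int direction, int tid, struct tls_key_req *kwr,
    struct tls_keyctx *kctx)
{
	int keyid;

	keyid = t4_alloc_tls_keyid(sc);		/* slot in card key RAM */
	if (keyid < 0)
		return (ENOMEM);
	t4_tls_key_ctx(tls, direction, kctx);	/* fill the key context */
	t4_write_tlskey_wr(tls, direction, tid, 0, keyid, kwr);
	/* ...enqueue kwr followed by *kctx as immediate data... */
	return (0);
}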