Lines matching refs:iv_len. Each hit below shows the source line number, the matching code, and the enclosing function ("local" marks a local variable declaration, "member" a struct field).

160 	unsigned int iv_len;  member
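
The member hit at line 160 is the per-session cipher IV length: it is recorded once at session creation (line 2491, at the end of this listing) and read by each of the four request builders below, which widen it to the hardware's fixed 16-byte IV slot. A minimal sketch of that state, with hypothetical names:

#define AES_BLOCK_LEN	16	/* the hardware IV slot is one AES block */

struct sketch_cipher_state {
	unsigned int iv_len;	/* set from csp->csp_ivlen at newsession */
	/* key material, cipher mode, etc. elided */
};
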
627 u_int imm_len, iv_len; in ccr_cipher() local
667 iv_len = AES_BLOCK_LEN; in ccr_cipher()
669 iv_len = s->cipher.iv_len; in ccr_cipher()
671 if (ccr_use_imm_data(transhdr_len, crp->crp_payload_length + iv_len)) { in ccr_cipher()
686 wr_len = roundup2(transhdr_len, 16) + iv_len + in ccr_cipher()
701 memset(iv + s->cipher.iv_len, 0, iv_len - s->cipher.iv_len); in ccr_cipher()
713 crwr->sec_cpl.pldlen = htobe32(iv_len + crp->crp_payload_length); in ccr_cipher()
716 V_CPL_TX_SEC_PDU_CIPHERSTART(iv_len + 1) | in ccr_cipher()
729 V_SCMD_IV_SIZE(iv_len / 2) | in ccr_cipher()
766 memcpy(dst, iv, iv_len); in ccr_cipher()
767 dst += iv_len; in ccr_cipher()
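
The pattern in ccr_cipher() above recurs in all four handlers: pick an effective iv_len (the full AES block or the session length, lines 667 and 669), zero-pad the caller's IV out to it (line 701), count it in both the PDU length (line 713) and the work-request length (line 686), and copy it into the request ahead of the payload (lines 766 and 767). Note the 1-based hardware offset at line 716, where the ciphertext starts at iv_len + 1, and the SCMD IV-size field at line 729, programmed as iv_len / 2 and so apparently in 2-byte units. A compilable sketch of the padding and sizing, under those assumptions and with hypothetical names:

#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define AES_BLOCK_LEN	16
/* FreeBSD's power-of-two round-up macro. */
#define roundup2(x, y)	(((x) + ((y) - 1)) & ~((y) - 1))

/* Zero-pad a short session IV to the hardware slot and emit it. */
static size_t
sketch_place_iv(uint8_t *dst, const uint8_t *session_iv,
    unsigned int session_iv_len)
{
	uint8_t iv[AES_BLOCK_LEN];
	const unsigned int iv_len = AES_BLOCK_LEN;

	memcpy(iv, session_iv, session_iv_len);
	memset(iv + session_iv_len, 0, iv_len - session_iv_len); /* line 701 */
	memcpy(dst, iv, iv_len);				  /* lines 766-767 */
	return (iv_len);
}

/*
 * Work-request sizing as on lines 686, 987, 1276 and 1623: the transport
 * header and immediate data are rounded up to 16 bytes, the IV is not.
 */
static size_t
sketch_wr_len(size_t transhdr_len, unsigned int iv_len, size_t imm_len)
{
	return (roundup2(transhdr_len, 16) + iv_len + roundup2(imm_len, 16));
}
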
820 u_int hash_size_in_response, imm_len, iopad_size, iv_len; in ccr_eta() local
841 iv_len = AES_BLOCK_LEN; in ccr_eta()
843 iv_len = s->cipher.iv_len; in ccr_eta()
845 if (crp->crp_aad_length + iv_len > MAX_AAD_LEN) in ccr_eta()
865 if (iv_len + crp->crp_aad_length + crp->crp_payload_length + in ccr_eta()
869 if (iv_len + crp->crp_aad_length + crp->crp_payload_length > in ccr_eta()
875 iv_len + crp->crp_aad_length); in ccr_eta()
937 if (ccr_use_imm_data(transhdr_len, iv_len + input_len)) { in ccr_eta()
971 aad_start = iv_len + 1; in ccr_eta()
977 cipher_start = iv_len + crp->crp_aad_length + 1; in ccr_eta()
987 wr_len = roundup2(transhdr_len, 16) + iv_len + roundup2(imm_len, 16) + in ccr_eta()
1002 memset(iv + s->cipher.iv_len, 0, iv_len - s->cipher.iv_len); in ccr_eta()
1014 crwr->sec_cpl.pldlen = htobe32(iv_len + input_len); in ccr_eta()
1037 V_SCMD_IV_SIZE(iv_len / 2) | in ccr_eta()
1077 memcpy(dst, iv, iv_len); in ccr_eta()
1078 dst += iv_len; in ccr_eta()
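
ccr_eta() adds AAD between the IV and the payload, so the DMA input is laid out as [IV][AAD][payload] and the hardware start offsets are 1-based (lines 971 and 977). It also bounds the combined IV and AAD against MAX_AAD_LEN (line 845). A small sketch of the offset math, with illustrative names:

struct sketch_offsets {
	unsigned int aad_start;		/* 1-based offset of the AAD */
	unsigned int cipher_start;	/* 1-based offset of the payload */
};

static struct sketch_offsets
sketch_eta_offsets(unsigned int iv_len, unsigned int aad_len)
{
	struct sketch_offsets o;

	o.aad_start = iv_len + 1;		/* line 971 */
	o.cipher_start = iv_len + aad_len + 1;	/* line 977 */
	return (o);
}
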
1123 u_int iv_len, kctx_len, op_type, transhdr_len, wr_len; in ccr_gcm() local
1150 iv_len = AES_BLOCK_LEN; in ccr_gcm()
1168 if (iv_len + crp->crp_aad_length + crp->crp_payload_length + in ccr_gcm()
1172 if (iv_len + crp->crp_aad_length + crp->crp_payload_length > in ccr_gcm()
1177 error = sglist_append_sglist(s->sg_dsgl, sc->sg_iv_aad, 0, iv_len + in ccr_gcm()
1227 if (ccr_use_imm_data(transhdr_len, iv_len + input_len)) { in ccr_gcm()
1260 aad_start = iv_len + 1; in ccr_gcm()
1266 cipher_start = iv_len + crp->crp_aad_length + 1; in ccr_gcm()
1276 wr_len = roundup2(transhdr_len, 16) + iv_len + roundup2(imm_len, 16) + in ccr_gcm()
1301 crwr->sec_cpl.pldlen = htobe32(iv_len + input_len); in ccr_gcm()
1333 V_SCMD_IV_SIZE(iv_len / 2) | in ccr_gcm()
1348 memcpy(dst, iv, iv_len); in ccr_gcm()
1349 dst += iv_len; in ccr_gcm()
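
ccr_gcm() pins iv_len to the full AES block (line 1150) even though the usual GCM nonce is 12 bytes. The standard construction from NIST SP 800-38D forms the initial counter block J0 as nonce || 0x00000001 when the nonce is 96 bits; the sketch below shows that layout. Whether the driver or the hardware appends the counter is not visible in this listing, so treat this as illustrative only:

#include <stdint.h>
#include <string.h>

#define AES_BLOCK_LEN	16
#define GCM_NONCE_LEN	12

static void
sketch_gcm_j0(uint8_t iv[AES_BLOCK_LEN], const uint8_t nonce[GCM_NONCE_LEN])
{
	memcpy(iv, nonce, GCM_NONCE_LEN);
	/* 32-bit big-endian block counter, starting at 1. */
	iv[12] = 0;
	iv[13] = 0;
	iv[14] = 0;
	iv[15] = 1;
}
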
1453 u_int iv_len, kctx_len, op_type, transhdr_len, wr_len; in ccr_ccm() local
1497 iv_len = AES_BLOCK_LEN; in ccr_ccm()
1499 if (iv_len + aad_len > MAX_AAD_LEN) in ccr_ccm()
1518 if (iv_len + aad_len + crp->crp_payload_length + in ccr_ccm()
1522 if (iv_len + aad_len + crp->crp_payload_length > in ccr_ccm()
1527 error = sglist_append_sglist(s->sg_dsgl, sc->sg_iv_aad, 0, iv_len + in ccr_ccm()
1577 if (ccr_use_imm_data(transhdr_len, iv_len + input_len)) { in ccr_ccm()
1611 aad_start = iv_len + 1; in ccr_ccm()
1623 wr_len = roundup2(transhdr_len, 16) + iv_len + roundup2(imm_len, 16) + in ccr_ccm()
1639 memset(iv, 0, iv_len); in ccr_ccm()
1653 crwr->sec_cpl.pldlen = htobe32(iv_len + input_len); in ccr_ccm()
1680 V_SCMD_IV_SIZE(iv_len / 2) | in ccr_ccm()
1695 memcpy(dst, iv, iv_len); in ccr_ccm()
1696 dst += iv_len; in ccr_ccm()
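
ccr_ccm() is the only handler that clears the whole IV slot up front (line 1639) instead of zero-padding a caller-supplied IV. That is consistent with building CCM's initial counter block A0 in place, per RFC 3610: a flags byte holding L - 1 (where L = 15 - nonce_len is the width of the length field), then the nonce, then a counter starting at zero. The construction below is the standard one, not necessarily the driver's exact code:

#include <stdint.h>
#include <string.h>

#define AES_BLOCK_LEN	16

static void
sketch_ccm_a0(uint8_t iv[AES_BLOCK_LEN], const uint8_t *nonce,
    unsigned int nonce_len)
{
	unsigned int L = 15 - nonce_len;	/* length-field width, 2..8 */

	memset(iv, 0, AES_BLOCK_LEN);		/* as on line 1639 */
	iv[0] = (uint8_t)(L - 1);		/* A0 flags byte */
	memcpy(iv + 1, nonce, nonce_len);
	/* The trailing L bytes are the block counter, left at zero. */
}
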
2491 s->cipher.iv_len = csp->csp_ivlen; in ccr_newsession()
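
Line 2491 closes the loop: at session creation the driver caches the OCF-supplied IV length (csp->csp_ivlen from struct crypto_session_params) into the member hit at line 160, where the request builders above pick it up. A self-contained sketch with stand-in types:

/* Stand-in for the OCF crypto_session_params fields used here. */
struct sketch_session_params {
	int csp_ivlen;
};

struct sketch_session {
	struct {
		unsigned int iv_len;
	} cipher;
};

static void
sketch_newsession(struct sketch_session *s,
    const struct sketch_session_params *csp)
{
	/* Cache the negotiated IV length, as on line 2491. */
	s->cipher.iv_len = (unsigned int)csp->csp_ivlen;
}
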