Lines Matching refs:u_ctx

764 struct uld_ctx *u_ctx = ULD_CTX(ctx); in create_wreq() local
770 qid = u_ctx->lldi.rxq_ids[rxqidx]; in create_wreq()
771 fid = u_ctx->lldi.rxq_ids[0]; in create_wreq()
774 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[portno]); in create_wreq()
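In create_wreq() the ULD context is the handle to the cxgb4 lower-level driver (LLD) state: the response-queue id for this request and the FID both come from lldi.rxq_ids, and the rx channel is derived from the port's net_device via cxgb4_port_e2cchan(). A minimal sketch of that selection, assuming the caller already knows rxqidx and portno; the helper name is illustrative, not the driver's, and the chcr/cxgb4 headers ("cxgb4_uld.h", "chcr_core.h") are assumed.

    /* Sketch only: queue/channel selection from struct cxgb4_lld_info
     * (lldi is embedded in struct uld_ctx).
     */
    static void chcr_pick_queues(const struct cxgb4_lld_info *lldi,
                                 unsigned int rxqidx, unsigned int portno,
                                 unsigned short *qid, unsigned short *fid,
                                 unsigned int *rx_channel_id)
    {
            *qid = lldi->rxq_ids[rxqidx];   /* rx (response) queue for this request */
            *fid = lldi->rxq_ids[0];        /* FID always taken from the first rx queue, as above */
            *rx_channel_id = cxgb4_port_e2cchan(lldi->ports[portno]); /* port -> channel */
    }
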
802 struct uld_ctx *u_ctx = ULD_CTX(ctx); in create_cipher_wr() local
819 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in create_cipher_wr()
1163 struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm)); in chcr_handle_cipher_resp() local
1209 wrparam.qid = u_ctx->lldi.rxq_ids[reqctx->rxqidx]; in chcr_handle_cipher_resp()
1218 skb->dev = u_ctx->lldi.ports[0]; in chcr_handle_cipher_resp()
1369 struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm)); in chcr_aes_encrypt() local
1381 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_aes_encrypt()
1388 err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], in chcr_aes_encrypt()
1392 skb->dev = u_ctx->lldi.ports[0]; in chcr_aes_encrypt()
1411 struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm)); in chcr_aes_decrypt() local
1427 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_aes_decrypt()
1431 err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], in chcr_aes_decrypt()
1435 skb->dev = u_ctx->lldi.ports[0]; in chcr_aes_decrypt()
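chcr_aes_encrypt() and chcr_aes_decrypt() share the same submission shape: check the crypto queue on port 0 for congestion, build the work request against the rx queue picked for this request, then hand the skb to the LLD. A hedged sketch of that control flow; build_cipher_wr() is a hypothetical stand-in for the driver's process_cipher()/create_cipher_wr() pair, and the backlog-flag handling is an assumption about the caller's policy rather than something shown in the listing.

    /* Sketch: congestion check and submission through the first cxgb4 port.
     * cxgb4_is_crypto_q_full(), set_wr_txq() and cxgb4_crypto_send() are real
     * cxgb4 ULD interfaces; build_cipher_wr() is a placeholder.
     */
    static int chcr_submit_cipher(struct uld_ctx *u_ctx,
                                  struct skcipher_request *req,
                                  unsigned int rxqidx, unsigned int txqidx)
    {
            struct sk_buff *skb;

            if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], txqidx)) &&
                !(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
                    return -ENOSPC;                 /* let the caller back off */

            skb = build_cipher_wr(req, u_ctx->lldi.rxq_ids[rxqidx]);
            if (IS_ERR(skb))
                    return PTR_ERR(skb);

            skb->dev = u_ctx->lldi.ports[0];        /* submit via the first port */
            set_wr_txq(skb, CPL_PRIORITY_DATA, txqidx);
            cxgb4_crypto_send(skb->dev, skb);
            return -EINPROGRESS;
    }
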
1442 struct uld_ctx *u_ctx = NULL; in chcr_device_init() local
1447 u_ctx = assign_chcr_device(); in chcr_device_init()
1448 if (!u_ctx) { in chcr_device_init()
1453 ctx->dev = &u_ctx->dev; in chcr_device_init()
1454 ntxq = u_ctx->lldi.ntxq; in chcr_device_init()
1455 rxq_perchan = u_ctx->lldi.nrxq / u_ctx->lldi.nchan; in chcr_device_init()
1456 txq_perchan = ntxq / u_ctx->lldi.nchan; in chcr_device_init()
1458 ctx->nrxq = u_ctx->lldi.nrxq; in chcr_device_init()
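chcr_device_init() binds the tfm context to a device returned by assign_chcr_device() and then splits the LLD's queues evenly across its channels; those per-channel counts are what later turn a channel number into a concrete rxqidx/txqidx. The arithmetic, sketched with the cxgb4_lld_info fields shown above:

    /* Sketch: per-channel queue split as in chcr_device_init().
     * e.g. nrxq = 8, ntxq = 8, nchan = 2  ->  4 rx and 4 tx queues per channel.
     */
    static void chcr_queue_split(const struct cxgb4_lld_info *lldi,
                                 unsigned int *rxq_perchan,
                                 unsigned int *txq_perchan)
    {
            *rxq_perchan = lldi->nrxq / lldi->nchan;
            *txq_perchan = lldi->ntxq / lldi->nchan;
    }
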
1568 struct uld_ctx *u_ctx = ULD_CTX(ctx); in create_hash_wr() local
1579 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in create_hash_wr()
1630 dma_map_single(&u_ctx->lldi.pdev->dev, req_ctx->reqbfr, in create_hash_wr()
1632 if (dma_mapping_error(&u_ctx->lldi.pdev->dev, in create_hash_wr()
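create_hash_wr() also maps the request's buffered partial block (req_ctx->reqbfr) for DMA through the LLD's PCI device and fails the work request if the mapping does not stick. The standard DMA-API pattern behind those two lines, sketched with placeholder buffer/length names:

    /* Sketch: mapping a driver-owned buffer through u_ctx->lldi.pdev,
     * as create_hash_wr() does with the buffered partial block.
     * Assumes <linux/dma-mapping.h> and <linux/pci.h>.
     */
    static int chcr_map_partial_block(struct pci_dev *pdev, void *buf,
                                      size_t len, dma_addr_t *dma)
    {
            *dma = dma_map_single(&pdev->dev, buf, len, DMA_TO_DEVICE);
            if (dma_mapping_error(&pdev->dev, *dma))
                    return -ENOMEM;         /* caller frees the skb and errors out */
            return 0;
    }
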
1660 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm)); in chcr_ahash_update() local
1692 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_ahash_update()
1700 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_update()
1737 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_update()
1742 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_update()
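chcr_ahash_update() adds a second mapping step: the source scatterlist is mapped through the same pdev before the work request is built, and unmapped again on any error past that point (line 1742). A hedged sketch of that map-before-build / unmap-on-error ordering; the driver's chcr_hash_dma_map()/chcr_hash_dma_unmap() helpers essentially wrap dma_map_sg()/dma_unmap_sg() on req->src, and build_hash_wr() below is a placeholder for create_hash_wr().

    /* Sketch: congestion check, map the source SG via the LLD's pdev,
     * build and send the WR, unmap on failure.
     */
    static int chcr_hash_update_sketch(struct uld_ctx *u_ctx,
                                       struct ahash_request *req,
                                       unsigned int txqidx)
    {
            struct device *dev = &u_ctx->lldi.pdev->dev;
            struct sk_buff *skb;
            int nents = sg_nents(req->src);
            int error;

            if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], txqidx)))
                    return -ENOSPC;

            if (!dma_map_sg(dev, req->src, nents, DMA_TO_DEVICE))
                    return -ENOMEM;

            skb = build_hash_wr(req);               /* placeholder for create_hash_wr() */
            if (IS_ERR(skb)) {
                    error = PTR_ERR(skb);
                    goto unmap;
            }

            skb->dev = u_ctx->lldi.ports[0];
            cxgb4_crypto_send(skb->dev, skb);
            return -EINPROGRESS;

    unmap:
            dma_unmap_sg(dev, req->src, nents, DMA_TO_DEVICE);
            return error;
    }
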
1765 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm)); in chcr_ahash_final() local
1819 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_final()
1833 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm)); in chcr_ahash_finup() local
1851 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_ahash_finup()
1858 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_finup()
1912 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_finup()
1917 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_finup()
1931 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm)); in chcr_ahash_digest() local
1954 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_ahash_digest()
1962 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_digest()
2013 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_digest()
2018 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_digest()
2030 struct uld_ctx *u_ctx = ULD_CTX(ctx); in chcr_ahash_continue() local
2081 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_continue()
2097 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(tfm)); in chcr_handle_ahash_resp() local
2110 dma_unmap_single(&u_ctx->lldi.pdev->dev, hctx_wr->dma_addr, in chcr_handle_ahash_resp()
2137 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req); in chcr_handle_ahash_resp()
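On the completion side, chcr_handle_ahash_resp() undoes both mappings: the single mapping of the buffered partial block and, once the request is fully processed, the source scatterlist. Sketched, with dma_addr/dma_len corresponding to the hctx_wr fields in the listing:

    /* Sketch: completion-side unmapping as in chcr_handle_ahash_resp(). */
    static void chcr_hash_resp_unmap(struct uld_ctx *u_ctx, dma_addr_t dma_addr,
                                     size_t dma_len, struct scatterlist *src,
                                     int nents)
    {
            struct device *dev = &u_ctx->lldi.pdev->dev;

            if (dma_addr)
                    dma_unmap_single(dev, dma_addr, dma_len, DMA_TO_DEVICE);
            if (src)
                    dma_unmap_sg(dev, src, nents, DMA_TO_DEVICE);
    }
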
2375 struct uld_ctx *u_ctx = ULD_CTX(a_ctx(tfm)); in chcr_aead_common_exit() local
2377 chcr_aead_dma_unmap(&u_ctx->lldi.pdev->dev, req, reqctx->op); in chcr_aead_common_exit()
2446 struct uld_ctx *u_ctx = ULD_CTX(ctx); in create_authenc_wr() local
2466 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in create_authenc_wr()
2720 struct uld_ctx *u_ctx = ULD_CTX(ctx); in chcr_add_aead_dst_ent() local
2724 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in chcr_add_aead_dst_ent()
2764 struct uld_ctx *u_ctx = ULD_CTX(ctx); in chcr_add_cipher_dst_ent() local
2768 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in chcr_add_cipher_dst_ent()
2972 struct uld_ctx *u_ctx = ULD_CTX(ctx); in fill_sec_cpl_for_aead() local
2982 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in fill_sec_cpl_for_aead()
3144 struct uld_ctx *u_ctx = ULD_CTX(ctx); in create_gcm_wr() local
3161 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in create_gcm_wr()
3749 struct uld_ctx *u_ctx = ULD_CTX(ctx); in chcr_aead_op() local
3766 if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_aead_op()
3781 skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], size); in chcr_aead_op()
3788 skb->dev = u_ctx->lldi.ports[0]; in chcr_aead_op()
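chcr_aead_op() is the generic version of the same submission path, parameterized by a create_wr_fn callback (create_authenc_wr(), create_gcm_wr(), ...): congestion check against port 0, WR construction against lldi.rxq_ids[rxqidx], then submission with skb->dev set to port 0. A hedged sketch; the callback typedef is illustrative rather than the driver's, and the backlog-flag check is an assumption about policy.

    /* Sketch: generic AEAD submission as implied by the chcr_aead_op() lines.
     * The typedef mirrors the listing's create_wr_fn(req, qid, size) usage.
     */
    typedef struct sk_buff *(*chcr_create_wr_t)(struct aead_request *req,
                                                unsigned short qid, int size);

    static int chcr_aead_op_sketch(struct uld_ctx *u_ctx, struct aead_request *req,
                                   unsigned int rxqidx, unsigned int txqidx,
                                   int size, chcr_create_wr_t create_wr_fn)
    {
            struct sk_buff *skb;

            if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], txqidx) &&
                !(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
                    return -ENOSPC;

            skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[rxqidx], size);
            if (IS_ERR_OR_NULL(skb))
                    return PTR_ERR_OR_ZERO(skb) ?: -ENOMEM;

            skb->dev = u_ctx->lldi.ports[0];
            set_wr_txq(skb, CPL_PRIORITY_DATA, txqidx);
            cxgb4_crypto_send(skb->dev, skb);
            return -EINPROGRESS;
    }
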