1*9207f9d2SChandrakanth patil /* 2*9207f9d2SChandrakanth patil * Copyright (c) 2024, Broadcom. All rights reserved. The term 3*9207f9d2SChandrakanth patil * Broadcom refers to Broadcom Limited and/or its subsidiaries. 4*9207f9d2SChandrakanth patil * 5*9207f9d2SChandrakanth patil * Redistribution and use in source and binary forms, with or without 6*9207f9d2SChandrakanth patil * modification, are permitted provided that the following conditions 7*9207f9d2SChandrakanth patil * are met: 8*9207f9d2SChandrakanth patil * 9*9207f9d2SChandrakanth patil * 1. Redistributions of source code must retain the above copyright 10*9207f9d2SChandrakanth patil * notice, this list of conditions and the following disclaimer. 11*9207f9d2SChandrakanth patil * 2. Redistributions in binary form must reproduce the above copyright 12*9207f9d2SChandrakanth patil * notice, this list of conditions and the following disclaimer in 13*9207f9d2SChandrakanth patil * the documentation and/or other materials provided with the 14*9207f9d2SChandrakanth patil * distribution. 15*9207f9d2SChandrakanth patil * 16*9207f9d2SChandrakanth patil * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' 17*9207f9d2SChandrakanth patil * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 18*9207f9d2SChandrakanth patil * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 19*9207f9d2SChandrakanth patil * PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS 20*9207f9d2SChandrakanth patil * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 21*9207f9d2SChandrakanth patil * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 22*9207f9d2SChandrakanth patil * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR 23*9207f9d2SChandrakanth patil * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 24*9207f9d2SChandrakanth patil * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE 25*9207f9d2SChandrakanth patil * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN 26*9207f9d2SChandrakanth patil * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 27*9207f9d2SChandrakanth patil * 28*9207f9d2SChandrakanth patil */ 29*9207f9d2SChandrakanth patil 30*9207f9d2SChandrakanth patil #include <sys/mman.h> 31*9207f9d2SChandrakanth patil 32*9207f9d2SChandrakanth patil #include <netinet/in.h> 33*9207f9d2SChandrakanth patil 34*9207f9d2SChandrakanth patil #include <assert.h> 35*9207f9d2SChandrakanth patil #include <errno.h> 36*9207f9d2SChandrakanth patil #include <malloc.h> 37*9207f9d2SChandrakanth patil #include <pthread.h> 38*9207f9d2SChandrakanth patil #include <signal.h> 39*9207f9d2SChandrakanth patil #include <stdio.h> 40*9207f9d2SChandrakanth patil #include <stdlib.h> 41*9207f9d2SChandrakanth patil #include <string.h> 42*9207f9d2SChandrakanth patil #include <unistd.h> 43*9207f9d2SChandrakanth patil 44*9207f9d2SChandrakanth patil #include "main.h" 45*9207f9d2SChandrakanth patil #include "verbs.h" 46*9207f9d2SChandrakanth patil 47*9207f9d2SChandrakanth patil static int ibv_to_bnxt_re_wr_opcd[11] = { 48*9207f9d2SChandrakanth patil BNXT_RE_WR_OPCD_RDMA_WRITE, 49*9207f9d2SChandrakanth patil BNXT_RE_WR_OPCD_RDMA_WRITE_IMM, 50*9207f9d2SChandrakanth patil BNXT_RE_WR_OPCD_SEND, 51*9207f9d2SChandrakanth patil BNXT_RE_WR_OPCD_SEND_IMM, 52*9207f9d2SChandrakanth patil BNXT_RE_WR_OPCD_RDMA_READ, 
53*9207f9d2SChandrakanth patil BNXT_RE_WR_OPCD_ATOMIC_CS, 54*9207f9d2SChandrakanth patil BNXT_RE_WR_OPCD_ATOMIC_FA, 55*9207f9d2SChandrakanth patil BNXT_RE_WR_OPCD_INVAL, 56*9207f9d2SChandrakanth patil BNXT_RE_WR_OPCD_INVAL, 57*9207f9d2SChandrakanth patil BNXT_RE_WR_OPCD_INVAL, 58*9207f9d2SChandrakanth patil BNXT_RE_WR_OPCD_INVAL 59*9207f9d2SChandrakanth patil }; 60*9207f9d2SChandrakanth patil 61*9207f9d2SChandrakanth patil static int ibv_wr_to_wc_opcd[11] = { 62*9207f9d2SChandrakanth patil IBV_WC_RDMA_WRITE, 63*9207f9d2SChandrakanth patil IBV_WC_RDMA_WRITE, 64*9207f9d2SChandrakanth patil IBV_WC_SEND, 65*9207f9d2SChandrakanth patil IBV_WC_SEND, 66*9207f9d2SChandrakanth patil IBV_WC_RDMA_READ, 67*9207f9d2SChandrakanth patil IBV_WC_COMP_SWAP, 68*9207f9d2SChandrakanth patil IBV_WC_FETCH_ADD, 69*9207f9d2SChandrakanth patil 0xFF, 70*9207f9d2SChandrakanth patil 0xFF, 71*9207f9d2SChandrakanth patil 0xFF, 72*9207f9d2SChandrakanth patil 0xFF 73*9207f9d2SChandrakanth patil }; 74*9207f9d2SChandrakanth patil 75*9207f9d2SChandrakanth patil static int bnxt_re_req_to_ibv_status [12] = { 76*9207f9d2SChandrakanth patil IBV_WC_SUCCESS, 77*9207f9d2SChandrakanth patil IBV_WC_BAD_RESP_ERR, 78*9207f9d2SChandrakanth patil IBV_WC_LOC_LEN_ERR, 79*9207f9d2SChandrakanth patil IBV_WC_LOC_QP_OP_ERR, 80*9207f9d2SChandrakanth patil IBV_WC_LOC_PROT_ERR, 81*9207f9d2SChandrakanth patil IBV_WC_MW_BIND_ERR, 82*9207f9d2SChandrakanth patil IBV_WC_REM_INV_REQ_ERR, 83*9207f9d2SChandrakanth patil IBV_WC_REM_ACCESS_ERR, 84*9207f9d2SChandrakanth patil IBV_WC_REM_OP_ERR, 85*9207f9d2SChandrakanth patil IBV_WC_RNR_RETRY_EXC_ERR, 86*9207f9d2SChandrakanth patil IBV_WC_RETRY_EXC_ERR, 87*9207f9d2SChandrakanth patil IBV_WC_WR_FLUSH_ERR 88*9207f9d2SChandrakanth patil }; 89*9207f9d2SChandrakanth patil 90*9207f9d2SChandrakanth patil static int bnxt_re_res_to_ibv_status [9] = { 91*9207f9d2SChandrakanth patil IBV_WC_SUCCESS, 92*9207f9d2SChandrakanth patil IBV_WC_LOC_ACCESS_ERR, 93*9207f9d2SChandrakanth patil 
IBV_WC_LOC_LEN_ERR, 94*9207f9d2SChandrakanth patil IBV_WC_LOC_PROT_ERR, 95*9207f9d2SChandrakanth patil IBV_WC_LOC_QP_OP_ERR, 96*9207f9d2SChandrakanth patil IBV_WC_MW_BIND_ERR, 97*9207f9d2SChandrakanth patil IBV_WC_REM_INV_REQ_ERR, 98*9207f9d2SChandrakanth patil IBV_WC_WR_FLUSH_ERR, 99*9207f9d2SChandrakanth patil IBV_WC_FATAL_ERR 100*9207f9d2SChandrakanth patil }; 101*9207f9d2SChandrakanth patil 102*9207f9d2SChandrakanth patil static int bnxt_re_poll_one(struct bnxt_re_cq *cq, int nwc, struct ibv_wc *wc, 103*9207f9d2SChandrakanth patil uint32_t *resize); 104*9207f9d2SChandrakanth patil 105*9207f9d2SChandrakanth patil int bnxt_single_threaded; 106*9207f9d2SChandrakanth patil int bnxt_dyn_debug; 107*9207f9d2SChandrakanth patil int bnxt_re_query_device(struct ibv_context *ibvctx, 108*9207f9d2SChandrakanth patil struct ibv_device_attr *dev_attr) 109*9207f9d2SChandrakanth patil { 110*9207f9d2SChandrakanth patil struct ibv_query_device cmd = {}; 111*9207f9d2SChandrakanth patil uint8_t fw_ver[8]; 112*9207f9d2SChandrakanth patil int status; 113*9207f9d2SChandrakanth patil 114*9207f9d2SChandrakanth patil memset(dev_attr, 0, sizeof(struct ibv_device_attr)); 115*9207f9d2SChandrakanth patil status = ibv_cmd_query_device(ibvctx, dev_attr, (uint64_t *)&fw_ver, 116*9207f9d2SChandrakanth patil &cmd, sizeof(cmd)); 117*9207f9d2SChandrakanth patil snprintf(dev_attr->fw_ver, 64, "%d.%d.%d.%d", 118*9207f9d2SChandrakanth patil fw_ver[0], fw_ver[1], fw_ver[2], fw_ver[3]); 119*9207f9d2SChandrakanth patil 120*9207f9d2SChandrakanth patil return status; 121*9207f9d2SChandrakanth patil } 122*9207f9d2SChandrakanth patil 123*9207f9d2SChandrakanth patil int bnxt_re_query_device_compat(struct ibv_context *ibvctx, 124*9207f9d2SChandrakanth patil struct ibv_device_attr *dev_attr) 125*9207f9d2SChandrakanth patil 126*9207f9d2SChandrakanth patil { 127*9207f9d2SChandrakanth patil int rc = 0; 128*9207f9d2SChandrakanth patil 129*9207f9d2SChandrakanth patil rc = bnxt_re_query_device(ibvctx, dev_attr); 
130*9207f9d2SChandrakanth patil 131*9207f9d2SChandrakanth patil return rc; 132*9207f9d2SChandrakanth patil } 133*9207f9d2SChandrakanth patil 134*9207f9d2SChandrakanth patil int bnxt_re_query_port(struct ibv_context *ibvctx, uint8_t port, 135*9207f9d2SChandrakanth patil struct ibv_port_attr *port_attr) 136*9207f9d2SChandrakanth patil { 137*9207f9d2SChandrakanth patil struct ibv_query_port cmd = {}; 138*9207f9d2SChandrakanth patil 139*9207f9d2SChandrakanth patil return ibv_cmd_query_port(ibvctx, port, port_attr, &cmd, sizeof(cmd)); 140*9207f9d2SChandrakanth patil } 141*9207f9d2SChandrakanth patil 142*9207f9d2SChandrakanth patil static inline bool bnxt_re_is_wcdpi_enabled(struct bnxt_re_context *cntx) 143*9207f9d2SChandrakanth patil { 144*9207f9d2SChandrakanth patil return cntx->comp_mask & BNXT_RE_COMP_MASK_UCNTX_WC_DPI_ENABLED; 145*9207f9d2SChandrakanth patil } 146*9207f9d2SChandrakanth patil 147*9207f9d2SChandrakanth patil static int bnxt_re_map_db_page(struct ibv_context *ibvctx, 148*9207f9d2SChandrakanth patil uint64_t dbr, uint32_t dpi, uint32_t wcdpi) 149*9207f9d2SChandrakanth patil { 150*9207f9d2SChandrakanth patil struct bnxt_re_context *cntx = to_bnxt_re_context(ibvctx); 151*9207f9d2SChandrakanth patil struct bnxt_re_dev *dev = to_bnxt_re_dev(ibvctx->device); 152*9207f9d2SChandrakanth patil 153*9207f9d2SChandrakanth patil cntx->udpi.dpindx = dpi; 154*9207f9d2SChandrakanth patil cntx->udpi.dbpage = mmap(NULL, dev->pg_size, PROT_WRITE, 155*9207f9d2SChandrakanth patil MAP_SHARED, ibvctx->cmd_fd, dbr); 156*9207f9d2SChandrakanth patil if (cntx->udpi.dbpage == MAP_FAILED) 157*9207f9d2SChandrakanth patil return -ENOMEM; 158*9207f9d2SChandrakanth patil if (wcdpi) { 159*9207f9d2SChandrakanth patil cntx->udpi.wcdbpg = mmap(NULL, dev->pg_size, PROT_WRITE, 160*9207f9d2SChandrakanth patil MAP_SHARED, ibvctx->cmd_fd, 161*9207f9d2SChandrakanth patil BNXT_RE_MAP_WC); 162*9207f9d2SChandrakanth patil if (cntx->udpi.wcdbpg == MAP_FAILED) 163*9207f9d2SChandrakanth patil return 
-ENOMEM; 164*9207f9d2SChandrakanth patil cntx->udpi.wcdpi = wcdpi; 165*9207f9d2SChandrakanth patil } 166*9207f9d2SChandrakanth patil 167*9207f9d2SChandrakanth patil return 0; 168*9207f9d2SChandrakanth patil } 169*9207f9d2SChandrakanth patil 170*9207f9d2SChandrakanth patil struct ibv_pd *bnxt_re_alloc_pd(struct ibv_context *ibvctx) 171*9207f9d2SChandrakanth patil { 172*9207f9d2SChandrakanth patil struct bnxt_re_context *cntx = to_bnxt_re_context(ibvctx); 173*9207f9d2SChandrakanth patil struct bnxt_re_pd_resp resp = {}; 174*9207f9d2SChandrakanth patil struct ibv_alloc_pd cmd = {}; 175*9207f9d2SChandrakanth patil struct bnxt_re_pd *pd; 176*9207f9d2SChandrakanth patil uint64_t dbr_map; 177*9207f9d2SChandrakanth patil 178*9207f9d2SChandrakanth patil pd = calloc(1, sizeof(*pd)); 179*9207f9d2SChandrakanth patil if (!pd) 180*9207f9d2SChandrakanth patil return NULL; 181*9207f9d2SChandrakanth patil 182*9207f9d2SChandrakanth patil if (ibv_cmd_alloc_pd(ibvctx, &pd->ibvpd, &cmd, sizeof(cmd), 183*9207f9d2SChandrakanth patil &resp.resp, sizeof(resp))) 184*9207f9d2SChandrakanth patil goto out; 185*9207f9d2SChandrakanth patil 186*9207f9d2SChandrakanth patil pd->pdid = resp.pdid; 187*9207f9d2SChandrakanth patil /* Map DB page now. 
*/ 188*9207f9d2SChandrakanth patil if (!cntx->udpi.dbpage) { 189*9207f9d2SChandrakanth patil uint32_t wcdpi = 0; 190*9207f9d2SChandrakanth patil 191*9207f9d2SChandrakanth patil if (bnxt_re_is_wcdpi_enabled(cntx) && 192*9207f9d2SChandrakanth patil resp.comp_mask & BNXT_RE_COMP_MASK_PD_HAS_WC_DPI) 193*9207f9d2SChandrakanth patil wcdpi = resp.wcdpi; 194*9207f9d2SChandrakanth patil if (bnxt_re_map_db_page(ibvctx, resp.dbr, resp.dpi, wcdpi)) 195*9207f9d2SChandrakanth patil goto fail; 196*9207f9d2SChandrakanth patil if (cntx->cctx->chip_is_gen_p5_thor2 && cntx->udpi.wcdpi) 197*9207f9d2SChandrakanth patil bnxt_re_init_pbuf_list(cntx); 198*9207f9d2SChandrakanth patil } 199*9207f9d2SChandrakanth patil if (resp.comp_mask & BNXT_RE_COMP_MASK_PD_HAS_DBR_BAR_ADDR) { 200*9207f9d2SChandrakanth patil dbr_map = resp.dbr_bar_map & 0xFFFFFFFFFFFFF000; 201*9207f9d2SChandrakanth patil cntx->bar_map = mmap(NULL, 4096, PROT_READ, 202*9207f9d2SChandrakanth patil MAP_SHARED, ibvctx->cmd_fd, dbr_map); 203*9207f9d2SChandrakanth patil if (cntx->bar_map == MAP_FAILED) 204*9207f9d2SChandrakanth patil goto fail; 205*9207f9d2SChandrakanth patil } 206*9207f9d2SChandrakanth patil 207*9207f9d2SChandrakanth patil return &pd->ibvpd; 208*9207f9d2SChandrakanth patil fail: 209*9207f9d2SChandrakanth patil ibv_cmd_dealloc_pd(&pd->ibvpd); 210*9207f9d2SChandrakanth patil out: 211*9207f9d2SChandrakanth patil free(pd); 212*9207f9d2SChandrakanth patil return NULL; 213*9207f9d2SChandrakanth patil } 214*9207f9d2SChandrakanth patil 215*9207f9d2SChandrakanth patil int bnxt_re_free_pd(struct ibv_pd *ibvpd) 216*9207f9d2SChandrakanth patil { 217*9207f9d2SChandrakanth patil struct bnxt_re_pd *pd = to_bnxt_re_pd(ibvpd); 218*9207f9d2SChandrakanth patil int status; 219*9207f9d2SChandrakanth patil 220*9207f9d2SChandrakanth patil status = ibv_cmd_dealloc_pd(ibvpd); 221*9207f9d2SChandrakanth patil if (status) 222*9207f9d2SChandrakanth patil return status; 223*9207f9d2SChandrakanth patil /* DPI un-mapping will be done during 
uninit_ucontext */ 224*9207f9d2SChandrakanth patil free(pd); 225*9207f9d2SChandrakanth patil 226*9207f9d2SChandrakanth patil return 0; 227*9207f9d2SChandrakanth patil } 228*9207f9d2SChandrakanth patil 229*9207f9d2SChandrakanth patil struct ibv_mr *get_ibv_mr_from_bnxt_re_mr(struct bnxt_re_mr *mr) 230*9207f9d2SChandrakanth patil { 231*9207f9d2SChandrakanth patil return &mr->vmr; 232*9207f9d2SChandrakanth patil } 233*9207f9d2SChandrakanth patil 234*9207f9d2SChandrakanth patil struct ibv_mr *bnxt_re_reg_mr(struct ibv_pd *ibvpd, void *sva, size_t len, 235*9207f9d2SChandrakanth patil int access) 236*9207f9d2SChandrakanth patil { 237*9207f9d2SChandrakanth patil struct bnxt_re_mr_resp resp = {}; 238*9207f9d2SChandrakanth patil struct ibv_reg_mr cmd = {}; 239*9207f9d2SChandrakanth patil struct bnxt_re_mr *mr; 240*9207f9d2SChandrakanth patil uint64_t hw_va; 241*9207f9d2SChandrakanth patil hw_va = (uint64_t) sva; 242*9207f9d2SChandrakanth patil 243*9207f9d2SChandrakanth patil mr = calloc(1, sizeof(*mr)); 244*9207f9d2SChandrakanth patil if (!mr) 245*9207f9d2SChandrakanth patil return NULL; 246*9207f9d2SChandrakanth patil 247*9207f9d2SChandrakanth patil if (ibv_cmd_reg_mr(ibvpd, sva, len, hw_va, access, &mr->vmr, 248*9207f9d2SChandrakanth patil &cmd, sizeof(cmd), &resp.resp, sizeof(resp))) { 249*9207f9d2SChandrakanth patil free(mr); 250*9207f9d2SChandrakanth patil return NULL; 251*9207f9d2SChandrakanth patil } 252*9207f9d2SChandrakanth patil 253*9207f9d2SChandrakanth patil return get_ibv_mr_from_bnxt_re_mr(mr); 254*9207f9d2SChandrakanth patil } 255*9207f9d2SChandrakanth patil 256*9207f9d2SChandrakanth patil int bnxt_re_dereg_mr(VERBS_MR *ibvmr) 257*9207f9d2SChandrakanth patil { 258*9207f9d2SChandrakanth patil struct bnxt_re_mr *mr = (struct bnxt_re_mr *)ibvmr; 259*9207f9d2SChandrakanth patil int status; 260*9207f9d2SChandrakanth patil 261*9207f9d2SChandrakanth patil status = ibv_cmd_dereg_mr(ibvmr); 262*9207f9d2SChandrakanth patil if (status) 263*9207f9d2SChandrakanth patil 
return status; 264*9207f9d2SChandrakanth patil free(mr); 265*9207f9d2SChandrakanth patil 266*9207f9d2SChandrakanth patil return 0; 267*9207f9d2SChandrakanth patil } 268*9207f9d2SChandrakanth patil 269*9207f9d2SChandrakanth patil void *bnxt_re_alloc_cqslab(struct bnxt_re_context *cntx, 270*9207f9d2SChandrakanth patil uint32_t ncqe, uint32_t cur) 271*9207f9d2SChandrakanth patil { 272*9207f9d2SChandrakanth patil struct bnxt_re_mem *mem; 273*9207f9d2SChandrakanth patil uint32_t depth, sz; 274*9207f9d2SChandrakanth patil 275*9207f9d2SChandrakanth patil depth = bnxt_re_init_depth(ncqe + 1, cntx->comp_mask); 276*9207f9d2SChandrakanth patil if (depth > cntx->rdev->max_cq_depth + 1) 277*9207f9d2SChandrakanth patil depth = cntx->rdev->max_cq_depth + 1; 278*9207f9d2SChandrakanth patil if (depth == cur) 279*9207f9d2SChandrakanth patil return NULL; 280*9207f9d2SChandrakanth patil sz = get_aligned((depth * cntx->rdev->cqe_size), cntx->rdev->pg_size); 281*9207f9d2SChandrakanth patil mem = bnxt_re_alloc_mem(sz, cntx->rdev->pg_size); 282*9207f9d2SChandrakanth patil if (mem) 283*9207f9d2SChandrakanth patil mem->pad = depth; 284*9207f9d2SChandrakanth patil return mem; 285*9207f9d2SChandrakanth patil } 286*9207f9d2SChandrakanth patil 287*9207f9d2SChandrakanth patil struct ibv_cq *_bnxt_re_create_cq(struct ibv_context *ibvctx, int ncqe, 288*9207f9d2SChandrakanth patil struct ibv_comp_channel *channel, int vec, 289*9207f9d2SChandrakanth patil bool soft_cq) 290*9207f9d2SChandrakanth patil { 291*9207f9d2SChandrakanth patil struct bnxt_re_context *cntx = to_bnxt_re_context(ibvctx); 292*9207f9d2SChandrakanth patil struct bnxt_re_dev *dev = to_bnxt_re_dev(ibvctx->device); 293*9207f9d2SChandrakanth patil struct bnxt_re_cq_resp resp = {}; 294*9207f9d2SChandrakanth patil struct bnxt_re_cq_req cmd = {}; 295*9207f9d2SChandrakanth patil struct bnxt_re_cq *cq; 296*9207f9d2SChandrakanth patil bool has_dpi; 297*9207f9d2SChandrakanth patil 298*9207f9d2SChandrakanth patil if (ncqe > dev->max_cq_depth) 
299*9207f9d2SChandrakanth patil return NULL; 300*9207f9d2SChandrakanth patil 301*9207f9d2SChandrakanth patil cq = calloc(1, (sizeof(*cq) + sizeof(struct bnxt_re_queue))); 302*9207f9d2SChandrakanth patil if (!cq) 303*9207f9d2SChandrakanth patil return NULL; 304*9207f9d2SChandrakanth patil cq->cqq = (void *)((char *)cq + sizeof(*cq)); 305*9207f9d2SChandrakanth patil if (!cq->cqq) 306*9207f9d2SChandrakanth patil goto mem; 307*9207f9d2SChandrakanth patil 308*9207f9d2SChandrakanth patil cq->mem = bnxt_re_alloc_cqslab(cntx, ncqe, 0); 309*9207f9d2SChandrakanth patil if (!cq->mem) 310*9207f9d2SChandrakanth patil goto mem; 311*9207f9d2SChandrakanth patil cq->cqq->depth = cq->mem->pad; 312*9207f9d2SChandrakanth patil cq->cqq->stride = dev->cqe_size; 313*9207f9d2SChandrakanth patil /* As an exception no need to call get_ring api we know 314*9207f9d2SChandrakanth patil * this is the only consumer 315*9207f9d2SChandrakanth patil */ 316*9207f9d2SChandrakanth patil cq->cqq->va = cq->mem->va_head; 317*9207f9d2SChandrakanth patil if (!cq->cqq->va) 318*9207f9d2SChandrakanth patil goto fail; 319*9207f9d2SChandrakanth patil 320*9207f9d2SChandrakanth patil cmd.cq_va = (uint64_t)cq->cqq->va; 321*9207f9d2SChandrakanth patil cmd.cq_handle = (uint64_t)cq; 322*9207f9d2SChandrakanth patil if (soft_cq) { 323*9207f9d2SChandrakanth patil cmd.comp_mask |= BNXT_RE_COMP_MASK_CQ_REQ_HAS_CAP_MASK; 324*9207f9d2SChandrakanth patil cmd.cq_capab |= BNXT_RE_COMP_MASK_CQ_REQ_CAP_DBR_RECOVERY; 325*9207f9d2SChandrakanth patil } 326*9207f9d2SChandrakanth patil if (ibv_cmd_create_cq(ibvctx, ncqe, channel, vec, 327*9207f9d2SChandrakanth patil &cq->ibvcq, &cmd.cmd, sizeof(cmd), 328*9207f9d2SChandrakanth patil &resp.resp, sizeof(resp))) 329*9207f9d2SChandrakanth patil goto fail; 330*9207f9d2SChandrakanth patil 331*9207f9d2SChandrakanth patil has_dpi = resp.comp_mask & BNXT_RE_COMP_MASK_CQ_HAS_DB_INFO; 332*9207f9d2SChandrakanth patil if (!cntx->udpi.dbpage && has_dpi) { 333*9207f9d2SChandrakanth patil uint32_t 
wcdpi = 0; 334*9207f9d2SChandrakanth patil 335*9207f9d2SChandrakanth patil if (bnxt_re_is_wcdpi_enabled(cntx) && 336*9207f9d2SChandrakanth patil resp.comp_mask & BNXT_RE_COMP_MASK_CQ_HAS_WC_DPI) 337*9207f9d2SChandrakanth patil wcdpi = resp.wcdpi; 338*9207f9d2SChandrakanth patil if (bnxt_re_map_db_page(ibvctx, resp.dbr, resp.dpi, wcdpi)) 339*9207f9d2SChandrakanth patil goto fail; 340*9207f9d2SChandrakanth patil if (cntx->cctx->chip_is_gen_p5_thor2 && cntx->udpi.wcdpi) 341*9207f9d2SChandrakanth patil bnxt_re_init_pbuf_list(cntx); 342*9207f9d2SChandrakanth patil } 343*9207f9d2SChandrakanth patil 344*9207f9d2SChandrakanth patil if (resp.comp_mask & BNXT_RE_COMP_MASK_CQ_HAS_CQ_PAGE) { 345*9207f9d2SChandrakanth patil cq->cq_page = mmap(NULL, dev->pg_size, PROT_WRITE, MAP_SHARED, 346*9207f9d2SChandrakanth patil ibvctx->cmd_fd, resp.cq_page); 347*9207f9d2SChandrakanth patil if (!cq->cq_page) 348*9207f9d2SChandrakanth patil fprintf(stderr, DEV "Valid cq_page not mapped\n"); 349*9207f9d2SChandrakanth patil } 350*9207f9d2SChandrakanth patil 351*9207f9d2SChandrakanth patil cq->cqid = resp.cqid; 352*9207f9d2SChandrakanth patil cq->phase = resp.phase; 353*9207f9d2SChandrakanth patil cq->cqq->tail = resp.tail; 354*9207f9d2SChandrakanth patil cq->udpi = &cntx->udpi; 355*9207f9d2SChandrakanth patil cq->first_arm = true; 356*9207f9d2SChandrakanth patil cq->cntx = cntx; 357*9207f9d2SChandrakanth patil cq->rand.seed = cq->cqid; 358*9207f9d2SChandrakanth patil cq->shadow_db_key = BNXT_RE_DB_KEY_INVALID; 359*9207f9d2SChandrakanth patil bnxt_re_dp_spin_init(&cq->cqq->qlock, PTHREAD_PROCESS_PRIVATE, !bnxt_single_threaded); 360*9207f9d2SChandrakanth patil INIT_DBLY_LIST_HEAD(&cq->sfhead); 361*9207f9d2SChandrakanth patil INIT_DBLY_LIST_HEAD(&cq->rfhead); 362*9207f9d2SChandrakanth patil INIT_DBLY_LIST_HEAD(&cq->prev_cq_head); 363*9207f9d2SChandrakanth patil if (_is_db_drop_recovery_enable(cntx) && !soft_cq) { 364*9207f9d2SChandrakanth patil INIT_DBLY_LIST_NODE(&cq->dbnode); 
365*9207f9d2SChandrakanth patil pthread_spin_lock(&cntx->cq_dbr_res.lock); 366*9207f9d2SChandrakanth patil bnxt_re_list_add_node(&cq->dbnode, &cntx->cq_dbr_res.head); 367*9207f9d2SChandrakanth patil pthread_spin_unlock(&cntx->cq_dbr_res.lock); 368*9207f9d2SChandrakanth patil } 369*9207f9d2SChandrakanth patil 370*9207f9d2SChandrakanth patil return &cq->ibvcq; 371*9207f9d2SChandrakanth patil fail: 372*9207f9d2SChandrakanth patil bnxt_re_free_mem(cq->mem); 373*9207f9d2SChandrakanth patil mem: 374*9207f9d2SChandrakanth patil free(cq); 375*9207f9d2SChandrakanth patil return NULL; 376*9207f9d2SChandrakanth patil } 377*9207f9d2SChandrakanth patil 378*9207f9d2SChandrakanth patil struct ibv_cq *bnxt_re_create_cq(struct ibv_context *ibvctx, int ncqe, 379*9207f9d2SChandrakanth patil struct ibv_comp_channel *channel, int vec) 380*9207f9d2SChandrakanth patil { 381*9207f9d2SChandrakanth patil struct bnxt_re_context *cntx = to_bnxt_re_context(ibvctx); 382*9207f9d2SChandrakanth patil struct bnxt_re_dev *dev = to_bnxt_re_dev(ibvctx->device); 383*9207f9d2SChandrakanth patil sigset_t block_sig_set, old_sig_set; 384*9207f9d2SChandrakanth patil int ret; 385*9207f9d2SChandrakanth patil 386*9207f9d2SChandrakanth patil if (_is_db_drop_recovery_enable(cntx) && !cntx->dbr_cq) { 387*9207f9d2SChandrakanth patil cntx->dbr_ev_chan = 388*9207f9d2SChandrakanth patil ibv_create_comp_channel(ibvctx); 389*9207f9d2SChandrakanth patil if (!cntx->dbr_ev_chan) { 390*9207f9d2SChandrakanth patil fprintf(stderr, 391*9207f9d2SChandrakanth patil DEV "Failed to create completion channel\n"); 392*9207f9d2SChandrakanth patil goto free; 393*9207f9d2SChandrakanth patil } 394*9207f9d2SChandrakanth patil cntx->dbr_cq = _bnxt_re_create_cq(ibvctx, 1, cntx->dbr_ev_chan, vec, 1); 395*9207f9d2SChandrakanth patil if (!cntx->dbr_cq) { 396*9207f9d2SChandrakanth patil fprintf(stderr, DEV "Couldn't create CQ\n"); 397*9207f9d2SChandrakanth patil goto free; 398*9207f9d2SChandrakanth patil } 399*9207f9d2SChandrakanth patil 
cntx->db_recovery_page = mmap(NULL, dev->pg_size, PROT_READ | 400*9207f9d2SChandrakanth patil PROT_WRITE, MAP_SHARED, 401*9207f9d2SChandrakanth patil ibvctx->cmd_fd, BNXT_RE_DB_RECOVERY_PAGE); 402*9207f9d2SChandrakanth patil if (cntx->db_recovery_page == MAP_FAILED) { 403*9207f9d2SChandrakanth patil fprintf(stderr, DEV "Couldn't map DB recovery page\n"); 404*9207f9d2SChandrakanth patil goto free; 405*9207f9d2SChandrakanth patil } 406*9207f9d2SChandrakanth patil /* Create pthread to handle the doorbell drop events. This thread is 407*9207f9d2SChandrakanth patil * not going to handle any signals. Before creation block all the 408*9207f9d2SChandrakanth patil * signals, and after creation restore the old signal mask. 409*9207f9d2SChandrakanth patil */ 410*9207f9d2SChandrakanth patil sigfillset(&block_sig_set); 411*9207f9d2SChandrakanth patil pthread_sigmask(SIG_BLOCK, &block_sig_set, &old_sig_set); 412*9207f9d2SChandrakanth patil ret = pthread_create(&cntx->dbr_thread, NULL, bnxt_re_dbr_thread, cntx); 413*9207f9d2SChandrakanth patil if (ret) { 414*9207f9d2SChandrakanth patil fprintf(stderr, DEV "Couldn't create pthread\n"); 415*9207f9d2SChandrakanth patil pthread_sigmask(SIG_SETMASK, &old_sig_set, NULL); 416*9207f9d2SChandrakanth patil goto free; 417*9207f9d2SChandrakanth patil } 418*9207f9d2SChandrakanth patil pthread_sigmask(SIG_SETMASK, &old_sig_set, NULL); 419*9207f9d2SChandrakanth patil INIT_DBLY_LIST_HEAD(&cntx->qp_dbr_res.head); 420*9207f9d2SChandrakanth patil pthread_spin_init(&cntx->qp_dbr_res.lock, PTHREAD_PROCESS_PRIVATE); 421*9207f9d2SChandrakanth patil INIT_DBLY_LIST_HEAD(&cntx->cq_dbr_res.head); 422*9207f9d2SChandrakanth patil pthread_spin_init(&cntx->cq_dbr_res.lock, PTHREAD_PROCESS_PRIVATE); 423*9207f9d2SChandrakanth patil INIT_DBLY_LIST_HEAD(&cntx->srq_dbr_res.head); 424*9207f9d2SChandrakanth patil pthread_spin_init(&cntx->srq_dbr_res.lock, PTHREAD_PROCESS_PRIVATE); 425*9207f9d2SChandrakanth patil } 426*9207f9d2SChandrakanth patil 
return(_bnxt_re_create_cq(ibvctx, ncqe, channel, vec, 0)); 427*9207f9d2SChandrakanth patil free: 428*9207f9d2SChandrakanth patil if (cntx->dbr_ev_chan) { 429*9207f9d2SChandrakanth patil ret = ibv_destroy_comp_channel(cntx->dbr_ev_chan); 430*9207f9d2SChandrakanth patil if (ret) 431*9207f9d2SChandrakanth patil fprintf(stderr, DEV "ibv_destroy_comp_channel error\n"); 432*9207f9d2SChandrakanth patil } 433*9207f9d2SChandrakanth patil 434*9207f9d2SChandrakanth patil if (cntx->dbr_cq) { 435*9207f9d2SChandrakanth patil if (cntx->db_recovery_page) 436*9207f9d2SChandrakanth patil munmap(cntx->db_recovery_page, dev->pg_size); 437*9207f9d2SChandrakanth patil ret = ibv_destroy_cq(cntx->dbr_cq); 438*9207f9d2SChandrakanth patil if (ret) 439*9207f9d2SChandrakanth patil fprintf(stderr, DEV "ibv_destroy_cq error\n"); 440*9207f9d2SChandrakanth patil } 441*9207f9d2SChandrakanth patil return NULL; 442*9207f9d2SChandrakanth patil } 443*9207f9d2SChandrakanth patil 444*9207f9d2SChandrakanth patil int bnxt_re_poll_kernel_cq(struct bnxt_re_cq *cq) 445*9207f9d2SChandrakanth patil { 446*9207f9d2SChandrakanth patil struct ibv_wc tmp_wc; 447*9207f9d2SChandrakanth patil int rc; 448*9207f9d2SChandrakanth patil 449*9207f9d2SChandrakanth patil rc = ibv_cmd_poll_cq(&cq->ibvcq, 1, &tmp_wc); 450*9207f9d2SChandrakanth patil if (unlikely(rc)) 451*9207f9d2SChandrakanth patil fprintf(stderr, "ibv_cmd_poll_cq failed: %d\n", rc); 452*9207f9d2SChandrakanth patil return rc; 453*9207f9d2SChandrakanth patil } 454*9207f9d2SChandrakanth patil 455*9207f9d2SChandrakanth patil #define BNXT_RE_QUEUE_START_PHASE 0x01 456*9207f9d2SChandrakanth patil 457*9207f9d2SChandrakanth patil /* 458*9207f9d2SChandrakanth patil * Function to complete the last steps in CQ resize. Invoke poll function 459*9207f9d2SChandrakanth patil * in the kernel driver; this serves as a signal to the driver to complete CQ 460*9207f9d2SChandrakanth patil * resize steps required. 
Free memory mapped for the original CQ and switch 461*9207f9d2SChandrakanth patil * over to the memory mapped for CQ with the new size. Finally Ack the Cutoff 462*9207f9d2SChandrakanth patil * CQE. This function must be called under cq->cqq.lock. 463*9207f9d2SChandrakanth patil */ 464*9207f9d2SChandrakanth patil void bnxt_re_resize_cq_complete(struct bnxt_re_cq *cq) 465*9207f9d2SChandrakanth patil { 466*9207f9d2SChandrakanth patil struct bnxt_re_context *cntx = to_bnxt_re_context(cq->ibvcq.context); 467*9207f9d2SChandrakanth patil 468*9207f9d2SChandrakanth patil bnxt_re_poll_kernel_cq(cq); 469*9207f9d2SChandrakanth patil bnxt_re_free_mem(cq->mem); 470*9207f9d2SChandrakanth patil 471*9207f9d2SChandrakanth patil cq->mem = cq->resize_mem; 472*9207f9d2SChandrakanth patil cq->resize_mem = NULL; 473*9207f9d2SChandrakanth patil /* As an exception no need to call get_ring api we know 474*9207f9d2SChandrakanth patil * this is the only consumer 475*9207f9d2SChandrakanth patil */ 476*9207f9d2SChandrakanth patil cq->cqq->va = cq->mem->va_head; 477*9207f9d2SChandrakanth patil /* 478*9207f9d2SChandrakanth patil * We don't want to memcpy() the entire cqq structure below; otherwise 479*9207f9d2SChandrakanth patil * we'd end up overwriting cq->cqq.lock that is held by the caller. 480*9207f9d2SChandrakanth patil * So we copy the members piecemeal. cqq->head, cqq->tail implicitly 481*9207f9d2SChandrakanth patil * set to 0 before cutoff_ack DB. 
482*9207f9d2SChandrakanth patil */ 483*9207f9d2SChandrakanth patil cq->cqq->depth = cq->mem->pad; 484*9207f9d2SChandrakanth patil cq->cqq->stride = cntx->rdev->cqe_size; 485*9207f9d2SChandrakanth patil cq->cqq->head = 0; 486*9207f9d2SChandrakanth patil cq->cqq->tail = 0; 487*9207f9d2SChandrakanth patil cq->phase = BNXT_RE_QUEUE_START_PHASE; 488*9207f9d2SChandrakanth patil /* Reset epoch portion of the flags */ 489*9207f9d2SChandrakanth patil cq->cqq->flags &= ~(BNXT_RE_FLAG_EPOCH_TAIL_MASK | 490*9207f9d2SChandrakanth patil BNXT_RE_FLAG_EPOCH_HEAD_MASK); 491*9207f9d2SChandrakanth patil bnxt_re_ring_cq_arm_db(cq, BNXT_RE_QUE_TYPE_CQ_CUT_ACK); 492*9207f9d2SChandrakanth patil } 493*9207f9d2SChandrakanth patil 494*9207f9d2SChandrakanth patil int bnxt_re_resize_cq(struct ibv_cq *ibvcq, int ncqe) 495*9207f9d2SChandrakanth patil { 496*9207f9d2SChandrakanth patil struct bnxt_re_context *cntx = to_bnxt_re_context(ibvcq->context); 497*9207f9d2SChandrakanth patil struct bnxt_re_dev *dev = to_bnxt_re_dev(ibvcq->context->device); 498*9207f9d2SChandrakanth patil struct bnxt_re_cq *cq = to_bnxt_re_cq(ibvcq); 499*9207f9d2SChandrakanth patil struct bnxt_re_resize_cq_req req = {}; 500*9207f9d2SChandrakanth patil uint32_t exit_cnt = 20; 501*9207f9d2SChandrakanth patil 502*9207f9d2SChandrakanth patil struct ibv_resize_cq_resp resp = {}; 503*9207f9d2SChandrakanth patil int rc = 0; 504*9207f9d2SChandrakanth patil 505*9207f9d2SChandrakanth patil if (ncqe > dev->max_cq_depth) 506*9207f9d2SChandrakanth patil return -EINVAL; 507*9207f9d2SChandrakanth patil 508*9207f9d2SChandrakanth patil bnxt_re_dp_spin_lock(&cq->cqq->qlock); 509*9207f9d2SChandrakanth patil cq->resize_mem = bnxt_re_alloc_cqslab(cntx, ncqe, cq->cqq->depth); 510*9207f9d2SChandrakanth patil if (unlikely(!cq->resize_mem)) { 511*9207f9d2SChandrakanth patil rc = -ENOMEM; 512*9207f9d2SChandrakanth patil goto done; 513*9207f9d2SChandrakanth patil } 514*9207f9d2SChandrakanth patil /* As an exception no need to call get_ring api we 
know 515*9207f9d2SChandrakanth patil * this is the only consumer 516*9207f9d2SChandrakanth patil */ 517*9207f9d2SChandrakanth patil req.cq_va = (uint64_t)cq->resize_mem->va_head; 518*9207f9d2SChandrakanth patil rc = ibv_cmd_resize_cq(ibvcq, ncqe, &req.cmd, 519*9207f9d2SChandrakanth patil sizeof(req), &resp, sizeof(resp)); 520*9207f9d2SChandrakanth patil if (unlikely(rc)) { 521*9207f9d2SChandrakanth patil bnxt_re_free_mem(cq->resize_mem); 522*9207f9d2SChandrakanth patil goto done; 523*9207f9d2SChandrakanth patil } 524*9207f9d2SChandrakanth patil 525*9207f9d2SChandrakanth patil while(true) { 526*9207f9d2SChandrakanth patil struct ibv_wc tmp_wc = {0}; 527*9207f9d2SChandrakanth patil uint32_t resize = 0; 528*9207f9d2SChandrakanth patil int dqed = 0; 529*9207f9d2SChandrakanth patil 530*9207f9d2SChandrakanth patil struct bnxt_re_work_compl *compl = NULL; 531*9207f9d2SChandrakanth patil dqed = bnxt_re_poll_one(cq, 1, &tmp_wc, &resize); 532*9207f9d2SChandrakanth patil if (resize) { 533*9207f9d2SChandrakanth patil break; 534*9207f9d2SChandrakanth patil } 535*9207f9d2SChandrakanth patil if (dqed) { 536*9207f9d2SChandrakanth patil compl = calloc(1, sizeof(*compl)); 537*9207f9d2SChandrakanth patil if (unlikely(!compl)) { 538*9207f9d2SChandrakanth patil fprintf(stderr, "%s: No Memory.. 
Continue\n", __func__); 539*9207f9d2SChandrakanth patil break; 540*9207f9d2SChandrakanth patil } 541*9207f9d2SChandrakanth patil memcpy(&compl->wc, &tmp_wc, sizeof(tmp_wc)); 542*9207f9d2SChandrakanth patil bnxt_re_list_add_node(&compl->cnode, &cq->prev_cq_head); 543*9207f9d2SChandrakanth patil compl = NULL; 544*9207f9d2SChandrakanth patil memset(&tmp_wc, 0, sizeof(tmp_wc)); 545*9207f9d2SChandrakanth patil } else { 546*9207f9d2SChandrakanth patil exit_cnt--; 547*9207f9d2SChandrakanth patil if (unlikely(!exit_cnt)) { 548*9207f9d2SChandrakanth patil rc = -EIO; 549*9207f9d2SChandrakanth patil break; 550*9207f9d2SChandrakanth patil } else { 551*9207f9d2SChandrakanth patil /* wait for 100 milli seconds */ 552*9207f9d2SChandrakanth patil bnxt_re_sub_sec_busy_wait(100 * 1000000); 553*9207f9d2SChandrakanth patil } 554*9207f9d2SChandrakanth patil } 555*9207f9d2SChandrakanth patil } 556*9207f9d2SChandrakanth patil done: 557*9207f9d2SChandrakanth patil bnxt_re_dp_spin_unlock(&cq->cqq->qlock); 558*9207f9d2SChandrakanth patil return rc; 559*9207f9d2SChandrakanth patil } 560*9207f9d2SChandrakanth patil 561*9207f9d2SChandrakanth patil static void bnxt_re_destroy_resize_cq_list(struct bnxt_re_cq *cq) 562*9207f9d2SChandrakanth patil { 563*9207f9d2SChandrakanth patil struct bnxt_re_list_node *cur, *tmp; 564*9207f9d2SChandrakanth patil struct bnxt_re_work_compl *compl; 565*9207f9d2SChandrakanth patil 566*9207f9d2SChandrakanth patil if (bnxt_re_list_empty(&cq->prev_cq_head)) 567*9207f9d2SChandrakanth patil return; 568*9207f9d2SChandrakanth patil 569*9207f9d2SChandrakanth patil list_for_each_node_safe(cur, tmp, &cq->prev_cq_head) { 570*9207f9d2SChandrakanth patil compl = list_node(cur, struct bnxt_re_work_compl, cnode); 571*9207f9d2SChandrakanth patil bnxt_re_list_del_node(&compl->cnode, &cq->prev_cq_head); 572*9207f9d2SChandrakanth patil free(compl); 573*9207f9d2SChandrakanth patil } 574*9207f9d2SChandrakanth patil 575*9207f9d2SChandrakanth patil } 576*9207f9d2SChandrakanth patil 
577*9207f9d2SChandrakanth patil int bnxt_re_destroy_cq(struct ibv_cq *ibvcq) 578*9207f9d2SChandrakanth patil { 579*9207f9d2SChandrakanth patil struct bnxt_re_cq *cq = to_bnxt_re_cq(ibvcq); 580*9207f9d2SChandrakanth patil int status; 581*9207f9d2SChandrakanth patil 582*9207f9d2SChandrakanth patil if (_is_db_drop_recovery_enable(cq->cntx) && 583*9207f9d2SChandrakanth patil ibvcq != cq->cntx->dbr_cq) { 584*9207f9d2SChandrakanth patil pthread_spin_lock(&cq->cntx->cq_dbr_res.lock); 585*9207f9d2SChandrakanth patil bnxt_re_list_del_node(&cq->dbnode, 586*9207f9d2SChandrakanth patil &cq->cntx->cq_dbr_res.head); 587*9207f9d2SChandrakanth patil pthread_spin_unlock(&cq->cntx->cq_dbr_res.lock); 588*9207f9d2SChandrakanth patil } 589*9207f9d2SChandrakanth patil status = ibv_cmd_destroy_cq(ibvcq); 590*9207f9d2SChandrakanth patil if (status) { 591*9207f9d2SChandrakanth patil if (_is_db_drop_recovery_enable(cq->cntx) && 592*9207f9d2SChandrakanth patil ibvcq != cq->cntx->dbr_cq) { 593*9207f9d2SChandrakanth patil pthread_spin_lock(&cq->cntx->cq_dbr_res.lock); 594*9207f9d2SChandrakanth patil bnxt_re_list_add_node(&cq->dbnode, 595*9207f9d2SChandrakanth patil &cq->cntx->cq_dbr_res.head); 596*9207f9d2SChandrakanth patil pthread_spin_unlock(&cq->cntx->cq_dbr_res.lock); 597*9207f9d2SChandrakanth patil } 598*9207f9d2SChandrakanth patil return status; 599*9207f9d2SChandrakanth patil } 600*9207f9d2SChandrakanth patil bnxt_re_destroy_resize_cq_list(cq); 601*9207f9d2SChandrakanth patil bnxt_re_free_mem(cq->mem); 602*9207f9d2SChandrakanth patil free(cq); 603*9207f9d2SChandrakanth patil return 0; 604*9207f9d2SChandrakanth patil } 605*9207f9d2SChandrakanth patil 606*9207f9d2SChandrakanth patil static uint8_t bnxt_re_poll_err_scqe(struct bnxt_re_qp *qp, 607*9207f9d2SChandrakanth patil struct ibv_wc *ibvwc, 608*9207f9d2SChandrakanth patil struct bnxt_re_req_cqe *scqe, 609*9207f9d2SChandrakanth patil uint32_t flg_val, int *cnt) 610*9207f9d2SChandrakanth patil { 611*9207f9d2SChandrakanth patil struct 
bnxt_re_queue *sq = qp->jsqq->hwque; 612*9207f9d2SChandrakanth patil struct bnxt_re_wrid *swrid; 613*9207f9d2SChandrakanth patil struct bnxt_re_cq *scq; 614*9207f9d2SChandrakanth patil uint8_t status; 615*9207f9d2SChandrakanth patil uint32_t head; 616*9207f9d2SChandrakanth patil 617*9207f9d2SChandrakanth patil scq = to_bnxt_re_cq(qp->ibvqp.send_cq); 618*9207f9d2SChandrakanth patil 619*9207f9d2SChandrakanth patil head = qp->jsqq->last_idx; 620*9207f9d2SChandrakanth patil swrid = &qp->jsqq->swque[head]; 621*9207f9d2SChandrakanth patil 622*9207f9d2SChandrakanth patil *cnt = 1; 623*9207f9d2SChandrakanth patil status = (flg_val >> BNXT_RE_BCQE_STATUS_SHIFT) & 624*9207f9d2SChandrakanth patil BNXT_RE_BCQE_STATUS_MASK; 625*9207f9d2SChandrakanth patil ibvwc->status = bnxt_re_req_to_ibv_status[status]; 626*9207f9d2SChandrakanth patil ibvwc->wc_flags = 0; 627*9207f9d2SChandrakanth patil ibvwc->wr_id = swrid->wrid; 628*9207f9d2SChandrakanth patil ibvwc->qp_num = qp->qpid; 629*9207f9d2SChandrakanth patil ibvwc->opcode = swrid->wc_opcd; 630*9207f9d2SChandrakanth patil ibvwc->byte_len = 0; 631*9207f9d2SChandrakanth patil 632*9207f9d2SChandrakanth patil bnxt_re_incr_head(sq, swrid->slots); 633*9207f9d2SChandrakanth patil bnxt_re_jqq_mod_last(qp->jsqq, head); 634*9207f9d2SChandrakanth patil 635*9207f9d2SChandrakanth patil if (qp->qpst != IBV_QPS_ERR) 636*9207f9d2SChandrakanth patil qp->qpst = IBV_QPS_ERR; 637*9207f9d2SChandrakanth patil bnxt_re_list_add_node(&qp->snode, &scq->sfhead); 638*9207f9d2SChandrakanth patil bnxt_re_trace("%s: qp_num = 0x%x status = %d\n", 639*9207f9d2SChandrakanth patil __func__, ibvwc->qp_num, ibvwc->status) 640*9207f9d2SChandrakanth patil 641*9207f9d2SChandrakanth patil return false; 642*9207f9d2SChandrakanth patil } 643*9207f9d2SChandrakanth patil 644*9207f9d2SChandrakanth patil static uint8_t bnxt_re_poll_success_scqe(struct bnxt_re_qp *qp, 645*9207f9d2SChandrakanth patil struct ibv_wc *ibvwc, 646*9207f9d2SChandrakanth patil struct bnxt_re_req_cqe 
*scqe, int *cnt) 647*9207f9d2SChandrakanth patil { 648*9207f9d2SChandrakanth patil struct bnxt_re_queue *sq = qp->jsqq->hwque; 649*9207f9d2SChandrakanth patil struct bnxt_re_wrid *swrid; 650*9207f9d2SChandrakanth patil uint8_t pcqe = false; 651*9207f9d2SChandrakanth patil uint32_t cindx, head; 652*9207f9d2SChandrakanth patil 653*9207f9d2SChandrakanth patil head = qp->jsqq->last_idx; 654*9207f9d2SChandrakanth patil swrid = &qp->jsqq->swque[head]; 655*9207f9d2SChandrakanth patil cindx = le32toh(scqe->con_indx) % qp->cap.max_swr; 656*9207f9d2SChandrakanth patil 657*9207f9d2SChandrakanth patil if (!(swrid->sig & IBV_SEND_SIGNALED)) { 658*9207f9d2SChandrakanth patil *cnt = 0; 659*9207f9d2SChandrakanth patil } else { 660*9207f9d2SChandrakanth patil ibvwc->status = IBV_WC_SUCCESS; 661*9207f9d2SChandrakanth patil ibvwc->wc_flags = 0; 662*9207f9d2SChandrakanth patil ibvwc->qp_num = qp->qpid; 663*9207f9d2SChandrakanth patil ibvwc->wr_id = swrid->wrid; 664*9207f9d2SChandrakanth patil ibvwc->opcode = swrid->wc_opcd; 665*9207f9d2SChandrakanth patil if (ibvwc->opcode == IBV_WC_RDMA_READ || 666*9207f9d2SChandrakanth patil ibvwc->opcode == IBV_WC_COMP_SWAP || 667*9207f9d2SChandrakanth patil ibvwc->opcode == IBV_WC_FETCH_ADD) 668*9207f9d2SChandrakanth patil ibvwc->byte_len = swrid->bytes; 669*9207f9d2SChandrakanth patil *cnt = 1; 670*9207f9d2SChandrakanth patil } 671*9207f9d2SChandrakanth patil bnxt_re_incr_head(sq, swrid->slots); 672*9207f9d2SChandrakanth patil bnxt_re_jqq_mod_last(qp->jsqq, head); 673*9207f9d2SChandrakanth patil if (qp->jsqq->last_idx != cindx) 674*9207f9d2SChandrakanth patil pcqe = true; 675*9207f9d2SChandrakanth patil 676*9207f9d2SChandrakanth patil return pcqe; 677*9207f9d2SChandrakanth patil } 678*9207f9d2SChandrakanth patil 679*9207f9d2SChandrakanth patil static uint8_t bnxt_re_poll_scqe(struct bnxt_re_qp *qp, struct ibv_wc *ibvwc, 680*9207f9d2SChandrakanth patil void *cqe, uint32_t flg_val, int *cnt) 681*9207f9d2SChandrakanth patil { 
682*9207f9d2SChandrakanth patil uint8_t status, pcqe = false; 683*9207f9d2SChandrakanth patil 684*9207f9d2SChandrakanth patil status = (flg_val >> BNXT_RE_BCQE_STATUS_SHIFT) & 685*9207f9d2SChandrakanth patil BNXT_RE_BCQE_STATUS_MASK; 686*9207f9d2SChandrakanth patil if (status == BNXT_RE_REQ_ST_OK) 687*9207f9d2SChandrakanth patil pcqe = bnxt_re_poll_success_scqe(qp, ibvwc, cqe, cnt); 688*9207f9d2SChandrakanth patil else 689*9207f9d2SChandrakanth patil pcqe = bnxt_re_poll_err_scqe(qp, ibvwc, cqe, flg_val, cnt); 690*9207f9d2SChandrakanth patil 691*9207f9d2SChandrakanth patil return pcqe; 692*9207f9d2SChandrakanth patil } 693*9207f9d2SChandrakanth patil 694*9207f9d2SChandrakanth patil static void bnxt_re_release_srqe(struct bnxt_re_srq *srq, int tag) 695*9207f9d2SChandrakanth patil { 696*9207f9d2SChandrakanth patil bnxt_re_dp_spin_lock(&srq->srqq->qlock); 697*9207f9d2SChandrakanth patil srq->srwrid[srq->last_idx].next_idx = tag; 698*9207f9d2SChandrakanth patil srq->last_idx = tag; 699*9207f9d2SChandrakanth patil srq->srwrid[srq->last_idx].next_idx = -1; 700*9207f9d2SChandrakanth patil bnxt_re_dp_spin_unlock(&srq->srqq->qlock); 701*9207f9d2SChandrakanth patil } 702*9207f9d2SChandrakanth patil 703*9207f9d2SChandrakanth patil static int bnxt_re_poll_err_rcqe(struct bnxt_re_qp *qp, struct ibv_wc *ibvwc, 704*9207f9d2SChandrakanth patil struct bnxt_re_bcqe *hdr, 705*9207f9d2SChandrakanth patil uint32_t flg_val, void *cqe) 706*9207f9d2SChandrakanth patil { 707*9207f9d2SChandrakanth patil struct bnxt_re_wrid *swque; 708*9207f9d2SChandrakanth patil struct bnxt_re_queue *rq; 709*9207f9d2SChandrakanth patil struct bnxt_re_cq *rcq; 710*9207f9d2SChandrakanth patil uint8_t status, cnt; 711*9207f9d2SChandrakanth patil uint32_t head = 0; 712*9207f9d2SChandrakanth patil 713*9207f9d2SChandrakanth patil rcq = to_bnxt_re_cq(qp->ibvqp.recv_cq); 714*9207f9d2SChandrakanth patil 715*9207f9d2SChandrakanth patil status = (flg_val >> BNXT_RE_BCQE_STATUS_SHIFT) & 716*9207f9d2SChandrakanth patil 
BNXT_RE_BCQE_STATUS_MASK; 717*9207f9d2SChandrakanth patil /* skip h/w flush errors */ 718*9207f9d2SChandrakanth patil if (status == BNXT_RE_RSP_ST_HW_FLUSH) 719*9207f9d2SChandrakanth patil return 0; 720*9207f9d2SChandrakanth patil 721*9207f9d2SChandrakanth patil if (!qp->srq) { 722*9207f9d2SChandrakanth patil rq = qp->jrqq->hwque; 723*9207f9d2SChandrakanth patil head = qp->jrqq->last_idx; 724*9207f9d2SChandrakanth patil swque = &qp->jrqq->swque[head]; 725*9207f9d2SChandrakanth patil ibvwc->wr_id = swque->wrid; 726*9207f9d2SChandrakanth patil cnt = swque->slots; 727*9207f9d2SChandrakanth patil } else { 728*9207f9d2SChandrakanth patil struct bnxt_re_srq *srq; 729*9207f9d2SChandrakanth patil int tag; 730*9207f9d2SChandrakanth patil 731*9207f9d2SChandrakanth patil srq = qp->srq; 732*9207f9d2SChandrakanth patil rq = srq->srqq; 733*9207f9d2SChandrakanth patil cnt = 1; 734*9207f9d2SChandrakanth patil tag = le32toh(hdr->qphi_rwrid) & BNXT_RE_BCQE_RWRID_MASK; 735*9207f9d2SChandrakanth patil ibvwc->wr_id = srq->srwrid[tag].wrid; 736*9207f9d2SChandrakanth patil bnxt_re_release_srqe(srq, tag); 737*9207f9d2SChandrakanth patil } 738*9207f9d2SChandrakanth patil 739*9207f9d2SChandrakanth patil ibvwc->status = bnxt_re_res_to_ibv_status[status]; 740*9207f9d2SChandrakanth patil ibvwc->qp_num = qp->qpid; 741*9207f9d2SChandrakanth patil ibvwc->opcode = IBV_WC_RECV; 742*9207f9d2SChandrakanth patil ibvwc->byte_len = 0; 743*9207f9d2SChandrakanth patil ibvwc->wc_flags = 0; 744*9207f9d2SChandrakanth patil if (qp->qptyp == IBV_QPT_UD) 745*9207f9d2SChandrakanth patil ibvwc->src_qp = 0; 746*9207f9d2SChandrakanth patil 747*9207f9d2SChandrakanth patil if (!qp->srq) 748*9207f9d2SChandrakanth patil bnxt_re_jqq_mod_last(qp->jrqq, head); 749*9207f9d2SChandrakanth patil bnxt_re_incr_head(rq, cnt); 750*9207f9d2SChandrakanth patil 751*9207f9d2SChandrakanth patil if (!qp->srq) 752*9207f9d2SChandrakanth patil bnxt_re_list_add_node(&qp->rnode, &rcq->rfhead); 753*9207f9d2SChandrakanth patil 
754*9207f9d2SChandrakanth patil bnxt_re_trace("%s: qp_num = 0x%x status = %d\n", 755*9207f9d2SChandrakanth patil __func__, ibvwc->qp_num, ibvwc->status) 756*9207f9d2SChandrakanth patil return 1; 757*9207f9d2SChandrakanth patil } 758*9207f9d2SChandrakanth patil 759*9207f9d2SChandrakanth patil static void bnxt_re_fill_ud_cqe(struct ibv_wc *ibvwc, 760*9207f9d2SChandrakanth patil struct bnxt_re_bcqe *hdr, void *cqe, 761*9207f9d2SChandrakanth patil uint8_t flags) 762*9207f9d2SChandrakanth patil { 763*9207f9d2SChandrakanth patil struct bnxt_re_ud_cqe *ucqe = cqe; 764*9207f9d2SChandrakanth patil uint32_t qpid; 765*9207f9d2SChandrakanth patil 766*9207f9d2SChandrakanth patil qpid = ((le32toh(hdr->qphi_rwrid) >> BNXT_RE_BCQE_SRCQP_SHIFT) & 767*9207f9d2SChandrakanth patil BNXT_RE_BCQE_SRCQP_SHIFT) << 0x10; /* higher 8 bits of 24 */ 768*9207f9d2SChandrakanth patil qpid |= (le64toh(ucqe->qplo_mac) >> BNXT_RE_UD_CQE_SRCQPLO_SHIFT) & 769*9207f9d2SChandrakanth patil BNXT_RE_UD_CQE_SRCQPLO_MASK; /*lower 16 of 24 */ 770*9207f9d2SChandrakanth patil ibvwc->src_qp = qpid; 771*9207f9d2SChandrakanth patil ibvwc->wc_flags |= IBV_WC_GRH; 772*9207f9d2SChandrakanth patil ibvwc->sl = (flags & BNXT_RE_UD_FLAGS_IP_VER_MASK) >> 773*9207f9d2SChandrakanth patil BNXT_RE_UD_FLAGS_IP_VER_SFT; 774*9207f9d2SChandrakanth patil /*IB-stack ABI in user do not ask for MAC to be reported. 
*/ 775*9207f9d2SChandrakanth patil } 776*9207f9d2SChandrakanth patil 777*9207f9d2SChandrakanth patil static void bnxt_re_poll_success_rcqe(struct bnxt_re_qp *qp, 778*9207f9d2SChandrakanth patil struct ibv_wc *ibvwc, 779*9207f9d2SChandrakanth patil struct bnxt_re_bcqe *hdr, 780*9207f9d2SChandrakanth patil uint32_t flg_val, void *cqe) 781*9207f9d2SChandrakanth patil { 782*9207f9d2SChandrakanth patil uint8_t flags, is_imm, is_rdma; 783*9207f9d2SChandrakanth patil struct bnxt_re_rc_cqe *rcqe; 784*9207f9d2SChandrakanth patil struct bnxt_re_wrid *swque; 785*9207f9d2SChandrakanth patil struct bnxt_re_queue *rq; 786*9207f9d2SChandrakanth patil uint32_t head = 0; 787*9207f9d2SChandrakanth patil uint32_t rcqe_len; 788*9207f9d2SChandrakanth patil uint8_t cnt; 789*9207f9d2SChandrakanth patil 790*9207f9d2SChandrakanth patil rcqe = cqe; 791*9207f9d2SChandrakanth patil if (!qp->srq) { 792*9207f9d2SChandrakanth patil rq = qp->jrqq->hwque; 793*9207f9d2SChandrakanth patil head = qp->jrqq->last_idx; 794*9207f9d2SChandrakanth patil swque = &qp->jrqq->swque[head]; 795*9207f9d2SChandrakanth patil cnt = swque->slots; 796*9207f9d2SChandrakanth patil ibvwc->wr_id = swque->wrid; 797*9207f9d2SChandrakanth patil } else { 798*9207f9d2SChandrakanth patil struct bnxt_re_srq *srq; 799*9207f9d2SChandrakanth patil int tag; 800*9207f9d2SChandrakanth patil 801*9207f9d2SChandrakanth patil srq = qp->srq; 802*9207f9d2SChandrakanth patil rq = srq->srqq; 803*9207f9d2SChandrakanth patil cnt = 1; 804*9207f9d2SChandrakanth patil tag = le32toh(hdr->qphi_rwrid) & BNXT_RE_BCQE_RWRID_MASK; 805*9207f9d2SChandrakanth patil ibvwc->wr_id = srq->srwrid[tag].wrid; 806*9207f9d2SChandrakanth patil bnxt_re_release_srqe(srq, tag); 807*9207f9d2SChandrakanth patil } 808*9207f9d2SChandrakanth patil 809*9207f9d2SChandrakanth patil ibvwc->status = IBV_WC_SUCCESS; 810*9207f9d2SChandrakanth patil ibvwc->qp_num = qp->qpid; 811*9207f9d2SChandrakanth patil rcqe_len = le32toh(rcqe->length); 812*9207f9d2SChandrakanth patil 
ibvwc->byte_len = (qp->qptyp == IBV_QPT_UD) ? 813*9207f9d2SChandrakanth patil rcqe_len & BNXT_RE_UD_CQE_LEN_MASK : rcqe_len; 814*9207f9d2SChandrakanth patil ibvwc->opcode = IBV_WC_RECV; 815*9207f9d2SChandrakanth patil 816*9207f9d2SChandrakanth patil flags = (flg_val >> BNXT_RE_BCQE_FLAGS_SHIFT) & 817*9207f9d2SChandrakanth patil BNXT_RE_BCQE_FLAGS_MASK; 818*9207f9d2SChandrakanth patil is_imm = (flags & BNXT_RE_RC_FLAGS_IMM_MASK) >> 819*9207f9d2SChandrakanth patil BNXT_RE_RC_FLAGS_IMM_SHIFT; 820*9207f9d2SChandrakanth patil is_rdma = (flags & BNXT_RE_RC_FLAGS_RDMA_MASK) >> 821*9207f9d2SChandrakanth patil BNXT_RE_RC_FLAGS_RDMA_SHIFT; 822*9207f9d2SChandrakanth patil ibvwc->wc_flags = 0; 823*9207f9d2SChandrakanth patil if (is_imm) { 824*9207f9d2SChandrakanth patil ibvwc->wc_flags |= IBV_WC_WITH_IMM; 825*9207f9d2SChandrakanth patil /* The HW is returning imm_data in little-endian format, 826*9207f9d2SChandrakanth patil * swap to Big Endian as expected by application 827*9207f9d2SChandrakanth patil */ 828*9207f9d2SChandrakanth patil ibvwc->imm_data = htobe32(le32toh(rcqe->imm_key)); 829*9207f9d2SChandrakanth patil if (is_rdma) 830*9207f9d2SChandrakanth patil ibvwc->opcode = IBV_WC_RECV_RDMA_WITH_IMM; 831*9207f9d2SChandrakanth patil } 832*9207f9d2SChandrakanth patil 833*9207f9d2SChandrakanth patil if (qp->qptyp == IBV_QPT_UD) { 834*9207f9d2SChandrakanth patil bnxt_re_fill_ud_cqe(ibvwc, hdr, cqe, flags); 835*9207f9d2SChandrakanth patil } 836*9207f9d2SChandrakanth patil 837*9207f9d2SChandrakanth patil if (!qp->srq) 838*9207f9d2SChandrakanth patil bnxt_re_jqq_mod_last(qp->jrqq, head); 839*9207f9d2SChandrakanth patil bnxt_re_incr_head(rq, cnt); 840*9207f9d2SChandrakanth patil } 841*9207f9d2SChandrakanth patil 842*9207f9d2SChandrakanth patil static uint8_t bnxt_re_poll_rcqe(struct bnxt_re_qp *qp, struct ibv_wc *ibvwc, 843*9207f9d2SChandrakanth patil void *cqe, uint32_t flg_val, int *cnt) 844*9207f9d2SChandrakanth patil { 845*9207f9d2SChandrakanth patil struct bnxt_re_bcqe *hdr; 
846*9207f9d2SChandrakanth patil uint8_t status, pcqe = false; 847*9207f9d2SChandrakanth patil 848*9207f9d2SChandrakanth patil hdr = cqe + sizeof(struct bnxt_re_rc_cqe); 849*9207f9d2SChandrakanth patil 850*9207f9d2SChandrakanth patil status = (flg_val >> BNXT_RE_BCQE_STATUS_SHIFT) & 851*9207f9d2SChandrakanth patil BNXT_RE_BCQE_STATUS_MASK; 852*9207f9d2SChandrakanth patil *cnt = 1; 853*9207f9d2SChandrakanth patil if (status == BNXT_RE_RSP_ST_OK) 854*9207f9d2SChandrakanth patil bnxt_re_poll_success_rcqe(qp, ibvwc, hdr, flg_val, cqe); 855*9207f9d2SChandrakanth patil else 856*9207f9d2SChandrakanth patil *cnt = bnxt_re_poll_err_rcqe(qp, ibvwc, hdr, flg_val, cqe); 857*9207f9d2SChandrakanth patil 858*9207f9d2SChandrakanth patil return pcqe; 859*9207f9d2SChandrakanth patil } 860*9207f9d2SChandrakanth patil 861*9207f9d2SChandrakanth patil static void bnxt_re_qp_move_flush_err(struct bnxt_re_qp *qp) 862*9207f9d2SChandrakanth patil { 863*9207f9d2SChandrakanth patil struct bnxt_re_cq *scq, *rcq; 864*9207f9d2SChandrakanth patil 865*9207f9d2SChandrakanth patil scq = to_bnxt_re_cq(qp->ibvqp.send_cq); 866*9207f9d2SChandrakanth patil rcq = to_bnxt_re_cq(qp->ibvqp.recv_cq); 867*9207f9d2SChandrakanth patil 868*9207f9d2SChandrakanth patil if (qp->qpst != IBV_QPS_ERR) 869*9207f9d2SChandrakanth patil qp->qpst = IBV_QPS_ERR; 870*9207f9d2SChandrakanth patil bnxt_re_list_add_node(&qp->rnode, &rcq->rfhead); 871*9207f9d2SChandrakanth patil bnxt_re_list_add_node(&qp->snode, &scq->sfhead); 872*9207f9d2SChandrakanth patil } 873*9207f9d2SChandrakanth patil 874*9207f9d2SChandrakanth patil /* Always return false */ 875*9207f9d2SChandrakanth patil static uint8_t bnxt_re_poll_term_cqe(struct bnxt_re_qp *qp, int *cnt) 876*9207f9d2SChandrakanth patil { 877*9207f9d2SChandrakanth patil /* For now just add the QP to flush list without 878*9207f9d2SChandrakanth patil * considering the index reported in the CQE. 
879*9207f9d2SChandrakanth patil * Continue reporting flush completions until the 880*9207f9d2SChandrakanth patil * SQ and RQ are empty. 881*9207f9d2SChandrakanth patil */ 882*9207f9d2SChandrakanth patil *cnt = 0; 883*9207f9d2SChandrakanth patil if (qp->qpst != IBV_QPS_RESET) 884*9207f9d2SChandrakanth patil bnxt_re_qp_move_flush_err(qp); 885*9207f9d2SChandrakanth patil 886*9207f9d2SChandrakanth patil return false; 887*9207f9d2SChandrakanth patil } 888*9207f9d2SChandrakanth patil 889*9207f9d2SChandrakanth patil static int bnxt_re_poll_one(struct bnxt_re_cq *cq, int nwc, struct ibv_wc *wc, 890*9207f9d2SChandrakanth patil uint32_t *resize) 891*9207f9d2SChandrakanth patil { 892*9207f9d2SChandrakanth patil int type, cnt = 0, dqed = 0, hw_polled = 0; 893*9207f9d2SChandrakanth patil struct bnxt_re_queue *cqq = cq->cqq; 894*9207f9d2SChandrakanth patil struct bnxt_re_req_cqe *scqe; 895*9207f9d2SChandrakanth patil struct bnxt_re_ud_cqe *rcqe; 896*9207f9d2SChandrakanth patil uint64_t *qp_handle = NULL; 897*9207f9d2SChandrakanth patil struct bnxt_re_bcqe *hdr; 898*9207f9d2SChandrakanth patil struct bnxt_re_qp *qp; 899*9207f9d2SChandrakanth patil uint8_t pcqe = false; 900*9207f9d2SChandrakanth patil uint32_t flg_val; 901*9207f9d2SChandrakanth patil void *cqe; 902*9207f9d2SChandrakanth patil 903*9207f9d2SChandrakanth patil while (nwc) { 904*9207f9d2SChandrakanth patil cqe = cqq->va + cqq->head * bnxt_re_get_cqe_sz(); 905*9207f9d2SChandrakanth patil hdr = cqe + sizeof(struct bnxt_re_req_cqe); 906*9207f9d2SChandrakanth patil flg_val = le32toh(hdr->flg_st_typ_ph); 907*9207f9d2SChandrakanth patil if (unlikely(!bnxt_re_is_cqe_valid(flg_val, cq->phase))) 908*9207f9d2SChandrakanth patil break; 909*9207f9d2SChandrakanth patil type = (flg_val >> BNXT_RE_BCQE_TYPE_SHIFT) & 910*9207f9d2SChandrakanth patil BNXT_RE_BCQE_TYPE_MASK; 911*9207f9d2SChandrakanth patil switch (type) { 912*9207f9d2SChandrakanth patil case BNXT_RE_WC_TYPE_SEND: 913*9207f9d2SChandrakanth patil scqe = cqe; 
914*9207f9d2SChandrakanth patil qp_handle = (uint64_t *)&scqe->qp_handle; 915*9207f9d2SChandrakanth patil qp = (struct bnxt_re_qp *) 916*9207f9d2SChandrakanth patil (uintptr_t)le64toh(scqe->qp_handle); 917*9207f9d2SChandrakanth patil if (!qp) 918*9207f9d2SChandrakanth patil break; /*stale cqe. should be rung.*/ 919*9207f9d2SChandrakanth patil pcqe = bnxt_re_poll_scqe(qp, wc, cqe, flg_val, &cnt); 920*9207f9d2SChandrakanth patil break; 921*9207f9d2SChandrakanth patil case BNXT_RE_WC_TYPE_RECV_RC: 922*9207f9d2SChandrakanth patil case BNXT_RE_WC_TYPE_RECV_UD: 923*9207f9d2SChandrakanth patil rcqe = cqe; 924*9207f9d2SChandrakanth patil qp_handle = (uint64_t *)&rcqe->qp_handle; 925*9207f9d2SChandrakanth patil qp = (struct bnxt_re_qp *) 926*9207f9d2SChandrakanth patil (uintptr_t)le64toh(rcqe->qp_handle); 927*9207f9d2SChandrakanth patil if (!qp) 928*9207f9d2SChandrakanth patil break; /*stale cqe. should be rung.*/ 929*9207f9d2SChandrakanth patil pcqe = bnxt_re_poll_rcqe(qp, wc, cqe, flg_val, &cnt); 930*9207f9d2SChandrakanth patil break; 931*9207f9d2SChandrakanth patil case BNXT_RE_WC_TYPE_RECV_RAW: 932*9207f9d2SChandrakanth patil break; 933*9207f9d2SChandrakanth patil case BNXT_RE_WC_TYPE_TERM: 934*9207f9d2SChandrakanth patil scqe = cqe; 935*9207f9d2SChandrakanth patil qp_handle = (uint64_t *)&scqe->qp_handle; 936*9207f9d2SChandrakanth patil qp = (struct bnxt_re_qp *) 937*9207f9d2SChandrakanth patil (uintptr_t)le64toh(scqe->qp_handle); 938*9207f9d2SChandrakanth patil if (!qp) 939*9207f9d2SChandrakanth patil break; 940*9207f9d2SChandrakanth patil pcqe = bnxt_re_poll_term_cqe(qp, &cnt); 941*9207f9d2SChandrakanth patil break; 942*9207f9d2SChandrakanth patil case BNXT_RE_WC_TYPE_COFF: 943*9207f9d2SChandrakanth patil /* Stop further processing and return */ 944*9207f9d2SChandrakanth patil bnxt_re_resize_cq_complete(cq); 945*9207f9d2SChandrakanth patil if (unlikely(resize)) 946*9207f9d2SChandrakanth patil *resize = 1; 947*9207f9d2SChandrakanth patil return dqed; 
948*9207f9d2SChandrakanth patil default: 949*9207f9d2SChandrakanth patil break; 950*9207f9d2SChandrakanth patil }; 951*9207f9d2SChandrakanth patil 952*9207f9d2SChandrakanth patil if (pcqe) 953*9207f9d2SChandrakanth patil goto skipp_real; 954*9207f9d2SChandrakanth patil 955*9207f9d2SChandrakanth patil hw_polled++; 956*9207f9d2SChandrakanth patil if (qp_handle) { 957*9207f9d2SChandrakanth patil *qp_handle = 0x0ULL; /* mark cqe as read */ 958*9207f9d2SChandrakanth patil qp_handle = NULL; 959*9207f9d2SChandrakanth patil } 960*9207f9d2SChandrakanth patil bnxt_re_incr_head(cq->cqq, 1); 961*9207f9d2SChandrakanth patil bnxt_re_change_cq_phase(cq); 962*9207f9d2SChandrakanth patil skipp_real: 963*9207f9d2SChandrakanth patil if (cnt) { 964*9207f9d2SChandrakanth patil cnt = 0; 965*9207f9d2SChandrakanth patil dqed++; 966*9207f9d2SChandrakanth patil nwc--; 967*9207f9d2SChandrakanth patil wc++; 968*9207f9d2SChandrakanth patil } 969*9207f9d2SChandrakanth patil } 970*9207f9d2SChandrakanth patil 971*9207f9d2SChandrakanth patil if (likely(hw_polled)) 972*9207f9d2SChandrakanth patil bnxt_re_ring_cq_db(cq); 973*9207f9d2SChandrakanth patil 974*9207f9d2SChandrakanth patil return dqed; 975*9207f9d2SChandrakanth patil } 976*9207f9d2SChandrakanth patil 977*9207f9d2SChandrakanth patil static int bnxt_re_poll_flush_wcs(struct bnxt_re_joint_queue *jqq, 978*9207f9d2SChandrakanth patil struct ibv_wc *ibvwc, uint32_t qpid, 979*9207f9d2SChandrakanth patil int nwc) 980*9207f9d2SChandrakanth patil { 981*9207f9d2SChandrakanth patil struct bnxt_re_queue *que; 982*9207f9d2SChandrakanth patil struct bnxt_re_wrid *wrid; 983*9207f9d2SChandrakanth patil uint32_t cnt = 0; 984*9207f9d2SChandrakanth patil 985*9207f9d2SChandrakanth patil que = jqq->hwque; 986*9207f9d2SChandrakanth patil while(nwc) { 987*9207f9d2SChandrakanth patil if (bnxt_re_is_que_empty(que)) 988*9207f9d2SChandrakanth patil break; 989*9207f9d2SChandrakanth patil wrid = &jqq->swque[jqq->last_idx]; 990*9207f9d2SChandrakanth patil ibvwc->status 
= IBV_WC_WR_FLUSH_ERR; 991*9207f9d2SChandrakanth patil ibvwc->opcode = wrid->wc_opcd; 992*9207f9d2SChandrakanth patil ibvwc->wr_id = wrid->wrid; 993*9207f9d2SChandrakanth patil ibvwc->qp_num = qpid; 994*9207f9d2SChandrakanth patil ibvwc->byte_len = 0; 995*9207f9d2SChandrakanth patil ibvwc->wc_flags = 0; 996*9207f9d2SChandrakanth patil 997*9207f9d2SChandrakanth patil bnxt_re_jqq_mod_last(jqq, jqq->last_idx); 998*9207f9d2SChandrakanth patil bnxt_re_incr_head(que, wrid->slots); 999*9207f9d2SChandrakanth patil nwc--; 1000*9207f9d2SChandrakanth patil cnt++; 1001*9207f9d2SChandrakanth patil ibvwc++; 1002*9207f9d2SChandrakanth patil } 1003*9207f9d2SChandrakanth patil 1004*9207f9d2SChandrakanth patil return cnt; 1005*9207f9d2SChandrakanth patil } 1006*9207f9d2SChandrakanth patil 1007*9207f9d2SChandrakanth patil static int bnxt_re_poll_flush_wqes(struct bnxt_re_cq *cq, 1008*9207f9d2SChandrakanth patil struct bnxt_re_list_head *lhead, 1009*9207f9d2SChandrakanth patil struct ibv_wc *ibvwc, 1010*9207f9d2SChandrakanth patil uint32_t nwc) 1011*9207f9d2SChandrakanth patil { 1012*9207f9d2SChandrakanth patil struct bnxt_re_list_node *cur, *tmp; 1013*9207f9d2SChandrakanth patil struct bnxt_re_joint_queue *jqq; 1014*9207f9d2SChandrakanth patil struct bnxt_re_qp *qp; 1015*9207f9d2SChandrakanth patil bool sq_list = false; 1016*9207f9d2SChandrakanth patil uint32_t polled = 0; 1017*9207f9d2SChandrakanth patil 1018*9207f9d2SChandrakanth patil sq_list = (lhead == &cq->sfhead) ? 
true : false; 1019*9207f9d2SChandrakanth patil if (!bnxt_re_list_empty(lhead)) { 1020*9207f9d2SChandrakanth patil list_for_each_node_safe(cur, tmp, lhead) { 1021*9207f9d2SChandrakanth patil if (sq_list) { 1022*9207f9d2SChandrakanth patil qp = list_node(cur, struct bnxt_re_qp, snode); 1023*9207f9d2SChandrakanth patil jqq = qp->jsqq; 1024*9207f9d2SChandrakanth patil } else { 1025*9207f9d2SChandrakanth patil qp = list_node(cur, struct bnxt_re_qp, rnode); 1026*9207f9d2SChandrakanth patil jqq = qp->jrqq; 1027*9207f9d2SChandrakanth patil if (!jqq) /* Using srq no need to flush */ 1028*9207f9d2SChandrakanth patil goto done; 1029*9207f9d2SChandrakanth patil } 1030*9207f9d2SChandrakanth patil 1031*9207f9d2SChandrakanth patil if (bnxt_re_is_que_empty(jqq->hwque)) 1032*9207f9d2SChandrakanth patil continue; 1033*9207f9d2SChandrakanth patil polled += bnxt_re_poll_flush_wcs(jqq, ibvwc + polled, 1034*9207f9d2SChandrakanth patil qp->qpid, nwc - polled); 1035*9207f9d2SChandrakanth patil if (!(nwc - polled)) 1036*9207f9d2SChandrakanth patil break; 1037*9207f9d2SChandrakanth patil } 1038*9207f9d2SChandrakanth patil } 1039*9207f9d2SChandrakanth patil done: 1040*9207f9d2SChandrakanth patil return polled; 1041*9207f9d2SChandrakanth patil } 1042*9207f9d2SChandrakanth patil 1043*9207f9d2SChandrakanth patil static int bnxt_re_poll_flush_lists(struct bnxt_re_cq *cq, uint32_t nwc, 1044*9207f9d2SChandrakanth patil struct ibv_wc *ibvwc) 1045*9207f9d2SChandrakanth patil { 1046*9207f9d2SChandrakanth patil int left, polled = 0; 1047*9207f9d2SChandrakanth patil 1048*9207f9d2SChandrakanth patil polled = bnxt_re_poll_flush_wqes(cq, &cq->sfhead, ibvwc, nwc); 1049*9207f9d2SChandrakanth patil left = nwc - polled; 1050*9207f9d2SChandrakanth patil 1051*9207f9d2SChandrakanth patil if (!left) 1052*9207f9d2SChandrakanth patil return polled; 1053*9207f9d2SChandrakanth patil 1054*9207f9d2SChandrakanth patil polled += bnxt_re_poll_flush_wqes(cq, &cq->rfhead, 1055*9207f9d2SChandrakanth patil ibvwc + polled, 
left); 1056*9207f9d2SChandrakanth patil return polled; 1057*9207f9d2SChandrakanth patil } 1058*9207f9d2SChandrakanth patil 1059*9207f9d2SChandrakanth patil static int bnxt_re_poll_resize_cq_list(struct bnxt_re_cq *cq, uint32_t nwc, 1060*9207f9d2SChandrakanth patil struct ibv_wc *ibvwc) 1061*9207f9d2SChandrakanth patil { 1062*9207f9d2SChandrakanth patil struct bnxt_re_list_node *cur, *tmp; 1063*9207f9d2SChandrakanth patil struct bnxt_re_work_compl *compl; 1064*9207f9d2SChandrakanth patil int left; 1065*9207f9d2SChandrakanth patil 1066*9207f9d2SChandrakanth patil left = nwc; 1067*9207f9d2SChandrakanth patil list_for_each_node_safe(cur, tmp, &cq->prev_cq_head) { 1068*9207f9d2SChandrakanth patil compl = list_node(cur, struct bnxt_re_work_compl, cnode); 1069*9207f9d2SChandrakanth patil if (!left) 1070*9207f9d2SChandrakanth patil break; 1071*9207f9d2SChandrakanth patil memcpy(ibvwc, &compl->wc, sizeof(*ibvwc)); 1072*9207f9d2SChandrakanth patil ibvwc++; 1073*9207f9d2SChandrakanth patil left--; 1074*9207f9d2SChandrakanth patil bnxt_re_list_del_node(&compl->cnode, &cq->prev_cq_head); 1075*9207f9d2SChandrakanth patil free(compl); 1076*9207f9d2SChandrakanth patil } 1077*9207f9d2SChandrakanth patil 1078*9207f9d2SChandrakanth patil return nwc - left; 1079*9207f9d2SChandrakanth patil } 1080*9207f9d2SChandrakanth patil 1081*9207f9d2SChandrakanth patil 1082*9207f9d2SChandrakanth patil int bnxt_re_poll_cq(struct ibv_cq *ibvcq, int nwc, struct ibv_wc *wc) 1083*9207f9d2SChandrakanth patil { 1084*9207f9d2SChandrakanth patil int dqed = 0, left = 0; 1085*9207f9d2SChandrakanth patil struct bnxt_re_cq *cq; 1086*9207f9d2SChandrakanth patil uint32_t resize = 0; 1087*9207f9d2SChandrakanth patil 1088*9207f9d2SChandrakanth patil cq = container_of(ibvcq, struct bnxt_re_cq, ibvcq); 1089*9207f9d2SChandrakanth patil bnxt_re_dp_spin_lock(&cq->cqq->qlock); 1090*9207f9d2SChandrakanth patil 1091*9207f9d2SChandrakanth patil left = nwc; 1092*9207f9d2SChandrakanth patil /* Check whether we have anything 
to be completed from prev cq context */ 1093*9207f9d2SChandrakanth patil if (unlikely(!bnxt_re_list_empty(&cq->prev_cq_head))) { 1094*9207f9d2SChandrakanth patil dqed = bnxt_re_poll_resize_cq_list(cq, nwc, wc); 1095*9207f9d2SChandrakanth patil left = nwc - dqed; 1096*9207f9d2SChandrakanth patil if (!left) { 1097*9207f9d2SChandrakanth patil bnxt_re_dp_spin_unlock(&cq->cqq->qlock); 1098*9207f9d2SChandrakanth patil return dqed; 1099*9207f9d2SChandrakanth patil } 1100*9207f9d2SChandrakanth patil } 1101*9207f9d2SChandrakanth patil 1102*9207f9d2SChandrakanth patil dqed += bnxt_re_poll_one(cq, left, wc + dqed, &resize); 1103*9207f9d2SChandrakanth patil /* Check if anything is there to flush. */ 1104*9207f9d2SChandrakanth patil left = nwc - dqed; 1105*9207f9d2SChandrakanth patil if (left && (!bnxt_re_list_empty(&cq->sfhead) || 1106*9207f9d2SChandrakanth patil !bnxt_re_list_empty(&cq->rfhead))) 1107*9207f9d2SChandrakanth patil dqed += bnxt_re_poll_flush_lists(cq, left, (wc + dqed)); 1108*9207f9d2SChandrakanth patil bnxt_re_dp_spin_unlock(&cq->cqq->qlock); 1109*9207f9d2SChandrakanth patil 1110*9207f9d2SChandrakanth patil return dqed; 1111*9207f9d2SChandrakanth patil } 1112*9207f9d2SChandrakanth patil 1113*9207f9d2SChandrakanth patil void bnxt_re_cleanup_cq(struct bnxt_re_qp *qp, struct bnxt_re_cq *cq) 1114*9207f9d2SChandrakanth patil { 1115*9207f9d2SChandrakanth patil struct bnxt_re_queue *que = cq->cqq; 1116*9207f9d2SChandrakanth patil struct bnxt_re_req_cqe *scqe; 1117*9207f9d2SChandrakanth patil struct bnxt_re_rc_cqe *rcqe; 1118*9207f9d2SChandrakanth patil struct bnxt_re_bcqe *hdr; 1119*9207f9d2SChandrakanth patil int indx, type; 1120*9207f9d2SChandrakanth patil void *cqe; 1121*9207f9d2SChandrakanth patil 1122*9207f9d2SChandrakanth patil 1123*9207f9d2SChandrakanth patil bnxt_re_dp_spin_lock(&que->qlock); 1124*9207f9d2SChandrakanth patil for(indx = 0; indx < que->depth; indx++) { 1125*9207f9d2SChandrakanth patil cqe = que->va + indx * bnxt_re_get_cqe_sz(); 
1126*9207f9d2SChandrakanth patil hdr = cqe + sizeof(struct bnxt_re_req_cqe); 1127*9207f9d2SChandrakanth patil type = (hdr->flg_st_typ_ph >> BNXT_RE_BCQE_TYPE_SHIFT) & 1128*9207f9d2SChandrakanth patil BNXT_RE_BCQE_TYPE_MASK; 1129*9207f9d2SChandrakanth patil 1130*9207f9d2SChandrakanth patil if (type == BNXT_RE_WC_TYPE_COFF) 1131*9207f9d2SChandrakanth patil continue; 1132*9207f9d2SChandrakanth patil if (type == BNXT_RE_WC_TYPE_SEND || 1133*9207f9d2SChandrakanth patil type == BNXT_RE_WC_TYPE_TERM) { 1134*9207f9d2SChandrakanth patil scqe = cqe; 1135*9207f9d2SChandrakanth patil if (scqe->qp_handle == (uint64_t)qp) 1136*9207f9d2SChandrakanth patil scqe->qp_handle = 0ULL; 1137*9207f9d2SChandrakanth patil } else { 1138*9207f9d2SChandrakanth patil rcqe = cqe; 1139*9207f9d2SChandrakanth patil if (rcqe->qp_handle == (uint64_t)qp) 1140*9207f9d2SChandrakanth patil rcqe->qp_handle = 0ULL; 1141*9207f9d2SChandrakanth patil } 1142*9207f9d2SChandrakanth patil 1143*9207f9d2SChandrakanth patil } 1144*9207f9d2SChandrakanth patil 1145*9207f9d2SChandrakanth patil if (_is_db_drop_recovery_enable(cq->cntx)) { 1146*9207f9d2SChandrakanth patil pthread_spin_lock(&cq->cntx->cq_dbr_res.lock); 1147*9207f9d2SChandrakanth patil bnxt_re_list_del_node(&cq->dbnode, &cq->cntx->cq_dbr_res.head); 1148*9207f9d2SChandrakanth patil pthread_spin_unlock(&cq->cntx->cq_dbr_res.lock); 1149*9207f9d2SChandrakanth patil } 1150*9207f9d2SChandrakanth patil bnxt_re_list_del_node(&qp->snode, &cq->sfhead); 1151*9207f9d2SChandrakanth patil bnxt_re_list_del_node(&qp->rnode, &cq->rfhead); 1152*9207f9d2SChandrakanth patil bnxt_re_dp_spin_unlock(&que->qlock); 1153*9207f9d2SChandrakanth patil } 1154*9207f9d2SChandrakanth patil 1155*9207f9d2SChandrakanth patil void bnxt_re_cq_event(struct ibv_cq *ibvcq) 1156*9207f9d2SChandrakanth patil { 1157*9207f9d2SChandrakanth patil 1158*9207f9d2SChandrakanth patil } 1159*9207f9d2SChandrakanth patil 1160*9207f9d2SChandrakanth patil int bnxt_re_arm_cq(struct ibv_cq *ibvcq, int flags) 
1161*9207f9d2SChandrakanth patil { 1162*9207f9d2SChandrakanth patil struct bnxt_re_cq *cq = to_bnxt_re_cq(ibvcq); 1163*9207f9d2SChandrakanth patil 1164*9207f9d2SChandrakanth patil bnxt_re_dp_spin_lock(&cq->cqq->qlock); 1165*9207f9d2SChandrakanth patil flags = !flags ? BNXT_RE_QUE_TYPE_CQ_ARMALL : 1166*9207f9d2SChandrakanth patil BNXT_RE_QUE_TYPE_CQ_ARMSE; 1167*9207f9d2SChandrakanth patil 1168*9207f9d2SChandrakanth patil bnxt_re_ring_cq_arm_db(cq, flags); 1169*9207f9d2SChandrakanth patil bnxt_re_dp_spin_unlock(&cq->cqq->qlock); 1170*9207f9d2SChandrakanth patil 1171*9207f9d2SChandrakanth patil return 0; 1172*9207f9d2SChandrakanth patil } 1173*9207f9d2SChandrakanth patil 1174*9207f9d2SChandrakanth patil static int bnxt_re_check_qp_limits(struct bnxt_re_context *cntx, 1175*9207f9d2SChandrakanth patil struct ibv_qp_init_attr *attr) 1176*9207f9d2SChandrakanth patil { 1177*9207f9d2SChandrakanth patil struct ibv_device_attr *devattr; 1178*9207f9d2SChandrakanth patil struct bnxt_re_dev *rdev; 1179*9207f9d2SChandrakanth patil 1180*9207f9d2SChandrakanth patil rdev = cntx->rdev; 1181*9207f9d2SChandrakanth patil devattr = &rdev->devattr; 1182*9207f9d2SChandrakanth patil if (attr->qp_type != IBV_QPT_RC && attr->qp_type != IBV_QPT_UD) 1183*9207f9d2SChandrakanth patil return EINVAL; 1184*9207f9d2SChandrakanth patil if (attr->cap.max_send_sge > devattr->max_sge) 1185*9207f9d2SChandrakanth patil return EINVAL; 1186*9207f9d2SChandrakanth patil if (attr->cap.max_recv_sge > devattr->max_sge) 1187*9207f9d2SChandrakanth patil return EINVAL; 1188*9207f9d2SChandrakanth patil if (cntx->modes & BNXT_RE_WQE_MODE_VARIABLE) { 1189*9207f9d2SChandrakanth patil if (attr->cap.max_inline_data > BNXT_RE_MAX_INLINE_SIZE_VAR_WQE) 1190*9207f9d2SChandrakanth patil return -EINVAL; 1191*9207f9d2SChandrakanth patil } else if (attr->cap.max_inline_data > BNXT_RE_MAX_INLINE_SIZE) { 1192*9207f9d2SChandrakanth patil return EINVAL; 1193*9207f9d2SChandrakanth patil } 1194*9207f9d2SChandrakanth patil if 
(attr->cap.max_send_wr > devattr->max_qp_wr) 1195*9207f9d2SChandrakanth patil attr->cap.max_send_wr = devattr->max_qp_wr; 1196*9207f9d2SChandrakanth patil if (attr->cap.max_recv_wr > devattr->max_qp_wr) 1197*9207f9d2SChandrakanth patil attr->cap.max_recv_wr = devattr->max_qp_wr; 1198*9207f9d2SChandrakanth patil 1199*9207f9d2SChandrakanth patil return 0; 1200*9207f9d2SChandrakanth patil } 1201*9207f9d2SChandrakanth patil 1202*9207f9d2SChandrakanth patil static int bnxt_re_get_rq_slots(struct bnxt_re_dev *rdev, uint8_t qpmode, 1203*9207f9d2SChandrakanth patil uint32_t nrwr, uint32_t nsge, uint32_t *esz) 1204*9207f9d2SChandrakanth patil { 1205*9207f9d2SChandrakanth patil uint32_t max_wqesz; 1206*9207f9d2SChandrakanth patil uint32_t wqe_size; 1207*9207f9d2SChandrakanth patil uint32_t stride; 1208*9207f9d2SChandrakanth patil uint32_t slots; 1209*9207f9d2SChandrakanth patil 1210*9207f9d2SChandrakanth patil stride = sizeof(struct bnxt_re_sge); 1211*9207f9d2SChandrakanth patil max_wqesz = bnxt_re_calc_wqe_sz(rdev->devattr.max_sge); 1212*9207f9d2SChandrakanth patil 1213*9207f9d2SChandrakanth patil wqe_size = bnxt_re_calc_wqe_sz(nsge); 1214*9207f9d2SChandrakanth patil if (wqe_size > max_wqesz) 1215*9207f9d2SChandrakanth patil return -EINVAL; 1216*9207f9d2SChandrakanth patil 1217*9207f9d2SChandrakanth patil if (qpmode == BNXT_RE_WQE_MODE_STATIC) 1218*9207f9d2SChandrakanth patil wqe_size = bnxt_re_calc_wqe_sz(6); 1219*9207f9d2SChandrakanth patil 1220*9207f9d2SChandrakanth patil if (esz) 1221*9207f9d2SChandrakanth patil *esz = wqe_size; 1222*9207f9d2SChandrakanth patil 1223*9207f9d2SChandrakanth patil slots = (nrwr * wqe_size) / stride; 1224*9207f9d2SChandrakanth patil return slots; 1225*9207f9d2SChandrakanth patil } 1226*9207f9d2SChandrakanth patil 1227*9207f9d2SChandrakanth patil static int bnxt_re_get_sq_slots(struct bnxt_re_dev *rdev, 1228*9207f9d2SChandrakanth patil uint8_t qpmode, uint32_t nswr, 1229*9207f9d2SChandrakanth patil uint32_t nsge, uint32_t ils, uint32_t 
*esize) 1230*9207f9d2SChandrakanth patil { 1231*9207f9d2SChandrakanth patil uint32_t max_wqesz; 1232*9207f9d2SChandrakanth patil uint32_t wqe_size; 1233*9207f9d2SChandrakanth patil uint32_t cal_ils; 1234*9207f9d2SChandrakanth patil uint32_t stride; 1235*9207f9d2SChandrakanth patil uint32_t ilsize; 1236*9207f9d2SChandrakanth patil uint32_t hdr_sz; 1237*9207f9d2SChandrakanth patil uint32_t slots; 1238*9207f9d2SChandrakanth patil 1239*9207f9d2SChandrakanth patil hdr_sz = bnxt_re_get_sqe_hdr_sz(); 1240*9207f9d2SChandrakanth patil stride = sizeof(struct bnxt_re_sge); 1241*9207f9d2SChandrakanth patil max_wqesz = bnxt_re_calc_wqe_sz(rdev->devattr.max_sge); 1242*9207f9d2SChandrakanth patil ilsize = get_aligned(ils, hdr_sz); 1243*9207f9d2SChandrakanth patil 1244*9207f9d2SChandrakanth patil wqe_size = bnxt_re_calc_wqe_sz(nsge); 1245*9207f9d2SChandrakanth patil if (ilsize) { 1246*9207f9d2SChandrakanth patil cal_ils = hdr_sz + ilsize; 1247*9207f9d2SChandrakanth patil wqe_size = MAX(cal_ils, wqe_size); 1248*9207f9d2SChandrakanth patil wqe_size = get_aligned(wqe_size, hdr_sz); 1249*9207f9d2SChandrakanth patil } 1250*9207f9d2SChandrakanth patil if (wqe_size > max_wqesz) 1251*9207f9d2SChandrakanth patil return -EINVAL; 1252*9207f9d2SChandrakanth patil 1253*9207f9d2SChandrakanth patil if (qpmode == BNXT_RE_WQE_MODE_STATIC) 1254*9207f9d2SChandrakanth patil wqe_size = bnxt_re_calc_wqe_sz(6); 1255*9207f9d2SChandrakanth patil 1256*9207f9d2SChandrakanth patil if (esize) 1257*9207f9d2SChandrakanth patil *esize = wqe_size; 1258*9207f9d2SChandrakanth patil slots = (nswr * wqe_size) / stride; 1259*9207f9d2SChandrakanth patil return slots; 1260*9207f9d2SChandrakanth patil } 1261*9207f9d2SChandrakanth patil 1262*9207f9d2SChandrakanth patil static int bnxt_re_get_sqmem_size(struct bnxt_re_context *cntx, 1263*9207f9d2SChandrakanth patil struct ibv_qp_init_attr *attr, 1264*9207f9d2SChandrakanth patil struct bnxt_re_qattr *qattr) 1265*9207f9d2SChandrakanth patil { 1266*9207f9d2SChandrakanth patil 
uint32_t nsge, nswr, diff = 0; 1267*9207f9d2SChandrakanth patil size_t bytes = 0; 1268*9207f9d2SChandrakanth patil uint32_t npsn; 1269*9207f9d2SChandrakanth patil uint32_t ils; 1270*9207f9d2SChandrakanth patil uint8_t mode; 1271*9207f9d2SChandrakanth patil uint32_t esz; 1272*9207f9d2SChandrakanth patil int nslots; 1273*9207f9d2SChandrakanth patil 1274*9207f9d2SChandrakanth patil mode = cntx->modes & BNXT_RE_WQE_MODE_VARIABLE; 1275*9207f9d2SChandrakanth patil nsge = attr->cap.max_send_sge; 1276*9207f9d2SChandrakanth patil diff = bnxt_re_get_diff(cntx->comp_mask); 1277*9207f9d2SChandrakanth patil nswr = attr->cap.max_send_wr + 1 + diff; 1278*9207f9d2SChandrakanth patil nswr = bnxt_re_init_depth(nswr, cntx->comp_mask); 1279*9207f9d2SChandrakanth patil ils = attr->cap.max_inline_data; 1280*9207f9d2SChandrakanth patil nslots = bnxt_re_get_sq_slots(cntx->rdev, mode, nswr, 1281*9207f9d2SChandrakanth patil nsge, ils, &esz); 1282*9207f9d2SChandrakanth patil if (nslots < 0) 1283*9207f9d2SChandrakanth patil return nslots; 1284*9207f9d2SChandrakanth patil npsn = bnxt_re_get_npsn(mode, nswr, nslots); 1285*9207f9d2SChandrakanth patil if (BNXT_RE_HW_RETX(cntx)) 1286*9207f9d2SChandrakanth patil npsn = roundup_pow_of_two(npsn); 1287*9207f9d2SChandrakanth patil 1288*9207f9d2SChandrakanth patil qattr->nwr = nswr; 1289*9207f9d2SChandrakanth patil qattr->slots = nslots; 1290*9207f9d2SChandrakanth patil qattr->esize = esz; 1291*9207f9d2SChandrakanth patil 1292*9207f9d2SChandrakanth patil bytes = nslots * sizeof(struct bnxt_re_sge); /* ring */ 1293*9207f9d2SChandrakanth patil bytes += npsn * bnxt_re_get_psne_size(cntx); /* psn */ 1294*9207f9d2SChandrakanth patil qattr->sz_ring = get_aligned(bytes, cntx->rdev->pg_size); 1295*9207f9d2SChandrakanth patil qattr->sz_shad = nswr * sizeof(struct bnxt_re_wrid); /* shadow */ 1296*9207f9d2SChandrakanth patil return 0; 1297*9207f9d2SChandrakanth patil } 1298*9207f9d2SChandrakanth patil 1299*9207f9d2SChandrakanth patil static int 
bnxt_re_get_rqmem_size(struct bnxt_re_context *cntx, 1300*9207f9d2SChandrakanth patil struct ibv_qp_init_attr *attr, 1301*9207f9d2SChandrakanth patil struct bnxt_re_qattr *qattr) 1302*9207f9d2SChandrakanth patil { 1303*9207f9d2SChandrakanth patil uint32_t nrwr, nsge; 1304*9207f9d2SChandrakanth patil size_t bytes = 0; 1305*9207f9d2SChandrakanth patil uint32_t esz; 1306*9207f9d2SChandrakanth patil int nslots; 1307*9207f9d2SChandrakanth patil 1308*9207f9d2SChandrakanth patil nsge = attr->cap.max_recv_sge; 1309*9207f9d2SChandrakanth patil nrwr = attr->cap.max_recv_wr + 1; 1310*9207f9d2SChandrakanth patil nrwr = bnxt_re_init_depth(nrwr, cntx->comp_mask); 1311*9207f9d2SChandrakanth patil nslots = bnxt_re_get_rq_slots(cntx->rdev, cntx->modes, 1312*9207f9d2SChandrakanth patil nrwr, nsge, &esz); 1313*9207f9d2SChandrakanth patil if (nslots < 0) 1314*9207f9d2SChandrakanth patil return nslots; 1315*9207f9d2SChandrakanth patil qattr->nwr = nrwr; 1316*9207f9d2SChandrakanth patil qattr->slots = nslots; 1317*9207f9d2SChandrakanth patil qattr->esize = esz; 1318*9207f9d2SChandrakanth patil 1319*9207f9d2SChandrakanth patil bytes = nslots * sizeof(struct bnxt_re_sge); 1320*9207f9d2SChandrakanth patil qattr->sz_ring = get_aligned(bytes, cntx->rdev->pg_size); 1321*9207f9d2SChandrakanth patil qattr->sz_shad = nrwr * sizeof(struct bnxt_re_wrid); 1322*9207f9d2SChandrakanth patil return 0; 1323*9207f9d2SChandrakanth patil } 1324*9207f9d2SChandrakanth patil 1325*9207f9d2SChandrakanth patil static int bnxt_re_get_qpmem_size(struct bnxt_re_context *cntx, 1326*9207f9d2SChandrakanth patil struct ibv_qp_init_attr *attr, 1327*9207f9d2SChandrakanth patil struct bnxt_re_qattr *qattr) 1328*9207f9d2SChandrakanth patil { 1329*9207f9d2SChandrakanth patil int size = 0; 1330*9207f9d2SChandrakanth patil int tmp; 1331*9207f9d2SChandrakanth patil int rc; 1332*9207f9d2SChandrakanth patil 1333*9207f9d2SChandrakanth patil size = sizeof(struct bnxt_re_qp); 1334*9207f9d2SChandrakanth patil tmp = sizeof(struct 
bnxt_re_joint_queue); 1335*9207f9d2SChandrakanth patil tmp += sizeof(struct bnxt_re_queue); 1336*9207f9d2SChandrakanth patil size += tmp; 1337*9207f9d2SChandrakanth patil 1338*9207f9d2SChandrakanth patil rc = bnxt_re_get_sqmem_size(cntx, attr, &qattr[BNXT_RE_QATTR_SQ_INDX]); 1339*9207f9d2SChandrakanth patil if (rc < 0) 1340*9207f9d2SChandrakanth patil return -EINVAL; 1341*9207f9d2SChandrakanth patil size += qattr[BNXT_RE_QATTR_SQ_INDX].sz_ring; 1342*9207f9d2SChandrakanth patil size += qattr[BNXT_RE_QATTR_SQ_INDX].sz_shad; 1343*9207f9d2SChandrakanth patil 1344*9207f9d2SChandrakanth patil if (!attr->srq) { 1345*9207f9d2SChandrakanth patil tmp = sizeof(struct bnxt_re_joint_queue); 1346*9207f9d2SChandrakanth patil tmp += sizeof(struct bnxt_re_queue); 1347*9207f9d2SChandrakanth patil size += tmp; 1348*9207f9d2SChandrakanth patil rc = bnxt_re_get_rqmem_size(cntx, attr, 1349*9207f9d2SChandrakanth patil &qattr[BNXT_RE_QATTR_RQ_INDX]); 1350*9207f9d2SChandrakanth patil if (rc < 0) 1351*9207f9d2SChandrakanth patil return -EINVAL; 1352*9207f9d2SChandrakanth patil size += qattr[BNXT_RE_QATTR_RQ_INDX].sz_ring; 1353*9207f9d2SChandrakanth patil size += qattr[BNXT_RE_QATTR_RQ_INDX].sz_shad; 1354*9207f9d2SChandrakanth patil } 1355*9207f9d2SChandrakanth patil return size; 1356*9207f9d2SChandrakanth patil } 1357*9207f9d2SChandrakanth patil 1358*9207f9d2SChandrakanth patil static void *bnxt_re_alloc_qpslab(struct bnxt_re_context *cntx, 1359*9207f9d2SChandrakanth patil struct ibv_qp_init_attr *attr, 1360*9207f9d2SChandrakanth patil struct bnxt_re_qattr *qattr) 1361*9207f9d2SChandrakanth patil { 1362*9207f9d2SChandrakanth patil int bytes; 1363*9207f9d2SChandrakanth patil 1364*9207f9d2SChandrakanth patil bytes = bnxt_re_get_qpmem_size(cntx, attr, qattr); 1365*9207f9d2SChandrakanth patil if (bytes < 0) 1366*9207f9d2SChandrakanth patil return NULL; 1367*9207f9d2SChandrakanth patil return bnxt_re_alloc_mem(bytes, cntx->rdev->pg_size); 1368*9207f9d2SChandrakanth patil } 

/* Carve the joint-queue and hardware-queue headers for SQ (and RQ when no
 * SRQ is attached) out of the QP's pre-sized memory slab.  Returns 0 or
 * -ENOMEM; partial carve-outs need no unwind because the slab is freed as
 * a whole by the caller on failure.
 */
static int bnxt_re_alloc_queue_ptr(struct bnxt_re_qp *qp,
				   struct ibv_qp_init_attr *attr)
{
	int rc = -ENOMEM;
	int jqsz, qsz;

	jqsz = sizeof(struct bnxt_re_joint_queue);
	qsz = sizeof(struct bnxt_re_queue);
	qp->jsqq = bnxt_re_get_obj(qp->mem, jqsz);
	if (!qp->jsqq)
		return rc;
	qp->jsqq->hwque = bnxt_re_get_obj(qp->mem, qsz);
	if (!qp->jsqq->hwque)
		goto fail;

	if (!attr->srq) {
		qp->jrqq = bnxt_re_get_obj(qp->mem, jqsz);
		if (!qp->jrqq)
			goto fail;
		qp->jrqq->hwque = bnxt_re_get_obj(qp->mem, qsz);
		if (!qp->jrqq->hwque)
			goto fail;
	}

	return 0;
fail:
	return rc;
}

/* Allocate the shadow (software) WR-tracking queue from the slab and link
 * its entries into a circular next_idx free chain; start/last indices both
 * begin at 0.  Returns 0 or -ENOMEM.
 */
static int bnxt_re_alloc_init_swque(struct bnxt_re_joint_queue *jqq,
				    struct bnxt_re_mem *mem,
				    struct bnxt_re_qattr *qattr)
{
	int indx;

	jqq->swque = bnxt_re_get_obj(mem, qattr->sz_shad);
	if (!jqq->swque)
		return -ENOMEM;
	jqq->start_idx = 0;
	/* last_idx temporarily marks the tail so the chain can be closed
	 * into a ring below; it is reset to 0 afterwards. */
	jqq->last_idx = qattr->nwr - 1;
	for (indx = 0; indx < qattr->nwr; indx++)
		jqq->swque[indx].next_idx = indx + 1;
	jqq->swque[jqq->last_idx].next_idx = 0;
	jqq->last_idx = 0;

	return 0;
}

/* Ceiling log2 of n: smallest t with (1 << t) >= n; -1 for n <= 0. */
static inline int bnxt_log2(int n)
{
	int t;

	if (n <= 0)
		return -1;

	t = 0;
	while ((1 << t) < n)
		++t;

	return t;
}

/* Populate the SQ (and optional RQ) hardware queues from the slab: ring
 * memory, strides, depths, shadow queues, doorbell tail pointers, the
 * PSN/MSN search area and the queue spinlocks.  Returns 0 or -errno.
 * On failure the caller frees the whole slab, so no unwind here.
 */
static int bnxt_re_alloc_queues(struct bnxt_re_qp *qp,
				struct ibv_qp_init_attr *attr,
				struct bnxt_re_qattr *qattr)
{
	struct bnxt_re_context *cntx;
	struct bnxt_re_queue *que;
	uint32_t psn_size;
	uint8_t indx;
	int ret;

	cntx = qp->cntx;

	indx = BNXT_RE_QATTR_SQ_INDX;
	que = qp->jsqq->hwque;
	que->stride = sizeof(struct bnxt_re_sge);
	que->depth = qattr[indx].slots;
	/* Reserved-WR correction expressed in slot units. */
	que->diff = (bnxt_re_get_diff(cntx->comp_mask) * qattr[indx].esize) /
		     que->stride;
	que->va = bnxt_re_get_ring(qp->mem, qattr[indx].sz_ring);
	if (!que->va)
		return -ENOMEM;
	/* PSN-search memory is allocated without checking for
	 * QP-Type. The kernel driver does not map this memory if it
	 * is a UD QP. UD QPs use this memory to maintain WC-opcode.
	 * See definition of bnxt_re_fill_psns() for the use case.
	 */
	que->pad = (que->va + que->depth * que->stride);
	psn_size = bnxt_re_get_psne_size(qp->cntx);
	/* NOTE(review): the (double) cast is redundant — bnxt_log2 takes
	 * int — and a psn_size of 0 would yield -1 (wraps as uint32). */
	que->pad_stride_log2 = (uint32_t)bnxt_log2((double)psn_size);

	ret = bnxt_re_alloc_init_swque(qp->jsqq, qp->mem, &qattr[indx]);
	if (ret)
		goto fail;

	qp->cap.max_swr = qattr[indx].nwr;
	qp->jsqq->cntx = qp->cntx;
	/* Variable-WQE mode rings the doorbell on the hw tail; static mode
	 * on the shadow-queue start index. */
	que->dbtail = (qp->qpmode == BNXT_RE_WQE_MODE_VARIABLE) ?
		       &que->tail : &qp->jsqq->start_idx;

	/* Init and adjust MSN table size according to qp mode */
	if (!BNXT_RE_HW_RETX(qp->cntx))
		goto skip_msn;
	que->msn = 0;
	que->msn_tbl_sz = 0;
	if (qp->qpmode & BNXT_RE_WQE_MODE_VARIABLE)
		que->msn_tbl_sz = roundup_pow_of_two(qattr->slots) / 2;
	else
		que->msn_tbl_sz = roundup_pow_of_two(qattr->nwr);
skip_msn:
	bnxt_re_dp_spin_init(&que->qlock, PTHREAD_PROCESS_PRIVATE, !bnxt_single_threaded);

	if (qp->jrqq) {
		indx = BNXT_RE_QATTR_RQ_INDX;
		que = qp->jrqq->hwque;
		que->stride = sizeof(struct bnxt_re_sge);
		que->depth = qattr[indx].slots;
		que->max_slots = qattr[indx].esize / que->stride;
		que->dbtail = &qp->jrqq->start_idx;
		que->va = bnxt_re_get_ring(qp->mem, qattr[indx].sz_ring);
		if (!que->va)
			return -ENOMEM;
		/* For RQ only bnxt_re_wrid.wrid is used. */
		ret = bnxt_re_alloc_init_swque(qp->jrqq, qp->mem, &qattr[indx]);
		if (ret)
			goto fail;

		bnxt_re_dp_spin_init(&que->qlock, PTHREAD_PROCESS_PRIVATE, !bnxt_single_threaded);
		qp->cap.max_rwr = qattr[indx].nwr;
		qp->jrqq->cntx = qp->cntx;
	}

	return 0;
fail:
	return ret;
}

/* Async-event dispatcher: fatal QP/SRQ errors push the QP into the
 * software flush-error state; every other event is ignored here.
 */
void bnxt_re_async_event(struct ibv_async_event *event)
{
	struct ibv_qp *ibvqp;
	struct bnxt_re_qp *qp;

	switch (event->event_type) {
	case IBV_EVENT_CQ_ERR:
		break;
	case IBV_EVENT_SRQ_ERR:
	case IBV_EVENT_QP_FATAL:
	case IBV_EVENT_QP_REQ_ERR:
	case IBV_EVENT_QP_ACCESS_ERR:
	case IBV_EVENT_PATH_MIG_ERR: {
		ibvqp = event->element.qp;
		qp = to_bnxt_re_qp(ibvqp);
		bnxt_re_qp_move_flush_err(qp);
		break;
	}
	case IBV_EVENT_SQ_DRAINED:
	case IBV_EVENT_PATH_MIG:
	case IBV_EVENT_COMM_EST:
	case IBV_EVENT_QP_LAST_WQE_REACHED:
	case IBV_EVENT_SRQ_LIMIT_REACHED:
	case IBV_EVENT_PORT_ACTIVE:
	case IBV_EVENT_PORT_ERR:
	default:
		break;
	}
}

/* Create a QP: validate limits, allocate one slab for all per-QP memory,
 * carve out queues, issue the kernel create command, then cache caps and
 * doorbell/push state.  Returns the verbs QP or NULL (slab freed on any
 * failure path).
 */
struct ibv_qp *bnxt_re_create_qp(struct ibv_pd *ibvpd,
				 struct ibv_qp_init_attr *attr)
{
	struct bnxt_re_context *cntx = to_bnxt_re_context(ibvpd->context);
	struct bnxt_re_qp_resp resp = {};
	struct ibv_device_attr *devattr;
	struct bnxt_re_qp_req req = {};
	struct bnxt_re_qattr qattr[2];
	struct bnxt_re_qpcap *cap;
	struct bnxt_re_dev *rdev;
	struct bnxt_re_qp *qp;
	void *mem;

	if (bnxt_re_check_qp_limits(cntx, attr))
		return NULL;

	memset(qattr, 0, (2 * sizeof(*qattr)));
	mem = bnxt_re_alloc_qpslab(cntx, attr, qattr);
	if (!mem)
		return NULL;
	qp = bnxt_re_get_obj(mem, sizeof(*qp));
	if (!qp)
		goto fail;
	qp->mem = mem;

	qp->cctx = cntx->cctx;

	qp->cntx = cntx;
	qp->qpmode = cntx->modes & BNXT_RE_WQE_MODE_VARIABLE;
	/* alloc queue pointers */
	if (bnxt_re_alloc_queue_ptr(qp, attr))
		goto fail;
	/* alloc queues */
	if (bnxt_re_alloc_queues(qp, attr, qattr))
		goto fail;
	/* Fill ibv_cmd: pass ring addresses and the QP handle the kernel
	 * echoes back in CQEs. */
	cap = &qp->cap;
	req.qpsva = (uint64_t)qp->jsqq->hwque->va;
	req.qprva = qp->jrqq ? (uint64_t)qp->jrqq->hwque->va : 0;
	req.qp_handle = (uint64_t)qp;

	if (ibv_cmd_create_qp(ibvpd, &qp->ibvqp, attr, &req.cmd, sizeof(req),
			      &resp.resp, sizeof(resp)))
		goto fail;

	qp->qpid = resp.qpid;
	qp->qptyp = attr->qp_type;
	qp->qpst = IBV_QPS_RESET;
	qp->scq = to_bnxt_re_cq(attr->send_cq);
	qp->rcq = to_bnxt_re_cq(attr->recv_cq);
	if (attr->srq)
		qp->srq = to_bnxt_re_srq(attr->srq);
	qp->udpi = &cntx->udpi;
	qp->rand.seed = qp->qpid;
	qp->sq_shadow_db_key = BNXT_RE_DB_KEY_INVALID;
	qp->rq_shadow_db_key = BNXT_RE_DB_KEY_INVALID;
	qp->sq_msn = 0;

	rdev = cntx->rdev;
	devattr = &rdev->devattr;
	cap->max_ssge = attr->cap.max_send_sge;
	cap->max_rsge = attr->cap.max_recv_sge;
	cap->max_inline = attr->cap.max_inline_data;
	cap->sqsig = attr->sq_sig_all;
	cap->is_atomic_cap = devattr->atomic_cap;
	INIT_DBLY_LIST_NODE(&qp->snode);
	INIT_DBLY_LIST_NODE(&qp->rnode);
	INIT_DBLY_LIST_NODE(&qp->dbnode);

	/* For SR2, push will be negotiated at modify qp */
	if (_is_chip_gen_p5(qp->cctx) && cntx->udpi.wcdpi) {
		qp->push_st_en = 1;
		qp->max_push_sz = BNXT_RE_MAX_INLINE_SIZE;
	}

	if (_is_db_drop_recovery_enable(cntx)) {
		pthread_spin_lock(&cntx->qp_dbr_res.lock);
		bnxt_re_list_add_node(&qp->dbnode, &cntx->qp_dbr_res.head);
		pthread_spin_unlock(&cntx->qp_dbr_res.lock);
	}
	return &qp->ibvqp;
fail:
	bnxt_re_free_mem(mem);
	return NULL;
}

/* Modify a QP, optionally via the extended command when the kernel
 * supports it (negotiates PPP push doorbells and path-MTU reporting).
 * A transition to RESET rewinds both queues and scrubs the CQs; on
 * success the cached qpst/sq_psn/mtu mirror the new state.
 */
int bnxt_re_modify_qp(struct ibv_qp *ibvqp, struct ibv_qp_attr *attr,
		      int attr_mask)
{
	struct bnxt_re_qp *qp = to_bnxt_re_qp(ibvqp);
	int rc;

	struct bnxt_re_modify_ex_resp resp = {};
	struct bnxt_re_modify_ex_req req = {};
	bool can_issue_mqp_ex = false;

	if (bnxt_re_is_mqp_ex_supported(qp->cntx)) {
		can_issue_mqp_ex = true;
		/* Request for PPP */
		if (can_request_ppp(qp, attr, attr_mask)) {
			req.comp_mask |= BNXT_RE_MQP_PPP_REQ_EN;
			req.dpi = qp->udpi->wcdpi;
		}
		if (attr_mask & IBV_QP_PATH_MTU)
			req.comp_mask |= BNXT_RE_MQP_PATH_MTU_MASK;
	}
	rc = ibv_cmd_modify_qp_compat(ibvqp, attr, attr_mask,
				      can_issue_mqp_ex, &req, &resp);
	if (!rc) {
		if (attr_mask & IBV_QP_STATE) {
			qp->qpst = attr->qp_state;
			/* transition to reset */
			if (qp->qpst == IBV_QPS_RESET) {
				qp->jsqq->hwque->head = 0;
				qp->jsqq->hwque->tail = 0;
				*qp->jsqq->hwque->dbtail = 0;
				qp->jsqq->start_idx = 0;
				qp->jsqq->last_idx = 0;
				bnxt_re_cleanup_cq(qp, qp->scq);
				if (qp->jrqq) {
					qp->jrqq->hwque->head = 0;
					qp->jrqq->hwque->tail = 0;
					*qp->jrqq->hwque->dbtail = 0;
					qp->jrqq->start_idx = 0;
					qp->jrqq->last_idx = 0;
					bnxt_re_cleanup_cq(qp, qp->rcq);
				}
			}
			/* Copy if PUSH was enabled */
			if (resp.comp_mask & BNXT_RE_MQP_PPP_REQ_EN_MASK) {
				qp->push_st_en = BNXT_RE_MQP_PPP_REQ_EN;
				/* Set the next posting state
				 * based on current h/w state
				 */
				qp->push_st_en |=
					!(!!(resp.ppp_st_idx &
					     BNXT_RE_MQP_PPP_STATE)) <<
					BNXT_RE_PPP_ST_SHIFT;
				qp->ppp_idx =
					(resp.ppp_st_idx &
					 BNXT_RE_MQP_PPP_IDX_MASK);
				if (qp->qpmode == BNXT_RE_WQE_MODE_VARIABLE)
					qp->max_push_sz =
						BNXT_RE_MAX_PUSH_SIZE_VAR_WQE;
				else
					qp->max_push_sz =
						BNXT_RE_MAX_INLINE_SIZE;
			}
		}

		if (attr_mask & IBV_QP_SQ_PSN)
			qp->sq_psn = attr->sq_psn;

		if (resp.comp_mask & BNXT_RE_MQP_PATH_MTU_MASK)
			qp->mtu = resp.path_mtu;
		else if (attr_mask & IBV_QP_PATH_MTU)
			/* enum IBV_MTU_256==1 => 0x80<<1 = 256 bytes, etc. */
			qp->mtu = (0x80 << attr->path_mtu);
	}

	return rc;
}

/* Query QP attributes through the kernel; on success also refresh the
 * provider's cached QP state from the verbs object.
 */
int bnxt_re_query_qp(struct ibv_qp *ibvqp, struct ibv_qp_attr *attr,
		     int attr_mask, struct ibv_qp_init_attr *init_attr)
{
	struct bnxt_re_qp *qp = to_bnxt_re_qp(ibvqp);
	struct ibv_query_qp cmd = {};
	int rc;

	rc = ibv_cmd_query_qp(ibvqp, attr, attr_mask, init_attr,
			      &cmd, sizeof(cmd));
	if (!rc)
		qp->qpst = ibvqp->state;

	return rc;
}

/* Destroy a QP: drop it from the doorbell-recovery list first (re-adding
 * it if the kernel destroy fails, so recovery keeps working), then scrub
 * both CQs and release the single QP slab.
 */
int bnxt_re_destroy_qp(struct ibv_qp *ibvqp)
{
	struct bnxt_re_qp *qp = to_bnxt_re_qp(ibvqp);
	struct bnxt_re_mem *mem;
	int status;

	qp->qpst = IBV_QPS_RESET;
	if (_is_db_drop_recovery_enable(qp->cntx)) {
		pthread_spin_lock(&qp->cntx->qp_dbr_res.lock);
		bnxt_re_list_del_node(&qp->dbnode, &qp->cntx->qp_dbr_res.head);
		pthread_spin_unlock(&qp->cntx->qp_dbr_res.lock);
	}
	status = ibv_cmd_destroy_qp(ibvqp);
	if (status) {
		if (_is_db_drop_recovery_enable(qp->cntx)) {
			pthread_spin_lock(&qp->cntx->qp_dbr_res.lock);
			bnxt_re_list_add_node(&qp->dbnode,
					      &qp->cntx->qp_dbr_res.head);
			pthread_spin_unlock(&qp->cntx->qp_dbr_res.lock);
		}
		return status;
	}

	bnxt_re_cleanup_cq(qp, qp->rcq);
	bnxt_re_cleanup_cq(qp, qp->scq);
	mem = qp->mem;
	bnxt_re_free_mem(mem);
	return 0;
}

/* Copy nsg receive SGEs into consecutive hardware queue slots (little-
 * endian on the wire), advancing *idx per slot.
 */
static void bnxt_re_put_rx_sge(struct bnxt_re_queue *que, uint32_t *idx,
			       struct ibv_sge *sgl, int nsg)
{
	struct bnxt_re_sge *sge;
	int indx;

	for (indx = 0; indx < nsg; indx++) {
		sge = bnxt_re_get_hwqe(que, (*idx)++);
		sge->pa = htole64(sgl[indx].addr);
		sge->lkey = htole32(sgl[indx].lkey);
		sge->length = htole32(sgl[indx].length);
	}
}

/* Copy nsg send SGEs into consecutive hardware queue slots; returns the
 * total payload length for the WQE header.
 */
static int bnxt_re_put_tx_sge(struct bnxt_re_queue *que, uint32_t *idx,
			      struct ibv_sge *sgl, int nsg)
{
	struct bnxt_re_sge *sge;
	int indx;
	int len;

	len = 0;
	for (indx = 0; indx < nsg; indx++) {
		sge = bnxt_re_get_hwqe(que, (*idx)++);
		sge->pa = htole64(sgl[indx].addr);
		sge->lkey = htole32(sgl[indx].lkey);
		sge->length = htole32(sgl[indx].length);
		len += sgl[indx].length;
	}
	return len;
}

/* Total inline payload of a send WR, rounded up to SGE-slot granularity. */
static inline int bnxt_re_calc_inline_len(struct ibv_send_wr *swr)
{
	int illen, indx;

	illen = 0;
	for (indx = 0; indx < swr->num_sge; indx++)
		illen += swr->sg_list[indx].length;
	return get_aligned(illen, sizeof(struct bnxt_re_sge));
}

/* Copy inline data from the WR's SGEs into the WQE (and push buffer),
 * slot by slot; rejects totals above max_ils.
 */
static int bnxt_re_put_inline(struct bnxt_re_queue *que, uint32_t *idx,
			      struct bnxt_re_push_buffer *pbuf,
			      struct ibv_sge *sgl, uint32_t nsg,
			      uint16_t max_ils)
{
	int len, t_len, offt = 0;
	int t_cplen = 0, cplen;
	bool pull_dst = true;
	void *il_dst = NULL;
	void *il_src = NULL;
	int alsize;
	int indx;

	alsize = sizeof(struct bnxt_re_sge);

	t_len = 0;
	for (indx = 0; indx < nsg; indx++) {
		len = sgl[indx].length;
		il_src = (void *)sgl[indx].addr;
		t_len += len;
		if (t_len > max_ils)
			goto bad;
		while (len) {
			if (pull_dst) {
1810*9207f9d2SChandrakanth patil pull_dst = false; 1811*9207f9d2SChandrakanth patil il_dst = bnxt_re_get_hwqe(que, (*idx)++); 1812*9207f9d2SChandrakanth patil if (pbuf) 1813*9207f9d2SChandrakanth patil pbuf->wqe[*idx - 1] = 1814*9207f9d2SChandrakanth patil (__u64)il_dst; 1815*9207f9d2SChandrakanth patil t_cplen = 0; 1816*9207f9d2SChandrakanth patil offt = 0; 1817*9207f9d2SChandrakanth patil } 1818*9207f9d2SChandrakanth patil cplen = MIN(len, alsize); 1819*9207f9d2SChandrakanth patil cplen = MIN(cplen,(alsize - offt)); 1820*9207f9d2SChandrakanth patil memcpy(il_dst, il_src, cplen); 1821*9207f9d2SChandrakanth patil t_cplen += cplen; 1822*9207f9d2SChandrakanth patil il_src += cplen; 1823*9207f9d2SChandrakanth patil il_dst += cplen; 1824*9207f9d2SChandrakanth patil offt += cplen; 1825*9207f9d2SChandrakanth patil len -= cplen; 1826*9207f9d2SChandrakanth patil if (t_cplen == alsize) 1827*9207f9d2SChandrakanth patil pull_dst = true; 1828*9207f9d2SChandrakanth patil } 1829*9207f9d2SChandrakanth patil } 1830*9207f9d2SChandrakanth patil 1831*9207f9d2SChandrakanth patil return t_len; 1832*9207f9d2SChandrakanth patil bad: 1833*9207f9d2SChandrakanth patil return -ENOMEM; 1834*9207f9d2SChandrakanth patil } 1835*9207f9d2SChandrakanth patil 1836*9207f9d2SChandrakanth patil static int bnxt_re_required_slots(struct bnxt_re_qp *qp, struct ibv_send_wr *wr, 1837*9207f9d2SChandrakanth patil uint32_t *wqe_sz, void **pbuf) 1838*9207f9d2SChandrakanth patil { 1839*9207f9d2SChandrakanth patil uint32_t wqe_byte; 1840*9207f9d2SChandrakanth patil int ilsize; 1841*9207f9d2SChandrakanth patil 1842*9207f9d2SChandrakanth patil if (wr->send_flags & IBV_SEND_INLINE) { 1843*9207f9d2SChandrakanth patil ilsize = bnxt_re_calc_inline_len(wr); 1844*9207f9d2SChandrakanth patil if (ilsize > qp->cap.max_inline) 1845*9207f9d2SChandrakanth patil return -EINVAL; 1846*9207f9d2SChandrakanth patil if (qp->push_st_en && ilsize <= qp->max_push_sz) 1847*9207f9d2SChandrakanth patil *pbuf = 
bnxt_re_get_pbuf(&qp->push_st_en, qp->ppp_idx, qp->cntx); 1848*9207f9d2SChandrakanth patil wqe_byte = (ilsize + bnxt_re_get_sqe_hdr_sz()); 1849*9207f9d2SChandrakanth patil } else { 1850*9207f9d2SChandrakanth patil wqe_byte = bnxt_re_calc_wqe_sz(wr->num_sge); 1851*9207f9d2SChandrakanth patil } 1852*9207f9d2SChandrakanth patil 1853*9207f9d2SChandrakanth patil /* que->stride is always 2^4 = 16, thus using hard-coding */ 1854*9207f9d2SChandrakanth patil *wqe_sz = wqe_byte >> 4; 1855*9207f9d2SChandrakanth patil if (qp->qpmode == BNXT_RE_WQE_MODE_STATIC) 1856*9207f9d2SChandrakanth patil return 8; 1857*9207f9d2SChandrakanth patil return *wqe_sz; 1858*9207f9d2SChandrakanth patil } 1859*9207f9d2SChandrakanth patil 1860*9207f9d2SChandrakanth patil static inline void bnxt_re_set_hdr_flags(struct bnxt_re_bsqe *hdr, 1861*9207f9d2SChandrakanth patil struct ibv_send_wr *wr, 1862*9207f9d2SChandrakanth patil uint32_t slots, uint8_t sqsig) 1863*9207f9d2SChandrakanth patil { 1864*9207f9d2SChandrakanth patil uint32_t send_flags; 1865*9207f9d2SChandrakanth patil uint32_t hdrval = 0; 1866*9207f9d2SChandrakanth patil uint8_t opcd; 1867*9207f9d2SChandrakanth patil 1868*9207f9d2SChandrakanth patil send_flags = wr->send_flags; 1869*9207f9d2SChandrakanth patil if (send_flags & IBV_SEND_SIGNALED || sqsig) 1870*9207f9d2SChandrakanth patil hdrval |= ((BNXT_RE_WR_FLAGS_SIGNALED & BNXT_RE_HDR_FLAGS_MASK) 1871*9207f9d2SChandrakanth patil << BNXT_RE_HDR_FLAGS_SHIFT); 1872*9207f9d2SChandrakanth patil if (send_flags & IBV_SEND_FENCE) 1873*9207f9d2SChandrakanth patil hdrval |= ((BNXT_RE_WR_FLAGS_UC_FENCE & BNXT_RE_HDR_FLAGS_MASK) 1874*9207f9d2SChandrakanth patil << BNXT_RE_HDR_FLAGS_SHIFT); 1875*9207f9d2SChandrakanth patil if (send_flags & IBV_SEND_SOLICITED) 1876*9207f9d2SChandrakanth patil hdrval |= ((BNXT_RE_WR_FLAGS_SE & BNXT_RE_HDR_FLAGS_MASK) 1877*9207f9d2SChandrakanth patil << BNXT_RE_HDR_FLAGS_SHIFT); 1878*9207f9d2SChandrakanth patil if (send_flags & IBV_SEND_INLINE) 1879*9207f9d2SChandrakanth 
patil hdrval |= ((BNXT_RE_WR_FLAGS_INLINE & BNXT_RE_HDR_FLAGS_MASK) 1880*9207f9d2SChandrakanth patil << BNXT_RE_HDR_FLAGS_SHIFT); 1881*9207f9d2SChandrakanth patil hdrval |= (slots & BNXT_RE_HDR_WS_MASK) << BNXT_RE_HDR_WS_SHIFT; 1882*9207f9d2SChandrakanth patil 1883*9207f9d2SChandrakanth patil /* Fill opcode */ 1884*9207f9d2SChandrakanth patil opcd = ibv_to_bnxt_re_wr_opcd[wr->opcode]; 1885*9207f9d2SChandrakanth patil hdrval |= (opcd & BNXT_RE_HDR_WT_MASK); 1886*9207f9d2SChandrakanth patil hdr->rsv_ws_fl_wt = htole32(hdrval); 1887*9207f9d2SChandrakanth patil } 1888*9207f9d2SChandrakanth patil 1889*9207f9d2SChandrakanth patil static int bnxt_re_build_tx_sge(struct bnxt_re_queue *que, uint32_t *idx, 1890*9207f9d2SChandrakanth patil struct bnxt_re_push_buffer *pbuf, 1891*9207f9d2SChandrakanth patil struct ibv_send_wr *wr, 1892*9207f9d2SChandrakanth patil uint16_t max_il) 1893*9207f9d2SChandrakanth patil { 1894*9207f9d2SChandrakanth patil if (wr->send_flags & IBV_SEND_INLINE) 1895*9207f9d2SChandrakanth patil return bnxt_re_put_inline(que, idx, pbuf, wr->sg_list, wr->num_sge, max_il); 1896*9207f9d2SChandrakanth patil 1897*9207f9d2SChandrakanth patil return bnxt_re_put_tx_sge(que, idx, wr->sg_list, wr->num_sge); 1898*9207f9d2SChandrakanth patil } 1899*9207f9d2SChandrakanth patil 1900*9207f9d2SChandrakanth patil static void *bnxt_re_pull_psn_buff(struct bnxt_re_queue *que, bool hw_retx) 1901*9207f9d2SChandrakanth patil { 1902*9207f9d2SChandrakanth patil if (hw_retx) 1903*9207f9d2SChandrakanth patil return (void *)(que->pad + ((que->msn) << que->pad_stride_log2)); 1904*9207f9d2SChandrakanth patil return (void *)(que->pad + ((*que->dbtail) << que->pad_stride_log2)); 1905*9207f9d2SChandrakanth patil } 1906*9207f9d2SChandrakanth patil 1907*9207f9d2SChandrakanth patil static void bnxt_re_fill_psns_for_msntbl(struct bnxt_re_qp *qp, uint32_t len, 1908*9207f9d2SChandrakanth patil uint32_t st_idx, uint8_t opcode) 1909*9207f9d2SChandrakanth patil { 1910*9207f9d2SChandrakanth patil 
uint32_t npsn = 0, start_psn = 0, next_psn = 0; 1911*9207f9d2SChandrakanth patil struct bnxt_re_msns *msns; 1912*9207f9d2SChandrakanth patil uint32_t pkt_cnt = 0; 1913*9207f9d2SChandrakanth patil 1914*9207f9d2SChandrakanth patil msns = bnxt_re_pull_psn_buff(qp->jsqq->hwque, true); 1915*9207f9d2SChandrakanth patil msns->start_idx_next_psn_start_psn = 0; 1916*9207f9d2SChandrakanth patil 1917*9207f9d2SChandrakanth patil if (qp->qptyp == IBV_QPT_RC) { 1918*9207f9d2SChandrakanth patil start_psn = qp->sq_psn; 1919*9207f9d2SChandrakanth patil pkt_cnt = (len / qp->mtu); 1920*9207f9d2SChandrakanth patil if (len % qp->mtu) 1921*9207f9d2SChandrakanth patil pkt_cnt++; 1922*9207f9d2SChandrakanth patil /* Increment the psn even for 0 len packets 1923*9207f9d2SChandrakanth patil * e.g. for opcode rdma-write-with-imm-data 1924*9207f9d2SChandrakanth patil * with length field = 0 1925*9207f9d2SChandrakanth patil */ 1926*9207f9d2SChandrakanth patil if (bnxt_re_is_zero_len_pkt(len, opcode)) 1927*9207f9d2SChandrakanth patil pkt_cnt = 1; 1928*9207f9d2SChandrakanth patil /* make it 24 bit */ 1929*9207f9d2SChandrakanth patil next_psn = qp->sq_psn + pkt_cnt; 1930*9207f9d2SChandrakanth patil npsn = next_psn; 1931*9207f9d2SChandrakanth patil qp->sq_psn = next_psn; 1932*9207f9d2SChandrakanth patil msns->start_idx_next_psn_start_psn |= 1933*9207f9d2SChandrakanth patil bnxt_re_update_msn_tbl(st_idx, npsn, start_psn); 1934*9207f9d2SChandrakanth patil qp->jsqq->hwque->msn++; 1935*9207f9d2SChandrakanth patil qp->jsqq->hwque->msn %= qp->jsqq->hwque->msn_tbl_sz; 1936*9207f9d2SChandrakanth patil } 1937*9207f9d2SChandrakanth patil } 1938*9207f9d2SChandrakanth patil 1939*9207f9d2SChandrakanth patil static void bnxt_re_fill_psns(struct bnxt_re_qp *qp, uint32_t len, 1940*9207f9d2SChandrakanth patil uint32_t st_idx, uint8_t opcode) 1941*9207f9d2SChandrakanth patil { 1942*9207f9d2SChandrakanth patil uint32_t opc_spsn = 0, flg_npsn = 0; 1943*9207f9d2SChandrakanth patil struct bnxt_re_psns_ext *psns_ext; 
1944*9207f9d2SChandrakanth patil uint32_t pkt_cnt = 0, nxt_psn = 0; 1945*9207f9d2SChandrakanth patil struct bnxt_re_psns *psns; 1946*9207f9d2SChandrakanth patil 1947*9207f9d2SChandrakanth patil psns = bnxt_re_pull_psn_buff(qp->jsqq->hwque, false); 1948*9207f9d2SChandrakanth patil psns_ext = (struct bnxt_re_psns_ext *)psns; 1949*9207f9d2SChandrakanth patil 1950*9207f9d2SChandrakanth patil if (qp->qptyp == IBV_QPT_RC) { 1951*9207f9d2SChandrakanth patil opc_spsn = qp->sq_psn & BNXT_RE_PSNS_SPSN_MASK; 1952*9207f9d2SChandrakanth patil pkt_cnt = (len / qp->mtu); 1953*9207f9d2SChandrakanth patil if (len % qp->mtu) 1954*9207f9d2SChandrakanth patil pkt_cnt++; 1955*9207f9d2SChandrakanth patil /* Increment the psn even for 0 len packets 1956*9207f9d2SChandrakanth patil * e.g. for opcode rdma-write-with-imm-data 1957*9207f9d2SChandrakanth patil * with length field = 0 1958*9207f9d2SChandrakanth patil */ 1959*9207f9d2SChandrakanth patil if (bnxt_re_is_zero_len_pkt(len, opcode)) 1960*9207f9d2SChandrakanth patil pkt_cnt = 1; 1961*9207f9d2SChandrakanth patil nxt_psn = ((qp->sq_psn + pkt_cnt) & BNXT_RE_PSNS_NPSN_MASK); 1962*9207f9d2SChandrakanth patil flg_npsn = nxt_psn; 1963*9207f9d2SChandrakanth patil qp->sq_psn = nxt_psn; 1964*9207f9d2SChandrakanth patil } 1965*9207f9d2SChandrakanth patil psns->opc_spsn = htole32(opc_spsn); 1966*9207f9d2SChandrakanth patil psns->flg_npsn = htole32(flg_npsn); 1967*9207f9d2SChandrakanth patil /* Update for Thor p5 not Thor2 */ 1968*9207f9d2SChandrakanth patil if (!BNXT_RE_HW_RETX(qp->cntx) && qp->cctx->chip_is_gen_p5_thor2) 1969*9207f9d2SChandrakanth patil psns_ext->st_slot_idx = st_idx; 1970*9207f9d2SChandrakanth patil } 1971*9207f9d2SChandrakanth patil 1972*9207f9d2SChandrakanth patil static int bnxt_re_build_ud_sqe(struct ibv_send_wr *wr, 1973*9207f9d2SChandrakanth patil struct bnxt_re_bsqe *hdr, 1974*9207f9d2SChandrakanth patil struct bnxt_re_send *sqe) 1975*9207f9d2SChandrakanth patil { 1976*9207f9d2SChandrakanth patil struct bnxt_re_ah *ah; 
1977*9207f9d2SChandrakanth patil uint64_t qkey; 1978*9207f9d2SChandrakanth patil 1979*9207f9d2SChandrakanth patil ah = to_bnxt_re_ah(wr->wr.ud.ah); 1980*9207f9d2SChandrakanth patil if (!wr->wr.ud.ah) 1981*9207f9d2SChandrakanth patil return -EINVAL; 1982*9207f9d2SChandrakanth patil qkey = wr->wr.ud.remote_qkey; 1983*9207f9d2SChandrakanth patil hdr->lhdr.qkey_len |= htole64(qkey << 32); 1984*9207f9d2SChandrakanth patil sqe->dst_qp = htole32(wr->wr.ud.remote_qpn); 1985*9207f9d2SChandrakanth patil sqe->avid = htole32(ah->avid & 0xFFFFF); 1986*9207f9d2SChandrakanth patil 1987*9207f9d2SChandrakanth patil return 0; 1988*9207f9d2SChandrakanth patil } 1989*9207f9d2SChandrakanth patil 1990*9207f9d2SChandrakanth patil static void bnxt_re_build_cns_sqe(struct ibv_send_wr *wr, 1991*9207f9d2SChandrakanth patil struct bnxt_re_bsqe *hdr, 1992*9207f9d2SChandrakanth patil void *hdr2) 1993*9207f9d2SChandrakanth patil { 1994*9207f9d2SChandrakanth patil struct bnxt_re_atomic *sqe = hdr2; 1995*9207f9d2SChandrakanth patil 1996*9207f9d2SChandrakanth patil hdr->key_immd = htole32(wr->wr.atomic.rkey); 1997*9207f9d2SChandrakanth patil hdr->lhdr.rva = htole64(wr->wr.atomic.remote_addr); 1998*9207f9d2SChandrakanth patil sqe->cmp_dt = htole64(wr->wr.atomic.compare_add); 1999*9207f9d2SChandrakanth patil sqe->swp_dt = htole64(wr->wr.atomic.swap); 2000*9207f9d2SChandrakanth patil } 2001*9207f9d2SChandrakanth patil 2002*9207f9d2SChandrakanth patil static void bnxt_re_build_fna_sqe(struct ibv_send_wr *wr, 2003*9207f9d2SChandrakanth patil struct bnxt_re_bsqe *hdr, 2004*9207f9d2SChandrakanth patil void *hdr2) 2005*9207f9d2SChandrakanth patil { 2006*9207f9d2SChandrakanth patil struct bnxt_re_atomic *sqe = hdr2; 2007*9207f9d2SChandrakanth patil 2008*9207f9d2SChandrakanth patil hdr->key_immd = htole32(wr->wr.atomic.rkey); 2009*9207f9d2SChandrakanth patil hdr->lhdr.rva = htole64(wr->wr.atomic.remote_addr); 2010*9207f9d2SChandrakanth patil sqe->swp_dt = htole64(wr->wr.atomic.compare_add); 
2011*9207f9d2SChandrakanth patil } 2012*9207f9d2SChandrakanth patil 2013*9207f9d2SChandrakanth patil void bnxt_re_force_rts2rts(struct bnxt_re_qp *qp) 2014*9207f9d2SChandrakanth patil { 2015*9207f9d2SChandrakanth patil struct ibv_qp_attr attr = {}; 2016*9207f9d2SChandrakanth patil int attr_mask; 2017*9207f9d2SChandrakanth patil attr_mask = IBV_QP_STATE; 2018*9207f9d2SChandrakanth patil attr.qp_state = IBV_QPS_RTS; 2019*9207f9d2SChandrakanth patil bnxt_re_modify_qp(&qp->ibvqp, &attr, attr_mask); 2020*9207f9d2SChandrakanth patil qp->wqe_cnt = 0; 2021*9207f9d2SChandrakanth patil } 2022*9207f9d2SChandrakanth patil 2023*9207f9d2SChandrakanth patil int bnxt_re_post_send(struct ibv_qp *ibvqp, struct ibv_send_wr *wr, 2024*9207f9d2SChandrakanth patil struct ibv_send_wr **bad) 2025*9207f9d2SChandrakanth patil { 2026*9207f9d2SChandrakanth patil struct bnxt_re_qp *qp = to_bnxt_re_qp(ibvqp); 2027*9207f9d2SChandrakanth patil struct bnxt_re_queue *sq = qp->jsqq->hwque; 2028*9207f9d2SChandrakanth patil struct bnxt_re_push_buffer *pbuf = NULL; 2029*9207f9d2SChandrakanth patil bool chip_is_not_gen_p5_thor2; 2030*9207f9d2SChandrakanth patil int slots, ret = 0, len = 0; 2031*9207f9d2SChandrakanth patil uint32_t swq_idx, wqe_size; 2032*9207f9d2SChandrakanth patil struct bnxt_re_wrid *wrid; 2033*9207f9d2SChandrakanth patil struct bnxt_re_rdma *rsqe; 2034*9207f9d2SChandrakanth patil struct bnxt_re_bsqe *hdr; 2035*9207f9d2SChandrakanth patil struct bnxt_re_send *sqe; 2036*9207f9d2SChandrakanth patil bool ring_db = false; 2037*9207f9d2SChandrakanth patil uint32_t idx; 2038*9207f9d2SChandrakanth patil 2039*9207f9d2SChandrakanth patil bnxt_re_dp_spin_lock(&sq->qlock); 2040*9207f9d2SChandrakanth patil chip_is_not_gen_p5_thor2 = !qp->cctx->chip_is_gen_p5_thor2; 2041*9207f9d2SChandrakanth patil while (wr) { 2042*9207f9d2SChandrakanth patil slots = bnxt_re_required_slots(qp, wr, &wqe_size, (void **)&pbuf); 2043*9207f9d2SChandrakanth patil if (unlikely(slots < 0 || bnxt_re_is_que_full(sq, slots)) 
|| 2044*9207f9d2SChandrakanth patil wr->num_sge > qp->cap.max_ssge) { 2045*9207f9d2SChandrakanth patil *bad = wr; 2046*9207f9d2SChandrakanth patil ret = ENOMEM; 2047*9207f9d2SChandrakanth patil goto bad_wr; 2048*9207f9d2SChandrakanth patil } 2049*9207f9d2SChandrakanth patil if ((wr->opcode == IBV_WR_ATOMIC_CMP_AND_SWP || 2050*9207f9d2SChandrakanth patil wr->opcode == IBV_WR_ATOMIC_FETCH_AND_ADD) && 2051*9207f9d2SChandrakanth patil !qp->cap.is_atomic_cap) { 2052*9207f9d2SChandrakanth patil *bad = wr; 2053*9207f9d2SChandrakanth patil ret = EINVAL; 2054*9207f9d2SChandrakanth patil goto bad_wr; 2055*9207f9d2SChandrakanth patil } 2056*9207f9d2SChandrakanth patil idx = 0; 2057*9207f9d2SChandrakanth patil len = 0; 2058*9207f9d2SChandrakanth patil hdr = bnxt_re_get_hwqe(sq, idx++); 2059*9207f9d2SChandrakanth patil sqe = bnxt_re_get_hwqe(sq, idx++); 2060*9207f9d2SChandrakanth patil /* populate push buffer */ 2061*9207f9d2SChandrakanth patil if (pbuf) { 2062*9207f9d2SChandrakanth patil pbuf->qpid = qp->qpid; 2063*9207f9d2SChandrakanth patil pbuf->wqe[0] = (__u64)hdr; 2064*9207f9d2SChandrakanth patil pbuf->wqe[1] = (__u64)sqe; 2065*9207f9d2SChandrakanth patil pbuf->st_idx = *sq->dbtail; 2066*9207f9d2SChandrakanth patil } 2067*9207f9d2SChandrakanth patil if (wr->num_sge) { 2068*9207f9d2SChandrakanth patil len = bnxt_re_build_tx_sge(sq, &idx, pbuf, wr, qp->cap.max_inline); 2069*9207f9d2SChandrakanth patil if (unlikely(len < 0)) { 2070*9207f9d2SChandrakanth patil ret = ENOMEM; 2071*9207f9d2SChandrakanth patil *bad = wr; 2072*9207f9d2SChandrakanth patil goto bad_wr; 2073*9207f9d2SChandrakanth patil } 2074*9207f9d2SChandrakanth patil } 2075*9207f9d2SChandrakanth patil hdr->lhdr.qkey_len = htole32(len); 2076*9207f9d2SChandrakanth patil bnxt_re_set_hdr_flags(hdr, wr, wqe_size, qp->cap.sqsig); 2077*9207f9d2SChandrakanth patil switch (wr->opcode) { 2078*9207f9d2SChandrakanth patil case IBV_WR_SEND_WITH_IMM: 2079*9207f9d2SChandrakanth patil /* HW is swapping the immediate data before 
2080*9207f9d2SChandrakanth patil * sending it out on the wire. To workaround 2081*9207f9d2SChandrakanth patil * this, swap the imm_data value as sent by 2082*9207f9d2SChandrakanth patil * the application so that the value going out 2083*9207f9d2SChandrakanth patil * on the wire is in big-endian format. 2084*9207f9d2SChandrakanth patil */ 2085*9207f9d2SChandrakanth patil hdr->key_immd = htole32(be32toh(wr->imm_data)); 2086*9207f9d2SChandrakanth patil if (qp->qptyp == IBV_QPT_UD) { 2087*9207f9d2SChandrakanth patil if (chip_is_not_gen_p5_thor2 && 2088*9207f9d2SChandrakanth patil qp->wqe_cnt == BNXT_RE_UD_QP_STALL) 2089*9207f9d2SChandrakanth patil bnxt_re_force_rts2rts(qp); 2090*9207f9d2SChandrakanth patil 2091*9207f9d2SChandrakanth patil len = bnxt_re_build_ud_sqe(wr, hdr, sqe); 2092*9207f9d2SChandrakanth patil } 2093*9207f9d2SChandrakanth patil break; 2094*9207f9d2SChandrakanth patil case IBV_WR_SEND: 2095*9207f9d2SChandrakanth patil if (qp->qptyp == IBV_QPT_UD) { 2096*9207f9d2SChandrakanth patil if (chip_is_not_gen_p5_thor2 && 2097*9207f9d2SChandrakanth patil qp->wqe_cnt == BNXT_RE_UD_QP_STALL) 2098*9207f9d2SChandrakanth patil bnxt_re_force_rts2rts(qp); 2099*9207f9d2SChandrakanth patil 2100*9207f9d2SChandrakanth patil len = bnxt_re_build_ud_sqe(wr, hdr, sqe); 2101*9207f9d2SChandrakanth patil } 2102*9207f9d2SChandrakanth patil break; 2103*9207f9d2SChandrakanth patil case IBV_WR_RDMA_WRITE_WITH_IMM: 2104*9207f9d2SChandrakanth patil hdr->key_immd = htole32(be32toh(wr->imm_data)); 2105*9207f9d2SChandrakanth patil case IBV_WR_RDMA_WRITE: 2106*9207f9d2SChandrakanth patil case IBV_WR_RDMA_READ: 2107*9207f9d2SChandrakanth patil rsqe = (struct bnxt_re_rdma *)sqe; 2108*9207f9d2SChandrakanth patil rsqe->rva = htole64(wr->wr.rdma.remote_addr); 2109*9207f9d2SChandrakanth patil rsqe->rkey = htole32(wr->wr.rdma.rkey); 2110*9207f9d2SChandrakanth patil break; 2111*9207f9d2SChandrakanth patil case IBV_WR_ATOMIC_CMP_AND_SWP: 2112*9207f9d2SChandrakanth patil bnxt_re_build_cns_sqe(wr, 
hdr, sqe); 2113*9207f9d2SChandrakanth patil break; 2114*9207f9d2SChandrakanth patil case IBV_WR_ATOMIC_FETCH_AND_ADD: 2115*9207f9d2SChandrakanth patil bnxt_re_build_fna_sqe(wr, hdr, sqe); 2116*9207f9d2SChandrakanth patil break; 2117*9207f9d2SChandrakanth patil default : 2118*9207f9d2SChandrakanth patil len = -EINVAL; 2119*9207f9d2SChandrakanth patil break; 2120*9207f9d2SChandrakanth patil } 2121*9207f9d2SChandrakanth patil 2122*9207f9d2SChandrakanth patil if (unlikely(len < 0)) { 2123*9207f9d2SChandrakanth patil ret = (len == -EINVAL) ? EINVAL : ENOMEM; 2124*9207f9d2SChandrakanth patil *bad = wr; 2125*9207f9d2SChandrakanth patil break; 2126*9207f9d2SChandrakanth patil } 2127*9207f9d2SChandrakanth patil if (BNXT_RE_HW_RETX(qp->cntx)) 2128*9207f9d2SChandrakanth patil bnxt_re_fill_psns_for_msntbl(qp, len, *sq->dbtail, wr->opcode); 2129*9207f9d2SChandrakanth patil else 2130*9207f9d2SChandrakanth patil bnxt_re_fill_psns(qp, len, *sq->dbtail, wr->opcode); 2131*9207f9d2SChandrakanth patil 2132*9207f9d2SChandrakanth patil wrid = bnxt_re_get_swqe(qp->jsqq, &swq_idx); 2133*9207f9d2SChandrakanth patil wrid->wrid = wr->wr_id; 2134*9207f9d2SChandrakanth patil wrid->bytes = len; 2135*9207f9d2SChandrakanth patil wrid->slots = slots; 2136*9207f9d2SChandrakanth patil wrid->sig = (wr->send_flags & IBV_SEND_SIGNALED || qp->cap.sqsig) ? 
2137*9207f9d2SChandrakanth patil IBV_SEND_SIGNALED : 0; 2138*9207f9d2SChandrakanth patil wrid->wc_opcd = ibv_wr_to_wc_opcd[wr->opcode]; 2139*9207f9d2SChandrakanth patil 2140*9207f9d2SChandrakanth patil bnxt_re_incr_tail(sq, slots); 2141*9207f9d2SChandrakanth patil bnxt_re_jqq_mod_start(qp->jsqq, swq_idx); 2142*9207f9d2SChandrakanth patil ring_db = true; 2143*9207f9d2SChandrakanth patil if (pbuf) { 2144*9207f9d2SChandrakanth patil ring_db = false; 2145*9207f9d2SChandrakanth patil pbuf->tail = *sq->dbtail; 2146*9207f9d2SChandrakanth patil if (_is_chip_thor2(qp->cctx)) { 2147*9207f9d2SChandrakanth patil /* WA for SR2 A0, ring additional db */ 2148*9207f9d2SChandrakanth patil ring_db |= _is_chip_a0(qp->cctx); 2149*9207f9d2SChandrakanth patil bnxt_re_fill_ppp(pbuf, qp, len, idx); 2150*9207f9d2SChandrakanth patil } else { 2151*9207f9d2SChandrakanth patil bnxt_re_fill_push_wcb(qp, pbuf, idx); 2152*9207f9d2SChandrakanth patil } 2153*9207f9d2SChandrakanth patil 2154*9207f9d2SChandrakanth patil bnxt_re_put_pbuf(qp->cntx, pbuf); 2155*9207f9d2SChandrakanth patil pbuf = NULL; 2156*9207f9d2SChandrakanth patil } 2157*9207f9d2SChandrakanth patil qp->wqe_cnt++; 2158*9207f9d2SChandrakanth patil qp->sq_msn++; 2159*9207f9d2SChandrakanth patil wr = wr->next; 2160*9207f9d2SChandrakanth patil } 2161*9207f9d2SChandrakanth patil 2162*9207f9d2SChandrakanth patil bad_wr: 2163*9207f9d2SChandrakanth patil if (ring_db) 2164*9207f9d2SChandrakanth patil bnxt_re_ring_sq_db(qp); 2165*9207f9d2SChandrakanth patil 2166*9207f9d2SChandrakanth patil if (pbuf) 2167*9207f9d2SChandrakanth patil bnxt_re_put_pbuf(qp->cntx, pbuf); 2168*9207f9d2SChandrakanth patil 2169*9207f9d2SChandrakanth patil bnxt_re_dp_spin_unlock(&sq->qlock); 2170*9207f9d2SChandrakanth patil return ret; 2171*9207f9d2SChandrakanth patil } 2172*9207f9d2SChandrakanth patil 2173*9207f9d2SChandrakanth patil int bnxt_re_post_recv(struct ibv_qp *ibvqp, struct ibv_recv_wr *wr, 2174*9207f9d2SChandrakanth patil struct ibv_recv_wr **bad) 
2175*9207f9d2SChandrakanth patil { 2176*9207f9d2SChandrakanth patil struct bnxt_re_qp *qp = to_bnxt_re_qp(ibvqp); 2177*9207f9d2SChandrakanth patil struct bnxt_re_queue *rq = qp->jrqq->hwque; 2178*9207f9d2SChandrakanth patil struct bnxt_re_wrid *swque; 2179*9207f9d2SChandrakanth patil struct bnxt_re_brqe *hdr; 2180*9207f9d2SChandrakanth patil struct bnxt_re_sge *sge; 2181*9207f9d2SChandrakanth patil bool ring_db = false; 2182*9207f9d2SChandrakanth patil uint32_t swq_idx; 2183*9207f9d2SChandrakanth patil uint32_t hdrval; 2184*9207f9d2SChandrakanth patil uint32_t idx; 2185*9207f9d2SChandrakanth patil int rc = 0; 2186*9207f9d2SChandrakanth patil 2187*9207f9d2SChandrakanth patil bnxt_re_dp_spin_lock(&rq->qlock); 2188*9207f9d2SChandrakanth patil while (wr) { 2189*9207f9d2SChandrakanth patil if (unlikely(bnxt_re_is_que_full(rq, rq->max_slots) || 2190*9207f9d2SChandrakanth patil wr->num_sge > qp->cap.max_rsge)) { 2191*9207f9d2SChandrakanth patil *bad = wr; 2192*9207f9d2SChandrakanth patil rc = ENOMEM; 2193*9207f9d2SChandrakanth patil break; 2194*9207f9d2SChandrakanth patil } 2195*9207f9d2SChandrakanth patil swque = bnxt_re_get_swqe(qp->jrqq, &swq_idx); 2196*9207f9d2SChandrakanth patil 2197*9207f9d2SChandrakanth patil /* 2198*9207f9d2SChandrakanth patil * Initialize idx to 2 since the length of header wqe is 32 bytes 2199*9207f9d2SChandrakanth patil * i.e. sizeof(struct bnxt_re_brqe) + sizeof(struct bnxt_re_send) 2200*9207f9d2SChandrakanth patil */ 2201*9207f9d2SChandrakanth patil idx = 2; 2202*9207f9d2SChandrakanth patil hdr = bnxt_re_get_hwqe_hdr(rq); 2203*9207f9d2SChandrakanth patil 2204*9207f9d2SChandrakanth patil if (!wr->num_sge) { 2205*9207f9d2SChandrakanth patil /* 2206*9207f9d2SChandrakanth patil * HW needs at least one SGE for RQ Entries. 2207*9207f9d2SChandrakanth patil * Create an entry if num_sge = 0, 2208*9207f9d2SChandrakanth patil * update the idx and set length of sge to 0. 
2209*9207f9d2SChandrakanth patil */ 2210*9207f9d2SChandrakanth patil sge = bnxt_re_get_hwqe(rq, idx++); 2211*9207f9d2SChandrakanth patil sge->length = 0; 2212*9207f9d2SChandrakanth patil } else { 2213*9207f9d2SChandrakanth patil /* Fill SGEs */ 2214*9207f9d2SChandrakanth patil bnxt_re_put_rx_sge(rq, &idx, wr->sg_list, wr->num_sge); 2215*9207f9d2SChandrakanth patil } 2216*9207f9d2SChandrakanth patil hdrval = BNXT_RE_WR_OPCD_RECV; 2217*9207f9d2SChandrakanth patil hdrval |= ((idx & BNXT_RE_HDR_WS_MASK) << BNXT_RE_HDR_WS_SHIFT); 2218*9207f9d2SChandrakanth patil hdr->rsv_ws_fl_wt = htole32(hdrval); 2219*9207f9d2SChandrakanth patil hdr->wrid = htole32(swq_idx); 2220*9207f9d2SChandrakanth patil 2221*9207f9d2SChandrakanth patil swque->wrid = wr->wr_id; 2222*9207f9d2SChandrakanth patil swque->slots = rq->max_slots; 2223*9207f9d2SChandrakanth patil swque->wc_opcd = BNXT_RE_WC_OPCD_RECV; 2224*9207f9d2SChandrakanth patil 2225*9207f9d2SChandrakanth patil bnxt_re_jqq_mod_start(qp->jrqq, swq_idx); 2226*9207f9d2SChandrakanth patil bnxt_re_incr_tail(rq, rq->max_slots); 2227*9207f9d2SChandrakanth patil ring_db = true; 2228*9207f9d2SChandrakanth patil wr = wr->next; 2229*9207f9d2SChandrakanth patil } 2230*9207f9d2SChandrakanth patil if (ring_db) 2231*9207f9d2SChandrakanth patil bnxt_re_ring_rq_db(qp); 2232*9207f9d2SChandrakanth patil bnxt_re_dp_spin_unlock(&rq->qlock); 2233*9207f9d2SChandrakanth patil 2234*9207f9d2SChandrakanth patil return rc; 2235*9207f9d2SChandrakanth patil } 2236*9207f9d2SChandrakanth patil 2237*9207f9d2SChandrakanth patil static size_t bnxt_re_get_srqmem_size(struct bnxt_re_context *cntx, 2238*9207f9d2SChandrakanth patil struct ibv_srq_init_attr *attr, 2239*9207f9d2SChandrakanth patil struct bnxt_re_qattr *qattr) 2240*9207f9d2SChandrakanth patil { 2241*9207f9d2SChandrakanth patil uint32_t stride, nswr; 2242*9207f9d2SChandrakanth patil size_t size = 0; 2243*9207f9d2SChandrakanth patil 2244*9207f9d2SChandrakanth patil size = sizeof(struct bnxt_re_srq); 
2245*9207f9d2SChandrakanth patil size += sizeof(struct bnxt_re_queue); 2246*9207f9d2SChandrakanth patil /* allocate 1 extra to determin full condition */ 2247*9207f9d2SChandrakanth patil nswr = attr->attr.max_wr + 1; 2248*9207f9d2SChandrakanth patil nswr = bnxt_re_init_depth(nswr, cntx->comp_mask); 2249*9207f9d2SChandrakanth patil stride = bnxt_re_get_srqe_sz(); 2250*9207f9d2SChandrakanth patil 2251*9207f9d2SChandrakanth patil qattr->nwr = nswr; 2252*9207f9d2SChandrakanth patil qattr->slots = nswr; 2253*9207f9d2SChandrakanth patil qattr->esize = stride; 2254*9207f9d2SChandrakanth patil 2255*9207f9d2SChandrakanth patil qattr->sz_ring = get_aligned((nswr * stride), cntx->rdev->pg_size); 2256*9207f9d2SChandrakanth patil qattr->sz_shad = nswr * sizeof(struct bnxt_re_wrid); /* shadow */ 2257*9207f9d2SChandrakanth patil 2258*9207f9d2SChandrakanth patil size += qattr->sz_ring; 2259*9207f9d2SChandrakanth patil size += qattr->sz_shad; 2260*9207f9d2SChandrakanth patil return size; 2261*9207f9d2SChandrakanth patil } 2262*9207f9d2SChandrakanth patil 2263*9207f9d2SChandrakanth patil static void *bnxt_re_alloc_srqslab(struct bnxt_re_context *cntx, 2264*9207f9d2SChandrakanth patil struct ibv_srq_init_attr *attr, 2265*9207f9d2SChandrakanth patil struct bnxt_re_qattr *qattr) 2266*9207f9d2SChandrakanth patil { 2267*9207f9d2SChandrakanth patil size_t bytes; 2268*9207f9d2SChandrakanth patil 2269*9207f9d2SChandrakanth patil bytes = bnxt_re_get_srqmem_size(cntx, attr, qattr); 2270*9207f9d2SChandrakanth patil return bnxt_re_alloc_mem(bytes, cntx->rdev->pg_size); 2271*9207f9d2SChandrakanth patil } 2272*9207f9d2SChandrakanth patil 2273*9207f9d2SChandrakanth patil static struct bnxt_re_srq *bnxt_re_srq_alloc_queue_ptr(struct bnxt_re_mem *mem) 2274*9207f9d2SChandrakanth patil { 2275*9207f9d2SChandrakanth patil struct bnxt_re_srq *srq; 2276*9207f9d2SChandrakanth patil 2277*9207f9d2SChandrakanth patil srq = bnxt_re_get_obj(mem, sizeof(*srq)); 2278*9207f9d2SChandrakanth patil if (!srq) 
2279*9207f9d2SChandrakanth patil return NULL; 2280*9207f9d2SChandrakanth patil srq->srqq = bnxt_re_get_obj(mem, sizeof(struct bnxt_re_queue)); 2281*9207f9d2SChandrakanth patil if (!srq->srqq) 2282*9207f9d2SChandrakanth patil return NULL; 2283*9207f9d2SChandrakanth patil return srq; 2284*9207f9d2SChandrakanth patil } 2285*9207f9d2SChandrakanth patil 2286*9207f9d2SChandrakanth patil static int bnxt_re_srq_alloc_queue(struct bnxt_re_srq *srq, 2287*9207f9d2SChandrakanth patil struct ibv_srq_init_attr *attr, 2288*9207f9d2SChandrakanth patil struct bnxt_re_qattr *qattr) 2289*9207f9d2SChandrakanth patil { 2290*9207f9d2SChandrakanth patil struct bnxt_re_queue *que; 2291*9207f9d2SChandrakanth patil int ret = -ENOMEM; 2292*9207f9d2SChandrakanth patil int idx; 2293*9207f9d2SChandrakanth patil 2294*9207f9d2SChandrakanth patil que = srq->srqq; 2295*9207f9d2SChandrakanth patil que->depth = qattr->slots; 2296*9207f9d2SChandrakanth patil que->stride = qattr->esize; 2297*9207f9d2SChandrakanth patil que->va = bnxt_re_get_ring(srq->mem, qattr->sz_ring); 2298*9207f9d2SChandrakanth patil if (!que->va) 2299*9207f9d2SChandrakanth patil goto bail; 2300*9207f9d2SChandrakanth patil bnxt_re_dp_spin_init(&que->qlock, PTHREAD_PROCESS_PRIVATE, !bnxt_single_threaded); 2301*9207f9d2SChandrakanth patil /* For SRQ only bnxt_re_wrid.wrid is used. 
*/ 2302*9207f9d2SChandrakanth patil srq->srwrid = bnxt_re_get_obj(srq->mem, qattr->sz_shad); 2303*9207f9d2SChandrakanth patil if (!srq->srwrid) 2304*9207f9d2SChandrakanth patil goto bail; 2305*9207f9d2SChandrakanth patil 2306*9207f9d2SChandrakanth patil srq->start_idx = 0; 2307*9207f9d2SChandrakanth patil srq->last_idx = que->depth - 1; 2308*9207f9d2SChandrakanth patil for (idx = 0; idx < que->depth; idx++) 2309*9207f9d2SChandrakanth patil srq->srwrid[idx].next_idx = idx + 1; 2310*9207f9d2SChandrakanth patil srq->srwrid[srq->last_idx].next_idx = -1; 2311*9207f9d2SChandrakanth patil return 0; 2312*9207f9d2SChandrakanth patil bail: 2313*9207f9d2SChandrakanth patil bnxt_re_dp_spin_destroy(&srq->srqq->qlock); 2314*9207f9d2SChandrakanth patil return ret; 2315*9207f9d2SChandrakanth patil } 2316*9207f9d2SChandrakanth patil 2317*9207f9d2SChandrakanth patil struct ibv_srq *bnxt_re_create_srq(struct ibv_pd *ibvpd, 2318*9207f9d2SChandrakanth patil struct ibv_srq_init_attr *attr) 2319*9207f9d2SChandrakanth patil { 2320*9207f9d2SChandrakanth patil struct bnxt_re_srq_resp resp = {}; 2321*9207f9d2SChandrakanth patil struct bnxt_re_srq_req cmd = {}; 2322*9207f9d2SChandrakanth patil struct bnxt_re_qattr qattr = {}; 2323*9207f9d2SChandrakanth patil struct bnxt_re_context *uctx; 2324*9207f9d2SChandrakanth patil struct bnxt_re_srq *srq; 2325*9207f9d2SChandrakanth patil void *mem; 2326*9207f9d2SChandrakanth patil int ret; 2327*9207f9d2SChandrakanth patil 2328*9207f9d2SChandrakanth patil uctx = to_bnxt_re_context(ibvpd->context); 2329*9207f9d2SChandrakanth patil mem = bnxt_re_alloc_srqslab(uctx, attr, &qattr); 2330*9207f9d2SChandrakanth patil if (!mem) 2331*9207f9d2SChandrakanth patil return NULL; 2332*9207f9d2SChandrakanth patil 2333*9207f9d2SChandrakanth patil srq = bnxt_re_srq_alloc_queue_ptr(mem); 2334*9207f9d2SChandrakanth patil if (!srq) 2335*9207f9d2SChandrakanth patil goto fail; 2336*9207f9d2SChandrakanth patil srq->uctx = uctx; 2337*9207f9d2SChandrakanth patil srq->mem = mem; 
2338*9207f9d2SChandrakanth patil if (bnxt_re_srq_alloc_queue(srq, attr, &qattr)) 2339*9207f9d2SChandrakanth patil goto fail; 2340*9207f9d2SChandrakanth patil 2341*9207f9d2SChandrakanth patil cmd.srqva = (uint64_t)srq->srqq->va; 2342*9207f9d2SChandrakanth patil cmd.srq_handle = (uint64_t)srq; 2343*9207f9d2SChandrakanth patil ret = ibv_cmd_create_srq(ibvpd, &srq->ibvsrq, attr, 2344*9207f9d2SChandrakanth patil &cmd.cmd, sizeof(cmd), 2345*9207f9d2SChandrakanth patil &resp.resp, sizeof(resp)); 2346*9207f9d2SChandrakanth patil if (ret) 2347*9207f9d2SChandrakanth patil goto fail; 2348*9207f9d2SChandrakanth patil 2349*9207f9d2SChandrakanth patil srq->srqid = resp.srqid; 2350*9207f9d2SChandrakanth patil srq->udpi = &uctx->udpi; 2351*9207f9d2SChandrakanth patil srq->cap.max_wr = srq->srqq->depth; 2352*9207f9d2SChandrakanth patil srq->cap.max_sge = attr->attr.max_sge; 2353*9207f9d2SChandrakanth patil srq->cap.srq_limit = attr->attr.srq_limit; 2354*9207f9d2SChandrakanth patil srq->arm_req = false; 2355*9207f9d2SChandrakanth patil srq->rand.seed = srq->srqid; 2356*9207f9d2SChandrakanth patil srq->shadow_db_key = BNXT_RE_DB_KEY_INVALID; 2357*9207f9d2SChandrakanth patil 2358*9207f9d2SChandrakanth patil INIT_DBLY_LIST_NODE(&srq->dbnode); 2359*9207f9d2SChandrakanth patil if (_is_db_drop_recovery_enable(uctx)) { 2360*9207f9d2SChandrakanth patil pthread_spin_lock(&uctx->srq_dbr_res.lock); 2361*9207f9d2SChandrakanth patil bnxt_re_list_add_node(&srq->dbnode, &uctx->srq_dbr_res.head); 2362*9207f9d2SChandrakanth patil pthread_spin_unlock(&uctx->srq_dbr_res.lock); 2363*9207f9d2SChandrakanth patil } 2364*9207f9d2SChandrakanth patil return &srq->ibvsrq; 2365*9207f9d2SChandrakanth patil fail: 2366*9207f9d2SChandrakanth patil bnxt_re_free_mem(mem); 2367*9207f9d2SChandrakanth patil return NULL; 2368*9207f9d2SChandrakanth patil } 2369*9207f9d2SChandrakanth patil 2370*9207f9d2SChandrakanth patil int bnxt_re_modify_srq(struct ibv_srq *ibvsrq, struct ibv_srq_attr *attr, 2371*9207f9d2SChandrakanth 
patil int attr_mask) 2372*9207f9d2SChandrakanth patil { 2373*9207f9d2SChandrakanth patil struct bnxt_re_srq *srq = to_bnxt_re_srq(ibvsrq); 2374*9207f9d2SChandrakanth patil struct ibv_modify_srq cmd = {}; 2375*9207f9d2SChandrakanth patil int status = 0; 2376*9207f9d2SChandrakanth patil 2377*9207f9d2SChandrakanth patil status = ibv_cmd_modify_srq(ibvsrq, attr, attr_mask, 2378*9207f9d2SChandrakanth patil &cmd, sizeof(cmd)); 2379*9207f9d2SChandrakanth patil if (!status && ((attr_mask & IBV_SRQ_LIMIT) && 2380*9207f9d2SChandrakanth patil (srq->cap.srq_limit != attr->srq_limit))) { 2381*9207f9d2SChandrakanth patil srq->cap.srq_limit = attr->srq_limit; 2382*9207f9d2SChandrakanth patil } 2383*9207f9d2SChandrakanth patil srq->arm_req = true; 2384*9207f9d2SChandrakanth patil return status; 2385*9207f9d2SChandrakanth patil } 2386*9207f9d2SChandrakanth patil 2387*9207f9d2SChandrakanth patil int bnxt_re_destroy_srq(struct ibv_srq *ibvsrq) 2388*9207f9d2SChandrakanth patil { 2389*9207f9d2SChandrakanth patil struct bnxt_re_srq *srq = to_bnxt_re_srq(ibvsrq); 2390*9207f9d2SChandrakanth patil struct bnxt_re_mem *mem; 2391*9207f9d2SChandrakanth patil int ret; 2392*9207f9d2SChandrakanth patil 2393*9207f9d2SChandrakanth patil if (_is_db_drop_recovery_enable(srq->uctx)) { 2394*9207f9d2SChandrakanth patil pthread_spin_lock(&srq->uctx->srq_dbr_res.lock); 2395*9207f9d2SChandrakanth patil bnxt_re_list_del_node(&srq->dbnode, &srq->uctx->srq_dbr_res.head); 2396*9207f9d2SChandrakanth patil pthread_spin_unlock(&srq->uctx->srq_dbr_res.lock); 2397*9207f9d2SChandrakanth patil } 2398*9207f9d2SChandrakanth patil ret = ibv_cmd_destroy_srq(ibvsrq); 2399*9207f9d2SChandrakanth patil if (ret) { 2400*9207f9d2SChandrakanth patil if (_is_db_drop_recovery_enable(srq->uctx)) { 2401*9207f9d2SChandrakanth patil pthread_spin_lock(&srq->uctx->srq_dbr_res.lock); 2402*9207f9d2SChandrakanth patil bnxt_re_list_add_node(&srq->dbnode, 2403*9207f9d2SChandrakanth patil &srq->uctx->srq_dbr_res.head); 
2404*9207f9d2SChandrakanth patil pthread_spin_unlock(&srq->uctx->srq_dbr_res.lock); 2405*9207f9d2SChandrakanth patil } 2406*9207f9d2SChandrakanth patil return ret; 2407*9207f9d2SChandrakanth patil } 2408*9207f9d2SChandrakanth patil bnxt_re_dp_spin_destroy(&srq->srqq->qlock); 2409*9207f9d2SChandrakanth patil mem = srq->mem; 2410*9207f9d2SChandrakanth patil bnxt_re_free_mem(mem); 2411*9207f9d2SChandrakanth patil return 0; 2412*9207f9d2SChandrakanth patil } 2413*9207f9d2SChandrakanth patil 2414*9207f9d2SChandrakanth patil int bnxt_re_query_srq(struct ibv_srq *ibvsrq, struct ibv_srq_attr *attr) 2415*9207f9d2SChandrakanth patil { 2416*9207f9d2SChandrakanth patil struct ibv_query_srq cmd = {}; 2417*9207f9d2SChandrakanth patil 2418*9207f9d2SChandrakanth patil return ibv_cmd_query_srq(ibvsrq, attr, &cmd, sizeof cmd); 2419*9207f9d2SChandrakanth patil } 2420*9207f9d2SChandrakanth patil 2421*9207f9d2SChandrakanth patil static int bnxt_re_build_srqe(struct bnxt_re_srq *srq, 2422*9207f9d2SChandrakanth patil struct ibv_recv_wr *wr, void *srqe) 2423*9207f9d2SChandrakanth patil { 2424*9207f9d2SChandrakanth patil struct bnxt_re_brqe *hdr = srqe; 2425*9207f9d2SChandrakanth patil struct bnxt_re_wrid *wrid; 2426*9207f9d2SChandrakanth patil struct bnxt_re_sge *sge; 2427*9207f9d2SChandrakanth patil int wqe_sz, len, next; 2428*9207f9d2SChandrakanth patil uint32_t hdrval = 0; 2429*9207f9d2SChandrakanth patil int indx; 2430*9207f9d2SChandrakanth patil 2431*9207f9d2SChandrakanth patil sge = (srqe + bnxt_re_get_srqe_hdr_sz()); 2432*9207f9d2SChandrakanth patil next = srq->start_idx; 2433*9207f9d2SChandrakanth patil wrid = &srq->srwrid[next]; 2434*9207f9d2SChandrakanth patil 2435*9207f9d2SChandrakanth patil len = 0; 2436*9207f9d2SChandrakanth patil for (indx = 0; indx < wr->num_sge; indx++, sge++) { 2437*9207f9d2SChandrakanth patil sge->pa = htole64(wr->sg_list[indx].addr); 2438*9207f9d2SChandrakanth patil sge->lkey = htole32(wr->sg_list[indx].lkey); 2439*9207f9d2SChandrakanth patil 
sge->length = htole32(wr->sg_list[indx].length); 2440*9207f9d2SChandrakanth patil len += wr->sg_list[indx].length; 2441*9207f9d2SChandrakanth patil } 2442*9207f9d2SChandrakanth patil 2443*9207f9d2SChandrakanth patil hdrval = BNXT_RE_WR_OPCD_RECV; 2444*9207f9d2SChandrakanth patil wqe_sz = wr->num_sge + (bnxt_re_get_srqe_hdr_sz() >> 4); /* 16B align */ 2445*9207f9d2SChandrakanth patil /* HW needs at least one SGE for SRQ Entries. 2446*9207f9d2SChandrakanth patil * Increment SRQ WQE size if num_sge = 0 to 2447*9207f9d2SChandrakanth patil * include the extra SGE. Set the sge length to 2448*9207f9d2SChandrakanth patil * zero. 2449*9207f9d2SChandrakanth patil */ 2450*9207f9d2SChandrakanth patil if (!wr->num_sge) { 2451*9207f9d2SChandrakanth patil wqe_sz++; 2452*9207f9d2SChandrakanth patil sge->length = 0; 2453*9207f9d2SChandrakanth patil } 2454*9207f9d2SChandrakanth patil hdrval |= ((wqe_sz & BNXT_RE_HDR_WS_MASK) << BNXT_RE_HDR_WS_SHIFT); 2455*9207f9d2SChandrakanth patil hdr->rsv_ws_fl_wt = htole32(hdrval); 2456*9207f9d2SChandrakanth patil hdr->wrid = htole32((uint32_t)next); 2457*9207f9d2SChandrakanth patil 2458*9207f9d2SChandrakanth patil /* Fill wrid */ 2459*9207f9d2SChandrakanth patil wrid->wrid = wr->wr_id; 2460*9207f9d2SChandrakanth patil wrid->bytes = len; /* N.A. for RQE */ 2461*9207f9d2SChandrakanth patil wrid->sig = 0; /* N.A. 
for RQE */ 2462*9207f9d2SChandrakanth patil 2463*9207f9d2SChandrakanth patil return len; 2464*9207f9d2SChandrakanth patil } 2465*9207f9d2SChandrakanth patil 2466*9207f9d2SChandrakanth patil int bnxt_re_post_srq_recv(struct ibv_srq *ibvsrq, struct ibv_recv_wr *wr, 2467*9207f9d2SChandrakanth patil struct ibv_recv_wr **bad) 2468*9207f9d2SChandrakanth patil { 2469*9207f9d2SChandrakanth patil struct bnxt_re_srq *srq = to_bnxt_re_srq(ibvsrq); 2470*9207f9d2SChandrakanth patil struct bnxt_re_queue *rq = srq->srqq; 2471*9207f9d2SChandrakanth patil int ret, count = 0; 2472*9207f9d2SChandrakanth patil void *srqe; 2473*9207f9d2SChandrakanth patil 2474*9207f9d2SChandrakanth patil bnxt_re_dp_spin_lock(&rq->qlock); 2475*9207f9d2SChandrakanth patil count = rq->tail > rq->head ? rq->tail - rq->head : 2476*9207f9d2SChandrakanth patil rq->depth - rq->head + rq->tail; 2477*9207f9d2SChandrakanth patil while (wr) { 2478*9207f9d2SChandrakanth patil if (srq->start_idx == srq->last_idx || 2479*9207f9d2SChandrakanth patil wr->num_sge > srq->cap.max_sge) { 2480*9207f9d2SChandrakanth patil *bad = wr; 2481*9207f9d2SChandrakanth patil bnxt_re_dp_spin_unlock(&rq->qlock); 2482*9207f9d2SChandrakanth patil return ENOMEM; 2483*9207f9d2SChandrakanth patil } 2484*9207f9d2SChandrakanth patil 2485*9207f9d2SChandrakanth patil srqe = (void *) (rq->va + (rq->tail * rq->stride)); 2486*9207f9d2SChandrakanth patil memset(srqe, 0, bnxt_re_get_srqe_sz()); 2487*9207f9d2SChandrakanth patil ret = bnxt_re_build_srqe(srq, wr, srqe); 2488*9207f9d2SChandrakanth patil if (ret < 0) { 2489*9207f9d2SChandrakanth patil bnxt_re_dp_spin_unlock(&rq->qlock); 2490*9207f9d2SChandrakanth patil *bad = wr; 2491*9207f9d2SChandrakanth patil return ENOMEM; 2492*9207f9d2SChandrakanth patil } 2493*9207f9d2SChandrakanth patil 2494*9207f9d2SChandrakanth patil srq->start_idx = srq->srwrid[srq->start_idx].next_idx; 2495*9207f9d2SChandrakanth patil bnxt_re_incr_tail(rq, 1); 2496*9207f9d2SChandrakanth patil wr = wr->next; 
2497*9207f9d2SChandrakanth patil bnxt_re_ring_srq_db(srq); 2498*9207f9d2SChandrakanth patil count++; 2499*9207f9d2SChandrakanth patil if (srq->arm_req == true && count > srq->cap.srq_limit) { 2500*9207f9d2SChandrakanth patil srq->arm_req = false; 2501*9207f9d2SChandrakanth patil bnxt_re_ring_srq_arm(srq); 2502*9207f9d2SChandrakanth patil } 2503*9207f9d2SChandrakanth patil } 2504*9207f9d2SChandrakanth patil bnxt_re_dp_spin_unlock(&rq->qlock); 2505*9207f9d2SChandrakanth patil 2506*9207f9d2SChandrakanth patil return 0; 2507*9207f9d2SChandrakanth patil } 2508*9207f9d2SChandrakanth patil 2509*9207f9d2SChandrakanth patil struct ibv_ah *bnxt_re_create_ah(struct ibv_pd *ibvpd, struct ibv_ah_attr *attr) 2510*9207f9d2SChandrakanth patil { 2511*9207f9d2SChandrakanth patil struct bnxt_re_context *uctx; 2512*9207f9d2SChandrakanth patil struct bnxt_re_pd *pd; 2513*9207f9d2SChandrakanth patil struct bnxt_re_ah *ah; 2514*9207f9d2SChandrakanth patil int status; 2515*9207f9d2SChandrakanth patil struct ibv_create_ah_resp resp = {}; 2516*9207f9d2SChandrakanth patil 2517*9207f9d2SChandrakanth patil pd = to_bnxt_re_pd(ibvpd); 2518*9207f9d2SChandrakanth patil uctx = to_bnxt_re_context(ibvpd->context); 2519*9207f9d2SChandrakanth patil 2520*9207f9d2SChandrakanth patil ah = calloc(1, sizeof(struct bnxt_re_ah)); 2521*9207f9d2SChandrakanth patil if (!ah) { 2522*9207f9d2SChandrakanth patil goto failed; 2523*9207f9d2SChandrakanth patil } 2524*9207f9d2SChandrakanth patil 2525*9207f9d2SChandrakanth patil ah->pd = pd; 2526*9207f9d2SChandrakanth patil pthread_mutex_lock(&uctx->shlock); 2527*9207f9d2SChandrakanth patil status = ibv_cmd_create_ah(ibvpd, &ah->ibvah, attr, 2528*9207f9d2SChandrakanth patil &resp, sizeof(resp)); 2529*9207f9d2SChandrakanth patil 2530*9207f9d2SChandrakanth patil if (status) 2531*9207f9d2SChandrakanth patil { 2532*9207f9d2SChandrakanth patil pthread_mutex_unlock(&uctx->shlock); 2533*9207f9d2SChandrakanth patil free(ah); 2534*9207f9d2SChandrakanth patil goto failed; 
2535*9207f9d2SChandrakanth patil } 2536*9207f9d2SChandrakanth patil /* read AV ID now. */ 2537*9207f9d2SChandrakanth patil ah->avid = *(uint32_t *)(uctx->shpg + BNXT_RE_SHPG_AVID_OFFT); 2538*9207f9d2SChandrakanth patil pthread_mutex_unlock(&uctx->shlock); 2539*9207f9d2SChandrakanth patil 2540*9207f9d2SChandrakanth patil return &ah->ibvah; 2541*9207f9d2SChandrakanth patil failed: 2542*9207f9d2SChandrakanth patil return NULL; 2543*9207f9d2SChandrakanth patil } 2544*9207f9d2SChandrakanth patil 2545*9207f9d2SChandrakanth patil int bnxt_re_destroy_ah(struct ibv_ah *ibvah) 2546*9207f9d2SChandrakanth patil { 2547*9207f9d2SChandrakanth patil struct bnxt_re_ah *ah; 2548*9207f9d2SChandrakanth patil int status; 2549*9207f9d2SChandrakanth patil 2550*9207f9d2SChandrakanth patil ah = to_bnxt_re_ah(ibvah); 2551*9207f9d2SChandrakanth patil status = ibv_cmd_destroy_ah(ibvah); 2552*9207f9d2SChandrakanth patil if (status) 2553*9207f9d2SChandrakanth patil return status; 2554*9207f9d2SChandrakanth patil free(ah); 2555*9207f9d2SChandrakanth patil 2556*9207f9d2SChandrakanth patil return 0; 2557*9207f9d2SChandrakanth patil } 2558