0001
0002
0003 #include "osdep.h"
0004 #include "defs.h"
0005 #include "user.h"
0006 #include "irdma.h"
0007
0008
0009
0010
0011
0012
0013
0014
0015 static void irdma_set_fragment(__le64 *wqe, u32 offset, struct ib_sge *sge,
0016 u8 valid)
0017 {
0018 if (sge) {
0019 set_64bit_val(wqe, offset,
0020 FIELD_PREP(IRDMAQPSQ_FRAG_TO, sge->addr));
0021 set_64bit_val(wqe, offset + 8,
0022 FIELD_PREP(IRDMAQPSQ_VALID, valid) |
0023 FIELD_PREP(IRDMAQPSQ_FRAG_LEN, sge->length) |
0024 FIELD_PREP(IRDMAQPSQ_FRAG_STAG, sge->lkey));
0025 } else {
0026 set_64bit_val(wqe, offset, 0);
0027 set_64bit_val(wqe, offset + 8,
0028 FIELD_PREP(IRDMAQPSQ_VALID, valid));
0029 }
0030 }
0031
0032
0033
0034
0035
0036
0037
0038
0039 static void irdma_set_fragment_gen_1(__le64 *wqe, u32 offset,
0040 struct ib_sge *sge, u8 valid)
0041 {
0042 if (sge) {
0043 set_64bit_val(wqe, offset,
0044 FIELD_PREP(IRDMAQPSQ_FRAG_TO, sge->addr));
0045 set_64bit_val(wqe, offset + 8,
0046 FIELD_PREP(IRDMAQPSQ_GEN1_FRAG_LEN, sge->length) |
0047 FIELD_PREP(IRDMAQPSQ_GEN1_FRAG_STAG, sge->lkey));
0048 } else {
0049 set_64bit_val(wqe, offset, 0);
0050 set_64bit_val(wqe, offset + 8, 0);
0051 }
0052 }
0053
0054
0055
0056
0057
/**
 * irdma_nop_1 - insert a NOP WQE at the current SQ head
 * @qp: hw qp ptr
 *
 * Fills the WQE at the current SQ head with a one-quantum, unsignaled
 * NOP. The SQ ring head is NOT advanced here; the caller moves it.
 *
 * Return: 0 on success, -EINVAL if the ring head is 0.
 */
static int irdma_nop_1(struct irdma_qp_uk *qp)
{
	u64 hdr;
	__le64 *wqe;
	u32 wqe_idx;
	bool signaled = false;

	if (!qp->sq_ring.head)
		return -EINVAL;

	wqe_idx = IRDMA_RING_CURRENT_HEAD(qp->sq_ring);
	wqe = qp->sq_base[wqe_idx].elem;

	qp->sq_wrtrk_array[wqe_idx].quanta = IRDMA_QP_WQE_MIN_QUANTA;

	/* zero the data quadwords of the WQE */
	set_64bit_val(wqe, 0, 0);
	set_64bit_val(wqe, 8, 0);
	set_64bit_val(wqe, 16, 0);

	hdr = FIELD_PREP(IRDMAQPSQ_OPCODE, IRDMAQP_OP_NOP) |
	      FIELD_PREP(IRDMAQPSQ_SIGCOMPL, signaled) |
	      FIELD_PREP(IRDMAQPSQ_VALID, qp->swqe_polarity);

	/* make sure WQE is written before the valid-bit quadword */
	dma_wmb();

	set_64bit_val(wqe, 24, hdr);

	return 0;
}
0088
0089
0090
0091
0092
0093
0094 void irdma_clr_wqes(struct irdma_qp_uk *qp, u32 qp_wqe_idx)
0095 {
0096 __le64 *wqe;
0097 u32 wqe_idx;
0098
0099 if (!(qp_wqe_idx & 0x7F)) {
0100 wqe_idx = (qp_wqe_idx + 128) % qp->sq_ring.size;
0101 wqe = qp->sq_base[wqe_idx].elem;
0102 if (wqe_idx)
0103 memset(wqe, qp->swqe_polarity ? 0 : 0xFF, 0x1000);
0104 else
0105 memset(wqe, qp->swqe_polarity ? 0xFF : 0, 0x1000);
0106 }
0107 }
0108
0109
0110
0111
0112
/**
 * irdma_uk_qp_post_wr - ring the SQ doorbell if the hardware needs it
 * @qp: hw qp ptr
 *
 * Reads the doorbell shadow area to learn how far the hardware has
 * consumed the SQ, and only writes the doorbell when new WQEs were
 * posted since the last call and the hardware tail indicates it may
 * stall without a ring.
 */
void irdma_uk_qp_post_wr(struct irdma_qp_uk *qp)
{
	u64 temp;
	u32 hw_sq_tail;
	u32 sw_sq_head;

	/* full barrier: order WQE writes before the shadow-area read below */
	mb();

	/* read the doorbell shadow area */
	get_64bit_val(qp->shadow_area, 0, &temp);

	hw_sq_tail = (u32)FIELD_GET(IRDMA_QP_DBSA_HW_SQ_TAIL, temp);
	sw_sq_head = IRDMA_RING_CURRENT_HEAD(qp->sq_ring);
	if (sw_sq_head != qp->initial_ring.head) {
		if (qp->push_dropped) {
			/* a dropped push forces an unconditional doorbell */
			writel(qp->qp_id, qp->wqe_alloc_db);
			qp->push_dropped = false;
		} else if (sw_sq_head != hw_sq_tail) {
			if (sw_sq_head > qp->initial_ring.head) {
				/* no wrap: ring if hw tail is inside [initial head, sw head) */
				if (hw_sq_tail >= qp->initial_ring.head &&
				    hw_sq_tail < sw_sq_head)
					writel(qp->qp_id, qp->wqe_alloc_db);
			} else {
				/* wrapped: the posted region spans the ring end */
				if (hw_sq_tail >= qp->initial_ring.head ||
				    hw_sq_tail < sw_sq_head)
					writel(qp->qp_id, qp->wqe_alloc_db);
			}
		}
	}

	qp->initial_ring.head = qp->sq_ring.head;
}
0146
0147
0148
0149
0150
0151
/**
 * irdma_qp_ring_push_db - ring the push doorbell
 * @qp: hw qp ptr
 * @wqe_idx: wqe index of the pushed WQE
 *
 * Writes the WQE descriptor index (in 8-WQE units, hence >> 3) plus the
 * qp id to the push doorbell, then records push mode as active.
 */
static void irdma_qp_ring_push_db(struct irdma_qp_uk *qp, u32 wqe_idx)
{
	set_32bit_val(qp->push_db, 0,
		      FIELD_PREP(IRDMA_WQEALLOC_WQE_DESC_INDEX, wqe_idx >> 3) | qp->qp_id);
	qp->initial_ring.head = qp->sq_ring.head;
	qp->push_mode = true;
	qp->push_dropped = false;
}
0160
/**
 * irdma_qp_push_wqe - either post normally or copy the WQE to the push page
 * @qp: hw qp ptr
 * @wqe: wqe to copy
 * @quanta: size of WQE in quanta
 * @wqe_idx: wqe index
 * @post_sq: whether to ring the regular doorbell when not pushing
 *
 * Push is only used when the SQ is otherwise idle (initial head ==
 * ring tail) or push mode is already active; otherwise fall back to a
 * normal doorbell post.
 */
void irdma_qp_push_wqe(struct irdma_qp_uk *qp, __le64 *wqe, u16 quanta,
		       u32 wqe_idx, bool post_sq)
{
	__le64 *push;

	if (IRDMA_RING_CURRENT_HEAD(qp->initial_ring) !=
		    IRDMA_RING_CURRENT_TAIL(qp->sq_ring) &&
	    !qp->push_mode) {
		if (post_sq)
			irdma_uk_qp_post_wr(qp);
	} else {
		/* copy the WQE into its slot in the push page (32B per WQE) */
		push = (__le64 *)((uintptr_t)qp->push_wqe +
				  (wqe_idx & 0x7) * 0x20);
		memcpy(push, wqe, quanta * IRDMA_QP_WQE_MIN_SIZE);
		irdma_qp_ring_push_db(qp, wqe_idx);
	}
}
0178
0179
0180
0181
0182
0183
0184
0185
0186
/**
 * irdma_qp_get_next_send_wqe - pad with NOPs if needed, return wqe ptr & index
 * @qp: hw qp ptr
 * @wqe_idx: return wqe index
 * @quanta: size of WQE in quanta
 * @total_size: size of WQE in bytes
 * @info: info on this WQE (wr_id is recorded in the wrtrk array)
 *
 * Return: pointer to the WQE, or NULL if the ring lacks space.
 */
__le64 *irdma_qp_get_next_send_wqe(struct irdma_qp_uk *qp, u32 *wqe_idx,
				   u16 quanta, u32 total_size,
				   struct irdma_post_sq_info *info)
{
	__le64 *wqe;
	__le64 *wqe_0 = NULL;
	u32 nop_wqe_idx;
	u16 avail_quanta;
	u16 i;

	/* quanta remaining before the next max_hw_sq_chunk boundary */
	avail_quanta = qp->uk_attrs->max_hw_sq_chunk -
		       (IRDMA_RING_CURRENT_HEAD(qp->sq_ring) %
		       qp->uk_attrs->max_hw_sq_chunk);
	if (quanta <= avail_quanta) {
		/* WQE fits inside the current chunk */
		if (quanta > IRDMA_SQ_RING_FREE_QUANTA(qp->sq_ring))
			return NULL;
	} else {
		/* WQE would straddle a chunk boundary: pad the chunk with NOPs */
		if (quanta + avail_quanta >
		    IRDMA_SQ_RING_FREE_QUANTA(qp->sq_ring))
			return NULL;

		nop_wqe_idx = IRDMA_RING_CURRENT_HEAD(qp->sq_ring);
		for (i = 0; i < avail_quanta; i++) {
			irdma_nop_1(qp);
			IRDMA_RING_MOVE_HEAD_NOCHECK(qp->sq_ring);
		}
		if (qp->push_db && info->push_wqe)
			irdma_qp_push_wqe(qp, qp->sq_base[nop_wqe_idx].elem,
					  avail_quanta, nop_wqe_idx, true);
	}

	*wqe_idx = IRDMA_RING_CURRENT_HEAD(qp->sq_ring);
	/* head at index 0 means the ring wrapped: flip SQ polarity */
	if (!*wqe_idx)
		qp->swqe_polarity = !qp->swqe_polarity;

	IRDMA_RING_MOVE_HEAD_BY_COUNT_NOCHECK(qp->sq_ring, quanta);

	wqe = qp->sq_base[*wqe_idx].elem;
	/*
	 * GEN_1, single-quantum WQE ending on an odd index: stamp the
	 * inverted polarity valid bit into the following WQE's header.
	 */
	if (qp->uk_attrs->hw_rev == IRDMA_GEN_1 && quanta == 1 &&
	    (IRDMA_RING_CURRENT_HEAD(qp->sq_ring) & 1)) {
		wqe_0 = qp->sq_base[IRDMA_RING_CURRENT_HEAD(qp->sq_ring)].elem;
		wqe_0[3] = cpu_to_le64(FIELD_PREP(IRDMAQPSQ_VALID, qp->swqe_polarity ? 0 : 1));
	}
	qp->sq_wrtrk_array[*wqe_idx].wrid = info->wr_id;
	qp->sq_wrtrk_array[*wqe_idx].wr_len = total_size;
	qp->sq_wrtrk_array[*wqe_idx].quanta = quanta;

	return wqe;
}
0238
0239
0240
0241
0242
0243
0244 __le64 *irdma_qp_get_next_recv_wqe(struct irdma_qp_uk *qp, u32 *wqe_idx)
0245 {
0246 __le64 *wqe;
0247 int ret_code;
0248
0249 if (IRDMA_RING_FULL_ERR(qp->rq_ring))
0250 return NULL;
0251
0252 IRDMA_ATOMIC_RING_MOVE_HEAD(qp->rq_ring, *wqe_idx, ret_code);
0253 if (ret_code)
0254 return NULL;
0255
0256 if (!*wqe_idx)
0257 qp->rwqe_polarity = !qp->rwqe_polarity;
0258
0259 wqe = qp->rq_base[*wqe_idx * qp->rq_wqe_size_multiplier].elem;
0260
0261 return wqe;
0262 }
0263
0264
0265
0266
0267
0268
0269
/**
 * irdma_uk_rdma_write - post an RDMA write WQE
 * @qp: hw qp ptr
 * @info: post sq information
 * @post_sq: flag to post sq
 *
 * Return: 0 on success, -EINVAL for too many fragments, -ENOMEM when
 * no WQE space is available, or an error from the quanta conversion.
 */
int irdma_uk_rdma_write(struct irdma_qp_uk *qp, struct irdma_post_sq_info *info,
			bool post_sq)
{
	u64 hdr;
	__le64 *wqe;
	struct irdma_rdma_write *op_info;
	u32 i, wqe_idx;
	u32 total_size = 0, byte_off;
	int ret_code;
	u32 frag_cnt, addl_frag_cnt;
	bool read_fence = false;
	u16 quanta;

	info->push_wqe = qp->push_db ? true : false;

	op_info = &info->op.rdma_write;
	if (op_info->num_lo_sges > qp->max_sq_frag_cnt)
		return -EINVAL;

	for (i = 0; i < op_info->num_lo_sges; i++)
		total_size += op_info->lo_sg_list[i].length;

	read_fence |= info->read_fence;

	/* immediate data occupies the first fragment slot */
	if (info->imm_data_valid)
		frag_cnt = op_info->num_lo_sges + 1;
	else
		frag_cnt = op_info->num_lo_sges;
	addl_frag_cnt = frag_cnt > 1 ? (frag_cnt - 1) : 0;
	ret_code = irdma_fragcnt_to_quanta_sq(frag_cnt, &quanta);
	if (ret_code)
		return ret_code;

	wqe = irdma_qp_get_next_send_wqe(qp, &wqe_idx, quanta, total_size,
					 info);
	if (!wqe)
		return -ENOMEM;

	irdma_clr_wqes(qp, wqe_idx);

	set_64bit_val(wqe, 16,
		      FIELD_PREP(IRDMAQPSQ_FRAG_TO, op_info->rem_addr.addr));

	if (info->imm_data_valid) {
		set_64bit_val(wqe, 0,
			      FIELD_PREP(IRDMAQPSQ_IMMDATA, info->imm_data));
		i = 0;
	} else {
		qp->wqe_ops.iw_set_fragment(wqe, 0,
					    op_info->lo_sg_list,
					    qp->swqe_polarity);
		i = 1;
	}

	for (byte_off = 32; i < op_info->num_lo_sges; i++) {
		qp->wqe_ops.iw_set_fragment(wqe, byte_off,
					    &op_info->lo_sg_list[i],
					    qp->swqe_polarity);
		byte_off += 16;
	}

	/* even fragment count: stamp the valid bit in the unused fragment */
	if (qp->uk_attrs->hw_rev >= IRDMA_GEN_2 && !(frag_cnt & 0x01) &&
	    frag_cnt) {
		qp->wqe_ops.iw_set_fragment(wqe, byte_off, NULL,
					    qp->swqe_polarity);
		if (qp->uk_attrs->hw_rev == IRDMA_GEN_2)
			++addl_frag_cnt;
	}

	hdr = FIELD_PREP(IRDMAQPSQ_REMSTAG, op_info->rem_addr.lkey) |
	      FIELD_PREP(IRDMAQPSQ_OPCODE, info->op_type) |
	      FIELD_PREP(IRDMAQPSQ_IMMDATAFLAG, info->imm_data_valid) |
	      FIELD_PREP(IRDMAQPSQ_REPORTRTT, info->report_rtt) |
	      FIELD_PREP(IRDMAQPSQ_ADDFRAGCNT, addl_frag_cnt) |
	      FIELD_PREP(IRDMAQPSQ_PUSHWQE, info->push_wqe) |
	      FIELD_PREP(IRDMAQPSQ_READFENCE, read_fence) |
	      FIELD_PREP(IRDMAQPSQ_LOCALFENCE, info->local_fence) |
	      FIELD_PREP(IRDMAQPSQ_SIGCOMPL, info->signaled) |
	      FIELD_PREP(IRDMAQPSQ_VALID, qp->swqe_polarity);

	/* make sure WQE is populated before the valid-bit quadword */
	dma_wmb();

	set_64bit_val(wqe, 24, hdr);
	if (info->push_wqe) {
		irdma_qp_push_wqe(qp, wqe, quanta, wqe_idx, post_sq);
	} else {
		if (post_sq)
			irdma_uk_qp_post_wr(qp);
	}

	return 0;
}
0363
0364
0365
0366
0367
0368
0369
0370
/**
 * irdma_uk_rdma_read - post an RDMA read (or read-with-local-invalidate) WQE
 * @qp: hw qp ptr
 * @info: post sq information
 * @inv_stag: true to use the local-invalidate read opcode
 * @post_sq: flag to post sq
 *
 * Return: 0 on success, -EINVAL for too many fragments, -ENOMEM when
 * no WQE space is available, or an error from the quanta conversion.
 */
int irdma_uk_rdma_read(struct irdma_qp_uk *qp, struct irdma_post_sq_info *info,
		       bool inv_stag, bool post_sq)
{
	struct irdma_rdma_read *op_info;
	int ret_code;
	u32 i, byte_off, total_size = 0;
	bool local_fence = false;
	u32 addl_frag_cnt;
	__le64 *wqe;
	u32 wqe_idx;
	u16 quanta;
	u64 hdr;

	info->push_wqe = qp->push_db ? true : false;

	op_info = &info->op.rdma_read;
	if (qp->max_sq_frag_cnt < op_info->num_lo_sges)
		return -EINVAL;

	for (i = 0; i < op_info->num_lo_sges; i++)
		total_size += op_info->lo_sg_list[i].length;

	ret_code = irdma_fragcnt_to_quanta_sq(op_info->num_lo_sges, &quanta);
	if (ret_code)
		return ret_code;

	wqe = irdma_qp_get_next_send_wqe(qp, &wqe_idx, quanta, total_size,
					 info);
	if (!wqe)
		return -ENOMEM;

	irdma_clr_wqes(qp, wqe_idx);

	addl_frag_cnt = op_info->num_lo_sges > 1 ?
			(op_info->num_lo_sges - 1) : 0;
	local_fence |= info->local_fence;

	qp->wqe_ops.iw_set_fragment(wqe, 0, op_info->lo_sg_list,
				    qp->swqe_polarity);
	for (i = 1, byte_off = 32; i < op_info->num_lo_sges; ++i) {
		qp->wqe_ops.iw_set_fragment(wqe, byte_off,
					    &op_info->lo_sg_list[i],
					    qp->swqe_polarity);
		byte_off += 16;
	}

	/* even fragment count: stamp the valid bit in the unused fragment */
	if (qp->uk_attrs->hw_rev >= IRDMA_GEN_2 &&
	    !(op_info->num_lo_sges & 0x01) && op_info->num_lo_sges) {
		qp->wqe_ops.iw_set_fragment(wqe, byte_off, NULL,
					    qp->swqe_polarity);
		if (qp->uk_attrs->hw_rev == IRDMA_GEN_2)
			++addl_frag_cnt;
	}
	set_64bit_val(wqe, 16,
		      FIELD_PREP(IRDMAQPSQ_FRAG_TO, op_info->rem_addr.addr));
	hdr = FIELD_PREP(IRDMAQPSQ_REMSTAG, op_info->rem_addr.lkey) |
	      FIELD_PREP(IRDMAQPSQ_REPORTRTT, (info->report_rtt ? 1 : 0)) |
	      FIELD_PREP(IRDMAQPSQ_ADDFRAGCNT, addl_frag_cnt) |
	      FIELD_PREP(IRDMAQPSQ_OPCODE,
			 (inv_stag ? IRDMAQP_OP_RDMA_READ_LOC_INV : IRDMAQP_OP_RDMA_READ)) |
	      FIELD_PREP(IRDMAQPSQ_PUSHWQE, info->push_wqe) |
	      FIELD_PREP(IRDMAQPSQ_READFENCE, info->read_fence) |
	      FIELD_PREP(IRDMAQPSQ_LOCALFENCE, local_fence) |
	      FIELD_PREP(IRDMAQPSQ_SIGCOMPL, info->signaled) |
	      FIELD_PREP(IRDMAQPSQ_VALID, qp->swqe_polarity);

	/* make sure WQE is populated before the valid-bit quadword */
	dma_wmb();

	set_64bit_val(wqe, 24, hdr);
	if (info->push_wqe) {
		irdma_qp_push_wqe(qp, wqe, quanta, wqe_idx, post_sq);
	} else {
		if (post_sq)
			irdma_uk_qp_post_wr(qp);
	}

	return 0;
}
0450
0451
0452
0453
0454
0455
0456
/**
 * irdma_uk_send - post a send WQE (rdma send, send-with-imm, send-with-inv)
 * @qp: hw qp ptr
 * @info: post sq information
 * @post_sq: flag to post sq
 *
 * Return: 0 on success, -EINVAL for too many fragments, -ENOMEM when
 * no WQE space is available, or an error from the quanta conversion.
 */
int irdma_uk_send(struct irdma_qp_uk *qp, struct irdma_post_sq_info *info,
		  bool post_sq)
{
	__le64 *wqe;
	struct irdma_post_send *op_info;
	u64 hdr;
	u32 i, wqe_idx, total_size = 0, byte_off;
	int ret_code;
	u32 frag_cnt, addl_frag_cnt;
	bool read_fence = false;
	u16 quanta;

	info->push_wqe = qp->push_db ? true : false;

	op_info = &info->op.send;
	if (qp->max_sq_frag_cnt < op_info->num_sges)
		return -EINVAL;

	for (i = 0; i < op_info->num_sges; i++)
		total_size += op_info->sg_list[i].length;

	/* immediate data occupies the first fragment slot */
	if (info->imm_data_valid)
		frag_cnt = op_info->num_sges + 1;
	else
		frag_cnt = op_info->num_sges;
	ret_code = irdma_fragcnt_to_quanta_sq(frag_cnt, &quanta);
	if (ret_code)
		return ret_code;

	wqe = irdma_qp_get_next_send_wqe(qp, &wqe_idx, quanta, total_size,
					 info);
	if (!wqe)
		return -ENOMEM;

	irdma_clr_wqes(qp, wqe_idx);

	read_fence |= info->read_fence;
	addl_frag_cnt = frag_cnt > 1 ? (frag_cnt - 1) : 0;
	if (info->imm_data_valid) {
		set_64bit_val(wqe, 0,
			      FIELD_PREP(IRDMAQPSQ_IMMDATA, info->imm_data));
		i = 0;
	} else {
		qp->wqe_ops.iw_set_fragment(wqe, 0,
					    frag_cnt ? op_info->sg_list : NULL,
					    qp->swqe_polarity);
		i = 1;
	}

	for (byte_off = 32; i < op_info->num_sges; i++) {
		qp->wqe_ops.iw_set_fragment(wqe, byte_off, &op_info->sg_list[i],
					    qp->swqe_polarity);
		byte_off += 16;
	}

	/* even fragment count: stamp the valid bit in the unused fragment */
	if (qp->uk_attrs->hw_rev >= IRDMA_GEN_2 && !(frag_cnt & 0x01) &&
	    frag_cnt) {
		qp->wqe_ops.iw_set_fragment(wqe, byte_off, NULL,
					    qp->swqe_polarity);
		if (qp->uk_attrs->hw_rev == IRDMA_GEN_2)
			++addl_frag_cnt;
	}

	set_64bit_val(wqe, 16,
		      FIELD_PREP(IRDMAQPSQ_DESTQKEY, op_info->qkey) |
		      FIELD_PREP(IRDMAQPSQ_DESTQPN, op_info->dest_qp));
	hdr = FIELD_PREP(IRDMAQPSQ_REMSTAG, info->stag_to_inv) |
	      FIELD_PREP(IRDMAQPSQ_AHID, op_info->ah_id) |
	      FIELD_PREP(IRDMAQPSQ_IMMDATAFLAG,
			 (info->imm_data_valid ? 1 : 0)) |
	      FIELD_PREP(IRDMAQPSQ_REPORTRTT, (info->report_rtt ? 1 : 0)) |
	      FIELD_PREP(IRDMAQPSQ_OPCODE, info->op_type) |
	      FIELD_PREP(IRDMAQPSQ_ADDFRAGCNT, addl_frag_cnt) |
	      FIELD_PREP(IRDMAQPSQ_PUSHWQE, info->push_wqe) |
	      FIELD_PREP(IRDMAQPSQ_READFENCE, read_fence) |
	      FIELD_PREP(IRDMAQPSQ_LOCALFENCE, info->local_fence) |
	      FIELD_PREP(IRDMAQPSQ_SIGCOMPL, info->signaled) |
	      FIELD_PREP(IRDMAQPSQ_UDPHEADER, info->udp_hdr) |
	      FIELD_PREP(IRDMAQPSQ_L4LEN, info->l4len) |
	      FIELD_PREP(IRDMAQPSQ_VALID, qp->swqe_polarity);

	/* make sure WQE is populated before the valid-bit quadword */
	dma_wmb();

	set_64bit_val(wqe, 24, hdr);
	if (info->push_wqe) {
		irdma_qp_push_wqe(qp, wqe, quanta, wqe_idx, post_sq);
	} else {
		if (post_sq)
			irdma_uk_qp_post_wr(qp);
	}

	return 0;
}
0551
0552
0553
0554
0555
0556
/**
 * irdma_set_mw_bind_wqe_gen_1 - set memory-window bind WQE (GEN_1)
 * @wqe: wqe for setting bind values
 * @op_info: info for setting the bind wqe
 *
 * NOTE(review): mw_stag/mr_stag land in the opposite fields compared
 * with irdma_set_mw_bind_wqe() — presumably GEN_1 hardware expects the
 * two stags swapped; confirm against the GEN_1 descriptor layout.
 */
static void irdma_set_mw_bind_wqe_gen_1(__le64 *wqe,
					struct irdma_bind_window *op_info)
{
	set_64bit_val(wqe, 0, (uintptr_t)op_info->va);
	set_64bit_val(wqe, 8,
		      FIELD_PREP(IRDMAQPSQ_PARENTMRSTAG, op_info->mw_stag) |
		      FIELD_PREP(IRDMAQPSQ_MWSTAG, op_info->mr_stag));
	set_64bit_val(wqe, 16, op_info->bind_len);
}
0566
0567
0568
0569
0570
0571
0572
0573
0574 static void irdma_copy_inline_data_gen_1(u8 *dest, u8 *src, u32 len,
0575 u8 polarity)
0576 {
0577 if (len <= 16) {
0578 memcpy(dest, src, len);
0579 } else {
0580 memcpy(dest, src, 16);
0581 src += 16;
0582 dest = dest + 32;
0583 memcpy(dest, src, len - 16);
0584 }
0585 }
0586
0587
0588
0589
0590
0591
0592
0593 static inline u16 irdma_inline_data_size_to_quanta_gen_1(u32 data_size)
0594 {
0595 return data_size <= 16 ? IRDMA_QP_WQE_MIN_QUANTA : 2;
0596 }
0597
0598
0599
0600
0601
0602
/**
 * irdma_set_mw_bind_wqe - set memory-window bind WQE
 * @wqe: wqe for setting bind values
 * @op_info: info for setting the bind wqe
 *
 * Writes the window VA, the parent MR stag / MW stag pair, and the
 * bind length into the first three quadwords of the WQE.
 */
static void irdma_set_mw_bind_wqe(__le64 *wqe,
				  struct irdma_bind_window *op_info)
{
	set_64bit_val(wqe, 0, (uintptr_t)op_info->va);
	set_64bit_val(wqe, 8,
		      FIELD_PREP(IRDMAQPSQ_PARENTMRSTAG, op_info->mr_stag) |
		      FIELD_PREP(IRDMAQPSQ_MWSTAG, op_info->mw_stag));
	set_64bit_val(wqe, 16, op_info->bind_len);
}
0612
0613
0614
0615
0616
0617
0618
0619
0620 static void irdma_copy_inline_data(u8 *dest, u8 *src, u32 len, u8 polarity)
0621 {
0622 u8 inline_valid = polarity << IRDMA_INLINE_VALID_S;
0623 u32 copy_size;
0624
0625 dest += 8;
0626 if (len <= 8) {
0627 memcpy(dest, src, len);
0628 return;
0629 }
0630
0631 *((u64 *)dest) = *((u64 *)src);
0632 len -= 8;
0633 src += 8;
0634 dest += 24;
0635
0636 while (len) {
0637 copy_size = len < 31 ? len : 31;
0638 memcpy(dest, src, copy_size);
0639 *(dest + 31) = inline_valid;
0640 len -= copy_size;
0641 dest += 32;
0642 src += copy_size;
0643 }
0644 }
0645
0646
0647
0648
0649
0650
0651
0652 static u16 irdma_inline_data_size_to_quanta(u32 data_size)
0653 {
0654 if (data_size <= 8)
0655 return IRDMA_QP_WQE_MIN_QUANTA;
0656 else if (data_size <= 39)
0657 return 2;
0658 else if (data_size <= 70)
0659 return 3;
0660 else if (data_size <= 101)
0661 return 4;
0662 else if (data_size <= 132)
0663 return 5;
0664 else if (data_size <= 163)
0665 return 6;
0666 else if (data_size <= 194)
0667 return 7;
0668 else
0669 return 8;
0670 }
0671
0672
0673
0674
0675
0676
0677
/**
 * irdma_uk_inline_rdma_write - post an inline RDMA write WQE
 * @qp: hw qp ptr
 * @info: post sq information
 * @post_sq: flag to post sq
 *
 * Return: 0 on success, -EINVAL if the data exceeds max_inline_data,
 * -ENOMEM when no WQE space is available.
 */
int irdma_uk_inline_rdma_write(struct irdma_qp_uk *qp,
			       struct irdma_post_sq_info *info, bool post_sq)
{
	__le64 *wqe;
	struct irdma_inline_rdma_write *op_info;
	u64 hdr = 0;
	u32 wqe_idx;
	bool read_fence = false;
	u16 quanta;

	info->push_wqe = qp->push_db ? true : false;
	op_info = &info->op.inline_rdma_write;

	if (op_info->len > qp->max_inline_data)
		return -EINVAL;

	quanta = qp->wqe_ops.iw_inline_data_size_to_quanta(op_info->len);
	wqe = irdma_qp_get_next_send_wqe(qp, &wqe_idx, quanta, op_info->len,
					 info);
	if (!wqe)
		return -ENOMEM;

	irdma_clr_wqes(qp, wqe_idx);

	read_fence |= info->read_fence;
	set_64bit_val(wqe, 16,
		      FIELD_PREP(IRDMAQPSQ_FRAG_TO, op_info->rem_addr.addr));

	hdr = FIELD_PREP(IRDMAQPSQ_REMSTAG, op_info->rem_addr.lkey) |
	      FIELD_PREP(IRDMAQPSQ_OPCODE, info->op_type) |
	      FIELD_PREP(IRDMAQPSQ_INLINEDATALEN, op_info->len) |
	      FIELD_PREP(IRDMAQPSQ_REPORTRTT, info->report_rtt ? 1 : 0) |
	      FIELD_PREP(IRDMAQPSQ_INLINEDATAFLAG, 1) |
	      FIELD_PREP(IRDMAQPSQ_IMMDATAFLAG, info->imm_data_valid ? 1 : 0) |
	      FIELD_PREP(IRDMAQPSQ_PUSHWQE, info->push_wqe ? 1 : 0) |
	      FIELD_PREP(IRDMAQPSQ_READFENCE, read_fence) |
	      FIELD_PREP(IRDMAQPSQ_LOCALFENCE, info->local_fence) |
	      FIELD_PREP(IRDMAQPSQ_SIGCOMPL, info->signaled) |
	      FIELD_PREP(IRDMAQPSQ_VALID, qp->swqe_polarity);

	if (info->imm_data_valid)
		set_64bit_val(wqe, 0,
			      FIELD_PREP(IRDMAQPSQ_IMMDATA, info->imm_data));

	/* the copy op stamps per-quantum valid bytes from the polarity */
	qp->wqe_ops.iw_copy_inline_data((u8 *)wqe, op_info->data, op_info->len,
					qp->swqe_polarity);
	/* make sure WQE is populated before the valid-bit quadword */
	dma_wmb();

	set_64bit_val(wqe, 24, hdr);

	if (info->push_wqe) {
		irdma_qp_push_wqe(qp, wqe, quanta, wqe_idx, post_sq);
	} else {
		if (post_sq)
			irdma_uk_qp_post_wr(qp);
	}

	return 0;
}
0737
0738
0739
0740
0741
0742
0743
/**
 * irdma_uk_inline_send - post an inline send WQE
 * @qp: hw qp ptr
 * @info: post sq information
 * @post_sq: flag to post sq
 *
 * Return: 0 on success, -EINVAL if the data exceeds max_inline_data,
 * -ENOMEM when no WQE space is available.
 */
int irdma_uk_inline_send(struct irdma_qp_uk *qp,
			 struct irdma_post_sq_info *info, bool post_sq)
{
	__le64 *wqe;
	struct irdma_post_inline_send *op_info;
	u64 hdr;
	u32 wqe_idx;
	bool read_fence = false;
	u16 quanta;

	info->push_wqe = qp->push_db ? true : false;
	op_info = &info->op.inline_send;

	if (op_info->len > qp->max_inline_data)
		return -EINVAL;

	quanta = qp->wqe_ops.iw_inline_data_size_to_quanta(op_info->len);
	wqe = irdma_qp_get_next_send_wqe(qp, &wqe_idx, quanta, op_info->len,
					 info);
	if (!wqe)
		return -ENOMEM;

	irdma_clr_wqes(qp, wqe_idx);

	set_64bit_val(wqe, 16,
		      FIELD_PREP(IRDMAQPSQ_DESTQKEY, op_info->qkey) |
		      FIELD_PREP(IRDMAQPSQ_DESTQPN, op_info->dest_qp));

	read_fence |= info->read_fence;
	hdr = FIELD_PREP(IRDMAQPSQ_REMSTAG, info->stag_to_inv) |
	      FIELD_PREP(IRDMAQPSQ_AHID, op_info->ah_id) |
	      FIELD_PREP(IRDMAQPSQ_OPCODE, info->op_type) |
	      FIELD_PREP(IRDMAQPSQ_INLINEDATALEN, op_info->len) |
	      FIELD_PREP(IRDMAQPSQ_IMMDATAFLAG,
			 (info->imm_data_valid ? 1 : 0)) |
	      FIELD_PREP(IRDMAQPSQ_REPORTRTT, (info->report_rtt ? 1 : 0)) |
	      FIELD_PREP(IRDMAQPSQ_INLINEDATAFLAG, 1) |
	      FIELD_PREP(IRDMAQPSQ_PUSHWQE, info->push_wqe) |
	      FIELD_PREP(IRDMAQPSQ_READFENCE, read_fence) |
	      FIELD_PREP(IRDMAQPSQ_LOCALFENCE, info->local_fence) |
	      FIELD_PREP(IRDMAQPSQ_SIGCOMPL, info->signaled) |
	      FIELD_PREP(IRDMAQPSQ_UDPHEADER, info->udp_hdr) |
	      FIELD_PREP(IRDMAQPSQ_L4LEN, info->l4len) |
	      FIELD_PREP(IRDMAQPSQ_VALID, qp->swqe_polarity);

	if (info->imm_data_valid)
		set_64bit_val(wqe, 0,
			      FIELD_PREP(IRDMAQPSQ_IMMDATA, info->imm_data));
	/* the copy op stamps per-quantum valid bytes from the polarity */
	qp->wqe_ops.iw_copy_inline_data((u8 *)wqe, op_info->data, op_info->len,
					qp->swqe_polarity);

	/* make sure WQE is populated before the valid-bit quadword */
	dma_wmb();

	set_64bit_val(wqe, 24, hdr);

	if (info->push_wqe) {
		irdma_qp_push_wqe(qp, wqe, quanta, wqe_idx, post_sq);
	} else {
		if (post_sq)
			irdma_uk_qp_post_wr(qp);
	}

	return 0;
}
0808
0809
0810
0811
0812
0813
0814
/**
 * irdma_uk_stag_local_invalidate - post a local-invalidate-stag WQE
 * @qp: hw qp ptr
 * @info: post sq information
 * @post_sq: flag to post sq
 *
 * Return: 0 on success, -ENOMEM when no WQE space is available.
 */
int irdma_uk_stag_local_invalidate(struct irdma_qp_uk *qp,
				   struct irdma_post_sq_info *info,
				   bool post_sq)
{
	__le64 *wqe;
	struct irdma_inv_local_stag *op_info;
	u64 hdr;
	u32 wqe_idx;
	bool local_fence = false;
	struct ib_sge sge = {};

	info->push_wqe = qp->push_db ? true : false;
	op_info = &info->op.inv_local_stag;
	local_fence = info->local_fence;

	wqe = irdma_qp_get_next_send_wqe(qp, &wqe_idx, IRDMA_QP_WQE_MIN_QUANTA,
					 0, info);
	if (!wqe)
		return -ENOMEM;

	irdma_clr_wqes(qp, wqe_idx);

	/* the fragment only carries the stag to invalidate (addr/len zero) */
	sge.lkey = op_info->target_stag;
	qp->wqe_ops.iw_set_fragment(wqe, 0, &sge, 0);

	set_64bit_val(wqe, 16, 0);

	hdr = FIELD_PREP(IRDMAQPSQ_OPCODE, IRDMA_OP_TYPE_INV_STAG) |
	      FIELD_PREP(IRDMAQPSQ_PUSHWQE, info->push_wqe) |
	      FIELD_PREP(IRDMAQPSQ_READFENCE, info->read_fence) |
	      FIELD_PREP(IRDMAQPSQ_LOCALFENCE, local_fence) |
	      FIELD_PREP(IRDMAQPSQ_SIGCOMPL, info->signaled) |
	      FIELD_PREP(IRDMAQPSQ_VALID, qp->swqe_polarity);

	/* make sure WQE is populated before the valid-bit quadword */
	dma_wmb();

	set_64bit_val(wqe, 24, hdr);

	if (info->push_wqe) {
		irdma_qp_push_wqe(qp, wqe, IRDMA_QP_WQE_MIN_QUANTA, wqe_idx,
				  post_sq);
	} else {
		if (post_sq)
			irdma_uk_qp_post_wr(qp);
	}

	return 0;
}
0863
0864
0865
0866
0867
0868
/**
 * irdma_uk_post_receive - post a receive WQE
 * @qp: hw qp ptr
 * @info: post rq information
 *
 * Return: 0 on success, -EINVAL for too many fragments, -ENOMEM when
 * the RQ ring is full.
 */
int irdma_uk_post_receive(struct irdma_qp_uk *qp,
			  struct irdma_post_rq_info *info)
{
	u32 wqe_idx, i, byte_off;
	u32 addl_frag_cnt;
	__le64 *wqe;
	u64 hdr;

	if (qp->max_rq_frag_cnt < info->num_sges)
		return -EINVAL;

	wqe = irdma_qp_get_next_recv_wqe(qp, &wqe_idx);
	if (!wqe)
		return -ENOMEM;

	qp->rq_wrid_array[wqe_idx] = info->wr_id;
	addl_frag_cnt = info->num_sges > 1 ? (info->num_sges - 1) : 0;
	qp->wqe_ops.iw_set_fragment(wqe, 0, info->sg_list,
				    qp->rwqe_polarity);

	for (i = 1, byte_off = 32; i < info->num_sges; i++) {
		qp->wqe_ops.iw_set_fragment(wqe, byte_off, &info->sg_list[i],
					    qp->rwqe_polarity);
		byte_off += 16;
	}

	/* even fragment count: stamp the valid bit in the unused fragment */
	if (qp->uk_attrs->hw_rev >= IRDMA_GEN_2 && !(info->num_sges & 0x01) &&
	    info->num_sges) {
		qp->wqe_ops.iw_set_fragment(wqe, byte_off, NULL,
					    qp->rwqe_polarity);
		if (qp->uk_attrs->hw_rev == IRDMA_GEN_2)
			++addl_frag_cnt;
	}

	set_64bit_val(wqe, 16, 0);
	hdr = FIELD_PREP(IRDMAQPSQ_ADDFRAGCNT, addl_frag_cnt) |
	      FIELD_PREP(IRDMAQPSQ_VALID, qp->rwqe_polarity);

	/* make sure WQE is populated before the valid-bit quadword */
	dma_wmb();

	set_64bit_val(wqe, 24, hdr);

	return 0;
}
0914
0915
0916
0917
0918
0919
0920
0921 void irdma_uk_cq_resize(struct irdma_cq_uk *cq, void *cq_base, int cq_size)
0922 {
0923 cq->cq_base = cq_base;
0924 cq->cq_size = cq_size;
0925 IRDMA_RING_INIT(cq->cq_ring, cq->cq_size);
0926 cq->polarity = 1;
0927 }
0928
0929
0930
0931
0932
0933
0934 void irdma_uk_cq_set_resized_cnt(struct irdma_cq_uk *cq, u16 cq_cnt)
0935 {
0936 u64 temp_val;
0937 u16 sw_cq_sel;
0938 u8 arm_next_se;
0939 u8 arm_next;
0940 u8 arm_seq_num;
0941
0942 get_64bit_val(cq->shadow_area, 32, &temp_val);
0943
0944 sw_cq_sel = (u16)FIELD_GET(IRDMA_CQ_DBSA_SW_CQ_SELECT, temp_val);
0945 sw_cq_sel += cq_cnt;
0946
0947 arm_seq_num = (u8)FIELD_GET(IRDMA_CQ_DBSA_ARM_SEQ_NUM, temp_val);
0948 arm_next_se = (u8)FIELD_GET(IRDMA_CQ_DBSA_ARM_NEXT_SE, temp_val);
0949 arm_next = (u8)FIELD_GET(IRDMA_CQ_DBSA_ARM_NEXT, temp_val);
0950
0951 temp_val = FIELD_PREP(IRDMA_CQ_DBSA_ARM_SEQ_NUM, arm_seq_num) |
0952 FIELD_PREP(IRDMA_CQ_DBSA_SW_CQ_SELECT, sw_cq_sel) |
0953 FIELD_PREP(IRDMA_CQ_DBSA_ARM_NEXT_SE, arm_next_se) |
0954 FIELD_PREP(IRDMA_CQ_DBSA_ARM_NEXT, arm_next);
0955
0956 set_64bit_val(cq->shadow_area, 32, temp_val);
0957 }
0958
0959
0960
0961
0962
0963
/**
 * irdma_uk_cq_request_notification - arm the CQ for completion events
 * @cq: hw cq
 * @cq_notify: notification type (IRDMA_CQ_COMPL_EVENT arms for any event)
 *
 * Bumps the arm sequence number in the doorbell shadow area, sets the
 * arm-next bits, then rings the CQE alloc doorbell.
 */
void irdma_uk_cq_request_notification(struct irdma_cq_uk *cq,
				      enum irdma_cmpl_notify cq_notify)
{
	u64 temp_val;
	u16 sw_cq_sel;
	u8 arm_next_se = 0;
	u8 arm_next = 0;
	u8 arm_seq_num;

	get_64bit_val(cq->shadow_area, 32, &temp_val);
	arm_seq_num = (u8)FIELD_GET(IRDMA_CQ_DBSA_ARM_SEQ_NUM, temp_val);
	arm_seq_num++;
	sw_cq_sel = (u16)FIELD_GET(IRDMA_CQ_DBSA_SW_CQ_SELECT, temp_val);
	arm_next_se = (u8)FIELD_GET(IRDMA_CQ_DBSA_ARM_NEXT_SE, temp_val);
	arm_next_se |= 1;
	if (cq_notify == IRDMA_CQ_COMPL_EVENT)
		arm_next = 1;
	temp_val = FIELD_PREP(IRDMA_CQ_DBSA_ARM_SEQ_NUM, arm_seq_num) |
		   FIELD_PREP(IRDMA_CQ_DBSA_SW_CQ_SELECT, sw_cq_sel) |
		   FIELD_PREP(IRDMA_CQ_DBSA_ARM_NEXT_SE, arm_next_se) |
		   FIELD_PREP(IRDMA_CQ_DBSA_ARM_NEXT, arm_next);

	set_64bit_val(cq->shadow_area, 32, temp_val);

	/* make sure the shadow area is written before the doorbell */
	dma_wmb();

	writel(cq->cq_id, cq->cqe_alloc_db);
}
0992
0993
0994
0995
0996
0997
/**
 * irdma_uk_cq_poll_cmpl - get next completion info from the CQ
 * @cq: hw cq
 * @info: cq poll information returned
 *
 * Return: 0 on success, -ENOENT when no valid CQE (or no flushed work)
 * is available, -EFAULT when the CQE references a destroyed QP.
 */
int irdma_uk_cq_poll_cmpl(struct irdma_cq_uk *cq,
			  struct irdma_cq_poll_info *info)
{
	u64 comp_ctx, qword0, qword2, qword3;
	__le64 *cqe;
	struct irdma_qp_uk *qp;
	struct irdma_ring *pring = NULL;
	u32 wqe_idx, q_type;
	int ret_code;
	bool move_cq_head = true;
	u8 polarity;
	u8 op_type;
	bool ext_valid;
	__le64 *ext_cqe;

	if (cq->avoid_mem_cflct)
		cqe = IRDMA_GET_CURRENT_EXTENDED_CQ_ELEM(cq);
	else
		cqe = IRDMA_GET_CURRENT_CQ_ELEM(cq);

	get_64bit_val(cqe, 24, &qword3);
	polarity = (u8)FIELD_GET(IRDMA_CQ_VALID, qword3);
	if (polarity != cq->polarity)
		return -ENOENT;

	/* make sure CQE contents are read only after the valid bit */
	dma_rmb();

	ext_valid = (bool)FIELD_GET(IRDMA_CQ_EXTCQE, qword3);
	if (ext_valid) {
		u64 qword6, qword7;
		u32 peek_head;

		if (cq->avoid_mem_cflct) {
			/* extended CQE is adjacent within the same element */
			ext_cqe = (__le64 *)((u8 *)cqe + 32);
			get_64bit_val(ext_cqe, 24, &qword7);
			polarity = (u8)FIELD_GET(IRDMA_CQ_VALID, qword7);
		} else {
			/* extended CQE occupies the next ring slot */
			peek_head = (cq->cq_ring.head + 1) % cq->cq_ring.size;
			ext_cqe = cq->cq_base[peek_head].buf;
			get_64bit_val(ext_cqe, 24, &qword7);
			polarity = (u8)FIELD_GET(IRDMA_CQ_VALID, qword7);
			if (!peek_head)
				polarity ^= 1;
		}
		if (polarity != cq->polarity)
			return -ENOENT;

		/* make sure the extended CQE is read only after its valid bit */
		dma_rmb();

		info->imm_valid = (bool)FIELD_GET(IRDMA_CQ_IMMVALID, qword7);
		if (info->imm_valid) {
			u64 qword4;

			get_64bit_val(ext_cqe, 0, &qword4);
			info->imm_data = (u32)FIELD_GET(IRDMA_CQ_IMMDATALOW32, qword4);
		}
		info->ud_smac_valid = (bool)FIELD_GET(IRDMA_CQ_UDSMACVALID, qword7);
		info->ud_vlan_valid = (bool)FIELD_GET(IRDMA_CQ_UDVLANVALID, qword7);
		if (info->ud_smac_valid || info->ud_vlan_valid) {
			get_64bit_val(ext_cqe, 16, &qword6);
			if (info->ud_vlan_valid)
				info->ud_vlan = (u16)FIELD_GET(IRDMA_CQ_UDVLAN, qword6);
			if (info->ud_smac_valid) {
				/* MAC bytes are packed little-endian in qword6 */
				info->ud_smac[5] = qword6 & 0xFF;
				info->ud_smac[4] = (qword6 >> 8) & 0xFF;
				info->ud_smac[3] = (qword6 >> 16) & 0xFF;
				info->ud_smac[2] = (qword6 >> 24) & 0xFF;
				info->ud_smac[1] = (qword6 >> 32) & 0xFF;
				info->ud_smac[0] = (qword6 >> 40) & 0xFF;
			}
		}
	} else {
		info->imm_valid = false;
		info->ud_smac_valid = false;
		info->ud_vlan_valid = false;
	}

	q_type = (u8)FIELD_GET(IRDMA_CQ_SQ, qword3);
	info->error = (bool)FIELD_GET(IRDMA_CQ_ERROR, qword3);
	info->push_dropped = (bool)FIELD_GET(IRDMACQ_PSHDROP, qword3);
	info->ipv4 = (bool)FIELD_GET(IRDMACQ_IPV4, qword3);
	if (info->error) {
		info->major_err = FIELD_GET(IRDMA_CQ_MAJERR, qword3);
		info->minor_err = FIELD_GET(IRDMA_CQ_MINERR, qword3);
		if (info->major_err == IRDMA_FLUSH_MAJOR_ERR) {
			info->comp_status = IRDMA_COMPL_STATUS_FLUSHED;
			/* rewrite the CQE so repeat polls see a general flush */
			if (info->minor_err != FLUSH_GENERAL_ERR) {
				qword3 &= ~IRDMA_CQ_MINERR;
				qword3 |= FIELD_PREP(IRDMA_CQ_MINERR, FLUSH_GENERAL_ERR);
				set_64bit_val(cqe, 24, qword3);
			}
		} else {
			info->comp_status = IRDMA_COMPL_STATUS_UNKNOWN;
		}
	} else {
		info->comp_status = IRDMA_COMPL_STATUS_SUCCESS;
	}

	get_64bit_val(cqe, 0, &qword0);
	get_64bit_val(cqe, 16, &qword2);

	info->tcp_seq_num_rtt = (u32)FIELD_GET(IRDMACQ_TCPSEQNUMRTT, qword0);
	info->qp_id = (u32)FIELD_GET(IRDMACQ_QPID, qword2);
	info->ud_src_qpn = (u32)FIELD_GET(IRDMACQ_UDSRCQPN, qword2);

	get_64bit_val(cqe, 8, &comp_ctx);

	info->solicited_event = (bool)FIELD_GET(IRDMACQ_SOEVENT, qword3);
	qp = (struct irdma_qp_uk *)(unsigned long)comp_ctx;
	if (!qp || qp->destroy_pending) {
		ret_code = -EFAULT;
		goto exit;
	}
	wqe_idx = (u32)FIELD_GET(IRDMA_CQ_WQEIDX, qword3);
	info->qp_handle = (irdma_qp_handle)(unsigned long)qp;

	if (q_type == IRDMA_CQE_QTYPE_RQ) {
		u32 array_idx;

		array_idx = wqe_idx / qp->rq_wqe_size_multiplier;

		if (info->comp_status == IRDMA_COMPL_STATUS_FLUSHED ||
		    info->comp_status == IRDMA_COMPL_STATUS_UNKNOWN) {
			if (!IRDMA_RING_MORE_WORK(qp->rq_ring)) {
				ret_code = -ENOENT;
				goto exit;
			}

			/* flush completions drain from the SW tail, not wqe_idx */
			info->wr_id = qp->rq_wrid_array[qp->rq_ring.tail];
			array_idx = qp->rq_ring.tail;
		} else {
			info->wr_id = qp->rq_wrid_array[array_idx];
		}

		info->bytes_xfered = (u32)FIELD_GET(IRDMACQ_PAYLDLEN, qword0);

		if (info->imm_valid)
			info->op_type = IRDMA_OP_TYPE_REC_IMM;
		else
			info->op_type = IRDMA_OP_TYPE_REC;
		if (qword3 & IRDMACQ_STAG) {
			info->stag_invalid_set = true;
			info->inv_stag = (u32)FIELD_GET(IRDMACQ_INVSTAG, qword2);
		} else {
			info->stag_invalid_set = false;
		}
		IRDMA_RING_SET_TAIL(qp->rq_ring, array_idx + 1);
		if (info->comp_status == IRDMA_COMPL_STATUS_FLUSHED) {
			qp->rq_flush_seen = true;
			if (!IRDMA_RING_MORE_WORK(qp->rq_ring))
				qp->rq_flush_complete = true;
			else
				move_cq_head = false;
		}
		pring = &qp->rq_ring;
	} else { /* q_type is IRDMA_CQE_QTYPE_SQ */
		if (qp->first_sq_wq) {
			if (wqe_idx + 1 >= qp->conn_wqes)
				qp->first_sq_wq = false;

			/* connection-setup WQEs: consume the CQE and retry */
			if (wqe_idx < qp->conn_wqes && qp->sq_ring.head == qp->sq_ring.tail) {
				IRDMA_RING_MOVE_HEAD_NOCHECK(cq->cq_ring);
				IRDMA_RING_MOVE_TAIL(cq->cq_ring);
				set_64bit_val(cq->shadow_area, 0,
					      IRDMA_RING_CURRENT_HEAD(cq->cq_ring));
				memset(info, 0,
				       sizeof(struct irdma_cq_poll_info));
				return irdma_uk_cq_poll_cmpl(cq, info);
			}
		}

		if (info->push_dropped) {
			qp->push_mode = false;
			qp->push_dropped = true;
		}
		if (info->comp_status != IRDMA_COMPL_STATUS_FLUSHED) {
			info->wr_id = qp->sq_wrtrk_array[wqe_idx].wrid;
			if (!info->comp_status)
				info->bytes_xfered = qp->sq_wrtrk_array[wqe_idx].wr_len;
			info->op_type = (u8)FIELD_GET(IRDMACQ_OP, qword3);
			IRDMA_RING_SET_TAIL(qp->sq_ring,
					    wqe_idx + qp->sq_wrtrk_array[wqe_idx].quanta);
		} else {
			if (!IRDMA_RING_MORE_WORK(qp->sq_ring)) {
				ret_code = -ENOENT;
				goto exit;
			}

			/* flushed SQ: walk from the tail until a non-NOP WQE */
			do {
				__le64 *sw_wqe;
				u64 wqe_qword;
				u32 tail;

				tail = qp->sq_ring.tail;
				sw_wqe = qp->sq_base[tail].elem;
				get_64bit_val(sw_wqe, 24,
					      &wqe_qword);
				op_type = (u8)FIELD_GET(IRDMAQPSQ_OPCODE, wqe_qword);
				info->op_type = op_type;
				IRDMA_RING_SET_TAIL(qp->sq_ring,
						    tail + qp->sq_wrtrk_array[tail].quanta);
				if (op_type != IRDMAQP_OP_NOP) {
					info->wr_id = qp->sq_wrtrk_array[tail].wrid;
					info->bytes_xfered = qp->sq_wrtrk_array[tail].wr_len;
					break;
				}
			} while (1);
			if (op_type == IRDMA_OP_TYPE_BIND_MW && info->minor_err == FLUSH_PROT_ERR)
				info->minor_err = FLUSH_MW_BIND_ERR;
			qp->sq_flush_seen = true;
			if (!IRDMA_RING_MORE_WORK(qp->sq_ring))
				qp->sq_flush_complete = true;
		}
		pring = &qp->sq_ring;
	}

	ret_code = 0;

exit:
	/* hold the CQ head while flushed work remains on the ring */
	if (!ret_code && info->comp_status == IRDMA_COMPL_STATUS_FLUSHED)
		if (pring && IRDMA_RING_MORE_WORK(*pring))
			move_cq_head = false;

	if (move_cq_head) {
		IRDMA_RING_MOVE_HEAD_NOCHECK(cq->cq_ring);
		if (!IRDMA_RING_CURRENT_HEAD(cq->cq_ring))
			cq->polarity ^= 1;

		/* an extended CQE consumed an extra ring slot */
		if (ext_valid && !cq->avoid_mem_cflct) {
			IRDMA_RING_MOVE_HEAD_NOCHECK(cq->cq_ring);
			if (!IRDMA_RING_CURRENT_HEAD(cq->cq_ring))
				cq->polarity ^= 1;
		}

		IRDMA_RING_MOVE_TAIL(cq->cq_ring);
		if (!cq->avoid_mem_cflct && ext_valid)
			IRDMA_RING_MOVE_TAIL(cq->cq_ring);
		set_64bit_val(cq->shadow_area, 0,
			      IRDMA_RING_CURRENT_HEAD(cq->cq_ring));
	} else {
		/* record the flush progress back into the CQE wqe index */
		qword3 &= ~IRDMA_CQ_WQEIDX;
		qword3 |= FIELD_PREP(IRDMA_CQ_WQEIDX, pring->tail);
		set_64bit_val(cqe, 24, qword3);
	}

	return ret_code;
}
1248
1249
1250
1251
1252
1253 static int irdma_qp_round_up(u32 wqdepth)
1254 {
1255 int scount = 1;
1256
1257 for (wqdepth--; scount <= 16; scount *= 2)
1258 wqdepth |= wqdepth >> scount;
1259
1260 return ++wqdepth;
1261 }
1262
1263
1264
1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277 void irdma_get_wqe_shift(struct irdma_uk_attrs *uk_attrs, u32 sge,
1278 u32 inline_data, u8 *shift)
1279 {
1280 *shift = 0;
1281 if (uk_attrs->hw_rev >= IRDMA_GEN_2) {
1282 if (sge > 1 || inline_data > 8) {
1283 if (sge < 4 && inline_data <= 39)
1284 *shift = 1;
1285 else if (sge < 8 && inline_data <= 101)
1286 *shift = 2;
1287 else
1288 *shift = 3;
1289 }
1290 } else if (sge > 1 || inline_data > 16) {
1291 *shift = (sge < 4 && inline_data <= 48) ? 1 : 2;
1292 }
1293 }
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303 int irdma_get_sqdepth(struct irdma_uk_attrs *uk_attrs, u32 sq_size, u8 shift,
1304 u32 *sqdepth)
1305 {
1306 *sqdepth = irdma_qp_round_up((sq_size << shift) + IRDMA_SQ_RSVD);
1307
1308 if (*sqdepth < (IRDMA_QP_SW_MIN_WQSIZE << shift))
1309 *sqdepth = IRDMA_QP_SW_MIN_WQSIZE << shift;
1310 else if (*sqdepth > uk_attrs->max_hw_wq_quanta)
1311 return -EINVAL;
1312
1313 return 0;
1314 }
1315
1316
1317
1318
1319
1320
1321
1322
1323 int irdma_get_rqdepth(struct irdma_uk_attrs *uk_attrs, u32 rq_size, u8 shift,
1324 u32 *rqdepth)
1325 {
1326 *rqdepth = irdma_qp_round_up((rq_size << shift) + IRDMA_RQ_RSVD);
1327
1328 if (*rqdepth < (IRDMA_QP_SW_MIN_WQSIZE << shift))
1329 *rqdepth = IRDMA_QP_SW_MIN_WQSIZE << shift;
1330 else if (*rqdepth > uk_attrs->max_hw_rq_quanta)
1331 return -EINVAL;
1332
1333 return 0;
1334 }
1335
/* WQE build ops used for GEN_2 and later HW (see irdma_uk_qp_init) */
static const struct irdma_wqe_uk_ops iw_wqe_uk_ops = {
	.iw_copy_inline_data = irdma_copy_inline_data,
	.iw_inline_data_size_to_quanta = irdma_inline_data_size_to_quanta,
	.iw_set_fragment = irdma_set_fragment,
	.iw_set_mw_bind_wqe = irdma_set_mw_bind_wqe,
};
1342
/* WQE build ops used for GEN_1 HW (selected in irdma_uk_qp_init) */
static const struct irdma_wqe_uk_ops iw_wqe_uk_ops_gen_1 = {
	.iw_copy_inline_data = irdma_copy_inline_data_gen_1,
	.iw_inline_data_size_to_quanta = irdma_inline_data_size_to_quanta_gen_1,
	.iw_set_fragment = irdma_set_fragment_gen_1,
	.iw_set_mw_bind_wqe = irdma_set_mw_bind_wqe_gen_1,
};
1349
1350
1351
1352
1353
1354
1355
1356 static void irdma_setup_connection_wqes(struct irdma_qp_uk *qp,
1357 struct irdma_qp_uk_init_info *info)
1358 {
1359 u16 move_cnt = 1;
1360
1361 if (!info->legacy_mode &&
1362 (qp->uk_attrs->feature_flags & IRDMA_FEATURE_RTS_AE))
1363 move_cnt = 3;
1364
1365 qp->conn_wqes = move_cnt;
1366 IRDMA_RING_MOVE_HEAD_BY_COUNT_NOCHECK(qp->sq_ring, move_cnt);
1367 IRDMA_RING_MOVE_TAIL_BY_COUNT(qp->sq_ring, move_cnt);
1368 IRDMA_RING_MOVE_HEAD_BY_COUNT_NOCHECK(qp->initial_ring, move_cnt);
1369 }
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381 int irdma_uk_qp_init(struct irdma_qp_uk *qp, struct irdma_qp_uk_init_info *info)
1382 {
1383 int ret_code = 0;
1384 u32 sq_ring_size;
1385 u8 sqshift, rqshift;
1386
1387 qp->uk_attrs = info->uk_attrs;
1388 if (info->max_sq_frag_cnt > qp->uk_attrs->max_hw_wq_frags ||
1389 info->max_rq_frag_cnt > qp->uk_attrs->max_hw_wq_frags)
1390 return -EINVAL;
1391
1392 irdma_get_wqe_shift(qp->uk_attrs, info->max_rq_frag_cnt, 0, &rqshift);
1393 if (qp->uk_attrs->hw_rev == IRDMA_GEN_1) {
1394 irdma_get_wqe_shift(qp->uk_attrs, info->max_sq_frag_cnt,
1395 info->max_inline_data, &sqshift);
1396 if (info->abi_ver > 4)
1397 rqshift = IRDMA_MAX_RQ_WQE_SHIFT_GEN1;
1398 } else {
1399 irdma_get_wqe_shift(qp->uk_attrs, info->max_sq_frag_cnt + 1,
1400 info->max_inline_data, &sqshift);
1401 }
1402 qp->qp_caps = info->qp_caps;
1403 qp->sq_base = info->sq;
1404 qp->rq_base = info->rq;
1405 qp->qp_type = info->type ? info->type : IRDMA_QP_TYPE_IWARP;
1406 qp->shadow_area = info->shadow_area;
1407 qp->sq_wrtrk_array = info->sq_wrtrk_array;
1408
1409 qp->rq_wrid_array = info->rq_wrid_array;
1410 qp->wqe_alloc_db = info->wqe_alloc_db;
1411 qp->qp_id = info->qp_id;
1412 qp->sq_size = info->sq_size;
1413 qp->push_mode = false;
1414 qp->max_sq_frag_cnt = info->max_sq_frag_cnt;
1415 sq_ring_size = qp->sq_size << sqshift;
1416 IRDMA_RING_INIT(qp->sq_ring, sq_ring_size);
1417 IRDMA_RING_INIT(qp->initial_ring, sq_ring_size);
1418 if (info->first_sq_wq) {
1419 irdma_setup_connection_wqes(qp, info);
1420 qp->swqe_polarity = 1;
1421 qp->first_sq_wq = true;
1422 } else {
1423 qp->swqe_polarity = 0;
1424 }
1425 qp->swqe_polarity_deferred = 1;
1426 qp->rwqe_polarity = 0;
1427 qp->rq_size = info->rq_size;
1428 qp->max_rq_frag_cnt = info->max_rq_frag_cnt;
1429 qp->max_inline_data = info->max_inline_data;
1430 qp->rq_wqe_size = rqshift;
1431 IRDMA_RING_INIT(qp->rq_ring, qp->rq_size);
1432 qp->rq_wqe_size_multiplier = 1 << rqshift;
1433 if (qp->uk_attrs->hw_rev == IRDMA_GEN_1)
1434 qp->wqe_ops = iw_wqe_uk_ops_gen_1;
1435 else
1436 qp->wqe_ops = iw_wqe_uk_ops;
1437 return ret_code;
1438 }
1439
1440
1441
1442
1443
1444
1445 void irdma_uk_cq_init(struct irdma_cq_uk *cq,
1446 struct irdma_cq_uk_init_info *info)
1447 {
1448 cq->cq_base = info->cq_base;
1449 cq->cq_id = info->cq_id;
1450 cq->cq_size = info->cq_size;
1451 cq->cqe_alloc_db = info->cqe_alloc_db;
1452 cq->cq_ack_db = info->cq_ack_db;
1453 cq->shadow_area = info->shadow_area;
1454 cq->avoid_mem_cflct = info->avoid_mem_cflct;
1455 IRDMA_RING_INIT(cq->cq_ring, cq->cq_size);
1456 cq->polarity = 1;
1457 }
1458
1459
1460
1461
1462
1463
1464 void irdma_uk_clean_cq(void *q, struct irdma_cq_uk *cq)
1465 {
1466 __le64 *cqe;
1467 u64 qword3, comp_ctx;
1468 u32 cq_head;
1469 u8 polarity, temp;
1470
1471 cq_head = cq->cq_ring.head;
1472 temp = cq->polarity;
1473 do {
1474 if (cq->avoid_mem_cflct)
1475 cqe = ((struct irdma_extended_cqe *)(cq->cq_base))[cq_head].buf;
1476 else
1477 cqe = cq->cq_base[cq_head].buf;
1478 get_64bit_val(cqe, 24, &qword3);
1479 polarity = (u8)FIELD_GET(IRDMA_CQ_VALID, qword3);
1480
1481 if (polarity != temp)
1482 break;
1483
1484 get_64bit_val(cqe, 8, &comp_ctx);
1485 if ((void *)(unsigned long)comp_ctx == q)
1486 set_64bit_val(cqe, 8, 0);
1487
1488 cq_head = (cq_head + 1) % cq->cq_ring.size;
1489 if (!cq_head)
1490 temp ^= 1;
1491 } while (true);
1492 }
1493
1494
1495
1496
1497
1498
1499
1500
/**
 * irdma_nop - post a NOP WQE on the SQ
 * @qp: hw qp (user and kernel)
 * @wr_id: work request id to report in the completion
 * @signaled: true to request a completion for this WQE
 * @post_sq: true to ring the doorbell after building the WQE
 *
 * Returns 0 on success or -ENOMEM when no SQ WQE is available.
 */
int irdma_nop(struct irdma_qp_uk *qp, u64 wr_id, bool signaled, bool post_sq)
{
	__le64 *wqe;
	u64 hdr;
	u32 wqe_idx;
	struct irdma_post_sq_info info = {};

	info.push_wqe = false;
	info.wr_id = wr_id;
	wqe = irdma_qp_get_next_send_wqe(qp, &wqe_idx, IRDMA_QP_WQE_MIN_QUANTA,
					0, &info);
	if (!wqe)
		return -ENOMEM;

	irdma_clr_wqes(qp, wqe_idx);

	/* zero the payload quadwords before publishing the header */
	set_64bit_val(wqe, 0, 0);
	set_64bit_val(wqe, 8, 0);
	set_64bit_val(wqe, 16, 0);

	hdr = FIELD_PREP(IRDMAQPSQ_OPCODE, IRDMAQP_OP_NOP) |
	      FIELD_PREP(IRDMAQPSQ_SIGCOMPL, signaled) |
	      FIELD_PREP(IRDMAQPSQ_VALID, qp->swqe_polarity);

	/* make sure WQE is written before the valid bit is set */
	dma_wmb();

	set_64bit_val(wqe, 24, hdr);
	if (post_sq)
		irdma_uk_qp_post_wr(qp);

	return 0;
}
1533
1534
1535
1536
1537
1538
1539 int irdma_fragcnt_to_quanta_sq(u32 frag_cnt, u16 *quanta)
1540 {
1541 switch (frag_cnt) {
1542 case 0:
1543 case 1:
1544 *quanta = IRDMA_QP_WQE_MIN_QUANTA;
1545 break;
1546 case 2:
1547 case 3:
1548 *quanta = 2;
1549 break;
1550 case 4:
1551 case 5:
1552 *quanta = 3;
1553 break;
1554 case 6:
1555 case 7:
1556 *quanta = 4;
1557 break;
1558 case 8:
1559 case 9:
1560 *quanta = 5;
1561 break;
1562 case 10:
1563 case 11:
1564 *quanta = 6;
1565 break;
1566 case 12:
1567 case 13:
1568 *quanta = 7;
1569 break;
1570 case 14:
1571 case 15:
1572 *quanta = 8;
1573 break;
1574 default:
1575 return -EINVAL;
1576 }
1577
1578 return 0;
1579 }
1580
1581
1582
1583
1584
1585
1586 int irdma_fragcnt_to_wqesize_rq(u32 frag_cnt, u16 *wqe_size)
1587 {
1588 switch (frag_cnt) {
1589 case 0:
1590 case 1:
1591 *wqe_size = 32;
1592 break;
1593 case 2:
1594 case 3:
1595 *wqe_size = 64;
1596 break;
1597 case 4:
1598 case 5:
1599 case 6:
1600 case 7:
1601 *wqe_size = 128;
1602 break;
1603 case 8:
1604 case 9:
1605 case 10:
1606 case 11:
1607 case 12:
1608 case 13:
1609 case 14:
1610 *wqe_size = 256;
1611 break;
1612 default:
1613 return -EINVAL;
1614 }
1615
1616 return 0;
1617 }