@@ -213,12 +213,13 @@ static inline enum comp_state check_ack(struct rxe_qp *qp,
struct rxe_pkt_info *pkt,
struct rxe_send_wqe *wqe)
{
+ struct rxe_dev *rxe = to_rdev(qp->ibqp.device);
unsigned int mask = pkt->mask;
+ int opcode;
u8 syn;
- struct rxe_dev *rxe = to_rdev(qp->ibqp.device);
- /* Check the sequence only */
- switch (qp->comp.opcode) {
+ /* Mask off type bits and check the sequence only */
+ switch (qp->comp.opcode == -1 ? -1 : (qp->comp.opcode & IB_OPCODE_CMD)) {
case -1:
/* Will catch all *_ONLY cases. */
if (!(mask & RXE_FIRST_MASK))
@@ -226,42 +227,39 @@ static inline enum comp_state check_ack(struct rxe_qp *qp,
break;
- case IB_OPCODE_RC_RDMA_READ_RESPONSE_FIRST:
- case IB_OPCODE_RC_RDMA_READ_RESPONSE_MIDDLE:
- if (pkt->opcode != IB_OPCODE_RC_RDMA_READ_RESPONSE_MIDDLE &&
- pkt->opcode != IB_OPCODE_RC_RDMA_READ_RESPONSE_LAST) {
+ case IB_OPCODE_RDMA_READ_RESPONSE_FIRST:
+ case IB_OPCODE_RDMA_READ_RESPONSE_MIDDLE:
+ opcode = pkt->opcode & IB_OPCODE_CMD;
+ if (opcode != IB_OPCODE_RDMA_READ_RESPONSE_MIDDLE &&
+ opcode != IB_OPCODE_RDMA_READ_RESPONSE_LAST) {
/* read retries of partial data may restart from
* read response first or response only.
*/
if ((pkt->psn == wqe->first_psn &&
- pkt->opcode ==
- IB_OPCODE_RC_RDMA_READ_RESPONSE_FIRST) ||
+ opcode == IB_OPCODE_RDMA_READ_RESPONSE_FIRST) ||
(wqe->first_psn == wqe->last_psn &&
- pkt->opcode ==
- IB_OPCODE_RC_RDMA_READ_RESPONSE_ONLY))
+ opcode == IB_OPCODE_RDMA_READ_RESPONSE_ONLY))
break;
return COMPST_ERROR;
}
break;
default:
- WARN_ON_ONCE(1);
+ break;
}
- /* Check operation validity. */
- switch (pkt->opcode) {
- case IB_OPCODE_RC_RDMA_READ_RESPONSE_FIRST:
- case IB_OPCODE_RC_RDMA_READ_RESPONSE_LAST:
- case IB_OPCODE_RC_RDMA_READ_RESPONSE_ONLY:
+ /* Mask off the type bits and check operation validity. */
+ switch (pkt->opcode & IB_OPCODE_CMD) {
+ case IB_OPCODE_RDMA_READ_RESPONSE_FIRST:
+ case IB_OPCODE_RDMA_READ_RESPONSE_LAST:
+ case IB_OPCODE_RDMA_READ_RESPONSE_ONLY:
syn = aeth_syn(pkt);
if ((syn & AETH_TYPE_MASK) != AETH_ACK)
return COMPST_ERROR;
fallthrough;
- /* (IB_OPCODE_RC_RDMA_READ_RESPONSE_MIDDLE doesn't have an AETH)
- */
- case IB_OPCODE_RC_RDMA_READ_RESPONSE_MIDDLE:
+ case IB_OPCODE_RDMA_READ_RESPONSE_MIDDLE:
if (wqe->wr.opcode != IB_WR_RDMA_READ &&
wqe->wr.opcode != IB_WR_RDMA_READ_WITH_INV) {
wqe->status = IB_WC_FATAL_ERR;
@@ -270,7 +268,7 @@ static inline enum comp_state check_ack(struct rxe_qp *qp,
reset_retry_counters(qp);
return COMPST_READ;
- case IB_OPCODE_RC_ATOMIC_ACKNOWLEDGE:
+ case IB_OPCODE_ATOMIC_ACKNOWLEDGE:
syn = aeth_syn(pkt);
if ((syn & AETH_TYPE_MASK) != AETH_ACK)
@@ -282,7 +280,7 @@ static inline enum comp_state check_ack(struct rxe_qp *qp,
reset_retry_counters(qp);
return COMPST_ATOMIC;
- case IB_OPCODE_RC_ACKNOWLEDGE:
+ case IB_OPCODE_ACKNOWLEDGE:
syn = aeth_syn(pkt);
switch (syn & AETH_TYPE_MASK) {
case AETH_ACK:
@@ -669,7 +667,8 @@ int rxe_completer(void *arg)
* timeouts but try to keep them as few as possible)
* (4) the timeout parameter is set
*/
- if ((qp_type(qp) == IB_QPT_RC) &&
+ if ((qp_type(qp) == IB_QPT_RC ||
+ qp_type(qp) == IB_QPT_XRC_INI) &&
(qp->req.state == QP_STATE_READY) &&
(psn_compare(qp->req.psn, qp->comp.psn) > 0) &&
qp->qp_timeout_jiffies)
 }

Extend code in rxe_comp.c to support xrc qp types.

Signed-off-by: Bob Pearson <rpearsonhpe@gmail.com>
---
 drivers/infiniband/sw/rxe/rxe_comp.c | 45 ++++++++++++++--------------
 1 file changed, 22 insertions(+), 23 deletions(-)