@@ -73,6 +73,9 @@ static inline void set_data_seg_v2(struct hns_roce_v2_wqe_data_seg *dseg,
#define HR_OPC_MAP(ib_key, hr_key) \
	[IB_WR_ ## ib_key] = 1 + HNS_ROCE_V2_WQE_OP_ ## hr_key

+#define HR_PRIV_OPC_MAP(hr_key) \
+	[HNS_ROCE_WR_ ## hr_key] = 1 + HNS_ROCE_V2_WQE_OP_ ## hr_key
+
static const u32 hns_roce_op_code[] = {
	HR_OPC_MAP(RDMA_WRITE, RDMA_WRITE),
	HR_OPC_MAP(RDMA_WRITE_WITH_IMM, RDMA_WRITE_WITH_IMM),
@@ -86,6 +89,7 @@ static const u32 hns_roce_op_code[] = {
	HR_OPC_MAP(MASKED_ATOMIC_CMP_AND_SWP, ATOM_MSK_CMP_AND_SWAP),
	HR_OPC_MAP(MASKED_ATOMIC_FETCH_AND_ADD, ATOM_MSK_FETCH_AND_ADD),
	HR_OPC_MAP(REG_MR, FAST_REG_PMR),
+	HR_PRIV_OPC_MAP(NOP),
};

static u32 to_hr_opcode(u32 ib_opcode)
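
Note on the "1 +" bias used by HR_OPC_MAP and the new HR_PRIV_OPC_MAP: entries are stored offset by one so that an untouched (all-zero) slot in the designated-initializer table can be distinguished from a real mapping to opcode 0x0 (SEND). Because HNS_ROCE_WR_NOP aliases IB_WR_RESERVED4, the [HNS_ROCE_WR_NOP] initializer also grows hns_roce_op_code[] up to that index, which matters if the lookup helper bounds-checks against ARRAY_SIZE(). A sketch of how such a biased table is consumed, reconstructed for illustration rather than quoted from the driver:

static u32 to_hr_opcode(u32 ib_opcode)
{
	/* Verbs opcodes beyond the table have no hardware equivalent. */
	if (ib_opcode >= ARRAY_SIZE(hns_roce_op_code))
		return HNS_ROCE_V2_WQE_OP_MASK;

	/* A zero entry means "unmapped"; otherwise undo the +1 bias. */
	return hns_roce_op_code[ib_opcode] ?
	       hns_roce_op_code[ib_opcode] - 1 : HNS_ROCE_V2_WQE_OP_MASK;
}
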
@@ -540,6 +544,7 @@ static int set_rc_opcode(struct hns_roce_dev *hr_dev,
		break;
	case IB_WR_SEND:
	case IB_WR_SEND_WITH_IMM:
+	case HNS_ROCE_WR_NOP:
		break;
	case IB_WR_ATOMIC_CMP_AND_SWP:
	case IB_WR_ATOMIC_FETCH_AND_ADD:
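
A NOP carries no payload, remote address, or rkey, so the new case deliberately shares the empty IB_WR_SEND / IB_WR_SEND_WITH_IMM branch. A hypothetical sketch of how a driver-internal caller might build such a work request, assuming the usual rdma/ib_verbs.h definitions (the helper name is illustrative, not part of the patch):

/* Illustrative only: fill a signaled NOP work request for internal use. */
static void hns_roce_fill_nop_wr(struct ib_send_wr *wr, u64 wr_id)
{
	memset(wr, 0, sizeof(*wr));
	wr->wr_id = wr_id;
	/* HNS_ROCE_WR_NOP aliases IB_WR_RESERVED4, a valid ib_wr_opcode. */
	wr->opcode = HNS_ROCE_WR_NOP;
	wr->num_sge = 0;			/* no data segments */
	wr->send_flags = IB_SEND_SIGNALED;	/* assume a completion is wanted */
}
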
@@ -3572,6 +3577,9 @@ static int get_cur_qp(struct hns_roce_cq *hr_cq, struct hns_roce_v2_cqe *cqe,
#define HR_WC_OP_MAP(hr_key, ib_key) \
	[HNS_ROCE_V2_WQE_OP_ ## hr_key] = 1 + IB_WC_ ## ib_key

+#define HR_PRIV_WC_OP_MAP(hr_key) \
+	[HNS_ROCE_V2_WQE_OP_ ## hr_key] = 1 + HNS_ROCE_WC_ ## hr_key
+
static const u32 wc_send_op_map[] = {
	HR_WC_OP_MAP(SEND, SEND),
	HR_WC_OP_MAP(SEND_WITH_INV, SEND),
@@ -3586,6 +3594,7 @@ static const u32 wc_send_op_map[] = {
	HR_WC_OP_MAP(ATOM_MSK_FETCH_AND_ADD, MASKED_FETCH_ADD),
	HR_WC_OP_MAP(FAST_REG_PMR, REG_MR),
	HR_WC_OP_MAP(BIND_MW, REG_MR),
+	HR_PRIV_WC_OP_MAP(NOP),
};

static int to_ib_wc_send_op(u32 hr_opcode)
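
On the completion side the same biased-table lookup applies, but the NOP entry resolves to HNS_ROCE_WC_NOP rather than a standard IB_WC_* opcode, so only driver-internal code that issued the NOP should interpret the resulting work completion. A hypothetical recognizer (illustrative helper, not part of the patch):

/* Illustrative only: true if a polled CQE reports the driver-private NOP. */
static bool hns_roce_wc_is_nop(const struct ib_wc *wc)
{
	return wc->opcode == (enum ib_wc_opcode)HNS_ROCE_WC_NOP;
}
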
@@ -158,6 +158,14 @@ enum {
#define GID_LEN_V2 16

+enum {
+	HNS_ROCE_WR_NOP = IB_WR_RESERVED4,
+};
+
+enum {
+	HNS_ROCE_WC_NOP = HNS_ROCE_WR_NOP,
+};
+
enum {
	HNS_ROCE_V2_WQE_OP_SEND = 0x0,
	HNS_ROCE_V2_WQE_OP_SEND_WITH_INV = 0x1,
@@ -172,6 +180,7 @@ enum {
	HNS_ROCE_V2_WQE_OP_FAST_REG_PMR = 0xa,
	HNS_ROCE_V2_WQE_OP_LOCAL_INV = 0xb,
	HNS_ROCE_V2_WQE_OP_BIND_MW = 0xc,
+	HNS_ROCE_V2_WQE_OP_NOP = 0x13,
	HNS_ROCE_V2_WQE_OP_MASK = 0x1f,
};
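
Taken together, the header additions complete the chain: HNS_ROCE_WR_NOP (reusing the IB_WR_RESERVED4 slot) is translated to hardware opcode HNS_ROCE_V2_WQE_OP_NOP (0x13) on the send path, and a CQE carrying 0x13 is translated back to HNS_ROCE_WC_NOP when polled. Two properties this relies on, expressed as illustrative compile-time checks that are not part of the patch (assuming linux/build_bug.h): the new opcode fits under the 5-bit HNS_ROCE_V2_WQE_OP_MASK, and the private WR value stays inside the reserved block of enum ib_wr_opcode so it cannot collide with a standard verb.

/* Illustrative only: document the constraints on the new opcode values. */
static_assert(!(HNS_ROCE_V2_WQE_OP_NOP & ~HNS_ROCE_V2_WQE_OP_MASK));
static_assert(HNS_ROCE_WR_NOP >= IB_WR_RESERVED1 &&
	      HNS_ROCE_WR_NOP <= IB_WR_RESERVED10);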