Lines Matching refs:wr (references to the work-request argument wr in the mthca QP post-send/post-receive paths)
1498 const struct ib_ud_wr *wr, in build_mlx_header() argument
1508 mthca_ah_grh_present(to_mah(wr->ah)), 0, 0, 0, in build_mlx_header()
1511 err = mthca_read_ah(dev, to_mah(wr->ah), &sqp->ud_header); in build_mlx_header()
1522 switch (wr->wr.opcode) { in build_mlx_header()
1530 sqp->ud_header.immediate_data = wr->wr.ex.imm_data; in build_mlx_header()
1539 sqp->ud_header.bth.solicited_event = !!(wr->wr.send_flags & IB_SEND_SOLICITED); in build_mlx_header()
1544 ib_get_cached_pkey(&dev->ib_dev, qp->port, wr->pkey_index, in build_mlx_header()
1547 sqp->ud_header.bth.destination_qpn = cpu_to_be32(wr->remote_qpn); in build_mlx_header()
1549 sqp->ud_header.deth.qkey = cpu_to_be32(wr->remote_qkey & 0x80000000 ? in build_mlx_header()
1550 sqp->qkey : wr->remote_qkey); in build_mlx_header()
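
The build_mlx_header() matches above show two caller-visible UD conventions: the P_Key is looked up in the cached table at wr->pkey_index (line 1544), and a remote_qkey with its high bit set (lines 1549-1550) makes the driver substitute the QP's own qkey. A minimal sketch of filling those two ib_ud_wr fields; the helper name and the passed-in pkey_index are illustrative, not part of the driver.

	#include <rdma/ib_verbs.h>

	/* Illustrative helper: set the UD fields that build_mlx_header() reads
	 * for a GSI-style send.  A remote_qkey with the high bit set tells the
	 * driver to use the qkey programmed into the QP instead. */
	static void fill_gsi_ud_fields(struct ib_ud_wr *uwr, u16 pkey_index)
	{
		uwr->pkey_index  = pkey_index;
		uwr->remote_qkey = 0x80000000;
	}
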
1592 const struct ib_atomic_wr *wr) in set_atomic_seg() argument
1594 if (wr->wr.opcode == IB_WR_ATOMIC_CMP_AND_SWP) { in set_atomic_seg()
1595 aseg->swap_add = cpu_to_be64(wr->swap); in set_atomic_seg()
1596 aseg->compare = cpu_to_be64(wr->compare_add); in set_atomic_seg()
1598 aseg->swap_add = cpu_to_be64(wr->compare_add); in set_atomic_seg()
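
set_atomic_seg() maps the generic ib_atomic_wr onto the hardware segment: for IB_WR_ATOMIC_CMP_AND_SWP the swap value lands in swap_add and compare_add in compare, while the other atomic opcode (fetch-and-add) reuses compare_add as the addend. A caller-side sketch of the compare-and-swap case; the helper name and argument values are placeholders.

	#include <rdma/ib_verbs.h>

	/* Illustrative helper: fill an atomic compare-and-swap work request so
	 * that set_atomic_seg() sees compare_add as the expected value and
	 * swap as the replacement value. */
	static void fill_cmp_swap_wr(struct ib_atomic_wr *awr, u64 remote_addr,
				     u32 rkey, u64 expected, u64 new_val)
	{
		awr->wr.opcode   = IB_WR_ATOMIC_CMP_AND_SWP;
		awr->remote_addr = remote_addr;	/* target of the 64-bit atomic */
		awr->rkey        = rkey;
		awr->compare_add = expected;	/* written to aseg->compare  */
		awr->swap        = new_val;	/* written to aseg->swap_add */
	}
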
1605 const struct ib_ud_wr *wr) in set_tavor_ud_seg() argument
1607 useg->lkey = cpu_to_be32(to_mah(wr->ah)->key); in set_tavor_ud_seg()
1608 useg->av_addr = cpu_to_be64(to_mah(wr->ah)->avdma); in set_tavor_ud_seg()
1609 useg->dqpn = cpu_to_be32(wr->remote_qpn); in set_tavor_ud_seg()
1610 useg->qkey = cpu_to_be32(wr->remote_qkey); in set_tavor_ud_seg()
1615 const struct ib_ud_wr *wr) in set_arbel_ud_seg() argument
1617 memcpy(useg->av, to_mah(wr->ah)->av, MTHCA_AV_SIZE); in set_arbel_ud_seg()
1618 useg->dqpn = cpu_to_be32(wr->remote_qpn); in set_arbel_ud_seg()
1619 useg->qkey = cpu_to_be32(wr->remote_qkey); in set_arbel_ud_seg()
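
Both UD-segment helpers consume the same three ib_ud_wr fields (ah, remote_qpn, remote_qkey); the Tavor variant pulls the key and AV DMA address out of the mthca-private AH, the Arbel variant copies the whole address vector. A minimal caller-side sketch, assuming the address handle was created earlier by the caller; the helper name is illustrative.

	#include <rdma/ib_verbs.h>

	/* Illustrative helper: the UD addressing fields that
	 * set_tavor_ud_seg()/set_arbel_ud_seg() read from the work request. */
	static void fill_ud_wr(struct ib_ud_wr *uwr, struct ib_ah *ah,
			       u32 remote_qpn, u32 remote_qkey)
	{
		uwr->wr.opcode   = IB_WR_SEND;
		uwr->ah          = ah;
		uwr->remote_qpn  = remote_qpn;
		uwr->remote_qkey = remote_qkey;
	}
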
1622 int mthca_tavor_post_send(struct ib_qp *ibqp, const struct ib_send_wr *wr, in mthca_tavor_post_send() argument
1652 for (nreq = 0; wr; ++nreq, wr = wr->next) { in mthca_tavor_post_send()
1659 *bad_wr = wr; in mthca_tavor_post_send()
1670 ((wr->send_flags & IB_SEND_SIGNALED) ? in mthca_tavor_post_send()
1672 ((wr->send_flags & IB_SEND_SOLICITED) ? in mthca_tavor_post_send()
1675 if (wr->opcode == IB_WR_SEND_WITH_IMM || in mthca_tavor_post_send()
1676 wr->opcode == IB_WR_RDMA_WRITE_WITH_IMM) in mthca_tavor_post_send()
1677 ((struct mthca_next_seg *) wqe)->imm = wr->ex.imm_data; in mthca_tavor_post_send()
1684 switch (wr->opcode) { in mthca_tavor_post_send()
1687 set_raddr_seg(wqe, atomic_wr(wr)->remote_addr, in mthca_tavor_post_send()
1688 atomic_wr(wr)->rkey); in mthca_tavor_post_send()
1691 set_atomic_seg(wqe, atomic_wr(wr)); in mthca_tavor_post_send()
1700 set_raddr_seg(wqe, rdma_wr(wr)->remote_addr, in mthca_tavor_post_send()
1701 rdma_wr(wr)->rkey); in mthca_tavor_post_send()
1714 switch (wr->opcode) { in mthca_tavor_post_send()
1717 set_raddr_seg(wqe, rdma_wr(wr)->remote_addr, in mthca_tavor_post_send()
1718 rdma_wr(wr)->rkey); in mthca_tavor_post_send()
1731 set_tavor_ud_seg(wqe, ud_wr(wr)); in mthca_tavor_post_send()
1738 dev, qp, ind, ud_wr(wr), in mthca_tavor_post_send()
1741 *bad_wr = wr; in mthca_tavor_post_send()
1749 if (wr->num_sge > qp->sq.max_gs) { in mthca_tavor_post_send()
1752 *bad_wr = wr; in mthca_tavor_post_send()
1756 for (i = 0; i < wr->num_sge; ++i) { in mthca_tavor_post_send()
1757 mthca_set_data_seg(wqe, wr->sg_list + i); in mthca_tavor_post_send()
1771 qp->wrid[ind + qp->rq.max] = wr->wr_id; in mthca_tavor_post_send()
1773 if (wr->opcode >= ARRAY_SIZE(mthca_opcode)) { in mthca_tavor_post_send()
1776 *bad_wr = wr; in mthca_tavor_post_send()
1783 mthca_opcode[wr->opcode]); in mthca_tavor_post_send()
1787 ((wr->send_flags & IB_SEND_FENCE) ? in mthca_tavor_post_send()
1792 op0 = mthca_opcode[wr->opcode]; in mthca_tavor_post_send()
1793 f0 = wr->send_flags & IB_SEND_FENCE ? in mthca_tavor_post_send()
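
mthca_tavor_post_send() walks the wr->next chain, checks num_sge against the send queue's max_gs, copies each sg_list entry into the WQE, records wr_id, and on any error points *bad_wr at the offending request. A caller-side sketch of posting one signalled RDMA WRITE through this path, assuming <rdma/ib_verbs.h>; the addresses, keys, and wr_id are placeholders supplied by the caller's own setup.

	#include <rdma/ib_verbs.h>

	static int post_one_rdma_write(struct ib_qp *qp, u64 laddr, u32 lkey,
				       u32 len, u64 raddr, u32 rkey)
	{
		struct ib_sge sge = {
			.addr   = laddr,
			.length = len,
			.lkey   = lkey,
		};
		struct ib_rdma_wr wr = {
			.wr = {
				.wr_id      = 1,	/* echoed back in the CQE */
				.opcode     = IB_WR_RDMA_WRITE,
				.send_flags = IB_SEND_SIGNALED,
				.sg_list    = &sge,
				.num_sge    = 1,	/* must not exceed sq.max_gs */
			},
			.remote_addr = raddr,
			.rkey        = rkey,
		};
		const struct ib_send_wr *bad_wr;

		/* On failure *bad_wr points at the first unposted request,
		 * mirroring the *bad_wr = wr assignments above. */
		return ib_post_send(qp, &wr.wr, &bad_wr);
	}
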
1820 int mthca_tavor_post_receive(struct ib_qp *ibqp, const struct ib_recv_wr *wr, in mthca_tavor_post_receive() argument
1848 for (nreq = 0; wr; wr = wr->next) { in mthca_tavor_post_receive()
1855 *bad_wr = wr; in mthca_tavor_post_receive()
1870 if (unlikely(wr->num_sge > qp->rq.max_gs)) { in mthca_tavor_post_receive()
1872 *bad_wr = wr; in mthca_tavor_post_receive()
1876 for (i = 0; i < wr->num_sge; ++i) { in mthca_tavor_post_receive()
1877 mthca_set_data_seg(wqe, wr->sg_list + i); in mthca_tavor_post_receive()
1882 qp->wrid[ind] = wr->wr_id; in mthca_tavor_post_receive()
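
The receive path follows the same pattern without opcodes: each ib_recv_wr is checked against rq.max_gs (line 1870), its scatter list is copied into the WQE, and wr_id is saved for the completion. A matching caller-side sketch using ib_post_recv(); buffer address, length, and lkey are placeholders.

	#include <rdma/ib_verbs.h>

	static int post_one_recv(struct ib_qp *qp, u64 buf, u32 len, u32 lkey)
	{
		struct ib_sge sge = {
			.addr   = buf,
			.length = len,
			.lkey   = lkey,
		};
		struct ib_recv_wr wr = {
			.wr_id   = 2,	/* returned in the receive completion */
			.sg_list = &sge,
			.num_sge = 1,	/* must not exceed rq.max_gs */
		};
		const struct ib_recv_wr *bad_wr;

		return ib_post_recv(qp, &wr, &bad_wr);
	}
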
1925 int mthca_arbel_post_send(struct ib_qp *ibqp, const struct ib_send_wr *wr, in mthca_arbel_post_send() argument
1956 for (nreq = 0; wr; ++nreq, wr = wr->next) { in mthca_arbel_post_send()
1989 *bad_wr = wr; in mthca_arbel_post_send()
1998 ((wr->send_flags & IB_SEND_SIGNALED) ? in mthca_arbel_post_send()
2000 ((wr->send_flags & IB_SEND_SOLICITED) ? in mthca_arbel_post_send()
2002 ((wr->send_flags & IB_SEND_IP_CSUM) ? in mthca_arbel_post_send()
2005 if (wr->opcode == IB_WR_SEND_WITH_IMM || in mthca_arbel_post_send()
2006 wr->opcode == IB_WR_RDMA_WRITE_WITH_IMM) in mthca_arbel_post_send()
2007 ((struct mthca_next_seg *) wqe)->imm = wr->ex.imm_data; in mthca_arbel_post_send()
2014 switch (wr->opcode) { in mthca_arbel_post_send()
2017 set_raddr_seg(wqe, atomic_wr(wr)->remote_addr, in mthca_arbel_post_send()
2018 atomic_wr(wr)->rkey); in mthca_arbel_post_send()
2021 set_atomic_seg(wqe, atomic_wr(wr)); in mthca_arbel_post_send()
2030 set_raddr_seg(wqe, rdma_wr(wr)->remote_addr, in mthca_arbel_post_send()
2031 rdma_wr(wr)->rkey); in mthca_arbel_post_send()
2044 switch (wr->opcode) { in mthca_arbel_post_send()
2047 set_raddr_seg(wqe, rdma_wr(wr)->remote_addr, in mthca_arbel_post_send()
2048 rdma_wr(wr)->rkey); in mthca_arbel_post_send()
2061 set_arbel_ud_seg(wqe, ud_wr(wr)); in mthca_arbel_post_send()
2068 dev, qp, ind, ud_wr(wr), in mthca_arbel_post_send()
2071 *bad_wr = wr; in mthca_arbel_post_send()
2079 if (wr->num_sge > qp->sq.max_gs) { in mthca_arbel_post_send()
2082 *bad_wr = wr; in mthca_arbel_post_send()
2086 for (i = 0; i < wr->num_sge; ++i) { in mthca_arbel_post_send()
2087 mthca_set_data_seg(wqe, wr->sg_list + i); in mthca_arbel_post_send()
2101 qp->wrid[ind + qp->rq.max] = wr->wr_id; in mthca_arbel_post_send()
2103 if (wr->opcode >= ARRAY_SIZE(mthca_opcode)) { in mthca_arbel_post_send()
2106 *bad_wr = wr; in mthca_arbel_post_send()
2113 mthca_opcode[wr->opcode]); in mthca_arbel_post_send()
2117 ((wr->send_flags & IB_SEND_FENCE) ? in mthca_arbel_post_send()
2122 op0 = mthca_opcode[wr->opcode]; in mthca_arbel_post_send()
2123 f0 = wr->send_flags & IB_SEND_FENCE ? in mthca_arbel_post_send()
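
The Arbel post-send path differs from the Tavor one mainly in that it also honours IB_SEND_IP_CSUM (line 2002). A small sketch of requesting checksum offload on a send posted through this path; the helper name is illustrative, and the flag is combined with whatever other send_flags the caller already sets.

	#include <rdma/ib_verbs.h>

	/* Illustrative helper: ask the Arbel path for checksum offload. */
	static void request_csum_offload(struct ib_send_wr *swr)
	{
		swr->send_flags |= IB_SEND_IP_CSUM;
	}
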
2159 int mthca_arbel_post_receive(struct ib_qp *ibqp, const struct ib_recv_wr *wr, in mthca_arbel_post_receive() argument
2177 for (nreq = 0; wr; ++nreq, wr = wr->next) { in mthca_arbel_post_receive()
2184 *bad_wr = wr; in mthca_arbel_post_receive()
2194 if (unlikely(wr->num_sge > qp->rq.max_gs)) { in mthca_arbel_post_receive()
2196 *bad_wr = wr; in mthca_arbel_post_receive()
2200 for (i = 0; i < wr->num_sge; ++i) { in mthca_arbel_post_receive()
2201 mthca_set_data_seg(wqe, wr->sg_list + i); in mthca_arbel_post_receive()
2208 qp->wrid[ind] = wr->wr_id; in mthca_arbel_post_receive()