Lines matching refs:wqe

42 static u32 restart_sge(struct rvt_sge_state *ss, struct rvt_swqe *wqe,  in restart_sge()  argument
47 len = ((psn - wqe->psn) & QIB_PSN_MASK) * pmtu; in restart_sge()
48 return rvt_restart_sge(ss, wqe, len); in restart_sge()
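
The match at line 47 is essentially the whole body of restart_sge(): a 24-bit PSN delta scaled by the path MTU gives the byte offset at which the send state should resume, and line 48 hands that offset to rvt_restart_sge(). A minimal user-space sketch of the arithmetic, assuming the usual 24-bit InfiniBand PSN space for the mask (the helper name and the values in main() are made up for illustration):

    #include <stdint.h>
    #include <stdio.h>

    #define PSN_MASK 0xFFFFFFu  /* assumption: 24-bit PSN space, as QIB_PSN_MASK is expected to be */

    /* Byte offset of the packet carrying `psn` within a request whose first
     * packet carried `first_psn`, with `pmtu` payload bytes per packet. */
    static uint32_t psn_to_offset(uint32_t psn, uint32_t first_psn, uint32_t pmtu)
    {
        return ((psn - first_psn) & PSN_MASK) * pmtu;
    }

    int main(void)
    {
        /* Hypothetical restart at the third packet of a request sent with a 4096-byte MTU. */
        printf("%u\n", (unsigned)psn_to_offset(0x12, 0x10, 4096)); /* prints 8192 */
        return 0;
    }

The same (psn - wqe->psn) & QIB_PSN_MASK pattern reappears at line 551 in qib_make_rc_req() when an RDMA WRITE is restarted partway through the request.
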
221 struct rvt_swqe *wqe; in qib_make_rc_req() local
251 wqe = rvt_get_swqe_ptr(qp, qp->s_last); in qib_make_rc_req()
252 rvt_send_complete(qp, wqe, qp->s_last != qp->s_acked ? in qib_make_rc_req()
275 wqe = rvt_get_swqe_ptr(qp, qp->s_cur); in qib_make_rc_req()
296 if ((wqe->wr.send_flags & IB_SEND_FENCE) && in qib_make_rc_req()
302 qp->s_psn = wqe->psn; in qib_make_rc_req()
309 len = wqe->length; in qib_make_rc_req()
312 switch (wqe->wr.opcode) { in qib_make_rc_req()
316 if (!rvt_rc_credit_avail(qp, wqe)) in qib_make_rc_req()
323 if (wqe->wr.opcode == IB_WR_SEND) in qib_make_rc_req()
328 ohdr->u.imm_data = wqe->wr.ex.imm_data; in qib_make_rc_req()
331 if (wqe->wr.send_flags & IB_SEND_SOLICITED) in qib_make_rc_req()
344 if (!rvt_rc_credit_avail(qp, wqe)) in qib_make_rc_req()
348 cpu_to_be64(wqe->rdma_wr.remote_addr); in qib_make_rc_req()
350 cpu_to_be32(wqe->rdma_wr.rkey); in qib_make_rc_req()
358 if (wqe->rdma_wr.wr.opcode == IB_WR_RDMA_WRITE) in qib_make_rc_req()
364 wqe->rdma_wr.wr.ex.imm_data; in qib_make_rc_req()
366 if (wqe->rdma_wr.wr.send_flags & IB_SEND_SOLICITED) in qib_make_rc_req()
391 cpu_to_be64(wqe->rdma_wr.remote_addr); in qib_make_rc_req()
393 cpu_to_be32(wqe->rdma_wr.rkey); in qib_make_rc_req()
420 if (wqe->atomic_wr.wr.opcode == IB_WR_ATOMIC_CMP_AND_SWP) { in qib_make_rc_req()
422 put_ib_ateth_swap(wqe->atomic_wr.swap, in qib_make_rc_req()
424 put_ib_ateth_compare(wqe->atomic_wr.compare_add, in qib_make_rc_req()
428 put_ib_ateth_swap(wqe->atomic_wr.compare_add, in qib_make_rc_req()
432 put_ib_ateth_vaddr(wqe->atomic_wr.remote_addr, in qib_make_rc_req()
435 wqe->atomic_wr.rkey); in qib_make_rc_req()
447 qp->s_sge.sge = wqe->sg_list[0]; in qib_make_rc_req()
448 qp->s_sge.sg_list = wqe->sg_list + 1; in qib_make_rc_req()
449 qp->s_sge.num_sge = wqe->wr.num_sge; in qib_make_rc_req()
450 qp->s_sge.total_len = wqe->length; in qib_make_rc_req()
451 qp->s_len = wqe->length; in qib_make_rc_req()
457 if (wqe->wr.opcode == IB_WR_RDMA_READ) in qib_make_rc_req()
458 qp->s_psn = wqe->lpsn + 1; in qib_make_rc_req()
473 qp->s_len = restart_sge(&qp->s_sge, wqe, qp->s_psn, pmtu); in qib_make_rc_req()
486 if (wqe->wr.opcode == IB_WR_SEND) in qib_make_rc_req()
491 ohdr->u.imm_data = wqe->wr.ex.imm_data; in qib_make_rc_req()
494 if (wqe->wr.send_flags & IB_SEND_SOLICITED) in qib_make_rc_req()
512 qp->s_len = restart_sge(&qp->s_sge, wqe, qp->s_psn, pmtu); in qib_make_rc_req()
525 if (wqe->wr.opcode == IB_WR_RDMA_WRITE) in qib_make_rc_req()
530 ohdr->u.imm_data = wqe->wr.ex.imm_data; in qib_make_rc_req()
532 if (wqe->wr.send_flags & IB_SEND_SOLICITED) in qib_make_rc_req()
551 len = ((qp->s_psn - wqe->psn) & QIB_PSN_MASK) * pmtu; in qib_make_rc_req()
553 cpu_to_be64(wqe->rdma_wr.remote_addr + len); in qib_make_rc_req()
555 cpu_to_be32(wqe->rdma_wr.rkey); in qib_make_rc_req()
556 ohdr->u.rc.reth.length = cpu_to_be32(wqe->length - len); in qib_make_rc_req()
560 qp->s_psn = wqe->lpsn + 1; in qib_make_rc_req()
569 delta = (((int) bth2 - (int) wqe->psn) << 8) >> 8; in qib_make_rc_req()
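
The last match above (line 569) takes the difference between bth2 and the request's starting PSN and sign-extends its low 24 bits, yielding a signed distance that stays meaningful across wrap of the 24-bit PSN space. A portable sketch of that sign extension, written with an explicit test instead of shifting a possibly negative value (the function name and example values are illustrative):

    #include <stdint.h>
    #include <stdio.h>

    /* Signed distance from PSN `b` to PSN `a` in a 24-bit circular space:
     * reduce (a - b) modulo 2^24, then sign-extend bit 23. */
    static int32_t psn_delta24(uint32_t a, uint32_t b)
    {
        uint32_t d = (a - b) & 0xFFFFFF;

        return (d & 0x800000) ? (int32_t)d - 0x1000000 : (int32_t)d;
    }

    int main(void)
    {
        printf("%d\n", (int)psn_delta24(0x000001, 0xFFFFFE)); /* 3: wrapped forward   */
        printf("%d\n", (int)psn_delta24(0xFFFFFE, 0x000001)); /* -3: wrapped backward */
        return 0;
    }

qib_cmp24(), which appears throughout the retransmit and completion paths below, performs the same kind of circular 24-bit comparison.
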
738 struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, n); in reset_psn() local
747 if (qib_cmp24(psn, wqe->psn) <= 0) { in reset_psn()
753 opcode = wqe->wr.opcode; in reset_psn()
761 wqe = rvt_get_swqe_ptr(qp, n); in reset_psn()
762 diff = qib_cmp24(psn, wqe->psn); in reset_psn()
774 opcode = wqe->wr.opcode; in reset_psn()
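
reset_psn() walks the send queue looking for the request whose PSN range covers the PSN being backed up to, then rebuilds the sender state from that request's opcode (lines 753 and 774). A simplified, self-contained sketch of that kind of walk; the struct below is a stand-in, not the driver's struct rvt_swqe, and psn_delta24() repeats the 24-bit comparison from the previous sketch:

    #include <stdint.h>

    /* Hypothetical stand-in for a send work request; the driver walks
     * struct rvt_swqe entries via rvt_get_swqe_ptr(). */
    struct swqe_stub {
        uint32_t psn;   /* PSN of the request's first packet */
        uint32_t lpsn;  /* PSN of the request's last packet  */
    };

    /* Signed 24-bit circular PSN comparison (see the previous sketch). */
    static int32_t psn_delta24(uint32_t a, uint32_t b)
    {
        uint32_t d = (a - b) & 0xFFFFFF;

        return (d & 0x800000) ? (int32_t)d - 0x1000000 : (int32_t)d;
    }

    /* Starting at `start`, walk the circular queue of `size` entries up to
     * (but not including) `tail` and return the index of the request whose
     * [psn, lpsn] range contains `psn`, or `tail` if none does. */
    static uint32_t find_request_for_psn(const struct swqe_stub *q, uint32_t size,
                                         uint32_t start, uint32_t tail, uint32_t psn)
    {
        uint32_t n = start;

        while (n != tail) {
            if (psn_delta24(psn, q[n].psn) >= 0 &&
                psn_delta24(psn, q[n].lpsn) <= 0)
                return n;
            if (++n >= size)
                n = 0;
        }
        return tail;
    }

reset_sending_psn() (lines 858-866) and rdma_seq_err() (lines 1226-1238) below follow the same circular-walk shape, differing in what they do at each entry.
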
822 struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, qp->s_acked); in qib_restart_rc() local
830 rvt_send_complete(qp, wqe, IB_WC_RETRY_EXC_ERR); in qib_restart_rc()
839 if (wqe->wr.opcode == IB_WR_RDMA_READ) in qib_restart_rc()
858 struct rvt_swqe *wqe; in reset_sending_psn() local
863 wqe = rvt_get_swqe_ptr(qp, n); in reset_sending_psn()
864 if (qib_cmp24(psn, wqe->lpsn) <= 0) { in reset_sending_psn()
865 if (wqe->wr.opcode == IB_WR_RDMA_READ) in reset_sending_psn()
866 qp->s_sending_psn = wqe->lpsn + 1; in reset_sending_psn()
884 struct rvt_swqe *wqe; in qib_rc_send_complete() local
918 wqe = rvt_get_swqe_ptr(qp, qp->s_last); in qib_rc_send_complete()
919 if (qib_cmp24(wqe->lpsn, qp->s_sending_psn) >= 0 && in qib_rc_send_complete()
923 wqe, in qib_rc_send_complete()
924 ib_qib_wc_opcode[wqe->wr.opcode], in qib_rc_send_complete()
951 struct rvt_swqe *wqe, in do_rc_completion() argument
959 if (qib_cmp24(wqe->lpsn, qp->s_sending_psn) < 0 || in do_rc_completion()
962 wqe, in do_rc_completion()
963 ib_qib_wc_opcode[wqe->wr.opcode], in do_rc_completion()
969 update_last_psn(qp, wqe->lpsn); in do_rc_completion()
980 wqe = rvt_get_swqe_ptr(qp, qp->s_cur); in do_rc_completion()
983 qp->s_psn = wqe->psn; in do_rc_completion()
990 wqe = rvt_get_swqe_ptr(qp, qp->s_acked); in do_rc_completion()
992 return wqe; in do_rc_completion()
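
Both qib_rc_send_complete() (line 919) and do_rc_completion() (line 959) compare the request's last PSN against qp->s_sending_psn before generating a completion: even an ACKed request is not completed to the user until its last packet has actually been handed to the wire. A tiny sketch of that gating test, ignoring 24-bit wrap for brevity (the driver uses qib_cmp24(), and the full condition at those sites includes a second clause that does not reference wqe and so is not visible in this listing):

    #include <stdbool.h>
    #include <stdint.h>

    /* True while some packet of the request (whose last packet carries
     * `wqe_lpsn`) has not yet gone out; completion is deferred while this
     * holds.  Wrap-around of the 24-bit PSN space is ignored for brevity. */
    static bool still_being_sent(uint32_t wqe_lpsn, uint32_t s_sending_psn)
    {
        return wqe_lpsn >= s_sending_psn;
    }
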
1011 struct rvt_swqe *wqe; in do_rc_ack() local
1025 wqe = rvt_get_swqe_ptr(qp, qp->s_acked); in do_rc_ack()
1032 while ((diff = qib_cmp24(ack_psn, wqe->lpsn)) >= 0) { in do_rc_ack()
1039 if (wqe->wr.opcode == IB_WR_RDMA_READ && in do_rc_ack()
1054 if ((wqe->wr.opcode == IB_WR_RDMA_READ && in do_rc_ack()
1056 ((wqe->wr.opcode == IB_WR_ATOMIC_CMP_AND_SWP || in do_rc_ack()
1057 wqe->wr.opcode == IB_WR_ATOMIC_FETCH_AND_ADD) && in do_rc_ack()
1076 if (wqe->wr.opcode == IB_WR_ATOMIC_CMP_AND_SWP || in do_rc_ack()
1077 wqe->wr.opcode == IB_WR_ATOMIC_FETCH_AND_ADD) { in do_rc_ack()
1078 u64 *vaddr = wqe->sg_list[0].vaddr; in do_rc_ack()
1082 (wqe->wr.opcode == IB_WR_RDMA_READ || in do_rc_ack()
1083 wqe->wr.opcode == IB_WR_ATOMIC_CMP_AND_SWP || in do_rc_ack()
1084 wqe->wr.opcode == IB_WR_ATOMIC_FETCH_AND_ADD)) { in do_rc_ack()
1098 wqe = do_rc_completion(qp, wqe, ibp); in do_rc_ack()
1195 rvt_send_complete(qp, wqe, status); in do_rc_ack()
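
Lines 1076-1078 show how do_rc_ack() finishes an atomic request: the 64-bit value carried in the atomic ACK (the original contents of the remote address) is stored through the first scatter/gather element of the WQE. A hedged sketch of that write-back; the struct is a simplified stand-in, and the length check and memcpy() are illustrative (the driver stores through the u64 pointer at wqe->sg_list[0].vaddr directly):

    #include <stdint.h>
    #include <string.h>

    /* Simplified stand-in for the first scatter/gather element of a WQE. */
    struct sge_stub {
        void    *vaddr;   /* start of the requester's local buffer */
        uint32_t length;  /* bytes available at vaddr              */
    };

    /* Store the value returned by an atomic ACK (CMP_AND_SWP or
     * FETCH_AND_ADD) into the requester's local buffer. */
    static void complete_atomic(struct sge_stub *sge, uint64_t returned)
    {
        if (sge->length >= sizeof(returned))
            memcpy(sge->vaddr, &returned, sizeof(returned));
    }

The surrounding loop (line 1032) keeps retiring requests for as long as the ACK PSN covers each request's last PSN, calling do_rc_completion() for each one it retires (line 1098).
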
1226 struct rvt_swqe *wqe; in rdma_seq_err() local
1231 wqe = rvt_get_swqe_ptr(qp, qp->s_acked); in rdma_seq_err()
1233 while (qib_cmp24(psn, wqe->lpsn) > 0) { in rdma_seq_err()
1234 if (wqe->wr.opcode == IB_WR_RDMA_READ || in rdma_seq_err()
1235 wqe->wr.opcode == IB_WR_ATOMIC_CMP_AND_SWP || in rdma_seq_err()
1236 wqe->wr.opcode == IB_WR_ATOMIC_FETCH_AND_ADD) in rdma_seq_err()
1238 wqe = do_rc_completion(qp, wqe, ibp); in rdma_seq_err()
1275 struct rvt_swqe *wqe; in qib_rc_rcv_resp() local
1339 wqe = rvt_get_swqe_ptr(qp, qp->s_acked); in qib_rc_rcv_resp()
1355 wqe = rvt_get_swqe_ptr(qp, qp->s_acked); in qib_rc_rcv_resp()
1356 if (unlikely(wqe->wr.opcode != IB_WR_RDMA_READ)) in qib_rc_rcv_resp()
1364 wqe, psn, pmtu); in qib_rc_rcv_resp()
1371 if (unlikely(wqe->wr.opcode != IB_WR_RDMA_READ)) in qib_rc_rcv_resp()
1421 wqe = rvt_get_swqe_ptr(qp, qp->s_acked); in qib_rc_rcv_resp()
1423 wqe, psn, pmtu); in qib_rc_rcv_resp()
1430 if (unlikely(wqe->wr.opcode != IB_WR_RDMA_READ)) in qib_rc_rcv_resp()
1466 rvt_send_complete(qp, wqe, status); in qib_rc_rcv_resp()
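
The calls continued at lines 1364 and 1423 (their (..., wqe, psn, pmtu) argument lists match restart_sge()) serve the receive side of an RDMA READ: each response packet's payload belongs at the byte offset implied by its PSN relative to the request's first PSN, so the scatter/gather state is repositioned before the data is copied in. A minimal sketch of that placement, reusing the offset arithmetic from the first example; the flat destination buffer is an illustrative simplification of the WQE's scatter/gather list:

    #include <stdint.h>
    #include <string.h>

    #define PSN_MASK 0xFFFFFFu  /* assumption: 24-bit PSN space */

    /* Copy one RDMA READ response payload to the offset implied by its PSN.
     * `first_psn` is the PSN of the first response packet and `pmtu` the
     * payload size of the full-sized packets. */
    static void place_read_response(uint8_t *dst, uint32_t first_psn,
                                    uint32_t psn, uint32_t pmtu,
                                    const uint8_t *payload, uint32_t paylen)
    {
        uint32_t off = ((psn - first_psn) & PSN_MASK) * pmtu;

        memcpy(dst + off, payload, paylen);
    }

The repeated wqe->wr.opcode != IB_WR_RDMA_READ checks (lines 1356, 1371, 1430) guard against a response arriving when the request at the head of the queue is not an outstanding read; line 1466 completes the request with the accumulated error status when response processing fails.
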