Lines Matching refs:psn
130 struct rvt_qp *qp, u32 psn, int diff, bool fecn);
348 qpriv->flow_state.psn = 0; in hfi1_qp_priv_init()
801 fs->psn = 0; in hfi1_kern_setup_hw_flow()
828 fs->psn = 0; in hfi1_kern_clear_hw_flow()
1515 flow->flow_state.spsn = qpriv->flow_state.psn; in hfi1_kern_exp_rcv_setup()
1519 qpriv->flow_state.psn += flow->npkts; in hfi1_kern_exp_rcv_setup()
1675 u32 psn, u16 *fidx) in find_flow_ib() argument
1685 if (cmp_psn(psn, flow->flow_state.ib_spsn) >= 0 && in find_flow_ib()
1686 cmp_psn(psn, flow->flow_state.ib_lpsn) <= 0) { in find_flow_ib()
1793 trace_hfi1_tid_req_build_read_req(qp, 0, wqe->wr.opcode, wqe->psn, in hfi1_build_tid_rdma_read_req()
1832 if ((qpriv->flow_state.psn + npkts) > MAX_TID_FLOW_PSN - 1) { in hfi1_build_tid_rdma_read_req()
1886 u32 bth0, u32 psn, u64 vaddr, u32 len) in tid_rdma_rcv_read_request() argument
1944 flow->flow_state.ib_spsn = psn; in tid_rdma_rcv_read_request()
1958 e->psn = psn; in tid_rdma_rcv_read_request()
1959 e->lpsn = psn + flow->npkts - 1; in tid_rdma_rcv_read_request()
1971 req->r_flow_psn = e->psn; in tid_rdma_rcv_read_request()
1973 trace_hfi1_tid_req_rcv_read_req(qp, 0, e->opcode, e->psn, e->lpsn, in tid_rdma_rcv_read_request()
1980 struct rvt_qp *qp, u32 psn, int diff) in tid_rdma_rcv_error() argument
1992 trace_hfi1_rsp_tid_rcv_error(qp, psn); in tid_rdma_rcv_error()
1993 trace_hfi1_tid_rdma_rcv_err(qp, 0, psn, diff); in tid_rdma_rcv_error()
2008 e = find_prev_entry(qp, psn, &prev, NULL, &old_req); in tid_rdma_rcv_error()
2014 req->r_flow_psn = psn; in tid_rdma_rcv_error()
2015 trace_hfi1_tid_req_rcv_err(qp, 0, e->opcode, e->psn, e->lpsn, req); in tid_rdma_rcv_error()
2030 if (psn != e->psn || len != req->total_len) in tid_rdma_rcv_error()
2055 if (tid_rdma_rcv_read_request(qp, e, packet, ohdr, bth0, psn, in tid_rdma_rcv_error()
2126 delta_psn(psn, fstate->resp_ib_psn), in tid_rdma_rcv_error()
2129 delta_psn(psn, fstate->resp_ib_psn); in tid_rdma_rcv_error()
2139 req->cur_seg = delta_psn(psn, e->psn); in tid_rdma_rcv_error()
2155 trace_hfi1_tid_req_rcv_err(qp, 0, e->opcode, e->psn, in tid_rdma_rcv_error()
2231 u32 bth0, psn, len, rkey; in hfi1_rc_rcv_tid_rdma_read_req() local
2243 psn = mask_psn(be32_to_cpu(ohdr->bth[2])); in hfi1_rc_rcv_tid_rdma_read_req()
2244 trace_hfi1_rsp_rcv_tid_read_req(qp, psn); in hfi1_rc_rcv_tid_rdma_read_req()
2259 diff = delta_psn(psn, qp->r_psn); in hfi1_rc_rcv_tid_rdma_read_req()
2261 tid_rdma_rcv_err(packet, ohdr, qp, psn, diff, fecn); in hfi1_rc_rcv_tid_rdma_read_req()
2288 if (tid_rdma_rcv_read_request(qp, e, packet, ohdr, bth0, psn, vaddr, in hfi1_rc_rcv_tid_rdma_read_req()
2300 qp->r_psn += e->lpsn - e->psn + 1; in hfi1_rc_rcv_tid_rdma_read_req()
2408 find_tid_request(struct rvt_qp *qp, u32 psn, enum ib_wr_opcode opcode) in find_tid_request() argument
2420 if (cmp_psn(psn, wqe->psn) >= 0 && in find_tid_request()
2421 cmp_psn(psn, wqe->lpsn) <= 0) { in find_tid_request()
2522 req->e.swqe->psn, req->e.swqe->lpsn, in hfi1_rc_rcv_tid_rdma_read_resp()
2650 u8 rte, u32 psn, u32 ibpsn) in handle_read_kdeth_eflags() argument
2734 trace_hfi1_tid_req_read_kdeth_eflags(qp, 0, wqe->wr.opcode, wqe->psn, in handle_read_kdeth_eflags()
2754 diff = cmp_psn(psn, in handle_read_kdeth_eflags()
2780 if (cmp_psn(fpsn, psn) == 0) { in handle_read_kdeth_eflags()
2787 mask_psn(psn + 1); in handle_read_kdeth_eflags()
2851 u32 qp_num, psn, ibpsn; in hfi1_handle_kdeth_eflags() local
2876 psn = mask_psn(be32_to_cpu(ohdr->bth[2])); in hfi1_handle_kdeth_eflags()
2916 ret = handle_read_kdeth_eflags(rcd, packet, rcv_type, rte, psn, in hfi1_handle_kdeth_eflags()
2938 trace_hfi1_eflags_err_write(qp, rcv_type, rte, psn); in hfi1_handle_kdeth_eflags()
2939 trace_hfi1_rsp_handle_kdeth_eflags(qp, psn); in hfi1_handle_kdeth_eflags()
2941 trace_hfi1_tid_req_handle_kdeth_eflags(qp, 0, e->opcode, e->psn, in hfi1_handle_kdeth_eflags()
2966 diff = cmp_psn(psn, in hfi1_handle_kdeth_eflags()
2979 if (psn == full_flow_psn(flow, in hfi1_handle_kdeth_eflags()
2983 mask_psn(psn + 1); in hfi1_handle_kdeth_eflags()
3055 wqe->psn, wqe->lpsn, in hfi1_tid_rdma_restart_req()
3123 trace_hfi1_tid_req_restart_req(qp, 0, wqe->wr.opcode, wqe->psn, in hfi1_tid_rdma_restart_req()
3323 wqe->lpsn = wqe->psn; in setup_tid_rdma_wqe()
3345 wqe->psn, wqe->lpsn, in setup_tid_rdma_wqe()
3498 trace_hfi1_tid_req_write_alloc_res(qp, 0, e->opcode, e->psn, in hfi1_tid_write_alloc_resources()
3536 if (qpriv->flow_state.psn + npkts > MAX_TID_FLOW_PSN - 1) { in hfi1_tid_write_alloc_resources()
3590 qp->r_psn = e->psn + req->alloc_seg; in hfi1_tid_write_alloc_resources()
3656 u32 bth0, psn, len, rkey, num_segs; in hfi1_rc_rcv_tid_rdma_write_req() local
3667 psn = mask_psn(be32_to_cpu(ohdr->bth[2])); in hfi1_rc_rcv_tid_rdma_write_req()
3668 trace_hfi1_rsp_rcv_tid_write_req(qp, psn); in hfi1_rc_rcv_tid_rdma_write_req()
3681 diff = delta_psn(psn, qp->r_psn); in hfi1_rc_rcv_tid_rdma_write_req()
3683 tid_rdma_rcv_err(packet, ohdr, qp, psn, diff, fecn); in hfi1_rc_rcv_tid_rdma_write_req()
3741 e->psn = psn; in hfi1_rc_rcv_tid_rdma_write_req()
3755 req->r_flow_psn = e->psn; in hfi1_rc_rcv_tid_rdma_write_req()
3773 trace_hfi1_tid_req_rcv_write_req(qp, 0, e->opcode, e->psn, e->lpsn, in hfi1_rc_rcv_tid_rdma_write_req()
3836 trace_hfi1_tid_req_build_write_resp(qp, 0, e->opcode, e->psn, e->lpsn, in hfi1_build_tid_rdma_write_resp()
4036 u32 opcode, aeth, psn, flow_psn, i, tidlen = 0, pktlen; in hfi1_rc_rcv_tid_rdma_write_resp() local
4041 psn = mask_psn(be32_to_cpu(ohdr->bth[2])); in hfi1_rc_rcv_tid_rdma_write_resp()
4048 if (cmp_psn(psn, qp->s_next_psn) >= 0) in hfi1_rc_rcv_tid_rdma_write_resp()
4052 if (unlikely(cmp_psn(psn, qp->s_last_psn) <= 0)) in hfi1_rc_rcv_tid_rdma_write_resp()
4064 if (cmp_psn(psn, qp->s_last_psn + 1) != 0) in hfi1_rc_rcv_tid_rdma_write_resp()
4089 if (!do_rc_ack(qp, aeth, psn, opcode, 0, rcd)) in hfi1_rc_rcv_tid_rdma_write_resp()
4092 trace_hfi1_ack(qp, psn); in hfi1_rc_rcv_tid_rdma_write_resp()
4106 flow->flow_state.resp_ib_psn = psn; in hfi1_rc_rcv_tid_rdma_write_resp()
4143 trace_hfi1_tid_req_rcv_write_resp(qp, 0, wqe->wr.opcode, wqe->psn, in hfi1_rc_rcv_tid_rdma_write_resp()
4149 if (!cmp_psn(psn, wqe->psn)) { in hfi1_rc_rcv_tid_rdma_write_resp()
4150 req->r_last_acked = mask_psn(wqe->psn - 1); in hfi1_rc_rcv_tid_rdma_write_resp()
4268 u32 psn, next; in hfi1_rc_rcv_tid_rdma_write_data() local
4273 psn = mask_psn(be32_to_cpu(ohdr->bth[2])); in hfi1_rc_rcv_tid_rdma_write_data()
4284 if (cmp_psn(psn, full_flow_psn(flow, flow->flow_state.lpsn))) { in hfi1_rc_rcv_tid_rdma_write_data()
4287 if (cmp_psn(psn, flow->flow_state.r_next_psn)) in hfi1_rc_rcv_tid_rdma_write_data()
4290 flow->flow_state.r_next_psn = mask_psn(psn + 1); in hfi1_rc_rcv_tid_rdma_write_data()
4311 len += delta_psn(psn, in hfi1_rc_rcv_tid_rdma_write_data()
4329 priv->r_next_psn_kdeth = mask_psn(psn + 1); in hfi1_rc_rcv_tid_rdma_write_data()
4334 flow->flow_state.r_next_psn = mask_psn(psn + 1); in hfi1_rc_rcv_tid_rdma_write_data()
4337 rcd->flows[flow->idx].psn = psn & HFI1_KDETH_BTH_SEQ_MASK; in hfi1_rc_rcv_tid_rdma_write_data()
4348 trace_hfi1_rsp_rcv_tid_write_data(qp, psn); in hfi1_rc_rcv_tid_rdma_write_data()
4349 trace_hfi1_tid_req_rcv_write_data(qp, 0, e->opcode, e->psn, e->lpsn, in hfi1_rc_rcv_tid_rdma_write_data()
4408 static bool hfi1_tid_rdma_is_resync_psn(u32 psn) in hfi1_tid_rdma_is_resync_psn() argument
4410 return (bool)((psn & HFI1_KDETH_BTH_SEQ_MASK) == in hfi1_tid_rdma_is_resync_psn()
4493 u32 aeth, psn, req_psn, ack_psn, flpsn, resync_psn, ack_kpsn; in hfi1_rc_rcv_tid_rdma_ack() local
4499 psn = mask_psn(be32_to_cpu(ohdr->bth[2])); in hfi1_rc_rcv_tid_rdma_ack()
4505 trace_hfi1_rcv_tid_ack(qp, aeth, psn, req_psn, resync_psn); in hfi1_rc_rcv_tid_rdma_ack()
4509 cmp_psn(psn, qpriv->s_resync_psn)) in hfi1_rc_rcv_tid_rdma_ack()
4513 if (hfi1_tid_rdma_is_resync_psn(psn)) in hfi1_rc_rcv_tid_rdma_ack()
4516 ack_kpsn = psn; in hfi1_rc_rcv_tid_rdma_ack()
4531 trace_hfi1_tid_req_rcv_tid_ack(qp, 0, wqe->wr.opcode, wqe->psn, in hfi1_rc_rcv_tid_rdma_ack()
4537 if (cmp_psn(psn, full_flow_psn(flow, flow->flow_state.spsn)) < 0 || in hfi1_rc_rcv_tid_rdma_ack()
4548 trace_hfi1_tid_req_rcv_tid_ack(qp, 0, wqe->wr.opcode, wqe->psn, in hfi1_rc_rcv_tid_rdma_ack()
4567 trace_hfi1_tid_req_rcv_tid_ack(qp, 0, wqe->wr.opcode, wqe->psn, in hfi1_rc_rcv_tid_rdma_ack()
4573 if (!hfi1_tid_rdma_is_resync_psn(psn)) { in hfi1_rc_rcv_tid_rdma_ack()
4614 psn = mask_psn(psn + 1); in hfi1_rc_rcv_tid_rdma_ack()
4615 generation = psn >> HFI1_KDETH_BTH_SEQ_SHIFT; in hfi1_rc_rcv_tid_rdma_ack()
4636 req->r_ack_psn = psn; in hfi1_rc_rcv_tid_rdma_ack()
4713 if (cmp_psn(psn, flpsn) > 0) in hfi1_rc_rcv_tid_rdma_ack()
4812 qp, 0, wqe->wr.opcode, wqe->psn, wqe->lpsn, req); in hfi1_tid_retry_timeout()
4868 u32 psn, generation, idx, gen_next; in hfi1_rc_rcv_tid_rdma_resync() local
4873 psn = mask_psn(be32_to_cpu(ohdr->bth[2])); in hfi1_rc_rcv_tid_rdma_resync()
4875 generation = mask_psn(psn + 1) >> HFI1_KDETH_BTH_SEQ_SHIFT; in hfi1_rc_rcv_tid_rdma_resync()
4903 fs->psn = 0; in hfi1_rc_rcv_tid_rdma_resync()
4924 trace_hfi1_tid_req_rcv_resync(qp, 0, e->opcode, e->psn, in hfi1_rc_rcv_tid_rdma_resync()
4941 flow->flow_state.spsn = fs->psn; in hfi1_rc_rcv_tid_rdma_resync()
4947 fs->psn += flow->npkts; in hfi1_rc_rcv_tid_rdma_resync()
5058 trace_hfi1_tid_req_make_tid_pkt(qp, 0, wqe->wr.opcode, wqe->psn, in hfi1_make_tid_rdma_pkt()
5097 wqe->psn, wqe->lpsn, req); in hfi1_make_tid_rdma_pkt()
5219 trace_hfi1_rsp_make_tid_ack(qp, e->psn); in make_tid_rdma_ack()
5220 trace_hfi1_tid_req_make_tid_ack(qp, 0, e->opcode, e->psn, e->lpsn, in make_tid_rdma_ack()
5294 trace_hfi1_tid_req_make_tid_ack(qp, 0, e->opcode, e->psn, e->lpsn, in make_tid_rdma_ack()
5491 struct rvt_qp *qp, u32 psn, int diff, bool fecn) in tid_rdma_rcv_err() argument
5495 tid_rdma_rcv_error(packet, ohdr, qp, psn, diff); in tid_rdma_rcv_err()
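
All of the functions in the hits above belong to the hfi1 TID RDMA code (the hfi1_rc_rcv_tid_rdma_* receive paths and their helpers), and the hits keep returning to two idioms around psn handling: (1) PSNs live in a fixed-width circular space, so they are wrapped with mask_psn() and ordered with cmp_psn()/delta_psn() rather than compared directly, and (2) for TID RDMA the BTH PSN is split into a generation (upper bits) and a KDETH sequence (low HFI1_KDETH_BTH_SEQ_MASK bits), which is what the psn >> HFI1_KDETH_BTH_SEQ_SHIFT and psn & HFI1_KDETH_BTH_SEQ_MASK hits compute. The standalone sketch below is only a minimal illustration of those two idioms: the bit widths, macro values, helper bodies, psn_generation(), is_resync_psn() and main() are assumptions made for the demo and are not copied from the hfi1 sources, even where the helper names mirror the driver's.

#include <stdint.h>
#include <stdio.h>

/* Assumption for the demo: IB PSNs are 24 bits wide. */
#define PSN_BITS	24u
#define PSN_MASK	((1u << PSN_BITS) - 1)		/* 0x00FFFFFF */
#define PSN_SHIFT	(32u - PSN_BITS)

/* Assumption for the demo: the KDETH sequence occupies the low 11 bits. */
#define KDETH_SEQ_SHIFT	11u
#define KDETH_SEQ_MASK	((1u << KDETH_SEQ_SHIFT) - 1)

/* Wrap a PSN back into its circular space (cf. the mask_psn(psn + 1) hits). */
static uint32_t mask_psn(uint32_t psn)
{
	return psn & PSN_MASK;
}

/* Signed distance a - b in the circular PSN space (cf. delta_psn()). */
static int32_t delta_psn(uint32_t a, uint32_t b)
{
	return (int32_t)((a - b) << PSN_SHIFT) >> PSN_SHIFT;
}

/* <0 / 0 / >0 when a is behind / equal to / ahead of b (cf. cmp_psn()). */
static int cmp_psn(uint32_t a, uint32_t b)
{
	return delta_psn(a, b);
}

/* Generation part of a TID RDMA PSN (cf. psn >> HFI1_KDETH_BTH_SEQ_SHIFT). */
static uint32_t psn_generation(uint32_t psn)
{
	return psn >> KDETH_SEQ_SHIFT;
}

/* A "resync" PSN: sequence field all ones (cf. hfi1_tid_rdma_is_resync_psn()). */
static int is_resync_psn(uint32_t psn)
{
	return (psn & KDETH_SEQ_MASK) == KDETH_SEQ_MASK;
}

int main(void)
{
	uint32_t wrapped = mask_psn(0xFFFFFEu + 3);	/* wraps to 0x000001 */

	printf("wrapped: 0x%06x\n", wrapped);
	printf("cmp_psn(0x000001, 0xFFFFFE) = %d\n", cmp_psn(1, 0xFFFFFEu));
	printf("generation(0x001800) = %u, resync(0x0017FF) = %d\n",
	       psn_generation(0x001800u), is_resync_psn(0x0017FFu));
	return 0;
}

Built standalone with any C compiler, the demo prints a wrapped PSN of 0x000001 and a cmp_psn() result of +3, which is the point of the idiom: after wraparound, raw '<' / '>' comparisons give the wrong ordering, so the driver goes through these helpers instead.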