Lines matching refs: ndlp

57 lpfc_get_nvme_buf(struct lpfc_hba *phba, struct lpfc_nodelist *ndlp,
359 struct lpfc_nodelist *ndlp; in lpfc_nvme_remoteport_delete() local
361 ndlp = rport->ndlp; in lpfc_nvme_remoteport_delete()
362 if (!ndlp) in lpfc_nvme_remoteport_delete()
365 vport = ndlp->vport; in lpfc_nvme_remoteport_delete()
381 if (ndlp->upcall_flags & NLP_WAIT_FOR_UNREG) { in lpfc_nvme_remoteport_delete()
382 ndlp->nrport = NULL; in lpfc_nvme_remoteport_delete()
383 ndlp->upcall_flags &= ~NLP_WAIT_FOR_UNREG; in lpfc_nvme_remoteport_delete()
389 lpfc_nlp_put(ndlp); in lpfc_nvme_remoteport_delete()
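
Read together, the lpfc_nvme_remoteport_delete() hits above describe one cleanup pattern: validate the rport-to-node back pointer, clear the NLP_WAIT_FOR_UNREG bookkeeping, then drop the node reference. A minimal sketch of that flow, reconstructed only from the fragments listed here; locking, logging and the surrounding error handling are omitted, and the exact structure in the driver may differ:

    static void
    sketch_remoteport_delete(struct nvme_fc_remote_port *remoteport)
    {
            /* assumption: the transport's private area carries the driver rport */
            struct lpfc_nvme_rport *rport = remoteport->private;
            struct lpfc_nodelist *ndlp;
            struct lpfc_vport *vport;

            ndlp = rport->ndlp;
            if (!ndlp)
                    return;                 /* nothing to release */
            vport = ndlp->vport;            /* the driver uses this for its vlog prefix */

            if (ndlp->upcall_flags & NLP_WAIT_FOR_UNREG) {
                    ndlp->nrport = NULL;    /* sever the node -> rport link */
                    ndlp->upcall_flags &= ~NLP_WAIT_FOR_UNREG;
            }

            lpfc_nlp_put(ndlp);             /* drop the reference taken at registration */
    }
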
426 vport = axchg->ndlp->vport; in lpfc_nvme_handle_lsreq()
427 lpfc_rport = axchg->ndlp->nrport; in lpfc_nvme_handle_lsreq()
475 struct lpfc_nodelist *ndlp; in __lpfc_nvme_ls_req_cmp() local
479 ndlp = (struct lpfc_nodelist *)cmdwqe->context1; in __lpfc_nvme_ls_req_cmp()
486 pnvme_lsreq, ndlp ? ndlp->nlp_DID : 0, in __lpfc_nvme_ls_req_cmp()
489 cmdwqe, pnvme_lsreq, cmdwqe->context3, ndlp); in __lpfc_nvme_ls_req_cmp()
506 pnvme_lsreq, ndlp ? ndlp->nlp_DID : 0, in __lpfc_nvme_ls_req_cmp()
508 if (ndlp) { in __lpfc_nvme_ls_req_cmp()
509 lpfc_nlp_put(ndlp); in __lpfc_nvme_ls_req_cmp()
546 struct lpfc_nodelist *ndlp, uint32_t num_entry, in lpfc_nvme_gen_req() argument
569 genwqe->context1 = lpfc_nlp_get(ndlp); in lpfc_nvme_gen_req()
613 phba->sli4_hba.rpi_ids[ndlp->nlp_rpi]); in lpfc_nvme_gen_req()
648 genwqe->sli4_xritag, genwqe->iotag, ndlp->nlp_DID); in lpfc_nvme_gen_req()
655 ndlp->nlp_DID, genwqe->iotag, in lpfc_nvme_gen_req()
665 ndlp->nlp_DID, genwqe->sli4_xritag, in lpfc_nvme_gen_req()
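
The __lpfc_nvme_ls_req_cmp() and lpfc_nvme_gen_req() hits pair up around the WQE context: the request side takes a counted node reference and parks it in context1, and the completion side recovers it from the same slot and puts it. A condensed sketch of that pairing, assuming context1 carries only the ndlp reference on this path:

    /* request side, lpfc_nvme_gen_req(): pin the node for the WQE's lifetime */
    genwqe->context1 = lpfc_nlp_get(ndlp);

    /* completion side, __lpfc_nvme_ls_req_cmp(): recover and release the node */
    ndlp = (struct lpfc_nodelist *)cmdwqe->context1;
    if (ndlp) {
            lpfc_nlp_put(ndlp);
            cmdwqe->context1 = NULL;        /* assumption: slot is cleared after the put */
    }
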
687 __lpfc_nvme_ls_req(struct lpfc_vport *vport, struct lpfc_nodelist *ndlp, in __lpfc_nvme_ls_req() argument
698 if (!ndlp || !NLP_CHK_NODE_ACT(ndlp)) { in __lpfc_nvme_ls_req()
702 ndlp); in __lpfc_nvme_ls_req()
706 ntype = ndlp->nlp_type; in __lpfc_nvme_ls_req()
707 nstate = ndlp->nlp_state; in __lpfc_nvme_ls_req()
713 ndlp->nlp_DID, ntype, nstate); in __lpfc_nvme_ls_req()
737 ndlp->nlp_DID); in __lpfc_nvme_ls_req()
746 ndlp->nlp_DID); in __lpfc_nvme_ls_req()
770 ndlp->nlp_DID, pnvme_lsreq, pnvme_lsreq->rqstlen, in __lpfc_nvme_ls_req()
775 pnvme_lsreq, gen_req_cmp, ndlp, 2, in __lpfc_nvme_ls_req()
781 pnvme_lsreq, ret, ndlp->nlp_DID); in __lpfc_nvme_ls_req()
824 ret = __lpfc_nvme_ls_req(vport, rport->ndlp, pnvme_lsreq, in lpfc_nvme_ls_req()
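
Before __lpfc_nvme_ls_req() builds anything against the node, it insists the node exists and is still active, and it snapshots the node's type and state for the role/state checks that follow. A compressed sketch of that guard; the errno is illustrative and the driver also logs on this path:

    if (!ndlp || !NLP_CHK_NODE_ACT(ndlp))
            return -ENODEV;                 /* illustrative errno */

    ntype = ndlp->nlp_type;                 /* must advertise an NVMe role */
    nstate = ndlp->nlp_state;               /* and be in a usable login state */
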
847 __lpfc_nvme_ls_abort(struct lpfc_vport *vport, struct lpfc_nodelist *ndlp, in __lpfc_nvme_ls_abort() argument
855 if (!ndlp) { in __lpfc_nvme_ls_abort()
859 ndlp, ndlp ? ndlp->nlp_DID : 0); in __lpfc_nvme_ls_abort()
947 struct lpfc_nodelist *ndlp; in lpfc_nvme_ls_abort() local
959 ndlp = lpfc_findnode_did(vport, pnvme_rport->port_id); in lpfc_nvme_ls_abort()
961 ret = __lpfc_nvme_ls_abort(vport, ndlp, pnvme_lsreq); in lpfc_nvme_ls_abort()
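
lpfc_nvme_ls_abort() resolves the node from the remote port's FC address before delegating to the common abort helper. A sketch of that hand-off, assuming lpfc_findnode_did() returns NULL when no node matches the DID; lines 855-859 above suggest the helper copes with a NULL ndlp itself, so the lookup result is passed straight through:

    ndlp = lpfc_findnode_did(vport, pnvme_rport->port_id);
    ret = __lpfc_nvme_ls_abort(vport, ndlp, pnvme_lsreq);
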
1085 struct lpfc_nodelist *ndlp; in lpfc_nvme_io_cmd_wqe_cmpl() local
1139 ndlp = lpfc_ncmd->ndlp; in lpfc_nvme_io_cmd_wqe_cmpl()
1140 if (!ndlp || !NLP_CHK_NODE_ACT(ndlp)) { in lpfc_nvme_io_cmd_wqe_cmpl()
1623 struct lpfc_nodelist *ndlp; in lpfc_nvme_fcp_io_submit() local
1682 ndlp = rport->ndlp; in lpfc_nvme_fcp_io_submit()
1683 if (!ndlp || !NLP_CHK_NODE_ACT(ndlp)) { in lpfc_nvme_fcp_io_submit()
1687 rport, ndlp, pnvme_rport->port_id); in lpfc_nvme_fcp_io_submit()
1694 if ((ndlp->nlp_type & NLP_NVME_TARGET) && in lpfc_nvme_fcp_io_submit()
1695 (ndlp->nlp_state != NLP_STE_MAPPED_NODE)) { in lpfc_nvme_fcp_io_submit()
1700 ndlp->nlp_state, ndlp->nlp_type, in lpfc_nvme_fcp_io_submit()
1701 ndlp->upcall_flags); in lpfc_nvme_fcp_io_submit()
1722 if (lpfc_ndlp_check_qdepth(phba, ndlp)) { in lpfc_nvme_fcp_io_submit()
1723 if ((atomic_read(&ndlp->cmd_pending) >= ndlp->cmd_qdepth) && in lpfc_nvme_fcp_io_submit()
1728 lpfc_queue_info->index, ndlp->nlp_DID, in lpfc_nvme_fcp_io_submit()
1729 atomic_read(&ndlp->cmd_pending), in lpfc_nvme_fcp_io_submit()
1730 ndlp->cmd_qdepth); in lpfc_nvme_fcp_io_submit()
1745 lpfc_ncmd = lpfc_get_nvme_buf(phba, ndlp, idx, expedite); in lpfc_nvme_fcp_io_submit()
1751 lpfc_queue_info->index, ndlp->nlp_DID); in lpfc_nvme_fcp_io_submit()
1772 lpfc_ncmd->ndlp = ndlp; in lpfc_nvme_fcp_io_submit()
1786 lpfc_nvme_prep_io_cmd(vport, lpfc_ncmd, ndlp, cstat); in lpfc_nvme_fcp_io_submit()
1792 lpfc_queue_info->index, ndlp->nlp_DID); in lpfc_nvme_fcp_io_submit()
1800 lpfc_queue_info->index, ndlp->nlp_DID); in lpfc_nvme_fcp_io_submit()
1808 ret, vport->fc_myDID, ndlp->nlp_DID, in lpfc_nvme_fcp_io_submit()
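
The lpfc_nvme_fcp_io_submit() hits include a per-node queue-depth throttle ahead of buffer allocation: when the node is under qdepth tracking and cmd_pending has reached cmd_qdepth, the I/O is failed back to the transport for a later retry. A sketch of that check; the !expedite condition, the errno and the out_fail label are assumptions filled in around the matched lines:

    if (lpfc_ndlp_check_qdepth(phba, ndlp)) {
            if ((atomic_read(&ndlp->cmd_pending) >= ndlp->cmd_qdepth) &&
                !expedite) {                /* assumption: expedited I/O bypasses the throttle */
                    /* driver logs the DID, cmd_pending and cmd_qdepth here */
                    ret = -EBUSY;           /* illustrative errno */
                    goto out_fail;          /* illustrative label */
            }
    }
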
2097 lpfc_get_nvme_buf(struct lpfc_hba *phba, struct lpfc_nodelist *ndlp, in lpfc_get_nvme_buf() argument
2133 if (lpfc_ndlp_check_qdepth(phba, ndlp)) { in lpfc_get_nvme_buf()
2134 atomic_inc(&ndlp->cmd_pending); in lpfc_get_nvme_buf()
2162 if ((lpfc_ncmd->flags & LPFC_SBUF_BUMP_QDEPTH) && lpfc_ncmd->ndlp) in lpfc_release_nvme_buf()
2163 atomic_dec(&lpfc_ncmd->ndlp->cmd_pending); in lpfc_release_nvme_buf()
2165 lpfc_ncmd->ndlp = NULL; in lpfc_release_nvme_buf()
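
Lines 2133-2134 and 2162-2165 are two halves of one accounting scheme: lpfc_get_nvme_buf() bumps ndlp->cmd_pending when the node is under qdepth tracking, and lpfc_release_nvme_buf() undoes it, keyed by the LPFC_SBUF_BUMP_QDEPTH flag on the buffer. A sketch of both halves; exactly where the flag is set is an assumption, since that line is not in the listing:

    /* allocation side: account the command against the node */
    if (lpfc_ndlp_check_qdepth(phba, ndlp)) {
            atomic_inc(&ndlp->cmd_pending);
            lpfc_ncmd->flags |= LPFC_SBUF_BUMP_QDEPTH;      /* assumed placement */
    }

    /* release side: undo the accounting and drop the node back pointer */
    if ((lpfc_ncmd->flags & LPFC_SBUF_BUMP_QDEPTH) && lpfc_ncmd->ndlp)
            atomic_dec(&lpfc_ncmd->ndlp->cmd_pending);
    lpfc_ncmd->ndlp = NULL;
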
2429 lpfc_nvme_register_port(struct lpfc_vport *vport, struct lpfc_nodelist *ndlp) in lpfc_nvme_register_port() argument
2440 struct fc_rport *srport = ndlp->rport; in lpfc_nvme_register_port()
2442 lpfc_printf_vlog(ndlp->vport, KERN_INFO, LOG_NVME_DISC, in lpfc_nvme_register_port()
2444 ndlp->nlp_DID, ndlp->nlp_type); in lpfc_nvme_register_port()
2459 rpinfo.port_id = ndlp->nlp_DID; in lpfc_nvme_register_port()
2460 if (ndlp->nlp_type & NLP_NVME_TARGET) in lpfc_nvme_register_port()
2462 if (ndlp->nlp_type & NLP_NVME_INITIATOR) in lpfc_nvme_register_port()
2465 if (ndlp->nlp_type & NLP_NVME_DISCOVERY) in lpfc_nvme_register_port()
2468 rpinfo.port_name = wwn_to_u64(ndlp->nlp_portname.u.wwn); in lpfc_nvme_register_port()
2469 rpinfo.node_name = wwn_to_u64(ndlp->nlp_nodename.u.wwn); in lpfc_nvme_register_port()
2476 oldrport = lpfc_ndlp_get_nrport(ndlp); in lpfc_nvme_register_port()
2478 prev_ndlp = oldrport->ndlp; in lpfc_nvme_register_port()
2482 lpfc_nlp_get(ndlp); in lpfc_nvme_register_port()
2495 ndlp->upcall_flags &= ~NLP_WAIT_FOR_UNREG; in lpfc_nvme_register_port()
2505 ndlp->nrport = NULL; in lpfc_nvme_register_port()
2506 ndlp->upcall_flags &= ~NLP_WAIT_FOR_UNREG; in lpfc_nvme_register_port()
2508 rport->ndlp = NULL; in lpfc_nvme_register_port()
2515 if (prev_ndlp && prev_ndlp != ndlp) { in lpfc_nvme_register_port()
2525 rport->ndlp = ndlp; in lpfc_nvme_register_port()
2527 ndlp->nrport = rport; in lpfc_nvme_register_port()
2538 ndlp, prev_ndlp); in lpfc_nvme_register_port()
2544 ret, ndlp->nlp_DID); in lpfc_nvme_register_port()
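
The lpfc_nvme_register_port() hits show the nvme_fc remote-port registration info being filled straight from the node: the DID becomes port_id, the NLP_NVME_* type bits map onto the transport's FC_PORT_ROLE_* roles, and the WWPN/WWNN come from the node name fields. A sketch of that translation; the declaration and memset are assumed context, and the driver interleaves other setup around these assignments:

    struct nvme_fc_port_info rpinfo;

    memset(&rpinfo, 0, sizeof(rpinfo));
    rpinfo.port_id = ndlp->nlp_DID;         /* FC address becomes the transport port_id */
    if (ndlp->nlp_type & NLP_NVME_TARGET)
            rpinfo.port_role |= FC_PORT_ROLE_NVME_TARGET;
    if (ndlp->nlp_type & NLP_NVME_INITIATOR)
            rpinfo.port_role |= FC_PORT_ROLE_NVME_INITIATOR;
    if (ndlp->nlp_type & NLP_NVME_DISCOVERY)
            rpinfo.port_role |= FC_PORT_ROLE_NVME_DISCOVERY;
    rpinfo.port_name = wwn_to_u64(ndlp->nlp_portname.u.wwn);
    rpinfo.node_name = wwn_to_u64(ndlp->nlp_nodename.u.wwn);
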
2561 lpfc_nvme_rescan_port(struct lpfc_vport *vport, struct lpfc_nodelist *ndlp) in lpfc_nvme_rescan_port() argument
2568 nrport = lpfc_ndlp_get_nrport(ndlp); in lpfc_nvme_rescan_port()
2576 ndlp->nlp_DID, ndlp->nlp_type, ndlp->nlp_state, in lpfc_nvme_rescan_port()
2584 ndlp->nlp_state == NLP_STE_MAPPED_NODE) { in lpfc_nvme_rescan_port()
2590 ndlp->nlp_DID, remoteport->port_state); in lpfc_nvme_rescan_port()
2613 lpfc_nvme_unregister_port(struct lpfc_vport *vport, struct lpfc_nodelist *ndlp) in lpfc_nvme_unregister_port() argument
2635 rport = lpfc_ndlp_get_nrport(ndlp); in lpfc_nvme_unregister_port()
2647 ndlp->nlp_type); in lpfc_nvme_unregister_port()
2653 if (ndlp->nlp_type & NLP_NVME_TARGET) { in lpfc_nvme_unregister_port()
2657 ndlp->upcall_flags |= NLP_WAIT_FOR_UNREG; in lpfc_nvme_unregister_port()
2669 lpfc_nlp_put(ndlp); in lpfc_nvme_unregister_port()
2682 vport->localport, ndlp->rport, ndlp->nlp_DID); in lpfc_nvme_unregister_port()
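
lpfc_nvme_unregister_port() is the other side of the remoteport_delete() flow above: for NVMe target nodes it sets NLP_WAIT_FOR_UNREG so the asynchronous remoteport_delete() upcall knows it owes the final reference drop. A rough sketch under that assumption; the branch carrying the lpfc_nlp_put() at line 2669 is not visible from the matched lines, so it is only noted in a comment:

    rport = lpfc_ndlp_get_nrport(ndlp);
    if (!rport)
            return;                         /* nothing registered for this node */

    if (ndlp->nlp_type & NLP_NVME_TARGET) {
            /* the transport tears the remote port down asynchronously; flag the
             * node so remoteport_delete() (lines 381-389) clears the flag and
             * performs the final lpfc_nlp_put() */
            ndlp->upcall_flags |= NLP_WAIT_FOR_UNREG;
    }
    /* the listing also shows an lpfc_nlp_put(ndlp) at line 2669, presumably on
     * a path where no upcall is expected or the unregister call failed */
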
2702 struct lpfc_nodelist *ndlp = lpfc_ncmd->ndlp; in lpfc_sli4_nvme_xri_aborted() local
2705 if (ndlp) in lpfc_sli4_nvme_xri_aborted()
2706 lpfc_sli4_abts_err_handler(phba, ndlp, axri); in lpfc_sli4_nvme_xri_aborted()