Lines Matching refs:ppa
186 luns[lunid].ppa = 0; in nvm_create_tgt_dev()
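
The .ppa = 0 assignment in nvm_create_tgt_dev() relies on struct ppa_addr being a union: storing to the 64-bit .ppa member clears every field of the generic (.g) view in one write before the individual fields are filled in. A minimal sketch of that idiom, with a simplified two-field layout (the real struct has more fields and device-dependent widths):

    #include <stdint.h>

    /* Simplified stand-in for struct ppa_addr: one 64-bit word overlaid
     * with named bitfields (the real layout has more fields). */
    struct ppa_addr {
        union {
            struct {
                uint64_t lun : 8;
                uint64_t ch  : 8;
            } g;
            uint64_t ppa;
        };
    };

    static void init_lun_entry(struct ppa_addr *p, int ch, int lun)
    {
        p->ppa = 0;      /* clear all fields at once through the union */
        p->g.ch = ch;    /* then fill in the channel... */
        p->g.lun = lun;  /* ...and the lun of this entry */
    }
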
677 struct ppa_addr ppa; in nvm_set_rqd_ppalist() local
698 ppa = ppas[i]; in nvm_set_rqd_ppalist()
699 ppa.g.pl = pl_idx; in nvm_set_rqd_ppalist()
700 rqd->ppa_list[(pl_idx * nr_ppas) + i] = ppa; in nvm_set_rqd_ppalist()
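
The nvm_set_rqd_ppalist() hits show the per-plane fan-out: each caller-supplied address is copied once per plane with only the plane field (g.pl) changed, and the copies are laid out plane-major in rqd->ppa_list. A standalone sketch of that loop, assuming a simplified ppa_addr layout and a hypothetical helper name (expand_planes):

    #include <stdint.h>

    /* Simplified ppa_addr with just the fields this sketch touches. */
    struct ppa_addr {
        union {
            struct {
                uint64_t pl  : 8;   /* plane */
                uint64_t blk : 16;  /* block */
            } g;
            uint64_t ppa;
        };
    };

    /*
     * Mirror of the loop in nvm_set_rqd_ppalist(): slot (pl_idx * nr_ppas + i)
     * receives ppas[i] with its plane field overwritten by pl_idx, so the
     * request addresses every plane of every block it was given.
     */
    static void expand_planes(struct ppa_addr *ppa_list,
                              const struct ppa_addr *ppas,
                              int nr_ppas, int nr_planes)
    {
        for (int pl_idx = 0; pl_idx < nr_planes; pl_idx++) {
            for (int i = 0; i < nr_ppas; i++) {
                struct ppa_addr ppa = ppas[i];

                ppa.g.pl = pl_idx;
                ppa_list[pl_idx * nr_ppas + i] = ppa;
            }
        }
    }
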
824 static int nvm_bb_chunk_sense(struct nvm_dev *dev, struct ppa_addr ppa) in nvm_bb_chunk_sense() argument
844 rqd.ppa_addr = generic_to_dev_addr(dev, ppa); in nvm_bb_chunk_sense()
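
Before issuing the read, nvm_bb_chunk_sense() converts the generic address into the device's own bit layout. A sketch of what such a conversion does, with made-up bit offsets; the real offsets come from the device-reported address format, and the field order differs per device:

    #include <stdint.h>

    /* Hypothetical bit offsets; generic_to_dev_addr() reads the actual
     * offsets from the device's address-format description. */
    #define CH_OFF  56
    #define LUN_OFF 48
    #define BLK_OFF 32
    #define PG_OFF  16
    #define PL_OFF   8
    #define SEC_OFF  0

    /* Pack generic channel/lun/block/page/plane/sector fields into a
     * single 64-bit device-format address. */
    static uint64_t pack_dev_addr(uint64_t ch, uint64_t lun, uint64_t blk,
                                  uint64_t pg, uint64_t pl, uint64_t sec)
    {
        return (ch << CH_OFF) | (lun << LUN_OFF) | (blk << BLK_OFF) |
               (pg << PG_OFF) | (pl << PL_OFF) | (sec << SEC_OFF);
    }
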
859 static int nvm_bb_chunk_scan(struct nvm_dev *dev, struct ppa_addr ppa, in nvm_bb_chunk_scan() argument
866 ret = nvm_bb_chunk_sense(dev, ppa); in nvm_bb_chunk_scan()
891 ppa.g.pg = geo->num_pg - 1; in nvm_bb_chunk_scan()
892 ppa.g.pl = geo->num_pln - 1; in nvm_bb_chunk_scan()
894 ret = nvm_bb_chunk_sense(dev, ppa); in nvm_bb_chunk_scan()
922 ppa.g.pg = pg; in nvm_bb_chunk_scan()
923 ppa.g.pl = pl; in nvm_bb_chunk_scan()
925 ret = nvm_bb_chunk_sense(dev, ppa); in nvm_bb_chunk_scan()
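
The nvm_bb_chunk_scan() hits trace its three probes: the first page of the chunk, the last page/plane, and then a forward walk over every page and plane. A rough sketch of that decision structure, with a hypothetical sense() callback standing in for nvm_bb_chunk_sense() (nonzero meaning the page is empty) and simplified chunk states; the kernel's error handling and state constants are not reproduced here:

    /* Hypothetical sense() result: 0 = page holds data, nonzero = empty. */
    typedef int (*sense_fn)(int pg, int pl);

    enum chunk_state { CHK_FREE, CHK_OPEN, CHK_CLOSED };

    /*
     * First page empty -> free chunk; last page written -> closed chunk;
     * otherwise walk forward to locate the write pointer of an open chunk.
     * write_ptr counts written page/plane pairs, a simplification.
     */
    static enum chunk_state chunk_scan(sense_fn sense, int num_pg,
                                       int num_pln, int *write_ptr)
    {
        *write_ptr = 0;

        if (sense(0, 0))                       /* first page empty */
            return CHK_FREE;

        if (!sense(num_pg - 1, num_pln - 1)) { /* last page written */
            *write_ptr = num_pg * num_pln;
            return CHK_CLOSED;
        }

        for (int pg = 0; pg < num_pg; pg++)
            for (int pl = 0; pl < num_pln; pl++) {
                if (sense(pg, pl))             /* first empty page found */
                    return CHK_OPEN;
                (*write_ptr)++;
            }

        return CHK_CLOSED;
    }
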
956 static int nvm_bb_to_chunk(struct nvm_dev *dev, struct ppa_addr ppa, in nvm_bb_to_chunk() argument
974 ppa.g.blk = blk; in nvm_bb_to_chunk()
979 meta->slba = generic_to_dev_addr(dev, ppa).ppa; in nvm_bb_to_chunk()
983 ret = nvm_bb_chunk_scan(dev, ppa, meta); in nvm_bb_to_chunk()
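
nvm_bb_to_chunk() walks every block in the lun, records each chunk's starting LBA in device format (the generic_to_dev_addr(...).ppa seen above), and either marks the chunk offline from the bad-block table or scans it to recover its state. A condensed sketch of that per-block loop, using hypothetical types (chunk_meta, addr_fn, scan_fn) in place of struct nvm_chk_meta and the real scan:

    #include <stdint.h>

    enum chunk_state { CHK_FREE, CHK_OPEN, CHK_CLOSED, CHK_OFFLINE };

    /* Hypothetical, trimmed-down chunk descriptor. */
    struct chunk_meta {
        uint64_t slba;           /* first LBA of the chunk, device format */
        enum chunk_state state;
    };

    /* Hypothetical callbacks: addr_of() builds the device-format address
     * of a block, scan() classifies a block not marked bad. */
    typedef uint64_t (*addr_fn)(int blk);
    typedef enum chunk_state (*scan_fn)(int blk);

    static void blocks_to_chunks(struct chunk_meta *meta, int num_blk,
                                 const uint8_t *bb_tbl, addr_fn addr_of,
                                 scan_fn scan)
    {
        for (int blk = 0; blk < num_blk; blk++) {
            meta[blk].slba = addr_of(blk);

            /* Nonzero bad-block entry: report the chunk offline;
             * otherwise scan the media to classify it. */
            meta[blk].state = bb_tbl[blk] ? CHK_OFFLINE : scan(blk);
        }
    }
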
1000 struct ppa_addr ppa; in nvm_get_bb_meta() local
1005 ppa.ppa = slba; in nvm_get_bb_meta()
1006 ppa = dev_to_generic_addr(dev, ppa); in nvm_get_bb_meta()
1008 if (ppa.g.blk != 0) in nvm_get_bb_meta()
1020 for (ch = ppa.g.ch; ch < geo->num_ch; ch++) { in nvm_get_bb_meta()
1021 for (lun = ppa.g.lun; lun < geo->num_lun; lun++) { in nvm_get_bb_meta()
1027 ppa_gen.ppa = 0; in nvm_get_bb_meta()
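
nvm_get_bb_meta() decodes the caller's slba back into a generic address, then walks channels and luns from that starting point, zero-initializing a fresh generic address (ppa_gen.ppa = 0) for each lun before fetching its bad-block table. A sketch of that traversal order with a hypothetical per-lun callback; the chunk-count bookkeeping is left to the callback:

    /* Hypothetical callback: fetch one lun's bad-block table and convert
     * it to chunk metadata (the get_bb_tbl() + nvm_bb_to_chunk() step). */
    typedef int (*per_lun_fn)(int ch, int lun);

    static int walk_luns(int start_ch, int start_lun, int num_ch,
                         int num_lun, per_lun_fn fill_lun)
    {
        for (int ch = start_ch; ch < num_ch; ch++) {
            for (int lun = start_lun; lun < num_lun; lun++) {
                int ret = fill_lun(ch, lun);

                if (ret)
                    return ret;
            }
        }
        return 0;
    }
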
1050 int nvm_get_chunk_meta(struct nvm_tgt_dev *tgt_dev, struct ppa_addr ppa, in nvm_get_chunk_meta() argument
1055 nvm_ppa_tgt_to_dev(tgt_dev, &ppa, 1); in nvm_get_chunk_meta()
1058 return nvm_get_bb_meta(dev, (sector_t)ppa.ppa, nchks, meta); in nvm_get_chunk_meta()
1060 return dev->ops->get_chk_meta(dev, (sector_t)ppa.ppa, nchks, meta); in nvm_get_chunk_meta()
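
nvm_get_chunk_meta() first remaps the target-relative address into device space (nvm_ppa_tgt_to_dev), then either emulates the chunk report through the bad-block path above or asks the device directly via its get_chk_meta hook. A sketch of that dispatch, assuming the branch is keyed on whether the device can report chunk metadata natively (older 1.2-era media cannot, so the bad-block emulation is used); the names here are placeholders, not the kernel's:

    #include <stdbool.h>
    #include <stdint.h>

    typedef uint64_t sector_t;

    /* Placeholder signature shared by the two reporting paths. */
    typedef int (*chunk_report_fn)(sector_t slba, int nchks, void *meta);

    static int get_chunk_meta(bool native_report, sector_t slba, int nchks,
                              void *meta, chunk_report_fn emulate_from_bb,
                              chunk_report_fn device_report)
    {
        /* No native report: rebuild chunk metadata from the bad-block
         * table and a media scan; otherwise use the device's report. */
        if (!native_report)
            return emulate_from_bb(slba, nchks, meta);

        return device_report(slba, nchks, meta);
    }
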