Lines matching refs:nrpages (uses of nrpages in the Intel IOMMU driver's DMA map/unmap path)
3523 unsigned long nrpages, uint64_t dma_mask) in intel_alloc_iova() argument
3543 nrpages = __roundup_pow_of_two(nrpages); in intel_alloc_iova()
3551 iova_pfn = alloc_iova_fast(&domain->iovad, nrpages, in intel_alloc_iova()
3556 iova_pfn = alloc_iova_fast(&domain->iovad, nrpages, in intel_alloc_iova()
3560 nrpages); in intel_alloc_iova()
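
The intel_alloc_iova() hits show nrpages being rounded up to a power of two and then handed to alloc_iova_fast() twice: first with a limit below the 32-bit boundary, then against the device's full DMA mask. The user-space sketch below reproduces only that page-count logic under stated assumptions; alloc_iova_stub() and roundup_pow_of_two_ul() are hypothetical stand-ins for alloc_iova_fast() and __roundup_pow_of_two(), and driver checks that do not involve nrpages are left out.

#include <stdint.h>
#include <stdio.h>

#define VTD_PAGE_SHIFT  12
#define IOVA_PFN(addr)  ((addr) >> VTD_PAGE_SHIFT)
#define DMA_BIT_MASK(n) (((n) == 64) ? ~0ULL : ((1ULL << (n)) - 1))

/* Sketch equivalent of __roundup_pow_of_two() for small counts. */
static unsigned long roundup_pow_of_two_ul(unsigned long n)
{
        unsigned long v = 1;

        while (v < n)
                v <<= 1;
        return v;
}

/*
 * Hypothetical stand-in for alloc_iova_fast(): hands out pfns from a
 * bump counter and fails if the request would exceed @limit_pfn.
 */
static unsigned long alloc_iova_stub(unsigned long nrpages,
                                     unsigned long limit_pfn)
{
        static unsigned long next_pfn = 0x10000;
        unsigned long base = next_pfn;

        if (base + nrpages - 1 > limit_pfn)
                return 0;
        next_pfn += nrpages;
        return base;
}

static unsigned long sketch_alloc_iova(unsigned long nrpages, uint64_t dma_mask)
{
        unsigned long iova_pfn;

        /* Round up so the allocation stays size-aligned in the IOVA allocator. */
        nrpages = roundup_pow_of_two_ul(nrpages);

        if (dma_mask > DMA_BIT_MASK(32)) {
                /* First try to place the IOVA below 4 GiB ... */
                iova_pfn = alloc_iova_stub(nrpages, IOVA_PFN(DMA_BIT_MASK(32)));
                if (iova_pfn)
                        return iova_pfn;
        }
        /* ... then retry against the device's full DMA mask. */
        return alloc_iova_stub(nrpages, IOVA_PFN(dma_mask));
}

int main(void)
{
        printf("iova pfn: 0x%lx\n", sketch_alloc_iova(5, DMA_BIT_MASK(64)));
        return 0;
}
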
3649 unsigned long nrpages; in intel_unmap() local
3662 nrpages = aligned_nrpages(dev_addr, size); in intel_unmap()
3664 last_pfn = start_pfn + nrpages - 1; in intel_unmap()
3673 nrpages, !freelist, 0); in intel_unmap()
3675 free_iova_fast(&domain->iovad, iova_pfn, dma_to_mm_pfn(nrpages)); in intel_unmap()
3678 queue_iova(&domain->iovad, iova_pfn, nrpages, in intel_unmap()
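
In intel_unmap(), nrpages comes from aligned_nrpages() and fixes both the pfn range flushed from the IOTLB and the number of pages handed back to the allocator, either immediately via free_iova_fast(dma_to_mm_pfn(nrpages)) or queued with queue_iova() when flushes are deferred. The sketch below re-derives that arithmetic for illustration only, assuming 4 KiB pages on both the CPU (PAGE_SHIFT) and IOMMU (VTD_PAGE_SHIFT) side; the helper bodies are reconstructions, not copies of the driver.

#include <stddef.h>
#include <stdio.h>

#define PAGE_SHIFT      12
#define PAGE_SIZE       (1UL << PAGE_SHIFT)
#define PAGE_MASK       (~(PAGE_SIZE - 1))
#define VTD_PAGE_SHIFT  12

/* Pages needed to cover @size bytes starting at the in-page offset of @addr. */
static unsigned long aligned_nrpages(unsigned long addr, size_t size)
{
        addr &= ~PAGE_MASK;                     /* keep only the offset */
        return ((addr + size + PAGE_SIZE - 1) & PAGE_MASK) >> VTD_PAGE_SHIFT;
}

/* VT-d pfn (or page count) to CPU-page units; identity when both are 4 KiB. */
static unsigned long dma_to_mm_pfn(unsigned long dma_pfn)
{
        return dma_pfn >> (PAGE_SHIFT - VTD_PAGE_SHIFT);
}

int main(void)
{
        unsigned long dev_addr = 0x100000ff0UL; /* unaligned DMA handle */
        size_t size = 0x20;

        unsigned long start_pfn = dev_addr >> VTD_PAGE_SHIFT;
        unsigned long nrpages = aligned_nrpages(dev_addr, size);
        unsigned long last_pfn = start_pfn + nrpages - 1;

        /* 0xff0 + 0x20 crosses a page boundary, so two pages are unmapped. */
        printf("nrpages=%lu pfns=[0x%lx..0x%lx] free=%lu\n",
               nrpages, start_pfn, last_pfn, dma_to_mm_pfn(nrpages));
        return 0;
}
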
3758 unsigned long nrpages = 0; in intel_unmap_sg() local
3763 nrpages += aligned_nrpages(sg_dma_address(sg), sg_dma_len(sg)); in intel_unmap_sg()
3766 intel_unmap(dev, startaddr, nrpages << VTD_PAGE_SHIFT); in intel_unmap_sg()
3768 trace_unmap_sg(dev, startaddr, nrpages << VTD_PAGE_SHIFT); in intel_unmap_sg()
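
intel_unmap_sg() accumulates the page count segment by segment and then tears down the whole scatterlist mapping with a single intel_unmap() call covering nrpages << VTD_PAGE_SHIFT bytes. In the sketch below, struct seg is a stand-in for one scatterlist entry and aligned_nrpages() is the same reconstruction used above; the point is only how the per-segment counts add up to one contiguous unmap.

#include <stddef.h>
#include <stdio.h>

#define PAGE_SHIFT      12
#define PAGE_SIZE       (1UL << PAGE_SHIFT)
#define PAGE_MASK       (~(PAGE_SIZE - 1))
#define VTD_PAGE_SHIFT  12

struct seg {                    /* stand-in for one scatterlist entry */
        unsigned long dma_address;
        unsigned int dma_len;
};

static unsigned long aligned_nrpages(unsigned long addr, size_t size)
{
        addr &= ~PAGE_MASK;
        return ((addr + size + PAGE_SIZE - 1) & PAGE_MASK) >> VTD_PAGE_SHIFT;
}

int main(void)
{
        struct seg sglist[] = {
                { 0xfe000000UL, 0x1000 },
                { 0xfe001000UL, 0x2500 },       /* partial last page */
        };
        unsigned long startaddr = sglist[0].dma_address & PAGE_MASK;
        unsigned long nrpages = 0;
        size_t i;

        for (i = 0; i < sizeof(sglist) / sizeof(sglist[0]); i++)
                nrpages += aligned_nrpages(sglist[i].dma_address,
                                           sglist[i].dma_len);

        /* One unmap call covers the whole region mapped for the list. */
        printf("unmap 0x%lx bytes starting at 0x%lx\n",
               nrpages << VTD_PAGE_SHIFT, startaddr);
        return 0;
}
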
3879 unsigned long nrpages; in bounce_map_single() local
3896 nrpages = aligned_nrpages(0, size); in bounce_map_single()
3898 dma_to_mm_pfn(nrpages), dma_mask); in bounce_map_single()
3940 tlb_addr >> VTD_PAGE_SHIFT, nrpages, prot); in bounce_map_single()
3953 free_iova_fast(&domain->iovad, iova_pfn, dma_to_mm_pfn(nrpages)); in bounce_map_single()
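
The bounce_map_single() hits show one nrpages value threading through the whole bounce path: it is computed from the size alone (host offset 0), sizes the IOVA allocation, sizes the mapping of the bounce buffer at tlb_addr >> VTD_PAGE_SHIFT, and sizes the free on the error path. The stubs below (alloc_iova_stub(), map_pages_stub(), free_iova_stub()) are hypothetical placeholders used only to show that flow, and bytes_to_vtd_pages() is my shorthand for aligned_nrpages(0, size); the map stub is made to fail so the unwind runs.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define VTD_PAGE_SHIFT  12
#define VTD_PAGE_SIZE   (1UL << VTD_PAGE_SHIFT)

/* aligned_nrpages(0, size) reduces to rounding size up to whole VT-d pages. */
static unsigned long bytes_to_vtd_pages(size_t size)
{
        return (size + VTD_PAGE_SIZE - 1) >> VTD_PAGE_SHIFT;
}

/* Hypothetical allocator stand-in: always succeeds at a fixed pfn. */
static unsigned long alloc_iova_stub(unsigned long nrpages)
{
        printf("alloc %lu pages\n", nrpages);
        return 0x20000;
}

static void free_iova_stub(unsigned long iova_pfn, unsigned long nrpages)
{
        printf("free %lu pages at pfn 0x%lx\n", nrpages, iova_pfn);
}

/* Hypothetical mapping stand-in: fails so the error path below is exercised. */
static int map_pages_stub(unsigned long iova_pfn, uint64_t phys_pfn,
                          unsigned long nrpages)
{
        printf("map %lu pages: iova pfn 0x%lx -> phys pfn 0x%llx\n",
               nrpages, iova_pfn, (unsigned long long)phys_pfn);
        return -1;
}

static uint64_t sketch_bounce_map(uint64_t tlb_addr, size_t size)
{
        unsigned long nrpages = bytes_to_vtd_pages(size);
        unsigned long iova_pfn = alloc_iova_stub(nrpages);

        if (!iova_pfn)
                return 0;                       /* mapping error */

        if (map_pages_stub(iova_pfn, tlb_addr >> VTD_PAGE_SHIFT, nrpages)) {
                /* Unwind with exactly the page count that was allocated. */
                free_iova_stub(iova_pfn, nrpages);
                return 0;
        }

        /* Hypothetical device address for the sketch: IOVA base plus offset. */
        return ((uint64_t)iova_pfn << VTD_PAGE_SHIFT) +
               (tlb_addr & (VTD_PAGE_SIZE - 1));
}

int main(void)
{
        sketch_bounce_map(0x7f000800ULL, 0x1800);
        return 0;
}
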