Lines Matching refs:vstart
1670 static unsigned long __ref kernel_map_hugepud(unsigned long vstart, in kernel_map_hugepud() argument
1675 u64 pte_val = vstart; in kernel_map_hugepud()
1678 if ((vstart & mask16gb) || in kernel_map_hugepud()
1679 (vend - vstart <= mask16gb)) { in kernel_map_hugepud()
1683 return vstart + PUD_SIZE; in kernel_map_hugepud()
1689 vend = vstart + mask16gb + 1UL; in kernel_map_hugepud()
1690 while (vstart < vend) { in kernel_map_hugepud()
1694 vstart += PUD_SIZE; in kernel_map_hugepud()
1697 return vstart; in kernel_map_hugepud()
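The matches above cover kernel_map_hugepud(), the helper (apparently from the sparc64 linear-mapping code in arch/sparc/mm/init_64.c) that maps a chunk of the kernel linear region with huge PUD entries: if vstart is not aligned to the 16GB mask, or less than that remains before vend, it installs a single huge PUD and returns vstart + PUD_SIZE; otherwise it fills a whole 16GB-aligned block with consecutive PUD entries. Below is a sketch reconstructed from just these matches; the mask value, the kern_linear_pte_xor[] indices and the _PAGE_PUD_HUGE bit are assumptions that do not appear in the listing.

static unsigned long __ref kernel_map_hugepud(unsigned long vstart,
					      unsigned long vend,
					      pud_t *pud)
{
	const unsigned long mask16gb = (1UL << 34) - 1UL;	/* assumed: 16GB - 1 */
	u64 pte_val = vstart;

	/* Unaligned start or short remainder: install one huge PUD only. */
	if ((vstart & mask16gb) ||
	    (vend - vstart <= mask16gb)) {
		pte_val ^= kern_linear_pte_xor[2];	/* assumed index */
		pud_val(*pud) = pte_val | _PAGE_PUD_HUGE;

		return vstart + PUD_SIZE;
	}

	/* Full 16GB-aligned block: fill consecutive PUD entries. */
	pte_val ^= kern_linear_pte_xor[3];		/* assumed index */
	pte_val |= _PAGE_PUD_HUGE;

	vend = vstart + mask16gb + 1UL;
	while (vstart < vend) {
		pud_val(*pud) = pte_val;

		pte_val += PUD_SIZE;
		vstart += PUD_SIZE;
		pud++;
	}
	return vstart;
}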
1700 static bool kernel_can_map_hugepud(unsigned long vstart, unsigned long vend, in kernel_can_map_hugepud() argument
1703 if (guard && !(vstart & ~PUD_MASK) && (vend - vstart) >= PUD_SIZE) in kernel_can_map_hugepud()
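kernel_can_map_hugepud() is the guard for the path above, and its whole test is visible in the match: the range may take a huge PUD only when the caller permits it (guard), vstart sits on a PUD boundary, and at least PUD_SIZE bytes remain before vend. A minimal sketch; the return statements are the obvious completion of the visible test.

static bool kernel_can_map_hugepud(unsigned long vstart, unsigned long vend,
				   bool guard)
{
	/* PUD-aligned start and at least one full PUD left in the range. */
	if (guard && !(vstart & ~PUD_MASK) && (vend - vstart) >= PUD_SIZE)
		return true;

	return false;
}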
1709 static unsigned long __ref kernel_map_hugepmd(unsigned long vstart, in kernel_map_hugepmd() argument
1715 u64 pte_val = vstart; in kernel_map_hugepmd()
1718 if ((vstart & mask256mb) || in kernel_map_hugepmd()
1719 (vend - vstart <= mask256mb)) { in kernel_map_hugepmd()
1723 return vstart + PMD_SIZE; in kernel_map_hugepmd()
1726 if ((vstart & mask2gb) || in kernel_map_hugepmd()
1727 (vend - vstart <= mask2gb)) { in kernel_map_hugepmd()
1730 vend = vstart + mask256mb + 1UL; in kernel_map_hugepmd()
1734 vend = vstart + mask2gb + 1UL; in kernel_map_hugepmd()
1737 while (vstart < vend) { in kernel_map_hugepmd()
1741 vstart += PMD_SIZE; in kernel_map_hugepmd()
1745 return vstart; in kernel_map_hugepmd()
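kernel_map_hugepmd() repeats the same pattern one level down with two block sizes, a 256MB mask and a 2GB mask: an unaligned or short range gets a single huge PMD, otherwise the loop fills a 256MB- or 2GB-aligned block of PMD entries, whichever the alignment and remaining length allow. A sketch under the same caveats as above; the mask values, the kern_linear_pte_xor[] indices and the _PAGE_PMD_HUGE bit are assumptions.

static unsigned long __ref kernel_map_hugepmd(unsigned long vstart,
					      unsigned long vend,
					      pmd_t *pmd)
{
	const unsigned long mask256mb = (1UL << 28) - 1UL;	/* assumed: 256MB - 1 */
	const unsigned long mask2gb = (1UL << 31) - 1UL;	/* assumed: 2GB - 1 */
	u64 pte_val = vstart;

	/* Unaligned start or short remainder: install one huge PMD only. */
	if ((vstart & mask256mb) ||
	    (vend - vstart <= mask256mb)) {
		pte_val ^= kern_linear_pte_xor[0];	/* assumed index */
		pmd_val(*pmd) = pte_val | _PAGE_PMD_HUGE;

		return vstart + PMD_SIZE;
	}

	/* Pick the largest block the alignment and length permit. */
	if ((vstart & mask2gb) ||
	    (vend - vstart <= mask2gb)) {
		pte_val ^= kern_linear_pte_xor[1];	/* assumed index */
		vend = vstart + mask256mb + 1UL;
	} else {
		pte_val ^= kern_linear_pte_xor[2];	/* assumed index */
		vend = vstart + mask2gb + 1UL;
	}
	pte_val |= _PAGE_PMD_HUGE;

	while (vstart < vend) {
		pmd_val(*pmd) = pte_val;

		pte_val += PMD_SIZE;
		vstart += PMD_SIZE;
		pmd++;
	}
	return vstart;
}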
1748 static bool kernel_can_map_hugepmd(unsigned long vstart, unsigned long vend, in kernel_can_map_hugepmd() argument
1751 if (guard && !(vstart & ~PMD_MASK) && (vend - vstart) >= PMD_SIZE) in kernel_can_map_hugepmd()
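kernel_can_map_hugepmd() is the PMD-level twin of the guard above, with PMD_MASK and PMD_SIZE in place of the PUD constants; the visible test is the whole decision.

static bool kernel_can_map_hugepmd(unsigned long vstart, unsigned long vend,
				   bool guard)
{
	/* PMD-aligned start and at least one full PMD left in the range. */
	if (guard && !(vstart & ~PMD_MASK) && (vend - vstart) >= PMD_SIZE)
		return true;

	return false;
}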
1761 unsigned long vstart = PAGE_OFFSET + pstart; in kernel_map_range() local
1765 if ((vstart & ~PAGE_MASK) || (vend & ~PAGE_MASK)) { in kernel_map_range()
1767 vstart, vend); in kernel_map_range()
1771 while (vstart < vend) { in kernel_map_range()
1772 unsigned long this_end, paddr = __pa(vstart); in kernel_map_range()
1773 pgd_t *pgd = pgd_offset_k(vstart); in kernel_map_range()
1790 p4d = p4d_offset(pgd, vstart); in kernel_map_range()
1802 pud = pud_offset(p4d, vstart); in kernel_map_range()
1806 if (kernel_can_map_hugepud(vstart, vend, use_huge)) { in kernel_map_range()
1807 vstart = kernel_map_hugepud(vstart, vend, pud); in kernel_map_range()
1818 pmd = pmd_offset(pud, vstart); in kernel_map_range()
1822 if (kernel_can_map_hugepmd(vstart, vend, use_huge)) { in kernel_map_range()
1823 vstart = kernel_map_hugepmd(vstart, vend, pmd); in kernel_map_range()
1834 pte = pte_offset_kernel(pmd, vstart); in kernel_map_range()
1835 this_end = (vstart + PMD_SIZE) & PMD_MASK; in kernel_map_range()
1839 while (vstart < this_end) { in kernel_map_range()
1842 vstart += PAGE_SIZE; in kernel_map_range()
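The matches from 1761 onward are in kernel_map_range(), which translates a physical range into the linear map (vstart = PAGE_OFFSET + pstart), refuses unaligned endpoints, and then walks pgd -> p4d -> pud -> pmd -> pte for each chunk. At the PUD and PMD levels it first offers the range to the huge-mapping helpers above, which return the new vstart past whatever they mapped; only if those decline does it drop to base pages and fill PTEs up to the next PMD boundary. The skeleton below keeps just that structure; the function signature, the page-table allocation branches (left as comments) and the exact PTE value written are assumptions inferred from the visible paddr/prot usage.

static void __ref kernel_map_range(unsigned long pstart, unsigned long pend,
				   pgprot_t prot, bool use_huge)
{
	unsigned long vstart = PAGE_OFFSET + pstart;
	unsigned long vend = PAGE_OFFSET + pend;

	if ((vstart & ~PAGE_MASK) || (vend & ~PAGE_MASK)) {
		prom_printf("kernel_map_range: unaligned [%lx:%lx]\n",
			    vstart, vend);	/* message wording assumed */
		prom_halt();
	}

	while (vstart < vend) {
		unsigned long this_end, paddr = __pa(vstart);
		pgd_t *pgd = pgd_offset_k(vstart);
		p4d_t *p4d;
		pud_t *pud;
		pmd_t *pmd;
		pte_t *pte;

		/* ... allocate and populate a missing pgd entry here ... */

		p4d = p4d_offset(pgd, vstart);
		/* ... allocate and populate a missing p4d entry here ... */

		pud = pud_offset(p4d, vstart);
		if (pud_none(*pud)) {
			if (kernel_can_map_hugepud(vstart, vend, use_huge)) {
				vstart = kernel_map_hugepud(vstart, vend, pud);
				continue;
			}
			/* ... otherwise allocate a PMD table ... */
		}

		pmd = pmd_offset(pud, vstart);
		if (pmd_none(*pmd)) {
			if (kernel_can_map_hugepmd(vstart, vend, use_huge)) {
				vstart = kernel_map_hugepmd(vstart, vend, pmd);
				continue;
			}
			/* ... otherwise allocate a PTE table ... */
		}

		/* Base-page fallback: fill PTEs up to the next PMD boundary. */
		pte = pte_offset_kernel(pmd, vstart);
		this_end = (vstart + PMD_SIZE) & PMD_MASK;
		if (this_end > vend)
			this_end = vend;

		while (vstart < this_end) {
			pte_val(*pte) = paddr | pgprot_val(prot);	/* assumed */

			vstart += PAGE_SIZE;
			paddr += PAGE_SIZE;
			pte++;
		}
	}
}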
2581 int __meminit vmemmap_populate(unsigned long vstart, unsigned long vend, in vmemmap_populate() argument
2595 vstart = vstart & PMD_MASK; in vmemmap_populate()
2597 for (; vstart < vend; vstart += PMD_SIZE) { in vmemmap_populate()
2598 pgd_t *pgd = vmemmap_pgd_populate(vstart, node); in vmemmap_populate()
2607 p4d = vmemmap_p4d_populate(pgd, vstart, node); in vmemmap_populate()
2611 pud = vmemmap_pud_populate(p4d, vstart, node); in vmemmap_populate()
2615 pmd = pmd_offset(pud, vstart); in vmemmap_populate()
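The last group, from 2581 on, is vmemmap_populate(), which backs the virtual memmap (the struct page array) with PMD-sized mappings: vstart is rounded down to a PMD boundary and, for each PMD step, the pgd/p4d/pud levels are populated through the generic vmemmap helpers; an empty PMD is then pointed at a freshly allocated PMD_SIZE block. The trailing parameters of the signature, the rounding of vend and the PTE bits are not in the matches and are assumptions in the sketch below.

int __meminit vmemmap_populate(unsigned long vstart, unsigned long vend,
			       int node, struct vmem_altmap *altmap)
{
	/* Arch-specific huge-PTE bits; the real setup is not in the matches. */
	unsigned long pte_base = 0;

	vstart = vstart & PMD_MASK;
	vend = ALIGN(vend, PMD_SIZE);		/* assumed end rounding */
	for (; vstart < vend; vstart += PMD_SIZE) {
		pgd_t *pgd = vmemmap_pgd_populate(vstart, node);
		p4d_t *p4d;
		pud_t *pud;
		pmd_t *pmd;

		if (!pgd)
			return -ENOMEM;

		p4d = vmemmap_p4d_populate(pgd, vstart, node);
		if (!p4d)
			return -ENOMEM;

		pud = vmemmap_pud_populate(p4d, vstart, node);
		if (!pud)
			return -ENOMEM;

		pmd = pmd_offset(pud, vstart);
		if (pmd_none(*pmd)) {
			void *block = vmemmap_alloc_block(PMD_SIZE, node);

			if (!block)
				return -ENOMEM;

			pmd_val(*pmd) = pte_base | __pa(block);
		}
	}

	return 0;
}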