PMD_SIZE           50 arch/x86/kernel/efi_64.c 	end = (end + PMD_SIZE - 1) & PMD_MASK;
PMD_SIZE           38 arch/x86/kernel/machine_kexec_64.c 		addr += PMD_SIZE;
PMD_SIZE          920 arch/x86/mm/fault.c 	     address += PMD_SIZE) {
PMD_SIZE          140 arch/x86/mm/hugetlbpage.c 			BUG_ON(sz != PMD_SIZE);
PMD_SIZE          432 arch/x86/mm/hugetlbpage.c 	if (ps == PMD_SIZE) {
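
The two hugetlbpage.c hits above compare a requested huge page size against PMD_SIZE: a huge page backed by a single PMD entry must be exactly one PMD in size (2 MiB on x86-64 with 4 KiB base pages). A minimal user-space sketch of that check, assuming the illustrative value PMD_SHIFT = 21; the helper name is_pmd_huge_size() is hypothetical, and in the kernel the constants come from the architecture's page table headers:

	#include <stdio.h>

	/* Illustrative x86-64 constant; an assumption for this sketch. */
	#define PMD_SHIFT	21
	#define PMD_SIZE	(1UL << PMD_SHIFT)	/* 2 MiB */

	/* Mirrors the hugetlbpage.c test: a PMD-backed huge page must be
	 * exactly one PMD in size. */
	static int is_pmd_huge_size(unsigned long sz)
	{
		return sz == PMD_SIZE;
	}

	int main(void)
	{
		printf("PMD_SIZE = %lu bytes\n", PMD_SIZE);		/* 2097152 */
		printf("2 MiB ok: %d\n", is_pmd_huge_size(2UL << 20));	/* 1 */
		printf("4 KiB ok: %d\n", is_pmd_huge_size(4UL << 10));	/* 0 */
		return 0;
	}
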
PMD_SIZE          171 arch/x86/mm/init_32.c 			vaddr += PMD_SIZE;
PMD_SIZE          509 arch/x86/mm/init_32.c 	end = (FIXADDR_TOP + PMD_SIZE - 1) & PMD_MASK;
PMD_SIZE          785 arch/x86/mm/init_32.c 	pmds = (end + PMD_SIZE - 1) >> PMD_SHIFT;
PMD_SIZE          792 arch/x86/mm/init_32.c 		extra += PMD_SIZE;
PMD_SIZE          867 arch/x86/mm/init_32.c 	big_page_start = PMD_SIZE;
PMD_SIZE          875 arch/x86/mm/init_32.c 		end_pfn = ((start + (PMD_SIZE - 1))>>PMD_SHIFT)
PMD_SIZE          882 arch/x86/mm/init_32.c 	start_pfn = ((start + (PMD_SIZE - 1))>>PMD_SHIFT)
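
The init_32.c hits at lines 875 and 882 share one idiom: round a byte address up to the next PMD boundary, then shift it into a page frame number, so the big-page mapping loop starts on a fully aligned large frame. A worked sketch, assuming a 21-bit PMD shift (2 MiB PMDs, as with PAE or x86-64; plain x86-32 uses 4 MiB) and PAGE_SHIFT = 12:

	#include <stdio.h>

	/* Assumed constants for illustration. */
	#define PAGE_SHIFT	12
	#define PMD_SHIFT	21			/* 2 MiB PMDs */
	#define PMD_SIZE	(1UL << PMD_SHIFT)

	int main(void)
	{
		unsigned long start = 0x201000;	/* 2 MiB + 4 KiB: not PMD-aligned */

		/* Same idiom as the hits above: round up to the next PMD
		 * boundary, then express the result as a page frame number. */
		unsigned long pfn = ((start + (PMD_SIZE - 1)) >> PMD_SHIFT)
					<< (PMD_SHIFT - PAGE_SHIFT);

		printf("start 0x%lx -> pfn 0x%lx (addr 0x%lx)\n",
		       start, pfn, pfn << PAGE_SHIFT);	/* pfn 0x400, addr 0x400000 */
		return 0;
	}
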
PMD_SIZE          237 arch/x86/mm/init_64.c 	for (; size; phys += PMD_SIZE, size -= PMD_SIZE) {
PMD_SIZE          282 arch/x86/mm/init_64.c 	unsigned long end = roundup((unsigned long)_end, PMD_SIZE) - 1;
PMD_SIZE          286 arch/x86/mm/init_64.c 	for (; pmd < last_pmd; pmd++, vaddr += PMD_SIZE) {
PMD_SIZE          387 arch/x86/mm/init_64.c 	for (; i < PTRS_PER_PMD; i++, address += PMD_SIZE) {
PMD_SIZE          433 arch/x86/mm/init_64.c 			last_map_addr = (address & PMD_MASK) + PMD_SIZE;
PMD_SIZE          554 arch/x86/mm/init_64.c 		pmds = (extra + PMD_SIZE - 1) >> PMD_SHIFT;
PMD_SIZE          556 arch/x86/mm/init_64.c 		pmds = (end + PMD_SIZE - 1) >> PMD_SHIFT;
PMD_SIZE          707 arch/x86/mm/init_64.c 	end_pfn = ((start + (PMD_SIZE - 1)) >> PMD_SHIFT)
PMD_SIZE          712 arch/x86/mm/init_64.c 	start_pfn = ((start + (PMD_SIZE - 1))>>PMD_SHIFT)
PMD_SIZE         1184 arch/x86/mm/init_64.c 				p = vmemmap_alloc_block(PMD_SIZE, node);
PMD_SIZE         1202 arch/x86/mm/init_64.c 				addr_end = addr + PMD_SIZE;
PMD_SIZE         1203 arch/x86/mm/init_64.c 				p_end = p + PMD_SIZE;
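
Lines 554 and 556 of init_64.c size the early page table allocation by ceiling-dividing a range by PMD_SIZE: (end + PMD_SIZE - 1) >> PMD_SHIFT is the number of 2 MiB PMD entries needed, and the vmemmap code at lines 1184 and 1202-1203 then hands out backing storage in exactly PMD_SIZE chunks. A sketch of the counting arithmetic, again with the assumed x86-64 shift; pmds_for() is a hypothetical helper name:

	#include <stdio.h>

	#define PMD_SHIFT	21	/* assumed x86-64 value */
	#define PMD_SIZE	(1UL << PMD_SHIFT)

	/* Ceiling division: PMD entries needed to cover [0, end). */
	static unsigned long pmds_for(unsigned long end)
	{
		return (end + PMD_SIZE - 1) >> PMD_SHIFT;
	}

	int main(void)
	{
		printf("%lu\n", pmds_for(PMD_SIZE));		/* exactly one: 1 */
		printf("%lu\n", pmds_for(PMD_SIZE + 1));	/* spills over: 2 */
		printf("%lu\n", pmds_for(5UL << 20));		/* 5 MiB: 3 */
		return 0;
	}
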
PMD_SIZE           99 arch/x86/mm/pageattr.c 	return __pa(roundup((unsigned long)_end, PMD_SIZE)) >> PAGE_SHIFT;
PMD_SIZE           74 arch/x86/mm/pgtable_32.c 	if (vaddr & (PMD_SIZE-1)) {		/* vaddr is misaligned */
PMD_SIZE           58 arch/x86/power/hibernate_64.c 		for (j = 0; j < PTRS_PER_PMD; pmd++, j++, paddr += PMD_SIZE) {
PMD_SIZE           87 arch/x86/vdso/vma.c 	end = (start + PMD_SIZE - 1) & PMD_MASK;
PMD_SIZE         1534 arch/x86/xen/enlighten.c 	l3 = m2v(pgd[pgd_index(__START_KERNEL_map + PMD_SIZE)].pgd);
PMD_SIZE         1535 arch/x86/xen/enlighten.c 	l2 = m2v(l3[pud_index(__START_KERNEL_map + PMD_SIZE)].pud);
PMD_SIZE          140 include/asm-frv/pgtable.h #define PMD_MASK		(~(PMD_SIZE - 1))
PMD_SIZE           22 include/asm-generic/pgtable-nopmd.h #define PMD_MASK  	(~(PMD_SIZE-1))
PMD_SIZE          152 include/asm-generic/pgtable.h ({	unsigned long __boundary = ((addr) + PMD_SIZE) & PMD_MASK;	\
PMD_SIZE           51 include/asm-m32r/pgtable.h #define PMD_MASK	(~(PMD_SIZE - 1))
PMD_SIZE           38 include/asm-m68k/pgtable.h #define PMD_MASK	(~(PMD_SIZE-1))
PMD_SIZE           84 include/asm-parisc/pgtable.h #define PMD_MASK	(~(PMD_SIZE-1))
PMD_SIZE           28 include/asm-um/pgtable-3level.h #define PMD_MASK	(~(PMD_SIZE-1))
PMD_SIZE           44 include/asm-x86/pgtable_32.h # define PMD_MASK	(~(PMD_SIZE - 1))
PMD_SIZE          142 include/asm-x86/pgtable_64.h #define PMD_MASK	(~(PMD_SIZE - 1))
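
Every architecture above defines PMD_MASK identically: since PMD_SIZE is a power of two, ~(PMD_SIZE - 1) clears the in-PMD offset bits, so addr & PMD_MASK aligns an address down, while the (addr + PMD_SIZE - 1) & PMD_MASK idiom seen in efi_64.c, init_32.c, and vdso/vma.c aligns it up. A sketch with the assumed x86-64 shift of 21:

	#include <stdio.h>

	#define PMD_SHIFT	21	/* assumed x86-64 value */
	#define PMD_SIZE	(1UL << PMD_SHIFT)
	#define PMD_MASK	(~(PMD_SIZE - 1))	/* same pattern as the defines above */

	int main(void)
	{
		unsigned long addr = 0x300000;	/* 3 MiB, inside the second PMD */

		unsigned long down = addr & PMD_MASK;			/* 0x200000 */
		unsigned long up = (addr + PMD_SIZE - 1) & PMD_MASK;	/* 0x400000 */

		printf("align down: 0x%lx, align up: 0x%lx\n", down, up);
		return 0;
	}
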
PMD_SIZE          253 mm/memory.c    		addr += PMD_SIZE;
PMD_SIZE          263 mm/memory.c    		end -= PMD_SIZE;
PMD_SIZE          297 mm/memory.c    			while (next && next->vm_start <= vma->vm_end + PMD_SIZE
PMD_SIZE         2088 mm/mmap.c      	BUG_ON(mm->nr_ptes > (FIRST_USER_ADDRESS+PMD_SIZE-1)>>PMD_SHIFT);
PMD_SIZE          141 mm/mremap.c    		next = (old_addr + PMD_SIZE) & PMD_MASK;
PMD_SIZE          151 mm/mremap.c    		next = (new_addr + PMD_SIZE) & PMD_MASK;
PMD_SIZE          806 mm/rmap.c      #define CLUSTER_SIZE	min(32*PAGE_SIZE, PMD_SIZE)
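
The asm-generic/pgtable.h hit at line 152 is the heart of pmd_addr_end(): advance to the next PMD boundary but never past the caller's end, which is how the walkers in mm/memory.c and mm/mremap.c above carve an arbitrary [addr, end) range into per-PMD chunks (rmap.c's CLUSTER_SIZE caps a scan cluster at one PMD for the same reason). A sketch of that walk, reusing the assumed constants:

	#include <stdio.h>

	#define PMD_SHIFT	21	/* assumed x86-64 value */
	#define PMD_SIZE	(1UL << PMD_SHIFT)
	#define PMD_MASK	(~(PMD_SIZE - 1))

	/* Next PMD boundary after addr, clamped to end (cf. pmd_addr_end()). */
	static unsigned long pmd_addr_end(unsigned long addr, unsigned long end)
	{
		unsigned long boundary = (addr + PMD_SIZE) & PMD_MASK;
		return boundary < end ? boundary : end;
	}

	int main(void)
	{
		unsigned long addr = 0x1ff000, end = 0x601000;	/* spans four PMDs */
		unsigned long next;

		do {
			next = pmd_addr_end(addr, end);
			printf("chunk [0x%lx, 0x%lx)\n", addr, next);
			addr = next;
		} while (addr < end);
		return 0;
	}
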