Contributors: 7

Author                 | Tokens | Token Proportion | Commits | Commit Proportion
David Gibson           |    135 |           46.71% |       2 |            16.67%
Aneesh Kumar K.V       |    114 |           39.45% |       4 |            33.33%
Mark Nelson            |     18 |            6.23% |       1 |             8.33%
Benjamin Herrenschmidt |      9 |            3.11% |       2 |            16.67%
Hari Bathini           |      6 |            2.08% |       1 |             8.33%
Balbir Singh           |      5 |            1.73% |       1 |             8.33%
Thomas Gleixner        |      2 |            0.69% |       1 |             8.33%
Total                  |    289 |                  |      12 |
/* SPDX-License-Identifier: GPL-2.0-or-later */
#ifndef _ASM_POWERPC_PGALLOC_64_H
#define _ASM_POWERPC_PGALLOC_64_H
#include <linux/slab.h>
#include <linux/cpumask.h>
#include <linux/percpu.h>
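
/*
 * Bookkeeping for blocks backing the virtual memmap: each node records
 * the physical address of one backing block and the virtual address it
 * maps, chained through ->list onto the global vmemmap_list.
 */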
struct vmemmap_backing {
	struct vmemmap_backing *list;
	unsigned long phys;
	unsigned long virt_addr;
};
extern struct vmemmap_backing *vmemmap_list;
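
/* Point a PGD entry at a newly allocated PUD table. */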
#define pgd_populate(MM, PGD, PUD) pgd_set(PGD, (unsigned long)PUD)
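
/*
 * PUD tables come from a dedicated kmem cache sized by PUD_INDEX_SIZE;
 * pgtable_gfp_flags() adjusts the GFP flags depending on whether the
 * mm is a user or kernel mm.
 */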
static inline pud_t *pud_alloc_one(struct mm_struct *mm, unsigned long addr)
{
	return kmem_cache_alloc(PGT_CACHE(PUD_INDEX_SIZE),
				pgtable_gfp_flags(mm, GFP_KERNEL));
}

static inline void pud_free(struct mm_struct *mm, pud_t *pud)
{
	kmem_cache_free(PGT_CACHE(PUD_INDEX_SIZE), pud);
}
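
/* Point a PUD entry at a PMD table. */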
static inline void pud_populate(struct mm_struct *mm, pud_t *pud, pmd_t *pmd)
{
	pud_set(pud, (unsigned long)pmd);
}
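
/*
 * Point a PMD entry at a PTE table: the _kernel variant takes a raw
 * pte_t pointer, while the user variant takes a pgtable_t page.
 */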
static inline void pmd_populate_kernel(struct mm_struct *mm, pmd_t *pmd,
				       pte_t *pte)
{
	pmd_set(pmd, (unsigned long)pte);
}

static inline void pmd_populate(struct mm_struct *mm, pmd_t *pmd,
				pgtable_t pte_page)
{
	pmd_set(pmd, (unsigned long)pte_page);
}
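
/* PMD tables likewise come from their own kmem cache. */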
static inline pmd_t *pmd_alloc_one(struct mm_struct *mm, unsigned long addr)
{
	return kmem_cache_alloc(PGT_CACHE(PMD_CACHE_INDEX),
				pgtable_gfp_flags(mm, GFP_KERNEL));
}

static inline void pmd_free(struct mm_struct *mm, pmd_t *pmd)
{
	kmem_cache_free(PGT_CACHE(PMD_CACHE_INDEX), pmd);
}
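
/*
 * Tear-down must not free a page table while other CPUs may still hold
 * TLB entries referencing it, so freeing is deferred through
 * pgtable_free_tlb() and the mmu_gather machinery.
 */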
#define __pmd_free_tlb(tlb, pmd, addr)	\
	pgtable_free_tlb(tlb, pmd, PMD_CACHE_INDEX)
#define __pud_free_tlb(tlb, pud, addr)	\
	pgtable_free_tlb(tlb, pud, PUD_INDEX_SIZE)
#endif /* _ASM_POWERPC_PGALLOC_64_H */
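
To make the vmemmap_list bookkeeping concrete, here is a minimal user-space sketch of the same prepend-and-walk pattern. It is an illustrative analogue only, not kernel code: vmemmap_list_add() is a hypothetical stand-in for the powerpc mm code (in arch/powerpc/mm/init_64.c) that actually maintains the list, and the addresses are made up.

#include <stdio.h>
#include <stdlib.h>

/* User-space stand-in for the kernel's struct vmemmap_backing. */
struct vmemmap_backing {
	struct vmemmap_backing *list;
	unsigned long phys;
	unsigned long virt_addr;
};

static struct vmemmap_backing *vmemmap_list;

/*
 * Hypothetical helper: prepend a record for a newly mapped backing
 * block, mirroring how the kernel grows vmemmap_list.
 */
static void vmemmap_list_add(unsigned long phys, unsigned long virt_addr)
{
	struct vmemmap_backing *vmem = malloc(sizeof(*vmem));

	if (!vmem)
		abort();
	vmem->phys = phys;
	vmem->virt_addr = virt_addr;
	vmem->list = vmemmap_list;	/* link in at the head */
	vmemmap_list = vmem;
}

int main(void)
{
	const struct vmemmap_backing *vmem;

	vmemmap_list_add(0x10000000UL, 0xd0000000UL);
	vmemmap_list_add(0x20000000UL, 0xd1000000UL);

	/* Walk newest-first, as a virt-to-phys lookup over the list would. */
	for (vmem = vmemmap_list; vmem; vmem = vmem->list)
		printf("virt 0x%lx -> phys 0x%lx\n", vmem->virt_addr, vmem->phys);
	return 0;
}

A singly linked list is sufficient here because entries are only ever prepended as backing blocks are mapped, and lookups (e.g. when unmapping during memory hot-remove) simply scan the chain.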