Release 4.13: arch/sparc/include/asm/hugetlb.h
#ifndef _ASM_SPARC64_HUGETLB_H
#define _ASM_SPARC64_HUGETLB_H

#include <asm/page.h>
#include <asm-generic/hugetlb.h>

void set_huge_pte_at(struct mm_struct *mm, unsigned long addr,
                     pte_t *ptep, pte_t pte);

pte_t huge_ptep_get_and_clear(struct mm_struct *mm, unsigned long addr,
                              pte_t *ptep);

static inline int is_hugepage_only_range(struct mm_struct *mm,
                                         unsigned long addr,
                                         unsigned long len) {
        return 0;
}
Contributors
Person | Tokens | Token Share | Commits | Commit Share |
Sam Ravnborg | 23 | 100.00% | 1 | 100.00% |
Total | 23 | 100.00% | 1 | 100.00% |
/*
* If the arch doesn't supply something else, assume that hugepage
* size aligned regions are ok without further preparation.
*/
static inline int prepare_hugepage_range(struct file *file,
                                         unsigned long addr, unsigned long len)
{
        struct hstate *h = hstate_file(file);

        if (len & ~huge_page_mask(h))
                return -EINVAL;
        if (addr & ~huge_page_mask(h))
                return -EINVAL;
        return 0;
}
Contributors
Person | Tokens | Token Share | Commits | Commit Share |
Sam Ravnborg | 38 | 62.30% | 1 | 33.33% |
Nitin Gupta | 18 | 29.51% | 1 | 33.33% |
Andi Kleen | 5 | 8.20% | 1 | 33.33% |
Total | 61 | 100.00% | 3 | 100.00% |
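To make the alignment test above concrete, here is a minimal standalone sketch of the same mask arithmetic. The 8 MiB huge page size, the macro names, and check_range() are illustrative assumptions for this example, not part of the header above; in the kernel the size comes from hstate_file() and huge_page_mask().

#include <stdio.h>

/* Assumed huge page size for illustration only (sparc64 commonly uses
 * 8 MiB huge pages); the kernel derives it from hstate_file(). */
#define HUGE_PAGE_SIZE (8UL << 20)
#define HUGE_PAGE_MASK (~(HUGE_PAGE_SIZE - 1))

/* Mirrors the checks in prepare_hugepage_range(): both the length and
 * the start address must be multiples of the huge page size. */
static int check_range(unsigned long addr, unsigned long len)
{
        if (len & ~HUGE_PAGE_MASK)
                return -1;
        if (addr & ~HUGE_PAGE_MASK)
                return -1;
        return 0;
}

int main(void)
{
        printf("%d\n", check_range(0x1000000UL, HUGE_PAGE_SIZE)); /* 0: aligned */
        printf("%d\n", check_range(0x1001000UL, HUGE_PAGE_SIZE)); /* -1: addr unaligned */
        return 0;
}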
static inline void huge_ptep_clear_flush(struct vm_area_struct *vma,
                                         unsigned long addr, pte_t *ptep)
{
}
Contributors
Person | Tokens | Token Share | Commits | Commit Share |
Sam Ravnborg | 19 | 100.00% | 1 | 100.00% |
Total | 19 | 100.00% | 1 | 100.00% |
static inline int huge_pte_none(pte_t pte)
{
        return pte_none(pte);
}
Contributors
Person | Tokens | Token Share | Commits | Commit Share |
Sam Ravnborg | 16 | 100.00% | 1 | 100.00% |
Total | 16 | 100.00% | 1 | 100.00% |
static inline pte_t huge_pte_wrprotect(pte_t pte)
{
        return pte_wrprotect(pte);
}
Contributors
Person | Tokens | Token Share | Commits | Commit Share |
Sam Ravnborg | 16 | 100.00% | 1 | 100.00% |
Total | 16 | 100.00% | 1 | 100.00% |
static inline void huge_ptep_set_wrprotect(struct mm_struct *mm,
                                           unsigned long addr, pte_t *ptep)
{
        pte_t old_pte = *ptep;

        set_huge_pte_at(mm, addr, ptep, pte_wrprotect(old_pte));
}
Contributors
Person | Tokens | Token Share | Commits | Commit Share |
Sam Ravnborg | 28 | 70.00% | 1 | 50.00% |
Dave Kleikamp | 12 | 30.00% | 1 | 50.00% |
Total | 40 | 100.00% | 2 | 100.00% |
static inline int huge_ptep_set_access_flags(struct vm_area_struct *vma,
                                             unsigned long addr, pte_t *ptep,
                                             pte_t pte, int dirty)
{
        int changed = !pte_same(*ptep, pte);

        if (changed) {
                set_huge_pte_at(vma->vm_mm, addr, ptep, pte);
                flush_tlb_page(vma, addr);
        }
        return changed;
}
Contributors
Person | Tokens | Token Share | Commits | Commit Share |
Sam Ravnborg | 37 | 55.22% | 1 | 50.00% |
Dave Kleikamp | 30 | 44.78% | 1 | 50.00% |
Total | 67 | 100.00% | 2 | 100.00% |
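The function above follows a compare-then-update pattern: write the new PTE and flush the per-address TLB entry only when the entry actually changed, and report that decision to the caller. A rough userspace sketch of the same control flow follows; the fake_* types and helpers are stand-ins invented for this example, not kernel APIs.

#include <stdio.h>
#include <stdbool.h>

/* Stand-in PTE type; the real pte_t is an architecture-specific word. */
typedef struct { unsigned long val; } fake_pte_t;

static bool fake_pte_same(fake_pte_t a, fake_pte_t b) { return a.val == b.val; }
static void fake_set_pte(fake_pte_t *ptep, fake_pte_t pte) { *ptep = pte; }
static void fake_flush_tlb(unsigned long addr)
{
        printf("flush TLB entry for 0x%lx\n", addr);
}

/* Same shape as huge_ptep_set_access_flags(): skip both the store and
 * the TLB flush when nothing changed, and tell the caller whether an
 * update happened. */
static int set_access_flags(unsigned long addr, fake_pte_t *ptep, fake_pte_t pte)
{
        int changed = !fake_pte_same(*ptep, pte);

        if (changed) {
                fake_set_pte(ptep, pte);
                fake_flush_tlb(addr);
        }
        return changed;
}

int main(void)
{
        fake_pte_t entry = { 0x42 };

        printf("%d\n", set_access_flags(0x800000UL, &entry, (fake_pte_t){ 0x42 })); /* 0: no flush */
        printf("%d\n", set_access_flags(0x800000UL, &entry, (fake_pte_t){ 0x43 })); /* 1: flushed */
        return 0;
}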
static inline pte_t huge_ptep_get(pte_t *ptep)
{
        return *ptep;
}
Contributors
Person | Tokens | Token Share | Commits | Commit Share |
Sam Ravnborg | 15 | 100.00% | 1 | 100.00% |
Total | 15 | 100.00% | 1 | 100.00% |
static inline void arch_clear_hugepage_flags(struct page *page)
{
}
Contributors
Person | Tokens | Token Share | Commits | Commit Share |
Will Deacon | 11 | 100.00% | 1 | 100.00% |
Total | 11 | 100.00% | 1 | 100.00% |
void hugetlb_free_pgd_range(struct mmu_gather *tlb, unsigned long addr,
                            unsigned long end, unsigned long floor,
                            unsigned long ceiling);

#endif /* _ASM_SPARC64_HUGETLB_H */
Overall Contributors
Person | Tokens | Token Share | Commits | Commit Share |
Sam Ravnborg | 242 | 69.94% | 1 | 14.29% |
Nitin Gupta | 43 | 12.43% | 2 | 28.57% |
Dave Kleikamp | 42 | 12.14% | 1 | 14.29% |
Will Deacon | 11 | 3.18% | 1 | 14.29% |
Andi Kleen | 5 | 1.45% | 1 | 14.29% |
Gerald Schaefer | 3 | 0.87% | 1 | 14.29% |
Total | 346 | 100.00% | 7 | 100.00% |