Contributors: 7

| Author | Tokens | Token Proportion | Commits | Commit Proportion |
|---|---|---|---|---|
| Qi Zheng | 311 | 92.28% | 1 | 10.00% |
| Andrew Morton | 11 | 3.26% | 3 | 30.00% |
| Lorenzo Stoakes | 6 | 1.78% | 1 | 10.00% |
| Jan Beulich | 3 | 0.89% | 1 | 10.00% |
| Hugh Dickins | 3 | 0.89% | 2 | 20.00% |
| Richard Henderson | 2 | 0.59% | 1 | 10.00% |
| Greg Kroah-Hartman | 1 | 0.30% | 1 | 10.00% |
| Total | 337 | | 10 | |
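The source below implements reclaim of empty user page-table (PTE) pages: when a zap caller opts in via `details->reclaim_pt`, a PTE table whose entries are all `pte_none()` is detached from its PMD and handed to the `mmu_gather` for freeing.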
// SPDX-License-Identifier: GPL-2.0
#include <linux/hugetlb.h>
#include <asm-generic/tlb.h>
#include <asm/pgalloc.h>
#include "internal.h"
/*
 * Reclaim runs only when the caller opted in (details->reclaim_pt)
 * and the zapped range spans at least one full PTE table (PMD_SIZE).
 */
bool reclaim_pt_is_enabled(unsigned long start, unsigned long end,
                           struct zap_details *details)
{
        return details && details->reclaim_pt && (end - start >= PMD_SIZE);
}
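As a point of reference, here is a minimal sketch of how a zap caller might opt in. `struct zap_details` and its `reclaim_pt` field come from the code above; `example_zap_range()` and everything else in it are hypothetical, not the kernel's actual MADV_DONTNEED path.

/* Hypothetical caller: opt in to PTE-table reclaim for a zap. */
static void example_zap_range(unsigned long start, unsigned long end)
{
        struct zap_details details = {
                .reclaim_pt = true,     /* request freeing of emptied PTE pages */
        };

        /* Only ranges spanning at least PMD_SIZE can empty a whole
         * PTE table, so smaller zaps skip reclaim entirely. */
        if (!reclaim_pt_is_enabled(start, end, &details))
                return;

        /* ... proceed to zap the range and reclaim emptied tables ... */
}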
/*
 * Fast path: detach the PTE table by clearing the PMD entry under the
 * PMD lock, but only if the lock can be taken without contention.
 */
bool try_get_and_clear_pmd(struct mm_struct *mm, pmd_t *pmd, pmd_t *pmdval)
{
        spinlock_t *pml = pmd_lockptr(mm, pmd);

        if (!spin_trylock(pml))
                return false;

        *pmdval = pmdp_get_lockless(pmd);
        pmd_clear(pmd);
        spin_unlock(pml);

        return true;
}
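Note that try_get_and_clear_pmd() never inspects the PTEs itself: it is only safe when the caller already knows the table is empty, for instance because it just zapped every entry in it. The trylock keeps this fast path contention-free; when it fails, a caller can presumably fall back to try_to_free_pte() below, which takes the locks unconditionally and re-verifies emptiness.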
/*
 * Hand the detached PTE page to the mmu_gather for deferred freeing
 * and drop the mm's page-table accounting for it.
 */
void free_pte(struct mm_struct *mm, unsigned long addr, struct mmu_gather *tlb,
              pmd_t pmdval)
{
        pte_free_tlb(tlb, pmd_pgtable(pmdval), addr);
        mm_dec_nr_ptes(mm);
}
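free_pte() does not return the page-table page to the allocator directly: pte_free_tlb() queues it on the mmu_gather, so the page is only freed after the corresponding TLB flush, once concurrent hardware walks can no longer reach it. mm_dec_nr_ptes() keeps the mm's page-table accounting in step.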
/*
 * Slow path: take the PMD lock (and the PTE lock if split), rescan the
 * table, and free it only if every entry is still pte_none().
 */
void try_to_free_pte(struct mm_struct *mm, pmd_t *pmd, unsigned long addr,
                     struct mmu_gather *tlb)
{
        pmd_t pmdval;
        spinlock_t *pml, *ptl = NULL;
        pte_t *start_pte, *pte;
        int i;

        pml = pmd_lock(mm, pmd);
        start_pte = pte_offset_map_rw_nolock(mm, pmd, addr, &pmdval, &ptl);
        if (!start_pte)
                goto out_ptl;
        if (ptl != pml)
                spin_lock_nested(ptl, SINGLE_DEPTH_NESTING);

        /* Check if it is an empty PTE page */
        for (i = 0, pte = start_pte; i < PTRS_PER_PTE; i++, pte++) {
                if (!pte_none(ptep_get(pte)))
                        goto out_ptl;
        }

        pte_unmap(start_pte);

        /* Detach the empty table, then hand it to the mmu_gather. */
        pmd_clear(pmd);

        if (ptl != pml)
                spin_unlock(ptl);
        spin_unlock(pml);

        free_pte(mm, addr, tlb, pmdval);

        return;
out_ptl:
        if (start_pte)
                pte_unmap_unlock(start_pte, ptl);
        if (ptl != pml)
                spin_unlock(pml);
}
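Putting the pieces together, here is a hedged sketch of how a zap loop might wire these helpers up, with `details` prepared as in the earlier sketch. `example_reclaim_pt()` and its comments are illustrative assumptions, not the kernel's actual zap_pte_range(); only the four helpers defined above come from this file.

/* Hypothetical zap-side usage, assuming the caller has just zapped the
 * whole [start, end) range; not the kernel's actual zap code. */
static void example_reclaim_pt(struct mm_struct *mm, pmd_t *pmd,
                               unsigned long start, unsigned long end,
                               struct mmu_gather *tlb,
                               struct zap_details *details)
{
        pmd_t pmdval;

        if (!reclaim_pt_is_enabled(start, end, details))
                return;

        /* Fast path: the caller just emptied the table, so detaching the
         * PMD entry under an uncontended trylock is enough. */
        if (try_get_and_clear_pmd(mm, pmd, &pmdval)) {
                free_pte(mm, start, tlb, pmdval);
                return;
        }

        /* Contended: take the locks and re-verify emptiness, since another
         * thread may have populated entries in the meantime. */
        try_to_free_pte(mm, pmd, start, tlb);
}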