Release 4.12 include/linux/migrate.h
#ifndef _LINUX_MIGRATE_H
#define _LINUX_MIGRATE_H
#include <linux/mm.h>
#include <linux/mempolicy.h>
#include <linux/migrate_mode.h>
typedef struct page *new_page_t(struct page *page, unsigned long private,
        int **reason);
typedef void free_page_t(struct page *page, unsigned long private);
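For illustration only (this block is not part of migrate.h): a minimal sketch of a callback pair matching these typedefs, as a caller of migrate_pages() further below might supply. The names alloc_target_page/free_target_page and the use of private as a target node id are assumptions made for the example.
/* Hypothetical new_page_t: allocate the replacement page on the node
 * passed through the opaque 'private' argument; 'reason' is unused here. */
static struct page *alloc_target_page(struct page *page, unsigned long private,
        int **reason)
{
        return __alloc_pages_node((int)private, GFP_HIGHUSER_MOVABLE, 0);
}

/* Hypothetical free_page_t: release a replacement page that was never
 * used because migration of the source page failed. */
static void free_target_page(struct page *page, unsigned long private)
{
        __free_page(page);
}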
/*
 * Return values from address_space_operations.migratepage():
 *  - negative errno on page migration failure;
 *  - zero on page migration success;
 */
#define MIGRATEPAGE_SUCCESS 0
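For illustration only (not part of this header): MIGRATEPAGE_SUCCESS is what address_space_operations.migratepage() implementations return on success. A filesystem that keeps no private page state can point the hook at the generic migrate_page() helper declared further below; example_aops is a placeholder name for this sketch.
static const struct address_space_operations example_aops = {
        /* ... other methods elided ... */
#ifdef CONFIG_MIGRATION
        .migratepage    = migrate_page, /* returns MIGRATEPAGE_SUCCESS or -errno */
#endif
};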
enum migrate_reason {
        MR_COMPACTION,
        MR_MEMORY_FAILURE,
        MR_MEMORY_HOTPLUG,
        MR_SYSCALL,             /* also applies to cpusets */
        MR_MEMPOLICY_MBIND,
        MR_NUMA_MISPLACED,
        MR_CMA,
        MR_TYPES
};
/* In mm/debug.c; also keep in sync with include/trace/events/migrate.h */
extern char *migrate_reason_names[MR_TYPES];
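For illustration only (not part of this header): a hypothetical debug helper showing how a reason value indexes into migrate_reason_names.
/* Hypothetical helper: map a migrate_reason value to its printable name. */
static const char *migrate_reason_name(int reason)
{
        if (reason < 0 || reason >= MR_TYPES)
                return "unknown";
        return migrate_reason_names[reason];
}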
#ifdef CONFIG_MIGRATION
extern void putback_movable_pages(struct list_head *l);
extern int migrate_page(struct address_space *mapping,
        struct page *newpage, struct page *page,
        enum migrate_mode mode);
extern int migrate_pages(struct list_head *l, new_page_t new, free_page_t free,
        unsigned long private, enum migrate_mode mode, int reason);
extern int isolate_movable_page(struct page *page, isolate_mode_t mode);
extern void putback_movable_page(struct page *page);
extern int migrate_prep(void);
extern int migrate_prep_local(void);
extern void migrate_page_copy(struct page *newpage, struct page *page);
extern int migrate_huge_page_move_mapping(struct address_space *mapping,
        struct page *newpage, struct page *page);
extern int migrate_page_move_mapping(struct address_space *mapping,
        struct page *newpage, struct page *page,
        struct buffer_head *head, enum migrate_mode mode,
        int extra_count);
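For illustration only (not part of this header): a sketch of the usual calling pattern, reusing the hypothetical alloc_target_page/free_target_page callbacks from the earlier example. The helper name is made up; pagelist is assumed to already hold isolated pages (callers typically run migrate_prep() before isolating them).
/* Hypothetical helper: migrate an isolated page list to node 'nid'. */
static int migrate_list_to_node(struct list_head *pagelist, int nid)
{
        int ret;

        if (list_empty(pagelist))
                return 0;

        /* migrate_pages() returns 0, a -errno, or the number of pages
         * it could not migrate. */
        ret = migrate_pages(pagelist, alloc_target_page, free_target_page,
                (unsigned long)nid, MIGRATE_SYNC, MR_SYSCALL);
        if (ret)
                /* Anything still on the list goes back where it came from. */
                putback_movable_pages(pagelist);

        return ret;
}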
#else
static inline void putback_movable_pages(struct list_head *l) {}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Rafael Aquini | 11 | 100.00% | 1 | 100.00% |
Total | 11 | 100.00% | 1 | 100.00% |
static inline int migrate_pages(struct list_head *l, new_page_t new,
        free_page_t free, unsigned long private, enum migrate_mode mode,
        int reason)
        { return -ENOSYS; }
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Christoph Lameter | 19 | 57.58% | 3 | 42.86% |
Mel Gorman | 6 | 18.18% | 2 | 28.57% |
David Rientjes | 4 | 12.12% | 1 | 14.29% |
Naoya Horiguchi | 4 | 12.12% | 1 | 14.29% |
Total | 33 | 100.00% | 7 | 100.00% |
static inline int isolate_movable_page(struct page *page, isolate_mode_t mode)
        { return -EBUSY; }
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Yisheng Xie | 19 | 100.00% | 1 | 100.00% |
Total | 19 | 100.00% | 1 | 100.00% |
static inline int migrate_prep(void) { return -ENOSYS; }
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Christoph Lameter | 12 | 92.31% | 1 | 50.00% |
Mel Gorman | 1 | 7.69% | 1 | 50.00% |
Total | 13 | 100.00% | 2 | 100.00% |
static inline int migrate_prep_local(void) { return -ENOSYS; }
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Mel Gorman | 12 | 92.31% | 1 | 50.00% |
Christoph Lameter | 1 | 7.69% | 1 | 50.00% |
Total | 13 | 100.00% | 2 | 100.00% |
static inline void migrate_page_copy(struct page *newpage,
        struct page *page) {}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Naoya Horiguchi | 16 | 100.00% | 1 | 100.00% |
Total | 16 | 100.00% | 1 | 100.00% |
static inline int migrate_huge_page_move_mapping(struct address_space *mapping,
        struct page *newpage, struct page *page)
{
        return -ENOSYS;
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Naoya Horiguchi | 26 | 100.00% | 2 | 100.00% |
Total | 26 | 100.00% | 2 | 100.00% |
#endif /* CONFIG_MIGRATION */
#ifdef CONFIG_COMPACTION
extern int PageMovable(struct page *page);
extern void __SetPageMovable(struct page *page, struct address_space *mapping);
extern void __ClearPageMovable(struct page *page);
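For illustration only (not part of this header): a sketch of how a driver for non-LRU pages can opt into migration, following the pattern described in Documentation/vm/page_migration. The my_* names are assumptions; the real interface is the three address_space_operations hooks plus __SetPageMovable()/__ClearPageMovable().
/* Hypothetical driver-side hooks (definitions omitted in this sketch). */
static bool my_isolate(struct page *page, isolate_mode_t mode);
static int my_migrate(struct address_space *mapping, struct page *newpage,
        struct page *page, enum migrate_mode mode);
static void my_putback(struct page *page);

static const struct address_space_operations my_movable_aops = {
        .isolate_page   = my_isolate,
        .migratepage    = my_migrate,   /* returns MIGRATEPAGE_SUCCESS or -errno */
        .putback_page   = my_putback,
};

static void my_register_page(struct page *page, struct address_space *mapping)
{
        /* mapping->a_ops is assumed to point at my_movable_aops already. */
        __SetPageMovable(page, mapping);        /* compaction may now isolate it */
}

static void my_release_page(struct page *page)
{
        __ClearPageMovable(page);               /* done before freeing the page */
}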
#else
static inline int PageMovable(struct page *page) { return 0; }
Contributors
Person | Tokens | Prop | Commits | CommitProp |
MinChan Kim | 15 | 100.00% | 1 | 100.00% |
Total | 15 | 100.00% | 1 | 100.00% |
static inline void __SetPageMovable(struct page *page,
        struct address_space *mapping)
{
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
MinChan Kim | 16 | 100.00% | 1 | 100.00% |
Total | 16 | 100.00% | 1 | 100.00% |
static inline void __ClearPageMovable(struct page *page)
{
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
MinChan Kim | 11 | 100.00% | 1 | 100.00% |
Total | 11 | 100.00% | 1 | 100.00% |
#endif
#ifdef CONFIG_NUMA_BALANCING
extern bool pmd_trans_migrating(pmd_t pmd);
extern int migrate_misplaced_page(struct page *page,
        struct vm_area_struct *vma, int node);
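For illustration only (not part of this header): roughly how the NUMA hinting fault path consumes this under CONFIG_NUMA_BALANCING; a nonzero return means the page now lives on the target node. The wrapper name is made up.
/* Hypothetical wrapper around the NUMA hinting fault migration call. */
static bool try_numa_migrate(struct page *page, struct vm_area_struct *vma,
        int target_nid)
{
        return migrate_misplaced_page(page, vma, target_nid) != 0;
}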
#else
static inline bool pmd_trans_migrating(pmd_t pmd)
{
        return false;
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Mel Gorman | 13 | 100.00% | 1 | 100.00% |
Total | 13 | 100.00% | 1 | 100.00% |
static inline int migrate_misplaced_page(struct page *page,
        struct vm_area_struct *vma, int node)
{
        return -EAGAIN; /* can't migrate now */
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Peter Zijlstra | 20 | 80.00% | 1 | 50.00% |
Mel Gorman | 5 | 20.00% | 1 | 50.00% |
Total | 25 | 100.00% | 2 | 100.00% |
#endif /* CONFIG_NUMA_BALANCING */
#if defined(CONFIG_NUMA_BALANCING) && defined(CONFIG_TRANSPARENT_HUGEPAGE)
extern int migrate_misplaced_transhuge_page(struct mm_struct *mm,
        struct vm_area_struct *vma,
        pmd_t *pmd, pmd_t entry,
        unsigned long address,
        struct page *page, int node);
#else
static inline int migrate_misplaced_transhuge_page(struct mm_struct *mm,
        struct vm_area_struct *vma,
        pmd_t *pmd, pmd_t entry,
        unsigned long address,
        struct page *page, int node)
{
        return -EAGAIN;
}
Contributors
Person | Tokens | Prop | Commits | CommitProp |
Mel Gorman | 40 | 100.00% | 1 | 100.00% |
Total | 40 | 100.00% | 1 | 100.00% |
#endif /* CONFIG_NUMA_BALANCING && CONFIG_TRANSPARENT_HUGEPAGE */
#endif /* _LINUX_MIGRATE_H */
Overall Contributors
Person | Tokens | Prop | Commits | CommitProp |
Mel Gorman | 177 | 27.70% | 9 | 26.47% |
Christoph Lameter | 115 | 18.00% | 7 | 20.59% |
MinChan Kim | 108 | 16.90% | 3 | 8.82% |
Naoya Horiguchi | 80 | 12.52% | 2 | 5.88% |
Peter Zijlstra | 38 | 5.95% | 1 | 2.94% |
Gu Zheng | 29 | 4.54% | 1 | 2.94% |
David Rientjes | 24 | 3.76% | 1 | 2.94% |
Rafael Aquini | 24 | 3.76% | 2 | 5.88% |
Yisheng Xie | 20 | 3.13% | 2 | 5.88% |
Vlastimil Babka | 11 | 1.72% | 1 | 2.94% |
Pushkar Jambhlekar | 4 | 0.63% | 1 | 2.94% |
Benjamin LaHaise | 3 | 0.47% | 1 | 2.94% |
Andrew Morton | 3 | 0.47% | 1 | 2.94% |
Hugh Dickins | 2 | 0.31% | 1 | 2.94% |
Konstantin Khlebnikov | 1 | 0.16% | 1 | 2.94% |
Total | 639 | 100.00% | 34 | 100.00% |