/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_VDSO_H
#define _ASM_X86_VDSO_H

#include <asm/page_types.h>
#include <linux/linkage.h>
#include <linux/init.h>

#ifndef __ASSEMBLER__

#include <linux/mm_types.h>

/*
 * Describes one prebuilt vDSO image: the raw image blob plus offsets of
 * the symbols the kernel needs to locate inside it at map/setup time.
 */
struct vdso_image {
	void *data;		/* start of the vDSO image blob */
	unsigned long size;	/* Always a multiple of PAGE_SIZE */

	/*
	 * NOTE(review): presumably the offset/length of the image's
	 * alternatives-patching data — confirm against the vdso build
	 * tooling that fills these in.
	 */
	unsigned long alt, alt_len;

	long sym_vvar_start;	/* Negative offset to the vvar area */

	/*
	 * Offsets of well-known symbols within the image (data pages and
	 * entry points); negative/zero conventions are established by the
	 * code that consumes them, not visible here.
	 */
	long sym_vvar_page;
	long sym_hpet_page;
	long sym_pvclock_page;
	long sym_hvclock_page;
	long sym_VDSO32_NOTE_MASK;
	long sym___kernel_sigreturn;
	long sym___kernel_rt_sigreturn;
	long sym___kernel_vsyscall;
	long sym_int80_landing_pad;
};

/* One prebuilt image per supported ABI, selected by kernel config. */
#ifdef CONFIG_X86_64
extern const struct vdso_image vdso_image_64;
#endif

#ifdef CONFIG_X86_X32
extern const struct vdso_image vdso_image_x32;
#endif

/* 32-bit image is needed natively on 32-bit and for compat on 64-bit. */
#if defined CONFIG_X86_32 || defined CONFIG_COMPAT
extern const struct vdso_image vdso_image_32;
#endif

/* Boot-time initialization of an image (init-section code). */
extern void __init init_vdso_image(const struct vdso_image *image);

/* Map @image into the current process at @addr; returns 0 or -errno per kernel convention — callers confirm. */
extern int map_vdso_once(const struct vdso_image *image, unsigned long addr);

#endif /* __ASSEMBLER__ */
#endif /* _ASM_X86_VDSO_H */Overall Contributors
Person | Tokens | Prop | Commits | CommitProp |
Andrew Lutomirski | 107 | 74.83% | 9 | 56.25% |
Dmitry Safonov | 15 | 10.49% | 1 | 6.25% |
Roland McGrath | 13 | 9.09% | 2 | 12.50% |
Vitaly Kuznetsov | 3 | 2.10% | 1 | 6.25% |
H. Peter Anvin | 3 | 2.10% | 1 | 6.25% |
Ingo Molnar | 1 | 0.70% | 1 | 6.25% |
Greg Kroah-Hartman | 1 | 0.70% | 1 | 6.25% |
Total | 143 | 100.00% | 16 | 100.00% |