0001
0002
0003
0004
0005
0006 #ifndef __SYSDEP_STUB_H
0007 #define __SYSDEP_STUB_H
0008
0009 #include <sysdep/ptrace_user.h>
0010 #include <generated/asm-offsets.h>
0011 #include <linux/stddef.h>
0012
0013 #define STUB_MMAP_NR __NR_mmap
0014 #define MMAP_OFFSET(o) (o)
0015
0016 #define __syscall_clobber "r11","rcx","memory"
0017 #define __syscall "syscall"
0018
/*
 * stub_syscall0 - issue a zero-argument Linux syscall via the x86_64
 * "syscall" instruction.
 *
 * @syscall: syscall number, placed in %rax ("0" ties it to the "=a" output).
 *
 * Returns the kernel's result, which comes back in %rax ("=a").
 * __syscall_clobber lists %rcx and %r11 because the syscall instruction
 * overwrites them, plus "memory" since the kernel may read/write user memory.
 */
static inline long stub_syscall0(long syscall)
{
	long ret;

	__asm__ volatile (__syscall
		: "=a" (ret)
		: "0" (syscall) : __syscall_clobber );

	return ret;
}
0029
/*
 * stub_syscall2 - issue a two-argument Linux syscall.
 *
 * @syscall: syscall number in %rax.
 * @arg1:    first argument in %rdi ("D").
 * @arg2:    second argument in %rsi ("S").
 *
 * Register placement follows the x86_64 syscall calling convention.
 * Returns the kernel's result from %rax.
 */
static inline long stub_syscall2(long syscall, long arg1, long arg2)
{
	long ret;

	__asm__ volatile (__syscall
		: "=a" (ret)
		: "0" (syscall), "D" (arg1), "S" (arg2) : __syscall_clobber );

	return ret;
}
0040
/*
 * stub_syscall3 - issue a three-argument Linux syscall.
 *
 * @syscall: syscall number in %rax.
 * @arg1:    first argument in %rdi ("D").
 * @arg2:    second argument in %rsi ("S").
 * @arg3:    third argument in %rdx ("d").
 *
 * Returns the kernel's result from %rax.
 */
static inline long stub_syscall3(long syscall, long arg1, long arg2, long arg3)
{
	long ret;

	__asm__ volatile (__syscall
		: "=a" (ret)
		: "0" (syscall), "D" (arg1), "S" (arg2), "d" (arg3)
		: __syscall_clobber );

	return ret;
}
0052
/*
 * stub_syscall4 - issue a four-argument Linux syscall.
 *
 * @syscall: syscall number in %rax.
 * @arg1-@arg3: passed in %rdi/%rsi/%rdx via constraints, as above.
 * @arg4:    fourth argument; no single-register constraint exists for %r10,
 *           so it is passed as a generic operand (%5, "g") and moved into
 *           %r10 explicitly — the kernel syscall ABI takes the fourth
 *           argument in %r10 (not %rcx, which syscall itself clobbers).
 *
 * %r10 is added to the clobber list since the asm writes it.
 * Returns the kernel's result from %rax.
 */
static inline long stub_syscall4(long syscall, long arg1, long arg2, long arg3,
				 long arg4)
{
	long ret;

	__asm__ volatile ("movq %5,%%r10 ; " __syscall
		: "=a" (ret)
		: "0" (syscall), "D" (arg1), "S" (arg2), "d" (arg3),
		  "g" (arg4)
		: __syscall_clobber, "r10" );

	return ret;
}
0066
/*
 * stub_syscall5 - issue a five-argument Linux syscall.
 *
 * @syscall: syscall number in %rax.
 * @arg1-@arg3: passed in %rdi/%rsi/%rdx via constraints.
 * @arg4:    moved from generic operand %5 into %r10 (fourth syscall arg).
 * @arg5:    moved from generic operand %6 into %r8 (fifth syscall arg).
 *
 * %r10 and %r8 are in the clobber list because the asm writes them before
 * executing the syscall. Returns the kernel's result from %rax.
 */
static inline long stub_syscall5(long syscall, long arg1, long arg2, long arg3,
				 long arg4, long arg5)
{
	long ret;

	__asm__ volatile ("movq %5,%%r10 ; movq %6,%%r8 ; " __syscall
		: "=a" (ret)
		: "0" (syscall), "D" (arg1), "S" (arg2), "d" (arg3),
		  "g" (arg4), "g" (arg5)
		: __syscall_clobber, "r10", "r8" );

	return ret;
}
0080
/*
 * trap_myself - execute a breakpoint trap (int3).
 *
 * NOTE(review): presumably this hands control to the process tracing this
 * stub (UML host side) — the tracer is not visible from this header; confirm
 * against the stub's callers.
 */
static inline void trap_myself(void)
{
	__asm("int3");
}
0085
/*
 * remap_stack_and_trap - mmap() over the page the stack pointer lives in
 * (the stub page), store the mmap result into that page, then trap.
 *
 * All six mmap arguments are assembled into the syscall-ABI registers:
 *   %rax = STUB_MMAP_NR                         (syscall number, operand %0)
 *   %rdi = %rsp & ~(UM_KERN_PAGE_SIZE - 1)      (addr: page-aligned stack,
 *                                                operands %1 for the mask)
 *   %rsi = UM_KERN_PAGE_SIZE                    (length, via "S" constraint)
 *   %rdx = PROT_READ | PROT_WRITE               (prot, via "d" constraint)
 *   %r10 = MAP_FIXED | MAP_SHARED               (flags, operand %2)
 *   %r8  = *(stub page + UML_STUB_FIELD_FD)     (fd, loaded from the stub
 *                                                page itself, operand %3)
 *   %r9  = *(stub page + UML_STUB_FIELD_OFFSET) (offset, likewise, %4)
 *
 * After the syscall, the stub page address is recomputed from %rsp (the
 * mapping was MAP_FIXED over the same page, so the stack is still valid)
 * and the syscall's return value in %rax is stored at
 * stub page + UML_STUB_FIELD_CHILD_ERR, where the host can read it.
 * Finally int3 traps back to the tracer.
 *
 * No outputs; everything observable happens through memory and the trap,
 * hence "memory" in __syscall_clobber plus the explicitly written
 * %r10/%r8/%r9.
 */
static inline void remap_stack_and_trap(void)
{
	__asm__ volatile (
		"movq %0,%%rax ;"
		"movq %%rsp,%%rdi ;"
		"andq %1,%%rdi ;"
		"movq %2,%%r10 ;"
		"movq %%rdi,%%r8 ; addq %3,%%r8 ; movq (%%r8),%%r8 ;"
		"movq %%rdi,%%r9 ; addq %4,%%r9 ; movq (%%r9),%%r9 ;"
		__syscall ";"
		"movq %%rsp,%%rdi ; andq %1,%%rdi ;"
		"addq %5,%%rdi ; movq %%rax, (%%rdi) ;"
		"int3"
		: :
		"g" (STUB_MMAP_NR),
		"g" (~(UM_KERN_PAGE_SIZE - 1)),
		"g" (MAP_FIXED | MAP_SHARED),
		"g" (UML_STUB_FIELD_FD),
		"g" (UML_STUB_FIELD_OFFSET),
		"g" (UML_STUB_FIELD_CHILD_ERR),
		"S" (UM_KERN_PAGE_SIZE),
		"d" (PROT_READ | PROT_WRITE)
		:
		__syscall_clobber, "r10", "r8", "r9");
}
0111
/*
 * get_stub_page - return the base address of the page containing the
 * current stack pointer.
 *
 * Rounds %rsp down to a UM_KERN_PAGE_SIZE boundary by masking with
 * ~(UM_KERN_PAGE_SIZE - 1). __always_inline matters here: the function
 * must run on the stub's own stack for %rsp to identify the stub page.
 */
static __always_inline void *get_stub_page(void)
{
	unsigned long ret;

	asm volatile (
		"movq %%rsp,%0 ;"
		"andq %1,%0"
		: "=a" (ret)
		: "g" (~(UM_KERN_PAGE_SIZE - 1)));

	return (void *)ret;
}
0124 #endif