 #define _SVC_CALL_CONTEXT_SWITCH	0
 #define _SVC_CALL_IRQ_OFFLOAD		1
 #define _SVC_CALL_RUNTIME_EXCEPT	2
+#define _SVC_CALL_SYSTEM_CALL		3

|
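The new _SVC_CALL_SYSTEM_CALL id is encoded in the `svc` immediate itself, not in a general-purpose register: on AArch64 the immediate of a trapping `svc` is reported to EL1 in the ISS field (bits [15:0]) of ESR_EL1, so the synchronous-exception vector can demultiplex on it without disturbing the argument registers. A minimal sketch of that demux, assuming a hypothetical C-level helper (the demux in Zephyr itself lives in the assembly vector code):

/* Hypothetical demux helper: esr is the ESR_EL1 value saved on entry to
 * the EL1 synchronous-exception vector. All names here are illustrative.
 */
static void svc_demux(uint64_t esr)
{
	uint16_t svid = esr & 0xffff;	/* SVC #imm16 lands in ISS[15:0] */

	switch (svid) {
	case _SVC_CALL_SYSTEM_CALL:
		/* x8 carries the syscall id, x0-x5 the arguments */
		break;
	case _SVC_CALL_CONTEXT_SWITCH:
	case _SVC_CALL_IRQ_OFFLOAD:
	case _SVC_CALL_RUNTIME_EXCEPT:
		/* pre-existing SVC services */
		break;
	}
}
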
 #ifdef CONFIG_USERSPACE
 #ifndef _ASMLANGUAGE

 extern "C" {
 #endif

+/*
+ * Syscall invocation macros. AArch64-specific machine constraints are used
+ * to ensure the arguments land in the proper registers.
+ */
+static inline uintptr_t arch_syscall_invoke6(uintptr_t arg1, uintptr_t arg2,
+					     uintptr_t arg3, uintptr_t arg4,
+					     uintptr_t arg5, uintptr_t arg6,
+					     uintptr_t call_id)
+{
+	register uint64_t ret __asm__("x0") = arg1;
+	register uint64_t r1 __asm__("x1") = arg2;
+	register uint64_t r2 __asm__("x2") = arg3;
+	register uint64_t r3 __asm__("x3") = arg4;
+	register uint64_t r4 __asm__("x4") = arg5;
+	register uint64_t r5 __asm__("x5") = arg6;
+	register uint64_t r8 __asm__("x8") = call_id;
+
+	__asm__ volatile("svc %[svid]\n"
+			 : "=r"(ret)
+			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
+			   "r" (ret), "r" (r1), "r" (r2), "r" (r3),
+			   "r" (r4), "r" (r5), "r" (r8)
+			 : "memory");
+
+	return ret;
+}
+
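Note how `ret` appears both as the output ("=r") and again in the input list: x0 carries arg1 into the kernel and the return value back out, and naming it on both sides keeps the compiler from assuming x0 is dead before the `svc`. The "memory" clobber additionally orders the call against surrounding loads and stores. An equivalent spelling, shown only as a sketch of the same constraint semantics (not what the diff does), would fold the two into a single read-write operand:

	__asm__ volatile("svc %[svid]\n"
			 : "+r" (ret)	/* x0: arg1 in, return value out */
			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
			   "r" (r1), "r" (r2), "r" (r3),
			   "r" (r4), "r" (r5), "r" (r8)
			 : "memory");

The same pattern repeats in each of the smaller-arity variants below.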
+static inline uintptr_t arch_syscall_invoke5(uintptr_t arg1, uintptr_t arg2,
+					     uintptr_t arg3, uintptr_t arg4,
+					     uintptr_t arg5,
+					     uintptr_t call_id)
+{
+	register uint64_t ret __asm__("x0") = arg1;
+	register uint64_t r1 __asm__("x1") = arg2;
+	register uint64_t r2 __asm__("x2") = arg3;
+	register uint64_t r3 __asm__("x3") = arg4;
+	register uint64_t r4 __asm__("x4") = arg5;
+	register uint64_t r8 __asm__("x8") = call_id;
+
+	__asm__ volatile("svc %[svid]\n"
+			 : "=r"(ret)
+			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
+			   "r" (ret), "r" (r1), "r" (r2), "r" (r3),
+			   "r" (r4), "r" (r8)
+			 : "memory");
+
+	return ret;
+}
+
+static inline uintptr_t arch_syscall_invoke4(uintptr_t arg1, uintptr_t arg2,
+					     uintptr_t arg3, uintptr_t arg4,
+					     uintptr_t call_id)
+{
+	register uint64_t ret __asm__("x0") = arg1;
+	register uint64_t r1 __asm__("x1") = arg2;
+	register uint64_t r2 __asm__("x2") = arg3;
+	register uint64_t r3 __asm__("x3") = arg4;
+	register uint64_t r8 __asm__("x8") = call_id;
+
+	__asm__ volatile("svc %[svid]\n"
+			 : "=r"(ret)
+			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
+			   "r" (ret), "r" (r1), "r" (r2), "r" (r3),
+			   "r" (r8)
+			 : "memory");
+
+	return ret;
+}
+
+static inline uintptr_t arch_syscall_invoke3(uintptr_t arg1, uintptr_t arg2,
+					     uintptr_t arg3,
+					     uintptr_t call_id)
+{
+	register uint64_t ret __asm__("x0") = arg1;
+	register uint64_t r1 __asm__("x1") = arg2;
+	register uint64_t r2 __asm__("x2") = arg3;
+	register uint64_t r8 __asm__("x8") = call_id;
+
+	__asm__ volatile("svc %[svid]\n"
+			 : "=r"(ret)
+			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
+			   "r" (ret), "r" (r1), "r" (r2), "r" (r8)
+			 : "memory");
+
+	return ret;
+}
+
+static inline uintptr_t arch_syscall_invoke2(uintptr_t arg1, uintptr_t arg2,
+					     uintptr_t call_id)
+{
+	register uint64_t ret __asm__("x0") = arg1;
+	register uint64_t r1 __asm__("x1") = arg2;
+	register uint64_t r8 __asm__("x8") = call_id;
+
+	__asm__ volatile("svc %[svid]\n"
+			 : "=r"(ret)
+			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
+			   "r" (ret), "r" (r1), "r" (r8)
+			 : "memory");
+
+	return ret;
+}
+
+static inline uintptr_t arch_syscall_invoke1(uintptr_t arg1,
+					     uintptr_t call_id)
+{
+	register uint64_t ret __asm__("x0") = arg1;
+	register uint64_t r8 __asm__("x8") = call_id;
+
+	__asm__ volatile("svc %[svid]\n"
+			 : "=r"(ret)
+			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
+			   "r" (ret), "r" (r8)
+			 : "memory");
+
+	return ret;
+}
+
+static inline uintptr_t arch_syscall_invoke0(uintptr_t call_id)
+{
+	register uint64_t ret __asm__("x0");
+	register uint64_t r8 __asm__("x8") = call_id;
+
+	__asm__ volatile("svc %[svid]\n"
+			 : "=r"(ret)
+			 : [svid] "i" (_SVC_CALL_SYSTEM_CALL),
+			   "r" (ret), "r" (r8)
+			 : "memory");
+
+	return ret;
+}
+
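Taken together, these are the primitives a user-mode syscall stub would call, with x8 carrying the syscall id and x0-x5 the arguments. A hypothetical two-argument wrapper, purely for illustration (K_SYSCALL_FOO, the name, and the signature are invented here; real stubs are emitted by Zephyr's syscall generation machinery):

/* Hypothetical user-mode stub for an imagined foo_read() syscall. */
static inline int foo_read(uint32_t chan, void *buf)
{
	return (int)arch_syscall_invoke2((uintptr_t)chan, (uintptr_t)buf,
					 K_SYSCALL_FOO);
}
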
 static inline bool arch_is_user_context(void)
 {
 	uint64_t tpidrro_el0;
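 	/* tpidrro_el0 can be read from EL0 without trapping but written
 	 * only from EL1, so the kernel is expected to maintain an
 	 * "executing at EL0" flag in it, keeping this check cheap.
 	 */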