/*
 * x86 linkage helpers: calling-convention attributes and assembler
 * ENTRY/END bookkeeping macros.
 */
#ifndef _ASM_X86_LINKAGE_H
#define _ASM_X86_LINKAGE_H

/* Keep mcount/profiling instrumentation out of functions marked notrace. */
#undef notrace
#define notrace __attribute__((no_instrument_function))

#ifdef CONFIG_X86_64
/* Align entry points to 16 bytes, padding with at most 15 bytes. */
#define __ALIGN .p2align 4,,15
#define __ALIGN_STR ".p2align 4,,15"
#endif

#ifdef CONFIG_X86_32
/* On 32-bit, asmlinkage forces all arguments onto the stack (no regparm). */
#define asmlinkage CPP_ASMLINKAGE __attribute__((regparm(0)))
/*
 * For 32-bit UML - mark functions implemented in assembly that use
 * regparm input parameters:
 */
#define asmregparm __attribute__((regparm(3)))

/*
 * Make sure the compiler doesn't do anything stupid with the
 * arguments on the stack - they are owned by the *caller*, not
 * the callee. This just fools gcc into not spilling into them,
 * and keeps it from doing tailcall recursion and/or using the
 * stack slots for temporaries, since they are live and "used"
 * all the way to the end of the function.
 *
 * NOTE! On x86-64, all the arguments are in registers, so this
 * only matters on a 32-bit kernel.
 */
#define asmlinkage_protect(n, ret, args...) \
	__asmlinkage_protect##n(ret, ##args)
/*
 * "0" (ret) ties the output register back to its own input so gcc
 * treats ret as live; the "g" inputs mark each stack argument as used.
 */
#define __asmlinkage_protect_n(ret, args...) \
	__asm__ __volatile__ ("" : "=r" (ret) : "0" (ret), ##args)
#define __asmlinkage_protect0(ret) \
	__asmlinkage_protect_n(ret)
#define __asmlinkage_protect1(ret, arg1) \
	__asmlinkage_protect_n(ret, "g" (arg1))
#define __asmlinkage_protect2(ret, arg1, arg2) \
	__asmlinkage_protect_n(ret, "g" (arg1), "g" (arg2))
#define __asmlinkage_protect3(ret, arg1, arg2, arg3) \
	__asmlinkage_protect_n(ret, "g" (arg1), "g" (arg2), "g" (arg3))
#define __asmlinkage_protect4(ret, arg1, arg2, arg3, arg4) \
	__asmlinkage_protect_n(ret, "g" (arg1), "g" (arg2), "g" (arg3), \
			       "g" (arg4))
#define __asmlinkage_protect5(ret, arg1, arg2, arg3, arg4, arg5) \
	__asmlinkage_protect_n(ret, "g" (arg1), "g" (arg2), "g" (arg3), \
			       "g" (arg4), "g" (arg5))
#define __asmlinkage_protect6(ret, arg1, arg2, arg3, arg4, arg5, arg6) \
	__asmlinkage_protect_n(ret, "g" (arg1), "g" (arg2), "g" (arg3), \
			       "g" (arg4), "g" (arg5), "g" (arg6))

#endif /* CONFIG_X86_32 */

#ifdef CONFIG_X86_ALIGNMENT_16
/* Align to 16 bytes, padding with 0x90 (nop) bytes. */
#define __ALIGN .align 16,0x90
#define __ALIGN_STR ".align 16,0x90"
#endif

/*
 * Assembly-time bookkeeping to detect ENTRY_X86/END_X86 and
 * KPROBE_ENTRY_X86/KPROBE_END_X86 pairs that are unbalanced,
 * missed, or mixed.  The *_IN symbols hold 0 while inside an
 * open ENTRY/KPROBE region and 1 once it is properly closed.
 */
#define __set_entry_x86		.set ENTRY_X86_IN, 0
#define __unset_entry_x86	.set ENTRY_X86_IN, 1
#define __set_kprobe_x86	.set KPROBE_X86_IN, 0
#define __unset_kprobe_x86	.set KPROBE_X86_IN, 1

#define __macro_err_x86 .error "ENTRY_X86/KPROBE_X86 unbalanced,missed,mixed"

/* Abort assembly if an ENTRY_X86 region is still open (value 0). */
#define __check_entry_x86	\
	.ifdef ENTRY_X86_IN;	\
	.ifeq ENTRY_X86_IN;	\
	__macro_err_x86;	\
	.abort;			\
	.endif;			\
	.endif

/* Abort assembly if a KPROBE_ENTRY_X86 region is still open (value 0). */
#define __check_kprobe_x86	\
	.ifdef KPROBE_X86_IN;	\
	.ifeq KPROBE_X86_IN;	\
	__macro_err_x86;	\
	.abort;			\
	.endif;			\
	.endif

#define __check_entry_kprobe_x86	\
	__check_entry_x86;		\
	__check_kprobe_x86

/* Place at end of file to catch a region left open at EOF. */
#define ENTRY_KPROBE_FINAL_X86 __check_entry_kprobe_x86

/* Open a global, aligned assembly entry point. */
#define ENTRY_X86(name)			\
	__check_entry_kprobe_x86;	\
	__set_entry_x86;		\
	.globl name;			\
	__ALIGN;			\
	name:

/* Close an ENTRY_X86 region and record the symbol's size. */
#define END_X86(name)			\
	__unset_entry_x86;		\
	__check_entry_kprobe_x86;	\
	.size name, .-name

/* Like ENTRY_X86, but emitted into the .kprobes.text section. */
#define KPROBE_ENTRY_X86(name)		\
	__check_entry_kprobe_x86;	\
	__set_kprobe_x86;		\
	.pushsection .kprobes.text, "ax"; \
	.globl name;			\
	__ALIGN;			\
	name:

/* Close a KPROBE_ENTRY_X86 region and return to the previous section. */
#define KPROBE_END_X86(name)		\
	__unset_kprobe_x86;		\
	__check_entry_kprobe_x86;	\
	.size name, .-name;		\
	.popsection

#endif /* _ASM_X86_LINKAGE_H */