#ifndef _ASM_X86_LINKAGE_H
#define _ASM_X86_LINKAGE_H

#undef notrace
#define notrace __attribute__((no_instrument_function))

#ifdef CONFIG_X86_64
#define __ALIGN .p2align 4,,15
#define __ALIGN_STR ".p2align 4,,15"
#endif

#ifdef CONFIG_X86_32
#define asmlinkage CPP_ASMLINKAGE __attribute__((regparm(0)))
/*
 * For 32-bit UML - mark functions implemented in assembly that use
 * regparm input parameters:
 */
#define asmregparm __attribute__((regparm(3)))

/*
 * Make sure the compiler doesn't do anything stupid with the
 * arguments on the stack - they are owned by the *caller*, not
 * the callee. This just fools gcc into not spilling into them,
 * and keeps it from doing tailcall recursion and/or using the
 * stack slots for temporaries, since they are live and "used"
 * all the way to the end of the function.
 *
 * NOTE! On x86-64, all the arguments are in registers, so this
 * only matters on a 32-bit kernel.
 *
 * See the usage sketch after the __asmlinkage_protect macros below.
 */
#define asmlinkage_protect(n, ret, args...) \
	__asmlinkage_protect##n(ret, ##args)
#define __asmlinkage_protect_n(ret, args...) \
	__asm__ __volatile__ ("" : "=r" (ret) : "0" (ret), ##args)
#define __asmlinkage_protect0(ret) \
	__asmlinkage_protect_n(ret)
#define __asmlinkage_protect1(ret, arg1) \
	__asmlinkage_protect_n(ret, "g" (arg1))
#define __asmlinkage_protect2(ret, arg1, arg2) \
	__asmlinkage_protect_n(ret, "g" (arg1), "g" (arg2))
#define __asmlinkage_protect3(ret, arg1, arg2, arg3) \
	__asmlinkage_protect_n(ret, "g" (arg1), "g" (arg2), "g" (arg3))
#define __asmlinkage_protect4(ret, arg1, arg2, arg3, arg4) \
	__asmlinkage_protect_n(ret, "g" (arg1), "g" (arg2), "g" (arg3), \
			       "g" (arg4))
#define __asmlinkage_protect5(ret, arg1, arg2, arg3, arg4, arg5) \
	__asmlinkage_protect_n(ret, "g" (arg1), "g" (arg2), "g" (arg3), \
			       "g" (arg4), "g" (arg5))
#define __asmlinkage_protect6(ret, arg1, arg2, arg3, arg4, arg5, arg6) \
	__asmlinkage_protect_n(ret, "g" (arg1), "g" (arg2), "g" (arg3), \
			       "g" (arg4), "g" (arg5), "g" (arg6))
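
/*
 * Illustrative sketch (not part of the original header): how a 32-bit
 * asmlinkage function would keep its stack arguments live with
 * asmlinkage_protect().  The function and helper names below are
 * hypothetical.
 *
 *	asmlinkage long sys_example(unsigned int fd, unsigned long len)
 *	{
 *		long ret = do_example(fd, len);
 *
 *		asmlinkage_protect(2, ret, fd, len);
 *		return ret;
 *	}
 */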

#endif /* CONFIG_X86_32 */

#ifdef CONFIG_X86_ALIGNMENT_16
#define __ALIGN .align 16,0x90
#define __ALIGN_STR ".align 16,0x90"
#endif

/*
 * The macros below check that ENTRY_X86/END_X86 and
 * KPROBE_ENTRY_X86/KPROBE_END_X86 pairs are not left unbalanced,
 * missed, or mixed; see the usage sketch at the end of this file.
 */
#define __set_entry_x86		.set ENTRY_X86_IN, 0
#define __unset_entry_x86	.set ENTRY_X86_IN, 1
#define __set_kprobe_x86	.set KPROBE_X86_IN, 0
#define __unset_kprobe_x86	.set KPROBE_X86_IN, 1
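
/*
 * Note added for clarity: each *_IN symbol acts as a balance flag.  It is
 * set to 0 when an ENTRY/KPROBE block is opened and to 1 when it is
 * closed, so the checks below (.ifdef plus .ifeq) trigger __macro_err_x86
 * if a still-open block is encountered.
 */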

#define __macro_err_x86 .error "ENTRY_X86/KPROBE_X86 unbalanced,missed,mixed"

#define __check_entry_x86	\
	.ifdef ENTRY_X86_IN;	\
	.ifeq ENTRY_X86_IN;	\
	__macro_err_x86;	\
	.abort;			\
	.endif;			\
	.endif

#define __check_kprobe_x86	\
	.ifdef KPROBE_X86_IN;	\
	.ifeq KPROBE_X86_IN;	\
	__macro_err_x86;	\
	.abort;			\
	.endif;			\
	.endif

#define __check_entry_kprobe_x86	\
	__check_entry_x86;		\
	__check_kprobe_x86

#define ENTRY_KPROBE_FINAL_X86 __check_entry_kprobe_x86

#define ENTRY_X86(name)			\
	__check_entry_kprobe_x86;	\
	.globl name;			\
	__ALIGN;			\
	name:;				\
	__set_entry_x86

#define END_X86(name)			\
	__unset_entry_x86;		\
	__check_entry_kprobe_x86;	\
	.size name, .-name

#define KPROBE_ENTRY_X86(name)		\
	__check_entry_kprobe_x86;	\
	.pushsection .kprobes.text, "ax"; \
	.globl name;			\
	__ALIGN;			\
	name:;				\
	__set_kprobe_x86

#define KPROBE_END_X86(name)		\
	__unset_kprobe_x86;		\
	__check_entry_kprobe_x86;	\
	.size name, .-name;		\
	.popsection
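
/*
 * Illustrative sketch (not part of the original header): intended use of
 * the balance-checked macros from a .S file.  The function names are
 * hypothetical.
 *
 *	ENTRY_X86(some_func)
 *		ret
 *	END_X86(some_func)
 *
 *	KPROBE_ENTRY_X86(some_probed_func)
 *		ret
 *	KPROBE_END_X86(some_probed_func)
 *
 *	ENTRY_KPROBE_FINAL_X86	# optional final balance check for the file
 */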

#endif /* _ASM_X86_LINKAGE_H */