5 #include <linux/stddef.h>
6 #include <linux/types.h>
7 #include <linux/cache.h>
8 #include <linux/threads.h>
10 #include <asm/percpu.h>
/*
 * Per-processor data structure.  %gs points to it while the kernel runs.
 * The unused* members are retired fields kept in place so that the byte
 * offsets noted in the comments below (e.g. stack_canary at 40) do not
 * move.
 */
unsigned long unused1;
unsigned long unused2;
unsigned long unused3;
unsigned long unused4;
unsigned int unused6; /* 36 was cpunumber */
unsigned long stack_canary; /* 40 stack canary value */
/* gcc-ABI: this canary MUST be at offset 40 -- presumably gcc's stack
   protector loads it %gs-relative at that fixed offset; do not move it */
short in_bootmem; /* pda lives in bootmem (boot-time allocation) */
} ____cacheline_aligned_in_smp;
/* The pda is an ordinary per-cpu variable; one instance per CPU. */
DECLARE_PER_CPU(struct x8664_pda, __pda);
/* One-time pda setup for the given CPU number; defined elsewhere. */
extern void pda_init(int);
/* Address of a given CPU's pda -- works for any CPU, not just the current one. */
#define cpu_pda(cpu) (&per_cpu(__pda, cpu))
/*
 * Field accessors for the *current* CPU's pda.  These are thin wrappers
 * around the generic percpu_* operations, so they carry the same
 * preemption-safety semantics as those ops.
 */
#define read_pda(field) percpu_read(__pda.field)
#define write_pda(field, val) percpu_write(__pda.field, val)
#define add_pda(field, val) percpu_add(__pda.field, val)
#define sub_pda(field, val) percpu_sub(__pda.field, val)
#define or_pda(field, val) percpu_or(__pda.field, val)
/*
 * Test-and-clear a bit in a pda field of the current CPU.
 * This is not atomic against other CPUs -- CPU preemption needs to be off
 * so the task cannot migrate between the test and the clear.
 */
#define test_and_clear_bit_pda(bit, field) \
x86_test_and_clear_bit_percpu(bit, __pda.field)
43 #define refresh_stack_canary() write_pda(stack_canary, current->stack_canary)
45 #endif /* _ASM_X86_PDA_H */