1 #ifndef _ASM_GENERIC_PERCPU_H_
2 #define _ASM_GENERIC_PERCPU_H_
3 #include <linux/compiler.h>
4 #include <linux/threads.h>
/*
 * Advertise that this architecture uses the generic per-cpu
 * implementation (arch headers may test for this symbol).
 */
#define __GENERIC_PER_CPU
#ifdef CONFIG_SMP

/*
 * Each CPU's copy of the per-cpu data lives at a fixed offset from the
 * prototype section; the bootstrap code fills in this table.
 */
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])

/* Separate out the type, so (int[3], foo) works. */
#define DEFINE_PER_CPU(type, name) \
    __attribute__((__section__(".data.percpu"))) __typeof__(type) per_cpu__##name

#define DEFINE_PER_CPU_SHARED_ALIGNED(type, name) \
    __attribute__((__section__(".data.percpu.shared_aligned"))) \
    __typeof__(type) per_cpu__##name \
    ____cacheline_aligned_in_smp

/*
 * var is in discarded region: offset to particular copy we want.
 * The dummy extern declaration forces 'var' to be a simple identifier,
 * so per_cpu(foo[i], cpu) and similar misuse fail to compile.
 */
#define per_cpu(var, cpu) (*({ \
	extern int simple_identifier_##var(void); \
	RELOC_HIDE(&per_cpu__##var, __per_cpu_offset[cpu]); }))
#define __get_cpu_var(var) per_cpu(var, smp_processor_id())
#define __raw_get_cpu_var(var) per_cpu(var, raw_smp_processor_id())

/* A macro to avoid #include hell...
 * Copies 'size' bytes from 'src' into every possible CPU's copy of the
 * destination; wrapped in do/while(0) so it behaves as one statement.
 */
#define percpu_modcopy(pcpudst, src, size)			\
do {								\
	unsigned int __i;					\
	for_each_possible_cpu(__i)				\
		memcpy((pcpudst)+__per_cpu_offset[__i],		\
		       (src), (size));				\
} while (0)

#else /* ! SMP */

/* Uniprocessor: a single copy, no special section and no offsets. */
#define DEFINE_PER_CPU(type, name) \
    __typeof__(type) per_cpu__##name

#define DEFINE_PER_CPU_SHARED_ALIGNED(type, name) \
    DEFINE_PER_CPU(type, name)

/* (void)(cpu) evaluates 'cpu' for side effects but otherwise ignores it. */
#define per_cpu(var, cpu) (*((void)(cpu), &per_cpu__##var))
#define __get_cpu_var(var) per_cpu__##var
#define __raw_get_cpu_var(var) per_cpu__##var

#endif	/* SMP */
/*
 * Declare (without defining) a per-cpu variable; exactly one translation
 * unit must provide the matching DEFINE_PER_CPU.
 */
#define DECLARE_PER_CPU(type, name) extern __typeof__(type) per_cpu__##name

/* Export the mangled per_cpu__ symbol so modules can reference it. */
#define EXPORT_PER_CPU_SYMBOL(var) EXPORT_SYMBOL(per_cpu__##var)
#define EXPORT_PER_CPU_SYMBOL_GPL(var) EXPORT_SYMBOL_GPL(per_cpu__##var)
56 #endif /* _ASM_GENERIC_PERCPU_H_ */