#ifndef __ARCH_SPARC64_PERCPU__
#define __ARCH_SPARC64_PERCPU__

#include <linux/compiler.h>

/* The local CPU's per-cpu offset lives in global register %g5. */
register unsigned long __local_per_cpu_offset asm("g5");

#ifdef CONFIG_SMP

#define setup_per_cpu_areas()		do { } while (0)
extern void real_setup_per_cpu_areas(void);

extern unsigned long __per_cpu_base;
extern unsigned long __per_cpu_shift;
#define __per_cpu_offset(__cpu) \
	(__per_cpu_base + ((unsigned long)(__cpu) << __per_cpu_shift))
#define per_cpu_offset(x) (__per_cpu_offset(x))
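
/*
 * Worked example (illustrative only; the shift value below is
 * hypothetical, not necessarily what the kernel picks): each CPU's
 * copy of the per-cpu area is 2^__per_cpu_shift bytes apart, so with
 * __per_cpu_shift == 17 (128KB per copy), CPU 3's data starts at
 *
 *	__per_cpu_base + (3UL << 17) == __per_cpu_base + 0x60000
 */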

/* Separate out the type, so (int[3], foo) works. */
#define DEFINE_PER_CPU(type, name) \
    __attribute__((__section__(".data.percpu"))) __typeof__(type) per_cpu__##name

#define DEFINE_PER_CPU_SHARED_ALIGNED(type, name) \
    __attribute__((__section__(".data.percpu.shared_aligned"))) \
    __typeof__(type) per_cpu__##name \
    ____cacheline_aligned_in_smp
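
/*
 * Usage sketch (the variable name "foo" is made up for illustration):
 *
 *	DEFINE_PER_CPU(int, foo);
 *
 * expands to
 *
 *	__attribute__((__section__(".data.percpu"))) int per_cpu__foo;
 *
 * placing one prototype copy of foo in the .data.percpu section, which
 * setup code later replicates once per CPU.
 */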

/* var is in discarded region: offset to particular copy we want */
#define per_cpu(var, cpu) (*RELOC_HIDE(&per_cpu__##var, __per_cpu_offset(cpu)))
#define __get_cpu_var(var) (*RELOC_HIDE(&per_cpu__##var, __local_per_cpu_offset))
#define __raw_get_cpu_var(var) (*RELOC_HIDE(&per_cpu__##var, __local_per_cpu_offset))
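
/*
 * Access sketch (again with a hypothetical "foo"):
 *
 *	per_cpu(foo, 2) = 1;	   set CPU 2's copy
 *	__get_cpu_var(foo)++;	   bump the local CPU's copy via %g5
 *
 * RELOC_HIDE (pulled in via linux/compiler.h) adds the offset to the
 * address while hiding the arithmetic from the compiler's alias
 * analysis, so the access hits the per-CPU copy, not the prototype.
 */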

/* A macro to avoid #include hell... */
#define percpu_modcopy(pcpudst, src, size)		\
do {							\
	unsigned int __i;				\
	for_each_possible_cpu(__i)			\
		memcpy((pcpudst)+__per_cpu_offset(__i),	\
		       (src), (size));			\
} while (0)
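
/*
 * percpu_modcopy() lets the module loader replicate a module's
 * per-cpu initialization image into every possible CPU's region; it
 * is open-coded here, per the comment above, so this header need not
 * include anything beyond linux/compiler.h.
 */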

#else /* ! SMP */

#define real_setup_per_cpu_areas()		do { } while (0)

#define DEFINE_PER_CPU(type, name) \
    __typeof__(type) per_cpu__##name
#define DEFINE_PER_CPU_SHARED_ALIGNED(type, name)	\
	DEFINE_PER_CPU(type, name)

#define per_cpu(var, cpu)		(*((void)cpu, &per_cpu__##var))
#define __get_cpu_var(var)		per_cpu__##var
#define __raw_get_cpu_var(var)		per_cpu__##var
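
/*
 * On UP there is only one copy of each variable; the "(void)cpu"
 * comma expression still evaluates the cpu argument (so expressions
 * with side effects behave the same as on SMP) but discards it.
 */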

#endif	/* SMP */

#define DECLARE_PER_CPU(type, name) extern __typeof__(type) per_cpu__##name

#define EXPORT_PER_CPU_SYMBOL(var) EXPORT_SYMBOL(per_cpu__##var)
#define EXPORT_PER_CPU_SYMBOL_GPL(var) EXPORT_SYMBOL_GPL(per_cpu__##var)
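
/*
 * Export sketch (hypothetical "foo" again): code that defines
 * DEFINE_PER_CPU(int, foo) in a .c file and wants modules to use
 * per_cpu(foo, cpu) adds
 *
 *	EXPORT_PER_CPU_SYMBOL(foo);
 *
 * which exports the underlying per_cpu__foo symbol.
 */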

#endif /* __ARCH_SPARC64_PERCPU__ */