/*
 * include/asm-blackfin/cache.h
 */
#ifndef __ARCH_BLACKFIN_CACHE_H
#define __ARCH_BLACKFIN_CACHE_H

/*
 * Bytes per L1 cache line
 * Blackfin cache line fills load 32 bytes
 */
#define L1_CACHE_SHIFT	5
#define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)
#define SMP_CACHE_BYTES	L1_CACHE_BYTES

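/*
 * Illustrative example (not part of the original header): with
 * L1_CACHE_SHIFT = 5, L1_CACHE_BYTES evaluates to 32.  A buffer that is
 * flushed or invalidated around DMA is therefore typically aligned and
 * sized in multiples of the line size, e.g.
 *
 *	static unsigned char dma_buf[128] __attribute__((__aligned__(L1_CACHE_BYTES)));
 *
 * so it never shares a cache line with unrelated data ('dma_buf' is a
 * hypothetical name used only for this sketch).
 */
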
#ifdef CONFIG_SMP
#define __cacheline_aligned
#else
#define ____cacheline_aligned

/*
 * Put cacheline_aligned data into L1 data memory
 */
#ifdef CONFIG_CACHELINE_ALIGNED_L1
#define __cacheline_aligned				\
	  __attribute__((__aligned__(L1_CACHE_BYTES),	\
		__section__(".data_l1.cacheline_aligned")))
#endif

#endif

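/*
 * Illustrative example (not part of the original header): with
 * CONFIG_CACHELINE_ALIGNED_L1 enabled on a non-SMP build, a declaration
 * such as
 *
 *	static struct foo_stats stats __cacheline_aligned;
 *
 * is aligned to L1_CACHE_BYTES and emitted into the
 * .data_l1.cacheline_aligned section, i.e. placed in on-chip L1 data
 * memory ('struct foo_stats' and 'stats' are hypothetical names).
 */
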
/*
 * largest L1 cache line shift which this arch supports
 */
#define L1_CACHE_SHIFT_MAX	5

#if defined(CONFIG_SMP) && \
    !defined(CONFIG_BFIN_CACHE_COHERENT) && \
    defined(CONFIG_BFIN_DCACHE)
#define __ARCH_SYNC_CORE_DCACHE
#ifndef __ASSEMBLY__
/* Assembly helpers (and their C wrappers below) used to keep the per-core
 * L1 data caches in sync when the cores are not cache coherent. */
asmlinkage void __raw_smp_mark_barrier_asm(void);
asmlinkage void __raw_smp_check_barrier_asm(void);

static inline void smp_mark_barrier(void)
{
	__raw_smp_mark_barrier_asm();
}
static inline void smp_check_barrier(void)
{
	__raw_smp_check_barrier_asm();
}

void resync_core_dcache(void);
#endif
#endif

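/*
 * Illustrative sketch (not part of the original header): on this
 * non-cache-coherent SMP configuration the mark/check pair is expected
 * to back the SMP memory barriers, roughly:
 *
 *	writer core:	shared_data = value;
 *			smp_mark_barrier();	-- record that data was written
 *
 *	reader core:	smp_check_barrier();	-- drop stale dcache lines
 *			value = shared_data;
 *
 * so a core reading data produced by another core first brings its own
 * L1 data cache back in sync ('shared_data' and 'value' are hypothetical
 * names used only for this sketch).
 */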

#endif