/* include/asm-x86/cmpxchg_64.h */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <asm/alternative.h> /* Provides LOCK_PREFIX */

#define xchg(ptr, v) \
	((__typeof__(*(ptr)))__xchg((unsigned long)(v), (ptr), sizeof(*(ptr))))

#define __xg(x) ((volatile long *)(x))

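/*
 * On x86-64 an aligned 64-bit store is atomic by itself, so a plain
 * assignment suffices here; no lock prefix or cmpxchg loop is needed.
 */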
static inline void set_64bit(volatile unsigned long *ptr, unsigned long val)
{
	*ptr = val;
}

#define _set_64bit set_64bit

/*
 * Note: no "lock" prefix even on SMP: the xchg instruction always
 * asserts LOCK implicitly.
 * Note 2: xchg has a side effect, so the volatile qualifier is
 *	   necessary.  Strictly speaking the constraints are imperfect:
 *	   *ptr is really an output operand, which the "memory" clobber
 *	   papers over. --ANK
 */
static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
				   int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__("xchgb %b0,%1"
			: "=q" (x)
			: "m" (*__xg(ptr)), "0" (x)
			: "memory");
		break;
	case 2:
		__asm__ __volatile__("xchgw %w0,%1"
			: "=r" (x)
			: "m" (*__xg(ptr)), "0" (x)
			: "memory");
		break;
	case 4:
		__asm__ __volatile__("xchgl %k0,%1"
			: "=r" (x)
			: "m" (*__xg(ptr)), "0" (x)
			: "memory");
		break;
	case 8:
		__asm__ __volatile__("xchgq %0,%1"
			: "=r" (x)
			: "m" (*__xg(ptr)), "0" (x)
			: "memory");
		break;
	}
	return x;
}

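/*
 * Usage sketch (hypothetical caller, not part of this header): claim a
 * one-shot flag and observe its previous value in one atomic step.
 *
 *	static unsigned long claimed;
 *
 *	if (xchg(&claimed, 1) == 0) {
 *		... this CPU got here first ...
 *	}
 */
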
/*
 * Atomic compare and exchange.  Compare OLD with MEM; if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */

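/* Advertise to generic code that this architecture provides cmpxchg. */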
#define __HAVE_ARCH_CMPXCHG 1

static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long prev;
	switch (size) {
	case 1:
		__asm__ __volatile__(LOCK_PREFIX "cmpxchgb %b1,%2"
				     : "=a"(prev)
				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 2:
		__asm__ __volatile__(LOCK_PREFIX "cmpxchgw %w1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 4:
		__asm__ __volatile__(LOCK_PREFIX "cmpxchgl %k1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 8:
		__asm__ __volatile__(LOCK_PREFIX "cmpxchgq %1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	}
	return old;
}

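/*
 * Identical to __cmpxchg() but without the LOCK prefix: atomic only
 * with respect to the current CPU, so it is suitable for data that no
 * other CPU touches (e.g. per-CPU variables) and avoids the cost of a
 * locked bus cycle.
 */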
static inline unsigned long __cmpxchg_local(volatile void *ptr,
			unsigned long old, unsigned long new, int size)
{
	unsigned long prev;
	switch (size) {
	case 1:
		__asm__ __volatile__("cmpxchgb %b1,%2"
				     : "=a"(prev)
				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 2:
		__asm__ __volatile__("cmpxchgw %w1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 4:
		__asm__ __volatile__("cmpxchgl %k1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 8:
		__asm__ __volatile__("cmpxchgq %1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	}
	return old;
}

#define cmpxchg(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o), \
				       (unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg_local(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o), \
					     (unsigned long)(n), sizeof(*(ptr))))
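
/*
 * Usage sketch (hypothetical caller, not part of this header): a
 * lock-free increment built on cmpxchg().  Re-read the old value and
 * retry until no other CPU raced with the update.
 *
 *	static unsigned long counter;
 *	unsigned long old, new;
 *
 *	do {
 *		old = counter;
 *		new = old + 1;
 *	} while (cmpxchg(&counter, old, new) != old);
 */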

#endif /* __ASM_CMPXCHG_H */