/*
 * x86: prepare merging futex_32/64.h
 * linux-2.6 — include/asm-x86/futex_64.h
 */
1 #ifndef _ASM_FUTEX_H
2 #define _ASM_FUTEX_H
3
4 #ifdef __KERNEL__
5
6 #include <linux/futex.h>
7
8 #include <asm/asm.h>
9 #include <asm/errno.h>
10 #include <asm/system.h>
11 #include <asm/uaccess.h>
12
/*
 * __futex_atomic_op1 - single-instruction atomic read-modify-write on a
 * user-space futex word.
 *
 * @insn:   one instruction operating on "%0" (oldval) and "%2" (*uaddr),
 *          e.g. xchgl or lock xaddl
 * @ret:    out - 0 on success, -EFAULT if the user access faulted
 * @oldval: out - value produced by @insn (preloaded with @oparg via "0")
 * @uaddr:  user pointer to the futex word
 * @oparg:  operand for the operation
 *
 * A fault at label 1 is redirected through the __ex_table entry (1b,3b)
 * to the .fixup code at label 3, which stores -EFAULT (%3) into ret and
 * resumes after the instruction at label 2.  ret is pre-initialised to 0
 * by the "1" (0) input constraint.
 */
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg) \
  __asm__ __volatile (                                          \
"1:     " insn "\n"                                             \
"2:     .section .fixup,\"ax\"\n\
3:      mov     %3, %1\n\
        jmp     2b\n\
        .previous\n\
        .section __ex_table,\"a\"\n\
        .align  8\n"                                            \
        _ASM_PTR "1b,3b\n                                       \
        .previous"                                              \
        : "=r" (oldval), "=r" (ret), "+m" (*uaddr)              \
        : "i" (-EFAULT), "0" (oparg), "1" (0))
26
/*
 * __futex_atomic_op2 - cmpxchg-loop atomic op for operations that cannot
 * be done in a single instruction (or/and/xor).
 *
 * @insn:   instruction applying @oparg ("%4") to the scratch copy in
 *          "%3"; e.g. "orl %4, %3"
 * @ret:    out - 0 on success, -EFAULT if a user access faulted
 * @oldval: out - value of *uaddr observed by the successful cmpxchg
 * @uaddr:  user pointer to the futex word
 * @oparg:  operand for the operation
 *
 * NOTE: the asm names a fourth output operand `tem` directly, so every
 * caller must have a local variable spelled exactly `tem` in scope.
 *
 * Loop: load *uaddr into eax (label 1), copy to tem, apply @insn, then
 * LOCK cmpxchg tem back into *uaddr (label 2); if *uaddr changed in the
 * meantime (ZF clear) retry from label 1.  Faults at either label 1 or
 * label 2 are redirected via the two __ex_table entries (1b,4b and
 * 2b,4b) to label 4, which stores -EFAULT (%5) into ret and resumes at
 * label 3.  ret is pre-initialised to 0 by the "1" (0) constraint.
 */
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg)     \
  __asm__ __volatile (                                          \
"1:     movl    %2, %0\n\
        movl    %0, %3\n"                                       \
        insn "\n"                                               \
"2:     " LOCK_PREFIX "cmpxchgl %3, %2\n\
        jnz     1b\n\
3:      .section .fixup,\"ax\"\n\
4:      mov     %5, %1\n\
        jmp     3b\n\
        .previous\n\
        .section __ex_table,\"a\"\n\
        .align  8\n"                                            \
        _ASM_PTR "1b,4b,2b,4b\n                                 \
        .previous"                                              \
        : "=&a" (oldval), "=&r" (ret), "+m" (*uaddr),           \
          "=&r" (tem)                                           \
        : "r" (oparg), "i" (-EFAULT), "1" (0))
45
/*
 * futex_atomic_op_inuser - execute an encoded FUTEX_WAKE_OP operation on
 * the user-space futex word at @uaddr, then compare the old value.
 *
 * @encoded_op: packed fields as built by FUTEX_OP(): bits 31-28 op
 *              (bit 31 doubling as the OPARG_SHIFT flag), bits 27-24
 *              cmp, bits 23-12 oparg, bits 11-0 cmparg.
 * @uaddr:      user pointer to the futex word.
 *
 * Returns the boolean result (0/1) of "oldval <cmp> cmparg", or a
 * negative error: -EFAULT for a bad/faulting user address, -ENOSYS for
 * an unknown op or cmp code.
 */
static inline int
futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
{
        int op = (encoded_op >> 28) & 7;
        int cmp = (encoded_op >> 24) & 15;
        /* << then signed >> sign-extends the 12-bit immediate fields */
        int oparg = (encoded_op << 8) >> 20;
        int cmparg = (encoded_op << 20) >> 20;
        /* `tem` is referenced by name inside __futex_atomic_op2 */
        int oldval = 0, ret, tem;
        if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
                oparg = 1 << oparg;

        if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
                return -EFAULT;

        /* Faults must be handled by the asm fixups, not by sleeping. */
        pagefault_disable();

        switch (op) {
        case FUTEX_OP_SET:
                __futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ADD:
                __futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval,
                                   uaddr, oparg);
                break;
        case FUTEX_OP_OR:
                __futex_atomic_op2("orl %4, %3", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ANDN:
                /* ANDN: and with the complement of oparg */
                __futex_atomic_op2("andl %4, %3", ret, oldval, uaddr, ~oparg);
                break;
        case FUTEX_OP_XOR:
                __futex_atomic_op2("xorl %4, %3", ret, oldval, uaddr, oparg);
                break;
        default:
                ret = -ENOSYS;
        }

        pagefault_enable();

        /* Only evaluate the comparison if the operation itself succeeded. */
        if (!ret) {
                switch (cmp) {
                case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
                case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
                case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
                case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
                case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
                case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
                default: ret = -ENOSYS;
                }
        }
        return ret;
}
98
/*
 * futex_atomic_cmpxchg_inatomic - atomically replace *uaddr with @newval
 * if it currently equals @oldval.
 *
 * @uaddr:  user pointer to the futex word
 * @oldval: expected current value
 * @newval: value to store on a successful compare
 *
 * Returns the value that was in *uaddr (== @oldval iff the exchange
 * happened), or -EFAULT if the user access faulted: the .fixup code at
 * label 3 stores -EFAULT (%2) into the return register and resumes at
 * label 2.  NOTE(review): the fault indication is in-band — a futex
 * word legitimately holding -EFAULT is indistinguishable from a fault.
 */
static inline int
futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
{
        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
                return -EFAULT;

        __asm__ __volatile__(
                "1:     " LOCK_PREFIX "cmpxchgl %3, %1          \n"

                "2:     .section .fixup, \"ax\"                 \n"
                "3:     mov     %2, %0                          \n"
                "       jmp     2b                              \n"
                "       .previous                               \n"

                "       .section __ex_table, \"a\"              \n"
                "       .align  8                               \n"
                        _ASM_PTR " 1b,3b                        \n"
                "       .previous                               \n"

                : "=a" (oldval), "+m" (*uaddr)
                : "i" (-EFAULT), "r" (newval), "0" (oldval)
                : "memory"
        );

        return oldval;
}
125
126 #endif
127 #endif