include/asm-generic/cmpxchg-local.h
#ifndef __ASM_GENERIC_CMPXCHG_LOCAL_H
#define __ASM_GENERIC_CMPXCHG_LOCAL_H

#include <linux/types.h>
#include <linux/irqflags.h>	/* local_irq_save()/local_irq_restore() */

extern unsigned long wrong_size_cmpxchg(volatile void *ptr);
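
/*
 * Note: wrong_size_cmpxchg() is intentionally never defined anywhere.
 * A call site that is not eliminated as dead code (i.e. one with an
 * unsupported operand size) therefore fails at link time rather than
 * silently misbehaving at run time.
 */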

/*
 * Generic version of __cmpxchg_local (disables interrupts). Takes an
 * unsigned long parameter so a single helper covers every operand size
 * an architecture supports.
 */
static inline unsigned long __cmpxchg_local_generic(volatile void *ptr,
		unsigned long old, unsigned long new, int size)
{
	unsigned long flags, prev;

	/*
	 * Compile-time sanity check: a 64-bit cmpxchg is only supported
	 * here when unsigned long itself is 64 bits wide. Otherwise the
	 * call below survives constant folding and fails at link time.
	 */
	if (size == 8 && sizeof(unsigned long) != 8)
		wrong_size_cmpxchg(ptr);

	local_irq_save(flags);
	switch (size) {
	case 1: prev = *(u8 *)ptr;
		if (prev == old)
			*(u8 *)ptr = (u8)new;
		break;
	case 2: prev = *(u16 *)ptr;
		if (prev == old)
			*(u16 *)ptr = (u16)new;
		break;
	case 4: prev = *(u32 *)ptr;
		if (prev == old)
			*(u32 *)ptr = (u32)new;
		break;
	case 8: prev = *(u64 *)ptr;
		if (prev == old)
			*(u64 *)ptr = (u64)new;
		break;
	default:
		wrong_size_cmpxchg(ptr);
	}
	local_irq_restore(flags);
	return prev;
}
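
/*
 * Example (hypothetical caller, not part of this header): an architecture
 * without a native cmpxchg instruction would typically wrap this helper
 * in a type-preserving macro along these lines:
 *
 *	#define cmpxchg_local(ptr, o, n)				      \
 *		((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr),	      \
 *				(unsigned long)(o), (unsigned long)(n),       \
 *				sizeof(*(ptr))))
 */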

/*
 * Generic version of __cmpxchg64_local. Takes a u64 parameter.
 */
static inline u64 __cmpxchg64_local_generic(volatile void *ptr,
		u64 old, u64 new)
{
	u64 prev;
	unsigned long flags;

	local_irq_save(flags);
	prev = *(u64 *)ptr;
	if (prev == old)
		*(u64 *)ptr = new;
	local_irq_restore(flags);
	return prev;
}
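
/*
 * Example (hypothetical): an architecture can map its 64-bit local
 * cmpxchg straight onto this helper, e.g.:
 *
 *	#define cmpxchg64_local(ptr, o, n) \
 *		__cmpxchg64_local_generic((ptr), (o), (n))
 */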

#endif /* __ASM_GENERIC_CMPXCHG_LOCAL_H */