#ifndef _PARISC_BYTEORDER_H
#define _PARISC_BYTEORDER_H

#include <asm/types.h>
#include <linux/compiler.h>

#ifdef __GNUC__
static __inline__ __attribute_const__ __u16 ___arch__swab16(__u16 x)
{
	__asm__("dep %0, 15, 8, %0\n\t"		/* deposit 00ab -> 0bab */
		"shd %%r0, %0, 8, %0"		/* shift 000000ab -> 00ba */
		: "=r" (x)
		: "0" (x));
	return x;
}
static __inline__ __attribute_const__ __u32 ___arch__swab24(__u32 x)
{
	__asm__("shd %0, %0, 8, %0\n\t"		/* shift xabcxabc -> cxab */
		"dep %0, 15, 8, %0\n\t"		/* deposit cxab -> cbab */
		"shd %%r0, %0, 8, %0"		/* shift 0000cbab -> 0cba */
		: "=r" (x)
		: "0" (x));
	return x;
}
static __inline__ __attribute_const__ __u32 ___arch__swab32(__u32 x)
{
	unsigned int temp;
	__asm__("shd %0, %0, 16, %1\n\t"	/* shift abcdabcd -> cdab */
		"dep %1, 15, 8, %1\n\t"		/* deposit cdab -> cbab */
		"shd %0, %1, 8, %0"		/* shift abcdcbab -> dcba */
		: "=r" (x), "=&r" (temp)
		: "0" (x));
	return x;
}
#if BITS_PER_LONG > 32
/*
** From "PA-RISC 2.0 Architecture", HP Professional Books.
** See Appendix I page 8, "Endian Byte Swapping".
**
** Pretty cool algorithm: (* == zero'd bits)
**	PERMH	01234567 -> 67452301 into %0
**	HSHL	67452301 -> 7*5*3*1* into %1
**	HSHR	67452301 -> *6*4*2*0 into %0
**	OR	%0 | %1  -> 76543210 into %0 (all done!)
*/
static __inline__ __attribute_const__ __u64 ___arch__swab64(__u64 x) {
	__u64 temp;
	__asm__("permh,3210 %0, %0\n\t"
		"hshl %0, 8, %1\n\t"
		"hshr,u %0, 8, %0\n\t"
		"or %1, %0, %0"
		: "=r" (x), "=&r" (temp)
		: "0" (x));
	return x;
}
#define __arch__swab64(x) ___arch__swab64(x)
#define __BYTEORDER_HAS_U64__
#elif !defined(__STRICT_ANSI__)
static __inline__ __attribute_const__ __u64 ___arch__swab64(__u64 x)
{
	__u32 t1 = ___arch__swab32((__u32) x);
	__u32 t2 = ___arch__swab32((__u32) (x >> 32));
	return (((__u64) t1 << 32) | t2);
}
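/*
 * Worked example (added illustration): the two 32-bit halves are each
 * byte-swapped and then exchanged, e.g. for x = 0x1122334455667788:
 * t1 = 0x88776655, t2 = 0x44332211, result = 0x8877665544332211.
 */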
#define __arch__swab64(x) ___arch__swab64(x)
#define __BYTEORDER_HAS_U64__
#endif
#define __arch__swab16(x) ___arch__swab16(x)
#define __arch__swab24(x) ___arch__swab24(x)
#define __arch__swab32(x) ___arch__swab32(x)

#endif /* __GNUC__ */
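/*
 * Note (added for context): on this big-endian architecture, the generic
 * header below is expected to build the usual cpu_to_le*() and
 * le*_to_cpu() conversion macros on top of the __arch__swab* helpers
 * defined above, while the big-endian conversions remain no-ops.
 */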
#include <linux/byteorder/big_endian.h>
#endif /* _PARISC_BYTEORDER_H */