#ifndef __ASM_SH_CMPXCHG_LLSC_H
#define __ASM_SH_CMPXCHG_LLSC_H

#include <linux/bitops.h>
#include <asm/byteorder.h>
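
/*
 * Atomically exchange the 32-bit word at *m with val using the SH-4A
 * LL/SC pair: movli.l tags the address and loads it, movco.l stores
 * conditionally and clears the T bit if the reservation was lost, in
 * which case bf loops back and the sequence retries. synco orders the
 * update against subsequent memory accesses.
 */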
static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
{
	unsigned long retval;
	unsigned long tmp;

	__asm__ __volatile__ (
		"1:					\n\t"
		"movli.l	@%2, %0	! xchg_u32	\n\t"
		"mov		%0, %1			\n\t"
		"mov		%3, %0			\n\t"
		"movco.l	%0, @%2			\n\t"
		"bf		1b			\n\t"
		"synco					\n\t"
		: "=&z"(tmp), "=&r" (retval)
		: "r" (m), "r" (val)
		: "t", "memory"
	);

	return retval;
}
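
/*
 * Compare-and-swap on a 32-bit word: load *m under reservation, skip
 * the update when the loaded value does not equal old (the unmodified
 * value is written back to keep the store-conditional sequence intact),
 * otherwise store new. The value observed before the store is returned,
 * so callers can test for success with retval == old.
 */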
static inline unsigned long
__cmpxchg_u32(volatile u32 *m, unsigned long old, unsigned long new)
{
	unsigned long retval;
	unsigned long tmp;

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%2, %0	! __cmpxchg_u32		\n\t"
		"mov		%0, %1				\n\t"
		"cmp/eq		%1, %3				\n\t"
		"bf		2f				\n\t"
		"mov		%4, %0				\n\t"
		"2:						\n\t"
		"movco.l	%0, @%2				\n\t"
		"bf		1b				\n\t"
		"synco						\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (m), "r" (old), "r" (new)
		: "t", "memory"
	);

	return retval;
}
static inline u32 __xchg_cmpxchg(volatile void *ptr, u32 x, int size)
{
	int off = (unsigned long)ptr % sizeof(u32);
	volatile u32 *p = ptr - off;
#ifdef __BIG_ENDIAN
	int bitoff = (sizeof(u32) - 1 - off) * BITS_PER_BYTE;
#else
	int bitoff = off * BITS_PER_BYTE;
#endif
	u32 bitmask = ((0x1 << size * BITS_PER_BYTE) - 1) << bitoff;
	u32 oldv, newv;
	u32 ret;

	do {
		oldv = *p;
		ret = (oldv & bitmask) >> bitoff;
		newv = (oldv & ~bitmask) | (x << bitoff);
	} while (__cmpxchg_u32(p, oldv, newv) != oldv);

	return ret;
}
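
/*
 * 16- and 8-bit xchg have no movli.l/movco.l form, so they go through
 * the word-sized emulation above. Continuing the example (hypothetical
 * values): xchg_u8() on byte 1 of a little-endian word holding
 * 0x44332211, with val == 0xab, returns 0x22 and installs 0x4433ab11.
 */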
static inline unsigned long xchg_u16(volatile u16 *m, unsigned long val)
{
	return __xchg_cmpxchg(m, val, sizeof *m);
}

static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
	return __xchg_cmpxchg(m, val, sizeof *m);
}

#endif /* __ASM_SH_CMPXCHG_LLSC_H */