/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_CMPXCHG_XCHG_H
#define __ASM_SH_CMPXCHG_XCHG_H

/*
 * Copyright (C) 2016 Red Hat, Inc.
 * Author: Michael S. Tsirkin <mst@redhat.com>
 */
#include <linux/bits.h>
#include <linux/compiler.h>
#include <asm/byteorder.h>

/*
 * Portable implementations of 1- and 2-byte xchg using a 4-byte cmpxchg.
 * Note: this header is not self-contained; __cmpxchg_u32 must be defined
 * before it is included.
 */
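/*
 * For reference, a minimal sketch of the contract this header assumes (the
 * exact prototype below is an assumption; the real SH definitions live in
 * the other asm/cmpxchg-*.h variants): __cmpxchg_u32() atomically compares
 * *m against old, stores new on a match, and always returns the value that
 * was previously in *m, e.g.:
 *
 *	static inline unsigned long
 *	__cmpxchg_u32(volatile u32 *m, unsigned long old, unsigned long new);
 *
 * The retry loop below only terminates when that returned value equals the
 * oldv it computed newv from, i.e. when the 4-byte store actually took
 * effect.
 */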
static inline u32 __xchg_cmpxchg(volatile void *ptr, u32 x, int size)
{
	/* Byte offset of ptr within its naturally aligned 4-byte word */
	int off = (unsigned long)ptr % sizeof(u32);
	/* Aligned u32 that contains the 1- or 2-byte value being exchanged */
	volatile u32 *p = ptr - off;
	/* Bit position of that value within the word depends on endianness */
#ifdef __BIG_ENDIAN
	int bitoff = (sizeof(u32) - size - off) * BITS_PER_BYTE;
#else
	int bitoff = off * BITS_PER_BYTE;
#endif
	/* Mask covering only the bytes being exchanged */
	u32 bitmask = ((0x1 << size * BITS_PER_BYTE) - 1) << bitoff;
	u32 oldv, newv;
	u32 ret;

	/*
	 * Splice the new value into the containing word and retry the
	 * 4-byte cmpxchg until no concurrent update got in between.
	 */
	do {
		oldv = READ_ONCE(*p);
		ret = (oldv & bitmask) >> bitoff;
		newv = (oldv & ~bitmask) | (x << bitoff);
	} while (__cmpxchg_u32(p, oldv, newv) != oldv);

	return ret;
}
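/*
 * A worked example (illustrative values, little-endian): exchanging a u16
 * at address 0x1002 gives off = 2, p = (u32 *)0x1000, bitoff = 16 and
 * bitmask = 0xffff0000. Each iteration extracts the old halfword from bits
 * 31:16 of *p, splices the new value into those same bits, and retries the
 * 4-byte cmpxchg until no other writer has touched the containing word.
 * On a big-endian CPU the same access yields bitoff = (4 - 2 - 2) * 8 = 0,
 * i.e. the low-order bits of the word.
 */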

static inline unsigned long xchg_u16(volatile u16 *m, unsigned long val)
{
	return __xchg_cmpxchg(m, val, sizeof(*m));
}

static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
	return __xchg_cmpxchg(m, val, sizeof(*m));
}
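
/*
 * Usage sketch (illustrative, not part of this header): callers normally
 * reach these helpers through the arch's size-dispatching xchg() wrapper,
 * but a direct call would behave like:
 *
 *	volatile u8 flag = 0;
 *	unsigned long old = xchg_u8(&flag, 1);	// old == 0, flag == 1
 *
 * The returned value is whatever the byte held before the exchange; the
 * read-modify-write is atomic with respect to other updates of the
 * containing 4-byte word.
 */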

#endif /* __ASM_SH_CMPXCHG_XCHG_H */