xref: /linux/arch/hexagon/include/asm/cmpxchg.h (revision 1e0731c05c985deb68a97fa44c1adcd3305dda90)
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * xchg/cmpxchg operations for the Hexagon architecture
 *
 * Copyright (c) 2010-2011, The Linux Foundation. All rights reserved.
 */

#ifndef _ASM_CMPXCHG_H
#define _ASM_CMPXCHG_H

/*
 * __arch_xchg - atomically exchange a register and a memory location
 * @x: value to swap
 * @ptr: pointer to memory
 * @size: size of the value
 *
 * Only 4 bytes are supported currently.
 *
 * Note: there was an erratum on V2 concerning .new predicates and
 * memw_locked.
 */
static inline unsigned long
__arch_xchg(unsigned long x, volatile void *ptr, int size)
{
	unsigned long retval;

	/*  Can't seem to use printk or panic here, so just stop  */
	if (size != 4)
		do { asm volatile("brkpt;\n"); } while (1);

	__asm__ __volatile__ (
	"1:	%0 = memw_locked(%1);\n"    /*  load into retval */
	"	memw_locked(%1,P0) = %2;\n" /*  store into memory */
	"	if (!P0) jump 1b;\n"        /*  retry if the store failed */
	: "=&r" (retval)
	: "r" (ptr), "r" (x)
	: "memory", "p0"
	);
	return retval;
}
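
/*
 * Illustrative sketch (not kernel code): ignoring atomicity, the
 * load-locked/store-conditional loop above behaves like the plain C
 * below.  The real sequence retries until the locked store succeeds,
 * which is what makes the read-modify-write atomic; this sketch is not.
 */
#if 0	/* example only, never compiled */
static unsigned long __arch_xchg_sketch(unsigned long x, volatile void *ptr)
{
	unsigned long retval = *(volatile unsigned long *)ptr;	/* memw_locked load */
	*(volatile unsigned long *)ptr = x;	/* the conditional store, minus the retry */
	return retval;
}
#endif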

/*
 * Atomically swap the contents of a register with memory.  Should be
 * atomic across multiple CPUs and with respect to interrupts on the
 * same CPU.
 */
#define arch_xchg(ptr, v) ((__typeof__(*(ptr)))__arch_xchg((unsigned long)(v), (ptr), \
							   sizeof(*(ptr))))
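
/*
 * Example usage (hypothetical, not part of this header): arch_xchg() is
 * enough to build a simple test-and-set spin lock.  example_lock_t,
 * example_lock() and example_unlock() are illustrative names only, and
 * memory-ordering details are ignored for brevity.
 */
#if 0	/* example only, never compiled */
typedef struct { int locked; } example_lock_t;

static inline void example_lock(example_lock_t *l)
{
	/* spin until the previous value was 0, i.e. we acquired the lock */
	while (arch_xchg(&l->locked, 1) != 0)
		;
}

static inline void example_unlock(example_lock_t *l)
{
	arch_xchg(&l->locked, 0);	/* old value (1) is ignored */
}
#endif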

/*
 * See Documentation/locking/rt-mutex-design.rst: cmpxchg atomically
 * checks whether *ptr equals old and, only if so, swaps in new; the
 * value read from *ptr is returned either way.  On this arch it
 * currently looks just like atomic_cmpxchg() with a bunch of variable
 * casting.
 */

#define arch_cmpxchg(ptr, old, new)				\
({								\
	__typeof__(ptr) __ptr = (ptr);				\
	__typeof__(*(ptr)) __old = (old);			\
	__typeof__(*(ptr)) __new = (new);			\
	__typeof__(*(ptr)) __oldval = 0;			\
								\
	/*  load-locked; bail out early if *__ptr != __old,	\
	    else store-conditionally and retry on failure  */	\
	asm volatile(						\
		"1:	%0 = memw_locked(%1);\n"		\
		"	{ P0 = cmp.eq(%0,%2);\n"		\
		"	  if (!P0.new) jump:nt 2f; }\n"		\
		"	memw_locked(%1,P0) = %3;\n"		\
		"	if (!P0) jump 1b;\n"			\
		"2:\n"						\
		: "=&r" (__oldval)				\
		: "r" (__ptr), "r" (__old), "r" (__new)		\
		: "memory", "p0"				\
	);							\
	__oldval;						\
})
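
/*
 * Example usage (hypothetical): the classic compare-and-swap retry loop.
 * example_atomic_add() is an illustrative name, not a kernel interface.
 */
#if 0	/* example only, never compiled */
static inline int example_atomic_add(int *p, int n)
{
	int old, seen;

	do {
		old = *(volatile int *)p;	/* snapshot the current value */
		seen = arch_cmpxchg(p, old, old + n);
	} while (seen != old);			/* someone raced with us: retry */

	return old + n;
}
#endif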

#endif /* _ASM_CMPXCHG_H */