/*
 * Copyright 2009, 2010 Samy Al Bahra.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#ifndef CK_PR_SPARCV9_H
#define CK_PR_SPARCV9_H

#ifndef CK_PR_H
#error Do not include this file directly, use ck_pr.h
#endif

#include <ck_cc.h>
#include <ck_md.h>

/*
 * The following represent supported atomic operations.
 * These operations may be emulated.
 */
#include "ck_f_pr.h"

/*
 * Minimum interface requirement met.
 */
#define CK_F_PR

/*
 * Order loads at the least.
 */
CK_CC_INLINE static void
ck_pr_stall(void)
{

        __asm__ __volatile__("membar #LoadLoad" ::: "memory");
        return;
}

#define CK_PR_FENCE(T, I)                               \
        CK_CC_INLINE static void                        \
        ck_pr_fence_strict_##T(void)                    \
        {                                               \
                __asm__ __volatile__(I ::: "memory");   \
        }

/*
 * Atomic operations are treated as both load and store
 * operations on SPARCv9.
 */
CK_PR_FENCE(atomic, "membar #StoreStore")
CK_PR_FENCE(atomic_store, "membar #StoreStore")
CK_PR_FENCE(atomic_load, "membar #StoreLoad")
CK_PR_FENCE(store_atomic, "membar #StoreStore")
CK_PR_FENCE(load_atomic, "membar #LoadStore")
CK_PR_FENCE(store, "membar #StoreStore")
CK_PR_FENCE(store_load, "membar #StoreLoad")
CK_PR_FENCE(load, "membar #LoadLoad")
CK_PR_FENCE(load_store, "membar #LoadStore")
CK_PR_FENCE(memory, "membar #LoadLoad | #LoadStore | #StoreStore | #StoreLoad")
CK_PR_FENCE(acquire, "membar #LoadLoad | #LoadStore")
CK_PR_FENCE(release, "membar #LoadStore | #StoreStore")
CK_PR_FENCE(acqrel, "membar #LoadLoad | #LoadStore | #StoreStore")
CK_PR_FENCE(lock, "membar #LoadLoad | #LoadStore | #StoreStore | #StoreLoad")
CK_PR_FENCE(unlock, "membar #LoadStore | #StoreStore")

#undef CK_PR_FENCE
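
/*
 * A note on the membar masks used above: "membar #XY" orders every X-type
 * memory operation issued before the barrier ahead of every Y-type operation
 * issued after it, which is how the acquire ("#LoadLoad | #LoadStore") and
 * release ("#LoadStore | #StoreStore") mappings arise.  A minimal
 * message-passing sketch, assuming the generic ck_pr_load_uint,
 * ck_pr_store_uint, ck_pr_fence_acquire and ck_pr_fence_release wrappers
 * that ck_pr.h layers over the strict fences defined here:
 *
 *      producer:
 *              message = 42;
 *              ck_pr_fence_release();
 *              ck_pr_store_uint(&ready, 1);
 *
 *      consumer:
 *              while (ck_pr_load_uint(&ready) == 0)
 *                      ck_pr_stall();
 *              ck_pr_fence_acquire();
 *              (message is now guaranteed to read as 42)
 */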

#define CK_PR_LOAD(S, M, T, C, I)                               \
        CK_CC_INLINE static T                                   \
        ck_pr_md_load_##S(const M *target)                      \
        {                                                       \
                T r;                                            \
                __asm__ __volatile__(I " [%1], %0"              \
                                        : "=&r" (r)             \
                                        : "r" (target)          \
                                        : "memory");            \
                return (r);                                     \
        }

CK_PR_LOAD(ptr, void, void *, uint64_t, "ldx")

#define CK_PR_LOAD_S(S, T, I) CK_PR_LOAD(S, T, T, T, I)

CK_PR_LOAD_S(64, uint64_t, "ldx")
CK_PR_LOAD_S(32, uint32_t, "lduw")
CK_PR_LOAD_S(uint, unsigned int, "lduw")
CK_PR_LOAD_S(double, double, "ldx")
CK_PR_LOAD_S(int, int, "ldsw")

#undef CK_PR_LOAD_S
#undef CK_PR_LOAD

#define CK_PR_STORE(S, M, T, C, I)                              \
        CK_CC_INLINE static void                                \
        ck_pr_md_store_##S(M *target, T v)                      \
        {                                                       \
                __asm__ __volatile__(I " %0, [%1]"              \
                                        :                       \
                                        : "r" (v),              \
                                          "r" (target)          \
                                        : "memory");            \
                return;                                         \
        }

CK_PR_STORE(ptr, void, const void *, uint64_t, "stx")

#define CK_PR_STORE_S(S, T, I) CK_PR_STORE(S, T, T, T, I)

CK_PR_STORE_S(8, uint8_t, "stub")
CK_PR_STORE_S(64, uint64_t, "stx")
CK_PR_STORE_S(32, uint32_t, "stuw")
CK_PR_STORE_S(uint, unsigned int, "stuw")
CK_PR_STORE_S(double, double, "stx")
CK_PR_STORE_S(int, int, "stsw")

#undef CK_PR_STORE_S
#undef CK_PR_STORE
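
/*
 * The cas/casx instructions below compare the value at [%1] (target) with
 * register %2 (compare); if they match, register %0 (set) is stored to
 * memory.  In every case %0 is overwritten with the value that was in
 * memory before the operation, so success is detected by checking
 * compare == set afterwards, and the *_value variants report that old
 * value back to the caller.  A minimal lock-free increment sketch using
 * only functions defined in this file:
 *
 *      uint64_t snapshot = ck_pr_md_load_64(counter);
 *      while (ck_pr_cas_64_value(counter, snapshot, snapshot + 1,
 *          &snapshot) == false)
 *              ck_pr_stall();
 */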

CK_CC_INLINE static bool
ck_pr_cas_64_value(uint64_t *target, uint64_t compare, uint64_t set, uint64_t *value)
{

        __asm__ __volatile__("casx [%1], %2, %0"
                                : "+&r" (set)
                                : "r" (target),
                                  "r" (compare)
                                : "memory");

        *value = set;
        return (compare == set);
}

CK_CC_INLINE static bool
ck_pr_cas_64(uint64_t *target, uint64_t compare, uint64_t set)
{

        __asm__ __volatile__("casx [%1], %2, %0"
                                : "+&r" (set)
                                : "r" (target),
                                  "r" (compare)
                                : "memory");

        return (compare == set);
}

CK_CC_INLINE static bool
ck_pr_cas_ptr(void *target, void *compare, void *set)
{

        return ck_pr_cas_64(target, (uint64_t)compare, (uint64_t)set);
}

CK_CC_INLINE static bool
ck_pr_cas_ptr_value(void *target, void *compare, void *set, void *previous)
{

        return ck_pr_cas_64_value(target, (uint64_t)compare, (uint64_t)set, previous);
}

#define CK_PR_CAS(N, T)                                                 \
        CK_CC_INLINE static bool                                        \
        ck_pr_cas_##N##_value(T *target, T compare, T set, T *value)    \
        {                                                               \
                __asm__ __volatile__("cas [%1], %2, %0"                 \
                                        : "+&r" (set)                   \
                                        : "r" (target),                 \
                                          "r" (compare)                 \
                                        : "memory");                    \
                *value = set;                                           \
                return (compare == set);                                \
        }                                                               \
        CK_CC_INLINE static bool                                        \
        ck_pr_cas_##N(T *target, T compare, T set)                      \
        {                                                               \
                __asm__ __volatile__("cas [%1], %2, %0"                 \
                                        : "+&r" (set)                   \
                                        : "r" (target),                 \
                                          "r" (compare)                 \
                                        : "memory");                    \
                return (compare == set);                                \
        }

CK_PR_CAS(32, uint32_t)
CK_PR_CAS(uint, unsigned int)
CK_PR_CAS(int, int)

#undef CK_PR_CAS

#define CK_PR_FAS(N, T)                                         \
        CK_CC_INLINE static T                                   \
        ck_pr_fas_##N(T *target, T update)                      \
        {                                                       \
                                                                \
                __asm__ __volatile__("swap [%1], %0"            \
                                        : "+&r" (update)        \
                                        : "r" (target)          \
                                        : "memory");            \
                return (update);                                \
        }

CK_PR_FAS(int, int)
CK_PR_FAS(uint, unsigned int)
CK_PR_FAS(32, uint32_t)

#undef CK_PR_FAS

#endif /* CK_PR_SPARCV9_H */
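
/*
 * The swap instruction behind ck_pr_fas_* atomically exchanges a register
 * with a 32-bit word in memory and hands back the previous contents.  A
 * minimal test-and-set spinlock sketch built only from primitives defined
 * in this file (illustrative, not part of the ck_pr interface):
 *
 *      static unsigned int lock;
 *
 *      while (ck_pr_fas_uint(&lock, 1) == 1)
 *              ck_pr_stall();
 *      ck_pr_fence_strict_acquire();
 *      ... critical section ...
 *      ck_pr_fence_strict_release();
 *      ck_pr_md_store_uint(&lock, 0);
 */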