Lines Matching +full:1 +full:v
10 * 1. Redistributions of source code must retain the above copyright
68 * atomic_set_char(P, V) (*(u_char *)(P) |= (V))
69 * atomic_clear_char(P, V) (*(u_char *)(P) &= ~(V))
70 * atomic_add_char(P, V) (*(u_char *)(P) += (V))
71 * atomic_subtract_char(P, V) (*(u_char *)(P) -= (V))
73 * atomic_set_short(P, V) (*(u_short *)(P) |= (V))
74 * atomic_clear_short(P, V) (*(u_short *)(P) &= ~(V))
75 * atomic_add_short(P, V) (*(u_short *)(P) += (V))
76 * atomic_subtract_short(P, V) (*(u_short *)(P) -= (V))
78 * atomic_set_int(P, V) (*(u_int *)(P) |= (V))
79 * atomic_clear_int(P, V) (*(u_int *)(P) &= ~(V))
80 * atomic_add_int(P, V) (*(u_int *)(P) += (V))
81 * atomic_subtract_int(P, V) (*(u_int *)(P) -= (V))
82 * atomic_swap_int(P, V) (return (*(u_int *)(P)); *(u_int *)(P) = (V);)
85 * atomic_set_long(P, V) (*(u_long *)(P) |= (V))
86 * atomic_clear_long(P, V) (*(u_long *)(P) &= ~(V))
87 * atomic_add_long(P, V) (*(u_long *)(P) += (V))
88 * atomic_subtract_long(P, V) (*(u_long *)(P) -= (V))
89 * atomic_swap_long(P, V) (return (*(u_long *)(P)); *(u_long *)(P) = (V);)
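Taken together, the comment block above documents plain, unordered read-modify-write semantics for the char/short/int/long variants. A minimal usage sketch in kernel-style C (assuming the header is reached via <machine/atomic.h> and u_int comes from <sys/types.h>; the flag word and counter are hypothetical):

#include <sys/types.h>
#include <machine/atomic.h>

static volatile u_int	softc_flags;	/* hypothetical flag word shared between CPUs */
static volatile u_int	softc_count;	/* hypothetical counter */

static void
flags_example(void)
{
	atomic_set_int(&softc_flags, 0x01);	/* softc_flags |= 0x01, atomically  */
	atomic_clear_int(&softc_flags, 0x01);	/* softc_flags &= ~0x01, atomically */
	atomic_add_int(&softc_count, 1);	/* softc_count += 1, atomically     */
	atomic_subtract_int(&softc_count, 1);	/* softc_count -= 1, atomically     */
}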
102 #define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \ argument
104 atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
108 : CONS (V) \
113 atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
117 : CONS (V) \
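Read together with the instantiations further down, ATOMIC_ASM stamps out one plain and one _barr inline per NAME/TYPE pair, with OP as the instruction template, CONS as the input constraint and V as the source operand. A rough sketch of what the expansion of ATOMIC_ASM(add, int, "addl %1,%0", "ir", v) could look like (the lock-prefix placement and the clobber list are assumptions, not the verbatim header text):

/* Approximate expansion sketch of the plain (non-_barr) variant. */
static __inline void
atomic_add_int(volatile u_int *p, u_int v)
{
	__asm __volatile("lock; addl %1,%0"
	    : "+m" (*p)		/* 0: read-modify-write memory operand */
	    : "ir" (v)		/* 1: CONS (V), immediate or register  */
	    : "cc");		/* condition codes are clobbered       */
}

The _barr variant presumably differs by also declaring a compiler memory barrier (a "memory" clobber), which is what the acquire/release flavored wrappers rely on.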
144 " lock; cmpxchg %3,%1 ; " \
148 "+m" (*dst), /* 1 */ \
161 " lock; cmpxchg %3,%1 ; " \
165 "+m" (*dst), /* 1 */ \
177 * Atomically add the value of v to the integer pointed to by p and return the previous value of *p.
181 atomic_fetchadd_int(volatile u_int *p, u_int v) in atomic_fetchadd_int() argument
185 " lock; xaddl %0,%1 ; " in atomic_fetchadd_int()
187 : "+r" (v), /* 0 */ in atomic_fetchadd_int()
188 "+m" (*p) /* 1 */ in atomic_fetchadd_int()
190 return (v); in atomic_fetchadd_int()
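atomic_fetchadd_int() is built on lock; xaddl, which exchanges the addend with the previous memory value, so the function returns what *p held before the addition. A usage sketch (ticket-style counter, names assumed):

#include <sys/types.h>
#include <machine/atomic.h>

static volatile u_int	next_ticket;	/* hypothetical */

static u_int
take_ticket(void)
{
	/* Each caller gets the pre-increment value, hence a unique ticket. */
	return (atomic_fetchadd_int(&next_ticket, 1));
}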
194 atomic_testandset_int(volatile u_int *p, u_int v) in atomic_testandset_int() argument
199 " lock; btsl %2,%1 ; " in atomic_testandset_int()
203 "+m" (*p) /* 1 */ in atomic_testandset_int()
204 : "Ir" (v & 0x1f) /* 2 */ in atomic_testandset_int()
210 atomic_testandclear_int(volatile u_int *p, u_int v) in atomic_testandclear_int() argument
215 " lock; btrl %2,%1 ; " in atomic_testandclear_int()
219 "+m" (*p) /* 1 */ in atomic_testandclear_int()
220 : "Ir" (v & 0x1f) /* 2 */ in atomic_testandclear_int()
264 atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v) \
268 *p = v; \
313 " xorl %1,%%eax ; " in atomic_cmpset_64_i386()
316 " jne 1f ; " in atomic_cmpset_64_i386()
317 " movl %4,%1 ; " in atomic_cmpset_64_i386()
319 "1: " in atomic_cmpset_64_i386()
323 "+m" (*p), /* 1 */ in atomic_cmpset_64_i386()
324 "+m" (*(p + 1)), /* 2 */ in atomic_cmpset_64_i386()
337 return (1); in atomic_fcmpset_64_i386()
354 " movl %1,%%eax ; " in atomic_load_acq_64_i386()
358 : "m" (*q), /* 1 */ in atomic_load_acq_64_i386()
359 "m" (*(q + 1)) /* 2 */ in atomic_load_acq_64_i386()
365 atomic_store_rel_64_i386(volatile uint64_t *p, uint64_t v) in atomic_store_rel_64_i386() argument
374 " movl %%edx,%1 ; " in atomic_store_rel_64_i386()
377 "=m" (*(q + 1)) /* 1 */ in atomic_store_rel_64_i386()
378 : "A" (v) /* 2 */ in atomic_store_rel_64_i386()
383 atomic_swap_64_i386(volatile uint64_t *p, uint64_t v) in atomic_swap_64_i386() argument
392 " movl %1,%%eax ; " in atomic_swap_64_i386()
395 " movl %3,%1 ; " in atomic_swap_64_i386()
398 "+m" (*q), /* 1 */ in atomic_swap_64_i386()
399 "+m" (*(q + 1)) /* 2 */ in atomic_swap_64_i386()
400 : "r" ((uint32_t)v), /* 3 */ in atomic_swap_64_i386()
401 "r" ((uint32_t)(v >> 32))); /* 4 */ in atomic_swap_64_i386()
411 " lock; cmpxchg8b %1 ; " in atomic_cmpset_64_i586()
414 "+m" (*dst), /* 1 */ in atomic_cmpset_64_i586()
428 " lock; cmpxchg8b %1 ; " in atomic_fcmpset_64_i586()
431 "+m" (*dst), /* 1 */ in atomic_fcmpset_64_i586()
451 " lock; cmpxchg8b %1" in atomic_load_acq_64_i586()
453 : "m" (*p) /* 1 */ in atomic_load_acq_64_i586()
459 atomic_store_rel_64_i586(volatile uint64_t *p, uint64_t v) in atomic_store_rel_64_i586() argument
465 "1: " in atomic_store_rel_64_i586()
467 " jne 1b" in atomic_store_rel_64_i586()
469 "+A" (v) /* 1 */ in atomic_store_rel_64_i586()
474 atomic_swap_64_i586(volatile uint64_t *p, uint64_t v) in atomic_swap_64_i586() argument
480 "1: " in atomic_swap_64_i586()
482 " jne 1b" in atomic_swap_64_i586()
484 "+A" (v) /* 1 */ in atomic_swap_64_i586()
486 return (v); in atomic_swap_64_i586()
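atomic_load_acq_64_i586() uses lock; cmpxchg8b as a 64-bit load: comparing and conditionally exchanging a value against itself never changes memory but always leaves an atomic 64-bit snapshot in EDX:EAX, while the store and swap variants loop on "jne 1b" until their cmpxchg8b succeeds. The load trick expressed with a compiler builtin, as a sketch (the __sync builtin stands in for the inline asm and is not what the header uses):

#include <stdint.h>

static inline uint64_t
load_64_via_cas(volatile uint64_t *p)
{
	/*
	 * Compare-and-swap with expected == new == 0: if *p is 0 it is
	 * rewritten with 0, otherwise the CAS fails; either way the
	 * builtin returns the current 64-bit value, read atomically.
	 */
	return (__sync_val_compare_and_swap(p, (uint64_t)0, (uint64_t)0));
}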
520 atomic_store_rel_64(volatile uint64_t *p, uint64_t v) in atomic_store_rel_64() argument
524 atomic_store_rel_64_i386(p, v); in atomic_store_rel_64()
526 atomic_store_rel_64_i586(p, v); in atomic_store_rel_64()
530 atomic_swap_64(volatile uint64_t *p, uint64_t v) in atomic_swap_64() argument
534 return (atomic_swap_64_i386(p, v)); in atomic_swap_64()
536 return (atomic_swap_64_i586(p, v)); in atomic_swap_64()
540 atomic_fetchadd_64(volatile uint64_t *p, uint64_t v) in atomic_fetchadd_64() argument
545 if (atomic_cmpset_64(p, t, t + v)) in atomic_fetchadd_64()
551 atomic_add_64(volatile uint64_t *p, uint64_t v) in atomic_add_64() argument
557 if (atomic_cmpset_64(p, t, t + v)) in atomic_add_64()
563 atomic_subtract_64(volatile uint64_t *p, uint64_t v) in atomic_subtract_64() argument
569 if (atomic_cmpset_64(p, t, t - v)) in atomic_subtract_64()
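atomic_fetchadd_64(), atomic_add_64() and atomic_subtract_64() are visible here only as the success tests of compare-and-set retry loops. The surrounding shape is reconstructed below for the fetch-and-add case (the loop structure is an assumption, though it matches the t/p/v names in the fragments):

#include <stdint.h>
#include <machine/atomic.h>

/* Reconstructed retry-loop shape for a 64-bit fetch-and-add. */
static __inline uint64_t
fetchadd_64_sketch(volatile uint64_t *p, uint64_t v)
{
	uint64_t t;

	for (;;) {
		t = *p;					/* snapshot the current value */
		if (atomic_cmpset_64(p, t, t + v))	/* try to install t + v       */
			break;				/* nobody raced us: done      */
	}
	return (t);					/* return the pre-add value   */
}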
576 ATOMIC_ASM(set, char, "orb %b1,%0", "iq", v);
577 ATOMIC_ASM(clear, char, "andb %b1,%0", "iq", ~v);
578 ATOMIC_ASM(add, char, "addb %b1,%0", "iq", v);
579 ATOMIC_ASM(subtract, char, "subb %b1,%0", "iq", v);
581 ATOMIC_ASM(set, short, "orw %w1,%0", "ir", v);
582 ATOMIC_ASM(clear, short, "andw %w1,%0", "ir", ~v);
583 ATOMIC_ASM(add, short, "addw %w1,%0", "ir", v);
584 ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir", v);
586 ATOMIC_ASM(set, int, "orl %1,%0", "ir", v);
587 ATOMIC_ASM(clear, int, "andl %1,%0", "ir", ~v);
588 ATOMIC_ASM(add, int, "addl %1,%0", "ir", v);
589 ATOMIC_ASM(subtract, int, "subl %1,%0", "ir", v);
591 ATOMIC_ASM(set, long, "orl %1,%0", "ir", v);
592 ATOMIC_ASM(clear, long, "andl %1,%0", "ir", ~v);
593 ATOMIC_ASM(add, long, "addl %1,%0", "ir", v);
594 ATOMIC_ASM(subtract, long, "subl %1,%0", "ir", v);
627 atomic_fetchadd_long(volatile u_long *p, u_long v) in atomic_fetchadd_long() argument
630 return (atomic_fetchadd_int((volatile u_int *)p, (u_int)v)); in atomic_fetchadd_long()
634 atomic_testandset_long(volatile u_long *p, u_int v) in atomic_testandset_long() argument
637 return (atomic_testandset_int((volatile u_int *)p, v)); in atomic_testandset_long()
641 atomic_testandclear_long(volatile u_long *p, u_int v) in atomic_testandclear_long() argument
644 return (atomic_testandclear_int((volatile u_int *)p, v)); in atomic_testandclear_long()
649 atomic_swap_int(volatile u_int *p, u_int v) in atomic_swap_int() argument
653 " xchgl %1,%0 ; " in atomic_swap_int()
655 : "+r" (v), /* 0 */ in atomic_swap_int()
656 "+m" (*p)); /* 1 */ in atomic_swap_int()
657 return (v); in atomic_swap_int()
661 atomic_swap_long(volatile u_long *p, u_long v) in atomic_swap_long() argument
664 return (atomic_swap_int((volatile u_int *)p, (u_int)v)); in atomic_swap_long()
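atomic_swap_int() relies on xchgl, which is implicitly locked whenever one operand is memory, so no lock prefix is needed, and atomic_swap_long() simply forwards to it because long is 32 bits on i386. A swap that returns the previous value is enough for a simple test-and-set style lock (a sketch; the lock word and the paired release are assumptions):

#include <sys/types.h>
#include <machine/atomic.h>

static volatile u_int	lockword;	/* hypothetical: 0 = free, 1 = held */

static void
simple_lock(void)
{
	/* Spin until the previous value was 0, i.e. the lock was free. */
	while (atomic_swap_int(&lockword, 1) != 0)
		;	/* spin */
}

static void
simple_unlock(void)
{
	atomic_store_rel_int(&lockword, 0);	/* release store */
}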
811 #define atomic_set_ptr(p, v) \ argument
812 atomic_set_int((volatile u_int *)(p), (u_int)(v))
813 #define atomic_set_acq_ptr(p, v) \ argument
814 atomic_set_acq_int((volatile u_int *)(p), (u_int)(v))
815 #define atomic_set_rel_ptr(p, v) \ argument
816 atomic_set_rel_int((volatile u_int *)(p), (u_int)(v))
817 #define atomic_clear_ptr(p, v) \ argument
818 atomic_clear_int((volatile u_int *)(p), (u_int)(v))
819 #define atomic_clear_acq_ptr(p, v) \ argument
820 atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v))
821 #define atomic_clear_rel_ptr(p, v) \ argument
822 atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v))
823 #define atomic_add_ptr(p, v) \ argument
824 atomic_add_int((volatile u_int *)(p), (u_int)(v))
825 #define atomic_add_acq_ptr(p, v) \ argument
826 atomic_add_acq_int((volatile u_int *)(p), (u_int)(v))
827 #define atomic_add_rel_ptr(p, v) \ argument
828 atomic_add_rel_int((volatile u_int *)(p), (u_int)(v))
829 #define atomic_subtract_ptr(p, v) \ argument
830 atomic_subtract_int((volatile u_int *)(p), (u_int)(v))
831 #define atomic_subtract_acq_ptr(p, v) \ argument
832 atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v))
833 #define atomic_subtract_rel_ptr(p, v) \ argument
834 atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v))
837 #define atomic_store_rel_ptr(p, v) \ argument
838 atomic_store_rel_int((volatile u_int *)(p), (v))
855 #define atomic_swap_ptr(p, v) \ argument
856 atomic_swap_int((volatile u_int *)(p), (u_int)(v))
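All of the _ptr macros above just cast to volatile u_int * and defer to the int routines, which is sound on i386 because pointers and u_int are both 32 bits wide. A usage sketch (the tagged word is hypothetical):

#include <sys/types.h>
#include <machine/atomic.h>

/* Hypothetical pointer-sized word whose low bit is used as a tag. */
static volatile uintptr_t	tagged;

static void
tag_example(void)
{
	atomic_set_ptr(&tagged, 0x1);	/* set the tag bit atomically   */
	atomic_clear_ptr(&tagged, 0x1);	/* clear the tag bit atomically */
}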