/Linux-v4.19/arch/x86/include/asm/
D | atomic.h |
     55  asm volatile(LOCK_PREFIX "addl %1,%0"  in arch_atomic_add()
     69  asm volatile(LOCK_PREFIX "subl %1,%0"  in arch_atomic_sub()
     85  GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", e);  in arch_atomic_sub_and_test()
     97  asm volatile(LOCK_PREFIX "incl %0"  in arch_atomic_inc()
    110  asm volatile(LOCK_PREFIX "decl %0"  in arch_atomic_dec()
    125  GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", e);  in arch_atomic_dec_and_test()
    139  GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", e);  in arch_atomic_inc_and_test()
    154  GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", s);  in arch_atomic_add_negative()
    210  asm volatile(LOCK_PREFIX "andl %1,%0"  in arch_atomic_and()
    227  asm volatile(LOCK_PREFIX "orl %1,%0"  in arch_atomic_or()
    [all …]
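These matches are the core LOCK_PREFIX idiom: a single read-modify-write instruction on v->counter, with the lock prefix patched in on SMP kernels. For context, the function behind the line-55 hit looks roughly like this (reconstructed from the v4.19 tree; treat the exact constraints as illustrative):

    static __always_inline void arch_atomic_add(int i, atomic_t *v)
    {
        asm volatile(LOCK_PREFIX "addl %1,%0"
                     : "+m" (v->counter)   /* read-modify-write target */
                     : "ir" (i));          /* addend: immediate or register */
    }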
D | atomic64_64.h |
     46  asm volatile(LOCK_PREFIX "addq %1,%0"  in arch_atomic64_add()
     60  asm volatile(LOCK_PREFIX "subq %1,%0"  in arch_atomic64_sub()
     76  GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", e);  in arch_atomic64_sub_and_test()
     88  asm volatile(LOCK_PREFIX "incq %0"  in arch_atomic64_inc()
    102  asm volatile(LOCK_PREFIX "decq %0"  in arch_atomic64_dec()
    118  GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", e);  in arch_atomic64_dec_and_test()
    132  GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", e);  in arch_atomic64_inc_and_test()
    147  GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", s);  in arch_atomic64_add_negative()
    196  asm volatile(LOCK_PREFIX "andq %1,%0"  in arch_atomic64_and()
    213  asm volatile(LOCK_PREFIX "orq %1,%0"  in arch_atomic64_or()
    [all …]
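The GEN_*_RMWcc() hits wrap the same locked instructions but also hand back a condition flag (the trailing e or s argument) as a boolean. Written out by hand with a GCC flag-output constraint, arch_atomic64_sub_and_test() amounts to something like this (a sketch with a hypothetical name, not the literal macro expansion):

    static inline bool atomic64_sub_and_test_sketch(long i, long *counter)
    {
        bool zero;
        asm volatile(LOCK_PREFIX "subq %2,%0"
                     : "+m" (*counter), "=@ccz" (zero)  /* ZF after the sub */
                     : "er" (i)
                     : "memory");
        return zero;  /* true iff the counter reached zero */
    }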
D | rwsem.h |
     68  LOCK_PREFIX _ASM_INC "(%[sem])\n\t" \
    105  LOCK_PREFIX " cmpxchg %[tmp],%[count]\n\t"  in __down_read_trylock()
    125  LOCK_PREFIX " xadd %[tmp],(%[sem])\n\t" \
    168  LOCK_PREFIX " cmpxchg %[tmp1],%[count]\n\t"  in __down_write_trylock()
    187  LOCK_PREFIX " xadd %[tmp],(%[sem])\n\t"  in __up_read()
    205  LOCK_PREFIX " xadd %[tmp],(%[sem])\n\t"  in __up_write()
    222  LOCK_PREFIX _ASM_ADD "%[inc],(%[sem])\n\t"  in __downgrade_write()
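The rwsem fast paths lean on lock xadd, which adds to the count and returns the pre-add value in one atomic step, so the caller can tell from the old count whether it must enter the slowpath to wake or block. A minimal sketch of the idiom (hypothetical helper name; the real __up_read()/__up_write() also branch on the result):

    static inline long rwsem_fetch_add_sketch(long *count, long delta)
    {
        asm volatile(LOCK_PREFIX "xaddq %0,%1"
                     : "+r" (delta), "+m" (*count)
                     : : "memory");
        return delta;  /* xadd leaves the old value in the register operand */
    }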
D | bitops.h |
     76  asm volatile(LOCK_PREFIX "orb %1,%0"  in set_bit()
     81  asm volatile(LOCK_PREFIX __ASM_SIZE(bts) " %1,%0"  in set_bit()
    114  asm volatile(LOCK_PREFIX "andb %1,%0"  in clear_bit()
    118  asm volatile(LOCK_PREFIX __ASM_SIZE(btr) " %1,%0"  in clear_bit()
    146  asm volatile(LOCK_PREFIX "andb %2,%1"  in clear_bit_unlock_is_negative_byte()
    200  asm volatile(LOCK_PREFIX "xorb %1,%0"  in change_bit()
    204  asm volatile(LOCK_PREFIX __ASM_SIZE(btc) " %1,%0"  in change_bit()
    220  GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(bts),  in test_and_set_bit()
    267  GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btr),  in test_and_clear_bit()
    321  GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btc),  in test_and_change_bit()
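set_bit(), clear_bit(), and change_bit() each have two arms: a byte-wide orb/andb/xorb when the bit number is a compile-time constant, and bts/btr/btc otherwise (sized via __ASM_SIZE). A simplified sketch of the non-constant arm of set_bit(), assuming x86-64 (the real code uses a special address expression because bts can touch bits beyond the first word at *addr):

    static inline void set_bit_sketch(long nr, volatile unsigned long *addr)
    {
        asm volatile(LOCK_PREFIX "btsq %1,%0"
                     : "+m" (*addr)
                     : "Ir" (nr)    /* bit index: immediate or register */
                     : "memory");   /* bts may write outside *addr's word */
    }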
D | refcount.h |
     44  asm volatile(LOCK_PREFIX "addl %1,%0\n\t"  in refcount_add()
     53  asm volatile(LOCK_PREFIX "incl %0\n\t"  in refcount_inc()
     61  asm volatile(LOCK_PREFIX "decl %0\n\t"  in refcount_dec()
     70  GEN_BINARY_SUFFIXED_RMWcc(LOCK_PREFIX "subl", REFCOUNT_CHECK_LT_ZERO,  in refcount_sub_and_test()
     76  GEN_UNARY_SUFFIXED_RMWcc(LOCK_PREFIX "decl", REFCOUNT_CHECK_LT_ZERO,  in refcount_dec_and_test()
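refcount.h pairs each locked operation with an overflow/underflow check: the *_SUFFIXED_RMWcc variants append a jump into an exception path (REFCOUNT_CHECK_LT_ZERO) when the counter goes negative. With that checking machinery stripped away, the dec-and-test core is just (illustrative sketch):

    static inline bool refcount_dec_and_test_sketch(int *refs)
    {
        bool zero;
        asm volatile(LOCK_PREFIX "decl %0"
                     : "+m" (*refs), "=@ccz" (zero)
                     : : "memory");
        return zero;  /* true when the count hit zero; LT-zero check omitted */
    }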
D | cmpxchg.h |
    134  __raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)
    222  __raw_try_cmpxchg((ptr), (pold), (new), (size), LOCK_PREFIX)
    234  #define xadd(ptr, inc) __xadd((ptr), (inc), LOCK_PREFIX)
    254  __cmpxchg_double(LOCK_PREFIX, p1, p2, o1, o2, n1, n2)
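__raw_cmpxchg() dispatches on operand size and emits the matching lock cmpxchg. The 8-byte case is equivalent to a hand-written version along these lines (hypothetical name; the kernel macro generates all of the 1/2/4/8-byte variants):

    static inline unsigned long cmpxchg64_sketch(unsigned long *ptr,
                                                 unsigned long old,
                                                 unsigned long new)
    {
        unsigned long prev;
        asm volatile(LOCK_PREFIX "cmpxchgq %2,%1"
                     : "=a" (prev), "+m" (*ptr)  /* old *ptr comes back in rax */
                     : "r" (new), "0" (old)      /* rax preloaded with 'old' */
                     : "memory");
        return prev;  /* equals 'old' iff the swap happened */
    }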
D | futex.h |
     32  "2:\t" LOCK_PREFIX "cmpxchgl %3, %2\n" \
     57  __futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval,  in arch_futex_atomic_op_inuser()
D | cmpxchg_32.h |
     31  LOCK_PREFIX "cmpxchg8b %0\n\t"  in set_64bit()
     50  asm volatile(LOCK_PREFIX "cmpxchg8b %1"  in __cmpxchg64()
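32-bit x86 has no universally atomic plain 64-bit store, so set_64bit() loops on lock cmpxchg8b until its store sticks. Reconstructed roughly from the v4.19 source (u32/u64 are the kernel's fixed-width types; constraint details are illustrative):

    static inline void set_64bit_sketch(volatile u64 *ptr, u64 value)
    {
        u32 low  = value;
        u32 high = value >> 32;
        u64 prev = *ptr;

        asm volatile("\n1:\t"
                     LOCK_PREFIX "cmpxchg8b %0\n\t"
                     "jnz 1b"                     /* lost a race: retry */
                     : "=m" (*ptr), "+A" (prev)   /* "A" = the edx:eax pair */
                     : "b" (low), "c" (high)      /* new value in ecx:ebx */
                     : "memory");
    }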
D | alternative-asm.h |
     10  .macro LOCK_PREFIX
     18  .macro LOCK_PREFIX
D | qspinlock_paravirt.h |
     46  LOCK_PREFIX "cmpxchg %dl,(%rdi);"
D | uaccess.h |
    599  "1:\t" LOCK_PREFIX "cmpxchgb %4, %2\n" \
    615  "1:\t" LOCK_PREFIX "cmpxchgw %4, %2\n" \
    631  "1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n" \
    650  "1:\t" LOCK_PREFIX "cmpxchgq %4, %2\n" \
D | alternative.h |
     41  #define LOCK_PREFIX LOCK_PREFIX_HERE "\n\tlock; "  macro
     45  #define LOCK_PREFIX ""  macro
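This header is what the rest of the listing hangs off. On CONFIG_SMP kernels, LOCK_PREFIX emits a lock prefix and records its address in the .smp_locks section so that a kernel booted on a single CPU can patch the prefixes out at runtime; on !SMP builds it expands to nothing. Roughly, from the v4.19 header (reconstructed from memory; check the tree for the exact bytes):

    #ifdef CONFIG_SMP
    #define LOCK_PREFIX_HERE \
            ".pushsection .smp_locks,\"a\"\n" \
            ".balign 4\n"                     \
            ".long 671f - .\n" /* offset */   \
            ".popsection\n"                   \
            "671:"

    #define LOCK_PREFIX LOCK_PREFIX_HERE "\n\tlock; "

    #else /* ! CONFIG_SMP */
    #define LOCK_PREFIX_HERE ""
    #define LOCK_PREFIX ""
    #endif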
/Linux-v4.19/arch/x86/lib/
D | atomic64_cx8_32.S |
     19  LOCK_PREFIX
     40  LOCK_PREFIX
     64  LOCK_PREFIX
     92  LOCK_PREFIX
    117  LOCK_PREFIX
    147  LOCK_PREFIX
    176  LOCK_PREFIX
/Linux-v4.19/tools/arch/x86/include/asm/
D | atomic.h |
      9  #define LOCK_PREFIX "\n\tlock; "  macro
     51  asm volatile(LOCK_PREFIX "incl %0"  in atomic_inc()
     65  GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", "e");  in atomic_dec_and_test()
D | cmpxchg.h |
     84  __raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)
/Linux-v4.19/arch/x86/include/asm/uv/
D | uv_bau.h |
    833  asm volatile(LOCK_PREFIX "xaddw %0, %1"  in atom_asr()