Lines matching +full:64 +full:mb (all hits are from the arm64 LSE atomics header, arch/arm64/include/asm/atomic_lse.h, where "mb" is the macros' memory-barrier-suffix parameter and "64" the access size, not a 64 MB quantity).

#define ATOMIC_FETCH_OP(name, mb, op, asm_op, cl...)            \
"       " #asm_op #mb " %w[i], %w[i], %[v]"                     \
#define ATOMIC_OP_ADD_RETURN(name, mb, cl...)                   \
"       ldadd" #mb "    %w[i], %w[tmp], %[v]\n"                 \
#define ATOMIC_FETCH_OP_AND(name, mb, cl...)                    \
"       ldclr" #mb "    %w[i], %w[i], %[v]"                     \
#define ATOMIC_OP_SUB_RETURN(name, mb, cl...)                   \
"       ldadd" #mb "    %w[i], %w[tmp], %[v]\n"                 \
#define ATOMIC_FETCH_OP_SUB(name, mb, cl...)                    \
"       ldadd" #mb "    %w[i], %w[i], %[v]"                     \
#define ATOMIC64_FETCH_OP(name, mb, op, asm_op, cl...)          \
"       " #asm_op #mb " %[i], %[i], %[v]"                       \
#define ATOMIC64_OP_ADD_RETURN(name, mb, cl...)                 \
"       ldadd" #mb "    %[i], %x[tmp], %[v]\n"                  \
#define ATOMIC64_FETCH_OP_AND(name, mb, cl...)                  \
"       ldclr" #mb "    %[i], %[i], %[v]"                       \
#define ATOMIC64_OP_SUB_RETURN(name, mb, cl...)                 \
"       ldadd" #mb "    %[i], %x[tmp], %[v]\n"                  \
#define ATOMIC64_FETCH_OP_SUB(name, mb, cl...)                  \
"       ldadd" #mb "    %[i], %[i], %[v]"                       \
#define __CMPXCHG_CASE(w, sfx, name, sz, mb, cl...)             \
"       cas" #mb #sfx "\t%" #w "[tmp], %" #w "[new], %[v]\n"    \
__CMPXCHG_CASE(x,  ,     , 64,   )
__CMPXCHG_CASE(x,  , acq_, 64,  a, "memory")
__CMPXCHG_CASE(x,  , rel_, 64,  l, "memory")
__CMPXCHG_CASE(x,  ,  mb_, 64, al, "memory")
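These four instantiations are the 64-bit row of the instantiation table. Each pastes a different ordering suffix onto cas, so the token pasting works out roughly as below (function names follow the macro's ##name##sz scheme; the mnemonics are the four Armv8.1 orderings):

/* __lse__cmpxchg_case_64      ->  "cas"   (relaxed, no suffix, no clobber)
 * __lse__cmpxchg_case_acq_64  ->  "casa"  (acquire,      mb = a)
 * __lse__cmpxchg_case_rel_64  ->  "casl"  (release,      mb = l)
 * __lse__cmpxchg_case_mb_64   ->  "casal" (full barrier, mb = al)
 */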
#define __CMPXCHG_DBL(name, mb, cl...)                          \
"       casp" #mb "\t%[old1], %[old2], %[new1], %[new2], %[v]\n"\