Searched refs:refs (Results 1 – 25 of 93) sorted by relevance

/Linux-v5.4/include/linux/
refcount.h  20 atomic_t refs; member
23 #define REFCOUNT_INIT(n) { .refs = ATOMIC_INIT(n), }
32 atomic_set(&r->refs, n); in refcount_set()
43 return atomic_read(&r->refs); in refcount_read()
76 return atomic_add_unless(&r->refs, i, 0); in refcount_add_not_zero()
81 atomic_add(i, &r->refs); in refcount_add()
86 return atomic_add_unless(&r->refs, 1, 0); in refcount_inc_not_zero()
91 atomic_inc(&r->refs); in refcount_inc()
96 return atomic_sub_and_test(i, &r->refs); in refcount_sub_and_test()
101 return atomic_dec_and_test(&r->refs); in refcount_dec_and_test()
[all …]
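The include/linux/refcount.h hits above cover the core refcount_t API: REFCOUNT_INIT and refcount_set for initialisation, refcount_read, and the inc / inc_not_zero / dec_and_test operations. As an illustration only (struct my_obj and its helpers are made-up names, not part of the listing; only the refcount_* calls come from the API shown above), the usual lifetime pattern looks like this:

    /* Hedged sketch of refcount_t lifetime management; my_obj, my_obj_alloc()
     * and my_obj_put() are hypothetical. */
    #include <linux/refcount.h>
    #include <linux/slab.h>

    struct my_obj {
            refcount_t refs;                        /* object lifetime */
    };

    static struct my_obj *my_obj_alloc(void)
    {
            struct my_obj *obj = kzalloc(sizeof(*obj), GFP_KERNEL);

            if (obj)
                    refcount_set(&obj->refs, 1);    /* caller owns the first ref */
            return obj;
    }

    static void my_obj_put(struct my_obj *obj)
    {
            if (refcount_dec_and_test(&obj->refs)) /* last reference dropped */
                    kfree(obj);
    }

On the lookup side, refcount_inc_not_zero() is the matching guard: it only takes a new reference if the count has not already reached zero, which is why several of the btrfs hits further down pair it with refcount_dec_and_test().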
/Linux-v5.4/tools/include/linux/
refcount.h  53 atomic_t refs; member
56 #define REFCOUNT_INIT(n) { .refs = ATOMIC_INIT(n), }
60 atomic_set(&r->refs, n); in refcount_set()
65 return atomic_read(&r->refs); in refcount_read()
78 unsigned int old, new, val = atomic_read(&r->refs); in refcount_inc_not_zero()
89 old = atomic_cmpxchg_relaxed(&r->refs, val, new); in refcount_inc_not_zero()
123 unsigned int old, new, val = atomic_read(&r->refs); in refcount_sub_and_test()
135 old = atomic_cmpxchg_release(&r->refs, val, new); in refcount_sub_and_test()
/Linux-v5.4/drivers/gpu/drm/nouveau/nvkm/core/
event.c  31 if (--event->refs[index * event->types_nr + type] == 0) { in nvkm_event_put()
44 if (++event->refs[index * event->types_nr + type] == 1) { in nvkm_event_get()
58 if (!event->refs || WARN_ON(index >= event->index_nr)) in nvkm_event_send()
77 if (event->refs) { in nvkm_event_fini()
78 kfree(event->refs); in nvkm_event_fini()
79 event->refs = NULL; in nvkm_event_fini()
87 event->refs = kzalloc(array3_size(index_nr, types_nr, in nvkm_event_init()
88 sizeof(*event->refs)), in nvkm_event_init()
90 if (!event->refs) in nvkm_event_init()
/Linux-v5.4/arch/x86/include/asm/
refcount.h  46 : [var] "+m" (r->refs.counter) in refcount_add()
55 : [var] "+m" (r->refs.counter) in refcount_inc()
63 : [var] "+m" (r->refs.counter) in refcount_dec()
72 r->refs.counter, e, "er", i, "cx"); in refcount_sub_and_test()
86 r->refs.counter, e, "cx"); in refcount_dec_and_test()
101 c = atomic_read(&(r->refs)); in refcount_add_not_zero()
111 : : [var] "m" (r->refs.counter) in refcount_add_not_zero()
116 } while (!atomic_try_cmpxchg(&(r->refs), &c, result)); in refcount_add_not_zero()
/Linux-v5.4/lib/
refcount.c  66 unsigned int new, val = atomic_read(&r->refs); in refcount_add_not_zero_checked()
79 } while (!atomic_try_cmpxchg_relaxed(&r->refs, &val, new)); in refcount_add_not_zero_checked()
123 unsigned int new, val = atomic_read(&r->refs); in refcount_inc_not_zero_checked()
134 } while (!atomic_try_cmpxchg_relaxed(&r->refs, &val, new)); in refcount_inc_not_zero_checked()
182 unsigned int new, val = atomic_read(&r->refs); in refcount_sub_and_test_checked()
194 } while (!atomic_try_cmpxchg_release(&r->refs, &val, new)); in refcount_sub_and_test_checked()
260 return atomic_try_cmpxchg_release(&r->refs, &val, 0); in refcount_dec_if_one()
277 unsigned int new, val = atomic_read(&r->refs); in refcount_dec_not_one()
292 } while (!atomic_try_cmpxchg_release(&r->refs, &val, new)); in refcount_dec_not_one()
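The lib/refcount.c hits are the "checked" variants (used in this kernel version when CONFIG_REFCOUNT_FULL is enabled): instead of plain atomic adds they loop on atomic_try_cmpxchg so that a zero count is never resurrected and an overflowing count saturates at UINT_MAX rather than wrapping. A simplified sketch of that loop shape, not the kernel's exact code:

    /* Simplified sketch of the checked cmpxchg loop; the real
     * refcount_inc_not_zero_checked() additionally WARNs when it saturates. */
    #include <linux/refcount.h>

    static bool sketch_inc_not_zero(refcount_t *r)
    {
            unsigned int new, val = atomic_read(&r->refs);

            do {
                    new = val + 1;
                    if (!val)               /* hit zero: refuse to resurrect */
                            return false;
                    if (unlikely(!new))     /* wrapped: stay saturated at UINT_MAX */
                            return true;
            } while (!atomic_try_cmpxchg_relaxed(&r->refs, &val, new));

            return true;
    }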
/Linux-v5.4/drivers/xen/
gntdev-dmabuf.c  49 grant_ref_t *refs; member
471 int count, u32 domid, u32 *refs, u32 *fd) in dmabuf_exp_from_refs() argument
483 map->grants[i].ref = refs[i]; in dmabuf_exp_from_refs()
522 dmabuf_imp_grant_foreign_access(struct page **pages, u32 *refs, in dmabuf_imp_grant_foreign_access() argument
546 refs[i] = cur_ref; in dmabuf_imp_grant_foreign_access()
556 static void dmabuf_imp_end_foreign_access(u32 *refs, int count) in dmabuf_imp_end_foreign_access() argument
561 if (refs[i] != GRANT_INVALID_REF) in dmabuf_imp_end_foreign_access()
562 gnttab_end_foreign_access(refs[i], 0, 0UL); in dmabuf_imp_end_foreign_access()
568 kfree(gntdev_dmabuf->u.imp.refs); in dmabuf_imp_free_storage()
581 gntdev_dmabuf->u.imp.refs = kcalloc(count, in dmabuf_imp_alloc_storage()
[all …]
/Linux-v5.4/fs/cifs/
dfs_cache.c  252 static inline void dump_refs(const struct dfs_info3_param *refs, int numrefs) in dump_refs() argument
258 const struct dfs_info3_param *ref = &refs[i]; in dump_refs()
369 static int copy_ref_data(const struct dfs_info3_param *refs, int numrefs, in copy_ref_data() argument
374 ce->ce_ttl = refs[0].ttl; in copy_ref_data()
376 ce->ce_srvtype = refs[0].server_type; in copy_ref_data()
377 ce->ce_flags = refs[0].ref_flag; in copy_ref_data()
378 ce->ce_path_consumed = refs[0].path_consumed; in copy_ref_data()
383 t = alloc_tgt(refs[i].node_name); in copy_ref_data()
405 alloc_cache_entry(const char *path, const struct dfs_info3_param *refs, in alloc_cache_entry() argument
423 rc = copy_ref_data(refs, numrefs, ce, NULL); in alloc_cache_entry()
[all …]
/Linux-v5.4/mm/
gup.c  1800 static inline struct page *try_get_compound_head(struct page *page, int refs) in try_get_compound_head() argument
1805 if (unlikely(!page_cache_add_speculative(head, refs))) in try_get_compound_head()
1982 int refs; in gup_hugepte() local
1996 refs = 0; in gup_hugepte()
2005 refs++; in gup_hugepte()
2008 head = try_get_compound_head(head, refs); in gup_hugepte()
2010 *nr -= refs; in gup_hugepte()
2016 *nr -= refs; in gup_hugepte()
2017 while (refs--) in gup_hugepte()
2057 int refs; in gup_huge_pmd() local
[all …]
/Linux-v5.4/fs/btrfs/
delayed-ref.h  36 refcount_t refs; member
74 refcount_t refs; member
311 WARN_ON(refcount_read(&ref->refs) == 0); in btrfs_put_delayed_ref()
312 if (refcount_dec_and_test(&ref->refs)) { in btrfs_put_delayed_ref()
331 if (refcount_dec_and_test(&head->refs)) in btrfs_put_delayed_ref_head()
delayed-inode.c  45 refcount_set(&delayed_node->refs, 0); in btrfs_init_delayed_node()
74 refcount_inc(&node->refs); in btrfs_get_delayed_node()
83 refcount_inc(&node->refs); /* can be accessed */ in btrfs_get_delayed_node()
105 if (refcount_inc_not_zero(&node->refs)) { in btrfs_get_delayed_node()
106 refcount_inc(&node->refs); in btrfs_get_delayed_node()
140 refcount_set(&node->refs, 2); in btrfs_get_or_create_delayed_node()
181 refcount_inc(&node->refs); /* inserted into list */ in btrfs_queue_delayed_node()
195 refcount_dec(&node->refs); /* not in the list */ in btrfs_dequeue_delayed_node()
216 refcount_inc(&node->refs); in btrfs_first_delayed_node()
243 refcount_inc(&next->refs); in btrfs_next_delayed_node()
[all …]
extent-tree.c  152 u64 offset, int metadata, u64 *refs, u64 *flags) in btrfs_lookup_extent_info() argument
241 refcount_inc(&head->refs); in btrfs_lookup_extent_info()
268 if (refs) in btrfs_lookup_extent_info()
269 *refs = num_refs; in btrfs_lookup_extent_info()
1023 u64 refs; in setup_inline_extent_backref() local
1037 refs = btrfs_extent_refs(leaf, ei); in setup_inline_extent_backref()
1038 refs += refs_to_add; in setup_inline_extent_backref()
1039 btrfs_set_extent_refs(leaf, ei, refs); in setup_inline_extent_backref()
1117 u64 refs; in update_inline_extent_backref() local
1120 refs = btrfs_extent_refs(leaf, ei); in update_inline_extent_backref()
[all …]
ref-verify.c  70 struct rb_root refs; member
245 while((n = rb_first(&be->refs))) { in free_block_entry()
247 rb_erase(&ref->node, &be->refs); in free_block_entry()
298 be->refs = RB_ROOT; in add_block_entry()
342 exist = insert_ref_entry(&be->refs, ref); in add_tree_block()
371 if (insert_ref_entry(&be->refs, ref)) { in add_shared_data_ref()
409 if (insert_ref_entry(&be->refs, ref)) { in add_extent_data_ref()
643 for (n = rb_first(&be->refs); n; n = rb_next(n)) { in dump_block_entry()
814 exist = insert_ref_entry(&be->refs, ref); in btrfs_ref_tree_mod()
827 rb_erase(&exist->node, &be->refs); in btrfs_ref_tree_mod()
ordered-data.c  207 refcount_set(&entry->refs, 1); in __btrfs_add_ordered_extent()
358 refcount_inc(&entry->refs); in btrfs_dec_test_first_ordered_pending()
422 refcount_inc(&entry->refs); in btrfs_dec_test_ordered_pending()
439 if (refcount_dec_and_test(&entry->refs)) { in btrfs_put_ordered_extent()
546 refcount_inc(&ordered->refs); in btrfs_wait_ordered_extents()
725 refcount_inc(&entry->refs); in btrfs_lookup_ordered_extent()
766 refcount_inc(&entry->refs); in btrfs_lookup_ordered_range()
789 refcount_inc(&entry->refs); in btrfs_lookup_first_ordered_extent()
1006 refcount_dec(&cache->refs); in btrfs_lock_and_flush_ordered_range()
delayed-inode.h  57 refcount_t refs; member
70 refcount_t refs; member
disk-io.h  100 if (refcount_inc_not_zero(&root->refs)) in btrfs_grab_fs_root()
107 if (refcount_dec_and_test(&root->refs)) in btrfs_put_fs_root()
extent_io.c  73 refcount_read(&state->refs)); in btrfs_leak_debug_check()
81 eb->start, eb->len, atomic_read(&eb->refs), eb->bflags); in btrfs_leak_debug_check()
308 refcount_set(&state->refs, 1); in alloc_extent_state()
318 if (refcount_dec_and_test(&state->refs)) { in free_extent_state()
716 refcount_dec(&cached->refs); in __clear_extent_bit()
868 refcount_inc(&state->refs); in wait_extent_bit()
913 refcount_inc(&state->refs); in cache_state_if_flags()
1715 refcount_inc(&state->refs); in find_delalloc_range()
3028 refcount_inc(&em->refs); in __get_extent_map()
3039 refcount_inc(&em->refs); in __get_extent_map()
[all …]
/Linux-v5.4/include/uapi/xen/
gntdev.h  64 struct ioctl_gntdev_grant_ref refs[1]; member
250 __u32 refs[1]; member
292 __u32 refs[1]; member
/Linux-v5.4/drivers/net/ethernet/mellanox/mlx4/
port.c  69 table->refs[i] = 0; in mlx4_init_mac_table()
83 table->refs[i] = 0; in mlx4_init_vlan_table()
118 if (table->refs[i] && in find_index()
157 if (!table->refs[i]) in mlx4_find_cached_mac()
232 dup_table->refs[index_at_port]) { in __mlx4_register_mac()
242 if (!table->refs[index_at_dup_port] || in __mlx4_register_mac()
251 if (!table->refs[i]) { in __mlx4_register_mac()
255 if (!dup_table->refs[i]) in __mlx4_register_mac()
265 ++table->refs[i]; in __mlx4_register_mac()
308 table->refs[free] = 1; in __mlx4_register_mac()
[all …]
/Linux-v5.4/drivers/misc/sgi-xp/
xpc.h  683 s32 refs = atomic_dec_return(&ch->references); in xpc_msgqueue_deref() local
685 DBUG_ON(refs < 0); in xpc_msgqueue_deref()
686 if (refs == 0) in xpc_msgqueue_deref()
700 s32 refs = atomic_dec_return(&part->references); in xpc_part_deref() local
702 DBUG_ON(refs < 0); in xpc_part_deref()
703 if (refs == 0 && part->setup_state == XPC_P_SS_WTEARDOWN) in xpc_part_deref()
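By contrast, the sgi-xp hits show the older open-coded style that refcount_t was introduced to replace: a raw atomic_t dropped with atomic_dec_return(), a debug assertion against underflow, and teardown once the count reaches zero. A sketch of that pattern with illustrative names (my_part and my_part_teardown() are not taken from xpc.h):

    /* Sketch of the open-coded atomic_t deref pattern. */
    #include <linux/atomic.h>
    #include <linux/bug.h>
    #include <linux/types.h>

    struct my_part {
            atomic_t references;
    };

    static void my_part_teardown(struct my_part *part);

    static void my_part_deref(struct my_part *part)
    {
            s32 refs = atomic_dec_return(&part->references);

            BUG_ON(refs < 0);               /* more puts than gets */
            if (refs == 0)
                    my_part_teardown(part); /* last user is gone */
    }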
/Linux-v5.4/drivers/gpu/drm/nouveau/
nouveau_vmm.c  65 if (vma && --vma->refs <= 0) { in nouveau_vma_del()
86 vma->refs++; in nouveau_vma_new()
93 vma->refs = 1; in nouveau_vma_new()
/Linux-v5.4/drivers/staging/most/
core.c  43 int refs; member
935 if (c->pipe0.refs && c->pipe0.comp->tx_completion) in arm_mbo()
938 if (c->pipe1.refs && c->pipe1.comp->tx_completion) in arm_mbo()
1048 if (c->pipe0.refs && c->pipe1.refs && in channel_has_mbo()
1081 if (c->pipe0.refs && c->pipe1.refs && in most_get_mbo()
1153 if (c->pipe0.refs && c->pipe0.comp->rx_completion && in most_read_completion()
1157 if (c->pipe1.refs && c->pipe1.comp->rx_completion && in most_read_completion()
1186 if (c->pipe0.refs + c->pipe1.refs > 0) in most_start_channel()
1226 c->pipe0.refs++; in most_start_channel()
1228 c->pipe1.refs++; in most_start_channel()
[all …]
/Linux-v5.4/fs/btrfs/tests/
qgroup-tests.c  74 u64 refs; in add_tree_ref() local
99 refs = btrfs_extent_refs(path->nodes[0], item); in add_tree_ref()
100 btrfs_set_extent_refs(path->nodes[0], item, refs + 1); in add_tree_ref()
158 u64 refs; in remove_extent_ref() local
183 refs = btrfs_extent_refs(path->nodes[0], item); in remove_extent_ref()
184 btrfs_set_extent_refs(path->nodes[0], item, refs - 1); in remove_extent_ref()
/Linux-v5.4/tools/perf/arch/x86/util/
intel-bts.c  278 struct intel_bts_snapshot_ref *refs; in intel_bts_alloc_snapshot_refs() local
286 refs = calloc(new_cnt, sz); in intel_bts_alloc_snapshot_refs()
287 if (!refs) in intel_bts_alloc_snapshot_refs()
290 memcpy(refs, btsr->snapshot_refs, cnt * sz); in intel_bts_alloc_snapshot_refs()
292 btsr->snapshot_refs = refs; in intel_bts_alloc_snapshot_refs()
/Linux-v5.4/net/netfilter/
nf_conntrack_ecache.c  42 struct nf_conn *refs[16]; in ecache_work_evict_list() local
67 refs[evicted] = ct; in ecache_work_evict_list()
69 if (++evicted >= ARRAY_SIZE(refs)) { in ecache_work_evict_list()
79 nf_ct_put(refs[--evicted]); in ecache_work_evict_list()
/Linux-v5.4/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
base.c  116 u32 refs; member
133 ptc->refs = 0; in nvkm_mmu_ptc_find()
155 if (pt->ptc->refs < 8 /* Heuristic. */ && !force) { in nvkm_mmu_ptc_put()
157 pt->ptc->refs++; in nvkm_mmu_ptc_put()
195 ptc->refs--; in nvkm_mmu_ptc_get()
