Searched refs:cb (Results 1 – 25 of 994) sorted by relevance

/Linux-v5.4/drivers/misc/habanalabs/
command_buffer.c:14 static void cb_fini(struct hl_device *hdev, struct hl_cb *cb) in cb_fini() argument
16 hdev->asic_funcs->asic_dma_free_coherent(hdev, cb->size, in cb_fini()
17 (void *) (uintptr_t) cb->kernel_address, in cb_fini()
18 cb->bus_address); in cb_fini()
19 kfree(cb); in cb_fini()
22 static void cb_do_release(struct hl_device *hdev, struct hl_cb *cb) in cb_do_release() argument
24 if (cb->is_pool) { in cb_do_release()
26 list_add(&cb->pool_list, &hdev->cb_pool); in cb_do_release()
29 cb_fini(hdev, cb); in cb_do_release()
36 struct hl_cb *cb; in cb_release() local
[all …]
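
The habanalabs helpers above either return a command buffer to a device-wide pool or free its DMA-coherent backing along with the wrapper itself. A minimal userspace sketch of that release-to-pool-or-free pattern, with invented names and a plain free list standing in for hdev->cb_pool (no locking shown):

#include <stdbool.h>
#include <stdlib.h>

struct buf {
    bool is_pool;          /* allocated from the preallocated pool? */
    struct buf *next;      /* free-list linkage when pooled */
    size_t size;
    void *data;            /* stands in for the DMA-coherent allocation */
};

static struct buf *pool_head;      /* the "cb_pool" free list */

static void buf_fini(struct buf *b)
{
    free(b->data);                 /* plays the asic_dma_free_coherent() role */
    free(b);
}

static void buf_do_release(struct buf *b)
{
    if (b->is_pool) {              /* recycle pooled buffers */
        b->next = pool_head;
        pool_head = b;
    } else {
        buf_fini(b);               /* one-off buffers are destroyed */
    }
}
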
/Linux-v5.4/drivers/scsi/
myrb.c:90 static bool myrb_create_mempools(struct pci_dev *pdev, struct myrb_hba *cb) in myrb_create_mempools() argument
95 elem_size = cb->host->sg_tablesize * elem_align; in myrb_create_mempools()
96 cb->sg_pool = dma_pool_create("myrb_sg", &pdev->dev, in myrb_create_mempools()
98 if (cb->sg_pool == NULL) { in myrb_create_mempools()
99 shost_printk(KERN_ERR, cb->host, in myrb_create_mempools()
104 cb->dcdb_pool = dma_pool_create("myrb_dcdb", &pdev->dev, in myrb_create_mempools()
107 if (!cb->dcdb_pool) { in myrb_create_mempools()
108 dma_pool_destroy(cb->sg_pool); in myrb_create_mempools()
109 cb->sg_pool = NULL; in myrb_create_mempools()
110 shost_printk(KERN_ERR, cb->host, in myrb_create_mempools()
[all …]
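
myrb_create_mempools() above creates its DMA pools one after another and, when a later allocation fails, destroys the pools that already succeeded before reporting failure. A hedged userspace sketch of that allocate-with-rollback shape; the pool type, names, and sizes are placeholders, not the driver's:

#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

struct pool { size_t elem_size; };            /* stand-in for struct dma_pool */

static struct pool *pool_create(size_t elem_size)
{
    struct pool *p = malloc(sizeof(*p));

    if (p)
        p->elem_size = elem_size;
    return p;
}

static void pool_destroy(struct pool *p)
{
    free(p);
}

struct hba { struct pool *sg_pool, *dcdb_pool; };

static bool create_mempools(struct hba *hba, size_t sg_elem, size_t dcdb_elem)
{
    hba->sg_pool = pool_create(sg_elem);
    if (!hba->sg_pool) {
        fprintf(stderr, "failed to allocate sg pool\n");
        return false;
    }

    hba->dcdb_pool = pool_create(dcdb_elem);
    if (!hba->dcdb_pool) {
        pool_destroy(hba->sg_pool);           /* roll back the earlier success */
        hba->sg_pool = NULL;
        fprintf(stderr, "failed to allocate dcdb pool\n");
        return false;
    }
    return true;
}
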
/Linux-v5.4/block/
blk-stat.c:54 struct blk_stat_callback *cb; in blk_stat_add() local
64 list_for_each_entry_rcu(cb, &q->stats->callbacks, list) { in blk_stat_add()
65 if (!blk_stat_is_active(cb)) in blk_stat_add()
68 bucket = cb->bucket_fn(rq); in blk_stat_add()
72 stat = &get_cpu_ptr(cb->cpu_stat)[bucket]; in blk_stat_add()
74 put_cpu_ptr(cb->cpu_stat); in blk_stat_add()
81 struct blk_stat_callback *cb = from_timer(cb, t, timer); in blk_stat_timer_fn() local
85 for (bucket = 0; bucket < cb->buckets; bucket++) in blk_stat_timer_fn()
86 blk_rq_stat_init(&cb->stat[bucket]); in blk_stat_timer_fn()
91 cpu_stat = per_cpu_ptr(cb->cpu_stat, cpu); in blk_stat_timer_fn()
[all …]
blk-stat.h:99 struct blk_stat_callback *cb);
111 struct blk_stat_callback *cb);
122 void blk_stat_free_callback(struct blk_stat_callback *cb);
129 static inline bool blk_stat_is_active(struct blk_stat_callback *cb) in blk_stat_is_active() argument
131 return timer_pending(&cb->timer); in blk_stat_is_active()
142 static inline void blk_stat_activate_nsecs(struct blk_stat_callback *cb, in blk_stat_activate_nsecs() argument
145 mod_timer(&cb->timer, jiffies + nsecs_to_jiffies(nsecs)); in blk_stat_activate_nsecs()
148 static inline void blk_stat_deactivate(struct blk_stat_callback *cb) in blk_stat_deactivate() argument
150 del_timer_sync(&cb->timer); in blk_stat_deactivate()
161 static inline void blk_stat_activate_msecs(struct blk_stat_callback *cb, in blk_stat_activate_msecs() argument
[all …]
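
blk-stat above keeps one set of request-latency buckets per CPU; when a callback's timer fires it reinitialises the summary buckets and folds the per-CPU values into them, and blk_stat_is_active() simply reports whether that timer is pending. A simplified sketch of the fold step only, with fixed-size arrays standing in for the per-CPU allocations (bucket and CPU counts are arbitrary here):

#define NR_CPUS    4
#define NR_BUCKETS 2

struct rq_stat {
    unsigned long long sum;    /* accumulated value, e.g. nanoseconds */
    unsigned long nr;          /* number of samples */
};

/* Reset the summary buckets, then merge and clear every CPU's private copy,
 * roughly what blk_stat_timer_fn() does per bucket. */
static void fold_stats(struct rq_stat summary[NR_BUCKETS],
                       struct rq_stat cpu_stat[NR_CPUS][NR_BUCKETS])
{
    for (int b = 0; b < NR_BUCKETS; b++)
        summary[b] = (struct rq_stat){ 0, 0 };

    for (int cpu = 0; cpu < NR_CPUS; cpu++) {
        for (int b = 0; b < NR_BUCKETS; b++) {
            summary[b].sum += cpu_stat[cpu][b].sum;
            summary[b].nr  += cpu_stat[cpu][b].nr;
            cpu_stat[cpu][b] = (struct rq_stat){ 0, 0 };
        }
    }
}
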
/Linux-v5.4/drivers/irqchip/
irq-crossbar.c:43 static struct crossbar_device *cb; variable
47 writel(cb_no, cb->crossbar_base + cb->register_offsets[irq_no]); in crossbar_writel()
52 writew(cb_no, cb->crossbar_base + cb->register_offsets[irq_no]); in crossbar_writew()
57 writeb(cb_no, cb->crossbar_base + cb->register_offsets[irq_no]); in crossbar_writeb()
84 raw_spin_lock(&cb->lock); in allocate_gic_irq()
85 for (i = cb->int_max - 1; i >= 0; i--) { in allocate_gic_irq()
86 if (cb->irq_map[i] == IRQ_FREE) { in allocate_gic_irq()
87 cb->irq_map[i] = hwirq; in allocate_gic_irq()
91 raw_spin_unlock(&cb->lock); in allocate_gic_irq()
104 cb->irq_map[i] = IRQ_FREE; in allocate_gic_irq()
[all …]
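
allocate_gic_irq() above takes the crossbar's lock, scans irq_map from the highest line downwards for an IRQ_FREE slot, records the hardware IRQ there, and returns the slot to IRQ_FREE on a later error. The same locked top-down slot search in userspace, with a pthread mutex in place of the raw spinlock and an arbitrary line count:

#include <pthread.h>

#define NR_LINES 64
#define IRQ_FREE (-1)

static pthread_mutex_t map_lock = PTHREAD_MUTEX_INITIALIZER;
static int irq_map[NR_LINES];

static void map_init(void)
{
    for (int i = 0; i < NR_LINES; i++)
        irq_map[i] = IRQ_FREE;
}

/* Claim the highest free line for hwirq; return its index, or -1 if full. */
static int allocate_line(int hwirq)
{
    int found = -1;

    pthread_mutex_lock(&map_lock);
    for (int i = NR_LINES - 1; i >= 0; i--) {
        if (irq_map[i] == IRQ_FREE) {
            irq_map[i] = hwirq;
            found = i;
            break;
        }
    }
    pthread_mutex_unlock(&map_lock);
    return found;
}

/* On a later failure the caller simply puts the slot back: irq_map[i] = IRQ_FREE. */
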
/Linux-v5.4/drivers/mfd/
lm3533-ctrlbank.c:29 static inline u8 lm3533_ctrlbank_get_reg(struct lm3533_ctrlbank *cb, u8 base) in lm3533_ctrlbank_get_reg() argument
31 return base + cb->id; in lm3533_ctrlbank_get_reg()
34 int lm3533_ctrlbank_enable(struct lm3533_ctrlbank *cb) in lm3533_ctrlbank_enable() argument
39 dev_dbg(cb->dev, "%s - %d\n", __func__, cb->id); in lm3533_ctrlbank_enable()
41 mask = 1 << cb->id; in lm3533_ctrlbank_enable()
42 ret = lm3533_update(cb->lm3533, LM3533_REG_CTRLBANK_ENABLE, in lm3533_ctrlbank_enable()
45 dev_err(cb->dev, "failed to enable ctrlbank %d\n", cb->id); in lm3533_ctrlbank_enable()
51 int lm3533_ctrlbank_disable(struct lm3533_ctrlbank *cb) in lm3533_ctrlbank_disable() argument
56 dev_dbg(cb->dev, "%s - %d\n", __func__, cb->id); in lm3533_ctrlbank_disable()
58 mask = 1 << cb->id; in lm3533_ctrlbank_disable()
[all …]
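
The lm3533 helpers above address a per-bank register as base + cb->id and enable or disable a bank by updating the single bit 1 << cb->id in the shared enable register. The arithmetic in isolation; the base address and bank count below are placeholders, not taken from the datasheet:

#include <stdint.h>
#include <stdio.h>

static uint8_t ctrlbank_reg(uint8_t base, uint8_t id)
{
    return base + id;              /* each bank's own copy of the register */
}

int main(void)
{
    const uint8_t base = 0x40;     /* illustrative register base */

    for (uint8_t id = 0; id < 4; id++)
        printf("bank %u: reg 0x%02x, enable mask 0x%02x\n",
               id, ctrlbank_reg(base, id), 1u << id);
    return 0;
}
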
/Linux-v5.4/drivers/net/phy/
mdio-mux.c:40 struct mdio_mux_child_bus *cb = bus->priv; in mdio_mux_read() local
41 struct mdio_mux_parent_bus *pb = cb->parent; in mdio_mux_read()
45 r = pb->switch_fn(pb->current_child, cb->bus_number, pb->switch_data); in mdio_mux_read()
49 pb->current_child = cb->bus_number; in mdio_mux_read()
64 struct mdio_mux_child_bus *cb = bus->priv; in mdio_mux_write() local
65 struct mdio_mux_parent_bus *pb = cb->parent; in mdio_mux_write()
70 r = pb->switch_fn(pb->current_child, cb->bus_number, pb->switch_data); in mdio_mux_write()
74 pb->current_child = cb->bus_number; in mdio_mux_write()
97 struct mdio_mux_child_bus *cb; in mdio_mux_init() local
144 cb = devm_kzalloc(dev, sizeof(*cb), GFP_KERNEL); in mdio_mux_init()
[all …]
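
mdio_mux_read() and mdio_mux_write() above first call the parent's switch_fn() to select the child bus, record the selected child only when the switch succeeds, and then forward the access to the parent MDIO bus. A compact sketch of that select-then-forward pattern; the switch_fn signature and the printf stand-in for the real bus access are simplifications:

#include <stdio.h>

struct parent_bus {
    int current_child;                               /* last selected child */
    int (*switch_fn)(int current, int desired, void *data);
    void *switch_data;
};

struct child_bus {
    struct parent_bus *parent;
    int bus_number;
};

static int mux_read(struct child_bus *cb, int phy, int reg)
{
    struct parent_bus *pb = cb->parent;
    int r = pb->switch_fn(pb->current_child, cb->bus_number, pb->switch_data);

    if (r)
        return r;                      /* could not select this child bus */
    pb->current_child = cb->bus_number;

    printf("read phy %d reg %d via child bus %d\n", phy, reg, cb->bus_number);
    return 0;                          /* real code forwards to the parent bus here */
}
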
/Linux-v5.4/arch/s390/kernel/
runtime_instr.c:53 static void init_runtime_instr_cb(struct runtime_instr_cb *cb) in init_runtime_instr_cb() argument
55 cb->rla = 0xfff; in init_runtime_instr_cb()
56 cb->s = 1; in init_runtime_instr_cb()
57 cb->k = 1; in init_runtime_instr_cb()
58 cb->ps = 1; in init_runtime_instr_cb()
59 cb->pc = 1; in init_runtime_instr_cb()
60 cb->key = PAGE_DEFAULT_KEY; in init_runtime_instr_cb()
61 cb->v = 1; in init_runtime_instr_cb()
72 struct runtime_instr_cb *cb; in SYSCALL_DEFINE2() local
86 cb = kzalloc(sizeof(*cb), GFP_KERNEL); in SYSCALL_DEFINE2()
[all …]
/Linux-v5.4/drivers/misc/mei/
interrupt.c:31 struct mei_cl_cb *cb, *next; in mei_irq_compl_handler() local
34 list_for_each_entry_safe(cb, next, cmpl_list, list) { in mei_irq_compl_handler()
35 cl = cb->cl; in mei_irq_compl_handler()
36 list_del_init(&cb->list); in mei_irq_compl_handler()
39 mei_cl_complete(cl, cb); in mei_irq_compl_handler()
92 struct mei_cl_cb *cb; in mei_cl_irq_read_msg() local
96 cb = list_first_entry_or_null(&cl->rd_pending, struct mei_cl_cb, list); in mei_cl_irq_read_msg()
97 if (!cb) { in mei_cl_irq_read_msg()
102 cb = mei_cl_alloc_cb(cl, mei_cl_mtu(cl), MEI_FOP_READ, cl->fp); in mei_cl_irq_read_msg()
103 if (!cb) in mei_cl_irq_read_msg()
[all …]
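
mei_irq_compl_handler() above walks the completion list with list_for_each_entry_safe(), which caches the next element so the current callback can be unlinked and completed from inside the loop. The same deletion-safe traversal, sketched with a plain singly linked list and invented names:

#include <stdio.h>
#include <stdlib.h>

struct cb {
    struct cb *next;
    int id;
};

/* Complete and free every queued callback.  Saving 'next' before freeing the
 * current node is what makes removal during the walk safe, the role that
 * list_for_each_entry_safe() plays in the driver. */
static void complete_all(struct cb **head)
{
    struct cb *cur = *head;

    while (cur) {
        struct cb *next = cur->next;
        printf("completing cb %d\n", cur->id);
        free(cur);
        cur = next;
    }
    *head = NULL;
}
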
client.c:315 void mei_io_cb_free(struct mei_cl_cb *cb) in mei_io_cb_free() argument
317 if (cb == NULL) in mei_io_cb_free()
320 list_del(&cb->list); in mei_io_cb_free()
321 kfree(cb->buf.data); in mei_io_cb_free()
322 kfree(cb); in mei_io_cb_free()
333 static inline void mei_tx_cb_enqueue(struct mei_cl_cb *cb, in mei_tx_cb_enqueue() argument
336 list_add_tail(&cb->list, head); in mei_tx_cb_enqueue()
337 cb->cl->tx_cb_queued++; in mei_tx_cb_enqueue()
347 static inline void mei_tx_cb_dequeue(struct mei_cl_cb *cb) in mei_tx_cb_dequeue() argument
349 if (!WARN_ON(cb->cl->tx_cb_queued == 0)) in mei_tx_cb_dequeue()
[all …]
/Linux-v5.4/drivers/misc/sgi-gru/
gru_instructions.h:22 extern int gru_check_status_proc(void *cb);
23 extern int gru_wait_proc(void *cb);
24 extern void gru_wait_abort_proc(void *cb);
79 unsigned long cb; member
359 static inline void gru_vload_phys(void *cb, unsigned long gpa, in gru_vload_phys() argument
362 struct gru_instruction *ins = (struct gru_instruction *)cb; in gru_vload_phys()
371 static inline void gru_vstore_phys(void *cb, unsigned long gpa, in gru_vstore_phys() argument
374 struct gru_instruction *ins = (struct gru_instruction *)cb; in gru_vstore_phys()
383 static inline void gru_vload(void *cb, unsigned long mem_addr, in gru_vload() argument
387 struct gru_instruction *ins = (struct gru_instruction *)cb; in gru_vload()
[all …]
grukservices.c:254 static int gru_get_cpu_resources(int dsr_bytes, void **cb, void **dsr) in gru_get_cpu_resources() argument
263 *cb = bs->kernel_cb + lcpu * GRU_HANDLE_STRIDE; in gru_get_cpu_resources()
271 static void gru_free_cpu_resources(void *cb, void *dsr) in gru_free_cpu_resources() argument
358 void gru_lock_async_resource(unsigned long han, void **cb, void **dsr) in gru_lock_async_resource() argument
366 if (cb) in gru_lock_async_resource()
367 *cb = bs->kernel_cb + ncpus * GRU_HANDLE_STRIDE; in gru_lock_async_resource()
386 int gru_get_cb_exception_detail(void *cb, in gru_get_cb_exception_detail() argument
405 off = cb - kgts->ts_gru->gs_gru_base_vaddr; in gru_get_cb_exception_detail()
411 cbrnum = thread_cbr_number(kgts, get_cb_number(cb)); in gru_get_cb_exception_detail()
412 cbe = get_cbe(GRUBASE(cb), cbrnum); in gru_get_cb_exception_detail()
[all …]
/Linux-v5.4/fs/nfsd/
nfs4callback.c:343 const struct nfsd4_callback *cb, in encode_cb_sequence4args() argument
346 struct nfsd4_session *session = cb->cb_clp->cl_cb_session; in encode_cb_sequence4args()
387 struct nfsd4_callback *cb) in decode_cb_sequence4resok() argument
389 struct nfsd4_session *session = cb->cb_clp->cl_cb_session; in decode_cb_sequence4resok()
425 cb->cb_seq_status = status; in decode_cb_sequence4resok()
433 struct nfsd4_callback *cb) in decode_cb_sequence4res() argument
437 if (cb->cb_clp->cl_minorversion == 0) in decode_cb_sequence4res()
440 status = decode_cb_op_status(xdr, OP_CB_SEQUENCE, &cb->cb_seq_status); in decode_cb_sequence4res()
441 if (unlikely(status || cb->cb_seq_status)) in decode_cb_sequence4res()
444 return decode_cb_sequence4resok(xdr, cb); in decode_cb_sequence4res()
[all …]
/Linux-v5.4/drivers/net/ethernet/netronome/nfp/
ccm_mbox.c:64 struct nfp_ccm_mbox_cmsg_cb *cb = (void *)skb->cb; in nfp_ccm_mbox_msg_init() local
66 cb->state = NFP_NET_MBOX_CMSG_STATE_QUEUED; in nfp_ccm_mbox_msg_init()
67 cb->err = 0; in nfp_ccm_mbox_msg_init()
68 cb->max_len = max_len; in nfp_ccm_mbox_msg_init()
69 cb->exp_reply = exp_reply; in nfp_ccm_mbox_msg_init()
70 cb->posted = false; in nfp_ccm_mbox_msg_init()
75 struct nfp_ccm_mbox_cmsg_cb *cb = (void *)skb->cb; in nfp_ccm_mbox_maxlen() local
77 return cb->max_len; in nfp_ccm_mbox_maxlen()
82 struct nfp_ccm_mbox_cmsg_cb *cb = (void *)skb->cb; in nfp_ccm_mbox_done() local
84 return cb->state == NFP_NET_MBOX_CMSG_STATE_DONE; in nfp_ccm_mbox_done()
[all …]
/Linux-v5.4/drivers/dma-buf/
st-dma-fence.c:37 struct dma_fence_cb cb; member
41 static void mock_wakeup(struct dma_fence *f, struct dma_fence_cb *cb) in mock_wakeup() argument
43 wake_up_process(container_of(cb, struct wait_cb, cb)->task); in mock_wakeup()
49 struct wait_cb cb = { .task = current }; in mock_wait() local
51 if (dma_fence_add_callback(f, &cb.cb, mock_wakeup)) in mock_wait()
67 if (!dma_fence_remove_callback(f, &cb.cb)) in mock_wait()
147 struct dma_fence_cb cb; member
151 static void simple_callback(struct dma_fence *f, struct dma_fence_cb *cb) in simple_callback() argument
153 smp_store_mb(container_of(cb, struct simple_cb, cb)->seen, true); in simple_callback()
158 struct simple_cb cb = {}; in test_add_callback() local
[all …]
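
The dma-fence self-tests above embed a struct dma_fence_cb inside a larger wait_cb or simple_cb and use container_of() in the callback to recover the enclosing structure. A standalone illustration of that embed-and-recover idiom; container_of is defined locally so the sketch builds in userspace, and all other names are invented:

#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
    ((type *)((char *)(ptr) - offsetof(type, member)))

struct fence_cb {                         /* what the framework hands back */
    void (*func)(struct fence_cb *cb);
};

struct wait_cb {                          /* caller state wrapping the callback */
    struct fence_cb cb;
    const char *task_name;
};

static void wakeup_cb(struct fence_cb *cb)
{
    struct wait_cb *w = container_of(cb, struct wait_cb, cb);

    printf("waking %s\n", w->task_name);  /* plays the wake_up_process() role */
}

int main(void)
{
    struct wait_cb w = { .cb = { .func = wakeup_cb }, .task_name = "current" };

    w.cb.func(&w.cb);                     /* the framework invoking the callback */
    return 0;
}
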
/Linux-v5.4/tools/testing/selftests/bpf/verifier/
ctx_skb.c:396 offsetof(struct __sk_buff, cb[0])),
398 offsetof(struct __sk_buff, cb[0]) + 1),
400 offsetof(struct __sk_buff, cb[0]) + 2),
402 offsetof(struct __sk_buff, cb[0]) + 3),
404 offsetof(struct __sk_buff, cb[1])),
406 offsetof(struct __sk_buff, cb[1]) + 1),
408 offsetof(struct __sk_buff, cb[1]) + 2),
410 offsetof(struct __sk_buff, cb[1]) + 3),
412 offsetof(struct __sk_buff, cb[2])),
414 offsetof(struct __sk_buff, cb[2]) + 1),
[all …]
/Linux-v5.4/fs/btrfs/
compression.c:63 static int btrfs_decompress_bio(struct compressed_bio *cb);
75 struct compressed_bio *cb, in check_compressed_csum() argument
86 u8 *cb_sum = cb->sums; in check_compressed_csum()
93 for (i = 0; i < cb->nr_pages; i++) { in check_compressed_csum()
94 page = cb->compressed_pages[i]; in check_compressed_csum()
104 csum, cb_sum, cb->mirror_num); in check_compressed_csum()
128 struct compressed_bio *cb = bio->bi_private; in end_compressed_bio_read() local
136 cb->errors = 1; in end_compressed_bio_read()
141 if (!refcount_dec_and_test(&cb->pending_bios)) in end_compressed_bio_read()
148 ASSERT(btrfs_io_bio(cb->orig_bio)); in end_compressed_bio_read()
[all …]
/Linux-v5.4/net/decnet/
dn_nsp_in.c:77 struct dn_skb_cb *cb = DN_SKB_CB(skb); in dn_log_martian() local
80 le16_to_cpu(cb->src), in dn_log_martian()
81 le16_to_cpu(cb->dst), in dn_log_martian()
82 le16_to_cpu(cb->src_port), in dn_log_martian()
83 le16_to_cpu(cb->dst_port)); in dn_log_martian()
219 struct dn_skb_cb *cb = DN_SKB_CB(skb); in dn_find_listener() local
237 cb->src_port = msg->srcaddr; in dn_find_listener()
238 cb->dst_port = msg->dstaddr; in dn_find_listener()
239 cb->services = msg->services; in dn_find_listener()
240 cb->info = msg->info; in dn_find_listener()
[all …]
/Linux-v5.4/drivers/dma/
dmaengine.h:107 struct dmaengine_desc_callback *cb) in dmaengine_desc_get_callback() argument
109 cb->callback = tx->callback; in dmaengine_desc_get_callback()
110 cb->callback_result = tx->callback_result; in dmaengine_desc_get_callback()
111 cb->callback_param = tx->callback_param; in dmaengine_desc_get_callback()
124 dmaengine_desc_callback_invoke(struct dmaengine_desc_callback *cb, in dmaengine_desc_callback_invoke() argument
132 if (cb->callback_result) { in dmaengine_desc_callback_invoke()
135 cb->callback_result(cb->callback_param, result); in dmaengine_desc_callback_invoke()
136 } else if (cb->callback) { in dmaengine_desc_callback_invoke()
137 cb->callback(cb->callback_param); in dmaengine_desc_callback_invoke()
155 struct dmaengine_desc_callback cb; in dmaengine_desc_get_callback_invoke() local
[all …]
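
dmaengine_desc_get_callback() above snapshots the descriptor's callback pointers into a caller-owned struct so they can still be invoked after the descriptor is recycled, and dmaengine_desc_callback_invoke() prefers the result-carrying variant when both are set. A hedged sketch of that snapshot-then-invoke pattern with simplified types:

struct tx_result {
    int result;
    unsigned int residue;
};

struct tx_descriptor {                    /* simplified async_tx descriptor */
    void (*callback)(void *param);
    void (*callback_result)(void *param, const struct tx_result *res);
    void *callback_param;
};

struct desc_callback {                    /* local copy that outlives the descriptor */
    void (*callback)(void *param);
    void (*callback_result)(void *param, const struct tx_result *res);
    void *callback_param;
};

static void callback_get(const struct tx_descriptor *tx, struct desc_callback *cb)
{
    cb->callback = tx->callback;
    cb->callback_result = tx->callback_result;
    cb->callback_param = tx->callback_param;
}

static void callback_invoke(const struct desc_callback *cb,
                            const struct tx_result *res)
{
    if (cb->callback_result)              /* the richer variant wins when present */
        cb->callback_result(cb->callback_param, res);
    else if (cb->callback)
        cb->callback(cb->callback_param);
}
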
/Linux-v5.4/arch/x86/kernel/cpu/
scattered.c:51 const struct cpuid_bit *cb; in init_scattered_cpuid_features() local
53 for (cb = cpuid_bits; cb->feature; cb++) { in init_scattered_cpuid_features()
56 max_level = cpuid_eax(cb->level & 0xffff0000); in init_scattered_cpuid_features()
57 if (max_level < cb->level || in init_scattered_cpuid_features()
58 max_level > (cb->level | 0xffff)) in init_scattered_cpuid_features()
61 cpuid_count(cb->level, cb->sub_leaf, &regs[CPUID_EAX], in init_scattered_cpuid_features()
65 if (regs[cb->reg] & (1 << cb->bit)) in init_scattered_cpuid_features()
66 set_cpu_cap(c, cb->feature); in init_scattered_cpuid_features()
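
init_scattered_cpuid_features() above walks a table of {leaf, sub-leaf, register, bit} entries, checks that the leaf is inside the CPU's reported range, runs CPUID, and sets the feature when the bit is present. A userspace sketch of the same table-driven scan; cpuid_count() is faked here so the example stays self-contained, and the table entry is invented:

#include <stdint.h>
#include <stdio.h>

enum reg { R_EAX, R_EBX, R_ECX, R_EDX };

struct cpuid_bit {
    int feature;               /* feature id to record when the bit is set */
    enum reg reg;
    uint8_t bit;
    uint32_t level;            /* CPUID leaf */
    uint32_t sub_leaf;
};

/* Fake CPUID: pretend leaf 7, sub-leaf 0 reports bit 9 of EBX. */
static void cpuid_count(uint32_t leaf, uint32_t sub, uint32_t regs[4])
{
    regs[R_EAX] = regs[R_EBX] = regs[R_ECX] = regs[R_EDX] = 0;
    if (leaf == 7 && sub == 0)
        regs[R_EBX] = 1u << 9;
}

static const struct cpuid_bit cpuid_bits[] = {
    { .feature = 1, .reg = R_EBX, .bit = 9, .level = 7, .sub_leaf = 0 },
    { 0 }                      /* zero terminator, as in the kernel table */
};

int main(void)
{
    uint32_t regs[4];

    for (const struct cpuid_bit *cb = cpuid_bits; cb->feature; cb++) {
        cpuid_count(cb->level, cb->sub_leaf, regs);
        if (regs[cb->reg] & (1u << cb->bit))
            printf("feature %d present\n", cb->feature);
    }
    return 0;
}
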
/Linux-v5.4/drivers/net/wireless/marvell/mwifiex/
util.h:55 struct mwifiex_cb *cb = (struct mwifiex_cb *)skb->cb; in MWIFIEX_SKB_RXCB() local
57 BUILD_BUG_ON(sizeof(struct mwifiex_cb) > sizeof(skb->cb)); in MWIFIEX_SKB_RXCB()
58 return &cb->rx_info; in MWIFIEX_SKB_RXCB()
63 struct mwifiex_cb *cb = (struct mwifiex_cb *)skb->cb; in MWIFIEX_SKB_TXCB() local
65 return &cb->tx_info; in MWIFIEX_SKB_TXCB()
71 struct mwifiex_cb *cb = (struct mwifiex_cb *)skb->cb; in mwifiex_store_mapping() local
73 memcpy(&cb->dma_mapping, mapping, sizeof(*mapping)); in mwifiex_store_mapping()
79 struct mwifiex_cb *cb = (struct mwifiex_cb *)skb->cb; in mwifiex_get_mapping() local
81 memcpy(mapping, &cb->dma_mapping, sizeof(*mapping)); in mwifiex_get_mapping()
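
The mwifiex helpers above overlay a driver-private struct mwifiex_cb on the skb's fixed cb[] scratch area and use BUILD_BUG_ON to guarantee the private struct never outgrows it. The same overlay-plus-compile-time-check idiom in plain C11; the 48-byte size matches sk_buff's cb[], but the field names are purely illustrative:

#include <assert.h>
#include <stdio.h>
#include <string.h>

struct packet {                       /* stand-in for struct sk_buff */
    _Alignas(8) char cb[48];          /* opaque per-layer scratch space */
    /* ... payload pointers etc. ... */
};

struct priv_cb {                      /* one layer's private view of cb[] */
    unsigned long dma_addr;
    unsigned short rx_len;
};

/* Compile-time guard, mirroring
 * BUILD_BUG_ON(sizeof(struct mwifiex_cb) > sizeof(skb->cb)). */
static_assert(sizeof(struct priv_cb) <= sizeof(((struct packet *)0)->cb),
              "priv_cb does not fit in packet::cb");

static struct priv_cb *pkt_cb(struct packet *p)
{
    return (struct priv_cb *)p->cb;
}

int main(void)
{
    struct packet p;

    memset(&p, 0, sizeof(p));
    pkt_cb(&p)->rx_len = 128;
    printf("rx_len = %u\n", pkt_cb(&p)->rx_len);
    return 0;
}
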
/Linux-v5.4/net/core/
flow_offload.c:170 struct flow_block_cb *flow_block_cb_alloc(flow_setup_cb_t *cb, in flow_block_cb_alloc() argument
180 block_cb->cb = cb; in flow_block_cb_alloc()
199 flow_setup_cb_t *cb, void *cb_ident) in flow_block_cb_lookup() argument
204 if (block_cb->cb == cb && in flow_block_cb_lookup()
231 bool flow_block_cb_is_busy(flow_setup_cb_t *cb, void *cb_ident, in flow_block_cb_is_busy() argument
237 if (block_cb->cb == cb && in flow_block_cb_is_busy()
248 flow_setup_cb_t *cb, in flow_block_cb_setup_simple() argument
262 if (flow_block_cb_is_busy(cb, cb_ident, driver_block_list)) in flow_block_cb_setup_simple()
265 block_cb = flow_block_cb_alloc(cb, cb_ident, cb_priv, NULL); in flow_block_cb_setup_simple()
273 block_cb = flow_block_cb_lookup(f->block, cb, cb_ident); in flow_block_cb_setup_simple()
[all …]
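
flow_block_cb_lookup() and flow_block_cb_is_busy() above identify a registered block callback by the pair (callback function, cb_ident cookie) while walking a list. A reduced sketch of keying a registration list on that pair; a singly linked list and a simplified callback type replace the kernel's list_head and flow_setup_cb_t:

#include <stdbool.h>
#include <stddef.h>

typedef int (*setup_cb_t)(int type, void *type_data, void *cb_priv);

struct block_cb {
    struct block_cb *next;
    setup_cb_t cb;
    void *cb_ident;                   /* identity cookie chosen by the driver */
    void *cb_priv;
};

/* Find a registration matching both the callback and its identity cookie. */
static struct block_cb *block_cb_lookup(struct block_cb *head,
                                        setup_cb_t cb, void *cb_ident)
{
    for (struct block_cb *bcb = head; bcb; bcb = bcb->next)
        if (bcb->cb == cb && bcb->cb_ident == cb_ident)
            return bcb;
    return NULL;
}

static bool block_cb_is_busy(struct block_cb *head, setup_cb_t cb, void *cb_ident)
{
    return block_cb_lookup(head, cb, cb_ident) != NULL;
}
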
/Linux-v5.4/drivers/gpu/drm/i915/
i915_sw_fence.c:383 struct i915_sw_dma_fence_cb *cb = container_of(data, typeof(*cb), base); in dma_i915_sw_fence_wake() local
385 i915_sw_fence_set_error_once(cb->fence, dma->error); in dma_i915_sw_fence_wake()
386 i915_sw_fence_complete(cb->fence); in dma_i915_sw_fence_wake()
387 kfree(cb); in dma_i915_sw_fence_wake()
392 struct i915_sw_dma_fence_cb_timer *cb = from_timer(cb, t, timer); in timer_i915_sw_fence_wake() local
395 fence = xchg(&cb->base.fence, NULL); in timer_i915_sw_fence_wake()
400 cb->dma->ops->get_driver_name(cb->dma), in timer_i915_sw_fence_wake()
401 cb->dma->ops->get_timeline_name(cb->dma), in timer_i915_sw_fence_wake()
402 cb->dma->seqno, in timer_i915_sw_fence_wake()
412 struct i915_sw_dma_fence_cb_timer *cb = in dma_i915_sw_fence_wake_timer() local
[all …]
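
timer_i915_sw_fence_wake() above uses xchg() to take the fence pointer out of the callback structure atomically, so whichever of the timeout path and the normal completion path runs first claims the fence exactly once. A userspace sketch of that claim-once handoff using C11 atomics and invented names:

#include <stdatomic.h>
#include <stdio.h>

struct fence { const char *name; };

struct timed_cb {
    _Atomic(struct fence *) fence;    /* owned by whoever swaps it to NULL */
};

/* Both the timeout and the completion path call this; only the first caller
 * gets a non-NULL pointer back and performs the completion work. */
static void try_complete(struct timed_cb *cb, const char *who)
{
    struct fence *f = atomic_exchange(&cb->fence, NULL);

    if (!f) {
        printf("%s: already handled\n", who);
        return;
    }
    printf("%s: completing fence %s\n", who, f->name);
}

int main(void)
{
    struct fence f = { .name = "frame-42" };
    struct timed_cb cb;

    atomic_init(&cb.fence, &f);
    try_complete(&cb, "timer");       /* wins the race */
    try_complete(&cb, "dma-done");    /* sees NULL and backs off */
    return 0;
}
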
/Linux-v5.4/drivers/media/platform/vicodec/
codec-v4l2-fwht.c:112 rf->cb = NULL; in prepare_raw_frame()
119 rf->cb = NULL; in prepare_raw_frame()
123 rf->cb = rf->luma + size; in prepare_raw_frame()
124 rf->cr = rf->cb + size / 4; in prepare_raw_frame()
128 rf->cb = rf->cr + size / 4; in prepare_raw_frame()
131 rf->cb = rf->luma + size; in prepare_raw_frame()
132 rf->cr = rf->cb + size / 2; in prepare_raw_frame()
137 rf->cb = rf->luma + size; in prepare_raw_frame()
138 rf->cr = rf->cb + 1; in prepare_raw_frame()
144 rf->cb = rf->cr + 1; in prepare_raw_frame()
[all …]
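
prepare_raw_frame() above derives the cb and cr plane pointers from the luma pointer and the luma plane size, with the chroma offsets depending on the format's subsampling (quarter-size planes for 4:2:0, half-size for 4:2:2, interleaved chroma for the NV-style layouts). A small sketch of that plane-offset arithmetic for two planar layouts; the enum names and the 640x480 example are illustrative only:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct raw_frame { uint8_t *luma, *cb, *cr; };

enum layout { YUV420_PLANAR, YUV422_PLANAR };

/* Point cb/cr into one contiguous buffer laid out as Y, then Cb, then Cr. */
static void prepare_frame(struct raw_frame *rf, uint8_t *buf,
                          size_t luma_size, enum layout fmt)
{
    rf->luma = buf;
    switch (fmt) {
    case YUV420_PLANAR:               /* chroma planes are 1/4 of the luma size */
        rf->cb = rf->luma + luma_size;
        rf->cr = rf->cb + luma_size / 4;
        break;
    case YUV422_PLANAR:               /* chroma planes are 1/2 of the luma size */
        rf->cb = rf->luma + luma_size;
        rf->cr = rf->cb + luma_size / 2;
        break;
    }
}

int main(void)
{
    static uint8_t buf[640 * 480 * 3 / 2];
    struct raw_frame rf;

    prepare_frame(&rf, buf, 640 * 480, YUV420_PLANAR);
    printf("cb at offset %td, cr at offset %td\n",
           rf.cb - rf.luma, rf.cr - rf.luma);
    return 0;
}
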
/Linux-v5.4/drivers/platform/x86/intel_speed_select_if/
isst_if_common.c:149 static void isst_mbox_resume_command(struct isst_if_cmd_cb *cb, in isst_mbox_resume_command() argument
160 (cb->cmd_callback)((u8 *)&mbox_cmd, &wr_only, 1); in isst_mbox_resume_command()
176 struct isst_if_cmd_cb *cb; in isst_resume_common() local
179 cb = &punit_callbacks[ISST_IF_DEV_MBOX]; in isst_resume_common()
180 if (cb->registered) in isst_resume_common()
181 isst_mbox_resume_command(cb, sst_cmd); in isst_resume_common()
439 static long isst_if_exec_multi_cmd(void __user *argp, struct isst_if_cmd_cb *cb) in isst_if_exec_multi_cmd() argument
454 cmd_ptr = kmalloc(cb->cmd_size, GFP_KERNEL); in isst_if_exec_multi_cmd()
459 ptr = argp + cb->offset; in isst_if_exec_multi_cmd()
469 if (copy_from_user(cmd_ptr, ptr, cb->cmd_size)) { in isst_if_exec_multi_cmd()
[all …]
