
Search results for references to "vd" in Linux v4.19 (results 1 – 25 of 83), sorted by relevance.

/Linux-v4.19/drivers/dma/
virt-dma.c
25 struct virt_dma_desc *vd = to_virt_desc(tx); in vchan_tx_submit() local
32 list_move_tail(&vd->node, &vc->desc_submitted); in vchan_tx_submit()
36 vc, vd, cookie); in vchan_tx_submit()
55 struct virt_dma_desc *vd = to_virt_desc(tx); in vchan_tx_desc_free() local
59 list_del(&vd->node); in vchan_tx_desc_free()
63 vc, vd, vd->tx.cookie); in vchan_tx_desc_free()
64 vc->desc_free(vd); in vchan_tx_desc_free()
72 struct virt_dma_desc *vd; in vchan_find_desc() local
74 list_for_each_entry(vd, &vc->desc_issued, node) in vchan_find_desc()
75 if (vd->tx.cookie == cookie) in vchan_find_desc()
[all …]
virt-dma.h
59 struct virt_dma_desc *vd, unsigned long tx_flags) in vchan_tx_prep() argument
63 dma_async_tx_descriptor_init(&vd->tx, &vc->chan); in vchan_tx_prep()
64 vd->tx.flags = tx_flags; in vchan_tx_prep()
65 vd->tx.tx_submit = vchan_tx_submit; in vchan_tx_prep()
66 vd->tx.desc_free = vchan_tx_desc_free; in vchan_tx_prep()
69 list_add_tail(&vd->node, &vc->desc_allocated); in vchan_tx_prep()
72 return &vd->tx; in vchan_tx_prep()
93 static inline void vchan_cookie_complete(struct virt_dma_desc *vd) in vchan_cookie_complete() argument
95 struct virt_dma_chan *vc = to_virt_chan(vd->tx.chan); in vchan_cookie_complete()
98 cookie = vd->tx.cookie; in vchan_cookie_complete()
[all …]
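
The virt-dma.c/virt-dma.h hits above are the generic helpers that the DMA drivers below build on: a driver embeds struct virt_dma_desc in its own descriptor, hands it to vchan_tx_prep() when preparing a transfer, and gets it back with container_of() when the hardware finishes. A minimal sketch of that pattern follows; my_desc, my_chan and my_prep_memcpy are hypothetical names, not code from the tree.

/* Sketch only: the embedding pattern the search hits illustrate. */
#include <linux/dmaengine.h>
#include <linux/slab.h>
#include "virt-dma.h"

struct my_desc {
	struct virt_dma_desc vd;	/* generic part; recovered with container_of() */
	dma_addr_t src, dst;
	size_t len;
};

struct my_chan {
	struct virt_dma_chan vc;	/* generic channel: descriptor lists + lock */
};

static void my_desc_free(struct virt_dma_desc *vd)
{
	kfree(container_of(vd, struct my_desc, vd));
}

/* At probe time the driver would set mc->vc.desc_free = my_desc_free and call
 * vchan_init(&mc->vc, &dmadev) so vchan_tx_desc_free() above can reach it. */

static struct dma_async_tx_descriptor *my_prep_memcpy(struct dma_chan *chan,
		dma_addr_t dst, dma_addr_t src, size_t len, unsigned long flags)
{
	struct my_chan *mc = container_of(chan, struct my_chan, vc.chan);
	struct my_desc *d = kzalloc(sizeof(*d), GFP_NOWAIT);

	if (!d)
		return NULL;

	d->src = src;
	d->dst = dst;
	d->len = len;

	/* Initialises d->vd.tx, wires up vchan_tx_submit()/vchan_tx_desc_free()
	 * and queues d->vd on vc->desc_allocated. */
	return vchan_tx_prep(&mc->vc, &d->vd, flags);
}
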
pxa_dma.c
87 struct virt_dma_desc vd; /* Virtual descriptor */ member
147 container_of((_vd), struct pxad_desc_sw, vd)
583 static bool is_desc_completed(struct virt_dma_desc *vd) in is_desc_completed() argument
585 struct pxad_desc_sw *sw_desc = to_pxad_sw_desc(vd); in is_desc_completed()
604 struct virt_dma_desc *vd) in pxad_try_hotchain() argument
620 to_pxad_sw_desc(vd)->misaligned) in pxad_try_hotchain()
625 pxad_desc_chain(vd_last_issued, vd); in pxad_try_hotchain()
626 if (is_chan_running(chan) || is_desc_completed(vd)) in pxad_try_hotchain()
656 struct virt_dma_desc *vd, *tmp; in pxad_chan_handler() local
669 list_for_each_entry_safe(vd, tmp, &chan->vc.desc_issued, node) { in pxad_chan_handler()
[all …]
amba-pl08x.c
211 struct virt_dma_desc vd; member
294 const struct vendor_data *vd; member
332 return container_of(tx, struct pl08x_txd, vd.tx); in to_pl08x_txd()
404 if (pl08x->vd->pl080s) in pl08x_write_lli()
524 if (pl08x->vd->pl080s) in pl08x_write_lli()
541 struct virt_dma_desc *vd = vchan_next_desc(&plchan->vc); in pl08x_start_next_txd() local
542 struct pl08x_txd *txd = to_pl08x_txd(&vd->tx); in pl08x_start_next_txd()
545 list_del(&txd->vd.node); in pl08x_start_next_txd()
835 for (i = 0; i < pl08x->vd->channels; i++) { in pl08x_get_phy_channel()
849 if (i == pl08x->vd->channels) { in pl08x_get_phy_channel()
[all …]
moxart-dma.c
128 struct virt_dma_desc vd; member
178 return container_of(t, struct moxart_desc, vd.tx); in to_moxart_dma_desc()
181 static void moxart_dma_desc_free(struct virt_dma_desc *vd) in moxart_dma_desc_free() argument
183 kfree(container_of(vd, struct moxart_desc, vd)); in moxart_dma_desc_free()
198 moxart_dma_desc_free(&ch->desc->vd); in moxart_terminate_all()
329 return vchan_tx_prep(&ch->vc, &d->vd, tx_flags); in moxart_prep_slave_sg()
422 struct virt_dma_desc *vd; in moxart_dma_start_desc() local
424 vd = vchan_next_desc(&ch->vc); in moxart_dma_start_desc()
426 if (!vd) { in moxart_dma_start_desc()
431 list_del(&vd->node); in moxart_dma_start_desc()
[all …]
tegra210-adma.c
109 struct virt_dma_desc vd; member
183 return container_of(td, struct tegra_adma_desc, vd.tx); in to_tegra_adma_desc()
191 static void tegra_adma_desc_free(struct virt_dma_desc *vd) in tegra_adma_desc_free() argument
193 kfree(container_of(vd, struct tegra_adma_desc, vd)); in tegra_adma_desc_free()
341 struct virt_dma_desc *vd = vchan_next_desc(&tdc->vc); in tegra_adma_start() local
345 if (!vd) in tegra_adma_start()
348 list_del(&vd->node); in tegra_adma_start()
350 desc = to_tegra_adma_desc(&vd->tx); in tegra_adma_start()
409 vchan_cyclic_callback(&tdc->desc->vd); in tegra_adma_isr()
456 struct virt_dma_desc *vd; in tegra_adma_tx_status() local
[all …]
img-mdc-dma.c
109 struct virt_dma_desc vd; member
181 return container_of(vdesc, struct mdc_tx_desc, vd); in to_mdc_desc()
280 static void mdc_desc_free(struct virt_dma_desc *vd) in mdc_desc_free() argument
282 struct mdc_tx_desc *mdesc = to_mdc_desc(&vd->tx); in mdc_desc_free()
335 return vchan_tx_prep(&mchan->vc, &mdesc->vd, flags); in mdc_prep_dma_memcpy()
338 mdc_desc_free(&mdesc->vd); in mdc_prep_dma_memcpy()
442 return vchan_tx_prep(&mchan->vc, &mdesc->vd, flags); in mdc_prep_dma_cyclic()
445 mdc_desc_free(&mdesc->vd); in mdc_prep_dma_cyclic()
519 return vchan_tx_prep(&mchan->vc, &mdesc->vd, flags); in mdc_prep_slave_sg()
522 mdc_desc_free(&mdesc->vd); in mdc_prep_slave_sg()
[all …]
bcm2835-dma.c
94 struct virt_dma_desc vd; member
208 return container_of(t, struct bcm2835_desc, vd.tx); in to_bcm2835_dma_desc()
222 static void bcm2835_dma_desc_free(struct virt_dma_desc *vd) in bcm2835_dma_desc_free() argument
225 container_of(vd, struct bcm2835_desc, vd)); in bcm2835_dma_desc_free()
455 struct virt_dma_desc *vd = vchan_next_desc(&c->vc); in bcm2835_dma_start_desc() local
458 if (!vd) { in bcm2835_dma_start_desc()
463 list_del(&vd->node); in bcm2835_dma_start_desc()
465 c->desc = d = to_bcm2835_dma_desc(&vd->tx); in bcm2835_dma_start_desc()
496 vchan_cyclic_callback(&d->vd); in bcm2835_dma_callback()
502 vchan_cookie_complete(&c->desc->vd); in bcm2835_dma_callback()
[all …]
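
The moxart, tegra210-adma and bcm2835 hits show the two completion paths a virt-dma driver takes from its interrupt handler: vchan_cyclic_callback() for cyclic descriptors that stay active, and vchan_cookie_complete() for one-shot descriptors that are done. A hedged sketch of that shape, extending the hypothetical my_chan above with a current-descriptor pointer and a cyclic flag (my_start_next_desc is likewise hypothetical):

/* Sketch of the ISR completion pattern seen in the drivers above. */
#include <linux/interrupt.h>

static irqreturn_t my_dma_irq(int irq, void *data)
{
	struct my_chan *mc = data;
	unsigned long flags;

	spin_lock_irqsave(&mc->vc.lock, flags);
	if (mc->desc) {
		if (mc->cyclic) {
			/* Cyclic: descriptor keeps running, only fire the period callback. */
			vchan_cyclic_callback(&mc->desc->vd);
		} else {
			/* One-shot: move it to desc_completed and kick the completion tasklet. */
			vchan_cookie_complete(&mc->desc->vd);
			mc->desc = NULL;
			my_start_next_desc(mc);	/* hypothetical: fetch the next issued descriptor */
		}
	}
	spin_unlock_irqrestore(&mc->vc.lock, flags);

	return IRQ_HANDLED;
}
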
sa11x0-dma.c
77 struct virt_dma_desc vd; member
142 struct virt_dma_desc *vd = vchan_next_desc(&c->vc); in sa11x0_dma_next_desc() local
144 return vd ? container_of(vd, struct sa11x0_dma_desc, vd) : NULL; in sa11x0_dma_next_desc()
147 static void sa11x0_dma_free_desc(struct virt_dma_desc *vd) in sa11x0_dma_free_desc() argument
149 kfree(container_of(vd, struct sa11x0_dma_desc, vd)); in sa11x0_dma_free_desc()
154 list_del(&txd->vd.node); in sa11x0_dma_start_desc()
159 p->num, &txd->vd, txd->vd.tx.cookie, txd->ddar); in sa11x0_dma_start_desc()
233 vchan_cookie_complete(&txd->vd); in sa11x0_dma_complete()
242 vchan_cyclic_callback(&txd->vd); in sa11x0_dma_complete()
427 struct virt_dma_desc *vd; in sa11x0_dma_tx_status() local
[all …]
sun4i-dma.c
159 struct virt_dma_desc vd; member
186 static struct sun4i_dma_contract *to_sun4i_dma_contract(struct virt_dma_desc *vd) in to_sun4i_dma_contract() argument
188 return container_of(vd, struct sun4i_dma_contract, vd); in to_sun4i_dma_contract()
329 struct virt_dma_desc *vd; in __execute_vchan_pending() local
352 vd = vchan_next_desc(&vchan->vc); in __execute_vchan_pending()
353 if (!vd) { in __execute_vchan_pending()
360 contract = to_sun4i_dma_contract(vd); in __execute_vchan_pending()
363 list_del(&contract->vd.node); in __execute_vchan_pending()
364 vchan_cookie_complete(&contract->vd); in __execute_vchan_pending()
599 static void sun4i_dma_free_contract(struct virt_dma_desc *vd) in sun4i_dma_free_contract() argument
[all …]
zx_dma.c
91 struct virt_dma_desc vd; member
193 struct virt_dma_desc *vd = vchan_next_desc(&c->vc); in zx_dma_start_txd() local
201 if (vd) { in zx_dma_start_txd()
203 container_of(vd, struct zx_dma_desc_sw, vd); in zx_dma_start_txd()
208 list_del(&ds->vd.node); in zx_dma_start_txd()
295 vchan_cyclic_callback(&p->ds_run->vd); in zx_dma_int_handler()
297 vchan_cookie_complete(&p->ds_run->vd); in zx_dma_int_handler()
340 struct virt_dma_desc *vd; in zx_dma_tx_status() local
357 vd = vchan_find_desc(&c->vc, cookie); in zx_dma_tx_status()
358 if (vd) { in zx_dma_tx_status()
[all …]
/Linux-v4.19/arch/powerpc/platforms/pseries/
mobility.c
73 const char *name, u32 vd, char *value) in update_dt_property() argument
84 if (vd & 0x80000000) { in update_dt_property()
85 vd = ~vd + 1; in update_dt_property()
91 char *new_data = kzalloc(new_prop->length + vd, GFP_KERNEL); in update_dt_property()
96 memcpy(new_data + new_prop->length, value, vd); in update_dt_property()
100 new_prop->length += vd; in update_dt_property()
112 new_prop->length = vd; in update_dt_property()
120 memcpy(new_prop->value, value, vd); in update_dt_property()
142 u32 vd; in update_dt_node() local
177 vd = be32_to_cpu(*(__be32 *)prop_data); in update_dt_node()
[all …]
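
Here vd is not a descriptor at all but the RTAS value descriptor returned by ibm,update-properties: when the high bit is set the property value arrives in chunks and the two's complement of vd is the chunk length to append, which is what the `vd = ~vd + 1` line above recovers. A standalone worked example of that decoding (plain C, illustrative values):

/* Worked example: decoding the "negative" value descriptor handled above. */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t vd = 0xFFFFFF00;	/* example descriptor with the high bit set */

	if (vd & 0x80000000) {
		vd = ~vd + 1;		/* two's-complement magnitude: 0x100 = 256 bytes */
		printf("partial value, append %u bytes\n", vd);
	} else {
		printf("complete value, %u bytes\n", vd);
	}

	return 0;
}
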
/Linux-v4.19/arch/powerpc/kernel/
vecemu.c
264 unsigned int va, vb, vc, vd; in emulate_altivec() local
271 vd = (instr >> 21) & 0x1f; in emulate_altivec()
281 vaddfp(&vrs[vd], &vrs[va], &vrs[vb]); in emulate_altivec()
284 vsubfp(&vrs[vd], &vrs[va], &vrs[vb]); in emulate_altivec()
287 vrefp(&vrs[vd], &vrs[vb]); in emulate_altivec()
290 vrsqrtefp(&vrs[vd], &vrs[vb]); in emulate_altivec()
294 vrs[vd].u[i] = eexp2(vrs[vb].u[i]); in emulate_altivec()
298 vrs[vd].u[i] = elog2(vrs[vb].u[i]); in emulate_altivec()
302 vrs[vd].u[i] = rfin(vrs[vb].u[i]); in emulate_altivec()
306 vrs[vd].u[i] = rfiz(vrs[vb].u[i]); in emulate_altivec()
[all …]
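
In the AltiVec emulator, vd is the destination vector register number, held in bits 21–25 (counting from the least significant bit) of the 32-bit instruction word, alongside va and vb. A standalone worked example that builds a VX-form word from its fields and extracts them the same way emulate_altivec() does (the extended opcode here is arbitrary, chosen only for illustration):

/* Worked example: the vD/vA/vB field extraction used by emulate_altivec(). */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t opcd = 4, vd_in = 1, va_in = 2, vb_in = 3, xo = 10;
	uint32_t instr = (opcd << 26) | (vd_in << 21) | (va_in << 16) | (vb_in << 11) | xo;

	/* The same masks and shifts as the kernel code above. */
	uint32_t vd = (instr >> 21) & 0x1f;
	uint32_t va = (instr >> 16) & 0x1f;
	uint32_t vb = (instr >> 11) & 0x1f;

	printf("vd=%u va=%u vb=%u\n", vd, va, vb);	/* prints vd=1 va=2 vb=3 */
	return 0;
}
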
/Linux-v4.19/drivers/dma/mediatek/
mtk-hsdma.c
132 struct virt_dma_desc vd; member
148 struct virt_dma_desc *vd; member
259 static struct mtk_hsdma_vdesc *to_hsdma_vdesc(struct virt_dma_desc *vd) in to_hsdma_vdesc() argument
261 return container_of(vd, struct mtk_hsdma_vdesc, vd); in to_hsdma_vdesc()
300 static void mtk_hsdma_vdesc_free(struct virt_dma_desc *vd) in mtk_hsdma_vdesc_free() argument
302 kfree(container_of(vd, struct mtk_hsdma_vdesc, vd)); in mtk_hsdma_vdesc_free()
460 ring->cb[ring->cur_tptr].vd = &hvd->vd; in mtk_hsdma_issue_pending_vdesc()
498 struct virt_dma_desc *vd, *vd2; in mtk_hsdma_issue_vchan_pending() local
503 list_for_each_entry_safe(vd, vd2, &hvc->vc.desc_issued, node) { in mtk_hsdma_issue_vchan_pending()
506 hvd = to_hsdma_vdesc(vd); in mtk_hsdma_issue_vchan_pending()
[all …]
/Linux-v4.19/drivers/clk/versatile/
icst.c
79 unsigned int vd; in icst_hz_to_vco() local
84 vd = (f + fref_div / 2) / fref_div; in icst_hz_to_vco()
85 if (vd < p->vd_min || vd > p->vd_max) in icst_hz_to_vco()
88 f_pll = fref_div * vd; in icst_hz_to_vco()
94 vco.v = vd - 8; in icst_hz_to_vco()
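
In the ICST clock driver vd is the VCO feedback divider: the wanted VCO frequency divided by the divided-down reference, rounded to the nearest integer, and the hardware is then programmed with vd - 8 (the VDW field). A small worked example of the rounding step (frequencies are illustrative only):

/* Worked example: the rounded divider computation in icst_hz_to_vco() above. */
#include <stdio.h>

int main(void)
{
	unsigned long f = 250000000UL;		/* wanted VCO frequency, example only */
	unsigned long fref_div = 12000000UL;	/* reference after the R divider, example only */
	unsigned int vd;

	vd = (f + fref_div / 2) / fref_div;	/* 250/12 = 20.83..., rounds to 21 */
	printf("vd = %u, programmed VDW = %u, f_pll = %lu Hz\n",
	       vd, vd - 8, fref_div * vd);	/* vd = 21, VDW = 13, f_pll = 252 MHz */

	return 0;
}
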
/Linux-v4.19/sound/core/
control.c
135 if (control->vd[idx].owner == ctl) in snd_ctl_release()
136 control->vd[idx].owner = NULL; in snd_ctl_release()
228 (*kctl)->vd[idx].access = access; in snd_ctl_new()
229 (*kctl)->vd[idx].owner = file; in snd_ctl_new()
559 if (!(kctl->vd[0].access & SNDRV_CTL_ELEM_ACCESS_USER)) { in snd_ctl_remove_user_ctl()
564 if (kctl->vd[idx].owner != NULL && kctl->vd[idx].owner != file) { in snd_ctl_remove_user_ctl()
593 struct snd_kcontrol_volatile *vd; in snd_ctl_activate_id() local
604 vd = &kctl->vd[index_offset]; in snd_ctl_activate_id()
607 if (!(vd->access & SNDRV_CTL_ELEM_ACCESS_INACTIVE)) in snd_ctl_activate_id()
609 vd->access &= ~SNDRV_CTL_ELEM_ACCESS_INACTIVE; in snd_ctl_activate_id()
[all …]
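
In ALSA, vd[] is the per-element volatile part of a kcontrol (struct snd_kcontrol_volatile): the access bits plus the file that currently owns the element. snd_ctl_activate_id() above does little more than flip SNDRV_CTL_ELEM_ACCESS_INACTIVE in that array. A reduced sketch of that toggle, with locking and the change notification left out (my_toggle_active is a hypothetical helper, not the kernel function):

/* Sketch of the activate/deactivate logic shown above (no locking, no notification). */
#include <sound/control.h>

static int my_toggle_active(struct snd_kcontrol *kctl, unsigned int index_offset,
			    bool active)
{
	struct snd_kcontrol_volatile *vd = &kctl->vd[index_offset];

	if (active) {
		if (!(vd->access & SNDRV_CTL_ELEM_ACCESS_INACTIVE))
			return 0;		/* already active, nothing to do */
		vd->access &= ~SNDRV_CTL_ELEM_ACCESS_INACTIVE;
	} else {
		if (vd->access & SNDRV_CTL_ELEM_ACCESS_INACTIVE)
			return 0;		/* already inactive */
		vd->access |= SNDRV_CTL_ELEM_ACCESS_INACTIVE;
	}

	/* The real code then sends an SNDRV_CTL_EVENT_MASK_INFO event for the element. */
	return 1;
}
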
/Linux-v4.19/arch/arm/vfp/
vfpdouble.c
54 static void vfp_double_normalise_denormal(struct vfp_double *vd) in vfp_double_normalise_denormal() argument
56 int bits = 31 - fls(vd->significand >> 32); in vfp_double_normalise_denormal()
58 bits = 63 - fls(vd->significand); in vfp_double_normalise_denormal()
60 vfp_double_dump("normalise_denormal: in", vd); in vfp_double_normalise_denormal()
63 vd->exponent -= bits - 1; in vfp_double_normalise_denormal()
64 vd->significand <<= bits; in vfp_double_normalise_denormal()
67 vfp_double_dump("normalise_denormal: out", vd); in vfp_double_normalise_denormal()
70 u32 vfp_double_normaliseround(int dd, struct vfp_double *vd, u32 fpscr, u32 exceptions, const char … in vfp_double_normaliseround() argument
76 vfp_double_dump("pack: in", vd); in vfp_double_normaliseround()
81 if (vd->exponent == 2047 && (vd->significand == 0 || exceptions)) in vfp_double_normaliseround()
[all …]
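
vfp_double_normalise_denormal() shifts a denormal significand left until its leading 1 sits just below the top bit (bit 62, leaving bit 63 free as a carry/guard bit) and adjusts the exponent by one less than the shift, which is what the `bits - 1` line above does. A standalone worked example of the same arithmetic, using a compiler builtin in place of the kernel's fls():

/* Worked example: normalising a denormal significand as the VFP code above does. */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t significand = 0x0000000000ABCDEFULL;	/* denormal: leading zeros to shift out */
	int exponent = 0;
	int msb = 64 - __builtin_clzll(significand);	/* 1-based index of the top set bit (fls64) */
	int bits = 63 - msb;				/* shift needed to park that bit at bit 62 */

	if (bits) {
		exponent -= bits - 1;
		significand <<= bits;
	}

	printf("exponent=%d significand=0x%016llx\n",
	       exponent, (unsigned long long)significand);
	return 0;
}
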
/Linux-v4.19/include/drm/tinydrm/
tinydrm.h
86 #define TINYDRM_MODE(hd, vd, hd_mm, vd_mm) \ argument
91 .vdisplay = (vd), \
92 .vsync_start = (vd), \
93 .vsync_end = (vd), \
94 .vtotal = (vd), \
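
TINYDRM_MODE() expands to a struct drm_display_mode initializer in which vd fills every vertical timing field (vdisplay, vsync_start, vsync_end, vtotal), i.e. a panel mode with no blanking described. A typical use looks like the following; the pixel and millimetre values are illustrative panel dimensions, not tied to a specific driver:

/* Illustrative use of TINYDRM_MODE(); values are example panel dimensions. */
#include <drm/tinydrm/tinydrm.h>

static const struct drm_display_mode example_panel_mode = {
	TINYDRM_MODE(320, 240, 58, 43),	/* 320x240 pixels, 58x43 mm active area */
};
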
/Linux-v4.19/drivers/misc/cxl/
flash.c
68 u32 vd, char *value) in update_property() argument
84 new_prop->length = vd; in update_property()
91 memcpy(new_prop->value, value, vd); in update_property()
96 dn->name, name, vd, be32_to_cpu(*val)); in update_property()
115 u32 vd; in update_node() local
143 vd = be32_to_cpu(*(__be32 *)prop_data); in update_node()
144 prop_data += vd + sizeof(vd); in update_node()
153 vd = be32_to_cpu(*(__be32 *)prop_data); in update_node()
154 prop_data += sizeof(vd); in update_node()
156 if ((vd != 0x00000000) && (vd != 0x80000000)) { in update_node()
[all …]
/Linux-v4.19/drivers/dma/sh/
usb-dmac.c
55 struct virt_dma_desc vd; member
66 #define to_usb_dmac_desc(vd) container_of(vd, struct usb_dmac_desc, vd) argument
227 struct virt_dma_desc *vd; in usb_dmac_chan_start_desc() local
229 vd = vchan_next_desc(&chan->vc); in usb_dmac_chan_start_desc()
230 if (!vd) { in usb_dmac_chan_start_desc()
240 list_del(&vd->node); in usb_dmac_chan_start_desc()
242 chan->desc = to_usb_dmac_desc(vd); in usb_dmac_chan_start_desc()
445 return vchan_tx_prep(&uchan->vc, &desc->vd, dma_flags); in usb_dmac_prep_slave_sg()
510 struct virt_dma_desc *vd; in usb_dmac_chan_get_residue() local
515 vd = vchan_find_desc(&chan->vc, cookie); in usb_dmac_chan_get_residue()
[all …]
/Linux-v4.19/arch/s390/kernel/
vdso.c
130 static void __init vdso_init_data(struct vdso_data *vd) in vdso_init_data() argument
132 vd->ectg_available = test_facility(31); in vdso_init_data()
157 struct vdso_per_cpu_data *vd; in vdso_alloc_per_cpu() local
168 vd = (struct vdso_per_cpu_data *) page_frame; in vdso_alloc_per_cpu()
169 vd->cpu_nr = lowcore->cpu_nr; in vdso_alloc_per_cpu()
170 vd->node_id = cpu_to_node(vd->cpu_nr); in vdso_alloc_per_cpu()
/Linux-v4.19/drivers/media/radio/
tea575x.c
537 tea->vd = tea575x_radio; in snd_tea575x_init()
538 video_set_drvdata(&tea->vd, tea); in snd_tea575x_init()
540 strlcpy(tea->vd.name, tea->v4l2_dev->name, sizeof(tea->vd.name)); in snd_tea575x_init()
541 tea->vd.lock = &tea->mutex; in snd_tea575x_init()
542 tea->vd.v4l2_dev = tea->v4l2_dev; in snd_tea575x_init()
545 tea->vd.fops = &tea->fops; in snd_tea575x_init()
548 v4l2_disable_ioctl(&tea->vd, VIDIOC_S_HW_FREQ_SEEK); in snd_tea575x_init()
551 tea->vd.ctrl_handler = &tea->ctrl_handler; in snd_tea575x_init()
573 retval = video_register_device(&tea->vd, VFL_TYPE_RADIO, tea->radio_nr); in snd_tea575x_init()
576 v4l2_ctrl_handler_free(tea->vd.ctrl_handler); in snd_tea575x_init()
[all …]
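
In the radio drivers vd is a struct video_device. The sequence above is the usual V4L2 registration dance: copy a template, attach driver data, set the name, lock, v4l2_dev and control handler, then call video_register_device(), freeing the control handler if registration fails. A hedged sketch of that pattern with a hypothetical my_radio device (the file operations and release callback are omitted here but must be set in real code):

/* Sketch of the V4L2 radio registration pattern above (hypothetical my_radio). */
#include <linux/mutex.h>
#include <linux/string.h>
#include <media/v4l2-ctrls.h>
#include <media/v4l2-dev.h>
#include <media/v4l2-device.h>

struct my_radio {
	struct v4l2_device *v4l2_dev;
	struct video_device vd;
	struct v4l2_ctrl_handler ctrl_handler;
	struct mutex mutex;
};

static int my_radio_register(struct my_radio *r, const struct video_device *tmpl)
{
	int ret;

	r->vd = *tmpl;				/* start from a template video_device */
	video_set_drvdata(&r->vd, r);
	strlcpy(r->vd.name, r->v4l2_dev->name, sizeof(r->vd.name));
	r->vd.lock = &r->mutex;
	r->vd.v4l2_dev = r->v4l2_dev;
	r->vd.ctrl_handler = &r->ctrl_handler;
	/* r->vd.fops and r->vd.release also need to be set before registering. */

	ret = video_register_device(&r->vd, VFL_TYPE_RADIO, -1);
	if (ret) {
		v4l2_ctrl_handler_free(r->vd.ctrl_handler);
		return ret;
	}

	return 0;
}
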
radio-tea5777.c
560 tea->vd = tea575x_radio; in radio_tea5777_init()
561 video_set_drvdata(&tea->vd, tea); in radio_tea5777_init()
563 strlcpy(tea->vd.name, tea->v4l2_dev->name, sizeof(tea->vd.name)); in radio_tea5777_init()
564 tea->vd.lock = &tea->mutex; in radio_tea5777_init()
565 tea->vd.v4l2_dev = tea->v4l2_dev; in radio_tea5777_init()
568 tea->vd.fops = &tea->fops; in radio_tea5777_init()
570 tea->vd.ctrl_handler = &tea->ctrl_handler; in radio_tea5777_init()
582 res = video_register_device(&tea->vd, VFL_TYPE_RADIO, -1); in radio_tea5777_init()
585 v4l2_ctrl_handler_free(tea->vd.ctrl_handler); in radio_tea5777_init()
595 video_unregister_device(&tea->vd); in radio_tea5777_exit()
[all …]
/Linux-v4.19/drivers/dma/dw-axi-dmac/
dw-axi-dmac-platform.c
213 desc->vd.tx.phys = phys; in axi_desc_get()
228 dma_pool_free(dw->desc_pool, child, child->vd.tx.phys); in axi_desc_put()
232 dma_pool_free(dw->desc_pool, desc, desc->vd.tx.phys); in axi_desc_put()
298 write_chan_llp(chan, first->vd.tx.phys | lms); in axi_chan_block_xfer_start()
313 struct virt_dma_desc *vd; in axi_chan_start_first_queued() local
315 vd = vchan_next_desc(&chan->vc); in axi_chan_start_first_queued()
316 if (!vd) in axi_chan_start_first_queued()
319 desc = vd_to_axi_desc(vd); in axi_chan_start_first_queued()
321 vd->tx.cookie); in axi_chan_start_first_queued()
495 write_desc_llp(prev, desc->vd.tx.phys | lms); in dma_chan_prep_dma_memcpy()
[all …]
/Linux-v4.19/drivers/dma/qcom/
bam_dma.c
72 struct virt_dma_desc vd; member
682 return vchan_tx_prep(&bchan->vc, &async_desc->vd, flags); in bam_prep_slave_sg()
708 list_add(&async_desc->vd.node, &bchan->vc.desc_issued); in bam_dma_terminate_all()
831 vchan_cookie_complete(&async_desc->vd); in process_channel_irqs()
833 list_add(&async_desc->vd.node, in process_channel_irqs()
899 struct virt_dma_desc *vd; in bam_tx_status() local
913 vd = vchan_find_desc(&bchan->vc, cookie); in bam_tx_status()
914 if (vd) { in bam_tx_status()
915 residue = container_of(vd, struct bam_async_desc, vd)->length; in bam_tx_status()
918 if (async_desc->vd.tx.cookie != cookie) in bam_tx_status()
[all …]
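
The bam_dma, zx_dma and usb-dmac hits all report residue the same way: if the cookie's descriptor is still on the issued list, vchan_find_desc() finds it and the whole descriptor length is reported; only the descriptor currently on the hardware needs register inspection. A hedged sketch of that tx_status shape, reusing the hypothetical my_chan/my_desc from the first sketch (dma_cookie_status() and dma_set_residue() come from the drivers/dma private dmaengine.h header):

/* Sketch: residue reporting via vchan_find_desc(), as in the drivers above. */
static enum dma_status my_tx_status(struct dma_chan *chan, dma_cookie_t cookie,
				    struct dma_tx_state *txstate)
{
	struct my_chan *mc = container_of(chan, struct my_chan, vc.chan);
	struct virt_dma_desc *vd;
	enum dma_status ret;
	unsigned long flags;
	size_t residue = 0;

	ret = dma_cookie_status(chan, cookie, txstate);
	if (ret == DMA_COMPLETE || !txstate)
		return ret;

	spin_lock_irqsave(&mc->vc.lock, flags);
	vd = vchan_find_desc(&mc->vc, cookie);
	if (vd)
		/* Still queued: nothing transferred yet, report the full length. */
		residue = container_of(vd, struct my_desc, vd)->len;
	/* else: for the descriptor currently on the hardware, a real driver
	 * would read the controller's transfer counters here. */
	spin_unlock_irqrestore(&mc->vc.lock, flags);

	dma_set_residue(txstate, residue);
	return ret;
}
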
