/Linux-v5.4/drivers/gpu/drm/vc4/ |
D | vc4_irq.c |
    60  struct vc4_dev *vc4 =    in vc4_overflow_mem_work() local
    67  mutex_lock(&vc4->bin_bo_lock);    in vc4_overflow_mem_work()
    69  if (!vc4->bin_bo)    in vc4_overflow_mem_work()
    72  bo = vc4->bin_bo;    in vc4_overflow_mem_work()
    74  bin_bo_slot = vc4_v3d_get_bin_slot(vc4);    in vc4_overflow_mem_work()
    80  spin_lock_irqsave(&vc4->job_lock, irqflags);    in vc4_overflow_mem_work()
    82  if (vc4->bin_alloc_overflow) {    in vc4_overflow_mem_work()
    89  exec = vc4_first_bin_job(vc4);    in vc4_overflow_mem_work()
    91  exec = vc4_last_render_job(vc4);    in vc4_overflow_mem_work()
    93  exec->bin_slots |= vc4->bin_alloc_overflow;    in vc4_overflow_mem_work()
    [all …]
|
D | vc4_bo.c |
    40  static void vc4_bo_stats_print(struct drm_printer *p, struct vc4_dev *vc4)    in vc4_bo_stats_print() argument
    44  for (i = 0; i < vc4->num_labels; i++) {    in vc4_bo_stats_print()
    45  if (!vc4->bo_labels[i].num_allocated)    in vc4_bo_stats_print()
    49  vc4->bo_labels[i].name,    in vc4_bo_stats_print()
    50  vc4->bo_labels[i].size_allocated / 1024,    in vc4_bo_stats_print()
    51  vc4->bo_labels[i].num_allocated);    in vc4_bo_stats_print()
    54  mutex_lock(&vc4->purgeable.lock);    in vc4_bo_stats_print()
    55  if (vc4->purgeable.num)    in vc4_bo_stats_print()
    57  vc4->purgeable.size / 1024, vc4->purgeable.num);    in vc4_bo_stats_print()
    59  if (vc4->purgeable.purged_num)    in vc4_bo_stats_print()
    [all …]
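The vc4_bo.c hits above outline a statistics walk that skips labels with no live allocations and reports sizes in KiB. Below is a minimal userspace sketch of that walk; the struct, field names, and sample data are illustrative stubs, not the driver's own definitions.

    /* Userspace model of the BO-label statistics walk (vc4_bo_stats_print). */
    #include <stdio.h>

    struct bo_label {
        const char *name;
        size_t size_allocated;        /* bytes */
        unsigned int num_allocated;
    };

    static void bo_stats_print(const struct bo_label *labels, int num_labels)
    {
        for (int i = 0; i < num_labels; i++) {
            /* Skip labels with nothing currently allocated, as the hits suggest. */
            if (!labels[i].num_allocated)
                continue;
            printf("%30s: %6zukb BOs (%u)\n",
                   labels[i].name,
                   labels[i].size_allocated / 1024,
                   labels[i].num_allocated);
        }
    }

    int main(void)
    {
        struct bo_label labels[] = {
            { "kernel", 256 * 1024, 4 },
            { "unused", 0, 0 },
            { "V3D",    1024 * 1024, 2 },
        };
        bo_stats_print(labels, 3);
        return 0;
    }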
|
D | vc4_v3d.c |
    103  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_v3d_debugfs_ident() local
    104  int ret = vc4_v3d_pm_get(vc4);    in vc4_v3d_debugfs_ident()
    119  vc4_v3d_pm_put(vc4);    in vc4_v3d_debugfs_ident()
    130  vc4_v3d_pm_get(struct vc4_dev *vc4)    in vc4_v3d_pm_get() argument
    132  mutex_lock(&vc4->power_lock);    in vc4_v3d_pm_get()
    133  if (vc4->power_refcount++ == 0) {    in vc4_v3d_pm_get()
    134  int ret = pm_runtime_get_sync(&vc4->v3d->pdev->dev);    in vc4_v3d_pm_get()
    137  vc4->power_refcount--;    in vc4_v3d_pm_get()
    138  mutex_unlock(&vc4->power_lock);    in vc4_v3d_pm_get()
    142  mutex_unlock(&vc4->power_lock);    in vc4_v3d_pm_get()
    [all …]
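The vc4_v3d.c hits above show a mutex-protected refcount around the runtime-PM calls: only the 0 -> 1 transition powers the block up and only the 1 -> 0 transition powers it down. Here is a small userspace sketch of that get/put pattern; the pthread mutex and the power_on()/power_off() stubs stand in for the kernel's mutex and pm_runtime calls and are assumptions, not the driver's API.

    #include <pthread.h>
    #include <stdio.h>

    struct dev_power {
        pthread_mutex_t lock;
        int refcount;
    };

    static int power_on(void)   { puts("power on");  return 0; }  /* stub */
    static void power_off(void) { puts("power off"); }            /* stub */

    static int pm_get(struct dev_power *p)
    {
        int ret = 0;

        pthread_mutex_lock(&p->lock);
        /* Only the first user actually powers the block up. */
        if (p->refcount++ == 0) {
            ret = power_on();
            if (ret < 0)
                p->refcount--;   /* undo the count on failure, as the hits suggest */
        }
        pthread_mutex_unlock(&p->lock);
        return ret;
    }

    static void pm_put(struct dev_power *p)
    {
        pthread_mutex_lock(&p->lock);
        /* Only the last user powers the block back down. */
        if (--p->refcount == 0)
            power_off();
        pthread_mutex_unlock(&p->lock);
    }

    int main(void)
    {
        struct dev_power p = { PTHREAD_MUTEX_INITIALIZER, 0 };

        pm_get(&p);   /* powers on        */
        pm_get(&p);   /* refcount only    */
        pm_put(&p);
        pm_put(&p);   /* powers off       */
        return 0;
    }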
|
D | vc4_gem.c |
    42  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_queue_hangcheck() local
    44  mod_timer(&vc4->hangcheck.timer,    in vc4_queue_hangcheck()
    74  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_get_hang_state_ioctl() local
    79  if (!vc4->v3d) {    in vc4_get_hang_state_ioctl()
    84  spin_lock_irqsave(&vc4->job_lock, irqflags);    in vc4_get_hang_state_ioctl()
    85  kernel_state = vc4->hang_state;    in vc4_get_hang_state_ioctl()
    87  spin_unlock_irqrestore(&vc4->job_lock, irqflags);    in vc4_get_hang_state_ioctl()
    97  spin_unlock_irqrestore(&vc4->job_lock, irqflags);    in vc4_get_hang_state_ioctl()
    101  vc4->hang_state = NULL;    in vc4_get_hang_state_ioctl()
    102  spin_unlock_irqrestore(&vc4->job_lock, irqflags);    in vc4_get_hang_state_ioctl()
    [all …]
|
D | vc4_hvs.c |
    66  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_hvs_dump_state() local
    67  struct drm_printer p = drm_info_printer(&vc4->hvs->pdev->dev);    in vc4_hvs_dump_state()
    70  drm_print_regset32(&p, &vc4->hvs->regset);    in vc4_hvs_dump_state()
    76  readl((u32 __iomem *)vc4->hvs->dlist + i + 0),    in vc4_hvs_dump_state()
    77  readl((u32 __iomem *)vc4->hvs->dlist + i + 1),    in vc4_hvs_dump_state()
    78  readl((u32 __iomem *)vc4->hvs->dlist + i + 2),    in vc4_hvs_dump_state()
    79  readl((u32 __iomem *)vc4->hvs->dlist + i + 3));    in vc4_hvs_dump_state()
    87  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_hvs_debugfs_underrun() local
    90  drm_printf(&p, "%d\n", atomic_read(&vc4->underrun));    in vc4_hvs_debugfs_underrun()
    159  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_hvs_mask_underrun() local
    [all …]
|
D | vc4_kms.c |
    52  struct vc4_dev *vc4 = dev->dev_private;    in vc4_get_ctm_state() local
    56  ret = drm_modeset_lock(&vc4->ctm_state_lock, state->acquire_ctx);    in vc4_get_ctm_state()
    114  vc4_ctm_commit(struct vc4_dev *vc4, struct drm_atomic_state *state)    in vc4_ctm_commit() argument
    116  struct vc4_ctm_state *ctm_state = to_vc4_ctm_state(vc4->ctm_manager.state);    in vc4_ctm_commit()
    151  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_atomic_complete_commit() local
    169  vc4_ctm_commit(vc4, state);    in vc4_atomic_complete_commit()
    187  up(&vc4->async_modeset);    in vc4_atomic_complete_commit()
    215  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_atomic_commit() local
    219  ret = down_interruptible(&vc4->async_modeset);    in vc4_atomic_commit()
    225  up(&vc4->async_modeset);    in vc4_atomic_commit()
    [all …]
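The vc4_kms.c hits above show a semaphore (async_modeset) taken with down_interruptible() before a commit and released with up() when the commit completes, so only one commit is in flight at a time. Below is a minimal userspace sketch of that serialization, assuming POSIX semaphores in place of the kernel's struct semaphore; commit_to_hw() is an illustrative stub.

    #include <semaphore.h>
    #include <stdio.h>

    static sem_t async_modeset;

    static void commit_to_hw(void) { puts("commit programmed"); }  /* stub */

    static int atomic_commit(void)
    {
        /* Mirrors down_interruptible(): wait for the previous commit to finish. */
        if (sem_wait(&async_modeset) != 0)
            return -1;

        commit_to_hw();

        /* Mirrors up() in the completion path. */
        sem_post(&async_modeset);
        return 0;
    }

    int main(void)
    {
        sem_init(&async_modeset, 0, 1);   /* one commit in flight at a time */
        atomic_commit();
        atomic_commit();
        sem_destroy(&async_modeset);
        return 0;
    }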
|
D | vc4_drv.c |
    70  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_get_param_ioctl() local
    77  if (!vc4->v3d)    in vc4_get_param_ioctl()
    82  ret = vc4_v3d_pm_get(vc4);    in vc4_get_param_ioctl()
    86  vc4_v3d_pm_put(vc4);    in vc4_get_param_ioctl()
    89  ret = vc4_v3d_pm_get(vc4);    in vc4_get_param_ioctl()
    93  vc4_v3d_pm_put(vc4);    in vc4_get_param_ioctl()
    96  ret = vc4_v3d_pm_get(vc4);    in vc4_get_param_ioctl()
    100  vc4_v3d_pm_put(vc4);    in vc4_get_param_ioctl()
    133  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_close() local
    137  vc4_v3d_bin_bo_put(vc4);    in vc4_close()
    [all …]
|
D | vc4_perfmon.c |
    30  void vc4_perfmon_start(struct vc4_dev *vc4, struct vc4_perfmon *perfmon)    in vc4_perfmon_start() argument
    35  if (WARN_ON_ONCE(!perfmon || vc4->active_perfmon))    in vc4_perfmon_start()
    44  vc4->active_perfmon = perfmon;    in vc4_perfmon_start()
    47  void vc4_perfmon_stop(struct vc4_dev *vc4, struct vc4_perfmon *perfmon,    in vc4_perfmon_stop() argument
    52  if (WARN_ON_ONCE(!vc4->active_perfmon ||    in vc4_perfmon_stop()
    53  perfmon != vc4->active_perfmon))    in vc4_perfmon_stop()
    62  vc4->active_perfmon = NULL;    in vc4_perfmon_stop()
    103  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_perfmon_create_ioctl() local
    110  if (!vc4->v3d) {    in vc4_perfmon_create_ioctl()
    155  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_perfmon_destroy_ioctl() local
    [all …]
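The vc4_perfmon.c hits above sketch a "single active performance monitor" rule: starting refuses to replace an already-active monitor, and stopping refuses to stop one that is not the active monitor. A small userspace sketch of that guard follows; the struct and the assert() checks are stand-ins for the driver's types and its WARN_ON_ONCE() calls.

    #include <assert.h>
    #include <stddef.h>

    struct perfmon { int id; };

    static struct perfmon *active_perfmon;

    static void perfmon_start(struct perfmon *pm)
    {
        /* assert() models the driver's warn-and-bail check. */
        assert(pm && !active_perfmon);
        active_perfmon = pm;
    }

    static void perfmon_stop(struct perfmon *pm)
    {
        assert(active_perfmon && pm == active_perfmon);
        active_perfmon = NULL;
    }

    int main(void)
    {
        struct perfmon pm = { .id = 1 };

        perfmon_start(&pm);
        perfmon_stop(&pm);
        return 0;
    }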
|
D | vc4_hdmi.c |
    97  #define HDMI_READ(offset) readl(vc4->hdmi->hdmicore_regs + offset)
    98  #define HDMI_WRITE(offset, val) writel(val, vc4->hdmi->hdmicore_regs + offset)
    99  #define HD_READ(offset) readl(vc4->hdmi->hd_regs + offset)
    100  #define HD_WRITE(offset, val) writel(val, vc4->hdmi->hd_regs + offset)
    186  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_hdmi_debugfs_regs() local
    187  struct vc4_hdmi *hdmi = vc4->hdmi;    in vc4_hdmi_debugfs_regs()
    200  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_hdmi_connector_detect() local
    202  if (vc4->hdmi->hpd_gpio) {    in vc4_hdmi_connector_detect()
    203  if (gpio_get_value_cansleep(vc4->hdmi->hpd_gpio) ^    in vc4_hdmi_connector_detect()
    204  vc4->hdmi->hpd_active_low)    in vc4_hdmi_connector_detect()
    [all …]
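The vc4_hdmi.c hits above include the hot-plug-detect test, where the raw GPIO level is XOR-ed with an "active low" flag so one expression handles both polarities. A tiny sketch of that logic follows; read_hpd_gpio() is an illustrative stub for the real GPIO read.

    #include <stdbool.h>
    #include <stdio.h>

    static int read_hpd_gpio(void) { return 0; }  /* stub: raw GPIO level */

    static bool hdmi_connected(bool hpd_active_low)
    {
        /* A low level on an active-low HPD line still means "connected". */
        return read_hpd_gpio() ^ hpd_active_low;
    }

    int main(void)
    {
        printf("connected: %d\n", hdmi_connected(true));
        return 0;
    }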
|
D | vc4_drv.h |
    309  struct vc4_dev *vc4;    member
    478  #define V3D_READ(offset) readl(vc4->v3d->regs + offset)
    479  #define V3D_WRITE(offset, val) writel(val, vc4->v3d->regs + offset)
    480  #define HVS_READ(offset) readl(vc4->hvs->regs + offset)
    481  #define HVS_WRITE(offset, val) writel(val, vc4->hvs->regs + offset)
    616  vc4_first_bin_job(struct vc4_dev *vc4)    in vc4_first_bin_job() argument
    618  return list_first_entry_or_null(&vc4->bin_job_list,    in vc4_first_bin_job()
    623  vc4_first_render_job(struct vc4_dev *vc4)    in vc4_first_render_job() argument
    625  return list_first_entry_or_null(&vc4->render_job_list,    in vc4_first_render_job()
    630  vc4_last_render_job(struct vc4_dev *vc4)    in vc4_last_render_job() argument
    [all …]
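The vc4_drv.h hits above show the V3D_READ/V3D_WRITE-style accessors: a byte offset into a mapped register window, read or written as a 32-bit word. The sketch below models that macro pattern in plain C, assuming an in-memory array in place of the ioremap()ed hardware window, so the volatile accesses only stand in for readl()/writel().

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t fake_regs[0x40];   /* stands in for the mapped register window */

    #define REG_READ(base, offset) \
        (*(volatile uint32_t *)((volatile uint8_t *)(base) + (offset)))
    #define REG_WRITE(base, offset, val) \
        (*(volatile uint32_t *)((volatile uint8_t *)(base) + (offset)) = (val))

    int main(void)
    {
        REG_WRITE(fake_regs, 0x10, 0xdeadbeefu);
        printf("0x%08x\n", (unsigned)REG_READ(fake_regs, 0x10));
        return 0;
    }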
|
D | vc4_debugfs.c |
    26  struct vc4_dev *vc4 = to_vc4_dev(minor->dev);    in vc4_debugfs_init() local
    30  minor->debugfs_root, &vc4->load_tracker_enabled);    in vc4_debugfs_init()
    32  list_for_each_entry(entry, &vc4->debugfs_list, link) {    in vc4_debugfs_init()
    68  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_debugfs_add_file() local
    80  list_add(&entry->link, &vc4->debugfs_list);    in vc4_debugfs_add_file()
|
D | Makefile |
    5  vc4-y := \
    27  vc4-$(CONFIG_DEBUG_FS) += vc4_debugfs.o
    29  obj-$(CONFIG_DRM_VC4) += vc4.o
|
D | vc4_crtc.c |
    92  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_crtc_get_scanoutpos() local
    214  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_crtc_lut_load() local
    385  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_crtc_mode_set_nofb() local
    446  struct vc4_dev *vc4 = to_vc4_dev(dev);    in require_hvs_enabled() local
    456  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_crtc_atomic_disable() local
    517  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_crtc_update_dlist() local
    549  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_crtc_atomic_enable() local
    630  struct vc4_dev *vc4 = to_vc4_dev(dev);    in vc4_crtc_atomic_check() local
    650  spin_lock_irqsave(&vc4->hvs->mm_lock, flags);    in vc4_crtc_atomic_check()
    651  ret = drm_mm_insert_node(&vc4->hvs->dlist_mm, &vc4_state->mm,    in vc4_crtc_atomic_check()
    [all …]
|
D | vc4_fence.c |
    39  struct vc4_dev *vc4 = to_vc4_dev(f->dev);    in vc4_fence_signaled() local
    41  return vc4->finished_seqno >= f->seqno;    in vc4_fence_signaled()
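The vc4_fence.c hits above show the signaled test: a fence is signaled once the device's completed-sequence-number counter has reached the fence's own seqno. A minimal sketch of that comparison follows, with plain integers standing in for the driver's structures.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    static bool fence_signaled(uint64_t finished_seqno, uint64_t fence_seqno)
    {
        return finished_seqno >= fence_seqno;
    }

    int main(void)
    {
        printf("%d\n", fence_signaled(10, 7));   /* 1: already finished */
        printf("%d\n", fence_signaled(10, 12));  /* 0: still pending    */
        return 0;
    }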
|
D | vc4_trace.h |
    14  #define TRACE_SYSTEM vc4
    59  #define TRACE_INCLUDE_PATH ../../drivers/gpu/drm/vc4
|
D | vc4_dpi.c |
    261  struct vc4_dev *vc4 = to_vc4_dev(drm);    in vc4_dpi_bind() local
    321  vc4->dpi = dpi;    in vc4_dpi_bind()
    337  struct vc4_dev *vc4 = to_vc4_dev(drm);    in vc4_dpi_unbind() local
    346  vc4->dpi = NULL;    in vc4_dpi_unbind()
|
D | vc4_txp.c |
    373  struct vc4_dev *vc4 = to_vc4_dev(drm);    in vc4_txp_bind() local
    409  vc4->txp = txp;    in vc4_txp_bind()
    420  struct vc4_dev *vc4 = to_vc4_dev(drm);    in vc4_txp_unbind() local
    425  vc4->txp = NULL;    in vc4_txp_unbind()
|
D | vc4_plane.c |
    178  struct vc4_dev *vc4 = to_vc4_dev(plane->dev);    in vc4_plane_destroy_state() local
    184  spin_lock_irqsave(&vc4->hvs->mm_lock, irqflags);    in vc4_plane_destroy_state()
    186  spin_unlock_irqrestore(&vc4->hvs->mm_lock, irqflags);    in vc4_plane_destroy_state()
    545  struct vc4_dev *vc4 = to_vc4_dev(state->plane->dev);    in vc4_plane_allocate_lbm() local
    563  spin_lock_irqsave(&vc4->hvs->mm_lock, irqflags);    in vc4_plane_allocate_lbm()
    564  ret = drm_mm_insert_node_generic(&vc4->hvs->lbm_mm,    in vc4_plane_allocate_lbm()
    567  spin_unlock_irqrestore(&vc4->hvs->mm_lock, irqflags);    in vc4_plane_allocate_lbm()
    586  struct vc4_dev *vc4 = to_vc4_dev(plane->dev);    in vc4_plane_mode_set() local
    893  u32 kernel = VC4_SET_FIELD(vc4->hvs->mitchell_netravali_filter.start,    in vc4_plane_mode_set()
|
D | vc4_vec.c |
    529  struct vc4_dev *vc4 = to_vc4_dev(drm);    in vc4_vec_bind() local
    581  vc4->vec = vec;    in vc4_vec_bind()
    598  struct vc4_dev *vc4 = to_vc4_dev(drm);    in vc4_vec_unbind() local
    605  vc4->vec = NULL;    in vc4_vec_unbind()
|
D | vc4_validate.c |
    352  struct vc4_dev *vc4 = to_vc4_dev(dev);    in validate_tile_binning_config() local
    382  bin_slot = vc4_v3d_get_bin_slot(vc4);    in validate_tile_binning_config()
    395  bin_addr = vc4->bin_bo->base.paddr + bin_slot * vc4->bin_alloc_size;    in validate_tile_binning_config()
    417  *(uint32_t *)(validated + 4) = (bin_addr + vc4->bin_alloc_size -    in validate_tile_binning_config()
|
/Linux-v5.4/Documentation/gpu/ |
D | vc4.rst |
    2  drm/vc4 Broadcom VC4 Graphics Driver
    5  .. kernel-doc:: drivers/gpu/drm/vc4/vc4_drv.c
    18  .. kernel-doc:: drivers/gpu/drm/vc4/vc4_crtc.c
    24  .. kernel-doc:: drivers/gpu/drm/vc4/vc4_hvs.c
    30  .. kernel-doc:: drivers/gpu/drm/vc4/vc4_plane.c
    36  .. kernel-doc:: drivers/gpu/drm/vc4/vc4_hdmi.c
    42  .. kernel-doc:: drivers/gpu/drm/vc4/vc4_dsi.c
    48  .. kernel-doc:: drivers/gpu/drm/vc4/vc4_dpi.c
    54  .. kernel-doc:: drivers/gpu/drm/vc4/vc4_vec.c
    60  This section covers the GEM implementation in the vc4 driver.
    [all …]
|
D | drivers.rst | 16 vc4
|
/Linux-v5.4/Documentation/devicetree/bindings/display/ |
D | brcm,bcm-vc4.txt |
    8  - compatible: Should be "brcm,bcm2835-vc4" or "brcm,cygnus-vc4"
    162  vc4: gpu {
    163  compatible = "brcm,bcm2835-vc4";
|
/Linux-v5.4/drivers/net/dsa/b53/ |
D | b53_common.c |
    357  u8 mgmt, vc0, vc1, vc4 = 0, vc5;    in b53_enable_vlan() local
    364  b53_read8(dev, B53_VLAN_PAGE, B53_VLAN_CTRL4_25, &vc4);    in b53_enable_vlan()
    367  b53_read8(dev, B53_VLAN_PAGE, B53_VLAN_CTRL4_63XX, &vc4);    in b53_enable_vlan()
    370  b53_read8(dev, B53_VLAN_PAGE, B53_VLAN_CTRL4, &vc4);    in b53_enable_vlan()
    379  vc4 &= ~VC4_ING_VID_CHECK_MASK;    in b53_enable_vlan()
    381  vc4 |= VC4_ING_VID_VIO_DROP << VC4_ING_VID_CHECK_S;    in b53_enable_vlan()
    384  vc4 |= VC4_ING_VID_VIO_FWD << VC4_ING_VID_CHECK_S;    in b53_enable_vlan()
    397  vc4 &= ~VC4_ING_VID_CHECK_MASK;    in b53_enable_vlan()
    401  vc4 |= VC4_ING_VID_VIO_FWD << VC4_ING_VID_CHECK_S;    in b53_enable_vlan()
    403  vc4 |= VC4_ING_VID_VIO_TO_IMP << VC4_ING_VID_CHECK_S;    in b53_enable_vlan()
    [all …]
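In this entry "vc4" is simply the local holding VLAN control register 4 of the b53 switch, unrelated to the GPU driver above. The hits show a classic read-modify-write: the ingress-VID-check field is masked out and then set to one of several behaviours. The sketch below models that field update; the mask, shift, and value macros are illustrative placeholders, not the switch's real register layout.

    #include <stdint.h>
    #include <stdio.h>

    #define ING_VID_CHECK_S     6                          /* assumed field position */
    #define ING_VID_CHECK_MASK  (0x3 << ING_VID_CHECK_S)   /* assumed field width    */
    #define ING_VID_VIO_FWD     0x0                        /* assumed field values   */
    #define ING_VID_VIO_DROP    0x1

    static uint8_t set_ing_vid_check(uint8_t vc4_reg, uint8_t behaviour)
    {
        vc4_reg &= ~ING_VID_CHECK_MASK;            /* clear the old field */
        vc4_reg |= behaviour << ING_VID_CHECK_S;   /* install the new one */
        return vc4_reg;
    }

    int main(void)
    {
        uint8_t vc4_reg = 0xff;                    /* pretend register readback */

        vc4_reg = set_ing_vid_check(vc4_reg, ING_VID_VIO_DROP);
        printf("0x%02x\n", vc4_reg);
        return 0;
    }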
|
/Linux-v5.4/drivers/staging/vc04_services/bcm2835-camera/ |
D | TODO | 5 vc4 driver can import them. This may involve bringing in the VCSM
|