
Searched refs:rdev (Results 1 – 25 of 496) sorted by relevance


/Linux-v4.19/drivers/gpu/drm/radeon/
radeon_asic.h
34 uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
35 void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
36 uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
37 void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);
39 uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
40 void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
41 uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
42 void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
43 void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
61 int r100_init(struct radeon_device *rdev);
[all …]
r420.c
39 void r420_pm_init_profile(struct radeon_device *rdev) in r420_pm_init_profile() argument
42 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; in r420_pm_init_profile()
43 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; in r420_pm_init_profile()
44 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0; in r420_pm_init_profile()
45 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0; in r420_pm_init_profile()
47 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 0; in r420_pm_init_profile()
48 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 0; in r420_pm_init_profile()
49 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; in r420_pm_init_profile()
50 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; in r420_pm_init_profile()
52 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 0; in r420_pm_init_profile()
[all …]
radeon_pm.c
45 static int radeon_debugfs_pm_init(struct radeon_device *rdev);
46 static bool radeon_pm_in_vbl(struct radeon_device *rdev);
47 static bool radeon_pm_debug_check_in_vbl(struct radeon_device *rdev, bool finish);
48 static void radeon_pm_update_profile(struct radeon_device *rdev);
49 static void radeon_pm_set_clocks(struct radeon_device *rdev);
51 int radeon_pm_get_type_index(struct radeon_device *rdev, in radeon_pm_get_type_index() argument
58 for (i = 0; i < rdev->pm.num_power_states; i++) { in radeon_pm_get_type_index()
59 if (rdev->pm.power_state[i].type == ps_type) { in radeon_pm_get_type_index()
66 return rdev->pm.default_power_state_index; in radeon_pm_get_type_index()
69 void radeon_pm_acpi_event_handler(struct radeon_device *rdev) in radeon_pm_acpi_event_handler() argument
[all …]
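
The radeon_pm_get_type_index() excerpt above is a plain lookup: walk rdev->pm.power_state[], return the index of the first entry whose type matches, and fall back to the default power-state index when nothing matches. Below is a minimal standalone sketch of that pattern; the struct layouts and the single ps_type parameter are simplifications (the real parameter list is truncated in the excerpt), not the kernel's actual types.

```c
#include <stdio.h>

/* Simplified stand-ins; not the real radeon structures. */
enum ps_type { PS_TYPE_DEFAULT, PS_TYPE_BATTERY, PS_TYPE_PERFORMANCE };

struct power_state { enum ps_type type; };

struct pm_info {
	struct power_state power_state[8];
	int num_power_states;
	int default_power_state_index;
};

/* Return the index of the first power state of the requested type,
 * or the default index if no state of that type exists. */
static int pm_get_type_index(const struct pm_info *pm, enum ps_type ps_type)
{
	int i;

	for (i = 0; i < pm->num_power_states; i++)
		if (pm->power_state[i].type == ps_type)
			return i;
	return pm->default_power_state_index;
}

int main(void)
{
	struct pm_info pm = {
		.power_state = { { PS_TYPE_DEFAULT }, { PS_TYPE_BATTERY },
				 { PS_TYPE_PERFORMANCE } },
		.num_power_states = 3,
		.default_power_state_index = 0,
	};

	printf("performance index: %d\n",
	       pm_get_type_index(&pm, PS_TYPE_PERFORMANCE));
	return 0;
}
```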
radeon_device.c
152 struct radeon_device *rdev = dev->dev_private; in radeon_is_px() local
154 if (rdev->flags & RADEON_IS_PX) in radeon_is_px()
159 static void radeon_device_handle_px_quirks(struct radeon_device *rdev) in radeon_device_handle_px_quirks() argument
165 if (rdev->pdev->vendor == p->chip_vendor && in radeon_device_handle_px_quirks()
166 rdev->pdev->device == p->chip_device && in radeon_device_handle_px_quirks()
167 rdev->pdev->subsystem_vendor == p->subsys_vendor && in radeon_device_handle_px_quirks()
168 rdev->pdev->subsystem_device == p->subsys_device) { in radeon_device_handle_px_quirks()
169 rdev->px_quirk_flags = p->px_quirk_flags; in radeon_device_handle_px_quirks()
175 if (rdev->px_quirk_flags & RADEON_PX_QUIRK_DISABLE_PX) in radeon_device_handle_px_quirks()
176 rdev->flags &= ~RADEON_IS_PX; in radeon_device_handle_px_quirks()
[all …]
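
radeon_device_handle_px_quirks() above walks a quirk table, comparing the device's PCI vendor/device and subsystem vendor/device IDs against each entry; on a match it copies the entry's quirk flags, and RADEON_PX_QUIRK_DISABLE_PX then clears the PX capability flag. A standalone sketch of that table-matching pattern follows; the table contents and IDs here are purely illustrative, not the driver's real quirk list.

```c
#include <stdint.h>
#include <stdio.h>

#define QUIRK_DISABLE_PX (1u << 0)	/* stands in for RADEON_PX_QUIRK_DISABLE_PX */

struct px_quirk {
	uint16_t chip_vendor, chip_device;
	uint16_t subsys_vendor, subsys_device;
	uint32_t px_quirk_flags;
};

/* Illustrative table; the real entries live in radeon_device.c. */
static const struct px_quirk quirk_list[] = {
	{ 0x1002, 0x1234, 0xabcd, 0x5678, QUIRK_DISABLE_PX },
	{ 0, 0, 0, 0, 0 },	/* terminator */
};

static uint32_t lookup_px_quirks(uint16_t vendor, uint16_t device,
				 uint16_t sub_vendor, uint16_t sub_device)
{
	const struct px_quirk *p;

	for (p = quirk_list; p->chip_vendor; p++) {
		if (p->chip_vendor == vendor &&
		    p->chip_device == device &&
		    p->subsys_vendor == sub_vendor &&
		    p->subsys_device == sub_device)
			return p->px_quirk_flags;
	}
	return 0;
}

int main(void)
{
	uint32_t flags = lookup_px_quirks(0x1002, 0x1234, 0xabcd, 0x5678);

	if (flags & QUIRK_DISABLE_PX)
		printf("quirk matched: PX disabled\n");
	return 0;
}
```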
r520.c
36 int r520_mc_wait_for_idle(struct radeon_device *rdev) in r520_mc_wait_for_idle() argument
41 for (i = 0; i < rdev->usec_timeout; i++) { in r520_mc_wait_for_idle()
52 static void r520_gpu_init(struct radeon_device *rdev) in r520_gpu_init() argument
56 rv515_vga_render_disable(rdev); in r520_gpu_init()
78 if (rdev->family == CHIP_RV530) { in r520_gpu_init()
81 r420_pipes_init(rdev); in r520_gpu_init()
88 if (r520_mc_wait_for_idle(rdev)) { in r520_gpu_init()
93 static void r520_vram_get_type(struct radeon_device *rdev) in r520_vram_get_type() argument
97 rdev->mc.vram_width = 128; in r520_vram_get_type()
98 rdev->mc.vram_is_ddr = true; in r520_vram_get_type()
[all …]
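
r520_mc_wait_for_idle() (and rs690_mc_wait_for_idle() further down) use the common bounded-polling idiom: loop for at most rdev->usec_timeout iterations, test an idle condition each pass, and report failure if it never becomes true. The register read and per-iteration delay are elided from the excerpt, so this standalone sketch substitutes a dummy status function and usleep(); the error code is the sketch's choice, not the driver's.

```c
#include <errno.h>
#include <stdbool.h>
#include <stdio.h>
#include <unistd.h>

#define USEC_TIMEOUT 100000	/* plays the role of rdev->usec_timeout */

/* Dummy stand-in for reading the memory-controller idle bit. */
static bool mc_is_idle(void)
{
	static int polls;
	return ++polls > 3;	/* pretend the MC goes idle after a few polls */
}

/* Poll the idle condition once per microsecond until it holds or we time out. */
static int mc_wait_for_idle(void)
{
	unsigned int i;

	for (i = 0; i < USEC_TIMEOUT; i++) {
		if (mc_is_idle())
			return 0;
		usleep(1);
	}
	return -ETIMEDOUT;	/* timed out */
}

int main(void)
{
	printf("mc_wait_for_idle() = %d\n", mc_wait_for_idle());
	return 0;
}
```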
radeon_irq_kms.c
51 struct radeon_device *rdev = dev->dev_private; in radeon_driver_irq_handler_kms() local
54 ret = radeon_irq_process(rdev); in radeon_driver_irq_handler_kms()
76 struct radeon_device *rdev = container_of(work, struct radeon_device, in radeon_hotplug_work_func() local
78 struct drm_device *dev = rdev->ddev; in radeon_hotplug_work_func()
84 if (!rdev->mode_info.mode_config_initialized) in radeon_hotplug_work_func()
97 struct radeon_device *rdev = container_of(work, struct radeon_device, in radeon_dp_work_func() local
99 struct drm_device *dev = rdev->ddev; in radeon_dp_work_func()
117 struct radeon_device *rdev = dev->dev_private; in radeon_driver_irq_preinstall_kms() local
121 spin_lock_irqsave(&rdev->irq.lock, irqflags); in radeon_driver_irq_preinstall_kms()
124 atomic_set(&rdev->irq.ring_int[i], 0); in radeon_driver_irq_preinstall_kms()
[all …]
rs400.c
36 static int rs400_debugfs_pcie_gart_info_init(struct radeon_device *rdev);
38 void rs400_gart_adjust_size(struct radeon_device *rdev) in rs400_gart_adjust_size() argument
41 switch (rdev->mc.gtt_size/(1024*1024)) { in rs400_gart_adjust_size()
52 (unsigned)(rdev->mc.gtt_size >> 20)); in rs400_gart_adjust_size()
55 rdev->mc.gtt_size = 32 * 1024 * 1024; in rs400_gart_adjust_size()
60 void rs400_gart_tlb_flush(struct radeon_device *rdev) in rs400_gart_tlb_flush() argument
63 unsigned int timeout = rdev->usec_timeout; in rs400_gart_tlb_flush()
76 int rs400_gart_init(struct radeon_device *rdev) in rs400_gart_init() argument
80 if (rdev->gart.ptr) { in rs400_gart_init()
85 switch(rdev->mc.gtt_size / (1024 * 1024)) { in rs400_gart_init()
[all …]
radeon_gart.c
68 int radeon_gart_table_ram_alloc(struct radeon_device *rdev) in radeon_gart_table_ram_alloc() argument
72 ptr = pci_alloc_consistent(rdev->pdev, rdev->gart.table_size, in radeon_gart_table_ram_alloc()
73 &rdev->gart.table_addr); in radeon_gart_table_ram_alloc()
78 if (rdev->family == CHIP_RS400 || rdev->family == CHIP_RS480 || in radeon_gart_table_ram_alloc()
79 rdev->family == CHIP_RS690 || rdev->family == CHIP_RS740) { in radeon_gart_table_ram_alloc()
81 rdev->gart.table_size >> PAGE_SHIFT); in radeon_gart_table_ram_alloc()
84 rdev->gart.ptr = ptr; in radeon_gart_table_ram_alloc()
85 memset((void *)rdev->gart.ptr, 0, rdev->gart.table_size); in radeon_gart_table_ram_alloc()
98 void radeon_gart_table_ram_free(struct radeon_device *rdev) in radeon_gart_table_ram_free() argument
100 if (rdev->gart.ptr == NULL) { in radeon_gart_table_ram_free()
[all …]
r600.c
100 int r600_debugfs_mc_info_init(struct radeon_device *rdev);
103 int r600_mc_wait_for_idle(struct radeon_device *rdev);
104 static void r600_gpu_init(struct radeon_device *rdev);
105 void r600_fini(struct radeon_device *rdev);
106 void r600_irq_disable(struct radeon_device *rdev);
107 static void r600_pcie_gen2_enable(struct radeon_device *rdev);
108 extern int evergreen_rlc_resume(struct radeon_device *rdev);
109 extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
114 u32 r600_rcu_rreg(struct radeon_device *rdev, u32 reg) in r600_rcu_rreg() argument
119 spin_lock_irqsave(&rdev->rcu_idx_lock, flags); in r600_rcu_rreg()
[all …]
rv770.c
42 static void rv770_gpu_init(struct radeon_device *rdev);
43 void rv770_fini(struct radeon_device *rdev);
44 static void rv770_pcie_gen2_enable(struct radeon_device *rdev);
45 int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
47 int rv770_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk) in rv770_set_uvd_clocks() argument
53 if (rdev->family == CHIP_RV740) in rv770_set_uvd_clocks()
54 return evergreen_set_uvd_clocks(rdev, vclk, dclk); in rv770_set_uvd_clocks()
67 r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 50000, 160000, in rv770_set_uvd_clocks()
87 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); in rv770_set_uvd_clocks()
118 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); in rv770_set_uvd_clocks()
[all …]
ni.c
42 u32 tn_smc_rreg(struct radeon_device *rdev, u32 reg) in tn_smc_rreg() argument
47 spin_lock_irqsave(&rdev->smc_idx_lock, flags); in tn_smc_rreg()
50 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); in tn_smc_rreg()
54 void tn_smc_wreg(struct radeon_device *rdev, u32 reg, u32 v) in tn_smc_wreg() argument
58 spin_lock_irqsave(&rdev->smc_idx_lock, flags); in tn_smc_wreg()
61 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); in tn_smc_wreg()
190 extern bool evergreen_is_display_hung(struct radeon_device *rdev);
191 extern void evergreen_print_gpu_status_regs(struct radeon_device *rdev);
192 extern void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save);
193 extern void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save);
[all …]
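
The tn_smc_rreg()/tn_smc_wreg() excerpts show only the locking: each access takes rdev->smc_idx_lock with spin_lock_irqsave() and releases it afterwards. The register accesses themselves are elided, but these helpers (like the eg_cg_* and eg_pif_phy0_* ones in evergreen.c below) go through an index/data register pair, which is why the lock is needed: the index write and the data access must not interleave with another CPU's. A rough userspace sketch of that idiom, with a pthread mutex standing in for the spinlock and a plain array standing in for the banked registers:

```c
#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

/* Stand-ins for a hardware index/data register pair backed by a register bank. */
static uint32_t bank[256];
static uint32_t index_reg;
static pthread_mutex_t idx_lock = PTHREAD_MUTEX_INITIALIZER;

enum { REG_INDEX, REG_DATA };

static void wreg(int which, uint32_t v)
{
	if (which == REG_INDEX)
		index_reg = v;			/* select the slot */
	else
		bank[index_reg & 0xff] = v;	/* access the selected slot */
}

static uint32_t rreg(int which)
{
	return which == REG_INDEX ? index_reg : bank[index_reg & 0xff];
}

/* Serialize the index+data sequence, as smc_idx_lock does in the driver. */
static uint32_t smc_rreg(uint32_t reg)
{
	uint32_t r;

	pthread_mutex_lock(&idx_lock);
	wreg(REG_INDEX, reg);
	r = rreg(REG_DATA);
	pthread_mutex_unlock(&idx_lock);
	return r;
}

static void smc_wreg(uint32_t reg, uint32_t v)
{
	pthread_mutex_lock(&idx_lock);
	wreg(REG_INDEX, reg);
	wreg(REG_DATA, v);
	pthread_mutex_unlock(&idx_lock);
}

int main(void)
{
	smc_wreg(0x10, 0xdeadbeef);
	printf("reg 0x10 = 0x%08x\n", smc_rreg(0x10));
	return 0;
}
```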
radeon.h
246 bool radeon_get_bios(struct radeon_device *rdev);
256 int radeon_dummy_page_init(struct radeon_device *rdev);
257 void radeon_dummy_page_fini(struct radeon_device *rdev);
282 int radeon_pm_init(struct radeon_device *rdev);
283 int radeon_pm_late_init(struct radeon_device *rdev);
284 void radeon_pm_fini(struct radeon_device *rdev);
285 void radeon_pm_compute_clocks(struct radeon_device *rdev);
286 void radeon_pm_suspend(struct radeon_device *rdev);
287 void radeon_pm_resume(struct radeon_device *rdev);
288 void radeon_combios_get_power_modes(struct radeon_device *rdev);
[all …]
rs600.c
47 static void rs600_gpu_init(struct radeon_device *rdev);
48 int rs600_mc_wait_for_idle(struct radeon_device *rdev);
56 static bool avivo_is_in_vblank(struct radeon_device *rdev, int crtc) in avivo_is_in_vblank() argument
64 static bool avivo_is_counter_moving(struct radeon_device *rdev, int crtc) in avivo_is_counter_moving() argument
85 void avivo_wait_for_vblank(struct radeon_device *rdev, int crtc) in avivo_wait_for_vblank() argument
89 if (crtc >= rdev->num_crtc) in avivo_wait_for_vblank()
98 while (avivo_is_in_vblank(rdev, crtc)) { in avivo_wait_for_vblank()
100 if (!avivo_is_counter_moving(rdev, crtc)) in avivo_wait_for_vblank()
105 while (!avivo_is_in_vblank(rdev, crtc)) { in avivo_wait_for_vblank()
107 if (!avivo_is_counter_moving(rdev, crtc)) in avivo_wait_for_vblank()
[all …]
evergreen.c
45 u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg) in eg_cg_rreg() argument
50 spin_lock_irqsave(&rdev->cg_idx_lock, flags); in eg_cg_rreg()
53 spin_unlock_irqrestore(&rdev->cg_idx_lock, flags); in eg_cg_rreg()
57 void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v) in eg_cg_wreg() argument
61 spin_lock_irqsave(&rdev->cg_idx_lock, flags); in eg_cg_wreg()
64 spin_unlock_irqrestore(&rdev->cg_idx_lock, flags); in eg_cg_wreg()
67 u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg) in eg_pif_phy0_rreg() argument
72 spin_lock_irqsave(&rdev->pif_idx_lock, flags); in eg_pif_phy0_rreg()
75 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags); in eg_pif_phy0_rreg()
79 void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v) in eg_pif_phy0_wreg() argument
[all …]
rv6xx_dpm.c
34 static u32 rv6xx_scale_count_given_unit(struct radeon_device *rdev,
44 static struct rv6xx_power_info *rv6xx_get_pi(struct radeon_device *rdev) in rv6xx_get_pi() argument
46 struct rv6xx_power_info *pi = rdev->pm.dpm.priv; in rv6xx_get_pi()
51 static void rv6xx_force_pcie_gen1(struct radeon_device *rdev) in rv6xx_force_pcie_gen1() argument
64 for (i = 0; i < rdev->usec_timeout; i++) { in rv6xx_force_pcie_gen1()
75 static void rv6xx_enable_pcie_gen2_support(struct radeon_device *rdev) in rv6xx_enable_pcie_gen2_support() argument
88 static void rv6xx_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev, in rv6xx_enable_bif_dynamic_pcie_gen2() argument
101 static void rv6xx_enable_l0s(struct radeon_device *rdev) in rv6xx_enable_l0s() argument
110 static void rv6xx_enable_l1(struct radeon_device *rdev) in rv6xx_enable_l1() argument
122 static void rv6xx_enable_pll_sleep_in_l1(struct radeon_device *rdev) in rv6xx_enable_pll_sleep_in_l1() argument
[all …]
radeon_kms.c
57 struct radeon_device *rdev = dev->dev_private; in radeon_driver_unload_kms() local
59 if (rdev == NULL) in radeon_driver_unload_kms()
62 if (rdev->rmmio == NULL) in radeon_driver_unload_kms()
70 radeon_acpi_fini(rdev); in radeon_driver_unload_kms()
72 radeon_modeset_fini(rdev); in radeon_driver_unload_kms()
73 radeon_device_fini(rdev); in radeon_driver_unload_kms()
76 kfree(rdev); in radeon_driver_unload_kms()
95 struct radeon_device *rdev; in radeon_driver_load_kms() local
123 rdev = kzalloc(sizeof(struct radeon_device), GFP_KERNEL); in radeon_driver_load_kms()
124 if (rdev == NULL) { in radeon_driver_load_kms()
[all …]
radeon_fence.c
62 static void radeon_fence_write(struct radeon_device *rdev, u32 seq, int ring) in radeon_fence_write() argument
64 struct radeon_fence_driver *drv = &rdev->fence_drv[ring]; in radeon_fence_write()
65 if (likely(rdev->wb.enabled || !drv->scratch_reg)) { in radeon_fence_write()
83 static u32 radeon_fence_read(struct radeon_device *rdev, int ring) in radeon_fence_read() argument
85 struct radeon_fence_driver *drv = &rdev->fence_drv[ring]; in radeon_fence_read()
88 if (likely(rdev->wb.enabled || !drv->scratch_reg)) { in radeon_fence_read()
108 static void radeon_fence_schedule_check(struct radeon_device *rdev, int ring) in radeon_fence_schedule_check() argument
115 &rdev->fence_drv[ring].lockup_work, in radeon_fence_schedule_check()
129 int radeon_fence_emit(struct radeon_device *rdev, in radeon_fence_emit() argument
140 (*fence)->rdev = rdev; in radeon_fence_emit()
[all …]
rs690.c
35 int rs690_mc_wait_for_idle(struct radeon_device *rdev) in rs690_mc_wait_for_idle() argument
40 for (i = 0; i < rdev->usec_timeout; i++) { in rs690_mc_wait_for_idle()
50 static void rs690_gpu_init(struct radeon_device *rdev) in rs690_gpu_init() argument
53 r420_pipes_init(rdev); in rs690_gpu_init()
54 if (rs690_mc_wait_for_idle(rdev)) { in rs690_gpu_init()
64 void rs690_pm_info(struct radeon_device *rdev) in rs690_pm_info() argument
72 if (atom_parse_data_header(rdev->mode_info.atom_context, index, NULL, in rs690_pm_info()
74 info = (union igp_info *)(rdev->mode_info.atom_context->bios + data_offset); in rs690_pm_info()
80 rdev->pm.igp_sideport_mclk.full = dfixed_const(le32_to_cpu(info->info.ulBootUpMemoryClock)); in rs690_pm_info()
81 rdev->pm.igp_sideport_mclk.full = dfixed_div(rdev->pm.igp_sideport_mclk, tmp); in rs690_pm_info()
[all …]
/Linux-v4.19/net/wireless/
rdev-ops.h
10 static inline int rdev_suspend(struct cfg80211_registered_device *rdev, in rdev_suspend() argument
14 trace_rdev_suspend(&rdev->wiphy, wowlan); in rdev_suspend()
15 ret = rdev->ops->suspend(&rdev->wiphy, wowlan); in rdev_suspend()
16 trace_rdev_return_int(&rdev->wiphy, ret); in rdev_suspend()
20 static inline int rdev_resume(struct cfg80211_registered_device *rdev) in rdev_resume() argument
23 trace_rdev_resume(&rdev->wiphy); in rdev_resume()
24 ret = rdev->ops->resume(&rdev->wiphy); in rdev_resume()
25 trace_rdev_return_int(&rdev->wiphy, ret); in rdev_resume()
29 static inline void rdev_set_wakeup(struct cfg80211_registered_device *rdev, in rdev_set_wakeup() argument
32 trace_rdev_set_wakeup(&rdev->wiphy, enabled); in rdev_set_wakeup()
[all …]
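
Every wrapper in net/wireless/rdev-ops.h has the same shape: emit an entry tracepoint, call the driver's callback through rdev->ops, trace the return value, and hand it back. A standalone sketch of that shape, with printf() standing in for the trace_rdev_* tracepoints and heavily simplified types in place of struct wiphy and the cfg80211 ops table:

```c
#include <stdio.h>

/* Heavily simplified stand-ins for struct wiphy and the driver ops table. */
struct wiphy { const char *name; };

struct ops {
	int (*resume)(struct wiphy *wiphy);
};

struct registered_device {
	struct wiphy wiphy;
	const struct ops *ops;
};

/* trace entry -> call through the ops table -> trace the return value */
static inline int rdev_resume(struct registered_device *rdev)
{
	int ret;

	printf("trace: rdev_resume(%s)\n", rdev->wiphy.name);
	ret = rdev->ops->resume(&rdev->wiphy);
	printf("trace: rdev_return_int(%s) = %d\n", rdev->wiphy.name, ret);
	return ret;
}

/* A trivial "driver" callback. */
static int drv_resume(struct wiphy *wiphy)
{
	(void)wiphy;
	return 0;
}

int main(void)
{
	static const struct ops drv_ops = { .resume = drv_resume };
	struct registered_device rdev = { .wiphy = { "phy0" }, .ops = &drv_ops };

	return rdev_resume(&rdev);
}
```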
core.c
57 struct cfg80211_registered_device *result = NULL, *rdev; in cfg80211_rdev_by_wiphy_idx() local
61 list_for_each_entry(rdev, &cfg80211_rdev_list, list) { in cfg80211_rdev_by_wiphy_idx()
62 if (rdev->wiphy_idx == wiphy_idx) { in cfg80211_rdev_by_wiphy_idx()
63 result = rdev; in cfg80211_rdev_by_wiphy_idx()
73 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); in get_wiphy_idx() local
75 return rdev->wiphy_idx; in get_wiphy_idx()
80 struct cfg80211_registered_device *rdev; in wiphy_idx_to_wiphy() local
84 rdev = cfg80211_rdev_by_wiphy_idx(wiphy_idx); in wiphy_idx_to_wiphy()
85 if (!rdev) in wiphy_idx_to_wiphy()
87 return &rdev->wiphy; in wiphy_idx_to_wiphy()
[all …]
/Linux-v4.19/net/ieee802154/
rdev-ops.h
11 rdev_add_virtual_intf_deprecated(struct cfg802154_registered_device *rdev, in rdev_add_virtual_intf_deprecated() argument
16 return rdev->ops->add_virtual_intf_deprecated(&rdev->wpan_phy, name, in rdev_add_virtual_intf_deprecated()
21 rdev_del_virtual_intf_deprecated(struct cfg802154_registered_device *rdev, in rdev_del_virtual_intf_deprecated() argument
24 rdev->ops->del_virtual_intf_deprecated(&rdev->wpan_phy, dev); in rdev_del_virtual_intf_deprecated()
28 rdev_suspend(struct cfg802154_registered_device *rdev) in rdev_suspend() argument
31 trace_802154_rdev_suspend(&rdev->wpan_phy); in rdev_suspend()
32 ret = rdev->ops->suspend(&rdev->wpan_phy); in rdev_suspend()
33 trace_802154_rdev_return_int(&rdev->wpan_phy, ret); in rdev_suspend()
38 rdev_resume(struct cfg802154_registered_device *rdev) in rdev_resume() argument
41 trace_802154_rdev_resume(&rdev->wpan_phy); in rdev_resume()
[all …]
/Linux-v4.19/drivers/regulator/
helpers.c
31 int regulator_is_enabled_regmap(struct regulator_dev *rdev) in regulator_is_enabled_regmap() argument
36 ret = regmap_read(rdev->regmap, rdev->desc->enable_reg, &val); in regulator_is_enabled_regmap()
40 val &= rdev->desc->enable_mask; in regulator_is_enabled_regmap()
42 if (rdev->desc->enable_is_inverted) { in regulator_is_enabled_regmap()
43 if (rdev->desc->enable_val) in regulator_is_enabled_regmap()
44 return val != rdev->desc->enable_val; in regulator_is_enabled_regmap()
47 if (rdev->desc->enable_val) in regulator_is_enabled_regmap()
48 return val == rdev->desc->enable_val; in regulator_is_enabled_regmap()
63 int regulator_enable_regmap(struct regulator_dev *rdev) in regulator_enable_regmap() argument
67 if (rdev->desc->enable_is_inverted) { in regulator_enable_regmap()
[all …]
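
regulator_is_enabled_regmap() reads the regulator's enable register through regmap, masks it with enable_mask, and then interprets the result: when enable_val is non-zero the masked value is compared against it, and enable_is_inverted flips the sense of the test; the fallback branches, not visible in the excerpt, reduce to a zero/non-zero check. A standalone sketch of just that decision logic over a plain register value, with regmap itself left out:

```c
#include <stdbool.h>
#include <stdio.h>

/* Simplified stand-in for the relevant fields of struct regulator_desc. */
struct reg_desc {
	unsigned int enable_mask;
	unsigned int enable_val;	/* 0 means "any bit under the mask" */
	bool enable_is_inverted;
};

/* Returns 1 if the masked register value says "enabled", 0 otherwise. */
static int is_enabled(const struct reg_desc *desc, unsigned int reg_val)
{
	unsigned int val = reg_val & desc->enable_mask;

	if (desc->enable_is_inverted) {
		if (desc->enable_val)
			return val != desc->enable_val;
		return val == 0;
	}

	if (desc->enable_val)
		return val == desc->enable_val;
	return val != 0;
}

int main(void)
{
	struct reg_desc desc = { .enable_mask = 0x3, .enable_val = 0x1 };

	printf("0x1 -> %d, 0x2 -> %d\n",
	       is_enabled(&desc, 0x1), is_enabled(&desc, 0x2));
	return 0;
}
```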
core.c
42 #define rdev_crit(rdev, fmt, ...) \ argument
43 pr_crit("%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
44 #define rdev_err(rdev, fmt, ...) \ argument
45 pr_err("%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
46 #define rdev_warn(rdev, fmt, ...) \ argument
47 pr_warn("%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
48 #define rdev_info(rdev, fmt, ...) \ argument
49 pr_info("%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
50 #define rdev_dbg(rdev, fmt, ...) \ argument
51 pr_debug("%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
[all …]
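
The rdev_crit/rdev_err/rdev_warn/rdev_info/rdev_dbg macros above all expand to the matching pr_*() call with the regulator's name (via rdev_get_name()) prepended to the format string. A standalone sketch of that prefixing-macro pattern, with fprintf(stderr, ...) standing in for pr_err():

```c
#include <stdio.h>

struct regulator_dev { const char *name; };

static const char *rdev_get_name(const struct regulator_dev *rdev)
{
	return rdev->name ? rdev->name : "regulator";
}

/* Same shape as the kernel macro: prepend the device name to the format. */
#define rdev_err(rdev, fmt, ...) \
	fprintf(stderr, "%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)

int main(void)
{
	struct regulator_dev rdev = { .name = "vdd_core" };

	rdev_err(&rdev, "failed to enable: %d\n", -22);
	return 0;
}
```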
/Linux-v4.19/drivers/infiniband/hw/bnxt_re/
main.c
81 static void bnxt_re_ib_unreg(struct bnxt_re_dev *rdev);
85 static void bnxt_re_get_sriov_func_type(struct bnxt_re_dev *rdev) in bnxt_re_get_sriov_func_type() argument
89 bp = netdev_priv(rdev->en_dev->net); in bnxt_re_get_sriov_func_type()
91 rdev->is_virtfn = 1; in bnxt_re_get_sriov_func_type()
99 static void bnxt_re_set_resource_limits(struct bnxt_re_dev *rdev) in bnxt_re_set_resource_limits() argument
105 struct bnxt_qplib_dev_attr *dev_attr = &rdev->dev_attr; in bnxt_re_set_resource_limits()
107 rdev->qplib_ctx.qpc_count = min_t(u32, BNXT_RE_MAX_QPC_COUNT, in bnxt_re_set_resource_limits()
110 rdev->qplib_ctx.mrw_count = BNXT_RE_MAX_MRW_COUNT_256K; in bnxt_re_set_resource_limits()
112 rdev->qplib_ctx.mrw_count = min_t(u32, rdev->qplib_ctx.mrw_count, in bnxt_re_set_resource_limits()
114 rdev->qplib_ctx.srqc_count = min_t(u32, BNXT_RE_MAX_SRQC_COUNT, in bnxt_re_set_resource_limits()
[all …]
/Linux-v4.19/drivers/infiniband/hw/cxgb4/
resource.c
38 static int c4iw_init_qid_table(struct c4iw_rdev *rdev) in c4iw_init_qid_table() argument
42 if (c4iw_id_table_alloc(&rdev->resource.qid_table, in c4iw_init_qid_table()
43 rdev->lldi.vr->qp.start, in c4iw_init_qid_table()
44 rdev->lldi.vr->qp.size, in c4iw_init_qid_table()
45 rdev->lldi.vr->qp.size, 0)) in c4iw_init_qid_table()
48 for (i = rdev->lldi.vr->qp.start; in c4iw_init_qid_table()
49 i < rdev->lldi.vr->qp.start + rdev->lldi.vr->qp.size; i++) in c4iw_init_qid_table()
50 if (!(i & rdev->qpmask)) in c4iw_init_qid_table()
51 c4iw_id_free(&rdev->resource.qid_table, i); in c4iw_init_qid_table()
56 int c4iw_init_resource(struct c4iw_rdev *rdev, u32 nr_tpt, in c4iw_init_resource() argument
[all …]
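
c4iw_init_qid_table() above allocates an ID table spanning the adapter's QP ID range and then frees back into it only the IDs whose low bits (rdev->qpmask) are clear, so later allocations hand out queue IDs aligned to the hardware's QP spacing. A standalone sketch of that seeding step, with a trivial array-based free list in place of c4iw_id_table:

```c
#include <stdio.h>

#define QP_START 32u
#define QP_SIZE  64u
#define QP_MASK  0x3u		/* plays the role of rdev->qpmask */

/* Trivial array-based free list standing in for the c4iw ID table. */
static unsigned int free_ids[QP_SIZE];
static unsigned int nr_free;

static void id_free(unsigned int id)
{
	free_ids[nr_free++] = id;
}

/* Seed the pool with only the IDs aligned to the QP spacing. */
static void init_qid_table(void)
{
	unsigned int i;

	for (i = QP_START; i < QP_START + QP_SIZE; i++)
		if (!(i & QP_MASK))
			id_free(i);
}

int main(void)
{
	init_qid_table();
	printf("seeded %u aligned QIDs, first = %u\n", nr_free, free_ids[0]);
	return 0;
}
```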
