
Searched refs:hwdev (Results 1 – 25 of 44) sorted by relevance


/Linux-v4.19/drivers/gpu/drm/arm/
malidp_hw.c
175 static int malidp500_query_hw(struct malidp_hw_device *hwdev) in malidp500_query_hw() argument
177 u32 conf = malidp_hw_read(hwdev, MALIDP500_CONFIG_ID); in malidp500_query_hw()
181 hwdev->min_line_size = 2; in malidp500_query_hw()
182 hwdev->max_line_size = SZ_2K * ln_size_mult; in malidp500_query_hw()
183 hwdev->rotation_memory[0] = SZ_1K * 64 * ln_size_mult; in malidp500_query_hw()
184 hwdev->rotation_memory[1] = 0; /* no second rotation memory bank */ in malidp500_query_hw()
189 static void malidp500_enter_config_mode(struct malidp_hw_device *hwdev) in malidp500_enter_config_mode() argument
193 malidp_hw_setbits(hwdev, MALIDP500_DC_CONFIG_REQ, MALIDP500_DC_CONTROL); in malidp500_enter_config_mode()
195 status = malidp_hw_read(hwdev, hwdev->hw->map.dc_base + MALIDP_REG_STATUS); in malidp500_enter_config_mode()
208 static void malidp500_leave_config_mode(struct malidp_hw_device *hwdev) in malidp500_leave_config_mode() argument
[all …]
malidp_hw.h
138 int (*query_hw)(struct malidp_hw_device *hwdev);
143 void (*enter_config_mode)(struct malidp_hw_device *hwdev);
148 void (*leave_config_mode)(struct malidp_hw_device *hwdev);
153 bool (*in_config_mode)(struct malidp_hw_device *hwdev);
162 void (*set_config_valid)(struct malidp_hw_device *hwdev, u8 value);
168 void (*modeset)(struct malidp_hw_device *hwdev, struct videomode *m);
174 int (*rotmem_required)(struct malidp_hw_device *hwdev, u16 w, u16 h, u32 fmt);
176 int (*se_set_scaling_coeffs)(struct malidp_hw_device *hwdev,
180 long (*se_calc_mclk)(struct malidp_hw_device *hwdev,
193 int (*enable_memwrite)(struct malidp_hw_device *hwdev, dma_addr_t *addrs,
[all …]
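A minimal sketch of how this ops table is dispatched (hypothetical caller; the hook names are the ones listed above, and the same pattern appears in the malidp_crtc.c hits below):

	/* Hypothetical caller; assumes a populated hwdev and a struct videomode vm. */
	static void example_modeset(struct malidp_hw_device *hwdev, struct videomode *vm)
	{
		hwdev->hw->enter_config_mode(hwdev);	/* stop scanout, enter config mode */
		hwdev->hw->modeset(hwdev, vm);		/* program the new display timings */
		hwdev->hw->leave_config_mode(hwdev);	/* resume scanout */
	}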
malidp_drv.c
41 static void malidp_write_gamma_table(struct malidp_hw_device *hwdev, in malidp_write_gamma_table() argument
52 malidp_hw_write(hwdev, gamma_write_mask, in malidp_write_gamma_table()
53 hwdev->hw->map.coeffs_base + MALIDP_COEF_TABLE_ADDR); in malidp_write_gamma_table()
55 malidp_hw_write(hwdev, data[i], in malidp_write_gamma_table()
56 hwdev->hw->map.coeffs_base + in malidp_write_gamma_table()
64 struct malidp_hw_device *hwdev = malidp->dev; in malidp_atomic_commit_update_gamma() local
70 malidp_hw_clearbits(hwdev, in malidp_atomic_commit_update_gamma()
79 malidp_write_gamma_table(hwdev, mc->gamma_coeffs); in malidp_atomic_commit_update_gamma()
81 malidp_hw_setbits(hwdev, MALIDP_DISP_FUNC_GAMMA, in malidp_atomic_commit_update_gamma()
91 struct malidp_hw_device *hwdev = malidp->dev; in malidp_atomic_commit_update_coloradj() local
[all …]
malidp_crtc.c
29 struct malidp_hw_device *hwdev = malidp->dev; in malidp_crtc_mode_valid() local
38 rate = clk_round_rate(hwdev->pxlclk, req_rate); in malidp_crtc_mode_valid()
53 struct malidp_hw_device *hwdev = malidp->dev; in malidp_crtc_atomic_enable() local
63 clk_prepare_enable(hwdev->pxlclk); in malidp_crtc_atomic_enable()
66 clk_set_rate(hwdev->pxlclk, crtc->state->adjusted_mode.crtc_clock * 1000); in malidp_crtc_atomic_enable()
68 hwdev->hw->modeset(hwdev, &vm); in malidp_crtc_atomic_enable()
69 hwdev->hw->leave_config_mode(hwdev); in malidp_crtc_atomic_enable()
77 struct malidp_hw_device *hwdev = malidp->dev; in malidp_crtc_atomic_disable() local
84 hwdev->hw->enter_config_mode(hwdev); in malidp_crtc_atomic_disable()
86 clk_disable_unprepare(hwdev->pxlclk); in malidp_crtc_atomic_disable()
[all …]
malidp_planes.c
193 ms->format = malidp_hw_get_format_id(&mp->hwdev->hw->map, in malidp_de_plane_check()
201 u8 alignment = malidp_hw_get_pitch_align(mp->hwdev, rotated); in malidp_de_plane_check()
209 if ((state->crtc_w > mp->hwdev->max_line_size) || in malidp_de_plane_check()
210 (state->crtc_h > mp->hwdev->max_line_size) || in malidp_de_plane_check()
211 (state->crtc_w < mp->hwdev->min_line_size) || in malidp_de_plane_check()
212 (state->crtc_h < mp->hwdev->min_line_size)) in malidp_de_plane_check()
221 !(mp->hwdev->hw->features & MALIDP_DEVICE_LV_HAS_3_STRIDES) && in malidp_de_plane_check()
239 val = mp->hwdev->hw->rotmem_required(mp->hwdev, state->crtc_w, in malidp_de_plane_check()
261 num_strides = (mp->hwdev->hw->features & in malidp_de_set_plane_pitches()
265 malidp_hw_write(mp->hwdev, pitches[i], in malidp_de_set_plane_pitches()
[all …]
malidp_mw.c
237 struct malidp_hw_device *hwdev = malidp->dev; in malidp_mw_atomic_commit() local
257 hwdev->hw->enable_memwrite(hwdev, mw_state->addrs, in malidp_mw_atomic_commit()
265 hwdev->hw->disable_memwrite(hwdev); in malidp_mw_atomic_commit()
/Linux-v4.19/drivers/net/ethernet/huawei/hinic/
hinic_hw_dev.c
79 static int get_capability(struct hinic_hwdev *hwdev, in get_capability() argument
82 struct hinic_cap *nic_cap = &hwdev->nic_cap; in get_capability()
85 if (!HINIC_IS_PF(hwdev->hwif) && !HINIC_IS_PPF(hwdev->hwif)) in get_capability()
91 num_aeqs = HINIC_HWIF_NUM_AEQS(hwdev->hwif); in get_capability()
92 num_ceqs = HINIC_HWIF_NUM_CEQS(hwdev->hwif); in get_capability()
93 num_irqs = HINIC_HWIF_NUM_IRQS(hwdev->hwif); in get_capability()
122 struct hinic_hwdev *hwdev = &pfhwdev->hwdev; in get_cap_from_fw() local
123 struct hinic_hwif *hwif = hwdev->hwif; in get_cap_from_fw()
140 return get_capability(hwdev, &dev_cap); in get_cap_from_fw()
149 static int get_dev_cap(struct hinic_hwdev *hwdev) in get_dev_cap() argument
[all …]
hinic_port.c
50 struct hinic_hwdev *hwdev = nic_dev->hwdev; in change_mac() local
52 struct hinic_hwif *hwif = hwdev->hwif; in change_mac()
72 err = hinic_port_msg_cmd(hwdev, cmd, &port_mac_cmd, in change_mac()
121 struct hinic_hwdev *hwdev = nic_dev->hwdev; in hinic_port_get_mac() local
123 struct hinic_hwif *hwif = hwdev->hwif; in hinic_port_get_mac()
130 err = hinic_port_msg_cmd(hwdev, HINIC_PORT_CMD_GET_MAC, in hinic_port_get_mac()
153 struct hinic_hwdev *hwdev = nic_dev->hwdev; in hinic_port_set_mtu() local
155 struct hinic_hwif *hwif = hwdev->hwif; in hinic_port_set_mtu()
174 err = hinic_port_msg_cmd(hwdev, HINIC_PORT_CMD_CHANGE_MTU, in hinic_port_set_mtu()
195 struct hinic_hwdev *hwdev = nic_dev->hwdev; in hinic_port_add_vlan() local
[all …]
hinic_hw_dev.h
195 struct hinic_hwdev hwdev; member
202 void hinic_hwdev_cb_register(struct hinic_hwdev *hwdev,
208 void hinic_hwdev_cb_unregister(struct hinic_hwdev *hwdev,
211 int hinic_port_msg_cmd(struct hinic_hwdev *hwdev, enum hinic_port_cmd cmd,
215 int hinic_hwdev_ifup(struct hinic_hwdev *hwdev);
217 void hinic_hwdev_ifdown(struct hinic_hwdev *hwdev);
221 void hinic_free_hwdev(struct hinic_hwdev *hwdev);
223 int hinic_hwdev_num_qps(struct hinic_hwdev *hwdev);
225 struct hinic_sq *hinic_hwdev_get_sq(struct hinic_hwdev *hwdev, int i);
227 struct hinic_rq *hinic_hwdev_get_rq(struct hinic_hwdev *hwdev, int i);
[all …]
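The "member" hit above is a struct hinic_hwdev embedded inside the PF wrapper structure, which is why hinic_hw_dev.c takes &pfhwdev->hwdev. Going the other way uses container_of(); a minimal sketch (to_pfhwdev is a hypothetical helper name):

	#include <linux/kernel.h>	/* container_of() */

	/* Hypothetical helper: recover the PF wrapper from its embedded hwdev. */
	static struct hinic_pfhwdev *to_pfhwdev(struct hinic_hwdev *hwdev)
	{
		return container_of(hwdev, struct hinic_pfhwdev, hwdev);
	}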
hinic_main.c
166 struct hinic_hwdev *hwdev = nic_dev->hwdev; in hinic_get_drvinfo() local
167 struct hinic_hwif *hwif = hwdev->hwif; in hinic_get_drvinfo()
186 struct hinic_hwdev *hwdev = nic_dev->hwdev; in hinic_get_channels() local
188 channels->max_rx = hwdev->nic_cap.max_qps; in hinic_get_channels()
189 channels->max_tx = hwdev->nic_cap.max_qps; in hinic_get_channels()
192 channels->rx_count = hinic_hwdev_num_qps(hwdev); in hinic_get_channels()
193 channels->tx_count = hinic_hwdev_num_qps(hwdev); in hinic_get_channels()
245 int i, num_qps = hinic_hwdev_num_qps(nic_dev->hwdev); in update_nic_stats()
262 int err, i, j, num_txqs = hinic_hwdev_num_qps(nic_dev->hwdev); in create_txqs()
275 struct hinic_sq *sq = hinic_hwdev_get_sq(nic_dev->hwdev, i); in create_txqs()
[all …]
hinic_tx.c
114 struct hinic_hwdev *hwdev = nic_dev->hwdev; in tx_map_skb() local
115 struct hinic_hwif *hwif = hwdev->hwif; in tx_map_skb()
165 struct hinic_hwdev *hwdev = nic_dev->hwdev; in tx_unmap_skb() local
166 struct hinic_hwif *hwif = hwdev->hwif; in tx_unmap_skb()
404 hinic_hwdev_msix_cnt_set(nic_dev->hwdev, txq->sq->msix_entry); in tx_irq()
413 struct hinic_hwdev *hwdev = nic_dev->hwdev; in tx_request_irq() local
414 struct hinic_hwif *hwif = hwdev->hwif; in tx_request_irq()
421 hinic_hwdev_msix_set(nic_dev->hwdev, sq->msix_entry, in tx_request_irq()
457 struct hinic_hwdev *hwdev = nic_dev->hwdev; in hinic_init_txq() local
489 err = hinic_hwdev_hw_ci_addr_set(hwdev, sq, CI_UPDATE_NO_PENDING, in hinic_init_txq()
hinic_rx.c
103 struct hinic_hwdev *hwdev = nic_dev->hwdev; in rx_alloc_skb() local
104 struct hinic_hwif *hwif = hwdev->hwif; in rx_alloc_skb()
140 struct hinic_hwdev *hwdev = nic_dev->hwdev; in rx_unmap_skb() local
141 struct hinic_hwif *hwif = hwdev->hwif; in rx_unmap_skb()
405 hinic_hwdev_msix_cnt_set(nic_dev->hwdev, rq->msix_entry); in rx_irq()
414 struct hinic_hwdev *hwdev = nic_dev->hwdev; in rx_request_irq() local
422 hinic_hwdev_msix_set(hwdev, rq->msix_entry, in rx_request_irq()
/Linux-v4.19/include/xen/arm/
page-coherent.h
16 void __xen_dma_map_page(struct device *hwdev, struct page *page,
19 void __xen_dma_unmap_page(struct device *hwdev, dma_addr_t handle,
22 void __xen_dma_sync_single_for_cpu(struct device *hwdev,
25 void __xen_dma_sync_single_for_device(struct device *hwdev,
28 static inline void *xen_alloc_coherent_pages(struct device *hwdev, size_t size, in xen_alloc_coherent_pages() argument
31 return xen_get_dma_ops(hwdev)->alloc(hwdev, size, dma_handle, flags, attrs); in xen_alloc_coherent_pages()
34 static inline void xen_free_coherent_pages(struct device *hwdev, size_t size, in xen_free_coherent_pages() argument
37 xen_get_dma_ops(hwdev)->free(hwdev, size, cpu_addr, dma_handle, attrs); in xen_free_coherent_pages()
40 static inline void xen_dma_map_page(struct device *hwdev, struct page *page, in xen_dma_map_page() argument
61 xen_get_dma_ops(hwdev)->map_page(hwdev, page, offset, size, dir, attrs); in xen_dma_map_page()
[all …]
/Linux-v4.19/include/linux/
pci-dma-compat.h
17 pci_alloc_consistent(struct pci_dev *hwdev, size_t size, in pci_alloc_consistent() argument
20 return dma_alloc_coherent(&hwdev->dev, size, dma_handle, GFP_ATOMIC); in pci_alloc_consistent()
24 pci_zalloc_consistent(struct pci_dev *hwdev, size_t size, in pci_zalloc_consistent() argument
27 return dma_zalloc_coherent(&hwdev->dev, size, dma_handle, GFP_ATOMIC); in pci_zalloc_consistent()
31 pci_free_consistent(struct pci_dev *hwdev, size_t size, in pci_free_consistent() argument
34 dma_free_coherent(&hwdev->dev, size, vaddr, dma_handle); in pci_free_consistent()
38 pci_map_single(struct pci_dev *hwdev, void *ptr, size_t size, int direction) in pci_map_single() argument
40 return dma_map_single(&hwdev->dev, ptr, size, (enum dma_data_direction)direction); in pci_map_single()
44 pci_unmap_single(struct pci_dev *hwdev, dma_addr_t dma_addr, in pci_unmap_single() argument
47 dma_unmap_single(&hwdev->dev, dma_addr, size, (enum dma_data_direction)direction); in pci_unmap_single()
[all …]
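These are thin compatibility shims: each pci_* helper forwards to the matching dma_* API on &hwdev->dev, translating the int direction into enum dma_data_direction. A minimal sketch of the equivalence (hypothetical fragment; pdev, buf and len are assumed valid):

	static void example_map(struct pci_dev *pdev, void *buf, size_t len)
	{
		dma_addr_t a, b;

		a = pci_map_single(pdev, buf, len, PCI_DMA_TODEVICE);
		b = dma_map_single(&pdev->dev, buf, len, DMA_TO_DEVICE);
		/* both calls reach the same dma_map_single() machinery */
		pci_unmap_single(pdev, a, len, PCI_DMA_TODEVICE);
		dma_unmap_single(&pdev->dev, b, len, DMA_TO_DEVICE);
	}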
swiotlb.h
52 extern phys_addr_t swiotlb_tbl_map_single(struct device *hwdev,
58 extern void swiotlb_tbl_unmap_single(struct device *hwdev,
63 extern void swiotlb_tbl_sync_single(struct device *hwdev,
70 void *swiotlb_alloc(struct device *hwdev, size_t size, dma_addr_t *dma_handle,
79 extern void swiotlb_unmap_page(struct device *hwdev, dma_addr_t dev_addr,
84 swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl, int nelems,
89 swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl,
94 swiotlb_sync_single_for_cpu(struct device *hwdev, dma_addr_t dev_addr,
98 swiotlb_sync_sg_for_cpu(struct device *hwdev, struct scatterlist *sg,
102 swiotlb_sync_single_for_device(struct device *hwdev, dma_addr_t dev_addr,
[all …]
/Linux-v4.19/drivers/xen/
swiotlb-xen.c
288 xen_swiotlb_alloc_coherent(struct device *hwdev, size_t size, in xen_swiotlb_alloc_coherent() argument
311 ret = xen_alloc_coherent_pages(hwdev, size, dma_handle, flags, attrs); in xen_swiotlb_alloc_coherent()
316 if (hwdev && hwdev->coherent_dma_mask) in xen_swiotlb_alloc_coherent()
317 dma_mask = hwdev->coherent_dma_mask; in xen_swiotlb_alloc_coherent()
331 xen_free_coherent_pages(hwdev, size, ret, (dma_addr_t)phys, attrs); in xen_swiotlb_alloc_coherent()
340 xen_swiotlb_free_coherent(struct device *hwdev, size_t size, void *vaddr, in xen_swiotlb_free_coherent() argument
347 if (hwdev && hwdev->coherent_dma_mask) in xen_swiotlb_free_coherent()
348 dma_mask = hwdev->coherent_dma_mask; in xen_swiotlb_free_coherent()
358 xen_free_coherent_pages(hwdev, size, vaddr, (dma_addr_t)phys, attrs); in xen_swiotlb_free_coherent()
427 static void xen_unmap_single(struct device *hwdev, dma_addr_t dev_addr, in xen_unmap_single() argument
[all …]
/Linux-v4.19/drivers/hwmon/
hwmon.c
61 struct hwmon_device *hwdev; /* Reference to hwmon device */ member
124 struct hwmon_device *hwdev = tdata->hwdev; in hwmon_thermal_get_temp() local
128 ret = hwdev->chip->ops->read(&hwdev->dev, hwmon_temp, hwmon_temp_input, in hwmon_thermal_get_temp()
143 struct hwmon_device *hwdev, int index) in hwmon_thermal_add_sensor() argument
152 tdata->hwdev = hwdev; in hwmon_thermal_add_sensor()
155 tzd = devm_thermal_zone_of_sensor_register(&hwdev->dev, index, tdata, in hwmon_thermal_add_sensor()
168 struct hwmon_device *hwdev, int index) in hwmon_thermal_add_sensor() argument
554 struct hwmon_device *hwdev; in __hwmon_device_register() local
568 hwdev = kzalloc(sizeof(*hwdev), GFP_KERNEL); in __hwmon_device_register()
569 if (hwdev == NULL) { in __hwmon_device_register()
[all …]
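The hwmon thermal bridge above reads temperatures back through the registered chip ops (hwdev->chip->ops->read) with hwmon_temp / hwmon_temp_input. A minimal sketch of such a read callback (hypothetical driver returning a fixed value):

	/* Hypothetical hwmon read op; a real driver would query its hardware here. */
	static int example_read(struct device *dev, enum hwmon_sensor_types type,
				u32 attr, int channel, long *val)
	{
		if (type == hwmon_temp && attr == hwmon_temp_input) {
			*val = 42000;	/* temperature in millidegrees Celsius */
			return 0;
		}
		return -EOPNOTSUPP;
	}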
/Linux-v4.19/arch/arm/xen/
mm.c
87 static void __xen_dma_page_dev_to_cpu(struct device *hwdev, dma_addr_t handle, in __xen_dma_page_dev_to_cpu() argument
93 static void __xen_dma_page_cpu_to_dev(struct device *hwdev, dma_addr_t handle, in __xen_dma_page_cpu_to_dev() argument
99 void __xen_dma_map_page(struct device *hwdev, struct page *page, in __xen_dma_map_page() argument
103 if (is_device_dma_coherent(hwdev)) in __xen_dma_map_page()
108 __xen_dma_page_cpu_to_dev(hwdev, dev_addr, size, dir); in __xen_dma_map_page()
111 void __xen_dma_unmap_page(struct device *hwdev, dma_addr_t handle, in __xen_dma_unmap_page() argument
116 if (is_device_dma_coherent(hwdev)) in __xen_dma_unmap_page()
121 __xen_dma_page_dev_to_cpu(hwdev, handle, size, dir); in __xen_dma_unmap_page()
124 void __xen_dma_sync_single_for_cpu(struct device *hwdev, in __xen_dma_sync_single_for_cpu() argument
127 if (is_device_dma_coherent(hwdev)) in __xen_dma_sync_single_for_cpu()
[all …]
/Linux-v4.19/kernel/dma/
swiotlb.c
477 phys_addr_t swiotlb_tbl_map_single(struct device *hwdev, in swiotlb_tbl_map_single() argument
498 mask = dma_get_seg_boundary(hwdev); in swiotlb_tbl_map_single()
574 dev_warn(hwdev, "swiotlb buffer is full (sz: %zd bytes)\n", size); in swiotlb_tbl_map_single()
597 map_single(struct device *hwdev, phys_addr_t phys, size_t size, in map_single() argument
603 dev_warn_ratelimited(hwdev, "Cannot do DMA to address %pa\n", in map_single()
608 start_dma_addr = __phys_to_dma(hwdev, io_tlb_start); in map_single()
609 return swiotlb_tbl_map_single(hwdev, start_dma_addr, phys, size, in map_single()
616 void swiotlb_tbl_unmap_single(struct device *hwdev, phys_addr_t tlb_addr, in swiotlb_tbl_unmap_single() argument
661 void swiotlb_tbl_sync_single(struct device *hwdev, phys_addr_t tlb_addr, in swiotlb_tbl_sync_single() argument
845 static void unmap_single(struct device *hwdev, dma_addr_t dev_addr, in unmap_single() argument
[all …]
/Linux-v4.19/arch/x86/include/asm/xen/
page-coherent.h
8 static inline void *xen_alloc_coherent_pages(struct device *hwdev, size_t size, in xen_alloc_coherent_pages() argument
17 static inline void xen_free_coherent_pages(struct device *hwdev, size_t size, in xen_free_coherent_pages() argument
24 static inline void xen_dma_map_page(struct device *hwdev, struct page *page, in xen_dma_map_page() argument
28 static inline void xen_dma_unmap_page(struct device *hwdev, dma_addr_t handle, in xen_dma_unmap_page() argument
32 static inline void xen_dma_sync_single_for_cpu(struct device *hwdev, in xen_dma_sync_single_for_cpu() argument
35 static inline void xen_dma_sync_single_for_device(struct device *hwdev, in xen_dma_sync_single_for_device() argument
/Linux-v4.19/arch/hexagon/kernel/
dma.c
84 static int check_addr(const char *name, struct device *hwdev, in check_addr() argument
87 if (hwdev && hwdev->dma_mask && !dma_capable(hwdev, bus, size)) { in check_addr()
88 if (*hwdev->dma_mask >= DMA_BIT_MASK(32)) in check_addr()
92 (long long)*hwdev->dma_mask); in check_addr()
98 static int hexagon_map_sg(struct device *hwdev, struct scatterlist *sg, in hexagon_map_sg() argument
109 if (!check_addr("map_sg", hwdev, s->dma_address, s->length)) in hexagon_map_sg()
/Linux-v4.19/drivers/infiniband/hw/qib/
qib_user_pages.c
102 int qib_map_page(struct pci_dev *hwdev, struct page *page, dma_addr_t *daddr) in qib_map_page() argument
106 phys = pci_map_page(hwdev, page, 0, PAGE_SIZE, PCI_DMA_FROMDEVICE); in qib_map_page()
107 if (pci_dma_mapping_error(hwdev, phys)) in qib_map_page()
111 pci_unmap_page(hwdev, phys, PAGE_SIZE, PCI_DMA_FROMDEVICE); in qib_map_page()
112 phys = pci_map_page(hwdev, page, 0, PAGE_SIZE, in qib_map_page()
114 if (pci_dma_mapping_error(hwdev, phys)) in qib_map_page()
/Linux-v4.19/arch/ia64/sn/pci/pcibr/
pcibr_dma.c
208 pcibr_dma_unmap(struct pci_dev *hwdev, dma_addr_t dma_handle, int direction) in pcibr_dma_unmap() argument
210 struct pcidev_info *pcidev_info = SN_PCIDEV_INFO(hwdev); in pcibr_dma_unmap()
357 pcibr_dma_map(struct pci_dev * hwdev, unsigned long phys_addr, size_t size, int dma_flags) in pcibr_dma_map() argument
360 struct pcidev_info *pcidev_info = SN_PCIDEV_INFO(hwdev); in pcibr_dma_map()
363 if (hwdev->dma_mask < 0x7fffffff) { in pcibr_dma_map()
367 if (hwdev->dma_mask == ~0UL) { in pcibr_dma_map()
395 pcibr_dma_map_consistent(struct pci_dev * hwdev, unsigned long phys_addr, in pcibr_dma_map_consistent() argument
399 struct pcidev_info *pcidev_info = SN_PCIDEV_INFO(hwdev); in pcibr_dma_map_consistent()
401 if (hwdev->dev.coherent_dma_mask == ~0UL) { in pcibr_dma_map_consistent()
/Linux-v4.19/drivers/fmc/
fmc-core.c
223 if (!fmc->hwdev) { in fmc_device_register_n_gw()
230 dev_info(fmc->hwdev, "absent mezzanine in slot %d\n", in fmc_device_register_n_gw()
235 dev_err(fmc->hwdev, "no eeprom provided for slot %i\n", in fmc_device_register_n_gw()
240 dev_err(fmc->hwdev, "no eeprom_addr for slot %i\n", in fmc_device_register_n_gw()
246 dev_err(fmc->hwdev, in fmc_device_register_n_gw()
269 fmc->dev.parent = fmc->hwdev; in fmc_device_register_n_gw()
292 dev_warn(fmc->hwdev, in fmc_device_register_n_gw()
300 dev_err(fmc->hwdev, "Slot %i: Failed in registering " in fmc_device_register_n_gw()
/Linux-v4.19/drivers/net/ethernet/via/
via-rhine.c
678 static inline int verify_mmio(struct device *hwdev, in verify_mmio() argument
693 dev_err(hwdev, in verify_mmio()
904 static int rhine_init_one_common(struct device *hwdev, u32 quirks, in rhine_init_one_common() argument
913 rc = dma_set_mask(hwdev, DMA_BIT_MASK(32)); in rhine_init_one_common()
915 dev_err(hwdev, "32-bit DMA addresses not supported by the card!?\n"); in rhine_init_one_common()
924 SET_NETDEV_DEV(dev, hwdev); in rhine_init_one_common()
1001 dev_set_drvdata(hwdev, dev); in rhine_init_one_common()
1039 struct device *hwdev = &pdev->dev; in rhine_init_one_pci() local
1084 dev_err(hwdev, "Insufficient PCI resources, aborting\n"); in rhine_init_one_pci()
1100 dev_err(hwdev, in rhine_init_one_pci()
[all …]
