Lines matching references to the identifier `cd` (a `struct genwqe_dev *` device handle passed to each function below); each entry shows the original line number, the matching source line, and the enclosing function.

44 int __genwqe_writeq(struct genwqe_dev *cd, u64 byte_offs, u64 val)  in __genwqe_writeq()  argument
46 struct pci_dev *pci_dev = cd->pci_dev; in __genwqe_writeq()
48 if (cd->err_inject & GENWQE_INJECT_HARDWARE_FAILURE) in __genwqe_writeq()
51 if (cd->mmio == NULL) in __genwqe_writeq()
57 __raw_writeq((__force u64)cpu_to_be64(val), cd->mmio + byte_offs); in __genwqe_writeq()
68 u64 __genwqe_readq(struct genwqe_dev *cd, u64 byte_offs) in __genwqe_readq() argument
70 if (cd->err_inject & GENWQE_INJECT_HARDWARE_FAILURE) in __genwqe_readq()
73 if ((cd->err_inject & GENWQE_INJECT_GFIR_FATAL) && in __genwqe_readq()
77 if ((cd->err_inject & GENWQE_INJECT_GFIR_INFO) && in __genwqe_readq()
81 if (cd->mmio == NULL) in __genwqe_readq()
84 return be64_to_cpu((__force __be64)__raw_readq(cd->mmio + byte_offs)); in __genwqe_readq()
95 int __genwqe_writel(struct genwqe_dev *cd, u64 byte_offs, u32 val) in __genwqe_writel() argument
97 struct pci_dev *pci_dev = cd->pci_dev; in __genwqe_writel()
99 if (cd->err_inject & GENWQE_INJECT_HARDWARE_FAILURE) in __genwqe_writel()
102 if (cd->mmio == NULL) in __genwqe_writel()
108 __raw_writel((__force u32)cpu_to_be32(val), cd->mmio + byte_offs); in __genwqe_writel()
119 u32 __genwqe_readl(struct genwqe_dev *cd, u64 byte_offs) in __genwqe_readl() argument
121 if (cd->err_inject & GENWQE_INJECT_HARDWARE_FAILURE) in __genwqe_readl()
124 if (cd->mmio == NULL) in __genwqe_readl()
127 return be32_to_cpu((__force __be32)__raw_readl(cd->mmio + byte_offs)); in __genwqe_readl()
135 int genwqe_read_app_id(struct genwqe_dev *cd, char *app_name, int len) in genwqe_read_app_id() argument
138 u32 app_id = (u32)cd->app_unitcfg; in genwqe_read_app_id()
207 void *__genwqe_alloc_consistent(struct genwqe_dev *cd, size_t size, in __genwqe_alloc_consistent() argument
213 return dma_alloc_coherent(&cd->pci_dev->dev, size, dma_handle, in __genwqe_alloc_consistent()
217 void __genwqe_free_consistent(struct genwqe_dev *cd, size_t size, in __genwqe_free_consistent() argument
223 dma_free_coherent(&cd->pci_dev->dev, size, vaddr, dma_handle); in __genwqe_free_consistent()
226 static void genwqe_unmap_pages(struct genwqe_dev *cd, dma_addr_t *dma_list, in genwqe_unmap_pages() argument
230 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_unmap_pages()
239 static int genwqe_map_pages(struct genwqe_dev *cd, in genwqe_map_pages() argument
244 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_map_pages()
268 genwqe_unmap_pages(cd, dma_list, num_pages); in genwqe_map_pages()
288 int genwqe_alloc_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl, in genwqe_alloc_sync_sgl() argument
292 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_alloc_sync_sgl()
314 sgl->sgl = __genwqe_alloc_consistent(cd, sgl->sgl_size, in genwqe_alloc_sync_sgl()
324 sgl->fpage = __genwqe_alloc_consistent(cd, PAGE_SIZE, in genwqe_alloc_sync_sgl()
337 sgl->lpage = __genwqe_alloc_consistent(cd, PAGE_SIZE, in genwqe_alloc_sync_sgl()
352 __genwqe_free_consistent(cd, PAGE_SIZE, sgl->lpage, in genwqe_alloc_sync_sgl()
357 __genwqe_free_consistent(cd, PAGE_SIZE, sgl->fpage, in genwqe_alloc_sync_sgl()
362 __genwqe_free_consistent(cd, sgl->sgl_size, sgl->sgl, in genwqe_alloc_sync_sgl()
371 int genwqe_setup_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl, in genwqe_setup_sgl() argument
468 int genwqe_free_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl) in genwqe_free_sync_sgl() argument
473 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_free_sync_sgl()
486 __genwqe_free_consistent(cd, PAGE_SIZE, sgl->fpage, in genwqe_free_sync_sgl()
503 __genwqe_free_consistent(cd, PAGE_SIZE, sgl->lpage, in genwqe_free_sync_sgl()
508 __genwqe_free_consistent(cd, sgl->sgl_size, sgl->sgl, in genwqe_free_sync_sgl()
564 int genwqe_user_vmap(struct genwqe_dev *cd, struct dma_mapping *m, void *uaddr, in genwqe_user_vmap() argument
569 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_user_vmap()
614 rc = genwqe_map_pages(cd, m->page_list, m->nr_pages, m->dma_list); in genwqe_user_vmap()
639 int genwqe_user_vunmap(struct genwqe_dev *cd, struct dma_mapping *m) in genwqe_user_vunmap() argument
641 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_user_vunmap()
650 genwqe_unmap_pages(cd, m->dma_list, m->nr_pages); in genwqe_user_vunmap()
674 u8 genwqe_card_type(struct genwqe_dev *cd) in genwqe_card_type() argument
676 u64 card_type = cd->slu_unitcfg; in genwqe_card_type()
685 int genwqe_card_reset(struct genwqe_dev *cd) in genwqe_card_reset() argument
688 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_card_reset()
690 if (!genwqe_is_privileged(cd)) in genwqe_card_reset()
694 __genwqe_writeq(cd, IO_SLC_CFGREG_SOFTRESET, 0x1ull); in genwqe_card_reset()
696 __genwqe_readq(cd, IO_HSU_FIR_CLR); in genwqe_card_reset()
697 __genwqe_readq(cd, IO_APP_FIR_CLR); in genwqe_card_reset()
698 __genwqe_readq(cd, IO_SLU_FIR_CLR); in genwqe_card_reset()
708 softrst = __genwqe_readq(cd, IO_SLC_CFGREG_SOFTRESET) & 0x3cull; in genwqe_card_reset()
709 __genwqe_writeq(cd, IO_SLC_CFGREG_SOFTRESET, softrst | 0x2ull); in genwqe_card_reset()
714 if (genwqe_need_err_masking(cd)) { in genwqe_card_reset()
717 __genwqe_writeq(cd, IO_SLC_MISC_DEBUG, 0x0aull); in genwqe_card_reset()
722 int genwqe_read_softreset(struct genwqe_dev *cd) in genwqe_read_softreset() argument
726 if (!genwqe_is_privileged(cd)) in genwqe_read_softreset()
729 bitstream = __genwqe_readq(cd, IO_SLU_BITSTREAM) & 0x1; in genwqe_read_softreset()
730 cd->softreset = (bitstream == 0) ? 0x8ull : 0xcull; in genwqe_read_softreset()
739 int genwqe_set_interrupt_capability(struct genwqe_dev *cd, int count) in genwqe_set_interrupt_capability() argument
743 rc = pci_alloc_irq_vectors(cd->pci_dev, 1, count, PCI_IRQ_MSI); in genwqe_set_interrupt_capability()
753 void genwqe_reset_interrupt_capability(struct genwqe_dev *cd) in genwqe_reset_interrupt_capability() argument
755 pci_free_irq_vectors(cd->pci_dev); in genwqe_reset_interrupt_capability()
768 static int set_reg_idx(struct genwqe_dev *cd, struct genwqe_reg *r, in set_reg_idx() argument
782 static int set_reg(struct genwqe_dev *cd, struct genwqe_reg *r, in set_reg() argument
785 return set_reg_idx(cd, r, i, m, addr, 0, val); in set_reg()
788 int genwqe_read_ffdc_regs(struct genwqe_dev *cd, struct genwqe_reg *regs, in genwqe_read_ffdc_regs() argument
796 gfir = __genwqe_readq(cd, IO_SLC_CFGREG_GFIR); in genwqe_read_ffdc_regs()
797 set_reg(cd, regs, &idx, max_regs, IO_SLC_CFGREG_GFIR, gfir); in genwqe_read_ffdc_regs()
800 sluid = __genwqe_readq(cd, IO_SLU_UNITCFG); /* 0x00000000 */ in genwqe_read_ffdc_regs()
801 set_reg(cd, regs, &idx, max_regs, IO_SLU_UNITCFG, sluid); in genwqe_read_ffdc_regs()
804 appid = __genwqe_readq(cd, IO_APP_UNITCFG); /* 0x02000000 */ in genwqe_read_ffdc_regs()
805 set_reg(cd, regs, &idx, max_regs, IO_APP_UNITCFG, appid); in genwqe_read_ffdc_regs()
812 ufir = __genwqe_readq(cd, ufir_addr); in genwqe_read_ffdc_regs()
813 set_reg(cd, regs, &idx, max_regs, ufir_addr, ufir); in genwqe_read_ffdc_regs()
817 ufec = __genwqe_readq(cd, ufec_addr); in genwqe_read_ffdc_regs()
818 set_reg(cd, regs, &idx, max_regs, ufec_addr, ufec); in genwqe_read_ffdc_regs()
826 sfir = __genwqe_readq(cd, sfir_addr); in genwqe_read_ffdc_regs()
827 set_reg(cd, regs, &idx, max_regs, sfir_addr, sfir); in genwqe_read_ffdc_regs()
830 sfec = __genwqe_readq(cd, sfec_addr); in genwqe_read_ffdc_regs()
831 set_reg(cd, regs, &idx, max_regs, sfec_addr, sfec); in genwqe_read_ffdc_regs()
846 int genwqe_ffdc_buff_size(struct genwqe_dev *cd, int uid) in genwqe_ffdc_buff_size() argument
853 eevptr = __genwqe_readq(cd, eevptr_addr); in genwqe_ffdc_buff_size()
859 val = __genwqe_readq(cd, l_addr); in genwqe_ffdc_buff_size()
882 val = __genwqe_readq(cd, addr); in genwqe_ffdc_buff_size()
899 int genwqe_ffdc_buff_read(struct genwqe_dev *cd, int uid, in genwqe_ffdc_buff_read() argument
908 eevptr = __genwqe_readq(cd, eevptr_addr); in genwqe_ffdc_buff_read()
913 e = __genwqe_readq(cd, l_addr); in genwqe_ffdc_buff_read()
924 val = __genwqe_readq(cd, d_addr); in genwqe_ffdc_buff_read()
925 set_reg_idx(cd, regs, &idx, max_regs, in genwqe_ffdc_buff_read()
931 val = __genwqe_readq(cd, d_addr); in genwqe_ffdc_buff_read()
932 set_reg_idx(cd, regs, &idx, max_regs, in genwqe_ffdc_buff_read()
947 val = __genwqe_readq(cd, addr); in genwqe_ffdc_buff_read()
965 __genwqe_writeq(cd, addr, diag_sel); in genwqe_ffdc_buff_read()
972 val = __genwqe_readq(cd, addr); in genwqe_ffdc_buff_read()
973 set_reg_idx(cd, regs, &idx, max_regs, addr, in genwqe_ffdc_buff_read()
987 int genwqe_write_vreg(struct genwqe_dev *cd, u32 reg, u64 val, int func) in genwqe_write_vreg() argument
989 __genwqe_writeq(cd, IO_PF_SLC_VIRTUAL_WINDOW, func & 0xf); in genwqe_write_vreg()
990 __genwqe_writeq(cd, reg, val); in genwqe_write_vreg()
1000 u64 genwqe_read_vreg(struct genwqe_dev *cd, u32 reg, int func) in genwqe_read_vreg() argument
1002 __genwqe_writeq(cd, IO_PF_SLC_VIRTUAL_WINDOW, func & 0xf); in genwqe_read_vreg()
1003 return __genwqe_readq(cd, reg); in genwqe_read_vreg()
1018 int genwqe_base_clock_frequency(struct genwqe_dev *cd) in genwqe_base_clock_frequency() argument
1023 speed = (u16)((cd->slu_unitcfg >> 28) & 0x0full); in genwqe_base_clock_frequency()
1035 void genwqe_stop_traps(struct genwqe_dev *cd) in genwqe_stop_traps() argument
1037 __genwqe_writeq(cd, IO_SLC_MISC_DEBUG_SET, 0xcull); in genwqe_stop_traps()
1045 void genwqe_start_traps(struct genwqe_dev *cd) in genwqe_start_traps() argument
1047 __genwqe_writeq(cd, IO_SLC_MISC_DEBUG_CLR, 0xcull); in genwqe_start_traps()
1049 if (genwqe_need_err_masking(cd)) in genwqe_start_traps()
1050 __genwqe_writeq(cd, IO_SLC_MISC_DEBUG, 0x0aull); in genwqe_start_traps()