Lines Matching refs:oct

569 static void *__retrieve_octeon_config_info(struct octeon_device *oct,  in __retrieve_octeon_config_info()  argument
572 u32 oct_id = oct->octeon_id; in __retrieve_octeon_config_info()
577 if (oct->chip_id == OCTEON_CN66XX) { in __retrieve_octeon_config_info()
579 } else if ((oct->chip_id == OCTEON_CN68XX) && in __retrieve_octeon_config_info()
582 } else if ((oct->chip_id == OCTEON_CN68XX) && in __retrieve_octeon_config_info()
585 } else if (oct->chip_id == OCTEON_CN23XX_PF_VID) { in __retrieve_octeon_config_info()
587 } else if (oct->chip_id == OCTEON_CN23XX_VF_VID) { in __retrieve_octeon_config_info()
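
The matches above cover __retrieve_octeon_config_info(), which selects a default configuration template from oct->chip_id (CN66XX, the CN68XX card variants, and the CN23XX PF/VF device IDs). A minimal sketch of that selection, with placeholder config tables standing in for the driver's real defaults:

    /* Sketch only: the chip IDs follow the matches above; the config
     * tables are placeholders for the driver's default configurations. */
    static struct octeon_config cfg_cn66xx, cfg_cn68xx, cfg_cn23xx_pf, cfg_cn23xx_vf;

    static void *retrieve_config_sketch(struct octeon_device *oct)
    {
            switch (oct->chip_id) {
            case OCTEON_CN66XX:
                    return &cfg_cn66xx;
            case OCTEON_CN68XX:
                    /* the real code further distinguishes 68XX card types */
                    return &cfg_cn68xx;
            case OCTEON_CN23XX_PF_VID:
                    return &cfg_cn23xx_pf;
            case OCTEON_CN23XX_VF_VID:
                    return &cfg_cn23xx_vf;
            default:
                    return NULL;
            }
    }
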
597 static int __verify_octeon_config_info(struct octeon_device *oct, void *conf) in __verify_octeon_config_info() argument
599 switch (oct->chip_id) { in __verify_octeon_config_info()
602 return lio_validate_cn6xxx_config_info(oct, conf); in __verify_octeon_config_info()
613 void *oct_get_config_info(struct octeon_device *oct, u16 card_type) in oct_get_config_info() argument
617 conf = __retrieve_octeon_config_info(oct, card_type); in oct_get_config_info()
621 if (__verify_octeon_config_info(oct, conf)) { in oct_get_config_info()
622 dev_err(&oct->pci_dev->dev, "Configuration verification failed\n"); in oct_get_config_info()
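
oct_get_config_info() ties the two helpers together: fetch a candidate config for the card type, run the per-chip validator, and drop the config with a dev_err() if verification fails. A hedged sketch of that flow (return values are illustrative):

    void *get_config_sketch(struct octeon_device *oct, u16 card_type)
    {
            void *conf = __retrieve_octeon_config_info(oct, card_type);

            if (!conf)
                    return NULL;

            if (__verify_octeon_config_info(oct, conf)) {
                    dev_err(&oct->pci_dev->dev,
                            "Configuration verification failed\n");
                    return NULL;
            }
            return conf;
    }
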
645 void octeon_free_device_mem(struct octeon_device *oct) in octeon_free_device_mem() argument
649 for (i = 0; i < MAX_OCTEON_OUTPUT_QUEUES(oct); i++) { in octeon_free_device_mem()
650 if (oct->io_qmask.oq & BIT_ULL(i)) in octeon_free_device_mem()
651 vfree(oct->droq[i]); in octeon_free_device_mem()
654 for (i = 0; i < MAX_OCTEON_INSTR_QUEUES(oct); i++) { in octeon_free_device_mem()
655 if (oct->io_qmask.iq & BIT_ULL(i)) in octeon_free_device_mem()
656 vfree(oct->instr_queue[i]); in octeon_free_device_mem()
659 i = oct->octeon_id; in octeon_free_device_mem()
660 vfree(oct); in octeon_free_device_mem()
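
octeon_free_device_mem() only vfree()s queue structures whose bit is set in the io_qmask bitmaps, then releases the device itself; because the device, its priv area, chip area and dispatch list come from one allocation (see the next function), the final vfree(oct) frees all of them. The mask-guarded teardown, as a sketch:

    for (i = 0; i < MAX_OCTEON_OUTPUT_QUEUES(oct); i++)
            if (oct->io_qmask.oq & BIT_ULL(i))
                    vfree(oct->droq[i]);

    for (i = 0; i < MAX_OCTEON_INSTR_QUEUES(oct); i++)
            if (oct->io_qmask.iq & BIT_ULL(i))
                    vfree(oct->instr_queue[i]);

    vfree(oct);     /* also frees priv, chip and dispatch storage */
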
669 struct octeon_device *oct; in octeon_allocate_device_mem() local
709 oct = (struct octeon_device *)buf; in octeon_allocate_device_mem()
710 oct->priv = (void *)(buf + octdevsize); in octeon_allocate_device_mem()
711 oct->chip = (void *)(buf + octdevsize + priv_size); in octeon_allocate_device_mem()
712 oct->dispatch.dlist = (struct octeon_dispatch *) in octeon_allocate_device_mem()
715 return oct; in octeon_allocate_device_mem()
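
octeon_allocate_device_mem() carves a single vzalloc'd buffer into the device struct, the driver-private area, the chip-specific area and the dispatch list, which is why the free path above needs only one vfree(oct). A sketch of the layout arithmetic (the rounding and the chip_size variable are assumptions; the real code does its own alignment of each sub-block):

    u32 octdevsize = roundup(sizeof(struct octeon_device), 8);     /* assumed rounding */
    u8 *buf = vzalloc(octdevsize + priv_size + chip_size +
                      DISPATCH_LIST_SIZE * sizeof(struct octeon_dispatch));

    if (!buf)
            return NULL;

    oct = (struct octeon_device *)buf;
    oct->priv = buf + octdevsize;
    oct->chip = buf + octdevsize + priv_size;
    oct->dispatch.dlist = (struct octeon_dispatch *)
                          (buf + octdevsize + priv_size + chip_size);
    return oct;
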
722 struct octeon_device *oct = NULL; in octeon_allocate_device() local
731 oct = octeon_allocate_device_mem(pci_id, priv_size); in octeon_allocate_device()
732 if (oct) { in octeon_allocate_device()
734 octeon_device[oct_idx] = oct; in octeon_allocate_device()
739 if (!oct) in octeon_allocate_device()
742 spin_lock_init(&oct->pci_win_lock); in octeon_allocate_device()
743 spin_lock_init(&oct->mem_access_lock); in octeon_allocate_device()
745 oct->octeon_id = oct_idx; in octeon_allocate_device()
746 snprintf(oct->device_name, sizeof(oct->device_name), in octeon_allocate_device()
747 "LiquidIO%d", (oct->octeon_id)); in octeon_allocate_device()
749 return oct; in octeon_allocate_device()
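
octeon_allocate_device() wraps the memory helper: it publishes the new device in the global octeon_device[] table, initializes the PCI-window and memory-access spinlocks, assigns the slot index as octeon_id and builds the "LiquidIO%d" name. A condensed sketch (slot search and error handling around the global table are elided):

    oct = octeon_allocate_device_mem(pci_id, priv_size);
    if (!oct)
            return NULL;

    octeon_device[oct_idx] = oct;           /* publish in the global table */

    spin_lock_init(&oct->pci_win_lock);
    spin_lock_init(&oct->mem_access_lock);

    oct->octeon_id = oct_idx;
    snprintf(oct->device_name, sizeof(oct->device_name),
             "LiquidIO%d", oct->octeon_id);
    return oct;
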
760 int octeon_register_device(struct octeon_device *oct, in octeon_register_device() argument
765 oct->loc.bus = bus; in octeon_register_device()
766 oct->loc.dev = dev; in octeon_register_device()
767 oct->loc.func = func; in octeon_register_device()
769 oct->adapter_refcount = &adapter_refcounts[oct->octeon_id]; in octeon_register_device()
770 atomic_set(oct->adapter_refcount, 0); in octeon_register_device()
773 oct->adapter_fw_state = &adapter_fw_states[oct->octeon_id]; in octeon_register_device()
774 atomic_set(oct->adapter_fw_state, FW_NEEDS_TO_BE_LOADED); in octeon_register_device()
777 for (idx = (int)oct->octeon_id - 1; idx >= 0; idx--) { in octeon_register_device()
779 dev_err(&oct->pci_dev->dev, in octeon_register_device()
783 atomic_inc(oct->adapter_refcount); in octeon_register_device()
791 oct->adapter_refcount = in octeon_register_device()
793 oct->adapter_fw_state = in octeon_register_device()
800 atomic_inc(oct->adapter_refcount); in octeon_register_device()
801 refcount = atomic_read(oct->adapter_refcount); in octeon_register_device()
803 dev_dbg(&oct->pci_dev->dev, "%s: %02x:%02x:%d refcount %u", __func__, in octeon_register_device()
804 oct->loc.bus, oct->loc.dev, oct->loc.func, refcount); in octeon_register_device()
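
octeon_register_device() records the PCI bus/dev/func location and makes every PCI function on the same physical adapter share one refcount and one firmware-state word: it starts with this device's own adapter_refcounts[]/adapter_fw_states[] slots, scans previously registered devices for one on the same bus/slot, adopts that device's pointers if found, and finally increments the shared refcount. A reduced sketch of the sharing logic (oct_on_same_adapter() is a hypothetical stand-in for the driver's bus/slot comparison):

    oct->adapter_refcount = &adapter_refcounts[oct->octeon_id];
    atomic_set(oct->adapter_refcount, 0);
    oct->adapter_fw_state = &adapter_fw_states[oct->octeon_id];
    atomic_set(oct->adapter_fw_state, FW_NEEDS_TO_BE_LOADED);

    for (idx = (int)oct->octeon_id - 1; idx >= 0; idx--) {
            if (oct_on_same_adapter(oct, octeon_device[idx])) {   /* hypothetical helper */
                    oct->adapter_refcount = octeon_device[idx]->adapter_refcount;
                    oct->adapter_fw_state = octeon_device[idx]->adapter_fw_state;
                    break;
            }
    }
    atomic_inc(oct->adapter_refcount);
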
813 int octeon_deregister_device(struct octeon_device *oct) in octeon_deregister_device() argument
817 atomic_dec(oct->adapter_refcount); in octeon_deregister_device()
818 refcount = atomic_read(oct->adapter_refcount); in octeon_deregister_device()
820 dev_dbg(&oct->pci_dev->dev, "%s: %04d:%02d:%d refcount %u", __func__, in octeon_deregister_device()
821 oct->loc.bus, oct->loc.dev, oct->loc.func, refcount); in octeon_deregister_device()
827 octeon_allocate_ioq_vector(struct octeon_device *oct, u32 num_ioqs) in octeon_allocate_ioq_vector() argument
836 oct->ioq_vector = vzalloc(size); in octeon_allocate_ioq_vector()
837 if (!oct->ioq_vector) in octeon_allocate_ioq_vector()
840 ioq_vector = &oct->ioq_vector[i]; in octeon_allocate_ioq_vector()
841 ioq_vector->oct_dev = oct; in octeon_allocate_ioq_vector()
844 ioq_vector->mbox = oct->mbox[i]; in octeon_allocate_ioq_vector()
849 if (oct->chip_id == OCTEON_CN23XX_PF_VID) in octeon_allocate_ioq_vector()
850 ioq_vector->ioq_num = i + oct->sriov_info.pf_srn; in octeon_allocate_ioq_vector()
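
octeon_allocate_ioq_vector() vzalloc()s an array of per-queue interrupt vectors and wires each entry back to the device, its mailbox and its absolute ring number; on a CN23XX PF the ring index is offset by the PF's starting ring (sriov_info.pf_srn). Sketch of the fill loop (size computation and affinity setup omitted; return codes illustrative):

    oct->ioq_vector = vzalloc(num_ioqs * sizeof(struct octeon_ioq_vector));
    if (!oct->ioq_vector)
            return -ENOMEM;

    for (i = 0; i < num_ioqs; i++) {
            struct octeon_ioq_vector *v = &oct->ioq_vector[i];

            v->oct_dev = oct;
            v->mbox    = oct->mbox[i];
            v->ioq_num = i;
            if (oct->chip_id == OCTEON_CN23XX_PF_VID)
                    v->ioq_num += oct->sriov_info.pf_srn;   /* PF rings start at pf_srn */
    }
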
859 octeon_free_ioq_vector(struct octeon_device *oct) in octeon_free_ioq_vector() argument
861 vfree(oct->ioq_vector); in octeon_free_ioq_vector()
865 int octeon_setup_instr_queues(struct octeon_device *oct) in octeon_setup_instr_queues() argument
870 int numa_node = dev_to_node(&oct->pci_dev->dev); in octeon_setup_instr_queues()
872 if (OCTEON_CN6XXX(oct)) in octeon_setup_instr_queues()
874 CFG_GET_NUM_DEF_TX_DESCS(CHIP_CONF(oct, cn6xxx)); in octeon_setup_instr_queues()
875 else if (OCTEON_CN23XX_PF(oct)) in octeon_setup_instr_queues()
876 num_descs = CFG_GET_NUM_DEF_TX_DESCS(CHIP_CONF(oct, cn23xx_pf)); in octeon_setup_instr_queues()
877 else if (OCTEON_CN23XX_VF(oct)) in octeon_setup_instr_queues()
878 num_descs = CFG_GET_NUM_DEF_TX_DESCS(CHIP_CONF(oct, cn23xx_vf)); in octeon_setup_instr_queues()
880 oct->num_iqs = 0; in octeon_setup_instr_queues()
882 oct->instr_queue[0] = vzalloc_node(sizeof(*oct->instr_queue[0]), in octeon_setup_instr_queues()
884 if (!oct->instr_queue[0]) in octeon_setup_instr_queues()
885 oct->instr_queue[0] = in octeon_setup_instr_queues()
887 if (!oct->instr_queue[0]) in octeon_setup_instr_queues()
889 memset(oct->instr_queue[0], 0, sizeof(struct octeon_instr_queue)); in octeon_setup_instr_queues()
890 oct->instr_queue[0]->q_index = 0; in octeon_setup_instr_queues()
891 oct->instr_queue[0]->app_ctx = (void *)(size_t)0; in octeon_setup_instr_queues()
892 oct->instr_queue[0]->ifidx = 0; in octeon_setup_instr_queues()
895 txpciq.s.pkind = oct->pfvf_hsword.pkind; in octeon_setup_instr_queues()
898 if (octeon_init_instr_queue(oct, txpciq, num_descs)) { in octeon_setup_instr_queues()
900 vfree(oct->instr_queue[0]); in octeon_setup_instr_queues()
901 oct->instr_queue[0] = NULL; in octeon_setup_instr_queues()
905 oct->num_iqs++; in octeon_setup_instr_queues()
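
octeon_setup_instr_queues() takes the default TX descriptor count from the chip-specific config, then allocates instruction queue 0 near the device's NUMA node with a plain vzalloc() fallback; if octeon_init_instr_queue() fails it frees the queue and clears the pointer so the mask-guarded teardown stays safe. The allocate-with-fallback idiom, as a sketch (txpciq is prepared beforehand with q_no/pkind as in the matches):

    int numa_node = dev_to_node(&oct->pci_dev->dev);

    oct->instr_queue[0] = vzalloc_node(sizeof(*oct->instr_queue[0]), numa_node);
    if (!oct->instr_queue[0])
            oct->instr_queue[0] = vzalloc(sizeof(*oct->instr_queue[0]));
    if (!oct->instr_queue[0])
            return 1;

    if (octeon_init_instr_queue(oct, txpciq, num_descs)) {
            vfree(oct->instr_queue[0]);
            oct->instr_queue[0] = NULL;
            return 1;
    }
    oct->num_iqs++;

octeon_setup_output_queues() below follows the same pattern for DROQ 0, with the RX descriptor count and buffer size pulled from the same per-chip config.
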
909 int octeon_setup_output_queues(struct octeon_device *oct) in octeon_setup_output_queues() argument
914 int numa_node = dev_to_node(&oct->pci_dev->dev); in octeon_setup_output_queues()
916 if (OCTEON_CN6XXX(oct)) { in octeon_setup_output_queues()
918 CFG_GET_NUM_DEF_RX_DESCS(CHIP_CONF(oct, cn6xxx)); in octeon_setup_output_queues()
920 CFG_GET_DEF_RX_BUF_SIZE(CHIP_CONF(oct, cn6xxx)); in octeon_setup_output_queues()
921 } else if (OCTEON_CN23XX_PF(oct)) { in octeon_setup_output_queues()
922 num_descs = CFG_GET_NUM_DEF_RX_DESCS(CHIP_CONF(oct, cn23xx_pf)); in octeon_setup_output_queues()
923 desc_size = CFG_GET_DEF_RX_BUF_SIZE(CHIP_CONF(oct, cn23xx_pf)); in octeon_setup_output_queues()
924 } else if (OCTEON_CN23XX_VF(oct)) { in octeon_setup_output_queues()
925 num_descs = CFG_GET_NUM_DEF_RX_DESCS(CHIP_CONF(oct, cn23xx_vf)); in octeon_setup_output_queues()
926 desc_size = CFG_GET_DEF_RX_BUF_SIZE(CHIP_CONF(oct, cn23xx_vf)); in octeon_setup_output_queues()
928 oct->num_oqs = 0; in octeon_setup_output_queues()
929 oct->droq[0] = vzalloc_node(sizeof(*oct->droq[0]), numa_node); in octeon_setup_output_queues()
930 if (!oct->droq[0]) in octeon_setup_output_queues()
931 oct->droq[0] = vzalloc(sizeof(*oct->droq[0])); in octeon_setup_output_queues()
932 if (!oct->droq[0]) in octeon_setup_output_queues()
935 if (octeon_init_droq(oct, oq_no, num_descs, desc_size, NULL)) { in octeon_setup_output_queues()
936 vfree(oct->droq[oq_no]); in octeon_setup_output_queues()
937 oct->droq[oq_no] = NULL; in octeon_setup_output_queues()
940 oct->num_oqs++; in octeon_setup_output_queues()
945 int octeon_set_io_queues_off(struct octeon_device *oct) in octeon_set_io_queues_off() argument
949 if (OCTEON_CN6XXX(oct)) { in octeon_set_io_queues_off()
950 octeon_write_csr(oct, CN6XXX_SLI_PKT_INSTR_ENB, 0); in octeon_set_io_queues_off()
951 octeon_write_csr(oct, CN6XXX_SLI_PKT_OUT_ENB, 0); in octeon_set_io_queues_off()
952 } else if (oct->chip_id == OCTEON_CN23XX_VF_VID) { in octeon_set_io_queues_off()
959 for (q_no = 0; q_no < oct->sriov_info.rings_per_vf; q_no++) { in octeon_set_io_queues_off()
961 oct, CN23XX_VF_SLI_IQ_PKT_CONTROL64(q_no)); in octeon_set_io_queues_off()
967 oct, CN23XX_SLI_IQ_PKT_CONTROL64(q_no)); in octeon_set_io_queues_off()
971 dev_err(&oct->pci_dev->dev, in octeon_set_io_queues_off()
978 octeon_write_csr64(oct, in octeon_set_io_queues_off()
983 oct, CN23XX_SLI_IQ_PKT_CONTROL64(q_no)); in octeon_set_io_queues_off()
985 dev_err(&oct->pci_dev->dev, in octeon_set_io_queues_off()
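
octeon_set_io_queues_off() disables CN6XXX queues with two CSR writes, but on CN23XX (VF rings here, PF rings in the second loop) it clears the ring-enable bit in each ring's IQ_PKT_CONTROL64 register and polls the register until the hardware reflects the change, logging dev_err() on timeout. A generic clear-and-poll sketch (the enable-bit position and retry count are assumptions):

    for (q_no = 0; q_no < oct->sriov_info.rings_per_vf; q_no++) {
            u64 reg = octeon_read_csr64(oct, CN23XX_VF_SLI_IQ_PKT_CONTROL64(q_no));
            int retries = 100;                      /* placeholder timeout */

            reg &= ~0x1ULL;                         /* assumed RING_ENB bit */
            octeon_write_csr64(oct, CN23XX_VF_SLI_IQ_PKT_CONTROL64(q_no), reg);

            while (--retries &&
                   (octeon_read_csr64(oct, CN23XX_VF_SLI_IQ_PKT_CONTROL64(q_no)) &
                    0x1ULL))
                    ;
            if (!retries)
                    dev_err(&oct->pci_dev->dev,
                            "unable to disable qno %u\n", q_no);
    }
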
994 void octeon_set_droq_pkt_op(struct octeon_device *oct, in octeon_set_droq_pkt_op() argument
1001 if (OCTEON_CN6XXX(oct)) { in octeon_set_droq_pkt_op()
1002 reg_val = octeon_read_csr(oct, CN6XXX_SLI_PKT_OUT_ENB); in octeon_set_droq_pkt_op()
1009 octeon_write_csr(oct, CN6XXX_SLI_PKT_OUT_ENB, reg_val); in octeon_set_droq_pkt_op()
1013 int octeon_init_dispatch_list(struct octeon_device *oct) in octeon_init_dispatch_list() argument
1017 oct->dispatch.count = 0; in octeon_init_dispatch_list()
1020 oct->dispatch.dlist[i].opcode = 0; in octeon_init_dispatch_list()
1021 INIT_LIST_HEAD(&oct->dispatch.dlist[i].list); in octeon_init_dispatch_list()
1025 octeon_register_reqtype_free_fn(oct, i, NULL); in octeon_init_dispatch_list()
1027 spin_lock_init(&oct->dispatch.lock); in octeon_init_dispatch_list()
1032 void octeon_delete_dispatch_list(struct octeon_device *oct) in octeon_delete_dispatch_list() argument
1039 spin_lock_bh(&oct->dispatch.lock); in octeon_delete_dispatch_list()
1044 dispatch = &oct->dispatch.dlist[i].list; in octeon_delete_dispatch_list()
1050 oct->dispatch.dlist[i].opcode = 0; in octeon_delete_dispatch_list()
1053 oct->dispatch.count = 0; in octeon_delete_dispatch_list()
1055 spin_unlock_bh(&oct->dispatch.lock); in octeon_delete_dispatch_list()
1122 octeon_register_dispatch_fn(struct octeon_device *oct, in octeon_register_dispatch_fn() argument
1133 spin_lock_bh(&oct->dispatch.lock); in octeon_register_dispatch_fn()
1135 if (oct->dispatch.dlist[idx].opcode == 0) { in octeon_register_dispatch_fn()
1136 oct->dispatch.dlist[idx].opcode = combined_opcode; in octeon_register_dispatch_fn()
1137 oct->dispatch.dlist[idx].dispatch_fn = fn; in octeon_register_dispatch_fn()
1138 oct->dispatch.dlist[idx].arg = fn_arg; in octeon_register_dispatch_fn()
1139 oct->dispatch.count++; in octeon_register_dispatch_fn()
1140 spin_unlock_bh(&oct->dispatch.lock); in octeon_register_dispatch_fn()
1144 spin_unlock_bh(&oct->dispatch.lock); in octeon_register_dispatch_fn()
1149 pfn = octeon_get_dispatch(oct, opcode, subcode); in octeon_register_dispatch_fn()
1153 dev_dbg(&oct->pci_dev->dev, in octeon_register_dispatch_fn()
1158 dev_err(&oct->pci_dev->dev, in octeon_register_dispatch_fn()
1169 spin_lock_bh(&oct->dispatch.lock); in octeon_register_dispatch_fn()
1170 list_add(&dispatch->list, &oct->dispatch.dlist[idx].list); in octeon_register_dispatch_fn()
1171 oct->dispatch.count++; in octeon_register_dispatch_fn()
1172 spin_unlock_bh(&oct->dispatch.lock); in octeon_register_dispatch_fn()
1176 octeon_get_dispatch_arg(oct, opcode, subcode) == fn_arg) in octeon_register_dispatch_fn()
1179 dev_err(&oct->pci_dev->dev, in octeon_register_dispatch_fn()
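
octeon_register_dispatch_fn() keys its table on a combined opcode/subcode value. Under dispatch.lock it first tries to claim an empty primary slot; if the slot is occupied it allocates a secondary entry and chains it onto that slot's list, and it only reports an error when the same opcode/subcode is already registered with a different function or argument. A compressed sketch of the fast path (the index derivation is an assumption; the collision path with the chained list entry is elided):

    u32 combined_opcode = OPCODE_SUBCODE(opcode, subcode);
    u32 idx = combined_opcode & OCTEON_OPCODE_MASK;     /* assumed index step */

    spin_lock_bh(&oct->dispatch.lock);
    if (oct->dispatch.dlist[idx].opcode == 0) {
            /* free primary slot: claim it directly */
            oct->dispatch.dlist[idx].opcode      = combined_opcode;
            oct->dispatch.dlist[idx].dispatch_fn = fn;
            oct->dispatch.dlist[idx].arg         = fn_arg;
            oct->dispatch.count++;
            spin_unlock_bh(&oct->dispatch.lock);
            return 0;
    }
    spin_unlock_bh(&oct->dispatch.lock);
    /* slot taken: consult octeon_get_dispatch() and, if it is a different
     * handler, chain a new entry onto dlist[idx].list as in the matches */
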
1192 struct octeon_device *oct = (struct octeon_device *)buf; in octeon_core_drv_init() local
1197 if (OCTEON_CN6XXX(oct)) in octeon_core_drv_init()
1199 CFG_GET_NUM_NIC_PORTS(CHIP_CONF(oct, cn6xxx)); in octeon_core_drv_init()
1200 else if (OCTEON_CN23XX_PF(oct)) in octeon_core_drv_init()
1202 CFG_GET_NUM_NIC_PORTS(CHIP_CONF(oct, cn23xx_pf)); in octeon_core_drv_init()
1204 if (atomic_read(&oct->status) >= OCT_DEV_RUNNING) { in octeon_core_drv_init()
1205 dev_err(&oct->pci_dev->dev, "Received CORE OK when device state is 0x%x\n", in octeon_core_drv_init()
1206 atomic_read(&oct->status)); in octeon_core_drv_init()
1214 oct->app_mode = (u32)recv_pkt->rh.r_core_drv_init.app_mode; in octeon_core_drv_init()
1216 oct->fw_info.max_nic_ports = in octeon_core_drv_init()
1218 oct->fw_info.num_gmx_ports = in octeon_core_drv_init()
1222 if (oct->fw_info.max_nic_ports < num_nic_ports) { in octeon_core_drv_init()
1223 dev_err(&oct->pci_dev->dev, in octeon_core_drv_init()
1225 num_nic_ports, oct->fw_info.max_nic_ports); in octeon_core_drv_init()
1228 oct->fw_info.app_cap_flags = recv_pkt->rh.r_core_drv_init.app_cap_flags; in octeon_core_drv_init()
1229 oct->fw_info.app_mode = (u32)recv_pkt->rh.r_core_drv_init.app_mode; in octeon_core_drv_init()
1230 oct->pfvf_hsword.app_mode = (u32)recv_pkt->rh.r_core_drv_init.app_mode; in octeon_core_drv_init()
1232 oct->pfvf_hsword.pkind = recv_pkt->rh.r_core_drv_init.pkind; in octeon_core_drv_init()
1234 for (i = 0; i < oct->num_iqs; i++) in octeon_core_drv_init()
1235 oct->instr_queue[i]->txpciq.s.pkind = oct->pfvf_hsword.pkind; in octeon_core_drv_init()
1237 atomic_set(&oct->status, OCT_DEV_CORE_OK); in octeon_core_drv_init()
1239 cs = &core_setup[oct->octeon_id]; in octeon_core_drv_init()
1242 dev_dbg(&oct->pci_dev->dev, "Core setup bytes expected %u found %d\n", in octeon_core_drv_init()
1250 strncpy(oct->boardinfo.name, cs->boardname, OCT_BOARD_NAME); in octeon_core_drv_init()
1251 strncpy(oct->boardinfo.serial_number, cs->board_serial_number, in octeon_core_drv_init()
1256 oct->boardinfo.major = cs->board_rev_major; in octeon_core_drv_init()
1257 oct->boardinfo.minor = cs->board_rev_minor; in octeon_core_drv_init()
1259 dev_info(&oct->pci_dev->dev, in octeon_core_drv_init()
1270 int octeon_get_tx_qsize(struct octeon_device *oct, u32 q_no) in octeon_get_tx_qsize() argument
1273 if (oct && (q_no < MAX_OCTEON_INSTR_QUEUES(oct)) && in octeon_get_tx_qsize()
1274 (oct->io_qmask.iq & BIT_ULL(q_no))) in octeon_get_tx_qsize()
1275 return oct->instr_queue[q_no]->max_count; in octeon_get_tx_qsize()
1280 int octeon_get_rx_qsize(struct octeon_device *oct, u32 q_no) in octeon_get_rx_qsize() argument
1282 if (oct && (q_no < MAX_OCTEON_OUTPUT_QUEUES(oct)) && in octeon_get_rx_qsize()
1283 (oct->io_qmask.oq & BIT_ULL(q_no))) in octeon_get_rx_qsize()
1284 return oct->droq[q_no]->max_count; in octeon_get_rx_qsize()
1289 struct octeon_config *octeon_get_conf(struct octeon_device *oct) in octeon_get_conf() argument
1297 if (OCTEON_CN6XXX(oct)) { in octeon_get_conf()
1299 (struct octeon_config *)(CHIP_CONF(oct, cn6xxx)); in octeon_get_conf()
1300 } else if (OCTEON_CN23XX_PF(oct)) { in octeon_get_conf()
1302 (CHIP_CONF(oct, cn23xx_pf)); in octeon_get_conf()
1303 } else if (OCTEON_CN23XX_VF(oct)) { in octeon_get_conf()
1305 (CHIP_CONF(oct, cn23xx_vf)); in octeon_get_conf()
1326 u64 lio_pci_readq(struct octeon_device *oct, u64 addr) in lio_pci_readq() argument
1332 spin_lock_irqsave(&oct->pci_win_lock, flags); in lio_pci_readq()
1338 if ((oct->chip_id == OCTEON_CN66XX) || in lio_pci_readq()
1339 (oct->chip_id == OCTEON_CN68XX) || in lio_pci_readq()
1340 (oct->chip_id == OCTEON_CN23XX_PF_VID)) in lio_pci_readq()
1342 writel(addrhi, oct->reg_list.pci_win_rd_addr_hi); in lio_pci_readq()
1345 val32 = readl(oct->reg_list.pci_win_rd_addr_hi); in lio_pci_readq()
1347 writel(addr & 0xffffffff, oct->reg_list.pci_win_rd_addr_lo); in lio_pci_readq()
1348 val32 = readl(oct->reg_list.pci_win_rd_addr_lo); in lio_pci_readq()
1350 val64 = readq(oct->reg_list.pci_win_rd_data); in lio_pci_readq()
1352 spin_unlock_irqrestore(&oct->pci_win_lock, flags); in lio_pci_readq()
1357 void lio_pci_writeq(struct octeon_device *oct, in lio_pci_writeq() argument
1364 spin_lock_irqsave(&oct->pci_win_lock, flags); in lio_pci_writeq()
1366 writeq(addr, oct->reg_list.pci_win_wr_addr); in lio_pci_writeq()
1369 writel(val >> 32, oct->reg_list.pci_win_wr_data_hi); in lio_pci_writeq()
1371 val32 = readl(oct->reg_list.pci_win_wr_data_hi); in lio_pci_writeq()
1373 writel(val & 0xffffffff, oct->reg_list.pci_win_wr_data_lo); in lio_pci_writeq()
1375 spin_unlock_irqrestore(&oct->pci_win_lock, flags); in lio_pci_writeq()
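
lio_pci_readq()/lio_pci_writeq() reach Octeon-internal addresses through an indirect PCI window: under pci_win_lock they program the window address registers from oct->reg_list, use readl() readbacks to flush and order the 32-bit programming steps, and then access the 64-bit data register. A sketch of the read side (the CN66XX/CN68XX/CN23XX-PF address-hint adjustment is only summarized in a comment); the write side mirrors it with pci_win_wr_addr and the two data halves:

    u64 pci_window_read_sketch(struct octeon_device *oct, u64 addr)
    {
            unsigned long flags;
            u32 addrhi = addr >> 32;
            u64 val;

            spin_lock_irqsave(&oct->pci_win_lock, flags);

            /* CN66XX/CN68XX/CN23XX-PF set an extra hint bit in addrhi here
             * (exact bit omitted in this sketch) */
            writel(addrhi, oct->reg_list.pci_win_rd_addr_hi);
            readl(oct->reg_list.pci_win_rd_addr_hi);        /* flush before next step */

            writel(addr & 0xffffffff, oct->reg_list.pci_win_rd_addr_lo);
            readl(oct->reg_list.pci_win_rd_addr_lo);        /* flush again */

            val = readq(oct->reg_list.pci_win_rd_data);

            spin_unlock_irqrestore(&oct->pci_win_lock, flags);
            return val;
    }
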
1378 int octeon_mem_access_ok(struct octeon_device *oct) in octeon_mem_access_ok() argument
1384 if (OCTEON_CN23XX_PF(oct)) { in octeon_mem_access_ok()
1385 lmc0_reset_ctl = lio_pci_readq(oct, CN23XX_LMC0_RESET_CTL); in octeon_mem_access_ok()
1389 lmc0_reset_ctl = lio_pci_readq(oct, CN6XXX_LMC0_RESET_CTL); in octeon_mem_access_ok()
1397 int octeon_wait_for_ddr_init(struct octeon_device *oct, u32 *timeout) in octeon_wait_for_ddr_init() argument
1407 ret = octeon_mem_access_ok(oct); in octeon_wait_for_ddr_init()
1437 struct octeon_device *oct = NULL; in lio_enable_irq() local
1444 oct = droq->oct_dev; in lio_enable_irq()
1453 oct = iq->oct_dev; in lio_enable_irq()
1458 if (oct && (OCTEON_CN23XX_PF(oct) || OCTEON_CN23XX_VF(oct))) { in lio_enable_irq()