Searched refs:queue_count (Results 1 – 25 of 43) sorted by relevance


/Linux-v5.4/drivers/mailbox/
ti-msgmgr.c  73 u8 queue_count; member
594 if (qinst->queue_id > d->queue_count) { in ti_msgmgr_queue_setup()
596 idx, qinst->queue_id, d->queue_count); in ti_msgmgr_queue_setup()
670 .queue_count = 64,
685 .queue_count = 190,
717 int queue_count; in ti_msgmgr_probe() local
765 queue_count = desc->num_valid_queues; in ti_msgmgr_probe()
766 if (!queue_count || queue_count > desc->queue_count) { in ti_msgmgr_probe()
768 queue_count, desc->queue_count); in ti_msgmgr_probe()
771 inst->num_valid_queues = queue_count; in ti_msgmgr_probe()
[all …]
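
The TI message-manager hits above share one pattern: the per-SoC descriptor carries a hardware maximum (.queue_count = 64 or 190), and both the number of valid queues found at probe time and each individual queue ID are validated against it. Below is a minimal userspace sketch of that bounds check; struct msgmgr_desc and check_queue_config() are illustrative names, not the kernel driver's.

#include <stdio.h>

/* Hypothetical stand-in for the per-SoC descriptor seen above. */
struct msgmgr_desc {
    unsigned int queue_count;      /* hardware maximum, e.g. 64 or 190 */
    unsigned int num_valid_queues; /* queues this SoC actually wires up */
};

/* Reject configurations the way ti_msgmgr_probe() does: zero queues or
 * more valid queues than the hardware descriptor advertises. */
static int check_queue_config(const struct msgmgr_desc *d)
{
    if (!d->num_valid_queues || d->num_valid_queues > d->queue_count) {
        fprintf(stderr, "invalid queue count %u (max %u)\n",
                d->num_valid_queues, d->queue_count);
        return -1;
    }
    return 0;
}

int main(void)
{
    struct msgmgr_desc ok  = { .queue_count = 64, .num_valid_queues = 20 };
    struct msgmgr_desc bad = { .queue_count = 64, .num_valid_queues = 80 };

    printf("ok:  %d\n", check_queue_config(&ok));   /* 0  */
    printf("bad: %d\n", check_queue_config(&bad));  /* -1 */
    return 0;
}
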
/Linux-v5.4/drivers/s390/crypto/
ap_queue.c  143 aq->queue_count--; in ap_sm_recv()
144 if (aq->queue_count > 0) in ap_sm_recv()
157 if (!status.queue_empty || aq->queue_count <= 0) in ap_sm_recv()
160 aq->queue_count = 0; in ap_sm_recv()
186 if (aq->queue_count > 0) { in ap_sm_read()
193 if (aq->queue_count > 0) in ap_sm_read()
220 if (aq->queue_count > 0) in ap_sm_suspend_read()
247 aq->queue_count++; in ap_sm_write()
248 if (aq->queue_count == 1) in ap_sm_write()
253 if (aq->queue_count < aq->card->queue_depth) { in ap_sm_write()
[all …]
ap_bus.h  178 int queue_count; /* # messages currently on AP queue. */ member
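
Per the ap_bus.h comment, queue_count here is a software shadow of how many messages are currently outstanding on the AP hardware queue: ap_sm_write() increments it, ap_sm_recv() decrements it, and new submissions are gated on queue_count staying below the card's queue_depth. A rough userspace model of that depth gate, with invented names (struct ap_model, submit(), complete()):

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical model of the per-queue bookkeeping seen in ap_queue.c. */
struct ap_model {
    int queue_count; /* messages currently on the (simulated) AP queue */
    int queue_depth; /* hardware limit, from the card descriptor       */
};

static bool submit(struct ap_model *aq)
{
    if (aq->queue_count >= aq->queue_depth)
        return false;          /* queue full: caller must wait */
    aq->queue_count++;         /* mirrors ap_sm_write()        */
    return true;
}

static void complete(struct ap_model *aq)
{
    if (aq->queue_count > 0)
        aq->queue_count--;     /* mirrors ap_sm_recv()         */
}

int main(void)
{
    struct ap_model aq = { .queue_count = 0, .queue_depth = 2 };

    printf("%d\n", submit(&aq));   /* 1: queue has room         */
    printf("%d\n", submit(&aq));   /* 1: now at queue_depth     */
    printf("%d\n", submit(&aq));   /* 0: full, caller backs off */
    complete(&aq);
    printf("%d\n", submit(&aq));   /* 1: one slot freed up      */
    return 0;
}
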
/Linux-v5.4/drivers/gpu/drm/amd/amdkfd/
kfd_device_queue_manager.c  346 qpd->queue_count++; in create_queue_nocpsch()
348 dqm->queue_count++; in create_queue_nocpsch()
476 qpd->queue_count--; in destroy_queue_nocpsch_locked()
478 dqm->queue_count--; in destroy_queue_nocpsch_locked()
545 dqm->queue_count++; in update_queue()
547 dqm->queue_count--; in update_queue()
604 dqm->queue_count--; in evict_process_queues_nocpsch()
636 dqm->queue_count--; in evict_process_queues_cpsch()
712 dqm->queue_count++; in restore_process_queues_nocpsch()
756 dqm->queue_count++; in restore_process_queues_cpsch()
[all …]
kfd_packet_manager.c  44 unsigned int process_count, queue_count, compute_queue_count; in pm_calc_rlib_size() local
50 queue_count = pm->dqm->queue_count; in pm_calc_rlib_size()
51 compute_queue_count = queue_count - pm->dqm->sdma_queue_count - in pm_calc_rlib_size()
73 queue_count * map_queue_size; in pm_calc_rlib_size()
144 pm->dqm->processes_count, pm->dqm->queue_count); in pm_create_runlist_ib()
kfd_device_queue_manager.h  180 unsigned int queue_count; member
kfd_process_queue_manager.c  223 if (pdd->qpd.queue_count >= max_queues) in pqm_create_queue()
268 (dev->dqm->queue_count >= get_queues_num(dev->dqm)))) { in pqm_create_queue()
kfd_kernel_queue_v10.c  90 packet->bitfields14.num_queues = (qpd->is_debug) ? 0 : qpd->queue_count; in pm_map_process_v10()
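
The amdkfd hits track two counters side by side: qpd->queue_count (queues owned by one process) and dqm->queue_count (queues on the whole device). Both are bumped on create, dropped on destroy, adjusted on evict/restore, and checked in pqm_create_queue() before a new queue is allowed; the packet manager then sizes the runlist IB from the device-wide count. A simplified sketch of that dual bookkeeping, with illustrative limits and names:

#include <stdio.h>

/* Hypothetical per-process and per-device counters, modelled on the
 * qpd->queue_count / dqm->queue_count pair updated together above. */
struct process_qs { unsigned int queue_count; };
struct device_qs  { unsigned int queue_count; };

#define MAX_QUEUES_PER_PROCESS 8
#define MAX_QUEUES_PER_DEVICE  32

static int create_queue(struct process_qs *qpd, struct device_qs *dqm)
{
    /* Same shape as the limit checks in pqm_create_queue(). */
    if (qpd->queue_count >= MAX_QUEUES_PER_PROCESS ||
        dqm->queue_count >= MAX_QUEUES_PER_DEVICE)
        return -1;
    qpd->queue_count++;
    dqm->queue_count++;
    return 0;
}

static void destroy_queue(struct process_qs *qpd, struct device_qs *dqm)
{
    if (qpd->queue_count) qpd->queue_count--;
    if (dqm->queue_count) dqm->queue_count--;
}

int main(void)
{
    struct process_qs qpd = { 0 };
    struct device_qs  dqm = { 0 };

    for (int i = 0; i < 10; i++)
        if (create_queue(&qpd, &dqm))
            printf("refused at i=%d (per-process limit)\n", i);
    destroy_queue(&qpd, &dqm);
    printf("qpd=%u dqm=%u\n", qpd.queue_count, dqm.queue_count);
    return 0;
}
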
/Linux-v5.4/drivers/nvme/target/
loop.c  221 BUG_ON(hctx_idx >= ctrl->ctrl.queue_count); in nvme_loop_init_hctx()
287 for (i = 1; i < ctrl->ctrl.queue_count; i++) { in nvme_loop_destroy_io_queues()
312 ctrl->ctrl.queue_count++; in nvme_loop_init_io_queues()
326 for (i = 1; i < ctrl->ctrl.queue_count; i++) { in nvme_loop_connect_io_queues()
356 ctrl->ctrl.queue_count = 1; in nvme_loop_configure_admin_queue()
409 if (ctrl->ctrl.queue_count > 1) { in nvme_loop_shutdown_ctrl()
473 ctrl->ctrl.queue_count - 1); in nvme_loop_reset_ctrl_work()
522 ctrl->tag_set.nr_hw_queues = ctrl->ctrl.queue_count - 1; in nvme_loop_create_io_queues()
/Linux-v5.4/drivers/nvme/host/
rdma.c  312 BUG_ON(hctx_idx >= ctrl->ctrl.queue_count); in nvme_rdma_init_hctx()
597 for (i = 1; i < ctrl->ctrl.queue_count; i++) in nvme_rdma_free_io_queues()
605 for (i = 1; i < ctrl->ctrl.queue_count; i++) in nvme_rdma_stop_io_queues()
635 for (i = 1; i < ctrl->ctrl.queue_count; i++) { in nvme_rdma_start_io_queues()
668 ctrl->ctrl.queue_count = nr_io_queues + 1; in nvme_rdma_alloc_io_queues()
669 if (ctrl->ctrl.queue_count < 2) in nvme_rdma_alloc_io_queues()
703 for (i = 1; i < ctrl->ctrl.queue_count; i++) { in nvme_rdma_alloc_io_queues()
750 set->nr_hw_queues = nctrl->queue_count - 1; in nvme_rdma_alloc_tagset()
893 ctrl->ctrl.queue_count - 1); in nvme_rdma_configure_io_queues()
931 if (ctrl->ctrl.queue_count > 1) { in nvme_rdma_teardown_io_queues()
[all …]
tcp.c  1488 set->nr_hw_queues = nctrl->queue_count - 1; in nvme_tcp_alloc_tagset()
1514 for (i = 1; i < ctrl->queue_count; i++) in nvme_tcp_free_io_queues()
1522 for (i = 1; i < ctrl->queue_count; i++) in nvme_tcp_stop_io_queues()
1530 for (i = 1; i < ctrl->queue_count; i++) { in nvme_tcp_start_io_queues()
1567 for (i = 1; i < ctrl->queue_count; i++) { in __nvme_tcp_alloc_io_queues()
1639 ctrl->queue_count = nr_io_queues + 1; in nvme_tcp_alloc_io_queues()
1640 if (ctrl->queue_count < 2) in nvme_tcp_alloc_io_queues()
1683 ctrl->queue_count - 1); in nvme_tcp_configure_io_queues()
1792 if (ctrl->queue_count <= 1) in nvme_tcp_teardown_io_queues()
1852 if (ctrl->queue_count > 1) { in nvme_tcp_setup_ctrl()
[all …]
fc.c  1936 for (i = 1; i < ctrl->ctrl.queue_count; i++) in nvme_fc_free_io_queues()
1957 struct nvme_fc_queue *queue = &ctrl->queues[ctrl->ctrl.queue_count - 1]; in nvme_fc_delete_hw_io_queues()
1960 for (i = ctrl->ctrl.queue_count - 1; i >= 1; i--, queue--) in nvme_fc_delete_hw_io_queues()
1970 for (i = 1; i < ctrl->ctrl.queue_count; i++, queue++) { in nvme_fc_create_hw_io_queues()
1989 for (i = 1; i < ctrl->ctrl.queue_count; i++) { in nvme_fc_connect_io_queues()
2009 for (i = 1; i < ctrl->ctrl.queue_count; i++) in nvme_fc_init_io_queues()
2454 ctrl->ctrl.queue_count = nr_io_queues + 1; in nvme_fc_create_io_queues()
2470 ctrl->tag_set.nr_hw_queues = ctrl->ctrl.queue_count - 1; in nvme_fc_create_io_queues()
2515 u32 prior_ioq_cnt = ctrl->ctrl.queue_count - 1; in nvme_fc_recreate_io_queues()
2535 ctrl->ctrl.queue_count = nr_io_queues + 1; in nvme_fc_recreate_io_queues()
[all …]
pci.c  1370 for (i = dev->ctrl.queue_count - 1; i >= lowest; i--) { in nvme_free_queues()
1371 dev->ctrl.queue_count--; in nvme_free_queues()
1400 for (i = dev->ctrl.queue_count - 1; i > 0; i--) in nvme_suspend_io_queues()
1470 if (dev->ctrl.queue_count > qid) in nvme_alloc_queue()
1490 dev->ctrl.queue_count++; in nvme_alloc_queue()
1724 for (i = dev->ctrl.queue_count; i <= dev->max_qid; i++) { in nvme_create_io_queues()
1731 max = min(dev->max_qid, dev->ctrl.queue_count - 1); in nvme_create_io_queues()
2441 if (!dead && dev->ctrl.queue_count > 0) { in nvme_dev_disable()
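
Across the NVMe target loop driver and the rdma, tcp, fc and pci host drivers, queue_count follows one convention: queue 0 is the admin queue, so queue_count is set to nr_io_queues + 1, I/O-queue loops run from 1 to queue_count - 1, blk-mq tag sets get nr_hw_queues = queue_count - 1, and "queue_count > 1" means an I/O path exists. A minimal sketch of that convention; struct ctrl_model and setup() are invented for illustration:

#include <stdio.h>

/* Hypothetical controller model of the counting convention visible in
 * loop.c, rdma.c, tcp.c, fc.c and pci.c: index 0 is the admin queue,
 * so queue_count is always nr_io_queues + 1. */
struct ctrl_model {
    unsigned int queue_count;
};

static void setup(struct ctrl_model *ctrl, unsigned int nr_io_queues)
{
    ctrl->queue_count = nr_io_queues + 1;   /* +1 for the admin queue */
}

int main(void)
{
    struct ctrl_model ctrl;

    setup(&ctrl, 4);

    if (ctrl.queue_count > 1)               /* "do we have I/O queues?" */
        printf("nr_hw_queues = %u\n", ctrl.queue_count - 1);

    /* I/O queues are 1..queue_count-1; queue 0 is never touched here. */
    for (unsigned int i = 1; i < ctrl.queue_count; i++)
        printf("start io queue %u\n", i);
    return 0;
}
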
/Linux-v5.4/drivers/net/ethernet/pensando/ionic/
ionic_debugfs.c  70 (u32 *)&ionic->ident.lif.eth.config.queue_count[IONIC_QTYPE_TXQ]); in ionic_debugfs_add_sizes()
72 (u32 *)&ionic->ident.lif.eth.config.queue_count[IONIC_QTYPE_RXQ]); in ionic_debugfs_add_sizes()
ionic_lif.c  2188 le32_to_cpu(lid->eth.config.queue_count[IONIC_QTYPE_ADMINQ])); in ionic_lif_identify()
2190 le32_to_cpu(lid->eth.config.queue_count[IONIC_QTYPE_NOTIFYQ])); in ionic_lif_identify()
2192 le32_to_cpu(lid->eth.config.queue_count[IONIC_QTYPE_RXQ])); in ionic_lif_identify()
2194 le32_to_cpu(lid->eth.config.queue_count[IONIC_QTYPE_TXQ])); in ionic_lif_identify()
2219 nnqs_per_lif = le32_to_cpu(lc->queue_count[IONIC_QTYPE_NOTIFYQ]); in ionic_lifs_size()
2220 ntxqs_per_lif = le32_to_cpu(lc->queue_count[IONIC_QTYPE_TXQ]); in ionic_lifs_size()
2221 nrxqs_per_lif = le32_to_cpu(lc->queue_count[IONIC_QTYPE_RXQ]); in ionic_lifs_size()
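
In the ionic driver, queue_count is not a scalar but an array in the device identity, indexed by queue type (IONIC_QTYPE_ADMINQ, NOTIFYQ, TXQ, RXQ) and stored little-endian, hence the le32_to_cpu() conversions above. A small endian-safe sketch of reading such per-type counts; the enum values and the le32_to_host() helper are illustrative, not the driver's definitions:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical queue-type indices mirroring the IONIC_QTYPE_* usage above. */
enum { QTYPE_ADMINQ, QTYPE_NOTIFYQ, QTYPE_RXQ, QTYPE_TXQ, QTYPE_MAX };

/* Portable stand-in for le32_to_cpu(): assemble a host-order value from
 * a little-endian byte sequence, regardless of host endianness. */
static uint32_t le32_to_host(const uint8_t *p)
{
    return (uint32_t)p[0] | ((uint32_t)p[1] << 8) |
           ((uint32_t)p[2] << 16) | ((uint32_t)p[3] << 24);
}

int main(void)
{
    /* Fake identity blob: per-type counts stored little-endian, the way
     * the lif config's queue_count[] array is. */
    uint8_t blob[QTYPE_MAX][4] = {
        { 1, 0, 0, 0 },   /* ADMINQ:  1  */
        { 1, 0, 0, 0 },   /* NOTIFYQ: 1  */
        { 16, 0, 0, 0 },  /* RXQ:     16 */
        { 16, 0, 0, 0 },  /* TXQ:     16 */
    };

    for (int t = 0; t < QTYPE_MAX; t++)
        printf("qtype %d: %u queues\n", t, le32_to_host(blob[t]));
    return 0;
}
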
/Linux-v5.4/drivers/infiniband/sw/rxe/
rxe_queue.h  163 static inline unsigned int queue_count(const struct rxe_queue *q) in queue_count() function
rxe_cq.c  55 count = queue_count(cq->queue); in rxe_cq_chk_attr()
rxe_queue.c  138 if (!queue_empty(q) && (num_elem < queue_count(q))) in resize_finish()
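
For rxe, queue_count() is an inline helper returning how many elements currently sit in a work/completion ring; rxe_cq_chk_attr() compares a requested CQ size against it, and resize_finish() refuses to shrink a queue below its current occupancy. A minimal sketch assuming the usual power-of-two ring with producer/consumer indices (the names here are illustrative):

#include <stdio.h>

/* Minimal power-of-two ring, assuming the usual producer/consumer-index
 * scheme; not the rxe_queue layout itself. */
struct ring {
    unsigned int producer_index;
    unsigned int consumer_index;
    unsigned int index_mask;     /* num_slots - 1, num_slots a power of 2 */
};

static unsigned int ring_count(const struct ring *q)
{
    return (q->producer_index - q->consumer_index) & q->index_mask;
}

static int ring_resize_ok(const struct ring *q, unsigned int new_elems)
{
    /* Same idea as resize_finish(): never shrink below what is queued. */
    return new_elems >= ring_count(q);
}

int main(void)
{
    struct ring q = { .producer_index = 10, .consumer_index = 6,
                      .index_mask = 15 };

    printf("count = %u\n", ring_count(&q));              /* 4 */
    printf("resize to 2: %d\n", ring_resize_ok(&q, 2));  /* 0 */
    printf("resize to 8: %d\n", ring_resize_ok(&q, 8));  /* 1 */
    return 0;
}
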
/Linux-v5.4/drivers/net/ethernet/intel/fm10k/
fm10k_pf.c  501 u16 glort, queue_count, vsi_count, pc_count; in fm10k_configure_dglort_map_pf() local
516 queue_count = BIT(dglort->rss_l + dglort->pc_l); in fm10k_configure_dglort_map_pf()
523 for (queue = 0; queue < queue_count; queue++, q_idx++) { in fm10k_configure_dglort_map_pf()
533 queue_count = BIT(dglort->queue_l + dglort->rss_l + dglort->vsi_l); in fm10k_configure_dglort_map_pf()
539 for (queue = 0; queue < queue_count; queue++) { in fm10k_configure_dglort_map_pf()
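
In fm10k_configure_dglort_map_pf(), queue_count is derived arithmetically rather than stored: it is a power of two built from bit-width fields, e.g. BIT(rss_l + pc_l) queues per VSI. A tiny sketch of that computation with made-up field widths:

#include <stdio.h>

/* BIT() as used in the kernel: a value with only bit n set. */
#define BIT(n) (1u << (n))

int main(void)
{
    /* Hypothetical dglort field widths, mirroring rss_l/pc_l/vsi_l/queue_l
     * above: each count is a power of two derived from bit widths. */
    unsigned int rss_l = 2, pc_l = 1, vsi_l = 0, queue_l = 3;

    unsigned int per_vsi_queues = BIT(rss_l + pc_l);            /* 8  */
    unsigned int total_queues   = BIT(queue_l + rss_l + vsi_l); /* 32 */

    printf("per-VSI queue_count = %u\n", per_vsi_queues);
    printf("total   queue_count = %u\n", total_queues);
    return 0;
}
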
/Linux-v5.4/drivers/staging/wilc1000/
wilc_netdev.c  716 int queue_count; in wilc_mac_xmit() local
736 queue_count = wilc_wlan_txq_add_net_pkt(ndev, (void *)tx_data, in wilc_mac_xmit()
740 if (queue_count > FLOW_CONTROL_UPPER_THRESHOLD) { in wilc_mac_xmit()
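
In wilc_mac_xmit(), queue_count is the TX-queue depth returned after enqueuing a packet, and crossing FLOW_CONTROL_UPPER_THRESHOLD triggers flow control on the net device. The sketch below models that backpressure; the lower restart threshold and both values are assumptions added for illustration, since the listing only shows the upper check:

#include <stdbool.h>
#include <stdio.h>

/* Illustrative thresholds: an upper one that stops transmission and an
 * assumed lower one that restarts it once the queue drains. */
#define FLOW_CONTROL_UPPER_THRESHOLD 256
#define FLOW_CONTROL_LOWER_THRESHOLD 128

static int queue_count;       /* packets currently queued for the chip */
static bool tx_stopped;

static void xmit_one(void)
{
    queue_count++;                            /* enqueue the packet  */
    if (queue_count > FLOW_CONTROL_UPPER_THRESHOLD)
        tx_stopped = true;                    /* apply backpressure  */
}

static void drained_one(void)
{
    queue_count--;                            /* chip consumed one   */
    if (tx_stopped && queue_count < FLOW_CONTROL_LOWER_THRESHOLD)
        tx_stopped = false;                   /* resume transmission */
}

int main(void)
{
    for (int i = 0; i < 300; i++)
        xmit_one();
    printf("queued=%d stopped=%d\n", queue_count, tx_stopped);
    while (tx_stopped)
        drained_one();
    printf("queued=%d stopped=%d\n", queue_count, tx_stopped);
    return 0;
}
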
/Linux-v5.4/drivers/scsi/hisi_sas/
hisi_sas_main.c  664 int queue = i % hisi_hba->queue_count; in hisi_sas_alloc_dev()
2266 for (i = 0; i < hisi_hba->queue_count; i++) { in hisi_sas_init_mem()
2318 for (i = 0; i < hisi_hba->queue_count; i++) { in hisi_sas_alloc()
2534 &hisi_hba->queue_count)) { in hisi_sas_get_fw_info()
2692 for (i = 0; i < hisi_hba->queue_count; i++) in hisi_sas_debugfs_snapshot_cq_reg()
2703 for (i = 0; i < hisi_hba->queue_count; i++) { in hisi_sas_debugfs_snapshot_dq_reg()
3212 for (c = 0; c < hisi_hba->queue_count; c++) { in hisi_sas_debugfs_create_files()
3221 for (d = 0; d < hisi_hba->queue_count; d++) { in hisi_sas_debugfs_create_files()
3695 for (i = 0; i < hisi_hba->queue_count; i++) in hisi_sas_debugfs_release()
3698 for (i = 0; i < hisi_hba->queue_count; i++) in hisi_sas_debugfs_release()
[all …]
hisi_sas_v1_hw.c  655 (u32)((1ULL << hisi_hba->queue_count) - 1)); in init_reg_v1_hw()
697 for (i = 0; i < hisi_hba->queue_count; i++) { in init_reg_v1_hw()
1665 for (i = 0; i < hisi_hba->queue_count; i++, idx++) { in interrupt_init_v1_hw()
1682 idx = (hisi_hba->n_phy * HISI_SAS_PHY_INT_NR) + hisi_hba->queue_count; in interrupt_init_v1_hw()
1700 hisi_hba->cq_nvecs = hisi_hba->queue_count; in interrupt_init_v1_hw()
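
For hisi_sas, queue_count is read from firmware/device properties in hisi_sas_get_fw_info(), sizes every per-queue allocation and debugfs loop, and feeds the (1ULL << queue_count) - 1 expression in init_reg_v1_hw() that builds an all-queues enable mask. A short sketch of that mask and loop pattern; the value 32 is illustrative:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    /* queue_count would normally come from firmware/devicetree, as in
     * hisi_sas_get_fw_info(); 32 is just an illustrative value. */
    unsigned int queue_count = 32;

    /* "Enable every queue" mask, as in init_reg_v1_hw():
     * (1ULL << queue_count) - 1 sets bits 0..queue_count-1. */
    uint64_t all_queues_mask = (1ULL << queue_count) - 1;

    printf("queue enable mask = 0x%llx\n",
           (unsigned long long)all_queues_mask);

    /* Per-queue init and dump loops iterate 0..queue_count-1 throughout. */
    for (unsigned int i = 0; i < queue_count; i++)
        if (i == 0 || i == queue_count - 1)
            printf("init queue %u\n", i);
    return 0;
}
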
/Linux-v5.4/drivers/net/wireless/intel/iwlwifi/dvm/
rs.h  314 u8 queue_count; /* number of queues that has member
rs.c  253 while (tl->queue_count && in rs_tl_rm_old_stats()
258 tl->queue_count--; in rs_tl_rm_old_stats()
292 if (!(tl->queue_count)) { in rs_tl_add_packet()
295 tl->queue_count = 1; in rs_tl_add_packet()
313 if ((index + 1) > tl->queue_count) in rs_tl_add_packet()
314 tl->queue_count = index + 1; in rs_tl_add_packet()
366 if (!(tl->queue_count)) in rs_tl_get_load()
/Linux-v5.4/drivers/net/wireless/intel/iwlegacy/
4965-rs.c  240 while (tl->queue_count && tl->time_stamp < oldest_time) { in il4965_rs_tl_rm_old_stats()
244 tl->queue_count--; in il4965_rs_tl_rm_old_stats()
278 if (!(tl->queue_count)) { in il4965_rs_tl_add_packet()
281 tl->queue_count = 1; in il4965_rs_tl_add_packet()
299 if ((idx + 1) > tl->queue_count) in il4965_rs_tl_add_packet()
300 tl->queue_count = idx + 1; in il4965_rs_tl_add_packet()
323 if (!(tl->queue_count)) in il4965_rs_tl_get_load()
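
The iwlwifi DVM rate-scaling code and its iwlegacy 4965 counterpart keep per-TID traffic-load history: queue_count records how many time slices of the window are in use, growing in rs_tl_add_packet() (queue_count = index + 1), shrinking as rs_tl_rm_old_stats() ages out old slices, and gating rs_tl_get_load(). A simplified model of that bookkeeping, without the timestamp-based aging; names and window size are illustrative:

#include <stdio.h>

#define TID_QUEUE_MAX_SIZE 20   /* number of time slices in the window */

/* Simplified per-TID traffic-load state, modelled on the structures
 * shared by rs.c and 4965-rs.c. */
struct traffic_load {
    unsigned int packet_count[TID_QUEUE_MAX_SIZE];
    unsigned int queue_count;
};

static void tl_add_packet(struct traffic_load *tl, unsigned int slice_idx)
{
    if (slice_idx >= TID_QUEUE_MAX_SIZE)
        return;
    tl->packet_count[slice_idx]++;
    if (slice_idx + 1 > tl->queue_count)     /* same test as the drivers */
        tl->queue_count = slice_idx + 1;
}

static unsigned int tl_get_load(const struct traffic_load *tl)
{
    unsigned int load = 0;

    if (!tl->queue_count)                    /* no recent traffic */
        return 0;
    for (unsigned int i = 0; i < tl->queue_count; i++)
        load += tl->packet_count[i];
    return load;
}

int main(void)
{
    struct traffic_load tl = { { 0 }, 0 };

    tl_add_packet(&tl, 0);
    tl_add_packet(&tl, 0);
    tl_add_packet(&tl, 3);
    printf("queue_count=%u load=%u\n", tl.queue_count, tl_get_load(&tl));
    return 0;
}
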
