/Linux-v4.19/drivers/net/ethernet/qlogic/qed/
qed_init_fw_funcs.c
  430  struct qed_qm_pf_rt_init_params *p_params,   in qed_tx_pq_map_rt_init() argument
  434  struct init_qm_vport_params *vport_params = p_params->vport_params;   in qed_tx_pq_map_rt_init()
  437  struct init_qm_pq_params *pq_params = p_params->pq_params;   in qed_tx_pq_map_rt_init()
  440  num_pqs = p_params->num_pf_pqs + p_params->num_vf_pqs;   in qed_tx_pq_map_rt_init()
  442  first_pq_group = p_params->start_pq / QM_PF_QUEUE_GROUP_SIZE;   in qed_tx_pq_map_rt_init()
  443  last_pq_group = (p_params->start_pq + num_pqs - 1) /   in qed_tx_pq_map_rt_init()
  446  pq_mem_4kb = QM_PQ_MEM_4KB(p_params->num_pf_cids);   in qed_tx_pq_map_rt_init()
  447  vport_pq_mem_4kb = QM_PQ_MEM_4KB(p_params->num_vf_cids);   in qed_tx_pq_map_rt_init()
  453  (u32)(p_params->pf_id));   in qed_tx_pq_map_rt_init()
  457  QM_PQ_SIZE_256B(p_params->num_pf_cids));   in qed_tx_pq_map_rt_init()
  [all …]

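The hits above show qed_tx_pq_map_rt_init() deriving its queue-group range purely from fields of p_params. Below is a minimal, compilable sketch of that arithmetic; the struct is trimmed to the fields visible in the hits and QM_PF_QUEUE_GROUP_SIZE is given a placeholder value, so treat it as an illustration rather than the driver's actual definitions.

    #include <stdio.h>

    /* Placeholder value; the real constant lives in the qed firmware headers. */
    #define QM_PF_QUEUE_GROUP_SIZE 16

    /* Trimmed stand-in for struct qed_qm_pf_rt_init_params (only the fields
     * referenced by the hits above are kept). */
    struct pf_rt_init_params {
        unsigned int start_pq;
        unsigned int num_pf_pqs;
        unsigned int num_vf_pqs;
    };

    int main(void)
    {
        struct pf_rt_init_params p = {
            .start_pq = 40, .num_pf_pqs = 8, .num_vf_pqs = 16,
        };
        unsigned int num_pqs = p.num_pf_pqs + p.num_vf_pqs;

        /* Same arithmetic as lines 440-443: the first and last queue groups
         * covered by this PF's range of physical queues. */
        unsigned int first_pq_group = p.start_pq / QM_PF_QUEUE_GROUP_SIZE;
        unsigned int last_pq_group =
            (p.start_pq + num_pqs - 1) / QM_PF_QUEUE_GROUP_SIZE;

        printf("PQ groups %u..%u\n", first_pq_group, last_pq_group);
        return 0;
    }
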
qed_l2.c
  217  struct qed_queue_start_common_params *p_params,   in _qed_eth_queue_to_cid() argument
  233  p_cid->rel.vport_id = p_params->vport_id;   in _qed_eth_queue_to_cid()
  234  p_cid->rel.queue_id = p_params->queue_id;   in _qed_eth_queue_to_cid()
  235  p_cid->rel.stats_id = p_params->stats_id;   in _qed_eth_queue_to_cid()
  236  p_cid->sb_igu_id = p_params->p_sb->igu_sb_id;   in _qed_eth_queue_to_cid()
  238  p_cid->sb_idx = p_params->sb_idx;   in _qed_eth_queue_to_cid()
  313  struct qed_queue_start_common_params *p_params,   in qed_eth_queue_to_cid() argument
  348  p_params, b_is_rx, p_vf_params);   in qed_eth_queue_to_cid()
  359  struct qed_queue_start_common_params *p_params)   in qed_eth_queue_to_cid_pf() argument
  361  return qed_eth_queue_to_cid(p_hwfn, opaque_fid, p_params, b_is_rx,   in qed_eth_queue_to_cid_pf()
  [all …]

qed_dcbx.c
  485  struct qed_dcbx_params *p_params, bool ieee)   in qed_dcbx_get_app_data() argument
  491  p_params->app_willing = QED_MFW_GET_FIELD(p_app->flags,   in qed_dcbx_get_app_data()
  493  p_params->app_valid = QED_MFW_GET_FIELD(p_app->flags, DCBX_APP_ENABLED);   in qed_dcbx_get_app_data()
  494  p_params->app_error = QED_MFW_GET_FIELD(p_app->flags, DCBX_APP_ERROR);   in qed_dcbx_get_app_data()
  495  p_params->num_app_entries = QED_MFW_GET_FIELD(p_app->flags,   in qed_dcbx_get_app_data()
  498  entry = &p_params->app_entry[i];   in qed_dcbx_get_app_data()
  543  p_params->app_willing, p_params->app_valid,   in qed_dcbx_get_app_data()
  544  p_params->app_error);   in qed_dcbx_get_app_data()
  549  u32 pfc, struct qed_dcbx_params *p_params)   in qed_dcbx_get_pfc_data()
  553  p_params->pfc.willing = QED_MFW_GET_FIELD(pfc, DCBX_PFC_WILLING);   in qed_dcbx_get_pfc_data()
  [all …]

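The qed_dcbx.c hits all funnel through QED_MFW_GET_FIELD(), which pulls a named bitfield out of a management-firmware flags word into the corresponding p_params field. The sketch below is a generic reconstruction of that mask/shift idiom, not the driver's macro: the macro name, field names, and bit positions are invented for illustration.

    #include <stdint.h>
    #include <stdio.h>

    /* Generic extraction in the style of QED_MFW_GET_FIELD(): each field FOO
     * is described by a FOO_MASK / FOO_SHIFT macro pair. */
    #define GET_MFW_FIELD(val, field) \
        (((val) & field##_MASK) >> field##_SHIFT)

    /* Hypothetical field layout, for illustration only. */
    #define APP_WILLING_MASK       0x00000001
    #define APP_WILLING_SHIFT      0
    #define APP_ENABLED_MASK       0x00000002
    #define APP_ENABLED_SHIFT      1
    #define APP_NUM_ENTRIES_MASK   0x00000f00
    #define APP_NUM_ENTRIES_SHIFT  8

    int main(void)
    {
        uint32_t flags = 0x00000303; /* willing, enabled, 3 entries */

        printf("willing=%u enabled=%u entries=%u\n",
               (unsigned)GET_MFW_FIELD(flags, APP_WILLING),
               (unsigned)GET_MFW_FIELD(flags, APP_ENABLED),
               (unsigned)GET_MFW_FIELD(flags, APP_NUM_ENTRIES));
        return 0;
    }
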
qed_iscsi.c
  170  struct qed_iscsi_pf_params *p_params = NULL;   in qed_sp_iscsi_func_start() local
  194  p_params = &p_hwfn->pf_params.iscsi_pf_params;   in qed_sp_iscsi_func_start()
  198  if (p_params->num_queues > p_hwfn->hw_info.feat_num[QED_ISCSI_CQ]) {   in qed_sp_iscsi_func_start()
  201  p_params->num_queues,   in qed_sp_iscsi_func_start()
  210  val = p_params->half_way_close_timeout;   in qed_sp_iscsi_func_start()
  212  p_init->num_sq_pages_in_ring = p_params->num_sq_pages_in_ring;   in qed_sp_iscsi_func_start()
  213  p_init->num_r2tq_pages_in_ring = p_params->num_r2tq_pages_in_ring;   in qed_sp_iscsi_func_start()
  214  p_init->num_uhq_pages_in_ring = p_params->num_uhq_pages_in_ring;   in qed_sp_iscsi_func_start()
  216  p_params->ll2_ooo_queue_id;   in qed_sp_iscsi_func_start()
  218  p_init->func_params.log_page_size = p_params->log_page_size;   in qed_sp_iscsi_func_start()
  [all …]

qed_vf.c
  1067  struct qed_sp_vport_update_params *p_params)   in qed_vf_pf_vport_update() argument
  1080  update_rx = p_params->update_vport_active_rx_flg;   in qed_vf_pf_vport_update()
  1081  update_tx = p_params->update_vport_active_tx_flg;   in qed_vf_pf_vport_update()
  1098  p_act_tlv->active_rx = p_params->vport_active_rx_flg;   in qed_vf_pf_vport_update()
  1103  p_act_tlv->active_tx = p_params->vport_active_tx_flg;   in qed_vf_pf_vport_update()
  1107  if (p_params->update_tx_switching_flg) {   in qed_vf_pf_vport_update()
  1116  p_tx_switch_tlv->tx_switching = p_params->tx_switching_flg;   in qed_vf_pf_vport_update()
  1119  if (p_params->update_approx_mcast_flg) {   in qed_vf_pf_vport_update()
  1127  memcpy(p_mcast_tlv->bins, p_params->bins,   in qed_vf_pf_vport_update()
  1131  update_rx = p_params->accept_flags.update_rx_mode_config;   in qed_vf_pf_vport_update()
  [all …]

qed_l2.h
  172  struct qed_sp_vport_start_params *p_params);
  225  struct qed_sp_vport_update_params *p_params,
  310  struct qed_ntuple_filter_params *p_params);
  390  struct qed_queue_start_common_params *p_params,
  396  struct qed_sp_vport_start_params *p_params);

qed_hw.c
  398  struct qed_dmae_params *p_params)   in qed_dmae_opcode() argument
  428  if (p_params->flags & QED_DMAE_FLAG_COMPLETION_DST)   in qed_dmae_opcode()
  444  if (p_params->flags & QED_DMAE_FLAG_VF_SRC) {   in qed_dmae_opcode()
  446  opcode_b |= p_params->src_vfid << DMAE_CMD_SRC_VF_ID_SHIFT;   in qed_dmae_opcode()
  452  if (p_params->flags & QED_DMAE_FLAG_VF_DST) {   in qed_dmae_opcode()
  454  opcode_b |= p_params->dst_vfid << DMAE_CMD_DST_VF_ID_SHIFT;   in qed_dmae_opcode()
  696  struct qed_dmae_params *p_params)   in qed_dmae_execute_command() argument
  709  p_params);   in qed_dmae_execute_command()
  725  if (!(p_params->flags & QED_DMAE_FLAG_RW_REPL_SRC)) {   in qed_dmae_execute_command()
  813  u32 size_in_dwords, struct qed_dmae_params *p_params)   in qed_dmae_host2host() argument
  [all …]

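In qed_dmae_opcode() the caller's qed_dmae_params flags decide whether the VF source/destination IDs get folded into the DMAE opcode word. The sketch below mirrors that flags-plus-shift pattern with made-up flag bits and shift positions; only the general shape matches the hits, not the real DMAE command layout.

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical flag bits and shifts, standing in for the QED_DMAE_FLAG_*
     * and DMAE_CMD_*_SHIFT definitions referenced by the hits above. */
    #define DMAE_FLAG_VF_SRC      0x1
    #define DMAE_FLAG_VF_DST      0x2
    #define DMAE_SRC_VF_ID_SHIFT  16
    #define DMAE_DST_VF_ID_SHIFT  24

    struct dmae_params {
        uint32_t flags;
        uint8_t src_vfid;
        uint8_t dst_vfid;
    };

    /* Build the second opcode word the way qed_dmae_opcode() appears to:
     * OR in each VF id only when the corresponding flag is set. */
    static uint32_t build_opcode_b(const struct dmae_params *p)
    {
        uint32_t opcode_b = 0;

        if (p->flags & DMAE_FLAG_VF_SRC)
            opcode_b |= (uint32_t)p->src_vfid << DMAE_SRC_VF_ID_SHIFT;
        if (p->flags & DMAE_FLAG_VF_DST)
            opcode_b |= (uint32_t)p->dst_vfid << DMAE_DST_VF_ID_SHIFT;

        return opcode_b;
    }

    int main(void)
    {
        struct dmae_params p = { .flags = DMAE_FLAG_VF_SRC, .src_vfid = 5 };

        printf("opcode_b = 0x%08x\n", build_opcode_b(&p));
        return 0;
    }
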
qed_mcp.c
  960  struct qed_load_req_params *p_params)   in qed_mcp_load_req() argument
  972  rc = eocre_get_mfw_drv_role(p_hwfn, p_params->drv_role, &mfw_drv_role);   in qed_mcp_load_req()
  977  in_params.timeout_val = p_params->timeout_val;   in qed_mcp_load_req()
  982  in_params.avoid_eng_reset = p_params->avoid_eng_reset;   in qed_mcp_load_req()
  1006  p_params->override_force_load)) {   in qed_mcp_load_req()
  1068  p_params->load_code = out_params.load_code;   in qed_mcp_load_req()
  3161  struct qed_resc_lock_params *p_params)   in __qed_mcp_resc_lock() argument
  3167  switch (p_params->timeout) {   in __qed_mcp_resc_lock()
  3170  p_params->timeout = 0;   in __qed_mcp_resc_lock()
  3174  p_params->timeout = 0;   in __qed_mcp_resc_lock()
  [all …]

qed_cxt.c
  2068  struct qed_rdma_pf_params *p_params,   in qed_rdma_set_pf_params() argument
  2074  num_srqs = min_t(u32, QED_RDMA_MAX_SRQS, p_params->num_srqs);   in qed_rdma_set_pf_params()
  2085  num_cons = min_t(u32, IWARP_MAX_QPS, p_params->num_qps);   in qed_rdma_set_pf_params()
  2089  num_qps = min_t(u32, ROCE_MAX_QPS, p_params->num_qps);   in qed_rdma_set_pf_params()
  2135  struct qed_eth_pf_params *p_params =   in qed_cxt_set_pf_params() local
  2138  if (!p_params->num_vf_cons)   in qed_cxt_set_pf_params()
  2139  p_params->num_vf_cons =   in qed_cxt_set_pf_params()
  2142  p_params->num_cons,   in qed_cxt_set_pf_params()
  2143  p_params->num_vf_cons);   in qed_cxt_set_pf_params()
  2144  p_hwfn->p_cxt_mngr->arfs_count = p_params->num_arfs_filters;   in qed_cxt_set_pf_params()
  [all …]

qed_dev_api.h
  141  int qed_hw_init(struct qed_dev *cdev, struct qed_hw_init_params *p_params);
  299  u32 size_in_dwords, struct qed_dmae_params *p_params);

qed_sriov.c
  995  struct qed_iov_vf_init_params *p_params)   in qed_iov_init_hw_for_vf() argument
  1007  vf = qed_iov_get_vf_info(p_hwfn, p_params->rel_vf_id, false);   in qed_iov_init_hw_for_vf()
  1015  p_params->rel_vf_id);   in qed_iov_init_hw_for_vf()
  1020  for (i = 0; i < p_params->num_queues; i++) {   in qed_iov_init_hw_for_vf()
  1025  qid = p_params->req_rx_queue[i];   in qed_iov_init_hw_for_vf()
  1030  p_params->rel_vf_id,   in qed_iov_init_hw_for_vf()
  1035  qid = p_params->req_tx_queue[i];   in qed_iov_init_hw_for_vf()
  1039  qid, p_params->rel_vf_id, max_vf_qzone);   in qed_iov_init_hw_for_vf()
  1048  p_params->rel_vf_id, qid, i);   in qed_iov_init_hw_for_vf()
  1056  vf->relative_vf_id, p_params->num_queues, (u16)cids);   in qed_iov_init_hw_for_vf()
  [all …]

qed_mcp.h
  764  struct qed_load_req_params *p_params);
  1033  struct qed_ptt *p_ptt, struct qed_resc_lock_params *p_params);
  1058  struct qed_resc_unlock_params *p_params);

qed_vf.h
  933  struct qed_sp_vport_update_params *p_params);
  1027  struct qed_mcp_link_params *p_params,
  1157  struct qed_sp_vport_update_params *p_params)   in qed_vf_pf_vport_update() argument
  1216  *p_params,   in __qed_vf_get_link_params()

qed_dev.c
  1694  struct qed_hw_init_params *p_params)   in qed_vf_start() argument
  1696  if (p_params->p_tunn) {   in qed_vf_start()
  1697  qed_vf_set_vf_start_tunn_update_param(p_params->p_tunn);   in qed_vf_start()
  1698  qed_vf_pf_tunnel_param_update(p_hwfn, p_params->p_tunn);   in qed_vf_start()
  1706  int qed_hw_init(struct qed_dev *cdev, struct qed_hw_init_params *p_params)   in qed_hw_init() argument
  1715  if ((p_params->int_mode == QED_INT_MODE_MSI) && (cdev->num_hwfns > 1)) {   in qed_hw_init()
  1721  rc = qed_init_fw_data(cdev, p_params->bin_fw_data);   in qed_hw_init()
  1736  qed_vf_start(p_hwfn, p_params);   in qed_hw_init()
  1766  p_params->p_drv_load_params);   in qed_hw_init()
  1802  p_params->p_tunn,   in qed_hw_init()
  [all …]

qed_hsi.h
  3908  struct qed_qm_common_rt_init_params *p_params);
  3932  struct qed_qm_pf_rt_init_params *p_params);

/Linux-v4.19/sound/soc/intel/skylake/
skl-pcm.c
  289  mconfig->pipe->p_params);   in skl_pcm_prepare()
  304  struct skl_pipe_params p_params = {0};   in skl_pcm_hw_params() local
  320  p_params.s_fmt = snd_pcm_format_width(params_format(params));   in skl_pcm_hw_params()
  321  p_params.ch = params_channels(params);   in skl_pcm_hw_params()
  322  p_params.s_freq = params_rate(params);   in skl_pcm_hw_params()
  323  p_params.host_dma_id = dma_id;   in skl_pcm_hw_params()
  324  p_params.stream = substream->stream;   in skl_pcm_hw_params()
  325  p_params.format = params_format(params);   in skl_pcm_hw_params()
  327  p_params.host_bps = dai->driver->playback.sig_bits;   in skl_pcm_hw_params()
  329  p_params.host_bps = dai->driver->capture.sig_bits;   in skl_pcm_hw_params()
  [all …]

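skl_pcm_hw_params() builds a local skl_pipe_params on the stack, fills it from the current ALSA hw_params and DAI state, and hands it on to the topology code. The sketch below reproduces that shape with a trimmed stand-in struct and fixed values so it compiles outside the kernel; the field names come from the hits above, everything else is a placeholder.

    #include <stdbool.h>
    #include <stdio.h>

    /* Trimmed stand-in for struct skl_pipe_params; only fields named in the
     * hits above are kept, and the real kernel types may differ. */
    struct pipe_params {
        int s_fmt;       /* sample width in bits */
        int ch;          /* channel count */
        int s_freq;      /* sample rate in Hz */
        int host_dma_id; /* host DMA stream tag */
        int host_bps;    /* significant bits per sample on the host side */
    };

    /* Stand-in for whatever eventually consumes the filled-in parameters
     * (in the driver that is the topology/IPC layer). */
    static void configure_pipe(const struct pipe_params *p)
    {
        printf("%d ch @ %d Hz, %d-bit, dma %d, %d sig bits\n",
               p->ch, p->s_freq, p->s_fmt, p->host_dma_id, p->host_bps);
    }

    int main(void)
    {
        bool playback = true;

        /* Mirrors lines 304-329: zero-init a local params block, fill it
         * from the current stream state, then pass it on by pointer. */
        struct pipe_params p_params = {0};

        p_params.s_fmt = 16;
        p_params.ch = 2;
        p_params.s_freq = 48000;
        p_params.host_dma_id = 1;
        p_params.host_bps = playback ? 16 : 24; /* playback vs. capture sig_bits */

        configure_pipe(&p_params);
        return 0;
    }
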
skl-debug.c
  120  mconfig->pipe->p_params->host_dma_id,   in module_read()
  121  mconfig->pipe->p_params->link_dma_id);   in module_read()
  125  mconfig->pipe->p_params->ch,   in module_read()
  126  mconfig->pipe->p_params->s_freq,   in module_read()
  127  mconfig->pipe->p_params->s_fmt);   in module_read()
  131  mconfig->pipe->p_params->linktype,   in module_read()
  132  mconfig->pipe->p_params->stream);   in module_read()

skl-topology.c
  450  struct skl_pipe_params *params = m_cfg->pipe->p_params;   in skl_tplg_update_module_params()
  558  return skl_pcm_host_dma_prepare(ctx->dev, pipe->p_params);   in skl_tplg_module_prepare()
  561  return skl_pcm_link_dma_prepare(ctx->dev, pipe->p_params);   in skl_tplg_module_prepare()
  701  struct skl_pipe_params *params = pipe->p_params;   in skl_tplg_get_pipe_config()
  1642  pipe->p_params->link_dma_id = params->link_dma_id;   in skl_tplg_fill_dma_id()
  1643  pipe->p_params->link_index = params->link_index;   in skl_tplg_fill_dma_id()
  1644  pipe->p_params->link_bps = params->link_bps;   in skl_tplg_fill_dma_id()
  1648  pipe->p_params->host_dma_id = params->host_dma_id;   in skl_tplg_fill_dma_id()
  1649  pipe->p_params->host_bps = params->host_bps;   in skl_tplg_fill_dma_id()
  1655  pipe->p_params->s_fmt = params->s_fmt;   in skl_tplg_fill_dma_id()
  [all …]

skl-topology.h
  315  struct skl_pipe_params *p_params;   member

skl-messages.c
  521  struct skl_pipe_params *params = mconfig->pipe->p_params;   in skl_get_node_id()

/Linux-v4.19/drivers/soundwire/
cadence_master.c
  876  struct sdw_port_params *p_params, unsigned int bank)   in cdns_port_params() argument
  882  dpn_config_off = CDNS_DPN_B1_CONFIG(p_params->num);   in cdns_port_params()
  884  dpn_config_off = CDNS_DPN_B0_CONFIG(p_params->num);   in cdns_port_params()
  888  dpn_config |= ((p_params->bps - 1) <<   in cdns_port_params()
  890  dpn_config |= (p_params->flow_mode <<   in cdns_port_params()
  892  dpn_config |= (p_params->data_mode <<   in cdns_port_params()

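cdns_port_params() picks the bank-0 or bank-1 DPn config register from the bank argument and packs bps/flow_mode/data_mode into it with shifts. Below is a compilable sketch of that pattern with invented register offsets and shift positions; it shows the shape of the code, not the Cadence register map.

    #include <stdint.h>
    #include <stdio.h>

    /* Invented offsets and shifts standing in for the CDNS_DPN_B*_CONFIG()
     * macros and field positions used by the hits above. */
    #define DPN_B0_CONFIG(n)      (0x100 + (n) * 0x80)
    #define DPN_B1_CONFIG(n)      (0x140 + (n) * 0x80)
    #define DPN_CONFIG_WL_SHIFT   0   /* word length, programmed as bps - 1 */
    #define DPN_CONFIG_FLOW_SHIFT 8
    #define DPN_CONFIG_MODE_SHIFT 12

    struct port_params {
        unsigned int num;       /* port number */
        unsigned int bps;       /* bits per sample */
        unsigned int flow_mode;
        unsigned int data_mode;
    };

    static void program_port(const struct port_params *p, unsigned int bank)
    {
        /* Bank selection mirrors lines 876-884. */
        uint32_t off = bank ? DPN_B1_CONFIG(p->num) : DPN_B0_CONFIG(p->num);

        /* Field packing mirrors lines 888-892. */
        uint32_t cfg = 0;
        cfg |= (p->bps - 1) << DPN_CONFIG_WL_SHIFT;
        cfg |= p->flow_mode << DPN_CONFIG_FLOW_SHIFT;
        cfg |= p->data_mode << DPN_CONFIG_MODE_SHIFT;

        printf("write 0x%08x to reg 0x%03x\n", cfg, off);
    }

    int main(void)
    {
        struct port_params p = { .num = 1, .bps = 24, .flow_mode = 0, .data_mode = 0 };

        program_port(&p, 0);
        return 0;
    }
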
stream.c
  126  struct sdw_port_params *p_params = &p_rt->port_params;   in sdw_program_slave_port_params() local
  156  wbuf = p_params->data_mode << SDW_REG_SHIFT(SDW_DPN_PORTCTRL_DATAMODE);   in sdw_program_slave_port_params()
  157  wbuf |= p_params->flow_mode;   in sdw_program_slave_port_params()
  168  ret = sdw_write(s_rt->slave, addr2, (p_params->bps - 1));   in sdw_program_slave_port_params()