/Linux-v5.4/drivers/infiniband/hw/hfi1/ |
D | sdma.c |
    243  struct sdma_engine *sde,
    246  struct sdma_engine *sde,
    248  static void dump_sdma_state(struct sdma_engine *sde);
    249  static void sdma_make_progress(struct sdma_engine *sde, u64 status);
    250  static void sdma_desc_avail(struct sdma_engine *sde, uint avail);
    251  static void sdma_flush_descq(struct sdma_engine *sde);
    287  struct sdma_engine *sde,  in write_sde_csr() argument
    291  write_kctxt_csr(sde->dd, sde->this_idx, offset0, value);  in write_sde_csr()
    295  struct sdma_engine *sde,  in read_sde_csr() argument
    298  return read_kctxt_csr(sde->dd, sde->this_idx, offset0);  in read_sde_csr()
    [all …]
|
D | vnic_sdma.c |
    91   static noinline int build_vnic_ulp_payload(struct sdma_engine *sde,  in build_vnic_ulp_payload() argument
    97   sde->dd,  in build_vnic_ulp_payload()
    108  ret = sdma_txadd_page(sde->dd,  in build_vnic_ulp_payload()
    118  ret = sdma_txadd_kvaddr(sde->dd, &tx->txreq,  in build_vnic_ulp_payload()
    126  static int build_vnic_tx_desc(struct sdma_engine *sde,  in build_vnic_tx_desc() argument
    148  sde->dd,  in build_vnic_tx_desc()
    156  ret = build_vnic_ulp_payload(sde, tx);  in build_vnic_tx_desc()
    172  struct sdma_engine *sde = vnic_sdma->sde;  in hfi1_vnic_send_dma() local
    179  if (unlikely(!sde || !sdma_running(sde)))  in hfi1_vnic_send_dma()
    192  ret = build_vnic_tx_desc(sde, tx, pbc);  in hfi1_vnic_send_dma()
    [all …]
|
D | sdma.h |
    432  static inline int sdma_empty(struct sdma_engine *sde)  in sdma_empty() argument
    434  return sde->descq_tail == sde->descq_head;  in sdma_empty()
    437  static inline u16 sdma_descq_freecnt(struct sdma_engine *sde)  in sdma_descq_freecnt() argument
    439  return sde->descq_cnt -  in sdma_descq_freecnt()
    440  (sde->descq_tail -  in sdma_descq_freecnt()
    441  READ_ONCE(sde->descq_head)) - 1;  in sdma_descq_freecnt()
    444  static inline u16 sdma_descq_inprocess(struct sdma_engine *sde)  in sdma_descq_inprocess() argument
    446  return sde->descq_cnt - sdma_descq_freecnt(sde);  in sdma_descq_inprocess()
    846  int sdma_send_txreq(struct sdma_engine *sde,
    850  int sdma_send_txlist(struct sdma_engine *sde,
    [all …]
|
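The sdma.h helpers above derive the occupancy of a per-engine descriptor ring from a producer tail and a consumer head, reserving one slot so a full ring never looks empty. As a rough illustration of that arithmetic only (a user-space sketch with my own names, assuming the same 16-bit wraparound; not the driver's code):

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for the per-engine ring bookkeeping: descq_cnt
 * slots, a tail advanced by the submitter and a head advanced as the
 * hardware completes descriptors. One slot stays unused so that
 * "full" and "empty" remain distinguishable. */
struct ring {
	uint16_t cnt;   /* total descriptor slots */
	uint16_t head;  /* consumer index (may wrap) */
	uint16_t tail;  /* producer index (may wrap) */
};

static int ring_empty(const struct ring *r)
{
	return r->tail == r->head;
}

/* Free slots: unsigned 16-bit subtraction handles index wraparound. */
static uint16_t ring_freecnt(const struct ring *r)
{
	return r->cnt - (uint16_t)(r->tail - r->head) - 1;
}

static uint16_t ring_inprocess(const struct ring *r)
{
	return r->cnt - ring_freecnt(r);
}

int main(void)
{
	struct ring r = { .cnt = 8, .head = 65533, .tail = 2 };

	/* tail wrapped past zero: (uint16_t)(tail - head) == 5,
	 * so 2 slots are still free and 6 are accounted in-process. */
	printf("empty=%d free=%u inprocess=%u\n",
	       ring_empty(&r), ring_freecnt(&r), ring_inprocess(&r));
	return 0;
}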
D | trace_tx.h |
    150  TP_PROTO(struct sdma_engine *sde,
    155  TP_ARGS(sde, desc0, desc1, e, descp),
    156  TP_STRUCT__entry(DD_DEV_ENTRY(sde->dd)
    163  TP_fast_assign(DD_DEV_ASSIGN(sde->dd);
    166  __entry->idx = sde->this_idx;
    344  TP_PROTO(struct sdma_engine *sde, u64 status),
    345  TP_ARGS(sde, status),
    346  TP_STRUCT__entry(DD_DEV_ENTRY(sde->dd)
    350  TP_fast_assign(DD_DEV_ASSIGN(sde->dd);
    352  __entry->idx = sde->this_idx;
    [all …]
|
D | msix.c |
    229  int msix_request_sdma_irq(struct sdma_engine *sde)  in msix_request_sdma_irq() argument
    233  nr = msix_request_irq(sde->dd, sde, sdma_interrupt, NULL,  in msix_request_sdma_irq()
    234  sde->this_idx, IRQ_SDMA);  in msix_request_sdma_irq()
    237  sde->msix_intr = nr;  in msix_request_sdma_irq()
    238  remap_sdma_interrupts(sde->dd, sde->this_idx, nr);  in msix_request_sdma_irq()
    275  struct sdma_engine *sde = &dd->per_sdma[i];  in msix_request_irqs() local
    277  ret = msix_request_sdma_irq(sde);  in msix_request_irqs()
    280  enable_sdma_srcs(sde->dd, i);  in msix_request_irqs()
|
D | qp.c |
    68   struct sdma_engine *sde,
    489  struct sdma_engine *sde,  in iowait_sleep() argument
    513  write_seqlock(&sde->waitlock);  in iowait_sleep()
    514  if (sdma_progress(sde, seq, stx))  in iowait_sleep()
    524  &sde->dmawait);  in iowait_sleep()
    525  priv->s_iowait.lock = &sde->waitlock;  in iowait_sleep()
    529  write_sequnlock(&sde->waitlock);  in iowait_sleep()
    539  write_sequnlock(&sde->waitlock);  in iowait_sleep()
    594  struct sdma_engine *sde;  in qp_to_sdma_engine() local
    604  sde = sdma_select_engine_sc(dd, qp->ibqp.qp_num >> dd->qos_shift, sc5);  in qp_to_sdma_engine()
    [all …]
|
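Both qp.c (iowait_sleep(), above) and user_sdma.c (defer_packet_queue(), further down) follow the same shape when a descriptor queue is full: re-check sdma_progress() under the engine's waitlock, retry the submission if the engine advanced since the caller sampled its sequence, and otherwise park the waiter on the engine's dmawait list. A rough user-space model of that decision (names and locking are mine; the driver uses a seqlock rather than a mutex):

#include <pthread.h>
#include <stdio.h>

/* Toy model of the busy/defer decision: the submitter samples a progress
 * counter before building a request; if the engine has advanced by the
 * time the queue-full path runs, the submission is simply retried,
 * otherwise the waiter is parked until a completion wakes it. */
struct engine {
	pthread_mutex_t lock;     /* stands in for sde->waitlock */
	unsigned long progress;   /* bumped as descriptors complete */
	int waiters;              /* stands in for the dmawait list */
};

enum { DEFER_RETRY, DEFER_QUEUED };

static int defer_or_retry(struct engine *e, unsigned long sampled_progress)
{
	int ret;

	pthread_mutex_lock(&e->lock);
	if (e->progress != sampled_progress) {
		/* Engine moved on while the request was being built:
		 * tell the caller to try submitting again. */
		ret = DEFER_RETRY;
	} else {
		/* Still stalled: park the sender on the wait list. */
		e->waiters++;
		ret = DEFER_QUEUED;
	}
	pthread_mutex_unlock(&e->lock);
	return ret;
}

int main(void)
{
	struct engine e = { PTHREAD_MUTEX_INITIALIZER, 7, 0 };
	unsigned long seq = e.progress;

	e.progress++;	/* a completion races in */
	printf("%s\n", defer_or_retry(&e, seq) == DEFER_RETRY ?
	       "retry submission" : "queued on dmawait");
	return 0;
}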
D | sysfs.c |
    751  ssize_t (*show)(struct sdma_engine *sde, char *buf);
    752  ssize_t (*store)(struct sdma_engine *sde, const char *buf, size_t cnt);
    759  struct sdma_engine *sde =  in sde_show() local
    765  return sde_attr->show(sde, buf);  in sde_show()
    773  struct sdma_engine *sde =  in sde_store() local
    782  return sde_attr->store(sde, buf, count);  in sde_store()
    798  static ssize_t sde_show_cpu_to_sde_map(struct sdma_engine *sde, char *buf)  in sde_show_cpu_to_sde_map() argument
    800  return sdma_get_cpu_to_sde_map(sde, buf);  in sde_show_cpu_to_sde_map()
    803  static ssize_t sde_store_cpu_to_sde_map(struct sdma_engine *sde,  in sde_store_cpu_to_sde_map() argument
    806  return sdma_set_cpu_to_sde_map(sde, buf, count);  in sde_store_cpu_to_sde_map()
    [all …]
|
D | verbs_txreq.h |
    65   struct sdma_engine *sde;  member
    92   tx->sde = priv->s_sde;  in get_txreq()
|
D | affinity.c |
    791  struct sdma_engine *sde = msix->arg;  in hfi1_update_sdma_affinity() local
    792  struct hfi1_devdata *dd = sde->dd;  in hfi1_update_sdma_affinity()
    797  if (cpu > num_online_cpus() || cpu == sde->cpu)  in hfi1_update_sdma_affinity()
    805  old_cpu = sde->cpu;  in hfi1_update_sdma_affinity()
    806  sde->cpu = cpu;  in hfi1_update_sdma_affinity()
    811  sde->this_idx, cpu);  in hfi1_update_sdma_affinity()
    889  struct sdma_engine *sde = NULL;  in get_irq_affinity() local
    901  sde = (struct sdma_engine *)msix->arg;  in get_irq_affinity()
    902  scnprintf(extra, 64, "engine %u", sde->this_idx);  in get_irq_affinity()
    947  sde->cpu = cpu;  in get_irq_affinity()
|
D | user_sdma.c |
    102  struct sdma_engine *sde,
    125  struct sdma_engine *sde,  in defer_packet_queue() argument
    134  write_seqlock(&sde->waitlock);  in defer_packet_queue()
    135  if (sdma_progress(sde, seq, txreq))  in defer_packet_queue()
    145  iowait_queue(pkts_sent, &pq->busy, &sde->dmawait);  in defer_packet_queue()
    147  write_sequnlock(&sde->waitlock);  in defer_packet_queue()
    150  write_sequnlock(&sde->waitlock);  in defer_packet_queue()
    554  req->sde = sdma_select_user_engine(dd, selector, vl);  in hfi1_user_sdma_process_request()
    556  if (!req->sde || !sdma_running(req->sde)) {  in hfi1_user_sdma_process_request()
    563  req->ahg_idx = sdma_ahg_alloc(req->sde);  in hfi1_user_sdma_process_request()
    [all …]
|
D | msix.h | 58 int msix_request_sdma_irq(struct sdma_engine *sde);
|
D | iowait.h |
    142  struct sdma_engine *sde,
    174  int (*sleep)(struct sdma_engine *sde,
|
D | vnic.h | 85 struct sdma_engine *sde; member
|
D | iowait.c | 44 int (*sleep)(struct sdma_engine *sde, in iowait_init() argument
|
D | verbs.c |
    694  struct sdma_engine *sde,  in build_verbs_ulp_payload() argument
    709  sde->dd,  in build_verbs_ulp_payload()
    758  struct sdma_engine *sde,  in build_verbs_tx_desc() argument
    792  sde->dd,  in build_verbs_tx_desc()
    813  ret = build_verbs_ulp_payload(sde, length, tx);  in build_verbs_tx_desc()
    820  ret = sdma_txadd_daddr(sde->dd, &tx->txreq,  in build_verbs_tx_desc()
    821  sde->dd->sdma_pad_phys, extra_bytes);  in build_verbs_tx_desc()
    887  ret = build_verbs_tx_desc(tx->sde, len, tx, ahg_info, pbc);  in hfi1_verbs_send_dma()
    891  ret = sdma_send_txreq(tx->sde, ps->wait, &tx->txreq, ps->pkts_sent);  in hfi1_verbs_send_dma()
|
D | user_sdma.h | 185 struct sdma_engine *sde; member
|
D | vnic_main.c |
    430  struct sdma_engine *sde;  in hfi1_vnic_select_queue() local
    433  sde = sdma_select_engine_vl(vinfo->dd, mdata->entropy, mdata->vl);  in hfi1_vnic_select_queue()
    434  return sde->this_idx;  in hfi1_vnic_select_queue()
|
D | chip.c |
    6019  struct sdma_engine *sde;  in handle_sdma_eng_err() local
    6022  sde = &dd->per_sdma[source];  in handle_sdma_eng_err()
    6024  dd_dev_err(sde->dd, "CONFIG SDMA(%u) %s:%d %s()\n", sde->this_idx,  in handle_sdma_eng_err()
    6026  dd_dev_err(sde->dd, "CONFIG SDMA(%u) source: %u status 0x%llx\n",  in handle_sdma_eng_err()
    6027  sde->this_idx, source, (unsigned long long)status);  in handle_sdma_eng_err()
    6029  sde->err_cnt++;  in handle_sdma_eng_err()
    6030  sdma_engine_error(sde, status);  in handle_sdma_eng_err()
    6049  struct sdma_engine *sde = &dd->per_sdma[source];  in is_sdma_eng_err_int() local
    6051  dd_dev_err(dd, "CONFIG SDMA(%u) %s:%d %s()\n", sde->this_idx,  in is_sdma_eng_err_int()
    6053  dd_dev_err(dd, "CONFIG SDMA(%u) source: %u\n", sde->this_idx,  in is_sdma_eng_err_int()
    [all …]
|
D | uc.c | 270 ps->s_txreq->sde = priv->s_sde; in hfi1_make_uc_req()
|
D | ud.c | 563 ps->s_txreq->sde = priv->s_sde; in hfi1_make_ud_req()
|
D | rc.c |
    395   ps->s_txreq->sde = qpriv->s_sde;  in make_rc_ack()
    1189  ps->s_txreq->sde = priv->s_sde;  in hfi1_make_rc_req()
|
/Linux-v5.4/drivers/net/fddi/skfp/ |
D | smt.c |
    78    static void smt_fill_sde(struct s_smc *smc, struct smt_p_sde *sde);
    942   smt_fill_sde(smc,&nif->sde) ;  /* set station descriptor */  in smt_send_nif()
    1034  smt_fill_sde(smc,&sif->sde) ;  /* set station descriptor */  in smt_send_sif_config()
    1161  static void smt_fill_sde(struct s_smc *smc, struct smt_p_sde *sde)  in smt_fill_sde() argument
    1163  SMTSETPARA(sde,SMT_P_SDE) ;  in smt_fill_sde()
    1164  sde->sde_non_master = smc->mib.fddiSMTNonMaster_Ct ;  in smt_fill_sde()
    1165  sde->sde_master = smc->mib.fddiSMTMaster_Ct ;  in smt_fill_sde()
    1166  sde->sde_mac_count = NUMMACS ;  /* only 1 MAC */  in smt_fill_sde()
    1168  sde->sde_type = SMT_SDE_CONCENTRATOR ;  in smt_fill_sde()
    1170  sde->sde_type = SMT_SDE_STATION ;  in smt_fill_sde()
|
/Linux-v5.4/drivers/net/fddi/skfp/h/ |
D | smt.h |
    727  struct smt_p_sde sde ;  /* station descriptor */  member
    740  struct smt_p_sde sde ;  /* station descriptor */  member
|
/Linux-v5.4/arch/mips/include/asm/octeon/ |
D | cvmx-pciercx-defs.h | 264 __BITFIELD_FIELD(uint32_t sde:1,
|
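The Octeon header above declares sde as a single-bit field inside a 32-bit PCIe config-register layout; __BITFIELD_FIELD is the MIPS helper macro that emits the field declarations in the order required by the target's endianness. A simplified sketch of the same idea, fixed to one bit order, with every field name other than sde invented:

#include <stdint.h>
#include <stdio.h>

/* Simplified register overlay: a raw 32-bit value plus a bitfield view.
 * The real header uses __BITFIELD_FIELD() to pick field order per
 * endianness; the layout below is one possible arrangement, and the
 * printed raw value depends on how the compiler packs bitfields. */
union pcie_cfg_reg {
	uint32_t u32;
	struct {
		uint32_t other_bits:31;
		uint32_t sde:1;        /* single status/enable bit */
	} s;
};

int main(void)
{
	union pcie_cfg_reg reg = { .u32 = 0 };

	reg.s.sde = 1;
	printf("raw register value: 0x%08x\n", reg.u32);
	return 0;
}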
/Linux-v5.4/Documentation/m68k/ |
D | kernel-options.rst |
    83   /dev/sde: -> 0x0840 (fifth SCSI disk)
    114  /dev/sde are in the table above, but not /dev/sdf. Although, you can
|
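The m68k kernel-options document expresses root devices as 16-bit device numbers: major in the high byte, minor in the low byte, with SCSI disks on major 8 and 16 minors per whole disk, which is why /dev/sde (the fifth disk) comes out as 0x0840. A small sketch of that arithmetic (the helper name is mine):

#include <stdio.h>

/* Classic 16-bit device-number encoding used by the root= examples:
 * major in bits 15..8, minor in bits 7..0. SCSI disks are major 8 and
 * each whole disk owns 16 consecutive minors (disk_index*16 + partition). */
#define SCSI_DISK_MAJOR 8

static unsigned int scsi_disk_devnum(unsigned int disk_index,
				     unsigned int partition)
{
	return (SCSI_DISK_MAJOR << 8) | (disk_index * 16 + partition);
}

int main(void)
{
	/* /dev/sde is the fifth disk (index 4), whole device: 0x0840. */
	printf("/dev/sde  -> 0x%04x\n", scsi_disk_devnum(4, 0));
	/* Its first partition, /dev/sde1, would be 0x0841. */
	printf("/dev/sde1 -> 0x%04x\n", scsi_disk_devnum(4, 1));
	return 0;
}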