
Searched refs:dq (Results 1 – 25 of 50) sorted by relevance


/Linux-v4.19/include/soc/fsl/
dpaa2-global.h
20 struct dq { struct
33 } dq; member
68 static inline u32 dpaa2_dq_flags(const struct dpaa2_dq *dq) in dpaa2_dq_flags() argument
70 return dq->dq.stat; in dpaa2_dq_flags()
80 static inline int dpaa2_dq_is_pull(const struct dpaa2_dq *dq) in dpaa2_dq_is_pull() argument
82 return (int)(dpaa2_dq_flags(dq) & DPAA2_DQ_STAT_VOLATILE); in dpaa2_dq_is_pull()
91 static inline bool dpaa2_dq_is_pull_complete(const struct dpaa2_dq *dq) in dpaa2_dq_is_pull_complete() argument
93 return !!(dpaa2_dq_flags(dq) & DPAA2_DQ_STAT_EXPIRED); in dpaa2_dq_is_pull_complete()
104 static inline u16 dpaa2_dq_seqnum(const struct dpaa2_dq *dq) in dpaa2_dq_seqnum() argument
106 return le16_to_cpu(dq->dq.seqnum); in dpaa2_dq_seqnum()
[all …]
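
The dpaa2-global.h matches are the accessor layer over the hardware dequeue descriptor: a nested struct dq view plus inline helpers that decode its status bits. As an illustration only, a hypothetical helper (not from the tree) could report what those accessors return for one entry:

    #include <linux/printk.h>
    #include <soc/fsl/dpaa2-global.h>

    /* Illustration only: dump what the matched accessors say about one
     * dequeue result.  The helper name is made up for this sketch. */
    static void example_report_dq(const struct dpaa2_dq *dq)
    {
            pr_debug("dq: flags=%#x pull=%d pull_complete=%d seqnum=%u\n",
                     dpaa2_dq_flags(dq),            /* raw stat byte              */
                     dpaa2_dq_is_pull(dq),          /* DPAA2_DQ_STAT_VOLATILE set */
                     dpaa2_dq_is_pull_complete(dq), /* DPAA2_DQ_STAT_EXPIRED set  */
                     dpaa2_dq_seqnum(dq));          /* sequence number field      */
    }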
/Linux-v4.19/lib/raid6/
recov_avx2.c
25 u8 *p, *q, *dp, *dq; in raid6_2data_recov_avx2() local
39 dq = (u8 *)ptrs[failb]; in raid6_2data_recov_avx2()
41 ptrs[disks-1] = dq; in raid6_2data_recov_avx2()
47 ptrs[failb] = dq; in raid6_2data_recov_avx2()
67 asm volatile("vpxor %0, %%ymm1, %%ymm1" : : "m" (dq[0])); in raid6_2data_recov_avx2()
68 asm volatile("vpxor %0, %%ymm9, %%ymm9" : : "m" (dq[32])); in raid6_2data_recov_avx2()
126 asm volatile("vmovdqa %%ymm1, %0" : "=m" (dq[0])); in raid6_2data_recov_avx2()
127 asm volatile("vmovdqa %%ymm13,%0" : "=m" (dq[32])); in raid6_2data_recov_avx2()
138 dq += 64; in raid6_2data_recov_avx2()
142 asm volatile("vpxor %0, %%ymm1, %%ymm1" : : "m" (*dq)); in raid6_2data_recov_avx2()
[all …]
recov_avx512.c
32 u8 *p, *q, *dp, *dq; in raid6_2data_recov_avx512() local
49 dq = (u8 *)ptrs[failb]; in raid6_2data_recov_avx512()
51 ptrs[disks-1] = dq; in raid6_2data_recov_avx512()
57 ptrs[failb] = dq; in raid6_2data_recov_avx512()
83 "m" (p[64]), "m" (dq[0]), "m" (dq[64]), in raid6_2data_recov_avx512()
153 : "m" (dq[0]), "m" (dq[64])); in raid6_2data_recov_avx512()
164 dq += 128; in raid6_2data_recov_avx512()
171 : "m" (*q), "m" (*p), "m"(*dq), "m" (*dp)); in raid6_2data_recov_avx512()
214 : "m" (dq[0])); in raid6_2data_recov_avx512()
225 dq += 64; in raid6_2data_recov_avx512()
[all …]
recov_ssse3.c
25 u8 *p, *q, *dp, *dq; in raid6_2data_recov_ssse3() local
41 dq = (u8 *)ptrs[failb]; in raid6_2data_recov_ssse3()
43 ptrs[disks-1] = dq; in raid6_2data_recov_ssse3()
49 ptrs[failb] = dq; in raid6_2data_recov_ssse3()
77 asm volatile("pxor %0,%%xmm1" : : "m" (dq[0])); in raid6_2data_recov_ssse3()
78 asm volatile("pxor %0,%%xmm9" : : "m" (dq[16])); in raid6_2data_recov_ssse3()
130 asm volatile("movdqa %%xmm1,%0" : "=m" (dq[0])); in raid6_2data_recov_ssse3()
131 asm volatile("movdqa %%xmm9,%0" : "=m" (dq[16])); in raid6_2data_recov_ssse3()
142 dq += 32; in raid6_2data_recov_ssse3()
146 asm volatile("pxor %0,%%xmm1" : : "m" (*dq)); in raid6_2data_recov_ssse3()
[all …]
recov_s390xc.c
26 u8 *p, *q, *dp, *dq; in raid6_2data_recov_s390xc() local
40 dq = (u8 *)ptrs[failb]; in raid6_2data_recov_s390xc()
42 ptrs[disks-1] = dq; in raid6_2data_recov_s390xc()
48 ptrs[failb] = dq; in raid6_2data_recov_s390xc()
59 xor_block(dq, q); in raid6_2data_recov_s390xc()
61 dq[i] = pbmul[dp[i]] ^ qmul[dq[i]]; in raid6_2data_recov_s390xc()
62 xor_block(dp, dq); in raid6_2data_recov_s390xc()
66 dq += 256; in raid6_2data_recov_s390xc()
75 u8 *p, *q, *dq; in raid6_datap_recov_s390xc() local
84 dq = (u8 *)ptrs[faila]; in raid6_datap_recov_s390xc()
[all …]
recov.c
28 u8 *p, *q, *dp, *dq; in raid6_2data_recov_intx1() local
42 dq = (u8 *)ptrs[failb]; in raid6_2data_recov_intx1()
44 ptrs[disks-1] = dq; in raid6_2data_recov_intx1()
50 ptrs[failb] = dq; in raid6_2data_recov_intx1()
61 qx = qmul[*q ^ *dq]; in raid6_2data_recov_intx1()
62 *dq++ = db = pbmul[px] ^ qx; /* Reconstructed B */ in raid6_2data_recov_intx1()
72 u8 *p, *q, *dq; in raid6_datap_recov_intx1() local
80 dq = (u8 *)ptrs[faila]; in raid6_datap_recov_intx1()
82 ptrs[disks-1] = dq; in raid6_datap_recov_intx1()
87 ptrs[faila] = dq; in raid6_datap_recov_intx1()
[all …]
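
recov.c is the portable reference that the SIMD variants above (avx2, avx512, ssse3, s390xc, neon) re-implement. A hedged reconstruction of the byte-wise inner loop around the two matched lines (the pointer-swap setup and the pbmul/qmul table selection are summarized in the comment rather than quoted):

    /* Sketch: dp/dq hold the P and Q deltas after regenerating the syndrome
     * with the two failed blocks zeroed; pbmul/qmul are precomputed GF(2^8)
     * multiplication tables chosen from faila/failb. */
    while (bytes--) {
            px    = *p ^ *dp;               /* P delta for this byte */
            qx    = qmul[*q ^ *dq];         /* scaled Q delta        */
            *dq++ = db = pbmul[px] ^ qx;    /* Reconstructed B       */
            *dp++ = db ^ px;                /* Reconstructed A       */
            p++; q++;
    }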
recov_neon.c
27 uint8_t *dq, const uint8_t *pbmul,
30 void __raid6_datap_recov_neon(int bytes, uint8_t *p, uint8_t *q, uint8_t *dq,
36 u8 *p, *q, *dp, *dq; in raid6_2data_recov_neon() local
51 dq = (u8 *)ptrs[failb]; in raid6_2data_recov_neon()
53 ptrs[disks - 1] = dq; in raid6_2data_recov_neon()
59 ptrs[failb] = dq; in raid6_2data_recov_neon()
69 __raid6_2data_recov_neon(bytes, p, q, dp, dq, pbmul, qmul); in raid6_2data_recov_neon()
76 u8 *p, *q, *dq; in raid6_datap_recov_neon() local
86 dq = (u8 *)ptrs[faila]; in raid6_datap_recov_neon()
88 ptrs[disks - 1] = dq; in raid6_datap_recov_neon()
[all …]
recov_neon_inner.c
37 uint8_t *dq, const uint8_t *pbmul, in __raid6_2data_recov_neon() argument
61 vx = veorq_u8(vld1q_u8(q), vld1q_u8(dq)); in __raid6_2data_recov_neon()
74 vst1q_u8(dq, db); in __raid6_2data_recov_neon()
81 dq += 16; in __raid6_2data_recov_neon()
85 void __raid6_datap_recov_neon(int bytes, uint8_t *p, uint8_t *q, uint8_t *dq, in __raid6_datap_recov_neon() argument
101 vx = veorq_u8(vld1q_u8(q), vld1q_u8(dq)); in __raid6_datap_recov_neon()
109 vst1q_u8(dq, vx); in __raid6_datap_recov_neon()
115 dq += 16; in __raid6_datap_recov_neon()
/Linux-v4.19/drivers/soc/fsl/dpio/
qbman-portal.h
159 void qbman_swp_dqrr_consume(struct qbman_swp *s, const struct dpaa2_dq *dq);
161 int qbman_result_has_new_result(struct qbman_swp *p, const struct dpaa2_dq *dq);
197 static inline int qbman_result_is_DQ(const struct dpaa2_dq *dq) in qbman_result_is_DQ() argument
199 return ((dq->dq.verb & QBMAN_RESULT_MASK) == QBMAN_RESULT_DQ); in qbman_result_is_DQ()
207 static inline int qbman_result_is_SCN(const struct dpaa2_dq *dq) in qbman_result_is_SCN() argument
209 return !qbman_result_is_DQ(dq); in qbman_result_is_SCN()
213 static inline int qbman_result_is_FQDAN(const struct dpaa2_dq *dq) in qbman_result_is_FQDAN() argument
215 return ((dq->dq.verb & QBMAN_RESULT_MASK) == QBMAN_RESULT_FQDAN); in qbman_result_is_FQDAN()
219 static inline int qbman_result_is_CDAN(const struct dpaa2_dq *dq) in qbman_result_is_CDAN() argument
221 return ((dq->dq.verb & QBMAN_RESULT_MASK) == QBMAN_RESULT_CDAN); in qbman_result_is_CDAN()
[all …]
dpio-service.c
178 const struct dpaa2_dq *dq; in dpaa2_io_irq() local
188 dq = qbman_swp_dqrr_next(swp); in dpaa2_io_irq()
189 while (dq) { in dpaa2_io_irq()
190 if (qbman_result_is_SCN(dq)) { in dpaa2_io_irq()
194 q64 = qbman_result_SCN_ctx(dq); in dpaa2_io_irq()
200 qbman_swp_dqrr_consume(swp, dq); in dpaa2_io_irq()
204 dq = qbman_swp_dqrr_next(swp); in dpaa2_io_irq()
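
The dpio-service.c hits show the DQRR drain loop used in the interrupt path. A hedged sketch of that loop, using only the portal calls that appear in these results (swp is the software portal from the surrounding code; notification handling is reduced to a comment):

    const struct dpaa2_dq *dq;

    dq = qbman_swp_dqrr_next(swp);
    while (dq) {
            if (qbman_result_is_SCN(dq)) {
                    /* state-change notification; the original reads its
                     * context via qbman_result_SCN_ctx(dq) */
            }
            qbman_swp_dqrr_consume(swp, dq);        /* release the DQRR slot */
            dq = qbman_swp_dqrr_next(swp);
    }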
qbman-portal.c
658 verb = p->dq.verb; in qbman_swp_dqrr_next()
686 flags = p->dq.stat; in qbman_swp_dqrr_next()
704 void qbman_swp_dqrr_consume(struct qbman_swp *s, const struct dpaa2_dq *dq) in qbman_swp_dqrr_consume() argument
706 qbman_write_register(s, QBMAN_CINH_SWP_DCAP, QBMAN_IDX_FROM_DQRR(dq)); in qbman_swp_dqrr_consume()
726 int qbman_result_has_new_result(struct qbman_swp *s, const struct dpaa2_dq *dq) in qbman_result_has_new_result() argument
728 if (dq->dq.tok != QMAN_DQ_TOKEN_VALID) in qbman_result_has_new_result()
736 ((struct dpaa2_dq *)dq)->dq.tok = 0; in qbman_result_has_new_result()
743 if (s->vdq.storage == dq) { in qbman_result_has_new_result()
/Linux-v4.19/crypto/async_tx/
async_raid6_recov.c
203 struct page *p, *q, *g, *dp, *dq; in __2data_recov_5() local
233 dq = blocks[failb]; in __2data_recov_5()
238 tx = async_mult(dq, g, raid6_gfexp[good], bytes, submit); in __2data_recov_5()
248 srcs[0] = dq; in __2data_recov_5()
252 tx = async_xor(dq, srcs, 0, 2, bytes, submit); in __2data_recov_5()
256 srcs[1] = dq; in __2data_recov_5()
260 tx = async_sum_product(dq, srcs, coef, bytes, submit); in __2data_recov_5()
264 srcs[1] = dq; in __2data_recov_5()
277 struct page *p, *q, *dp, *dq; in __2data_recov_n() local
295 dq = blocks[failb]; in __2data_recov_n()
[all …]
/Linux-v4.19/drivers/net/ethernet/cavium/liquidio/
cn66xx_regs.h
311 #define CN6XXX_DMA_CNT(dq) \ argument
312 (CN6XXX_DMA_CNT_START + ((dq) * CN6XXX_DMA_OFFSET))
314 #define CN6XXX_DMA_INT_LEVEL(dq) \ argument
315 (CN6XXX_DMA_INT_LEVEL_START + ((dq) * CN6XXX_DMA_OFFSET))
317 #define CN6XXX_DMA_PKT_INT_LEVEL(dq) \ argument
318 (CN6XXX_DMA_INT_LEVEL_START + ((dq) * CN6XXX_DMA_OFFSET))
320 #define CN6XXX_DMA_TIME_INT_LEVEL(dq) \ argument
321 (CN6XXX_DMA_INT_LEVEL_START + 4 + ((dq) * CN6XXX_DMA_OFFSET))
323 #define CN6XXX_DMA_TIM(dq) \ argument
324 (CN6XXX_DMA_TIM_START + ((dq) * CN6XXX_DMA_OFFSET))
cn23xx_pf_regs.h
366 #define CN23XX_DMA_CNT(dq) \ argument
367 (CN23XX_DMA_CNT_START + ((dq) * CN23XX_DMA_OFFSET))
369 #define CN23XX_DMA_INT_LEVEL(dq) \ argument
370 (CN23XX_DMA_INT_LEVEL_START + ((dq) * CN23XX_DMA_OFFSET))
372 #define CN23XX_DMA_PKT_INT_LEVEL(dq) \ argument
373 (CN23XX_DMA_INT_LEVEL_START + ((dq) * CN23XX_DMA_OFFSET))
375 #define CN23XX_DMA_TIME_INT_LEVEL(dq) \ argument
376 (CN23XX_DMA_INT_LEVEL_START + 4 + ((dq) * CN23XX_DMA_OFFSET))
378 #define CN23XX_DMA_TIM(dq) \ argument
379 (CN23XX_DMA_TIM_START + ((dq) * CN23XX_DMA_OFFSET))
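
Both liquidio headers use the same pattern: dq is a DMA-queue index folded into a per-queue register stride, so each macro is a fixed base plus dq times the per-queue offset (the TIME_INT_LEVEL variant adds a further 4-byte offset into the same block). Illustration only, expanding the matched macros for queue 2:

    /* Illustration: what the matched macros expand to for dq == 2.
     * CN23XX_DMA_CNT(2)            -> CN23XX_DMA_CNT_START           + 2 * CN23XX_DMA_OFFSET
     * CN23XX_DMA_INT_LEVEL(2)      -> CN23XX_DMA_INT_LEVEL_START     + 2 * CN23XX_DMA_OFFSET
     * CN23XX_DMA_TIME_INT_LEVEL(2) -> CN23XX_DMA_INT_LEVEL_START + 4 + 2 * CN23XX_DMA_OFFSET */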
/Linux-v4.19/drivers/s390/crypto/
zcrypt_msgtype50.c
110 unsigned char dq[64]; member
123 unsigned char dq[128]; member
136 unsigned char dq[256]; member
268 unsigned char *p, *q, *dp, *dq, *u, *inp; in ICACRT_msg_to_type50CRT_msg() local
290 dq = crb1->dq + sizeof(crb1->dq) - short_len; in ICACRT_msg_to_type50CRT_msg()
304 dq = crb2->dq + sizeof(crb2->dq) - short_len; in ICACRT_msg_to_type50CRT_msg()
319 dq = crb3->dq + sizeof(crb3->dq) - short_len; in ICACRT_msg_to_type50CRT_msg()
332 copy_from_user(dq, crt->bq_key, short_len) || in ICACRT_msg_to_type50CRT_msg()
zcrypt_cex2a.h
82 unsigned char dq[64]; member
95 unsigned char dq[128]; member
108 unsigned char dq[256]; member
/Linux-v4.19/drivers/scsi/hisi_sas/
hisi_sas_main.c
227 struct hisi_sas_dq *dq = &hisi_hba->dq[slot->dlvry_queue]; in hisi_sas_slot_task_free() local
246 spin_lock_irqsave(&dq->lock, flags); in hisi_sas_slot_task_free()
248 spin_unlock_irqrestore(&dq->lock, flags); in hisi_sas_slot_task_free()
299 struct hisi_sas_dq *dq; in hisi_sas_task_prep() local
328 *dq_pointer = dq = sas_dev->dq; in hisi_sas_task_prep()
396 spin_lock_irqsave(&dq->lock, flags); in hisi_sas_task_prep()
397 wr_q_index = hisi_hba->hw->get_free_slot(hisi_hba, dq); in hisi_sas_task_prep()
399 spin_unlock_irqrestore(&dq->lock, flags); in hisi_sas_task_prep()
404 list_add_tail(&slot->delivery, &dq->list); in hisi_sas_task_prep()
406 spin_unlock_irqrestore(&dq->lock, flags); in hisi_sas_task_prep()
[all …]
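
The hisi_sas matches show the delivery-queue locking convention: slot allocation and the delivery list are only touched under dq->lock. A hedged sketch of the pattern visible in hisi_sas_task_prep() (error handling reduced to a single bail-out; the return value here is an assumption of this sketch):

    spin_lock_irqsave(&dq->lock, flags);
    wr_q_index = hisi_hba->hw->get_free_slot(hisi_hba, dq);
    if (wr_q_index < 0) {
            spin_unlock_irqrestore(&dq->lock, flags);
            return -EAGAIN;                         /* queue full (sketch) */
    }
    list_add_tail(&slot->delivery, &dq->list);
    spin_unlock_irqrestore(&dq->lock, flags);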
hisi_sas.h
178 struct hisi_sas_dq *dq; member
224 int (*get_free_slot)(struct hisi_hba *hisi_hba, struct hisi_sas_dq *dq);
225 void (*start_delivery)(struct hisi_sas_dq *dq);
297 struct hisi_sas_dq dq[HISI_SAS_MAX_QUEUES]; member
/Linux-v4.19/fs/xfs/
xfs_iomap.c
296 struct xfs_dquot *dq = xfs_inode_dquot(ip, type); in xfs_quota_need_throttle() local
298 if (!dq || !xfs_this_quota_on(ip->i_mount, type)) in xfs_quota_need_throttle()
302 if (!dq->q_prealloc_hi_wmark) in xfs_quota_need_throttle()
306 if (dq->q_res_bcount + alloc_blocks < dq->q_prealloc_lo_wmark) in xfs_quota_need_throttle()
322 struct xfs_dquot *dq = xfs_inode_dquot(ip, type); in xfs_quota_calc_throttle() local
325 if (!dq || dq->q_res_bcount >= dq->q_prealloc_hi_wmark) { in xfs_quota_calc_throttle()
331 freesp = dq->q_prealloc_hi_wmark - dq->q_res_bcount; in xfs_quota_calc_throttle()
332 if (freesp < dq->q_low_space[XFS_QLOWSP_5_PCNT]) { in xfs_quota_calc_throttle()
334 if (freesp < dq->q_low_space[XFS_QLOWSP_3_PCNT]) in xfs_quota_calc_throttle()
336 if (freesp < dq->q_low_space[XFS_QLOWSP_1_PCNT]) in xfs_quota_calc_throttle()
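
The xfs_iomap.c hits are the speculative-preallocation throttle: a dquot only starts throttling once its reserved blocks cross the low prealloc watermark, and xfs_quota_calc_throttle() then shrinks the preallocation as the remaining headroom falls below the 5%, 3% and 1% marks in q_low_space[]. A minimal sketch of the first check, assuming the fields shown above and a simplified block-count type (the real function also tests xfs_this_quota_on()):

    /* Hedged sketch of the xfs_quota_need_throttle() decision seen above. */
    static bool sketch_quota_need_throttle(struct xfs_dquot *dq, u64 alloc_blocks)
    {
            if (!dq || !dq->q_prealloc_hi_wmark)
                    return false;   /* quota off or no watermarks configured */
            if (dq->q_res_bcount + alloc_blocks < dq->q_prealloc_lo_wmark)
                    return false;   /* still under the low watermark */
            return true;
    }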
xfs_dquot.c
74 struct xfs_dquot *dq) in xfs_qm_adjust_dqlimits() argument
77 struct xfs_disk_dquot *d = &dq->q_core; in xfs_qm_adjust_dqlimits()
82 defq = xfs_get_defquota(dq, q); in xfs_qm_adjust_dqlimits()
102 xfs_dquot_set_prealloc_limits(dq); in xfs_qm_adjust_dqlimits()
1254 struct xfs_dquot *dq; in xfs_qm_dqiterate() local
1259 error = xfs_qm_dqget_next(mp, id, dqtype, &dq); in xfs_qm_dqiterate()
1265 error = iter_fn(dq, dqtype, priv); in xfs_qm_dqiterate()
1266 id = be32_to_cpu(dq->q_core.d_id); in xfs_qm_dqiterate()
1267 xfs_qm_dqput(dq); in xfs_qm_dqiterate()
/Linux-v4.19/drivers/soc/fsl/qbman/
qman_test_api.c
211 const struct qm_dqrr_entry *dq) in cb_dqrr() argument
213 if (WARN_ON(fd_neq(&fd_dq, &dq->fd))) { in cb_dqrr()
218 if (!(dq->stat & QM_DQRR_STAT_UNSCHEDULED) && !fd_neq(&fd_dq, &fd)) { in cb_dqrr()
/Linux-v4.19/drivers/crypto/qat/qat_common/
qat_asym_algs.c
84 dma_addr_t dq; member
110 char *dq; member
859 qat_req->in.rsa.dec_crt.dq = ctx->dma_dq; in qat_rsa_dec()
1109 ptr = rsa_key->dq; in qat_rsa_setkey_crt()
1114 ctx->dq = dma_zalloc_coherent(dev, half_key_sz, &ctx->dma_dq, in qat_rsa_setkey_crt()
1116 if (!ctx->dq) in qat_rsa_setkey_crt()
1118 memcpy(ctx->dq + (half_key_sz - len), ptr, len); in qat_rsa_setkey_crt()
1136 memset(ctx->dq, '\0', half_key_sz); in qat_rsa_setkey_crt()
1137 dma_free_coherent(dev, half_key_sz, ctx->dq, ctx->dma_dq); in qat_rsa_setkey_crt()
1138 ctx->dq = NULL; in qat_rsa_setkey_crt()
[all …]
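
The qat matches stage the RSA-CRT dq component for the device: a zeroed DMA-coherent buffer of half the key size, with the (possibly shorter) big-endian value copied right-justified so it ends up left-padded with zeros, then wiped before freeing, much like the right-justified copies in the zcrypt code earlier. A hedged sketch of that pattern (the GFP flag and the trimmed error path are assumptions of this sketch):

    ctx->dq = dma_zalloc_coherent(dev, half_key_sz, &ctx->dma_dq, GFP_KERNEL);
    if (!ctx->dq)
            goto err;                       /* unwind previously mapped parts */
    memcpy(ctx->dq + (half_key_sz - len), rsa_key->dq, len);
    /* ... hardware setup elided ... */

    /* teardown */
    memset(ctx->dq, '\0', half_key_sz);
    dma_free_coherent(dev, half_key_sz, ctx->dq, ctx->dma_dq);
    ctx->dq = NULL;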
/Linux-v4.19/drivers/ata/
sata_nv.c
1712 struct defer_queue *dq = &pp->defer_queue; in nv_swncq_qc_to_dq() local
1715 WARN_ON(dq->tail - dq->head == ATA_MAX_QUEUE); in nv_swncq_qc_to_dq()
1716 dq->defer_bits |= (1 << qc->hw_tag); in nv_swncq_qc_to_dq()
1717 dq->tag[dq->tail++ & (ATA_MAX_QUEUE - 1)] = qc->hw_tag; in nv_swncq_qc_to_dq()
1723 struct defer_queue *dq = &pp->defer_queue; in nv_swncq_qc_from_dq() local
1726 if (dq->head == dq->tail) /* null queue */ in nv_swncq_qc_from_dq()
1729 tag = dq->tag[dq->head & (ATA_MAX_QUEUE - 1)]; in nv_swncq_qc_from_dq()
1730 dq->tag[dq->head++ & (ATA_MAX_QUEUE - 1)] = ATA_TAG_POISON; in nv_swncq_qc_from_dq()
1731 WARN_ON(!(dq->defer_bits & (1 << tag))); in nv_swncq_qc_from_dq()
1732 dq->defer_bits &= ~(1 << tag); in nv_swncq_qc_from_dq()
[all …]
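
The sata_nv hits implement a small software defer queue: a circular tag array indexed by head/tail (masked with ATA_MAX_QUEUE - 1) plus a defer_bits bitmap mirroring which tags are queued. A hedged sketch of the enqueue/dequeue arithmetic visible above:

    /* enqueue (nv_swncq_qc_to_dq): record the tag at the tail */
    dq->defer_bits |= 1 << tag;
    dq->tag[dq->tail++ & (ATA_MAX_QUEUE - 1)] = tag;

    /* dequeue (nv_swncq_qc_from_dq): pop from the head, poison the slot */
    if (dq->head == dq->tail)
            return NULL;                            /* queue empty */
    tag = dq->tag[dq->head & (ATA_MAX_QUEUE - 1)];
    dq->tag[dq->head++ & (ATA_MAX_QUEUE - 1)] = ATA_TAG_POISON;
    dq->defer_bits &= ~(1 << tag);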
/Linux-v4.19/include/crypto/internal/
rsa.h
43 const u8 *dq; member
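
For context on why several results above carry a dq buffer: in RSA-CRT, dq is the private exponent reduced modulo q - 1, used with dp and qinv to replace one full-size modular exponentiation by two half-size ones. The standard (textbook) CRT relations, not taken from the kernel sources:

    d_p = d \bmod (p-1), \quad d_q = d \bmod (q-1), \quad q_{inv} = q^{-1} \bmod p
    m_1 = c^{d_p} \bmod p, \quad m_2 = c^{d_q} \bmod q
    h = q_{inv}\,(m_1 - m_2) \bmod p, \quad m = m_2 + h\,q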
/Linux-v4.19/drivers/atm/
firestream.c
637 static struct FS_BPENTRY dq[60]; variable
1206 dq[qd].flags = td->flags; in fs_send()
1207 dq[qd].next = td->next; in fs_send()
1208 dq[qd].bsa = td->bsa; in fs_send()
1209 dq[qd].skb = td->skb; in fs_send()
1210 dq[qd].dev = td->dev; in fs_send()
1960 i, da[qd], dq[qd].flags, dq[qd].bsa, dq[qd].skb, dq[qd].dev); in firestream_remove_one()
