/Linux-v5.10/drivers/gpu/drm/i915/gt/ |
D | intel_context.h |
    21  #define CE_TRACE(ce, fmt, ...) do { \  argument
    22          const struct intel_context *ce__ = (ce); \
    30  void intel_context_init(struct intel_context *ce,
    32  void intel_context_fini(struct intel_context *ce);
    37  int intel_context_alloc_state(struct intel_context *ce);
    39  void intel_context_free(struct intel_context *ce);
    41  int intel_context_reconfigure_sseu(struct intel_context *ce,
    52  static inline int intel_context_lock_pinned(struct intel_context *ce)  in intel_context_lock_pinned() argument
    53          __acquires(ce->pin_mutex)  in intel_context_lock_pinned()
    55          return mutex_lock_interruptible(&ce->pin_mutex);  in intel_context_lock_pinned()
    [all …]
|
D | intel_context.c |
    30  struct intel_context *ce = container_of(rcu, typeof(*ce), rcu);  in rcu_context_free() local
    32  kmem_cache_free(global.slab_ce, ce);  in rcu_context_free()
    35  void intel_context_free(struct intel_context *ce)  in intel_context_free() argument
    37  call_rcu(&ce->rcu, rcu_context_free);  in intel_context_free()
    43  struct intel_context *ce;  in intel_context_create() local
    45  ce = intel_context_alloc();  in intel_context_create()
    46  if (!ce)  in intel_context_create()
    49  intel_context_init(ce, engine);  in intel_context_create()
    50  return ce;  in intel_context_create()
    53  int intel_context_alloc_state(struct intel_context *ce)  in intel_context_alloc_state() argument
    [all …]
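Note: the intel_context.c hits above show the kernel's deferred-free idiom: intel_context_free() only queues the object with call_rcu(), and the slab free happens in the callback once a grace period has elapsed. The sketch below is a minimal, hypothetical illustration of that idiom, not i915 API; struct foo, foo_cache, foo_free() and foo_rcu_free() are invented names.

#include <linux/kernel.h>
#include <linux/rcupdate.h>
#include <linux/slab.h>

struct foo {
	struct rcu_head rcu;	/* must remain valid until the RCU callback runs */
	int payload;
};

static struct kmem_cache *foo_cache;

static void foo_rcu_free(struct rcu_head *rcu)
{
	struct foo *f = container_of(rcu, struct foo, rcu);

	kmem_cache_free(foo_cache, f);	/* safe: all pre-existing readers are done */
}

static void foo_free(struct foo *f)
{
	/* defer the actual free past the current RCU grace period */
	call_rcu(&f->rcu, foo_rcu_free);
}

This keeps lockless readers that still hold a pointer to the object safe: they finish before the memory is recycled.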
|
D | intel_context_param.c |
    11  int intel_context_set_ring_size(struct intel_context *ce, long sz)  in intel_context_set_ring_size() argument
    15  if (intel_context_lock_pinned(ce))  in intel_context_set_ring_size()
    18  err = i915_active_wait(&ce->active);  in intel_context_set_ring_size()
    22  if (intel_context_is_pinned(ce)) {  in intel_context_set_ring_size()
    27  if (test_bit(CONTEXT_ALLOC_BIT, &ce->flags)) {  in intel_context_set_ring_size()
    31  ring = intel_engine_create_ring(ce->engine, sz);  in intel_context_set_ring_size()
    37  intel_ring_put(ce->ring);  in intel_context_set_ring_size()
    38  ce->ring = ring;  in intel_context_set_ring_size()
    42  ce->ring = __intel_context_ring_size(sz);  in intel_context_set_ring_size()
    46  intel_context_unlock_pinned(ce);  in intel_context_set_ring_size()
    [all …]
|
D | intel_context_sseu.c |
    17  const struct intel_context *ce,  in gen8_emit_rpcs_config() argument
    27  offset = i915_ggtt_offset(ce->state) +  in gen8_emit_rpcs_config()
    41  gen8_modify_rpcs(struct intel_context *ce, const struct intel_sseu sseu)  in gen8_modify_rpcs() argument
    46  lockdep_assert_held(&ce->pin_mutex);  in gen8_modify_rpcs()
    54  if (!intel_context_pin_if_active(ce))  in gen8_modify_rpcs()
    57  rq = intel_engine_create_kernel_request(ce->engine);  in gen8_modify_rpcs()
    64  ret = intel_context_prepare_remote_request(ce, rq);  in gen8_modify_rpcs()
    66  ret = gen8_emit_rpcs_config(rq, ce, sseu);  in gen8_modify_rpcs()
    70  intel_context_unpin(ce);  in gen8_modify_rpcs()
    75  intel_context_reconfigure_sseu(struct intel_context *ce,  in intel_context_reconfigure_sseu() argument
    [all …]
|
D | selftest_mocs.c |
    23  struct intel_context *ce;  in mocs_context_create() local
    25  ce = intel_context_create(engine);  in mocs_context_create()
    26  if (IS_ERR(ce))  in mocs_context_create()
    27  return ce;  in mocs_context_create()
    30  ce->ring = __intel_context_ring_size(SZ_16K);  in mocs_context_create()
    32  return ce;  in mocs_context_create()
    229 struct intel_context *ce)  in check_mocs_engine() argument
    239 rq = intel_context_create_request(ce);  in check_mocs_engine()
    253 if (!err && ce->engine->class == RENDER_CLASS)  in check_mocs_engine()
    265 err = check_mocs_table(ce->engine, &arg->mocs, &vaddr);  in check_mocs_engine()
    [all …]
|
D | intel_breadcrumbs.c |
    113 struct intel_context *ce)  in add_signaling_context() argument
    115 lockdep_assert_held(&ce->signal_lock);  in add_signaling_context()
    118 list_add_rcu(&ce->signal_link, &b->signalers);  in add_signaling_context()
    123 struct intel_context *ce)  in remove_signaling_context() argument
    125 lockdep_assert_held(&ce->signal_lock);  in remove_signaling_context()
    127 if (!list_empty(&ce->signals))  in remove_signaling_context()
    131 list_del_rcu(&ce->signal_link);  in remove_signaling_context()
    143 check_signal_order(struct intel_context *ce, struct i915_request *rq)  in check_signal_order() argument
    145 if (rq->context != ce)  in check_signal_order()
    148 if (!list_is_last(&rq->signal_link, &ce->signals) &&  in check_signal_order()
    [all …]
|
D | intel_engine_pm.c |
    24  struct intel_context *ce;  in __engine_unpark() local
    31  ce = engine->kernel_context;  in __engine_unpark()
    32  if (ce) {  in __engine_unpark()
    33  GEM_BUG_ON(test_bit(CONTEXT_VALID_BIT, &ce->flags));  in __engine_unpark()
    36  if (IS_ENABLED(CONFIG_DRM_I915_DEBUG_GEM) && ce->state) {  in __engine_unpark()
    37  struct drm_i915_gem_object *obj = ce->state->obj;  in __engine_unpark()
    49  ce->ops->reset(ce);  in __engine_unpark()
    61  static inline unsigned long __timeline_mark_lock(struct intel_context *ce)  in __timeline_mark_lock() argument
    66  mutex_acquire(&ce->timeline->mutex.dep_map, 2, 0, _THIS_IP_);  in __timeline_mark_lock()
    71  static inline void __timeline_mark_unlock(struct intel_context *ce,  in __timeline_mark_unlock() argument
    [all …]
|
/Linux-v5.10/drivers/crypto/allwinner/sun8i-ce/ |
D | sun8i-ce-core.c |
    131 int sun8i_ce_get_engine_number(struct sun8i_ce_dev *ce)  in sun8i_ce_get_engine_number() argument
    133 return atomic_inc_return(&ce->flow) % (MAXFLOW - 1);  in sun8i_ce_get_engine_number()
    136 int sun8i_ce_run_task(struct sun8i_ce_dev *ce, int flow, const char *name)  in sun8i_ce_run_task() argument
    140 struct ce_task *cet = ce->chanlist[flow].tl;  in sun8i_ce_run_task()
    143 ce->chanlist[flow].stat_req++;  in sun8i_ce_run_task()
    146 mutex_lock(&ce->mlock);  in sun8i_ce_run_task()
    148 v = readl(ce->base + CE_ICR);  in sun8i_ce_run_task()
    150 writel(v, ce->base + CE_ICR);  in sun8i_ce_run_task()
    152 reinit_completion(&ce->chanlist[flow].complete);  in sun8i_ce_run_task()
    153 writel(ce->chanlist[flow].t_phy, ce->base + CE_TDQ);  in sun8i_ce_run_task()
    [all …]
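Note: sun8i_ce_get_engine_number() above dispatches requests round-robin across the crypto engine's flows by incrementing an atomic counter and reducing it modulo the number of usable flows. A small user-space sketch of that dispatch shape; only the modulus expression is taken from the excerpt, and MAXFLOW = 4 is an assumed value for illustration:

#include <stdatomic.h>
#include <stdio.h>

#define MAXFLOW 4	/* assumed value, for illustration only */

static atomic_int flow_counter;

static int get_engine_number(void)
{
	/* mirror atomic_inc_return(): post-increment + 1, then reduce modulo the flow count */
	return (atomic_fetch_add(&flow_counter, 1) + 1) % (MAXFLOW - 1);
}

int main(void)
{
	for (int i = 0; i < 8; i++)
		printf("request %d -> flow %d\n", i, get_engine_number());
	return 0;
}

Successive requests land on different flows, spreading the load without any locking on the selection path.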
|
D | sun8i-ce-trng.c |
    25  struct sun8i_ce_dev *ce;  in sun8i_ce_trng_read() local
    35  ce = container_of(rng, struct sun8i_ce_dev, trng);  in sun8i_ce_trng_read()
    46  ce->hwrng_stat_req++;  in sun8i_ce_trng_read()
    47  ce->hwrng_stat_bytes += todo;  in sun8i_ce_trng_read()
    50  dma_dst = dma_map_single(ce->dev, d, todo, DMA_FROM_DEVICE);  in sun8i_ce_trng_read()
    51  if (dma_mapping_error(ce->dev, dma_dst)) {  in sun8i_ce_trng_read()
    52  dev_err(ce->dev, "Cannot DMA MAP DST\n");  in sun8i_ce_trng_read()
    57  err = pm_runtime_get_sync(ce->dev);  in sun8i_ce_trng_read()
    59  pm_runtime_put_noidle(ce->dev);  in sun8i_ce_trng_read()
    63  mutex_lock(&ce->rnglock);  in sun8i_ce_trng_read()
    [all …]
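Note: the trng, prng and cipher entries in this directory all follow the same streaming-DMA idiom: map a buffer with dma_map_single(), bail out if dma_mapping_error() reports a failure, and unmap once the engine is done. A hedged kernel-style sketch of that idiom; fill_buffer_via_dma() and its parameters are hypothetical, not part of the sun8i-ce driver:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>

static int fill_buffer_via_dma(struct device *dev, void *buf, size_t len)
{
	dma_addr_t dma_dst;
	int err = 0;

	dma_dst = dma_map_single(dev, buf, len, DMA_FROM_DEVICE);
	if (dma_mapping_error(dev, dma_dst)) {
		dev_err(dev, "Cannot DMA MAP DST\n");
		return -EFAULT;	/* never hand an unchecked handle to the hardware */
	}

	/* ... program the engine with dma_dst and wait for completion ... */

	dma_unmap_single(dev, dma_dst, len, DMA_FROM_DEVICE);
	return err;
}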
|
D | sun8i-ce-prng.c |
    63  struct sun8i_ce_dev *ce;  in sun8i_ce_prng_generate() local
    74  ce = algt->ce;  in sun8i_ce_prng_generate()
    77  dev_err(ce->dev, "not seeded\n");  in sun8i_ce_prng_generate()
    91  dev_dbg(ce->dev, "%s PRNG slen=%u dlen=%u todo=%u multi=%u\n", __func__,  in sun8i_ce_prng_generate()
    99  dma_iv = dma_map_single(ce->dev, ctx->seed, ctx->slen, DMA_TO_DEVICE);  in sun8i_ce_prng_generate()
    100 if (dma_mapping_error(ce->dev, dma_iv)) {  in sun8i_ce_prng_generate()
    101 dev_err(ce->dev, "Cannot DMA MAP IV\n");  in sun8i_ce_prng_generate()
    105 dma_dst = dma_map_single(ce->dev, d, todo, DMA_FROM_DEVICE);  in sun8i_ce_prng_generate()
    106 if (dma_mapping_error(ce->dev, dma_dst)) {  in sun8i_ce_prng_generate()
    107 dev_err(ce->dev, "Cannot DMA MAP DST\n");  in sun8i_ce_prng_generate()
    [all …]
|
D | sun8i-ce-cipher.c |
    83  struct sun8i_ce_dev *ce = op->ce;  in sun8i_ce_cipher_prepare() local
    99  dev_dbg(ce->dev, "%s %s %u %x IV(%p %u) key=%u\n", __func__,  in sun8i_ce_cipher_prepare()
    111 chan = &ce->chanlist[flow];  in sun8i_ce_cipher_prepare()
    117 common = ce->variant->alg_cipher[algt->ce_algo_id];  in sun8i_ce_cipher_prepare()
    121 if (ce->variant->cipher_t_dlen_in_bytes)  in sun8i_ce_cipher_prepare()
    126 sym = ce->variant->op_mode[algt->ce_blockmode];  in sun8i_ce_cipher_prepare()
    143 rctx->addr_key = dma_map_single(ce->dev, op->key, op->keylen, DMA_TO_DEVICE);  in sun8i_ce_cipher_prepare()
    144 if (dma_mapping_error(ce->dev, rctx->addr_key)) {  in sun8i_ce_cipher_prepare()
    145 dev_err(ce->dev, "Cannot DMA MAP KEY\n");  in sun8i_ce_cipher_prepare()
    170 rctx->addr_iv = dma_map_single(ce->dev, rctx->bounce_iv, rctx->ivlen,  in sun8i_ce_cipher_prepare()
    [all …]
|
D | Makefile |
    1   obj-$(CONFIG_CRYPTO_DEV_SUN8I_CE) += sun8i-ce.o
    2   sun8i-ce-y += sun8i-ce-core.o sun8i-ce-cipher.o
    3   sun8i-ce-$(CONFIG_CRYPTO_DEV_SUN8I_CE_HASH) += sun8i-ce-hash.o
    4   sun8i-ce-$(CONFIG_CRYPTO_DEV_SUN8I_CE_PRNG) += sun8i-ce-prng.o
    5   sun8i-ce-$(CONFIG_CRYPTO_DEV_SUN8I_CE_TRNG) += sun8i-ce-trng.o
|
/Linux-v5.10/drivers/of/ |
D | dynamic.c |
    459 static void __of_changeset_entry_destroy(struct of_changeset_entry *ce)  in __of_changeset_entry_destroy() argument
    461 if (ce->action == OF_RECONFIG_ATTACH_NODE &&  in __of_changeset_entry_destroy()
    462 of_node_check_flag(ce->np, OF_OVERLAY)) {  in __of_changeset_entry_destroy()
    463 if (kref_read(&ce->np->kobj.kref) > 1) {  in __of_changeset_entry_destroy()
    465 kref_read(&ce->np->kobj.kref), ce->np);  in __of_changeset_entry_destroy()
    467 of_node_set_flag(ce->np, OF_OVERLAY_FREE_CSET);  in __of_changeset_entry_destroy()
    471 of_node_put(ce->np);  in __of_changeset_entry_destroy()
    472 list_del(&ce->node);  in __of_changeset_entry_destroy()
    473 kfree(ce);  in __of_changeset_entry_destroy()
    477 static void __of_changeset_entry_dump(struct of_changeset_entry *ce)  in __of_changeset_entry_dump() argument
    [all …]
|
/Linux-v5.10/arch/arm64/crypto/ |
D | Makefile |
    8   obj-$(CONFIG_CRYPTO_SHA1_ARM64_CE) += sha1-ce.o
    9   sha1-ce-y := sha1-ce-glue.o sha1-ce-core.o
    11  obj-$(CONFIG_CRYPTO_SHA2_ARM64_CE) += sha2-ce.o
    12  sha2-ce-y := sha2-ce-glue.o sha2-ce-core.o
    14  obj-$(CONFIG_CRYPTO_SHA512_ARM64_CE) += sha512-ce.o
    15  sha512-ce-y := sha512-ce-glue.o sha512-ce-core.o
    17  obj-$(CONFIG_CRYPTO_SHA3_ARM64) += sha3-ce.o
    18  sha3-ce-y := sha3-ce-glue.o sha3-ce-core.o
    20  obj-$(CONFIG_CRYPTO_SM3_ARM64_CE) += sm3-ce.o
    21  sm3-ce-y := sm3-ce-glue.o sm3-ce-core.o
    [all …]
|
/Linux-v5.10/drivers/base/power/ |
D | clock_ops.c |
    42  static inline void __pm_clk_enable(struct device *dev, struct pm_clock_entry *ce)  in __pm_clk_enable() argument
    46  if (ce->status < PCE_STATUS_ERROR) {  in __pm_clk_enable()
    47  ret = clk_enable(ce->clk);  in __pm_clk_enable()
    49  ce->status = PCE_STATUS_ENABLED;  in __pm_clk_enable()
    52  __func__, ce->clk, ret);  in __pm_clk_enable()
    61  static void pm_clk_acquire(struct device *dev, struct pm_clock_entry *ce)  in pm_clk_acquire() argument
    63  if (!ce->clk)  in pm_clk_acquire()
    64  ce->clk = clk_get(dev, ce->con_id);  in pm_clk_acquire()
    65  if (IS_ERR(ce->clk)) {  in pm_clk_acquire()
    66  ce->status = PCE_STATUS_ERROR;  in pm_clk_acquire()
    [all …]
|
/Linux-v5.10/drivers/clocksource/ |
D | timer-sun5i.c |
    73  static void sun5i_clkevt_sync(struct sun5i_timer_clkevt *ce)  in sun5i_clkevt_sync() argument
    75  u32 old = readl(ce->timer.base + TIMER_CNTVAL_LO_REG(1));  in sun5i_clkevt_sync()
    77  while ((old - readl(ce->timer.base + TIMER_CNTVAL_LO_REG(1))) < TIMER_SYNC_TICKS)  in sun5i_clkevt_sync()
    81  static void sun5i_clkevt_time_stop(struct sun5i_timer_clkevt *ce, u8 timer)  in sun5i_clkevt_time_stop() argument
    83  u32 val = readl(ce->timer.base + TIMER_CTL_REG(timer));  in sun5i_clkevt_time_stop()
    84  writel(val & ~TIMER_CTL_ENABLE, ce->timer.base + TIMER_CTL_REG(timer));  in sun5i_clkevt_time_stop()
    86  sun5i_clkevt_sync(ce);  in sun5i_clkevt_time_stop()
    89  static void sun5i_clkevt_time_setup(struct sun5i_timer_clkevt *ce, u8 timer, u32 delay)  in sun5i_clkevt_time_setup() argument
    91  writel(delay, ce->timer.base + TIMER_INTVAL_LO_REG(timer));  in sun5i_clkevt_time_setup()
    94  static void sun5i_clkevt_time_start(struct sun5i_timer_clkevt *ce, u8 timer, bool periodic)  in sun5i_clkevt_time_start() argument
    [all …]
|
D | mps2-timer.c |
    54  static int mps2_timer_shutdown(struct clock_event_device *ce)  in mps2_timer_shutdown() argument
    56  clockevent_mps2_writel(0, ce, TIMER_RELOAD);  in mps2_timer_shutdown()
    57  clockevent_mps2_writel(0, ce, TIMER_CTRL);  in mps2_timer_shutdown()
    62  static int mps2_timer_set_next_event(unsigned long next, struct clock_event_device *ce)  in mps2_timer_set_next_event() argument
    64  clockevent_mps2_writel(next, ce, TIMER_VALUE);  in mps2_timer_set_next_event()
    65  clockevent_mps2_writel(TIMER_CTRL_IE | TIMER_CTRL_ENABLE, ce, TIMER_CTRL);  in mps2_timer_set_next_event()
    70  static int mps2_timer_set_periodic(struct clock_event_device *ce)  in mps2_timer_set_periodic() argument
    72  u32 clock_count_per_tick = to_mps2_clkevt(ce)->clock_count_per_tick;  in mps2_timer_set_periodic()
    74  clockevent_mps2_writel(clock_count_per_tick, ce, TIMER_RELOAD);  in mps2_timer_set_periodic()
    75  clockevent_mps2_writel(clock_count_per_tick, ce, TIMER_VALUE);  in mps2_timer_set_periodic()
    [all …]
|
D | timer-digicolor.c |
    61  struct clock_event_device ce;  member
    67  static struct digicolor_timer *dc_timer(struct clock_event_device *ce)  in dc_timer() argument
    69  return container_of(ce, struct digicolor_timer, ce);  in dc_timer()
    72  static inline void dc_timer_disable(struct clock_event_device *ce)  in dc_timer_disable() argument
    74  struct digicolor_timer *dt = dc_timer(ce);  in dc_timer_disable()
    78  static inline void dc_timer_enable(struct clock_event_device *ce, u32 mode)  in dc_timer_enable() argument
    80  struct digicolor_timer *dt = dc_timer(ce);  in dc_timer_enable()
    84  static inline void dc_timer_set_count(struct clock_event_device *ce,  in dc_timer_set_count() argument
    87  struct digicolor_timer *dt = dc_timer(ce);  in dc_timer_set_count()
    91  static int digicolor_clkevt_shutdown(struct clock_event_device *ce)  in digicolor_clkevt_shutdown() argument
    [all …]
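Note: dc_timer() above illustrates the embed-and-recover pattern used throughout these clockevent drivers: the driver embeds a struct clock_event_device inside its own state struct and uses container_of() to get back from the pointer the core hands to a callback. A stand-alone sketch of that pattern; the local container_of macro and the my_timer type are illustrative, not the kernel definitions:

#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct clock_event_device {
	const char *name;
};

struct my_timer {
	void *base;			/* driver-private state */
	struct clock_event_device ce;	/* embedded core object */
};

static struct my_timer *to_my_timer(struct clock_event_device *ce)
{
	return container_of(ce, struct my_timer, ce);
}

int main(void)
{
	struct my_timer t = { .base = (void *)0x1000, .ce = { .name = "demo" } };
	struct clock_event_device *ce = &t.ce;	/* what a callback would receive */

	printf("outer=%p recovered=%p\n", (void *)&t, (void *)to_my_timer(ce));
	return 0;
}

The same recovery trick appears in rk_timer() in the timer-rockchip.c entry below.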
|
D | timer-atlas7.c |
    72  struct clock_event_device *ce = dev_id;  in sirfsoc_timer_interrupt() local
    78  if (clockevent_state_oneshot(ce))  in sirfsoc_timer_interrupt()
    81  ce->event_handler(ce);  in sirfsoc_timer_interrupt()
    101 struct clock_event_device *ce)  in sirfsoc_timer_set_next_event() argument
    166 struct clock_event_device *ce = per_cpu_ptr(sirfsoc_clockevent, cpu);  in sirfsoc_local_timer_starting_cpu() local
    178 ce->irq = irq;  in sirfsoc_local_timer_starting_cpu()
    179 ce->name = "local_timer";  in sirfsoc_local_timer_starting_cpu()
    180 ce->features = CLOCK_EVT_FEAT_ONESHOT;  in sirfsoc_local_timer_starting_cpu()
    181 ce->rating = 200;  in sirfsoc_local_timer_starting_cpu()
    182 ce->set_state_shutdown = sirfsoc_timer_shutdown;  in sirfsoc_local_timer_starting_cpu()
    [all …]
|
D | timer-rockchip.c |
    43  struct clock_event_device ce;  member
    50  static inline struct rk_timer *rk_timer(struct clock_event_device *ce)  in rk_timer() argument
    52  return &container_of(ce, struct rk_clkevt, ce)->timer;  in rk_timer()
    78  struct clock_event_device *ce)  in rk_timer_set_next_event() argument
    80  struct rk_timer *timer = rk_timer(ce);  in rk_timer_set_next_event()
    89  static int rk_timer_shutdown(struct clock_event_device *ce)  in rk_timer_shutdown() argument
    91  struct rk_timer *timer = rk_timer(ce);  in rk_timer_shutdown()
    97  static int rk_timer_set_periodic(struct clock_event_device *ce)  in rk_timer_set_periodic() argument
    99  struct rk_timer *timer = rk_timer(ce);  in rk_timer_set_periodic()
    109 struct clock_event_device *ce = dev_id;  in rk_timer_interrupt() local
    [all …]
|
/Linux-v5.10/fs/cifs/ |
D | dfs_cache.c |
    104 static inline bool cache_entry_expired(const struct cache_entry *ce)  in cache_entry_expired() argument
    109 return timespec64_compare(&ts, &ce->etime) >= 0;  in cache_entry_expired()
    112 static inline void free_tgts(struct cache_entry *ce)  in free_tgts() argument
    116 list_for_each_entry_safe(t, n, &ce->tlist, list) {  in free_tgts()
    123 static inline void flush_cache_ent(struct cache_entry *ce)  in flush_cache_ent() argument
    125 hlist_del_init(&ce->hlist);  in flush_cache_ent()
    126 kfree(ce->path);  in flush_cache_ent()
    127 free_tgts(ce);  in flush_cache_ent()
    129 kmem_cache_free(cache_slab, ce);  in flush_cache_ent()
    139 struct cache_entry *ce;  in flush_cache_ents() local
    [all …]
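Note: cache_entry_expired() above keeps an absolute expiry timestamp in each entry and treats the entry as stale once the current time reaches it. A hedged user-space sketch of the same test; cache_entry and timespec_ge() here are illustrative stand-ins for the kernel's struct and timespec64 helpers:

#include <stdbool.h>
#include <time.h>

struct cache_entry {
	struct timespec etime;	/* absolute expiry time */
};

static bool timespec_ge(const struct timespec *a, const struct timespec *b)
{
	if (a->tv_sec != b->tv_sec)
		return a->tv_sec > b->tv_sec;
	return a->tv_nsec >= b->tv_nsec;
}

static bool cache_entry_expired(const struct cache_entry *ce)
{
	struct timespec now;

	clock_gettime(CLOCK_REALTIME, &now);
	return timespec_ge(&now, &ce->etime);	/* now >= etime -> expired */
}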
|
/Linux-v5.10/arch/sparc/kernel/ |
D | time_32.c |
    121 struct clock_event_device *ce = &timer_ce;  in setup_timer_ce() local
    125 ce->name = "timer_ce";  in setup_timer_ce()
    126 ce->rating = 100;  in setup_timer_ce()
    127 ce->features = CLOCK_EVT_FEAT_PERIODIC;  in setup_timer_ce()
    128 ce->set_state_shutdown = timer_ce_shutdown;  in setup_timer_ce()
    129 ce->set_state_periodic = timer_ce_set_periodic;  in setup_timer_ce()
    130 ce->tick_resume = timer_ce_set_periodic;  in setup_timer_ce()
    131 ce->cpumask = cpu_possible_mask;  in setup_timer_ce()
    132 ce->shift = 32;  in setup_timer_ce()
    133 ce->mult = div_sc(sparc_config.clock_rate, NSEC_PER_SEC,  in setup_timer_ce()
    [all …]
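Note: setup_timer_ce() above fills in the clockevent's fixed-point conversion factors: shift = 32 and mult = div_sc(clock_rate, NSEC_PER_SEC, shift), so the core can turn a delay in nanoseconds into timer cycles as (ns * mult) >> shift. A stand-alone sketch of that arithmetic, assuming a 100 MHz clock purely for illustration; calc_mult() is a local stand-in, not the kernel's div_sc():

#include <stdint.h>
#include <stdio.h>

#define NSEC_PER_SEC 1000000000ULL

static uint32_t calc_mult(uint64_t rate_hz, uint32_t shift)
{
	/* one division at setup time, so the hot path needs only multiply + shift */
	return (uint32_t)((rate_hz << shift) / NSEC_PER_SEC);
}

int main(void)
{
	const uint64_t rate_hz = 100000000;	/* assumed 100 MHz timer clock */
	const uint32_t shift = 32;		/* matches ce->shift in the excerpt */
	const uint32_t mult = calc_mult(rate_hz, shift);
	const uint64_t delta_ns = 1000000;	/* program an event 1 ms out */

	uint64_t cycles = (delta_ns * mult) >> shift;
	printf("mult=%u -> %llu ns = %llu cycles\n",
	       mult, (unsigned long long)delta_ns, (unsigned long long)cycles);
	return 0;
}

For 1 ms at 100 MHz this yields roughly 100000 cycles, i.e. ns * rate / NSEC_PER_SEC, without a 64-bit division when the event is programmed.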
|
/Linux-v5.10/arch/arm/crypto/ |
D | Makefile |
    17  obj-$(CONFIG_CRYPTO_AES_ARM_CE) += aes-arm-ce.o
    18  obj-$(CONFIG_CRYPTO_SHA1_ARM_CE) += sha1-arm-ce.o
    19  obj-$(CONFIG_CRYPTO_SHA2_ARM_CE) += sha2-arm-ce.o
    20  obj-$(CONFIG_CRYPTO_GHASH_ARM_CE) += ghash-arm-ce.o
    21  obj-$(CONFIG_CRYPTO_CRCT10DIF_ARM_CE) += crct10dif-arm-ce.o
    22  obj-$(CONFIG_CRYPTO_CRC32_ARM_CE) += crc32-arm-ce.o
    32  sha1-arm-ce-y := sha1-ce-core.o sha1-ce-glue.o
    33  sha2-arm-ce-y := sha2-ce-core.o sha2-ce-glue.o
    34  aes-arm-ce-y := aes-ce-core.o aes-ce-glue.o
    35  ghash-arm-ce-y := ghash-ce-core.o ghash-ce-glue.o
    [all …]
|
/Linux-v5.10/drivers/gpu/drm/i915/selftests/ |
D | i915_request.c |
    207 struct intel_context *ce;  in igt_request_rewind() local
    212 ce = i915_gem_context_get_engine(ctx[0], RCS0);  in igt_request_rewind()
    213 GEM_BUG_ON(IS_ERR(ce));  in igt_request_rewind()
    214 request = mock_request(ce, 2 * HZ);  in igt_request_rewind()
    215 intel_context_put(ce);  in igt_request_rewind()
    226 ce = i915_gem_context_get_engine(ctx[1], RCS0);  in igt_request_rewind()
    227 GEM_BUG_ON(IS_ERR(ce));  in igt_request_rewind()
    228 vip = mock_request(ce, 0);  in igt_request_rewind()
    229 intel_context_put(ce);  in igt_request_rewind()
    275 struct i915_request *(*request_alloc)(struct intel_context *ce);
    [all …]
|
/Linux-v5.10/drivers/gpu/drm/i915/gem/ |
D | i915_gem_object_blt.c |
    15  struct i915_vma *intel_emit_vma_fill_blt(struct intel_context *ce,  in intel_emit_vma_fill_blt() argument
    20  struct drm_i915_private *i915 = ce->vm->i915;  in intel_emit_vma_fill_blt()
    31  GEM_BUG_ON(intel_engine_is_virtual(ce->engine));  in intel_emit_vma_fill_blt()
    32  intel_engine_pm_get(ce->engine);  in intel_emit_vma_fill_blt()
    37  pool = intel_gt_get_buffer_pool(ce->engine->gt, size);  in intel_emit_vma_fill_blt()
    47  batch = i915_vma_instance(pool->obj, ce->vm, NULL);  in intel_emit_vma_fill_blt()
    100 intel_gt_chipset_flush(ce->vm->gt);  in intel_emit_vma_fill_blt()
    110 intel_engine_pm_put(ce->engine);  in intel_emit_vma_fill_blt()
    127 void intel_emit_vma_release(struct intel_context *ce, struct i915_vma *vma)  in intel_emit_vma_release() argument
    131 intel_engine_pm_put(ce->engine);  in intel_emit_vma_release()
    [all …]
|