/Linux-v5.4/mm/

mmu_notifier.c
     44  struct mmu_notifier *mn;  in __mmu_notifier_release() local
     52  hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist)  in __mmu_notifier_release()
     59  if (mn->ops->release)  in __mmu_notifier_release()
     60  mn->ops->release(mn, mm);  in __mmu_notifier_release()
     64  mn = hlist_entry(mm->mmu_notifier_mm->list.first,  in __mmu_notifier_release()
     73  hlist_del_init_rcu(&mn->hlist);  in __mmu_notifier_release()
     99  struct mmu_notifier *mn;  in __mmu_notifier_clear_flush_young() local
    103  hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist) {  in __mmu_notifier_clear_flush_young()
    104  if (mn->ops->clear_flush_young)  in __mmu_notifier_clear_flush_young()
    105  young |= mn->ops->clear_flush_young(mn, mm, start, end);  in __mmu_notifier_clear_flush_young()
    [all …]
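The mmu_notifier.c excerpt shows the core pattern: walk the mm's notifier list under RCU and invoke each hook only if it is set. A minimal sketch of that traversal with hypothetical subscriber names; note the real code protects the walk with SRCU, plain RCU keeps the sketch short:

#include <linux/rculist.h>
#include <linux/rcupdate.h>

/* Hypothetical subscriber, standing in for struct mmu_notifier. */
struct subscriber {
	struct hlist_node hlist;
	void (*release)(struct subscriber *sub);
};

/* Mirror of the hlist_for_each_entry_rcu() loop in __mmu_notifier_release(). */
static void notify_all_release(struct hlist_head *head)
{
	struct subscriber *sub;

	rcu_read_lock();
	hlist_for_each_entry_rcu(sub, head, hlist)
		if (sub->release)
			sub->release(sub);
	rcu_read_unlock();
}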
page_ext.c
    338  struct memory_notify *mn = arg;  in page_ext_callback() local
    343  ret = online_page_ext(mn->start_pfn,  in page_ext_callback()
    344  mn->nr_pages, mn->status_change_nid);  in page_ext_callback()
    347  offline_page_ext(mn->start_pfn,  in page_ext_callback()
    348  mn->nr_pages, mn->status_change_nid);  in page_ext_callback()
    351  offline_page_ext(mn->start_pfn,  in page_ext_callback()
    352  mn->nr_pages, mn->status_change_nid);  in page_ext_callback()
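page_ext_callback() is a memory-hotplug notifier: the opaque void *arg is really a struct memory_notify describing the affected PFN range. A hedged sketch of that callback shape; the my_* handlers are hypothetical stand-ins for online_page_ext()/offline_page_ext():

#include <linux/memory.h>
#include <linux/notifier.h>

static int my_online(unsigned long start_pfn, unsigned long nr_pages, int nid)
{
	return 0;	/* hypothetical: allocate per-page state here */
}

static void my_offline(unsigned long start_pfn, unsigned long nr_pages, int nid)
{
	/* hypothetical: free per-page state here */
}

static int my_mem_callback(struct notifier_block *self,
			   unsigned long action, void *arg)
{
	struct memory_notify *mn = arg;	/* same unpacking as page_ext_callback() */
	int ret = 0;

	switch (action) {
	case MEM_GOING_ONLINE:
		ret = my_online(mn->start_pfn, mn->nr_pages,
				mn->status_change_nid);
		break;
	case MEM_OFFLINE:
	case MEM_CANCEL_ONLINE:
		my_offline(mn->start_pfn, mn->nr_pages,
			   mn->status_change_nid);
		break;
	}
	return notifier_from_errno(ret);
}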
hmm.c
     46  static void hmm_free_notifier(struct mmu_notifier *mn)  in hmm_free_notifier() argument
     48  struct hmm *hmm = container_of(mn, struct hmm, mmu_notifier);  in hmm_free_notifier()
     55  static void hmm_release(struct mmu_notifier *mn, struct mm_struct *mm)  in hmm_release() argument
     57  struct hmm *hmm = container_of(mn, struct hmm, mmu_notifier);  in hmm_release()
     97  static int hmm_invalidate_range_start(struct mmu_notifier *mn,  in hmm_invalidate_range_start() argument
    100  struct hmm *hmm = container_of(mn, struct hmm, mmu_notifier);  in hmm_invalidate_range_start()
    143  static void hmm_invalidate_range_end(struct mmu_notifier *mn,  in hmm_invalidate_range_end() argument
    146  struct hmm *hmm = container_of(mn, struct hmm, mmu_notifier);  in hmm_invalidate_range_end()
    177  struct mmu_notifier *mn;  in hmm_mirror_register() local
    185  mn = mmu_notifier_get_locked(&hmm_mmu_notifier_ops, mm);  in hmm_mirror_register()
    [all …]
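Every hmm callback above recovers the containing struct hmm from the embedded struct mmu_notifier with container_of(). The idiom in isolation, with a hypothetical wrapper name:

#include <linux/kernel.h>
#include <linux/mmu_notifier.h>

/* Driver context wrapping the notifier, as struct hmm wraps mmu_notifier. */
struct my_ctx {
	struct mmu_notifier mmu_notifier;
	int example_state;	/* hypothetical driver field */
};

static void my_release(struct mmu_notifier *mn, struct mm_struct *mm)
{
	/* Step from the embedded member back to the wrapper. */
	struct my_ctx *ctx = container_of(mn, struct my_ctx, mmu_notifier);

	ctx->example_state = 0;
}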
/Linux-v5.4/drivers/gpu/drm/i915/gem/

i915_gem_userptr.c
     23  struct i915_mmu_notifier *mn;  member
     35  struct mmu_notifier mn;  member
     41  struct i915_mmu_notifier *mn;  member
     49  interval_tree_insert(&mo->it, &mo->mn->objects);  in add_object()
     57  interval_tree_remove(&mo->it, &mo->mn->objects);  in del_object()
     80  spin_lock(&mo->mn->lock);  in __i915_gem_userptr_set_active()
     85  spin_unlock(&mo->mn->lock);  in __i915_gem_userptr_set_active()
     92  struct i915_mmu_notifier *mn =  in userptr_mn_invalidate_range_start() local
     93  container_of(_mn, struct i915_mmu_notifier, mn);  in userptr_mn_invalidate_range_start()
     99  if (RB_EMPTY_ROOT(&mn->objects.rb_root))  in userptr_mn_invalidate_range_start()
    [all …]
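i915 tracks each userptr object in an interval tree keyed by its address range, guarded by a spinlock; add_object()/del_object() above are thin wrappers over interval_tree_insert()/remove(). A sketch of the same bookkeeping with hypothetical tracker names:

#include <linux/interval_tree.h>
#include <linux/spinlock.h>

struct tracker {
	spinlock_t lock;
	struct rb_root_cached objects;	/* interval tree root, as in i915_mmu_notifier */
};

struct tracked_range {
	struct interval_tree_node it;	/* provides it.start / it.last */
};

static void track_add(struct tracker *t, struct tracked_range *r,
		      unsigned long start, unsigned long last)
{
	r->it.start = start;
	r->it.last = last;
	spin_lock(&t->lock);
	interval_tree_insert(&r->it, &t->objects);
	spin_unlock(&t->lock);
}

static void track_del(struct tracker *t, struct tracked_range *r)
{
	spin_lock(&t->lock);
	interval_tree_remove(&r->it, &t->objects);
	spin_unlock(&t->lock);
}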
/Linux-v5.4/drivers/gpu/drm/radeon/

radeon_mn.c
     40  struct mmu_notifier mn;  member
     63  static int radeon_mn_invalidate_range_start(struct mmu_notifier *mn,  in radeon_mn_invalidate_range_start() argument
     66  struct radeon_mn *rmn = container_of(mn, struct radeon_mn, mn);  in radeon_mn_invalidate_range_start()
    128  static void radeon_mn_release(struct mmu_notifier *mn, struct mm_struct *mm)  in radeon_mn_release() argument
    138  radeon_mn_invalidate_range_start(mn, &range);  in radeon_mn_release()
    151  return &rmn->mn;  in radeon_mn_alloc_notifier()
    154  static void radeon_mn_free_notifier(struct mmu_notifier *mn)  in radeon_mn_free_notifier() argument
    156  kfree(container_of(mn, struct radeon_mn, mn));  in radeon_mn_free_notifier()
    178  struct mmu_notifier *mn;  in radeon_mn_register() local
    184  mn = mmu_notifier_get(&radeon_mn_ops, current->mm);  in radeon_mn_register()
    [all …]
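radeon opts into the v5.4 get/put notifier lifecycle: it supplies alloc_notifier/free_notifier hooks and lets mmu_notifier_get() hand out one shared notifier per mm. A sketch of that pair, wrapper name hypothetical:

#include <linux/err.h>
#include <linux/mmu_notifier.h>
#include <linux/slab.h>

struct my_mn {
	struct mmu_notifier mn;	/* must be embedded, as in struct radeon_mn */
};

static struct mmu_notifier *my_alloc_notifier(struct mm_struct *mm)
{
	struct my_mn *p = kzalloc(sizeof(*p), GFP_KERNEL);

	return p ? &p->mn : ERR_PTR(-ENOMEM);
}

static void my_free_notifier(struct mmu_notifier *mn)
{
	kfree(container_of(mn, struct my_mn, mn));	/* as radeon_mn_free_notifier() */
}

static const struct mmu_notifier_ops my_mn_ops = {
	.alloc_notifier	= my_alloc_notifier,
	.free_notifier	= my_free_notifier,
};

/* Usage, mirroring radeon_mn_register():
 *	struct mmu_notifier *mn = mmu_notifier_get(&my_mn_ops, current->mm);
 */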
/Linux-v5.4/drivers/clk/qcom/

clk-rcg.c
    106  static u32 md_to_m(struct mn *mn, u32 md)  in md_to_m() argument
    108  md >>= mn->m_val_shift;  in md_to_m()
    109  md &= BIT(mn->width) - 1;  in md_to_m()
    132  static u32 mn_to_md(struct mn *mn, u32 m, u32 n, u32 md)  in mn_to_md() argument
    136  mask_w = BIT(mn->width) - 1;  in mn_to_md()
    137  mask = (mask_w << mn->m_val_shift) | mask_w;  in mn_to_md()
    141  m <<= mn->m_val_shift;  in mn_to_md()
    149  static u32 ns_m_to_n(struct mn *mn, u32 ns, u32 m)  in ns_m_to_n() argument
    151  ns = ~ns >> mn->n_val_shift;  in ns_m_to_n()
    152  ns &= BIT(mn->width) - 1;  in ns_m_to_n()
    [all …]
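The clk-rcg helpers are pure shift-and-mask accessors over the MD/NS registers, parameterized by struct mn. The unpack/pack pair implied by the excerpt, written out with the field names taken from the code above:

#include <linux/bits.h>
#include <linux/types.h>

struct mn_layout {			/* subset of clk-rcg.h's struct mn */
	u8 width;
	u8 m_val_shift;
};

/* md_to_m(): pull the M counter value out of an MD register word. */
static u32 unpack_m(const struct mn_layout *mn, u32 md)
{
	return (md >> mn->m_val_shift) & (BIT(mn->width) - 1);
}

/* mn_to_md()-style update: clear both fields, then insert the new M
 * (the low field is left cleared in this simplified sketch). */
static u32 pack_m(const struct mn_layout *mn, u32 m, u32 md)
{
	u32 mask_w = BIT(mn->width) - 1;
	u32 mask = (mask_w << mn->m_val_shift) | mask_w;

	return (md & ~mask) | ((m << mn->m_val_shift) & mask);
}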
clk-rcg.h
     30  struct mn {  struct
     79  struct mn mn;  member
    117  struct mn mn[2];  member
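clk-rcg.h defines struct mn once and embeds it in the RCG descriptors: a single .mn for plain generators (line 79) and an .mn[2] bank for dual-banked ones (line 117), which is why the gcc-*/mmcc-* tables below are full of .mn = { ... } and .mn[0]/.mn[1] initializers. A hedged sketch of such a static table entry; the field names are inferred from the clk-rcg.c excerpt and the values are hypothetical:

#include <linux/types.h>

struct mn_fields {		/* stand-in for clk-rcg.h's struct mn */
	u8 width;
	u8 m_val_shift;
	u8 n_val_shift;
};

struct rcg_desc {		/* stand-in for the RCG clock descriptor */
	struct mn_fields mn;
};

static struct rcg_desc example_clk = {
	.mn = {
		.width       = 8,	/* hypothetical counter width */
		.m_val_shift = 16,	/* hypothetical M position in the MD register */
		.n_val_shift = 16,	/* hypothetical ~N position in the NS register */
	},
};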
gcc-msm8660.c
    103  .mn = {
    154  .mn = {
    205  .mn = {
    256  .mn = {
    307  .mn = {
    358  .mn = {
    409  .mn = {
    460  .mn = {
    509  .mn = {
    558  .mn = {
    [all …]
gcc-mdm9615.c
    187  .mn = {
    238  .mn = {
    289  .mn = {
    340  .mn = {
    391  .mn = {
    454  .mn = {
    503  .mn = {
    552  .mn = {
    601  .mn = {
    650  .mn = {
    [all …]
gcc-ipq806x.c
    343  .mn = {
    394  .mn = {
    445  .mn = {
    496  .mn = {
    547  .mn = {
    598  .mn = {
    662  .mn = {
    711  .mn = {
    760  .mn = {
    809  .mn = {
    [all …]
gcc-msm8960.c
    329  .mn = {
    380  .mn = {
    431  .mn = {
    482  .mn = {
    533  .mn = {
    584  .mn = {
    635  .mn = {
    686  .mn = {
    735  .mn = {
    784  .mn = {
    [all …]
mmcc-msm8960.c
    172  .mn = {
    221  .mn = {
    270  .mn = {
    326  .mn = {
    390  .mn = {
    454  .mn = {
    705  .mn = {
    807  .mn[0] = {
    815  .mn[1] = {
    867  .mn[0] = {
    [all …]
/Linux-v5.4/drivers/misc/sgi-gru/

grutlbpurge.c
    209  static int gru_invalidate_range_start(struct mmu_notifier *mn,  in gru_invalidate_range_start() argument
    212  struct gru_mm_struct *gms = container_of(mn, struct gru_mm_struct,  in gru_invalidate_range_start()
    224  static void gru_invalidate_range_end(struct mmu_notifier *mn,  in gru_invalidate_range_end() argument
    227  struct gru_mm_struct *gms = container_of(mn, struct gru_mm_struct,  in gru_invalidate_range_end()
    252  static void gru_free_notifier(struct mmu_notifier *mn)  in gru_free_notifier() argument
    254  kfree(container_of(mn, struct gru_mm_struct, ms_notifier));  in gru_free_notifier()
    267  struct mmu_notifier *mn;  in gru_register_mmu_notifier() local
    269  mn = mmu_notifier_get_locked(&gru_mmuops, current->mm);  in gru_register_mmu_notifier()
    270  if (IS_ERR(mn))  in gru_register_mmu_notifier()
    271  return ERR_CAST(mn);  in gru_register_mmu_notifier()
    [all …]
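gru_register_mmu_notifier() shows the ERR_PTR convention around mmu_notifier_get_locked(): on failure the returned pointer encodes an errno, which IS_ERR() detects and ERR_CAST() forwards under the caller's return type. In isolation, wrapper name hypothetical:

#include <linux/err.h>
#include <linux/mmu_notifier.h>
#include <linux/sched.h>

struct my_gms {				/* stand-in for struct gru_mm_struct */
	struct mmu_notifier notifier;
};

static struct my_gms *my_register(const struct mmu_notifier_ops *ops)
{
	struct mmu_notifier *mn;

	/* Caller must hold mmap_sem, hence the _locked variant. */
	mn = mmu_notifier_get_locked(ops, current->mm);
	if (IS_ERR(mn))
		return ERR_CAST(mn);	/* propagate the errno, retyped */
	return container_of(mn, struct my_gms, notifier);
}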
/Linux-v5.4/drivers/net/ethernet/mellanox/mlx5/core/diag/

en_rep_tracepoint.h
     24  TP_fast_assign(const struct mlx5e_neigh *mn = &nhe->m_neigh;
     28  __assign_str(devname, mn->dev->name);
     34  if (mn->family == AF_INET) {
     35  *p32 = mn->dst_ip.v4;
     37  } else if (mn->family == AF_INET6) {
     38  *pin6 = mn->dst_ip.v6;
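Both mlx5 tracepoints copy the neighbour's destination address into a fixed 16-byte trace field, choosing the v4 or v6 member by address family. The same dispatch as a plain helper, outside the TRACE_EVENT machinery; names are hypothetical:

#include <linux/in6.h>
#include <linux/socket.h>
#include <linux/string.h>

/* Mirrors the TP_fast_assign() branches above: a v4 address occupies
 * the first 4 bytes of the 16-byte buffer, a v6 address fills it. */
static void copy_dst_ip(int family, __be32 v4, const struct in6_addr *v6,
			void *buf)
{
	memset(buf, 0, sizeof(struct in6_addr));
	if (family == AF_INET) {
		__be32 *p32 = buf;

		*p32 = v4;
	} else if (family == AF_INET6) {
		struct in6_addr *pin6 = buf;

		*pin6 = *v6;
	}
}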
en_tc_tracepoint.h
     85  TP_fast_assign(const struct mlx5e_neigh *mn = &nhe->m_neigh;
     89  __assign_str(devname, mn->dev->name);
     94  if (mn->family == AF_INET) {
     95  *p32 = mn->dst_ip.v4;
     97  } else if (mn->family == AF_INET6) {
     98  *pin6 = mn->dst_ip.v6;
/Linux-v5.4/drivers/infiniband/core/

umem_odp.c
     77  static void ib_umem_notifier_release(struct mmu_notifier *mn,  in ib_umem_notifier_release() argument
     81  container_of(mn, struct ib_ucontext_per_mm, mn);  in ib_umem_notifier_release()
     85  if (!per_mm->mn.users)  in ib_umem_notifier_release()
    116  static int ib_umem_notifier_invalidate_range_start(struct mmu_notifier *mn,  in ib_umem_notifier_invalidate_range_start() argument
    120  container_of(mn, struct ib_ucontext_per_mm, mn);  in ib_umem_notifier_invalidate_range_start()
    128  if (!per_mm->mn.users) {  in ib_umem_notifier_invalidate_range_start()
    155  static void ib_umem_notifier_invalidate_range_end(struct mmu_notifier *mn,  in ib_umem_notifier_invalidate_range_end() argument
    159  container_of(mn, struct ib_ucontext_per_mm, mn);  in ib_umem_notifier_invalidate_range_end()
    161  if (unlikely(!per_mm->mn.users))  in ib_umem_notifier_invalidate_range_end()
    185  return &per_mm->mn;  in ib_umem_alloc_notifier()
    [all …]
/Linux-v5.4/drivers/gpu/drm/nouveau/nvkm/core/

memory.c
     38  nvkm_mm_free(&fb->tags, &tags->mn);  in nvkm_memory_tags_put()
     61  if (tags->mn && tags->mn->length != nr) {  in nvkm_memory_tags_get()
     77  if (!nvkm_mm_head(&fb->tags, 0, 1, nr, nr, 1, &tags->mn)) {  in nvkm_memory_tags_get()
     79  clr(device, tags->mn->offset, tags->mn->length);  in nvkm_memory_tags_get()
     90  tags->mn = NULL;  in nvkm_memory_tags_get()
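Compression tags come from nouveau's internal nvkm_mm range allocator: nvkm_mm_head() carves out nr contiguous units and nvkm_mm_free() returns them. A sketch reusing the exact call shape from the excerpt; this is a nouveau-internal API, and the argument meanings (heap, type, max, min, align) are inferred, not documented here:

/* Assumes nouveau's internal allocator header (core/mm.h). */
static int tags_get(struct nvkm_mm *tags, u32 nr, struct nvkm_mm_node **pmn)
{
	/* 0, 1, nr, nr, 1 as in nvkm_memory_tags_get(): exactly nr units,
	 * minimum alignment. */
	return nvkm_mm_head(tags, 0, 1, nr, nr, 1, pmn);
}

static void tags_put(struct nvkm_mm *tags, struct nvkm_mm_node **pmn)
{
	nvkm_mm_free(tags, pmn);	/* return the span to the allocator */
}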
/Linux-v5.4/include/linux/

mmu_notifier.h
     97  void (*release)(struct mmu_notifier *mn,
    109  int (*clear_flush_young)(struct mmu_notifier *mn,
    119  int (*clear_young)(struct mmu_notifier *mn,
    130  int (*test_young)(struct mmu_notifier *mn,
    138  void (*change_pte)(struct mmu_notifier *mn,
    193  int (*invalidate_range_start)(struct mmu_notifier *mn,
    195  void (*invalidate_range_end)(struct mmu_notifier *mn,
    216  void (*invalidate_range)(struct mmu_notifier *mn, struct mm_struct *mm,
    230  void (*free_notifier)(struct mmu_notifier *mn);
    269  void mmu_notifier_put(struct mmu_notifier *mn);
    [all …]
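The header shows the ops vtable; each hook receives the struct mmu_notifier itself, which is what makes the container_of() pattern above work. A minimal ops definition against the v5.4 signatures, with hypothetical handler bodies:

#include <linux/mmu_notifier.h>

static int my_inv_start(struct mmu_notifier *mn,
			const struct mmu_notifier_range *range)
{
	/* range->start / range->end bound the span being invalidated. */
	if (!mmu_notifier_range_blockable(range))
		return -EAGAIN;	/* hypothetical: we would need to sleep */
	return 0;
}

static void my_inv_end(struct mmu_notifier *mn,
		       const struct mmu_notifier_range *range)
{
	/* hypothetical: re-arm tracking for the range */
}

static const struct mmu_notifier_ops my_ops = {
	.invalidate_range_start	= my_inv_start,
	.invalidate_range_end	= my_inv_end,
};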
/Linux-v5.4/drivers/gpu/drm/amd/amdgpu/

amdgpu_mn.h
     75  void amdgpu_mn_lock(struct amdgpu_mn *mn);
     76  void amdgpu_mn_unlock(struct amdgpu_mn *mn);
     83  static inline void amdgpu_mn_lock(struct amdgpu_mn *mn) {}  in amdgpu_mn_lock() argument
     84  static inline void amdgpu_mn_unlock(struct amdgpu_mn *mn) {}  in amdgpu_mn_unlock() argument
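amdgpu_mn.h pairs the real declarations with empty static inline stubs so call sites compile unchanged when notifier support is configured out. The pattern in general form; the config symbol and names are hypothetical:

struct my_ctx;

#if defined(CONFIG_MY_FEATURE)		/* hypothetical config symbol */
void my_lock(struct my_ctx *ctx);
void my_unlock(struct my_ctx *ctx);
#else
/* Stubs: callers need no #ifdefs of their own. */
static inline void my_lock(struct my_ctx *ctx) {}
static inline void my_unlock(struct my_ctx *ctx) {}
#endif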
amdgpu_mn.c
     86  bo->mn = NULL;  in amdgpu_mn_destroy()
    118  void amdgpu_mn_lock(struct amdgpu_mn *mn)  in amdgpu_mn_lock() argument
    120  if (mn)  in amdgpu_mn_lock()
    121  down_write(&mn->lock);  in amdgpu_mn_lock()
    129  void amdgpu_mn_unlock(struct amdgpu_mn *mn)  in amdgpu_mn_unlock() argument
    131  if (mn)  in amdgpu_mn_unlock()
    132  up_write(&mn->lock);  in amdgpu_mn_unlock()
    412  bo->mn = amn;  in amdgpu_mn_register()
    442  amn = bo->mn;  in amdgpu_mn_unregister()
    453  bo->mn = NULL;  in amdgpu_mn_unregister()
/Linux-v5.4/drivers/gpu/drm/nouveau/nvkm/subdev/fb/

ram.c
     34  struct nvkm_mm_node *mn;  member
     45  .mem = vram->mn,  in nvkm_vram_map()
     54  return (u64)nvkm_mm_size(nvkm_vram(memory)->mn) << NVKM_RAM_MM_SHIFT;  in nvkm_vram_size()
     61  if (!nvkm_mm_contiguous(vram->mn))  in nvkm_vram_addr()
     63  return (u64)nvkm_mm_addr(vram->mn) << NVKM_RAM_MM_SHIFT;  in nvkm_vram_addr()
     82  struct nvkm_mm_node *next = vram->mn;  in nvkm_vram_dtor()
    130  node = &vram->mn;  in nvkm_ram_get()
/Linux-v5.4/drivers/infiniband/hw/hfi1/

mmu_rb.c
     56  struct mmu_notifier mn;  member
    110  INIT_HLIST_NODE(&handlr->mn.hlist);  in hfi1_mmu_rb_register()
    112  handlr->mn.ops = &mn_opts;  in hfi1_mmu_rb_register()
    119  ret = mmu_notifier_register(&handlr->mn, handlr->mm);  in hfi1_mmu_rb_register()
    137  mmu_notifier_unregister(&handler->mn, handler->mm);  in hfi1_mmu_rb_unregister()
    285  static int mmu_notifier_range_start(struct mmu_notifier *mn,  in mmu_notifier_range_start() argument
    289  container_of(mn, struct mmu_rb_handler, mn);  in mmu_notifier_range_start()
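hfi1 uses the classic manual lifecycle rather than the get/put scheme: the caller embeds the notifier, assigns ops, then calls mmu_notifier_register()/unregister() itself. A sketch reusing the hypothetical my_ctx/my_ops from the earlier sketches:

#include <linux/mmu_notifier.h>

/* Manual attach, mirroring lines 112/119 of hfi1_mmu_rb_register(). */
static int my_attach(struct my_ctx *ctx, struct mm_struct *mm)
{
	ctx->mmu_notifier.ops = &my_ops;
	return mmu_notifier_register(&ctx->mmu_notifier, mm);
}

static void my_detach(struct my_ctx *ctx, struct mm_struct *mm)
{
	mmu_notifier_unregister(&ctx->mmu_notifier, mm);
}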
/Linux-v5.4/drivers/iommu/

amd_iommu_v2.c
     42  struct mmu_notifier mn; /* mmu_notifier handle */  member
    337  mmu_notifier_unregister(&pasid_state->mn, pasid_state->mm);  in free_pasid_states()
    356  static struct pasid_state *mn_to_state(struct mmu_notifier *mn)  in mn_to_state() argument
    358  return container_of(mn, struct pasid_state, mn);  in mn_to_state()
    361  static void mn_invalidate_range(struct mmu_notifier *mn,  in mn_invalidate_range() argument
    368  pasid_state = mn_to_state(mn);  in mn_invalidate_range()
    378  static void mn_release(struct mmu_notifier *mn, struct mm_struct *mm)  in mn_release() argument
    386  pasid_state = mn_to_state(mn);  in mn_release()
    641  pasid_state->mn.ops = &iommu_mn;  in amd_iommu_bind_pasid()
    646  mmu_notifier_register(&pasid_state->mn, mm);  in amd_iommu_bind_pasid()
    [all …]
/Linux-v5.4/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/

gk20a.c
     53  struct nvkm_mm_node *mn;  member
    130  return (u64)gk20a_instobj(memory)->mn->offset << 12;  in gk20a_instobj_addr()
    136  return (u64)gk20a_instobj(memory)->mn->length << 12;  in gk20a_instobj_size()
    288  .mem = node->mn,  in gk20a_instobj_map()
    304  dma_free_attrs(dev, (u64)node->base.mn->length << PAGE_SHIFT,  in gk20a_instobj_dtor_dma()
    317  struct nvkm_mm_node *r = node->base.mn;  in gk20a_instobj_dtor_iommu()
    335  for (i = 0; i < node->base.mn->length; i++) {  in gk20a_instobj_dtor_iommu()
    416  node->base.mn = &node->r;  in gk20a_instobj_ctor_dma()
    493  node->base.mn = r;  in gk20a_instobj_ctor_iommu()
    542  size, align, (u64)node->mn->offset << 12);  in gk20a_instobj_new()
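gk20a keeps nvkm_mm_node offsets and lengths in 4 KiB page units; the (u64) cast before the shift matters, since shifting a 32-bit offset left by 12 could otherwise overflow. In isolation:

#include <linux/types.h>

/* Convert a page-granular offset to a byte address, as in
 * gk20a_instobj_addr(); cast first so the shift happens in 64 bits. */
static u64 page_units_to_bytes(u32 units)
{
	return (u64)units << 12;
}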
/Linux-v5.4/drivers/scsi/qla2xxx/

qla_bsg.c
    991  struct verify_chip_entry_84xx *mn = NULL;  in qla84xx_updatefw() local
   1037  mn = dma_pool_zalloc(ha->s_dma_pool, GFP_KERNEL, &mn_dma);  in qla84xx_updatefw()
   1038  if (!mn) {  in qla84xx_updatefw()
   1048  mn->entry_type = VERIFY_CHIP_IOCB_TYPE;  in qla84xx_updatefw()
   1049  mn->entry_count = 1;  in qla84xx_updatefw()
   1055  mn->options = cpu_to_le16(options);  in qla84xx_updatefw()
   1056  mn->fw_ver = cpu_to_le32(fw_ver);  in qla84xx_updatefw()
   1057  mn->fw_size = cpu_to_le32(data_len);  in qla84xx_updatefw()
   1058  mn->fw_seq_size = cpu_to_le32(data_len);  in qla84xx_updatefw()
   1059  put_unaligned_le64(fw_dma, &mn->dsd.address);  in qla84xx_updatefw()
   [all …]
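qla84xx_updatefw() carves its IOCB out of a DMA pool and stores every multi-byte field little-endian before the HBA sees it; note put_unaligned_le64() for the 64-bit DMA address. The pattern with a hypothetical, much smaller IOCB layout (not the real verify_chip_entry_84xx):

#include <linux/dmapool.h>
#include <linux/types.h>
#include <asm/byteorder.h>
#include <asm/unaligned.h>

struct my_iocb {			/* hypothetical layout */
	u8     entry_type;
	u8     entry_count;
	__le16 options;
	__le32 fw_size;
	__le64 dsd_address;
} __packed;

static struct my_iocb *build_iocb(struct dma_pool *pool, dma_addr_t *iocb_dma,
				  u16 options, u32 data_len, dma_addr_t fw_dma)
{
	/* Zeroed allocation from the pool, as dma_pool_zalloc() above. */
	struct my_iocb *iocb = dma_pool_zalloc(pool, GFP_KERNEL, iocb_dma);

	if (!iocb)
		return NULL;

	iocb->entry_type  = 0x1b;		/* hypothetical type code */
	iocb->entry_count = 1;
	iocb->options     = cpu_to_le16(options);
	iocb->fw_size     = cpu_to_le32(data_len);
	put_unaligned_le64(fw_dma, &iocb->dsd_address);
	return iocb;
}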