/Linux-v4.19/mm/

D | mmu_notifier.c |
     59  struct mmu_notifier *mn;   in __mmu_notifier_release() local
     67  hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist)   in __mmu_notifier_release()
     74  if (mn->ops->release)   in __mmu_notifier_release()
     75  mn->ops->release(mn, mm);   in __mmu_notifier_release()
     79  mn = hlist_entry(mm->mmu_notifier_mm->list.first,   in __mmu_notifier_release()
     88  hlist_del_init_rcu(&mn->hlist);   in __mmu_notifier_release()
    114  struct mmu_notifier *mn;   in __mmu_notifier_clear_flush_young() local
    118  hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist) {   in __mmu_notifier_clear_flush_young()
    119  if (mn->ops->clear_flush_young)   in __mmu_notifier_clear_flush_young()
    120  young |= mn->ops->clear_flush_young(mn, mm, start, end);   in __mmu_notifier_clear_flush_young()
    [all …]

D | page_ext.c |
    345  struct memory_notify *mn = arg;   in page_ext_callback() local
    350  ret = online_page_ext(mn->start_pfn,   in page_ext_callback()
    351  mn->nr_pages, mn->status_change_nid);   in page_ext_callback()
    354  offline_page_ext(mn->start_pfn,   in page_ext_callback()
    355  mn->nr_pages, mn->status_change_nid);   in page_ext_callback()
    358  offline_page_ext(mn->start_pfn,   in page_ext_callback()
    359  mn->nr_pages, mn->status_change_nid);   in page_ext_callback()
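
The mmu_notifier.c hits above all share one dispatch shape: walk the per-mm list of registered notifiers and call the relevant callback on each, OR-ing the results together for the young-bit queries. A minimal sketch of that shape, assuming only what the listed lines show (the real __mmu_notifier_clear_flush_young() holds a file-global SRCU read lock rather than the plain RCU section used here):

#include <linux/mmu_notifier.h>
#include <linux/rculist.h>

/* Sketch of the dispatch loop behind the __mmu_notifier_clear_flush_young()
 * hits (lines 114-120 above).  Simplified relative to the real function. */
static int sketch_clear_flush_young(struct mm_struct *mm,
                                    unsigned long start, unsigned long end)
{
        struct mmu_notifier *mn;
        int young = 0;

        rcu_read_lock();
        hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist) {
                if (mn->ops->clear_flush_young)
                        young |= mn->ops->clear_flush_young(mn, mm, start, end);
        }
        rcu_read_unlock();

        return young;
}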
/Linux-v4.19/drivers/gpu/drm/i915/

D | i915_gem_userptr.c |
     39  struct i915_mmu_notifier *mn;   member
     51  struct mmu_notifier mn;   member
     57  struct i915_mmu_notifier *mn;   member
    102  interval_tree_insert(&mo->it, &mo->mn->objects);   in add_object()
    111  interval_tree_remove(&mo->it, &mo->mn->objects);   in del_object()
    121  struct i915_mmu_notifier *mn =   in i915_gem_userptr_mn_invalidate_range_start() local
    122  container_of(_mn, struct i915_mmu_notifier, mn);   in i915_gem_userptr_mn_invalidate_range_start()
    127  if (RB_EMPTY_ROOT(&mn->objects.rb_root))   in i915_gem_userptr_mn_invalidate_range_start()
    133  spin_lock(&mn->lock);   in i915_gem_userptr_mn_invalidate_range_start()
    134  it = interval_tree_iter_first(&mn->objects, start, end);   in i915_gem_userptr_mn_invalidate_range_start()
    [all …]
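
The i915_gem_userptr.c hits pair an embedded struct mmu_notifier with an interval tree of userptr objects: add_object()/del_object() insert and remove nodes, and the invalidate_range_start() callback walks every node overlapping the invalidated range under mn->lock. A rough sketch of that lookup using the interval-tree helpers named above; struct tracked_object and for_each_overlap() are illustrative names, not the driver's:

#include <linux/interval_tree.h>
#include <linux/kernel.h>
#include <linux/spinlock.h>

/* Illustrative container; the real driver wraps the node in struct i915_mmu_object. */
struct tracked_object {
        struct interval_tree_node it;   /* it.start/it.last span the user mapping */
};

/* Visit every tracked object overlapping [start, last] (last is inclusive),
 * the same walk the invalidate_range_start() hit starts at line 134 above. */
static void for_each_overlap(struct rb_root_cached *objects, spinlock_t *lock,
                             unsigned long start, unsigned long last)
{
        struct interval_tree_node *it;

        spin_lock(lock);
        for (it = interval_tree_iter_first(objects, start, last);
             it; it = interval_tree_iter_next(it, start, last)) {
                struct tracked_object *obj =
                        container_of(it, struct tracked_object, it);

                (void)obj;      /* invalidate obj's backing pages here */
        }
        spin_unlock(lock);
}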
/Linux-v4.19/drivers/clk/qcom/

D | clk-rcg.c |
    114  static u32 md_to_m(struct mn *mn, u32 md)   in md_to_m() argument
    116  md >>= mn->m_val_shift;   in md_to_m()
    117  md &= BIT(mn->width) - 1;   in md_to_m()
    140  static u32 mn_to_md(struct mn *mn, u32 m, u32 n, u32 md)   in mn_to_md() argument
    144  mask_w = BIT(mn->width) - 1;   in mn_to_md()
    145  mask = (mask_w << mn->m_val_shift) | mask_w;   in mn_to_md()
    149  m <<= mn->m_val_shift;   in mn_to_md()
    157  static u32 ns_m_to_n(struct mn *mn, u32 ns, u32 m)   in ns_m_to_n() argument
    159  ns = ~ns >> mn->n_val_shift;   in ns_m_to_n()
    160  ns &= BIT(mn->width) - 1;   in ns_m_to_n()
    [all …]

D | clk-rcg.h |
     30  struct mn {   struct
     80  struct mn mn;   member
    119  struct mn mn[2];   member
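
struct mn in clk-rcg.h records where the M and N counter values sit inside the MD and NS registers (a shift and a field width each), and the clk-rcg.c helpers above simply shift and mask accordingly. A self-contained sketch of the arithmetic from the listed lines; struct mn_fields is an illustrative stand-in for the fields those lines dereference, not the real definition at clk-rcg.h line 30:

#include <linux/bitops.h>
#include <linux/types.h>

/* Illustrative stand-in for the struct mn fields the helpers dereference. */
struct mn_fields {
        u8 m_val_shift;
        u8 n_val_shift;
        u8 width;
};

/* Pull the M counter out of an MD register value (clk-rcg.c lines 116-117). */
static u32 md_to_m(const struct mn_fields *mn, u32 md)
{
        md >>= mn->m_val_shift;
        md &= BIT(mn->width) - 1;
        return md;
}

/* Build the mask mn_to_md() uses to clear both fields of MD before writing
 * the new M value (clk-rcg.c lines 144-145). */
static u32 md_field_mask(const struct mn_fields *mn)
{
        u32 mask_w = BIT(mn->width) - 1;

        return (mask_w << mn->m_val_shift) | mask_w;
}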
D | gcc-msm8660.c |
    111  .mn = {
    162  .mn = {
    213  .mn = {
    264  .mn = {
    315  .mn = {
    366  .mn = {
    417  .mn = {
    468  .mn = {
    517  .mn = {
    566  .mn = {
    [all …]

D | gcc-ipq806x.c |
    272  .mn = {
    323  .mn = {
    374  .mn = {
    425  .mn = {
    476  .mn = {
    527  .mn = {
    591  .mn = {
    640  .mn = {
    689  .mn = {
    738  .mn = {
    [all …]

D | gcc-mdm9615.c |
    195  .mn = {
    246  .mn = {
    297  .mn = {
    348  .mn = {
    399  .mn = {
    462  .mn = {
    511  .mn = {
    560  .mn = {
    609  .mn = {
    658  .mn = {
    [all …]

D | gcc-msm8960.c |
    178  .mn = {
    229  .mn = {
    280  .mn = {
    331  .mn = {
    382  .mn = {
    433  .mn = {
    484  .mn = {
    535  .mn = {
    584  .mn = {
    633  .mn = {
    [all …]

D | mmcc-msm8960.c |
    180  .mn = {
    229  .mn = {
    278  .mn = {
    334  .mn = {
    398  .mn = {
    462  .mn = {
    713  .mn = {
    815  .mn[0] = {
    823  .mn[1] = {
    875  .mn[0] = {
    [all …]

D | lcc-ipq806x.c |
    119  .mn = {
    233  .mn = {
    313  .mn = {
    372  .mn = {
/Linux-v4.19/drivers/gpu/drm/amd/amdgpu/

D | amdgpu_mn.c |
     76  struct mmu_notifier mn;   member
    125  bo->mn = NULL;   in amdgpu_mn_destroy()
    132  mmu_notifier_unregister_no_release(&amn->mn, amn->mm);   in amdgpu_mn_destroy()
    144  static void amdgpu_mn_release(struct mmu_notifier *mn,   in amdgpu_mn_release() argument
    147  struct amdgpu_mn *amn = container_of(mn, struct amdgpu_mn, mn);   in amdgpu_mn_release()
    159  void amdgpu_mn_lock(struct amdgpu_mn *mn)   in amdgpu_mn_lock() argument
    161  if (mn)   in amdgpu_mn_lock()
    162  down_write(&mn->lock);   in amdgpu_mn_lock()
    170  void amdgpu_mn_unlock(struct amdgpu_mn *mn)   in amdgpu_mn_unlock() argument
    172  if (mn)   in amdgpu_mn_unlock()
    [all …]

D | amdgpu_mn.h |
     38  void amdgpu_mn_lock(struct amdgpu_mn *mn);
     39  void amdgpu_mn_unlock(struct amdgpu_mn *mn);
     45  static inline void amdgpu_mn_lock(struct amdgpu_mn *mn) {}   in amdgpu_mn_lock() argument
     46  static inline void amdgpu_mn_unlock(struct amdgpu_mn *mn) {}   in amdgpu_mn_unlock() argument
/Linux-v4.19/drivers/misc/sgi-gru/

D | grutlbpurge.c |
    222  static int gru_invalidate_range_start(struct mmu_notifier *mn,   in gru_invalidate_range_start() argument
    227  struct gru_mm_struct *gms = container_of(mn, struct gru_mm_struct,   in gru_invalidate_range_start()
    239  static void gru_invalidate_range_end(struct mmu_notifier *mn,   in gru_invalidate_range_end() argument
    243  struct gru_mm_struct *gms = container_of(mn, struct gru_mm_struct,   in gru_invalidate_range_end()
    253  static void gru_release(struct mmu_notifier *mn, struct mm_struct *mm)   in gru_release() argument
    255  struct gru_mm_struct *gms = container_of(mn, struct gru_mm_struct,   in gru_release()
    274  struct mmu_notifier *mn, *gru_mn = NULL;   in mmu_find_ops() local
    278  hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list,   in mmu_find_ops()
    280  if (mn->ops == ops) {   in mmu_find_ops()
    281  gru_mn = mn;   in mmu_find_ops()
    [all …]
/Linux-v4.19/drivers/gpu/drm/radeon/

D | radeon_mn.c |
     43  struct mmu_notifier mn;   member
     83  bo->mn = NULL;   in radeon_mn_destroy()
     90  mmu_notifier_unregister(&rmn->mn, rmn->mm);   in radeon_mn_destroy()
    102  static void radeon_mn_release(struct mmu_notifier *mn,   in radeon_mn_release() argument
    105  struct radeon_mn *rmn = container_of(mn, struct radeon_mn, mn);   in radeon_mn_release()
    121  static int radeon_mn_invalidate_range_start(struct mmu_notifier *mn,   in radeon_mn_invalidate_range_start() argument
    127  struct radeon_mn *rmn = container_of(mn, struct radeon_mn, mn);   in radeon_mn_invalidate_range_start()
    223  rmn->mn.ops = &radeon_mn_ops;   in radeon_mn_get()
    227  r = __mmu_notifier_register(&rmn->mn, mm);   in radeon_mn_get()
    290  bo->mn = rmn;   in radeon_mn_register()
    [all …]
/Linux-v4.19/drivers/gpu/drm/nouveau/nvkm/core/

D | memory.c |
     38  nvkm_mm_free(&fb->tags, &tags->mn);   in nvkm_memory_tags_put()
     61  if (tags->mn && tags->mn->length != nr) {   in nvkm_memory_tags_get()
     77  if (!nvkm_mm_head(&fb->tags, 0, 1, nr, nr, 1, &tags->mn)) {   in nvkm_memory_tags_get()
     79  clr(device, tags->mn->offset, tags->mn->length);   in nvkm_memory_tags_get()
     90  tags->mn = NULL;   in nvkm_memory_tags_get()
/Linux-v4.19/include/linux/

D | mmu_notifier.h |
     65  void (*release)(struct mmu_notifier *mn,
     77  int (*clear_flush_young)(struct mmu_notifier *mn,
     87  int (*clear_young)(struct mmu_notifier *mn,
     98  int (*test_young)(struct mmu_notifier *mn,
    106  void (*change_pte)(struct mmu_notifier *mn,
    159  int (*invalidate_range_start)(struct mmu_notifier *mn,
    163  void (*invalidate_range_end)(struct mmu_notifier *mn,
    189  void (*invalidate_range)(struct mmu_notifier *mn, struct mm_struct *mm,
    214  extern int mmu_notifier_register(struct mmu_notifier *mn,
    216  extern int __mmu_notifier_register(struct mmu_notifier *mn,
    [all …]
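
mmu_notifier.h declares the ops table and the register/unregister entry points that every driver in this listing (i915, amdgpu, radeon, sgi-gru, hfi1, umem_odp, npu-dma) builds on: embed struct mmu_notifier in per-mm driver state, fill in the callbacks of interest, and recover the enclosing state with container_of() when they fire. A minimal, hypothetical sketch of that pattern; my_state, my_release and my_state_create are made-up names, not taken from any of the drivers above:

#include <linux/err.h>
#include <linux/mmu_notifier.h>
#include <linux/slab.h>

/* Hypothetical per-mm driver state; the drivers above embed the notifier
 * the same way (struct amdgpu_mn, struct radeon_mn, struct pasid_state, ...). */
struct my_state {
        struct mmu_notifier mn;
        /* ... driver-private bookkeeping ... */
};

/* ->release fires when the address space is torn down; recover the
 * enclosing state the way the container_of() hits above do. */
static void my_release(struct mmu_notifier *mn, struct mm_struct *mm)
{
        struct my_state *state = container_of(mn, struct my_state, mn);

        (void)state;    /* drop everything tied to this mm here */
}

static const struct mmu_notifier_ops my_ops = {
        .release = my_release,
};

static struct my_state *my_state_create(struct mm_struct *mm)
{
        struct my_state *state = kzalloc(sizeof(*state), GFP_KERNEL);
        int ret;

        if (!state)
                return ERR_PTR(-ENOMEM);

        state->mn.ops = &my_ops;
        ret = mmu_notifier_register(&state->mn, mm);
        if (ret) {
                kfree(state);
                return ERR_PTR(ret);
        }
        return state;
}

The __mmu_notifier_register() variant seen in the radeon and npu-dma hits exists for callers that already hold mmap_sem for writing; plain mmu_notifier_register() takes it itself.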
/Linux-v4.19/drivers/gpu/drm/nouveau/nvkm/subdev/fb/

D | ram.c |
     34  struct nvkm_mm_node *mn;   member
     45  .mem = vram->mn,   in nvkm_vram_map()
     54  return (u64)nvkm_mm_size(nvkm_vram(memory)->mn) << NVKM_RAM_MM_SHIFT;   in nvkm_vram_size()
     61  if (!nvkm_mm_contiguous(vram->mn))   in nvkm_vram_addr()
     63  return (u64)nvkm_mm_addr(vram->mn) << NVKM_RAM_MM_SHIFT;   in nvkm_vram_addr()
     82  struct nvkm_mm_node *next = vram->mn;   in nvkm_vram_dtor()
    130  node = &vram->mn;   in nvkm_ram_get()
/Linux-v4.19/drivers/iommu/

D | amd_iommu_v2.c |
     52  struct mmu_notifier mn;   /* mmu_notifier handle */   member
    347  mmu_notifier_unregister(&pasid_state->mn, pasid_state->mm);   in free_pasid_states()
    366  static struct pasid_state *mn_to_state(struct mmu_notifier *mn)   in mn_to_state() argument
    368  return container_of(mn, struct pasid_state, mn);   in mn_to_state()
    371  static void __mn_flush_page(struct mmu_notifier *mn,   in __mn_flush_page() argument
    377  pasid_state = mn_to_state(mn);   in __mn_flush_page()
    383  static int mn_clear_flush_young(struct mmu_notifier *mn,   in mn_clear_flush_young() argument
    389  __mn_flush_page(mn, start);   in mn_clear_flush_young()
    394  static void mn_invalidate_range(struct mmu_notifier *mn,   in mn_invalidate_range() argument
    401  pasid_state = mn_to_state(mn);   in mn_invalidate_range()
    [all …]
/Linux-v4.19/drivers/infiniband/hw/hfi1/

D | mmu_rb.c |
     56  struct mmu_notifier mn;   member
    112  INIT_HLIST_NODE(&handlr->mn.hlist);   in hfi1_mmu_rb_register()
    114  handlr->mn.ops = &mn_opts;   in hfi1_mmu_rb_register()
    121  ret = mmu_notifier_register(&handlr->mn, handlr->mm);   in hfi1_mmu_rb_register()
    139  mmu_notifier_unregister(&handler->mn, handler->mm);   in hfi1_mmu_rb_unregister()
    287  static int mmu_notifier_range_start(struct mmu_notifier *mn,   in mmu_notifier_range_start() argument
    294  container_of(mn, struct mmu_rb_handler, mn);   in mmu_notifier_range_start()
/Linux-v4.19/drivers/infiniband/core/

D | umem_odp.c |
    176  static void ib_umem_notifier_release(struct mmu_notifier *mn,   in ib_umem_notifier_release() argument
    179  struct ib_ucontext *context = container_of(mn, struct ib_ucontext, mn);   in ib_umem_notifier_release()
    211  static int ib_umem_notifier_invalidate_range_start(struct mmu_notifier *mn,   in ib_umem_notifier_invalidate_range_start() argument
    217  struct ib_ucontext *context = container_of(mn, struct ib_ucontext, mn);   in ib_umem_notifier_invalidate_range_start()
    245  static void ib_umem_notifier_invalidate_range_end(struct mmu_notifier *mn,   in ib_umem_notifier_invalidate_range_end() argument
    250  struct ib_ucontext *context = container_of(mn, struct ib_ucontext, mn);   in ib_umem_notifier_invalidate_range_end()
    432  INIT_HLIST_NODE(&context->mn.hlist);   in ib_umem_odp_get()
    433  context->mn.ops = &ib_umem_notifiers;   in ib_umem_odp_get()
    439  ret_val = mmu_notifier_register(&context->mn, mm);   in ib_umem_odp_get()
    522  mmu_notifier_unregister(&context->mn, owning_mm);   in ib_umem_odp_release()
/Linux-v4.19/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/

D | gk20a.c |
     53  struct nvkm_mm_node *mn;   member
    130  return (u64)gk20a_instobj(memory)->mn->offset << 12;   in gk20a_instobj_addr()
    136  return (u64)gk20a_instobj(memory)->mn->length << 12;   in gk20a_instobj_size()
    288  .mem = node->mn,   in gk20a_instobj_map()
    304  dma_free_attrs(dev, (u64)node->base.mn->length << PAGE_SHIFT,   in gk20a_instobj_dtor_dma()
    317  struct nvkm_mm_node *r = node->base.mn;   in gk20a_instobj_dtor_iommu()
    335  for (i = 0; i < node->base.mn->length; i++) {   in gk20a_instobj_dtor_iommu()
    416  node->base.mn = &node->r;   in gk20a_instobj_ctor_dma()
    493  node->base.mn = r;   in gk20a_instobj_ctor_iommu()
    542  size, align, (u64)node->mn->offset << 12);   in gk20a_instobj_new()
/Linux-v4.19/drivers/scsi/qla2xxx/

D | qla_bsg.c |
     992  struct verify_chip_entry_84xx *mn = NULL;   in qla84xx_updatefw() local
    1038  mn = dma_pool_zalloc(ha->s_dma_pool, GFP_KERNEL, &mn_dma);   in qla84xx_updatefw()
    1039  if (!mn) {   in qla84xx_updatefw()
    1049  mn->entry_type = VERIFY_CHIP_IOCB_TYPE;   in qla84xx_updatefw()
    1050  mn->entry_count = 1;   in qla84xx_updatefw()
    1056  mn->options = cpu_to_le16(options);   in qla84xx_updatefw()
    1057  mn->fw_ver = cpu_to_le32(fw_ver);   in qla84xx_updatefw()
    1058  mn->fw_size = cpu_to_le32(data_len);   in qla84xx_updatefw()
    1059  mn->fw_seq_size = cpu_to_le32(data_len);   in qla84xx_updatefw()
    1060  mn->dseg_address[0] = cpu_to_le32(LSD(fw_dma));   in qla84xx_updatefw()
    [all …]
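
The qla84xx_updatefw() hits show the usual shape of building a firmware-verify IOCB: allocate it zeroed from the HBA's DMA pool, fill the fields in little-endian order with cpu_to_le16/32(), and split the 64-bit DMA address of the firmware buffer into two 32-bit dwords. A generic sketch of that fill pattern; struct fw_verify_cmd and build_fw_verify() are illustrative, not the real struct verify_chip_entry_84xx layout, and lower_32_bits()/upper_32_bits() stand in for the LSD()-style helper seen at line 1060:

#include <asm/byteorder.h>
#include <linux/dmapool.h>
#include <linux/gfp.h>
#include <linux/kernel.h>

/* Illustrative layout; the real command is struct verify_chip_entry_84xx. */
struct fw_verify_cmd {
        u8      entry_type;             /* IOCB opcode, e.g. VERIFY_CHIP_IOCB_TYPE */
        u8      entry_count;
        __le16  options;
        __le32  fw_ver;
        __le32  fw_size;
        __le32  fw_seq_size;
        __le32  dseg_address[2];        /* low / high dwords of the data segment */
};

static struct fw_verify_cmd *build_fw_verify(struct dma_pool *pool,
                                             dma_addr_t *cmd_dma, dma_addr_t fw_dma,
                                             u32 fw_ver, u32 data_len, u16 options)
{
        /* zeroed allocation from the coherent pool, as dma_pool_zalloc() above */
        struct fw_verify_cmd *cmd = dma_pool_zalloc(pool, GFP_KERNEL, cmd_dma);

        if (!cmd)
                return NULL;

        cmd->entry_count     = 1;
        cmd->options         = cpu_to_le16(options);
        cmd->fw_ver          = cpu_to_le32(fw_ver);
        cmd->fw_size         = cpu_to_le32(data_len);
        cmd->fw_seq_size     = cpu_to_le32(data_len);
        cmd->dseg_address[0] = cpu_to_le32(lower_32_bits(fw_dma));
        cmd->dseg_address[1] = cpu_to_le32(upper_32_bits(fw_dma));
        return cmd;
}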
/Linux-v4.19/arch/powerpc/platforms/powernv/

D | npu-dma.c |
    415  struct mmu_notifier mn;   member
    532  #define mn_to_npu_context(x) container_of(x, struct npu_context, mn)
    657  static void pnv_npu2_mn_release(struct mmu_notifier *mn,   in pnv_npu2_mn_release() argument
    660  struct npu_context *npu_context = mn_to_npu_context(mn);   in pnv_npu2_mn_release()
    673  static void pnv_npu2_mn_change_pte(struct mmu_notifier *mn,   in pnv_npu2_mn_change_pte() argument
    678  struct npu_context *npu_context = mn_to_npu_context(mn);   in pnv_npu2_mn_change_pte()
    683  static void pnv_npu2_mn_invalidate_range(struct mmu_notifier *mn,   in pnv_npu2_mn_invalidate_range() argument
    687  struct npu_context *npu_context = mn_to_npu_context(mn);   in pnv_npu2_mn_invalidate_range()
    812  npu_context->mn.ops = &nv_nmmu_notifier_ops;   in pnv_npu2_init_context()
    813  rc = __mmu_notifier_register(&npu_context->mn, mm);   in pnv_npu2_init_context()
    [all …]
/Linux-v4.19/arch/x86/kernel/apic/

D | x2apic_uv_x.c |
    1116  struct mn {   struct
    1123  static void get_mn(struct mn *mnp)   in get_mn() argument
    1154  struct mn mn;   in uv_init_hub_info() local
    1156  get_mn(&mn);   in uv_init_hub_info()
    1157  hi->gpa_mask = mn.m_val ?   in uv_init_hub_info()
    1158  (1UL << (mn.m_val + mn.n_val)) - 1 :   in uv_init_hub_info()
    1161  hi->m_val = mn.m_val;   in uv_init_hub_info()
    1162  hi->n_val = mn.n_val;   in uv_init_hub_info()
    1163  hi->m_shift = mn.m_shift;   in uv_init_hub_info()
    1164  hi->n_lshift = mn.n_lshift ? mn.n_lshift : 0;   in uv_init_hub_info()
    [all …]
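
get_mn() fills struct mn with the node and memory field widths read from the UV hardware, and uv_init_hub_info() derives the global physical address mask from them: with m_val memory bits and n_val node bits, gpa_mask covers the low m_val + n_val bits. A tiny worked example with made-up widths (the real values come from the MMRs read in get_mn()):

/* Hypothetical widths purely for illustration: 26 memory bits + 4 node
 * bits give a 30-bit global physical address mask, mirroring the
 * expression at lines 1157-1158 above. */
static unsigned long example_gpa_mask(void)
{
        unsigned int m_val = 26, n_val = 4;

        return (1UL << (m_val + n_val)) - 1;    /* == 0x3fffffff */
}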