
Searched refs:ptl (Results 1 – 25 of 58) sorted by relevance


/Linux-v5.15/drivers/platform/surface/aggregator/
ssh_packet_layer.c
335 ptl_info(packet->ptl, "packet error injection: dropping ACK packet %p\n", in __ssh_ptl_should_drop_ack_packet()
347 ptl_info(packet->ptl, "packet error injection: dropping NAK packet %p\n", in __ssh_ptl_should_drop_nak_packet()
359 ptl_info(packet->ptl, in __ssh_ptl_should_drop_dsq_packet()
387 static int ssh_ptl_write_buf(struct ssh_ptl *ptl, struct ssh_packet *packet, in ssh_ptl_write_buf() argument
395 ptl_info(packet->ptl, in ssh_ptl_write_buf()
402 return serdev_device_write_buf(ptl->serdev, buf, count); in ssh_ptl_write_buf()
419 ptl_info(packet->ptl, in ssh_ptl_tx_inject_invalid_data()
431 static void ssh_ptl_rx_inject_invalid_syn(struct ssh_ptl *ptl, in ssh_ptl_rx_inject_invalid_syn() argument
448 static void ssh_ptl_rx_inject_invalid_data(struct ssh_ptl *ptl, in ssh_ptl_rx_inject_invalid_data() argument
486 static inline int ssh_ptl_write_buf(struct ssh_ptl *ptl, in ssh_ptl_write_buf() argument
[all …]
ssh_packet_layer.h
139 int ssh_ptl_init(struct ssh_ptl *ptl, struct serdev_device *serdev,
142 void ssh_ptl_destroy(struct ssh_ptl *ptl);
151 static inline struct device *ssh_ptl_get_device(struct ssh_ptl *ptl) in ssh_ptl_get_device() argument
153 return ptl->serdev ? &ptl->serdev->dev : NULL; in ssh_ptl_get_device()
156 int ssh_ptl_tx_start(struct ssh_ptl *ptl);
157 int ssh_ptl_tx_stop(struct ssh_ptl *ptl);
158 int ssh_ptl_rx_start(struct ssh_ptl *ptl);
159 int ssh_ptl_rx_stop(struct ssh_ptl *ptl);
160 void ssh_ptl_shutdown(struct ssh_ptl *ptl);
162 int ssh_ptl_submit(struct ssh_ptl *ptl, struct ssh_packet *p);
[all …]
ssh_request_layer.h
66 struct ssh_ptl ptl; member
94 #define rtl_dbg(r, fmt, ...) ptl_dbg(&(r)->ptl, fmt, ##__VA_ARGS__)
95 #define rtl_info(p, fmt, ...) ptl_info(&(p)->ptl, fmt, ##__VA_ARGS__)
96 #define rtl_warn(r, fmt, ...) ptl_warn(&(r)->ptl, fmt, ##__VA_ARGS__)
97 #define rtl_err(r, fmt, ...) ptl_err(&(r)->ptl, fmt, ##__VA_ARGS__)
112 return ssh_ptl_get_device(&rtl->ptl); in ssh_rtl_get_device()
123 struct ssh_ptl *ptl; in ssh_request_rtl() local
125 ptl = READ_ONCE(rqst->packet.ptl); in ssh_request_rtl()
126 return likely(ptl) ? to_ssh_rtl(ptl, ptl) : NULL; in ssh_request_rtl()
ssh_request_layer.c
258 status = ssh_ptl_submit(&rtl->ptl, &rqst->packet); in ssh_rtl_tx_try_process_one()
391 if (cmpxchg(&rqst->packet.ptl, NULL, &rtl->ptl)) { in ssh_rtl_submit()
619 if (flags == fixed && !READ_ONCE(r->packet.ptl)) { in ssh_rtl_cancel_nonpending()
674 if (!READ_ONCE(r->packet.ptl)) { in ssh_rtl_cancel_pending()
911 struct ssh_rtl *rtl = to_ssh_rtl(p, ptl); in ssh_rtl_rx_command()
1018 status = ssh_ptl_init(&rtl->ptl, serdev, &ptl_ops); in ssh_rtl_init()
1052 ssh_ptl_destroy(&rtl->ptl); in ssh_rtl_destroy()
1065 status = ssh_ptl_tx_start(&rtl->ptl); in ssh_rtl_start()
1071 status = ssh_ptl_rx_start(&rtl->ptl); in ssh_rtl_start()
1074 ssh_ptl_tx_stop(&rtl->ptl); in ssh_rtl_start()
[all …]
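
The ssh_rtl_start() and ssh_rtl_destroy() hits above show the intended ordering of the packet transport layer calls listed in ssh_packet_layer.h: start the transmitter, then the receiver, and roll the transmitter back if the receiver fails. Below is a minimal sketch of that ordering for an already initialized struct ssh_ptl; the wrapper name start_packet_layer() is hypothetical and not part of the driver.

#include "ssh_packet_layer.h"

/*
 * Sketch only: mirrors the start/rollback ordering visible in the
 * ssh_rtl_start() excerpt above. start_packet_layer() is a hypothetical
 * helper, not a function in the aggregator driver.
 */
static int start_packet_layer(struct ssh_ptl *ptl)
{
	int status;

	status = ssh_ptl_tx_start(ptl);		/* bring up the transmitter */
	if (status)
		return status;

	status = ssh_ptl_rx_start(ptl);		/* bring up the receiver */
	if (status)
		ssh_ptl_tx_stop(ptl);		/* roll back TX on failure, as in ssh_rtl_start() */

	return status;
}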
/Linux-v5.15/drivers/media/platform/allegro-dvt/
nal-hevc.c
171 struct nal_hevc_profile_tier_level *ptl) in nal_hevc_rbsp_profile_tier_level() argument
176 rbsp_bits(rbsp, 2, &ptl->general_profile_space); in nal_hevc_rbsp_profile_tier_level()
177 rbsp_bit(rbsp, &ptl->general_tier_flag); in nal_hevc_rbsp_profile_tier_level()
178 rbsp_bits(rbsp, 5, &ptl->general_profile_idc); in nal_hevc_rbsp_profile_tier_level()
180 rbsp_bit(rbsp, &ptl->general_profile_compatibility_flag[i]); in nal_hevc_rbsp_profile_tier_level()
181 rbsp_bit(rbsp, &ptl->general_progressive_source_flag); in nal_hevc_rbsp_profile_tier_level()
182 rbsp_bit(rbsp, &ptl->general_interlaced_source_flag); in nal_hevc_rbsp_profile_tier_level()
183 rbsp_bit(rbsp, &ptl->general_non_packed_constraint_flag); in nal_hevc_rbsp_profile_tier_level()
184 rbsp_bit(rbsp, &ptl->general_frame_only_constraint_flag); in nal_hevc_rbsp_profile_tier_level()
185 if (ptl->general_profile_idc == 4 || in nal_hevc_rbsp_profile_tier_level()
[all …]
/Linux-v5.15/mm/
page_vma_mapped.c
51 pvmw->ptl = pte_lockptr(pvmw->vma->vm_mm, pvmw->pmd); in map_pte()
52 spin_lock(pvmw->ptl); in map_pte()
177 pvmw->ptl = huge_pte_lockptr(page_hstate(page), mm, pvmw->pte); in page_vma_mapped_walk()
178 spin_lock(pvmw->ptl); in page_vma_mapped_walk()
222 pvmw->ptl = pmd_lock(mm, pvmw->pmd); in page_vma_mapped_walk()
244 spin_unlock(pvmw->ptl); in page_vma_mapped_walk()
245 pvmw->ptl = NULL; in page_vma_mapped_walk()
254 spinlock_t *ptl = pmd_lock(mm, pvmw->pmd); in page_vma_mapped_walk() local
256 spin_unlock(ptl); in page_vma_mapped_walk()
273 if (pvmw->ptl) { in page_vma_mapped_walk()
[all …]
huge_memory.c
628 vmf->ptl = pmd_lock(vma->vm_mm, vmf->pmd); in __do_huge_pmd_anonymous_page()
640 spin_unlock(vmf->ptl); in __do_huge_pmd_anonymous_page()
657 spin_unlock(vmf->ptl); in __do_huge_pmd_anonymous_page()
664 spin_unlock(vmf->ptl); in __do_huge_pmd_anonymous_page()
752 vmf->ptl = pmd_lock(vma->vm_mm, vmf->pmd); in do_huge_pmd_anonymous_page()
757 spin_unlock(vmf->ptl); in do_huge_pmd_anonymous_page()
760 spin_unlock(vmf->ptl); in do_huge_pmd_anonymous_page()
768 spin_unlock(vmf->ptl); in do_huge_pmd_anonymous_page()
771 spin_unlock(vmf->ptl); in do_huge_pmd_anonymous_page()
792 spinlock_t *ptl; in insert_pfn_pmd() local
[all …]
memory.c
438 spinlock_t *ptl; in __pte_alloc() local
458 ptl = pmd_lock(mm, pmd); in __pte_alloc()
464 spin_unlock(ptl); in __pte_alloc()
1312 spinlock_t *ptl; in zap_pte_range() local
1320 start_pte = pte_offset_map_lock(mm, pmd, addr, &ptl); in zap_pte_range()
1422 pte_unmap_unlock(start_pte, ptl); in zap_pte_range()
1463 spinlock_t *ptl = pmd_lock(tlb->mm, pmd); in zap_pmd_range() local
1469 spin_unlock(ptl); in zap_pmd_range()
1730 spinlock_t **ptl) in __get_locked_pte() argument
1736 return pte_alloc_map_lock(mm, pmd, addr, ptl); in __get_locked_pte()
[all …]
mincore.c
102 spinlock_t *ptl; in mincore_pte_range() local
108 ptl = pmd_trans_huge_lock(pmd, vma); in mincore_pte_range()
109 if (ptl) { in mincore_pte_range()
111 spin_unlock(ptl); in mincore_pte_range()
120 ptep = pte_offset_map_lock(walk->mm, pmd, addr, &ptl); in mincore_pte_range()
150 pte_unmap_unlock(ptep - 1, ptl); in mincore_pte_range()
madvise.c
201 spinlock_t *ptl; in swapin_walk_pmd_entry() local
203 orig_pte = pte_offset_map_lock(vma->vm_mm, pmd, start, &ptl); in swapin_walk_pmd_entry()
205 pte_unmap_unlock(orig_pte, ptl); in swapin_walk_pmd_entry()
318 spinlock_t *ptl; in madvise_cold_or_pageout_pte_range() local
331 ptl = pmd_trans_huge_lock(pmd, vma); in madvise_cold_or_pageout_pte_range()
332 if (!ptl) in madvise_cold_or_pageout_pte_range()
355 spin_unlock(ptl); in madvise_cold_or_pageout_pte_range()
385 spin_unlock(ptl); in madvise_cold_or_pageout_pte_range()
396 orig_pte = pte = pte_offset_map_lock(vma->vm_mm, pmd, addr, &ptl); in madvise_cold_or_pageout_pte_range()
424 pte_unmap_unlock(orig_pte, ptl); in madvise_cold_or_pageout_pte_range()
[all …]
hmm.c
420 spinlock_t *ptl = pud_trans_huge_lock(pudp, walk->vma); in hmm_vma_walk_pud() local
422 if (!ptl) in hmm_vma_walk_pud()
430 spin_unlock(ptl); in hmm_vma_walk_pud()
441 spin_unlock(ptl); in hmm_vma_walk_pud()
453 spin_unlock(ptl); in hmm_vma_walk_pud()
467 spin_unlock(ptl); in hmm_vma_walk_pud()
486 spinlock_t *ptl; in hmm_vma_walk_hugetlb_entry() local
489 ptl = huge_pte_lock(hstate_vma(vma), walk->mm, pte); in hmm_vma_walk_hugetlb_entry()
499 spin_unlock(ptl); in hmm_vma_walk_hugetlb_entry()
507 spin_unlock(ptl); in hmm_vma_walk_hugetlb_entry()
hugetlb.c
4437 spinlock_t *ptl; in __unmap_hugepage_range() local
4467 ptl = huge_pte_lock(h, mm, ptep); in __unmap_hugepage_range()
4469 spin_unlock(ptl); in __unmap_hugepage_range()
4479 spin_unlock(ptl); in __unmap_hugepage_range()
4489 spin_unlock(ptl); in __unmap_hugepage_range()
4501 spin_unlock(ptl); in __unmap_hugepage_range()
4520 spin_unlock(ptl); in __unmap_hugepage_range()
4625 struct page *pagecache_page, spinlock_t *ptl) in hugetlb_cow() argument
4666 spin_unlock(ptl); in hugetlb_cow()
4702 spin_lock(ptl); in hugetlb_cow()
[all …]
migrate.c
290 spinlock_t *ptl) in __migration_entry_wait() argument
296 spin_lock(ptl); in __migration_entry_wait()
315 pte_unmap_unlock(ptep, ptl); in __migration_entry_wait()
319 pte_unmap_unlock(ptep, ptl); in __migration_entry_wait()
325 spinlock_t *ptl = pte_lockptr(mm, pmd); in migration_entry_wait() local
327 __migration_entry_wait(mm, ptep, ptl); in migration_entry_wait()
333 spinlock_t *ptl = huge_pte_lockptr(hstate_vma(vma), mm, pte); in migration_entry_wait_huge() local
334 __migration_entry_wait(mm, pte, ptl); in migration_entry_wait_huge()
340 spinlock_t *ptl; in pmd_migration_entry_wait() local
343 ptl = pmd_lock(mm, pmd); in pmd_migration_entry_wait()
[all …]
/Linux-v5.15/arch/arm/lib/
uaccess_with_memcpy.c
31 spinlock_t *ptl; in pin_page_for_write() local
60 ptl = &current->mm->page_table_lock; in pin_page_for_write()
61 spin_lock(ptl); in pin_page_for_write()
64 spin_unlock(ptl); in pin_page_for_write()
69 *ptlp = ptl; in pin_page_for_write()
76 pte = pte_offset_map_lock(current->mm, pmd, addr, &ptl); in pin_page_for_write()
79 pte_unmap_unlock(pte, ptl); in pin_page_for_write()
84 *ptlp = ptl; in pin_page_for_write()
107 spinlock_t *ptl; in __copy_to_user_memcpy() local
110 while (!pin_page_for_write(to, &pte, &ptl)) { in __copy_to_user_memcpy()
[all …]
/Linux-v5.15/arch/arm/mm/
fault-armv.c
70 static inline void do_pte_lock(spinlock_t *ptl) in do_pte_lock() argument
76 spin_lock_nested(ptl, SINGLE_DEPTH_NESTING); in do_pte_lock()
79 static inline void do_pte_unlock(spinlock_t *ptl) in do_pte_unlock() argument
81 spin_unlock(ptl); in do_pte_unlock()
84 static inline void do_pte_lock(spinlock_t *ptl) {} in do_pte_lock() argument
85 static inline void do_pte_unlock(spinlock_t *ptl) {} in do_pte_unlock() argument
91 spinlock_t *ptl; in adjust_pte() local
120 ptl = pte_lockptr(vma->vm_mm, pmd); in adjust_pte()
122 do_pte_lock(ptl); in adjust_pte()
126 do_pte_unlock(ptl); in adjust_pte()
/Linux-v5.15/arch/powerpc/mm/
hugetlbpage.c
46 unsigned int pshift, spinlock_t *ptl) in __hugepte_alloc() argument
81 spin_lock(ptl); in __hugepte_alloc()
101 spin_unlock(ptl); in __hugepte_alloc()
119 spinlock_t *ptl; in huge_pte_alloc() local
133 ptl = &mm->page_table_lock; in huge_pte_alloc()
143 ptl = pud_lockptr(mm, pu); in huge_pte_alloc()
154 ptl = pmd_lockptr(mm, pm); in huge_pte_alloc()
161 ptl = &mm->page_table_lock; in huge_pte_alloc()
169 ptl = pud_lockptr(mm, pu); in huge_pte_alloc()
176 ptl = pmd_lockptr(mm, pm); in huge_pte_alloc()
[all …]
/Linux-v5.15/arch/s390/mm/
pgtable.c
776 spinlock_t *ptl; in set_guest_storage_key() local
785 ptl = pmd_lock(mm, pmdp); in set_guest_storage_key()
787 spin_unlock(ptl); in set_guest_storage_key()
799 spin_unlock(ptl); in set_guest_storage_key()
802 spin_unlock(ptl); in set_guest_storage_key()
804 ptep = pte_alloc_map_lock(mm, pmdp, addr, &ptl); in set_guest_storage_key()
832 pte_unmap_unlock(ptep, ptl); in set_guest_storage_key()
877 spinlock_t *ptl; in reset_guest_reference_bit() local
888 ptl = pmd_lock(mm, pmdp); in reset_guest_reference_bit()
890 spin_unlock(ptl); in reset_guest_reference_bit()
[all …]
gmap.c
543 spinlock_t *ptl; in __gmap_link() local
599 ptl = pmd_lock(mm, pmd); in __gmap_link()
621 spin_unlock(ptl); in __gmap_link()
676 spinlock_t *ptl; in __gmap_zap() local
685 ptep = get_locked_pte(gmap->mm, vmaddr, &ptl); in __gmap_zap()
688 pte_unmap_unlock(ptep, ptl); in __gmap_zap()
849 spinlock_t **ptl) in gmap_pte_op_walk() argument
858 return pte_alloc_map_lock(gmap->mm, (pmd_t *) table, gaddr, ptl); in gmap_pte_op_walk()
894 static void gmap_pte_op_end(spinlock_t *ptl) in gmap_pte_op_end() argument
896 if (ptl) in gmap_pte_op_end()
[all …]
/Linux-v5.15/mm/damon/
vaddr.c
455 spinlock_t *ptl; in damon_mkold_pmd_entry() local
458 ptl = pmd_lock(walk->mm, pmd); in damon_mkold_pmd_entry()
461 spin_unlock(ptl); in damon_mkold_pmd_entry()
464 spin_unlock(ptl); in damon_mkold_pmd_entry()
469 pte = pte_offset_map_lock(walk->mm, pmd, addr, &ptl); in damon_mkold_pmd_entry()
474 pte_unmap_unlock(pte, ptl); in damon_mkold_pmd_entry()
526 spinlock_t *ptl; in damon_young_pmd_entry() local
532 ptl = pmd_lock(walk->mm, pmd); in damon_young_pmd_entry()
534 spin_unlock(ptl); in damon_young_pmd_entry()
548 spin_unlock(ptl); in damon_young_pmd_entry()
[all …]
/Linux-v5.15/arch/s390/pci/
pci_mmio.c
125 spinlock_t *ptl; in SYSCALL_DEFINE3() local
171 ret = follow_pte(vma->vm_mm, mmio_addr, &ptep, &ptl); in SYSCALL_DEFINE3()
183 pte_unmap_unlock(ptep, ptl); in SYSCALL_DEFINE3()
267 spinlock_t *ptl; in SYSCALL_DEFINE3() local
310 ret = follow_pte(vma->vm_mm, mmio_addr, &ptep, &ptl); in SYSCALL_DEFINE3()
324 pte_unmap_unlock(ptep, ptl); in SYSCALL_DEFINE3()
/Linux-v5.15/fs/proc/
task_mmu.c
576 spinlock_t *ptl; in smaps_pte_range() local
578 ptl = pmd_trans_huge_lock(pmd, vma); in smaps_pte_range()
579 if (ptl) { in smaps_pte_range()
581 spin_unlock(ptl); in smaps_pte_range()
592 pte = pte_offset_map_lock(vma->vm_mm, pmd, addr, &ptl); in smaps_pte_range()
595 pte_unmap_unlock(pte - 1, ptl); in smaps_pte_range()
1121 spinlock_t *ptl; in clear_refs_pte_range() local
1124 ptl = pmd_trans_huge_lock(pmd, vma); in clear_refs_pte_range()
1125 if (ptl) { in clear_refs_pte_range()
1141 spin_unlock(ptl); in clear_refs_pte_range()
[all …]
/Linux-v5.15/Documentation/vm/
split_page_table_lock.rst
63 This field shares storage with page->ptl.
80 page->ptl
83 page->ptl is used to access split page table lock, where 'page' is struct
92 - if size of spinlock_t is bigger then size of long, we use page->ptl as
100 Please, never access page->ptl directly -- use appropriate helper.
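
The documentation excerpt above says to go through the helpers rather than touching page->ptl directly. Below is a minimal sketch of that pattern using pte_offset_map_lock()/pte_unmap_unlock(), the same helpers that appear in the mm/ hits; the function name touch_one_pte() is hypothetical, not kernel code.

#include <linux/mm.h>

/*
 * Sketch of the split page table lock usage described above: the helper
 * maps the PTE and takes the per-table lock, and pte_unmap_unlock() drops
 * both. touch_one_pte() is an illustrative name only.
 */
static int touch_one_pte(struct mm_struct *mm, pmd_t *pmd, unsigned long addr)
{
	spinlock_t *ptl;
	pte_t *pte;

	pte = pte_offset_map_lock(mm, pmd, addr, &ptl);	/* maps the PTE, locks ptl */

	/* ... read or update *pte while holding ptl ... */

	pte_unmap_unlock(pte, ptl);			/* unlocks ptl, unmaps the PTE */
	return 0;
}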
/Linux-v5.15/include/linux/
rmap.h
212 spinlock_t *ptl; member
221 if (pvmw->ptl) in page_vma_mapped_walk_done()
222 spin_unlock(pvmw->ptl); in page_vma_mapped_walk_done()
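
The rmap.h excerpt shows that page_vma_mapped_walk_done() drops the ptl taken by the walk implemented in mm/page_vma_mapped.c. A minimal sketch of how the walk is normally driven in Linux v5.15 follows; the struct fields and helpers are real, but the surrounding function and the stop_early condition are illustrative.

#include <linux/rmap.h>

/*
 * Sketch of driving page_vma_mapped_walk(): each successful iteration
 * returns with pvmw.ptl held and pvmw.pte (or pvmw.pmd) valid; breaking
 * out early must go through page_vma_mapped_walk_done() to drop the lock.
 * walk_mappings() and stop_early stand in for caller-specific logic.
 */
static void walk_mappings(struct page *page, struct vm_area_struct *vma,
			  unsigned long address, bool stop_early)
{
	struct page_vma_mapped_walk pvmw = {
		.page = page,
		.vma = vma,
		.address = address,
	};

	while (page_vma_mapped_walk(&pvmw)) {
		/* pvmw.ptl is held here; inspect pvmw.pte / pvmw.pmd */
		if (stop_early) {
			page_vma_mapped_walk_done(&pvmw);	/* drops pvmw.ptl */
			break;
		}
	}
}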
/Linux-v5.15/arch/x86/kernel/
ldt.c
292 spinlock_t *ptl; in map_ldt_struct() local
326 ptep = get_locked_pte(mm, va, &ptl); in map_ldt_struct()
339 pte_unmap_unlock(ptep, ptl); in map_ldt_struct()
365 spinlock_t *ptl; in unmap_ldt_struct() local
369 ptep = get_locked_pte(mm, va, &ptl); in unmap_ldt_struct()
371 pte_unmap_unlock(ptep, ptl); in unmap_ldt_struct()
/Linux-v5.15/arch/m68k/kernel/
sys_m68k.c
474 spinlock_t *ptl; in sys_atomic_cmpxchg_32() local
490 pte = pte_offset_map_lock(mm, pmd, (unsigned long)mem, &ptl); in sys_atomic_cmpxchg_32()
493 pte_unmap_unlock(pte, ptl); in sys_atomic_cmpxchg_32()
505 pte_unmap_unlock(pte, ptl); in sys_atomic_cmpxchg_32()
