Searched refs:ptl (Results 1 – 25 of 58) sorted by relevance

/linux/drivers/platform/surface/aggregator/
ssh_packet_layer.c
741 struct ssh_ptl *ptl = packet->ptl; in __ssh_ptl_queue_push() local
776 struct ssh_ptl *ptl = packet->ptl; in ssh_ptl_queue_remove() local
793 struct ssh_ptl *ptl = p->ptl; in ssh_ptl_pending_push() local
833 struct ssh_ptl *ptl = packet->ptl; in ssh_ptl_pending_remove() local
853 struct ssh_ptl *ptl = READ_ONCE(p->ptl); in __ssh_ptl_complete() local
885 struct ssh_ptl *ptl = packet->ptl; in ssh_ptl_tx_can_process() local
975 struct ssh_ptl *ptl = packet->ptl; in ssh_ptl_tx_compl_success() local
1336 WRITE_ONCE(p->ptl, ptl); in ssh_ptl_submit()
1616 ptl->rx.blocked.seqs[ptl->rx.blocked.offset] = seq; in ssh_ptl_rx_retransmit_check()
1630 ptl->ops.data_received(ptl, payload); in ssh_ptl_rx_dataframe()
[all …]
ssh_packet_layer.h
142 void ssh_ptl_destroy(struct ssh_ptl *ptl);
153 return ptl->serdev ? &ptl->serdev->dev : NULL; in ssh_ptl_get_device()
156 int ssh_ptl_tx_start(struct ssh_ptl *ptl);
157 int ssh_ptl_tx_stop(struct ssh_ptl *ptl);
158 int ssh_ptl_rx_start(struct ssh_ptl *ptl);
159 int ssh_ptl_rx_stop(struct ssh_ptl *ptl);
160 void ssh_ptl_shutdown(struct ssh_ptl *ptl);
162 int ssh_ptl_submit(struct ssh_ptl *ptl, struct ssh_packet *p);
176 static inline void ssh_ptl_tx_wakeup_transfer(struct ssh_ptl *ptl) in ssh_ptl_tx_wakeup_transfer() argument
178 if (test_bit(SSH_PTL_SF_SHUTDOWN_BIT, &ptl->state)) in ssh_ptl_tx_wakeup_transfer()
[all …]
ssh_request_layer.h
66 struct ssh_ptl ptl; member
94 #define rtl_dbg(r, fmt, ...) ptl_dbg(&(r)->ptl, fmt, ##__VA_ARGS__)
95 #define rtl_info(p, fmt, ...) ptl_info(&(p)->ptl, fmt, ##__VA_ARGS__)
96 #define rtl_warn(r, fmt, ...) ptl_warn(&(r)->ptl, fmt, ##__VA_ARGS__)
97 #define rtl_err(r, fmt, ...) ptl_err(&(r)->ptl, fmt, ##__VA_ARGS__)
112 return ssh_ptl_get_device(&rtl->ptl); in ssh_rtl_get_device()
123 struct ssh_ptl *ptl; in ssh_request_rtl() local
125 ptl = READ_ONCE(rqst->packet.ptl); in ssh_request_rtl()
126 return likely(ptl) ? to_ssh_rtl(ptl, ptl) : NULL; in ssh_request_rtl()
ssh_request_layer.c
258 status = ssh_ptl_submit(&rtl->ptl, &rqst->packet); in ssh_rtl_tx_try_process_one()
391 if (cmpxchg(&rqst->packet.ptl, NULL, &rtl->ptl)) { in ssh_rtl_submit()
619 if (flags == fixed && !READ_ONCE(r->packet.ptl)) { in ssh_rtl_cancel_nonpending()
674 if (!READ_ONCE(r->packet.ptl)) { in ssh_rtl_cancel_pending()
911 struct ssh_rtl *rtl = to_ssh_rtl(p, ptl); in ssh_rtl_rx_command()
1018 status = ssh_ptl_init(&rtl->ptl, serdev, &ptl_ops); in ssh_rtl_init()
1052 ssh_ptl_destroy(&rtl->ptl); in ssh_rtl_destroy()
1065 status = ssh_ptl_tx_start(&rtl->ptl); in ssh_rtl_start()
1071 status = ssh_ptl_rx_start(&rtl->ptl); in ssh_rtl_start()
1074 ssh_ptl_tx_stop(&rtl->ptl); in ssh_rtl_start()
[all …]
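
Taken together, the aggregator hits show one recurring pattern: struct ssh_rtl embeds a struct ssh_ptl, each packet stores a pointer to the packet layer it was submitted to (set with WRITE_ONCE()/cmpxchg(), read with READ_ONCE()), and the request layer is recovered from that pointer with container_of(). Below is a minimal userspace sketch of the pattern, not the real driver code: the structs are simplified stand-ins, container_of() is the standard macro, and packet_to_rtl() is a hypothetical helper mirroring ssh_request_rtl().

    #include <stddef.h>

    /* Simplified stand-ins; only the fields relevant to the pattern are kept. */
    struct ssh_ptl { unsigned long state; };
    struct ssh_packet { struct ssh_ptl *ptl; };   /* set when the packet is submitted */
    struct ssh_rtl { struct ssh_ptl ptl; };       /* packet layer embedded in request layer */

    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))
    #define to_ssh_rtl(p, member) container_of(p, struct ssh_rtl, member)

    /*
     * Recover the owning request layer from a packet, or NULL if the packet
     * has not been submitted yet. The driver reads packet->ptl with READ_ONCE()
     * because submission can race with completion.
     */
    static struct ssh_rtl *packet_to_rtl(struct ssh_packet *p)
    {
            struct ssh_ptl *ptl = p->ptl;

            return ptl ? to_ssh_rtl(ptl, ptl) : NULL;
    }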
/linux/drivers/media/platform/allegro-dvt/
nal-hevc.c
115 if (ptl->general_profile_idc == 4 || in nal_hevc_rbsp_profile_tier_level()
117 ptl->general_profile_idc == 5 || in nal_hevc_rbsp_profile_tier_level()
119 ptl->general_profile_idc == 6 || in nal_hevc_rbsp_profile_tier_level()
121 ptl->general_profile_idc == 7 || in nal_hevc_rbsp_profile_tier_level()
123 ptl->general_profile_idc == 8 || in nal_hevc_rbsp_profile_tier_level()
125 ptl->general_profile_idc == 9 || in nal_hevc_rbsp_profile_tier_level()
127 ptl->general_profile_idc == 10 || in nal_hevc_rbsp_profile_tier_level()
138 if (ptl->general_profile_idc == 5 || in nal_hevc_rbsp_profile_tier_level()
140 ptl->general_profile_idc == 9 || in nal_hevc_rbsp_profile_tier_level()
161 if ((ptl->general_profile_idc >= 1 && ptl->general_profile_idc <= 5) || in nal_hevc_rbsp_profile_tier_level()
[all …]
/linux/mm/
page_vma_mapped.c
52 spin_lock(pvmw->ptl); in map_pte()
178 spin_lock(pvmw->ptl); in page_vma_mapped_walk()
244 spin_unlock(pvmw->ptl); in page_vma_mapped_walk()
245 pvmw->ptl = NULL; in page_vma_mapped_walk()
256 spin_unlock(ptl); in page_vma_mapped_walk()
273 if (pvmw->ptl) { in page_vma_mapped_walk()
274 spin_unlock(pvmw->ptl); in page_vma_mapped_walk()
275 pvmw->ptl = NULL; in page_vma_mapped_walk()
284 spin_lock(pvmw->ptl); in page_vma_mapped_walk()
288 if (!pvmw->ptl) { in page_vma_mapped_walk()
[all …]
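
The pvmw->ptl lock/unlock pairs above belong to the page_vma_mapped_walk() iterator: each successful iteration returns with the relevant page table mapped and its split lock held in pvmw.ptl, and the lock is dropped on the next iteration or by page_vma_mapped_walk_done(). A hedged sketch of the typical caller loop; visit_page_in_vma() and done_early() are hypothetical names, and the initializer fields match this kernel version (newer kernels use a pfn/folio instead of .page).

    static void visit_page_in_vma(struct page *page, struct vm_area_struct *vma,
                                  unsigned long address)
    {
            struct page_vma_mapped_walk pvmw = {
                    .page = page,
                    .vma = vma,
                    .address = address,
            };

            while (page_vma_mapped_walk(&pvmw)) {
                    /* pvmw.pte (or pvmw.pmd for THP) is valid and pvmw.ptl is held. */
                    if (done_early(&pvmw)) {                    /* hypothetical predicate */
                            page_vma_mapped_walk_done(&pvmw);   /* drops pvmw.ptl */
                            break;
                    }
            }
    }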
huge_memory.c
792 spinlock_t *ptl; in insert_pfn_pmd() local
893 spinlock_t *ptl; in insert_pfn_pud() local
1504 spinlock_t *ptl; in madvise_free_huge_pmd() local
1513 if (!ptl) in madvise_free_huge_pmd()
1584 spinlock_t *ptl; in zap_huge_pmd() local
1589 if (!ptl) in zap_huge_pmd()
1727 spinlock_t *ptl; in change_huge_pmd() local
1739 if (!ptl) in change_huge_pmd()
1837 return ptl; in __pmd_trans_huge_lock()
1854 return ptl; in __pud_trans_huge_lock()
[all …]
memory.c
459 spin_unlock(ptl); in pmd_install()
1315 spinlock_t *ptl; in zap_pte_range() local
1759 spinlock_t *ptl; in insert_page() local
2022 spinlock_t *ptl; in insert_pfn() local
2268 spinlock_t *ptl; in remap_pte_range() local
2497 spinlock_t *ptl; in apply_to_pte_range() local
3451 &vmf->ptl); in remove_device_exclusive_entry()
3607 &vmf->ptl); in do_swap_page()
3781 &vmf->ptl); in do_anonymous_page()
5449 if (!ptl) in ptlock_alloc()
[all …]
madvise.c
201 spinlock_t *ptl; in swapin_walk_pmd_entry() local
205 pte_unmap_unlock(orig_pte, ptl); in swapin_walk_pmd_entry()
318 spinlock_t *ptl; in madvise_cold_or_pageout_pte_range() local
332 if (!ptl) in madvise_cold_or_pageout_pte_range()
355 spin_unlock(ptl); in madvise_cold_or_pageout_pte_range()
385 spin_unlock(ptl); in madvise_cold_or_pageout_pte_range()
424 pte_unmap_unlock(orig_pte, ptl); in madvise_cold_or_pageout_pte_range()
473 pte_unmap_unlock(orig_pte, ptl); in madvise_cold_or_pageout_pte_range()
578 spinlock_t *ptl; in madvise_free_pte_range() local
635 pte_unmap_unlock(orig_pte, ptl); in madvise_free_pte_range()
[all …]
mincore.c
102 spinlock_t *ptl; in mincore_pte_range() local
108 ptl = pmd_trans_huge_lock(pmd, vma); in mincore_pte_range()
109 if (ptl) { in mincore_pte_range()
111 spin_unlock(ptl); in mincore_pte_range()
120 ptep = pte_offset_map_lock(walk->mm, pmd, addr, &ptl); in mincore_pte_range()
150 pte_unmap_unlock(ptep - 1, ptl); in mincore_pte_range()
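
mincore_pte_range() is a compact example of the two-level locking that most of the mm/ hits repeat: try the THP case first with pmd_trans_huge_lock(), which returns the held ptl or NULL, and otherwise map and lock the PTE page with pte_offset_map_lock() and release it with pte_unmap_unlock(). A simplified kernel-context sketch of that shape; pte_range_walk() is an illustrative name and the per-entry work is elided.

    static int pte_range_walk(pmd_t *pmd, unsigned long addr, unsigned long end,
                              struct vm_area_struct *vma)
    {
            spinlock_t *ptl;
            pte_t *ptep;

            ptl = pmd_trans_huge_lock(pmd, vma);
            if (ptl) {
                    /* the pmd is a stable huge entry while ptl is held */
                    spin_unlock(ptl);
                    return 0;
            }

            ptep = pte_offset_map_lock(vma->vm_mm, pmd, addr, &ptl);
            for (; addr != end; ptep++, addr += PAGE_SIZE) {
                    /* inspect or modify *ptep under the split PTE lock */
            }
            pte_unmap_unlock(ptep - 1, ptl);
            return 0;
    }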
hmm.c
420 spinlock_t *ptl = pud_trans_huge_lock(pudp, walk->vma); in hmm_vma_walk_pud() local
422 if (!ptl) in hmm_vma_walk_pud()
430 spin_unlock(ptl); in hmm_vma_walk_pud()
441 spin_unlock(ptl); in hmm_vma_walk_pud()
453 spin_unlock(ptl); in hmm_vma_walk_pud()
467 spin_unlock(ptl); in hmm_vma_walk_pud()
486 spinlock_t *ptl; in hmm_vma_walk_hugetlb_entry() local
489 ptl = huge_pte_lock(hstate_vma(vma), walk->mm, pte); in hmm_vma_walk_hugetlb_entry()
499 spin_unlock(ptl); in hmm_vma_walk_hugetlb_entry()
507 spin_unlock(ptl); in hmm_vma_walk_hugetlb_entry()
hugetlb.c
4937 spinlock_t *ptl; in __unmap_hugepage_range() local
5181 spin_unlock(ptl); in hugetlb_cow()
5254 spin_lock(ptl); in hugetlb_cow()
5389 spinlock_t *ptl; in hugetlb_no_page() local
5578 spinlock_t *ptl; in hugetlb_fault() local
5750 spinlock_t *ptl; in hugetlb_mcopy_atomic_pte() local
5853 spin_lock(ptl); in hugetlb_mcopy_atomic_pte()
6495 spinlock_t *ptl; in huge_pmd_share() local
6678 spinlock_t *ptl; in follow_huge_pmd() local
6688 spin_lock(ptl); in follow_huge_pmd()
[all …]
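
For hugetlb mappings the lock is taken per huge PTE with huge_pte_lock(), which picks the right spinlock for the huge page size; the hugetlb.c and hmm.c hits both follow this shape. A minimal kernel-context sketch, assuming vma is a hugetlb VMA; inspect_huge_pte() is an illustrative name.

    static void inspect_huge_pte(struct vm_area_struct *vma, unsigned long address)
    {
            struct hstate *h = hstate_vma(vma);
            spinlock_t *ptl;
            pte_t *ptep;

            ptep = huge_pte_offset(vma->vm_mm, address, huge_page_size(h));
            if (!ptep)
                    return;         /* nothing mapped at this address */

            ptl = huge_pte_lock(h, vma->vm_mm, ptep);
            /* the huge PTE is stable while ptl is held */
            spin_unlock(ptl);
    }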
migrate.c
290 spinlock_t *ptl) in __migration_entry_wait() argument
296 spin_lock(ptl); in __migration_entry_wait()
340 spinlock_t *ptl; in pmd_migration_entry_wait() local
343 ptl = pmd_lock(mm, pmd); in pmd_migration_entry_wait()
349 spin_unlock(ptl); in pmd_migration_entry_wait()
353 spin_unlock(ptl); in pmd_migration_entry_wait()
2250 spinlock_t *ptl; in migrate_vma_collect_pmd() local
2262 spin_unlock(ptl); in migrate_vma_collect_pmd()
2268 spin_unlock(ptl); in migrate_vma_collect_pmd()
2277 spin_unlock(ptl); in migrate_vma_collect_pmd()
[all …]
khugepaged.c
742 spinlock_t *ptl, in __collapse_huge_page_copy() argument
758 spin_lock(ptl); in __collapse_huge_page_copy()
764 spin_unlock(ptl); in __collapse_huge_page_copy()
776 spin_lock(ptl); in __collapse_huge_page_copy()
783 spin_unlock(ptl); in __collapse_huge_page_copy()
1233 spinlock_t *ptl; in khugepaged_scan_pmd() local
1366 pte_unmap_unlock(pte, ptl); in khugepaged_scan_pmd()
1440 spinlock_t *ptl; in collapse_pte_mapped_thp() local
1515 ptl = pmd_lock(vma->vm_mm, pmd); in collapse_pte_mapped_thp()
1517 spin_unlock(ptl); in collapse_pte_mapped_thp()
[all …]
/linux/arch/arm/lib/
uaccess_with_memcpy.c
31 spinlock_t *ptl; in pin_page_for_write() local
61 spin_lock(ptl); in pin_page_for_write()
64 spin_unlock(ptl); in pin_page_for_write()
69 *ptlp = ptl; in pin_page_for_write()
79 pte_unmap_unlock(pte, ptl); in pin_page_for_write()
84 *ptlp = ptl; in pin_page_for_write()
107 spinlock_t *ptl; in __copy_to_user_memcpy() local
131 pte_unmap_unlock(pte, ptl); in __copy_to_user_memcpy()
133 spin_unlock(ptl); in __copy_to_user_memcpy()
176 spinlock_t *ptl; in __clear_user_memset() local
[all …]
/linux/arch/arm/mm/
fault-armv.c
70 static inline void do_pte_lock(spinlock_t *ptl) in do_pte_lock() argument
76 spin_lock_nested(ptl, SINGLE_DEPTH_NESTING); in do_pte_lock()
79 static inline void do_pte_unlock(spinlock_t *ptl) in do_pte_unlock() argument
81 spin_unlock(ptl); in do_pte_unlock()
84 static inline void do_pte_lock(spinlock_t *ptl) {} in do_pte_lock() argument
85 static inline void do_pte_unlock(spinlock_t *ptl) {} in do_pte_unlock() argument
91 spinlock_t *ptl; in adjust_pte() local
120 ptl = pte_lockptr(vma->vm_mm, pmd); in adjust_pte()
122 do_pte_lock(ptl); in adjust_pte()
126 do_pte_unlock(ptl); in adjust_pte()
/linux/arch/powerpc/mm/
hugetlbpage.c
81 spin_lock(ptl); in __hugepte_alloc()
101 spin_unlock(ptl); in __hugepte_alloc()
119 spinlock_t *ptl; in huge_pte_alloc() local
133 ptl = &mm->page_table_lock; in huge_pte_alloc()
143 ptl = pud_lockptr(mm, pu); in huge_pte_alloc()
161 ptl = &mm->page_table_lock; in huge_pte_alloc()
169 ptl = pud_lockptr(mm, pu); in huge_pte_alloc()
513 spinlock_t *ptl; in follow_huge_pd() local
525 spin_lock(ptl); in follow_huge_pd()
536 spin_unlock(ptl); in follow_huge_pd()
[all …]
/linux/arch/s390/mm/
pgtable.c
790 spinlock_t *ptl; in set_guest_storage_key() local
810 spin_unlock(ptl); in set_guest_storage_key()
822 spin_unlock(ptl); in set_guest_storage_key()
825 spin_unlock(ptl); in set_guest_storage_key()
897 spinlock_t *ptl; in reset_guest_reference_bit() local
930 spin_unlock(ptl); in reset_guest_reference_bit()
959 spinlock_t *ptl; in get_guest_storage_key() local
992 spin_unlock(ptl); in get_guest_storage_key()
1025 spinlock_t *ptl; in pgste_perform_essa() local
1130 spinlock_t *ptl; in set_pgste_bits() local
[all …]
gmap.c
543 spinlock_t *ptl; in __gmap_link() local
621 spin_unlock(ptl); in __gmap_link()
677 spinlock_t *ptl; in __gmap_zap() local
903 if (ptl) in gmap_pte_op_end()
904 spin_unlock(ptl); in gmap_pte_op_end()
1138 spinlock_t *ptl; in gmap_read_table() local
1217 spinlock_t *ptl; in gmap_protect_rmap() local
2116 spinlock_t *ptl; in gmap_shadow_page() local
2471 spinlock_t *ptl; in gmap_sync_dirty_log_pmd() local
2487 spin_unlock(ptl); in gmap_sync_dirty_log_pmd()
[all …]
/linux/mm/damon/
vaddr.c
375 spinlock_t *ptl; in damon_mkold_pmd_entry() local
378 ptl = pmd_lock(walk->mm, pmd); in damon_mkold_pmd_entry()
381 spin_unlock(ptl); in damon_mkold_pmd_entry()
384 spin_unlock(ptl); in damon_mkold_pmd_entry()
389 pte = pte_offset_map_lock(walk->mm, pmd, addr, &ptl); in damon_mkold_pmd_entry()
394 pte_unmap_unlock(pte, ptl); in damon_mkold_pmd_entry()
446 spinlock_t *ptl; in damon_young_pmd_entry() local
452 ptl = pmd_lock(walk->mm, pmd); in damon_young_pmd_entry()
454 spin_unlock(ptl); in damon_young_pmd_entry()
468 spin_unlock(ptl); in damon_young_pmd_entry()
[all …]
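
damon_mkold_pmd_entry() shows the raw pmd_lock() variant: the unlocked huge-pmd check is only a hint and must be repeated under the lock, because the pmd can be split concurrently; only if the recheck fails does the walker fall back to the per-PTE path shown in the mincore sketch above. A hedged kernel-context sketch of that recheck idiom; touch_huge_pmd() is an illustrative name.

    static bool touch_huge_pmd(struct mm_struct *mm, pmd_t *pmd)
    {
            spinlock_t *ptl;

            if (!pmd_trans_huge(*pmd))
                    return false;

            ptl = pmd_lock(mm, pmd);
            if (!pmd_trans_huge(*pmd)) {
                    spin_unlock(ptl);
                    return false;   /* raced with a split; caller takes the PTE path */
            }

            /* the huge pmd is stable while ptl is held */
            spin_unlock(ptl);
            return true;
    }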
/linux/arch/s390/pci/
pci_mmio.c
125 spinlock_t *ptl; in SYSCALL_DEFINE3() local
171 ret = follow_pte(vma->vm_mm, mmio_addr, &ptep, &ptl); in SYSCALL_DEFINE3()
183 pte_unmap_unlock(ptep, ptl); in SYSCALL_DEFINE3()
267 spinlock_t *ptl; in SYSCALL_DEFINE3() local
310 ret = follow_pte(vma->vm_mm, mmio_addr, &ptep, &ptl); in SYSCALL_DEFINE3()
324 pte_unmap_unlock(ptep, ptl); in SYSCALL_DEFINE3()
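
Both SYSCALL_DEFINE3() hits use follow_pte() to resolve a user address in a VMA to its PTE: on success the PTE pointer is returned with its page table lock held, and the caller releases both with pte_unmap_unlock(). A minimal kernel-context sketch of that contract; peek_pte() is an illustrative name.

    static int peek_pte(struct vm_area_struct *vma, unsigned long addr)
    {
            spinlock_t *ptl;
            pte_t *ptep;
            int ret;

            ret = follow_pte(vma->vm_mm, addr, &ptep, &ptl);
            if (ret)
                    return ret;

            /* *ptep is stable here; e.g. read the frame number with pte_pfn(*ptep) */
            pte_unmap_unlock(ptep, ptl);
            return 0;
    }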
/linux/fs/proc/
task_mmu.c
583 spinlock_t *ptl; in smaps_pte_range() local
586 if (ptl) { in smaps_pte_range()
588 spin_unlock(ptl); in smaps_pte_range()
1125 spinlock_t *ptl; in clear_refs_pte_range() local
1129 if (ptl) { in clear_refs_pte_range()
1145 spin_unlock(ptl); in clear_refs_pte_range()
1410 spinlock_t *ptl; in pagemap_pmd_range() local
1416 if (ptl) { in pagemap_pmd_range()
1473 spin_unlock(ptl); in pagemap_pmd_range()
1798 spinlock_t *ptl; in gather_pte_stats() local
[all …]
/linux/Documentation/vm/
split_page_table_lock.rst
63 This field shares storage with page->ptl.
80 page->ptl
83 page->ptl is used to access split page table lock, where 'page' is struct
92 - if size of spinlock_t is bigger then size of long, we use page->ptl as
100 Please, never access page->ptl directly -- use appropriate helper.
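
The documentation hit makes the rule explicit: page->ptl is an implementation detail (either a spinlock_t embedded in struct page or, when spinlock_t is larger than a long, a pointer to one), and callers should only go through the helpers. A hedged kernel-context sketch of the helper-based access it recommends; update_pte_locked() is an illustrative name.

    static void update_pte_locked(struct mm_struct *mm, pmd_t *pmd, unsigned long addr)
    {
            spinlock_t *ptl;
            pte_t *pte;

            /* Preferred: map the PTE page and take its split lock in one call. */
            pte = pte_offset_map_lock(mm, pmd, addr, &ptl);
            /* ... read or modify *pte ... */
            pte_unmap_unlock(pte, ptl);

            /* Alternatively, when a PTE is already mapped, the lock itself can be
             * looked up with pte_lockptr(mm, pmd) and taken with spin_lock(). */
    }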
/linux/arch/x86/kernel/
ldt.c
292 spinlock_t *ptl; in map_ldt_struct() local
326 ptep = get_locked_pte(mm, va, &ptl); in map_ldt_struct()
339 pte_unmap_unlock(ptep, ptl); in map_ldt_struct()
365 spinlock_t *ptl; in unmap_ldt_struct() local
369 ptep = get_locked_pte(mm, va, &ptl); in unmap_ldt_struct()
371 pte_unmap_unlock(ptep, ptl); in unmap_ldt_struct()
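
map_ldt_struct() and unmap_ldt_struct() rely on get_locked_pte(), which walks (allocating if needed) down to the PTE for an address and returns it with the PTE lock held, so the caller can install or clear an entry and then drop both with pte_unmap_unlock(). A minimal kernel-context sketch of that pairing; install_pte() is an illustrative name.

    static int install_pte(struct mm_struct *mm, unsigned long va, pte_t pteval)
    {
            spinlock_t *ptl;
            pte_t *ptep;

            ptep = get_locked_pte(mm, va, &ptl);
            if (!ptep)
                    return -ENOMEM;

            set_pte_at(mm, va, ptep, pteval);
            pte_unmap_unlock(ptep, ptl);
            return 0;
    }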
/linux/include/linux/
rmap.h
212 spinlock_t *ptl; member
221 if (pvmw->ptl) in page_vma_mapped_walk_done()
222 spin_unlock(pvmw->ptl); in page_vma_mapped_walk_done()
