
Searched refs:ptl (Results 1 – 25 of 49) sorted by relevance


/linux/drivers/platform/surface/aggregator/
ssh_packet_layer.c
335 ptl_info(packet->ptl, "packet error injection: dropping ACK packet %p\n", in __ssh_ptl_should_drop_ack_packet()
347 ptl_info(packet->ptl, "packet error injection: dropping NAK packet %p\n", in __ssh_ptl_should_drop_nak_packet()
359 ptl_info(packet->ptl, in __ssh_ptl_should_drop_dsq_packet()
387 static int ssh_ptl_write_buf(struct ssh_ptl *ptl, struct ssh_packet *packet, in ssh_ptl_write_buf() argument
395 ptl_info(packet->ptl, in ssh_ptl_write_buf()
402 return serdev_device_write_buf(ptl->serdev, buf, count); in ssh_ptl_write_buf()
419 ptl_info(packet->ptl, in ssh_ptl_tx_inject_invalid_data()
431 static void ssh_ptl_rx_inject_invalid_syn(struct ssh_ptl *ptl, in ssh_ptl_rx_inject_invalid_syn() argument
448 static void ssh_ptl_rx_inject_invalid_data(struct ssh_ptl *ptl, in ssh_ptl_rx_inject_invalid_data() argument
486 static inline int ssh_ptl_write_buf(struct ssh_ptl *ptl, in ssh_ptl_write_buf() argument
[all …]
ssh_packet_layer.h
139 int ssh_ptl_init(struct ssh_ptl *ptl, struct serdev_device *serdev,
142 void ssh_ptl_destroy(struct ssh_ptl *ptl);
151 static inline struct device *ssh_ptl_get_device(struct ssh_ptl *ptl) in ssh_ptl_get_device() argument
153 return ptl->serdev ? &ptl->serdev->dev : NULL; in ssh_ptl_get_device()
156 int ssh_ptl_tx_start(struct ssh_ptl *ptl);
157 int ssh_ptl_tx_stop(struct ssh_ptl *ptl);
158 int ssh_ptl_rx_start(struct ssh_ptl *ptl);
159 int ssh_ptl_rx_stop(struct ssh_ptl *ptl);
160 void ssh_ptl_shutdown(struct ssh_ptl *ptl);
162 int ssh_ptl_submit(struct ssh_ptl *ptl, struct ssh_packet *p);
[all …]
ssh_request_layer.h
66 struct ssh_ptl ptl; member
94 #define rtl_dbg(r, fmt, ...) ptl_dbg(&(r)->ptl, fmt, ##__VA_ARGS__)
95 #define rtl_info(p, fmt, ...) ptl_info(&(p)->ptl, fmt, ##__VA_ARGS__)
96 #define rtl_warn(r, fmt, ...) ptl_warn(&(r)->ptl, fmt, ##__VA_ARGS__)
97 #define rtl_err(r, fmt, ...) ptl_err(&(r)->ptl, fmt, ##__VA_ARGS__)
112 return ssh_ptl_get_device(&rtl->ptl); in ssh_rtl_get_device()
123 struct ssh_ptl *ptl; in ssh_request_rtl() local
125 ptl = READ_ONCE(rqst->packet.ptl); in ssh_request_rtl()
126 return likely(ptl) ? to_ssh_rtl(ptl, ptl) : NULL; in ssh_request_rtl()
ssh_request_layer.c
258 status = ssh_ptl_submit(&rtl->ptl, &rqst->packet); in ssh_rtl_tx_try_process_one()
391 if (cmpxchg(&rqst->packet.ptl, NULL, &rtl->ptl)) { in ssh_rtl_submit()
619 if (flags == fixed && !READ_ONCE(r->packet.ptl)) { in ssh_rtl_cancel_nonpending()
674 if (!READ_ONCE(r->packet.ptl)) { in ssh_rtl_cancel_pending()
911 struct ssh_rtl *rtl = to_ssh_rtl(p, ptl); in ssh_rtl_rx_command()
1033 status = ssh_ptl_init(&rtl->ptl, serdev, &ptl_ops); in ssh_rtl_init()
1067 ssh_ptl_destroy(&rtl->ptl); in ssh_rtl_destroy()
1080 status = ssh_ptl_tx_start(&rtl->ptl); in ssh_rtl_start()
1086 status = ssh_ptl_rx_start(&rtl->ptl); in ssh_rtl_start()
1089 ssh_ptl_tx_stop(&rtl->ptl); in ssh_rtl_start()
[all …]
/linux/drivers/media/platform/allegro-dvt/
nal-hevc.c
101 struct nal_hevc_profile_tier_level *ptl) in nal_hevc_rbsp_profile_tier_level() argument
106 rbsp_bits(rbsp, 2, &ptl->general_profile_space); in nal_hevc_rbsp_profile_tier_level()
107 rbsp_bit(rbsp, &ptl->general_tier_flag); in nal_hevc_rbsp_profile_tier_level()
108 rbsp_bits(rbsp, 5, &ptl->general_profile_idc); in nal_hevc_rbsp_profile_tier_level()
110 rbsp_bit(rbsp, &ptl->general_profile_compatibility_flag[i]); in nal_hevc_rbsp_profile_tier_level()
111 rbsp_bit(rbsp, &ptl->general_progressive_source_flag); in nal_hevc_rbsp_profile_tier_level()
112 rbsp_bit(rbsp, &ptl->general_interlaced_source_flag); in nal_hevc_rbsp_profile_tier_level()
113 rbsp_bit(rbsp, &ptl->general_non_packed_constraint_flag); in nal_hevc_rbsp_profile_tier_level()
114 rbsp_bit(rbsp, &ptl->general_frame_only_constraint_flag); in nal_hevc_rbsp_profile_tier_level()
115 if (ptl->general_profile_idc == 4 || in nal_hevc_rbsp_profile_tier_level()
[all …]
/linux/Documentation/translations/zh_CN/mm/
split_page_table_lock.rst
63 ...page. This area shares storage with page->ptl.
79 page->ptl
82 page->ptl is used to access the split page table lock, where 'page' is the struct page of the page containing the table. It
89 - If the size of spinlock_t is bigger than the size of long, we use page->ptl as spinlock_t
96 Please do not access page->ptl directly -- use the appropriate helper functions.
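
The helper-function pattern that documentation refers to is what the mm/ results below show. As a minimal sketch (illustrative names only, not taken from any file in this listing), a PTE-range walker typically acquires the split lock with pte_offset_map_lock() and releases it with pte_unmap_unlock() instead of touching page->ptl itself:

/*
 * Minimal sketch, assuming current mm helpers; function and variable
 * names here are made up for illustration.
 */
#include <linux/mm.h>
#include <linux/spinlock.h>

static int example_pte_range(struct mm_struct *mm, pmd_t *pmd,
			     unsigned long addr, unsigned long end)
{
	spinlock_t *ptl;
	pte_t *start_pte, *pte;

	/* Maps the PTE page and takes its split page table lock in one call. */
	start_pte = pte = pte_offset_map_lock(mm, pmd, addr, &ptl);
	if (!pte)
		return -EAGAIN;		/* PTE table went away under us */

	for (; addr < end; pte++, addr += PAGE_SIZE) {
		if (pte_none(ptep_get(pte)))
			continue;
		/* ... inspect or modify *pte while ptl is held ... */
	}

	/* Drops the lock and unmaps the PTE page. */
	pte_unmap_unlock(start_pte, ptl);
	return 0;
}
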
/linux/mm/
huge_memory.c
1236 vmf->ptl = pmd_lock(vma->vm_mm, vmf->pmd); in __do_huge_pmd_anonymous_page()
1246 spin_unlock(vmf->ptl); in __do_huge_pmd_anonymous_page()
1257 spin_unlock(vmf->ptl); in __do_huge_pmd_anonymous_page()
1262 spin_unlock(vmf->ptl); in __do_huge_pmd_anonymous_page()
1350 vmf->ptl = pmd_lock(vma->vm_mm, vmf->pmd); in do_huge_pmd_anonymous_page()
1355 spin_unlock(vmf->ptl); in do_huge_pmd_anonymous_page()
1358 spin_unlock(vmf->ptl); in do_huge_pmd_anonymous_page()
1366 spin_unlock(vmf->ptl); in do_huge_pmd_anonymous_page()
1369 spin_unlock(vmf->ptl); in do_huge_pmd_anonymous_page()
1384 spinlock_t *ptl; in insert_pfn_pmd() local
[all …]
mincore.c
103 spinlock_t *ptl; in mincore_pte_range() local
109 ptl = pmd_trans_huge_lock(pmd, vma); in mincore_pte_range()
110 if (ptl) { in mincore_pte_range()
112 spin_unlock(ptl); in mincore_pte_range()
116 ptep = pte_offset_map_lock(walk->mm, pmd, addr, &ptl); in mincore_pte_range()
151 pte_unmap_unlock(ptep - 1, ptl); in mincore_pte_range()
memory.c
421 spinlock_t *ptl = pmd_lock(mm, pmd); in pmd_install() local
442 spin_unlock(ptl); in pmd_install()
1716 spinlock_t *ptl; in zap_pte_range() local
1728 start_pte = pte = pte_offset_map_lock(mm, pmd, addr, &ptl); in zap_pte_range()
1772 pte_unmap_unlock(start_pte, ptl); in zap_pte_range()
1822 spinlock_t *ptl = pmd_lock(tlb->mm, pmd); in zap_pmd_range() local
1828 spin_unlock(ptl); in zap_pmd_range()
2080 spinlock_t **ptl) in __get_locked_pte() argument
2086 return pte_alloc_map_lock(mm, pmd, addr, ptl); in __get_locked_pte()
2164 spinlock_t *ptl; in insert_page() local
[all …]
khugepaged.c
713 spinlock_t *ptl, in __collapse_huge_page_copy_succeeded() argument
729 spin_lock(ptl); in __collapse_huge_page_copy_succeeded()
731 spin_unlock(ptl); in __collapse_huge_page_copy_succeeded()
745 spin_lock(ptl); in __collapse_huge_page_copy_succeeded()
748 spin_unlock(ptl); in __collapse_huge_page_copy_succeeded()
804 unsigned long address, spinlock_t *ptl, in __collapse_huge_page_copy() argument
831 __collapse_huge_page_copy_succeeded(pte, vma, address, ptl, in __collapse_huge_page_copy()
1010 spinlock_t *ptl; in __collapse_huge_page_swapin() local
1026 pte = pte_offset_map_ro_nolock(mm, pmd, address, &ptl); in __collapse_huge_page_swapin()
1039 vmf.ptl = ptl; in __collapse_huge_page_swapin()
[all …]
migrate_device.c
66 spinlock_t *ptl; in migrate_vma_collect_pmd() local
76 ptl = pmd_lock(mm, pmdp); in migrate_vma_collect_pmd()
78 spin_unlock(ptl); in migrate_vma_collect_pmd()
84 spin_unlock(ptl); in migrate_vma_collect_pmd()
90 spin_unlock(ptl); in migrate_vma_collect_pmd()
103 ptep = pte_offset_map_lock(mm, pmdp, addr, &ptl); in migrate_vma_collect_pmd()
277 pte_unmap_unlock(ptep - 1, ptl); in migrate_vma_collect_pmd()
576 spinlock_t *ptl; in migrate_vma_insert_page() local
636 ptep = pte_offset_map_lock(mm, pmdp, addr, &ptl); in migrate_vma_insert_page()
673 pte_unmap_unlock(ptep, ptl); in migrate_vma_insert_page()
[all …]
madvise.c
185 spinlock_t *ptl; in swapin_walk_pmd_entry() local
194 ptep = pte_offset_map_lock(vma->vm_mm, pmd, addr, &ptl); in swapin_walk_pmd_entry()
206 pte_unmap_unlock(ptep, ptl); in swapin_walk_pmd_entry()
216 pte_unmap_unlock(ptep, ptl); in swapin_walk_pmd_entry()
354 spinlock_t *ptl; in madvise_cold_or_pageout_pte_range() local
373 ptl = pmd_trans_huge_lock(pmd, vma); in madvise_cold_or_pageout_pte_range()
374 if (!ptl) in madvise_cold_or_pageout_pte_range()
400 spin_unlock(ptl); in madvise_cold_or_pageout_pte_range()
432 spin_unlock(ptl); in madvise_cold_or_pageout_pte_range()
442 start_pte = pte = pte_offset_map_lock(vma->vm_mm, pmd, addr, &ptl); in madvise_cold_or_pageout_pte_range()
[all …]
hmm.c
418 spinlock_t *ptl = pud_trans_huge_lock(pudp, walk->vma); in hmm_vma_walk_pud() local
420 if (!ptl) in hmm_vma_walk_pud()
428 spin_unlock(ptl); in hmm_vma_walk_pud()
446 spin_unlock(ptl); in hmm_vma_walk_pud()
460 spin_unlock(ptl); in hmm_vma_walk_pud()
479 spinlock_t *ptl; in hmm_vma_walk_hugetlb_entry() local
482 ptl = huge_pte_lock(hstate_vma(vma), walk->mm, pte); in hmm_vma_walk_hugetlb_entry()
494 spin_unlock(ptl); in hmm_vma_walk_hugetlb_entry()
513 spin_unlock(ptl); in hmm_vma_walk_hugetlb_entry()
pgtable-generic.c
394 spinlock_t *ptl; in __pte_offset_map_lock() local
401 ptl = pte_lockptr(mm, &pmdval); in __pte_offset_map_lock()
402 spin_lock(ptl); in __pte_offset_map_lock()
404 *ptlp = ptl; in __pte_offset_map_lock()
407 pte_unmap_unlock(pte, ptl); in __pte_offset_map_lock()
userfaultfd.c
182 spinlock_t *ptl; in mfill_atomic_install_pte() local
196 dst_pte = pte_offset_map_lock(dst_mm, dst_pmd, dst_addr, &ptl); in mfill_atomic_install_pte()
236 pte_unmap_unlock(dst_pte, ptl); in mfill_atomic_install_pte()
354 spinlock_t *ptl; in mfill_atomic_pte_zeropage() local
363 dst_pte = pte_offset_map_lock(dst_vma->vm_mm, dst_pmd, dst_addr, &ptl); in mfill_atomic_pte_zeropage()
378 pte_unmap_unlock(dst_pte, ptl); in mfill_atomic_pte_zeropage()
436 spinlock_t *ptl; in mfill_atomic_pte_poison() local
440 dst_pte = pte_offset_map_lock(dst_mm, dst_pmd, dst_addr, &ptl); in mfill_atomic_pte_poison()
460 pte_unmap_unlock(dst_pte, ptl); in mfill_atomic_pte_poison()
1741 spinlock_t *ptl; in move_pages() local
[all …]
/linux/arch/arm/mm/
fault-armv.c
67 spinlock_t *ptl; in adjust_pte() local
98 pte = pte_offset_map_rw_nolock(vma->vm_mm, pmd, address, &pmdval, &ptl); in adjust_pte()
107 spin_lock_nested(ptl, SINGLE_DEPTH_NESTING); in adjust_pte()
109 pte_unmap_unlock(pte, ptl); in adjust_pte()
117 spin_unlock(ptl); in adjust_pte()
/linux/arch/powerpc/lib/
code-patching.c
156 spinlock_t *ptl; in text_area_cpu_up_mm() local
177 pte = get_locked_pte(mm, addr, &ptl); in text_area_cpu_up_mm()
180 pte_unmap_unlock(pte, ptl); in text_area_cpu_up_mm()
290 spinlock_t *ptl; in __do_patch_mem_mm() local
296 pte = get_locked_pte(patching_mm, text_poke_addr, &ptl); in __do_patch_mem_mm()
322 pte_unmap_unlock(pte, ptl); in __do_patch_mem_mm()
473 spinlock_t *ptl; in __do_patch_instructions_mm() local
482 pte = get_locked_pte(patching_mm, text_poke_addr, &ptl); in __do_patch_instructions_mm()
510 pte_unmap_unlock(pte, ptl); in __do_patch_instructions_mm()
/linux/fs/proc/
task_mmu.c
886 spinlock_t *ptl; in smaps_pte_range() local
888 ptl = pmd_trans_huge_lock(pmd, vma); in smaps_pte_range()
889 if (ptl) { in smaps_pte_range()
891 spin_unlock(ptl); in smaps_pte_range()
895 pte = pte_offset_map_lock(vma->vm_mm, pmd, addr, &ptl); in smaps_pte_range()
902 pte_unmap_unlock(pte - 1, ptl); in smaps_pte_range()
1454 spinlock_t *ptl; in clear_refs_pte_range() local
1457 ptl = pmd_trans_huge_lock(pmd, vma); in clear_refs_pte_range()
1458 if (ptl) { in clear_refs_pte_range()
1474 spin_unlock(ptl); in clear_refs_pte_range()
[all …]
/linux/arch/s390/mm/
gmap.c
530 spinlock_t *ptl; in __gmap_link() local
586 ptl = pmd_lock(mm, pmd); in __gmap_link()
610 spin_unlock(ptl); in __gmap_link()
623 spinlock_t *ptl; in __gmap_zap() local
637 ptep = get_locked_pte(gmap->mm, vmaddr, &ptl); in __gmap_zap()
640 pte_unmap_unlock(ptep, ptl); in __gmap_zap()
802 spinlock_t **ptl) in gmap_pte_op_walk() argument
811 return pte_alloc_map_lock(gmap->mm, (pmd_t *) table, gaddr, ptl); in gmap_pte_op_walk()
848 static void gmap_pte_op_end(pte_t *ptep, spinlock_t *ptl) in gmap_pte_op_end() argument
850 pte_unmap_unlock(ptep, ptl); in gmap_pte_op_end()
[all …]
/linux/arch/x86/kernel/
ldt.c
292 spinlock_t *ptl; in map_ldt_struct() local
326 ptep = get_locked_pte(mm, va, &ptl); in map_ldt_struct()
339 pte_unmap_unlock(ptep, ptl); in map_ldt_struct()
365 spinlock_t *ptl; in unmap_ldt_struct() local
369 ptep = get_locked_pte(mm, va, &ptl); in unmap_ldt_struct()
372 pte_unmap_unlock(ptep, ptl); in unmap_ldt_struct()
/linux/sound/soc/sof/intel/
Makefile
37 snd-sof-pci-intel-ptl-y := pci-ptl.o
47 obj-$(CONFIG_SND_SOC_SOF_INTEL_PTL) += snd-sof-pci-intel-ptl.o
/linux/arch/m68k/kernel/
sys_m68k.c
474 spinlock_t *ptl; in sys_atomic_cmpxchg_32() local
490 pte = pte_offset_map_lock(mm, pmd, (unsigned long)mem, &ptl); in sys_atomic_cmpxchg_32()
495 pte_unmap_unlock(pte, ptl); in sys_atomic_cmpxchg_32()
507 pte_unmap_unlock(pte, ptl); in sys_atomic_cmpxchg_32()
/linux/drivers/input/touchscreen/
ilitek_ts_i2c.c
56 struct ilitek_protocol_info ptl; member
246 ts->ptl.ver = get_unaligned_be16(outbuf); in api_protocol_get_ptl_ver()
247 ts->ptl.ver_major = outbuf[0]; in api_protocol_get_ptl_ver()
419 if (ts->ptl.ver_major == 0x3 || in ilitek_protocol_init()
420 ts->ptl.ver == BL_V1_6 || in ilitek_protocol_init()
421 ts->ptl.ver == BL_V1_7) in ilitek_protocol_init()
/linux/arch/powerpc/mm/book3s64/
subpage_prot.c
61 spinlock_t *ptl; in hpte_flush_range() local
73 pte = pte_offset_map_lock(mm, pmd, addr, &ptl); in hpte_flush_range()
83 pte_unmap_unlock(pte - 1, ptl); in hpte_flush_range()
/linux/arch/x86/xen/
mmu_pv.c
711 spinlock_t *ptl = NULL; in xen_pte_lock() local
714 ptl = ptlock_ptr(page_ptdesc(page)); in xen_pte_lock()
715 spin_lock_nest_lock(ptl, &mm->page_table_lock); in xen_pte_lock()
718 return ptl; in xen_pte_lock()
723 spinlock_t *ptl = v; in xen_pte_unlock() local
724 spin_unlock(ptl); in xen_pte_unlock()
746 spinlock_t *ptl; in xen_pin_page() local
768 ptl = NULL; in xen_pin_page()
770 ptl = xen_pte_lock(page, mm); in xen_pin_page()
776 if (ptl) { in xen_pin_page()
[all …]
