
Searched refs:mm (Results 1 – 25 of 1417) sorted by relevance


/linux/include/linux/
mmap_lock.h
20 void __mmap_lock_do_trace_start_locking(struct mm_struct *mm, bool write);
21 void __mmap_lock_do_trace_acquire_returned(struct mm_struct *mm, bool write,
23 void __mmap_lock_do_trace_released(struct mm_struct *mm, bool write);
25 static inline void __mmap_lock_trace_start_locking(struct mm_struct *mm, in __mmap_lock_trace_start_locking() argument
29 __mmap_lock_do_trace_start_locking(mm, write); in __mmap_lock_trace_start_locking()
32 static inline void __mmap_lock_trace_acquire_returned(struct mm_struct *mm, in __mmap_lock_trace_acquire_returned() argument
36 __mmap_lock_do_trace_acquire_returned(mm, write, success); in __mmap_lock_trace_acquire_returned()
39 static inline void __mmap_lock_trace_released(struct mm_struct *mm, bool write) in __mmap_lock_trace_released() argument
42 __mmap_lock_do_trace_released(mm, write); in __mmap_lock_trace_released()
47 static inline void __mmap_lock_trace_start_locking(struct mm_struct *mm, in __mmap_lock_trace_start_locking() argument
52 __mmap_lock_trace_acquire_returned(struct mm_struct * mm,bool write,bool success) __mmap_lock_trace_acquire_returned() argument
57 __mmap_lock_trace_released(struct mm_struct * mm,bool write) __mmap_lock_trace_released() argument
63 mmap_assert_locked(struct mm_struct * mm) mmap_assert_locked() argument
69 mmap_assert_write_locked(struct mm_struct * mm) mmap_assert_write_locked() argument
84 vma_end_write_all(struct mm_struct * mm) vma_end_write_all() argument
97 vma_end_write_all(struct mm_struct * mm) vma_end_write_all() argument
100 mmap_init_lock(struct mm_struct * mm) mmap_init_lock() argument
105 mmap_write_lock(struct mm_struct * mm) mmap_write_lock() argument
112 mmap_write_lock_nested(struct mm_struct * mm,int subclass) mmap_write_lock_nested() argument
119 mmap_write_lock_killable(struct mm_struct * mm) mmap_write_lock_killable() argument
129 mmap_write_unlock(struct mm_struct * mm) mmap_write_unlock() argument
136 mmap_write_downgrade(struct mm_struct * mm) mmap_write_downgrade() argument
143 mmap_read_lock(struct mm_struct * mm) mmap_read_lock() argument
150 mmap_read_lock_killable(struct mm_struct * mm) mmap_read_lock_killable() argument
160 mmap_read_trylock(struct mm_struct * mm) mmap_read_trylock() argument
170 mmap_read_unlock(struct mm_struct * mm) mmap_read_unlock() argument
176 mmap_read_unlock_non_owner(struct mm_struct * mm) mmap_read_unlock_non_owner() argument
182 mmap_lock_is_contended(struct mm_struct * mm) mmap_lock_is_contended() argument
[all...]
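A minimal usage sketch of the lock API indexed above; the helper name and the VMA walk inside it are illustrative assumptions, not code from mmap_lock.h:

#include <linux/mm.h>
#include <linux/mmap_lock.h>

/* Illustrative: count a task's VMAs under the mmap read lock. */
static unsigned long count_vmas(struct mm_struct *mm)
{
	VMA_ITERATOR(vmi, mm, 0);
	struct vm_area_struct *vma;
	unsigned long n = 0;

	mmap_read_lock(mm);
	mmap_assert_locked(mm);	/* the assertion helper seen above */
	for_each_vma(vmi, vma)
		n++;
	mmap_read_unlock(mm);

	return n;
}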
mmu_notifier.h
89 struct mm_struct *mm);
101 struct mm_struct *mm,
111 struct mm_struct *mm,
122 struct mm_struct *mm,
199 struct mm_struct *mm,
213 struct mmu_notifier *(*alloc_notifier)(struct mm_struct *mm);
231 struct mm_struct *mm; member
251 struct mm_struct *mm; member
263 struct mm_struct *mm; member
271 static inline int mm_has_notifiers(struct mm_struct *mm) in mm_has_notifiers() argument
[all …]
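A sketch of a subscriber built on this interface; .release and .invalidate_range_start are real mmu_notifier_ops callbacks, but the demo_* names and handler bodies are assumptions:

#include <linux/mmu_notifier.h>

static void demo_release(struct mmu_notifier *sub, struct mm_struct *mm)
{
	/* the mm is being torn down: stop using its pages now */
}

static int demo_invalidate_range_start(struct mmu_notifier *sub,
				       const struct mmu_notifier_range *range)
{
	/* drop secondary-TLB entries covering [range->start, range->end) */
	return 0;
}

static const struct mmu_notifier_ops demo_ops = {
	.release		= demo_release,
	.invalidate_range_start	= demo_invalidate_range_start,
};

static struct mmu_notifier demo_sub = { .ops = &demo_ops };

static int demo_attach(struct mm_struct *mm)
{
	/* mmu_notifier_register() takes the mmap write lock internally */
	return mmu_notifier_register(&demo_sub, mm);
}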
page_table_check.h
17 void __page_table_check_pte_clear(struct mm_struct *mm, pte_t pte);
18 void __page_table_check_pmd_clear(struct mm_struct *mm, pmd_t pmd);
19 void __page_table_check_pud_clear(struct mm_struct *mm, pud_t pud);
20 void __page_table_check_ptes_set(struct mm_struct *mm, pte_t *ptep, pte_t pte,
22 void __page_table_check_pmd_set(struct mm_struct *mm, pmd_t *pmdp, pmd_t pmd);
23 void __page_table_check_pud_set(struct mm_struct *mm, pud_t *pudp, pud_t pud);
24 void __page_table_check_pte_clear_range(struct mm_struct *mm,
44 static inline void page_table_check_pte_clear(struct mm_struct *mm, pte_t pte) in page_table_check_pte_clear() argument
49 __page_table_check_pte_clear(mm, pte); in page_table_check_pte_clear()
52 static inline void page_table_check_pmd_clear(struct mm_struct *mm, pmd_t pmd) in page_table_check_pmd_clear() argument
[all …]
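These hooks are intended to be called from an architecture's page-table setters; a sketch of that wiring, where demo_set_ptes() stands in for an arch's real set_ptes() path:

#include <linux/page_table_check.h>

/* Illustrative arch hook: validate entries before making them visible. */
static inline void demo_set_ptes(struct mm_struct *mm, unsigned long addr,
				 pte_t *ptep, pte_t pte, unsigned int nr)
{
	page_table_check_ptes_set(mm, ptep, pte, nr);
	/* ... arch-specific: write nr consecutive entries at ptep ... */
}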
/linux/arch/x86/include/asm/
mmu_context.h
55 static inline void init_new_context_ldt(struct mm_struct *mm) in init_new_context_ldt() argument
57 mm->context.ldt = NULL; in init_new_context_ldt()
58 init_rwsem(&mm->context.ldt_usr_sem); in init_new_context_ldt()
60 int ldt_dup_context(struct mm_struct *oldmm, struct mm_struct *mm);
61 void destroy_context_ldt(struct mm_struct *mm);
62 void ldt_arch_exit_mmap(struct mm_struct *mm);
64 static inline void init_new_context_ldt(struct mm_struct *mm) { } in init_new_context_ldt() argument
66 struct mm_struct *mm) in ldt_dup_context() argument
70 static inline void destroy_context_ldt(struct mm_struct *mm) { } in destroy_context_ldt() argument
71 static inline void ldt_arch_exit_mmap(struct mm_struct *mm) { } in ldt_arch_exit_mmap() argument
[all …]
/linux/arch/powerpc/include/asm/
mmu_context.h
18 extern int init_new_context(struct task_struct *tsk, struct mm_struct *mm);
20 extern void destroy_context(struct mm_struct *mm);
24 extern bool mm_iommu_preregistered(struct mm_struct *mm);
25 extern long mm_iommu_new(struct mm_struct *mm,
28 extern long mm_iommu_newdev(struct mm_struct *mm, unsigned long ua,
31 extern long mm_iommu_put(struct mm_struct *mm,
33 extern void mm_iommu_init(struct mm_struct *mm);
34 extern struct mm_iommu_table_group_mem_t *mm_iommu_lookup(struct mm_struct *mm,
36 extern struct mm_iommu_table_group_mem_t *mm_iommu_get(struct mm_struct *mm,
40 extern bool mm_iommu_is_devmem(struct mm_struct *mm, unsigned long hpa,
[all …]
/linux/drivers/gpu/drm/
drm_buddy.c
14 static struct drm_buddy_block *drm_block_alloc(struct drm_buddy *mm, in drm_block_alloc() argument
35 static void drm_block_free(struct drm_buddy *mm, in drm_block_free() argument
41 static void list_insert_sorted(struct drm_buddy *mm, in list_insert_sorted() argument
47 head = &mm->free_list[drm_buddy_block_order(block)]; in list_insert_sorted()
78 static void mark_free(struct drm_buddy *mm, in mark_free() argument
84 list_insert_sorted(mm, block); in mark_free()
120 static unsigned int __drm_buddy_free(struct drm_buddy *mm, in __drm_buddy_free() argument
150 mm->clear_avail -= drm_buddy_block_size(mm, buddy); in __drm_buddy_free()
152 drm_block_free(mm, block); in __drm_buddy_free()
164 __force_merge(struct drm_buddy * mm,u64 start,u64 end,unsigned int min_order) __force_merge() argument
234 drm_buddy_init(struct drm_buddy * mm,u64 size,u64 chunk_size) drm_buddy_init() argument
325 drm_buddy_fini(struct drm_buddy * mm) drm_buddy_fini() argument
351 split_block(struct drm_buddy * mm,struct drm_buddy_block * block) split_block() argument
408 drm_buddy_free_block(struct drm_buddy * mm,struct drm_buddy_block * block) drm_buddy_free_block() argument
420 __drm_buddy_free_list(struct drm_buddy * mm,struct list_head * objects,bool mark_clear,bool mark_dirty) __drm_buddy_free_list() argument
440 drm_buddy_free_list_internal(struct drm_buddy * mm,struct list_head * objects) drm_buddy_free_list_internal() argument
458 drm_buddy_free_list(struct drm_buddy * mm,struct list_head * objects,unsigned int flags) drm_buddy_free_list() argument
476 __alloc_range_bias(struct drm_buddy * mm,u64 start,u64 end,unsigned int order,unsigned long flags,bool fallback) __alloc_range_bias() argument
568 __drm_buddy_alloc_range_bias(struct drm_buddy * mm,u64 start,u64 end,unsigned int order,unsigned long flags) __drm_buddy_alloc_range_bias() argument
586 get_maxblock(struct drm_buddy * mm,unsigned int order,unsigned long flags) get_maxblock() argument
621 alloc_from_freelist(struct drm_buddy * mm,unsigned int order,unsigned long flags) alloc_from_freelist() argument
685 __alloc_range(struct drm_buddy * mm,struct list_head * dfs,u64 start,u64 size,struct list_head * blocks,u64 * total_allocated_on_err) __alloc_range() argument
780 __drm_buddy_alloc_range(struct drm_buddy * mm,u64 start,u64 size,u64 * total_allocated_on_err,struct list_head * blocks) __drm_buddy_alloc_range() argument
796 __alloc_contig_try_harder(struct drm_buddy * mm,u64 size,u64 min_block_size,struct list_head * blocks) __alloc_contig_try_harder() argument
868 drm_buddy_block_trim(struct drm_buddy * mm,u64 new_size,struct list_head * blocks) drm_buddy_block_trim() argument
924 __drm_buddy_alloc_blocks(struct drm_buddy * mm,u64 start,u64 end,unsigned int order,unsigned long flags) __drm_buddy_alloc_blocks() argument
958 drm_buddy_alloc_blocks(struct drm_buddy * mm,u64 start,u64 end,u64 size,u64 min_block_size,struct list_head * blocks,unsigned long flags) drm_buddy_alloc_blocks() argument
1109 drm_buddy_block_print(struct drm_buddy * mm,struct drm_buddy_block * block,struct drm_printer * p) drm_buddy_block_print() argument
1126 drm_buddy_print(struct drm_buddy * mm,struct drm_printer * p) drm_buddy_print() argument
[all...]
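The entry points above combine roughly as follows; the sizes and zero flags are arbitrary example values, not taken from drm_buddy.c:

#include <drm/drm_buddy.h>
#include <linux/sizes.h>

static int demo_buddy(void)
{
	struct drm_buddy mm;
	LIST_HEAD(blocks);
	int err;

	/* 16 MiB address space managed in 4 KiB chunks */
	err = drm_buddy_init(&mm, SZ_16M, SZ_4K);
	if (err)
		return err;

	/* a 1 MiB allocation anywhere in the range */
	err = drm_buddy_alloc_blocks(&mm, 0, SZ_16M, SZ_1M, SZ_4K,
				     &blocks, 0);
	if (!err)
		drm_buddy_free_list(&mm, &blocks, 0);

	drm_buddy_fini(&mm);
	return err;
}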
/linux/arch/s390/include/asm/
pgalloc.h
26 struct ptdesc *page_table_alloc_pgste(struct mm_struct *mm);
36 int crst_table_upgrade(struct mm_struct *mm, unsigned long limit);
38 static inline unsigned long check_asce_limit(struct mm_struct *mm, unsigned long addr, in check_asce_limit() argument
43 if (addr + len > mm->context.asce_limit && in check_asce_limit()
45 rc = crst_table_upgrade(mm, addr + len); in check_asce_limit()
52 static inline p4d_t *p4d_alloc_one(struct mm_struct *mm, unsigned long address) in p4d_alloc_one() argument
54 unsigned long *table = crst_table_alloc(mm); in p4d_alloc_one()
61 static inline void p4d_free(struct mm_struct *mm, p4d_t *p4d) in p4d_free() argument
63 if (!mm_p4d_folded(mm)) in p4d_free()
64 crst_table_free(mm, (unsigned long *) p4d); in p4d_free()
[all …]
mmu_context.h
20 struct mm_struct *mm) in init_new_context() argument
24 spin_lock_init(&mm->context.lock); in init_new_context()
25 INIT_LIST_HEAD(&mm->context.gmap_list); in init_new_context()
26 cpumask_clear(&mm->context.cpu_attach_mask); in init_new_context()
27 atomic_set(&mm->context.flush_count, 0); in init_new_context()
28 atomic_set(&mm->context.protected_count, 0); in init_new_context()
29 mm->context.gmap_asce = 0; in init_new_context()
30 mm->context.flush_mm = 0; in init_new_context()
32 mm->context.alloc_pgste = page_table_allocate_pgste || in init_new_context()
34 (current->mm && current->mm->context.alloc_pgste); in init_new_context()
[all …]
/linux/arch/m68k/include/asm/
mmu_context.h
28 static inline void get_mmu_context(struct mm_struct *mm) in get_mmu_context() argument
32 if (mm->context != NO_CONTEXT) in get_mmu_context()
45 mm->context = ctx; in get_mmu_context()
46 context_mm[ctx] = mm; in get_mmu_context()
52 #define init_new_context(tsk, mm) (((mm)->context = NO_CONTEXT), 0) argument
58 static inline void destroy_context(struct mm_struct *mm) in destroy_context() argument
60 if (mm->context != NO_CONTEXT) { in destroy_context()
61 clear_bit(mm->context, context_map); in destroy_context()
62 mm->context = NO_CONTEXT; in destroy_context()
75 get_mmu_context(tsk->mm); in switch_mm()
[all …]
/linux/mm/
mmu_notifier.c
189 interval_sub->mm->notifier_subscriptions; in mmu_interval_read_begin()
262 struct mm_struct *mm) in mn_itree_release() argument
267 .mm = mm, in mn_itree_release()
300 struct mm_struct *mm) in mn_hlist_release() argument
319 subscription->ops->release(subscription, mm); in mn_hlist_release()
348 void __mmu_notifier_release(struct mm_struct *mm) in __mmu_notifier_release() argument
351 mm->notifier_subscriptions; in __mmu_notifier_release()
354 mn_itree_release(subscriptions, mm); in __mmu_notifier_release()
357 mn_hlist_release(subscriptions, mm); in __mmu_notifier_release()
365 int __mmu_notifier_clear_flush_young(struct mm_struct *mm, in __mmu_notifier_clear_flush_young() argument
[all …]
debug.c
3 * mm/debug.c
5 * mm/ specific debug routines.
10 #include <linux/mm.h>
161 pr_emerg("vma %px start %px end %px mm %px\n" in dump_vma()
173 void dump_mm(const struct mm_struct *mm) in dump_mm()
175 pr_emerg("mm %px task_size %lu\n" in dump_mm()
200 mm, mm->task_size, in dump_mm()
201 mm->mmap_base, mm in dump_mm()
180 dump_mm(const struct mm_struct * mm) dump_mm() argument
[all...]
/linux/drivers/gpu/drm/tests/
drm_buddy_test.c
31 struct drm_buddy mm; in drm_test_buddy_alloc_range_bias() local
41 KUNIT_ASSERT_FALSE_MSG(test, drm_buddy_init(&mm, mm_size, ps), in drm_test_buddy_alloc_range_bias()
65 drm_buddy_alloc_blocks(&mm, bias_start, in drm_test_buddy_alloc_range_bias()
74 drm_buddy_alloc_blocks(&mm, bias_start, in drm_test_buddy_alloc_range_bias()
83 drm_buddy_alloc_blocks(&mm, bias_start + ps, in drm_test_buddy_alloc_range_bias()
92 drm_buddy_alloc_blocks(&mm, bias_start + ps, in drm_test_buddy_alloc_range_bias()
102 drm_buddy_alloc_blocks(&mm, bias_start, in drm_test_buddy_alloc_range_bias()
108 drm_buddy_free_list(&mm, &tmp, 0); in drm_test_buddy_alloc_range_bias()
112 drm_buddy_alloc_blocks(&mm, bias_start, in drm_test_buddy_alloc_range_bias()
118 drm_buddy_free_list(&mm, in drm_test_buddy_alloc_range_bias()
269 struct drm_buddy mm; drm_test_buddy_alloc_clear() local
408 struct drm_buddy mm; drm_test_buddy_alloc_contiguous() local
496 struct drm_buddy mm; drm_test_buddy_alloc_pathological() local
576 struct drm_buddy mm; drm_test_buddy_alloc_pessimistic() local
671 struct drm_buddy mm; drm_test_buddy_alloc_optimistic() local
717 struct drm_buddy mm; drm_test_buddy_alloc_limit() local
[all...]
drm_mm_test.c
38 static bool assert_no_holes(struct kunit *test, const struct drm_mm *mm) in assert_no_holes() argument
45 drm_mm_for_each_hole(hole, mm, hole_start, hole_end) in assert_no_holes()
53 drm_mm_for_each_node(hole, mm) { in assert_no_holes()
63 static bool assert_one_hole(struct kunit *test, const struct drm_mm *mm, u64 start, u64 end) in assert_one_hole() argument
74 drm_mm_for_each_hole(hole, mm, hole_start, hole_end) { in assert_one_hole()
103 static bool assert_node(struct kunit *test, struct drm_mm_node *node, struct drm_mm *mm, in assert_node() argument
108 if (!drm_mm_node_allocated(node) || node->mm != mm) { in assert_node()
138 struct drm_mm mm; in drm_test_mm_init() local
142 memset(&mm, 0, sizeof(mm)); in drm_test_mm_init()
143 KUNIT_ASSERT_FALSE_MSG(test, drm_mm_initialized(&mm), in drm_test_mm_init()
[all …]
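The core calls the tests above exercise, reduced to a sketch (the 4096-byte node size is an example value):

#include <drm/drm_mm.h>

static int demo_drm_mm(u64 range_size)
{
	struct drm_mm mm;
	struct drm_mm_node node = {};
	int err;

	drm_mm_init(&mm, 0, range_size);

	err = drm_mm_insert_node(&mm, &node, 4096);
	if (!err)
		drm_mm_remove_node(&node);

	drm_mm_takedown(&mm);	/* the manager must be empty here */
	return err;
}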
/linux/arch/powerpc/mm/book3s64/
mmu_context.c
95 static int hash__init_new_context(struct mm_struct *mm) in hash__init_new_context() argument
99 mm->context.hash_context = kmalloc(sizeof(struct hash_mm_context), in hash__init_new_context()
101 if (!mm->context.hash_context) in hash__init_new_context()
118 if (mm->context.id == 0) { in hash__init_new_context()
119 memset(mm->context.hash_context, 0, sizeof(struct hash_mm_context)); in hash__init_new_context()
120 slice_init_new_context_exec(mm); in hash__init_new_context()
123 …memcpy(mm->context.hash_context, current->mm->context.hash_context, sizeof(struct hash_mm_context)… in hash__init_new_context()
126 if (current->mm->context.hash_context->spt) { in hash__init_new_context()
127 mm->context.hash_context->spt = kmalloc(sizeof(struct subpage_prot_table), in hash__init_new_context()
129 if (!mm->context.hash_context->spt) { in hash__init_new_context()
[all …]
slice.c
15 #include <linux/mm.h>
21 #include <linux/sched/mm.h>
86 static int slice_area_is_free(struct mm_struct *mm, unsigned long addr, in slice_area_is_free() argument
91 if ((mm_ctx_slb_addr_limit(&mm->context) - len) < addr) in slice_area_is_free()
93 vma = find_vma(mm, addr); in slice_area_is_free()
97 static int slice_low_has_vma(struct mm_struct *mm, unsigned long slice) in slice_low_has_vma() argument
99 return !slice_area_is_free(mm, slice << SLICE_LOW_SHIFT, in slice_low_has_vma()
103 static int slice_high_has_vma(struct mm_struct *mm, unsigned long slice) in slice_high_has_vma() argument
114 return !slice_area_is_free(mm, start, end - start); in slice_high_has_vma()
117 static void slice_mask_for_free(struct mm_struct *mm, struc argument
138 slice_check_range_fits(struct mm_struct * mm,const struct slice_mask * available,unsigned long start,unsigned long len) slice_check_range_fits() argument
173 struct mm_struct *mm = parm; slice_flush_segments() local
187 slice_convert(struct mm_struct * mm,const struct slice_mask * mask,int psize) slice_convert() argument
278 slice_find_area_bottomup(struct mm_struct * mm,unsigned long addr,unsigned long len,const struct slice_mask * available,int psize,unsigned long high_limit) slice_find_area_bottomup() argument
322 slice_find_area_topdown(struct mm_struct * mm,unsigned long addr,unsigned long len,const struct slice_mask * available,int psize,unsigned long high_limit) slice_find_area_topdown() argument
380 slice_find_area(struct mm_struct * mm,unsigned long len,const struct slice_mask * mask,int psize,int topdown,unsigned long high_limit) slice_find_area() argument
436 struct mm_struct *mm = current->mm; slice_get_unmapped_area() local
664 get_slice_psize(struct mm_struct * mm,unsigned long addr) get_slice_psize() argument
683 slice_init_new_context_exec(struct mm_struct * mm) slice_init_new_context_exec() argument
719 struct mm_struct *mm = current->mm; slice_setup_new_exec() local
729 slice_set_range_psize(struct mm_struct * mm,unsigned long start,unsigned long len,unsigned int psize) slice_set_range_psize() argument
760 slice_is_hugepage_only_range(struct mm_struct * mm,unsigned long addr,unsigned long len) slice_is_hugepage_only_range() argument
[all...]
/linux/Documentation/core-api/
mm-api.rst
14 .. kernel-doc:: mm/gup.c
40 .. kernel-doc:: mm/slub.c
43 .. kernel-doc:: mm/slab_common.c
46 .. kernel-doc:: mm/util.c
52 .. kernel-doc:: mm/vmalloc.c
61 .. kernel-doc:: mm/filemap.c
67 .. kernel-doc:: mm/readahead.c
70 .. kernel-doc:: mm/readahead.c
76 .. kernel-doc:: mm/page-writeback.c
82 .. kernel-doc:: mm/truncate.c
[all …]
/linux/arch/s390/mm/
pgtable.c
47 static inline void ptep_ipte_local(struct mm_struct *mm, unsigned long addr, in ptep_ipte_local() argument
54 asce = READ_ONCE(mm->context.gmap_asce); in ptep_ipte_local()
58 asce = asce ? : mm->context.asce; in ptep_ipte_local()
67 static inline void ptep_ipte_global(struct mm_struct *mm, unsigned long addr, in ptep_ipte_global() argument
74 asce = READ_ONCE(mm->context.gmap_asce); in ptep_ipte_global()
78 asce = asce ? : mm->context.asce; in ptep_ipte_global()
87 static inline pte_t ptep_flush_direct(struct mm_struct *mm, in ptep_flush_direct() argument
96 atomic_inc(&mm->context.flush_count); in ptep_flush_direct()
98 cpumask_equal(mm_cpumask(mm), cpumask_of(smp_processor_id()))) in ptep_flush_direct()
99 ptep_ipte_local(mm, addr, ptep, nodat); in ptep_flush_direct()
[all …]
/linux/arch/sparc/include/asm/
mmu_context_64.h
24 void get_new_mmu_context(struct mm_struct *mm);
27 int init_new_context(struct task_struct *tsk, struct mm_struct *mm);
29 void destroy_context(struct mm_struct *mm);
37 static inline void tsb_context_switch_ctx(struct mm_struct *mm, in tsb_context_switch_ctx() argument
40 __tsb_context_switch(__pa(mm->pgd), in tsb_context_switch_ctx()
41 &mm->context.tsb_block[MM_TSB_BASE], in tsb_context_switch_ctx()
43 (mm->context.tsb_block[MM_TSB_HUGE].tsb ? in tsb_context_switch_ctx()
44 &mm->context.tsb_block[MM_TSB_HUGE] : in tsb_context_switch_ctx()
49 , __pa(&mm->context.tsb_descr[MM_TSB_BASE]), in tsb_context_switch_ctx()
55 void tsb_grow(struct mm_struct *mm,
[all …]
/linux/arch/arm/include/asm/
mmu_context.h
24 void __check_vmalloc_seq(struct mm_struct *mm);
27 static inline void check_vmalloc_seq(struct mm_struct *mm) in check_vmalloc_seq() argument
30 unlikely(atomic_read(&mm->context.vmalloc_seq) != in check_vmalloc_seq()
32 __check_vmalloc_seq(mm); in check_vmalloc_seq()
38 void check_and_switch_context(struct mm_struct *mm, struct task_struct *tsk);
42 init_new_context(struct task_struct *tsk, struct mm_struct *mm) in init_new_context() argument
44 atomic64_set(&mm->context.id, 0); in init_new_context()
49 void a15_erratum_get_cpumask(int this_cpu, struct mm_struct *mm,
52 static inline void a15_erratum_get_cpumask(int this_cpu, struct mm_struct *mm, in a15_erratum_get_cpumask() argument
62 static inline void check_and_switch_context(struct mm_struct *mm, in check_and_switch_context() argument
[all …]
/linux/arch/x86/kernel/
ldt.c
42 void load_mm_ldt(struct mm_struct *mm) in load_mm_ldt() argument
47 ldt = READ_ONCE(mm->context.ldt); in load_mm_ldt()
138 struct mm_struct *mm = __mm; in flush_ldt() local
140 if (this_cpu_read(cpu_tlbstate.loaded_mm) != mm) in flush_ldt()
143 load_mm_ldt(mm); in flush_ldt()
189 static void do_sanity_check(struct mm_struct *mm, in do_sanity_check() argument
193 if (mm->context.ldt) { in do_sanity_check()
234 static void map_ldt_struct_to_user(struct mm_struct *mm) in map_ldt_struct_to_user() argument
236 pgd_t *k_pgd = pgd_offset(mm, LDT_BASE_ADDR); in map_ldt_struct_to_user()
243 if (boot_cpu_has(X86_FEATURE_PTI) && !mm->context.ldt) in map_ldt_struct_to_user()
[all …]
/linux/arch/sparc/mm/
tlb.c
2 /* arch/sparc64/mm/tlb.c
9 #include <linux/mm.h>
26 struct mm_struct *mm = tb->mm; in flush_tlb_pending() local
33 if (CTX_VALID(mm->context)) { in flush_tlb_pending()
35 global_flush_tlb_page(mm, tb->vaddrs[0]); in flush_tlb_pending()
38 smp_flush_tlb_pending(tb->mm, tb->tlb_nr, in flush_tlb_pending()
41 __flush_tlb_pending(CTX_HWBITS(tb->mm->context), in flush_tlb_pending()
69 static void tlb_batch_add_one(struct mm_struct *mm, unsigned long vaddr, in tlb_batch_add_one() argument
81 if (unlikely(nr != 0 && mm != tb->mm)) { in tlb_batch_add_one()
112 tlb_batch_add(struct mm_struct * mm,unsigned long vaddr,pte_t * ptep,pte_t orig,int fullmm,unsigned int hugepage_shift) tlb_batch_add() argument
147 tlb_batch_pmd_scan(struct mm_struct * mm,unsigned long vaddr,pmd_t pmd) tlb_batch_pmd_scan() argument
170 __set_pmd_acct(struct mm_struct * mm,unsigned long addr,pmd_t orig,pmd_t pmd) __set_pmd_acct() argument
222 set_pmd_at(struct mm_struct * mm,unsigned long addr,pmd_t * pmdp,pmd_t pmd) set_pmd_at() argument
268 pgtable_trans_huge_deposit(struct mm_struct * mm,pmd_t * pmdp,pgtable_t pgtable) pgtable_trans_huge_deposit() argument
283 pgtable_trans_huge_withdraw(struct mm_struct * mm,pmd_t * pmdp) pgtable_trans_huge_withdraw() argument
[all...]
tsb.c
121 struct mm_struct *mm = tb->mm; in flush_tsb_user() local
124 spin_lock_irqsave(&mm->context.lock, flags); in flush_tsb_user()
127 base = (unsigned long) mm->context.tsb_block[MM_TSB_BASE].tsb; in flush_tsb_user()
128 nentries = mm->context.tsb_block[MM_TSB_BASE].tsb_nentries; in flush_tsb_user()
140 else if (mm->context.tsb_block[MM_TSB_HUGE].tsb) { in flush_tsb_user()
141 base = (unsigned long) mm->context.tsb_block[MM_TSB_HUGE].tsb; in flush_tsb_user()
142 nentries = mm->context.tsb_block[MM_TSB_HUGE].tsb_nentries; in flush_tsb_user()
149 spin_unlock_irqrestore(&mm->context.lock, flags); in flush_tsb_user()
152 void flush_tsb_user_page(struct mm_struct *mm, unsigned long vaddr, in flush_tsb_user_page() argument
157 spin_lock_irqsave(&mm->context.lock, flags); in flush_tsb_user_page()
[all …]
/linux/include/linux/sched/
mm.h
20 * @mm: The &struct mm_struct to pin.
22 * Make sure that @mm will not get freed even after the owning task
27 * This is a preferred way to pin @mm for a longer/unbounded amount
32 * See also <Documentation/mm/active_mm.rst> for an in-depth explanation
35 static inline void mmgrab(struct mm_struct *mm) in mmgrab()
37 atomic_inc(&mm->mm_count); in mmgrab()
45 extern void __mmdrop(struct mm_struct *mm);
47 static inline void mmdrop(struct mm_struct *mm) in mmdrop()
54 if (unlikely(atomic_dec_and_test(&mm->mm_count))) in mmdrop()
55 __mmdrop(mm); in mmdrop()
34 mmgrab(struct mm_struct * mm) mmgrab() argument
46 mmdrop(struct mm_struct * mm) mmdrop() argument
64 struct mm_struct *mm = container_of(rhp, struct mm_struct, delayed_drop); __mmdrop_delayed() local
73 mmdrop_sched(struct mm_struct * mm) mmdrop_sched() argument
80 mmdrop_sched(struct mm_struct * mm) mmdrop_sched() argument
87 mmgrab_lazy_tlb(struct mm_struct * mm) mmgrab_lazy_tlb() argument
93 mmdrop_lazy_tlb(struct mm_struct * mm) mmdrop_lazy_tlb() argument
106 mmdrop_lazy_tlb_sched(struct mm_struct * mm) mmdrop_lazy_tlb_sched() argument
130 mmget(struct mm_struct * mm) mmget() argument
135 mmget_not_zero(struct mm_struct * mm) mmget_not_zero() argument
165 mm_update_next_owner(struct mm_struct * mm) mm_update_next_owner() argument
198 arch_pick_mmap_layout(struct mm_struct * mm,struct rlimit * rlim_stack) arch_pick_mmap_layout() argument
526 membarrier_mm_sync_core_before_usermode(struct mm_struct * mm) membarrier_mm_sync_core_before_usermode() argument
548 membarrier_exec_mmap(struct mm_struct * mm) membarrier_exec_mmap() argument
551 membarrier_mm_sync_core_before_usermode(struct mm_struct * mm) membarrier_mm_sync_core_before_usermode() argument
[all...]
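The two reference counts this header distinguishes, shown side by side; the surrounding function is illustrative:

#include <linux/sched/mm.h>

static void demo_mm_refs(struct mm_struct *mm)
{
	/* mm_count (mmgrab/mmdrop): pins the mm_struct itself */
	mmgrab(mm);
	/* ... the struct cannot be freed, but user pages may still go ... */
	mmdrop(mm);

	/* mm_users (mmget_not_zero/mmput): pins the whole address space */
	if (mmget_not_zero(mm)) {
		/* ... safe to access the user mappings ... */
		mmput(mm);
	}
}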
/linux/drivers/gpu/drm/i915/gem/
i915_gem_shrinker.c
37 return swap_available() || obj->mm.madv == I915_MADV_DONTNEED; in can_release_pages()
113 { &i915->mm.purge_list, ~0u }, in i915_gem_shrink()
115 &i915->mm.shrink_list, in i915_gem_shrink()
194 spin_lock_irqsave(&i915->mm.obj_lock, flags); in i915_gem_shrink()
198 mm.link))) { in i915_gem_shrink()
199 list_move_tail(&obj->mm.link, &still_in_list); in i915_gem_shrink()
202 !is_vmalloc_addr(obj->mm.mapping)) in i915_gem_shrink()
215 spin_unlock_irqrestore(&i915->mm.obj_lock, flags); in i915_gem_shrink()
239 spin_lock_irqsave(&i915->mm.obj_lock, flags); in i915_gem_shrink()
244 spin_unlock_irqrestore(&i915->mm.obj_lock, flags); in i915_gem_shrink()
[all …]
/linux/drivers/net/ethernet/mscc/
ocelot_mm.c
55 struct ocelot_mm_state *mm = &ocelot->mm[port]; in ocelot_port_update_active_preemptible_tcs() local
66 ocelot_port->speed == SPEED_1000) && mm->tx_active) in ocelot_port_update_active_preemptible_tcs()
67 val = mm->preemptible_tcs; in ocelot_port_update_active_preemptible_tcs()
75 mm->active_preemptible_tcs = val; in ocelot_port_update_active_preemptible_tcs()
82 mm->tx_active ? "active" : "inactive", mm->preemptible_tcs, in ocelot_port_update_active_preemptible_tcs()
83 mm->active_preemptible_tcs); in ocelot_port_update_active_preemptible_tcs()
93 struct ocelot_mm_state *mm = &ocelot->mm[port]; in ocelot_port_change_fp() local
97 if (mm->preemptible_tcs == preemptible_tcs) in ocelot_port_change_fp()
100 mm->preemptible_tcs = preemptible_tcs; in ocelot_port_change_fp()
108 struct ocelot_mm_state *mm = &ocelot->mm[port]; in ocelot_mm_update_port_status() local
[all …]
