
Searched refs:mn (Results 1 – 25 of 42) sorted by relevance

/linux/drivers/clk/qcom/
clk-rcg.c
106 static u32 md_to_m(struct mn *mn, u32 md) in md_to_m() argument
108 md >>= mn->m_val_shift; in md_to_m()
109 md &= BIT(mn->width) - 1; in md_to_m()
132 static u32 mn_to_md(struct mn *mn, u32 m, u32 n, u32 md) in mn_to_md() argument
136 mask_w = BIT(mn->width) - 1; in mn_to_md()
137 mask = (mask_w << mn->m_val_shift) | mask_w; in mn_to_md()
141 m <<= mn->m_val_shift; in mn_to_md()
149 static u32 ns_m_to_n(struct mn *mn, u32 ns, u32 m) in ns_m_to_n() argument
151 ns = ~ns >> mn->n_val_shift; in ns_m_to_n()
152 ns &= BIT(mn->width) - 1; in ns_m_to_n()
[all …]
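
The clk-rcg.c hits above read and write the M/N counter fields of a Qualcomm RCG's MD register. Below is a minimal user-space sketch of the same bit manipulation; struct mn_layout, the width/shift values and main() are illustrative stand-ins for the driver's struct mn, not a real register layout.

#include <stdint.h>
#include <stdio.h>

/* Stand-in for the driver's struct mn: M occupies `width` bits starting at
 * m_val_shift, the complemented N field occupies the low `width` bits. */
struct mn_layout {
	uint8_t width;
	uint8_t m_val_shift;
};

/* Extract M from an MD register value, as md_to_m() does. */
static uint32_t md_to_m(const struct mn_layout *mn, uint32_t md)
{
	md >>= mn->m_val_shift;
	md &= (1u << mn->width) - 1;
	return md;
}

/* Pack M and N back into MD, following the mn_to_md() logic shown above. */
static uint32_t mn_to_md(const struct mn_layout *mn, uint32_t m, uint32_t n,
			 uint32_t md)
{
	uint32_t mask_w = (1u << mn->width) - 1;
	uint32_t mask = (mask_w << mn->m_val_shift) | mask_w;

	md &= ~mask;			/* clear the old M and N fields */
	if (n) {
		m <<= mn->m_val_shift;
		md |= m;
		md |= ~n & mask_w;	/* low field holds the complement of n */
	}
	return md;
}

int main(void)
{
	struct mn_layout mn = { .width = 8, .m_val_shift = 8 };
	uint32_t md = mn_to_md(&mn, 3, 200, 0);

	printf("md=0x%x m=%u\n", (unsigned)md, (unsigned)md_to_m(&mn, md));
	return 0;
}
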
gcc-msm8660.c
106 .mn = {
157 .mn = {
208 .mn = {
259 .mn = {
310 .mn = {
361 .mn = {
412 .mn = {
463 .mn = {
514 .mn = {
565 .mn = {
[all …]
gcc-mdm9615.c
188 .mn = {
239 .mn = {
290 .mn = {
341 .mn = {
392 .mn = {
455 .mn = {
506 .mn = {
557 .mn = {
608 .mn = {
659 .mn = {
[all …]
gcc-ipq806x.c
414 .mn = {
465 .mn = {
516 .mn = {
567 .mn = {
618 .mn = {
669 .mn = {
733 .mn = {
784 .mn = {
835 .mn = {
886 .mn = {
[all …]
lcc-ipq806x.c
113 .mn = {
230 .mn = {
314 .mn = {
371 .mn = {
lcc-msm8960.c
96 .mn = { \
258 .mn = {
328 .mn = {
/linux/drivers/misc/sgi-gru/
grutlbpurge.c
207 static int gru_invalidate_range_start(struct mmu_notifier *mn, in gru_invalidate_range_start() argument
210 struct gru_mm_struct *gms = container_of(mn, struct gru_mm_struct, in gru_invalidate_range_start()
222 static void gru_invalidate_range_end(struct mmu_notifier *mn, in gru_invalidate_range_end() argument
225 struct gru_mm_struct *gms = container_of(mn, struct gru_mm_struct, in gru_invalidate_range_end()
250 static void gru_free_notifier(struct mmu_notifier *mn) in gru_free_notifier() argument
252 kfree(container_of(mn, struct gru_mm_struct, ms_notifier)); in gru_free_notifier()
265 struct mmu_notifier *mn; in gru_register_mmu_notifier() local
267 mn = mmu_notifier_get_locked(&gru_mmuops, current->mm); in gru_register_mmu_notifier()
268 if (IS_ERR(mn)) in gru_register_mmu_notifier()
269 return ERR_CAST(mn); in gru_register_mmu_notifier()
[all …]
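
Each GRU callback above receives only the embedded mmu_notifier and recovers its driver-private state with container_of(). A self-contained user-space sketch of that pattern follows; struct my_mm_struct, private_data and callback() are hypothetical names, and only the embedding trick is the point.

#include <stddef.h>
#include <stdio.h>

/* Minimal stand-in for the kernel type; only the embedding matters here. */
struct mmu_notifier {
	int unused;
};

/* Hypothetical driver state with an embedded notifier, like gru_mm_struct. */
struct my_mm_struct {
	long private_data;
	struct mmu_notifier ms_notifier;
};

/* container_of(): subtract the member's offset to get the enclosing struct. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* A callback sees only the embedded notifier, as gru_invalidate_range_start()
 * does, and walks back to the outer structure. */
static void callback(struct mmu_notifier *mn)
{
	struct my_mm_struct *gms =
		container_of(mn, struct my_mm_struct, ms_notifier);

	printf("private_data = %ld\n", gms->private_data);
}

int main(void)
{
	struct my_mm_struct gms = { .private_data = 42 };

	callback(&gms.ms_notifier);
	return 0;
}
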
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/fb/
ram.c
35 struct nvkm_mm_node *mn; member
52 .mem = vram->mn, in nvkm_vram_map()
61 return (u64)nvkm_mm_size(nvkm_vram(memory)->mn) << NVKM_RAM_MM_SHIFT; in nvkm_vram_size()
68 if (!nvkm_mm_contiguous(vram->mn)) in nvkm_vram_addr()
70 return (u64)nvkm_mm_addr(vram->mn) << NVKM_RAM_MM_SHIFT; in nvkm_vram_addr()
89 struct nvkm_mm_node *next = vram->mn; in nvkm_vram_dtor()
101 kfree(vram->mn); in nvkm_vram_dtor()
138 vram->mn = kzalloc(sizeof(*vram->mn), GFP_KERNEL); in nvkm_ram_wrap()
139 if (!vram->mn) in nvkm_ram_wrap()
142 vram->mn->offset = addr >> NVKM_RAM_MM_SHIFT; in nvkm_ram_wrap()
[all …]
/linux/drivers/iommu/amd/
pasid.c
59 static void sva_arch_invalidate_secondary_tlbs(struct mmu_notifier *mn, in sva_arch_invalidate_secondary_tlbs() argument
67 sva_pdom = container_of(mn, struct protection_domain, mn); in sva_arch_invalidate_secondary_tlbs()
80 static void sva_mn_release(struct mmu_notifier *mn, struct mm_struct *mm) in sva_mn_release() argument
86 sva_pdom = container_of(mn, struct protection_domain, mn); in sva_mn_release()
171 if (sva_pdom->mn.ops) in iommu_sva_domain_free()
172 mmu_notifier_unregister(&sva_pdom->mn, domain->mm); in iommu_sva_domain_free()
193 pdom->mn.ops = &sva_mn; in amd_iommu_domain_alloc_sva()
196 ret = mmu_notifier_register(&pdom->mn, mm); in amd_iommu_domain_alloc_sva()
/linux/drivers/infiniband/hw/hfi1/
mmu_rb.c
59 INIT_HLIST_NODE(&h->mn.hlist); in hfi1_mmu_rb_register()
61 h->mn.ops = &mn_opts; in hfi1_mmu_rb_register()
68 ret = mmu_notifier_register(&h->mn, current->mm); in hfi1_mmu_rb_register()
86 mmgrab(handler->mn.mm); in hfi1_mmu_rb_unregister()
89 mmu_notifier_unregister(&handler->mn, handler->mn.mm); in hfi1_mmu_rb_unregister()
115 mmdrop(handler->mn.mm); in hfi1_mmu_rb_unregister()
129 if (current->mm != handler->mn.mm) in hfi1_mmu_rb_insert()
233 if (current->mm != handler->mn.mm) in hfi1_mmu_rb_evict()
261 static int mmu_notifier_range_start(struct mmu_notifier *mn, in mmu_notifier_range_start() argument
265 container_of(mn, struct mmu_rb_handler, mn); in mmu_notifier_range_start()
mmu_rb.h
39 struct mmu_notifier mn; member
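
The hfi1 hits show the usual notifier lifecycle: set the ops, register against current->mm, and on teardown pin the mm with mmgrab() across mmu_notifier_unregister() before mmdrop(). The kernel-context sketch below compresses that sequence; struct my_handler, my_ops, my_register() and my_unregister() are hypothetical, and the callbacks are omitted.

#include <linux/mmu_notifier.h>
#include <linux/sched.h>
#include <linux/sched/mm.h>

/* Hypothetical wrapper around a notifier, in the spirit of mmu_rb_handler. */
struct my_handler {
	struct mmu_notifier mn;
};

static const struct mmu_notifier_ops my_ops; /* callbacks omitted for brevity */

static int my_register(struct my_handler *h)
{
	h->mn.ops = &my_ops;
	/* Tie the notifier to the calling task's address space. */
	return mmu_notifier_register(&h->mn, current->mm);
}

static void my_unregister(struct my_handler *h)
{
	/* Keep the mm_struct alive across unregister, as hfi1 does,
	 * then drop the reference once the notifier is gone. */
	mmgrab(h->mn.mm);
	mmu_notifier_unregister(&h->mn, h->mn.mm);
	mmdrop(h->mn.mm);
}
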
/linux/drivers/gpu/drm/nouveau/nvkm/core/
memory.c
38 nvkm_mm_free(&fb->tags.mm, &tags->mn); in nvkm_memory_tags_put()
61 if (tags->mn && tags->mn->length != nr) { in nvkm_memory_tags_get()
77 if (!nvkm_mm_head(&fb->tags.mm, 0, 1, nr, nr, 1, &tags->mn)) { in nvkm_memory_tags_get()
79 clr(device, tags->mn->offset, tags->mn->length); in nvkm_memory_tags_get()
90 tags->mn = NULL; in nvkm_memory_tags_get()
/linux/drivers/iommu/intel/
svm.c
51 static void intel_arch_invalidate_secondary_tlbs(struct mmu_notifier *mn, in intel_arch_invalidate_secondary_tlbs() argument
55 struct dmar_domain *domain = container_of(mn, struct dmar_domain, notifier); in intel_arch_invalidate_secondary_tlbs()
70 static void intel_mm_release(struct mmu_notifier *mn, struct mm_struct *mm) in intel_mm_release() argument
72 struct dmar_domain *domain = container_of(mn, struct dmar_domain, notifier); in intel_mm_release()
99 static void intel_mm_free_notifier(struct mmu_notifier *mn) in intel_mm_free_notifier() argument
101 struct dmar_domain *domain = container_of(mn, struct dmar_domain, notifier); in intel_mm_free_notifier()
/linux/drivers/iommu/arm/arm-smmu-v3/
arm-smmu-v3-sva.c
134 static void arm_smmu_mm_arch_invalidate_secondary_tlbs(struct mmu_notifier *mn, in arm_smmu_mm_arch_invalidate_secondary_tlbs() argument
140 container_of(mn, struct arm_smmu_domain, mmu_notifier); in arm_smmu_mm_arch_invalidate_secondary_tlbs()
166 static void arm_smmu_mm_release(struct mmu_notifier *mn, struct mm_struct *mm) in arm_smmu_mm_release() argument
169 container_of(mn, struct arm_smmu_domain, mmu_notifier); in arm_smmu_mm_release()
198 static void arm_smmu_mmu_notifier_free(struct mmu_notifier *mn) in arm_smmu_mmu_notifier_free() argument
200 kfree(container_of(mn, struct arm_smmu_domain, mmu_notifier)); in arm_smmu_mmu_notifier_free()
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/
gk20a.c
53 struct nvkm_mm_node *mn; member
130 return (u64)gk20a_instobj(memory)->mn->offset << 12; in gk20a_instobj_addr()
136 return (u64)gk20a_instobj(memory)->mn->length << 12; in gk20a_instobj_size()
288 .mem = node->mn, in gk20a_instobj_map()
304 dma_free_attrs(dev, (u64)node->base.mn->length << PAGE_SHIFT, in gk20a_instobj_dtor_dma()
317 struct nvkm_mm_node *r = node->base.mn; in gk20a_instobj_dtor_iommu()
335 for (i = 0; i < node->base.mn->length; i++) { in gk20a_instobj_dtor_iommu()
416 node->base.mn = &node->r; in gk20a_instobj_ctor_dma()
494 node->base.mn = r; in gk20a_instobj_ctor_iommu()
543 size, align, (u64)node->mn->offset << 12); in gk20a_instobj_new()
/linux/drivers/gpu/drm/radeon/
radeon_mn.c
49 static bool radeon_mn_invalidate(struct mmu_interval_notifier *mn, in radeon_mn_invalidate() argument
53 struct radeon_bo *bo = container_of(mn, struct radeon_bo, notifier); in radeon_mn_invalidate()
/linux/net/sched/
sch_teql.c
230 struct neighbour *mn; in __teql_resolve() local
232 mn = __neigh_lookup_errno(n->tbl, n->primary_key, dev); in __teql_resolve()
234 if (IS_ERR(mn)) in __teql_resolve()
235 return PTR_ERR(mn); in __teql_resolve()
236 n = mn; in __teql_resolve()
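
Both the GRU and teql hits test a returned pointer with IS_ERR() and convert it with PTR_ERR() or ERR_CAST(). The kernel packs small negative errno values into the top page of the pointer range; the self-contained sketch below mirrors that encoding from include/linux/err.h, with a hypothetical lookup() standing in for __neigh_lookup_errno().

#include <errno.h>
#include <stdio.h>

#define MAX_ERRNO 4095

/* Encode -errno as a pointer value in the last page of the address space. */
static inline void *ERR_PTR(long error)
{
	return (void *)error;
}

static inline long PTR_ERR(const void *ptr)
{
	return (long)ptr;
}

static inline int IS_ERR(const void *ptr)
{
	return (unsigned long)ptr >= (unsigned long)-MAX_ERRNO;
}

/* Hypothetical lookup that either returns an object or an encoded error. */
static void *lookup(int ok)
{
	static int object = 1;

	return ok ? (void *)&object : ERR_PTR(-ENOMEM);
}

int main(void)
{
	void *p = lookup(0);

	if (IS_ERR(p))
		printf("lookup failed: %ld\n", PTR_ERR(p));
	return 0;
}
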
/linux/lib/
maple_tree.c
332 static inline void mte_set_node_dead(struct maple_enode *mn) in mte_set_node_dead() argument
334 mte_to_node(mn)->parent = ma_parent_ptr(mte_to_node(mn)); in mte_set_node_dead()
678 static inline void mte_set_pivot(struct maple_enode *mn, unsigned char piv, in mte_set_pivot() argument
681 struct maple_node *node = mte_to_node(mn); in mte_set_pivot()
682 enum maple_type type = mte_node_type(mn); in mte_set_pivot()
706 static inline void __rcu **ma_slots(struct maple_node *mn, enum maple_type mt) in ma_slots() argument
710 return mn->ma64.slot; in ma_slots()
713 return mn->mr64.slot; in ma_slots()
715 return mn->slot; in ma_slots()
799 static inline struct maple_metadata *ma_meta(struct maple_node *mn, in ma_meta() argument
[all …]
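
ma_slots() above switches on the maple node type and hands back the slot array of the matching union member. The user-space sketch below shows only that dispatch shape; the node layout and type names are hypothetical, not the real maple_node.

#include <stdio.h>

enum node_type { TYPE_DENSE, TYPE_RANGE_64, TYPE_ARANGE_64 };

/* Hypothetical node with per-type slot arrays overlaid in a union,
 * loosely modelled on maple_node's mr64/ma64/slot members. */
struct node {
	union {
		struct { void *slot[16]; } mr64;
		struct { void *slot[10]; } ma64;
		void *slot[31];
	};
};

/* Pick the slot array matching the node's type, like ma_slots(). */
static void **node_slots(struct node *n, enum node_type t)
{
	switch (t) {
	case TYPE_ARANGE_64:
		return n->ma64.slot;
	case TYPE_RANGE_64:
		return n->mr64.slot;
	default:
		return n->slot;
	}
}

int main(void)
{
	struct node n;
	static int value = 7;

	node_slots(&n, TYPE_RANGE_64)[0] = &value;
	printf("%d\n", *(int *)node_slots(&n, TYPE_RANGE_64)[0]);
	return 0;
}
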
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_cs.h
68 struct amdgpu_mn *mn; member
/linux/drivers/md/persistent-data/
dm-btree.c
743 struct btree_node *ln, *rn, *mn, *pn; in split_two_into_three() local
768 mn = dm_block_data(middle); in split_two_into_three()
771 mn->header.nr_entries = cpu_to_le32(0); in split_two_into_three()
772 mn->header.flags = ln->header.flags; in split_two_into_three()
773 mn->header.max_entries = ln->header.max_entries; in split_two_into_three()
774 mn->header.value_size = ln->header.value_size; in split_two_into_three()
776 redistribute3(ln, mn, rn); in split_two_into_three()
783 le64_to_cpu(mn->keys[0]), &location); in split_two_into_three()
798 if (key < le64_to_cpu(mn->keys[0])) { in split_two_into_three()
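
split_two_into_three() above initialises a fresh middle node with the left node's flags and limits, then spreads the existing entries across left, middle and right with redistribute3(). The self-contained sketch below shows only that redistribution step over a toy in-memory node; it is not the dm-btree on-disk layout.

#include <stdio.h>
#include <string.h>

#define MAX_ENTRIES 16

/* Toy node: a sorted key array plus a count. */
struct node {
	unsigned int nr_entries;
	unsigned long keys[MAX_ENTRIES];
};

/* Spread the entries held by l and r evenly across l, m and r,
 * in the spirit of redistribute3() in dm-btree.c. */
static void redistribute3(struct node *l, struct node *m, struct node *r)
{
	unsigned long all[2 * MAX_ENTRIES];
	unsigned int total = l->nr_entries + r->nr_entries;
	unsigned int third = total / 3;
	unsigned int i, j = 0;

	memcpy(all, l->keys, l->nr_entries * sizeof(*all));
	memcpy(all + l->nr_entries, r->keys, r->nr_entries * sizeof(*all));

	l->nr_entries = third;
	m->nr_entries = third;
	r->nr_entries = total - 2 * third;

	for (i = 0; i < l->nr_entries; i++)
		l->keys[i] = all[j++];
	for (i = 0; i < m->nr_entries; i++)
		m->keys[i] = all[j++];
	for (i = 0; i < r->nr_entries; i++)
		r->keys[i] = all[j++];
}

int main(void)
{
	struct node l = { .nr_entries = 6, .keys = { 1, 2, 3, 4, 5, 6 } };
	struct node r = { .nr_entries = 6, .keys = { 7, 8, 9, 10, 11, 12 } };
	struct node m = { .nr_entries = 0 };

	redistribute3(&l, &m, &r);
	printf("middle holds %u entries starting at key %lu\n",
	       m.nr_entries, m.keys[0]);
	return 0;
}
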
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
vmmnv50.c
307 if (map->tags->mn) { in nv50_vmm_valid()
308 u32 tags = map->tags->mn->offset + in nv50_vmm_valid()
vmmgf100.c
300 if (!map->no_comp && map->tags->mn) { in gf100_vmm_valid()
301 u64 tags = map->tags->mn->offset + (map->offset >> 17); in gf100_vmm_valid()
/linux/virt/kvm/
kvm_main.c
506 static inline struct kvm *mmu_notifier_to_kvm(struct mmu_notifier *mn) in mmu_notifier_to_kvm() argument
508 return container_of(mn, struct kvm, mmu_notifier); in mmu_notifier_to_kvm()
651 static __always_inline int kvm_age_hva_range(struct mmu_notifier *mn, in kvm_age_hva_range() argument
657 struct kvm *kvm = mmu_notifier_to_kvm(mn); in kvm_age_hva_range()
671 static __always_inline int kvm_age_hva_range_no_flush(struct mmu_notifier *mn, in kvm_age_hva_range_no_flush() argument
676 return kvm_age_hva_range(mn, start, end, handler, false); in kvm_age_hva_range_no_flush()
727 static int kvm_mmu_notifier_invalidate_range_start(struct mmu_notifier *mn, in kvm_mmu_notifier_invalidate_range_start() argument
730 struct kvm *kvm = mmu_notifier_to_kvm(mn); in kvm_mmu_notifier_invalidate_range_start()
803 static void kvm_mmu_notifier_invalidate_range_end(struct mmu_notifier *mn, in kvm_mmu_notifier_invalidate_range_end() argument
806 struct kvm *kvm = mmu_notifier_to_kvm(mn); in kvm_mmu_notifier_invalidate_range_end()
834 kvm_mmu_notifier_clear_flush_young(struct mmu_notifier *mn, struct mm_struct *mm, unsigned long start, unsigned long end) in kvm_mmu_notifier_clear_flush_young() argument
845 kvm_mmu_notifier_clear_young(struct mmu_notifier *mn, struct mm_struct *mm, unsigned long start, unsigned long end) in kvm_mmu_notifier_clear_young() argument
868 kvm_mmu_notifier_test_young(struct mmu_notifier *mn, struct mm_struct *mm, unsigned long address) in kvm_mmu_notifier_test_young() argument
878 kvm_mmu_notifier_release(struct mmu_notifier *mn, struct mm_struct *mm) in kvm_mmu_notifier_release() argument
[all …]
/linux/drivers/video/fbdev/
pxa168fb.h
221 #define CFG_HWC_1BITENA(mn) ((mn) << 25) argument
/linux/tools/testing/selftests/tc-testing/
tdc.py
55 mn = fn[0:-3]
56 foo = importlib.import_module('plugins.' + mn)
57 self.plugins.add(mn)
58 self.plugin_instances[mn] = foo.SubPlugin()
