| /linux/fs/nfsd/ |
| H A D | nfscache.c |
|   35 spinlock_t cache_lock; member
|   161 spin_lock(&b->cache_lock); in nfsd_reply_cache_free()
|   163 spin_unlock(&b->cache_lock); in nfsd_reply_cache_free()
|   207 spin_lock_init(&nn->drc_hashtbl[i].cache_lock); in nfsd_reply_cache_init()
|   267 lockdep_assert_held(&b->cache_lock); in nfsd_prune_bucket_locked()
|   326 spin_lock(&b->cache_lock); in nfsd_reply_cache_scan()
|   328 spin_unlock(&b->cache_lock); in nfsd_reply_cache_scan()
|   493 spin_lock(&b->cache_lock); in nfsd_cache_lookup()
|   500 spin_unlock(&b->cache_lock); in nfsd_cache_lookup()
|   546 spin_unlock(&b->cache_lock); in nfsd_cache_lookup()
|   [all …]
|
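Taken together, the nfsd hits show a per-bucket lock: each hash bucket of the duplicate reply cache has its own spinlock, initialized in nfsd_reply_cache_init(), taken around list manipulation, and documented with lockdep_assert_held() in helpers that expect the caller to already hold it. A minimal sketch of that shape, using hypothetical cache_bucket/cache_entry types rather than the nfsd ones:

```c
#include <linux/spinlock.h>
#include <linux/list.h>
#include <linux/lockdep.h>
#include <linux/slab.h>

/* Hypothetical per-bucket cache, mirroring only the locking shape. */
struct cache_bucket {
	struct list_head lru;
	spinlock_t cache_lock;	/* protects @lru and entry lifetimes */
};

struct cache_entry {
	struct list_head lru;
	unsigned long key;
};

static void cache_bucket_init(struct cache_bucket *b)
{
	INIT_LIST_HEAD(&b->lru);
	spin_lock_init(&b->cache_lock);	/* cf. nfsd_reply_cache_init() */
}

/* Caller must hold b->cache_lock, like nfsd_prune_bucket_locked(). */
static void cache_prune_locked(struct cache_bucket *b)
{
	struct cache_entry *e, *tmp;

	lockdep_assert_held(&b->cache_lock);
	list_for_each_entry_safe(e, tmp, &b->lru, lru) {
		list_del(&e->lru);
		kfree(e);
	}
}

static void cache_entry_free(struct cache_bucket *b, struct cache_entry *e)
{
	spin_lock(&b->cache_lock);	/* cf. nfsd_reply_cache_free() */
	list_del(&e->lru);
	spin_unlock(&b->cache_lock);
	kfree(e);
}
```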
| /linux/drivers/iommu/intel/ |
| H A D | cache.c |
|   67 spin_lock_irqsave(&domain->cache_lock, flags); in cache_tag_assign()
|   72 spin_unlock_irqrestore(&domain->cache_lock, flags); in cache_tag_assign()
|   86 spin_unlock_irqrestore(&domain->cache_lock, flags); in cache_tag_assign()
|   102 spin_lock_irqsave(&domain->cache_lock, flags); in cache_tag_unassign()
|   113 spin_unlock_irqrestore(&domain->cache_lock, flags); in cache_tag_unassign()
|   122 spin_lock_irqsave(&domain->cache_lock, flags); in domain_qi_batch_alloc()
|   130 spin_unlock_irqrestore(&domain->cache_lock, flags); in domain_qi_batch_alloc()
|   445 spin_lock_irqsave(&domain->cache_lock, flags); in cache_tag_flush_range()
|   475 spin_unlock_irqrestore(&domain->cache_lock, flags); in cache_tag_flush_range()
|   508 spin_lock_irqsave(&domain->cache_lock, flags); in cache_tag_flush_range_np()
|   [all …]
|
| H A D | svm.c | 220 spin_lock_init(&domain->cache_lock); in intel_svm_domain_alloc()
|
| H A D | nested.c | 234 spin_lock_init(&domain->cache_lock); in intel_iommu_domain_alloc_nested()
|
| H A D | iommu.h | 620 spinlock_t cache_lock; /* Protect the cache tag list */ member
|
| H A D | iommu.c | 2793 spin_lock_init(&domain->cache_lock); in paging_domain_alloc()
|
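Across the Intel IOMMU entries above, cache_lock is a per-domain spinlock (commented in iommu.h as protecting the cache tag list): every domain constructor calls spin_lock_init() on it, and the assign/unassign/flush paths in cache.c take it with spin_lock_irqsave(). A hedged sketch of that arrangement, using invented demo_domain/cache_tag types rather than the driver's own:

```c
#include <linux/spinlock.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/types.h>
#include <linux/errno.h>

struct cache_tag {
	struct list_head node;
	u16 domain_id;
};

struct demo_domain {
	struct list_head cache_tags;
	spinlock_t cache_lock;		/* protects @cache_tags */
};

static void demo_domain_init(struct demo_domain *domain)
{
	INIT_LIST_HEAD(&domain->cache_tags);
	spin_lock_init(&domain->cache_lock);	/* done in every domain-alloc path */
}

static int demo_tag_assign(struct demo_domain *domain, u16 did, gfp_t gfp)
{
	struct cache_tag *tag;
	unsigned long flags;

	tag = kzalloc(sizeof(*tag), gfp);
	if (!tag)
		return -ENOMEM;
	tag->domain_id = did;

	/* irqsave because flush paths can run with interrupts disabled */
	spin_lock_irqsave(&domain->cache_lock, flags);
	list_add(&tag->node, &domain->cache_tags);
	spin_unlock_irqrestore(&domain->cache_lock, flags);
	return 0;
}

static void demo_tag_flush_all(struct demo_domain *domain)
{
	struct cache_tag *tag;
	unsigned long flags;

	spin_lock_irqsave(&domain->cache_lock, flags);
	list_for_each_entry(tag, &domain->cache_tags, node)
		;	/* issue an invalidation for each tag here */
	spin_unlock_irqrestore(&domain->cache_lock, flags);
}
```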
| /linux/drivers/soc/qcom/ |
| H A D | rpmh.c |
|   119 spin_lock_irqsave(&ctrlr->cache_lock, flags); in cache_rpm_request()
|   154 spin_unlock_irqrestore(&ctrlr->cache_lock, flags); in cache_rpm_request()
|   278 spin_lock_irqsave(&ctrlr->cache_lock, flags); in cache_batch()
|   281 spin_unlock_irqrestore(&ctrlr->cache_lock, flags); in cache_batch()
|   441 if (!spin_trylock(&ctrlr->cache_lock)) in rpmh_flush()
|   478 spin_unlock(&ctrlr->cache_lock); in rpmh_flush()
|   495 spin_lock_irqsave(&ctrlr->cache_lock, flags); in rpmh_invalidate()
|   500 spin_unlock_irqrestore(&ctrlr->cache_lock, flags); in rpmh_invalidate()
|
| H A D | rpmh-internal.h | 82 spinlock_t cache_lock; member
|
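In rpmh.c the controller's cache_lock is taken with spin_lock_irqsave() when requests are cached or invalidated, while rpmh_flush() only tries the lock and gives up if it is contended, leaving the flush to be retried later. A small sketch of that trylock-in-flush idea, with invented names (demo_ctrlr, demo_flush):

```c
#include <linux/spinlock.h>
#include <linux/list.h>
#include <linux/types.h>
#include <linux/errno.h>

struct demo_ctrlr {
	struct list_head cache;
	spinlock_t cache_lock;	/* protects @cache and @dirty */
	bool dirty;
};

static void demo_cache_request(struct demo_ctrlr *ctrlr, struct list_head *req)
{
	unsigned long flags;

	spin_lock_irqsave(&ctrlr->cache_lock, flags);
	list_add_tail(req, &ctrlr->cache);
	ctrlr->dirty = true;
	spin_unlock_irqrestore(&ctrlr->cache_lock, flags);
}

/*
 * Called from a late path (e.g. before sleep): do not spin waiting for
 * the lock, just report -EBUSY and let the caller retry the flush.
 */
static int demo_flush(struct demo_ctrlr *ctrlr)
{
	if (!spin_trylock(&ctrlr->cache_lock))
		return -EBUSY;

	/* ... write the cached requests out to hardware here ... */
	ctrlr->dirty = false;

	spin_unlock(&ctrlr->cache_lock);
	return 0;
}
```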
| /linux/drivers/base/regmap/ |
| H A D | regmap-debugfs.c |
|   114 mutex_lock(&map->cache_lock); in regmap_debugfs_get_dump_start()
|   136 mutex_unlock(&map->cache_lock); in regmap_debugfs_get_dump_start()
|   169 mutex_unlock(&map->cache_lock); in regmap_debugfs_get_dump_start()
|   176 mutex_unlock(&map->cache_lock); in regmap_debugfs_get_dump_start()
|   201 mutex_lock(&map->cache_lock); in regmap_next_readable_reg()
|   210 mutex_unlock(&map->cache_lock); in regmap_next_readable_reg()
|   400 mutex_lock(&map->cache_lock); in regmap_reg_ranges_read_file()
|   412 mutex_unlock(&map->cache_lock); in regmap_reg_ranges_read_file()
|   569 mutex_init(&map->cache_lock); in regmap_debugfs_init()
|   660 mutex_lock(&map->cache_lock); in regmap_debugfs_exit()
|   [all …]
|
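regmap's debugfs code guards its cached register-layout data with a mutex rather than a spinlock, which fits because everything it protects runs in sleepable process context (debugfs reads). A hedged sketch of that style with a hypothetical demo_map structure:

```c
#include <linux/mutex.h>
#include <linux/list.h>

struct demo_map {
	struct list_head debugfs_off_cache;
	struct mutex cache_lock;	/* serializes access to the debugfs cache */
};

static void demo_map_init(struct demo_map *map)
{
	INIT_LIST_HEAD(&map->debugfs_off_cache);
	mutex_init(&map->cache_lock);	/* cf. regmap_debugfs_init() */
}

static void demo_dump_start(struct demo_map *map)
{
	mutex_lock(&map->cache_lock);
	/* ... walk or rebuild the offset cache; this may sleep ... */
	mutex_unlock(&map->cache_lock);
}
```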
| /linux/Documentation/kernel-hacking/ |
| H A D | locking.rst |
|   400 static DEFINE_MUTEX(cache_lock);
|   405 /* Must be holding cache_lock */
|   418 /* Must be holding cache_lock */
|   427 /* Must be holding cache_lock */
|   452 mutex_lock(&cache_lock);
|   454 mutex_unlock(&cache_lock);
|   460 mutex_lock(&cache_lock);
|   462 mutex_unlock(&cache_lock);
|   470 mutex_lock(&cache_lock);
|   476 mutex_unlock(&cache_lock);
|   [all …]
|
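The kernel-hacking locking document uses cache_lock as its running example: an object cache protected by a single DEFINE_MUTEX(cache_lock), with internal helpers marked "Must be holding cache_lock" and public entry points that take and release the mutex around them. (The it_IT entry below is the same example in the Italian translation of that document.) A condensed sketch in that spirit; the object fields and helper names here are illustrative, not a copy of the document's listing:

```c
#include <linux/mutex.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/errno.h>

static DEFINE_MUTEX(cache_lock);
static LIST_HEAD(cache);

struct object {
	struct list_head list;
	int id;
};

/* Must be holding cache_lock */
static struct object *__cache_find(int id)
{
	struct object *obj;

	list_for_each_entry(obj, &cache, list)
		if (obj->id == id)
			return obj;
	return NULL;
}

int cache_add(int id)
{
	struct object *obj = kmalloc(sizeof(*obj), GFP_KERNEL);

	if (!obj)
		return -ENOMEM;
	obj->id = id;

	mutex_lock(&cache_lock);
	list_add(&obj->list, &cache);
	mutex_unlock(&cache_lock);
	return 0;
}

void cache_delete(int id)
{
	struct object *obj;

	mutex_lock(&cache_lock);
	obj = __cache_find(id);
	if (obj)
		list_del(&obj->list);
	mutex_unlock(&cache_lock);
	kfree(obj);	/* kfree(NULL) is a no-op */
}
```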
| /linux/Documentation/translations/it_IT/kernel-hacking/ |
| H A D | locking.rst |
|   420 static DEFINE_MUTEX(cache_lock);
|   425 /* Must be holding cache_lock */
|   438 /* Must be holding cache_lock */
|   447 /* Must be holding cache_lock */
|   472 mutex_lock(&cache_lock);
|   474 mutex_unlock(&cache_lock);
|   480 mutex_lock(&cache_lock);
|   482 mutex_unlock(&cache_lock);
|   490 mutex_lock(&cache_lock);
|   496 mutex_unlock(&cache_lock);
|   [all …]
|
| /linux/arch/csky/mm/ |
| H A D | cachev2.c |
|   38 static DEFINE_SPINLOCK(cache_lock);
|   52 spin_lock_irqsave(&cache_lock, flags); in local_icache_inv_range()
|   57 spin_unlock_irqrestore(&cache_lock, flags); in local_icache_inv_range()
|
| H A D | cachev1.c |
|   23 static DEFINE_SPINLOCK(cache_lock);
|   65 spin_lock_irqsave(&cache_lock, flags); in cache_op_range()
|   75 spin_unlock_irqrestore(&cache_lock, flags); in cache_op_range()
|
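Both csky cache implementations use a single file-scope DEFINE_SPINLOCK(cache_lock) and take it with spin_lock_irqsave() around the sequence of cache-control register writes for a range operation, so the sequence cannot be interleaved even from interrupt context. A generic sketch of that pattern, with the register accesses replaced by a comment:

```c
#include <linux/spinlock.h>

static DEFINE_SPINLOCK(cache_lock);

static void demo_cache_inv_range(unsigned long start, unsigned long end)
{
	unsigned long flags;

	/*
	 * Disable local interrupts and exclude other CPUs while the
	 * non-atomic sequence of cache-control register writes runs.
	 */
	spin_lock_irqsave(&cache_lock, flags);
	/* ... write @start/@end to the cache-control registers ... */
	spin_unlock_irqrestore(&cache_lock, flags);
}
```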
| /linux/drivers/infiniband/core/ |
| H A D | cache.c |
|   1059 read_lock_irqsave(&device->cache_lock, flags); in ib_get_cached_pkey()
|   1068 read_unlock_irqrestore(&device->cache_lock, flags); in ib_get_cached_pkey()
|   1079 read_lock_irqsave(&device->cache_lock, flags); in ib_get_cached_subnet_prefix()
|   1081 read_unlock_irqrestore(&device->cache_lock, flags); in ib_get_cached_subnet_prefix()
|   1097 read_lock_irqsave(&device->cache_lock, flags); in ib_find_cached_pkey()
|   1124 read_unlock_irqrestore(&device->cache_lock, flags); in ib_find_cached_pkey()
|   1138 read_lock_irqsave(&device->cache_lock, flags); in ib_get_cached_lmc()
|   1140 read_unlock_irqrestore(&device->cache_lock, flags); in ib_get_cached_lmc()
|   1155 read_lock_irqsave(&device->cache_lock, flags); in ib_get_cached_port_state()
|   1157 read_unlock_irqrestore(&device->cache_lock, flags); in ib_get_cached_port_state()
|   [all …]
|
| H A D | device.c |
|   642 rwlock_init(&device->cache_lock); in _ib_alloc_device()
|   2940 write_lock_irq(&ibdev->cache_lock); in ib_dispatch_port_state_event()
|   2942 write_unlock_irq(&ibdev->cache_lock); in ib_dispatch_port_state_event()
|   2946 write_unlock_irq(&ibdev->cache_lock); in ib_dispatch_port_state_event()
|
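The RDMA core declares cache_lock as an rwlock_t in struct ib_device (see the ib_verbs.h entry at the end of this listing): the query helpers in cache.c only read the cached port data and take read_lock_irqsave(), while device.c (and the hns_roce driver further down) take write_lock_irq() when updating the cache. A hedged reader/writer sketch with a made-up demo_device type:

```c
#include <linux/spinlock.h>	/* also provides rwlock_t and the read/write lock API */
#include <linux/types.h>

struct demo_port_cache {
	u16 pkey;
	u8 lmc;
};

struct demo_device {
	rwlock_t cache_lock;	/* protects @cache */
	struct demo_port_cache cache;
};

static void demo_device_init(struct demo_device *dev)
{
	rwlock_init(&dev->cache_lock);	/* cf. _ib_alloc_device() */
}

/* Many concurrent readers are fine; they never block one another. */
static u16 demo_get_cached_pkey(struct demo_device *dev)
{
	unsigned long flags;
	u16 pkey;

	read_lock_irqsave(&dev->cache_lock, flags);
	pkey = dev->cache.pkey;
	read_unlock_irqrestore(&dev->cache_lock, flags);
	return pkey;
}

/* Updates are rare (port events) and take the exclusive write side. */
static void demo_update_cache(struct demo_device *dev, u16 pkey)
{
	write_lock_irq(&dev->cache_lock);
	dev->cache.pkey = pkey;
	write_unlock_irq(&dev->cache_lock);
}
```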
| /linux/drivers/gpu/drm/i915/gvt/ |
| H A D | kvmgt.c |
|   311 mutex_lock(&vgpu->cache_lock); in gvt_cache_destroy()
|   314 mutex_unlock(&vgpu->cache_lock); in gvt_cache_destroy()
|   320 mutex_unlock(&vgpu->cache_lock); in gvt_cache_destroy()
|   329 mutex_init(&vgpu->cache_lock); in gvt_cache_init()
|   629 mutex_lock(&vgpu->cache_lock); in intel_vgpu_dma_unmap()
|   639 mutex_unlock(&vgpu->cache_lock); in intel_vgpu_dma_unmap()
|   1619 mutex_lock(&vgpu->cache_lock); in intel_gvt_dma_map_guest_page()
|   1647 mutex_unlock(&vgpu->cache_lock); in intel_gvt_dma_map_guest_page()
|   1653 mutex_unlock(&vgpu->cache_lock); in intel_gvt_dma_map_guest_page()
|   1665 mutex_lock(&vgpu->cache_lock); in intel_gvt_dma_pin_guest_page()
|   [all …]
|
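In kvmgt.c the per-vGPU DMA-mapping cache is protected by a mutex, which suits the map/unmap paths since they run in process context and may sleep while pinning guest pages. A small sketch of the lookup-or-insert shape under such a mutex, with hypothetical types:

```c
#include <linux/mutex.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/errno.h>

struct demo_dma_entry {
	struct list_head node;
	unsigned long gfn;
	unsigned long dma_addr;
};

struct demo_vgpu {
	struct list_head dma_cache;
	struct mutex cache_lock;	/* protects @dma_cache; holders may sleep */
};

/* Look up an existing mapping, or record a new one, under the mutex. */
static int demo_map_guest_page(struct demo_vgpu *vgpu, unsigned long gfn,
			       unsigned long dma_addr)
{
	struct demo_dma_entry *e;

	mutex_lock(&vgpu->cache_lock);
	list_for_each_entry(e, &vgpu->dma_cache, node) {
		if (e->gfn == gfn) {
			mutex_unlock(&vgpu->cache_lock);
			return 0;	/* already mapped */
		}
	}

	e = kzalloc(sizeof(*e), GFP_KERNEL);	/* sleeping alloc is fine under a mutex */
	if (!e) {
		mutex_unlock(&vgpu->cache_lock);
		return -ENOMEM;
	}
	e->gfn = gfn;
	e->dma_addr = dma_addr;
	list_add(&e->node, &vgpu->dma_cache);
	mutex_unlock(&vgpu->cache_lock);
	return 0;
}
```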
| /linux/drivers/net/ethernet/netronome/nfp/bpf/ |
| H A D | cmsg.c |
|   211 spin_lock(&nfp_map->cache_lock); in nfp_bpf_ctrl_op_cache_get()
|   259 spin_unlock(&nfp_map->cache_lock); in nfp_bpf_ctrl_op_cache_get()
|   277 spin_lock(&nfp_map->cache_lock); in nfp_bpf_ctrl_op_cache_put()
|   287 spin_unlock(&nfp_map->cache_lock); in nfp_bpf_ctrl_op_cache_put()
|
| H A D | main.h | 214 spinlock_t cache_lock; member
|
| H A D | offload.c | 392 spin_lock_init(&nfp_map->cache_lock); in nfp_bpf_map_alloc()
|
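The nfp BPF offload code keeps a per-map cache of control-message results; cache_lock is declared in main.h, initialized in nfp_bpf_map_alloc(), and taken as a plain spinlock around the cache get/put helpers in cmsg.c. A sketch of a get/put pairing with invented field names (cache_blocked, cache_gen):

```c
#include <linux/spinlock.h>
#include <linux/types.h>

struct demo_map {
	spinlock_t cache_lock;	/* protects @cache_blocked and @cache_gen */
	bool cache_blocked;
	u32 cache_gen;
};

static void demo_map_init(struct demo_map *map)
{
	spin_lock_init(&map->cache_lock);	/* cf. nfp_bpf_map_alloc() */
}

/* Reserve the cache for one operation; returns false if it is in use. */
static bool demo_cache_get(struct demo_map *map)
{
	bool ok;

	spin_lock(&map->cache_lock);
	ok = !map->cache_blocked;
	if (ok)
		map->cache_blocked = true;
	spin_unlock(&map->cache_lock);
	return ok;
}

static void demo_cache_put(struct demo_map *map)
{
	spin_lock(&map->cache_lock);
	map->cache_blocked = false;
	map->cache_gen++;		/* invalidate any stale cached replies */
	spin_unlock(&map->cache_lock);
}
```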
| /linux/net/sunrpc/ |
| H A D | auth.c |
|   272 spinlock_t *cache_lock; in rpcauth_unhash_cred() local
|   277 cache_lock = &cred->cr_auth->au_credcache->lock; in rpcauth_unhash_cred()
|   278 spin_lock(cache_lock); in rpcauth_unhash_cred()
|   280 spin_unlock(cache_lock); in rpcauth_unhash_cred()
|
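The sunrpc hit is slightly different: rpcauth_unhash_cred() has no cache_lock of its own but keeps a local spinlock_t *cache_lock pointing at the credential cache's lock and locks through that pointer. A trimmed sketch of the same idea with hypothetical structures:

```c
#include <linux/spinlock.h>
#include <linux/list.h>

struct demo_credcache {
	spinlock_t lock;	/* protects the hashed credentials */
};

struct demo_cred {
	struct list_head hash;
	struct demo_credcache *cache;
};

static void demo_unhash_cred(struct demo_cred *cred)
{
	/* Local alias for the cache's lock, as in rpcauth_unhash_cred(). */
	spinlock_t *cache_lock = &cred->cache->lock;

	spin_lock(cache_lock);
	list_del_init(&cred->hash);
	spin_unlock(cache_lock);
}
```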
| /linux/security/selinux/ss/ |
| H A D | sidtab.h | 97 spinlock_t cache_lock; member
|
| /linux/drivers/infiniband/hw/hns/ |
| H A D | hns_roce_main.c |
|   148 write_lock_irq(&ibdev->cache_lock); in handle_en_event()
|   150 write_unlock_irq(&ibdev->cache_lock); in handle_en_event()
|   154 write_unlock_irq(&ibdev->cache_lock); in handle_en_event()
|
| /linux/include/rdma/ |
| H A D | ib_verbs.h | 2804 rwlock_t cache_lock; member
|