/linux/mm/

backing-dev.c
  in collect_wb_stats() (wb: argument):
      63  		struct bdi_writeback *wb)
      67  	spin_lock(&wb->list_lock);
      68  	list_for_each_entry(inode, &wb->b_dirty, i_io_list)
      70  	list_for_each_entry(inode, &wb->b_io, i_io_list)
      72  	list_for_each_entry(inode, &wb->b_more_io, i_io_list)
      74  	list_for_each_entry(inode, &wb->b_dirty_time, i_io_list)
      77  	spin_unlock(&wb->list_lock);
      79  	stats->nr_writeback += wb_stat(wb, WB_WRITEBACK);
      80  	stats->nr_reclaimable += wb_stat(wb, WB_RECLAIMABLE);
      81  	stats->nr_dirtied += wb_stat(wb, WB_DIRTIED);
  [all …]
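The mm/ hits above show the two halves of per-writeback accounting: inode counts come from walking the wb's four dirty lists under wb->list_lock, while page counts come from the wb's percpu counters. A minimal sketch of that shape — struct wb_stats and its field names are assumptions for illustration, not the file's actual definitions:

    /* Sketch only: the stats structure and fields are assumed. */
    static void collect_wb_stats_sketch(struct wb_stats *stats,
                                        struct bdi_writeback *wb)
    {
            struct inode *inode;

            spin_lock(&wb->list_lock);
            list_for_each_entry(inode, &wb->b_dirty, i_io_list)
                    stats->nr_dirty++;          /* dirty, not yet queued */
            list_for_each_entry(inode, &wb->b_io, i_io_list)
                    stats->nr_io++;             /* queued for this pass */
            list_for_each_entry(inode, &wb->b_more_io, i_io_list)
                    stats->nr_more_io++;        /* requeued for another pass */
            list_for_each_entry(inode, &wb->b_dirty_time, i_io_list)
                    stats->nr_dirty_time++;     /* only the timestamp is dirty */
            spin_unlock(&wb->list_lock);

            stats->nr_writeback   += wb_stat(wb, WB_WRITEBACK);
            stats->nr_reclaimable += wb_stat(wb, WB_RECLAIMABLE);
            stats->nr_dirtied     += wb_stat(wb, WB_DIRTIED);
    }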
|
page-writeback.c
     133  #define GDTC_INIT(__wb)		.wb = (__wb),	\
     139  #define MDTC_INIT(__wb, __gdtc)	.wb = (__wb),	\
  in wb_memcg_completions() (wb: argument):
     159  static struct fprop_local_percpu *wb_memcg_completions(struct bdi_writeback *wb)
     161  	return &wb->memcg_completions;
  in wb_min_max_ratio() (wb: argument):
     164  static void wb_min_max_ratio(struct bdi_writeback *wb,
     167  	unsigned long this_bw = READ_ONCE(wb->avg_write_bandwidth);
     168  	unsigned long tot_bw = atomic_long_read(&wb->bdi->tot_write_bandwidth);
     169  	unsigned long long min = wb->bdi->min_ratio;
     170  	unsigned long long max = wb->bdi->max_ratio;
     193  #define GDTC_INIT(__wb)		.wb = (__wb),	\
  [all …]
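wb_min_max_ratio() reads this wb's average write bandwidth and the bdi-wide total; with cgroup writeback, the device's min_ratio/max_ratio are split across its wbs in proportion to that bandwidth share. A hedged sketch of the proration — the real function's clamping and scaling details may differ:

    /* Assumed logic: scale the device-wide ratios by this wb's
     * fraction of the device's total write bandwidth. */
    if (this_bw < tot_bw) {
            min = div64_ul(min * this_bw, tot_bw);
            max = div64_ul(max * this_bw, tot_bw);
    }
    *minp = min;
    *maxp = max;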
|
/linux/fs/

fs-writeback.c
  in wb_io_lists_populated() (wb: argument):
      81  static bool wb_io_lists_populated(struct bdi_writeback *wb)
      83  	if (wb_has_dirty_io(wb)) {
      86  		set_bit(WB_has_dirty_io, &wb->state);
      87  		WARN_ON_ONCE(!wb->avg_write_bandwidth);
      88  		atomic_long_add(wb->avg_write_bandwidth,
      89  				&wb->bdi->tot_write_bandwidth);
  in wb_io_lists_depopulated() (wb: argument):
      94  static void wb_io_lists_depopulated(struct bdi_writeback *wb)
      96  	if (wb_has_dirty_io(wb) && list_empty(&wb->b_dirty) &&
      97  	    list_empty(&wb->b_io) && list_empty(&wb->b_more_io)) {
      98  		clear_bit(WB_has_dirty_io, &wb->state);
  [all …]
|
/linux/include/linux/

backing-dev.h
      39  void wb_start_background_writeback(struct bdi_writeback *wb);
  in wb_has_dirty_io() (wb: argument):
      49  static inline bool wb_has_dirty_io(struct bdi_writeback *wb)
      51  	return test_bit(WB_has_dirty_io, &wb->state);
  in wb_stat_mod() (wb: argument):
      63  static inline void wb_stat_mod(struct bdi_writeback *wb,
      66  	percpu_counter_add_batch(&wb->stat[item], amount, WB_STAT_BATCH);
  in wb_stat() (wb: argument):
      69  static inline s64 wb_stat(struct bdi_writeback *wb, enum wb_stat_item item)
      71  	return percpu_counter_read_positive(&wb->stat[item]);
  in wb_stat_sum() (wb: argument):
      74  static inline s64 wb_stat_sum(struct bdi_writeback *wb, enum wb_stat_item item)
      76  	return percpu_counter_sum_positive(&wb->stat[item]);
      79  extern void wb_writeout_inc(struct bdi_writeback *wb);
  [all …]
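The three stat helpers trade accuracy for cost in the usual percpu-counter way: wb_stat_mod() batches updates per CPU, wb_stat() reads the shared count without folding per-CPU deltas, and wb_stat_sum() sums every CPU for an exact value. A short illustrative caller, assuming kernel context — this function is not part of the header:

    /* Sketch: cheap approximate check first, exact sum only for logging. */
    static void wb_report_writeback(struct bdi_writeback *wb)
    {
            wb_stat_mod(wb, WB_WRITEBACK, -1);      /* one page left writeback */

            if (wb_stat(wb, WB_WRITEBACK) == 0)     /* fast; may lag per-CPU batches */
                    pr_debug("wb drained, exact count %lld\n",
                             wb_stat_sum(wb, WB_WRITEBACK));
    }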
|
writeback.h
      80  	struct bdi_writeback *wb;	/* wb this writeback is issued under */	(member)
     107  	((wbc)->wb ? (wbc)->wb->blkcg_css : blkcg_root_css)
     211  bool cleanup_offline_cgwb(struct bdi_writeback *wb);
  in wbc_init_bio():
     264  	if (wbc->wb)
     265  		bio_associate_blkg_from_css(bio, wbc->wb->blkcg_css);
     313  	struct bdi_writeback *wb;	(member)
     348  unsigned long wb_calc_thresh(struct bdi_writeback *wb, unsigned long thresh);
     349  unsigned long cgwb_calc_thresh(struct bdi_writeback *wb);
     351  void wb_update_bandwidth(struct bdi_writeback *wb);
     360  bool wb_over_bg_thresh(struct bdi_writeback *wb);
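wbc_init_bio() is how cgroup-aware writeback tags a bio with the blkcg of the wb the writeback_control was issued under; filesystems call it once per bio before adding pages. A minimal sketch of the call site — the helper name is hypothetical:

    /* Hypothetical filesystem helper showing the call order. */
    static void fs_submit_writeback_bio(struct writeback_control *wbc,
                                        struct bio *bio)
    {
            wbc_init_bio(wbc, bio);   /* inherit wbc->wb->blkcg_css, if any */
            /* ... add folios and account them to the cgroup owner ... */
            submit_bio(bio);
    }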
|
memcontrol.h
    1522  struct wb_domain *mem_cgroup_wb_domain(struct bdi_writeback *wb);
    1523  void mem_cgroup_wb_stats(struct bdi_writeback *wb, unsigned long *pfilepages,
    1528  					struct bdi_writeback *wb);
  in mem_cgroup_track_foreign_dirty() (wb: argument):
    1531  					struct bdi_writeback *wb)
    1539  	if (unlikely(memcg && &memcg->css != wb->memcg_css))
    1540  		mem_cgroup_track_foreign_dirty_slowpath(folio, wb);
    1543  void mem_cgroup_flush_foreign(struct bdi_writeback *wb);
  in mem_cgroup_wb_domain() (wb: argument):
    1547  static inline struct wb_domain *mem_cgroup_wb_domain(struct bdi_writeback *wb)
  in mem_cgroup_wb_stats() (wb: argument):
    1552  static inline void mem_cgroup_wb_stats(struct bdi_writeback *wb,
  in mem_cgroup_track_foreign_dirty() (wb: argument):
    1561  					struct bdi_writeback *wb)
  [all …]
|
/linux/include/trace/events/

writeback.h
  in __trace_wb_assign_cgroup() (wb: argument):
     159  static inline ino_t __trace_wb_assign_cgroup(struct bdi_writeback *wb)
     161  	return cgroup_ino(wb->memcg_css->cgroup);
  in __trace_wbc_assign_cgroup():
     166  	if (wbc->wb)
     167  		return __trace_wb_assign_cgroup(wbc->wb);
  in __trace_wb_assign_cgroup() (wb: argument):
     173  static inline ino_t __trace_wb_assign_cgroup(struct bdi_writeback *wb)
     276  	TP_PROTO(struct folio *folio, struct bdi_writeback *wb),
     278  	TP_ARGS(folio, wb),
     293  		strscpy_pad(__entry->name, bdi_dev_name(wb->bdi), 32);
     294  		__entry->bdi_id = wb->bdi->id;
     296  		__entry->memcg_id = wb->memcg_css->id;
  [all …]
|
/linux/security/apparmor/

match.c
  inc_wb_pos(wb) macro (wb: argument):
     682  #define inc_wb_pos(wb)						\
     685  	wb->pos = (wb->pos + 1) & (WB_HISTORY_SIZE - 1);	\
     686  	wb->len = (wb->len + 1) > WB_HISTORY_SIZE ? WB_HISTORY_SIZE : \
     687  		  wb->len + 1;					\
  in is_loop() (wb: argument):
     692  static bool is_loop(struct match_workbuf *wb, aa_state_t state,
     695  	int pos = wb->pos;
     698  	if (wb->history[pos] < state)
     701  	for (i = 0; i < wb->len; i++) {
     702  		if (wb->history[pos] == state) {
  in leftmatch_fb() (wb: argument):
     714  		      const char *str, struct match_workbuf *wb,
  [all …]
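Here wb is AppArmor's match workbuf: a small ring of recently visited DFA states used to detect cycles during matching. inc_wb_pos() advances the ring position with a power-of-two mask and saturates the length; is_loop() then scans backwards looking for a repeated state. A standalone restatement of the ring logic — the WB_HISTORY_SIZE value is assumed, and it must be a power of two for the mask to wrap correctly:

    #include <stdbool.h>
    #include <stdint.h>

    #define WB_HISTORY_SIZE 32               /* assumed; must be a power of two */

    struct match_workbuf {
            unsigned int pos, len;
            uint32_t history[WB_HISTORY_SIZE];
    };

    /* Record state and advance the ring; len saturates at the ring size. */
    static void wb_record(struct match_workbuf *wb, uint32_t state)
    {
            wb->history[wb->pos] = state;
            wb->pos = (wb->pos + 1) & (WB_HISTORY_SIZE - 1);
            if (wb->len < WB_HISTORY_SIZE)
                    wb->len++;
    }

    /* Walk backwards through the recorded states looking for state. */
    static bool wb_seen(const struct match_workbuf *wb, uint32_t state)
    {
            unsigned int pos = wb->pos;

            for (unsigned int i = 0; i < wb->len; i++) {
                    pos = (pos - 1) & (WB_HISTORY_SIZE - 1);
                    if (wb->history[pos] == state)
                            return true;
            }
            return false;
    }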
|
/linux/drivers/gpu/drm/radeon/

r600_dma.c
  in r600_dma_get_rptr():
      55  	if (rdev->wb.enabled)
      56  		rptr = rdev->wb.wb[ring->rptr_offs/4];
  in r600_dma_resume():
     143  	       upper_32_bits(rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFF);
     145  	       ((rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFFFFFFFC));
     147  	if (rdev->wb.enabled)
  in r600_dma_ring_test():
     243  	gpu_addr = rdev->wb.gpu_addr + index;
     246  	rdev->wb.wb[index/4] = cpu_to_le32(tmp);
     260  		tmp = le32_to_cpu(rdev->wb.wb[index/4]);
  in r600_dma_ib_test():
     350  	gpu_addr = rdev->wb.gpu_addr + index;
     381  	tmp = le32_to_cpu(rdev->wb.wb[index/4]);
  [all …]
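In the radeon entries, rdev->wb is yet another wb: a block of ordinary memory the GPU can DMA into ("writeback"), giving the CPU a cheap way to read ring pointers and test results without MMIO register reads. The ring tests in these files all follow one shape; a hedged sketch of it, where emit_dma_write() is a hypothetical stand-in for the real DMA packet emission:

    /* Sketch of the ring-test pattern above; emit_dma_write() is hypothetical. */
    u32 tmp = 0xCAFEDEAD;
    u64 gpu_addr;
    unsigned int i;

    rdev->wb.wb[index / 4] = cpu_to_le32(tmp);   /* seed the slot from the CPU */
    gpu_addr = rdev->wb.gpu_addr + index;        /* same slot, as the GPU sees it */

    emit_dma_write(ring, gpu_addr, 0xDEADBEEF);  /* GPU overwrites the slot */

    for (i = 0; i < rdev->usec_timeout; i++) {
            if (le32_to_cpu(rdev->wb.wb[index / 4]) == 0xDEADBEEF)
                    break;                       /* the GPU's write landed */
            udelay(1);
    }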
|
cik_sdma.c
  in cik_sdma_get_rptr():
      67  	if (rdev->wb.enabled) {
      68  		rptr = rdev->wb.wb[ring->rptr_offs/4];
  in cik_sdma_ring_ib_execute():
     138  	if (rdev->wb.enabled) {
  in cik_sdma_gfx_resume():
     400  	       upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);
     402  	       ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));
     404  	if (rdev->wb.enabled)
  in cik_sdma_ring_test():
     658  	gpu_addr = rdev->wb.gpu_addr + index;
     661  	rdev->wb.wb[index/4] = cpu_to_le32(tmp);
     676  		tmp = le32_to_cpu(rdev->wb.wb[index/4]);
  in cik_sdma_ib_test():
     715  	gpu_addr = rdev->wb.gpu_addr + index;
  [all …]
|
ni_dma.c
  in cayman_dma_get_rptr():
      57  	if (rdev->wb.enabled) {
      58  		rptr = rdev->wb.wb[ring->rptr_offs/4];
  in cayman_dma_ring_ib_execute():
     127  	if (rdev->wb.enabled) {
  in cayman_dma_resume():
     222  	       upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF);
     224  	       ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));
     226  	if (rdev->wb.enabled)
|
radeon_fence.c
  in radeon_fence_write():
      71  	if (likely(rdev->wb.enabled || !drv->scratch_reg)) {
  in radeon_fence_read():
      93  	if (likely(rdev->wb.enabled || !drv->scratch_reg)) {
  in radeon_fence_driver_start_ring():
     765  	if (rdev->wb.use_event || !radeon_ring_supports_scratch_reg(rdev, &rdev->ring[ring])) {
     769  		rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4];
     770  		rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr +
     789  		rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4];
     790  		rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + index;
|
radeon_ring.c
  in radeon_ring_backup():
     308  	else if (rdev->wb.enabled)
  in radeon_ring_init():
     422  	if (rdev->wb.enabled) {
     424  		ring->next_rptr_gpu_addr = rdev->wb.gpu_addr + index;
     425  		ring->next_rptr_cpu_addr = &rdev->wb.wb[index/4];
|
/linux/drivers/usb/class/

cdc-acm.c
  in acm_poison_urbs():
     157  		usb_poison_urb(acm->wb[i].urb);
  in acm_unpoison_urbs():
     169  		usb_unpoison_urb(acm->wb[i].urb);
  in acm_wb_alloc() (wb: local):
     182  	struct acm_wb *wb;
     187  		wb = &acm->wb[wbn];
     188  		if (!wb->use) {
     189  			wb->use = true;
     190  			wb->len = 0;
  in acm_wb_is_avail():
     207  		if(acm->wb[i].use)
  in acm_write_done() (wb: argument):
     216  static void acm_write_done(struct acm *acm, struct acm_wb *wb)
     218  	wb->use = false;
  [all …]
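For cdc-acm, acm->wb[] is a fixed pool of USB write buffers: acm_wb_alloc() claims the first unused slot and acm_write_done() releases it from the completion path. The claim/release pattern in isolation — the pool size is assumed and locking is elided (the driver holds its write lock around this):

    #include <stdbool.h>

    #define ACM_NW 16                        /* pool size; assumed value */

    struct acm_wb { bool use; int len; };

    /* Return the index of a claimed buffer, or -1 if all are busy. */
    static int wb_alloc(struct acm_wb *pool)
    {
            for (int i = 0; i < ACM_NW; i++) {
                    if (!pool[i].use) {
                            pool[i].use = true;  /* claim the slot */
                            pool[i].len = 0;
                            return i;
                    }
            }
            return -1;
    }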
|
/linux/certs/

extract-cert.c
      47  static BIO *wb;	(variable)
  in write_cert():
      55  	if (!wb) {
      56  		wb = BIO_new_file(cert_dst, "wb");
      57  		ERR(!wb, "%s", cert_dst);
      60  	ERR(!i2d_X509_bio(wb, x509), "%s", cert_dst);
  in main():
     166  	if (wb && !x509) {
     179  	BIO_free(wb);
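Here wb is simply an OpenSSL output BIO (and the "wb" string on line 56 is the fopen mode, unrelated to writeback): the extractor opens the destination lazily on the first certificate and streams each one out DER-encoded. The same pattern in isolation, with the ERR() macro simplified to a plain return:

    #include <openssl/bio.h>
    #include <openssl/x509.h>

    static BIO *wb;

    /* Append one DER-encoded certificate to cert_dst; 0 on success. */
    static int write_cert(const char *cert_dst, X509 *x509)
    {
            if (!wb) {
                    wb = BIO_new_file(cert_dst, "wb");  /* opened once, on demand */
                    if (!wb)
                            return -1;
            }
            return i2d_X509_bio(wb, x509) ? 0 : -1;
    }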
|
/linux/drivers/gpu/drm/msm/disp/dpu1/

dpu_hw_wb.h
      37  			struct dpu_hw_wb_cfg *wb);
      40  			struct dpu_hw_wb_cfg *wb,
      44  			struct dpu_hw_wb_cfg *wb);
|
dpu_encoder_phys_wb.c
  in _dpu_encoder_phys_wb_clk_force_ctrl() (wb: argument):
      38  static bool _dpu_encoder_phys_wb_clk_force_ctrl(struct dpu_hw_wb *wb,
      42  	if (wb->ops.setup_clk_force_ctrl) {
      43  		*forced_on = wb->ops.setup_clk_force_ctrl(wb, enable);
      48  		*forced_on = mdp->ops.setup_clk_force_ctrl(mdp, wb->caps->clk_ctrl, enable);
  in dpu_encoder_phys_wb_setup_ctl():
     241  	intf_cfg.wb = hw_wb->idx;
     262  	intf_cfg.wb = hw_wb->idx;
|
/linux/tools/testing/selftests/cgroup/

test_zswap.c
  in test_zswap_writeback_one() (wb: argument):
     322  static int test_zswap_writeback_one(const char *cgroup, bool wb)
     332  	if (cg_run(cgroup, attempt_writeback, (void *) &wb))
     340  	if (wb != !!zswpwb_after) {
     342  			zswpwb_after, wb ? "enabled" : "disabled");
  in test_zswap_writeback() (wb: argument):
     350  static int test_zswap_writeback(const char *root, bool wb)
     363  	if (cg_write(test_group, "memory.zswap.writeback", wb ? "1" : "0"))
     366  	if (test_zswap_writeback_one(test_group, wb))
     385  	if (test_zswap_writeback_one(test_group_child, wb))
|
/linux/drivers/media/platform/mediatek/vcodec/encoder/venc/

venc_vp8_if.c
  in vp8_enc_alloc_work_buf() (wb: local):
     155  	struct venc_vp8_vpu_buf *wb = inst->vsi->work_bufs;
     158  		if (wb[i].size == 0)
     170  		inst->work_bufs[i].size = wb[i].size;
     189  					 wb[i].vpua);
     190  			memcpy(inst->work_bufs[i].va, tmp_va, wb[i].size);
     192  		wb[i].iova = inst->work_bufs[i].dma_addr;
|
/linux/drivers/md/

dm-writecache.c
  in writecache_writeback_endio() (wb: local):
    1656  	struct writeback_struct *wb = container_of(bio, struct writeback_struct, bio);
    1657  	struct dm_writecache *wc = wb->wc;
    1663  	list_add_tail(&wb->endio_entry, &wc->endio_list);
  in __writecache_endio_pmem() (wb: local):
    1684  	struct writeback_struct *wb;
    1689  		wb = list_entry(list->next, struct writeback_struct, endio_entry);
    1690  		list_del(&wb->endio_entry);
    1692  		if (unlikely(wb->bio.bi_status != BLK_STS_OK))
    1693  			writecache_error(wc, blk_status_to_errno(wb->bio.bi_status),
    1694  					 "write error %d", wb->bio.bi_status);
    1697  			e = wb->wc_list[i];
  [all …]
|
/linux/drivers/net/ethernet/intel/igc/

igc_base.h
      24  	} wb;	(member)
      86  	} wb;	/* writeback */	(member)
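In the igc descriptor definitions, wb names the "writeback" arm of a union: the driver fills the read layout with buffer addresses, and the NIC overwrites the same bytes with completion status. The general shape of such a union — field names are illustrative, not the actual igc layout:

    /* Illustrative read/writeback descriptor union (kernel types assumed). */
    union rx_desc_sketch {
            struct {
                    __le64 pkt_addr;        /* driver: DMA address of the buffer */
                    __le64 hdr_addr;
            } read;
            struct {
                    __le32 rss_hash;
                    __le32 status_error;    /* hardware: completion status bits */
                    __le16 length;
                    __le16 vlan;
            } wb;                           /* written back by the hardware */
    };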
|
/linux/drivers/gpu/drm/amd/amdgpu/

amdgpu_mes.c
  in amdgpu_mes_init():
     168  		adev->wb.gpu_addr + (adev->mes.sch_ctx_offs[i] * 4);
     170  		(uint64_t *)&adev->wb.wb[adev->mes.sch_ctx_offs[i]];
     180  	adev->mes.query_status_fence_gpu_addr[i] = adev->wb.gpu_addr +
     183  		(uint64_t *)&adev->wb.wb[adev->mes.query_status_fence_offs[i]];
  in amdgpu_mes_rreg():
     451  	read_val_gpu_addr = adev->wb.gpu_addr + (addr_offset * 4);
     452  	read_val_ptr = (uint32_t *)&adev->wb.wb[addr_offset];
|
amdgpu_vpe.c
  in vpe_ring_test_ring():
     788  	adev->wb.wb[index] = 0;
     789  	wb_addr = adev->wb.gpu_addr + (index * 4);
     804  		if (le32_to_cpu(adev->wb.wb[index]) == test_pattern)
  in vpe_ring_test_ib():
     832  	adev->wb.wb[index] = 0;
     833  	wb_addr = adev->wb.gpu_addr + (index * 4);
     859  	ret = (le32_to_cpu(adev->wb.wb[index]) == test_pattern) ? 0 : -EINVAL;
|
/linux/include/uapi/drm/

lima_drm.h
      84  	__u32 wb[3 * LIMA_PP_WB_REG_NUM];	(member)
      93  	__u32 wb[3 * LIMA_PP_WB_REG_NUM];	(member)
|
/linux/arch/xtensa/kernel/

signal.c
  in flush_window_regs_user() (wb: local):
      60  	const unsigned long wb = regs->windowbase;
      73  	wm = (ws >> wb) | (ws << (XCHAL_NUM_AREGS / 4 - wb));
     120  	regs->windowstart = 1 << wb;
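On xtensa, wb is the register-window base and ws the windowstart bitmask; the expression on line 73 rotates ws right by wb so that bit 0 of wm corresponds to the current window before the live frames are spilled to the user stack. A standalone demo of that rotation — the field width is an assumption for a 64-areg core:

    #include <stdint.h>
    #include <stdio.h>

    #define WS_BITS 16    /* XCHAL_NUM_AREGS / 4 on a 64-areg core (assumed) */

    /* Rotate the windowstart mask right by windowbase, as on line 73;
     * the final mask keeps the result inside the WS_BITS-wide field. */
    static uint32_t rotate_ws(uint32_t ws, unsigned int wb)
    {
            return ((ws >> wb) | (ws << (WS_BITS - wb))) & ((1u << WS_BITS) - 1);
    }

    int main(void)
    {
            /* windows live at frames 1 and 5; current windowbase is 5 */
            printf("0x%x\n", rotate_ws((1u << 1) | (1u << 5), 5));
            return 0;
    }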
|