Lines matching refs: ws (all hits are in fs/btrfs/compression.c; the leading number on each hit is that file's source line, and long lines are truncated by the cross-referencer)
90 static int compression_compress_pages(int type, struct list_head *ws, in compression_compress_pages() argument
97 return zlib_compress_folios(ws, inode, start, folios, in compression_compress_pages()
100 return lzo_compress_folios(ws, inode, start, folios, in compression_compress_pages()
103 return zstd_compress_folios(ws, inode, start, folios, in compression_compress_pages()
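These three hits are the per-algorithm dispatch in compression_compress_pages(): the ws workspace list head is threaded through to whichever backend matches the type. Below is a minimal userspace sketch of that dispatch pattern; all names are simplified stand-ins, and the real backends also take the inode, folio array, and in/out length pointers truncated from the hits above.

#include <stddef.h>

enum compress_type { COMPRESS_NONE, COMPRESS_ZLIB, COMPRESS_LZO, COMPRESS_ZSTD };

/* Hypothetical stand-ins for zlib_compress_folios() and friends. */
static int zlib_compress(void *ws, const void *in, size_t len)  { return 0; }
static int lzo_compress(void *ws, const void *in, size_t len)   { return 0; }
static int zstd_compress(void *ws, const void *in, size_t len)  { return 0; }

/* Dispatch on the algorithm type, as compression_compress_pages() does. */
static int compress_dispatch(int type, void *ws, const void *in, size_t len)
{
	switch (type) {
	case COMPRESS_ZLIB: return zlib_compress(ws, in, len);
	case COMPRESS_LZO:  return lzo_compress(ws, in, len);
	case COMPRESS_ZSTD: return zstd_compress(ws, in, len);
	default:            return -1; /* the kernel treats an unknown type as a bug */
	}
}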
121 static int compression_decompress_bio(struct list_head *ws, in compression_decompress_bio() argument
125 case BTRFS_COMPRESS_ZLIB: return zlib_decompress_bio(ws, cb); in compression_decompress_bio()
126 case BTRFS_COMPRESS_LZO: return lzo_decompress_bio(ws, cb); in compression_decompress_bio()
127 case BTRFS_COMPRESS_ZSTD: return zstd_decompress_bio(ws, cb); in compression_decompress_bio()
138 static int compression_decompress(int type, struct list_head *ws, in compression_decompress() argument
143 case BTRFS_COMPRESS_ZLIB: return zlib_decompress(ws, data_in, dest_folio, in compression_decompress()
145 case BTRFS_COMPRESS_LZO: return lzo_decompress(ws, data_in, dest_folio, in compression_decompress()
147 case BTRFS_COMPRESS_ZSTD: return zstd_decompress(ws, data_in, dest_folio, in compression_decompress()
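The decompression side repeats the same switch twice: compression_decompress_bio() hands a whole compressed bio to the backend, while compression_decompress() decompresses a single block into one destination folio at an offset. A condensed sketch of the single-block variant, again with hypothetical stand-in signatures:

#include <stddef.h>
#include <stdint.h>

enum compress_type { COMPRESS_NONE, COMPRESS_ZLIB, COMPRESS_LZO, COMPRESS_ZSTD };

/* Hypothetical single-block backends, stand-ins for zlib_decompress() etc. */
static int zlib_decompress_block(void *ws, const uint8_t *in, size_t srclen,
				 uint8_t *dest, size_t destoff, size_t destlen) { return 0; }
static int lzo_decompress_block(void *ws, const uint8_t *in, size_t srclen,
				uint8_t *dest, size_t destoff, size_t destlen) { return 0; }
static int zstd_decompress_block(void *ws, const uint8_t *in, size_t srclen,
				 uint8_t *dest, size_t destoff, size_t destlen) { return 0; }

/* Mirror of compression_decompress(): same switch as the compress side,
 * but the output is one destination buffer plus an offset into it. */
static int decompress_block(int type, void *ws,
			    const uint8_t *in, size_t srclen,
			    uint8_t *dest, size_t destoff, size_t destlen)
{
	switch (type) {
	case COMPRESS_ZLIB:
		return zlib_decompress_block(ws, in, srclen, dest, destoff, destlen);
	case COMPRESS_LZO:
		return lzo_decompress_block(ws, in, srclen, dest, destoff, destlen);
	case COMPRESS_ZSTD:
		return zstd_decompress_block(ws, in, srclen, dest, destoff, destlen);
	default:
		return -1;
	}
}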
693 static void free_heuristic_ws(struct list_head *ws) in free_heuristic_ws() argument
697 workspace = list_entry(ws, struct heuristic_ws, list); in free_heuristic_ws()
707 struct heuristic_ws *ws; in alloc_heuristic_ws() local
709 ws = kzalloc(sizeof(*ws), GFP_KERNEL); in alloc_heuristic_ws()
710 if (!ws) in alloc_heuristic_ws()
713 ws->sample = kvmalloc(MAX_SAMPLE_SIZE, GFP_KERNEL); in alloc_heuristic_ws()
714 if (!ws->sample) in alloc_heuristic_ws()
717 ws->bucket = kcalloc(BUCKET_SIZE, sizeof(*ws->bucket), GFP_KERNEL); in alloc_heuristic_ws()
718 if (!ws->bucket) in alloc_heuristic_ws()
721 ws->bucket_b = kcalloc(BUCKET_SIZE, sizeof(*ws->bucket_b), GFP_KERNEL); in alloc_heuristic_ws()
722 if (!ws->bucket_b) in alloc_heuristic_ws()
725 INIT_LIST_HEAD(&ws->list); in alloc_heuristic_ws()
726 return &ws->list; in alloc_heuristic_ws()
728 free_heuristic_ws(&ws->list); in alloc_heuristic_ws()
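alloc_heuristic_ws() allocates the workspace struct plus three buffers, and on any failure unwinds through free_heuristic_ws(), which must therefore tolerate partially built workspaces. A simplified userspace sketch of that allocate-or-unwind shape, using the sizes from fs/btrfs/compression.c (MAX_SAMPLE_SIZE is 8 KiB, BUCKET_SIZE is 256) but libc allocators in place of kzalloc/kvmalloc/kcalloc:

#include <stdlib.h>
#include <stdint.h>

#define MAX_SAMPLE_SIZE 8192   /* bytes sampled per 128K chunk */
#define BUCKET_SIZE     256    /* one bucket per possible byte value */

struct bucket_item { uint32_t count; };

struct heuristic_ws {
	uint8_t *sample;                /* sampled input bytes */
	uint32_t sample_size;
	struct bucket_item *bucket;     /* byte histogram */
	struct bucket_item *bucket_b;   /* scratch space for the radix sort */
};

static void free_heuristic_ws(struct heuristic_ws *ws)
{
	if (!ws)
		return;
	free(ws->sample);
	free(ws->bucket);
	free(ws->bucket_b);
	free(ws);
}

static struct heuristic_ws *alloc_heuristic_ws(void)
{
	struct heuristic_ws *ws = calloc(1, sizeof(*ws));

	if (!ws)
		return NULL;
	ws->sample = malloc(MAX_SAMPLE_SIZE);
	ws->bucket = calloc(BUCKET_SIZE, sizeof(*ws->bucket));
	ws->bucket_b = calloc(BUCKET_SIZE, sizeof(*ws->bucket_b));
	/* On any failure, free whatever did get allocated, as the kernel does. */
	if (!ws->sample || !ws->bucket || !ws->bucket_b) {
		free_heuristic_ws(ws);
		return NULL;
	}
	return ws;
}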
758 static void free_workspace(int type, struct list_head *ws) in free_workspace() argument
761 case BTRFS_COMPRESS_NONE: return free_heuristic_ws(ws); in free_workspace()
762 case BTRFS_COMPRESS_ZLIB: return zlib_free_workspace(ws); in free_workspace()
763 case BTRFS_COMPRESS_LZO: return lzo_free_workspace(ws); in free_workspace()
764 case BTRFS_COMPRESS_ZSTD: return zstd_free_workspace(ws); in free_workspace()
811 struct list_head *ws; in free_workspace_manager() local
820 ws = gwsm->idle_ws.next; in free_workspace_manager()
821 list_del(ws); in free_workspace_manager()
822 free_workspace(type, ws); in free_workspace_manager()
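free_workspace() is the matching per-type destructor (note that BTRFS_COMPRESS_NONE maps to the heuristic workspace), and free_workspace_manager() drains a manager's idle list through it at teardown. A simplified sketch of that drain loop, using a singly linked list in place of the kernel's list_head:

#include <stdlib.h>

struct ws_node { struct ws_node *next; };

struct workspace_manager {
	struct ws_node *idle_ws;   /* stack of cached, currently unused workspaces */
};

/* Stand-in for the per-type free_workspace() dispatcher. */
static void free_workspace(int type, struct ws_node *ws)
{
	free(ws);
}

/* Drain the idle list at teardown, as free_workspace_manager() does. */
static void free_workspace_manager(struct workspace_manager *m, int type)
{
	while (m->idle_ws) {
		struct ws_node *ws = m->idle_ws;

		m->idle_ws = ws->next;   /* list_del() equivalent */
		free_workspace(type, ws);
	}
}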
933 void btrfs_put_workspace(struct btrfs_fs_info *fs_info, int type, struct list_head *ws) in btrfs_put_workspace() argument
951 list_add(ws, idle_ws); in btrfs_put_workspace()
958 free_workspace(type, ws); in btrfs_put_workspace()
964 static void put_workspace(struct btrfs_fs_info *fs_info, int type, struct list_head *ws) in put_workspace() argument
967 case BTRFS_COMPRESS_NONE: return btrfs_put_workspace(fs_info, type, ws); in put_workspace()
968 case BTRFS_COMPRESS_ZLIB: return btrfs_put_workspace(fs_info, type, ws); in put_workspace()
969 case BTRFS_COMPRESS_LZO: return btrfs_put_workspace(fs_info, type, ws); in put_workspace()
970 case BTRFS_COMPRESS_ZSTD: return zstd_put_workspace(fs_info, ws); in put_workspace()
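btrfs_put_workspace() implements a bounded cache: a workspace goes back on the idle list only while the number of free workspaces is at or below a limit (num_online_cpus() in the kernel); past that it is freed outright and the total count dropped. put_workspace() routes every type through that helper except zstd, which keeps its own per-level workspace lists. A single-threaded sketch of the bounded put; the kernel version additionally takes ws_lock and wakes any thread sleeping in the corresponding get path:

#include <stdlib.h>

struct ws_node { struct ws_node *next; };

struct workspace_manager {
	struct ws_node *idle_ws;
	int free_ws;      /* number of cached workspaces */
	int max_cached;   /* the kernel uses num_online_cpus() */
};

static void put_workspace(struct workspace_manager *m, struct ws_node *ws)
{
	if (m->free_ws <= m->max_cached) {
		/* Cheap path: cache for reuse, like list_add(ws, idle_ws). */
		ws->next = m->idle_ws;
		m->idle_ws = ws;
		m->free_ws++;
		return;
	}
	/* Cache is full: release the memory instead. */
	free(ws);
}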
1310 static u32 shannon_entropy(struct heuristic_ws *ws) in shannon_entropy() argument
1317 sz_base = ilog2_w(ws->sample_size); in shannon_entropy()
1318 for (i = 0; i < BUCKET_SIZE && ws->bucket[i].count > 0; i++) { in shannon_entropy()
1319 p = ws->bucket[i].count; in shannon_entropy()
1324 entropy_sum /= ws->sample_size; in shannon_entropy()
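shannon_entropy() turns the byte histogram into an entropy estimate scaled to a percentage of the 8-bit maximum. The kernel stays in integer math by approximating log2 with ilog2 of a raised power, and its loop can stop at the first empty bucket because byte_core_set_size() has just sorted the histogram in descending order. A floating-point userspace sketch of the same estimate (assumes the bucket_item type from the allocation sketch above):

#include <math.h>
#include <stdint.h>

#define BUCKET_SIZE 256

struct bucket_item { uint32_t count; };

/*
 * Entropy of the sample as a percentage of the 8-bit maximum:
 * 0 for constant data, approaching 100 for uniformly random bytes.
 */
static uint32_t shannon_entropy_pct(const struct bucket_item *bucket,
				    uint32_t sample_size)
{
	double entropy = 0.0;

	for (int i = 0; i < BUCKET_SIZE; i++) {
		if (!bucket[i].count)
			continue;
		double p = (double)bucket[i].count / sample_size;
		entropy -= p * log2(p);
	}
	return (uint32_t)(entropy * 100.0 / 8.0);
}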
1446 static int byte_core_set_size(struct heuristic_ws *ws) in byte_core_set_size() argument
1450 const u32 core_set_threshold = ws->sample_size * 90 / 100; in byte_core_set_size()
1451 struct bucket_item *bucket = ws->bucket; in byte_core_set_size()
1454 radix_sort(ws->bucket, ws->bucket_b, BUCKET_SIZE); in byte_core_set_size()
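byte_core_set_size() sorts the histogram by count, descending (a radix sort in the kernel, with bucket_b as the scratch array), then counts how many of the most frequent byte values it takes to cover 90% of the sample; a small core set indicates skewed, compressible data. A sketch using qsort in place of radix_sort():

#include <stdlib.h>
#include <stdint.h>

#define BUCKET_SIZE 256

struct bucket_item { uint32_t count; };

static int cmp_count_desc(const void *a, const void *b)
{
	uint32_t ca = ((const struct bucket_item *)a)->count;
	uint32_t cb = ((const struct bucket_item *)b)->count;

	return (ca < cb) - (ca > cb);   /* descending by count */
}

/* How many distinct byte values cover 90% of the sample? */
static uint32_t byte_core_set_size(struct bucket_item *bucket,
				   uint32_t sample_size)
{
	const uint32_t threshold = sample_size * 90 / 100;
	uint32_t sum = 0;
	uint32_t i;

	qsort(bucket, BUCKET_SIZE, sizeof(*bucket), cmp_count_desc);
	for (i = 0; i < BUCKET_SIZE && sum < threshold; i++)
		sum += bucket[i].count;
	return i;
}

In current kernels the caller treats a core set of at most 64 values (BYTE_CORE_SET_LOW) as compressible and 200 or more (BYTE_CORE_SET_HIGH) as not worth compressing.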
1484 static u32 byte_set_size(const struct heuristic_ws *ws) in byte_set_size() argument
1490 if (ws->bucket[i].count > 0) in byte_set_size()
1500 if (ws->bucket[i].count > 0) { in byte_set_size()
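byte_set_size() simply counts how many distinct byte values occur in the sample; the kernel splits it into two loops only so it can bail out early once the count passes the threshold, beyond which this test can no longer say "compressible". A condensed, behaviorally equivalent sketch:

#include <stdint.h>

#define BUCKET_SIZE 256
#define BYTE_SET_THRESHOLD 64   /* a small byte set suggests compressible data */

struct bucket_item { uint32_t count; };

/* Count distinct byte values, stopping once past the threshold. */
static uint32_t byte_set_size(const struct bucket_item *bucket)
{
	uint32_t count = 0;

	for (uint32_t i = 0; i < BUCKET_SIZE; i++) {
		if (bucket[i].count > 0) {
			count++;
			if (count > BYTE_SET_THRESHOLD)
				break;   /* caller only needs "small or not" */
		}
	}
	return count;
}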
1510 static bool sample_repeated_patterns(struct heuristic_ws *ws) in sample_repeated_patterns() argument
1512 const u32 half_of_sample = ws->sample_size / 2; in sample_repeated_patterns()
1513 const u8 *data = ws->sample; in sample_repeated_patterns()
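sample_repeated_patterns() is the cheapest test of all: memcmp() the first half of the sample against the second half, and an exact match flags periodic data that will compress very well. As a sketch:

#include <string.h>
#include <stdbool.h>
#include <stdint.h>

/* True if the second half of the sample repeats the first half exactly. */
static bool sample_repeated_patterns(const uint8_t *sample, uint32_t sample_size)
{
	const uint32_t half = sample_size / 2;

	return memcmp(sample, sample + half, half) == 0;
}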
1519 struct heuristic_ws *ws) in heuristic_collect_sample() argument
1555 memcpy(&ws->sample[curr_sample_pos], &in_data[i], in heuristic_collect_sample()
1567 ws->sample_size = curr_sample_pos; in heuristic_collect_sample()
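heuristic_collect_sample() deliberately avoids reading the whole range: it copies small chunks at a fixed stride (16 bytes every 256 in the kernel, SAMPLING_READ_SIZE and SAMPLING_INTERVAL), capping the scan so the sample never outgrows the 8 KiB MAX_SAMPLE_SIZE buffer allocated earlier. A sketch over a plain memory buffer instead of page-cache folios:

#include <string.h>
#include <stdint.h>
#include <stddef.h>

#define SAMPLING_READ_SIZE 16
#define SAMPLING_INTERVAL  256
#define MAX_SAMPLE_SIZE    8192

/* Copy a sparse, strided sample of `in` into `sample`; return bytes sampled. */
static uint32_t collect_sample(const uint8_t *in, size_t len, uint8_t *sample)
{
	uint32_t pos = 0;

	for (size_t off = 0;
	     off + SAMPLING_READ_SIZE <= len &&
	     pos + SAMPLING_READ_SIZE <= MAX_SAMPLE_SIZE;
	     off += SAMPLING_INTERVAL) {
		memcpy(sample + pos, in + off, SAMPLING_READ_SIZE);
		pos += SAMPLING_READ_SIZE;
	}
	return pos;
}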
1584 struct heuristic_ws *ws; in btrfs_compress_heuristic() local
1589 ws = list_entry(ws_list, struct heuristic_ws, list); in btrfs_compress_heuristic()
1591 heuristic_collect_sample(&inode->vfs_inode, start, end, ws); in btrfs_compress_heuristic()
1593 if (sample_repeated_patterns(ws)) { in btrfs_compress_heuristic()
1598 memset(ws->bucket, 0, sizeof(*ws->bucket)*BUCKET_SIZE); in btrfs_compress_heuristic()
1600 for (i = 0; i < ws->sample_size; i++) { in btrfs_compress_heuristic()
1601 byte = ws->sample[i]; in btrfs_compress_heuristic()
1602 ws->bucket[byte].count++; in btrfs_compress_heuristic()
1605 i = byte_set_size(ws); in btrfs_compress_heuristic()
1611 i = byte_core_set_size(ws); in btrfs_compress_heuristic()
1622 i = shannon_entropy(ws); in btrfs_compress_heuristic()
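btrfs_compress_heuristic() strings the tests together from cheapest to most expensive, stopping at the first decisive answer: repeated halves, then byte-set size, then core-set size, then entropy. A boolean sketch of that ladder built on the helpers above; the thresholds mirror the kernel's constants as I read them, and the kernel actually returns distinct positive codes rather than a bool:

#include <stdbool.h>
#include <string.h>
#include <stdint.h>
#include <stddef.h>

#define BUCKET_SIZE          256
#define BYTE_SET_THRESHOLD    64
#define BYTE_CORE_SET_LOW     64
#define BYTE_CORE_SET_HIGH   200
#define ENTROPY_LVL_HIGH      80

/* Builds on the sketches above: struct heuristic_ws, collect_sample(),
 * sample_repeated_patterns(), byte_set_size(), byte_core_set_size(),
 * shannon_entropy_pct(). */
static bool compress_heuristic(struct heuristic_ws *ws,
			       const uint8_t *in, size_t len)
{
	uint32_t i;

	ws->sample_size = collect_sample(in, len, ws->sample);

	if (sample_repeated_patterns(ws->sample, ws->sample_size))
		return true;                        /* periodic data */

	memset(ws->bucket, 0, sizeof(*ws->bucket) * BUCKET_SIZE);
	for (i = 0; i < ws->sample_size; i++)
		ws->bucket[ws->sample[i]].count++;  /* byte histogram */

	if (byte_set_size(ws->bucket) < BYTE_SET_THRESHOLD)
		return true;                        /* few distinct byte values */

	i = byte_core_set_size(ws->bucket, ws->sample_size);
	if (i <= BYTE_CORE_SET_LOW)
		return true;                        /* heavily skewed histogram */
	if (i >= BYTE_CORE_SET_HIGH)
		return false;                       /* near-uniform spread */

	/* Ambiguous: decide on the entropy estimate. */
	return shannon_entropy_pct(ws->bucket, ws->sample_size) < ENTROPY_LVL_HIGH;
}

The kernel also distinguishes an "acceptable" entropy band below ENTROPY_LVL_ACEPTABLE (65, spelled that way in the source) from the cutoff at ENTROPY_LVL_HIGH via different return codes, but both sides of that band still report "compress".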