Lines matching refs:ws (workspace references in the btrfs compression and heuristic code)
65 static int compression_compress_pages(int type, struct list_head *ws, in compression_compress_pages() argument
72 return zlib_compress_pages(ws, mapping, start, pages, in compression_compress_pages()
75 return lzo_compress_pages(ws, mapping, start, pages, in compression_compress_pages()
78 return zstd_compress_pages(ws, mapping, start, pages, in compression_compress_pages()
96 static int compression_decompress_bio(int type, struct list_head *ws, in compression_decompress_bio() argument
100 case BTRFS_COMPRESS_ZLIB: return zlib_decompress_bio(ws, cb); in compression_decompress_bio()
101 case BTRFS_COMPRESS_LZO: return lzo_decompress_bio(ws, cb); in compression_decompress_bio()
102 case BTRFS_COMPRESS_ZSTD: return zstd_decompress_bio(ws, cb); in compression_decompress_bio()
113 static int compression_decompress(int type, struct list_head *ws, in compression_decompress() argument
118 case BTRFS_COMPRESS_ZLIB: return zlib_decompress(ws, data_in, dest_page, in compression_decompress()
120 case BTRFS_COMPRESS_LZO: return lzo_decompress(ws, data_in, dest_page, in compression_decompress()
122 case BTRFS_COMPRESS_ZSTD: return zstd_decompress(ws, data_in, dest_page, in compression_decompress()
835 static void free_heuristic_ws(struct list_head *ws) in free_heuristic_ws() argument
839 workspace = list_entry(ws, struct heuristic_ws, list); in free_heuristic_ws()
849 struct heuristic_ws *ws; in alloc_heuristic_ws() local
851 ws = kzalloc(sizeof(*ws), GFP_KERNEL); in alloc_heuristic_ws()
852 if (!ws) in alloc_heuristic_ws()
855 ws->sample = kvmalloc(MAX_SAMPLE_SIZE, GFP_KERNEL); in alloc_heuristic_ws()
856 if (!ws->sample) in alloc_heuristic_ws()
859 ws->bucket = kcalloc(BUCKET_SIZE, sizeof(*ws->bucket), GFP_KERNEL); in alloc_heuristic_ws()
860 if (!ws->bucket) in alloc_heuristic_ws()
863 ws->bucket_b = kcalloc(BUCKET_SIZE, sizeof(*ws->bucket_b), GFP_KERNEL); in alloc_heuristic_ws()
864 if (!ws->bucket_b) in alloc_heuristic_ws()
867 INIT_LIST_HEAD(&ws->list); in alloc_heuristic_ws()
868 return &ws->list; in alloc_heuristic_ws()
870 free_heuristic_ws(&ws->list); in alloc_heuristic_ws()
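
alloc_heuristic_ws() allocates the workspace struct plus three buffers (sample, bucket, bucket_b) and, if any allocation fails, unwinds everything allocated so far through free_heuristic_ws() (line 870); callers only ever receive the embedded &ws->list node, never the struct itself. A compilable user-space sketch of the same allocate-or-unwind shape, with local sizes and a local container_of() standing in for the kernel's MAX_SAMPLE_SIZE, BUCKET_SIZE and <linux/list.h> helpers:

#include <stdlib.h>
#include <stddef.h>

struct list_head { struct list_head *next, *prev; };

#define SAMPLE_BYTES 8192   /* stand-in for MAX_SAMPLE_SIZE */
#define BUCKETS      256    /* stand-in for BUCKET_SIZE */

struct heuristic_ws {
	unsigned char *sample;
	unsigned int *bucket;     /* simplified: the kernel uses struct bucket_item */
	unsigned int *bucket_b;
	struct list_head list;    /* callers handle only this embedded node */
};

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

static void free_heuristic_ws(struct list_head *ws)
{
	struct heuristic_ws *w = container_of(ws, struct heuristic_ws, list);

	free(w->sample);
	free(w->bucket);
	free(w->bucket_b);
	free(w);
}

/* Allocate every member or unwind whatever has been allocated so far. */
static struct list_head *alloc_heuristic_ws(void)
{
	struct heuristic_ws *ws = calloc(1, sizeof(*ws));

	if (!ws)
		return NULL;

	ws->sample = malloc(SAMPLE_BYTES);
	if (!ws->sample)
		goto fail;
	ws->bucket = calloc(BUCKETS, sizeof(*ws->bucket));
	if (!ws->bucket)
		goto fail;
	ws->bucket_b = calloc(BUCKETS, sizeof(*ws->bucket_b));
	if (!ws->bucket_b)
		goto fail;

	ws->list.next = ws->list.prev = &ws->list;
	return &ws->list;
fail:
	free_heuristic_ws(&ws->list);
	return NULL;
}
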
902 static void free_workspace(int type, struct list_head *ws) in free_workspace() argument
905 case BTRFS_COMPRESS_NONE: return free_heuristic_ws(ws); in free_workspace()
906 case BTRFS_COMPRESS_ZLIB: return zlib_free_workspace(ws); in free_workspace()
907 case BTRFS_COMPRESS_LZO: return lzo_free_workspace(ws); in free_workspace()
908 case BTRFS_COMPRESS_ZSTD: return zstd_free_workspace(ws); in free_workspace()
947 struct list_head *ws; in btrfs_cleanup_workspace_manager() local
951 ws = wsman->idle_ws.next; in btrfs_cleanup_workspace_manager()
952 list_del(ws); in btrfs_cleanup_workspace_manager()
953 free_workspace(type, ws); in btrfs_cleanup_workspace_manager()
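
btrfs_cleanup_workspace_manager() drains the manager's idle list: as long as the list is non-empty it takes the first node, list_del()s it and hands it to free_workspace() for the matching type. Continuing the allocation sketch above, the drain loop looks roughly like this (list_empty()/list_del() are re-implemented locally and any locking is ignored):

static int list_empty(const struct list_head *head)
{
	return head->next == head;
}

static void list_del(struct list_head *entry)
{
	entry->prev->next = entry->next;
	entry->next->prev = entry->prev;
}

/*
 * Drain an idle-workspace list: while it is non-empty, unlink the first
 * node and free it, the same shape as btrfs_cleanup_workspace_manager().
 */
static void cleanup_workspaces(struct list_head *idle_ws)
{
	while (!list_empty(idle_ws)) {
		struct list_head *ws = idle_ws->next;

		list_del(ws);
		free_heuristic_ws(ws);   /* free_workspace(type, ws) in the kernel */
	}
}
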
1063 void btrfs_put_workspace(int type, struct list_head *ws) in btrfs_put_workspace() argument
1081 list_add(ws, idle_ws); in btrfs_put_workspace()
1088 free_workspace(type, ws); in btrfs_put_workspace()
1094 static void put_workspace(int type, struct list_head *ws) in put_workspace() argument
1097 case BTRFS_COMPRESS_NONE: return btrfs_put_workspace(type, ws); in put_workspace()
1098 case BTRFS_COMPRESS_ZLIB: return btrfs_put_workspace(type, ws); in put_workspace()
1099 case BTRFS_COMPRESS_LZO: return btrfs_put_workspace(type, ws); in put_workspace()
1100 case BTRFS_COMPRESS_ZSTD: return zstd_put_workspace(ws); in put_workspace()
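
btrfs_put_workspace() returns a workspace to the pool: if the idle pool still has room the workspace is parked on the idle list for reuse (line 1081), otherwise it is freed (line 1088); put_workspace() is once more a per-type switch, with zstd routed to its own manager. A sketch of that cache-or-free decision, continuing the list sketch above; POOL_LIMIT and free_count are invented names, and the kernel bounds the pool roughly by the number of online CPUs, under a spinlock, waking any waiter afterwards:

#define POOL_LIMIT 4

static int free_count;

static void list_add(struct list_head *new, struct list_head *head)
{
	new->next = head->next;
	new->prev = head;
	head->next->prev = new;
	head->next = new;
}

/*
 * Return a workspace to a bounded pool: keep it on the idle list if the
 * pool has room, otherwise free it.
 */
static void put_workspace(struct list_head *idle_ws, struct list_head *ws)
{
	if (free_count < POOL_LIMIT) {
		list_add(ws, idle_ws);
		free_count++;
		return;
	}
	free_heuristic_ws(ws);
}
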
1361 static u32 shannon_entropy(struct heuristic_ws *ws) in shannon_entropy() argument
1368 sz_base = ilog2_w(ws->sample_size); in shannon_entropy()
1369 for (i = 0; i < BUCKET_SIZE && ws->bucket[i].count > 0; i++) { in shannon_entropy()
1370 p = ws->bucket[i].count; in shannon_entropy()
1375 entropy_sum /= ws->sample_size; in shannon_entropy()
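
shannon_entropy() turns the byte histogram in ws->bucket into an estimate of how many bits of information each sampled byte carries; the kernel stays in integer arithmetic by using ilog2-style approximations scaled against the sample size (lines 1368-1375). A floating-point user-space sketch of the same measurement, reported as a percentage of the 8-bit maximum:

#include <math.h>

/*
 * Shannon entropy of a byte histogram, as a percentage of the 8-bit
 * maximum.  bucket[i] holds how many times byte value i occurred in a
 * sample of sample_size bytes.  Low values mean the data is highly
 * compressible; values near 100 mean it looks random or pre-compressed.
 */
static unsigned int shannon_entropy_percent(const unsigned int bucket[256],
					    unsigned int sample_size)
{
	double entropy = 0.0;
	int i;

	for (i = 0; i < 256; i++) {
		double p;

		if (!bucket[i])
			continue;
		p = (double)bucket[i] / sample_size;
		entropy += -p * log2(p);
	}
	return (unsigned int)(entropy * 100.0 / 8.0);
}
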
1497 static int byte_core_set_size(struct heuristic_ws *ws) in byte_core_set_size() argument
1501 const u32 core_set_threshold = ws->sample_size * 90 / 100; in byte_core_set_size()
1502 struct bucket_item *bucket = ws->bucket; in byte_core_set_size()
1505 radix_sort(ws->bucket, ws->bucket_b, BUCKET_SIZE); in byte_core_set_size()
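
byte_core_set_size() sorts the buckets by frequency, in the kernel with its own radix_sort() using ws->bucket_b as scratch space (line 1505), then counts how many of the most common byte values are needed to cover 90% of the sample (line 1501). A small core set means a handful of symbols dominate and the data should compress well. A sketch that substitutes qsort() for the radix sort:

#include <stdlib.h>

/* Sort descending by count. */
static int cmp_count_desc(const void *a, const void *b)
{
	unsigned int ca = *(const unsigned int *)a;
	unsigned int cb = *(const unsigned int *)b;

	return (ca < cb) - (ca > cb);
}

/*
 * How many of the most frequent byte values are needed to cover 90% of
 * the sample.  The bucket array is sorted in place.
 */
static unsigned int byte_core_set_size(unsigned int bucket[256],
				       unsigned int sample_size)
{
	const unsigned int threshold = sample_size * 90 / 100;
	unsigned int covered = 0;
	unsigned int i;

	qsort(bucket, 256, sizeof(bucket[0]), cmp_count_desc);

	for (i = 0; i < 256 && covered < threshold; i++)
		covered += bucket[i];

	return i;
}
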
1535 static u32 byte_set_size(const struct heuristic_ws *ws) in byte_set_size() argument
1541 if (ws->bucket[i].count > 0) in byte_set_size()
1551 if (ws->bucket[i].count > 0) { in byte_set_size()
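
byte_set_size() simply counts how many distinct byte values occur at all; the kernel splits the scan into two loops (lines 1541 and 1551) so it can stop as soon as the set is clearly large. A tiny alphabet is a strong hint the data compresses well. A minimal sketch, without the early exit:

/* Count how many distinct byte values occur in the sample. */
static unsigned int byte_set_size(const unsigned int bucket[256])
{
	unsigned int distinct = 0;
	unsigned int i;

	for (i = 0; i < 256; i++)
		if (bucket[i] > 0)
			distinct++;
	return distinct;
}
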
1561 static bool sample_repeated_patterns(struct heuristic_ws *ws) in sample_repeated_patterns() argument
1563 const u32 half_of_sample = ws->sample_size / 2; in sample_repeated_patterns()
1564 const u8 *data = ws->sample; in sample_repeated_patterns()
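
sample_repeated_patterns() handles the trivial case first: it checks whether the second half of the sample merely repeats the first half, and if so the data is declared compressible without running the statistics. Sketch:

#include <string.h>
#include <stdbool.h>

/*
 * Detect the trivially repetitive case: if the second half of the
 * sample is byte-for-byte identical to the first half, assume the data
 * repeats and skip the more expensive checks.
 */
static bool sample_repeated_patterns(const unsigned char *sample,
				     unsigned int sample_size)
{
	const unsigned int half = sample_size / 2;

	return memcmp(sample, sample + half, half) == 0;
}
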
1570 struct heuristic_ws *ws) in heuristic_collect_sample() argument
1606 memcpy(&ws->sample[curr_sample_pos], &in_data[i], in heuristic_collect_sample()
1618 ws->sample_size = curr_sample_pos; in heuristic_collect_sample()
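
heuristic_collect_sample() does not scan the whole range; it copies small, evenly spaced chunks from the inode's pages into ws->sample (line 1606) and records the total in ws->sample_size (line 1618). A user-space sketch of the same strided sampling over a plain buffer, with the chunk and stride sizes as illustrative constants:

#include <string.h>

#define SAMPLE_READ    16     /* bytes copied at each sampling point */
#define SAMPLE_STRIDE  256    /* distance between sampling points */
#define SAMPLE_MAX     8192   /* cap on the collected sample */

/*
 * Copy small chunks at regular strides from in[0..len) into sample[]
 * and return how many bytes were collected.
 */
static unsigned int collect_sample(const unsigned char *in, size_t len,
				   unsigned char *sample)
{
	unsigned int pos = 0;
	size_t i;

	for (i = 0; i + SAMPLE_READ <= len && pos + SAMPLE_READ <= SAMPLE_MAX;
	     i += SAMPLE_STRIDE) {
		memcpy(&sample[pos], &in[i], SAMPLE_READ);
		pos += SAMPLE_READ;
	}
	return pos;
}
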
1639 struct heuristic_ws *ws; in btrfs_compress_heuristic() local
1644 ws = list_entry(ws_list, struct heuristic_ws, list); in btrfs_compress_heuristic()
1646 heuristic_collect_sample(inode, start, end, ws); in btrfs_compress_heuristic()
1648 if (sample_repeated_patterns(ws)) { in btrfs_compress_heuristic()
1653 memset(ws->bucket, 0, sizeof(*ws->bucket)*BUCKET_SIZE); in btrfs_compress_heuristic()
1655 for (i = 0; i < ws->sample_size; i++) { in btrfs_compress_heuristic()
1656 byte = ws->sample[i]; in btrfs_compress_heuristic()
1657 ws->bucket[byte].count++; in btrfs_compress_heuristic()
1660 i = byte_set_size(ws); in btrfs_compress_heuristic()
1666 i = byte_core_set_size(ws); in btrfs_compress_heuristic()
1677 i = shannon_entropy(ws); in btrfs_compress_heuristic()
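
btrfs_compress_heuristic() chains these checks from cheapest to most expensive: fetch a heuristic workspace, collect the sample, bail out early on a repeated pattern (line 1648), rebuild the histogram (lines 1653-1657), then try byte_set_size(), byte_core_set_size() and finally shannon_entropy() until one of them gives a confident answer. A sketch of that decision ladder built from the user-space helpers above; the cutoff values are illustrative, not the kernel's:

#include <stdbool.h>
#include <string.h>

/* Decide whether data looks worth compressing, cheapest test first. */
static bool looks_compressible(const unsigned char *in, size_t len)
{
	static unsigned char sample[SAMPLE_MAX];
	unsigned int bucket[256] = { 0 };
	unsigned int sample_size, i, n;

	sample_size = collect_sample(in, len, sample);
	if (!sample_size)
		return false;

	/* Second half identical to the first half: trivially compressible. */
	if (sample_repeated_patterns(sample, sample_size))
		return true;

	/* Build the byte histogram for the statistical checks. */
	for (i = 0; i < sample_size; i++)
		bucket[sample[i]]++;

	if (byte_set_size(bucket) < 64)        /* tiny alphabet */
		return true;

	n = byte_core_set_size(bucket, sample_size);
	if (n < 32)                            /* a few symbols dominate */
		return true;
	if (n > 200)                           /* nearly uniform distribution */
		return false;

	/*
	 * Last resort: entropy as a percentage of the 8-bit maximum.  The
	 * sort inside byte_core_set_size() only reordered the counts, so
	 * the entropy result is unaffected.
	 */
	return shannon_entropy_percent(bucket, sample_size) < 70;
}

In the kernel the sample buffer and histogram live in the pooled heuristic workspace rather than in static or stack storage, which is why the real function memsets ws->bucket before reuse (line 1653).
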