Lines Matching refs:ws (uses of the workspace pointer ws in zstd's lib/compress/zstd_cwksp.h; the number at the left of each line is its line in that file)

164 MEM_STATIC size_t ZSTD_cwksp_available_space(ZSTD_cwksp* ws);
166 MEM_STATIC void ZSTD_cwksp_assert_internal_consistency(ZSTD_cwksp* ws) { in ZSTD_cwksp_assert_internal_consistency() argument
167 (void)ws; in ZSTD_cwksp_assert_internal_consistency()
168 assert(ws->workspace <= ws->objectEnd); in ZSTD_cwksp_assert_internal_consistency()
169 assert(ws->objectEnd <= ws->tableEnd); in ZSTD_cwksp_assert_internal_consistency()
170 assert(ws->objectEnd <= ws->tableValidEnd); in ZSTD_cwksp_assert_internal_consistency()
171 assert(ws->tableEnd <= ws->allocStart); in ZSTD_cwksp_assert_internal_consistency()
172 assert(ws->tableValidEnd <= ws->allocStart); in ZSTD_cwksp_assert_internal_consistency()
173 assert(ws->allocStart <= ws->workspaceEnd); in ZSTD_cwksp_assert_internal_consistency()
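The asserts above pin down the workspace layout: objects sit at the bottom, the table area directly above them, and buffers/aligned allocations grow down from the top, so the free region is always the gap between tableEnd and allocStart. A minimal standalone sketch of that ordering; the field names mirror the listing, everything else (including the toy_ names) is illustrative and not zstd code:

    #include <assert.h>

    /* Toy arena mirroring the cwksp pointer ordering; illustrative only. */
    typedef struct {
        char* workspace;      /* arena start */
        char* objectEnd;      /* end of the objects reserved first */
        char* tableEnd;       /* end of the table area (grows upward) */
        char* tableValidEnd;  /* high-water mark of initialized table bytes */
        char* allocStart;     /* buffers/aligned allocs grow downward from here */
        char* workspaceEnd;   /* arena end */
    } toy_cwksp;

    static void toy_cwksp_check(const toy_cwksp* w) {
        assert(w->workspace     <= w->objectEnd);
        assert(w->objectEnd     <= w->tableEnd);
        assert(w->objectEnd     <= w->tableValidEnd);
        assert(w->tableEnd      <= w->allocStart);
        assert(w->tableValidEnd <= w->allocStart);
        assert(w->allocStart    <= w->workspaceEnd);
    }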
252 ZSTD_cwksp_reserve_internal_buffer_space(ZSTD_cwksp* ws, size_t const bytes) in ZSTD_cwksp_reserve_internal_buffer_space() argument
254 void* const alloc = (BYTE*)ws->allocStart - bytes; in ZSTD_cwksp_reserve_internal_buffer_space()
255 void* const bottom = ws->tableEnd; in ZSTD_cwksp_reserve_internal_buffer_space()
257 alloc, bytes, ZSTD_cwksp_available_space(ws) - bytes); in ZSTD_cwksp_reserve_internal_buffer_space()
258 ZSTD_cwksp_assert_internal_consistency(ws); in ZSTD_cwksp_reserve_internal_buffer_space()
262 ws->allocFailed = 1; in ZSTD_cwksp_reserve_internal_buffer_space()
267 if (alloc < ws->tableValidEnd) { in ZSTD_cwksp_reserve_internal_buffer_space()
268 ws->tableValidEnd = alloc; in ZSTD_cwksp_reserve_internal_buffer_space()
270 ws->allocStart = alloc; in ZSTD_cwksp_reserve_internal_buffer_space()
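ZSTD_cwksp_reserve_internal_buffer_space() is a downward bump allocation: allocStart moves down by the requested size, and the request fails (setting allocFailed) when it would collide with the table area at tableEnd; it also pulls tableValidEnd down so stale table bytes are never treated as initialized. A hedged standalone sketch of the same bump-down technique, not the zstd implementation:

    #include <stddef.h>

    /* Carve `bytes` off the top of a [bottom, *top) free region.
     * Returns NULL and sets *failed when the request would cross `bottom`. */
    static void* bump_down(char** top, char* bottom, size_t bytes, int* failed) {
        if ((size_t)(*top - bottom) < bytes) {  /* would run into the tables */
            *failed = 1;
            return NULL;
        }
        *top -= bytes;     /* the new top of the free region is the allocation */
        return *top;
    }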
280 ZSTD_cwksp_internal_advance_phase(ZSTD_cwksp* ws, ZSTD_cwksp_alloc_phase_e phase) in ZSTD_cwksp_internal_advance_phase() argument
282 assert(phase >= ws->phase); in ZSTD_cwksp_internal_advance_phase()
283 if (phase > ws->phase) { in ZSTD_cwksp_internal_advance_phase()
285 if (ws->phase < ZSTD_cwksp_alloc_buffers && in ZSTD_cwksp_internal_advance_phase()
287 ws->tableValidEnd = ws->objectEnd; in ZSTD_cwksp_internal_advance_phase()
291 if (ws->phase < ZSTD_cwksp_alloc_aligned && in ZSTD_cwksp_internal_advance_phase()
295 ZSTD_CWKSP_ALIGNMENT_BYTES - ZSTD_cwksp_bytes_to_align_ptr(ws->allocStart, ZSTD_CWKSP_ALIGNMENT_BYTES); in ZSTD_cwksp_internal_advance_phase()
298 RETURN_ERROR_IF(!ZSTD_cwksp_reserve_internal_buffer_space(ws, bytesToAlign), in ZSTD_cwksp_internal_advance_phase()
302 void* const alloc = ws->objectEnd; in ZSTD_cwksp_internal_advance_phase()
306 RETURN_ERROR_IF(objectEnd > ws->workspaceEnd, memory_allocation, in ZSTD_cwksp_internal_advance_phase()
308 ws->objectEnd = objectEnd; in ZSTD_cwksp_internal_advance_phase()
309 ws->tableEnd = objectEnd; /* table area starts being empty */ in ZSTD_cwksp_internal_advance_phase()
310 if (ws->tableValidEnd < ws->tableEnd) { in ZSTD_cwksp_internal_advance_phase()
311 ws->tableValidEnd = ws->tableEnd; in ZSTD_cwksp_internal_advance_phase()
313 ws->phase = phase; in ZSTD_cwksp_internal_advance_phase()
314 ZSTD_cwksp_assert_internal_consistency(ws); in ZSTD_cwksp_internal_advance_phase()
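ZSTD_cwksp_internal_advance_phase() enforces the allocation order objects, then buffers, then aligned/tables: crossing into the buffer phase seeds tableValidEnd, and crossing into the aligned phase first spends a few bytes so that allocStart lands on a ZSTD_CWKSP_ALIGNMENT_BYTES boundary before any aligned reservation is handed out. A generic sketch of that realignment arithmetic for a downward-growing pointer, assuming a power-of-two alignment; this is the usual mask trick, not necessarily zstd's exact formula:

    #include <stdint.h>
    #include <stddef.h>

    /* Bytes a downward-growing alloc pointer must give up so that the next
     * allocation starts on an `alignment` boundary (alignment: power of two). */
    static size_t bytes_to_realign_down(const void* allocStart, size_t alignment) {
        return (size_t)((uintptr_t)allocStart & (alignment - 1));
    }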
322 MEM_STATIC int ZSTD_cwksp_owns_buffer(const ZSTD_cwksp* ws, const void* ptr) in ZSTD_cwksp_owns_buffer() argument
324 return (ptr != NULL) && (ws->workspace <= ptr) && (ptr <= ws->workspaceEnd); in ZSTD_cwksp_owns_buffer()
331 ZSTD_cwksp_reserve_internal(ZSTD_cwksp* ws, size_t bytes, ZSTD_cwksp_alloc_phase_e phase) in ZSTD_cwksp_reserve_internal() argument
334 if (ZSTD_isError(ZSTD_cwksp_internal_advance_phase(ws, phase)) || bytes == 0) { in ZSTD_cwksp_reserve_internal()
343 alloc = ZSTD_cwksp_reserve_internal_buffer_space(ws, bytes); in ZSTD_cwksp_reserve_internal()
350 if (ws->isStatic == ZSTD_cwksp_dynamic_alloc) { in ZSTD_cwksp_reserve_internal()
362 MEM_STATIC BYTE* ZSTD_cwksp_reserve_buffer(ZSTD_cwksp* ws, size_t bytes) in ZSTD_cwksp_reserve_buffer() argument
364 return (BYTE*)ZSTD_cwksp_reserve_internal(ws, bytes, ZSTD_cwksp_alloc_buffers); in ZSTD_cwksp_reserve_buffer()
370 MEM_STATIC void* ZSTD_cwksp_reserve_aligned(ZSTD_cwksp* ws, size_t bytes) in ZSTD_cwksp_reserve_aligned() argument
372 void* ptr = ZSTD_cwksp_reserve_internal(ws, ZSTD_cwksp_align(bytes, ZSTD_CWKSP_ALIGNMENT_BYTES), in ZSTD_cwksp_reserve_aligned()
383 MEM_STATIC void* ZSTD_cwksp_reserve_table(ZSTD_cwksp* ws, size_t bytes) in ZSTD_cwksp_reserve_table() argument
390 if (ZSTD_isError(ZSTD_cwksp_internal_advance_phase(ws, phase))) { in ZSTD_cwksp_reserve_table()
393 alloc = ws->tableEnd; in ZSTD_cwksp_reserve_table()
395 top = ws->allocStart; in ZSTD_cwksp_reserve_table()
398 alloc, bytes, ZSTD_cwksp_available_space(ws) - bytes); in ZSTD_cwksp_reserve_table()
400 ZSTD_cwksp_assert_internal_consistency(ws); in ZSTD_cwksp_reserve_table()
404 ws->allocFailed = 1; in ZSTD_cwksp_reserve_table()
407 ws->tableEnd = end; in ZSTD_cwksp_reserve_table()
410 if (ws->isStatic == ZSTD_cwksp_dynamic_alloc) { in ZSTD_cwksp_reserve_table()
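Tables are the mirror image of buffers: ZSTD_cwksp_reserve_table() bump-allocates upward from tableEnd and fails if the new end would cross allocStart, which lets the same table region be handed out again on reuse without re-zeroing (see the clean/dirty helpers below). A hedged standalone sketch of the upward bump, again not the zstd implementation:

    #include <stddef.h>

    /* Carve `bytes` off the bottom of a [*bottom, top) free region.
     * Returns NULL and sets *failed when the request would cross `top`. */
    static void* bump_up(char** bottom, char* top, size_t bytes, int* failed) {
        if ((size_t)(top - *bottom) < bytes) {  /* would run into the buffers */
            *failed = 1;
            return NULL;
        }
        {   char* const alloc = *bottom;        /* table starts at the old tableEnd */
            *bottom += bytes;
            return alloc;
        }
    }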
424 MEM_STATIC void* ZSTD_cwksp_reserve_object(ZSTD_cwksp* ws, size_t bytes) in ZSTD_cwksp_reserve_object() argument
427 void* alloc = ws->objectEnd; in ZSTD_cwksp_reserve_object()
437 alloc, bytes, roundedBytes, ZSTD_cwksp_available_space(ws) - roundedBytes); in ZSTD_cwksp_reserve_object()
440 ZSTD_cwksp_assert_internal_consistency(ws); in ZSTD_cwksp_reserve_object()
442 if (ws->phase != ZSTD_cwksp_alloc_objects || end > ws->workspaceEnd) { in ZSTD_cwksp_reserve_object()
444 ws->allocFailed = 1; in ZSTD_cwksp_reserve_object()
447 ws->objectEnd = end; in ZSTD_cwksp_reserve_object()
448 ws->tableEnd = end; in ZSTD_cwksp_reserve_object()
449 ws->tableValidEnd = end; in ZSTD_cwksp_reserve_object()
455 if (ws->isStatic == ZSTD_cwksp_dynamic_alloc) { in ZSTD_cwksp_reserve_object()
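Objects can only be reserved while the workspace is still in the ZSTD_cwksp_alloc_objects phase; the request is rounded up to a multiple of sizeof(void*) (the roundedBytes in the debug line above), and objectEnd, tableEnd and tableValidEnd all advance together so the table area starts out empty just above the objects. A small sketch of that round-up, assuming the alignment is a power of two (true of sizeof(void*) on the platforms zstd targets):

    #include <stddef.h>

    /* Round `bytes` up to a multiple of `align` (power of two), e.g. sizeof(void*). */
    static size_t round_up_pow2(size_t bytes, size_t align) {
        size_t const mask = align - 1;
        return (bytes + mask) & ~mask;
    }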
463 MEM_STATIC void ZSTD_cwksp_mark_tables_dirty(ZSTD_cwksp* ws) in ZSTD_cwksp_mark_tables_dirty() argument
472 size_t size = (BYTE*)ws->tableValidEnd - (BYTE*)ws->objectEnd; in ZSTD_cwksp_mark_tables_dirty()
473 assert(__msan_test_shadow(ws->objectEnd, size) == -1); in ZSTD_cwksp_mark_tables_dirty()
474 __msan_poison(ws->objectEnd, size); in ZSTD_cwksp_mark_tables_dirty()
478 assert(ws->tableValidEnd >= ws->objectEnd); in ZSTD_cwksp_mark_tables_dirty()
479 assert(ws->tableValidEnd <= ws->allocStart); in ZSTD_cwksp_mark_tables_dirty()
480 ws->tableValidEnd = ws->objectEnd; in ZSTD_cwksp_mark_tables_dirty()
481 ZSTD_cwksp_assert_internal_consistency(ws); in ZSTD_cwksp_mark_tables_dirty()
484 MEM_STATIC void ZSTD_cwksp_mark_tables_clean(ZSTD_cwksp* ws) { in ZSTD_cwksp_mark_tables_clean() argument
486 assert(ws->tableValidEnd >= ws->objectEnd); in ZSTD_cwksp_mark_tables_clean()
487 assert(ws->tableValidEnd <= ws->allocStart); in ZSTD_cwksp_mark_tables_clean()
488 if (ws->tableValidEnd < ws->tableEnd) { in ZSTD_cwksp_mark_tables_clean()
489 ws->tableValidEnd = ws->tableEnd; in ZSTD_cwksp_mark_tables_clean()
491 ZSTD_cwksp_assert_internal_consistency(ws); in ZSTD_cwksp_mark_tables_clean()
497 MEM_STATIC void ZSTD_cwksp_clean_tables(ZSTD_cwksp* ws) { in ZSTD_cwksp_clean_tables() argument
499 assert(ws->tableValidEnd >= ws->objectEnd); in ZSTD_cwksp_clean_tables()
500 assert(ws->tableValidEnd <= ws->allocStart); in ZSTD_cwksp_clean_tables()
501 if (ws->tableValidEnd < ws->tableEnd) { in ZSTD_cwksp_clean_tables()
502 ZSTD_memset(ws->tableValidEnd, 0, (BYTE*)ws->tableEnd - (BYTE*)ws->tableValidEnd); in ZSTD_cwksp_clean_tables()
504 ZSTD_cwksp_mark_tables_clean(ws); in ZSTD_cwksp_clean_tables()
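tableValidEnd records how much of the table area is known to hold initialized data: ZSTD_cwksp_mark_tables_dirty() pulls it back to objectEnd (re-poisoning the range under MSAN), ZSTD_cwksp_mark_tables_clean() pushes it up to tableEnd, and ZSTD_cwksp_clean_tables() zeroes only the stale slice in between before marking everything clean. A hedged sketch of that zero-only-what-is-stale pattern, with illustrative names:

    #include <string.h>
    #include <stddef.h>

    /* Zero only the table bytes beyond the initialized high-water mark,
     * then advance the mark. */
    static void clean_stale_tables(char* tableEnd, char** tableValidEnd) {
        if (*tableValidEnd < tableEnd) {
            memset(*tableValidEnd, 0, (size_t)(tableEnd - *tableValidEnd));
            *tableValidEnd = tableEnd;
        }
    }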
511 MEM_STATIC void ZSTD_cwksp_clear_tables(ZSTD_cwksp* ws) { in ZSTD_cwksp_clear_tables() argument
519 if (ws->isStatic == ZSTD_cwksp_dynamic_alloc) { in ZSTD_cwksp_clear_tables()
520 size_t size = (BYTE*)ws->tableValidEnd - (BYTE*)ws->objectEnd; in ZSTD_cwksp_clear_tables()
521 __asan_poison_memory_region(ws->objectEnd, size); in ZSTD_cwksp_clear_tables()
525 ws->tableEnd = ws->objectEnd; in ZSTD_cwksp_clear_tables()
526 ZSTD_cwksp_assert_internal_consistency(ws); in ZSTD_cwksp_clear_tables()
533 MEM_STATIC void ZSTD_cwksp_clear(ZSTD_cwksp* ws) { in ZSTD_cwksp_clear() argument
542 size_t size = (BYTE*)ws->workspaceEnd - (BYTE*)ws->tableValidEnd; in ZSTD_cwksp_clear()
543 __msan_poison(ws->tableValidEnd, size); in ZSTD_cwksp_clear()
552 if (ws->isStatic == ZSTD_cwksp_dynamic_alloc) { in ZSTD_cwksp_clear()
553 size_t size = (BYTE*)ws->workspaceEnd - (BYTE*)ws->objectEnd; in ZSTD_cwksp_clear()
554 __asan_poison_memory_region(ws->objectEnd, size); in ZSTD_cwksp_clear()
558 ws->tableEnd = ws->objectEnd; in ZSTD_cwksp_clear()
559 ws->allocStart = ws->workspaceEnd; in ZSTD_cwksp_clear()
560 ws->allocFailed = 0; in ZSTD_cwksp_clear()
561 if (ws->phase > ZSTD_cwksp_alloc_buffers) { in ZSTD_cwksp_clear()
562 ws->phase = ZSTD_cwksp_alloc_buffers; in ZSTD_cwksp_clear()
564 ZSTD_cwksp_assert_internal_consistency(ws); in ZSTD_cwksp_clear()
572 MEM_STATIC void ZSTD_cwksp_init(ZSTD_cwksp* ws, void* start, size_t size, ZSTD_cwksp_static_alloc_e isStatic) { in ZSTD_cwksp_init() argument
575 ws->workspace = start; in ZSTD_cwksp_init()
576 ws->workspaceEnd = (BYTE*)start + size; in ZSTD_cwksp_init()
577 ws->objectEnd = ws->workspace; in ZSTD_cwksp_init()
578 ws->tableValidEnd = ws->objectEnd; in ZSTD_cwksp_init()
579 ws->phase = ZSTD_cwksp_alloc_objects; in ZSTD_cwksp_init()
580 ws->isStatic = isStatic; in ZSTD_cwksp_init()
581 ZSTD_cwksp_clear(ws); in ZSTD_cwksp_init()
582 ws->workspaceOversizedDuration = 0; in ZSTD_cwksp_init()
583 ZSTD_cwksp_assert_internal_consistency(ws); in ZSTD_cwksp_init()
586 MEM_STATIC size_t ZSTD_cwksp_create(ZSTD_cwksp* ws, size_t size, ZSTD_customMem customMem) { in ZSTD_cwksp_create() argument
590 ZSTD_cwksp_init(ws, workspace, size, ZSTD_cwksp_dynamic_alloc); in ZSTD_cwksp_create()
594 MEM_STATIC void ZSTD_cwksp_free(ZSTD_cwksp* ws, ZSTD_customMem customMem) { in ZSTD_cwksp_free() argument
595 void *ptr = ws->workspace; in ZSTD_cwksp_free()
597 ZSTD_memset(ws, 0, sizeof(ZSTD_cwksp)); in ZSTD_cwksp_free()
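Taken together, create/init, the reserve calls, clear and free form the workspace lifecycle: allocate (or adopt) one block up front, hand out sub-allocations in phase order, clear between compressions, and free once at the end. A hedged usage sketch, assuming it is compiled inside zstd's lib/compress/ directory where "zstd_cwksp.h" and its dependencies (ZSTD_customMem, BYTE, U32, and the internal error macros) are in scope; the sizes are arbitrary placeholders, not values zstd uses:

    #include "zstd_cwksp.h"

    static size_t toy_cwksp_roundtrip(void)
    {
        ZSTD_customMem const cMem = { NULL, NULL, NULL };  /* fall back to malloc/free */
        ZSTD_cwksp ws;
        FORWARD_IF_ERROR(ZSTD_cwksp_create(&ws, 1 << 20, cMem), "workspace alloc failed");

        {   /* Phase order matters: objects first, then buffers, then aligned/tables. */
            void* const obj   = ZSTD_cwksp_reserve_object(&ws, 64);
            BYTE* const buf   = ZSTD_cwksp_reserve_buffer(&ws, 4096);
            void* const table = ZSTD_cwksp_reserve_table(&ws, 1024 * sizeof(U32));
            (void)obj; (void)buf; (void)table;
            if (ZSTD_cwksp_reserve_failed(&ws)) {   /* reserves report failure lazily */
                ZSTD_cwksp_free(&ws, cMem);
                RETURN_ERROR(memory_allocation, "workspace too small");
            }
        }

        ZSTD_cwksp_clear(&ws);       /* reuse for another compression; objects survive */
        ZSTD_cwksp_free(&ws, cMem);  /* releases the single backing allocation */
        return 0;
    }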
610 MEM_STATIC size_t ZSTD_cwksp_sizeof(const ZSTD_cwksp* ws) { in ZSTD_cwksp_sizeof() argument
611 return (size_t)((BYTE*)ws->workspaceEnd - (BYTE*)ws->workspace); in ZSTD_cwksp_sizeof()
614 MEM_STATIC size_t ZSTD_cwksp_used(const ZSTD_cwksp* ws) { in ZSTD_cwksp_used() argument
615 return (size_t)((BYTE*)ws->tableEnd - (BYTE*)ws->workspace) in ZSTD_cwksp_used()
616 + (size_t)((BYTE*)ws->workspaceEnd - (BYTE*)ws->allocStart); in ZSTD_cwksp_used()
619 MEM_STATIC int ZSTD_cwksp_reserve_failed(const ZSTD_cwksp* ws) { in ZSTD_cwksp_reserve_failed() argument
620 return ws->allocFailed; in ZSTD_cwksp_reserve_failed()
631 MEM_STATIC int ZSTD_cwksp_estimated_space_within_bounds(const ZSTD_cwksp* const ws, in ZSTD_cwksp_estimated_space_within_bounds() argument
635 return ZSTD_cwksp_used(ws) == estimatedSpace; in ZSTD_cwksp_estimated_space_within_bounds()
640 return (ZSTD_cwksp_used(ws) >= estimatedSpace - 63) && (ZSTD_cwksp_used(ws) <= estimatedSpace + 63); in ZSTD_cwksp_estimated_space_within_bounds()
645 MEM_STATIC size_t ZSTD_cwksp_available_space(ZSTD_cwksp* ws) { in ZSTD_cwksp_available_space() argument
646 return (size_t)((BYTE*)ws->allocStart - (BYTE*)ws->tableEnd); in ZSTD_cwksp_available_space()
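Because allocations come from both ends of the arena, ZSTD_cwksp_used() adds up the two occupied ends while ZSTD_cwksp_available_space() is just the gap between them. A tiny worked example of that accounting on made-up pointers:

    #include <stddef.h>

    /* Accounting over a toy layout: used = both occupied ends, available = the gap. */
    static void toy_accounting(void)
    {
        char arena[1024];
        char* const workspace    = arena;
        char* const tableEnd     = arena + 300;   /* objects + tables occupy 300 bytes  */
        char* const allocStart   = arena + 900;   /* buffers occupy the top 124 bytes   */
        char* const workspaceEnd = arena + 1024;

        size_t const used      = (size_t)(tableEnd - workspace)
                               + (size_t)(workspaceEnd - allocStart);  /* 300 + 124 = 424 */
        size_t const available = (size_t)(allocStart - tableEnd);      /* 900 - 300 = 600 */
        (void)used; (void)available;
    }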
649 MEM_STATIC int ZSTD_cwksp_check_available(ZSTD_cwksp* ws, size_t additionalNeededSpace) { in ZSTD_cwksp_check_available() argument
650 return ZSTD_cwksp_available_space(ws) >= additionalNeededSpace; in ZSTD_cwksp_check_available()
653 MEM_STATIC int ZSTD_cwksp_check_too_large(ZSTD_cwksp* ws, size_t additionalNeededSpace) { in ZSTD_cwksp_check_too_large() argument
655 ws, additionalNeededSpace * ZSTD_WORKSPACETOOLARGE_FACTOR); in ZSTD_cwksp_check_too_large()
658 MEM_STATIC int ZSTD_cwksp_check_wasteful(ZSTD_cwksp* ws, size_t additionalNeededSpace) { in ZSTD_cwksp_check_wasteful() argument
659 return ZSTD_cwksp_check_too_large(ws, additionalNeededSpace) in ZSTD_cwksp_check_wasteful()
660 && ws->workspaceOversizedDuration > ZSTD_WORKSPACETOOLARGE_MAXDURATION; in ZSTD_cwksp_check_wasteful()
664 ZSTD_cwksp* ws, size_t additionalNeededSpace) { in ZSTD_cwksp_bump_oversized_duration() argument
665 if (ZSTD_cwksp_check_too_large(ws, additionalNeededSpace)) { in ZSTD_cwksp_bump_oversized_duration()
666 ws->workspaceOversizedDuration++; in ZSTD_cwksp_bump_oversized_duration()
668 ws->workspaceOversizedDuration = 0; in ZSTD_cwksp_bump_oversized_duration()
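The last three helpers implement the shrink heuristic: the workspace counts as "too large" once its free gap could hold ZSTD_WORKSPACETOOLARGE_FACTOR times the space actually being asked for, and as "wasteful" (worth reallocating smaller) only after it has stayed that oversized for more than ZSTD_WORKSPACETOOLARGE_MAXDURATION consecutive uses, which is the counter ZSTD_cwksp_bump_oversized_duration() maintains. A hedged standalone sketch of that hysteresis; the constants are illustrative rather than zstd's values, and the bump and the check are folded into one function here for brevity:

    #include <stddef.h>

    #define TOY_TOOLARGE_FACTOR      3u
    #define TOY_TOOLARGE_MAXDURATION 128u

    /* Only report "shrink me" after the free gap has been oversized for
     * several consecutive uses, to avoid thrashing on alternating sizes. */
    static int toy_check_wasteful(size_t availableSpace, size_t neededSpace,
                                  unsigned* oversizedDuration)
    {
        int const tooLarge = availableSpace >= neededSpace * TOY_TOOLARGE_FACTOR;
        if (tooLarge) (*oversizedDuration)++;
        else *oversizedDuration = 0;
        return tooLarge && *oversizedDuration > TOY_TOOLARGE_MAXDURATION;
    }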