Lines matching refs: blockNb (every match below is inside BMK_benchMem(); the number on the left is the source line, and … marks text truncated by the search listing)
242    U32 blockNb;    (local declaration)
244    for (blockNb=0; blockNb<nbBlocks; blockNb++) {
247    … blockTable[blockNb].cPtr, blockTable[blockNb].cRoom,
248    … blockTable[blockNb].srcPtr,blockTable[blockNb].srcSize,
252    … blockTable[blockNb].cPtr, blockTable[blockNb].cRoom,
253    … blockTable[blockNb].srcPtr,blockTable[blockNb].srcSize, cLevel);
256    blockTable[blockNb].cSize = rSize;
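The call whose arguments appear at lines 247-253 is truncated in this listing. The sketch below shows what such a per-block compression loop looks like with the public zstd API; blockTable's field names, nbBlocks and cLevel are taken from the listing, while the struct layout, the helper name and the use of ZSTD_compressCCtx() are assumptions rather than the benchmark's actual code.

    #include <stdio.h>
    #include <zstd.h>

    typedef struct {
        const void* srcPtr;    /* start of this block's input */
        size_t      srcSize;   /* input size of this block */
        void*       cPtr;      /* destination for the compressed block */
        size_t      cRoom;     /* capacity of cPtr, >= ZSTD_compressBound(srcSize) */
        size_t      cSize;     /* compressed size, filled by the loop */
        void*       resPtr;    /* destination for the regenerated block */
        size_t      resSize;   /* regenerated size, filled on decompression */
    } blockParam_t;

    /* Compress every block independently; returns 0 on success. */
    static int compressBlocks(blockParam_t* blockTable, unsigned nbBlocks, int cLevel)
    {
        ZSTD_CCtx* const cctx = ZSTD_createCCtx();
        unsigned blockNb;
        if (cctx == NULL) return 1;
        for (blockNb = 0; blockNb < nbBlocks; blockNb++) {
            size_t const rSize = ZSTD_compressCCtx(cctx,
                            blockTable[blockNb].cPtr, blockTable[blockNb].cRoom,
                            blockTable[blockNb].srcPtr, blockTable[blockNb].srcSize,
                            cLevel);
            if (ZSTD_isError(rSize)) {
                fprintf(stderr, "compression error: %s\n", ZSTD_getErrorName(rSize));
                ZSTD_freeCCtx(cctx);
                return 1;
            }
            blockTable[blockNb].cSize = rSize;   /* mirrors line 256 */
        }
        ZSTD_freeCCtx(cctx);
        return 0;
    }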
284    U32 blockNb;    (local declaration)
285    for (blockNb=0; blockNb<nbBlocks; blockNb++) {
288    rSize = ZSTD_CCtx_setPledgedSrcSize(zbc, blockTable[blockNb].srcSize);
290    inBuffer.src = blockTable[blockNb].srcPtr;
291    inBuffer.size = blockTable[blockNb].srcSize;
293    outBuffer.dst = blockTable[blockNb].cPtr;
294    outBuffer.size = blockTable[blockNb].cRoom;
300    blockTable[blockNb].cSize = outBuffer.pos;
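Lines 288-300 drive the streaming compression path: the block's size is pledged up front, the block is pushed through a ZSTD_inBuffer/ZSTD_outBuffer pair, and the compressed size is read back from outBuffer.pos. The listing does not show which streaming call sits between lines 294 and 300, so the sketch below uses ZSTD_compressStream2() with ZSTD_e_end as an assumption; zbc and the buffer names follow the listing.

    #include <zstd.h>

    /* One block compressed through the streaming API.  For a block whose
     * compressed form fits in dstCapacity, a single ZSTD_e_end call
     * finishes the frame; larger inputs would need a loop. */
    static size_t streamCompressBlock(ZSTD_CCtx* zbc,
                                      void* dst, size_t dstCapacity,
                                      const void* src, size_t srcSize,
                                      int cLevel)
    {
        ZSTD_inBuffer  inBuffer  = { src, srcSize, 0 };
        ZSTD_outBuffer outBuffer = { dst, dstCapacity, 0 };
        size_t rSize;

        ZSTD_CCtx_reset(zbc, ZSTD_reset_session_only);
        ZSTD_CCtx_setParameter(zbc, ZSTD_c_compressionLevel, cLevel);

        /* Declaring the source size lets the frame header record it (line 288). */
        rSize = ZSTD_CCtx_setPledgedSrcSize(zbc, srcSize);
        if (ZSTD_isError(rSize)) return rSize;

        rSize = ZSTD_compressStream2(zbc, &outBuffer, &inBuffer, ZSTD_e_end);
        if (ZSTD_isError(rSize)) return rSize;

        return outBuffer.pos;   /* compressed size, as stored at line 300 */
    }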
321    U32 blockNb;    (local declaration)
322    for (blockNb=0; blockNb<nbBlocks; blockNb++) {
333    def.next_in = (z_const z_Bytef*) blockTable[blockNb].srcPtr;
334    def.avail_in = (uInt)blockTable[blockNb].srcSize;
336    def.next_out = (z_Bytef*) blockTable[blockNb].cPtr;
337    def.avail_out = (uInt)blockTable[blockNb].cRoom;
340    …EAM_END) EXM_THROW(1, "deflate failure ret=%d srcSize=%d" , ret, (int)blockTable[blockNb].srcSize);
341    blockTable[blockNb].cSize = def.total_out;
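Lines 333-341 are the zlib comparison path: each block is fed to deflate() in a single call and the result size is read from total_out, with line 340 checking the return value against Z_STREAM_END. A sketch of that pattern follows; it assumes the z_stream was initialized once with deflateInit() before the block loop and is reset per block, which the truncated lines do not show.

    #include <stdio.h>
    #include <zlib.h>

    /* Compress one block with zlib.  def must have been set up with
     * deflateInit(def, cLevel) beforehand. */
    static int deflateBlock(z_stream* def,
                            const void* src, size_t srcSize,
                            void* dst, size_t dstCapacity,
                            size_t* cSize)
    {
        int ret = deflateReset(def);
        if (ret != Z_OK) return ret;

        def->next_in   = (z_const Bytef*)src;
        def->avail_in  = (uInt)srcSize;
        def->next_out  = (Bytef*)dst;
        def->avail_out = (uInt)dstCapacity;

        /* Z_FINISH compresses the whole block in one call; Z_STREAM_END
         * means all input was consumed and the stream is complete. */
        ret = deflate(def, Z_FINISH);
        if (ret != Z_STREAM_END) {
            fprintf(stderr, "deflate failure ret=%d srcSize=%d\n", ret, (int)srcSize);
            return ret;
        }
        *cSize = def->total_out;   /* mirrors line 341 */
        return Z_OK;
    }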
352    U32 blockNb;    (local declaration)
353    for (blockNb=0; blockNb<nbBlocks; blockNb++) {
364    def.next_in = (z_const z_Bytef*) blockTable[blockNb].srcPtr;
365    def.avail_in = (uInt)blockTable[blockNb].srcSize;
367    def.next_out = (z_Bytef*) blockTable[blockNb].cPtr;
368    def.avail_out = (uInt)blockTable[blockNb].cRoom;
374    blockTable[blockNb].cSize = def.total_out;
386    … { U32 blockNb; for (blockNb=0; blockNb<nbBlocks; blockNb++) cSize += blockTable[blockNb].cSize; }    (local declaration)
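Line 386 folds the per-block results into a single total. The ratio the benchmark reports then follows directly; a small sketch, assuming srcSize holds the total input size and that the ratio is computed as input size over compressed size:

    /* Sum the per-block compressed sizes, then derive the ratio. */
    size_t cSize = 0;
    {   U32 blockNb;
        for (blockNb = 0; blockNb < nbBlocks; blockNb++)
            cSize += blockTable[blockNb].cSize;
    }
    {   double const ratio = (double)srcSize / (double)cSize;   /* assumed reporting formula */
        (void)ratio;
    }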
408    unsigned blockNb;    (local declaration)
409    for (blockNb=0; blockNb<nbBlocks; blockNb++) {
411    blockTable[blockNb].resPtr, blockTable[blockNb].srcSize,
412    blockTable[blockNb].cPtr, blockTable[blockNb].cSize,
416    blockNb, ZSTD_getErrorName(regenSize));
420    blockTable[blockNb].resSize = regenSize;
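Lines 408-420 regenerate each block with the one-shot dictionary decompression entry point and report the failing block number via ZSTD_getErrorName() on error. A sketch of that loop, reusing blockParam_t and the includes from the first sketch; dctx, dictBuffer and dictSize are assumed names, since the listing truncates the call itself.

    /* Decompress each block back into its result buffer. */
    static int decompressBlocks(blockParam_t* blockTable, unsigned nbBlocks,
                                ZSTD_DCtx* dctx,
                                const void* dictBuffer, size_t dictSize)
    {
        unsigned blockNb;
        for (blockNb = 0; blockNb < nbBlocks; blockNb++) {
            size_t const regenSize = ZSTD_decompress_usingDict(dctx,
                            blockTable[blockNb].resPtr, blockTable[blockNb].srcSize,
                            blockTable[blockNb].cPtr, blockTable[blockNb].cSize,
                            dictBuffer, dictSize);
            if (ZSTD_isError(regenSize)) {
                fprintf(stderr, "block %u decompression error: %s\n",
                        blockNb, ZSTD_getErrorName(regenSize));
                return 1;
            }
            blockTable[blockNb].resSize = regenSize;   /* mirrors line 420 */
        }
        return 0;
    }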
434    U32 blockNb;    (local declaration)
435    for (blockNb=0; blockNb<nbBlocks; blockNb++) {
438    inBuffer.src = blockTable[blockNb].cPtr;
439    inBuffer.size = blockTable[blockNb].cSize;
441    outBuffer.dst = blockTable[blockNb].resPtr;
442    outBuffer.size = blockTable[blockNb].srcSize;
446    blockTable[blockNb].resSize = outBuffer.pos;
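Lines 438-446 are the streaming counterpart on the decompression side: the compressed block goes in through a ZSTD_inBuffer, the regenerated data comes out through a ZSTD_outBuffer, and resSize is taken from outBuffer.pos. A sketch, with zbd as an assumed name for the ZSTD_DCtx; the decompression call itself is not visible in the listing.

    #include <zstd.h>

    /* One block decompressed through the streaming API. */
    static size_t streamDecompressBlock(ZSTD_DCtx* zbd,
                                        void* dst, size_t dstCapacity,
                                        const void* src, size_t srcSize)
    {
        ZSTD_inBuffer  inBuffer  = { src, srcSize, 0 };
        ZSTD_outBuffer outBuffer = { dst, dstCapacity, 0 };

        ZSTD_DCtx_reset(zbd, ZSTD_reset_session_only);
        /* A return of 0 means the frame is fully decoded; a whole-block
         * input buffer should not leave the frame unfinished. */
        {   size_t const r = ZSTD_decompressStream(zbd, &outBuffer, &inBuffer);
            if (ZSTD_isError(r)) return r;
        }
        return outBuffer.pos;   /* regenerated size, as stored at line 446 */
    }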
462    U32 blockNb;    (local declaration)
463    for (blockNb=0; blockNb<nbBlocks; blockNb++) {
469    inf.next_in = (z_const z_Bytef*) blockTable[blockNb].cPtr;
470    inf.avail_in = (uInt)blockTable[blockNb].cSize;
472    inf.next_out = (z_Bytef*) blockTable[blockNb].resPtr;
473    inf.avail_out = (uInt)blockTable[blockNb].srcSize;
482    blockTable[blockNb].resSize = inf.total_out;
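Lines 469-482 mirror the deflate path with zlib's inflate(): each compressed block is decoded in one Z_FINISH call and the regenerated size is read from total_out. A sketch under the same assumption as the deflate one, namely that the stream was initialized once with inflateInit() and is reset per block.

    #include <stdio.h>
    #include <zlib.h>

    /* Decompress one block with zlib.  inf must have been set up with
     * inflateInit(inf) beforehand. */
    static int inflateBlock(z_stream* inf,
                            const void* src, size_t cSize,
                            void* dst, size_t dstCapacity,
                            size_t* resSize)
    {
        int ret = inflateReset(inf);
        if (ret != Z_OK) return ret;

        inf->next_in   = (z_const Bytef*)src;
        inf->avail_in  = (uInt)cSize;
        inf->next_out  = (Bytef*)dst;
        inf->avail_out = (uInt)dstCapacity;

        /* Z_STREAM_END confirms the compressed stream ended cleanly. */
        ret = inflate(inf, Z_FINISH);
        if (ret != Z_STREAM_END) {
            fprintf(stderr, "inflate failure ret=%d\n", ret);
            return ret;
        }
        *resSize = inf->total_out;   /* mirrors line 482 */
        return Z_OK;
    }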
493    U32 blockNb;    (local declaration)
494    for (blockNb=0; blockNb<nbBlocks; blockNb++) {
501    inf.next_in = (z_const z_Bytef*) blockTable[blockNb].cPtr;
502    inf.avail_in = (uInt)blockTable[blockNb].cSize;
504    inf.next_out = (z_Bytef*) blockTable[blockNb].resPtr;
505    inf.avail_out = (uInt)blockTable[blockNb].srcSize;
516    blockTable[blockNb].resSize = inf.total_out;