/linux/include/uapi/linux/

  cciss_defs.h
      44  #define BYTE __u8   [macro]
      59  BYTE Dev;
      60  BYTE Bus:6;
      61  BYTE Mode:2;   /* b00 */
      64  BYTE DevLSB;
      65  BYTE DevMSB:6;
      66  BYTE Mode:2;   /* b01 */
      69  BYTE Dev:5;
      70  BYTE Bus:3;
      71  BYTE Targ:6;
      [all …]

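The cciss_defs.h hits are the SCSI addressing bitfields that sit behind the BYTE alias for __u8: three alternative address layouts share a 2-bit Mode discriminator packed into the last byte. A minimal sketch of that pattern, with hypothetical struct and union names and uint8_t standing in for __u8 so it compiles outside the kernel:

    #include <stdint.h>

    #define BYTE uint8_t   /* the kernel header uses __u8; uint8_t keeps the sketch self-contained */

    /* Illustrative only: three 2-byte layouts share the same 2-bit Mode field,
     * so a consumer reads Mode first and then picks the matching view.
     * Exact bitfield layout is compiler-defined. */
    union scsi_addr_sketch {
        struct {                /* Mode == b00: peripheral device address */
            BYTE Dev;
            BYTE Bus:6;
            BYTE Mode:2;
        } perip;
        struct {                /* Mode == b01: logical device address */
            BYTE DevLSB;
            BYTE DevMSB:6;
            BYTE Mode:2;
        } logdev;
        struct {                /* remaining mode: logical unit address */
            BYTE Dev:5;
            BYTE Bus:3;
            BYTE Targ:6;
            BYTE Mode:2;
        } logunit;
    };

The real header wraps the equivalent structs in its own union type; the point here is only how BYTE bitfields carve one or two bytes into mode-dependent address formats.
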
/linux/lib/lz4/

  lz4_compress.c
      91  const BYTE *p,   in LZ4_putPositionOnHash()
      95  const BYTE *srcBase)   in LZ4_putPositionOnHash()
      100  const BYTE **hashTable = (const BYTE **)tableBase;   in LZ4_putPositionOnHash()
      123  const BYTE *p,   in LZ4_putPosition()
      126  const BYTE *srcBase)   in LZ4_putPosition()
      133  static const BYTE *LZ4_getPositionOnHash(
      137  const BYTE *srcBase)   in LZ4_getPositionOnHash()
      140  const BYTE **hashTable = (const BYTE **) tableBas   in LZ4_getPositionOnHash()
      [all …]

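These put/get position hits are the heart of LZ4's greedy match finder: a small multiplicative hash of the next few input bytes picks a slot, and the slot remembers where that byte pattern was last seen. A rough standalone sketch of the idea; the hash constant is the usual Knuth-style multiplier, but the table size and names are illustrative, and the kernel code additionally switches between storing raw pointers and 32-bit offsets depending on the table type:

    #include <stdint.h>
    #include <string.h>

    typedef uint8_t BYTE;
    typedef uint32_t U32;

    #define HASH_LOG 12                      /* 4096-entry table for the sketch */

    /* Hash the 4 bytes at p down to a table index. */
    static U32 hash_position_sketch(const BYTE *p)
    {
        U32 v;
        memcpy(&v, p, sizeof(v));            /* unaligned-safe 32-bit read */
        return (v * 2654435761U) >> (32 - HASH_LOG);
    }

    /* Remember position p as a 32-bit offset from srcBase.
     * Caller provides U32 table[1 << HASH_LOG], zero-initialized. */
    static void put_position_sketch(const BYTE *p, U32 *table, const BYTE *srcBase)
    {
        table[hash_position_sketch(p)] = (U32)(p - srcBase);
    }

    /* Return the previous position whose leading bytes hashed to the same slot. */
    static const BYTE *get_position_sketch(const BYTE *p, const U32 *table,
                                           const BYTE *srcBase)
    {
        return srcBase + table[hash_position_sketch(p)];
    }

At each input position the compressor looks up a candidate with get_position_sketch(), verifies the match byte by byte, and records the current position with put_position_sketch() before moving on.
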
  lz4hc_compress.c
      60  static void LZ4HC_init(LZ4HC_CCtx_internal *hc4, const BYTE *start)
      74  const BYTE *ip)   in LZ4HC_Insert()
      78  const BYTE * const base = hc4->base;   in LZ4HC_Insert()
      100  const BYTE *ip,   in LZ4HC_InsertAndFindBestMatch()
      101  const BYTE * const iLimit,   in LZ4HC_InsertAndFindBestMatch()
      102  const BYTE **matchpos,   in LZ4HC_InsertAndFindBestMatch()
      107  const BYTE * const base = hc4->base;   in LZ4HC_InsertAndFindBestMatch()
      108  const BYTE * const dictBase = hc4->dictBase;   in LZ4HC_InsertAndFindBestMatch()
      125  const BYTE * const match = base + matchIndex;   in LZ4HC_InsertAndFindBestMatch()
      138  const BYTE * cons   in LZ4HC_InsertAndFindBestMatch()
      [all …]

  lz4_decompress.c
      75  const BYTE * const lowPrefix,   in LZ4_decompress_generic()
      77  const BYTE * const dictStart,   in LZ4_decompress_generic()
      82  const BYTE *ip = (const BYTE *) src;   in LZ4_decompress_generic()
      83  const BYTE * const iend = ip + srcSize;   in LZ4_decompress_generic()
      85  BYTE *op = (BYTE *) dst;   in LZ4_decompress_generic()
      86  BYTE * const oend = op + outputSize;   in LZ4_decompress_generic()
      87  BYTE *cpy;   in LZ4_decompress_generic()
      89  const BYTE * cons   in LZ4_decompress_generic()
      [all …]

  lz4defs.h
      51  typedef uint8_t BYTE;
      175  BYTE *d = (BYTE *)dstPtr;   in LZ4_wildCopy()
      176  const BYTE *s = (const BYTE *)srcPtr;   in LZ4_wildCopy()
      177  BYTE *const e = (BYTE *)dstEnd;   in LZ4_wildCopy()
      196  const BYTE *pIn,   in LZ4_count()
      197  const BYTE *pMatch,   in LZ4_count()
      198  const BYTE *pInLimi   in LZ4_count()
      50  typedef uint8_t BYTE;   [global() typedef]
      [all …]

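lz4defs.h is also where the kernel's LZ4 defines BYTE itself (typedef uint8_t BYTE), and the LZ4_wildCopy hits show the typical use: raw byte pointers driving a copy loop that strides in fixed 8-byte chunks and is allowed to overrun the destination end by a few bytes, because callers reserve slack for exactly that. A minimal sketch of the pattern, assuming the same three-pointer shape as the hits; it is not the kernel function:

    #include <stdint.h>
    #include <string.h>

    typedef uint8_t BYTE;

    /* Illustrative sketch: copy from srcPtr toward dstEnd, 8 bytes at a time.
     * May write up to 7 bytes past dstEnd, so the caller must guarantee that
     * much slack after the destination buffer. */
    static void wildcopy_sketch(void *dstPtr, const void *srcPtr, void *dstEnd)
    {
        BYTE *d = (BYTE *)dstPtr;
        const BYTE *s = (const BYTE *)srcPtr;
        BYTE *const e = (BYTE *)dstEnd;

        do {
            memcpy(d, s, 8);                 /* unaligned-safe 8-byte chunk */
            d += 8;
            s += 8;
        } while (d < e);
    }

Trading a small, bounded overrun for the removal of per-byte branches is what keeps the literal and match copies in both LZ4 and zstd fast.
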
/linux/lib/zstd/compress/

  zstd_compress_superblock.c
      43  const BYTE* literals, size_t litSize,   in ZSTD_compressSubBlock_literal()
      49  BYTE* const ostart = (BYTE*)dst;   in ZSTD_compressSubBlock_literal()
      50  BYTE* const oend = ostart + dstSize;   in ZSTD_compressSubBlock_literal()
      51  BYTE* op = ostart + lhSize;   in ZSTD_compressSubBlock_literal()
      118  ostart[4] = (BYTE)(cLitSize >> 10);   in ZSTD_compressSubBlock_literal()
      162  … const BYTE* llCode, const BYTE* mlCode, const BYTE* ofCode,   in ZSTD_compressSubBlock_sequences()
      168  BYTE* const ostart = (BYTE*)dst;   in ZSTD_compressSubBlock_sequences()
      169  BYTE* const oend = ostart + dstCapacity;   in ZSTD_compressSubBlock_sequences()
      170  BYTE* op = ostart;   in ZSTD_compressSubBlock_sequences()
      171  BYTE* seqHead;   in ZSTD_compressSubBlock_sequences()
      [all …]

  zstd_double_fast.c
      24  const BYTE* const base = ms->window.base;   in ZSTD_fillDoubleHashTable()
      25  const BYTE* ip = base + ms->nextToUpdate;   in ZSTD_fillDoubleHashTable()
      26  const BYTE* const iend = ((const BYTE*)end) - HASH_READ_SIZE;   in ZSTD_fillDoubleHashTable()
      60  const BYTE* const base = ms->window.base;   in ZSTD_compressBlock_doubleFast_noDict_generic()
      61  const BYTE* const istart = (const BYTE*)src;   in ZSTD_compressBlock_doubleFast_noDict_generic()
      62  const BYTE* anchor = istart;   in ZSTD_compressBlock_doubleFast_noDict_generic()
      66  const BYTE* const prefixLowest = base + prefixLowestIndex;   in ZSTD_compressBlock_doubleFast_noDict_generic()
      67  const BYTE* const iend = istart + srcSize;   in ZSTD_compressBlock_doubleFast_noDict_generic()
      68  const BYTE* const ilimit = iend - HASH_READ_SIZE;   in ZSTD_compressBlock_doubleFast_noDict_generic()
      79  const BYTE* nextStep;   in ZSTD_compressBlock_doubleFast_noDict_generic()
      [all …]

  hist.c
      32  const BYTE* ip = (const BYTE*)src;   in HIST_count_simple()
      33  const BYTE* const end = ip + srcSize;   in HIST_count_simple()
      72  const BYTE* ip = (const BYTE*)source;   in HIST_count_parallel_wksp()
      73  const BYTE* const iend = ip+sourceSize;   in HIST_count_parallel_wksp()
      94  Counting1[(BYTE) c ]++;   in HIST_count_parallel_wksp()
      95  Counting2[(BYTE)(c>>8) ]++;   in HIST_count_parallel_wksp()
      96  Counting3[(BYTE)(c>>16)]++;   in HIST_count_parallel_wksp()
      99  Counting1[(BYTE) c ]++;   in HIST_count_parallel_wksp()
      100  Counting2[(BYTE)(c>>8) ]++;   in HIST_count_parallel_wksp()
      101  Counting3[(BYTE)(c>>16)]++;   in HIST_count_parallel_wksp()
      [all …]

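The HIST_count_parallel_wksp hits show the split-histogram trick: each 32-bit word read from the input feeds several independent counting tables, one per byte lane, so back-to-back increments rarely touch the same entry, and the partial tables are summed at the end. A self-contained approximation of the idea; the kernel routine works in a caller-supplied workspace and unrolls further:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    typedef uint8_t BYTE;
    typedef uint32_t U32;

    /* Illustrative sketch: count byte frequencies of src into count[256]
     * using four partial tables to reduce store-to-load stalls. */
    static void hist_count_sketch(U32 count[256], const void *src, size_t srcSize)
    {
        U32 c1[256] = {0}, c2[256] = {0}, c3[256] = {0}, c4[256] = {0};
        const BYTE *ip = (const BYTE *)src;
        const BYTE *const iend = ip + srcSize;

        while (iend - ip >= 4) {
            U32 c;
            memcpy(&c, ip, sizeof(c));       /* read one 32-bit word */
            ip += 4;
            c1[(BYTE) c       ]++;
            c2[(BYTE)(c >> 8) ]++;
            c3[(BYTE)(c >> 16)]++;
            c4[(BYTE)(c >> 24)]++;
        }
        while (ip < iend)                    /* leftover tail bytes */
            c1[*ip++]++;

        for (int s = 0; s < 256; s++)
            count[s] = c1[s] + c2[s] + c3[s] + c4[s];
    }
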
  zstd_ldm.c
      66  BYTE const* data, size_t minMatchLength)   in ZSTD_ldm_gear_reset()
      97  BYTE const* data, size_t size,   in ZSTD_ldm_gear_feed()
      184  BYTE* const pOffset = ldmState->bucketOffsets + hash;   in ZSTD_ldm_insertEntry()
      188  *pOffset = (BYTE)((offset + 1) & ((1u << ldmParams.bucketSizeLog) - 1));   in ZSTD_ldm_insertEntry()
      197  const BYTE* pIn, const BYTE* pAnchor,   in ZSTD_ldm_countBackwardsMatch()
      198  const BYTE* pMatch, const BYTE* pMatchBase)   in ZSTD_ldm_countBackwardsMatch()
      215  const BYTE* pIn, const BYTE* pAnchor,   in ZSTD_ldm_countBackwardsMatch_2segments()
      216  const BYTE* pMatch, const BYTE* pMatchBase,   in ZSTD_ldm_countBackwardsMatch_2segments()
      217  const BYTE* pExtDictStart, const BYTE* pExtDictEnd)   in ZSTD_ldm_countBackwardsMatch_2segments()
      240  const BYTE* const iend = (const BYTE*)end;   in ZSTD_ldm_fillFastTables()
      [all …]

  zstd_fast.c
      23  const BYTE* const base = ms->window.base;   in ZSTD_fillHashTable()
      24  const BYTE* ip = base + ms->nextToUpdate;   in ZSTD_fillHashTable()
      25  const BYTE* const iend = ((const BYTE*)end) - HASH_READ_SIZE;   in ZSTD_fillHashTable()
      103  const BYTE* const base = ms->window.base;   in ZSTD_compressBlock_fast_noDict_generic()
      104  const BYTE* const istart = (const BYTE*)src;   in ZSTD_compressBlock_fast_noDict_generic()
      107  const BYTE* const prefixStart = base + prefixStartIndex;   in ZSTD_compressBlock_fast_noDict_generic()
      108  const BYTE* const iend = istart + srcSize;   in ZSTD_compressBlock_fast_noDict_generic()
      109  const BYTE* const ilimit = iend - HASH_READ_SIZE;   in ZSTD_compressBlock_fast_noDict_generic()
      111  const BYTE* anchor = istart;   in ZSTD_compressBlock_fast_noDict_generic()
      112  const BYTE* ip0 = istart;   in ZSTD_compressBlock_fast_noDict_generic()
      [all …]

  zstd_lazy.c
      21  const BYTE* ip, const BYTE* iend,   in ZSTD_updateDUBT()
      32  const BYTE* const base = ms->window.base;   in ZSTD_updateDUBT()
      65  U32 curr, const BYTE* inputEnd,   in ZSTD_insertDUBT1()
      74  const BYTE* const base = ms->window.base;   in ZSTD_insertDUBT1()
      75  const BYTE* const dictBase = ms->window.dictBase;   in ZSTD_insertDUBT1()
      77  const BYTE* const ip = (curr>=dictLimit) ? base + curr : dictBase + curr;   in ZSTD_insertDUBT1()
      78  const BYTE* const iend = (curr>=dictLimit) ? inputEnd : dictBase + dictLimit;   in ZSTD_insertDUBT1()
      79  const BYTE* const dictEnd = dictBase + dictLimit;   in ZSTD_insertDUBT1()
      80  const BYTE* const prefixStart = base + dictLimit;   in ZSTD_insertDUBT1()
      81  const BYTE* match;   in ZSTD_insertDUBT1()
      [all …]

  zstd_compress_internal.h
      88  BYTE hufDesBuffer[ZSTD_MAX_HUF_HEADER_SIZE];
      101  BYTE fseTablesBuffer[ZSTD_MAX_FSE_HEADERS_SIZE];
      185  BYTE const* nextSrc;    /* next block here to continue on current prefix */
      186  BYTE const* base;       /* All regular indexes relative to this position */
      187  BYTE const* dictBase;   /* extDict indexes relative to this position */
      245  BYTE const* split;
      257  BYTE* bucketOffsets;    /* Next position in bucket to insert entry */
      459  static const BYTE LL_Code[64] = { 0, 1, 2, 3, 4, 5, 6, 7,   in ZSTD_LLcode()
      476  … static const BYTE ML_Code[128] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,   in ZSTD_MLcode()
      509  ZSTD_memcpy((BYTE*)dst + ZSTD_blockHeaderSize, src, srcSize);   in ZSTD_noCompressBlock()
      [all …]

  huf_compress.c
      60  BYTE* const aligned = (BYTE*)workspace + add;   in HUF_alignUpWorkspace()
      91  BYTE* const ostart = (BYTE*) dst;   in HUF_compressWeights()
      92  BYTE* op = ostart;   in HUF_compressWeights()
      93  BYTE* const oend = ostart + dstSize;   in HUF_compressWeights()
      165  BYTE bitsToWeight[HUF_TABLELOG_MAX + 1];   /* precomputed conversion table */
      166  BYTE huffWeight[HUF_SYMBOLVALUE_MAX];
      174  BYTE* op = (BYTE*)dst;   in HUF_writeCTable_wksp()
      185  wksp->bitsToWeight[n] = (BYTE)(huffLog + 1 - n);   in HUF_writeCTable_wksp()
      193  op[0] = (BYTE)hSize;   in HUF_writeCTable_wksp()
      200  op[0] = (BYTE)(128 /*special case*/ + (maxSymbolValue-1));   in HUF_writeCTable_wksp()
      [all …]

  zstd_compress_sequences.c
      73  BYTE wksp[FSE_NCOUNTBOUND];   in ZSTD_NCountCost()
      246  const BYTE* codeTable, size_t nbSeq,   in ZSTD_buildCTable()
      251  BYTE* op = (BYTE*)dst;   in ZSTD_buildCTable()
      252  const BYTE* const oend = op + dstCapacity;   in ZSTD_buildCTable()
      257  FORWARD_IF_ERROR(FSE_buildCTable_rle(nextCTable, (BYTE)max), "");   in ZSTD_buildCTable()
      293  FSE_CTable const* CTable_MatchLength, BYTE const* mlCodeTable,   in ZSTD_encodeSequences_body()
      294  FSE_CTable const* CTable_OffsetBits, BYTE const* ofCodeTable,   in ZSTD_encodeSequences_body()
      295  FSE_CTable const* CTable_LitLength, BYTE const* llCodeTable,   in ZSTD_encodeSequences_body()
      334  BYTE const llCode = llCodeTable[n];   in ZSTD_encodeSequences_body()
      335  BYTE const ofCode = ofCodeTable[n];   in ZSTD_encodeSequences_body()
      [all …]

  zstd_compress_literals.c
      18  BYTE* const ostart = (BYTE*)dst;   in ZSTD_noCompressLiterals()
      26  ostart[0] = (BYTE)((U32)set_basic + (srcSize<<3));   in ZSTD_noCompressLiterals()
      45  BYTE* const ostart = (BYTE*)dst;   in ZSTD_compressRleLiteralsBlock()
      53  ostart[0] = (BYTE)((U32)set_rle + (srcSize<<3));   in ZSTD_compressRleLiteralsBlock()
      65  ostart[flSize] = *(const BYTE*)src;   in ZSTD_compressRleLiteralsBlock()
      81  BYTE* const ostart = (BYTE*)dst;   in ZSTD_compressLiterals()
      151  ostart[4] = (BYTE)(cLitSize >> 10);   in ZSTD_compressLiterals()

  zstd_cwksp.h
      151  BYTE allocFailed;
      247  void* const alloc = (BYTE*)ws->allocStart - bytes;   in ZSTD_cwksp_reserve_internal_buffer_space()
      297  void* const objectEnd = (BYTE*)alloc + bytesToAlign;   in ZSTD_cwksp_internal_advance_phase()
      341  MEM_STATIC BYTE* ZSTD_cwksp_reserve_buffer(ZSTD_cwksp* ws, size_t bytes)   in ZSTD_cwksp_reserve_buffer()
      343  return (BYTE*)ZSTD_cwksp_reserve_internal(ws, bytes, ZSTD_cwksp_alloc_buffers);   in ZSTD_cwksp_reserve_buffer()
      373  end = (BYTE *)alloc + bytes;   in ZSTD_cwksp_reserve_table()
      402  void* end = (BYTE*)alloc + roundedBytes;   in ZSTD_cwksp_reserve_object()
      454  ZSTD_memset(ws->tableValidEnd, 0, (BYTE*)ws->tableEnd - (BYTE*)ws->tableValidEnd);   in ZSTD_cwksp_clean_tables()
      498  ws->workspaceEnd = (BYTE*)start + size;   in ZSTD_cwksp_init()
      533  return (size_t)((BYTE*)ws->workspaceEnd - (BYTE*)ws->workspace);   in ZSTD_cwksp_sizeof()
      [all …]

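Almost every zstd_cwksp.h hit is the same idiom: the workspace tracks its boundaries as void*, and each address or size computation casts through BYTE* so the arithmetic is byte-granular and standard C (arithmetic on void* is only a compiler extension). A toy bump-down reservation in that style, with hypothetical names rather than the real ZSTD_cwksp API:

    #include <stddef.h>
    #include <stdint.h>

    typedef uint8_t BYTE;

    /* Illustrative sketch of a single-arena workspace, not the zstd structure. */
    struct cwksp_sketch {
        void *workspace;      /* start of the arena */
        void *workspaceEnd;   /* one past the end of the arena */
        void *allocStart;     /* buffers are carved downward from here */
    };

    /* Reserve `bytes` from the top of the arena, or return NULL when the
     * request would collide with the bottom.  All arithmetic uses BYTE* casts. */
    static void *cwksp_reserve_sketch(struct cwksp_sketch *ws, size_t bytes)
    {
        void *const alloc = (BYTE *)ws->allocStart - bytes;

        if ((BYTE *)alloc < (BYTE *)ws->workspace)
            return NULL;                     /* arena exhausted */
        ws->allocStart = alloc;
        return alloc;
    }

    /* Total arena size, computed the same way as the hit on line 533. */
    static size_t cwksp_sizeof_sketch(const struct cwksp_sketch *ws)
    {
        return (size_t)((BYTE *)ws->workspaceEnd - (BYTE *)ws->workspace);
    }
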
  zstd_opt.c
      125  const BYTE* const src, size_t const srcSize,   in ZSTD_rescaleFreqs()
      245  static U32 ZSTD_rawLiteralsCost(const BYTE* const literals, U32 const litLength,   in ZSTD_rawLiteralsCost()
      330  U32 litLength, const BYTE* literals,   in ZSTD_updateStats()
      384  const BYTE* const ip)   in ZSTD_insertAndFindFirstIndexHash3()
      388  const BYTE* const base = ms->window.base;   in ZSTD_insertAndFindFirstIndexHash3()
      413  const BYTE* const ip, const BYTE* const iend,   in ZSTD_insertBt1()
      426  const BYTE* const base = ms->window.base;   in ZSTD_insertBt1()
      427  const BYTE* const dictBase = ms->window.dictBase;   in ZSTD_insertBt1()
      429  const BYTE* const dictEnd = dictBase + dictLimit;   in ZSTD_insertBt1()
      430  const BYTE* const prefixStart = base + dictLimit;   in ZSTD_insertBt1()
      [all …]

/linux/lib/zstd/decompress/

  zstd_decompress_block.c
      79  dctx->litBuffer = (BYTE*)dst + ZSTD_BLOCKSIZE_MAX + WILDCOPY_OVERLENGTH;   in ZSTD_allocateLiteralsBuffer()
      88  …dctx->litBuffer = (BYTE*)dst + expectedWriteSize - litSize + ZSTD_LITBUFFEREXTRASIZE - WILDCOPY_OV…   in ZSTD_allocateLiteralsBuffer()
      93  dctx->litBuffer = (BYTE*)dst + expectedWriteSize - litSize;   in ZSTD_allocateLiteralsBuffer()
      94  dctx->litBufferEnd = (BYTE*)dst + expectedWriteSize;   in ZSTD_allocateLiteralsBuffer()
      126  { const BYTE* const istart = (const BYTE*) src;   in ZSTD_decodeLiteralsBlock()
      454  BYTE* spread = (BYTE*)(symbolNext + MaxSeq + 1);   in ZSTD_buildFSETable_body()
      554  tableDecode[u].nbBits = (BYTE) (tableLog - BIT_highbit32(nextState) );   in ZSTD_buildFSETable_body()
      617  RETURN_ERROR_IF((*(const BYTE*)src) > max, corruption_detected, "");   in ZSTD_buildSeqTable()
      618  { U32 const symbol = *(const BYTE*)src;   in ZSTD_buildSeqTable()
      656  const BYTE* const istart = (const BYTE*)src;   in ZSTD_decodeSeqHeaders()
      [all …]

  huf_decompress.c
      128  typedef struct { BYTE maxTableLog; BYTE tableType; BYTE tableLog; BYTE reserved; } DTableDesc;
      139  static size_t HUF_initDStream(BYTE const* ip) {   in HUF_initDStream()
      140  BYTE const lastByte = ip[7];   in HUF_initDStream()
      147  BYTE const* ip[4];
      148  BYTE* op[4];
      151  BYTE const* ilimit;
      152  BYTE* oend;
      153  BYTE const* iend[4];
      167  const BYTE* const ilimit = (const BYTE*)src + 6 + 8;   in HUF_DecompressAsmArgs_init()
      169  BYTE* const oend = (BYTE*)dst + dstSize;   in HUF_DecompressAsmArgs_init()
      [all …]

  zstd_decompress_internal.h
      69  BYTE nbAdditionalBits;
      70  BYTE nbBits;
      150  const BYTE* litPtr;
      185  BYTE* litBuffer;
      186  const BYTE* litBufferEnd;
      188  BYTE litExtraBuffer[ZSTD_LITBUFFEREXTRASIZE + WILDCOPY_OVERLENGTH];   /* literal buffer can be split… */
      189  BYTE headerBuffer[ZSTD_FRAMEHEADERSIZE_MAX];

/linux/Documentation/scsi/

  arcmsr_spec.rst
      445  BYTE grsRaidSetName[16];
      449  BYTE grsDevArray[32];
      450  BYTE grsMemberDevices;
      451  BYTE grsNewMemberDevices;
      452  BYTE grsRaidState;
      453  BYTE grsVolumes;
      454  BYTE grsVolumeList[16];
      455  BYTE grsRes1;
      456  BYTE grsRes2;
      457  BYTE grsRes3;
      [all …]

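The arcmsr hits are fields of the RAID-set information record that the Areca firmware interface returns, all declared as single BYTE values or BYTE arrays. Purely as an illustration, the fields visible in the hit lines can be gathered into one struct; the documented record has more members (the jump from line 445 to 449 hints at some), and ordering and packing should be taken from the full spec rather than from this sketch:

    #include <stdint.h>

    typedef uint8_t BYTE;   /* the spec uses BYTE for one-byte firmware fields */

    /* Illustrative sketch containing only the members shown in the hits above. */
    struct raid_set_info_sketch {
        BYTE grsRaidSetName[16];
        BYTE grsDevArray[32];
        BYTE grsMemberDevices;
        BYTE grsNewMemberDevices;
        BYTE grsRaidState;
        BYTE grsVolumes;
        BYTE grsVolumeList[16];
        BYTE grsRes1;
        BYTE grsRes2;
        BYTE grsRes3;
    };
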
/linux/lib/zstd/common/

  zstd_internal.h
      184  BYTE copy16_buf[16];   in ZSTD_copy16()
      210  ptrdiff_t diff = (BYTE*)dst - (const BYTE*)src;   in ZSTD_wildcopy()
      211  const BYTE* ip = (const BYTE*)src;   in ZSTD_wildcopy()
      212  BYTE* op = (BYTE*)dst;   in ZSTD_wildcopy()
      213  BYTE* const oend = op + length;   in ZSTD_wildcopy()
      292  BYTE* litStart;
      293  BYTE* lit;   /* ptr to end of literals */
      294  BYTE* llCode;
      295  BYTE* mlCode;
      296  BYTE* ofCode;

  entropy_common.c
      62  const BYTE* const istart = (const BYTE*) headerBuffer;   in FSE_readNCount_body()
      63  const BYTE* const iend = istart + hbSize;   in FSE_readNCount_body()
      64  const BYTE* ip = istart;   in FSE_readNCount_body()
      251  size_t HUF_readStats(BYTE* huffWeight, size_t hwSize, U32* rankStats,   in HUF_readStats()
      260  HUF_readStats_body(BYTE* huffWeight, size_t hwSize, U32* rankStats,   in HUF_readStats_body()
      267  const BYTE* ip = (const BYTE*) src;   in HUF_readStats_body()
      313  huffWeight[oSize] = (BYTE)lastWeight;   in HUF_readStats_body()
      326  static size_t HUF_readStats_body_default(BYTE* huffWeight, size_t hwSize, U32* rankStats,   in HUF_readStats_body_default()
      335  static BMI2_TARGET_ATTRIBUTE size_t HUF_readStats_body_bmi2(BYTE* huffWeight, size_t hwSize, U32* r…   in HUF_readStats_body_bmi2()
      344  size_t HUF_readStats_wksp(BYTE* huffWeight, size_t hwSize, U32* rankStats,   in HUF_readStats_wksp()

  fse_decompress.c
      76  BYTE* spread = (BYTE*)(symbolNext + maxSymbolValue + 1);   in FSE_buildDTable_internal()
      169  tableDecode[u].nbBits = (BYTE) (tableLog - BIT_highbit32(nextState) );   in FSE_buildDTable_internal()
      187  size_t FSE_buildDTable_rle (FSE_DTable* dt, BYTE symbolValue)   in FSE_buildDTable_rle()
      224  dinfo[s].symbol = (BYTE)s;   in FSE_buildDTable_raw()
      225  dinfo[s].nbBits = (BYTE)nbBits;   in FSE_buildDTable_raw()
      236  BYTE* const ostart = (BYTE*) dst;   in FSE_decompress_usingDTable_generic()
      237  BYTE* op = ostart;   in FSE_decompress_usingDTable_generic()
      238  BYTE* const omax = op + maxDstSize;   in FSE_decompress_usingDTable_generic()
      239  BYTE* const olimit = omax-3;   in FSE_decompress_usingDTable_generic()
      325  const BYTE* const istart = (const BYTE*)cSrc;   in FSE_decompress_wksp_body()
      [all …]

/linux/arch/loongarch/include/asm/

  module.lds.h
      5  .got 0 : { BYTE(0) }
      6  .plt 0 : { BYTE(0) }
      7  .plt.idx 0 : { BYTE(0) }
      8  .ftrace_trampoline 0 : { BYTE(0) }
