/linux/include/uapi/linux/

cciss_defs.h
    44  #define BYTE __u8          (macro definition)
    59  BYTE Dev;
    60  BYTE Bus:6;
    61  BYTE Mode:2;        /* b00 */
    64  BYTE DevLSB;
    65  BYTE DevMSB:6;
    66  BYTE Mode:2;        /* b01 */
    69  BYTE Dev:5;
    70  BYTE Bus:3;
    71  BYTE Tar…
    … (further matches not shown)
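The cciss_defs.h matches above define BYTE as __u8 and use it for bit-packed SCSI addressing structures, where a 2-bit Mode field tags the addressing format (the b00/b01 comments). Below is a minimal sketch of that pattern; it uses unsigned char as a stand-in for __u8 and an illustrative struct name, not the exact cciss layout.

    #define BYTE unsigned char    /* stand-in for __u8 outside the kernel headers */

    /* Peripheral-style address: one byte for the device, then a byte whose
     * low six bits hold the bus and whose top two bits hold the mode tag
     * (assuming the usual little-endian bit-field allocation, and the common
     * compiler extension that allows char-typed bit-fields). */
    struct example_periph_addr {
        BYTE Dev;
        BYTE Bus:6;
        BYTE Mode:2;    /* b00 = peripheral device addressing */
    };

    _Static_assert(sizeof(struct example_periph_addr) == 2,
                   "two BYTE-sized members pack into two bytes on common ABIs");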
/linux/lib/lz4/

lz4_compress.c
    91  const BYTE *p,    in LZ4_putPositionOnHash()
    95  const BYTE *srcBase)    in LZ4_putPositionOnHash()
   100  const BYTE **hashTable = (const BYTE **)tableBase;    in LZ4_putPositionOnHash()
   123  const BYTE *p,    in LZ4_putPosition()
   126  const BYTE *srcBase)    in LZ4_putPosition()
   133  static const BYTE *LZ4_getPositionOnHash(    in LZ4_getPositionOnHash()
   137  const BYTE *srcBase)    in LZ4_getPositionOnHash()
   140  const BYTE **hashTable = (const BYTE **) tableBas…    in LZ4_getPositionOnHash()
    … (further matches not shown)

lz4hc_compress.c
    60  static void LZ4HC_init(LZ4HC_CCtx_internal *hc4, const BYTE *start)    in LZ4HC_init()
    74  const BYTE *ip)    in LZ4HC_Insert()
    78  const BYTE * const base = hc4->base;    in LZ4HC_Insert()
   100  const BYTE *ip,    in LZ4HC_InsertAndFindBestMatch()
   101  const BYTE * const iLimit,    in LZ4HC_InsertAndFindBestMatch()
   102  const BYTE **matchpos,    in LZ4HC_InsertAndFindBestMatch()
   107  const BYTE * const base = hc4->base;    in LZ4HC_InsertAndFindBestMatch()
   108  const BYTE * const dictBase = hc4->dictBase;    in LZ4HC_InsertAndFindBestMatch()
   125  const BYTE * const match = base + matchIndex;    in LZ4HC_InsertAndFindBestMatch()
   138  const BYTE * cons…    in LZ4HC_InsertAndFindBestMatch()
    … (further matches not shown)

lz4_decompress.c
    75  const BYTE * const lowPrefix,    in LZ4_decompress_generic()
    77  const BYTE * const dictStart,    in LZ4_decompress_generic()
    82  const BYTE *ip = (const BYTE *) src;    in LZ4_decompress_generic()
    83  const BYTE * const iend = ip + srcSize;    in LZ4_decompress_generic()
    85  BYTE *op = (BYTE *) dst;    in LZ4_decompress_generic()
    86  BYTE * const oend = op + outputSize;    in LZ4_decompress_generic()
    87  BYTE *cpy;    in LZ4_decompress_generic()
    89  const BYTE * cons…    in LZ4_decompress_generic()
    … (further matches not shown)

lz4defs.h
    51  typedef uint8_t BYTE;          (typedef)
   175  BYTE *d = (BYTE *)dstPtr;    in LZ4_wildCopy()
   176  const BYTE *s = (const BYTE *)srcPtr;    in LZ4_wildCopy()
   177  BYTE *const e = (BYTE *)dstEnd;    in LZ4_wildCopy()
   196  const BYTE *pIn,    in LZ4_count()
   197  const BYTE *pMatch,    in LZ4_count()
   198  const BYTE *pInLimi…    in LZ4_count()
    … (further matches not shown)
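lz4defs.h typedefs BYTE to uint8_t, and the LZ4_wildCopy() matches show it used for byte-granular pointer work on void* arguments. A minimal sketch of the wild-copy idea under those assumptions: copy in fixed 8-byte steps and accept writing a few bytes past dstEnd, which is why LZ4 reserves slack at the end of its buffers. The helper name and signature below are illustrative, not the kernel's.

    #include <stdint.h>
    #include <string.h>

    typedef uint8_t BYTE;    /* same typedef as lz4defs.h */

    /* Copies at least (dstEnd - dstPtr) bytes in 8-byte chunks; may write up
     * to 7 bytes beyond dstEnd, so the destination must have that much slack. */
    static void wild_copy8(void *dstPtr, const void *srcPtr, void *dstEnd)
    {
        BYTE *d = (BYTE *)dstPtr;
        const BYTE *s = (const BYTE *)srcPtr;
        BYTE *const e = (BYTE *)dstEnd;

        do {
            memcpy(d, s, 8);    /* compilers lower this to a single 64-bit move */
            d += 8;
            s += 8;
        } while (d < e);
    }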
/linux/lib/zstd/compress/

hist.c
    18  #include "../common/mem.h"  /* U32, BYTE, etc. */
    32  const BYTE* ip = (const BYTE*)src;    in HIST_add()
    33  const BYTE* const end = ip + srcSize;    in HIST_add()
    43  const BYTE* ip = (const BYTE*)src;    in HIST_count_simple()
    44  const BYTE* const end = ip + srcSize;    in HIST_count_simple()
    83  const BYTE* ip = (const BYTE*)source;    in HIST_count_parallel_wksp()
    84  const BYTE* cons…    in HIST_count_parallel_wksp()
    … (further matches not shown)

zstd_compress_superblock.c
    45  const BYTE* literals, size_t litSize,    in ZSTD_compressSubBlock_literal()
    51  BYTE* const ostart = (BYTE*)dst;    in ZSTD_compressSubBlock_literal()
    52  BYTE* const oend = ostart + dstSize;    in ZSTD_compressSubBlock_literal()
    53  BYTE* op = ostart + lhSize;    in ZSTD_compressSubBlock_literal()
   118  ostart[4] = (BYTE)(cLitSize >> 10);    in ZSTD_compressSubBlock_literal()
   167  const BYTE* llCode, const BYTE* mlCode, const BYTE* ofCode,    in ZSTD_compressSubBlock_sequences()
   173  BYTE* cons…    in ZSTD_compressSubBlock_sequences()
    … (further matches not shown)

zstd_fast.c
    25  const BYTE* const base = ms->window.base;    in ZSTD_fillHashTableForCDict()
    26  const BYTE* ip = base + ms->nextToUpdate;    in ZSTD_fillHashTableForCDict()
    27  const BYTE* const iend = ((const BYTE*)end) - HASH_READ_SIZE;    in ZSTD_fillHashTableForCDict()
    62  const BYTE* const base = ms->window.base;    in ZSTD_fillHashTableForCCtx()
    63  const BYTE* ip = base + ms->nextToUpdate;    in ZSTD_fillHashTableForCCtx()
    64  const BYTE* const iend = ((const BYTE*)end) - HASH_READ_SIZE;    in ZSTD_fillHashTableForCCtx()
   101  typedef int (*ZSTD_match4Found) (const BYTE* currentPtr, const BYTE* matchAddres…
    … (further matches not shown)

zstd_double_fast.c
    28  const BYTE* const base = ms->window.base;    in ZSTD_fillDoubleHashTableForCDict()
    29  const BYTE* ip = base + ms->nextToUpdate;    in ZSTD_fillDoubleHashTableForCDict()
    30  const BYTE* const iend = ((const BYTE*)end) - HASH_READ_SIZE;    in ZSTD_fillDoubleHashTableForCDict()
    66  const BYTE* const base = ms->window.base;    in ZSTD_fillDoubleHashTableForCCtx()
    67  const BYTE* ip = base + ms->nextToUpdate;    in ZSTD_fillDoubleHashTableForCCtx()
    68  const BYTE* const iend = ((const BYTE*)end) - HASH_READ_SIZE;    in ZSTD_fillDoubleHashTableForCCtx()
   115  const BYTE* const base = ms->window.base;    in ZSTD_compressBlock_doubleFast_noDict_generic()
   116  const BYTE* cons…    in ZSTD_compressBlock_doubleFast_noDict_generic()
    … (further matches not shown)

zstd_ldm.c
    67  BYTE const* data, size_t minMatchLength)    in ZSTD_ldm_gear_reset()
    98  BYTE const* data, size_t size,    in ZSTD_ldm_gear_feed()
   199  BYTE* const pOffset = ldmState->bucketOffsets + hash;    in ZSTD_ldm_insertEntry()
   203  *pOffset = (BYTE)((offset + 1) & ((1u << bucketSizeLog) - 1));    in ZSTD_ldm_insertEntry()
   212  const BYTE* pIn, const BYTE* pAnchor,    in ZSTD_ldm_countBackwardsMatch()
   213  const BYTE* pMatch, const BYTE* pMatchBase)    in ZSTD_ldm_countBackwardsMatch()
   230  const BYTE* pIn, const BYTE* pAncho…    in ZSTD_ldm_countBackwardsMatch_2segments()
    … (further matches not shown)

zstd_lazy.c
    31  const BYTE* ip, const BYTE* iend,    in ZSTD_updateDUBT()
    42  const BYTE* const base = ms->window.base;    in ZSTD_updateDUBT()
    76  U32 curr, const BYTE* inputEnd,    in ZSTD_insertDUBT1()
    85  const BYTE* const base = ms->window.base;    in ZSTD_insertDUBT1()
    86  const BYTE* const dictBase = ms->window.dictBase;    in ZSTD_insertDUBT1()
    88  const BYTE* const ip = (curr>=dictLimit) ? base + curr : dictBase + curr;    in ZSTD_insertDUBT1()
    89  const BYTE* const iend = (curr>=dictLimit) ? inputEnd : dictBase + dictLimit;    in ZSTD_insertDUBT1()
    90  const BYTE* const dictEnd = dictBase + dictLimit;    in ZSTD_insertDUBT1()
    91  const BYTE* cons…    in ZSTD_insertDUBT1()
    … (further matches not shown)

zstd_compress_literals.c
    25  const BYTE* const ip = (const BYTE*)src;    in showHexa()
    42  BYTE* const ostart = (BYTE*)dst;    in ZSTD_noCompressLiterals()
    52  ostart[0] = (BYTE)((U32)set_basic + (srcSize<<3));    in ZSTD_noCompressLiterals()
    73  { const BYTE b = ((const BYTE*)src)[0];    in allBytesIdentical()
    76  if (((const BYTE*)src)[p] != b) return 0;    in allBytesIdentical()
    84  BYTE* const ostart = (BYTE*)ds…    in ZSTD_compressRleLiteralsBlock()
    … (further matches not shown)

zstd_compress_internal.h
    99  BYTE* litStart;
   100  BYTE* lit;                 /* ptr to end of literals */
   101  BYTE* llCode;
   102  BYTE* mlCode;
   103  BYTE* ofCode;
   154  BYTE hufDesBuffer[ZSTD_MAX_HUF_HEADER_SIZE];
   167  BYTE fseTablesBuffer[ZSTD_MAX_FSE_HEADERS_SIZE];
   253  BYTE const* nextSrc;       /* next block here to continue on current prefix */
   254  BYTE const* base;          /* All regular indexes relative to this position */
   255  BYTE cons…
    … (further matches not shown)

zstd_compress_sequences.c
    74  BYTE wksp[FSE_NCOUNTBOUND];    in ZSTD_NCountCost()
   247  const BYTE* codeTable, size_t nbSeq,    in ZSTD_buildCTable()
   252  BYTE* op = (BYTE*)dst;    in ZSTD_buildCTable()
   253  const BYTE* const oend = op + dstCapacity;    in ZSTD_buildCTable()
   258  FORWARD_IF_ERROR(FSE_buildCTable_rle(nextCTable, (BYTE)max), "");    in ZSTD_buildCTable()
   294  FSE_CTable const* CTable_MatchLength, BYTE const* mlCodeTable,    in ZSTD_encodeSequences_body()
   295  FSE_CTable const* CTable_OffsetBits, BYTE const* ofCodeTable,    in ZSTD_encodeSequences_body()
   296  FSE_CTable const* CTable_LitLength, BYTE const* llCodeTable,    in ZSTD_encodeSequences_body()
   335  BYTE cons…    in ZSTD_encodeSequences_body()
    … (further matches not shown)

huf_compress.c
    48  BYTE byte;
    49  BYTE nbBits;
   115  BYTE* const aligned = (BYTE*)workspace + add;    in HUF_alignUpWorkspace()
   149  BYTE* const ostart = (BYTE*) dst;    in HUF_compressWeights()
   150  BYTE* op = ostart;    in HUF_compressWeights()
   151  BYTE* const oend = ostart + dstSize;    in HUF_compressWeights()
   234  header.tableLog = (BYTE)tableLog;    in HUF_writeCTableHeader()
   236  header.maxSymbolValue = (BYTE)maxSymbolValu…    in HUF_writeCTableHeader()
    … (further matches not shown)

zstd_cwksp.h
   166  BYTE allocFailed;
   277  void* const alloc = (BYTE*)ws->allocStart - bytes;    in ZSTD_cwksp_reserve_internal_buffer_space()
   316  void *const objectEnd = (BYTE *) alloc + bytesToAlign;    in ZSTD_cwksp_internal_advance_phase()
   362  MEM_STATIC BYTE* ZSTD_cwksp_reserve_buffer(ZSTD_cwksp* ws, size_t bytes)    in ZSTD_cwksp_reserve_buffer()
   364  return (BYTE*)ZSTD_cwksp_reserve_internal(ws, bytes, ZSTD_cwksp_alloc_buffers);    in ZSTD_cwksp_reserve_buffer()
   426  end = (BYTE *)alloc + bytes;    in ZSTD_cwksp_reserve_table()
   455  void* end = (BYTE*)alloc + roundedBytes;    in ZSTD_cwksp_reserve_object()
   521  ZSTD_memset(ws->tableValidEnd, 0, (size_t)((BYTE*)ws->tableEnd - (BYTE*)ws->tableValidEnd));    in ZSTD_cwksp_clean_tables()
   558  return (size_t)((BYTE*)w…    in ZSTD_cwksp_sizeof()
    … (further matches not shown)

fse_compress.c
   121  BYTE* const spread = tableSymbol + tableSize;   /* size = tableSize + 8 (may write beyond tableSize) */    in FSE_buildCTable_wksp()
   239  BYTE* const ostart = (BYTE*) header;    in FSE_writeNCount_generic()
   240  BYTE* out = ostart;    in FSE_writeNCount_generic()
   241  BYTE* const oend = ostart + headerBufferSize;    in FSE_writeNCount_generic()
   271  out[0] = (BYTE) bitStream;    in FSE_writeNCount_generic()
   272  out[1] = (BYTE)(bitStream>>8);    in FSE_writeNCount_generic()
   286  out[0] = (BYTE)bitStream;    in FSE_writeNCount_generic()
   287  out[1] = (BYTE)(bitStream>>8);    in FSE_writeNCount_generic()
   308  out[0] = (BYTE)bitStrea…    in FSE_writeNCount_generic()
    … (further matches not shown)
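The FSE_writeNCount_generic() matches above flush an accumulated bitStream into the header buffer one byte at a time, low byte first, via (BYTE) casts. A small self-contained sketch of that little-endian flush follows; the helper name and the simple bounds check are illustrative stand-ins for the real function's error handling.

    #include <stddef.h>
    #include <stdint.h>

    typedef uint8_t BYTE;

    /* Write the low 16 bits of bitStream into out[] in little-endian order,
     * mirroring out[0] = (BYTE)bitStream; out[1] = (BYTE)(bitStream>>8);
     * Returns the number of bytes written, or 0 if the buffer is too small. */
    static size_t flush16_le(BYTE *out, const BYTE *oend, uint32_t bitStream)
    {
        if (oend - out < 2)
            return 0;    /* caller would report destination-too-small */
        out[0] = (BYTE)bitStream;           /* bits 0..7  */
        out[1] = (BYTE)(bitStream >> 8);    /* bits 8..15 */
        return 2;
    }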
/linux/lib/zstd/decompress/

zstd_decompress_block.c
    94  dctx->litBuffer = (BYTE*)dst + blockSizeMax + WILDCOPY_OVERLENGTH;    in ZSTD_allocateLiteralsBuffer()
   115  dctx->litBuffer = (BYTE*)dst + expectedWriteSize - litSize + ZSTD_LITBUFFEREXTRASIZE - WILDCOPY_OVERLENGTH;    in ZSTD_allocateLiteralsBuffer()
   119  dctx->litBuffer = (BYTE*)dst + expectedWriteSize - litSize;    in ZSTD_allocateLiteralsBuffer()
   120  dctx->litBufferEnd = (BYTE*)dst + expectedWriteSize;    in ZSTD_allocateLiteralsBuffer()
   123  assert(dctx->litBufferEnd <= (BYTE*)dst + expectedWriteSize);    in ZSTD_allocateLiteralsBuffer()
   142  { const BYTE* const istart = (const BYTE*) src;    in ZSTD_decodeLiteralsBlock()
   239  assert(dctx->litBufferEnd <= (BYTE*)dst + blockSizeMax);    in ZSTD_decodeLiteralsBlock()
   496  BYTE* spread = (BYTE*)(symbolNex…    in ZSTD_buildFSETable_body()
    … (further matches not shown)

huf_decompress.c
   138  typedef struct { BYTE maxTableLog; BYTE tableType; BYTE tableLog; BYTE reserved; } DTableDesc;
   147  static size_t HUF_initFastDStream(BYTE const* ip) {    in HUF_initFastDStream()
   148  BYTE const lastByte = ip[7];    in HUF_initFastDStream()
   171  BYTE const* ip[4];
   172  BYTE* op[4];
   175  BYTE const* ilowest;
   176  BYTE* oen…
    … (further matches not shown)

zstd_decompress_internal.h
    23  #include "../common/mem.h"  /* BYTE, U16, U32 */
    70  BYTE nbAdditionalBits;
    71  BYTE nbBits;
   152  const BYTE* litPtr;
   190  BYTE* litBuffer;
   191  const BYTE* litBufferEnd;
   193  BYTE litExtraBuffer[ZSTD_LITBUFFEREXTRASIZE + WILDCOPY_OVERLENGTH];  /* literal buffer can be split between storage within dst and within this scratch buffer */
   194  BYTE headerBuffer[ZSTD_FRAMEHEADERSIZE_MAX];
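A recurring pattern in the decompression matches above (for example ZSTD_allocateLiteralsBuffer()) is casting the void* destination to BYTE* before adding offsets: ISO C defines no arithmetic on void*, so the cast is what makes the offsets byte-granular and well defined. A tiny sketch with illustrative names:

    #include <stddef.h>
    #include <stdint.h>

    typedef uint8_t BYTE;

    /* dst arrives as void* from the public API; the (BYTE*) cast turns
     * "+ expectedWriteSize" into a well-defined byte offset. */
    static BYTE *buffer_end(void *dst, size_t expectedWriteSize)
    {
        return (BYTE *)dst + expectedWriteSize;
    }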
/linux/Documentation/scsi/

arcmsr_spec.rst
   445  BYTE grsRaidSetName[16];
   449  BYTE grsDevArray[32];
   450  BYTE grsMemberDevices;
   451  BYTE grsNewMemberDevices;
   452  BYTE grsRaidState;
   453  BYTE grsVolumes;
   454  BYTE grsVolumeList[16];
   455  BYTE grsRes1;
   456  BYTE grsRes2;
   457  BYTE grsRes…
    … (further matches not shown)
/linux/arch/loongarch/include/asm/

module.lds.h
     5  .got 0 : { BYTE(0) }
     6  .plt 0 : { BYTE(0) }
     7  .plt.idx 0 : { BYTE(0) }
     8  .ftrace_trampoline 0 : { BYTE(0) }
/linux/lib/zstd/common/

entropy_common.c
    46  const BYTE* const istart = (const BYTE*) headerBuffer;    in FSE_readNCount_body()
    47  const BYTE* const iend = istart + hbSize;    in FSE_readNCount_body()
    48  const BYTE* ip = istart;    in FSE_readNCount_body()
   235  size_t HUF_readStats(BYTE* huffWeight, size_t hwSize, U32* rankStats,    in HUF_readStats()
   244  HUF_readStats_body(BYTE* huffWeight, size_t hwSize, U32* rankStats,    in HUF_readStats_body()
   251  const BYTE* ip = (const BYTE*) src;    in HUF_readStats_body()
   297  huffWeight[oSize] = (BYTE)lastWeight;    in HUF_readStats_body()
   310  static size_t HUF_readStats_body_default(BYTE* huffWeigh…
    … (further matches not shown)

zstd_internal.h
   186  BYTE copy16_buf[16];    in ZSTD_copy16()
   212  ptrdiff_t diff = (BYTE*)dst - (const BYTE*)src;    in ZSTD_wildcopy()
   213  const BYTE* ip = (const BYTE*)src;    in ZSTD_wildcopy()
   214  BYTE* op = (BYTE*)dst;    in ZSTD_wildcopy()
   215  BYTE* const oend = op + length;    in ZSTD_wildcopy()

fse_decompress.c
    64  BYTE* spread = (BYTE*)(symbolNext + maxSymbolValue + 1);    in FSE_buildDTable_internal()
   155  tableDecode[u].nbBits = (BYTE) (tableLog - ZSTD_highbit32(nextState) );    in FSE_buildDTable_internal()
   179  BYTE* const ostart = (BYTE*) dst;    in FSE_decompress_usingDTable_generic()
   180  BYTE* op = ostart;    in FSE_decompress_usingDTable_generic()
   181  BYTE* const omax = op + maxDstSize;    in FSE_decompress_usingDTable_generic()
   182  BYTE* const olimit = omax-3;    in FSE_decompress_usingDTable_generic()
   250  const BYTE* const istart = (const BYTE*)cSr…    in FSE_decompress_wksp_body()
    … (further matches not shown)
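The ZSTD_wildcopy() matches in zstd_internal.h compute ptrdiff_t diff = (BYTE*)dst - (const BYTE*)src to decide how a match may be copied: both pointers lie in the same output buffer, and a short distance means the copy overlaps itself. A minimal sketch of that decision, simplified to memcpy versus a byte loop rather than the chunked wild copy the real code uses:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    typedef uint8_t BYTE;

    /* Copy a match of `length` bytes that starts (dst - src) bytes back in the
     * same buffer. Disjoint regions can use memcpy; overlapping regions need a
     * forward byte-by-byte copy so the repeated pattern is replicated the way
     * LZ decoding expects. */
    static void copy_match(BYTE *dst, const BYTE *src, size_t length)
    {
        ptrdiff_t const diff = dst - src;

        if (diff >= (ptrdiff_t)length) {
            memcpy(dst, src, length);    /* no overlap */
        } else {
            while (length--)             /* overlapping: replicate pattern */
                *dst++ = *src++;
        }
    }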