#ifndef __LZ4DEFS_H__
#define __LZ4DEFS_H__
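
/*
 * lz4defs.h -- common and architecture specific defines for kernel LZ4
 *
 * Based on LZ4, a fast LZ compression algorithm by Yann Collet.
 */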
#include <asm/unaligned.h>

#include <linux/bitops.h>
#include <linux/string.h> /* memset, memcpy */

#define FORCE_INLINE __always_inline
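
/* Basic types used throughout the LZ4 implementation */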
#include <linux/types.h>

typedef uint8_t BYTE;
typedef uint16_t U16;
typedef uint32_t U32;
typedef int32_t S32;
typedef uint64_t U64;
typedef uintptr_t uptrval;
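
/* Architecture specifics */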
#if defined(CONFIG_64BIT)
#define LZ4_ARCH64 1
#else
#define LZ4_ARCH64 0
#endif

#if defined(__LITTLE_ENDIAN)
#define LZ4_LITTLE_ENDIAN 1
#else
#define LZ4_LITTLE_ENDIAN 0
#endif
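
/* Constants */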
#define MINMATCH 4		/* minimum match length, in bytes */

#define WILDCOPYLENGTH 8	/* copy stride used by LZ4_wildCopy() */
#define LASTLITERALS 5		/* a block always ends with at least 5 literal bytes */
#define MFLIMIT (WILDCOPYLENGTH + MINMATCH) /* no match may start within MFLIMIT bytes of block end */
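
/*
 * Ensure it is always possible to write 2 * WILDCOPYLENGTH bytes
 * without overflowing the output buffer.
 */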
#define MATCH_SAFEGUARD_DISTANCE ((2 * WILDCOPYLENGTH) - MINMATCH)
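
/* Increasing this value makes compression run slower on incompressible data */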
#define LZ4_SKIPTRIGGER 6

#define HASH_UNIT sizeof(size_t)

#define KB (1 << 10)
#define MB (1 << 20)
#define GB (1U << 30)

#define MAXD_LOG 16
#define MAX_DISTANCE ((1 << MAXD_LOG) - 1)	/* maximum match offset: 64 KB - 1 */
#define STEPSIZE sizeof(size_t)		/* word size used when scanning for match length */
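
/*
 * Each LZ4 sequence begins with a one-byte token: the upper RUN_BITS bits
 * hold the literal-run length and the lower ML_BITS bits hold the match
 * length minus MINMATCH. A field value of RUN_MASK / ML_MASK means the
 * length continues in the following byte(s).
 */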
#define ML_BITS 4
#define ML_MASK ((1U << ML_BITS) - 1)
#define RUN_BITS (8 - ML_BITS)
#define RUN_MASK ((1U << RUN_BITS) - 1)
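
/* Helpers for unaligned reads and writes of 16/32-bit and word-sized values */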
static FORCE_INLINE U16 LZ4_read16(const void *ptr)
{
	return get_unaligned((const U16 *)ptr);
}

static FORCE_INLINE U32 LZ4_read32(const void *ptr)
{
	return get_unaligned((const U32 *)ptr);
}

static FORCE_INLINE size_t LZ4_read_ARCH(const void *ptr)
{
	return get_unaligned((const size_t *)ptr);
}

static FORCE_INLINE void LZ4_write16(void *memPtr, U16 value)
{
	put_unaligned(value, (U16 *)memPtr);
}

static FORCE_INLINE void LZ4_write32(void *memPtr, U32 value)
{
	put_unaligned(value, (U32 *)memPtr);
}

static FORCE_INLINE U16 LZ4_readLE16(const void *memPtr)
{
	return get_unaligned_le16(memPtr);
}

static FORCE_INLINE void LZ4_writeLE16(void *memPtr, U16 value)
{
	put_unaligned_le16(value, memPtr);
}
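
/*
 * LZ4 relies on constant-size copies being inlined by the compiler;
 * the __builtin_* forms allow that even in a freestanding kernel build.
 */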
#define LZ4_memcpy(dst, src, size) __builtin_memcpy(dst, src, size)
#define LZ4_memmove(dst, src, size) __builtin_memmove(dst, src, size)
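
/* Copy exactly 8 bytes using unaligned accesses (two 32-bit words on 32-bit) */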
static FORCE_INLINE void LZ4_copy8(void *dst, const void *src)
{
#if LZ4_ARCH64
	U64 a = get_unaligned((const U64 *)src);

	put_unaligned(a, (U64 *)dst);
#else
	U32 a = get_unaligned((const U32 *)src);
	U32 b = get_unaligned((const U32 *)src + 1);

	put_unaligned(a, (U32 *)dst);
	put_unaligned(b, (U32 *)dst + 1);
#endif
}
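
/*
 * Customized variant of memcpy: copies in 8-byte strides and may write
 * up to WILDCOPYLENGTH - 1 bytes beyond dstEnd, so callers must leave
 * that much slack in the output buffer.
 */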
static FORCE_INLINE void LZ4_wildCopy(void *dstPtr,
	const void *srcPtr, void *dstEnd)
{
	BYTE *d = (BYTE *)dstPtr;
	const BYTE *s = (const BYTE *)srcPtr;
	BYTE *const e = (BYTE *)dstEnd;

	do {
		LZ4_copy8(d, s);
		d += 8;
		s += 8;
	} while (d < e);
}
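
/*
 * Given the XOR of two words that differ, return how many bytes
 * (in memory order) match before the first differing byte.
 */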
static FORCE_INLINE unsigned int LZ4_NbCommonBytes(register size_t val)
{
#if LZ4_LITTLE_ENDIAN
	return __ffs(val) >> 3;
#else
	return (BITS_PER_LONG - 1 - __fls(val)) >> 3;
#endif
}
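
/*
 * Count the number of identical bytes at pIn and pMatch without
 * reading at or beyond pInLimit; returns the match length in bytes.
 */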
static FORCE_INLINE unsigned int LZ4_count(
	const BYTE *pIn,
	const BYTE *pMatch,
	const BYTE *pInLimit)
{
	const BYTE *const pStart = pIn;

	/* Compare one word at a time while a full word still fits before pInLimit */
	while (likely(pIn < pInLimit - (STEPSIZE - 1))) {
		size_t const diff = LZ4_read_ARCH(pMatch) ^ LZ4_read_ARCH(pIn);

		if (!diff) {
			pIn += STEPSIZE;
			pMatch += STEPSIZE;
			continue;
		}

		/* First mismatching word: locate the exact mismatching byte */
		pIn += LZ4_NbCommonBytes(diff);

		return (unsigned int)(pIn - pStart);
	}

	/* Finish the tail with narrower loads */
#if LZ4_ARCH64
	if ((pIn < (pInLimit - 3))
		&& (LZ4_read32(pMatch) == LZ4_read32(pIn))) {
		pIn += 4;
		pMatch += 4;
	}
#endif

	if ((pIn < (pInLimit - 1))
		&& (LZ4_read16(pMatch) == LZ4_read16(pIn))) {
		pIn += 2;
		pMatch += 2;
	}

	if ((pIn < pInLimit) && (*pMatch == *pIn))
		pIn++;

	return (unsigned int)(pIn - pStart);
}
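
/* Compile-time directives used to specialize the compression and decompression paths */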
typedef enum { noLimit = 0, limitedOutput = 1 } limitedOutput_directive;
typedef enum { byPtr, byU32, byU16 } tableType_t;

typedef enum { noDict = 0, withPrefix64k, usingExtDict } dict_directive;
typedef enum { noDictIssue = 0, dictSmall } dictIssue_directive;

typedef enum { endOnOutputSize = 0, endOnInputSize = 1 } endCondition_directive;
typedef enum { decode_full_block = 0, partial_decode = 1 } earlyEnd_directive;

#define LZ4_STATIC_ASSERT(c) BUILD_BUG_ON(!(c))

#endif