Mirror of https://github.com/Xevion/easy7zip.git (synced 2025-12-15 18:11:43 -06:00)
Update Brotli to v1.0.1
- update README.md to reflect the new versions (LZ4, Brotli)
@@ -26,36 +26,37 @@
 #define __LITTLE_ENDIAN LITTLE_ENDIAN
 #endif

-/* define the macro IS_LITTLE_ENDIAN
+/* define the macro BROTLI_LITTLE_ENDIAN
    using the above endian definitions from endian.h if
    endian.h was included */
 #ifdef __BYTE_ORDER
 #if __BYTE_ORDER == __LITTLE_ENDIAN
-#define IS_LITTLE_ENDIAN
+#define BROTLI_LITTLE_ENDIAN
 #endif

 #else

 #if defined(__LITTLE_ENDIAN__)
-#define IS_LITTLE_ENDIAN
+#define BROTLI_LITTLE_ENDIAN
 #endif
 #endif /* __BYTE_ORDER */

 #if defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
-#define IS_LITTLE_ENDIAN
+#define BROTLI_LITTLE_ENDIAN
 #endif

 /* Enable little-endian optimization for x64 architecture on Windows. */
 #if (defined(_WIN32) || defined(_WIN64)) && defined(_M_X64)
-#define IS_LITTLE_ENDIAN
+#define BROTLI_LITTLE_ENDIAN
 #endif

 /* Portable handling of unaligned loads, stores, and copies.
    On some platforms, like ARM, the copy functions can be more efficient
    then a load and a store. */

-#if defined(ARCH_PIII) || \
-    defined(ARCH_ATHLON) || defined(ARCH_K8) || defined(_ARCH_PPC)
+#if defined(BROTLI_LITTLE_ENDIAN) && (\
+    defined(ARCH_PIII) || defined(ARCH_ATHLON) || \
+    defined(ARCH_K8) || defined(_ARCH_PPC))

 /* x86 and x86-64 can perform unaligned loads/stores directly;
    modern PowerPC hardware can also do unaligned integer loads and stores;
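Editorial aside, not part of the commit: the cascade above boils down to defining a single feature macro whenever little-endian byte order can be proven at compile time, which is what gates the fast paths below. A minimal self-contained sketch of the same pattern, with MY_LITTLE_ENDIAN standing in for BROTLI_LITTLE_ENDIAN and only the modern GCC/Clang and MSVC-x64 branches kept:

/* Sketch only; names are illustrative, not Brotli's. */
#include <stdint.h>
#include <stdio.h>

#if defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
#define MY_LITTLE_ENDIAN
#elif (defined(_WIN32) || defined(_WIN64)) && defined(_M_X64)
#define MY_LITTLE_ENDIAN
#endif

int main(void) {
  /* Runtime cross-check: on a little-endian host the low byte of 1
     is stored first in memory. */
  const uint32_t probe = 1;
  const int runtime_le = (*(const uint8_t *)&probe == 1);
#if defined(MY_LITTLE_ENDIAN)
  printf("compile-time: little-endian; runtime agrees: %s\n",
         runtime_le ? "yes" : "NO (detection bug)");
#else
  printf("compile-time: undetected; runtime says: %s\n",
         runtime_le ? "little-endian" : "big-endian");
#endif
  return 0;
}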
@@ -63,14 +64,12 @@
 */

 #define BROTLI_UNALIGNED_LOAD32(_p) (*(const uint32_t *)(_p))
-#define BROTLI_UNALIGNED_LOAD64(_p) (*(const uint64_t *)(_p))
+#define BROTLI_UNALIGNED_LOAD64LE(_p) (*(const uint64_t *)(_p))

-#define BROTLI_UNALIGNED_STORE32(_p, _val) \
-  (*(uint32_t *)(_p) = (_val))
-#define BROTLI_UNALIGNED_STORE64(_p, _val) \
+#define BROTLI_UNALIGNED_STORE64LE(_p, _val) \
   (*(uint64_t *)(_p) = (_val))

-#elif defined(__arm__) && \
+#elif defined(BROTLI_LITTLE_ENDIAN) && defined(__arm__) && \
       !defined(__ARM_ARCH_5__) && \
       !defined(__ARM_ARCH_5T__) && \
       !defined(__ARM_ARCH_5TE__) && \
@@ -88,16 +87,14 @@
 slowly (trip through kernel mode). */

 #define BROTLI_UNALIGNED_LOAD32(_p) (*(const uint32_t *)(_p))
-#define BROTLI_UNALIGNED_STORE32(_p, _val) \
-  (*(uint32_t *)(_p) = (_val))

-static BROTLI_INLINE uint64_t BROTLI_UNALIGNED_LOAD64(const void *p) {
+static BROTLI_INLINE uint64_t BROTLI_UNALIGNED_LOAD64LE(const void *p) {
   uint64_t t;
   memcpy(&t, p, sizeof t);
   return t;
 }

-static BROTLI_INLINE void BROTLI_UNALIGNED_STORE64(void *p, uint64_t v) {
+static BROTLI_INLINE void BROTLI_UNALIGNED_STORE64LE(void *p, uint64_t v) {
   memcpy(p, &v, sizeof v);
 }

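Editorial aside, not part of the commit: the two hunks above are switching between the two classic unaligned-load idioms, now gated on BROTLI_LITTLE_ENDIAN. A small sketch of both idioms under illustrative names (the pointer-cast form is only legal where the CPU and ABI tolerate unaligned word access, as on x86/x64; the memcpy form is portable and strict-aliasing safe, and modern compilers lower it to a single load where possible):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

static uint32_t load32_cast(const void *p) {
  return *(const uint32_t *)p;  /* fast path: direct unaligned load */
}

static uint32_t load32_memcpy(const void *p) {
  uint32_t t;
  memcpy(&t, p, sizeof t);      /* portable fallback */
  return t;
}

int main(void) {
  const uint8_t buf[5] = {0x00, 0x11, 0x22, 0x33, 0x44};
  /* buf + 1 is deliberately misaligned; both calls read the same
     bytes on platforms where the cast form is legal at all. */
  printf("cast:   0x%08x\n", load32_cast(buf + 1));
  printf("memcpy: 0x%08x\n", load32_memcpy(buf + 1));
  return 0;
}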
@@ -112,20 +109,47 @@ static BROTLI_INLINE uint32_t BROTLI_UNALIGNED_LOAD32(const void *p) {
   return t;
 }

-static BROTLI_INLINE uint64_t BROTLI_UNALIGNED_LOAD64(const void *p) {
+#if defined(BROTLI_LITTLE_ENDIAN)
+
+static BROTLI_INLINE uint64_t BROTLI_UNALIGNED_LOAD64LE(const void *p) {
   uint64_t t;
   memcpy(&t, p, sizeof t);
   return t;
 }

-static BROTLI_INLINE void BROTLI_UNALIGNED_STORE32(void *p, uint32_t v) {
+static BROTLI_INLINE void BROTLI_UNALIGNED_STORE64LE(void *p, uint64_t v) {
   memcpy(p, &v, sizeof v);
 }

-static BROTLI_INLINE void BROTLI_UNALIGNED_STORE64(void *p, uint64_t v) {
-  memcpy(p, &v, sizeof v);
-}
+#else /* BROTLI_LITTLE_ENDIAN */
+
+static BROTLI_INLINE uint64_t BROTLI_UNALIGNED_LOAD64LE(const void *p) {
+  const uint8_t* in = (const uint8_t*)p;
+  uint64_t value = (uint64_t)(in[0]);
+  value |= (uint64_t)(in[1]) << 8;
+  value |= (uint64_t)(in[2]) << 16;
+  value |= (uint64_t)(in[3]) << 24;
+  value |= (uint64_t)(in[4]) << 32;
+  value |= (uint64_t)(in[5]) << 40;
+  value |= (uint64_t)(in[6]) << 48;
+  value |= (uint64_t)(in[7]) << 56;
+  return value;
+}
+
+static BROTLI_INLINE void BROTLI_UNALIGNED_STORE64LE(void *p, uint64_t v) {
+  uint8_t* out = (uint8_t*)p;
+  out[0] = (uint8_t)v;
+  out[1] = (uint8_t)(v >> 8);
+  out[2] = (uint8_t)(v >> 16);
+  out[3] = (uint8_t)(v >> 24);
+  out[4] = (uint8_t)(v >> 32);
+  out[5] = (uint8_t)(v >> 40);
+  out[6] = (uint8_t)(v >> 48);
+  out[7] = (uint8_t)(v >> 56);
+}
+
+#endif /* BROTLI_LITTLE_ENDIAN */

 #endif

 #define TEMPLATE_(T) \
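Editorial aside, not part of the commit: the byte-wise fallback added in this last hunk is what makes the LE-suffixed accessors endian-independent. A memcpy load returns host byte order, so it only matches the little-endian interpretation of the buffer on little-endian machines; assembling the value byte by byte matches it everywhere. A minimal sketch under illustrative names (not Brotli's):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

static uint64_t load64_host(const void *p) {
  uint64_t t;
  memcpy(&t, p, sizeof t);  /* fast path: correct only on little-endian hosts */
  return t;
}

static uint64_t load64le_bytes(const void *p) {
  const uint8_t *in = (const uint8_t *)p;
  uint64_t value = (uint64_t)in[0];  /* always little-endian interpretation */
  value |= (uint64_t)in[1] << 8;
  value |= (uint64_t)in[2] << 16;
  value |= (uint64_t)in[3] << 24;
  value |= (uint64_t)in[4] << 32;
  value |= (uint64_t)in[5] << 40;
  value |= (uint64_t)in[6] << 48;
  value |= (uint64_t)in[7] << 56;
  return value;
}

int main(void) {
  const uint8_t buf[8] = {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08};
  /* On a little-endian host both print 0807060504030201;
     on a big-endian host only the byte-wise version does. */
  printf("host order: %016llx\n", (unsigned long long)load64_host(buf));
  printf("byte-wise:  %016llx\n", (unsigned long long)load64le_bytes(buf));
  return 0;
}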