Mirror of https://github.com/Xevion/easy7zip.git (synced 2025-12-15 02:11:43 -06:00)
Update brotli to version 1.0.9
@@ -10,6 +10,7 @@
#define BROTLI_ENC_BACKWARD_REFERENCES_H_

#include "../common/constants.h"
#include "../common/context.h"
#include "../common/dictionary.h"
#include "../common/platform.h"
#include "../types.h"
@@ -25,10 +26,10 @@ extern "C" {
   initially the total amount of commands output by previous
   CreateBackwardReferences calls, and must be incremented by the amount written
   by this call. */
BROTLI_INTERNAL void BrotliCreateBackwardReferences(
    size_t num_bytes, size_t position, const uint8_t* ringbuffer,
    size_t ringbuffer_mask, const BrotliEncoderParams* params,
    HasherHandle hasher, int* dist_cache, size_t* last_insert_len,
BROTLI_INTERNAL void BrotliCreateBackwardReferences(size_t num_bytes,
    size_t position, const uint8_t* ringbuffer, size_t ringbuffer_mask,
    ContextLut literal_context_lut, const BrotliEncoderParams* params,
    Hasher* hasher, int* dist_cache, size_t* last_insert_len,
    Command* commands, size_t* num_commands, size_t* num_literals);

#if defined(__cplusplus) || defined(c_plusplus)
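The old signature took the hasher as an untyped HasherHandle (a raw uint8_t* blob); the new one takes a typed Hasher* plus a ContextLut for literal contexts. A minimal sketch of how a call site changes; the variable names here are illustrative, not from the brotli sources:

/* brotli <= 1.0.8 style: hasher passed as an opaque byte buffer. */
BrotliCreateBackwardReferences(num_bytes, position, ringbuffer,
    ringbuffer_mask, params, hasher_handle, dist_cache, &last_insert_len,
    commands, &num_commands, &num_literals);

/* brotli 1.0.9 style: typed Hasher* and an extra literal-context LUT. */
BrotliCreateBackwardReferences(num_bytes, position, ringbuffer,
    ringbuffer_mask, literal_context_lut, params, &hasher, dist_cache,
    &last_insert_len, commands, &num_commands, &num_literals);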
@@ -10,6 +10,7 @@
#define BROTLI_ENC_BACKWARD_REFERENCES_HQ_H_

#include "../common/constants.h"
#include "../common/context.h"
#include "../common/dictionary.h"
#include "../common/platform.h"
#include "../types.h"
@@ -23,15 +24,17 @@ extern "C" {
#endif

BROTLI_INTERNAL void BrotliCreateZopfliBackwardReferences(MemoryManager* m,
    size_t num_bytes, size_t position, const uint8_t* ringbuffer,
    size_t ringbuffer_mask, const BrotliEncoderParams* params,
    HasherHandle hasher, int* dist_cache, size_t* last_insert_len,
    size_t num_bytes,
    size_t position, const uint8_t* ringbuffer, size_t ringbuffer_mask,
    ContextLut literal_context_lut, const BrotliEncoderParams* params,
    Hasher* hasher, int* dist_cache, size_t* last_insert_len,
    Command* commands, size_t* num_commands, size_t* num_literals);

BROTLI_INTERNAL void BrotliCreateHqZopfliBackwardReferences(MemoryManager* m,
    size_t num_bytes, size_t position, const uint8_t* ringbuffer,
    size_t ringbuffer_mask, const BrotliEncoderParams* params,
    HasherHandle hasher, int* dist_cache, size_t* last_insert_len,
    size_t num_bytes,
    size_t position, const uint8_t* ringbuffer, size_t ringbuffer_mask,
    ContextLut literal_context_lut, const BrotliEncoderParams* params,
    Hasher* hasher, int* dist_cache, size_t* last_insert_len,
    Command* commands, size_t* num_commands, size_t* num_literals);

typedef struct ZopfliNode {
@@ -77,8 +80,8 @@ BROTLI_INTERNAL void BrotliInitZopfliNodes(ZopfliNode* array, size_t length);
BROTLI_INTERNAL size_t BrotliZopfliComputeShortestPath(
    MemoryManager* m, size_t num_bytes,
    size_t position, const uint8_t* ringbuffer, size_t ringbuffer_mask,
    const BrotliEncoderParams* params,
    const int* dist_cache, HasherHandle hasher, ZopfliNode* nodes);
    ContextLut literal_context_lut, const BrotliEncoderParams* params,
    const int* dist_cache, Hasher* hasher, ZopfliNode* nodes);

BROTLI_INTERNAL void BrotliZopfliCreateCommands(
    const size_t num_bytes, const size_t block_start, const ZopfliNode* nodes,
@@ -10,11 +10,13 @@
static BROTLI_NOINLINE void EXPORT_FN(CreateBackwardReferences)(
    size_t num_bytes, size_t position,
    const uint8_t* ringbuffer, size_t ringbuffer_mask,
    const BrotliEncoderParams* params,
    HasherHandle hasher, int* dist_cache, size_t* last_insert_len,
    ContextLut literal_context_lut, const BrotliEncoderParams* params,
    Hasher* hasher, int* dist_cache, size_t* last_insert_len,
    Command* commands, size_t* num_commands, size_t* num_literals) {
  HASHER()* privat = &hasher->privat.FN(_);
  /* Set maximum distance, see section 9.1. of the spec. */
  const size_t max_backward_limit = BROTLI_MAX_BACKWARD_LIMIT(params->lgwin);
  const size_t position_offset = params->stream_offset;

  const Command* const orig_commands = commands;
  size_t insert_length = *last_insert_len;
@@ -31,19 +33,23 @@ static BROTLI_NOINLINE void EXPORT_FN(CreateBackwardReferences)(
  /* Minimum score to accept a backward reference. */
  const score_t kMinScore = BROTLI_SCORE_BASE + 100;

  FN(PrepareDistanceCache)(hasher, dist_cache);
  BROTLI_UNUSED(literal_context_lut);

  FN(PrepareDistanceCache)(privat, dist_cache);

  while (position + FN(HashTypeLength)() < pos_end) {
    size_t max_length = pos_end - position;
    size_t max_distance = BROTLI_MIN(size_t, position, max_backward_limit);
    size_t dictionary_start = BROTLI_MIN(size_t,
        position + position_offset, max_backward_limit);
    HasherSearchResult sr;
    sr.len = 0;
    sr.len_code_delta = 0;
    sr.distance = 0;
    sr.score = kMinScore;
    FN(FindLongestMatch)(hasher, &params->dictionary,
    FN(FindLongestMatch)(privat, &params->dictionary,
        ringbuffer, ringbuffer_mask, dist_cache, position, max_length,
        max_distance, gap, params->dist.max_distance, &sr);
        max_distance, dictionary_start + gap, params->dist.max_distance, &sr);
    if (sr.score > kMinScore) {
      /* Found a match. Let's look for something even better ahead. */
      int delayed_backward_references_in_row = 0;
@@ -57,10 +63,13 @@ static BROTLI_NOINLINE void EXPORT_FN(CreateBackwardReferences)(
      sr2.distance = 0;
      sr2.score = kMinScore;
      max_distance = BROTLI_MIN(size_t, position + 1, max_backward_limit);
      FN(FindLongestMatch)(hasher,
      dictionary_start = BROTLI_MIN(size_t,
          position + 1 + position_offset, max_backward_limit);
      FN(FindLongestMatch)(privat,
          &params->dictionary,
          ringbuffer, ringbuffer_mask, dist_cache, position + 1, max_length,
          max_distance, gap, params->dist.max_distance, &sr2);
          max_distance, dictionary_start + gap, params->dist.max_distance,
          &sr2);
      if (sr2.score >= sr.score + cost_diff_lazy) {
        /* Ok, let's just write one byte for now and start a match from the
           next byte. */
@@ -76,18 +85,19 @@ static BROTLI_NOINLINE void EXPORT_FN(CreateBackwardReferences)(
      }
      apply_random_heuristics =
          position + 2 * sr.len + random_heuristics_window_size;
      max_distance = BROTLI_MIN(size_t, position, max_backward_limit);
      dictionary_start = BROTLI_MIN(size_t,
          position + position_offset, max_backward_limit);
      {
        /* The first 16 codes are special short-codes,
           and the minimum offset is 1. */
        size_t distance_code = ComputeDistanceCode(
            sr.distance, max_distance + gap, dist_cache);
        if ((sr.distance <= (max_distance + gap)) && distance_code > 0) {
            sr.distance, dictionary_start + gap, dist_cache);
        if ((sr.distance <= (dictionary_start + gap)) && distance_code > 0) {
          dist_cache[3] = dist_cache[2];
          dist_cache[2] = dist_cache[1];
          dist_cache[1] = dist_cache[0];
          dist_cache[0] = (int)sr.distance;
          FN(PrepareDistanceCache)(hasher, dist_cache);
          FN(PrepareDistanceCache)(privat, dist_cache);
        }
        InitCommand(commands++, &params->dist, insert_length,
            sr.len, sr.len_code_delta, distance_code);
@@ -105,7 +115,7 @@ static BROTLI_NOINLINE void EXPORT_FN(CreateBackwardReferences)(
          range_start = BROTLI_MIN(size_t, range_end, BROTLI_MAX(size_t,
              range_start, position + sr.len - (sr.distance << 2)));
        }
        FN(StoreRange)(hasher, ringbuffer, ringbuffer_mask, range_start,
        FN(StoreRange)(privat, ringbuffer, ringbuffer_mask, range_start,
            range_end);
      }
      position += sr.len;
@@ -131,7 +141,7 @@ static BROTLI_NOINLINE void EXPORT_FN(CreateBackwardReferences)(
        size_t pos_jump =
            BROTLI_MIN(size_t, position + 16, pos_end - kMargin);
        for (; position < pos_jump; position += 4) {
          FN(Store)(hasher, ringbuffer, ringbuffer_mask, position);
          FN(Store)(privat, ringbuffer, ringbuffer_mask, position);
          insert_length += 4;
        }
      } else {
@@ -140,7 +150,7 @@ static BROTLI_NOINLINE void EXPORT_FN(CreateBackwardReferences)(
        size_t pos_jump =
            BROTLI_MIN(size_t, position + 8, pos_end - kMargin);
        for (; position < pos_jump; position += 2) {
          FN(Store)(hasher, ringbuffer, ringbuffer_mask, position);
          FN(Store)(privat, ringbuffer, ringbuffer_mask, position);
          insert_length += 2;
        }
      }
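The new code distinguishes max_distance (capped by the current position) from dictionary_start (capped by position + params->stream_offset), so static-dictionary references stay addressable when a stream is compressed in independently offset chunks. A minimal sketch of the arithmetic with made-up numbers, mirroring the BROTLI_MIN calls above:

/* Sketch only; values are illustrative. */
size_t lgwin = 22;
size_t max_backward_limit = (1u << lgwin) - 16;      /* BROTLI_MAX_BACKWARD_LIMIT */
size_t position = 1000;
size_t stream_offset = 1u << 20;                     /* bytes produced by earlier chunks */

size_t max_distance = position < max_backward_limit ?
    position : max_backward_limit;                   /* 1000 */
size_t dictionary_start = (position + stream_offset) < max_backward_limit ?
    (position + stream_offset) : max_backward_limit; /* 1049576 */
/* A match is accepted only if sr.distance <= dictionary_start + gap. */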
@@ -219,7 +219,12 @@ static void FN(ClusterBlocks)(MemoryManager* m,
|
||||
uint32_t symbols[HISTOGRAMS_PER_BATCH] = { 0 };
|
||||
uint32_t remap[HISTOGRAMS_PER_BATCH] = { 0 };
|
||||
|
||||
if (BROTLI_IS_OOM(m)) return;
|
||||
if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(histogram_symbols) ||
|
||||
BROTLI_IS_NULL(block_lengths) || BROTLI_IS_NULL(all_histograms) ||
|
||||
BROTLI_IS_NULL(cluster_size) || BROTLI_IS_NULL(histograms) ||
|
||||
BROTLI_IS_NULL(pairs)) {
|
||||
return;
|
||||
}
|
||||
|
||||
memset(block_lengths, 0, num_blocks * sizeof(uint32_t));
|
||||
|
||||
@@ -278,11 +283,11 @@ static void FN(ClusterBlocks)(MemoryManager* m,
|
||||
if (pairs_capacity < max_num_pairs + 1) {
|
||||
BROTLI_FREE(m, pairs);
|
||||
pairs = BROTLI_ALLOC(m, HistogramPair, max_num_pairs + 1);
|
||||
if (BROTLI_IS_OOM(m)) return;
|
||||
if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(pairs)) return;
|
||||
}
|
||||
|
||||
clusters = BROTLI_ALLOC(m, uint32_t, num_clusters);
|
||||
if (BROTLI_IS_OOM(m)) return;
|
||||
if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(clusters)) return;
|
||||
for (i = 0; i < num_clusters; ++i) {
|
||||
clusters[i] = (uint32_t)i;
|
||||
}
|
||||
@@ -294,7 +299,7 @@ static void FN(ClusterBlocks)(MemoryManager* m,
|
||||
BROTLI_FREE(m, cluster_size);
|
||||
|
||||
new_index = BROTLI_ALLOC(m, uint32_t, num_clusters);
|
||||
if (BROTLI_IS_OOM(m)) return;
|
||||
if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(new_index)) return;
|
||||
for (i = 0; i < num_clusters; ++i) new_index[i] = kInvalidIndex;
|
||||
pos = 0;
|
||||
{
|
||||
@@ -386,7 +391,7 @@ static void FN(SplitByteVector)(MemoryManager* m,
|
||||
return;
|
||||
}
|
||||
histograms = BROTLI_ALLOC(m, HistogramType, num_histograms);
|
||||
if (BROTLI_IS_OOM(m)) return;
|
||||
if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(histograms)) return;
|
||||
/* Find good entropy codes. */
|
||||
FN(InitialEntropyCodes)(data, length,
|
||||
sampling_stride_length,
|
||||
@@ -405,7 +410,11 @@ static void FN(SplitByteVector)(MemoryManager* m,
|
||||
uint16_t* new_id = BROTLI_ALLOC(m, uint16_t, num_histograms);
|
||||
const size_t iters = params->quality < HQ_ZOPFLIFICATION_QUALITY ? 3 : 10;
|
||||
size_t i;
|
||||
if (BROTLI_IS_OOM(m)) return;
|
||||
if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(block_ids) ||
|
||||
BROTLI_IS_NULL(insert_cost) || BROTLI_IS_NULL(cost) ||
|
||||
BROTLI_IS_NULL(switch_signal) || BROTLI_IS_NULL(new_id)) {
|
||||
return;
|
||||
}
|
||||
for (i = 0; i < iters; ++i) {
|
||||
num_blocks = FN(FindBlocks)(data, length,
|
||||
block_switch_cost,
|
||||
|
||||
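A recurring change in this update: every BROTLI_ALLOC is now followed by a BROTLI_IS_NULL check in addition to the BROTLI_IS_OOM check on the memory manager, so a failed allocation can never be dereferenced. A minimal sketch of the guard pattern (variable names illustrative):

uint32_t* new_index = BROTLI_ALLOC(m, uint32_t, num_clusters);
if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(new_index)) return;  /* bail out early */
/* ... touch new_index only after both checks pass ... */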
@@ -215,7 +215,7 @@ BROTLI_INTERNAL size_t FN(BrotliHistogramReindex)(MemoryManager* m,
|
||||
uint32_t next_index;
|
||||
HistogramType* tmp;
|
||||
size_t i;
|
||||
if (BROTLI_IS_OOM(m)) return 0;
|
||||
if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(new_index)) return 0;
|
||||
for (i = 0; i < length; ++i) {
|
||||
new_index[i] = kInvalidIndex;
|
||||
}
|
||||
@@ -229,7 +229,7 @@ BROTLI_INTERNAL size_t FN(BrotliHistogramReindex)(MemoryManager* m,
|
||||
/* TODO: by using idea of "cycle-sort" we can avoid allocation of
|
||||
tmp and reduce the number of copying by the factor of 2. */
|
||||
tmp = BROTLI_ALLOC(m, HistogramType, next_index);
|
||||
if (BROTLI_IS_OOM(m)) return 0;
|
||||
if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(tmp)) return 0;
|
||||
next_index = 0;
|
||||
for (i = 0; i < length; ++i) {
|
||||
if (new_index[symbols[i]] == next_index) {
|
||||
@@ -259,7 +259,10 @@ BROTLI_INTERNAL void FN(BrotliClusterHistograms)(
|
||||
HistogramPair* pairs = BROTLI_ALLOC(m, HistogramPair, pairs_capacity + 1);
|
||||
size_t i;
|
||||
|
||||
if (BROTLI_IS_OOM(m)) return;
|
||||
if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(cluster_size) ||
|
||||
BROTLI_IS_NULL(clusters) || BROTLI_IS_NULL(pairs)) {
|
||||
return;
|
||||
}
|
||||
|
||||
for (i = 0; i < in_size; ++i) {
|
||||
cluster_size[i] = 1;
|
||||
|
||||
@@ -20,14 +20,14 @@
extern "C" {
#endif

static uint32_t kInsBase[] = { 0, 1, 2, 3, 4, 5, 6, 8, 10, 14, 18, 26, 34, 50,
    66, 98, 130, 194, 322, 578, 1090, 2114, 6210, 22594 };
static uint32_t kInsExtra[] = { 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4,
    5, 5, 6, 7, 8, 9, 10, 12, 14, 24 };
static uint32_t kCopyBase[] = { 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 14, 18, 22, 30,
    38, 54, 70, 102, 134, 198, 326, 582, 1094, 2118 };
static uint32_t kCopyExtra[] = { 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3,
    4, 4, 5, 5, 6, 7, 8, 9, 10, 24 };
BROTLI_INTERNAL extern const uint32_t
    kBrotliInsBase[BROTLI_NUM_INS_COPY_CODES];
BROTLI_INTERNAL extern const uint32_t
    kBrotliInsExtra[BROTLI_NUM_INS_COPY_CODES];
BROTLI_INTERNAL extern const uint32_t
    kBrotliCopyBase[BROTLI_NUM_INS_COPY_CODES];
BROTLI_INTERNAL extern const uint32_t
    kBrotliCopyExtra[BROTLI_NUM_INS_COPY_CODES];

static BROTLI_INLINE uint16_t GetInsertLengthCode(size_t insertlen) {
  if (insertlen < 6) {
@@ -89,19 +89,19 @@ static BROTLI_INLINE void GetLengthCode(size_t insertlen, size_t copylen,
}

static BROTLI_INLINE uint32_t GetInsertBase(uint16_t inscode) {
  return kInsBase[inscode];
  return kBrotliInsBase[inscode];
}

static BROTLI_INLINE uint32_t GetInsertExtra(uint16_t inscode) {
  return kInsExtra[inscode];
  return kBrotliInsExtra[inscode];
}

static BROTLI_INLINE uint32_t GetCopyBase(uint16_t copycode) {
  return kCopyBase[copycode];
  return kBrotliCopyBase[copycode];
}

static BROTLI_INLINE uint32_t GetCopyExtra(uint16_t copycode) {
  return kCopyExtra[copycode];
  return kBrotliCopyExtra[copycode];
}

typedef struct Command {
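The insert/copy code tables move from per-translation-unit static arrays in command.h to single BROTLI_INTERNAL extern definitions, removing duplicated copies of the data. Each code encodes a length as base + extra-bits value. A small sketch using the GetInsertBase/GetInsertExtra helpers shown above; the DecodeInsertLength wrapper is hypothetical:

/* Sketch: recover an insert length from (code, extra bits value). */
static uint32_t DecodeInsertLength(uint16_t inscode, uint32_t extra_value) {
  /* e.g. inscode 7: base 8, 1 extra bit -> lengths 8..9 */
  return GetInsertBase(inscode) + extra_value;  /* extra_value < (1u << GetInsertExtra(inscode)) */
}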
@@ -15,7 +15,8 @@
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
extern const uint16_t kStaticDictionaryHash[32768];
|
||||
extern const uint16_t kStaticDictionaryHashWords[32768];
|
||||
extern const uint8_t kStaticDictionaryHashLengths[32768];
|
||||
|
||||
#if defined(__cplusplus) || defined(c_plusplus)
|
||||
} /* extern "C" */
|
||||
|
||||
@@ -19,13 +19,15 @@ extern "C" {
|
||||
/* Dictionary data (words and transforms) for 1 possible context */
|
||||
typedef struct BrotliEncoderDictionary {
|
||||
const BrotliDictionary* words;
|
||||
uint32_t num_transforms;
|
||||
|
||||
/* cut off for fast encoder */
|
||||
uint32_t cutoffTransformsCount;
|
||||
uint64_t cutoffTransforms;
|
||||
|
||||
/* from dictionary_hash.h, for fast encoder */
|
||||
const uint16_t* hash_table;
|
||||
const uint16_t* hash_table_words;
|
||||
const uint8_t* hash_table_lengths;
|
||||
|
||||
/* from static_dict_lut.h, for slow encoder */
|
||||
const uint16_t* buckets;
|
||||
|
||||
@@ -76,12 +76,12 @@ BROTLI_INTERNAL void BrotliConvertBitDepthsToSymbols(const uint8_t* depth,
    size_t len,
    uint16_t* bits);

BROTLI_INTERNAL extern const size_t kBrotliShellGaps[6];
/* Input size optimized Shell sort. */
typedef BROTLI_BOOL (*HuffmanTreeComparator)(
    const HuffmanTree*, const HuffmanTree*);
static BROTLI_INLINE void SortHuffmanTreeItems(HuffmanTree* items,
    const size_t n, HuffmanTreeComparator comparator) {
  static const size_t gaps[] = {132, 57, 23, 10, 4, 1};
  if (n < 13) {
    /* Insertion sort. */
    size_t i;
@@ -101,7 +101,7 @@ static BROTLI_INLINE void SortHuffmanTreeItems(HuffmanTree* items,
    /* Shell sort. */
    int g = n < 57 ? 2 : 0;
    for (; g < 6; ++g) {
      size_t gap = gaps[g];
      size_t gap = kBrotliShellGaps[g];
      size_t i;
      for (i = gap; i < n; ++i) {
        size_t j = i;
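SortHuffmanTreeItems now reads its gap sequence from the shared kBrotliShellGaps table instead of a function-local copy; the values stay {132, 57, 23, 10, 4, 1}. A minimal standalone sketch of the same gapped insertion sort over plain integers (not brotli code):

static const size_t kGaps[] = {132, 57, 23, 10, 4, 1};

static void ShellSort(int* a, size_t n) {
  size_t g;
  for (g = 0; g < sizeof(kGaps) / sizeof(kGaps[0]); ++g) {
    size_t gap = kGaps[g], i;
    if (gap >= n) continue;            /* skip gaps wider than the array */
    for (i = gap; i < n; ++i) {        /* insertion sort with stride |gap| */
      int v = a[i];
      size_t j = i;
      while (j >= gap && a[j - gap] > v) { a[j] = a[j - gap]; j -= gap; }
      a[j] = v;
    }
  }
}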
@@ -19,10 +19,8 @@ extern "C" {
|
||||
#endif
|
||||
|
||||
static BROTLI_INLINE uint32_t Log2FloorNonZero(size_t n) {
|
||||
/* TODO: generalize and move to platform.h */
|
||||
#if BROTLI_GNUC_HAS_BUILTIN(__builtin_clz, 3, 4, 0) || \
|
||||
BROTLI_INTEL_VERSION_CHECK(16, 0, 0)
|
||||
return 31u ^ (uint32_t)__builtin_clz((uint32_t)n);
|
||||
#if defined(BROTLI_BSR32)
|
||||
return BROTLI_BSR32((uint32_t)n);
|
||||
#else
|
||||
uint32_t result = 0;
|
||||
while (n >>= 1) result++;
|
||||
@@ -30,110 +28,31 @@ static BROTLI_INLINE uint32_t Log2FloorNonZero(size_t n) {
|
||||
#endif
|
||||
}
|
||||
|
||||
/* A lookup table for small values of log2(int) to be used in entropy
|
||||
computation.
|
||||
#define BROTLI_LOG2_TABLE_SIZE 256
|
||||
|
||||
", ".join(["%.16ff" % x for x in [0.0]+[log2(x) for x in range(1, 256)]]) */
|
||||
static const float kLog2Table[] = {
|
||||
0.0000000000000000f, 0.0000000000000000f, 1.0000000000000000f,
|
||||
1.5849625007211563f, 2.0000000000000000f, 2.3219280948873622f,
|
||||
2.5849625007211561f, 2.8073549220576042f, 3.0000000000000000f,
|
||||
3.1699250014423126f, 3.3219280948873626f, 3.4594316186372978f,
|
||||
3.5849625007211565f, 3.7004397181410922f, 3.8073549220576037f,
|
||||
3.9068905956085187f, 4.0000000000000000f, 4.0874628412503400f,
|
||||
4.1699250014423122f, 4.2479275134435852f, 4.3219280948873626f,
|
||||
4.3923174227787607f, 4.4594316186372973f, 4.5235619560570131f,
|
||||
4.5849625007211570f, 4.6438561897747244f, 4.7004397181410926f,
|
||||
4.7548875021634691f, 4.8073549220576037f, 4.8579809951275728f,
|
||||
4.9068905956085187f, 4.9541963103868758f, 5.0000000000000000f,
|
||||
5.0443941193584534f, 5.0874628412503400f, 5.1292830169449664f,
|
||||
5.1699250014423122f, 5.2094533656289501f, 5.2479275134435852f,
|
||||
5.2854022188622487f, 5.3219280948873626f, 5.3575520046180838f,
|
||||
5.3923174227787607f, 5.4262647547020979f, 5.4594316186372973f,
|
||||
5.4918530963296748f, 5.5235619560570131f, 5.5545888516776376f,
|
||||
5.5849625007211570f, 5.6147098441152083f, 5.6438561897747244f,
|
||||
5.6724253419714961f, 5.7004397181410926f, 5.7279204545631996f,
|
||||
5.7548875021634691f, 5.7813597135246599f, 5.8073549220576046f,
|
||||
5.8328900141647422f, 5.8579809951275719f, 5.8826430493618416f,
|
||||
5.9068905956085187f, 5.9307373375628867f, 5.9541963103868758f,
|
||||
5.9772799234999168f, 6.0000000000000000f, 6.0223678130284544f,
|
||||
6.0443941193584534f, 6.0660891904577721f, 6.0874628412503400f,
|
||||
6.1085244567781700f, 6.1292830169449672f, 6.1497471195046822f,
|
||||
6.1699250014423122f, 6.1898245588800176f, 6.2094533656289510f,
|
||||
6.2288186904958804f, 6.2479275134435861f, 6.2667865406949019f,
|
||||
6.2854022188622487f, 6.3037807481771031f, 6.3219280948873617f,
|
||||
6.3398500028846252f, 6.3575520046180847f, 6.3750394313469254f,
|
||||
6.3923174227787598f, 6.4093909361377026f, 6.4262647547020979f,
|
||||
6.4429434958487288f, 6.4594316186372982f, 6.4757334309663976f,
|
||||
6.4918530963296748f, 6.5077946401986964f, 6.5235619560570131f,
|
||||
6.5391588111080319f, 6.5545888516776376f, 6.5698556083309478f,
|
||||
6.5849625007211561f, 6.5999128421871278f, 6.6147098441152092f,
|
||||
6.6293566200796095f, 6.6438561897747253f, 6.6582114827517955f,
|
||||
6.6724253419714952f, 6.6865005271832185f, 6.7004397181410917f,
|
||||
6.7142455176661224f, 6.7279204545631988f, 6.7414669864011465f,
|
||||
6.7548875021634691f, 6.7681843247769260f, 6.7813597135246599f,
|
||||
6.7944158663501062f, 6.8073549220576037f, 6.8201789624151887f,
|
||||
6.8328900141647422f, 6.8454900509443757f, 6.8579809951275719f,
|
||||
6.8703647195834048f, 6.8826430493618416f, 6.8948177633079437f,
|
||||
6.9068905956085187f, 6.9188632372745955f, 6.9307373375628867f,
|
||||
6.9425145053392399f, 6.9541963103868758f, 6.9657842846620879f,
|
||||
6.9772799234999168f, 6.9886846867721664f, 7.0000000000000000f,
|
||||
7.0112272554232540f, 7.0223678130284544f, 7.0334230015374501f,
|
||||
7.0443941193584534f, 7.0552824355011898f, 7.0660891904577721f,
|
||||
7.0768155970508317f, 7.0874628412503400f, 7.0980320829605272f,
|
||||
7.1085244567781700f, 7.1189410727235076f, 7.1292830169449664f,
|
||||
7.1395513523987937f, 7.1497471195046822f, 7.1598713367783891f,
|
||||
7.1699250014423130f, 7.1799090900149345f, 7.1898245588800176f,
|
||||
7.1996723448363644f, 7.2094533656289492f, 7.2191685204621621f,
|
||||
7.2288186904958804f, 7.2384047393250794f, 7.2479275134435861f,
|
||||
7.2573878426926521f, 7.2667865406949019f, 7.2761244052742384f,
|
||||
7.2854022188622487f, 7.2946207488916270f, 7.3037807481771031f,
|
||||
7.3128829552843557f, 7.3219280948873617f, 7.3309168781146177f,
|
||||
7.3398500028846243f, 7.3487281542310781f, 7.3575520046180847f,
|
||||
7.3663222142458151f, 7.3750394313469254f, 7.3837042924740528f,
|
||||
7.3923174227787607f, 7.4008794362821844f, 7.4093909361377026f,
|
||||
7.4178525148858991f, 7.4262647547020979f, 7.4346282276367255f,
|
||||
7.4429434958487288f, 7.4512111118323299f, 7.4594316186372973f,
|
||||
7.4676055500829976f, 7.4757334309663976f, 7.4838157772642564f,
|
||||
7.4918530963296748f, 7.4998458870832057f, 7.5077946401986964f,
|
||||
7.5156998382840436f, 7.5235619560570131f, 7.5313814605163119f,
|
||||
7.5391588111080319f, 7.5468944598876373f, 7.5545888516776376f,
|
||||
7.5622424242210728f, 7.5698556083309478f, 7.5774288280357487f,
|
||||
7.5849625007211561f, 7.5924570372680806f, 7.5999128421871278f,
|
||||
7.6073303137496113f, 7.6147098441152075f, 7.6220518194563764f,
|
||||
7.6293566200796095f, 7.6366246205436488f, 7.6438561897747244f,
|
||||
7.6510516911789290f, 7.6582114827517955f, 7.6653359171851765f,
|
||||
7.6724253419714952f, 7.6794800995054464f, 7.6865005271832185f,
|
||||
7.6934869574993252f, 7.7004397181410926f, 7.7073591320808825f,
|
||||
7.7142455176661224f, 7.7210991887071856f, 7.7279204545631996f,
|
||||
7.7347096202258392f, 7.7414669864011465f, 7.7481928495894596f,
|
||||
7.7548875021634691f, 7.7615512324444795f, 7.7681843247769260f,
|
||||
7.7747870596011737f, 7.7813597135246608f, 7.7879025593914317f,
|
||||
7.7944158663501062f, 7.8008998999203047f, 7.8073549220576037f,
|
||||
7.8137811912170374f, 7.8201789624151887f, 7.8265484872909159f,
|
||||
7.8328900141647422f, 7.8392037880969445f, 7.8454900509443757f,
|
||||
7.8517490414160571f, 7.8579809951275719f, 7.8641861446542798f,
|
||||
7.8703647195834048f, 7.8765169465650002f, 7.8826430493618425f,
|
||||
7.8887432488982601f, 7.8948177633079446f, 7.9008668079807496f,
|
||||
7.9068905956085187f, 7.9128893362299619f, 7.9188632372745955f,
|
||||
7.9248125036057813f, 7.9307373375628867f, 7.9366379390025719f,
|
||||
7.9425145053392399f, 7.9483672315846778f, 7.9541963103868758f,
|
||||
7.9600019320680806f, 7.9657842846620870f, 7.9715435539507720f,
|
||||
7.9772799234999168f, 7.9829935746943104f, 7.9886846867721664f,
|
||||
7.9943534368588578f
|
||||
};
|
||||
/* A lookup table for small values of log2(int) to be used in entropy
|
||||
computation. */
|
||||
BROTLI_INTERNAL extern const double kBrotliLog2Table[BROTLI_LOG2_TABLE_SIZE];
|
||||
|
||||
/* Visual Studio 2012 and Android API levels < 18 do not have the log2()
|
||||
* function defined, so we use log() and a multiplication instead. */
|
||||
#if !defined(BROTLI_HAVE_LOG2)
|
||||
#if ((defined(_MSC_VER) && _MSC_VER <= 1700) || \
|
||||
(defined(__ANDROID_API__) && __ANDROID_API__ < 18))
|
||||
#define BROTLI_HAVE_LOG2 0
|
||||
#else
|
||||
#define BROTLI_HAVE_LOG2 1
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#define LOG_2_INV 1.4426950408889634
|
||||
|
||||
/* Faster logarithm for small integers, with the property of log2(0) == 0. */
|
||||
static BROTLI_INLINE double FastLog2(size_t v) {
|
||||
if (v < sizeof(kLog2Table) / sizeof(kLog2Table[0])) {
|
||||
return kLog2Table[v];
|
||||
if (v < BROTLI_LOG2_TABLE_SIZE) {
|
||||
return kBrotliLog2Table[v];
|
||||
}
|
||||
#if (defined(_MSC_VER) && _MSC_VER <= 1700) || \
|
||||
(defined(__ANDROID_API__) && __ANDROID_API__ < 18)
|
||||
/* Visual Studio 2012 and Android API levels < 18 do not have the log2()
|
||||
* function defined, so we use log() and a multiplication instead. */
|
||||
#if !(BROTLI_HAVE_LOG2)
|
||||
return log((double)v) * LOG_2_INV;
|
||||
#else
|
||||
return log2((double)v);
|
||||
|
||||
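fast_log.h keeps the small-value lookup (now the shared kBrotliLog2Table) and otherwise falls back to log2(), or to log() * LOG_2_INV where log2() is unavailable (old MSVC, Android API < 18). A minimal sketch of that fallback, which follows from log2(v) = ln(v) / ln(2):

#include <math.h>

/* Sketch: portable log2 fallback matching the LOG_2_INV constant above. */
static double PortableLog2(double v) {
  return log(v) * 1.4426950408889634;  /* 1 / ln(2) */
}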
@@ -17,8 +17,7 @@ extern "C" {
#endif

/* Separate implementation for little-endian 64-bit targets, for speed. */
#if defined(__GNUC__) && defined(_LP64) && defined(BROTLI_LITTLE_ENDIAN)

#if defined(BROTLI_TZCNT64) && BROTLI_64_BITS && BROTLI_LITTLE_ENDIAN
static BROTLI_INLINE size_t FindMatchLengthWithLimit(const uint8_t* s1,
    const uint8_t* s2,
    size_t limit) {
@@ -32,7 +31,7 @@ static BROTLI_INLINE size_t FindMatchLengthWithLimit(const uint8_t* s1,
  } else {
    uint64_t x = BROTLI_UNALIGNED_LOAD64LE(s2) ^
        BROTLI_UNALIGNED_LOAD64LE(s1 + matched);
    size_t matching_bits = (size_t)__builtin_ctzll(x);
    size_t matching_bits = (size_t)BROTLI_TZCNT64(x);
    matched += matching_bits >> 3;
    return matched;
  }
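The fast path now goes through the BROTLI_TZCNT64 macro instead of calling __builtin_ctzll directly, so non-GCC compilers can also take it. The idea: XOR two 8-byte loads; the number of trailing zero bits divided by 8 is the count of leading bytes that match. A minimal sketch using the GCC builtin for concreteness (assumes the two blocks differ and a little-endian target):

#include <stdint.h>
#include <string.h>

/* Sketch: count matching leading bytes of two differing 8-byte blocks. */
static size_t MatchingBytes8(const uint8_t* s1, const uint8_t* s2) {
  uint64_t a, b;
  memcpy(&a, s1, 8);
  memcpy(&b, s2, 8);
  return (size_t)__builtin_ctzll(a ^ b) >> 3;  /* trailing zero bits -> whole bytes */
}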
@@ -27,34 +27,19 @@
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
/* Pointer to hasher data.
|
||||
*
|
||||
* Excluding initialization and destruction, hasher can be passed as
|
||||
* HasherHandle by value.
|
||||
*
|
||||
* Typically hasher data consists of 3 sections:
|
||||
* * HasherCommon structure
|
||||
* * private structured hasher data, depending on hasher type
|
||||
* * private dynamic hasher data, depending on hasher type and parameters
|
||||
*
|
||||
* Using "define" instead of "typedef", because on MSVC __restrict does not work
|
||||
* on typedef pointer types. */
|
||||
#define HasherHandle uint8_t*
|
||||
|
||||
typedef struct {
|
||||
/* Dynamically allocated area; first member for quickest access. */
|
||||
void* extra;
|
||||
|
||||
size_t dict_num_lookups;
|
||||
size_t dict_num_matches;
|
||||
|
||||
BrotliHasherParams params;
|
||||
|
||||
/* False if hasher needs to be "prepared" before use. */
|
||||
BROTLI_BOOL is_prepared_;
|
||||
|
||||
size_t dict_num_lookups;
|
||||
size_t dict_num_matches;
|
||||
} HasherCommon;
|
||||
|
||||
static BROTLI_INLINE HasherCommon* GetHasherCommon(HasherHandle handle) {
|
||||
return (HasherCommon*)handle;
|
||||
}
|
||||
|
||||
#define score_t size_t
|
||||
|
||||
static const uint32_t kCutoffTransformsCount = 10;
|
||||
@@ -149,17 +134,13 @@ static BROTLI_INLINE score_t BackwardReferencePenaltyUsingLastDistance(
|
||||
}
|
||||
|
||||
static BROTLI_INLINE BROTLI_BOOL TestStaticDictionaryItem(
|
||||
const BrotliEncoderDictionary* dictionary, size_t item,
|
||||
const BrotliEncoderDictionary* dictionary, size_t len, size_t word_idx,
|
||||
const uint8_t* data, size_t max_length, size_t max_backward,
|
||||
size_t max_distance, HasherSearchResult* out) {
|
||||
size_t len;
|
||||
size_t word_idx;
|
||||
size_t offset;
|
||||
size_t matchlen;
|
||||
size_t backward;
|
||||
score_t score;
|
||||
len = item & 0x1F;
|
||||
word_idx = item >> 5;
|
||||
offset = dictionary->words->offsets_by_length[len] + len * word_idx;
|
||||
if (len > max_length) {
|
||||
return BROTLI_FALSE;
|
||||
@@ -193,25 +174,24 @@ static BROTLI_INLINE BROTLI_BOOL TestStaticDictionaryItem(
|
||||
|
||||
static BROTLI_INLINE void SearchInStaticDictionary(
|
||||
const BrotliEncoderDictionary* dictionary,
|
||||
HasherHandle handle, const uint8_t* data, size_t max_length,
|
||||
HasherCommon* common, const uint8_t* data, size_t max_length,
|
||||
size_t max_backward, size_t max_distance,
|
||||
HasherSearchResult* out, BROTLI_BOOL shallow) {
|
||||
size_t key;
|
||||
size_t i;
|
||||
HasherCommon* self = GetHasherCommon(handle);
|
||||
if (self->dict_num_matches < (self->dict_num_lookups >> 7)) {
|
||||
if (common->dict_num_matches < (common->dict_num_lookups >> 7)) {
|
||||
return;
|
||||
}
|
||||
key = Hash14(data) << 1;
|
||||
for (i = 0; i < (shallow ? 1u : 2u); ++i, ++key) {
|
||||
size_t item = dictionary->hash_table[key];
|
||||
self->dict_num_lookups++;
|
||||
if (item != 0) {
|
||||
common->dict_num_lookups++;
|
||||
if (dictionary->hash_table_lengths[key] != 0) {
|
||||
BROTLI_BOOL item_matches = TestStaticDictionaryItem(
|
||||
dictionary, item, data,
|
||||
dictionary, dictionary->hash_table_lengths[key],
|
||||
dictionary->hash_table_words[key], data,
|
||||
max_length, max_backward, max_distance, out);
|
||||
if (item_matches) {
|
||||
self->dict_num_matches++;
|
||||
common->dict_num_matches++;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -260,37 +240,37 @@ static BROTLI_INLINE size_t BackwardMatchLengthCode(const BackwardMatch* self) {
|
||||
/* MAX_NUM_MATCHES == 64 + MAX_TREE_SEARCH_DEPTH */
|
||||
#define MAX_NUM_MATCHES_H10 128
|
||||
|
||||
/* For BUCKET_SWEEP == 1, enabling the dictionary lookup makes compression
|
||||
/* For BUCKET_SWEEP_BITS == 0, enabling the dictionary lookup makes compression
|
||||
a little faster (0.5% - 1%) and it compresses 0.15% better on small text
|
||||
and HTML inputs. */
|
||||
|
||||
#define HASHER() H2
|
||||
#define BUCKET_BITS 16
|
||||
#define BUCKET_SWEEP 1
|
||||
#define BUCKET_SWEEP_BITS 0
|
||||
#define HASH_LEN 5
|
||||
#define USE_DICTIONARY 1
|
||||
#include "./hash_longest_match_quickly_inc.h" /* NOLINT(build/include) */
|
||||
#undef BUCKET_SWEEP
|
||||
#undef BUCKET_SWEEP_BITS
|
||||
#undef USE_DICTIONARY
|
||||
#undef HASHER
|
||||
|
||||
#define HASHER() H3
|
||||
#define BUCKET_SWEEP 2
|
||||
#define BUCKET_SWEEP_BITS 1
|
||||
#define USE_DICTIONARY 0
|
||||
#include "./hash_longest_match_quickly_inc.h" /* NOLINT(build/include) */
|
||||
#undef USE_DICTIONARY
|
||||
#undef BUCKET_SWEEP
|
||||
#undef BUCKET_SWEEP_BITS
|
||||
#undef BUCKET_BITS
|
||||
#undef HASHER
|
||||
|
||||
#define HASHER() H4
|
||||
#define BUCKET_BITS 17
|
||||
#define BUCKET_SWEEP 4
|
||||
#define BUCKET_SWEEP_BITS 2
|
||||
#define USE_DICTIONARY 1
|
||||
#include "./hash_longest_match_quickly_inc.h" /* NOLINT(build/include) */
|
||||
#undef USE_DICTIONARY
|
||||
#undef HASH_LEN
|
||||
#undef BUCKET_SWEEP
|
||||
#undef BUCKET_SWEEP_BITS
|
||||
#undef BUCKET_BITS
|
||||
#undef HASHER
|
||||
|
||||
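The "quickly" hashers switch from a direct BUCKET_SWEEP count (1, 2, 4) to BUCKET_SWEEP_BITS (0, 1, 2). Presumably the sweep width is reconstructed inside hash_longest_match_quickly_inc.h as a power of two, which allows slot selection with shifts and masks; a sketch of the assumed relationship:

/* Sketch: sweep width recovered from its bit count (assumption, not verified
   against hash_longest_match_quickly_inc.h). */
#define BUCKET_SWEEP_BITS 2
#define BUCKET_SWEEP (1u << BUCKET_SWEEP_BITS)  /* == 4, as in the old H4/H54 setup */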
@@ -334,13 +314,13 @@ static BROTLI_INLINE size_t BackwardMatchLengthCode(const BackwardMatch* self) {
|
||||
|
||||
#define HASHER() H54
|
||||
#define BUCKET_BITS 20
|
||||
#define BUCKET_SWEEP 4
|
||||
#define BUCKET_SWEEP_BITS 2
|
||||
#define HASH_LEN 7
|
||||
#define USE_DICTIONARY 0
|
||||
#include "./hash_longest_match_quickly_inc.h" /* NOLINT(build/include) */
|
||||
#undef USE_DICTIONARY
|
||||
#undef HASH_LEN
|
||||
#undef BUCKET_SWEEP
|
||||
#undef BUCKET_SWEEP_BITS
|
||||
#undef BUCKET_BITS
|
||||
#undef HASHER
|
||||
|
||||
@@ -393,97 +373,107 @@ static BROTLI_INLINE size_t BackwardMatchLengthCode(const BackwardMatch* self) {
|
||||
#undef CAT
|
||||
#undef EXPAND_CAT
|
||||
|
||||
#define FOR_GENERIC_HASHERS(H) H(2) H(3) H(4) H(5) H(6) H(40) H(41) H(42) H(54)\
|
||||
H(35) H(55) H(65)
|
||||
#define FOR_SIMPLE_HASHERS(H) H(2) H(3) H(4) H(5) H(6) H(40) H(41) H(42) H(54)
|
||||
#define FOR_COMPOSITE_HASHERS(H) H(35) H(55) H(65)
|
||||
#define FOR_GENERIC_HASHERS(H) FOR_SIMPLE_HASHERS(H) FOR_COMPOSITE_HASHERS(H)
|
||||
#define FOR_ALL_HASHERS(H) FOR_GENERIC_HASHERS(H) H(10)
|
||||
|
||||
static BROTLI_INLINE void DestroyHasher(
|
||||
MemoryManager* m, HasherHandle* handle) {
|
||||
if (*handle == NULL) return;
|
||||
BROTLI_FREE(m, *handle);
|
||||
typedef struct {
|
||||
HasherCommon common;
|
||||
|
||||
union {
|
||||
#define MEMBER_(N) \
|
||||
H ## N _H ## N;
|
||||
FOR_ALL_HASHERS(MEMBER_)
|
||||
#undef MEMBER_
|
||||
} privat;
|
||||
} Hasher;
|
||||
|
||||
/* MUST be invoked before any other method. */
|
||||
static BROTLI_INLINE void HasherInit(Hasher* hasher) {
|
||||
hasher->common.extra = NULL;
|
||||
}
|
||||
|
||||
static BROTLI_INLINE void HasherReset(HasherHandle handle) {
|
||||
if (handle == NULL) return;
|
||||
GetHasherCommon(handle)->is_prepared_ = BROTLI_FALSE;
|
||||
static BROTLI_INLINE void DestroyHasher(MemoryManager* m, Hasher* hasher) {
|
||||
if (hasher->common.extra == NULL) return;
|
||||
BROTLI_FREE(m, hasher->common.extra);
|
||||
}
|
||||
|
||||
static BROTLI_INLINE void HasherReset(Hasher* hasher) {
|
||||
hasher->common.is_prepared_ = BROTLI_FALSE;
|
||||
}
|
||||
|
||||
static BROTLI_INLINE size_t HasherSize(const BrotliEncoderParams* params,
|
||||
BROTLI_BOOL one_shot, const size_t input_size) {
|
||||
size_t result = sizeof(HasherCommon);
|
||||
switch (params->hasher.type) {
|
||||
#define SIZE_(N) \
|
||||
case N: \
|
||||
result += HashMemAllocInBytesH ## N(params, one_shot, input_size); \
|
||||
break;
|
||||
#define SIZE_(N) \
|
||||
case N: \
|
||||
return HashMemAllocInBytesH ## N(params, one_shot, input_size);
|
||||
FOR_ALL_HASHERS(SIZE_)
|
||||
#undef SIZE_
|
||||
default:
|
||||
break;
|
||||
}
|
||||
return result;
|
||||
return 0; /* Default case. */
|
||||
}
|
||||
|
||||
static BROTLI_INLINE void HasherSetup(MemoryManager* m, HasherHandle* handle,
|
||||
static BROTLI_INLINE void HasherSetup(MemoryManager* m, Hasher* hasher,
|
||||
BrotliEncoderParams* params, const uint8_t* data, size_t position,
|
||||
size_t input_size, BROTLI_BOOL is_last) {
|
||||
HasherHandle self = NULL;
|
||||
HasherCommon* common = NULL;
|
||||
BROTLI_BOOL one_shot = (position == 0 && is_last);
|
||||
if (*handle == NULL) {
|
||||
if (hasher->common.extra == NULL) {
|
||||
size_t alloc_size;
|
||||
ChooseHasher(params, ¶ms->hasher);
|
||||
alloc_size = HasherSize(params, one_shot, input_size);
|
||||
self = BROTLI_ALLOC(m, uint8_t, alloc_size);
|
||||
if (BROTLI_IS_OOM(m)) return;
|
||||
*handle = self;
|
||||
common = GetHasherCommon(self);
|
||||
common->params = params->hasher;
|
||||
switch (common->params.type) {
|
||||
#define INITIALIZE_(N) \
|
||||
case N: \
|
||||
InitializeH ## N(*handle, params); \
|
||||
hasher->common.extra = BROTLI_ALLOC(m, uint8_t, alloc_size);
|
||||
if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(hasher->common.extra)) return;
|
||||
hasher->common.params = params->hasher;
|
||||
switch (hasher->common.params.type) {
|
||||
#define INITIALIZE_(N) \
|
||||
case N: \
|
||||
InitializeH ## N(&hasher->common, \
|
||||
&hasher->privat._H ## N, params); \
|
||||
break;
|
||||
FOR_ALL_HASHERS(INITIALIZE_);
|
||||
#undef INITIALIZE_
|
||||
default:
|
||||
break;
|
||||
}
|
||||
HasherReset(*handle);
|
||||
HasherReset(hasher);
|
||||
}
|
||||
|
||||
self = *handle;
|
||||
common = GetHasherCommon(self);
|
||||
if (!common->is_prepared_) {
|
||||
switch (common->params.type) {
|
||||
#define PREPARE_(N) \
|
||||
case N: \
|
||||
PrepareH ## N(self, one_shot, input_size, data); \
|
||||
if (!hasher->common.is_prepared_) {
|
||||
switch (hasher->common.params.type) {
|
||||
#define PREPARE_(N) \
|
||||
case N: \
|
||||
PrepareH ## N( \
|
||||
&hasher->privat._H ## N, \
|
||||
one_shot, input_size, data); \
|
||||
break;
|
||||
FOR_ALL_HASHERS(PREPARE_)
|
||||
#undef PREPARE_
|
||||
default: break;
|
||||
}
|
||||
if (position == 0) {
|
||||
common->dict_num_lookups = 0;
|
||||
common->dict_num_matches = 0;
|
||||
hasher->common.dict_num_lookups = 0;
|
||||
hasher->common.dict_num_matches = 0;
|
||||
}
|
||||
common->is_prepared_ = BROTLI_TRUE;
|
||||
hasher->common.is_prepared_ = BROTLI_TRUE;
|
||||
}
|
||||
}
|
||||
|
||||
static BROTLI_INLINE void InitOrStitchToPreviousBlock(
|
||||
MemoryManager* m, HasherHandle* handle, const uint8_t* data, size_t mask,
|
||||
MemoryManager* m, Hasher* hasher, const uint8_t* data, size_t mask,
|
||||
BrotliEncoderParams* params, size_t position, size_t input_size,
|
||||
BROTLI_BOOL is_last) {
|
||||
HasherHandle self;
|
||||
HasherSetup(m, handle, params, data, position, input_size, is_last);
|
||||
HasherSetup(m, hasher, params, data, position, input_size, is_last);
|
||||
if (BROTLI_IS_OOM(m)) return;
|
||||
self = *handle;
|
||||
switch (GetHasherCommon(self)->params.type) {
|
||||
#define INIT_(N) \
|
||||
case N: \
|
||||
StitchToPreviousBlockH ## N(self, input_size, position, data, mask); \
|
||||
switch (hasher->common.params.type) {
|
||||
#define INIT_(N) \
|
||||
case N: \
|
||||
StitchToPreviousBlockH ## N( \
|
||||
&hasher->privat._H ## N, \
|
||||
input_size, position, data, mask); \
|
||||
break;
|
||||
FOR_ALL_HASHERS(INIT_)
|
||||
#undef INIT_
|
||||
|
||||
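HasherSize, HasherSetup, and InitOrStitchToPreviousBlock all dispatch on params->hasher.type through the FOR_ALL_HASHERS X-macro, which expands one case per hasher id. A self-contained toy sketch of the same pattern (not brotli code):

#define FOR_ALL_WIDGETS(X) X(2) X(3) X(10)

/* Generate one sizing function per widget id. */
#define DECLARE_(N) static int SizeOfWidget##N(void) { return N * 16; }
FOR_ALL_WIDGETS(DECLARE_)
#undef DECLARE_

static int WidgetSize(int type) {
  switch (type) {
#define SIZE_(N) case N: return SizeOfWidget##N();
    FOR_ALL_WIDGETS(SIZE_)
#undef SIZE_
    default: return 0;  /* unknown type */
  }
}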
@@ -28,20 +28,25 @@ static BROTLI_INLINE size_t FN(StoreLookahead)(void) {
|
||||
}
|
||||
|
||||
typedef struct HashComposite {
|
||||
HasherHandle ha;
|
||||
HasherHandle hb;
|
||||
HASHER_A ha;
|
||||
HASHER_B hb;
|
||||
HasherCommon hb_common;
|
||||
|
||||
/* Shortcuts. */
|
||||
void* extra;
|
||||
HasherCommon* common;
|
||||
|
||||
BROTLI_BOOL fresh;
|
||||
const BrotliEncoderParams* params;
|
||||
} HashComposite;
|
||||
|
||||
static BROTLI_INLINE HashComposite* FN(Self)(HasherHandle handle) {
|
||||
return (HashComposite*)&(GetHasherCommon(handle)[1]);
|
||||
}
|
||||
static void FN(Initialize)(HasherCommon* common,
|
||||
HashComposite* BROTLI_RESTRICT self, const BrotliEncoderParams* params) {
|
||||
self->common = common;
|
||||
self->extra = common->extra;
|
||||
|
||||
static void FN(Initialize)(
|
||||
HasherHandle handle, const BrotliEncoderParams* params) {
|
||||
HashComposite* self = FN(Self)(handle);
|
||||
self->ha = 0;
|
||||
self->hb = 0;
|
||||
self->hb_common = *self->common;
|
||||
self->fresh = BROTLI_TRUE;
|
||||
self->params = params;
|
||||
/* TODO: Initialize of the hashers is defered to Prepare (and params
|
||||
remembered here) because we don't get the one_shot and input_size params
|
||||
@@ -49,87 +54,71 @@ static void FN(Initialize)(
|
||||
those params to all hashers FN(Initialize) */
|
||||
}
|
||||
|
||||
static void FN(Prepare)(HasherHandle handle, BROTLI_BOOL one_shot,
|
||||
size_t input_size, const uint8_t* data) {
|
||||
HashComposite* self = FN(Self)(handle);
|
||||
if (!self->ha) {
|
||||
HasherCommon* common_a;
|
||||
HasherCommon* common_b;
|
||||
static void FN(Prepare)(
|
||||
HashComposite* BROTLI_RESTRICT self, BROTLI_BOOL one_shot,
|
||||
size_t input_size, const uint8_t* BROTLI_RESTRICT data) {
|
||||
if (self->fresh) {
|
||||
self->fresh = BROTLI_FALSE;
|
||||
self->hb_common.extra = (uint8_t*)self->extra +
|
||||
FN_A(HashMemAllocInBytes)(self->params, one_shot, input_size);
|
||||
|
||||
self->ha = handle + sizeof(HasherCommon) + sizeof(HashComposite);
|
||||
common_a = (HasherCommon*)self->ha;
|
||||
common_a->params = self->params->hasher;
|
||||
common_a->is_prepared_ = BROTLI_FALSE;
|
||||
common_a->dict_num_lookups = 0;
|
||||
common_a->dict_num_matches = 0;
|
||||
FN_A(Initialize)(self->ha, self->params);
|
||||
|
||||
self->hb = self->ha + sizeof(HasherCommon) + FN_A(HashMemAllocInBytes)(
|
||||
self->params, one_shot, input_size);
|
||||
common_b = (HasherCommon*)self->hb;
|
||||
common_b->params = self->params->hasher;
|
||||
common_b->is_prepared_ = BROTLI_FALSE;
|
||||
common_b->dict_num_lookups = 0;
|
||||
common_b->dict_num_matches = 0;
|
||||
FN_B(Initialize)(self->hb, self->params);
|
||||
FN_A(Initialize)(self->common, &self->ha, self->params);
|
||||
FN_B(Initialize)(&self->hb_common, &self->hb, self->params);
|
||||
}
|
||||
FN_A(Prepare)(self->ha, one_shot, input_size, data);
|
||||
FN_B(Prepare)(self->hb, one_shot, input_size, data);
|
||||
FN_A(Prepare)(&self->ha, one_shot, input_size, data);
|
||||
FN_B(Prepare)(&self->hb, one_shot, input_size, data);
|
||||
}
|
||||
|
||||
static BROTLI_INLINE size_t FN(HashMemAllocInBytes)(
|
||||
const BrotliEncoderParams* params, BROTLI_BOOL one_shot,
|
||||
size_t input_size) {
|
||||
return sizeof(HashComposite) + 2 * sizeof(HasherCommon) +
|
||||
FN_A(HashMemAllocInBytes)(params, one_shot, input_size) +
|
||||
return FN_A(HashMemAllocInBytes)(params, one_shot, input_size) +
|
||||
FN_B(HashMemAllocInBytes)(params, one_shot, input_size);
|
||||
}
|
||||
|
||||
static BROTLI_INLINE void FN(Store)(HasherHandle BROTLI_RESTRICT handle,
|
||||
static BROTLI_INLINE void FN(Store)(HashComposite* BROTLI_RESTRICT self,
|
||||
const uint8_t* BROTLI_RESTRICT data, const size_t mask, const size_t ix) {
|
||||
HashComposite* self = FN(Self)(handle);
|
||||
FN_A(Store)(self->ha, data, mask, ix);
|
||||
FN_B(Store)(self->hb, data, mask, ix);
|
||||
FN_A(Store)(&self->ha, data, mask, ix);
|
||||
FN_B(Store)(&self->hb, data, mask, ix);
|
||||
}
|
||||
|
||||
static BROTLI_INLINE void FN(StoreRange)(HasherHandle handle,
|
||||
const uint8_t* data, const size_t mask, const size_t ix_start,
|
||||
static BROTLI_INLINE void FN(StoreRange)(
|
||||
HashComposite* BROTLI_RESTRICT self, const uint8_t* BROTLI_RESTRICT data,
|
||||
const size_t mask, const size_t ix_start,
|
||||
const size_t ix_end) {
|
||||
HashComposite* self = FN(Self)(handle);
|
||||
FN_A(StoreRange)(self->ha, data, mask, ix_start, ix_end);
|
||||
FN_B(StoreRange)(self->hb, data, mask, ix_start, ix_end);
|
||||
FN_A(StoreRange)(&self->ha, data, mask, ix_start, ix_end);
|
||||
FN_B(StoreRange)(&self->hb, data, mask, ix_start, ix_end);
|
||||
}
|
||||
|
||||
static BROTLI_INLINE void FN(StitchToPreviousBlock)(HasherHandle handle,
|
||||
static BROTLI_INLINE void FN(StitchToPreviousBlock)(
|
||||
HashComposite* BROTLI_RESTRICT self,
|
||||
size_t num_bytes, size_t position, const uint8_t* ringbuffer,
|
||||
size_t ring_buffer_mask) {
|
||||
HashComposite* self = FN(Self)(handle);
|
||||
FN_A(StitchToPreviousBlock)(self->ha, num_bytes, position, ringbuffer,
|
||||
ring_buffer_mask);
|
||||
FN_B(StitchToPreviousBlock)(self->hb, num_bytes, position, ringbuffer,
|
||||
ring_buffer_mask);
|
||||
FN_A(StitchToPreviousBlock)(&self->ha, num_bytes, position,
|
||||
ringbuffer, ring_buffer_mask);
|
||||
FN_B(StitchToPreviousBlock)(&self->hb, num_bytes, position,
|
||||
ringbuffer, ring_buffer_mask);
|
||||
}
|
||||
|
||||
static BROTLI_INLINE void FN(PrepareDistanceCache)(
|
||||
HasherHandle handle, int* BROTLI_RESTRICT distance_cache) {
|
||||
HashComposite* self = FN(Self)(handle);
|
||||
FN_A(PrepareDistanceCache)(self->ha, distance_cache);
|
||||
FN_B(PrepareDistanceCache)(self->hb, distance_cache);
|
||||
HashComposite* BROTLI_RESTRICT self, int* BROTLI_RESTRICT distance_cache) {
|
||||
FN_A(PrepareDistanceCache)(&self->ha, distance_cache);
|
||||
FN_B(PrepareDistanceCache)(&self->hb, distance_cache);
|
||||
}
|
||||
|
||||
static BROTLI_INLINE void FN(FindLongestMatch)(HasherHandle handle,
|
||||
static BROTLI_INLINE void FN(FindLongestMatch)(
|
||||
HashComposite* BROTLI_RESTRICT self,
|
||||
const BrotliEncoderDictionary* dictionary,
|
||||
const uint8_t* BROTLI_RESTRICT data, const size_t ring_buffer_mask,
|
||||
const int* BROTLI_RESTRICT distance_cache, const size_t cur_ix,
|
||||
const size_t max_length, const size_t max_backward,
|
||||
const size_t gap, const size_t max_distance,
|
||||
const size_t dictionary_distance, const size_t max_distance,
|
||||
HasherSearchResult* BROTLI_RESTRICT out) {
|
||||
HashComposite* self = FN(Self)(handle);
|
||||
FN_A(FindLongestMatch)(self->ha, dictionary, data, ring_buffer_mask,
|
||||
distance_cache, cur_ix, max_length, max_backward, gap,
|
||||
FN_A(FindLongestMatch)(&self->ha, dictionary, data, ring_buffer_mask,
|
||||
distance_cache, cur_ix, max_length, max_backward, dictionary_distance,
|
||||
max_distance, out);
|
||||
FN_B(FindLongestMatch)(self->hb, dictionary, data, ring_buffer_mask,
|
||||
distance_cache, cur_ix, max_length, max_backward, gap,
|
||||
FN_B(FindLongestMatch)(&self->hb, dictionary, data, ring_buffer_mask,
|
||||
distance_cache, cur_ix, max_length, max_backward, dictionary_distance,
|
||||
max_distance, out);
|
||||
}
|
||||
|
||||
|
||||
@@ -28,7 +28,7 @@ static BROTLI_INLINE size_t FN(HashTypeLength)(void) { return 4; }
|
||||
static BROTLI_INLINE size_t FN(StoreLookahead)(void) { return 4; }
|
||||
|
||||
/* HashBytes is the function that chooses the bucket to place the address in.*/
|
||||
static BROTLI_INLINE size_t FN(HashBytes)(const uint8_t* data) {
|
||||
static BROTLI_INLINE size_t FN(HashBytes)(const uint8_t* BROTLI_RESTRICT data) {
|
||||
const uint32_t h = BROTLI_UNALIGNED_LOAD32LE(data) * kHashMul32;
|
||||
/* The higher bits contain more mixture from the multiplication,
|
||||
so we take our results from there. */
|
||||
@@ -45,28 +45,56 @@ typedef struct FN(Bank) {
|
||||
} FN(Bank);
|
||||
|
||||
typedef struct HashForgetfulChain {
|
||||
uint32_t addr[BUCKET_SIZE];
|
||||
uint16_t head[BUCKET_SIZE];
|
||||
/* Truncated hash used for quick rejection of "distance cache" candidates. */
|
||||
uint8_t tiny_hash[65536];
|
||||
FN(Bank) banks[NUM_BANKS];
|
||||
uint16_t free_slot_idx[NUM_BANKS];
|
||||
uint16_t free_slot_idx[NUM_BANKS]; /* Up to 1KiB. Move to dynamic? */
|
||||
size_t max_hops;
|
||||
|
||||
/* Shortcuts. */
|
||||
void* extra;
|
||||
HasherCommon* common;
|
||||
|
||||
/* --- Dynamic size members --- */
|
||||
|
||||
/* uint32_t addr[BUCKET_SIZE]; */
|
||||
|
||||
/* uint16_t head[BUCKET_SIZE]; */
|
||||
|
||||
/* Truncated hash used for quick rejection of "distance cache" candidates. */
|
||||
/* uint8_t tiny_hash[65536];*/
|
||||
|
||||
/* FN(Bank) banks[NUM_BANKS]; */
|
||||
} HashForgetfulChain;
|
||||
|
||||
static BROTLI_INLINE HashForgetfulChain* FN(Self)(HasherHandle handle) {
|
||||
return (HashForgetfulChain*)&(GetHasherCommon(handle)[1]);
|
||||
static uint32_t* FN(Addr)(void* extra) {
|
||||
return (uint32_t*)extra;
|
||||
}
|
||||
|
||||
static uint16_t* FN(Head)(void* extra) {
|
||||
return (uint16_t*)(&FN(Addr)(extra)[BUCKET_SIZE]);
|
||||
}
|
||||
|
||||
static uint8_t* FN(TinyHash)(void* extra) {
|
||||
return (uint8_t*)(&FN(Head)(extra)[BUCKET_SIZE]);
|
||||
}
|
||||
|
||||
static FN(Bank)* FN(Banks)(void* extra) {
|
||||
return (FN(Bank)*)(&FN(TinyHash)(extra)[65536]);
|
||||
}
|
||||
|
||||
static void FN(Initialize)(
|
||||
HasherHandle handle, const BrotliEncoderParams* params) {
|
||||
FN(Self)(handle)->max_hops =
|
||||
(params->quality > 6 ? 7u : 8u) << (params->quality - 4);
|
||||
HasherCommon* common, HashForgetfulChain* BROTLI_RESTRICT self,
|
||||
const BrotliEncoderParams* params) {
|
||||
self->common = common;
|
||||
self->extra = common->extra;
|
||||
|
||||
self->max_hops = (params->quality > 6 ? 7u : 8u) << (params->quality - 4);
|
||||
}
|
||||
|
||||
static void FN(Prepare)(HasherHandle handle, BROTLI_BOOL one_shot,
|
||||
size_t input_size, const uint8_t* data) {
|
||||
HashForgetfulChain* self = FN(Self)(handle);
|
||||
static void FN(Prepare)(
|
||||
HashForgetfulChain* BROTLI_RESTRICT self, BROTLI_BOOL one_shot,
|
||||
size_t input_size, const uint8_t* BROTLI_RESTRICT data) {
|
||||
uint32_t* BROTLI_RESTRICT addr = FN(Addr)(self->extra);
|
||||
uint16_t* BROTLI_RESTRICT head = FN(Head)(self->extra);
|
||||
uint8_t* BROTLI_RESTRICT tiny_hash = FN(TinyHash)(self->extra);
|
||||
/* Partial preparation is 100 times slower (per socket). */
|
||||
size_t partial_prepare_threshold = BUCKET_SIZE >> 6;
|
||||
if (one_shot && input_size <= partial_prepare_threshold) {
|
||||
@@ -74,17 +102,17 @@ static void FN(Prepare)(HasherHandle handle, BROTLI_BOOL one_shot,
|
||||
for (i = 0; i < input_size; ++i) {
|
||||
size_t bucket = FN(HashBytes)(&data[i]);
|
||||
/* See InitEmpty comment. */
|
||||
self->addr[bucket] = 0xCCCCCCCC;
|
||||
self->head[bucket] = 0xCCCC;
|
||||
addr[bucket] = 0xCCCCCCCC;
|
||||
head[bucket] = 0xCCCC;
|
||||
}
|
||||
} else {
|
||||
/* Fill |addr| array with 0xCCCCCCCC value. Because of wrapping, position
|
||||
processed by hasher never reaches 3GB + 64M; this makes all new chains
|
||||
to be terminated after the first node. */
|
||||
memset(self->addr, 0xCC, sizeof(self->addr));
|
||||
memset(self->head, 0, sizeof(self->head));
|
||||
memset(addr, 0xCC, sizeof(uint32_t) * BUCKET_SIZE);
|
||||
memset(head, 0, sizeof(uint16_t) * BUCKET_SIZE);
|
||||
}
|
||||
memset(self->tiny_hash, 0, sizeof(self->tiny_hash));
|
||||
memset(tiny_hash, 0, sizeof(uint8_t) * 65536);
|
||||
memset(self->free_slot_idx, 0, sizeof(self->free_slot_idx));
|
||||
}
|
||||
|
||||
@@ -94,51 +122,58 @@ static BROTLI_INLINE size_t FN(HashMemAllocInBytes)(
|
||||
BROTLI_UNUSED(params);
|
||||
BROTLI_UNUSED(one_shot);
|
||||
BROTLI_UNUSED(input_size);
|
||||
return sizeof(HashForgetfulChain);
|
||||
return sizeof(uint32_t) * BUCKET_SIZE + sizeof(uint16_t) * BUCKET_SIZE +
|
||||
sizeof(uint8_t) * 65536 + sizeof(FN(Bank)) * NUM_BANKS;
|
||||
}
|
||||
|
||||
/* Look at 4 bytes at &data[ix & mask]. Compute a hash from these, and prepend
|
||||
node to corresponding chain; also update tiny_hash for current position. */
|
||||
static BROTLI_INLINE void FN(Store)(HasherHandle BROTLI_RESTRICT handle,
|
||||
static BROTLI_INLINE void FN(Store)(HashForgetfulChain* BROTLI_RESTRICT self,
|
||||
const uint8_t* BROTLI_RESTRICT data, const size_t mask, const size_t ix) {
|
||||
HashForgetfulChain* self = FN(Self)(handle);
|
||||
uint32_t* BROTLI_RESTRICT addr = FN(Addr)(self->extra);
|
||||
uint16_t* BROTLI_RESTRICT head = FN(Head)(self->extra);
|
||||
uint8_t* BROTLI_RESTRICT tiny_hash = FN(TinyHash)(self->extra);
|
||||
FN(Bank)* BROTLI_RESTRICT banks = FN(Banks)(self->extra);
|
||||
const size_t key = FN(HashBytes)(&data[ix & mask]);
|
||||
const size_t bank = key & (NUM_BANKS - 1);
|
||||
const size_t idx = self->free_slot_idx[bank]++ & (BANK_SIZE - 1);
|
||||
size_t delta = ix - self->addr[key];
|
||||
self->tiny_hash[(uint16_t)ix] = (uint8_t)key;
|
||||
size_t delta = ix - addr[key];
|
||||
tiny_hash[(uint16_t)ix] = (uint8_t)key;
|
||||
if (delta > 0xFFFF) delta = CAPPED_CHAINS ? 0 : 0xFFFF;
|
||||
self->banks[bank].slots[idx].delta = (uint16_t)delta;
|
||||
self->banks[bank].slots[idx].next = self->head[key];
|
||||
self->addr[key] = (uint32_t)ix;
|
||||
self->head[key] = (uint16_t)idx;
|
||||
banks[bank].slots[idx].delta = (uint16_t)delta;
|
||||
banks[bank].slots[idx].next = head[key];
|
||||
addr[key] = (uint32_t)ix;
|
||||
head[key] = (uint16_t)idx;
|
||||
}
|
||||
|
||||
static BROTLI_INLINE void FN(StoreRange)(HasherHandle handle,
|
||||
const uint8_t* data, const size_t mask, const size_t ix_start,
|
||||
const size_t ix_end) {
|
||||
static BROTLI_INLINE void FN(StoreRange)(
|
||||
HashForgetfulChain* BROTLI_RESTRICT self,
|
||||
const uint8_t* BROTLI_RESTRICT data, const size_t mask,
|
||||
const size_t ix_start, const size_t ix_end) {
|
||||
size_t i;
|
||||
for (i = ix_start; i < ix_end; ++i) {
|
||||
FN(Store)(handle, data, mask, i);
|
||||
FN(Store)(self, data, mask, i);
|
||||
}
|
||||
}
|
||||
|
||||
static BROTLI_INLINE void FN(StitchToPreviousBlock)(HasherHandle handle,
|
||||
static BROTLI_INLINE void FN(StitchToPreviousBlock)(
|
||||
HashForgetfulChain* BROTLI_RESTRICT self,
|
||||
size_t num_bytes, size_t position, const uint8_t* ringbuffer,
|
||||
size_t ring_buffer_mask) {
|
||||
if (num_bytes >= FN(HashTypeLength)() - 1 && position >= 3) {
|
||||
/* Prepare the hashes for three last bytes of the last write.
|
||||
These could not be calculated before, since they require knowledge
|
||||
of both the previous and the current block. */
|
||||
FN(Store)(handle, ringbuffer, ring_buffer_mask, position - 3);
|
||||
FN(Store)(handle, ringbuffer, ring_buffer_mask, position - 2);
|
||||
FN(Store)(handle, ringbuffer, ring_buffer_mask, position - 1);
|
||||
FN(Store)(self, ringbuffer, ring_buffer_mask, position - 3);
|
||||
FN(Store)(self, ringbuffer, ring_buffer_mask, position - 2);
|
||||
FN(Store)(self, ringbuffer, ring_buffer_mask, position - 1);
|
||||
}
|
||||
}
|
||||
|
||||
static BROTLI_INLINE void FN(PrepareDistanceCache)(
|
||||
HasherHandle handle, int* BROTLI_RESTRICT distance_cache) {
|
||||
BROTLI_UNUSED(handle);
|
||||
HashForgetfulChain* BROTLI_RESTRICT self,
|
||||
int* BROTLI_RESTRICT distance_cache) {
|
||||
BROTLI_UNUSED(self);
|
||||
PrepareDistanceCache(distance_cache, NUM_LAST_DISTANCES_TO_CHECK);
|
||||
}
|
||||
|
||||
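HashForgetfulChain's large arrays (addr, head, tiny_hash, the banks) move out of the struct into the dynamically allocated common->extra block; the FN(Addr)/FN(Head)/FN(TinyHash)/FN(Banks) helpers above carve that single allocation into consecutive regions, and FN(HashMemAllocInBytes) returns the combined size. A minimal standalone sketch of the same carving scheme (not brotli code; sizes illustrative):

#include <stdint.h>
#include <stdlib.h>

#define N_SLOTS 1024
/* One allocation split into two regions, in the style of FN(Addr)/FN(Head). */
static uint32_t* Addr(void* extra) { return (uint32_t*)extra; }
static uint16_t* Head(void* extra) { return (uint16_t*)&Addr(extra)[N_SLOTS]; }

static void* AllocRegions(void) {
  /* uint32_t addr[N_SLOTS] immediately followed by uint16_t head[N_SLOTS] */
  return malloc(sizeof(uint32_t) * N_SLOTS + sizeof(uint16_t) * N_SLOTS);
}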
@@ -153,14 +188,18 @@ static BROTLI_INLINE void FN(PrepareDistanceCache)(
|
||||
Does not look for matches further away than max_backward.
|
||||
Writes the best match into |out|.
|
||||
|out|->score is updated only if a better match is found. */
|
||||
static BROTLI_INLINE void FN(FindLongestMatch)(HasherHandle handle,
|
||||
static BROTLI_INLINE void FN(FindLongestMatch)(
|
||||
HashForgetfulChain* BROTLI_RESTRICT self,
|
||||
const BrotliEncoderDictionary* dictionary,
|
||||
const uint8_t* BROTLI_RESTRICT data, const size_t ring_buffer_mask,
|
||||
const int* BROTLI_RESTRICT distance_cache,
|
||||
const size_t cur_ix, const size_t max_length, const size_t max_backward,
|
||||
const size_t gap, const size_t max_distance,
|
||||
const size_t dictionary_distance, const size_t max_distance,
|
||||
HasherSearchResult* BROTLI_RESTRICT out) {
|
||||
HashForgetfulChain* self = FN(Self)(handle);
|
||||
uint32_t* BROTLI_RESTRICT addr = FN(Addr)(self->extra);
|
||||
uint16_t* BROTLI_RESTRICT head = FN(Head)(self->extra);
|
||||
uint8_t* BROTLI_RESTRICT tiny_hashes = FN(TinyHash)(self->extra);
|
||||
FN(Bank)* BROTLI_RESTRICT banks = FN(Banks)(self->extra);
|
||||
const size_t cur_ix_masked = cur_ix & ring_buffer_mask;
|
||||
/* Don't accept a short copy from far away. */
|
||||
score_t min_score = out->score;
|
||||
@@ -176,7 +215,7 @@ static BROTLI_INLINE void FN(FindLongestMatch)(HasherHandle handle,
|
||||
const size_t backward = (size_t)distance_cache[i];
|
||||
size_t prev_ix = (cur_ix - backward);
|
||||
/* For distance code 0 we want to consider 2-byte matches. */
|
||||
if (i > 0 && self->tiny_hash[(uint16_t)prev_ix] != tiny_hash) continue;
|
||||
if (i > 0 && tiny_hashes[(uint16_t)prev_ix] != tiny_hash) continue;
|
||||
if (prev_ix >= cur_ix || backward > max_backward) {
|
||||
continue;
|
||||
}
|
||||
@@ -204,16 +243,16 @@ static BROTLI_INLINE void FN(FindLongestMatch)(HasherHandle handle,
|
||||
const size_t bank = key & (NUM_BANKS - 1);
|
||||
size_t backward = 0;
|
||||
size_t hops = self->max_hops;
|
||||
size_t delta = cur_ix - self->addr[key];
|
||||
size_t slot = self->head[key];
|
||||
size_t delta = cur_ix - addr[key];
|
||||
size_t slot = head[key];
|
||||
while (hops--) {
|
||||
size_t prev_ix;
|
||||
size_t last = slot;
|
||||
backward += delta;
|
||||
if (backward > max_backward || (CAPPED_CHAINS && !delta)) break;
|
||||
prev_ix = (cur_ix - backward) & ring_buffer_mask;
|
||||
slot = self->banks[bank].slots[last].next;
|
||||
delta = self->banks[bank].slots[last].delta;
|
||||
slot = banks[bank].slots[last].next;
|
||||
delta = banks[bank].slots[last].delta;
|
||||
if (cur_ix_masked + best_len > ring_buffer_mask ||
|
||||
prev_ix + best_len > ring_buffer_mask ||
|
||||
data[cur_ix_masked + best_len] != data[prev_ix + best_len]) {
|
||||
@@ -238,11 +277,11 @@ static BROTLI_INLINE void FN(FindLongestMatch)(HasherHandle handle,
|
||||
}
|
||||
}
|
||||
}
|
||||
FN(Store)(handle, data, ring_buffer_mask, cur_ix);
|
||||
FN(Store)(self, data, ring_buffer_mask, cur_ix);
|
||||
}
|
||||
if (out->score == min_score) {
|
||||
SearchInStaticDictionary(dictionary,
|
||||
handle, &data[cur_ix_masked], max_length, max_backward + gap,
|
||||
self->common, &data[cur_ix_masked], max_length, dictionary_distance,
|
||||
max_distance, out, BROTLI_FALSE);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,7 +20,7 @@ static BROTLI_INLINE size_t FN(HashTypeLength)(void) { return 8; }
static BROTLI_INLINE size_t FN(StoreLookahead)(void) { return 8; }

/* HashBytes is the function that chooses the bucket to place the address in. */
static BROTLI_INLINE uint32_t FN(HashBytes)(const uint8_t* data,
static BROTLI_INLINE uint32_t FN(HashBytes)(const uint8_t* BROTLI_RESTRICT data,
const uint64_t mask,
const int shift) {
const uint64_t h = (BROTLI_UNALIGNED_LOAD64LE(data) & mask) * kHashMul64Long;
@@ -42,43 +42,43 @@ typedef struct HashLongestMatch {
/* Mask for accessing entries in a block (in a ring-buffer manner). */
uint32_t block_mask_;

int block_bits_;
int num_last_distances_to_check_;

/* Shortcuts. */
HasherCommon* common_;

/* --- Dynamic size members --- */

/* Number of entries in a particular bucket. */
/* uint16_t num[bucket_size]; */
uint16_t* num_; /* uint16_t[bucket_size]; */

/* Buckets containing block_size_ of backward references. */
/* uint32_t* buckets[bucket_size * block_size]; */
uint32_t* buckets_; /* uint32_t[bucket_size * block_size]; */
} HashLongestMatch;

static BROTLI_INLINE HashLongestMatch* FN(Self)(HasherHandle handle) {
return (HashLongestMatch*)&(GetHasherCommon(handle)[1]);
}

static BROTLI_INLINE uint16_t* FN(Num)(HashLongestMatch* self) {
return (uint16_t*)(&self[1]);
}

static BROTLI_INLINE uint32_t* FN(Buckets)(HashLongestMatch* self) {
return (uint32_t*)(&FN(Num)(self)[self->bucket_size_]);
}

static void FN(Initialize)(
HasherHandle handle, const BrotliEncoderParams* params) {
HasherCommon* common = GetHasherCommon(handle);
HashLongestMatch* self = FN(Self)(handle);
HasherCommon* common, HashLongestMatch* BROTLI_RESTRICT self,
const BrotliEncoderParams* params) {
self->common_ = common;

BROTLI_UNUSED(params);
self->hash_shift_ = 64 - common->params.bucket_bits;
self->hash_mask_ = (~((uint64_t)0U)) >> (64 - 8 * common->params.hash_len);
self->bucket_size_ = (size_t)1 << common->params.bucket_bits;
self->block_bits_ = common->params.block_bits;
self->block_size_ = (size_t)1 << common->params.block_bits;
self->block_mask_ = (uint32_t)(self->block_size_ - 1);
self->num_last_distances_to_check_ =
common->params.num_last_distances_to_check;
self->num_ = (uint16_t*)common->extra;
self->buckets_ = (uint32_t*)&self->num_[self->bucket_size_];
}

static void FN(Prepare)(HasherHandle handle, BROTLI_BOOL one_shot,
size_t input_size, const uint8_t* data) {
HashLongestMatch* self = FN(Self)(handle);
uint16_t* num = FN(Num)(self);
static void FN(Prepare)(
HashLongestMatch* BROTLI_RESTRICT self, BROTLI_BOOL one_shot,
size_t input_size, const uint8_t* BROTLI_RESTRICT data) {
uint16_t* BROTLI_RESTRICT num = self->num_;
/* Partial preparation is 100 times slower (per socket). */
size_t partial_prepare_threshold = self->bucket_size_ >> 6;
if (one_shot && input_size <= partial_prepare_threshold) {
@@ -100,50 +100,52 @@ static BROTLI_INLINE size_t FN(HashMemAllocInBytes)(
size_t block_size = (size_t)1 << params->hasher.block_bits;
BROTLI_UNUSED(one_shot);
BROTLI_UNUSED(input_size);
return sizeof(HashLongestMatch) + bucket_size * (2 + 4 * block_size);
return sizeof(uint16_t) * bucket_size +
sizeof(uint32_t) * bucket_size * block_size;
}

/* Look at 4 bytes at &data[ix & mask].
Compute a hash from these, and store the value of ix at that position. */
static BROTLI_INLINE void FN(Store)(HasherHandle handle, const uint8_t* data,
static BROTLI_INLINE void FN(Store)(
HashLongestMatch* BROTLI_RESTRICT self, const uint8_t* BROTLI_RESTRICT data,
const size_t mask, const size_t ix) {
HashLongestMatch* self = FN(Self)(handle);
uint16_t* num = FN(Num)(self);
uint16_t* BROTLI_RESTRICT num = self->num_;
uint32_t* BROTLI_RESTRICT buckets = self->buckets_;
const uint32_t key = FN(HashBytes)(&data[ix & mask], self->hash_mask_,
self->hash_shift_);
const size_t minor_ix = num[key] & self->block_mask_;
const size_t offset =
minor_ix + (key << GetHasherCommon(handle)->params.block_bits);
FN(Buckets)(self)[offset] = (uint32_t)ix;
const size_t offset = minor_ix + (key << self->block_bits_);
++num[key];
buckets[offset] = (uint32_t)ix;
}

static BROTLI_INLINE void FN(StoreRange)(HasherHandle handle,
const uint8_t* data, const size_t mask, const size_t ix_start,
const size_t ix_end) {
static BROTLI_INLINE void FN(StoreRange)(HashLongestMatch* BROTLI_RESTRICT self,
const uint8_t* BROTLI_RESTRICT data, const size_t mask,
const size_t ix_start, const size_t ix_end) {
size_t i;
for (i = ix_start; i < ix_end; ++i) {
FN(Store)(handle, data, mask, i);
FN(Store)(self, data, mask, i);
}
}

static BROTLI_INLINE void FN(StitchToPreviousBlock)(HasherHandle handle,
static BROTLI_INLINE void FN(StitchToPreviousBlock)(
HashLongestMatch* BROTLI_RESTRICT self,
size_t num_bytes, size_t position, const uint8_t* ringbuffer,
size_t ringbuffer_mask) {
if (num_bytes >= FN(HashTypeLength)() - 1 && position >= 3) {
/* Prepare the hashes for three last bytes of the last write.
These could not be calculated before, since they require knowledge
of both the previous and the current block. */
FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 3);
FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 2);
FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 1);
FN(Store)(self, ringbuffer, ringbuffer_mask, position - 3);
FN(Store)(self, ringbuffer, ringbuffer_mask, position - 2);
FN(Store)(self, ringbuffer, ringbuffer_mask, position - 1);
}
}

static BROTLI_INLINE void FN(PrepareDistanceCache)(
HasherHandle handle, int* BROTLI_RESTRICT distance_cache) {
PrepareDistanceCache(distance_cache,
GetHasherCommon(handle)->params.num_last_distances_to_check);
HashLongestMatch* BROTLI_RESTRICT self,
int* BROTLI_RESTRICT distance_cache) {
PrepareDistanceCache(distance_cache, self->num_last_distances_to_check_);
}

/* Find a longest backward match of &data[cur_ix] up to the length of
@@ -157,17 +159,16 @@ static BROTLI_INLINE void FN(PrepareDistanceCache)(
Does not look for matches further away than max_backward.
Writes the best match into |out|.
|out|->score is updated only if a better match is found. */
static BROTLI_INLINE void FN(FindLongestMatch)(HasherHandle handle,
static BROTLI_INLINE void FN(FindLongestMatch)(
HashLongestMatch* BROTLI_RESTRICT self,
const BrotliEncoderDictionary* dictionary,
const uint8_t* BROTLI_RESTRICT data, const size_t ring_buffer_mask,
const int* BROTLI_RESTRICT distance_cache, const size_t cur_ix,
const size_t max_length, const size_t max_backward,
const size_t gap, const size_t max_distance,
const size_t dictionary_distance, const size_t max_distance,
HasherSearchResult* BROTLI_RESTRICT out) {
HasherCommon* common = GetHasherCommon(handle);
HashLongestMatch* self = FN(Self)(handle);
uint16_t* num = FN(Num)(self);
uint32_t* buckets = FN(Buckets)(self);
uint16_t* BROTLI_RESTRICT num = self->num_;
uint32_t* BROTLI_RESTRICT buckets = self->buckets_;
const size_t cur_ix_masked = cur_ix & ring_buffer_mask;
/* Don't accept a short copy from far away. */
score_t min_score = out->score;
@@ -177,7 +178,7 @@ static BROTLI_INLINE void FN(FindLongestMatch)(HasherHandle handle,
out->len = 0;
out->len_code_delta = 0;
/* Try last distance first. */
for (i = 0; i < (size_t)common->params.num_last_distances_to_check; ++i) {
for (i = 0; i < (size_t)self->num_last_distances_to_check_; ++i) {
const size_t backward = (size_t)distance_cache[i];
size_t prev_ix = (size_t)(cur_ix - backward);
if (prev_ix >= cur_ix) {
@@ -218,8 +219,7 @@ static BROTLI_INLINE void FN(FindLongestMatch)(HasherHandle handle,
{
const uint32_t key = FN(HashBytes)(
&data[cur_ix_masked], self->hash_mask_, self->hash_shift_);
uint32_t* BROTLI_RESTRICT bucket =
&buckets[key << common->params.block_bits];
uint32_t* BROTLI_RESTRICT bucket = &buckets[key << self->block_bits_];
const size_t down =
(num[key] > self->block_size_) ?
(num[key] - self->block_size_) : 0u;
@@ -259,7 +259,7 @@ static BROTLI_INLINE void FN(FindLongestMatch)(HasherHandle handle,
}
if (min_score == out->score) {
SearchInStaticDictionary(dictionary,
handle, &data[cur_ix_masked], max_length, max_backward + gap,
self->common_, &data[cur_ix_masked], max_length, dictionary_distance,
max_distance, out, BROTLI_FALSE);
}
}

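Editor's note: the hunks above move HashLongestMatch from a single HasherHandle blob (Self/Num/Buckets helpers) to plain num_/buckets_ pointers carved out of HasherCommon's extra allocation. The following standalone sketch is illustrative only (names and sizes are not upstream code); it shows the same two-array layout and the ring-buffered Store update, assuming the caller allocates one slab sized like HashMemAllocInBytes.

#include <stdint.h>
#include <stdlib.h>

typedef struct ToyHasher {
  size_t bucket_size;   /* number of buckets, 1 << bucket_bits */
  size_t block_size;    /* positions kept per bucket, 1 << block_bits */
  uint32_t block_mask;  /* block_size - 1 */
  int block_bits;
  uint16_t* num;        /* uint16_t[bucket_size], lives at the start of extra */
  uint32_t* buckets;    /* uint32_t[bucket_size * block_size], right after num */
} ToyHasher;

static void ToyHasherInit(ToyHasher* h, void* extra,
                          int bucket_bits, int block_bits) {
  h->bucket_size = (size_t)1 << bucket_bits;
  h->block_size = (size_t)1 << block_bits;
  h->block_mask = (uint32_t)(h->block_size - 1);
  h->block_bits = block_bits;
  h->num = (uint16_t*)extra;                        /* counts first */
  h->buckets = (uint32_t*)&h->num[h->bucket_size];  /* positions after */
}

/* Mirrors the Store logic: keep only the newest block_size positions per
   bucket, overwriting the oldest entry once the per-bucket block wraps. */
static void ToyHasherStore(ToyHasher* h, uint32_t key, uint32_t position) {
  const size_t minor_ix = h->num[key] & h->block_mask;
  const size_t offset = minor_ix + ((size_t)key << h->block_bits);
  h->buckets[offset] = position;
  ++h->num[key];
}

int main(void) {
  ToyHasher h;
  const int bucket_bits = 4, block_bits = 2;
  /* Same sizing rule as the new HashMemAllocInBytes: counts plus buckets. */
  size_t extra_bytes = (sizeof(uint16_t) << bucket_bits) +
                       (sizeof(uint32_t) << (bucket_bits + block_bits));
  void* extra = calloc(1, extra_bytes);  /* caller owns the single slab */
  if (!extra) return 1;
  ToyHasherInit(&h, extra, bucket_bits, block_bits);
  ToyHasherStore(&h, 3u, 12345u);
  free(extra);
  return 0;
}
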
@@ -20,7 +20,8 @@ static BROTLI_INLINE size_t FN(HashTypeLength)(void) { return 4; }
static BROTLI_INLINE size_t FN(StoreLookahead)(void) { return 4; }

/* HashBytes is the function that chooses the bucket to place the address in. */
static uint32_t FN(HashBytes)(const uint8_t* data, const int shift) {
static uint32_t FN(HashBytes)(
const uint8_t* BROTLI_RESTRICT data, const int shift) {
uint32_t h = BROTLI_UNALIGNED_LOAD32LE(data) * kHashMul32;
/* The higher bits contain more mixture from the multiplication,
so we take our results from there. */
@@ -38,42 +39,46 @@ typedef struct HashLongestMatch {
/* Mask for accessing entries in a block (in a ring-buffer manner). */
uint32_t block_mask_;

int block_bits_;
int num_last_distances_to_check_;

/* Shortcuts. */
HasherCommon* common_;

/* --- Dynamic size members --- */

/* Number of entries in a particular bucket. */
/* uint16_t num[bucket_size]; */
uint16_t* num_; /* uint16_t[bucket_size]; */

/* Buckets containing block_size_ of backward references. */
/* uint32_t* buckets[bucket_size * block_size]; */
uint32_t* buckets_; /* uint32_t[bucket_size * block_size]; */
} HashLongestMatch;

static BROTLI_INLINE HashLongestMatch* FN(Self)(HasherHandle handle) {
return (HashLongestMatch*)&(GetHasherCommon(handle)[1]);
}

static BROTLI_INLINE uint16_t* FN(Num)(HashLongestMatch* self) {
return (uint16_t*)(&self[1]);
}

static BROTLI_INLINE uint32_t* FN(Buckets)(HashLongestMatch* self) {
return (uint32_t*)(&FN(Num)(self)[self->bucket_size_]);
static BROTLI_INLINE uint16_t* FN(Num)(void* extra) {
return (uint16_t*)extra;
}

static void FN(Initialize)(
HasherHandle handle, const BrotliEncoderParams* params) {
HasherCommon* common = GetHasherCommon(handle);
HashLongestMatch* self = FN(Self)(handle);
HasherCommon* common, HashLongestMatch* BROTLI_RESTRICT self,
const BrotliEncoderParams* params) {
self->common_ = common;

BROTLI_UNUSED(params);
self->hash_shift_ = 32 - common->params.bucket_bits;
self->bucket_size_ = (size_t)1 << common->params.bucket_bits;
self->block_size_ = (size_t)1 << common->params.block_bits;
self->block_mask_ = (uint32_t)(self->block_size_ - 1);
self->num_ = (uint16_t*)common->extra;
self->buckets_ = (uint32_t*)(&self->num_[self->bucket_size_]);
self->block_bits_ = common->params.block_bits;
self->num_last_distances_to_check_ =
common->params.num_last_distances_to_check;
}

static void FN(Prepare)(HasherHandle handle, BROTLI_BOOL one_shot,
size_t input_size, const uint8_t* data) {
HashLongestMatch* self = FN(Self)(handle);
uint16_t* num = FN(Num)(self);
static void FN(Prepare)(
HashLongestMatch* BROTLI_RESTRICT self, BROTLI_BOOL one_shot,
size_t input_size, const uint8_t* BROTLI_RESTRICT data) {
uint16_t* BROTLI_RESTRICT num = self->num_;
/* Partial preparation is 100 times slower (per socket). */
size_t partial_prepare_threshold = self->bucket_size_ >> 6;
if (one_shot && input_size <= partial_prepare_threshold) {
@@ -94,49 +99,49 @@ static BROTLI_INLINE size_t FN(HashMemAllocInBytes)(
size_t block_size = (size_t)1 << params->hasher.block_bits;
BROTLI_UNUSED(one_shot);
BROTLI_UNUSED(input_size);
return sizeof(HashLongestMatch) + bucket_size * (2 + 4 * block_size);
return sizeof(uint16_t) * bucket_size +
sizeof(uint32_t) * bucket_size * block_size;
}

/* Look at 4 bytes at &data[ix & mask].
Compute a hash from these, and store the value of ix at that position. */
static BROTLI_INLINE void FN(Store)(HasherHandle handle, const uint8_t* data,
static BROTLI_INLINE void FN(Store)(
HashLongestMatch* BROTLI_RESTRICT self, const uint8_t* BROTLI_RESTRICT data,
const size_t mask, const size_t ix) {
HashLongestMatch* self = FN(Self)(handle);
uint16_t* num = FN(Num)(self);
const uint32_t key = FN(HashBytes)(&data[ix & mask], self->hash_shift_);
const size_t minor_ix = num[key] & self->block_mask_;
const size_t offset =
minor_ix + (key << GetHasherCommon(handle)->params.block_bits);
FN(Buckets)(self)[offset] = (uint32_t)ix;
++num[key];
const size_t minor_ix = self->num_[key] & self->block_mask_;
const size_t offset = minor_ix + (key << self->block_bits_);
self->buckets_[offset] = (uint32_t)ix;
++self->num_[key];
}

static BROTLI_INLINE void FN(StoreRange)(HasherHandle handle,
const uint8_t* data, const size_t mask, const size_t ix_start,
const size_t ix_end) {
static BROTLI_INLINE void FN(StoreRange)(HashLongestMatch* BROTLI_RESTRICT self,
const uint8_t* BROTLI_RESTRICT data, const size_t mask,
const size_t ix_start, const size_t ix_end) {
size_t i;
for (i = ix_start; i < ix_end; ++i) {
FN(Store)(handle, data, mask, i);
FN(Store)(self, data, mask, i);
}
}

static BROTLI_INLINE void FN(StitchToPreviousBlock)(HasherHandle handle,
static BROTLI_INLINE void FN(StitchToPreviousBlock)(
HashLongestMatch* BROTLI_RESTRICT self,
size_t num_bytes, size_t position, const uint8_t* ringbuffer,
size_t ringbuffer_mask) {
if (num_bytes >= FN(HashTypeLength)() - 1 && position >= 3) {
/* Prepare the hashes for three last bytes of the last write.
These could not be calculated before, since they require knowledge
of both the previous and the current block. */
FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 3);
FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 2);
FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 1);
FN(Store)(self, ringbuffer, ringbuffer_mask, position - 3);
FN(Store)(self, ringbuffer, ringbuffer_mask, position - 2);
FN(Store)(self, ringbuffer, ringbuffer_mask, position - 1);
}
}

static BROTLI_INLINE void FN(PrepareDistanceCache)(
HasherHandle handle, int* BROTLI_RESTRICT distance_cache) {
PrepareDistanceCache(distance_cache,
GetHasherCommon(handle)->params.num_last_distances_to_check);
HashLongestMatch* BROTLI_RESTRICT self,
int* BROTLI_RESTRICT distance_cache) {
PrepareDistanceCache(distance_cache, self->num_last_distances_to_check_);
}

/* Find a longest backward match of &data[cur_ix] up to the length of
@@ -150,17 +155,16 @@ static BROTLI_INLINE void FN(PrepareDistanceCache)(
Does not look for matches further away than max_backward.
Writes the best match into |out|.
|out|->score is updated only if a better match is found. */
static BROTLI_INLINE void FN(FindLongestMatch)(HasherHandle handle,
static BROTLI_INLINE void FN(FindLongestMatch)(
HashLongestMatch* BROTLI_RESTRICT self,
const BrotliEncoderDictionary* dictionary,
const uint8_t* BROTLI_RESTRICT data, const size_t ring_buffer_mask,
const int* BROTLI_RESTRICT distance_cache, const size_t cur_ix,
const size_t max_length, const size_t max_backward,
const size_t gap, const size_t max_distance,
const size_t dictionary_distance, const size_t max_distance,
HasherSearchResult* BROTLI_RESTRICT out) {
HasherCommon* common = GetHasherCommon(handle);
HashLongestMatch* self = FN(Self)(handle);
uint16_t* num = FN(Num)(self);
uint32_t* buckets = FN(Buckets)(self);
uint16_t* BROTLI_RESTRICT num = self->num_;
uint32_t* BROTLI_RESTRICT buckets = self->buckets_;
const size_t cur_ix_masked = cur_ix & ring_buffer_mask;
/* Don't accept a short copy from far away. */
score_t min_score = out->score;
@@ -170,7 +174,7 @@ static BROTLI_INLINE void FN(FindLongestMatch)(HasherHandle handle,
out->len = 0;
out->len_code_delta = 0;
/* Try last distance first. */
for (i = 0; i < (size_t)common->params.num_last_distances_to_check; ++i) {
for (i = 0; i < (size_t)self->num_last_distances_to_check_; ++i) {
const size_t backward = (size_t)distance_cache[i];
size_t prev_ix = (size_t)(cur_ix - backward);
if (prev_ix >= cur_ix) {
@@ -211,8 +215,7 @@ static BROTLI_INLINE void FN(FindLongestMatch)(HasherHandle handle,
{
const uint32_t key =
FN(HashBytes)(&data[cur_ix_masked], self->hash_shift_);
uint32_t* BROTLI_RESTRICT bucket =
&buckets[key << common->params.block_bits];
uint32_t* BROTLI_RESTRICT bucket = &buckets[key << self->block_bits_];
const size_t down =
(num[key] > self->block_size_) ? (num[key] - self->block_size_) : 0u;
for (i = num[key]; i > down;) {
@@ -251,7 +254,7 @@ static BROTLI_INLINE void FN(FindLongestMatch)(HasherHandle handle,
}
if (min_score == out->score) {
SearchInStaticDictionary(dictionary,
handle, &data[cur_ix_masked], max_length, max_backward + gap,
self->common_, &data[cur_ix_masked], max_length, dictionary_distance,
max_distance, out, BROTLI_FALSE);
}
}

@@ -5,15 +5,16 @@
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/

/* template parameters: FN, BUCKET_BITS, BUCKET_SWEEP, HASH_LEN,
/* template parameters: FN, BUCKET_BITS, BUCKET_SWEEP_BITS, HASH_LEN,
USE_DICTIONARY
*/

#define HashLongestMatchQuickly HASHER()

#define BUCKET_SIZE (1 << BUCKET_BITS)

#define HASH_MAP_SIZE (4 << BUCKET_BITS)
#define BUCKET_MASK (BUCKET_SIZE - 1)
#define BUCKET_SWEEP (1 << BUCKET_SWEEP_BITS)
#define BUCKET_SWEEP_MASK ((BUCKET_SWEEP - 1) << 3)

static BROTLI_INLINE size_t FN(HashTypeLength)(void) { return 8; }
static BROTLI_INLINE size_t FN(StoreLookahead)(void) { return 8; }
@@ -32,39 +33,50 @@ static uint32_t FN(HashBytes)(const uint8_t* data) {
/* A (forgetful) hash table to the data seen by the compressor, to
help create backward references to previous data.

This is a hash map of fixed size (BUCKET_SIZE). Starting from the
given index, BUCKET_SWEEP buckets are used to store values of a key. */
This is a hash map of fixed size (BUCKET_SIZE). */
typedef struct HashLongestMatchQuickly {
uint32_t buckets_[BUCKET_SIZE + BUCKET_SWEEP];
/* Shortcuts. */
HasherCommon* common;

/* --- Dynamic size members --- */

uint32_t* buckets_; /* uint32_t[BUCKET_SIZE]; */
} HashLongestMatchQuickly;

static BROTLI_INLINE HashLongestMatchQuickly* FN(Self)(HasherHandle handle) {
return (HashLongestMatchQuickly*)&(GetHasherCommon(handle)[1]);
}

static void FN(Initialize)(
HasherHandle handle, const BrotliEncoderParams* params) {
BROTLI_UNUSED(handle);
HasherCommon* common, HashLongestMatchQuickly* BROTLI_RESTRICT self,
const BrotliEncoderParams* params) {
self->common = common;

BROTLI_UNUSED(params);
self->buckets_ = (uint32_t*)common->extra;
}

static void FN(Prepare)(HasherHandle handle, BROTLI_BOOL one_shot,
size_t input_size, const uint8_t* data) {
HashLongestMatchQuickly* self = FN(Self)(handle);
static void FN(Prepare)(
HashLongestMatchQuickly* BROTLI_RESTRICT self, BROTLI_BOOL one_shot,
size_t input_size, const uint8_t* BROTLI_RESTRICT data) {
uint32_t* BROTLI_RESTRICT buckets = self->buckets_;
/* Partial preparation is 100 times slower (per socket). */
size_t partial_prepare_threshold = HASH_MAP_SIZE >> 7;
size_t partial_prepare_threshold = BUCKET_SIZE >> 5;
if (one_shot && input_size <= partial_prepare_threshold) {
size_t i;
for (i = 0; i < input_size; ++i) {
const uint32_t key = FN(HashBytes)(&data[i]);
memset(&self->buckets_[key], 0, BUCKET_SWEEP * sizeof(self->buckets_[0]));
if (BUCKET_SWEEP == 1) {
buckets[key] = 0;
} else {
uint32_t j;
for (j = 0; j < BUCKET_SWEEP; ++j) {
buckets[(key + (j << 3)) & BUCKET_MASK] = 0;
}
}
}
} else {
/* It is not strictly necessary to fill this buffer here, but
not filling will make the results of the compression stochastic
(but correct). This is because random data would cause the
system to find accidentally good backward references here and there. */
memset(&self->buckets_[0], 0, sizeof(self->buckets_));
memset(buckets, 0, sizeof(uint32_t) * BUCKET_SIZE);
}
}

@@ -74,45 +86,53 @@ static BROTLI_INLINE size_t FN(HashMemAllocInBytes)(
BROTLI_UNUSED(params);
BROTLI_UNUSED(one_shot);
BROTLI_UNUSED(input_size);
return sizeof(HashLongestMatchQuickly);
return sizeof(uint32_t) * BUCKET_SIZE;
}

/* Look at 5 bytes at &data[ix & mask].
Compute a hash from these, and store the value somewhere within
[ix .. ix+3]. */
static BROTLI_INLINE void FN(Store)(HasherHandle handle,
const uint8_t* data, const size_t mask, const size_t ix) {
static BROTLI_INLINE void FN(Store)(
HashLongestMatchQuickly* BROTLI_RESTRICT self,
const uint8_t* BROTLI_RESTRICT data, const size_t mask, const size_t ix) {
const uint32_t key = FN(HashBytes)(&data[ix & mask]);
/* Wiggle the value with the bucket sweep range. */
const uint32_t off = (ix >> 3) % BUCKET_SWEEP;
FN(Self)(handle)->buckets_[key + off] = (uint32_t)ix;
if (BUCKET_SWEEP == 1) {
self->buckets_[key] = (uint32_t)ix;
} else {
/* Wiggle the value with the bucket sweep range. */
const uint32_t off = ix & BUCKET_SWEEP_MASK;
self->buckets_[(key + off) & BUCKET_MASK] = (uint32_t)ix;
}
}

static BROTLI_INLINE void FN(StoreRange)(HasherHandle handle,
const uint8_t* data, const size_t mask, const size_t ix_start,
const size_t ix_end) {
static BROTLI_INLINE void FN(StoreRange)(
HashLongestMatchQuickly* BROTLI_RESTRICT self,
const uint8_t* BROTLI_RESTRICT data, const size_t mask,
const size_t ix_start, const size_t ix_end) {
size_t i;
for (i = ix_start; i < ix_end; ++i) {
FN(Store)(handle, data, mask, i);
FN(Store)(self, data, mask, i);
}
}

static BROTLI_INLINE void FN(StitchToPreviousBlock)(
HasherHandle handle, size_t num_bytes, size_t position,
HashLongestMatchQuickly* BROTLI_RESTRICT self,
size_t num_bytes, size_t position,
const uint8_t* ringbuffer, size_t ringbuffer_mask) {
if (num_bytes >= FN(HashTypeLength)() - 1 && position >= 3) {
/* Prepare the hashes for three last bytes of the last write.
These could not be calculated before, since they require knowledge
of both the previous and the current block. */
FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 3);
FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 2);
FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 1);
FN(Store)(self, ringbuffer, ringbuffer_mask, position - 3);
FN(Store)(self, ringbuffer, ringbuffer_mask, position - 2);
FN(Store)(self, ringbuffer, ringbuffer_mask, position - 1);
}
}

static BROTLI_INLINE void FN(PrepareDistanceCache)(
HasherHandle handle, int* BROTLI_RESTRICT distance_cache) {
BROTLI_UNUSED(handle);
HashLongestMatchQuickly* BROTLI_RESTRICT self,
int* BROTLI_RESTRICT distance_cache) {
BROTLI_UNUSED(self);
BROTLI_UNUSED(distance_cache);
}

@@ -125,17 +145,19 @@ static BROTLI_INLINE void FN(PrepareDistanceCache)(
Writes the best match into |out|.
|out|->score is updated only if a better match is found. */
static BROTLI_INLINE void FN(FindLongestMatch)(
HasherHandle handle, const BrotliEncoderDictionary* dictionary,
HashLongestMatchQuickly* BROTLI_RESTRICT self,
const BrotliEncoderDictionary* dictionary,
const uint8_t* BROTLI_RESTRICT data,
const size_t ring_buffer_mask, const int* BROTLI_RESTRICT distance_cache,
const size_t cur_ix, const size_t max_length, const size_t max_backward,
const size_t gap, const size_t max_distance,
const size_t dictionary_distance, const size_t max_distance,
HasherSearchResult* BROTLI_RESTRICT out) {
HashLongestMatchQuickly* self = FN(Self)(handle);
uint32_t* BROTLI_RESTRICT buckets = self->buckets_;
const size_t best_len_in = out->len;
const size_t cur_ix_masked = cur_ix & ring_buffer_mask;
const uint32_t key = FN(HashBytes)(&data[cur_ix_masked]);
int compare_char = data[cur_ix_masked + best_len_in];
size_t key = FN(HashBytes)(&data[cur_ix_masked]);
size_t key_out;
score_t min_score = out->score;
score_t best_score = out->score;
size_t best_len = best_len_in;
@@ -145,21 +167,21 @@ static BROTLI_INLINE void FN(FindLongestMatch)(
if (prev_ix < cur_ix) {
prev_ix &= (uint32_t)ring_buffer_mask;
if (compare_char == data[prev_ix + best_len]) {
size_t len = FindMatchLengthWithLimit(&data[prev_ix],
&data[cur_ix_masked],
max_length);
const size_t len = FindMatchLengthWithLimit(
&data[prev_ix], &data[cur_ix_masked], max_length);
if (len >= 4) {
const score_t score = BackwardReferenceScoreUsingLastDistance(len);
if (best_score < score) {
best_score = score;
best_len = len;
out->len = len;
out->distance = cached_backward;
out->score = best_score;
compare_char = data[cur_ix_masked + best_len];
out->score = score;
if (BUCKET_SWEEP == 1) {
self->buckets_[key] = (uint32_t)cur_ix;
buckets[key] = (uint32_t)cur_ix;
return;
} else {
best_len = len;
best_score = score;
compare_char = data[cur_ix_masked + len];
}
}
}
@@ -169,8 +191,8 @@ static BROTLI_INLINE void FN(FindLongestMatch)(
size_t backward;
size_t len;
/* Only one to look for, don't bother to prepare for a loop. */
prev_ix = self->buckets_[key];
self->buckets_[key] = (uint32_t)cur_ix;
prev_ix = buckets[key];
buckets[key] = (uint32_t)cur_ix;
backward = cur_ix - prev_ix;
prev_ix &= (uint32_t)ring_buffer_mask;
if (compare_char != data[prev_ix + best_len_in]) {
@@ -192,12 +214,17 @@ static BROTLI_INLINE void FN(FindLongestMatch)(
}
}
} else {
uint32_t* bucket = self->buckets_ + key;
int i;
prev_ix = *bucket++;
for (i = 0; i < BUCKET_SWEEP; ++i, prev_ix = *bucket++) {
const size_t backward = cur_ix - prev_ix;
size_t keys[BUCKET_SWEEP];
size_t i;
for (i = 0; i < BUCKET_SWEEP; ++i) {
keys[i] = (key + (i << 3)) & BUCKET_MASK;
}
key_out = keys[(cur_ix & BUCKET_SWEEP_MASK) >> 3];
for (i = 0; i < BUCKET_SWEEP; ++i) {
size_t len;
size_t backward;
prev_ix = buckets[keys[i]];
backward = cur_ix - prev_ix;
prev_ix &= (uint32_t)ring_buffer_mask;
if (compare_char != data[prev_ix + best_len]) {
continue;
@@ -211,25 +238,29 @@ static BROTLI_INLINE void FN(FindLongestMatch)(
if (len >= 4) {
const score_t score = BackwardReferenceScore(len, backward);
if (best_score < score) {
best_score = score;
best_len = len;
out->len = best_len;
out->distance = backward;
out->len = len;
compare_char = data[cur_ix_masked + len];
best_score = score;
out->score = score;
compare_char = data[cur_ix_masked + best_len];
out->distance = backward;
}
}
}
}
if (USE_DICTIONARY && min_score == out->score) {
SearchInStaticDictionary(dictionary,
handle, &data[cur_ix_masked], max_length, max_backward + gap,
self->common, &data[cur_ix_masked], max_length, dictionary_distance,
max_distance, out, BROTLI_TRUE);
}
self->buckets_[key + ((cur_ix >> 3) % BUCKET_SWEEP)] = (uint32_t)cur_ix;
if (BUCKET_SWEEP != 1) {
buckets[key_out] = (uint32_t)cur_ix;
}
}

#undef HASH_MAP_SIZE
#undef BUCKET_SWEEP_MASK
#undef BUCKET_SWEEP
#undef BUCKET_MASK
#undef BUCKET_SIZE

#undef HashLongestMatchQuickly

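Editor's note: with the new BUCKET_SWEEP_BITS parametrization, the quick hasher derives its sweep slot from the low position bits and wraps it with BUCKET_MASK instead of reserving BUCKET_SWEEP extra buckets at the end of the table. Below is a minimal sketch of the new slot computation only; the constants are example values, not the ones brotli instantiates.

#include <stdint.h>
#include <stdio.h>

#define TOY_BUCKET_BITS 10
#define TOY_BUCKET_SIZE (1u << TOY_BUCKET_BITS)
#define TOY_BUCKET_MASK (TOY_BUCKET_SIZE - 1u)
#define TOY_SWEEP_BITS 2
#define TOY_SWEEP (1u << TOY_SWEEP_BITS)
#define TOY_SWEEP_MASK ((TOY_SWEEP - 1u) << 3)

/* The sweep offset comes from the low bits of the position (shifted by 3,
   as in BUCKET_SWEEP_MASK), and the final slot wraps inside the table. */
static uint32_t toy_slot(uint32_t key, size_t ix) {
  const uint32_t off = (uint32_t)ix & TOY_SWEEP_MASK;
  return (key + off) & TOY_BUCKET_MASK;
}

int main(void) {
  size_t ix;
  /* Consecutive 8-byte-spaced positions rotate through TOY_SWEEP slots. */
  for (ix = 0; ix < 64; ix += 8) {
    printf("ix=%2zu -> slot %u\n", ix, toy_slot(100u, ix));
  }
  return 0;
}
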
@@ -51,13 +51,9 @@ typedef struct HashRolling {
uint32_t factor_remove;
} HashRolling;

static BROTLI_INLINE HashRolling* FN(Self)(HasherHandle handle) {
return (HashRolling*)&(GetHasherCommon(handle)[1]);
}

static void FN(Initialize)(
HasherHandle handle, const BrotliEncoderParams* params) {
HashRolling* self = FN(Self)(handle);
HasherCommon* common, HashRolling* BROTLI_RESTRICT self,
const BrotliEncoderParams* params) {
size_t i;
self->state = 0;
self->next_ix = 0;
@@ -71,7 +67,7 @@ static void FN(Initialize)(
self->factor_remove *= self->factor;
}

self->table = (uint32_t*)((HasherHandle)self + sizeof(HashRolling));
self->table = (uint32_t*)common->extra;
for (i = 0; i < NUMBUCKETS; i++) {
self->table[i] = FN(kInvalidPos);
}
@@ -79,9 +75,8 @@ static void FN(Initialize)(
BROTLI_UNUSED(params);
}

static void FN(Prepare)(HasherHandle handle, BROTLI_BOOL one_shot,
size_t input_size, const uint8_t* data) {
HashRolling* self = FN(Self)(handle);
static void FN(Prepare)(HashRolling* BROTLI_RESTRICT self, BROTLI_BOOL one_shot,
size_t input_size, const uint8_t* BROTLI_RESTRICT data) {
size_t i;
/* Too small size, cannot use this hasher. */
if (input_size < CHUNKLEN) return;
@@ -96,36 +91,36 @@ static void FN(Prepare)(HasherHandle handle, BROTLI_BOOL one_shot,
static BROTLI_INLINE size_t FN(HashMemAllocInBytes)(
const BrotliEncoderParams* params, BROTLI_BOOL one_shot,
size_t input_size) {
return sizeof(HashRolling) + NUMBUCKETS * sizeof(uint32_t);
return NUMBUCKETS * sizeof(uint32_t);
BROTLI_UNUSED(params);
BROTLI_UNUSED(one_shot);
BROTLI_UNUSED(input_size);
}

static BROTLI_INLINE void FN(Store)(HasherHandle BROTLI_RESTRICT handle,
static BROTLI_INLINE void FN(Store)(HashRolling* BROTLI_RESTRICT self,
const uint8_t* BROTLI_RESTRICT data, const size_t mask, const size_t ix) {
BROTLI_UNUSED(handle);
BROTLI_UNUSED(self);
BROTLI_UNUSED(data);
BROTLI_UNUSED(mask);
BROTLI_UNUSED(ix);
}

static BROTLI_INLINE void FN(StoreRange)(HasherHandle handle,
const uint8_t* data, const size_t mask, const size_t ix_start,
const size_t ix_end) {
BROTLI_UNUSED(handle);
static BROTLI_INLINE void FN(StoreRange)(HashRolling* BROTLI_RESTRICT self,
const uint8_t* BROTLI_RESTRICT data, const size_t mask,
const size_t ix_start, const size_t ix_end) {
BROTLI_UNUSED(self);
BROTLI_UNUSED(data);
BROTLI_UNUSED(mask);
BROTLI_UNUSED(ix_start);
BROTLI_UNUSED(ix_end);
}

static BROTLI_INLINE void FN(StitchToPreviousBlock)(HasherHandle handle,
static BROTLI_INLINE void FN(StitchToPreviousBlock)(
HashRolling* BROTLI_RESTRICT self,
size_t num_bytes, size_t position, const uint8_t* ringbuffer,
size_t ring_buffer_mask) {
/* In this case we must re-initialize the hasher from scratch from the
current position. */
HashRolling* self = FN(Self)(handle);
size_t position_masked;
size_t available = num_bytes;
if ((position & (JUMP - 1)) != 0) {
@@ -139,28 +134,29 @@ static BROTLI_INLINE void FN(StitchToPreviousBlock)(HasherHandle handle,
available = ring_buffer_mask - position_masked;
}

FN(Prepare)(handle, BROTLI_FALSE, available,
FN(Prepare)(self, BROTLI_FALSE, available,
ringbuffer + (position & ring_buffer_mask));
self->next_ix = position;
BROTLI_UNUSED(num_bytes);
}

static BROTLI_INLINE void FN(PrepareDistanceCache)(
HasherHandle handle, int* BROTLI_RESTRICT distance_cache) {
BROTLI_UNUSED(handle);
HashRolling* BROTLI_RESTRICT self,
int* BROTLI_RESTRICT distance_cache) {
BROTLI_UNUSED(self);
BROTLI_UNUSED(distance_cache);
}

static BROTLI_INLINE void FN(FindLongestMatch)(HasherHandle handle,
static BROTLI_INLINE void FN(FindLongestMatch)(
HashRolling* BROTLI_RESTRICT self,
const BrotliEncoderDictionary* dictionary,
const uint8_t* BROTLI_RESTRICT data, const size_t ring_buffer_mask,
const int* BROTLI_RESTRICT distance_cache, const size_t cur_ix,
const size_t max_length, const size_t max_backward,
const size_t gap, const size_t max_distance,
const size_t dictionary_distance, const size_t max_distance,
HasherSearchResult* BROTLI_RESTRICT out) {
HashRolling* self = FN(Self)(handle);
const size_t cur_ix_masked = cur_ix & ring_buffer_mask;
size_t pos = self->next_ix;
size_t pos;

if ((cur_ix & (JUMP - 1)) != 0) return;

@@ -209,7 +205,7 @@ static BROTLI_INLINE void FN(FindLongestMatch)(HasherHandle handle,
backup-hasher, the main hasher already searches in it. */
BROTLI_UNUSED(dictionary);
BROTLI_UNUSED(distance_cache);
BROTLI_UNUSED(gap);
BROTLI_UNUSED(dictionary_distance);
BROTLI_UNUSED(max_distance);
}

@@ -24,7 +24,7 @@ static BROTLI_INLINE size_t FN(StoreLookahead)(void) {
return MAX_TREE_COMP_LENGTH;
}

static uint32_t FN(HashBytes)(const uint8_t* data) {
static uint32_t FN(HashBytes)(const uint8_t* BROTLI_RESTRICT data) {
uint32_t h = BROTLI_UNALIGNED_LOAD32LE(data) * kHashMul32;
/* The higher bits contain more mixture from the multiplication,
so we take our results from there. */
@@ -38,7 +38,7 @@ typedef struct HashToBinaryTree {
/* Hash table that maps the 4-byte hashes of the sequence to the last
position where this hash was found, which is the root of the binary
tree of sequences that share this hash bucket. */
uint32_t buckets_[BUCKET_SIZE];
uint32_t* buckets_; /* uint32_t[BUCKET_SIZE]; */

/* A position used to mark a non-existent sequence, i.e. a tree is empty if
its root is at invalid_pos_ and a node is a leaf if both its children
@@ -51,34 +51,30 @@ typedef struct HashToBinaryTree {
corresponding to a hash is a sequence starting at buckets_[hash] and
the left and right children of a sequence starting at pos are
forest_[2 * pos] and forest_[2 * pos + 1]. */
/* uint32_t forest[2 * num_nodes] */
uint32_t* forest_; /* uint32_t[2 * num_nodes] */
} HashToBinaryTree;

static BROTLI_INLINE HashToBinaryTree* FN(Self)(HasherHandle handle) {
return (HashToBinaryTree*)&(GetHasherCommon(handle)[1]);
}

static BROTLI_INLINE uint32_t* FN(Forest)(HashToBinaryTree* self) {
return (uint32_t*)(&self[1]);
}

static void FN(Initialize)(
HasherHandle handle, const BrotliEncoderParams* params) {
HashToBinaryTree* self = FN(Self)(handle);
HasherCommon* common, HashToBinaryTree* BROTLI_RESTRICT self,
const BrotliEncoderParams* params) {
self->buckets_ = (uint32_t*)common->extra;
self->forest_ = &self->buckets_[BUCKET_SIZE];

self->window_mask_ = (1u << params->lgwin) - 1u;
self->invalid_pos_ = (uint32_t)(0 - self->window_mask_);
}

static void FN(Prepare)(HasherHandle handle, BROTLI_BOOL one_shot,
size_t input_size, const uint8_t* data) {
HashToBinaryTree* self = FN(Self)(handle);
static void FN(Prepare)
(HashToBinaryTree* BROTLI_RESTRICT self, BROTLI_BOOL one_shot,
size_t input_size, const uint8_t* BROTLI_RESTRICT data) {
uint32_t invalid_pos = self->invalid_pos_;
uint32_t i;
uint32_t* BROTLI_RESTRICT buckets = self->buckets_;
BROTLI_UNUSED(data);
BROTLI_UNUSED(one_shot);
BROTLI_UNUSED(input_size);
for (i = 0; i < BUCKET_SIZE; i++) {
self->buckets_[i] = invalid_pos;
buckets[i] = invalid_pos;
}
}

@@ -89,15 +85,17 @@ static BROTLI_INLINE size_t FN(HashMemAllocInBytes)(
if (one_shot && input_size < num_nodes) {
num_nodes = input_size;
}
return sizeof(HashToBinaryTree) + 2 * sizeof(uint32_t) * num_nodes;
return sizeof(uint32_t) * BUCKET_SIZE + 2 * sizeof(uint32_t) * num_nodes;
}

static BROTLI_INLINE size_t FN(LeftChildIndex)(HashToBinaryTree* self,
static BROTLI_INLINE size_t FN(LeftChildIndex)(
HashToBinaryTree* BROTLI_RESTRICT self,
const size_t pos) {
return 2 * (pos & self->window_mask_);
}

static BROTLI_INLINE size_t FN(RightChildIndex)(HashToBinaryTree* self,
static BROTLI_INLINE size_t FN(RightChildIndex)(
HashToBinaryTree* BROTLI_RESTRICT self,
const size_t pos) {
return 2 * (pos & self->window_mask_) + 1;
}
@@ -113,7 +111,7 @@ static BROTLI_INLINE size_t FN(RightChildIndex)(HashToBinaryTree* self,

This function must be called with increasing cur_ix positions. */
static BROTLI_INLINE BackwardMatch* FN(StoreAndFindMatches)(
HashToBinaryTree* self, const uint8_t* const BROTLI_RESTRICT data,
HashToBinaryTree* BROTLI_RESTRICT self, const uint8_t* BROTLI_RESTRICT data,
const size_t cur_ix, const size_t ring_buffer_mask, const size_t max_length,
const size_t max_backward, size_t* const BROTLI_RESTRICT best_len,
BackwardMatch* BROTLI_RESTRICT matches) {
@@ -123,8 +121,9 @@ static BROTLI_INLINE BackwardMatch* FN(StoreAndFindMatches)(
const BROTLI_BOOL should_reroot_tree =
TO_BROTLI_BOOL(max_length >= MAX_TREE_COMP_LENGTH);
const uint32_t key = FN(HashBytes)(&data[cur_ix_masked]);
uint32_t* forest = FN(Forest)(self);
size_t prev_ix = self->buckets_[key];
uint32_t* BROTLI_RESTRICT buckets = self->buckets_;
uint32_t* BROTLI_RESTRICT forest = self->forest_;
size_t prev_ix = buckets[key];
/* The forest index of the rightmost node of the left subtree of the new
root, updated as we traverse and re-root the tree of the hash bucket. */
size_t node_left = FN(LeftChildIndex)(self, cur_ix);
@@ -139,7 +138,7 @@ static BROTLI_INLINE BackwardMatch* FN(StoreAndFindMatches)(
size_t best_len_right = 0;
size_t depth_remaining;
if (should_reroot_tree) {
self->buckets_[key] = (uint32_t)cur_ix;
buckets[key] = (uint32_t)cur_ix;
}
for (depth_remaining = MAX_TREE_SEARCH_DEPTH; ; --depth_remaining) {
const size_t backward = cur_ix - prev_ix;
@@ -199,11 +198,13 @@ static BROTLI_INLINE BackwardMatch* FN(StoreAndFindMatches)(
matches in matches[0] to matches[*num_matches - 1]. The matches will be
sorted by strictly increasing length and (non-strictly) increasing
distance. */
static BROTLI_INLINE size_t FN(FindAllMatches)(HasherHandle handle,
const BrotliEncoderDictionary* dictionary, const uint8_t* data,
static BROTLI_INLINE size_t FN(FindAllMatches)(
HashToBinaryTree* BROTLI_RESTRICT self,
const BrotliEncoderDictionary* dictionary,
const uint8_t* BROTLI_RESTRICT data,
const size_t ring_buffer_mask, const size_t cur_ix,
const size_t max_length, const size_t max_backward,
const size_t gap, const BrotliEncoderParams* params,
const size_t dictionary_distance, const BrotliEncoderParams* params,
BackwardMatch* matches) {
BackwardMatch* const orig_matches = matches;
const size_t cur_ix_masked = cur_ix & ring_buffer_mask;
@@ -236,7 +237,7 @@ static BROTLI_INLINE size_t FN(FindAllMatches)(HasherHandle handle,
}
}
if (best_len < max_length) {
matches = FN(StoreAndFindMatches)(FN(Self)(handle), data, cur_ix,
matches = FN(StoreAndFindMatches)(self, data, cur_ix,
ring_buffer_mask, max_length, max_backward, &best_len, matches);
}
for (i = 0; i <= BROTLI_MAX_STATIC_DICTIONARY_MATCH_LEN; ++i) {
@@ -252,7 +253,7 @@ static BROTLI_INLINE size_t FN(FindAllMatches)(HasherHandle handle,
for (l = minlen; l <= maxlen; ++l) {
uint32_t dict_id = dict_matches[l];
if (dict_id < kInvalidMatch) {
size_t distance = max_backward + gap + (dict_id >> 5) + 1;
size_t distance = dictionary_distance + (dict_id >> 5) + 1;
if (distance <= params->dist.max_distance) {
InitDictionaryBackwardMatch(matches++, distance, l, dict_id & 31);
}
@@ -266,18 +267,18 @@ static BROTLI_INLINE size_t FN(FindAllMatches)(HasherHandle handle,
/* Stores the hash of the next 4 bytes and re-roots the binary tree at the
current sequence, without returning any matches.
REQUIRES: ix + MAX_TREE_COMP_LENGTH <= end-of-current-block */
static BROTLI_INLINE void FN(Store)(HasherHandle handle, const uint8_t* data,
static BROTLI_INLINE void FN(Store)(HashToBinaryTree* BROTLI_RESTRICT self,
const uint8_t* BROTLI_RESTRICT data,
const size_t mask, const size_t ix) {
HashToBinaryTree* self = FN(Self)(handle);
/* Maximum distance is window size - 16, see section 9.1. of the spec. */
const size_t max_backward = self->window_mask_ - BROTLI_WINDOW_GAP + 1;
FN(StoreAndFindMatches)(self, data, ix, mask, MAX_TREE_COMP_LENGTH,
max_backward, NULL, NULL);
}

static BROTLI_INLINE void FN(StoreRange)(HasherHandle handle,
const uint8_t* data, const size_t mask, const size_t ix_start,
const size_t ix_end) {
static BROTLI_INLINE void FN(StoreRange)(HashToBinaryTree* BROTLI_RESTRICT self,
const uint8_t* BROTLI_RESTRICT data, const size_t mask,
const size_t ix_start, const size_t ix_end) {
size_t i = ix_start;
size_t j = ix_start;
if (ix_start + 63 <= ix_end) {
@@ -285,18 +286,18 @@ static BROTLI_INLINE void FN(StoreRange)(HasherHandle handle,
}
if (ix_start + 512 <= i) {
for (; j < i; j += 8) {
FN(Store)(handle, data, mask, j);
FN(Store)(self, data, mask, j);
}
}
for (; i < ix_end; ++i) {
FN(Store)(handle, data, mask, i);
FN(Store)(self, data, mask, i);
}
}

static BROTLI_INLINE void FN(StitchToPreviousBlock)(HasherHandle handle,
static BROTLI_INLINE void FN(StitchToPreviousBlock)(
HashToBinaryTree* BROTLI_RESTRICT self,
size_t num_bytes, size_t position, const uint8_t* ringbuffer,
size_t ringbuffer_mask) {
HashToBinaryTree* self = FN(Self)(handle);
if (num_bytes >= FN(HashTypeLength)() - 1 &&
position >= MAX_TREE_COMP_LENGTH) {
/* Store the last `MAX_TREE_COMP_LENGTH - 1` positions in the hasher.

@@ -56,6 +56,18 @@ BROTLI_INTERNAL void BrotliFree(MemoryManager* m, void* p);
#define BROTLI_IS_OOM(M) (!!(M)->is_oom)
#endif  /* BROTLI_ENCODER_EXIT_ON_OOM */

/*
BROTLI_IS_NULL is a fake check, BROTLI_IS_OOM does the heavy lifting.
The only purpose of it is to explain static analyzers the state of things.
NB: use ONLY together with BROTLI_IS_OOM
AND ONLY for allocations in the current scope.
*/
#if defined(__clang_analyzer__) && !defined(BROTLI_ENCODER_EXIT_ON_OOM)
#define BROTLI_IS_NULL(A) ((A) == nullptr)
#else  /* defined(__clang_analyzer__) */
#define BROTLI_IS_NULL(A) (!!0)
#endif  /* defined(__clang_analyzer__) */

BROTLI_INTERNAL void BrotliWipeOutMemoryManager(MemoryManager* m);

/*
@@ -66,18 +78,18 @@ A: array
C: capacity
R: requested size
*/
#define BROTLI_ENSURE_CAPACITY(M, T, A, C, R) { \
if (C < (R)) { \
size_t _new_size = (C == 0) ? (R) : C; \
T* new_array; \
while (_new_size < (R)) _new_size *= 2; \
new_array = BROTLI_ALLOC((M), T, _new_size); \
if (!BROTLI_IS_OOM(M) && C != 0) \
memcpy(new_array, A, C * sizeof(T)); \
BROTLI_FREE((M), A); \
A = new_array; \
C = _new_size; \
} \
#define BROTLI_ENSURE_CAPACITY(M, T, A, C, R) { \
if (C < (R)) { \
size_t _new_size = (C == 0) ? (R) : C; \
T* new_array; \
while (_new_size < (R)) _new_size *= 2; \
new_array = BROTLI_ALLOC((M), T, _new_size); \
if (!BROTLI_IS_OOM(M) && !BROTLI_IS_NULL(new_array) && C != 0) \
memcpy(new_array, A, C * sizeof(T)); \
BROTLI_FREE((M), A); \
A = new_array; \
C = _new_size; \
} \
}

/*

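Editor's note: the BROTLI_ENSURE_CAPACITY hunk above only adds the BROTLI_IS_NULL guard before the memcpy; the growth rule itself is unchanged. The following is a hedged, self-contained sketch of that rule (illustrative names; plain malloc/free stand in for the MemoryManager, which is not how brotli allocates):

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

/* Grow *array to hold at least `requested` elements: keep the current
   capacity if it already suffices, otherwise double it until the request
   fits, copy the old contents, and free the old block. Returns 0 on OOM. */
static int ensure_capacity_u32(uint32_t** array, size_t* capacity,
                               size_t requested) {
  size_t new_size;
  uint32_t* new_array;
  if (*capacity >= requested) return 1;        /* nothing to do */
  new_size = (*capacity == 0) ? requested : *capacity;
  while (new_size < requested) new_size *= 2;  /* geometric growth */
  new_array = (uint32_t*)malloc(new_size * sizeof(uint32_t));
  if (new_array == NULL) return 0;             /* mirrors the NULL/OOM check */
  if (*capacity != 0) memcpy(new_array, *array, *capacity * sizeof(uint32_t));
  free(*array);
  *array = new_array;
  *capacity = new_size;
  return 1;
}

int main(void) {
  uint32_t* a = NULL;
  size_t cap = 0;
  if (!ensure_capacity_u32(&a, &cap, 100)) return 1;  /* cap becomes 100 */
  if (!ensure_capacity_u32(&a, &cap, 150)) return 1;  /* cap doubles to 200 */
  free(a);
  return 0;
}
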
@@ -71,7 +71,7 @@ static void FN(InitBlockSplitter)(
*histograms_size = max_num_types;
*histograms = BROTLI_ALLOC(m, HistogramType, *histograms_size);
self->histograms_ = *histograms;
if (BROTLI_IS_OOM(m)) return;
if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(*histograms)) return;
/* Clear only current histogram. */
FN(HistogramClear)(&self->histograms_[0]);
self->last_histogram_ix_[0] = self->last_histogram_ix_[1] = 0;

@@ -23,7 +23,8 @@ typedef struct BrotliHasherParams {
typedef struct BrotliDistanceParams {
uint32_t distance_postfix_bits;
uint32_t num_direct_distance_codes;
uint32_t alphabet_size;
uint32_t alphabet_size_max;
uint32_t alphabet_size_limit;
size_t max_distance;
} BrotliDistanceParams;

@@ -33,6 +34,7 @@ typedef struct BrotliEncoderParams {
int quality;
int lgwin;
int lgblock;
size_t stream_offset;
size_t size_hint;
BROTLI_BOOL disable_literal_context_modeling;
BROTLI_BOOL large_window;

@@ -75,7 +75,7 @@ static BROTLI_INLINE void RingBufferInitBuffer(
uint8_t* new_data = BROTLI_ALLOC(
m, uint8_t, 2 + buflen + kSlackForEightByteHashingEverywhere);
size_t i;
if (BROTLI_IS_OOM(m)) return;
if (BROTLI_IS_OOM(m) || BROTLI_IS_NULL(new_data)) return;
if (rb->data_) {
memcpy(new_data, rb->data_,
2 + rb->cur_size_ + kSlackForEightByteHashingEverywhere);
@@ -125,6 +125,9 @@ static BROTLI_INLINE void RingBufferWrite(
later when we copy the last two bytes to the first two positions. */
rb->buffer_[rb->size_ - 2] = 0;
rb->buffer_[rb->size_ - 1] = 0;
/* Initialize tail; might be touched by "best_len++" optimization when
ring buffer is "full". */
rb->buffer_[rb->size_] = 241;
}
{
const size_t masked_pos = rb->pos_ & rb->mask_;

@@ -16,8 +16,6 @@
extern "C" {
#endif

/*#define BIT_WRITER_DEBUG */

/* This function writes bits into bytes in increasing addresses, and within
a byte least-significant-bit first.

@@ -28,7 +26,7 @@ extern "C" {

0000 0RRR 0000 0000 0000 0000

Now, we could write 5 or less bits in MSB by just sifting by 3
Now, we could write 5 or less bits in MSB by just shifting by 3
and OR'ing to BYTE-0.

For n bits, we take the last 5 bits, OR that with high bits in BYTE-0,
@@ -37,37 +35,41 @@ static BROTLI_INLINE void BrotliWriteBits(size_t n_bits,
uint64_t bits,
size_t* BROTLI_RESTRICT pos,
uint8_t* BROTLI_RESTRICT array) {
BROTLI_LOG(("WriteBits %2d 0x%08x%08x %10d\n", (int)n_bits,
(uint32_t)(bits >> 32), (uint32_t)(bits & 0xFFFFFFFF),
(int)*pos));
BROTLI_DCHECK((bits >> n_bits) == 0);
BROTLI_DCHECK(n_bits <= 56);
#if defined(BROTLI_LITTLE_ENDIAN)
/* This branch of the code can write up to 56 bits at a time,
7 bits are lost by being perhaps already in *p and at least
1 bit is needed to initialize the bit-stream ahead (i.e. if 7
bits are in *p and we write 57 bits, then the next write will
access a byte that was never initialized). */
uint8_t* p = &array[*pos >> 3];
uint64_t v = (uint64_t)(*p);  /* Zero-extend 8 to 64 bits. */
BROTLI_LOG(("WriteBits %2d 0x%08x%08x %10d\n", (int)n_bits,
(uint32_t)(bits >> 32), (uint32_t)(bits & 0xFFFFFFFF),
(int)*pos));
BROTLI_DCHECK((bits >> n_bits) == 0);
BROTLI_DCHECK(n_bits <= 56);
v |= bits << (*pos & 7);
BROTLI_UNALIGNED_STORE64LE(p, v);  /* Set some bits. */
*pos += n_bits;
{
uint8_t* p = &array[*pos >> 3];
uint64_t v = (uint64_t)(*p);  /* Zero-extend 8 to 64 bits. */
v |= bits << (*pos & 7);
BROTLI_UNALIGNED_STORE64LE(p, v);  /* Set some bits. */
*pos += n_bits;
}
#else
/* implicit & 0xFF is assumed for uint8_t arithmetics */
uint8_t* array_pos = &array[*pos >> 3];
const size_t bits_reserved_in_first_byte = (*pos & 7);
size_t bits_left_to_write;
bits <<= bits_reserved_in_first_byte;
*array_pos++ |= (uint8_t)bits;
for (bits_left_to_write = n_bits + bits_reserved_in_first_byte;
bits_left_to_write >= 9;
bits_left_to_write -= 8) {
bits >>= 8;
*array_pos++ = (uint8_t)bits;
{
uint8_t* array_pos = &array[*pos >> 3];
const size_t bits_reserved_in_first_byte = (*pos & 7);
size_t bits_left_to_write;
bits <<= bits_reserved_in_first_byte;
*array_pos++ |= (uint8_t)bits;
for (bits_left_to_write = n_bits + bits_reserved_in_first_byte;
bits_left_to_write >= 9;
bits_left_to_write -= 8) {
bits >>= 8;
*array_pos++ = (uint8_t)bits;
}
*array_pos = 0;
*pos += n_bits;
}
*array_pos = 0;
*pos += n_bits;
#endif
}

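Editor's note: the BrotliWriteBits rewrite above only adds block scoping and hoists the shared logging and assertions out of the per-branch code; the least-significant-bit-first write order is unchanged. A tiny worked example of that order (toy code, not the upstream fast path): writing value 3 as 2 bits and then value 5 as 3 bits into an empty buffer yields byte 0x17 (binary 10111).

#include <assert.h>
#include <stdint.h>
#include <stddef.h>
#include <string.h>

/* Set n_bits of `bits` into `array` starting at bit position *pos,
   least-significant bit first, then advance *pos. */
static void toy_write_bits(size_t n_bits, uint64_t bits,
                           size_t* pos, uint8_t* array) {
  size_t i;
  for (i = 0; i < n_bits; ++i) {
    size_t bit_pos = *pos + i;
    if ((bits >> i) & 1u) {
      array[bit_pos >> 3] |= (uint8_t)(1u << (bit_pos & 7));
    }
  }
  *pos += n_bits;
}

int main(void) {
  uint8_t buf[8];
  size_t pos = 0;
  memset(buf, 0, sizeof(buf));
  toy_write_bits(2, 0x3, &pos, buf);  /* bits 0..1 <- 11  */
  toy_write_bits(3, 0x5, &pos, buf);  /* bits 2..4 <- 101 */
  assert(buf[0] == 0x17);
  assert(pos == 5);
  return 0;
}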