chromium/third_party/lzma_sdk/C/LzFind.c

/* LzFind.c -- Match finder for LZ algorithms
2021-11-29 : Igor Pavlov : Public domain */

#include "Precomp.h"

#include <string.h>
// #include <stdio.h>

#include "CpuArch.h"
#include "LzFind.h"
#include "LzHash.h"

#define kBlockMoveAlign
#define kBlockSizeAlign
#define kBlockSizeReserveMin

#define kEmptyHashValue

#define kMaxValForNormalize
// #define kMaxValForNormalize ((UInt32)(1 << 20) + 0xFFF) // for debug

// #define kNormalizeAlign (1 << 7) // alignment for speculated accesses

#define GET_AVAIL_BYTES(p)


// #define kFix5HashSize (kHash2Size + kHash3Size + kHash4Size)
#define kFix5HashSize

/*
 HASH2_CALC:
   if (hv) matches, then cur[0] and cur[1] also match
*/
#define HASH2_CALC
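
/*
  Illustrative sketch only (the actual macro in the SDK may differ): a
  2-byte hash that reads both input bytes directly satisfies this contract,
  because equal hash values then imply equal cur[0] and cur[1].
  GetUi16 comes from CpuArch.h.

    #define HASH2_CALC  hv = GetUi16(cur);
*/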

// (crc[0 ... 255] & 0xFF) provides a one-to-one correspondence to [0 ... 255]
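// The crc[] table used by the hash macros is filled in MatchFinder_Construct().
// A sketch of the usual reflected CRC-32 table build (assuming kCrcPoly is the
// reflected polynomial 0xEDB88320; the actual code may differ):
//
//   for (i = 0; i < 256; i++)
//   {
//     UInt32 r = (UInt32)i;
//     unsigned j;
//     for (j = 0; j < 8; j++)
//       r = (r >> 1) ^ (kCrcPoly & ((UInt32)0 - (r & 1)));
//     p->crc[i] = r;
//   }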

/*
 HASH3_CALC:
   if (cur[0]) and (h2) match, then cur[1]            also matches
   if (cur[0]) and (hv) match, then cur[1] and cur[2] also match
*/
#define HASH3_CALC
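
/*
  Illustrative sketch only (the actual macro in the SDK may differ): a
  3-byte hash consistent with the contract above, where h2 depends on
  cur[0..1] only and hv additionally mixes in cur[2]. kHash2Size comes
  from LzHash.h.

    #define HASH3_CALC { \
      UInt32 temp = p->crc[cur[0]] ^ cur[1]; \
      h2 = temp & (kHash2Size - 1); \
      hv = (temp ^ ((UInt32)cur[2] << 8)) & p->hashMask; }
*/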

#define HASH4_CALC

#define HASH5_CALC

#define HASH_ZIP_CALC


static void LzInWindow_Free(CMatchFinder *p, ISzAllocPtr alloc)
{}


static int LzInWindow_Create2(CMatchFinder *p, UInt32 blockSize, ISzAllocPtr alloc)
{}

static const Byte *MatchFinder_GetPointerToCurrentPos(CMatchFinder *p) {}

static UInt32 MatchFinder_GetNumAvailableBytes(CMatchFinder *p) {}


MY_NO_INLINE
static void MatchFinder_ReadBlock(CMatchFinder *p)
{}



MY_NO_INLINE
void MatchFinder_MoveBlock(CMatchFinder *p)
{}

/* We call MoveBlock() before ReadBlock().
   So MoveBlock() can be a wasteful operation, if the whole input data
   can fit in the current block even without calling MoveBlock().
   In the important case where (dataSize <= historySize),
     the condition (p->blockSize > dataSize + p->keepSizeAfter) is met,
     so there is no MoveBlock() in that case.
*/
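
/* A sketch of the decision itself (approximation only; the field names
   follow CMatchFinder in LzFind.h and the real body may differ):

   int MatchFinder_NeedMove(CMatchFinder *p)
   {
     if (p->directInput)
       return 0;   // the whole input is already addressable; never move
     // move only when the space left between the current position and the
     // end of the allocated block no longer covers keepSizeAfter
     return ((size_t)(p->bufferBase + p->blockSize - p->buffer) <= p->keepSizeAfter);
   }
*/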

int MatchFinder_NeedMove(CMatchFinder *p)
{}

void MatchFinder_ReadIfRequired(CMatchFinder *p)
{}



static void MatchFinder_SetDefaultSettings(CMatchFinder *p)
{}

#define kCrcPoly

void MatchFinder_Construct(CMatchFinder *p)
{}

static void MatchFinder_FreeThisClassMemory(CMatchFinder *p, ISzAllocPtr alloc)
{}

void MatchFinder_Free(CMatchFinder *p, ISzAllocPtr alloc)
{}

static CLzRef* AllocRefs(size_t num, ISzAllocPtr alloc)
{}

#if (kBlockSizeReserveMin < kBlockSizeAlign * 2)
  #error Stop_Compiling_Bad_Reserve
#endif



static UInt32 GetBlockSize(CMatchFinder *p, UInt32 historySize)
{}


int MatchFinder_Create(CMatchFinder *p, UInt32 historySize,
    UInt32 keepAddBufferBefore, UInt32 matchMaxLen, UInt32 keepAddBufferAfter,
    ISzAllocPtr alloc)
{}


static void MatchFinder_SetLimits(CMatchFinder *p)
{}


void MatchFinder_Init_LowHash(CMatchFinder *p)
{}


void MatchFinder_Init_HighHash(CMatchFinder *p)
{}


void MatchFinder_Init_4(CMatchFinder *p)
{}


// (CYC_TO_POS_OFFSET == 0) is expected by some optimized code
#define CYC_TO_POS_OFFSET
// #define CYC_TO_POS_OFFSET 1 // for debug

void MatchFinder_Init(CMatchFinder *p)
{}


#if 0
#ifdef MY_CPU_X86_OR_AMD64
  #if defined(__clang__) && (__clang_major__ >= 8) \
    || defined(__GNUC__) && (__GNUC__ >= 8) \
    || defined(__INTEL_COMPILER) && (__INTEL_COMPILER >= 1900)
      #define USE_SATUR_SUB_128
      #define USE_AVX2
      #define ATTRIB_SSE41
      #define ATTRIB_AVX2
  #elif defined(_MSC_VER)
    #if (_MSC_VER >= 1600)
      #define USE_SATUR_SUB_128
      #if (_MSC_VER >= 1900)
        #define USE_AVX2
        #include <immintrin.h> // avx
      #endif
    #endif
  #endif

// #elif defined(MY_CPU_ARM_OR_ARM64)
#elif defined(MY_CPU_ARM64)

  #if defined(__clang__) && (__clang_major__ >= 8) \
    || defined(__GNUC__) && (__GNUC__ >= 8)
      #define USE_SATUR_SUB_128
    #ifdef MY_CPU_ARM64
      // #define ATTRIB_SSE41 __attribute__((__target__("")))
    #else
      // #define ATTRIB_SSE41 __attribute__((__target__("fpu=crypto-neon-fp-armv8")))
    #endif

  #elif defined(_MSC_VER)
    #if (_MSC_VER >= 1910)
      #define USE_SATUR_SUB_128
    #endif
  #endif

  #if defined(_MSC_VER) && defined(MY_CPU_ARM64)
    #include <arm64_neon.h>
  #else
    #include <arm_neon.h>
  #endif

#endif
#endif

/*
#ifndef ATTRIB_SSE41
  #define ATTRIB_SSE41
#endif
#ifndef ATTRIB_AVX2
  #define ATTRIB_AVX2
#endif
*/

#ifdef USE_SATUR_SUB_128

// #define _SHOW_HW_STATUS

#ifdef _SHOW_HW_STATUS
#include <stdio.h>
#define _PRF
#else
#define _PRF
#endif

#ifdef MY_CPU_ARM_OR_ARM64

#ifdef MY_CPU_ARM64
// #define FORCE_SATUR_SUB_128
#endif

typedef uint32x4_t v128;
#define SASUB_128

#else

#include <smmintrin.h> // sse4.1

typedef __m128i v128;
#define SASUB_128

#endif
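
/*
  Illustrative sketch of what SASUB_128(i) could expand to (the actual macro
  in the SDK may differ): a saturating subtract clamped at 0, computed as
  max(v, sub2) - sub2, so every reference older than subValue collapses to
  kEmptyHashValue (== 0).

  NEON:
    #define SASUB_128(i) \
      *(v128 *)(void *)(items + (i) * 4) = \
        vsubq_u32(vmaxq_u32(*(const v128 *)(const void *)(items + (i) * 4), sub2), sub2);

  SSE4.1:
    #define SASUB_128(i) \
      *(v128 *)(void *)(items + (i) * 4) = \
        _mm_sub_epi32(_mm_max_epu32(*(const v128 *)(const void *)(items + (i) * 4), sub2), sub2);
*/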



MY_NO_INLINE
static
#ifdef ATTRIB_SSE41
ATTRIB_SSE41
#endif
void
MY_FAST_CALL
LzFind_SaturSub_128(UInt32 subValue, CLzRef *items, const CLzRef *lim)
{
  v128 sub2 =
    #ifdef MY_CPU_ARM_OR_ARM64
      vdupq_n_u32(subValue);
    #else
      _mm_set_epi32((Int32)subValue, (Int32)subValue, (Int32)subValue, (Int32)subValue);
    #endif
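  // sub2 now holds subValue in all four 32-bit lanes; each iteration below
  // normalizes 4 * 4 = 16 CLzRef entries, so the caller must pass a range
  // whose length is a multiple of 16.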
  do
  {
    SASUB_128(0)
    SASUB_128(1)
    SASUB_128(2)
    SASUB_128(3)
    items += 4 * 4;
  }
  while (items != lim);
}



#ifdef USE_AVX2

#include <immintrin.h> // avx

#define SASUB_256
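
/*
  Illustrative sketch only (the actual macro in the SDK may differ); the
  same max-then-subtract clamp as SASUB_128, but over 8 lanes with AVX2:

    #define SASUB_256(i) \
      *(__m256i *)(void *)(items + (i) * 8) = \
        _mm256_sub_epi32(_mm256_max_epu32(*(const __m256i *)(const void *)(items + (i) * 8), sub2), sub2);
*/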

MY_NO_INLINE
static
#ifdef ATTRIB_AVX2
ATTRIB_AVX2
#endif
void
MY_FAST_CALL
LzFind_SaturSub_256(UInt32 subValue, CLzRef *items, const CLzRef *lim)
{
  __m256i sub2 = _mm256_set_epi32(
      (Int32)subValue, (Int32)subValue, (Int32)subValue, (Int32)subValue,
      (Int32)subValue, (Int32)subValue, (Int32)subValue, (Int32)subValue);
  do
  {
    SASUB_256(0)
    SASUB_256(1)
    items += 2 * 8;
  }
  while (items != lim);
}
#endif // USE_AVX2

#ifndef FORCE_SATUR_SUB_128
typedef void (MY_FAST_CALL *LZFIND_SATUR_SUB_CODE_FUNC)(
    UInt32 subValue, CLzRef *items, const CLzRef *lim);
static LZFIND_SATUR_SUB_CODE_FUNC g_LzFind_SaturSub;
#endif // FORCE_SATUR_SUB_128

#endif // USE_SATUR_SUB_128


// kEmptyHashValue must be zero
// #define SASUB_32(i) v = items[i];  m = v - subValue;  if (v < subValue) m = kEmptyHashValue;  items[i] = m;
#define SASUB_32(i)
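// Note: the "must be zero" requirement above is what allows the SIMD paths
// to clamp with max(v, subValue) - subValue: any stale reference smaller
// than subValue becomes exactly 0, i.e. kEmptyHashValue, with no extra
// compare or blend.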

#ifdef FORCE_SATUR_SUB_128

#define DEFAULT_SaturSub

#else

#define DEFAULT_SaturSub

MY_NO_INLINE
static
void
MY_FAST_CALL
LzFind_SaturSub_32(UInt32 subValue, CLzRef *items, const CLzRef *lim)
{}

#endif


MY_NO_INLINE
void MatchFinder_Normalize3(UInt32 subValue, CLzRef *items, size_t numItems)
{}



// call MatchFinder_CheckLimits() only after (p->pos++) update

MY_NO_INLINE
static void MatchFinder_CheckLimits(CMatchFinder *p)
{}


/*
  (lenLimit > maxLen)
*/
MY_FORCE_INLINE
static UInt32 * Hc_GetMatchesSpec(size_t lenLimit, UInt32 curMatch, UInt32 pos, const Byte *cur, CLzRef *son,
    size_t _cyclicBufferPos, UInt32 _cyclicBufferSize, UInt32 cutValue,
    UInt32 *d, unsigned maxLen)
{}


MY_FORCE_INLINE
UInt32 * GetMatchesSpec1(UInt32 lenLimit, UInt32 curMatch, UInt32 pos, const Byte *cur, CLzRef *son,
    size_t _cyclicBufferPos, UInt32 _cyclicBufferSize, UInt32 cutValue,
    UInt32 *d, UInt32 maxLen)
{}


static void SkipMatchesSpec(UInt32 lenLimit, UInt32 curMatch, UInt32 pos, const Byte *cur, CLzRef *son,
    size_t _cyclicBufferPos, UInt32 _cyclicBufferSize, UInt32 cutValue)
{}


#define MOVE_POS

#define MOVE_POS_RET

MY_NO_INLINE
static void MatchFinder_MovePos(CMatchFinder *p)
{}

#define GET_MATCHES_HEADER2(minLen, ret_op)

#define GET_MATCHES_HEADER(minLen)
#define SKIP_HEADER(minLen)

#define MF_PARAMS(p)

#define SKIP_FOOTER

#define GET_MATCHES_FOOTER_BASE(_maxLen_, func)

#define GET_MATCHES_FOOTER_BT(_maxLen_)

#define GET_MATCHES_FOOTER_HC(_maxLen_)



#define UPDATE_maxLen

static UInt32* Bt2_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{}

UInt32* Bt3Zip_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{}


#define SET_mmm


static UInt32* Bt3_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{}


static UInt32* Bt4_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{}


static UInt32* Bt5_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{}


static UInt32* Hc4_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{}


static UInt32 * Hc5_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{}


UInt32* Hc3Zip_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{}


static void Bt2_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{}

void Bt3Zip_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{}

static void Bt3_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{}

static void Bt4_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{}

static void Bt5_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{}


#define HC_SKIP_HEADER(minLen)


#define HC_SKIP_FOOTER


static void Hc4_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{}


static void Hc5_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{}


void Hc3Zip_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{}


void MatchFinder_CreateVTable(CMatchFinder *p, IMatchFinder2 *vTable)
{}
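
/* Illustrative caller sketch (hypothetical driver loop; the member names
   assume the IMatchFinder2 layout declared in LzFind.h, and "p" is a
   CMatchFinder prepared with MatchFinder_Create()):

   IMatchFinder2 mf;
   MatchFinder_CreateVTable(p, &mf);
   mf.Init(p);
   while (mf.GetNumAvailableBytes(p) != 0)
   {
     UInt32 matches[128];                     // size chosen for illustration
     const UInt32 *end = mf.GetMatches(p, matches);
     // entries from matches up to end describe (length, distance) pairs for
     // the current position; encode them, then advance with mf.Skip(p, num).
   }
*/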



void LzFindPrepare(void)
{}