author    Henning Baldersheim <balder@yahoo-inc.com>  2021-04-30 14:35:44 +0200
committer GitHub <noreply@github.com>                 2021-04-30 14:35:44 +0200
commit    8e2478b8965bbd29709957e2c4fc37e8333a59e5 (patch)
tree      aed17b87ec54b0faa72fc3fb3e5e4154d2f908ac /searchlib
parent    34347787e234ce49a46c788ff29642e5e0a5c2b7 (diff)
parent    516c81d880e8a4d170c64d46bc0674a6ce3c00fa (diff)
Merge pull request #17681 from vespa-engine/toregge/avoid-intel-asm-on-arm
Avoid intel asm when not using x86_64 platform.
Diffstat (limited to 'searchlib')
-rw-r--r--  searchlib/src/vespa/searchlib/bitcompression/compression.cpp  17
-rw-r--r--  searchlib/src/vespa/searchlib/bitcompression/compression.h    42
2 files changed, 3 insertions, 56 deletions
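
For context (not part of the commit itself): the merge swaps the hand-written x86_64 inline assembly (bswap, bsrq, bsfq) and the 64 KiB _log2Table lookup fallback for the GCC/Clang builtins __builtin_bswap64, __builtin_clzl and __builtin_ctzl, which compile to the same single instructions on x86_64 and to the corresponding native instructions on AArch64. Below is a minimal standalone sketch of that equivalence, assuming nonzero inputs; it is illustrative only and not taken from the Vespa sources (the names bswap, asmlog2 and ffsl merely mirror the member functions in the diff, and the ...ll builtin variants are used so the sketch does not depend on sizeof(long), whereas the patch itself uses the l variants, which are 64-bit on the LP64 targets Vespa builds for).

    #include <cstdint>
    #include <cassert>

    namespace sketch {
    // Portable counterparts of the removed x86_64 inline assembly.
    inline uint64_t bswap(uint64_t val) { return __builtin_bswap64(val); }  // was: __asm__("bswap %0" ...)
    inline uint32_t asmlog2(uint64_t x) { return 63 - __builtin_clzll(x); } // was: bsrq, or _log2Table fallback
    inline uint64_t ffsl(uint64_t x)    { return __builtin_ctzll(x); }      // was: bsfq
    }

    int main() {
        assert(sketch::bswap(0x0123456789abcdefULL) == 0xefcdab8967452301ULL);
        assert(sketch::asmlog2(1) == 0 && sketch::asmlog2(UINT64_C(1) << 63) == 63);
        assert(sketch::ffsl(UINT64_C(1) << 4) == 4);
        return 0;
    }
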
diff --git a/searchlib/src/vespa/searchlib/bitcompression/compression.cpp b/searchlib/src/vespa/searchlib/bitcompression/compression.cpp
index 21ce8dc6c06..62ae8854880 100644
--- a/searchlib/src/vespa/searchlib/bitcompression/compression.cpp
+++ b/searchlib/src/vespa/searchlib/bitcompression/compression.cpp
@@ -10,23 +10,6 @@
namespace search::bitcompression {
-uint8_t CodingTables::_log2Table[64_Ki];
-
-CodingTables tables; // Static initializer
-
-CodingTables::CodingTables()
-{
- unsigned int x;
- uint8_t log2Val;
-
- for (x = 0; x < 64_Ki; x++) {
- unsigned int val = x;
- for (log2Val = 0; (val >>= 1) != 0; log2Val++) {
- }
- _log2Table[x] = log2Val;
- }
-}
-
uint64_t CodingTables::_intMask64[65] =
{
(UINT64_C(1) << 0) - 1, (UINT64_C(1) << 1) - 1,
diff --git a/searchlib/src/vespa/searchlib/bitcompression/compression.h b/searchlib/src/vespa/searchlib/bitcompression/compression.h
index e6b171ec871..48b12c193ed 100644
--- a/searchlib/src/vespa/searchlib/bitcompression/compression.h
+++ b/searchlib/src/vespa/searchlib/bitcompression/compression.h
@@ -30,9 +30,6 @@ private:
int _bitOffset;
};
-// Use inline assembly for asmlog2 calculations
-#define DO_ASMLOG
-
/*
* The so-called rice2 code is very similar to the well known exp
* golomb code. One difference is that the first bits are inverted.
@@ -82,11 +79,8 @@ private:
class CodingTables
{
public:
- static uint8_t _log2Table[65536];
static uint64_t _intMask64[65];
static uint64_t _intMask64le[65];
-
- CodingTables();
};
#define UC64_DECODECONTEXT(prefix) \
@@ -933,8 +927,7 @@ template <>
inline uint64_t
EncodeContext64EBase<true>::bswap(uint64_t val)
{
- __asm__("bswap %0" : "=r" (val) : "0" (val));
- return val;
+ return __builtin_bswap64(val);
}
@@ -963,42 +956,13 @@ public:
static inline uint32_t
asmlog2(uint64_t x)
{
- uint64_t retVal;
-
-#if (defined(__x86_64__)) && defined(DO_ASMLOG)
- __asm("bsrq %1,%0" : "=r" (retVal) : "r" (x));
-
-#else
- register uint64_t lower = x;
- uint32_t upper32 = lower >> 32;
- if (upper32 != 0) {
- uint32_t upper16 = upper32 >> 16;
- if (upper16 != 0) {
- retVal = 48 + CodingTables::_log2Table[upper16];
- } else {
- retVal = 32 + CodingTables::_log2Table[upper32];
- }
- } else {
- uint32_t lower32 = static_cast<uint32_t>(x);
- uint32_t upper16 = lower32 >> 16;
-
- if (upper16 != 0) {
- retVal = 16 + CodingTables::_log2Table[upper16];
- } else {
- retVal = CodingTables::_log2Table[lower32];
- }
- }
-#endif
-
- return retVal;
+ return sizeof(uint64_t) * 8 - 1 - __builtin_clzl(x);
}
static inline uint64_t
ffsl(uint64_t x)
{
- uint64_t retVal;
- __asm("bsfq %1,%0" : "=r" (retVal) : "r" (x));
- return retVal;
+ return __builtin_ctzl(x);
}
/**
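
One caveat worth noting (an observation, not part of the commit): bsrq and bsfq leave the destination register undefined for a zero operand, and __builtin_clzl/__builtin_ctzl are likewise documented as undefined for x == 0, so the change preserves the existing precondition that callers pass a nonzero value. A hypothetical guard, only needed if a caller could not guarantee that:

    // Hypothetical helper, not in the patch: gives a defined result for x == 0 as well.
    inline uint64_t safe_ffsl(uint64_t x) {
        return (x != 0) ? __builtin_ctzll(x) : 64;  // 64 as a "no bit set" sentinel
    }
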