Disable VAES in 32-bit builds

SChernykh 2021-11-29 09:32:00 +01:00
parent a64f4d1870
commit 6db480a1ab
3 changed files with 45 additions and 10 deletions
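
The change applies the same pattern at every VAES call site in CryptoNight_x86.h: a compile-time guard (XMRIG_VAES, defined only when the new WITH_VAES CMake option is ON, and forced OFF for 32-bit / non-x86_64 targets) around the existing runtime Cpu::info()->hasVAES() check. Below is a minimal, self-contained sketch of that guard pattern; cpu_has_vaes() and explode_scratchpad() are hypothetical stand-ins for illustration, not xmrig's real functions.

    #include <cstdio>

    // Stand-in for Cpu::info()->hasVAES(); the real code queries the CPU at runtime.
    static bool cpu_has_vaes()
    {
    #if defined(__VAES__)
        return true;                 // compiler was already told the target supports VAES
    #else
        return false;
    #endif
    }

    static void explode_scratchpad()
    {
    #ifdef XMRIG_VAES                // defined only when WITH_VAES=ON on a 64-bit x86 build
        if (cpu_has_vaes()) {
            std::puts("VAES fast path");       // stand-in for cn_explode_scratchpad_vaes<ALGO>(ctx)
            return;
        }
    #endif
        std::puts("SSE2 / soft-AES fallback"); // path taken on 32-bit builds and older CPUs
    }

    int main()
    {
        explode_scratchpad();
        return 0;
    }

With WITH_VAES forced OFF on 32-bit targets, the VAES branches and the CryptoNight_x86_vaes.* sources are excluded from the build entirely; on 64-bit builds VAES can still be disabled manually by configuring with -DWITH_VAES=OFF.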


@@ -28,6 +28,7 @@ option(WITH_STRICT_CACHE "Enable strict checks for OpenCL cache" ON)
 option(WITH_INTERLEAVE_DEBUG_LOG "Enable debug log for threads interleave" OFF)
 option(WITH_PROFILING "Enable profiling for developers" OFF)
 option(WITH_SSE4_1 "Enable SSE 4.1 for Blake2" ON)
+option(WITH_VAES "Enable VAES instructions for Cryptonight" ON)
 option(WITH_BENCHMARK "Enable builtin RandomX benchmark and stress test" ON)
 option(WITH_SECURE_JIT "Enable secure access to JIT memory" OFF)
 option(WITH_DMI "Enable DMI/SMBIOS reader" ON)
@@ -133,7 +134,8 @@ if (CMAKE_C_COMPILER_ID MATCHES GNU)
     set_source_files_properties(src/crypto/cn/CnHash.cpp PROPERTIES COMPILE_FLAGS "-Ofast -fno-tree-vectorize")
 endif()
-if (NOT XMRIG_ARM)
+if (WITH_VAES)
+    add_definitions(-DXMRIG_VAES)
     set(HEADERS_CRYPTO "${HEADERS_CRYPTO}" src/crypto/cn/CryptoNight_x86_vaes.h)
     set(SOURCES_CRYPTO "${SOURCES_CRYPTO}" src/crypto/cn/CryptoNight_x86_vaes.cpp)
     if (CMAKE_C_COMPILER_ID MATCHES GNU)


@@ -13,6 +13,7 @@ if (XMRIG_64_BIT AND CMAKE_SYSTEM_PROCESSOR MATCHES "^(x86_64|AMD64)$")
     add_definitions(-DRAPIDJSON_SSE2)
 else()
     set(WITH_SSE4_1 OFF)
+    set(WITH_VAES OFF)
 endif()
 if (NOT ARM_TARGET)


@@ -40,10 +40,14 @@
 #include "crypto/cn/CnAlgo.h"
 #include "crypto/cn/CryptoNight_monero.h"
 #include "crypto/cn/CryptoNight.h"
-#include "crypto/cn/CryptoNight_x86_vaes.h"
 #include "crypto/cn/soft_aes.h"
+#ifdef XMRIG_VAES
+# include "crypto/cn/CryptoNight_x86_vaes.h"
+#endif
 extern "C"
 {
 #include "crypto/cn/c_groestl.h"
@@ -290,10 +294,12 @@ static NOINLINE void cn_explode_scratchpad(cryptonight_ctx *ctx)
 {
     constexpr CnAlgo<ALGO> props;
+# ifdef XMRIG_VAES
     if (!SOFT_AES && !props.isHeavy() && Cpu::info()->hasVAES()) {
         cn_explode_scratchpad_vaes<ALGO>(ctx);
         return;
     }
+# endif
     constexpr size_t N = (props.memory() / sizeof(__m128i)) / (props.half_mem() ? 2 : 1);
@@ -402,10 +408,12 @@ static NOINLINE void cn_implode_scratchpad(cryptonight_ctx *ctx)
 {
     constexpr CnAlgo<ALGO> props;
+# ifdef XMRIG_VAES
     if (!SOFT_AES && !props.isHeavy() && Cpu::info()->hasVAES()) {
         cn_implode_scratchpad_vaes<ALGO>(ctx);
         return;
     }
+# endif
     constexpr bool IS_HEAVY = props.isHeavy();
     constexpr size_t N = (props.memory() / sizeof(__m128i)) / (props.half_mem() ? 2 : 1);
@@ -1008,10 +1016,13 @@ inline void cryptonight_double_hash_asm(const uint8_t *__restrict__ input, size_
         ctx[1]->first_half = true;
     }
+# ifdef XMRIG_VAES
     if (!props.isHeavy() && Cpu::info()->hasVAES()) {
         cn_explode_scratchpad_vaes_double<ALGO>(ctx[0], ctx[1]);
     }
-    else {
+    else
+# endif
+    {
         cn_explode_scratchpad<ALGO, false, 0>(ctx[0]);
         cn_explode_scratchpad<ALGO, false, 0>(ctx[1]);
     }
@@ -1053,10 +1064,13 @@ inline void cryptonight_double_hash_asm(const uint8_t *__restrict__ input, size_
         ctx[0]->generated_code(ctx);
     }
+# ifdef XMRIG_VAES
     if (!props.isHeavy() && Cpu::info()->hasVAES()) {
         cn_implode_scratchpad_vaes_double<ALGO>(ctx[0], ctx[1]);
     }
-    else {
+    else
+# endif
+    {
         cn_implode_scratchpad<ALGO, false, 0>(ctx[0]);
         cn_implode_scratchpad<ALGO, false, 0>(ctx[1]);
     }
@@ -1115,10 +1129,13 @@ inline void cryptonight_double_hash(const uint8_t *__restrict__ input, size_t si
         ctx[1]->first_half = true;
     }
+# ifdef XMRIG_VAES
     if (!SOFT_AES && !props.isHeavy() && Cpu::info()->hasVAES()) {
         cn_explode_scratchpad_vaes_double<ALGO>(ctx[0], ctx[1]);
     }
-    else {
+    else
+# endif
+    {
         cn_explode_scratchpad<ALGO, SOFT_AES, 0>(ctx[0]);
         cn_explode_scratchpad<ALGO, SOFT_AES, 0>(ctx[1]);
     }
@@ -1316,10 +1333,13 @@ inline void cryptonight_double_hash(const uint8_t *__restrict__ input, size_t si
         bx10 = cx1;
     }
+# ifdef XMRIG_VAES
     if (!SOFT_AES && !props.isHeavy() && Cpu::info()->hasVAES()) {
         cn_implode_scratchpad_vaes_double<ALGO>(ctx[0], ctx[1]);
     }
-    else {
+    else
+# endif
+    {
         cn_implode_scratchpad<ALGO, SOFT_AES, 0>(ctx[0]);
         cn_implode_scratchpad<ALGO, SOFT_AES, 0>(ctx[1]);
     }
@@ -1383,11 +1403,14 @@ void cryptonight_quad_hash_zen(const uint8_t* __restrict__ input, size_t size, u
         ctx[3]->first_half = true;
     }
+# ifdef XMRIG_VAES
     if (!SOFT_AES && !props.isHeavy() && Cpu::info()->hasVAES()) {
         cn_explode_scratchpad_vaes_double<ALGO>(ctx[0], ctx[1]);
         cn_explode_scratchpad_vaes_double<ALGO>(ctx[2], ctx[3]);
     }
-    else {
+    else
+# endif
+    {
         cn_explode_scratchpad<ALGO, SOFT_AES, 0>(ctx[0]);
         cn_explode_scratchpad<ALGO, SOFT_AES, 0>(ctx[1]);
         cn_explode_scratchpad<ALGO, SOFT_AES, 0>(ctx[2]);
@@ -1513,11 +1536,14 @@ void cryptonight_quad_hash_zen(const uint8_t* __restrict__ input, size_t size, u
         if (!SOFT_AES) cx3 = _mm_load_si128(reinterpret_cast<const __m128i*>(&l3[idx3 & MASK]));
     }
+# ifdef XMRIG_VAES
     if (!SOFT_AES && !props.isHeavy() && Cpu::info()->hasVAES()) {
         cn_implode_scratchpad_vaes_double<ALGO>(ctx[0], ctx[1]);
         cn_implode_scratchpad_vaes_double<ALGO>(ctx[2], ctx[3]);
     }
-    else {
+    else
+# endif
+    {
         cn_implode_scratchpad<ALGO, SOFT_AES, 0>(ctx[0]);
         cn_implode_scratchpad<ALGO, SOFT_AES, 0>(ctx[1]);
         cn_implode_scratchpad<ALGO, SOFT_AES, 0>(ctx[2]);
@@ -1761,11 +1787,14 @@ inline void cryptonight_quad_hash(const uint8_t *__restrict__ input, size_t size
         }
     }
+# ifdef XMRIG_VAES
     if (!SOFT_AES && !props.isHeavy() && Cpu::info()->hasVAES()) {
         cn_explode_scratchpad_vaes_double<ALGO>(ctx[0], ctx[1]);
         cn_explode_scratchpad_vaes_double<ALGO>(ctx[2], ctx[3]);
     }
-    else {
+    else
+# endif
+    {
         cn_explode_scratchpad<ALGO, SOFT_AES, 0>(ctx[0]);
         cn_explode_scratchpad<ALGO, SOFT_AES, 0>(ctx[1]);
         cn_explode_scratchpad<ALGO, SOFT_AES, 0>(ctx[2]);
@@ -1821,11 +1850,14 @@ inline void cryptonight_quad_hash(const uint8_t *__restrict__ input, size_t size
         CN_STEP4(3, ax3, bx30, bx31, cx3, l3, mc3, ptr3, idx3);
     }
+# ifdef XMRIG_VAES
     if (!SOFT_AES && !props.isHeavy() && Cpu::info()->hasVAES()) {
         cn_implode_scratchpad_vaes_double<ALGO>(ctx[0], ctx[1]);
         cn_implode_scratchpad_vaes_double<ALGO>(ctx[2], ctx[3]);
     }
-    else {
+    else
+# endif
+    {
         cn_implode_scratchpad<ALGO, SOFT_AES, 0>(ctx[0]);
         cn_implode_scratchpad<ALGO, SOFT_AES, 0>(ctx[1]);
         cn_implode_scratchpad<ALGO, SOFT_AES, 0>(ctx[2]);