Try to allocate the scratchpad from the dataset's 1 GB huge pages if normal huge pages are not available

SChernykh 2020-07-31 13:37:22 +02:00
parent 838cc08680
commit abb78302b8
5 changed files with 37 additions and 1 deletion
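
Why there is spare room at all: a 1 GB-page mapping is rounded up to whole gigabyte pages, so after the ~2.03 GiB dataset and the cache headroom a sizeable tail is left over in the same allocation. A back-of-the-envelope sketch follows; the hard-coded sizes are approximations standing in for maxSize(), RANDOMX_CACHE_MAX_SIZE, RANDOMX_SCRATCHPAD_L3_MAX_SIZE and VirtualMemory::capacity(), not values taken from this commit.

#include <cstddef>
#include <cstdio>

// Back-of-the-envelope arithmetic only; the real code derives these values
// from maxSize(), RANDOMX_CACHE_MAX_SIZE and VirtualMemory::capacity().
int main()
{
    constexpr std::size_t oneGb          = 1ULL << 30;
    constexpr std::size_t datasetSize    = 2181038080ULL;  // ~2.03 GiB RandomX dataset (approximate)
    constexpr std::size_t cacheHeadroom  = 256ULL << 20;   // stands in for RANDOMX_CACHE_MAX_SIZE
    constexpr std::size_t scratchpadSize = 2ULL << 20;     // stands in for RANDOMX_SCRATCHPAD_L3_MAX_SIZE

    // A 1 GB-page allocation is rounded up to whole pages: 3 GiB here.
    constexpr std::size_t capacity = ((datasetSize + oneGb - 1) / oneGb) * oneGb;

    // Mirrors the diff: scratchpads are handed out from dataset + cache headroom
    // up to the allocation's capacity.
    constexpr std::size_t spare = capacity - (datasetSize + cacheHeadroom);

    std::printf("spare: %zu MiB -> %zu scratchpads\n",
                spare >> 20, spare / scratchpadSize);   // ~736 MiB -> ~368 scratchpads
}

With those illustrative numbers roughly 368 scratchpads of 2 MiB fit behind the dataset, far more than typical worker-thread counts need.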

src/crypto/rx/RxDataset.cpp

@@ -193,6 +193,12 @@ void xmrig::RxDataset::allocate(bool hugePages, bool oneGbPages)
     }
 
     m_memory = new VirtualMemory(maxSize(), hugePages, oneGbPages, false, m_node);
+
+    if (m_memory->isOneGbPages()) {
+        m_scratchpadOffset = maxSize() + RANDOMX_CACHE_MAX_SIZE;
+        m_scratchpadLimit = m_memory->capacity();
+    }
+
     m_dataset = randomx_create_dataset(m_memory->raw());
 
 # ifdef XMRIG_OS_LINUX
@@ -201,3 +207,19 @@ void xmrig::RxDataset::allocate(bool hugePages, bool oneGbPages)
     }
 # endif
 }
+
+
+uint8_t* xmrig::RxDataset::tryAllocateScrathpad()
+{
+    uint8_t* p = reinterpret_cast<uint8_t*>(raw());
+    if (!p) {
+        return nullptr;
+    }
+
+    const size_t offset = m_scratchpadOffset.fetch_add(RANDOMX_SCRATCHPAD_L3_MAX_SIZE);
+    if (offset + RANDOMX_SCRATCHPAD_L3_MAX_SIZE > m_scratchpadLimit) {
+        return nullptr;
+    }
+
+    return p + offset;
+}
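
For context, a minimal sketch of how a worker thread might consume the new call. Only RxDataset::tryAllocateScrathpad() and RANDOMX_SCRATCHPAD_L3_MAX_SIZE come from the diff; the header path, the pickScratchpad() helper and the plain aligned_alloc fallback are assumptions for illustration (xmrig's own worker would fall back to a VirtualMemory allocation instead).

#include <cstdint>
#include <cstdlib>

#include "crypto/rx/RxDataset.h"   // header path assumed; declares tryAllocateScrathpad()

// Illustrative helper, not the worker-side change from this commit:
// try the spare tail of the dataset's 1 GB-page mapping first; if it is
// exhausted (or 1 GB pages are not in use), fall back to a private allocation.
static uint8_t *pickScratchpad(xmrig::RxDataset *dataset)
{
    if (uint8_t *p = dataset->tryAllocateScrathpad()) {
        return p;   // RANDOMX_SCRATCHPAD_L3_MAX_SIZE bytes carved out behind the dataset
    }

    // Fallback: ordinary aligned allocation (xmrig would use VirtualMemory here).
    return static_cast<uint8_t *>(std::aligned_alloc(64, RANDOMX_SCRATCHPAD_L3_MAX_SIZE));
}

Because m_scratchpadOffset is bumped with an atomic fetch_add, several workers can race on this call; whoever pushes the offset past m_scratchpadLimit (and everyone after) simply gets nullptr and takes the fallback path.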