diff --git a/primitives/primproc/primproc.cpp b/primitives/primproc/primproc.cpp
index 672659d1d..ad3459081 100644
--- a/primitives/primproc/primproc.cpp
+++ b/primitives/primproc/primproc.cpp
@@ -220,7 +220,7 @@ class QszMonThd
 };
 #endif
 
-#define DUMP_CACHE_CONTENTS
+// #define DUMP_CACHE_CONTENTS
 #ifdef DUMP_CACHE_CONTENTS
 void* waitForSIGUSR1(void* p)
 {
diff --git a/tests/counting_allocator.cpp b/tests/counting_allocator.cpp
index 94d69690d..c3f3571d9 100644
--- a/tests/counting_allocator.cpp
+++ b/tests/counting_allocator.cpp
@@ -17,25 +17,28 @@
 #include <atomic>
 #include <memory>
 #include <thread>
+#include <optional>
 #include <vector>
 #include <gtest/gtest.h>
+#include <boost/smart_ptr/allocate_shared_array.hpp>
+
 #include "countingallocator.h"
-#include "rowgroup.h"
 
 using namespace allocators;
 
 // Example class to be managed by the allocator
 struct TestClass
 {
-  int value;
+  int value[1024];
   TestClass(int val) : value{val}
   {
   }
 };
 
-static const constexpr int64_t MemoryAllowance = 10 * 1024 * 1024;
+static const constexpr int64_t MemoryAllowance = 1 * 1024 * 1024;
+static const constexpr int64_t MemoryLimitStep = MemoryAllowance / 100;
 
 // Test Fixture for AtomicCounterAllocator
 class CountingAllocatorTest : public ::testing::Test
@@ -49,7 +52,8 @@ class CountingAllocatorTest : public ::testing::Test
 
   // Constructor
   CountingAllocatorTest()
-   : allocatedMemory(MemoryAllowance), allocator(&allocatedMemory, MemoryAllowance / 100)
+   : allocatedMemory(MemoryAllowance)
+   , allocator(&allocatedMemory, MemoryAllowance / 100, MemoryAllowance / 100)
   {
   }
@@ -63,7 +67,15 @@ TEST_F(CountingAllocatorTest, Allocation)
   const std::size_t numObjects = 5;
   TestClass* ptr = allocator.allocate(numObjects);
   EXPECT_NE(ptr, nullptr);
-  EXPECT_EQ(allocatedMemory.load(), MemoryAllowance - numObjects * static_cast<int64_t>(sizeof(TestClass)));
+  if (MemoryLimitStep > numObjects * static_cast<int64_t>(sizeof(TestClass)))
+  {
+    EXPECT_EQ(allocatedMemory.load() - allocator.getCurrentLocalMemoryUsage(),
+              MemoryAllowance - numObjects * static_cast<int64_t>(sizeof(TestClass)));
+  }
+  else
+  {
+    EXPECT_EQ(allocatedMemory.load(), MemoryAllowance - numObjects * static_cast<int64_t>(sizeof(TestClass)));
+  }
   allocator.deallocate(ptr, numObjects);
 }
 
@@ -72,8 +84,15 @@ TEST_F(CountingAllocatorTest, Deallocation)
 {
   const std::size_t numObjects = 3;
   TestClass* ptr = allocator.allocate(numObjects);
-  EXPECT_EQ(allocatedMemory.load(), MemoryAllowance - numObjects * static_cast<int64_t>(sizeof(TestClass)));
-
+  if (MemoryLimitStep > numObjects * static_cast<int64_t>(sizeof(TestClass)))
+  {
+    EXPECT_EQ(allocatedMemory.load() - allocator.getCurrentLocalMemoryUsage(),
+              MemoryAllowance - numObjects * static_cast<int64_t>(sizeof(TestClass)));
+  }
+  else
+  {
+    EXPECT_EQ(allocatedMemory.load(), MemoryAllowance - numObjects * static_cast<int64_t>(sizeof(TestClass)));
+  }
   allocator.deallocate(ptr, numObjects);
   EXPECT_EQ(allocatedMemory.load(), MemoryAllowance);
 }
@@ -94,34 +113,42 @@ TEST_F(CountingAllocatorTest, AllocateSharedUsesAllocator)
 {
   // Create a shared_ptr using allocate_shared with the custom allocator
-  std::shared_ptr<TestClass> ptr = std::allocate_shared<TestClass>(allocator, 100);
+  CountingAllocator<TestClass> allocatorSmallerStep(&allocatedMemory, MemoryAllowance / 100,
+                                                    MemoryAllowance / 1000);
+  std::shared_ptr<TestClass> ptr1 = std::allocate_shared<TestClass>(allocatorSmallerStep, 100);
+  std::shared_ptr<TestClass> ptr2 = std::allocate_shared<TestClass>(allocatorSmallerStep, 100);
+  std::shared_ptr<TestClass> ptr3 = std::allocate_shared<TestClass>(allocatorSmallerStep, 100);
 
   // Check that the counter has increased by the size of TestClass plus control block
   // Exact size depends on the implementation, so we verify it's at least sizeof(TestClass)
-  EXPECT_LE(allocatedMemory.load(), MemoryAllowance - static_cast<int64_t>(sizeof(TestClass)));
+  EXPECT_LE(allocatedMemory.load(), MemoryAllowance - 3 * static_cast<int64_t>(sizeof(TestClass)));
 
   // Reset the shared_ptr and check that the counter decreases appropriately
-  ptr.reset();
+  ptr1.reset();
+  ptr2.reset();
+  ptr3.reset();
 
   // After deallocation, the counter should return to zero
   EXPECT_EQ(allocatedMemory.load(), MemoryAllowance);
 
-  auto deleter = [this](TestClass* ptr) { this->allocator.deallocate(ptr, 1); };
-  ptr.reset(allocator.allocate(1), deleter);
-  EXPECT_LE(allocatedMemory.load(), MemoryAllowance - static_cast<int64_t>(sizeof(TestClass)));
+  // auto deleter = [&allocatorSmallerStep](TestClass* ptr) { allocatorSmallerStep.deallocate(ptr, 1); };
+  // ptr1.reset(allocatorSmallerStep.allocate(3), deleter);
+  // EXPECT_LE(allocatedMemory.load(), MemoryAllowance - static_cast<int64_t>(sizeof(TestClass)));
 
-  ptr.reset();
+  ptr1.reset();
   EXPECT_EQ(allocatedMemory.load(), MemoryAllowance);
 
-  size_t allocSize = 16ULL * rowgroup::rgCommonSize;
-  auto buf = boost::allocate_shared<rowgroup::RGDataBufType>(allocator, allocSize);
+  using RGDataBufType = uint8_t[];
+  size_t allocSize = 16ULL * 8192;
+  auto buf = boost::allocate_shared<RGDataBufType>(allocator, allocSize);
+  EXPECT_LE(allocatedMemory.load(), MemoryAllowance - allocSize);
 
   buf.reset();
   EXPECT_EQ(allocatedMemory.load(), MemoryAllowance);
 
-  CountingAllocator<rowgroup::RGDataBufType> allocator1(&allocatedMemory, MemoryAllowance / 100);
-  std::optional<CountingAllocator<rowgroup::RGDataBufType>> allocator2(allocator1);
-  auto buf1 = boost::allocate_shared<rowgroup::RGDataBufType>(*allocator2, allocSize);
+  CountingAllocator<RGDataBufType> allocator1(&allocatedMemory, MemoryAllowance / 100, MemoryAllowance / 100);
+  std::optional<CountingAllocator<RGDataBufType>> allocator2(allocator1);
+  auto buf1 = boost::allocate_shared<RGDataBufType>(*allocator2, allocSize);
   EXPECT_LE(allocatedMemory.load(), MemoryAllowance - allocSize);
 
   buf1.reset();
@@ -136,11 +163,27 @@ TEST_F(CountingAllocatorTest, ThreadSafety)
 
   auto worker = [this]()
   {
+    std::vector<TestClass*> ptrs;
+    CountingAllocator<TestClass> allocatorLocal(&allocatedMemory, MemoryAllowance / 100,
+                                                MemoryAllowance / 1000);
     for (std::size_t i = 0; i < allocationsPerThread; ++i)
     {
-      TestClass* ptr = allocator.allocate(1);
-      allocator.deallocate(ptr, 1);
+      ptrs.push_back(allocatorLocal.allocate(1));
     }
+
+    int64_t usedMemory = allocationsPerThread * sizeof(TestClass);
+    EXPECT_EQ(allocatorLocal.getCurrentLocalMemoryUsage(), allocationsPerThread * sizeof(TestClass));
+    EXPECT_GE(usedMemory - allocatorLocal.getlastMemoryLimitCheckpoint(), 0LL);
+    EXPECT_LE(allocatedMemory.load(), MemoryAllowance - allocatorLocal.getlastMemoryLimitCheckpoint());
+
+    for (auto* ptr : ptrs)
+    {
+      allocatorLocal.deallocate(ptr, 1);
+    }
+
+    EXPECT_EQ(allocatorLocal.getCurrentLocalMemoryUsage(), 0);
+    EXPECT_EQ(allocatorLocal.getlastMemoryLimitCheckpoint(), 0);
+    EXPECT_GE(allocatedMemory.load(), allocationsPerThread * sizeof(TestClass));
   };
 
   std::vector<std::thread> threads;
@@ -156,7 +199,7 @@ TEST_F(CountingAllocatorTest, ThreadSafety)
     th.join();
   }
 
-  // After all allocations and deallocations, the counter should be zero
+  // After all allocations and deallocations, the counter should be back at MemoryAllowance
   EXPECT_EQ(allocatedMemory.load(), MemoryAllowance);
 }
 
@@ -172,8 +215,8 @@ TEST_F(CountingAllocatorTest, AllocateZeroObjects)
 
 TEST_F(CountingAllocatorTest, CopyAssignable)
 {
-    CountingAllocator<TestClass> allocator1(&allocatedMemory);
-    CountingAllocator<TestClass> allocator2(&allocatedMemory);
-    allocator1 = allocator2;
-    EXPECT_EQ(allocator1, allocator2);
+  CountingAllocator<TestClass> allocator1(&allocatedMemory);
+  CountingAllocator<TestClass> allocator2(&allocatedMemory);
+  allocator1 = allocator2;
+  EXPECT_EQ(allocator1, allocator2);
 }
\ No newline at end of file
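Reviewer note: the reworked assertions above distinguish the shared atomic budget from the allocator's thread-local usage, and only compare the global counter directly once an allocation exceeds the checkpoint step. A minimal, self-contained model of that accounting is sketched below; the names (`budget`, `step`, `account`) are illustrative, not the real `CountingAllocator` API.

```cpp
#include <atomic>
#include <cassert>
#include <cstdint>
#include <cstdlib>

int main()
{
  const int64_t budget = 1 * 1024 * 1024;  // mirrors MemoryAllowance
  const int64_t step = budget / 100;       // mirrors MemoryLimitStep
  std::atomic<int64_t> globalBudget{budget};
  int64_t local = 0;       // getCurrentLocalMemoryUsage() analogue
  int64_t checkpoint = 0;  // getlastMemoryLimitCheckpoint() analogue

  // Charge `delta` bytes locally; publish to the global counter only when
  // local usage drifts more than `step` bytes away from the last checkpoint.
  auto account = [&](int64_t delta)
  {
    local += delta;
    if (std::llabs(local - checkpoint) > step)
    {
      globalBudget.fetch_sub(local - checkpoint, std::memory_order_relaxed);
      checkpoint = local;
    }
  };

  account(sizeof(int) * 1024);  // one TestClass (int value[1024]) is 4 KiB
  // 4 KiB is below the step, so the global counter has not moved yet...
  assert(globalBudget.load() == budget);
  // ...but "global minus local" still reflects the exact remainder,
  // which is what the Allocation/Deallocation tests assert.
  assert(globalBudget.load() - local == budget - 4096);
  return 0;
}
```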
diff --git a/utils/common/countingallocator.h b/utils/common/countingallocator.h
index 4d9a36f6f..d0c140ee8 100644
--- a/utils/common/countingallocator.h
+++ b/utils/common/countingallocator.h
@@ -17,99 +17,217 @@
 
 #pragma once
 
+#include <cassert>
 #include <atomic>
-#include <iostream>
-#include <memory>
 #include <cstdint>
 #include <type_traits>
-#include <mutex>
-#include <cstddef>
+#include <new>
-namespace allocators
+
+#include <cstdlib>
+
+namespace allocators
 {
-// const constexpr std::uint64_t CounterUpdateUnitSize = 4 * 1024 * 1024;
-const constexpr std::int64_t MemoryLimitLowerBound = 100 * 1024 * 1024;  // WIP
+// WIP placement
+
+// const constexpr std::uint64_t CounterUpdateUnitSize = 4 * 1024 * 1024;
+const constexpr std::int64_t MemoryLimitLowerBound = 500 * 1024 * 1024;  // WIP
+const constexpr std::int64_t CheckPointStepSize = 100 * 1024 * 1024;     // WIP
 
 // Custom Allocator that tracks allocated memory using an atomic counter
 template <typename T>
-class CountingAllocator {
-public:
-  using value_type = T;
+class CountingAllocator
+{
+ public:
+  using value_type = T;
 
-  // Constructor accepting a reference to an atomic counter
-  explicit CountingAllocator(std::atomic<int64_t>* memoryLimit, const uint64_t lowerBound = MemoryLimitLowerBound) noexcept
-      : memoryLimit_(memoryLimit), memoryLimitLowerBound(lowerBound) {}
+  bool needCheckPoint(const int64_t sizeChange, const int64_t diffSinceLastCheckPoint,
+                      const int64_t checkPointStepSize)
+  {
+    return std::llabs(sizeChange + diffSinceLastCheckPoint) > checkPointStepSize;
+  }
 
-  // Copy constructor (template to allow conversion between different types)
-  template <typename U>
-  CountingAllocator(const CountingAllocator<U>& other) noexcept
-      : memoryLimit_(other.memoryLimit_), memoryLimitLowerBound(other.memoryLimitLowerBound) {}
+  int64_t int_distance(const int64_t x, const int64_t y)
+  {
+    return (x > y) ? x - y : y - x;
+  }
 
-  // Allocate memory for n objects of type T
-  template <typename U = T>
-  typename std::enable_if<!std::is_array<U>::value, U*>::type
-  allocate(std::size_t n)
+  // INVARIANT: sizeChange > 0
+  void changeLocalAndGlobalMemoryLimits(const int64_t sizeChange)
+  {
+    // This routine must be used for the mem allocation accounting path only!
+    // In the case Current > Last Checkpoint (we deallocated mem since the last checkpoint), the change
+    // is negated b/c we now move in the opposite direction. In the case Last Checkpoint > Current
+    // (we allocated mem since the last checkpoint), the change stays positive.
+    int64_t sizeChangeWDirection =
+        (currentLocalMemoryUsage_ <= lastMemoryLimitCheckpoint_) ? -sizeChange : sizeChange;
+    int64_t diffSinceLastCheckPoint = int_distance(currentLocalMemoryUsage_, lastMemoryLimitCheckpoint_);
+    if (needCheckPoint(sizeChangeWDirection, diffSinceLastCheckPoint, checkPointStepSize_))
    {
-      auto memCounted = memoryLimit_->fetch_sub(n * sizeof(T), std::memory_order_relaxed);
-      if (memCounted < memoryLimitLowerBound)
-      {
-        memoryLimit_->fetch_add(n * sizeof(T), std::memory_order_relaxed);
-        throw std::bad_alloc();
-      }
-
-      T* ptr = static_cast<T*>(::operator new(n * sizeof(T)));
-      // std::cout << "[Allocate] " << n * sizeof(T) << " bytes at " << static_cast<void*>(ptr)
-      //           << ". current limit: " << std::dec << memoryLimit_.load() << std::hex << " bytes.\n";
-      // std::cout << std::dec;
-      return ptr;
+      // std::cout << "changeLocalAndGlobalMemoryLimits " << sizeChange << " bytes at "
+      //           << " diffSinceLastCheckPoint " << diffSinceLastCheckPoint << ". current limit: " << std::dec
+      //           << memoryLimit_->load() << std::hex << " bytes.\n";
+      // std::cout << std::dec;
+
+      // auto lastMemoryLimitCheckpointDiff = diffSinceLastCheckPoint + sizeChangeWDirection;
+      int64_t lastMemoryLimitCheckpointDiff = (currentLocalMemoryUsage_ <= lastMemoryLimitCheckpoint_)
+                                                  ? sizeChange - diffSinceLastCheckPoint
+                                                  : sizeChange + diffSinceLastCheckPoint;
+      assert(lastMemoryLimitCheckpointDiff > 0);
+      // {
+      //   std::cout << "[Allocate::changeLocalAndGlobalMemoryLimits!!!] lastMemoryLimitCheckpoint_ "
+      //             << lastMemoryLimitCheckpoint_ << " currentLocalMemoryUsage_ " << currentLocalMemoryUsage_
+      //             << " sizeChangeWDirection " << sizeChangeWDirection << " lastMemoryLimitCheckpointDiff "
+      //             << lastMemoryLimitCheckpointDiff << std::endl;
+      // }
+
+      // lastMemoryLimitCheckpointDiff sign signifies a direction we move allocating memory.
+      auto currentGlobalMemoryLimit =
+          memoryLimit_->fetch_sub(lastMemoryLimitCheckpointDiff, std::memory_order_relaxed);
+      if (currentGlobalMemoryLimit < memoryLimitLowerBound_)
+      {
+        memoryLimit_->fetch_add(lastMemoryLimitCheckpointDiff, std::memory_order_relaxed);
+        // ? what to do with local counters here
+        throw std::bad_alloc();
+      }
+      lastMemoryLimitCheckpoint_ += lastMemoryLimitCheckpointDiff;
    }
 
-  template <typename U = T>
-  typename std::enable_if<std::is_array<U>::value, typename std::remove_extent<U>::type*>::type
-  allocate(std::size_t n)
+    currentLocalMemoryUsage_ += sizeChange;
+  }
+
+  // Constructor accepting a reference to an atomic counter
+  explicit CountingAllocator(std::atomic<int64_t>* memoryLimit,
+                             const uint64_t lowerBound = MemoryLimitLowerBound,
+                             const uint64_t checkPointStepSize = CheckPointStepSize) noexcept
+   : memoryLimit_(memoryLimit), memoryLimitLowerBound_(lowerBound), checkPointStepSize_(checkPointStepSize)
+  {
+  }
+
+  // Copy constructor (template to allow conversion between different types)
+  template <typename U>
+  CountingAllocator(const CountingAllocator<U>& other) noexcept
+   : memoryLimit_(other.memoryLimit_)
+   , memoryLimitLowerBound_(other.memoryLimitLowerBound_)
+   , checkPointStepSize_(other.checkPointStepSize_)
+  {
+  }
+
+  // Allocate memory for n objects of type T
+  template <typename U = T>
+  typename std::enable_if<!std::is_array<U>::value, U*>::type allocate(std::size_t n)
+  {
+    auto sizeAllocated = n * sizeof(T);
+
+    changeLocalAndGlobalMemoryLimits(sizeAllocated);
+
+    T* ptr = static_cast<T*>(::operator new(sizeAllocated));
+    // std::cout << "[Allocate] non-array " << n * sizeof(T) << " bytes at " << static_cast<void*>(ptr)
+    //           << ". current limit: " << std::dec << memoryLimit_->load() << std::hex << " bytes.\n";
+    // std::cout << std::dec;
+    return ptr;
+  }
+
+  template <typename U = T>
+  typename std::enable_if<std::is_array<U>::value, typename std::remove_extent<U>::type*>::type allocate(
+      std::size_t n)
+  {
+    auto sizeAllocated = n * sizeof(typename std::remove_extent<U>::type);
+
+    changeLocalAndGlobalMemoryLimits(sizeAllocated);
+
+    auto* ptr = static_cast<typename std::remove_extent<U>::type*>(::operator new[](sizeAllocated));
+    // std::cout << "[Allocate] array " << n * sizeof(T) << " bytes at " << static_cast<void*>(ptr)
+    //           << ". current limit: " << std::dec << memoryLimit_->load() << std::hex << " bytes.\n";
+    return ptr;
+  }
+
+  // Deallocate memory for n objects of type T
+  void deallocate(T* ptr, std::size_t n) noexcept
+  {
+    ::operator delete(ptr);
+
+    int64_t sizeToDeallocate = n * sizeof(T);
+
+    // std::cout << "[Deallocate start] " << sizeToDeallocate << " bytes from " << static_cast<void*>(ptr)
+    //           << ". current limit: " << std::dec << memoryLimit_->load() << std::hex << " bytes.\n";
+    // std::cout << std::dec;
+
+    int64_t sizeChangeWDirection =
+        (currentLocalMemoryUsage_ >= lastMemoryLimitCheckpoint_) ? -sizeToDeallocate : sizeToDeallocate;
+    int64_t diffSinceLastCheckPoint = int_distance(currentLocalMemoryUsage_, lastMemoryLimitCheckpoint_);
+
+    if (needCheckPoint(sizeChangeWDirection, diffSinceLastCheckPoint, checkPointStepSize_))
    {
-      auto memCounted = memoryLimit_->fetch_sub(n * sizeof(T), std::memory_order_relaxed);
-      if (memCounted < memoryLimitLowerBound)
-      {
-        memoryLimit_->fetch_add(n * sizeof(T), std::memory_order_relaxed);
-        throw std::bad_alloc();
-      }
-
-      T ptr = static_cast<T>(::operator new[](n));
-      // std::cout << "[Allocate] " << n * sizeof(T) << " bytes at " << static_cast<void*>(ptr)
-      //           << ". current limit: " << std::dec << memoryLimit_.load() << std::hex << " bytes.\n";
-      return ptr;
+      // Invariant is lastMemoryLimitCheckpoint_ >= currentLocalMemoryUsage_ - sizeToDeallocate,
+      // and the resulting lastMemoryLimitCheckpointDiff value must be positive.
+      // int64_t lastMemoryLimitCheckpointDiff =
+      //     labs(lastMemoryLimitCheckpoint_ - currentLocalMemoryUsage_ - sizeToDeallocate);
+      // auto lastMemoryLimitCheckpointDiff = diffSinceLastCheckPoint + sizeChangeWDirection;
+      int64_t lastMemoryLimitCheckpointDiff =
+          (currentLocalMemoryUsage_ >= lastMemoryLimitCheckpoint_)
+              ? sizeToDeallocate - (currentLocalMemoryUsage_ - lastMemoryLimitCheckpoint_)
+              : diffSinceLastCheckPoint + sizeToDeallocate;
+
+      assert(lastMemoryLimitCheckpointDiff > 0);
+
+      // std::cout << "[Deallocate checkpoint!!!] lastMemoryLimitCheckpoint_ " << lastMemoryLimitCheckpoint_
+      //           << " currentLocalMemoryUsage_ " << currentLocalMemoryUsage_ << " sizeChangeWDirection "
+      //           << sizeChangeWDirection << " lastMemoryLimitCheckpointDiff " << lastMemoryLimitCheckpointDiff
+      //           << std::endl;
+
+      // assert(lastMemoryLimitCheckpointDiff < 0);
+      memoryLimit_->fetch_add(lastMemoryLimitCheckpointDiff, std::memory_order_relaxed);
+
+      lastMemoryLimitCheckpoint_ -= (lastMemoryLimitCheckpoint_ == 0) ? 0 : lastMemoryLimitCheckpointDiff;
    }
+    currentLocalMemoryUsage_ = currentLocalMemoryUsage_ - sizeToDeallocate;
 
-    // Deallocate memory for n objects of type T
-    void deallocate(T* ptr, std::size_t n) noexcept
-    {
-      ::operator delete(ptr);
-      memoryLimit_->fetch_add(n * sizeof(T), std::memory_order_relaxed);
-      // std::cout << "[Deallocate] " << n * sizeof(T) << " bytes from " << static_cast<void*>(ptr)
-      //           << ". current limit: " << std::dec << memoryLimit_.load() << std::hex << " bytes.\n";
-      // std::cout << std::dec;
-    }
+    // std::cout << "[Deallocate end] " << n * sizeof(T) << " bytes from " << static_cast<void*>(ptr)
+    //           << ". current limit: " << std::dec << memoryLimit_->load() << std::hex << " bytes.\n";
+    // std::cout << std::dec;
+  }
 
-    // Equality operators (allocators are equal if they share the same counter)
-    template <typename U>
-    bool operator==(const CountingAllocator<U>& other) const noexcept
-    {
-      return memoryLimit_ == other.memoryLimit_;
-    }
+  // Equality operators (allocators are equal if they share the same counter)
+  template <typename U>
+  bool operator==(const CountingAllocator<U>& other) const noexcept
+  {
+    return memoryLimit_ == other.memoryLimit_;
+  }
 
-    template <typename U>
-    bool operator!=(const CountingAllocator<U>& other) const noexcept
-    {
-      return !(*this == other);
-    }
+  template <typename U>
+  bool operator!=(const CountingAllocator<U>& other) const noexcept
+  {
+    return !(*this == other);
+  }
 
-private:
-  std::atomic<int64_t>* memoryLimit_ = nullptr;
-  int64_t memoryLimitLowerBound = 0;
+  int64_t getMemoryLimitLowerBound() const noexcept
+  {
+    return memoryLimitLowerBound_;
+  }
+
+  int64_t getlastMemoryLimitCheckpoint() const noexcept
+  {
+    return lastMemoryLimitCheckpoint_;
+  }
+
+  int64_t getCurrentLocalMemoryUsage() const noexcept
+  {
+    return currentLocalMemoryUsage_;
+  }
 
-  // Grant access to other instances of CountingAllocator with different types
-  template <typename U>
-  friend class CountingAllocator;
+ private:
+  std::atomic<int64_t>* memoryLimit_ = nullptr;
+  int64_t memoryLimitLowerBound_ = MemoryLimitLowerBound;
+  int64_t checkPointStepSize_ = CheckPointStepSize;
+  int64_t lastMemoryLimitCheckpoint_ = 0;
+  int64_t currentLocalMemoryUsage_ = 0;
+
+  // Grant access to other instances of CountingAllocator with different types
+  template <typename U>
+  friend class CountingAllocator;
 };
 }  // namespace allocators
\ No newline at end of file
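Reviewer note: the checkpoint arithmetic in `changeLocalAndGlobalMemoryLimits()` is easier to follow with concrete numbers. The sketch below replays the allocate path with illustrative values (a 100-byte step instead of `CheckPointStepSize`), using plain variables in place of the class members:

```cpp
#include <cassert>
#include <cstdint>
#include <cstdlib>

// Replays two allocations through the checkpoint logic above.
int main()
{
  const int64_t step = 100;                            // stand-in for checkPointStepSize_
  int64_t current = 0, checkpoint = 0, global = 1000;  // local usage, last checkpoint, shared budget

  // First allocation of 60 bytes: |sizeChangeWDirection + diff| = |-60 + 0| <= step,
  // so only the local counter moves; the shared atomic is untouched.
  current += 60;
  assert(global == 1000 && checkpoint == 0);

  // Second allocation of 60 bytes: current (60) > checkpoint (0), so the directed
  // change is +60 and |60 + 60| > step: a checkpoint fires.
  int64_t diff = std::llabs(current - checkpoint);  // int_distance()
  int64_t publish = 60 + diff;                      // sizeChange + diffSinceLastCheckPoint
  global -= publish;                                // the single fetch_sub on the atomic
  checkpoint += publish;
  current += 60;

  // At a checkpoint the global budget is exact again: 1000 - 120.
  assert(global == 880 && checkpoint == 120 && current == 120);
  return 0;
}
```

The design point is amortization: per-allocation bookkeeping stays thread-local, and the contended atomic is only touched once local usage drifts a full step away from the last published checkpoint.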
diff --git a/utils/common/stlpoolallocator.h b/utils/common/stlpoolallocator.h
index 0864deb08..8a10c6e93 100644
--- a/utils/common/stlpoolallocator.h
+++ b/utils/common/stlpoolallocator.h
@@ -102,7 +102,7 @@ STLPoolAllocator<T>::STLPoolAllocator(joblist::ResourceManager* rm)
   if (rm)
   {
     auto alloc = rm->getAllocator();
-    pa.reset(new PoolAllocator(alloc));
+    pa.reset(new PoolAllocator(alloc, DEFAULT_SIZE));
  }
   else
   {
diff --git a/utils/joiner/tuplejoiner.cpp b/utils/joiner/tuplejoiner.cpp
index 58883244e..c76bcfc0a 100644
--- a/utils/joiner/tuplejoiner.cpp
+++ b/utils/joiner/tuplejoiner.cpp
@@ -67,9 +67,9 @@ TupleJoiner::TupleJoiner(const rowgroup::RowGroup& smallInput, const rowgroup::RowGroup& largeInput,
     // _pool.reset(new boost::shared_ptr<utils::PoolAllocator>[bucketCount]);
     for (i = 0; i < bucketCount; i++)
     {
-      STLPoolAllocator<std::pair<const long double, rowgroup::Row::Pointer>> alloc(resourceManager_);
+      // STLPoolAllocator<std::pair<const long double, rowgroup::Row::Pointer>> alloc(resourceManager_);
       // _pool[i] = alloc.getPoolAllocator();
-      // auto alloc = resourceManager_->getAllocator<std::pair<const long double, rowgroup::Row::Pointer>>();
+      auto alloc = resourceManager_->getAllocator<std::pair<const long double, rowgroup::Row::Pointer>>();
       ld[i].reset(new ldhash_t(10, hasher(), ldhash_t::key_equal(), alloc));
     }
   }
@@ -79,9 +79,9 @@ TupleJoiner::TupleJoiner(const rowgroup::RowGroup& smallInput, const rowgroup::RowGroup& largeInput,
     _pool.reset(new boost::shared_ptr<utils::PoolAllocator>[bucketCount]);
     for (i = 0; i < bucketCount; i++)
     {
-      STLPoolAllocator<std::pair<const int64_t, rowgroup::Row::Pointer>> alloc(resourceManager_);
+      // STLPoolAllocator<std::pair<const int64_t, rowgroup::Row::Pointer>> alloc(resourceManager_);
       // _pool[i] = alloc.getPoolAllocator();
-      // auto alloc = resourceManager_->getAllocator<std::pair<const int64_t, rowgroup::Row::Pointer>>();
+      auto alloc = resourceManager_->getAllocator<std::pair<const int64_t, rowgroup::Row::Pointer>>();
       sth[i].reset(new sthash_t(10, hasher(), sthash_t::key_equal(), alloc));
     }
   }
@@ -91,9 +91,9 @@ TupleJoiner::TupleJoiner(const rowgroup::RowGroup& smallInput, const rowgroup::RowGroup& largeInput,
     _pool.reset(new boost::shared_ptr<utils::PoolAllocator>[bucketCount]);
     for (i = 0; i < bucketCount; i++)
     {
-      STLPoolAllocator<std::pair<const int64_t, uint32_t>> alloc(resourceManager_);
+      // STLPoolAllocator<std::pair<const int64_t, uint32_t>> alloc(resourceManager_);
       // _pool[i] = alloc.getPoolAllocator();
-      // auto alloc = resourceManager_->getAllocator<std::pair<const int64_t, uint32_t>>();
+      auto alloc = resourceManager_->getAllocator<std::pair<const int64_t, uint32_t>>();
       h[i].reset(new hash_t(10, hasher(), hash_t::key_equal(), alloc));
     }
   }
@@ -184,9 +184,9 @@ TupleJoiner::TupleJoiner(const rowgroup::RowGroup& smallInput, const rowgroup::RowGroup& largeInput,
   ht.reset(new boost::scoped_ptr<typelesshash_t>[bucketCount]);
   for (i = 0; i < bucketCount; i++)
   {
-    STLPoolAllocator<std::pair<const TypelessData, rowgroup::Row::Pointer>> alloc(resourceManager_);
+    // STLPoolAllocator<std::pair<const TypelessData, rowgroup::Row::Pointer>> alloc(resourceManager_);
     // _pool[i] = alloc.getPoolAllocator();
-    // auto alloc = resourceManager_->getAllocator<std::pair<const TypelessData, rowgroup::Row::Pointer>>();
+    auto alloc = resourceManager_->getAllocator<std::pair<const TypelessData, rowgroup::Row::Pointer>>();
     ht[i].reset(new typelesshash_t(10, hasher(), typelesshash_t::key_equal(), alloc));
   }
   m_bucketLocks.reset(new boost::mutex[bucketCount]);
@@ -1833,9 +1833,9 @@ void TupleJoiner::clearData()
 
   for (uint i = 0; i < bucketCount; i++)
   {
-    STLPoolAllocator<std::pair<const TypelessData, rowgroup::Row::Pointer>> alloc(resourceManager_);
+    // STLPoolAllocator<std::pair<const TypelessData, rowgroup::Row::Pointer>> alloc(resourceManager_);
     // _pool[i] = alloc.getPoolAllocator();
-    // auto alloc = resourceManager_->getAllocator<std::pair<const TypelessData, rowgroup::Row::Pointer>>();
+    auto alloc = resourceManager_->getAllocator<std::pair<const TypelessData, rowgroup::Row::Pointer>>();
     if (typelessJoin)
       ht[i].reset(new typelesshash_t(10, hasher(), typelesshash_t::key_equal(), alloc));
     else if (smallRG.getColTypes()[smallKeyColumns[0]] == CalpontSystemCatalog::LONGDOUBLE)
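Reviewer note: the loops above rely on the allocator-aware `unordered_multimap` constructor `(bucket_count, hash, key_equal, allocator)`. A hedged, standalone sketch of that pattern with `CountingAllocator` follows; the `int64_t` map and the 1 GiB `budget` are illustrative stand-ins, not `ResourceManager::getAllocator()`:

```cpp
#include <atomic>
#include <cstdint>
#include <functional>
#include <unordered_map>

#include "countingallocator.h"  // the header from this patch

int main()
{
  std::atomic<int64_t> budget{1LL << 30};  // hypothetical per-query budget
  using Pair = std::pair<const int64_t, int64_t>;
  allocators::CountingAllocator<Pair> alloc(&budget);

  // Same constructor shape as the ld[i]/sth[i]/h[i]/ht[i] resets above.
  std::unordered_multimap<int64_t, int64_t, std::hash<int64_t>, std::equal_to<int64_t>,
                          allocators::CountingAllocator<Pair> >
      map(10, std::hash<int64_t>(), std::equal_to<int64_t>(), alloc);

  map.emplace(1, 42);  // node allocations are charged against `budget`
  return 0;
}
```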
diff --git a/utils/joiner/tuplejoiner.h b/utils/joiner/tuplejoiner.h
index 282642b1f..c8cb8e498 100644
--- a/utils/joiner/tuplejoiner.h
+++ b/utils/joiner/tuplejoiner.h
@@ -478,37 +478,37 @@ class TupleJoiner
   }
 
  private:
-  typedef std::unordered_multimap<int64_t, uint32_t, hasher, std::equal_to<int64_t>,
-                                  utils::STLPoolAllocator<std::pair<const int64_t, uint32_t> > >
-      hash_t;
-  typedef std::unordered_multimap<int64_t, rowgroup::Row::Pointer, hasher, std::equal_to<int64_t>,
-                                  utils::STLPoolAllocator<std::pair<const int64_t, rowgroup::Row::Pointer> > >
-      sthash_t;
-  typedef std::unordered_multimap<
-      TypelessData, rowgroup::Row::Pointer, hasher, std::equal_to<TypelessData>,
-      utils::STLPoolAllocator<std::pair<const TypelessData, rowgroup::Row::Pointer> > >
-      typelesshash_t;
-  // MCOL-1822 Add support for Long Double AVG/SUM small side
-  typedef std::unordered_multimap<
-      long double, rowgroup::Row::Pointer, hasher, LongDoubleEq,
-      utils::STLPoolAllocator<std::pair<const long double, rowgroup::Row::Pointer> > >
-      ldhash_t;
+  // typedef std::unordered_multimap<int64_t, uint32_t, hasher, std::equal_to<int64_t>,
+  //                                 utils::STLPoolAllocator<std::pair<const int64_t, uint32_t> > >
+  //     hash_t;
+  // typedef std::unordered_multimap<int64_t, rowgroup::Row::Pointer, hasher, std::equal_to<int64_t>,
+  //                                 utils::STLPoolAllocator<std::pair<const int64_t, rowgroup::Row::Pointer> > >
+  //     sthash_t;
+  // typedef std::unordered_multimap<
+  //     TypelessData, rowgroup::Row::Pointer, hasher, std::equal_to<TypelessData>,
+  //     utils::STLPoolAllocator<std::pair<const TypelessData, rowgroup::Row::Pointer> > >
+  //     typelesshash_t;
+  // // MCOL-1822 Add support for Long Double AVG/SUM small side
+  // typedef std::unordered_multimap<
+  //     long double, rowgroup::Row::Pointer, hasher, LongDoubleEq,
+  //     utils::STLPoolAllocator<std::pair<const long double, rowgroup::Row::Pointer> > >
+  //     ldhash_t;
-  // typedef std::unordered_multimap<int64_t, uint32_t, hasher, std::equal_to<int64_t>,
-  //                                 allocators::CountingAllocator<std::pair<const int64_t, uint32_t> > >
-  //     hash_t;
-  // typedef std::unordered_multimap<int64_t, rowgroup::Row::Pointer, hasher, std::equal_to<int64_t>,
-  //                                 allocators::CountingAllocator<std::pair<const int64_t, rowgroup::Row::Pointer> > >
-  //     sthash_t;
-  // typedef std::unordered_multimap<
-  //     TypelessData, rowgroup::Row::Pointer, hasher, std::equal_to<TypelessData>,
-  //     allocators::CountingAllocator<std::pair<const TypelessData, rowgroup::Row::Pointer> > >
-  //     typelesshash_t;
-  // // MCOL-1822 Add support for Long Double AVG/SUM small side
-  // typedef std::unordered_multimap<
-  //     long double, rowgroup::Row::Pointer, hasher, LongDoubleEq,
-  //     allocators::CountingAllocator<std::pair<const long double, rowgroup::Row::Pointer> > >
-  //     ldhash_t;
+  typedef std::unordered_multimap<int64_t, uint32_t, hasher, std::equal_to<int64_t>,
+                                  allocators::CountingAllocator<std::pair<const int64_t, uint32_t> > >
+      hash_t;
+  typedef std::unordered_multimap<int64_t, rowgroup::Row::Pointer, hasher, std::equal_to<int64_t>,
+                                  allocators::CountingAllocator<std::pair<const int64_t, rowgroup::Row::Pointer> > >
+      sthash_t;
+  typedef std::unordered_multimap<
+      TypelessData, rowgroup::Row::Pointer, hasher, std::equal_to<TypelessData>,
+      allocators::CountingAllocator<std::pair<const TypelessData, rowgroup::Row::Pointer> > >
+      typelesshash_t;
+  // MCOL-1822 Add support for Long Double AVG/SUM small side
+  typedef std::unordered_multimap<
+      long double, rowgroup::Row::Pointer, hasher, LongDoubleEq,
+      allocators::CountingAllocator<std::pair<const long double, rowgroup::Row::Pointer> > >
+      ldhash_t;
 
   typedef hash_t::iterator iterator;
   typedef typelesshash_t::iterator thIterator;
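Reviewer note: `clearData()` obtains a single allocator and hands it to containers with different value types; that works because `CountingAllocator`'s converting copy constructor rebinds the element type while sharing the same counter and limits. A small sketch, assuming the header from this patch:

```cpp
#include <atomic>
#include <cstdint>

#include "countingallocator.h"

int main()
{
  std::atomic<int64_t> budget{1LL << 30};  // hypothetical budget
  allocators::CountingAllocator<int> intAlloc(&budget);

  // Converting copy: different value_type, same counter, same step/lower bound.
  allocators::CountingAllocator<long double> ldAlloc(intAlloc);

  // Equality is defined as "shares the same counter", so these compare equal.
  return intAlloc == ldAlloc ? 0 : 1;
}
```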