1
0
mirror of https://github.com/mariadb-corporation/mariadb-columnstore-engine.git synced 2025-07-30 19:23:07 +03:00

fix(PrimProc): MCOL-5651 Add a workaround to avoid choosing an incorrect TupleHashJoinStep as a joiner [stable-23.10] (#3331)

* fix(PrimProc): MCOL-5651 Add a workaround to avoid choosing an incorrect TupleHashJoinStep as a joiner
This commit is contained in:
Alexey Antipovsky
2024-11-08 13:51:25 +01:00
committed by GitHub
parent 0a71892d97
commit 11136b3545
7 changed files with 242 additions and 118 deletions

View File

@ -1478,7 +1478,7 @@ const JobStepVector doSimpleFilter(SimpleFilter* sf, JobInfo& jobInfo)
{ {
JobStepVector jsv; JobStepVector jsv;
if (sf == 0) if (sf == nullptr)
return jsv; return jsv;
// cout << "doSimpleFilter " << endl; // cout << "doSimpleFilter " << endl;

View File

@ -3078,7 +3078,7 @@ void createPostJoinFilters(const JobInfo& jobInfo, TableInfoMap& tableInfoMap,
if (jobInfo.trace) if (jobInfo.trace)
{ {
if (postJoinFilters.size()) if (!postJoinFilters.empty())
{ {
cout << "Post join filters created." << endl; cout << "Post join filters created." << endl;
for (auto* filter : postJoinFilters) for (auto* filter : postJoinFilters)
@ -3100,37 +3100,37 @@ SP_JoinInfo joinToLargeTable(uint32_t large, TableInfoMap& tableInfoMap, JobInfo
set<uint32_t>& tableSet = tableInfoMap[large].fJoinedTables; set<uint32_t>& tableSet = tableInfoMap[large].fJoinedTables;
vector<uint32_t>& adjList = tableInfoMap[large].fAdjacentList; vector<uint32_t>& adjList = tableInfoMap[large].fAdjacentList;
uint32_t prevLarge = (uint32_t)getPrevLarge(large, tableInfoMap); uint32_t prevLarge = (uint32_t)getPrevLarge(large, tableInfoMap);
bool root = (prevLarge == (uint32_t)-1) ? true : false; bool root = (prevLarge == (uint32_t)-1);
uint32_t link = large; uint32_t link = large;
uint32_t cId = -1; uint32_t cId = -1;
// Get small sides ready. // Get small sides ready.
for (vector<uint32_t>::iterator i = adjList.begin(); i != adjList.end(); i++) for (unsigned int& adj : adjList)
{ {
if (tableInfoMap[*i].fVisited == false) if (!tableInfoMap[adj].fVisited)
{ {
cId = *i; cId = adj;
smallSides.push_back(joinToLargeTable(*i, tableInfoMap, jobInfo, joinOrder, joinEdgesToRestore)); smallSides.push_back(joinToLargeTable(adj, tableInfoMap, jobInfo, joinOrder, joinEdgesToRestore));
tableSet.insert(tableInfoMap[*i].fJoinedTables.begin(), tableInfoMap[*i].fJoinedTables.end()); tableSet.insert(tableInfoMap[adj].fJoinedTables.begin(), tableInfoMap[adj].fJoinedTables.end());
} }
} }
// Join with its small sides, if not a leaf node. // Join with its small sides, if not a leaf node.
if (smallSides.size() > 0) if (!smallSides.empty())
{ {
// non-leaf node, need a join // non-leaf node, need a join
SJSTEP spjs = tableInfoMap[large].fQuerySteps.back(); SJSTEP spjs = tableInfoMap[large].fQuerySteps.back();
BatchPrimitive* bps = dynamic_cast<BatchPrimitive*>(spjs.get()); auto* bps = dynamic_cast<BatchPrimitive*>(spjs.get());
SubAdapterStep* tsas = dynamic_cast<SubAdapterStep*>(spjs.get()); auto* tsas = dynamic_cast<SubAdapterStep*>(spjs.get());
TupleHashJoinStep* thjs = dynamic_cast<TupleHashJoinStep*>(spjs.get()); auto* thjs = dynamic_cast<TupleHashJoinStep*>(spjs.get());
// @bug6158, try to put BPS on large side if possible // @bug6158, try to put BPS on large side if possible
if (tsas && smallSides.size() == 1) if (tsas && smallSides.size() == 1)
{ {
SJSTEP sspjs = tableInfoMap[cId].fQuerySteps.back(); SJSTEP sspjs = tableInfoMap[cId].fQuerySteps.back();
BatchPrimitive* sbps = dynamic_cast<BatchPrimitive*>(sspjs.get()); auto* sbps = dynamic_cast<BatchPrimitive*>(sspjs.get());
TupleHashJoinStep* sthjs = dynamic_cast<TupleHashJoinStep*>(sspjs.get()); auto* sthjs = dynamic_cast<TupleHashJoinStep*>(sspjs.get());
if (sbps || (sthjs && sthjs->tokenJoin() == cId)) if (sbps || (sthjs && sthjs->tokenJoin() == cId))
{ {
@ -3143,7 +3143,7 @@ SP_JoinInfo joinToLargeTable(uint32_t large, TableInfoMap& tableInfoMap, JobInfo
largeJoinInfo->fDl = tableInfoMap[large].fDl; largeJoinInfo->fDl = tableInfoMap[large].fDl;
largeJoinInfo->fRowGroup = tableInfoMap[large].fRowGroup; largeJoinInfo->fRowGroup = tableInfoMap[large].fRowGroup;
TableJoinMap::iterator mit = jobInfo.tableJoinMap.find(make_pair(large, cId)); auto mit = jobInfo.tableJoinMap.find(make_pair(large, cId));
if (mit == jobInfo.tableJoinMap.end()) if (mit == jobInfo.tableJoinMap.end())
throw runtime_error("Join step not found."); throw runtime_error("Join step not found.");
@ -3158,7 +3158,7 @@ SP_JoinInfo joinToLargeTable(uint32_t large, TableInfoMap& tableInfoMap, JobInfo
bps = sbps; bps = sbps;
thjs = sthjs; thjs = sthjs;
tsas = NULL; tsas = nullptr;
} }
} }
@ -3173,7 +3173,7 @@ SP_JoinInfo joinToLargeTable(uint32_t large, TableInfoMap& tableInfoMap, JobInfo
size_t dcf = 0; // for dictionary column filters, 0 if thjs is null. size_t dcf = 0; // for dictionary column filters, 0 if thjs is null.
RowGroup largeSideRG = tableInfoMap[large].fRowGroup; RowGroup largeSideRG = tableInfoMap[large].fRowGroup;
if (thjs && thjs->tokenJoin() == large) if (thjs && thjs->tokenJoin() == large && thjs->tupleId1() != thjs->tupleId2())
{ {
dcf = thjs->getLargeKeys().size(); dcf = thjs->getLargeKeys().size();
largeSideRG = thjs->getLargeRowGroup(); largeSideRG = thjs->getLargeRowGroup();
@ -3195,9 +3195,9 @@ SP_JoinInfo joinToLargeTable(uint32_t large, TableInfoMap& tableInfoMap, JobInfo
vector<vector<uint32_t>> smallKeyIndices; vector<vector<uint32_t>> smallKeyIndices;
vector<vector<uint32_t>> largeKeyIndices; vector<vector<uint32_t>> largeKeyIndices;
for (vector<SP_JoinInfo>::iterator i = smallSides.begin(); i != smallSides.end(); i++) for (auto& smallSide : smallSides)
{ {
JoinInfo* info = i->get(); JoinInfo* info = smallSide.get();
smallSideDLs.push_back(info->fDl); smallSideDLs.push_back(info->fDl);
smallSideRGs.push_back(info->fRowGroup); smallSideRGs.push_back(info->fRowGroup);
jointypes.push_back(info->fJoinData.fTypes[0]); jointypes.push_back(info->fJoinData.fTypes[0]);
@ -3207,8 +3207,8 @@ SP_JoinInfo joinToLargeTable(uint32_t large, TableInfoMap& tableInfoMap, JobInfo
vector<uint32_t> largeIndices; vector<uint32_t> largeIndices;
const vector<uint32_t>& keys1 = info->fJoinData.fLeftKeys; const vector<uint32_t>& keys1 = info->fJoinData.fLeftKeys;
const vector<uint32_t>& keys2 = info->fJoinData.fRightKeys; const vector<uint32_t>& keys2 = info->fJoinData.fRightKeys;
vector<uint32_t>::const_iterator k1 = keys1.begin(); auto k1 = keys1.begin();
vector<uint32_t>::const_iterator k2 = keys2.begin(); auto k2 = keys2.begin();
uint32_t stid = getTableKey(jobInfo, *k1); uint32_t stid = getTableKey(jobInfo, *k1);
tableNames.push_back(jobInfo.keyInfo->tupleKeyVec[stid].fTable); tableNames.push_back(jobInfo.keyInfo->tupleKeyVec[stid].fTable);
@ -3267,7 +3267,8 @@ SP_JoinInfo joinToLargeTable(uint32_t large, TableInfoMap& tableInfoMap, JobInfo
traces.push_back(oss.str()); traces.push_back(oss.str());
} }
if (bps || tsas) // If the tupleIDs are the same it's not a join, so a new TupleHashJoinStep must be created
if (bps || tsas || (thjs && thjs->tupleId1() == thjs->tupleId2()))
{ {
thjs = new TupleHashJoinStep(jobInfo); thjs = new TupleHashJoinStep(jobInfo);
thjs->tableOid1(smallSides[0]->fTableOid); thjs->tableOid1(smallSides[0]->fTableOid);
@ -4379,10 +4380,14 @@ inline void joinTables(JobStepVector& joinSteps, TableInfoMap& tableInfoMap, Job
{ {
uint32_t largestTable = getLargestTable(jobInfo, tableInfoMap, overrideLargeSideEstimate); uint32_t largestTable = getLargestTable(jobInfo, tableInfoMap, overrideLargeSideEstimate);
if (jobInfo.outerOnTable.size() == 0) if (jobInfo.outerOnTable.empty())
{
joinToLargeTable(largestTable, tableInfoMap, jobInfo, joinOrder, jobInfo.joinEdgesToRestore); joinToLargeTable(largestTable, tableInfoMap, jobInfo, joinOrder, jobInfo.joinEdgesToRestore);
}
else else
{
joinTablesInOrder(largestTable, joinSteps, tableInfoMap, jobInfo, joinOrder); joinTablesInOrder(largestTable, joinSteps, tableInfoMap, jobInfo, joinOrder);
}
} }
void makeNoTableJobStep(JobStepVector& querySteps, JobStepVector& projectSteps, void makeNoTableJobStep(JobStepVector& querySteps, JobStepVector& projectSteps,
@ -4407,14 +4412,14 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
const boost::shared_ptr<TupleKeyInfo>& keyInfo = jobInfo.keyInfo; const boost::shared_ptr<TupleKeyInfo>& keyInfo = jobInfo.keyInfo;
cout << "query steps:" << endl; cout << "query steps:" << endl;
for (JobStepVector::iterator i = querySteps.begin(); i != querySteps.end(); ++i) for (const auto& step: querySteps)
{ {
TupleHashJoinStep* thjs = dynamic_cast<TupleHashJoinStep*>(i->get()); auto* thjs = dynamic_cast<TupleHashJoinStep*>(step.get());
if (thjs == NULL) if (thjs == nullptr)
{ {
int64_t id = ((*i)->tupleId() != (uint64_t)-1) ? (*i)->tupleId() : -1; int64_t id = (step->tupleId() != (uint64_t)-1) ? step->tupleId() : -1;
cout << typeid(*(i->get())).name() << ": " << (*i)->oid() << " " << id << " " cout << typeid(step.get()).name() << ": " << step->oid() << " " << id << " "
<< (int)((id != -1) ? getTableKey(jobInfo, id) : -1) << endl; << (int)((id != -1) ? getTableKey(jobInfo, id) : -1) << endl;
} }
else else
@ -4430,16 +4435,18 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
cout << "project steps:" << endl; cout << "project steps:" << endl;
for (JobStepVector::iterator i = projectSteps.begin(); i != projectSteps.end(); ++i) for (const auto& prStep: projectSteps)
{ {
cout << typeid(*(i->get())).name() << ": " << (*i)->oid() << " " << (*i)->tupleId() << " " cout << typeid(prStep.get()).name() << ": " << prStep->oid() << " " << prStep->tupleId() << " "
<< getTableKey(jobInfo, (*i)->tupleId()) << endl; << getTableKey(jobInfo, prStep->tupleId()) << endl;
} }
cout << "delivery steps:" << endl; cout << "delivery steps:" << endl;
for (DeliveredTableMap::iterator i = deliverySteps.begin(); i != deliverySteps.end(); ++i) for (const auto& [_, value]: deliverySteps)
cout << typeid(*(i->second.get())).name() << endl; {
cout << typeid(value.get()).name() << endl;
}
cout << "\nTable Info: (key oid name alias view sub)" << endl; cout << "\nTable Info: (key oid name alias view sub)" << endl;
@ -4452,7 +4459,7 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
CalpontSystemCatalog::OID oid = keyInfo->tupleKeyVec[i].fId; CalpontSystemCatalog::OID oid = keyInfo->tupleKeyVec[i].fId;
string alias = keyInfo->tupleKeyVec[i].fTable; string alias = keyInfo->tupleKeyVec[i].fTable;
if (alias.length() < 1) if (alias.empty())
alias = "N/A"; alias = "N/A";
string name = keyInfo->keyName[i]; string name = keyInfo->keyName[i];
@ -4462,10 +4469,10 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
string view = keyInfo->tupleKeyVec[i].fView; string view = keyInfo->tupleKeyVec[i].fView;
if (view.length() < 1) if (view.empty())
view = "N/A"; view = "N/A";
int sid = keyInfo->tupleKeyVec[i].fSubId; auto sid = keyInfo->tupleKeyVec[i].fSubId;
cout << i << "\t" << oid << "\t" << name << "\t" << alias << "\t" << view << "\t" << hex << sid << dec cout << i << "\t" << oid << "\t" << name << "\t" << alias << "\t" << view << "\t" << hex << sid << dec
<< endl; << endl;
} }
@ -4479,7 +4486,7 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
int64_t tid = jobInfo.keyInfo->colKeyToTblKey[i]; int64_t tid = jobInfo.keyInfo->colKeyToTblKey[i];
string alias = keyInfo->tupleKeyVec[i].fTable; string alias = keyInfo->tupleKeyVec[i].fTable;
if (alias.length() < 1) if (alias.empty())
alias = "N/A"; alias = "N/A";
// Expression IDs are borrowed from systemcatalog IDs, which are not used in tuple. // Expression IDs are borrowed from systemcatalog IDs, which are not used in tuple.
@ -4502,10 +4509,10 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
string view = keyInfo->tupleKeyVec[i].fView; string view = keyInfo->tupleKeyVec[i].fView;
if (view.length() < 1) if (view.empty())
view = "N/A"; view = "N/A";
int sid = keyInfo->tupleKeyVec[i].fSubId; auto sid = keyInfo->tupleKeyVec[i].fSubId;
cout << i << "\t" << oid << "\t" << tid << "\t" << name << "\t" << alias << "\t" << view << "\t" << hex cout << i << "\t" << oid << "\t" << tid << "\t" << name << "\t" << alias << "\t" << view << "\t" << hex
<< sid << dec << endl; << sid << dec << endl;
} }
@ -4514,7 +4521,7 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
} }
// @bug 2771, handle no table select query // @bug 2771, handle no table select query
if (jobInfo.tableList.size() < 1) if (jobInfo.tableList.empty())
{ {
makeNoTableJobStep(querySteps, projectSteps, deliverySteps, jobInfo); makeNoTableJobStep(querySteps, projectSteps, deliverySteps, jobInfo);
return; return;
@ -4537,33 +4544,33 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
} }
// Set of the columns being projected. // Set of the columns being projected.
for (TupleInfoVector::iterator i = jobInfo.pjColList.begin(); i != jobInfo.pjColList.end(); i++) for (auto i = jobInfo.pjColList.begin(); i != jobInfo.pjColList.end(); i++)
jobInfo.returnColSet.insert(i->key); jobInfo.returnColSet.insert(i->key);
// Strip constantbooleanquerySteps // Strip constantbooleanquerySteps
for (uint64_t i = 0; i < querySteps.size();) for (uint64_t i = 0; i < querySteps.size();)
{ {
TupleConstantBooleanStep* bs = dynamic_cast<TupleConstantBooleanStep*>(querySteps[i].get()); auto* bs = dynamic_cast<TupleConstantBooleanStep*>(querySteps[i].get());
ExpressionStep* es = dynamic_cast<ExpressionStep*>(querySteps[i].get()); auto* es = dynamic_cast<ExpressionStep*>(querySteps[i].get());
if (bs != NULL) if (bs != nullptr)
{ {
// cosntant step // cosntant step
if (bs->boolValue() == false) if (!bs->boolValue())
jobInfo.constantFalse = true; jobInfo.constantFalse = true;
querySteps.erase(querySteps.begin() + i); querySteps.erase(querySteps.begin() + i);
} }
else if (es != NULL && es->tableKeys().size() == 0) else if (es != nullptr && es->tableKeys().empty())
{ {
// constant expression // constant expression
ParseTree* p = es->expressionFilter(); // filter ParseTree* p = es->expressionFilter(); // filter
if (p != NULL) if (p != nullptr)
{ {
Row r; // dummy row Row r; // dummy row
if (funcexp::FuncExp::instance()->evaluate(r, p) == false) if (!funcexp::FuncExp::instance()->evaluate(r, p))
jobInfo.constantFalse = true; jobInfo.constantFalse = true;
querySteps.erase(querySteps.begin() + i); querySteps.erase(querySteps.begin() + i);
@ -4582,7 +4589,7 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
{ {
bool exist = false; bool exist = false;
for (JobStepVector::iterator j = steps.begin(); j != steps.end() && !exist; ++j) for (auto j = steps.begin(); j != steps.end() && !exist; ++j)
{ {
if (jobInfo.functionJoins[i] == j->get()) if (jobInfo.functionJoins[i] == j->get())
exist = true; exist = true;
@ -4597,37 +4604,37 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
// Make sure each query step has an output DL // Make sure each query step has an output DL
// This is necessary for toString() method on most steps // This is necessary for toString() method on most steps
for (JobStepVector::iterator it = steps.begin(); it != steps.end(); ++it) for (auto& step: steps)
{ {
// if (dynamic_cast<OrDelimiter*>(it->get())) // if (dynamic_cast<OrDelimiter*>(it->get()))
// continue; // continue;
if (it->get()->outputAssociation().outSize() == 0) if (step->outputAssociation().outSize() == 0)
{ {
JobStepAssociation jsa; JobStepAssociation jsa;
AnyDataListSPtr adl(new AnyDataList()); AnyDataListSPtr adl(new AnyDataList());
RowGroupDL* dl = new RowGroupDL(1, jobInfo.fifoSize); auto* dl = new RowGroupDL(1, jobInfo.fifoSize);
dl->OID(it->get()->oid()); dl->OID(step->oid());
adl->rowGroupDL(dl); adl->rowGroupDL(dl);
jsa.outAdd(adl); jsa.outAdd(adl);
it->get()->outputAssociation(jsa); step->outputAssociation(jsa);
} }
} }
// Populate the TableInfo map with the job steps keyed by table ID. // Populate the TableInfo map with the job steps keyed by table ID.
JobStepVector joinSteps; JobStepVector joinSteps;
JobStepVector& expSteps = jobInfo.crossTableExpressions; JobStepVector& expSteps = jobInfo.crossTableExpressions;
JobStepVector::iterator it = querySteps.begin(); auto it = querySteps.begin();
JobStepVector::iterator end = querySteps.end(); auto end = querySteps.end();
while (it != end) while (it != end)
{ {
// Separate table joins from other predicates. // Separate table joins from other predicates.
TupleHashJoinStep* thjs = dynamic_cast<TupleHashJoinStep*>(it->get()); auto* thjs = dynamic_cast<TupleHashJoinStep*>(it->get());
ExpressionStep* exps = dynamic_cast<ExpressionStep*>(it->get()); auto* exps = dynamic_cast<ExpressionStep*>(it->get());
SubAdapterStep* subs = dynamic_cast<SubAdapterStep*>(it->get()); auto* subs = dynamic_cast<SubAdapterStep*>(it->get());
if (thjs != NULL && thjs->tupleId1() != thjs->tupleId2()) if (thjs && thjs->tupleId1() != thjs->tupleId2())
{ {
// simple column and constant column semi join // simple column and constant column semi join
if (thjs->tableOid2() == 0 && thjs->schema2().empty()) if (thjs->tableOid2() == 0 && thjs->schema2().empty())
@ -4685,8 +4692,8 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
// keep a join map // keep a join map
pair<uint32_t, uint32_t> tablePair(tid1, tid2); pair<uint32_t, uint32_t> tablePair(tid1, tid2);
TableJoinMap::iterator m1 = jobInfo.tableJoinMap.find(tablePair); auto m1 = jobInfo.tableJoinMap.find(tablePair);
TableJoinMap::iterator m2 = jobInfo.tableJoinMap.end(); auto m2 = jobInfo.tableJoinMap.end();
if (m1 == jobInfo.tableJoinMap.end()) if (m1 == jobInfo.tableJoinMap.end())
{ {
@ -4782,17 +4789,17 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
m1->second.fJoinId = m2->second.fJoinId = thjs->joinId(); m1->second.fJoinId = m2->second.fJoinId = thjs->joinId();
} }
// Separate the expressions // Separate the expressions
else if (exps != NULL && subs == NULL) else if (exps && !subs)
{ {
const vector<uint32_t>& tables = exps->tableKeys(); const vector<uint32_t>& tables = exps->tableKeys();
const vector<uint32_t>& columns = exps->columnKeys(); const vector<uint32_t>& columns = exps->columnKeys();
bool tableInOuterQuery = false; bool tableInOuterQuery = false;
set<uint32_t> tableSet; // involved unique tables set<uint32_t> tableSet; // involved unique tables
for (uint64_t i = 0; i < tables.size(); ++i) for (unsigned int table: tables)
{ {
if (find(jobInfo.tableList.begin(), jobInfo.tableList.end(), tables[i]) != jobInfo.tableList.end()) if (find(jobInfo.tableList.begin(), jobInfo.tableList.end(), table) != jobInfo.tableList.end())
tableSet.insert(tables[i]); tableSet.insert(table);
else else
tableInOuterQuery = true; tableInOuterQuery = true;
} }
@ -4814,10 +4821,10 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
tableInfoMap[tables[i]].fProjectCols.push_back(c); tableInfoMap[tables[i]].fProjectCols.push_back(c);
jobInfo.pjColList.push_back(getTupleInfo(c, jobInfo)); jobInfo.pjColList.push_back(getTupleInfo(c, jobInfo));
jobInfo.returnColSet.insert(c); jobInfo.returnColSet.insert(c);
const SimpleColumn* sc = dynamic_cast<const SimpleColumn*>(exps->columns()[i]); const auto* sc = dynamic_cast<const SimpleColumn*>(exps->columns()[i]);
if (sc != NULL) if (sc)
jobInfo.deliveredCols.push_back(SRCP(sc->clone())); jobInfo.deliveredCols.emplace_back(sc->clone());
} }
} }
@ -4831,8 +4838,8 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
// single table and not in join on clause // single table and not in join on clause
uint32_t tid = tables[0]; uint32_t tid = tables[0];
for (uint64_t i = 0; i < columns.size(); ++i) for (unsigned int column : columns)
tableInfoMap[tid].fColsInExp1.push_back(columns[i]); tableInfoMap[tid].fColsInExp1.push_back(column);
tableInfoMap[tid].fOneTableExpSteps.push_back(*it); tableInfoMap[tid].fOneTableExpSteps.push_back(*it);
} }
@ -4848,9 +4855,8 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
} }
// resolve after join: cross table or on clause conditions // resolve after join: cross table or on clause conditions
for (uint64_t i = 0; i < columns.size(); ++i) for (unsigned int cid : columns)
{ {
uint32_t cid = columns[i];
uint32_t tid = getTableKey(jobInfo, cid); uint32_t tid = getTableKey(jobInfo, cid);
tableInfoMap[tid].fColsInExp2.push_back(cid); tableInfoMap[tid].fColsInExp2.push_back(cid);
} }
@ -4887,7 +4893,7 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
outjoinPredicateAdjust(tableInfoMap, jobInfo); outjoinPredicateAdjust(tableInfoMap, jobInfo);
// @bug4021, make sure there is real column to scan // @bug4021, make sure there is real column to scan
for (TableInfoMap::iterator it = tableInfoMap.begin(); it != tableInfoMap.end(); it++) for (auto it = tableInfoMap.begin(); it != tableInfoMap.end(); it++)
{ {
uint32_t tableUid = it->first; uint32_t tableUid = it->first;
@ -4895,8 +4901,8 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
continue; continue;
JobStepVector& steps = tableInfoMap[tableUid].fQuerySteps; JobStepVector& steps = tableInfoMap[tableUid].fQuerySteps;
JobStepVector::iterator s = steps.begin(); auto s = steps.begin();
JobStepVector::iterator p = steps.end(); auto p = steps.end();
for (; s != steps.end(); s++) for (; s != steps.end(); s++)
{ {
@ -4910,7 +4916,7 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
if (s == steps.end()) if (s == steps.end())
{ {
map<uint64_t, SRCP>::iterator t = jobInfo.tableColMap.find(tableUid); auto t = jobInfo.tableColMap.find(tableUid);
if (t == jobInfo.tableColMap.end()) if (t == jobInfo.tableColMap.end())
{ {
@ -4919,7 +4925,7 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
throw runtime_error(msg); throw runtime_error(msg);
} }
SimpleColumn* sc = dynamic_cast<SimpleColumn*>(t->second.get()); auto* sc = dynamic_cast<SimpleColumn*>(t->second.get());
CalpontSystemCatalog::OID oid = sc->oid(); CalpontSystemCatalog::OID oid = sc->oid();
CalpontSystemCatalog::OID tblOid = tableOid(sc, jobInfo.csc); CalpontSystemCatalog::OID tblOid = tableOid(sc, jobInfo.csc);
CalpontSystemCatalog::ColType ct = sc->colType(); CalpontSystemCatalog::ColType ct = sc->colType();
@ -4946,30 +4952,30 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
} }
// @bug3767, error out scalar subquery with aggregation and correlated additional comparison. // @bug3767, error out scalar subquery with aggregation and correlated additional comparison.
if (jobInfo.hasAggregation && (jobInfo.correlateSteps.size() > 0)) if (jobInfo.hasAggregation && (!jobInfo.correlateSteps.empty()))
{ {
// expression filter // expression filter
ExpressionStep* exp = NULL; ExpressionStep* exp = nullptr;
for (it = jobInfo.correlateSteps.begin(); it != jobInfo.correlateSteps.end(); it++) for (it = jobInfo.correlateSteps.begin(); it != jobInfo.correlateSteps.end(); it++)
{ {
if (((exp = dynamic_cast<ExpressionStep*>(it->get())) != NULL) && (!exp->functionJoin())) if (((exp = dynamic_cast<ExpressionStep*>(it->get())) != nullptr) && (!exp->functionJoin()))
break; break;
exp = NULL; exp = nullptr;
} }
// correlated join step // correlated join step
TupleHashJoinStep* thjs = NULL; TupleHashJoinStep* thjs = nullptr;
for (it = jobInfo.correlateSteps.begin(); it != jobInfo.correlateSteps.end(); it++) for (it = jobInfo.correlateSteps.begin(); it != jobInfo.correlateSteps.end(); it++)
{ {
if ((thjs = dynamic_cast<TupleHashJoinStep*>(it->get())) != NULL) if ((thjs = dynamic_cast<TupleHashJoinStep*>(it->get())) != nullptr)
break; break;
} }
// @bug5202, error out not equal correlation and aggregation in subquery. // @bug5202, error out not equal correlation and aggregation in subquery.
if ((exp != NULL) && (thjs != NULL) && (thjs->getJoinType() & CORRELATED)) if (exp && thjs && (thjs->getJoinType() & CORRELATED))
throw IDBExcept(IDBErrorInfo::instance()->errorMsg(ERR_NON_SUPPORT_NEQ_AGG_SUB), throw IDBExcept(IDBErrorInfo::instance()->errorMsg(ERR_NON_SUPPORT_NEQ_AGG_SUB),
ERR_NON_SUPPORT_NEQ_AGG_SUB); ERR_NON_SUPPORT_NEQ_AGG_SUB);
} }
@ -4985,7 +4991,7 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
it++; it++;
} }
for (TupleInfoVector::iterator j = jobInfo.pjColList.begin(); j != jobInfo.pjColList.end(); j++) for (auto j = jobInfo.pjColList.begin(); j != jobInfo.pjColList.end(); j++)
{ {
if (jobInfo.keyInfo->tupleKeyVec[j->tkey].fId == CNX_EXP_TABLE_ID) if (jobInfo.keyInfo->tupleKeyVec[j->tkey].fId == CNX_EXP_TABLE_ID)
continue; continue;
@ -5000,9 +5006,9 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
for (it = retExp.begin(); it != retExp.end(); ++it) for (it = retExp.begin(); it != retExp.end(); ++it)
{ {
ExpressionStep* exp = dynamic_cast<ExpressionStep*>(it->get()); auto* exp = dynamic_cast<ExpressionStep*>(it->get());
if (exp == NULL) if (exp == nullptr)
throw runtime_error("Not an expression."); throw runtime_error("Not an expression.");
for (uint64_t i = 0; i < exp->columnKeys().size(); ++i) for (uint64_t i = 0; i < exp->columnKeys().size(); ++i)
@ -5023,7 +5029,7 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
TableInfoMap::iterator mit; TableInfoMap::iterator mit;
for (mit = tableInfoMap.begin(); mit != tableInfoMap.end(); mit++) for (mit = tableInfoMap.begin(); mit != tableInfoMap.end(); mit++)
if (combineJobStepsByTable(mit, jobInfo) == false) if (!combineJobStepsByTable(mit, jobInfo))
throw runtime_error("combineJobStepsByTable failed."); throw runtime_error("combineJobStepsByTable failed.");
// 2. join the combined steps together to form the spanning tree // 2. join the combined steps together to form the spanning tree
@ -5031,9 +5037,9 @@ void associateTupleJobSteps(JobStepVector& querySteps, JobStepVector& projectSte
joinTables(joinSteps, tableInfoMap, jobInfo, joinOrder, overrideLargeSideEstimate); joinTables(joinSteps, tableInfoMap, jobInfo, joinOrder, overrideLargeSideEstimate);
// 3. put the steps together // 3. put the steps together
for (vector<uint32_t>::iterator i = joinOrder.begin(); i != joinOrder.end(); ++i) for (uint32_t i: joinOrder)
querySteps.insert(querySteps.end(), tableInfoMap[*i].fQuerySteps.begin(), querySteps.insert(querySteps.end(), tableInfoMap[i].fQuerySteps.begin(),
tableInfoMap[*i].fQuerySteps.end()); tableInfoMap[i].fQuerySteps.end());
adjustLastStep(querySteps, deliverySteps, jobInfo); // to match the select clause adjustLastStep(querySteps, deliverySteps, jobInfo); // to match the select clause
} }

View File

@ -97,10 +97,10 @@ TupleHashJoinStep::TupleHashJoinStep(const JobInfo& jobInfo)
fExtendedInfo = "THJS: "; fExtendedInfo = "THJS: ";
joinType = INIT; joinType = INIT;
joinThreadCount = resourceManager->getJlNumScanReceiveThreads(); joinThreadCount = resourceManager->getJlNumScanReceiveThreads();
largeBPS = NULL; largeBPS = nullptr;
moreInput = true; moreInput = true;
fQtc.stepParms().stepType = StepTeleStats::T_HJS; fQtc.stepParms().stepType = StepTeleStats::T_HJS;
outputDL = NULL; outputDL = nullptr;
ownsOutputDL = false; ownsOutputDL = false;
djsSmallUsage = jobInfo.smallSideUsage; djsSmallUsage = jobInfo.smallSideUsage;
djsSmallLimit = jobInfo.smallSideLimit; djsSmallLimit = jobInfo.smallSideLimit;

View File

@ -0,0 +1,63 @@
DROP DATABASE IF EXISTS `mcol_5651`;
CREATE DATABASE `mcol_5651`;
USE `mcol_5651`;
CREATE TABLE mcs (id INTEGER NOT NULL, type VARCHAR(10) NOT NULL, sn VARCHAR(30), descr VARCHAR(100), sdate DATETIME) ENGINE=Columnstore;
INSERT INTO mcs VALUES
(1,'a','1',NULL,'2024-01-11 18:36:01'),
(2,'a','1',NULL,'2024-01-11 18:36:03'),
(3,'a','1',NULL,'2024-01-11 18:36:04'),
(4,'a','1',NULL,'2024-01-11 18:36:06'),
(5,'a','1',NULL,'2024-01-11 18:36:07'),
(6,'a','1',NULL,'2024-01-12 13:04:15'),
(7,'a','1',NULL,'2024-01-12 13:04:17'),
(8,'a','1',NULL,'2024-01-12 13:04:18'),
(9,'a','1',NULL,'2024-01-12 13:04:20'),
(10,'a','1',NULL,'2024-01-11 18:35:58'),
(11,'a','1',NULL,'2024-01-11 18:30:00'),
(12,'a','1',NULL,'2024-01-11 18:30:00'),
(13,'a','1',NULL,'2024-01-11 18:30:03'),
(14,'a','1',NULL,'2024-01-11 18:30:03'),
(15,'a','1','a','2024-01-11 18:30:02'),
(16,'a','1',NULL,'2024-01-11 18:30:03'),
(17,'a','1',NULL,'2024-01-11 18:30:03'),
(18,'a','1',NULL,'2024-01-11 18:30:03'),
(19,'a','1',NULL,'2024-01-12 18:53:02'),
(20,'a','1',NULL,'2024-01-12 18:53:02'),
(21,'a','1',NULL,'2024-01-12 19:27:47'),
(22,'a','1',NULL,'2024-01-12 19:27:48'),
(23,'a','1',NULL,'2024-01-13 01:47:26'),
(24,'a','1',NULL,'2024-01-13 01:47:26'),
(25,'a','1',NULL,'2024-01-13 01:47:26'),
(26,'a','1',NULL,'2024-01-13 01:47:26'),
(27,'a','1',NULL,'2024-01-13 01:47:26'),
(28,'a','1',NULL,'2024-01-13 01:47:26');
SELECT s1.id, count(*)
FROM mcs AS s1
WHERE s1.type = 'a'
AND s1.sdate BETWEEN '2026-01-05 16:21:00' - INTERVAL 24 MONTH AND '2026-01-05 16:21:00'
AND EXISTS (
SELECT s.SN, s.sdate
FROM mcs AS s
WHERE s.type = 'a'
AND s.sdate BETWEEN '2026-01-05 16:21:00' - INTERVAL 24 MONTH AND '2026-01-05 16:21:00'
AND s.descr = 'a'
AND s.SN = s1.sn
AND s1.sdate BETWEEN s.sdate - INTERVAL 10 HOUR AND s.sdate + INTERVAL 10 HOUR
)
GROUP BY 1 ORDER BY 2,1;
id count(*)
1 1
2 1
3 1
4 1
5 1
10 1
11 1
12 1
13 1
14 1
15 1
16 1
17 1
18 1
DROP DATABASE `mcol_5651`;

View File

@ -0,0 +1,56 @@
# Regression test for MCOL-5651: workaround to avoid choosing an incorrect
# TupleHashJoinStep as a joiner (see commit message). The correlated EXISTS
# subquery below previously produced wrong results when the joblist picked
# a self-referential hash-join step (tupleId1 == tupleId2) as the joiner.
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS `mcol_5651`;
--enable_warnings
CREATE DATABASE `mcol_5651`;
USE `mcol_5651`;
# Single ColumnStore table joined against itself via the EXISTS subquery.
CREATE TABLE mcs (id INTEGER NOT NULL, type VARCHAR(10) NOT NULL, sn VARCHAR(30), descr VARCHAR(100), sdate DATETIME) ENGINE=Columnstore;
# Only row 15 has descr = 'a'; every other row has descr = NULL, so the
# EXISTS predicate matches rows whose sdate is within 10 hours of row 15's.
INSERT INTO mcs VALUES
(1,'a','1',NULL,'2024-01-11 18:36:01'),
(2,'a','1',NULL,'2024-01-11 18:36:03'),
(3,'a','1',NULL,'2024-01-11 18:36:04'),
(4,'a','1',NULL,'2024-01-11 18:36:06'),
(5,'a','1',NULL,'2024-01-11 18:36:07'),
(6,'a','1',NULL,'2024-01-12 13:04:15'),
(7,'a','1',NULL,'2024-01-12 13:04:17'),
(8,'a','1',NULL,'2024-01-12 13:04:18'),
(9,'a','1',NULL,'2024-01-12 13:04:20'),
(10,'a','1',NULL,'2024-01-11 18:35:58'),
(11,'a','1',NULL,'2024-01-11 18:30:00'),
(12,'a','1',NULL,'2024-01-11 18:30:00'),
(13,'a','1',NULL,'2024-01-11 18:30:03'),
(14,'a','1',NULL,'2024-01-11 18:30:03'),
(15,'a','1','a','2024-01-11 18:30:02'),
(16,'a','1',NULL,'2024-01-11 18:30:03'),
(17,'a','1',NULL,'2024-01-11 18:30:03'),
(18,'a','1',NULL,'2024-01-11 18:30:03'),
(19,'a','1',NULL,'2024-01-12 18:53:02'),
(20,'a','1',NULL,'2024-01-12 18:53:02'),
(21,'a','1',NULL,'2024-01-12 19:27:47'),
(22,'a','1',NULL,'2024-01-12 19:27:48'),
(23,'a','1',NULL,'2024-01-13 01:47:26'),
(24,'a','1',NULL,'2024-01-13 01:47:26'),
(25,'a','1',NULL,'2024-01-13 01:47:26'),
(26,'a','1',NULL,'2024-01-13 01:47:26'),
(27,'a','1',NULL,'2024-01-13 01:47:26'),
(28,'a','1',NULL,'2024-01-13 01:47:26');
# Correlated EXISTS self-join: the outer and inner query scan the same table,
# which is the shape that triggered the bad joiner choice in MCOL-5651.
# Expected result (see mcol_5651.result): ids 1-5 and 10-18, each with count 1.
SELECT s1.id, count(*)
FROM mcs AS s1
WHERE s1.type = 'a'
AND s1.sdate BETWEEN '2026-01-05 16:21:00' - INTERVAL 24 MONTH AND '2026-01-05 16:21:00'
AND EXISTS (
SELECT s.SN, s.sdate
FROM mcs AS s
WHERE s.type = 'a'
AND s.sdate BETWEEN '2026-01-05 16:21:00' - INTERVAL 24 MONTH AND '2026-01-05 16:21:00'
AND s.descr = 'a'
AND s.SN = s1.sn
AND s1.sdate BETWEEN s.sdate - INTERVAL 10 HOUR AND s.sdate + INTERVAL 10 HOUR
)
GROUP BY 1 ORDER BY 2,1;
# cleanup
DROP DATABASE `mcol_5651`;

View File

@ -39,7 +39,7 @@
#include <sstream> #include <sstream>
#include <set> #include <set>
#include "serviceexemgr.h" #include "serviceexemgr.h"
#include <stdlib.h> #include <cstdlib>
using namespace std; using namespace std;
#include <boost/thread.hpp> #include <boost/thread.hpp>
@ -148,7 +148,7 @@ BatchPrimitiveProcessor::BatchPrimitiveProcessor()
pp.setLogicalBlockMode(true); pp.setLogicalBlockMode(true);
pp.setBlockPtr((int*)blockData); pp.setBlockPtr((int*)blockData);
pp.setBlockPtrAux((int*)blockDataAux); pp.setBlockPtrAux((int*)blockDataAux);
pthread_mutex_init(&objLock, NULL); pthread_mutex_init(&objLock, nullptr);
} }
BatchPrimitiveProcessor::BatchPrimitiveProcessor(ByteStream& b, double prefetch, BatchPrimitiveProcessor::BatchPrimitiveProcessor(ByteStream& b, double prefetch,
@ -209,7 +209,7 @@ BatchPrimitiveProcessor::BatchPrimitiveProcessor(ByteStream& b, double prefetch,
pp.setBlockPtr((int*)blockData); pp.setBlockPtr((int*)blockData);
pp.setBlockPtrAux((int*)blockDataAux); pp.setBlockPtrAux((int*)blockDataAux);
sendThread = bppst; sendThread = bppst;
pthread_mutex_init(&objLock, NULL); pthread_mutex_init(&objLock, nullptr);
initBPP(b); initBPP(b);
} }
@ -417,7 +417,6 @@ void BatchPrimitiveProcessor::initBPP(ByteStream& bs)
for (i = 0; i < joinerCount; i++) for (i = 0; i < joinerCount; i++)
{ {
smallSideRowLengths[i] = smallSideRGs[i].getRowSize(); smallSideRowLengths[i] = smallSideRGs[i].getRowSize();
;
smallSideRowData[i] = RGData(smallSideRGs[i], tJoinerSizes[i]); smallSideRowData[i] = RGData(smallSideRGs[i], tJoinerSizes[i]);
smallSideRGs[i].setData(&smallSideRowData[i]); smallSideRGs[i].setData(&smallSideRowData[i]);
smallSideRGs[i].resetRowGroup(0); smallSideRGs[i].resetRowGroup(0);
@ -467,7 +466,7 @@ void BatchPrimitiveProcessor::initBPP(ByteStream& bs)
{ {
hasFilterStep = true; hasFilterStep = true;
if (dynamic_cast<StrFilterCmd*>(filterSteps[i].get()) != NULL) if (dynamic_cast<StrFilterCmd*>(filterSteps[i].get()) != nullptr)
filtOnString = true; filtOnString = true;
} }
else if (type == Command::DICT_STEP || type == Command::RID_TO_STRING) else if (type == Command::DICT_STEP || type == Command::RID_TO_STRING)
@ -498,10 +497,9 @@ void BatchPrimitiveProcessor::initBPP(ByteStream& bs)
bs >> *(fAggregator.get()); bs >> *(fAggregator.get());
// If there's UDAF involved, set up for PM processing // If there's UDAF involved, set up for PM processing
for (uint64_t i = 0; i < fAggregator->getAggFunctions().size(); i++) for (const auto & pcol : fAggregator->getAggFunctions())
{ {
RowUDAFFunctionCol* rowUDAF = auto* rowUDAF = dynamic_cast<RowUDAFFunctionCol*>(pcol.get());
dynamic_cast<RowUDAFFunctionCol*>(fAggregator->getAggFunctions()[i].get());
if (rowUDAF) if (rowUDAF)
{ {
@ -553,10 +551,10 @@ void BatchPrimitiveProcessor::resetBPP(ByteStream& bs, const SP_UM_MUTEX& w, con
ridMap = 0; ridMap = 0;
baseRid = absRids[0] & 0xffffffffffffe000ULL; baseRid = absRids[0] & 0xffffffffffffe000ULL;
for (uint32_t i = 0; i < ridCount; i++) for (uint32_t j = 0; j < ridCount; j++)
{ {
relRids[i] = absRids[i] - baseRid; relRids[j] = absRids[j] - baseRid;
ridMap |= 1 << (relRids[i] >> 9); ridMap |= 1 << (relRids[j] >> 9);
} }
} }
else else
@ -583,7 +581,7 @@ void BatchPrimitiveProcessor::resetBPP(ByteStream& bs, const SP_UM_MUTEX& w, con
projectSteps[i]->resetCommand(bs); projectSteps[i]->resetCommand(bs);
} }
idbassert(bs.length() == 0); idbassert(bs.empty());
/* init vars not part of the BS */ /* init vars not part of the BS */
currentBlockOffset = 0; currentBlockOffset = 0;
@ -1098,7 +1096,7 @@ void BatchPrimitiveProcessor::initProcessor()
} }
} }
if (fAggregator.get() != NULL) if (fAggregator.get() != nullptr)
{ {
fAggRowGroupData.reinit(fAggregateRG); fAggRowGroupData.reinit(fAggregateRG);
fAggregateRG.setData(&fAggRowGroupData); fAggregateRG.setData(&fAggRowGroupData);
@ -1164,7 +1162,6 @@ uint32_t BatchPrimitiveProcessor::executeTupleJoin(uint32_t startRid, RowGroup&
for (j = 0; j < joinerCount; j++) for (j = 0; j < joinerCount; j++)
{ {
bool found; bool found;
if (UNLIKELY(joinTypes[j] & ANTI)) if (UNLIKELY(joinTypes[j] & ANTI))
{ {
if (joinTypes[j] & WITHFCNEXP) if (joinTypes[j] & WITHFCNEXP)
@ -1184,7 +1181,7 @@ uint32_t BatchPrimitiveProcessor::executeTupleJoin(uint32_t startRid, RowGroup&
largeKey = oldRow.getIntField(colIndex); largeKey = oldRow.getIntField(colIndex);
uint bucket = bucketPicker((char*)&largeKey, 8, bpSeed) & ptMask; uint bucket = bucketPicker((char*)&largeKey, 8, bpSeed) & ptMask;
bool joinerIsEmpty = tJoiners[j][bucket]->empty() ? true : false; bool joinerIsEmpty = tJoiners[j][bucket]->empty();
found = (tJoiners[j][bucket]->find(largeKey) != tJoiners[j][bucket]->end()); found = (tJoiners[j][bucket]->find(largeKey) != tJoiners[j][bucket]->end());
isNull = oldRow.isNullValue(colIndex); isNull = oldRow.isNullValue(colIndex);
@ -1221,8 +1218,8 @@ uint32_t BatchPrimitiveProcessor::executeTupleJoin(uint32_t startRid, RowGroup&
{ {
bool hasNull = false; bool hasNull = false;
for (uint32_t z = 0; z < tlLargeSideKeyColumns[j].size(); z++) for (unsigned int column: tlLargeSideKeyColumns[j])
if (oldRow.isNullValue(tlLargeSideKeyColumns[j][z])) if (oldRow.isNullValue(column))
{ {
hasNull = true; hasNull = true;
break; break;
@ -1396,7 +1393,7 @@ void BatchPrimitiveProcessor::execute()
{ {
ColumnCommand* col = dynamic_cast<ColumnCommand*>(filterSteps[0].get()); ColumnCommand* col = dynamic_cast<ColumnCommand*>(filterSteps[0].get());
if ((col != NULL) && (col->getFilterCount() == 0) && (col->getLBID() != 0)) if ((col != nullptr) && (col->getFilterCount() == 0) && (col->getLBID() != 0))
{ {
// stored in last pos in relLBID[] and asyncLoaded[] // stored in last pos in relLBID[] and asyncLoaded[]
uint64_t p = projectCount; uint64_t p = projectCount;
@ -2452,7 +2449,7 @@ SBPP BatchPrimitiveProcessor::duplicate()
for (i = 0; i < projectCount; ++i) for (i = 0; i < projectCount; ++i)
bpp->projectSteps[i] = projectSteps[i]->duplicate(); bpp->projectSteps[i] = projectSteps[i]->duplicate();
if (fAggregator.get() != NULL) if (fAggregator.get() != nullptr)
{ {
bpp->fAggregateRG = fAggregateRG; bpp->fAggregateRG = fAggregateRG;
bpp->fAggregator.reset(new RowAggregation(fAggregator->getGroupByCols(), fAggregator->getAggFunctions())); bpp->fAggregator.reset(new RowAggregation(fAggregator->getGroupByCols(), fAggregator->getAggFunctions()));
@ -2551,7 +2548,7 @@ void BatchPrimitiveProcessor::asyncLoadProjectColumns()
// only care about column commands // only care about column commands
ColumnCommand* col = dynamic_cast<ColumnCommand*>(projectSteps[i].get()); ColumnCommand* col = dynamic_cast<ColumnCommand*>(projectSteps[i].get());
if (col != NULL) if (col != nullptr)
{ {
asyncLoaded[i] = asyncLoaded[i] && (relLBID[i] % blocksReadAhead != 0); asyncLoaded[i] = asyncLoaded[i] && (relLBID[i] % blocksReadAhead != 0);
relLBID[i] += col->getWidth(); relLBID[i] += col->getWidth();
@ -2706,12 +2703,14 @@ inline void BatchPrimitiveProcessor::getJoinResults(const Row& r, uint32_t jInde
{ {
bool hasNullValue = false; bool hasNullValue = false;
for (uint32_t i = 0; i < tlLargeSideKeyColumns[jIndex].size(); i++) for (unsigned int column: tlLargeSideKeyColumns[jIndex])
if (r.isNullValue(tlLargeSideKeyColumns[jIndex][i])) {
if (r.isNullValue(column))
{ {
hasNullValue = true; hasNullValue = true;
break; break;
} }
}
if (hasNullValue) if (hasNullValue)
{ {

View File

@ -599,7 +599,7 @@ void TupleJoiner::match(rowgroup::Row& largeSideRow, uint32_t largeRowIndex, uin
if (UNLIKELY(inUM() && (joinType & MATCHNULLS) && !isNull && !typelessJoin)) if (UNLIKELY(inUM() && (joinType & MATCHNULLS) && !isNull && !typelessJoin))
{ {
if (largeRG.getColType(largeKeyColumns[0]) == CalpontSystemCatalog::LONGDOUBLE) if (largeRG.getColType(largeKeyColumns[0]) == CalpontSystemCatalog::LONGDOUBLE && ld)
{ {
uint bucket = bucketPicker((char*)&(joblist::LONGDOUBLENULL), sizeof(joblist::LONGDOUBLENULL), bpSeed) & uint bucket = bucketPicker((char*)&(joblist::LONGDOUBLENULL), sizeof(joblist::LONGDOUBLENULL), bpSeed) &
bucketMask; bucketMask;
@ -608,7 +608,7 @@ void TupleJoiner::match(rowgroup::Row& largeSideRow, uint32_t largeRowIndex, uin
for (; range.first != range.second; ++range.first) for (; range.first != range.second; ++range.first)
matches->push_back(range.first->second); matches->push_back(range.first->second);
} }
else if (!largeRG.usesStringTable()) else if (!smallRG.usesStringTable())
{ {
auto nullVal = getJoinNullValue(); auto nullVal = getJoinNullValue();
uint bucket = bucketPicker((char*)&nullVal, sizeof(nullVal), bpSeed) & bucketMask; uint bucket = bucketPicker((char*)&nullVal, sizeof(nullVal), bpSeed) & bucketMask;