
MCOL-271 empty strings should not be NULLs (#2794)

This patch improves the handling of NULLs in textual fields in ColumnStore.
Previously, empty strings were treated as NULLs, which could be a problem
if the data schema allows empty strings. This was also one of the major
reasons for behavioral differences between ColumnStore and other engines
in the MariaDB family.
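To illustrate the distinction the patch relies on, the sketch below shows a
hypothetical nullable-string wrapper. It is illustrative only and is not the
actual utils::NullString introduced by this commit: here a null data pointer
stands for SQL NULL, while a non-null pointer with length 0 stands for an
empty but present string.

    #include <cstddef>

    // Hypothetical sketch only, not the utils::NullString used by this commit:
    // a null data pointer stands for SQL NULL, while a non-null pointer with
    // length 0 stands for an empty but present string.
    class NullableString
    {
     public:
      NullableString() : data_(nullptr), len_(0)  // default-constructed value is NULL
      {
      }
      NullableString(const char* data, std::size_t len) : data_(data), len_(len)
      {
      }

      bool isNull() const { return data_ == nullptr; }
      bool isEmpty() const { return !isNull() && len_ == 0; }
      const char* data() const { return data_; }
      std::size_t length() const { return len_; }

     private:
      const char* data_;
      std::size_t len_;
    };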

This patch also fixes several other bugs and incorrect behaviors, for
example, the comparison "column <= ''", which before this patch
evaluated to a constant TRUE for all practical purposes.
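As a contrast, the sketch below (reusing the hypothetical NullableString
above; it is not ColumnStore's collation-aware StringComparator) shows the
behavior the fix aims for: a NULL operand never satisfies the predicate,
while a genuinely empty string takes part in an ordinary byte-wise
comparison, so "column <= ''" only matches rows whose value is itself empty.

    #include <cstring>
    #include <cstddef>

    // Minimal sketch, not ColumnStore's collation-aware StringComparator:
    // a NULL operand never satisfies the predicate, while an empty string
    // takes part in an ordinary byte-wise comparison.
    bool lessOrEqual(const NullableString& lhs, const NullableString& rhs)
    {
      if (lhs.isNull() || rhs.isNull())
        return false;  // a comparison against NULL is never satisfied

      std::size_t n = lhs.length() < rhs.length() ? lhs.length() : rhs.length();
      int cmp = n ? std::memcmp(lhs.data(), rhs.data(), n) : 0;
      return cmp != 0 ? cmp < 0 : lhs.length() <= rhs.length();
    }

With this definition, lessOrEqual(value, NullableString("", 0)) is true only
for non-NULL, empty values rather than for every row.
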
Author: Sergey Zefirov
Date: 2023-03-30 17:26:45 +01:00
Committed by: Roman Nozdrin
Parent: 0ea592da80
Commit: b53c231ca6
417 changed files with 12459 additions and 3520 deletions


@@ -36,9 +36,6 @@ using namespace std;
using namespace logging;
const char* nullString = " "; // this is not NULL to preempt segfaults.
const int nullStringLen = 0;
namespace
{
const char* signatureNotFound = joblist::CPSTRNOTFOUND.c_str();
@@ -50,6 +47,7 @@ inline bool PrimitiveProcessor::compare(const datatypes::Charset& cs, uint8_t CO
size_t length1, const char* str2, size_t length2) throw()
{
int error = 0;
utils::NullString s1 (str1, length1), s2 (str2, length2);
bool rc = primitives::StringComparator(cs).op(&error, COP, ConstString(str1, length1),
ConstString(str2, length2));
if (error)
@@ -75,7 +73,7 @@ Notes:
void PrimitiveProcessor::p_TokenByScan(const TokenByScanRequestHeader* h, TokenByScanResultHeader* ret,
unsigned outSize, boost::shared_ptr<DictEqualityFilter> eqFilter)
{
const DataValue* args;
const NonNullDataValue* args;
const uint8_t* niceBlock; // block cast to a byte-indexed type
const uint8_t* niceInput; // h cast to a byte-indexed type
const uint16_t* offsets;
@@ -122,7 +120,7 @@ void PrimitiveProcessor::p_TokenByScan(const TokenByScanRequestHeader* h, TokenB
siglen = offsets[offsetIndex - 1] - offsets[offsetIndex];
sig = reinterpret_cast<const char*>(&niceBlock[offsets[offsetIndex]]);
argsOffset = sizeof(TokenByScanRequestHeader);
args = reinterpret_cast<const DataValue*>(&niceInput[argsOffset]);
args = reinterpret_cast<const NonNullDataValue*>(&niceInput[argsOffset]);
if (eqFilter)
{
@@ -138,7 +136,6 @@ void PrimitiveProcessor::p_TokenByScan(const TokenByScanRequestHeader* h, TokenB
goto no_store;
}
cmpResult = compare(cs, h->COP1, sig, siglen, args->data, args->len);
switch (h->NVALS)
@@ -159,8 +156,8 @@ void PrimitiveProcessor::p_TokenByScan(const TokenByScanRequestHeader* h, TokenB
if (cmpResult && h->BOP == BOP_OR)
goto store;
argsOffset += sizeof(uint16_t) + args->len;
args = (DataValue*)&niceInput[argsOffset];
argsOffset += sizeof(*args) + args->len;
args = (NonNullDataValue*)&niceInput[argsOffset];
cmpResult = compare(cs, h->COP2, sig, siglen, args->data, args->len);
@@ -183,8 +180,8 @@ void PrimitiveProcessor::p_TokenByScan(const TokenByScanRequestHeader* h, TokenB
if (cmpResult && h->BOP == BOP_OR)
goto store;
argsOffset += sizeof(uint16_t) + args->len;
args = (DataValue*)&niceInput[argsOffset];
argsOffset += sizeof(*args) + args->len;
args = (NonNullDataValue*)&niceInput[argsOffset];
}
if (i == h->NVALS && cmpResult)
@@ -195,7 +192,6 @@ void PrimitiveProcessor::p_TokenByScan(const TokenByScanRequestHeader* h, TokenB
}
store:
if (h->OutputType == OT_DATAVALUE)
{
if ((ret->NBYTES + sizeof(DataValue) + siglen) > outSize)
@@ -211,6 +207,7 @@ void PrimitiveProcessor::p_TokenByScan(const TokenByScanRequestHeader* h, TokenB
throw logging::DictionaryBufferOverflow();
}
retDataValues->isnull = false; //retDataValues->data == nullptr; XXX: SZ: verify.
retDataValues->len = siglen;
memcpy(retDataValues->data, sig, siglen);
rdvOffset += sizeof(DataValue) + siglen;
@@ -260,6 +257,7 @@ void PrimitiveProcessor::p_TokenByScan(const TokenByScanRequestHeader* h, TokenB
throw logging::DictionaryBufferOverflow();
}
retDataValues->isnull = false;
retDataValues->len = args->len;
memcpy(retDataValues->data, args->data, args->len);
rdvOffset += sizeof(DataValue) + args->len;
@@ -326,8 +324,8 @@ void PrimitiveProcessor::nextSig(int NVALS, const PrimToken* tokens, p_DataValue
goto again;
}
ret->len = nullStringLen;
ret->data = (const uint8_t*)nullString;
ret->len = 0;
ret->data = (const uint8_t*)nullptr;
}
else
{
@@ -358,6 +356,7 @@ void PrimitiveProcessor::nextSig(int NVALS, const PrimToken* tokens, p_DataValue
}
/* XXXPAT: Need to check for the NULL token here */
ret->len = tokens[dict_OffsetIndex].len;
ret->data = &niceBlock[tokens[dict_OffsetIndex].offset];
@@ -562,6 +561,7 @@ void PrimitiveProcessor::p_Dictionary(const DictInput* in, vector<uint8_t>* out,
}
outValue = reinterpret_cast<DataValue*>(&(*out)[header.NBYTES]);
outValue->isnull = false;
outValue->len = filter->len;
memcpy(outValue->data, filter->data, filter->len);
header.NBYTES += sizeof(DataValue) + filter->len;
@@ -592,7 +592,10 @@ void PrimitiveProcessor::p_Dictionary(const DictInput* in, vector<uint8_t>* out,
out->resize(out->size() * SCALE_FACTOR);
}
idbassert(sigptr.data != nullptr || !sigptr.len);
outValue = reinterpret_cast<DataValue*>(&(*out)[header.NBYTES]);
outValue->isnull = sigptr.data == nullptr;
outValue->len = sigptr.len;
memcpy(outValue->data, sigptr.data, sigptr.len);
header.NBYTES += sizeof(DataValue) + sigptr.len;
@@ -615,8 +618,10 @@ void PrimitiveProcessor::p_Dictionary(const DictInput* in, vector<uint8_t>* out,
DataValue* tmpDV = reinterpret_cast<DataValue*>(&(*out)[header.NBYTES + sizeof(uint16_t)]);
*tmp16 = aggCount;
tmpDV->isnull = 0;
tmpDV->len = min.len;
memcpy(tmpDV->data, min.data, min.len);
idbassert(0); /// this is just plain wrong.
header.NBYTES += 2 * sizeof(uint16_t) + min.len;
tmpDV = reinterpret_cast<DataValue*>(&(*out)[header.NBYTES]);