You've already forked mariadb-columnstore-engine
mirror of
https://github.com/mariadb-corporation/mariadb-columnstore-engine.git
synced 2025-07-02 17:22:27 +03:00
Refactor - use "all_of" instead of a cycle.
This commit is contained in:
committed by
Leonid Fedorov
parent
0747099456
commit
b25dce3181
@ -378,10 +378,10 @@ int BulkLoad::loadJobInfo(const string& fullName, bool bUseTempJobFile, int argc
|
|||||||
// tableAUXColOid = 0
|
// tableAUXColOid = 0
|
||||||
if (tableAUXColOid > 3000)
|
if (tableAUXColOid > 3000)
|
||||||
{
|
{
|
||||||
JobColumn curColumn("aux", tableAUXColOid, execplan::AUX_COL_DATATYPE_STRING,
|
JobColumn curColumn("aux", tableAUXColOid, execplan::AUX_COL_DATATYPE_STRING, execplan::AUX_COL_WIDTH,
|
||||||
execplan::AUX_COL_WIDTH, execplan::AUX_COL_WIDTH,
|
execplan::AUX_COL_WIDTH, execplan::AUX_COL_COMPRESSION_TYPE,
|
||||||
execplan::AUX_COL_COMPRESSION_TYPE, execplan::AUX_COL_COMPRESSION_TYPE,
|
execplan::AUX_COL_COMPRESSION_TYPE, execplan::AUX_COL_MINVALUE,
|
||||||
execplan::AUX_COL_MINVALUE, execplan::AUX_COL_MAXVALUE, true, 1);
|
execplan::AUX_COL_MAXVALUE, true, 1);
|
||||||
curColumn.fFldColRelation = BULK_FLDCOL_COLUMN_DEFAULT;
|
curColumn.fFldColRelation = BULK_FLDCOL_COLUMN_DEFAULT;
|
||||||
curJob.jobTableList[i].colList.push_back(curColumn);
|
curJob.jobTableList[i].colList.push_back(curColumn);
|
||||||
JobFieldRef fieldRef(BULK_FLDCOL_COLUMN_DEFAULT, curJob.jobTableList[i].colList.size() - 1);
|
JobFieldRef fieldRef(BULK_FLDCOL_COLUMN_DEFAULT, curJob.jobTableList[i].colList.size() - 1);
|
||||||
@ -1210,26 +1210,21 @@ int BulkLoad::manageImportDataFileList(Job& job, int tableNo, TableInfo* tableIn
|
|||||||
std::vector<std::string> loadFilesList;
|
std::vector<std::string> loadFilesList;
|
||||||
bool bUseStdin = false;
|
bool bUseStdin = false;
|
||||||
|
|
||||||
// Check if all the import files are parquet file
|
// Check if all the import files are the same type.
|
||||||
bool isParquet = false;
|
const auto& fileNameA = (fCmdLineImportFiles.empty()) ? "" : fCmdLineImportFiles.front();
|
||||||
for (unsigned int i = 0; i < fCmdLineImportFiles.size(); i++)
|
bool allFilesHaveSameType =
|
||||||
{
|
!fCmdLineImportFiles.empty() &&
|
||||||
if (fCmdLineImportFiles[i].rfind(".parquet") != std::string::npos)
|
std::all_of(std::next(fCmdLineImportFiles.begin()), fCmdLineImportFiles.end(),
|
||||||
{
|
[&fileNameA](auto& fileName) { return fileName.rfind(fileNameA) != std::string::npos; });
|
||||||
if (!isParquet)
|
|
||||||
isParquet = true;
|
if (!fCmdLineImportFiles.empty() && !allFilesHaveSameType)
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
if (isParquet)
|
|
||||||
{
|
{
|
||||||
ostringstream oss;
|
ostringstream oss;
|
||||||
oss << "Import files exist parquet file while not all of them are parquet files.";
|
oss << "Input files have different types.";
|
||||||
fLog.logMsg(oss.str(), ERR_FILE_TYPE_DIFF, MSGLVL_ERROR);
|
fLog.logMsg(oss.str(), ERR_FILE_TYPE_DIFF, MSGLVL_ERROR);
|
||||||
return ERR_FILE_TYPE_DIFF;
|
return ERR_FILE_TYPE_DIFF;
|
||||||
}
|
}
|
||||||
}
|
const bool isParquet = allFilesHaveSameType && fileNameA.rfind(".parquet") != std::string::npos;
|
||||||
}
|
|
||||||
|
|
||||||
if (isParquet)
|
if (isParquet)
|
||||||
{
|
{
|
||||||
@ -1411,7 +1406,6 @@ int BulkLoad::buildImportDataFileList(const std::string& location, const std::st
|
|||||||
fullPath += token;
|
fullPath += token;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
// If running mode2, then support a filename with wildcards
|
// If running mode2, then support a filename with wildcards
|
||||||
if (fBulkMode == BULK_MODE_REMOTE_MULTIPLE_SRC)
|
if (fBulkMode == BULK_MODE_REMOTE_MULTIPLE_SRC)
|
||||||
{
|
{
|
||||||
|
Reference in New Issue
Block a user