MCOL-4576: remove S3 options from cpimport. (#2307)
@@ -561,11 +561,6 @@ void WECmdArgs::usage()
       << "\t-T\tTimezone used for TIMESTAMP datatype.\n"
       << "\t\tPossible values: \"SYSTEM\" (default)\n"
       << "\t\t : Offset in the form +/-HH:MM\n"
-      << "\t-y\tS3 Authentication Key (for S3 imports)\n"
-      << "\t-K\tS3 Authentication Secret (for S3 imports)\n"
-      << "\t-t\tS3 Bucket (for S3 imports)\n"
-      << "\t-H\tS3 Hostname (for S3 imports, Amazon's S3 default)\n"
-      << "\t-g\tS3 Region (for S3 imports)\n"
       << "\t-L\tDirectory for the output .err and .bad files.\n"
       << "\t\tDefault is " << string(MCSLOGDIR);
 
@@ -598,7 +593,7 @@ void WECmdArgs::parseCmdLineArgs(int argc, char** argv)
   if (argc > 0)
     fPrgmName = string(MCSBINDIR) + "/" + "cpimport.bin"; // argv[0] is splitter but we need cpimport
 
-  while ((aCh = getopt(argc, argv, "d:j:w:s:v:l:r:b:e:B:f:q:ihm:E:C:P:I:n:p:c:ST:Ny:K:t:H:g:U:L:")) != EOF)
+  while ((aCh = getopt(argc, argv, "d:j:w:s:v:l:r:b:e:B:f:q:ihm:E:C:P:I:n:p:c:ST:N:U:L:")) != EOF)
   {
     switch (aCh)
     {
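For context on the getopt() change above: a letter followed by ':' in the option string takes an argument, a bare letter is a plain switch, and a letter that does not appear at all comes back as '?'. Since y, K, t, H and g are no longer in the string, the old S3 flags are now rejected by the parser. A minimal stand-alone sketch of that mechanism, using a shortened made-up option string rather than the real one (not ColumnStore code):

    #include <cstdio>
    #include <unistd.h>

    int main(int argc, char** argv)
    {
      int aCh;

      // Shortened stand-in for the cpimport option string shown in the diff.
      while ((aCh = getopt(argc, argv, "m:ST:N:U:L:")) != -1)
      {
        switch (aCh)
        {
          case 'S':
            std::printf("-S: plain switch, no argument\n");
            break;
          case '?':
            std::printf("unknown option (what a dropped letter such as 'y' would hit)\n");
            break;
          default:
            std::printf("-%c with argument '%s'\n", aCh, optarg);
            break;
        }
      }
      return 0;
    }

Invoked as, say, "optdemo -T SYSTEM -y key", this sketch accepts -T with its argument and reports -y as unknown, which mirrors how the trimmed option string behaves.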
@@ -906,7 +901,7 @@ void WECmdArgs::parseCmdLineArgs(int argc, char** argv)
        fConsoleOutput = false;
        break;
      }
-
+     /*
      case 'y': //-y S3 Key
      {
        fS3Key = optarg;
@@ -936,7 +931,7 @@ void WECmdArgs::parseCmdLineArgs(int argc, char** argv)
        fS3Region = optarg;
        break;
      }
-
+     */
      case 'U': //-U username of the files owner
      {
        fUsername = optarg;
@@ -481,6 +481,15 @@ void WEFileReadThread::openInFile()
      use ms3 lib to d/l data into mem
      use boost::iostreams to wrap the mem in a stream interface
      point infile's stream buffer to it.
+     MCOL-4576: The options to setup S3 with cpimport have been removed and this
+     code is unreachable. However we may need to resurrect it at some point in some form.
+     Performance issues with extremely large data files as well as the fact files larger
+     than system memory will cause an OOM error. Multipart downloads/uploads need to be
+     implemented or more likely a different streaming solution developed with external API tools
+
+     MCOL-4576 work around is to use 3rd party CLI tools and pipe data file from S3 bucket
+     into cpimport stdin. 3rd party tooling for large object downloads will be more efficient.
+
   */
 
   if (fSdh.getDebugLvl())
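The comment block added above describes the shelved approach: download the object into memory with the ms3 library, wrap that buffer with boost::iostreams so it behaves like an input stream, and point the reader at it. A minimal, self-contained sketch of just the memory-as-stream step, with hard-coded bytes standing in for an ms3 download (illustrative only, not ColumnStore code):

    #include <boost/iostreams/device/array.hpp>
    #include <boost/iostreams/stream.hpp>
    #include <iostream>
    #include <string>
    #include <vector>

    int main()
    {
      namespace io = boost::iostreams;

      // Stand-in for the bytes an ms3 download would have placed in memory.
      std::vector<char> downloaded{'1', '|', 'a', '\n', '2', '|', 'b', '\n'};

      // array_source presents a raw memory range as a read-only device;
      // io::stream layers std::istream semantics on top of it, which is the
      // "wrap the mem in a stream interface" step the comment mentions.
      io::stream<io::array_source> in(downloaded.data(), downloaded.size());

      std::string line;
      while (std::getline(in, line))
        std::cout << "row: " << line << '\n';

      return 0;
    }

As the comment warns, this approach holds the whole object in memory, so files larger than RAM would OOM without multipart or true streaming support. The workaround it recommends is to stream with external tooling instead, for example having the AWS CLI write the object to stdout ("aws s3 cp s3://<bucket>/<key> -") and piping that into cpimport's stdin; the exact cpimport invocation depends on the installed version.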