diff --git a/CMakeLists.txt b/CMakeLists.txt index a05c92d5b..ee1b17d45 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -174,8 +174,8 @@ endif() INCLUDE(check_compiler_flag) -MY_CHECK_AND_SET_COMPILER_FLAG("-g -O3 -fno-omit-frame-pointer -fno-strict-aliasing -Wall -fno-tree-vectorize -D_GLIBCXX_ASSERTIONS -DDBUG_OFF -DHAVE_CONFIG_H" RELEASE RELWITHDEBINFO MINSIZEREL) -MY_CHECK_AND_SET_COMPILER_FLAG("-ggdb3 -fno-omit-frame-pointer -fno-tree-vectorize -D_GLIBCXX_ASSERTIONS -DSAFE_MUTEX -DSAFEMALLOC -DENABLED_DEBUG_SYNC -O0 -Wall -D_DEBUG -DHAVE_CONFIG_H" DEBUG) +MY_CHECK_AND_SET_COMPILER_FLAG("-g -O3 -std=c++11 -fno-omit-frame-pointer -fno-strict-aliasing -Wall -fno-tree-vectorize -D_GLIBCXX_ASSERTIONS -DDBUG_OFF -DHAVE_CONFIG_H" RELEASE RELWITHDEBINFO MINSIZEREL) +MY_CHECK_AND_SET_COMPILER_FLAG("-ggdb3 -std=c++11 -fno-omit-frame-pointer -fno-tree-vectorize -D_GLIBCXX_ASSERTIONS -DSAFE_MUTEX -DSAFEMALLOC -DENABLED_DEBUG_SYNC -O0 -Wall -D_DEBUG -DHAVE_CONFIG_H" DEBUG) # enable security hardening features, like most distributions do # in our benchmarks that costs about ~1% of performance, depending on the load diff --git a/build/bootstrap.4.5 b/build/bootstrap.4.5 deleted file mode 100755 index ac400b225..000000000 --- a/build/bootstrap.4.5 +++ /dev/null @@ -1,63 +0,0 @@ -#!/bin/bash -# -# $Id: bootstrap 1333 2011-01-18 17:25:26Z rdempsey $ -# -# This script is responsible for setting up a fresh development tree -# following a git checkout. It copies static files and shared include -# files to the export tree prior to building the software. - -mkdir -p export/{include,lib,etc,share,bin,sbin,post} -if [ ! $$ ]; then - exit 1 -fi - -cp build/gdb export/bin -cp build/libgcc_s.so.1 export/lib -cp build/libstdc++.so.6.0.14 export/lib - -cp build/files.lst $HOME/InfiniDB-MySQL/export/. -cp dbcon/mysql/install_calpont_mysql.sh $HOME/InfiniDB-MySQL/export/. -cp dbcon/mysql/my.cnf $HOME/InfiniDB-MySQL/export/. -cp dbcon/mysql/mysql-Calpont $HOME/InfiniDB-MySQL/export/. - -cp build/Doxyfile export/etc -if [ ! $$ ]; then - exit 1 -fi - -cp -p -r utils/net-snmp export/include - -if [ `uname -m` == "x86_64" ]; then - cp -p utils/net-snmp-built-libs/lib64/* export/lib - cp -p utils/net-snmp-built-libs/bin64/* export/sbin -else - egrep -qs Constantine /etc/redhat-release - if [ $? -eq 0 ]; then - cp -p utils/net-snmp-built-libs/fc12lib/libnetsnmp*.so.* export/lib - cp -p utils/net-snmp-built-libs/fc12bin/* export/sbin - else - cp -p utils/net-snmp-built-libs/fc6lib/libnetsnmp*.so.* export/lib - cp -p utils/net-snmp-built-libs/fc6bin/* export/sbin - fi -fi - -pushd export/lib >/dev/null -for lib in libnetsnmpagent.so libnetsnmphelpers.so libnetsnmpmibs.so libnetsnmp.so libnetsnmptrapd.so; do - rm -f ${lib} ${lib}.5 - ln -s ${lib}.5.2.1 ${lib}.5 - ln -s ${lib}.5.2.1 ${lib} -done -popd >/dev/null - -#./build/genVersion.sh - -for x in \ -$(find . 
-name '[mM]akefile' \ - | xargs grep -l ^bootstrap: \ - | cut -f 2 -d / \ - | sort -u) -do - echo bootstrapping $x - make BOOTSTRAP=1 -C $x bootstrap -done - diff --git a/build/build-branch.mysql b/build/build-branch.mysql deleted file mode 100755 index 165f74397..000000000 --- a/build/build-branch.mysql +++ /dev/null @@ -1,47 +0,0 @@ -#!/bin/bash - -branch=$1 - -prefix=/usr/local/Calpont -extra_ld_flags="-Wl,-rpath -Wl,$prefix/mysql/lib/mysql -Wl,-rpath -Wl,$prefix/lib" - -export LD_LIBRARY_PATH=/usr/local/gcc45/lib64:/usr/local/gmp43/lib:/usr/local/mpfr24/lib:/usr/local/mpc08/lib -export PATH=/usr/local/gcc45/bin:/usr/local/bin:/bin:/usr/bin -export CC=/usr/local/gcc45/bin/gcc -export CXX=/usr/local/gcc45/bin/g++ - -cd $HOME -updateGenii.pl -b $branch > updateBranch-$branch.log 2>&1 -rm -rf mysql-5.1.39 -tar -zxf mysql-5.1.39.tar.gz -cd mysql-5.1.39 -cp $HOME/$branch/dbcon/mysql/configure.in . -cd sql -cp $HOME/$branch/dbcon/mysql/*.yy . -cp $HOME/$branch/dbcon/mysql/*.h . -cp $HOME/$branch/dbcon/mysql/*.cc . -if [ -f $HOME/$branch/dbcon/mysql/Makefile.am.mysql ]; then - cp $HOME/$branch/dbcon/mysql/Makefile.am.mysql Makefile.am -fi -if [ -f $HOME/$branch/utils/loggingcpp/errorids.h ]; then - cp $HOME/$branch/utils/loggingcpp/errorids.h . -fi -cd .. - -for extras in include mysys; do - if [ -d $HOME/$branch/dbcon/mysql/$extras ]; then - cp $HOME/$branch/dbcon/mysql/$extras/*.h $extras - fi -done - -autoreconf -./configure --prefix=$prefix/mysql --without-libedit --with-readline \ - --with-plugins=csv,heap,myisam,myisammrg,partition --with-mysqld-ldflags="$extra_ld_flags" \ - --with-client-ldflags="$extra_ld_flags" --with-extra-charsets=all --with-ssl -(make && make install) > $HOME/make-branch-$branch.log 2>&1 -cd $HOME/$branch/dbcon/mysql -cp files.lst $prefix/mysql -cp install_calpont_mysql.sh $prefix/mysql -cp my.cnf $prefix/mysql -cp mysql-Calpont $prefix/mysql - diff --git a/build/build-genii.mysql b/build/build-genii.mysql deleted file mode 100755 index 2ac0b091e..000000000 --- a/build/build-genii.mysql +++ /dev/null @@ -1,47 +0,0 @@ -#!/bin/bash - -branch=genii - -prefix=/usr/local/Calpont -extra_ld_flags="-Wl,-rpath -Wl,$prefix/mysql/lib/mysql -Wl,-rpath -Wl,$prefix/lib" - -export LD_LIBRARY_PATH=/usr/local/gcc45/lib64:/usr/local/gmp43/lib:/usr/local/mpfr24/lib:/usr/local/mpc08/lib -export PATH=/usr/local/gcc45/bin:/usr/local/bin:/bin:/usr/bin -export CC=/usr/local/gcc45/bin/gcc -export CXX=/usr/local/gcc45/bin/g++ - -cd $HOME -updateGenii.pl > updateGenii.log 2>&1 -rm -rf mysql-5.1.39 -tar -zxf mysql-5.1.39.tar.gz -cd mysql-5.1.39 -cp $HOME/$branch/dbcon/mysql/configure.in . -cd sql -cp $HOME/$branch/dbcon/mysql/*.yy . -cp $HOME/$branch/dbcon/mysql/*.h . -cp $HOME/$branch/dbcon/mysql/*.cc . -if [ -f $HOME/$branch/dbcon/mysql/Makefile.am.mysql ]; then - cp $HOME/$branch/dbcon/mysql/Makefile.am.mysql Makefile.am -fi -if [ -f $HOME/$branch/utils/loggingcpp/errorids.h ]; then - cp $HOME/$branch/utils/loggingcpp/errorids.h . -fi -cd .. 
- -for extras in include mysys; do - if [ -d $HOME/$branch/dbcon/mysql/$extras ]; then - cp $HOME/$branch/dbcon/mysql/$extras/*.h $extras - fi -done - -autoreconf -./configure --prefix=$prefix/mysql --without-libedit --with-readline \ - --with-plugins=csv,heap,myisam,myisammrg,partition --with-mysqld-ldflags="$extra_ld_flags" \ - --with-client-ldflags="$extra_ld_flags" --with-extra-charsets=all --with-ssl -(make && make install) > $HOME/make-$branch.log 2>&1 -cd $HOME/$branch/dbcon/mysql -cp files.lst $prefix/mysql -cp install_calpont_mysql.sh $prefix/mysql -cp my.cnf $prefix/mysql -cp mysql-Calpont $prefix/mysql - diff --git a/build/mini-tests.sh b/build/mini-tests.sh deleted file mode 100755 index af4f02afd..000000000 --- a/build/mini-tests.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/bash - - -prefix=/usr/local -for arg in "$@"; do - if [ `expr -- "$arg" : '--prefix='` -eq 9 ]; then - prefix="`echo $arg | awk -F= '{print $2}'`" - else - echo "ignoring unknown argument: $arg" 1>&2 - fi -done - -client="${prefix}/Calpont/mysql/bin/mysql --defaults-file=${prefix}/Calpont/mysql/my.cnf --user=root" - -echo " - create database calpont; - use calpont; - create table caltest (col1 int, col2 int) engine=infinidb; - show create table caltest; -" > /tmp/minitest.$$ -${client} < /tmp/minitest.$$ > /tmp/minitest.out.$$ 2>&1 -if [ $? -ne 0 ]; then - echo "test failed!" 1>&2 - exit 1 -fi - -egrep -qsi 'engine=infinidb' /tmp/minitest.out.$$ -if [ $? -ne 0 ]; then - cat /tmp/minitest.out.$$ - echo "test failed!" 1>&2 - exit 1 -fi - -echo " - use calpont; - set autocommit=0; - insert into caltest values (1, 2); - insert into caltest values (3, 4); - commit; - select * from caltest; -" > /tmp/minitest.$$ - -${client} < /tmp/minitest.$$ -rc=$? - -rm -f /tmp/*.$$ - -exit $rc - diff --git a/build/nightly-mini-tester.sh b/build/nightly-mini-tester.sh deleted file mode 100755 index 3eae93c6f..000000000 --- a/build/nightly-mini-tester.sh +++ /dev/null @@ -1,103 +0,0 @@ -#!/bin/bash - -prefix=/usr/local -builddir= -for arg in "$@"; do - if [ `expr -- "$arg" : '--prefix='` -eq 9 ]; then - prefix="`echo $arg | awk -F= '{print $2}'`" - elif [ `expr -- "$arg" : '--builddir='` -eq 11 ]; then - builddir="`echo $arg | awk -F= '{print $2}'`" - else - echo "ignoring unknown argument: $arg" 1>&2 - fi -done - -if [ -z "$builddir" ]; then - echo "I really need a builddir to continue!" 1>&2 - exit 1 -fi - -if [ ! -d ${builddir}/export/Calpont ]; then - echo "I did't find a Calpont dir in ${builddir}/export!" 1>&2 - exit 1 -fi - -# stop any current procs -${prefix}/Calpont/bin/mcsadmin shutdownsystem y -if [ -x ${prefix}/Calpont/bin/infinidb ]; then - ${prefix}/Calpont/bin/infinidb stop -fi - -# really stop current procs -sleep 5 -for proc in DMLProc DDLProc ExeMgr PrimProc controllernode workernode; do - pkill -9 $proc - sleep 1 -done -if [ -x ${prefix}/Calpont/mysql/mysql-Calpont ]; then - ${prefix}/Calpont/mysql/mysql-Calpont stop -fi - -# cleanup - -# remove shm segs -if [ -x ${prefix}/Calpont/bin/clearShm ]; then - ${prefix}/Calpont/bin/clearShm stop -fi -# remove Calpont dir -/usr/local/bin/rootStuff -rm -rf ${prefix}/Calpont 2>/dev/null -/usr/local/bin/rootStuff - -# (we'll leave the logging stuff in place for now) - -# install the binaries -tar -C ${builddir}/export -cf - Calpont | tar -C ${prefix} -xf - -if [ $? -ne 0 ]; then - echo "There was a problem installing the binaries!" 
1>&2 - exit 1 -fi -#chown -R root.root ${prefix}/Calpont -find ${prefix}/Calpont -type d | xargs chmod +rx -find ${prefix}/Calpont -type f | xargs chmod +r - -mkdir -p ${prefix}/Calpont/data1/systemFiles/dbrm - -if [ ! -f ${prefix}/Calpont/etc/Columnstore.xml.rpmsave ]; then - cp ${prefix}/Calpont/etc/Columnstore.xml.singleserver ${prefix}/Calpont/etc/Columnstore.xml.rpmsave -fi - -if [ ! -f ${prefix}/Calpont/mysql/my.cnf ]; then - cp ${builddir}/dbcon/mysql/my.cnf ${prefix}/Calpont/mysql -fi - -#fix the port numbers -sed -i -e 's/port.*=.*3306/port=14406/' ${prefix}/Calpont/mysql/my.cnf - -# configure the s/w -${prefix}/Calpont/bin/postConfigure -n - -# restart (argh) -#${prefix}/Calpont/bin/mcsadmin RestartSystem y - -sleep 30 -pkill DMLProc -sleep 30 - -# perform the tests -if [ ! -x ${builddir}/build/mini-tests.sh ]; then - echo "There was a problem trying to start testing the s/w!" 1>&2 - exit 1 -fi -${builddir}/build/mini-tests.sh --prefix=${prefix} -if [ $? -ne 0 ]; then - echo "There were problems running the tests!" 1>&2 - exit 1 -fi - -# stop the system -${prefix}/Calpont/bin/infinidb stop -${prefix}/Calpont/mysql/mysql-Calpont stop - -exit 0 - diff --git a/build/src-build b/build/src-build deleted file mode 100755 index eea71f8cc..000000000 --- a/build/src-build +++ /dev/null @@ -1,137 +0,0 @@ -#!/bin/bash - -# Copyright (C) 2014 InfiniDB, Inc. -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; version 2 of -# the License. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, -# MA 02110-1301, USA. - -prefix=/usr/local -for arg in "$@"; do - if [ `expr -- "$arg" : '--prefix='` -eq 9 ]; then - prefix="`echo $arg | awk -F= '{print $2}'`" - else - echo "ignoring unknown argument: $arg" 1>&2 - fi -done - -if [ ! -d build ]; then - echo "No build directory found, giving up. (maybe you're not in the right directory?)" 1>&2 - exit 1 -fi - -bison --version >/dev/null 2>&1 -if [ $? -ne 0 ]; then - echo "No 'bison' program installed, giving up." 1>&2 - exit 1 -fi -flex --version >/dev/null 2>&1 -if [ $? -ne 0 ]; then - echo "No 'flex' program installed, giving up." 1>&2 - exit 1 -fi -autoreconf --version >/dev/null 2>&1 -if [ $? -ne 0 ]; then - echo "No 'autoreconf' program installed, giving up." 1>&2 - exit 1 -fi -expect -v >/dev/null 2>&1 -if [ $? -ne 0 ]; then - echo "No 'expect' program installed, giving up." 1>&2 - exit 1 -fi - -mysqldir= -for testdir in ./mysql ./mysql-master ../mysql ../mysql-master; do - if [ -d $testdir ]; then - mysqldir=$testdir - break - fi -done - -if [ -z "$mysqldir" ]; then - echo "Didn't find a MySQL tree, looking for a ZIP file..." - for testfile in ./mysql-master.zip ../mysql-master.zip; do - if [ -f $testfile ]; then - unzip -qq $testfile - break - fi - done - for testdir in ./mysql ./mysql-master; do - if [ -d $testdir ]; then - mysqldir=$testdir - break - fi - done -fi - -if [ -z "$mysqldir" ]; then - echo "Didn't find a MySQL tree and didn't find a ZIP file. Going to github..." - git --version >/dev/null 2>&1 - if [ $? 
-ne 0 ]; then - echo "No 'git' program installed, giving up (maybe you could download infinidb-mysql?)." 1>&2 - exit 1 - fi - git clone https://github.com/infinidb/mysql --branch=master --quiet - if [ $? -ne 0 ]; then - echo "Cloning from github didn't work, giving up." 1>&2 - exit 1 - fi - if [ ! -x ./mysql/idbbuild.sh ]; then - echo "Didn't find mysql build script, giving up." 1>&2 - exit 1 - fi - mysqldir=./mysql -fi - -echo "Using MySQL tree in $mysqldir" - -echo "Building MySQL..." - -( cd $mysqldir; - ./idbbuild.sh --prefix=$prefix/Calpont && make install ) >./mysql_build.log 2>&1 - -if [ $? -ne 0 -o ! -x $prefix/Calpont/mysql/libexec/mysqld ]; then - echo "Something went wrong building MySQL, giving up. (check ./mysql_build.log)" 1>&2 - exit 1 -fi - -if [ $mysqldir = "./mysql" -o $mysqldir = "./mysql-master" ]; then - ( bn=$(basename $(pwd)); cd ..; ln -s $bn/$mysqldir mysql ) >/dev/null 2>&1 -fi - -if [ ! -f ../mysql/include/mysql_version.h ]; then - echo "Couldn't find mysql_version.h in ../mysql/include, giving up." 1>&2 - exit 1 -fi - -echo "Building InfiniDB..." - -( cp -r utils/autoconf/* .; - autoreconf; - libtoolize --force --install; - mv -f $mysqldir/Makefile $mysqldir/Makefile.bak; - ./configure --prefix=$prefix && make && make install ) >./infinidb_build.log 2>&1 - -if [ $? -ne 0 -o ! -x $prefix/Calpont/bin/PrimProc ]; then - echo "Something went wrong building InfiniDB, giving up. (check ./infinidb_build.log)" 1>&2 - exit 1 -fi - -cp dbcon/mysql/my.cnf $prefix/Calpont/mysql - -echo "InfiniDB binaries and libs are in $prefix/Calpont" - -exit 0 - diff --git a/dbcon/joblist/fdtests.pl b/dbcon/joblist/fdtests.pl deleted file mode 100755 index 382d4caa7..000000000 --- a/dbcon/joblist/fdtests.pl +++ /dev/null @@ -1,199 +0,0 @@ -#!/usr/bin/perl -w -# $Id: fdtests.pl 5967 2009-10-21 14:56:59Z rdempsey $ -# - -use DBI; -use DBD::mysql; - -$databasename='calpont'; -$cnffile='/usr/local/mariadb/columnstore/mysql/my.cnf'; - -$data_source = 'DBI:mysql:database=' . $databasename . ':mysql_read_default_file=' . $cnffile . ''; -$username = 'root'; -$auth = ''; -%attr = (); - -$dbh = DBI->connect($data_source, $username, $auth, \%attr); - -sub create_table -{ - my ($dbh, $tn) = @_; - my $stmt = "create table " . $tn . " (col1 float, col2 double) engine=infinidb;"; - $dbh->do($stmt); -} - -sub drop_table -{ - my ($dbh, $tn) = @_; - my $stmt = "drop table " . $tn . ";"; - $dbh->do($stmt); -} - -sub insert_rows -{ - my ($dbh, $tn) = @_; - my $stmt; - $dbh->do("set autocommit=off;"); - - $stmt = "insert into " . $tn . " values (4.04e20, 4.04e250);"; $dbh->do($stmt); - $stmt = "insert into " . $tn . " values (3.03, 3.03);"; $dbh->do($stmt); - $stmt = "insert into " . $tn . " values (2.02, 2.02);"; $dbh->do($stmt); - $stmt = "insert into " . $tn . " values (1.01, 1.01);"; $dbh->do($stmt); - $stmt = "insert into " . $tn . " values (0, 0);"; $dbh->do($stmt); - $stmt = "insert into " . $tn . " values (-1.01, -1.01);"; $dbh->do($stmt); - $stmt = "insert into " . $tn . " values (-2.02, -2.02);"; $dbh->do($stmt); - $stmt = "insert into " . $tn . " values (-3.03, -3.03);"; $dbh->do($stmt); - $stmt = "insert into " . $tn . " values (-4.04e20, -4.04e250);"; $dbh->do($stmt); - $stmt = "insert into " . $tn . " values (null, null);"; $dbh->do($stmt); - $stmt = "insert into " . $tn . 
" values (3.14159265358979323846, 3.14159265358979323846);"; $dbh->do($stmt); - - $dbh->do("commit;"); - $dbh->do("set autocommit=on;"); -} - -sub run_tests -{ - my ($dbh, $tn) = @_; - my $stmt; - my @row_ary; - my $cnt; - - $stmt = "select count(*) from " . $tn . ";"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 11); - - $stmt = "select count(*) from " . $tn . " where col1 < 4.05e20;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 10); - - $stmt = "select count(*) from " . $tn . " where col1 < 0.99;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 5); - - $stmt = "select count(*) from " . $tn . " where col1 < -2.03;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 2); - - $stmt = "select count(*) from " . $tn . " where col1 < -4.05e20;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 0); - - $stmt = "select count(*) from " . $tn . " where col1 between 0 - 0.0005 and 0 + 0.0005;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 1); - - $stmt = "select count(*) from " . $tn . " where col2 < 4.05e250;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 10); - - $stmt = "select count(*) from " . $tn . " where col2 < 0.99;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 5); - - $stmt = "select count(*) from " . $tn . " where col2 < -2.03;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 2); - - $stmt = "select count(*) from " . $tn . " where col2 < -4.05e250;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 0); - - $stmt = "select count(*) from " . $tn . " where col2 between 0 - 0.0005 and 0 + 0.0005;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 1); - - $stmt = "select count(*) from " . $tn . " where col1 between 3.14159 - 0.00001 and 3.14159 + 0.00001 ;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 1); - - $stmt = "select count(*) from " . $tn . " where col2 between " . - "3.14159265358979 - 0.00000000000001 and 3.14159265358979 + 0.00000000000001;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 1); - - $stmt = "select sum(col1) from " . $tn . ";"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt < 3.14159 - 0.00001 || $cnt > 3.14159 + 0.00001); - - $stmt = "select min(col1) from " . $tn . ";"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt < -4.04e20 - 0.01e20 || $cnt > -4.04e20 + 0.01e20); - - $stmt = "select max(col1) from " . $tn . ";"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt < 4.04e20 - 0.01e20 || $cnt > 4.04e20 + 0.01e20); - - $stmt = "select avg(col1) from " . $tn . 
";"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt < 0.314159 - 0.000001 || $cnt > 0.314159 + 0.000001); - - $stmt = "select sum(col2) from " . $tn . ";"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt < 3.14159 - 0.00001 || $cnt > 3.14159 + 0.00001); - - $stmt = "select min(col2) from " . $tn . ";"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt < -4.04e250 - 0.01e250 || $cnt > -4.04e250 + 0.01e250); - - $stmt = "select max(col2) from " . $tn . ";"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt < 4.04e250 - 0.01e250 || $cnt > 4.04e250 + 0.01e250); - - $stmt = "select avg(col2) from " . $tn . ";"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt < 0.314159 - 0.000001 || $cnt > 0.314159 + 0.000001); - -} - -create_table($dbh, 'fdtest'); - -insert_rows($dbh, 'fdtest'); - -run_tests($dbh, 'fdtest'); - -drop_table($dbh, 'fdtest'); - -$dbh->disconnect; - diff --git a/dbcon/joblist/nulltests.pl b/dbcon/joblist/nulltests.pl deleted file mode 100755 index 51c8ad705..000000000 --- a/dbcon/joblist/nulltests.pl +++ /dev/null @@ -1,476 +0,0 @@ -#! /usr/bin/perl -w -# -# $Id: nulltests.pl 5967 2009-10-21 14:56:59Z rdempsey $ - -use DBI; -use DBD::mysql; - -$| = 1; - -$database='rjd'; -$cfgfile='/home/rdempsey/mysql/etc/my.cnf'; - -$data_source = 'DBI:mysql:database=' . $database . ':mysql_read_default_file=' . $cfgfile . ''; -$username = 'root'; -$auth = ''; -%attr = (); - -$dbh = DBI->connect($data_source, $username, $auth, \%attr); - -sub create_table -{ - my ($dbh, $tn, $colttype) = @_; - my $stmt = "create table " . $tn . " (col1 " . $colttype . ") engine=infinidb;"; - $dbh->do($stmt); -} - -sub drop_table -{ - my ($dbh, $tn) = @_; - my $stmt = "drop table " . $tn . ";"; - $dbh->do($stmt); -} - -sub insert_int_rows -{ - my ($dbh, $tn) = @_; - my $stmt = "insert into " . $tn . " values (null);"; - $dbh->do($stmt); - $stmt = "insert into " . $tn . " values (1);"; - $dbh->do($stmt); -} - -sub insert_char_rows -{ - my ($dbh, $tn) = @_; - my $stmt = "insert into " . $tn . " values (null);"; - $dbh->do($stmt); - $stmt = "insert into " . $tn . " values ('');"; - $dbh->do($stmt); - $stmt = "insert into " . $tn . " values ('A');"; - $dbh->do($stmt); -} - -sub insert_date_rows -{ - my ($dbh, $tn) = @_; - my $stmt = "insert into " . $tn . " values (null);"; - $dbh->do($stmt); - $stmt = "insert into " . $tn . " values ('2008-05-14');"; - $dbh->do($stmt); -} - -sub check_int_counts -{ - my ($dbh, $tn) = @_; - - my $stmt = "select count(*) from " . $tn . ";"; - my @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - my $cnt = $row_ary[0]; - die if ($cnt != 2); - - $stmt = "select count(*) from " . $tn . " where col1 is null;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 1); - - $stmt = "select count(*) from " . $tn . " where col1 is not null;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 1); - - $stmt = "select count(*) from " . $tn . " where col1 = 1;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 1); - - $stmt = "select count(*) from " . $tn . 
" where col1 <> 1;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 0); - - $stmt = "select count(*) from " . $tn . " where col1 < 127;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 1); - - $stmt = "select count(*) from " . $tn . " where col1 > -126;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 1); - -} - -sub check_char_counts -{ - my ($dbh, $tn) = @_; - - my $stmt = "select count(*) from " . $tn . ";"; - my @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - my $cnt = $row_ary[0]; - die if ($cnt != 3); - - $stmt = "select count(*) from " . $tn . " where col1 is null;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 2); - - $stmt = "select count(*) from " . $tn . " where col1 is not null;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 1); - - $stmt = "select count(*) from " . $tn . " where col1 = 'A';"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 1); - - $stmt = "select count(*) from " . $tn . " where col1 <> 'A';"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 0); - - $stmt = "select count(*) from " . $tn . " where col1 < 'Z';"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 1); - - $stmt = "select count(*) from " . $tn . " where col1 > '!';"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 1); - -} - -sub check_date_counts -{ - my ($dbh, $tn) = @_; - - my $stmt = "select count(*) from " . $tn . ";"; - my @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - my $cnt = $row_ary[0]; - die if ($cnt != 2); - - $stmt = "select count(*) from " . $tn . " where col1 is null;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 1); - - $stmt = "select count(*) from " . $tn . " where col1 is not null;"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 1); - - $stmt = "select count(*) from " . $tn . " where col1 = '2008-05-14';"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 1); - - $stmt = "select count(*) from " . $tn . " where col1 <> '2008-05-14';"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 0); - - $stmt = "select count(*) from " . $tn . " where col1 < '2018-05-14';"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 1); - - $stmt = "select count(*) from " . $tn . 
" where col1 > '1998-05-14';"; - @row_ary = $dbh->selectrow_array($stmt); - die if ($#row_ary != 0); - $cnt = $row_ary[0]; - die if ($cnt != 1); - -} - -sub dotinyinttest -{ - my ($dbh) = @_; - my $tn = 'tinyinttest'; - print "running tinyint tests..."; - create_table($dbh, $tn, 'tinyint'); - insert_int_rows($dbh, $tn); - check_int_counts($dbh, $tn); - drop_table($dbh, $tn); - print "done\n"; -} - -sub dosmallinttest -{ - my ($dbh) = @_; - my $tn = 'smallinttest'; - print "running smallint tests..."; - create_table($dbh, $tn, 'smallint'); - insert_int_rows($dbh, $tn); - check_int_counts($dbh, $tn); - drop_table($dbh, $tn); - print "done\n"; -} - -sub dointtest -{ - my ($dbh) = @_; - my $tn = 'inttest'; - print "running int tests..."; - create_table($dbh, $tn, 'int'); - insert_int_rows($dbh, $tn); - check_int_counts($dbh, $tn); - drop_table($dbh, $tn); - print "done\n"; -} - -sub dobiginttest -{ - my ($dbh) = @_; - my $tn = 'biginttest'; - print "running bigint tests..."; - create_table($dbh, $tn, 'bigint'); - insert_int_rows($dbh, $tn); - check_int_counts($dbh, $tn); - drop_table($dbh, $tn); - print "done\n"; -} - -sub dochar1test -{ - my ($dbh) = @_; - my $tn = 'char1test'; - print "running char1 tests..."; - create_table($dbh, $tn, 'char(1)'); - insert_char_rows($dbh, $tn); - check_char_counts($dbh, $tn); - drop_table($dbh, $tn); - print "done\n"; -} - -sub dochar2test -{ - my ($dbh) = @_; - my $tn = 'char2test'; - print "running char2 tests..."; - create_table($dbh, $tn, 'char(2)'); - insert_char_rows($dbh, $tn); - check_char_counts($dbh, $tn); - drop_table($dbh, $tn); - print "done\n"; -} - -sub dochar4test -{ - my ($dbh) = @_; - my $tn = 'char4test'; - print "running char4 tests..."; - create_table($dbh, $tn, 'char(4)'); - insert_char_rows($dbh, $tn); - check_char_counts($dbh, $tn); - drop_table($dbh, $tn); - print "done\n"; -} - -sub dochar8test -{ - my ($dbh) = @_; - my $tn = 'char8test'; - print "running char8 tests..."; - create_table($dbh, $tn, 'char(8)'); - insert_char_rows($dbh, $tn); - check_char_counts($dbh, $tn); - drop_table($dbh, $tn); - print "done\n"; -} - -sub dovarchar1test -{ - my ($dbh) = @_; - my $tn = 'varchar1test'; - print "running varchar1 tests..."; - create_table($dbh, $tn, 'varchar(1)'); - insert_char_rows($dbh, $tn); - check_char_counts($dbh, $tn); - drop_table($dbh, $tn); - print "done\n"; -} - -sub dovarchar3test -{ - my ($dbh) = @_; - my $tn = 'varchar3test'; - print "running varchar3 tests..."; - create_table($dbh, $tn, 'varchar(3)'); - insert_char_rows($dbh, $tn); - check_char_counts($dbh, $tn); - drop_table($dbh, $tn); - print "done\n"; -} - -sub dovarchar7test -{ - my ($dbh) = @_; - my $tn = 'varchar7test'; - print "running varchar7 tests..."; - create_table($dbh, $tn, 'varchar(7)'); - insert_char_rows($dbh, $tn); - check_char_counts($dbh, $tn); - drop_table($dbh, $tn); - print "done\n"; -} - -sub dochar40test -{ - my ($dbh) = @_; - my $tn = 'char40test'; - print "running char40 tests..."; - create_table($dbh, $tn, 'char(40)'); - insert_char_rows($dbh, $tn); - check_char_counts($dbh, $tn); - drop_table($dbh, $tn); - print "done\n"; -} - -sub dovarchar40test -{ - my ($dbh) = @_; - my $tn = 'varchar40test'; - print "running varchar40 tests..."; - create_table($dbh, $tn, 'varchar(40)'); - insert_char_rows($dbh, $tn); - check_char_counts($dbh, $tn); - drop_table($dbh, $tn); - print "done\n"; -} - -sub dodec4test -{ - my ($dbh) = @_; - my $tn = 'dec4test'; - print "running decimal4 tests..."; - create_table($dbh, $tn, 'decimal(4,2)'); - 
insert_int_rows($dbh, $tn); - check_int_counts($dbh, $tn); - drop_table($dbh, $tn); - print "done\n"; -} - -sub dodec9test -{ - my ($dbh) = @_; - my $tn = 'dec9test'; - print "running decimal9 tests..."; - create_table($dbh, $tn, 'decimal(9,2)'); - insert_int_rows($dbh, $tn); - check_int_counts($dbh, $tn); - drop_table($dbh, $tn); - print "done\n"; -} - -sub dodec18test -{ - my ($dbh) = @_; - my $tn = 'dec18test'; - print "running decimal18 tests..."; - create_table($dbh, $tn, 'decimal(18,2)'); - insert_int_rows($dbh, $tn); - check_int_counts($dbh, $tn); - drop_table($dbh, $tn); - print "done\n"; -} - -sub dodatetest -{ - my ($dbh) = @_; - my $tn = 'datetest'; - print "running date tests..."; - create_table($dbh, $tn, 'date'); - insert_date_rows($dbh, $tn); - check_date_counts($dbh, $tn); - drop_table($dbh, $tn); - print "done\n"; -} - -sub dodatetimetest -{ - my ($dbh) = @_; - my $tn = 'datetimetest'; - print "running datetime tests..."; - create_table($dbh, $tn, 'datetime'); - insert_date_rows($dbh, $tn); - check_date_counts($dbh, $tn); - drop_table($dbh, $tn); - print "done\n"; -} - -sub dofloattest -{ - my ($dbh) = @_; - my $tn = 'floattest'; - print "running float tests..."; - create_table($dbh, $tn, 'float'); - insert_int_rows($dbh, $tn); - check_int_counts($dbh, $tn); - drop_table($dbh, $tn); - print "done\n"; -} - -sub dodoubletest -{ - my ($dbh) = @_; - my $tn = 'doubletest'; - print "running double tests..."; - create_table($dbh, $tn, 'double'); - insert_int_rows($dbh, $tn); - check_int_counts($dbh, $tn); - drop_table($dbh, $tn); - print "done\n"; -} - -dotinyinttest($dbh); -dosmallinttest($dbh); -dointtest($dbh); -dobiginttest($dbh); - -dochar1test($dbh); -dochar2test($dbh); -dochar4test($dbh); -dochar8test($dbh); - -dovarchar1test($dbh); -dovarchar3test($dbh); -dovarchar7test($dbh); - -dochar40test($dbh); -dovarchar40test($dbh); - -dodec4test($dbh); -dodec9test($dbh); -dodec18test($dbh); -#dodec30test($dbh); - -dodatetest($dbh); -dodatetimetest($dbh); - -dofloattest($dbh); -dodoubletest($dbh); - -$dbh->disconnect; - diff --git a/dbcon/mysql/CMakeLists.txt b/dbcon/mysql/CMakeLists.txt index 8ee9c0d6b..4461d1dd5 100644 --- a/dbcon/mysql/CMakeLists.txt +++ b/dbcon/mysql/CMakeLists.txt @@ -87,11 +87,13 @@ install(FILES syscatalog_mysql.sql calremoveuserpriority.sql calshowprocesslist.sql columnstore_info.sql - my.cnf DESTINATION ${ENGINE_MYSQLDIR} COMPONENT storage-engine) install(PROGRAMS install_calpont_mysql.sh mysql-Columnstore DESTINATION ${ENGINE_MYSQLDIR} COMPONENT storage-engine) +install(FILES columnstore.cnf + DESTINATION /etc/my.cnf.d COMPONENT storage-engine) + #AM_CPPFLAGS = $(idb_common_includes) $(idb_cppflags) #AM_CFLAGS = $(idb_cflags) diff --git a/dbcon/mysql/columnstore.cnf b/dbcon/mysql/columnstore.cnf new file mode 100644 index 000000000..21a9ca571 --- /dev/null +++ b/dbcon/mysql/columnstore.cnf @@ -0,0 +1,53 @@ +# The following options will be passed to all MySQL clients +[client] +#password = your_password +socket = /usr/local/mariadb/columnstore/mysql/lib/mysql/mysql.sock + +# Here follows entries for some specific programs + +# The MySQL server +[mysqld] +socket = /usr/local/mariadb/columnstore/mysql/lib/mysql/mysql.sock +datadir = /usr/local/mariadb/columnstore/mysql/db +sql_mode="ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION" +#columnstore_processing_handlers_fallback = OFF; + +# Enable compression by default on create, set to 0 to turn off +#columnstore_compression_type=2 + +# Default for string table threshhold 
+#columnstore_stringtable_threshold=20 + +# infinidb local query flag +#columnstore_local_query=0 + +#columnstore_diskjoin_smallsidelimit=0 +#columnstore_diskjoin_largesidelimit=0 +#columnstore_diskjoin_bucketsize=100 +#columnstore_um_mem_limit=0 + +#columnstore_use_import_for_batchinsert=1 +#columnstore_import_for_batchinsert_delimiter=7 + +basedir = /usr/local/mariadb/columnstore/mysql/ +character-sets-dir = /usr/local/mariadb/columnstore/mysql/share/charsets/ +lc-messages-dir = /usr/local/mariadb/columnstore/mysql/share/ +plugin_dir = /usr/local/mariadb/columnstore/mysql/lib/plugin + +# Replication Master Server (default) +# binary logging is required for replication +# log-bin=mysql-bin +binlog_format=ROW + +# required unique id between 1 and 2^32 - 1 +# defaults to 1 if master-host +# uses to 2+ if slave-host +server-id = 1 + +# binary logging - not required for slaves, but recommended +log-bin=/usr/local/mariadb/columnstore/mysql/db/mysql-bin +relay-log=/usr/local/mariadb/columnstore/mysql/db/relay-bin +relay-log-index = /usr/local/mariadb/columnstore/mysql/db/relay-bin.index +relay-log-info-file = /usr/local/mariadb/columnstore/mysql/db/relay-bin.info + +lower_case_table_names=1 diff --git a/dbcon/mysql/install_calpont_mysql.sh b/dbcon/mysql/install_calpont_mysql.sh index 4a3f24554..36f2c871a 100755 --- a/dbcon/mysql/install_calpont_mysql.sh +++ b/dbcon/mysql/install_calpont_mysql.sh @@ -24,9 +24,7 @@ for arg in "$@"; do fi done -df=$installdir/mysql/my.cnf - -$installdir/mysql/bin/mysql --defaults-extra-file=$df --force --user=root mysql 2> ${tmpdir}/mysql_install.log < ${tmpdir}/mysql_install.log </dev/null <$installdir/mysql/syscatalog_mysql.sql -$installdir/mysql/bin/mysql --defaults-extra-file=$df --user=root mysql 2>/dev/null <$installdir/mysql/calsetuserpriority.sql -$installdir/mysql/bin/mysql --defaults-extra-file=$df --user=root mysql 2>/dev/null <$installdir/mysql/calremoveuserpriority.sql -$installdir/mysql/bin/mysql --defaults-extra-file=$df --user=root mysql 2>/dev/null <$installdir/mysql/calshowprocesslist.sql -$installdir/mysql/bin/mysql --defaults-extra-file=$df --user=root mysql 2>/dev/null <$installdir/mysql/columnstore_info.sql +$installdir/mysql/bin/mysql --user=root mysql 2>/dev/null <$installdir/mysql/syscatalog_mysql.sql +$installdir/mysql/bin/mysql --user=root mysql 2>/dev/null <$installdir/mysql/calsetuserpriority.sql +$installdir/mysql/bin/mysql --user=root mysql 2>/dev/null <$installdir/mysql/calremoveuserpriority.sql +$installdir/mysql/bin/mysql --user=root mysql 2>/dev/null <$installdir/mysql/calshowprocesslist.sql +$installdir/mysql/bin/mysql --user=root mysql 2>/dev/null <$installdir/mysql/columnstore_info.sql diff --git a/dbcon/mysql/my.cnf b/dbcon/mysql/my.cnf deleted file mode 100644 index 046476547..000000000 --- a/dbcon/mysql/my.cnf +++ /dev/null @@ -1,184 +0,0 @@ -# -# $Id: my.cnf 9502 2013-05-09 15:37:30Z pleblanc $ -# -# Example MySQL config file for very large systems. -# -# This is for a large system with memory of 1G-2G where the system runs mainly -# MySQL. -# -# You can copy this file to -# /etc/my.cnf to set global options, -# mysql-data-dir/my.cnf to set server-specific options (in this -# installation this directory is /var/lib/mysql) or -# ~/.my.cnf to set user-specific options. -# -# In this file, you can use all long options that a program supports. -# If you want to know which options a program supports, run the program -# with the "--help" option. 
- -# The following options will be passed to all MySQL clients -[client] -#password = your_password -port = 3306 -socket = /usr/local/mariadb/columnstore/mysql/lib/mysql/mysql.sock - -# Here follows entries for some specific programs - -# The MySQL server -[mysqld] -port = 3306 -socket = /usr/local/mariadb/columnstore/mysql/lib/mysql/mysql.sock -datadir = /usr/local/mariadb/columnstore/mysql/db -skip-external-locking -key_buffer_size = 512M -max_allowed_packet = 1M -table_cache = 512 -sort_buffer_size = 4M -read_buffer_size = 4M -read_rnd_buffer_size = 16M -myisam_sort_buffer_size = 64M -thread_cache_size = 8 -query_cache_size = 0 -# Try number of CPU's*2 for thread_concurrency -#thread_concurrency = 8 -thread_stack = 512K -lower_case_table_names=1 -group_concat_max_len=512 -sql_mode="ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION" -#columnstore_processing_handlers_fallback = OFF; - -# Enable compression by default on create, set to 0 to turn off -#columnstore_compression_type=2 - -# Default for string table threshhold -#columnstore_stringtable_threshold=20 - -# infinidb local query flag -#columnstore_local_query=0 - -#columnstore_diskjoin_smallsidelimit=0 -#columnstore_diskjoin_largesidelimit=0 -#columnstore_diskjoin_bucketsize=100 -#columnstore_um_mem_limit=0 - -#columnstore_use_import_for_batchinsert=1 -#columnstore_import_for_batchinsert_delimiter=7 - -basedir = /usr/local/mariadb/columnstore/mysql/ -character-sets-dir = /usr/local/mariadb/columnstore/mysql/share/charsets/ -lc-messages-dir = /usr/local/mariadb/columnstore/mysql/share/ -plugin_dir = /usr/local/mariadb/columnstore/mysql/lib/plugin - -# Don't listen on a TCP/IP port at all. This can be a security enhancement, -# if all processes that need to connect to mysqld run on the same host. -# All interaction with mysqld must be made via Unix sockets or named pipes. -# Note that using this option without enabling named pipes on Windows -# (via the "enable-named-pipe" option) will render mysqld useless! -# -#skip-networking - -# Replication Master Server (default) -# binary logging is required for replication -# log-bin=mysql-bin -binlog_format=ROW - -# required unique id between 1 and 2^32 - 1 -# defaults to 1 if master-host -# uses to 2+ if slave-host -server-id = 1 - -# Replication Slave (comment out master section to use this) -# -# To configure this host as a replication slave, you can choose between -# two methods : -# -# 1) Use the CHANGE MASTER TO command (fully described in our manual) - -# the syntax is: -# -# CHANGE MASTER TO MASTER_HOST=, MASTER_PORT=, -# MASTER_USER=, MASTER_PASSWORD= ; -# -# where you replace , , by quoted strings and -# by the master's port number (3306 by default). -# -# Example: -# -# CHANGE MASTER TO MASTER_HOST='125.564.12.1', MASTER_PORT=3306, -# MASTER_USER='joe', MASTER_PASSWORD='secret'; -# -# OR -# -# 2) Set the variables below. However, in case you choose this method, then -# start replication for the first time (even unsuccessfully, for example -# if you mistyped the password in master-password and the slave fails to -# connect), the slave will create a master.info file, and any later -# change in this file to the variables' values below will be ignored and -# overridden by the content of the master.info file, unless you shutdown -# the slave server, delete master.info and restart the slaver server. 
-# For that reason, you may want to leave the lines below untouched -# (commented) and instead use CHANGE MASTER TO (see above) -# -# -# The replication master for this slave - required -#master-host = -# -# The username the slave will use for authentication when connecting -# to the master - required -#master-user = -# -# The password the slave will authenticate with when connecting to -# the master - required -#master-password = -# -# The port the master is listening on. -# optional - defaults to 3306 -#master-port = -# -# binary logging - not required for slaves, but recommended -log-bin=/usr/local/mariadb/columnstore/mysql/db/mysql-bin -relay-log=/usr/local/mariadb/columnstore/mysql/db/relay-bin -relay-log-index = /usr/local/mariadb/columnstore/mysql/db/relay-bin.index -relay-log-info-file = /usr/local/mariadb/columnstore/mysql/db/relay-bin.info - -# Point the following paths to different dedicated disks -#tmpdir = /tmp/ -#log-update = /path-to-dedicated-directory/hostname - -# Uncomment the following if you are using InnoDB tables -#innodb_data_home_dir = /usr/local/mariadb/columnstore/mysql/lib/mysql/ -#innodb_data_file_path = ibdata1:2000M;ibdata2:10M:autoextend -#innodb_log_group_home_dir = /usr/local/mariadb/columnstore/mysql/lib/mysql/ -#innodb_log_arch_dir = /usr/local/mariadb/columnstore/mysql/lib/mysql/ -# You can set .._buffer_pool_size up to 50 - 80 % -# of RAM but beware of setting memory usage too high -#innodb_buffer_pool_size = 384M -#innodb_additional_mem_pool_size = 20M -# Set .._log_file_size to 25 % of buffer pool size -#innodb_log_file_size = 100M -#innodb_log_buffer_size = 8M -#innodb_flush_log_at_trx_commit = 1 -#innodb_lock_wait_timeout = 50 - -[mysqldump] -quick -max_allowed_packet = 16M - -[mysql] -no-auto-rehash -# Remove the next comment character if you are not familiar with SQL -#safe-updates - -[isamchk] -key_buffer_size = 256M -sort_buffer_size = 256M -read_buffer = 2M -write_buffer = 2M - -[myisamchk] -key_buffer_size = 256M -sort_buffer_size = 256M -read_buffer = 2M -write_buffer = 2M - -[mysqlhotcopy] -interactive-timeout diff --git a/dbcon/mysql/schemaSync.pl b/dbcon/mysql/schemaSync.pl deleted file mode 100755 index 29b4d8ef4..000000000 --- a/dbcon/mysql/schemaSync.pl +++ /dev/null @@ -1,237 +0,0 @@ -#!/usr/bin/perl -w -# -# $Id: schemaSync.pl 9731 2013-08-01 16:58:35Z rdempsey $ - -use DBI; -use DBD::mysql; - -$| = 1; - -$cfgfile='/usr/local/mariadb/columnstore/mysql/my.cnf'; - -$username = 'root'; -$auth = ''; -%attr = (); - -@calpont_objects = (); - -sub gather_calpont_objects -{ - $stmt = <prepare($stmt); - $sth->execute(); - my @row_ary = $sth->fetchrow_array(); - while ($#row_ary >= 0) - { - push(@calpont_objects, [@row_ary]); - @row_ary = $sth->fetchrow_array(); - } -} - -sub gather_mysql_objects -{ -} - -sub diff_calpont_mysql -{ -} - -sub diff_mysql_calpont -{ -} - -sub conflicts -{ -} - -$database='calpontsys'; -$data_source = 'DBI:mysql:database=' . $database . ':mysql_read_default_file=' . $cfgfile . 
''; -$dbh = DBI->connect($data_source, $username, $auth, \%attr); -gather_calpont_objects; -$dbh->disconnect; - -sub datatype2name -{ - my ($dt) = @_; - if ($dt == 0) - { - $dts = 'bit'; - } - elsif ($dt == 1) - { - $dts = 'tinyint'; - } - elsif ($dt == 2) - { - $dts = 'char'; - } - elsif ($dt == 3) - { - $dts = 'smallint'; - } - elsif ($dt == 4) - { - $dts = 'decimal'; - } - elsif ($dt == 5) - { - $dts = 'medint'; - } - elsif ($dt == 6) - { - $dts = 'int'; - } - elsif ($dt == 7) - { - $dts = 'float'; - } - elsif ($dt == 8) - { - $dts = 'date'; - } - elsif ($dt == 9) - { - $dts = 'bigint'; - } - elsif ($dt == 10) - { - $dts = 'double'; - } - elsif ($dt == 11) - { - $dts = 'datetime'; - } - elsif ($dt == 12) - { - $dts = 'varchar'; - } - elsif ($dt == 13) - { - $dts = 'varbinary'; - } - elsif ($dt == 14) - { - $dts = 'clob'; - } - elsif ($dt == 15) - { - $dts = 'blob'; - } - elsif ($dt == 16) - { - $dts = 'tinyint unsigned'; - } - elsif ($dt == 17) - { - $dts = 'smallint unsigned'; - } - elsif ($dt == 18) - { - $dts = 'decimal unsigned'; - } - elsif ($dt == 19) - { - $dts = 'medint unsigned'; - } - elsif ($dt == 20) - { - $dts = 'int unsigned'; - } - elsif ($dt == 21) - { - $dts = 'float unsigned'; - } - elsif ($dt == 22) - { - $dts = 'bigint unsigned'; - } - elsif ($dt == 23) - { - $dts = 'double unsigned'; - } - else - { - $dts = "$dt"; - } - return $dts; -} - -%schemas = (); -foreach $co (@calpont_objects) -{ - $schema = "$@$co->[0]"; - $schemas{$schema} = 1; -} - -%schematables = (); -foreach $schema (keys %schemas) -{ - foreach $co (@calpont_objects) - { - next if ("$@$co->[0]" ne $schema); - $schematable = $schema . ".$@$co->[1]"; - $schematables{$schematable} = 1; - } -} - -$curdb=''; -foreach $schema (keys %schemas) -{ - foreach $schematable (keys %schematables) - { - next if (!($schematable =~ /^$schema\./)); - $first = 1; - foreach $co (@calpont_objects) - { - $schtbl = "$@$co->[0].$@$co->[1]"; - next if ($schematable ne $schtbl); - $ty = "$@$co->[5]"; - $nm = datatype2name($ty); - $sc = "$@$co->[6]"; - if (($nm eq 'smallint' || $nm eq 'int' || $nm eq 'bigint') && $sc > 0) - { - $nm = 'decimal'; - } - if ($first == 1) - { - if ("$@$co->[0]" ne $curdb) - { - print "create database if not exists $@$co->[0];\nuse $@$co->[0];\n\n"; - $curdb = "$@$co->[0]"; - } - print "create table if not exists $@$co->[1] (\n"; - $first = 0; - } - else - { - print ", "; - } - print "$@$co->[2] $nm"; - if ($nm eq 'char' || $nm eq 'varchar' || $nm eq 'varbinary') - { - print "($@$co->[8])"; - } - elsif ($nm eq 'decimal') - { - print "($@$co->[7],$@$co->[6])"; - } - print " comment 'compression=$@$co->[10]'\n"; - } - print ") engine=infinidb comment='schema sync only';\n\n"; - } -} - -gather_mysql_objects; -diff_calpont_mysql; -diff_mysql_calpont; -conflicts; - diff --git a/oam/install_scripts/columnstoreAlias b/oam/install_scripts/columnstoreAlias index 623cf26b8..ae3dfd13c 100644 --- a/oam/install_scripts/columnstoreAlias +++ b/oam/install_scripts/columnstoreAlias @@ -1,6 +1,6 @@ # MariaDB Columnstore Alias Commands # -alias mcsmysql='/usr/local/mariadb/columnstore/mysql/bin/mysql --defaults-extra-file=/usr/local/mariadb/columnstore/mysql/my.cnf -u root' +alias mcsmysql='/usr/local/mariadb/columnstore/mysql/bin/mysql -u root' alias ma=/usr/local/mariadb/columnstore/bin/mcsadmin alias mcsadmin=/usr/local/mariadb/columnstore/bin/mcsadmin alias cpimport=/usr/local/mariadb/columnstore/bin/cpimport diff --git a/oam/install_scripts/mariadb-command-line.sh b/oam/install_scripts/mariadb-command-line.sh index 
86719deb1..eda201387 100755 --- a/oam/install_scripts/mariadb-command-line.sh +++ b/oam/install_scripts/mariadb-command-line.sh @@ -57,7 +57,6 @@ EOD cat${tmpdir}/mariadb-command-line.sql >> ${tmpdir}/mariadb-command-line.log $installdir/mysql/bin/mysql \ - --defaults-extra-file=$installdir/mysql/my.cnf \ --user=root \ calpontsys < ${tmpdir}/mariadb-command-line.sql >> ${tmpdir}/mariadb-command-line.log 2>&1 diff --git a/oam/install_scripts/master-rep-columnstore.sh b/oam/install_scripts/master-rep-columnstore.sh index 313199d62..d8369ed9c 100644 --- a/oam/install_scripts/master-rep-columnstore.sh +++ b/oam/install_scripts/master-rep-columnstore.sh @@ -52,7 +52,6 @@ EOD cat ${tmpdir}/idb_master-rep.sql >>${tmpdir}/master-rep-status-$hostipaddr.log $installdir/mysql/bin/mysql \ - --defaults-extra-file=$installdir/mysql/my.cnf \ --user=root \ calpontsys <${tmpdir}/idb_master-rep.sql >>${tmpdir}/master-rep-status-$hostipaddr.log 2>&1 @@ -70,7 +69,6 @@ EOD cat ${tmpdir}/idb_master-rep.sql >>${tmpdir}/master-rep-status-$hostipaddr.log $installdir/mysql/bin/mysql \ - --defaults-extra-file=$installdir/mysql/my.cnf \ --user=root \ calpontsys <${tmpdir}/idb_master-rep.sql >>${tmpdir}/master-rep-status-$hostipaddr.log 2>&1 @@ -86,7 +84,6 @@ EOD cat ${tmpdir}/idb_master-rep.sql >>${tmpdir}/master-rep-status-$hostipaddr.log $installdir/mysql/bin/mysql \ - --defaults-extra-file=$installdir/mysql/my.cnf \ --user=root \ calpontsys <${tmpdir}/idb_master-rep.sql >>${tmpdir}/master-rep-status-$hostipaddr.log 2>&1 @@ -99,7 +96,6 @@ EOD cat ${tmpdir}/idb_master-rep.sql >${tmpdir}/show-master-status.log $installdir/mysql/bin/mysql \ - --defaults-extra-file=$installdir/mysql/my.cnf \ --user=root \ calpontsys <${tmpdir}/idb_master-rep.sql >>${tmpdir}/show-master-status.log diff --git a/oam/install_scripts/module_installer.sh b/oam/install_scripts/module_installer.sh index 038611675..d72c94877 100755 --- a/oam/install_scripts/module_installer.sh +++ b/oam/install_scripts/module_installer.sh @@ -139,13 +139,13 @@ fi MySQLRep=`$COLUMNSTORE_INSTALL_DIR/bin/getConfig Installation MySQLRep` if [ $MySQLRep = "y" ]; then - if test -f $COLUMNSTORE_INSTALL_DIR/mysql/my.cnf ; then + if test -f /etc/my.cnf.d/columnstore.cnf ; then echo "Run Upgrade on my.cnf on Module" $COLUMNSTORE_INSTALL_DIR/bin/mycnfUpgrade > ${tmpDir}/mycnfUpgrade.log 2>&1 fi fi -if test -f $COLUMNSTORE_INSTALL_DIR/mysql/my.cnf ; then +if test -f /etc/my.cnf.d/columnstore.cnf ; then mysqlPort=`$COLUMNSTORE_INSTALL_DIR/bin/getConfig Installation MySQLPort` echo "Run Mysql Port update on my.cnf on Module" $COLUMNSTORE_INSTALL_DIR/bin/mycnfUpgrade $mysqlPort > ${tmpDir}/mycnfUpgrade_port.log 2>&1 diff --git a/oam/install_scripts/post-mysql-install b/oam/install_scripts/post-mysql-install index 8a014ba5e..95059c341 100755 --- a/oam/install_scripts/post-mysql-install +++ b/oam/install_scripts/post-mysql-install @@ -23,7 +23,6 @@ checkForError() { #--------------------------------------------------------------------------- echo "checking for engine columnstore..."
$installdir/mysql/bin/mysql \ - --defaults-extra-file=$installdir/mysql/my.cnf \ --user=root \ --execute='select * from mysql.plugin where name="columnstore";' \ calpontsys | grep -i columnstore @@ -92,7 +91,7 @@ if [ -d $installdir/mysql/lib64/mysql/plugin -a -n "$libcalmysql" ]; then fi if [ $installdir != "/usr/local/mariadb/columnstore" ]; then - sed -i -e s@/usr/local/mariadb/columnstore@$installdir@g $installdir/mysql/my.cnf + sed -i -e s@/usr/local/mariadb/columnstore@$installdir@g /etc/my.cnf.d/columnstore.cnf fi if [ -x $installdir/mysql/mysql-Columnstore ]; then diff --git a/oam/install_scripts/post-mysqld-install b/oam/install_scripts/post-mysqld-install index c11daabec..629625492 100755 --- a/oam/install_scripts/post-mysqld-install +++ b/oam/install_scripts/post-mysqld-install @@ -71,7 +71,7 @@ chown -R $user:$user $installdir/mysql # Initiate databases if needed if [ $installdir != "/usr/local/mariadb/columnstore" ]; then - sed -i -e s@/usr/local/mariadb/columnstore@$installdir@g $installdir/mysql/my.cnf + sed -i -e s@/usr/local/mariadb/columnstore@$installdir@g /etc/my.cnf.d/columnstore.cnf fi # InfiniDB testing hook... @@ -92,7 +92,7 @@ if [ -d $installdir/mysql/db/calpontsys ]; then if [ -x $installdir/mysql/bin/mysql_upgrade ]; then echo "Running mysql_upgrade script" if [[ ${password} == " " ]]; then - $installdir/mysql/bin/mysql_upgrade --defaults-file=$installdir/mysql/my.cnf > $tmpdir/mysql_upgrade.log + $installdir/mysql/bin/mysql_upgrade > $tmpdir/mysql_upgrade.log if [ $? -ne 0 ]; then echo "ERROR: mysql_upgrade failure, check $tmpdir/mysql_upgrade.log" $installdir/mysql/mysql-Columnstore stop @@ -100,7 +100,7 @@ if [ -d $installdir/mysql/db/calpontsys ]; then exit 2; fi else - $installdir/mysql/bin/mysql_upgrade --defaults-file=$installdir/mysql/my.cnf --password=$password > $tmpdir/mysql_upgrade.log + $installdir/mysql/bin/mysql_upgrade --password=$password > $tmpdir/mysql_upgrade.log if [ $? -ne 0 ]; then echo "ERROR: mysql_upgrade failure, check $tmpdir/mysql_upgrade.log" $installdir/mysql/mysql-Columnstore stop @@ -117,7 +117,7 @@ if [ -d $installdir/mysql/db/calpontsys ]; then fi ### Don't give the user the notes, we'll fix them ourselves... -$installdir/mysql/scripts/mysql_install_db --rpm --user=$user --defaults-extra-file=$installdir/mysql/my.cnf --basedir=$installdir/mysql >/dev/null +$installdir/mysql/scripts/mysql_install_db --rpm --user=$user --basedir=$installdir/mysql >/dev/null # Change permissions again to fix any new files. 
chown -R $user:$user $mysql_datadir diff --git a/oam/install_scripts/slave-rep-columnstore.sh b/oam/install_scripts/slave-rep-columnstore.sh index bb4c5b6af..310c72b49 100644 --- a/oam/install_scripts/slave-rep-columnstore.sh +++ b/oam/install_scripts/slave-rep-columnstore.sh @@ -57,7 +57,6 @@ EOD cat ${tmpdir}/idb_slave-rep.sql >>${tmpdir}/slave-rep-status.log $installdir/mysql/bin/mysql \ - --defaults-extra-file=$installdir/mysql/my.cnf \ --user=root \ calpontsys <${tmpdir}/idb_slave-rep.sql >>${tmpdir}/slave-rep-status.log 2>&1 @@ -80,7 +79,6 @@ EOD cat ${tmpdir}/idb_slave-rep.sql >>${tmpdir}/slave-rep-status.log $installdir/mysql/bin/mysql \ - --defaults-extra-file=$installdir/mysql/my.cnf \ --user=root \ calpontsys <${tmpdir}/idb_slave-rep.sql >>${tmpdir}/slave-rep-status.log 2>&1 @@ -96,7 +94,6 @@ EOD cat ${tmpdir}/idb_slave-rep.sql >>${tmpdir}/slave-rep-status.log $installdir/mysql/bin/mysql \ - --defaults-extra-file=$installdir/mysql/my.cnf \ --user=root \ calpontsys <${tmpdir}/idb_slave-rep.sql >>${tmpdir}/slave-rep-status.log 2>&1 @@ -112,7 +109,6 @@ EOD cat ${tmpdir}/idb_slave-rep.sql >>${tmpdir}/slave-rep-status.log $installdir/mysql/bin/mysql \ - --defaults-extra-file=$installdir/mysql/my.cnf \ --user=root \ calpontsys <${tmpdir}/idb_slave-rep.sql >>${tmpdir}/slave-rep-status.log 2>&1 diff --git a/oam/oamcpp/liboamcpp.cpp b/oam/oamcpp/liboamcpp.cpp index 8777e1589..3a5caab12 100644 --- a/oam/oamcpp/liboamcpp.cpp +++ b/oam/oamcpp/liboamcpp.cpp @@ -9206,7 +9206,7 @@ int Oam::glusterctl(GLUSTER_COMMANDS command, std::string argument1, std::string ******************************************************************************************/ bool Oam::changeMyCnf( std::string paramater, std::string value ) { - string mycnfFile = startup::StartUp::installDir() + "/mysql/my.cnf"; + string mycnfFile = "/etc/my.cnf.d/columnstore.cnf"; ifstream file (mycnfFile.c_str()); if (!file) diff --git a/oamapps/columnstoreSupport/approximateRowCount.sh b/oamapps/columnstoreSupport/approximateRowCount.sh index db1deb425..cbbe5ef71 100755 --- a/oamapps/columnstoreSupport/approximateRowCount.sh +++ b/oamapps/columnstoreSupport/approximateRowCount.sh @@ -10,8 +10,7 @@ if [ -z "$MYSQLCMD" ]; then INSTALLDIR="/usr/local/mariadb/columnstore" - MYSQLCNF=$INSTALLDIR/mysql/my.cnf - MYSQLCMD="$INSTALLDIR/mysql/bin/mysql --defaults-extra-file=$MYSQLCNF -u root" + MYSQLCMD="$INSTALLDIR/mysql/bin/mysql -u root" fi # diff --git a/oamapps/columnstoreSupport/columnstoreSupport.cpp b/oamapps/columnstoreSupport/columnstoreSupport.cpp index ccf7714c4..7597f504c 100644 --- a/oamapps/columnstoreSupport/columnstoreSupport.cpp +++ b/oamapps/columnstoreSupport/columnstoreSupport.cpp @@ -872,7 +872,7 @@ int main(int argc, char* argv[]) { // check if mysql is supported and get info string logFile = tmpDir + "/idbmysql.log"; - string columnstoreMysql = installDir + "/mysql/bin/mysql --defaults-extra-file=" + installDir + "/mysql/my.cnf -u root "; + string columnstoreMysql = installDir + "/mysql/bin/mysql -u root "; string cmd = columnstoreMysql + " -e 'status' > " + logFile + " 2>&1"; system(cmd.c_str()); @@ -886,8 +886,8 @@ int main(int argc, char* argv[]) //needs a password, was password entered on command line if ( mysqlpw == " " ) { - //go check my.cnf - string file = installDir + "/mysql/my.cnf"; + //go check columnstore.cnf + string file = "/etc/my.cnf.d/columnstore.cnf"; ifstream oldFile (file.c_str()); vector lines; @@ -909,8 +909,8 @@ int main(int argc, char* argv[]) if (pos == string::npos) { - //password arg in 
my.cnf, go get password - cout << "NOTE: Using password from my.cnf" << endl; + //password arg in columnstore.cnf, go get password + cout << "NOTE: Using password from columnstore.cnf" << endl; mysqlpw = buf.substr(pos1 + 1, 80); cout << mysqlpw << endl; break; @@ -923,7 +923,7 @@ int main(int argc, char* argv[]) if ( mysqlpw == " " ) { - cout << "NOTE: No password provide on command line or found uncommented in my.cnf" << endl; + cout << "NOTE: No password provided on command line or found uncommented in columnstore.cnf" << endl; cout << endl; string prompt = " *** Enter MariaDB Columnstore password > "; mysqlpw = getpass(prompt.c_str()); @@ -946,7 +946,7 @@ int main(int argc, char* argv[]) if (!FAILED) { // check if mysql is supported and get info - string columnstoreMysql = installDir + "/mysql/bin/mysql --defaults-extra-file=" + installDir + "/mysql/my.cnf -u root " + pwprompt; + string columnstoreMysql = installDir + "/mysql/bin/mysql -u root " + pwprompt; string cmd = columnstoreMysql + " -V > /dev/null 2>&1"; int ret = system(cmd.c_str()); @@ -1047,11 +1047,11 @@ int main(int argc, char* argv[]) system("echo ' ' >> columnstoreSupportReport.txt"); system("echo '******************** DBMS Columnstore config file ********************' >> columnstoreSupportReport.txt"); system("echo ' ' >> columnstoreSupportReport.txt"); - string cmd = "echo '################# cat /mysql/my.cnf ################# ' >> columnstoreSupportReport.txt"; + string cmd = "echo '################# cat /etc/my.cnf.d/columnstore.cnf ################# ' >> columnstoreSupportReport.txt"; system(cmd.c_str()); cmd = "echo ' ' >> columnstoreSupportReport.txt"; system(cmd.c_str()); - cmd = "cat " + installDir + "/mysql/my.cnf 2>/dev/null >> columnstoreSupportReport.txt"; + cmd = "cat /etc/my.cnf.d/columnstore.cnf 2>/dev/null >> columnstoreSupportReport.txt"; system(cmd.c_str()); system("echo ' ' >> columnstoreSupportReport.txt"); diff --git a/oamapps/columnstoreSupport/getMinMax.sh b/oamapps/columnstoreSupport/getMinMax.sh index 90434680d..a4d32421d 100755 --- a/oamapps/columnstoreSupport/getMinMax.sh +++ b/oamapps/columnstoreSupport/getMinMax.sh @@ -10,8 +10,7 @@ if [ -z "$MYSQLCMD" ]; then INSTALLDIR="/usr/local/mariadb/columnstore" - MYSQLCNF=$INSTALLDIR/mysql/my.cnf - MYSQLCMD="$INSTALLDIR/mysql/bin/mysql --defaults-extra-file=$MYSQLCNF -u root" + MYSQLCMD="$INSTALLDIR/mysql/bin/mysql -u root" fi # diff --git a/oamapps/columnstoreSupport/minMaxCheck.sh b/oamapps/columnstoreSupport/minMaxCheck.sh index ca6b158e1..9429452be 100755 --- a/oamapps/columnstoreSupport/minMaxCheck.sh +++ b/oamapps/columnstoreSupport/minMaxCheck.sh @@ -45,7 +45,7 @@ # if [ -z "$MYSQLCMD" ]; then - MYSQLCMD="/usr/local/mariadb/columnstore/mysql/bin/mysql --defaults-extra-file=/usr/local/mariadb/columnstore/mysql/my.cnf -u root" + MYSQLCMD="/usr/local/mariadb/columnstore/mysql/bin/mysql -u root" fi if [ -z "$INSTALLDIR" ]; then diff --git a/oamapps/columnstoreSupport/sqlLogs.sh b/oamapps/columnstoreSupport/sqlLogs.sh index e34ebe5b9..3e654a37d 100755 --- a/oamapps/columnstoreSupport/sqlLogs.sh +++ b/oamapps/columnstoreSupport/sqlLogs.sh @@ -19,7 +19,7 @@ fi if [ -z "$MYSQLCMD" ]; then - MYSQLCMD="$COLUMNSTORE_INSTALL_DIR/mysql/bin/mysql --defaults-extra-file=$COLUMNSTORE_INSTALL_DIR/mysql/my.cnf -u root" + MYSQLCMD="$COLUMNSTORE_INSTALL_DIR/mysql/bin/mysql -u root" export MYSQLCMD fi diff --git a/oamapps/postConfigure/mycnfUpgrade.cpp b/oamapps/postConfigure/mycnfUpgrade.cpp index 53cb0ce3c..a0e7abe75 100644 ---
a/oamapps/postConfigure/mycnfUpgrade.cpp +++ b/oamapps/postConfigure/mycnfUpgrade.cpp @@ -125,22 +125,22 @@ int main(int argc, char* argv[]) } //my.cnf file - string mycnfFile = startup::StartUp::installDir() + "/mysql/my.cnf"; + string mycnfFile = "/etc/my.cnf.d/columnstore.cnf"; ifstream mycnffile (mycnfFile.c_str()); if (!mycnffile) { - cerr << "mycnfUpgrade - my.cnf file not found: " << mycnfFile << endl; + cerr << "mycnfUpgrade - columnstore.cnf file not found: " << mycnfFile << endl; exit (1); } //my.cnf.rpmsave file - string mycnfsaveFile = startup::StartUp::installDir() + "/mysql/my.cnf.rpmsave"; + string mycnfsaveFile = "/etc/my.cnf/columnstore.cnf.rpmsave"; ifstream mycnfsavefile (mycnfsaveFile.c_str()); if (!mycnfsavefile) { - cerr << "mycnfUpgrade - my.cnf.rpmsave file not found: " << mycnfsaveFile << endl; + cerr << "mycnfUpgrade - columnstore.cnf.rpmsave file not found: " << mycnfsaveFile << endl; exit (1); } @@ -154,7 +154,7 @@ int main(int argc, char* argv[]) if (!includefile) { - cerr << "mycnfUpgrade - my.cnf include argument file not found: " << includeFile << endl; + cerr << "mycnfUpgrade - columnstore.cnf include argument file not found: " << includeFile << endl; exit (1); } @@ -164,7 +164,7 @@ int main(int argc, char* argv[]) if (!excludefile) { - cerr << "mycnfUpgrade - my.cnf exclude argument file not found: " << endl; + cerr << "mycnfUpgrade - columnstore.cnf exclude argument file not found: " << endl; exit (1); } @@ -177,7 +177,7 @@ int main(int argc, char* argv[]) includeArg = line; boost::regex icludeArgRegEx("^#*\\s*" + includeArg + "\\s*="); - //see if in my.cnf.rpmsave + //see if in columnstore.cnf.rpmsave ifstream mycnfsavefile (mycnfsaveFile.c_str()); char line[200]; string oldbuf; @@ -188,10 +188,10 @@ int main(int argc, char* argv[]) if ( boost::regex_search(oldbuf.begin(), oldbuf.end(), icludeArgRegEx) ) { - //found in my.cnf.rpmsave, check if this is commented out + //found in columnstore.cnf.rpmsave, check if this is commented out if ( line[0] != '#' ) { - // no, check in my.cnf and replace if exist or add if it doesn't + // no, check in columnstore.cnf and replace if exist or add if it doesn't ifstream mycnffile (mycnfFile.c_str()); vector lines; @@ -214,7 +214,7 @@ int main(int argc, char* argv[]) lines.push_back(newbuf); } - //write out a new my.cnf + //write out a new columnstore.cnf mycnffile.close(); unlink (mycnfFile.c_str()); ofstream newFile (mycnfFile.c_str()); @@ -251,7 +251,7 @@ int main(int argc, char* argv[]) lines.push_back(newbuf); } - //write out a new my.cnf + //write out a new columnstore.cnf mycnffile.close(); unlink (mycnfFile.c_str()); ofstream newFile (mycnfFile.c_str()); diff --git a/oamapps/postConfigure/postConfigure.cpp b/oamapps/postConfigure/postConfigure.cpp index 97f5051b0..645357e8a 100644 --- a/oamapps/postConfigure/postConfigure.cpp +++ b/oamapps/postConfigure/postConfigure.cpp @@ -762,16 +762,16 @@ int main(int argc, char* argv[]) catch (...) 
{} - // run my.cnf upgrade script + // run columnstore.cnf upgrade script if ( reuseConfig == "y" ) { cmd = installDir + "/bin/mycnfUpgrade > " + tmpDir + "/mycnfUpgrade.log 2>&1"; int rtnCode = system(cmd.c_str()); if (WEXITSTATUS(rtnCode) != 0) - cout << "Error: Problem upgrade my.cnf, check " << tmpDir << "/mycnfUpgrade.log" << endl; + cout << "Error: Problem upgrade columnstore.cnf, check " << tmpDir << "/mycnfUpgrade.log" << endl; else - cout << "NOTE: my.cnf file was upgraded based on my.cnf.rpmsave" << endl; + cout << "NOTE: columnstore.cnf file was upgraded based on columnstore.cnf.rpmsave" << endl; } //check mysql port changes diff --git a/procmon/processmonitor.cpp b/procmon/processmonitor.cpp index 421f23690..976abd42c 100644 --- a/procmon/processmonitor.cpp +++ b/procmon/processmonitor.cpp @@ -5117,7 +5117,7 @@ int ProcessMonitor::changeMyCnf(std::string type) log.writeLog(__LINE__, "changeMyCnf function called for " + type, LOG_TYPE_DEBUG); - string mycnfFile = startup::StartUp::installDir() + "/mysql/my.cnf"; + string mycnfFile = "/etc/my.cnf.d/columnstore.cnf"; ifstream file (mycnfFile.c_str()); if (!file) @@ -5126,8 +5126,6 @@ int ProcessMonitor::changeMyCnf(std::string type) return oam::API_FAILURE; } - string dbDir = startup::StartUp::installDir() + "/mysql/db"; - //get server-id based on ExeMgrx setup string serverID = "0"; string localModuleName = config.moduleName(); @@ -6691,7 +6689,7 @@ int ProcessMonitor::runUpgrade() for ( int i = 0 ; i < 10 ; i++ ) { //run upgrade script - string cmd = startup::StartUp::installDir() + "/mysql/bin/mysql_upgrade --defaults-file=" + startup::StartUp::installDir() + "/mysql/my.cnf " + + string cmd = startup::StartUp::installDir() + "/mysql/bin/mysql_upgrade " + passwordOption + " > " + tmpLog + " 2>&1"; log.writeLog(__LINE__, "runUpgrade, cmd = " + cmd, LOG_TYPE_DEBUG); diff --git a/utils/scenarios/common/sh/bulkExecResult.sh b/utils/scenarios/common/sh/bulkExecResult.sh deleted file mode 100755 index 6f1ff7f42..000000000 --- a/utils/scenarios/common/sh/bulkExecResult.sh +++ /dev/null @@ -1,124 +0,0 @@ -#!/bin/bash -# -# $1 = testRunID -# -#========================================================================================= -# MySQL load SQL test results -#========================================================================================= -function getTestInfo { -# - testID=`cat $dirName/testInfo.txt | grep testID |awk -F"=" '{print $2}'` -} - -function copySQLResultFiles { -# - rm -f /usr/local/mariadb/columnstore/data/bulk/data/import/TestSummary.* - rm -f /usr/local/mariadb/columnstore/data/bulk/data/import/TestTime.* - rm -f /usr/local/mariadb/columnstore/data/bulk/data/import/TestStats.* -# - cp -f testResultSummary.txt /usr/local/mariadb/columnstore/data/bulk/data/import/TestSummary.tbl - cp -f testResultTime.txt /usr/local/mariadb/columnstore/data/bulk/data/import/TestTime.tbl - cp -f testResultStats.txt /usr/local/mariadb/columnstore/data/bulk/data/import/TestStats.tbl -} - - -function CalLoadSQLTestResult { -# - /usr/local/mariadb/columnstore/bin/colxml perfstats -t TestSummary -j 1001 - /usr/local/mariadb/columnstore/bin/colxml perfstats -t TestTime -j 1002 - /usr/local/mariadb/columnstore/bin/colxml perfstats -t TestStats -j 1003 -# - /usr/local/mariadb/columnstore/bin/cpimport -j 1001 - /usr/local/mariadb/columnstore/bin/cpimport -j 1002 - /usr/local/mariadb/columnstore/bin/cpimport -j 1003 -} -# -function MySQLLoadSQLTestResult { -# - copySQLResultFiles - mysql lqrefd01 -hws_tkerr_tx.calpont.com -uroot 
-pqalpont! "|sed "s//g"|awk -F">" '{print $3}'` - stackConfig="" -# - IOType=$grpTestType -# - echo 0 >tmpDM.txt - echo 0 >tmpUM.txt - echo 0 >tmpPM.txt - cat 1/getSystemStatus.b.log |grep Module |grep -v "DISABLED"|grep -v System| - while read c1 moduleName restOfLine; do - moduleType=${moduleName:0:2} - case "$moduleType" in - dm) - ((numDM++)) - echo $numDM >tmpDM.txt - ;; - um) - ((numUM++)) - echo $numUM >tmpYM.txt - ;; - pm) - ((numPM++)) - echo $numPM >tmpPM.txt - ;; - esac - done - numDM=`cat tmpDM.txt` - numUM=`cat tmpUM.txt` - numPM=`cat tmpPM.txt` -} -#========================================================================================= -# Create SQL test stats -#========================================================================================= -function getSQLTestStats { -# - getSQLStats=0 -# - if [ $1 -eq 0 ]; then - pathName="." - elif [ $2 -eq 0 ]; then - pathName="./$1" - else - pathName="./$1/$2" - getSQLStats=1 - fi - st=`cat $pathName/starttime.log` - et=`cat $pathName/stoptime.log` - echo $testRunID\|$1\|$2\|0\|0\|$st\|$et\| >>testResultTime.txt -# - if [ $getSQLStats -eq 1 ] && [ $fileExt = "sql" ]; then -# get timing information -# - if [ $grpTestType = "M" ]; then - sfn=$2.sql - else - sfn=$fileName - fi - cat $sfn | grep CalpontStart >tmp1.txt - cat $pathName/$sfn.log|grep Calpont |grep -v now |grep -v calgetstats >tmp2.txt - -# - cat tmp1.txt | - while read c1 c2 c3 idx qNum restofLine; do - st=`cat tmp2.txt |grep "CalpontStart ( ${idx} ${qNum} )" |awk -F" " '{print $6 " " $7}'` - et=`cat tmp2.txt |grep "CalpontEnd ( ${idx} ${qNum} )" |awk -F" " '{print $6 " " $7}'` - qstats=`cat tmp2.txt |grep "CalpontStats ( ${idx} ${qNum} )"` - echo $testRunID\|$1\|$2\|$qNum\|$idx\|$st\|$et\| >>testResultTime.txt - done -# -# get query stat information -# - cat $pathName/$sfn.log|grep CalpontStats|grep -v calgetstats|sed 's/\;//g'|sed 's/-/ /g'|sed 's/MB//g'|sed 's/|//g' >tmp2.txt - cat tmp1.txt | - while read c1 c2 c3 idx qNum restofLine; do - statLine=`cat tmp2.txt |grep "CalpontStats ( ${idx} ${qNum} )"` -#iteration 18, use the following line - qstats=`echo $statLine |awk -F" " '{print $9"|"$11"|"$13"|"$15"|"$17"|"$19"|"$21"|"$23"|"$25"|"$28"."$29"|"}'` -#iteration 17 and back, use the following line -# qstats=`echo $statLine |awk -F" " '{print $9"|"$11"|"$13"|"$15"|"$17"|"$19"|"$21"|"$23"|"$25"|"$26"."$27"|"}'` - qstats=`echo $qstats|sed 's/|\.|/||/g'` - echo $testRunID\|$1\|$2\|$qNum\|$idx\|$qstats>>testResultStats.txt - done - -# -# print number of queries having errors -# - errCount=`grep "^ERROR" $pathName/$sfn.err.log | wc -l` - echo "error count for iteration " $1 "; session " $2 " is " $errCount >> testErrorCount.txt - fi - rm -f tmp*.txt -} -#========================================================================================= -# Create SQL test summary -#========================================================================================= -function getSQLTestSummary { -# - numStmts=`cat testResultTime.txt| grep -v "|0|"|grep -v "#"|wc -l` - touch tmp2.txt - cat testResultTime.txt| grep -v "|0|"|grep -v "#"| - while read statLine; do - st=`echo $statLine|awk -F"|" '{print $6}'` - et=`echo $statLine|awk -F"|" '{print $7}'` - if [ ! -z "$st" ] && [ ! 
-z "$et" ]; then - echo Y >>tmp2.txt - fi - done - numStmtsProcessed=`grep Y tmp2.txt|wc -l` - if [ $numStmts -eq $numStmtsProcessed ]; then - runCompleted=Y - else - runCompleted=N - fi - echo $testID\|$testRunID\|$testRunDesc\|$execServer\|$stackName\|$numDM\|$numUM\|$numPM\|$calpontDB\|$swVersion.$swRelease\|$grpTestNum\|$fileName\|$iterations\|$sessions\|$IOType\|$numStmts\|$numStmtsProcessed\|$runCompleted\| > testResultSummary.txt - rm -f tmp*.txt - -} -#========================================================================================= -# Create bulk test stats -#========================================================================================= -function getBulkTestStats { -# - cat Job_9999.xml |grep ">testResultStats.txt - done -} -#========================================================================================= -# Create bulk test summary -#========================================================================================= -function getBulkTestSummary { -# -# - st=`cat starttime.log` - et=`cat stoptime.log` -# - numTables=`cat Job_9999.xml |grep ">tmp1.txt - fi - done - tablesLoaded=`grep Y tmp1.txt|wc -l` -# - if [ $numTables -eq $tablesLoaded ]; then - runCompleted=Y - else - runCompleted=N - fi - echo $testID\|$testRunID\|$testRunDesc\|$execServer\|$stackName\|$numDM\|$numUM\|$numPM\|$calpontDB\|$fileName\|$numTables\|$tablesLoaded\|$runCompleted\|$rowCntMatched\|$st\|$et\| > testResultSummary.txt - rm -f tmp*.txt -} -#========================================================================================= -# get SQL test results -#========================================================================================= -function getSQLTestResult { -# - getSQLTestStats 0 0 - for (( i=1; i<=$iterations; i++ )) - do - getSQLTestStats $i 0 - for (( s=1; s<=sessions; s++ )) - do - getSQLTestStats $i $s - done - done - getSQLTestSummary -} -#========================================================================================= -# get Bulk test results -#========================================================================================= -function getBulkTestResult { -# - getBulkTestStats - getBulkTestSummary -} - -#========================================================================================= -# Main -#========================================================================================= -# - if [ $# -ne 1 ]; then - echo Syntax: collExtcResult.sh testRunID - echo Exiting..... - exit 1 - fi -# -# Verified existance of testRunID -# - curDir=`pwd` - testRunID=$1 - host=`hostname -s` - dirName=/root/genii/testResult/$testRunID -# - if [ ! -d $dirName ]; then - echo TestRunID $testRunID does not exist on this server \($host\). - echo Please make sure the test was executed on this server. - echo Exit..... 
- exit 1 - fi - cd $dirName -#----------------------------------------------------------------------------------------- -# Initialize files -#----------------------------------------------------------------------------------------- - rm -f testResult*.txt - touch testResultTime.txt - touch testResultStats.txt -# - getTestInfo - case "$testID" in - 1) - getSQLTestResult - ;; - 2) - getBulkTestResult - ;; - esac - rm -rf tmp*.txt - - - diff --git a/utils/scenarios/common/sh/execSQLScript_m.sh b/utils/scenarios/common/sh/execSQLScript_m.sh deleted file mode 100755 index f742c1a66..000000000 --- a/utils/scenarios/common/sh/execSQLScript_m.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash -# -# This script does the following: -# 1) Executes supplied SQL script on reference database and captures output to file -# 2) Executes supplied SQL script on test database and captures output to file -# 3) diff both output files and captures different to file -# -#$1 = Test database name -#$2 = Ref server name -#$3 = Ref database name -#$4 = Ref user name -#$5 = Ref user password -#$6 = SQL script to execute -# - logFileName=`basename $6` -# -# Execute script on reference database -# - if [ $2 != "NA" ]; then - mysql $3 -h$2 -u$4 -p$5 <$6 > $logFileName.ref.log - fi -# -# Execute script on test database -# - /usr/local/mariadb/columnstore/mysql/bin/mysql --defaults-extra-file=/usr/local/mariadb/columnstore/mysql/my.cnf -f -u root $1 <$6 > $logFileName.test.log 2>&1 - diff $logFileName.ref.log $logFileName.test.log > $logFileName.diff.log diff --git a/utils/scenarios/common/sh/insertExecResult.sh b/utils/scenarios/common/sh/insertExecResult.sh deleted file mode 100755 index e37b1b270..000000000 --- a/utils/scenarios/common/sh/insertExecResult.sh +++ /dev/null @@ -1,189 +0,0 @@ -#!/bin/bash -# -# $1 = testRunID -# -#========================================================================================= -# MySQL load SQL test results -#========================================================================================= -function getTestInfo { -# - testID=`cat $dirName/testInfo.txt | grep testID |awk -F"=" '{print $2}'` - loadedFlag=`cat $dirName/testInfo.txt | grep loadedFlag |awk -F"=" '{print $2}'` - if [ -z $loadedFlag ]; then - loadedFlag=N - fi -# - rm -f insertSummaryScript.sql - rm -f insertTimeScript.sql - rm -f insertStatsScript.sql -} - -#========================================================================================= -# insert SQL test results -#========================================================================================= -function insertSQLTestResult { -# -# insert test summary table -# - summaryFileName="insertSummaryScript.sql" - cat $dirName/testResultSummary.txt | - while read summaryLine; do - testID=`echo $summaryLine|awk -F"|" '{print $1}'` - testRunID=`echo $summaryLine|awk -F"|" '{print $2}'` - testRunDesc=`echo $summaryLine|awk -F"|" '{print $3}'` - execServer=`echo $summaryLine|awk -F"|" '{print $4}'` - stackName=`echo $summaryLine|awk -F"|" '{print $5}'` - numDM=`echo $summaryLine|awk -F"|" '{print $6}'` - numUM=`echo $summaryLine|awk -F"|" '{print $7}'` - numPM=`echo $summaryLine|awk -F"|" '{print $8}'` - calpontDB=`echo $summaryLine|awk -F"|" '{print $9}'` - swRelease=`echo $summaryLine|awk -F"|" '{print $10}'` - grpTestNum=`echo $summaryLine|awk -F"|" '{print $11}'` - scriptFileName=`echo $summaryLine|awk -F"|" '{print $12}'` - numIterations=`echo $summaryLine|awk -F"|" '{print $13}'` - numSessions=`echo $summaryLine|awk -F"|" '{print $14}'` - IOType=`echo 
$summaryLine|awk -F"|" '{print $15}'` - numStmts=`echo $summaryLine|awk -F"|" '{print $16}'` - numStmtsProcessed=`echo $summaryLine|awk -F"|" '{print $17}'` - numCompleted=`echo $summaryLine|awk -F"|" '{print $18}'` -# - vals="$testID,$testRunID,\"$testRunDesc\",\"$execServer\",\"$stackName\",$numDM,$numUM,$numPM,\"$calpontDB\",\"$swRelease\",$grpTestNum,\"$scriptFileName\",$numIterations,$numSessions,\"$IOType\",$numStmts,$numStmtsProcessed,\"$numCompleted\"" - stmt="insert into testSummary values ($vals);" - echo $stmt >> $summaryFileName - done - -# -# insert test time table -# - timeFileName="insertTimeScript.sql" - cat $dirName/testResultTime.txt | - while read timeLine; do - testRunID=`echo $timeLine|awk -F"|" '{print $1}'` - iterNum=`echo $timeLine|awk -F"|" '{print $2}'` - sessNum=`echo $timeLine|awk -F"|" '{print $3}'` - SQLSeqNum=`echo $timeLine|awk -F"|" '{print $4}'` - SQLIdxNum=`echo $timeLine|awk -F"|" '{print $5}'` - startTime=`echo $timeLine|awk -F"|" '{print $6}'` - endTime=`echo $timeLine|awk -F"|" '{print $7}'` -# - vals="$testRunID,$iterNum,$sessNum,$SQLSeqNum,$SQLIdxNum,\"$startTime\",\"$endTime\"" - vals=`echo $vals |sed 's/""/NULL/g'` - stmt="insert into testTime values ($vals);" - echo $stmt >> $timeFileName - done -# -# insert test stats table -# - statsFileName="insertStatsScript.sql" - cat $dirName/testResultStats.txt | - while read statsLine; do - vals=`echo $statsLine |sed 's/|/,/g'` - stmt="insert into testStats values ($vals);" - stmt=`echo $stmt |sed 's/,,/,NULL,/g'|sed 's/,,/,NULL,/g'|sed 's/,)/)/g'` - echo $stmt >> $statsFileName - done -# - mysql lqrefd01 -hws_tkerr_tx -uroot -pqalpont! > $summaryFileName - done -# -# insert test stats table -# - statsFileName="insertStatsScript.sql" - cat $dirName/testResultStats.txt | - while read statsLine; do - testRunID=`echo $statsLine|awk -F"|" '{print $1}'` - tableName=`echo $statsLine|awk -F"|" '{print $2}'` - sourceFile=`echo $statsLine|awk -F"|" '{print $3}'` - loadTime=`echo $statsLine|awk -F"|" '{print $4}'` - rowCntProcessed=`echo $statsLine|awk -F"|" '{print $5}'` - rowCntInserted=`echo $statsLine|awk -F"|" '{print $6}'` - rowCntDB=`echo $statsLine|awk -F"|" '{print $7}'` -# - vals="$testRunID,\"$tableName\",\"$sourceFile\",$loadTime,$rowCntProcessed,$rowCntInserted,$rowCntDB" - stmt="insert into bulkStats values ($vals);" - stmt=`echo $stmt |sed 's/,,/,NULL,/g'|sed 's/,,/,NULL,/g'|sed 's/,)/)/g'` - echo $stmt >> $statsFileName - done -# - mysql lqrefd01 -hws_tkerr_tx -uroot -pqalpont! 
> $dirName/testInfo.txt -# - exit 0 diff --git a/utils/scenarios/common/sh/remote_command.sh b/utils/scenarios/common/sh/remote_command.sh deleted file mode 100755 index 93beb83b3..000000000 --- a/utils/scenarios/common/sh/remote_command.sh +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/expect -# -# $Id: remote_command.sh 1122 2008-12-24 16:15:40Z dhill $ -# -# Remote command execution script to another server -# Argument 1 - Remote Server Host Name or IP address -# Argument 2 - Remote Server root password -# Argument 3 - Command -set timeout 600 -set USERNAME root -set SERVER [lindex $argv 0] -set PASSWORD [lindex $argv 1] -set COMMAND [lindex $argv 2] -set DEBUG [lindex $argv 3] -log_user $DEBUG -spawn -noecho /bin/bash -expect -re "# " -# -# send command -# -send "ssh $USERNAME@$SERVER $COMMAND\n" -expect { - -re "Host key verification failed" { send_user "FAILED: Host key verification failed\n" ; exit -1} - -re "service not known" { send_user " FAILED: Invalid Host\n" ; exit -1} - -re "ssh: connect to host" { send_user " FAILED: Invalid Host\n" ; exit -1 } - -re "authenticity" { send "yes\n" - expect { - -re "word: " { send "$PASSWORD\n" } abort - } - } - -re "word: " { send "$PASSWORD\n" } abort -} -expect { - -re "# " exit - -re "Permission denied" { send_user " FAILED: Invalid password\n" ; exit -1 } - -re "(y or n)" { send "y\n" - expect -re "# " { exit } - } -} -exit - diff --git a/utils/scenarios/common/sh/restore100gb.sh b/utils/scenarios/common/sh/restore100gb.sh deleted file mode 100755 index dd844ebfd..000000000 --- a/utils/scenarios/common/sh/restore100gb.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash -# -#========================================================================================= -mount /dev/sdj1 /mnt/qadbs -# -rm -rf /usr/local/mariadb/columnstore/data1/000.dir -rm -rf /usr/local/mariadb/columnstore/data2/000.dir -rm -rf /usr/local/mariadb/columnstore/data3/000.dir -rm -rf /usr/local/mariadb/columnstore/data4/000.dir -rm -f /usr/local/mariadb/columnstore/data1/systemFiles/dbrm/* -# -cp /mnt/qadbs/tpch/1m-1-10-100/dbrm/* /usr/local/mariadb/columnstore/data1/systemFiles/dbrm -# -cp -r /mnt/qadbs/tpch/1m-1-10-100/data1/000.dir /usr/local/mariadb/columnstore/data1 & -cp -r /mnt/qadbs/tpch/1m-1-10-100/data2/000.dir /usr/local/mariadb/columnstore/data2 & -cp -r /mnt/qadbs/tpch/1m-1-10-100/data3/000.dir /usr/local/mariadb/columnstore/data3 & -cp -r /mnt/qadbs/tpch/1m-1-10-100/data4/000.dir /usr/local/mariadb/columnstore/data4 & -# - diff --git a/utils/scenarios/common/sh/testExecEngine.sh b/utils/scenarios/common/sh/testExecEngine.sh deleted file mode 100755 index 8136ed3e1..000000000 --- a/utils/scenarios/common/sh/testExecEngine.sh +++ /dev/null @@ -1,248 +0,0 @@ -#!/bin/bash -# -#========================================================================================= -# Generate unique test run ID -#========================================================================================= -function genTestRunID { -# - testRunID="notthereyet" - dirName="" - while [ $testRunID = "notthereyet" ] - do - dirName=$(date +%Y%m%d%H%M%S) - if [ ! 
-d /root/genii/testResult/$dirName ] - then - mkdir /root/genii/testResult/$dirName - testRunID=$dirName - fi - done -} -#========================================================================================= -# Create unique test ID, create a directory for it, and cd into the test directory -#========================================================================================= -function prepareTestDir { -# - genTestRunID - cd /root/genii/testResult/$testRunID - echo TestResultDir=`pwd` -# -} -#========================================================================================= -# get test info -#========================================================================================= -function getTestInfo { -# - testID=`cat testInfo.txt | grep testID |awk -F"=" '{print $2}'` - testDB=`cat testInfo.txt | grep testDB |awk -F"=" '{print $2}'` - fileName=`cat testInfo.txt | grep scriptName |awk -F"=" '{print $2}'` - sessions=`cat testInfo.txt | grep sessions |awk -F"=" '{print $2}'` - iterations=`cat testInfo.txt | grep iterations |awk -F"=" '{print $2}'` - timeoutVal=`cat testInfo.txt | grep timeoutVal |awk -F"=" '{print $2}'` - testType=`cat testInfo.txt | grep grpTestType |awk -F"=" '{print $2}'` - dbmsType=`cat testInfo.txt | grep dbmsType |awk -F"=" '{print $2}'` -} -#========================================================================================= -# log test info -#========================================================================================= -function logTestInfo { -# - cp $execDir/testInfo.txt . - cp $fileName $logFileNamePrefix - execServer=`hostname -s` - echo execServer=$execServer >>testInfo.txt -} -#========================================================================================= -# log environment info -#========================================================================================= -function logEnvInfo { -# -# $1 = test run -# $2 = a or b, b=before test run, a=after test run -# - cp /usr/local/mariadb/columnstore/etc/Columnstore.xml $1/. - /usr/local/mariadb/columnstore/bin/mcsadmin getcalpontsoftwareinfo > $1/getcalpontsoftwareinfo.log - /usr/local/mariadb/columnstore/bin/mcsadmin getsystemstatus > $1/getSystemStatus.$2.log - /usr/local/mariadb/columnstore/bin/mcsadmin getsystemnetwork > $1/getSystemNetwork.$2.log - /usr/local/mariadb/columnstore/bin/mcsadmin getprocessstatus > $1/getProcessStatus.$2.log -} -#========================================================================================= -# log environment info -#========================================================================================= -function breakScriptFile { -# - numUsers=$1 - idx=1 - cat $logFileNamePrefix | - while read lineText; do - i=`expr index "$lineText" ^` - if [ $i -gt 0 ]; then - ((idx++)) - if [ $idx -gt $numUsers ]; then - idx=1 - fi - else - echo "$lineText" >> $idx.sql - fi - done -} - -#========================================================================================= -# Extract and output start and ending times -#========================================================================================= -function logTimes { -# - local dirName="" - - if [ $1 -eq 0 ] - then - dirName="." 
- elif [ $2 -eq 0 ] - then - dirName=$1 - else - dirName=$1/$2 - fi - date "+%Y-%m-%d %H:%M:%S" > $dirName/$3time.log - -} -#========================================================================================= -# Extract and output start and ending times -#========================================================================================= -function getTimes { -# - if [ $1 -eq 0 ] - then - st=`cat starttime.log` - et=`cat stoptime.log` - elif [ $2 -eq 0 ] - then - st=`cat $1\/starttime.log` - et=`cat $1\/stoptime.log` - else - st=`cat $1\/$2\/starttime.log` - et=`cat $1\/$2\/stoptime.log` - fi - echo $testRunID $1 $2 $st $et >>testResultSummary.txt -} -#========================================================================================= -# Waiting for sessions to finish..... -#========================================================================================= -function waitForDone { -# -# $1 = Test run number -# -# assume at least one session is not done -# - stillGoing=1 -# -# Assuem all sesions are done. -# Keep checking all N sessions. If any one session is not done yet, keep going -# - while [ $stillGoing -gt 0 ] - do - stillGoing=0 - for (( sess=1; sess<=sessions; sess++ )) - do - if [ ${pids[sess]} -ne 0 ] - then - lines=`ps -p ${pids[sess]} |wc -l` - if [ $lines -eq 1 ] - then - logTimes $1 $sess stop - pids[$sess]=0 -# wc -l $1\/$sess\/$logFileNamePrefix.log > $1\/$sess\/rowCnt.txt - else - stillGoing=1 - break - fi - fi - done - if [ $stillGoing -eq 1 ] - then - sleep 1 - fi - done -} -#========================================================================================= -# Execute a single test run -#========================================================================================= -function execOneTestRun { -# $1 = Test run Number -# - logEnvInfo $1 b - logTimes $1 0 start -# - for (( sess=1; sess<=$sessions; sess++ )) - do - mkdir $1\/$sess - logTimes $1 $sess start - if [ $fileExt = "sh" ]; then - $fileName $1\/$sess\/$logFileNamePrefix.log & - else - if [ $testType = "M" ]; then - sfn=$sess.sql - else - sfn=`basename $fileName` - fi - if [ $dbmsType = "M" ]; then - pathSfn=$1\/$sess - /usr/local/mariadb/columnstore/mysql/bin/mysql --defaults-extra-file=/usr/local/mariadb/columnstore/mysql/my.cnf -u root $testDB <$sfn 2> $pathSfn\/$sfn.err.log |grep "^Calpont" > $pathSfn\/$sfn.log & - else - su - oracle -c "sqlplus /nolog @/home/qa/srv/common/script/callogin.sql $testDB $testDB xe srvqaperf2 <$sfn" |grep "^Calpont" > $1\/$sess\/$sfn.log & - fi - fi - pids[$sess]=$! - done -#check for back ground status here - waitForDone $1 -# - logTimes $1 0 stop - logEnvInfo $1 a -} -#========================================================================================= -# Execute a all test run -#========================================================================================= -function execAllTestRuns { -# $1 = Test run Number -# - cp $fileName $logFileNamePrefix - logTimes 0 0 start - for (( iter=1; iter<=$iterations; iter++ )) - do - echo iteration=$iter - mkdir $iter - execOneTestRun $iter - done - logTimes 0 0 stop -} -#========================================================================================= -# Main -#========================================================================================= -# -# Check file extension. Only .sql and .sh files are supported for now -# - execDir=$autopilotExecDir - rm -f /tmp/autopilotExecDir.txt - prepareTestDir - cp $execDir/testInfo.txt . 
- getTestInfo - fileExt=${fileName##*.} - logFileNamePrefix=`basename $fileName` -# - if [ $fileExt != "sql" ] && [ $fileExt != "sh" ]; then - echo Unsupported file. Only .sql and .sh files are currently supported. - exit - fi -# -# save the current directory so that we can change back to it after the test is done -# - echo filename=$fileName - logTestInfo - if [ "$testType" = "M" ]; then - breakScriptFile $sessions - fi - execAllTestRuns - echo testResultDir=$testRunID >>testInfo.txt - cp testInfo.txt $execDir - - diff --git a/utils/scenarios/common/sql/load_TestStats.sql b/utils/scenarios/common/sql/load_TestStats.sql deleted file mode 100644 index 018928ea5..000000000 --- a/utils/scenarios/common/sql/load_TestStats.sql +++ /dev/null @@ -1,13 +0,0 @@ -/***************************************************************************** -* Script Name: load_TestQuery.sql -* Date Created: 2008.08.25 -* Author: Joseph Williams -* Purpose: Script to load the performance test result from a CSV file. -******************************************************************************/ -LOAD DATA INFILE '/usr/local/mariadb/columnstore/data/bulk/data/import/TestStats.tbl' -INTO TABLE TestStats -FIELDS -TERMINATED BY '|' -ENCLOSED BY '"' -LINES TERMINATED BY '\n' -(IterNum,SessNum,SQLSeqNum,SQLIdxNum,MaxMemPct,NumTempFiles,TempFieSpace,PhyIO,CacheIO,BlocksTouched,CasPartBlks,MsgBytesIn,MsgBytesOut,QuerySetupTime); diff --git a/utils/scenarios/common/sql/load_TestSummary.sql b/utils/scenarios/common/sql/load_TestSummary.sql deleted file mode 100644 index 2a28a3a58..000000000 --- a/utils/scenarios/common/sql/load_TestSummary.sql +++ /dev/null @@ -1,14 +0,0 @@ -/***************************************************************************** -* Script Name: load_TestSumamry.sql -* Date Created: 2008.08.25 -* Author: Joseph Williams -* Purpose: Script to load the performance test result from a CSV file. -******************************************************************************/ -LOAD DATA INFILE '/usr/local/mariadb/columnstore/data/bulk/data/import/TestSummary.tbl' -INTO TABLE TestSummary -FIELDS -TERMINATED BY '|' -ENCLOSED BY '"' -LINES TERMINATED BY '\n' -(TestID,TestRunID,TestRunDesc,ExecServer,StackName,numDM,numUM,numPM,CalpontDB,ScriptFileName,NumIterations,NumSessions,DataVolume,IOType,NumStmts,NumStmtsProcessed,RunCompleted); - \ No newline at end of file diff --git a/utils/scenarios/common/sql/load_TestTime.sql b/utils/scenarios/common/sql/load_TestTime.sql deleted file mode 100644 index 6ea77ff7d..000000000 --- a/utils/scenarios/common/sql/load_TestTime.sql +++ /dev/null @@ -1,13 +0,0 @@ -/***************************************************************************** -* Script Name: load_TestTime.sql -* Date Created: 2008.08.25 -* Author: Joseph Williams -* Purpose: Script to load the performance test result from a CSV file. 
-******************************************************************************/ -LOAD DATA INFILE '/usr/local/mariadb/columnstore/data/bulk/data/import/TestTime.tbl' -INTO TABLE TestTime -FIELDS -TERMINATED BY '|' -ENCLOSED BY '"' -LINES TERMINATED BY '\n' -(IterNum,SessNum,SQLSeqNum,SQLIdxNum,StartTime,EndTime); diff --git a/utils/scenarios/common/sql/testResultTables/tblBulkStats.sql b/utils/scenarios/common/sql/testResultTables/tblBulkStats.sql deleted file mode 100644 index 93fd4d87b..000000000 --- a/utils/scenarios/common/sql/testResultTables/tblBulkStats.sql +++ /dev/null @@ -1,9 +0,0 @@ -Create table BulkStats ( -TestRunID bigint, -TableName varchar(25), -SourceFile varchar(25), -LoadTime int, -RowCntProcessed bigint, -RowCntInserted bigint, -RowCntDB bigint -); \ No newline at end of file diff --git a/utils/scenarios/common/sql/testResultTables/tblBulkSummary.sql b/utils/scenarios/common/sql/testResultTables/tblBulkSummary.sql deleted file mode 100644 index c1125fba9..000000000 --- a/utils/scenarios/common/sql/testResultTables/tblBulkSummary.sql +++ /dev/null @@ -1,18 +0,0 @@ -Create table BulkSummary ( -TestID int, -TestRunID bigint, -TestRunDesc varchar(255), -ExecServer varchar(15), -StackName varchar(15), -numDM tinyint, -numUM tinyint, -numPM tinyint, -CalpontDB varchar(15), -ScriptFileName varchar(255), -NumTables tinyint, -NumTablesLoaded tinyint, -RunCompleted char(1), -RowCntsMatched char(1), -StartTime datetime, -EndTime datetime -); \ No newline at end of file diff --git a/utils/scenarios/common/sql/testResultTables/tblTestGroup.sql b/utils/scenarios/common/sql/testResultTables/tblTestGroup.sql deleted file mode 100644 index b79e78410..000000000 --- a/utils/scenarios/common/sql/testResultTables/tblTestGroup.sql +++ /dev/null @@ -1,4 +0,0 @@ -Create table TestGroup ( -GroupID int, -GroupDesc varchar(50) -); diff --git a/utils/scenarios/common/sql/testResultTables/tblTestStats.sql b/utils/scenarios/common/sql/testResultTables/tblTestStats.sql deleted file mode 100644 index c3277ab78..000000000 --- a/utils/scenarios/common/sql/testResultTables/tblTestStats.sql +++ /dev/null @@ -1,17 +0,0 @@ -create table TestStats ( -TestRunID bigint, -IterNum int, -SessNum int, -SQLSeqNum int, -SQLIdxNum int, -MaxMemPct int, -NumTempFiles int, -TempFileSpace int, -PhyIO int, -CacheIO int, -BlocksTouched int, -CasPartBlks int, -MsgBytesIn int, -MsgBytesOut int, -QuerySetupTime decimal(18,6) -); diff --git a/utils/scenarios/common/sql/testResultTables/tblTestSummary.sql b/utils/scenarios/common/sql/testResultTables/tblTestSummary.sql deleted file mode 100644 index 21338c654..000000000 --- a/utils/scenarios/common/sql/testResultTables/tblTestSummary.sql +++ /dev/null @@ -1,21 +0,0 @@ -create table TestSummary ( -TestID int, -TestRunID bigint, -TestRunDesc varchar(255), -ExecServer varchar(15), -StackName varchar(15), -numDM tinyint, -numUM tinyint, -numPM tinyint -CalpontDB varchar(15), -Software varchar(20), -GroupID int, -ScriptFileName varchar(255), -NumIterations int, -NumSessions int, -IOType char(1), -NumStmts int, -NumStmtsProcessed int, -RunCompleted char(1) -); - diff --git a/utils/scenarios/common/sql/testResultTables/tblTestTime.sql b/utils/scenarios/common/sql/testResultTables/tblTestTime.sql deleted file mode 100644 index 1699be86a..000000000 --- a/utils/scenarios/common/sql/testResultTables/tblTestTime.sql +++ /dev/null @@ -1,9 +0,0 @@ -create table TestTime ( -TestRunID bigint, -IterNum int, -SessNum int, -SQLSeqNum int, -SQLIdxNum int, -StartTime datetime, -EndTime datetime 
-); diff --git a/utils/scenarios/common/sql/testResultTables/tblTestType.sql b/utils/scenarios/common/sql/testResultTables/tblTestType.sql deleted file mode 100644 index 68edf0103..000000000 --- a/utils/scenarios/common/sql/testResultTables/tblTestType.sql +++ /dev/null @@ -1,4 +0,0 @@ -Create table TestType ( -TestID int, -TestDesc varchar(50) -); diff --git a/utils/scenarios/dwweek/data/continue.txt b/utils/scenarios/dwweek/data/continue.txt deleted file mode 100644 index 573541ac9..000000000 --- a/utils/scenarios/dwweek/data/continue.txt +++ /dev/null @@ -1 +0,0 @@ -0 diff --git a/utils/scenarios/dwweek/test/dwControlBulk.sh b/utils/scenarios/dwweek/test/dwControlBulk.sh deleted file mode 100644 index bb5b5a90a..000000000 --- a/utils/scenarios/dwweek/test/dwControlBulk.sh +++ /dev/null @@ -1,62 +0,0 @@ -#!/bin/bash -# -#$1 = dwweek -#$2 = start hour -#$3 = stop hour -#$4 = interval in minute -# - if [ $# -ne 4 ]; then - echo Syntax: dwControlBulk.sh testDB startHour stopHour intervalInMinutes - echo Exiting..... - exit 1 - fi -# - testDB=$1 - startHour=$2 - stopHour=$(($3 - 1)) - interval=$4 -# - jobNum=0 -# - keepGoing=1 - while [ $keepGoing -eq 1 ]; do - vTime=$(date "+%H:%M:%S %x") - vHour=${vTime:0:2} - if [ $vHour -ge $startHour ] && [ $vHour -le $stopHour ]; then - vMin=${vTime:3:2} - vHour=`expr $vHour + 0` - vMin=`expr $vMin + 0` - minutes=$((($vHour + 1) * 60 + $vMin - ($startHour + 1) * 60)) - remainder=`expr $minutes % $interval` - if [ $remainder -eq 0 ]; then - ((jobNum++)) - if [ $jobNum -gt 68 ]; then - jobNum=1 - fi - if [ $jobNum -lt 10 ]; then - dirName=${vTime:15:4}${vTime:9:2}${vTime:12:2}_0$jobNum - mkdir $dirName - cd $dirName - /root/genii/utils/scenarios/dwweek/test/dwSubmitCpimport.sh $testDB lineitem_0$jobNum.tbl - else - dirName=${vTime:15:4}${vTime:9:2}${vTime:12:2}_$jobNum - mkdir $dirName - cd $dirName - /root/genii/utils/scenarios/dwweek/test/dwSubmitCpimport.sh $testDB lineitem_$jobNum.tbl - fi - cd .. - timeToSleep=1 - else - timeToSleep=1 - fi - else - timeToSleep=5 - fi - sleep $timeToSleep - if [ -f /root/genii/utils/scenarios/dwweek/data/continue.txt ]; then - keepGoing=`cat /root/genii/utils/scenarios/dwweek/data/continue.txt` - fi - done - - - diff --git a/utils/scenarios/dwweek/test/dwControlGroup.sh b/utils/scenarios/dwweek/test/dwControlGroup.sh deleted file mode 100644 index 986d46942..000000000 --- a/utils/scenarios/dwweek/test/dwControlGroup.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/bash -# -#$1 = testDB -#$2 = start hour -#$3 = stop hour -#$4 = grpNum -#$5 = numConCur -#$6 = secsToPause -# - if [ $# -ne 6 ]; then - echo Syntax: dwControlGroup.sh testDB startHour stopHour groupNum NumConcurrentUsers secsToPause - echo Exiting..... - exit 1 - fi -# - testDB=$1 - startHour=$2 - stopHour=$(($3 - 1)) - grpNum=$4 - numConCur=$5 - secsToPause=$6 -# - jobNum=0 -# - keepGoing=1 - while [ $keepGoing -eq 1 ]; do - vTime=$(date "+%H:%M:%S %x") - vHour=${vTime:0:2} - if [ $vHour -ge $startHour ] && [ $vHour -le $stopHour ]; then - dirName=Group$grpNum - mkdir -p $dirName - cd $dirName - /root/genii/utils/scenarios/perf/test/pfSubmitGroupTest.sh $grpNum $testDB $numConCur 1 S 0 M - sleep $secsToPause - cd .. 
- else - sleep 60 - fi - if [ -f /root/genii/utils/scenarios/dwweek/data/continue.txt ]; then - keepGoing=`cat /root/genii/utils/scenarios/dwweek/data/continue.txt` - fi - done - - - diff --git a/utils/scenarios/dwweek/test/dwControlNightly.sh b/utils/scenarios/dwweek/test/dwControlNightly.sh deleted file mode 100644 index 748eb0f36..000000000 --- a/utils/scenarios/dwweek/test/dwControlNightly.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/bin/bash -# -#$1 = testDB -#$2 = start hour -# - if [ $# -ne 2 ]; then - echo Syntax: dwControlNightly.sh testDB startHour - echo Exiting..... - exit 1 - fi -# - testDB=$1 - startHour=$2 -# - keepGoing=1 - nightlyDone=0 - while [ $keepGoing -eq 1 ]; do - vTime=$(date "+%H:%M:%S %x") - vHour=${vTime:0:2} - if [ $vHour -eq $startHour ]; then - if [ $nightlyDone -eq 0 ]; then - dirName=nightly - mkdir -p $dirName - cd $dirName -# -# group to run nightly stats, pre-DML - /root/genii/utils/scenarios/perf/test/pfSubmitGroupTest.sh 204 $testDB 1 1 S 0 M - rm -rf * -# group to delete and update lineitem rows - /root/genii/utils/scenarios/perf/test/pfSubmitGroupTest.sh 203 $testDB 1 1 S 0 M - rm -rf * -# group to run nightly stats, post-DML - /root/genii/utils/scenarios/perf/test/pfSubmitGroupTest.sh 204 $testDB 1 1 S 0 M - rm -rf * -# -# backup database -# /root/genii/utils/scenarios/dwweek/test/dwbackup.sh - cd .. - nightlyDone=1 - fi - else - nightlyDone=0 - fi - sleep 60 - if [ -f /root/genii/utils/scenarios/dwweek/data/continue.txt ]; then - keepGoing=`cat /root/genii/utils/scenarios/dwweek/data/continue.txt` - fi - done - - diff --git a/utils/scenarios/dwweek/test/dwControlReport_notused.sh b/utils/scenarios/dwweek/test/dwControlReport_notused.sh deleted file mode 100644 index b63ca69f4..000000000 --- a/utils/scenarios/dwweek/test/dwControlReport_notused.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash -# -#$1 = testDB -#$2 = start hour -# - if [ $# -ne 2 ]; then - echo Syntax: dwControlReport.sh testDB startHour - echo Exiting..... - exit 1 - fi -# - testDB=$1 - startHour=$2 -# - keepGoing=1 - nightlyDone=0 - while [ $keepGoing -eq 1 ]; do - vTime=$(date "+%H:%M:%S %x") - vHour=${vTime:0:2} - if [ $vHour -eq $startHour ]; then - if [ $nightlyDone -eq 0 ]; then - dirName=nightly - mkdir -p $dirName - cd $dirName -# -# group to run nightly, lengthy reports - /root/genii/utils/scenarios/perf/test/pfSubmitGroupTest.sh 205 $testDB 1 1 S 0 M - rm -rf * - cd .. - nightlyDone=1 - fi - else - nightlyDone=0 - fi - sleep 60 - done - - diff --git a/utils/scenarios/dwweek/test/dwSubmitCpimport.sh b/utils/scenarios/dwweek/test/dwSubmitCpimport.sh deleted file mode 100644 index d697556e0..000000000 --- a/utils/scenarios/dwweek/test/dwSubmitCpimport.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/bash -#========================================================================================= -# -#$1 = testDB -#$2 = sourceFileName -# - if [ $# -lt 2 ]; then - echo Syntax dwSubmitCpimport.sh dbName sourceFileName - echo Exiting..... - exit 1 - fi -# - testDB=$1 - sourceFileName=$2 -# - testID=2 -#--------------------------------------------------------------------------- -# Create a cpimport script, which will be executed by a PM remotely. 
-#--------------------------------------------------------------------------- - bulkScriptName="bulkScript.sh" -# - echo \#!/bin/bash > $bulkScriptName - echo \# >> $bulkScriptName -# - echo rm -f /usr/local/mariadb/columnstore/data/bulk/log/Jobxml_9999.log >> $bulkScriptName - echo rm -f /usr/local/mariadb/columnstore/data/bulk/log/Job_9999.log >> $bulkScriptName - echo /usr/local/mariadb/columnstore/bin/colxml $testDB -t lineitem -l $sourceFileName -j 9999 >> $bulkScriptName - echo /usr/local/mariadb/columnstore/bin/cpimport -j 9999 >> $bulkScriptName - echo cp /usr/local/mariadb/columnstore/data/bulk/job/Job_9999.xml . >> $bulkScriptName - echo cp /usr/local/mariadb/columnstore/data/bulk/log/Jobxml_9999.log . >> $bulkScriptName - echo cp /usr/local/mariadb/columnstore/data/bulk/log/Job_9999.log . >> $bulkScriptName - chmod 777 $bulkScriptName -# -#append current directory path to to script file name - scriptFileName=`pwd`\/$bulkScriptName -# - autopilotExecDir=`pwd` - export autopilotExecDir -# - echo testID=$testID >testInfo.txt - echo testDB=$testDB >>testInfo.txt - echo testType=NA >>testInfo.txt - echo scriptName=$scriptFileName >>testInfo.txt - echo sessions=1 >>testInfo.txt - echo iterations=1 >>testInfo.txt - /root/genii/utils/scenarios/common/sh/testExecEngine.sh > testExec.log - testRunID=`cat testInfo.txt |grep testResultDir |awk -F"=" '{print $2}'` - /root/genii/utils/scenarios/common/sh/collExecResult.sh $testRunID >collExecResult.log - /root/genii/utils/scenarios/common/sh/insertExecResult.sh $testRunID >bulkExecResult.log diff --git a/utils/scenarios/dwweek/test/dwbackup.sh b/utils/scenarios/dwweek/test/dwbackup.sh deleted file mode 100644 index 332392ab5..000000000 --- a/utils/scenarios/dwweek/test/dwbackup.sh +++ /dev/null @@ -1,52 +0,0 @@ -#!/bin/bash -# - numRoots=4 -# - for ((idx=1; $idx<=$numRoots; idx++)); do - rm -rf /usr/local/mariadb/columnstore/data$idx/dwbackup/2 & - pids[$idx]=$! - done -# - keepChecking=1 - while [ $keepChecking -eq 1 ]; do - keepChecking=0 - for ((idx=1; $idx<=$numRoots; idx++)); do - if [ ${pids[idx]} -ne 0 ] - then - lines=`ps -p ${pids[idx]} |wc -l` - if [ $lines -eq 1 ] - then - pids[$idx]=0 - else - keepChecking=1 - fi - fi - done - sleep 5 - done -# - for ((idx=1; $idx<=$numRoots; idx++)); do - mv /usr/local/mariadb/columnstore/data$idx/dwbackup/1 /usr/local/mariadb/columnstore/data$idx/dwbackup/2 - mkdir -p /usr/local/mariadb/columnstore/data$idx/dwbackup/1 - cp -r /usr/local/mariadb/columnstore/data$idx/000.dir /usr/local/mariadb/columnstore/data$idx/dwbackup/1 & - pids[$idx]=$! - done -# - keepChecking=1 - while [ $keepChecking -eq 1 ]; do - keepChecking=0 - for ((idx=1; $idx<=$numRoots; idx++)); do - if [ ${pids[idx]} -ne 0 ] - then - lines=`ps -p ${pids[idx]} |wc -l` - if [ $lines -eq 1 ] - then - pids[$idx]=0 - else - keepChecking=1 - fi - fi - done - sleep 5 - done - cp -r /mnt/OAM/dbrm /usr/local/mariadb/columnstore/data1/dwbackup/1/. 
diff --git a/utils/scenarios/dwweek/testcase/dwDMrun.sh b/utils/scenarios/dwweek/testcase/dwDMrun.sh deleted file mode 100644 index df32dfd6b..000000000 --- a/utils/scenarios/dwweek/testcase/dwDMrun.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash -# -# test #1 -# run user queries from 7:00am to 6:00pm -# - /root/genii/utils/scenarios/dwweek/test/dwControlGroup.sh dwweek 7 18 200 3 0 & - sleep 5 - /root/genii/utils/scenarios/dwweek/test/dwControlGroup.sh dwweek 7 18 201 3 15 & - sleep 5 - /root/genii/utils/scenarios/dwweek/test/dwControlGroup.sh dwweek 7 18 202 4 30 & -# -# run user query group #3 from 6:00pm to midnight -# Each run should take over one hour so we will stop initiating jobs after 10:00pm -# Effectively, jobs will finished sometime after 11:00pm -# - /root/genii/utils/scenarios/dwweek/test/dwControlGroup.sh dwweek 18 22 3 2 0 & -# -# Nightly delete, update, and backup -midnight to 7:00am -# - /root/genii/utils/scenarios/dwweek/test/dwControlNightly.sh dwweek 0 & diff --git a/utils/scenarios/dwweek/testcase/dwPMrun.sh b/utils/scenarios/dwweek/testcase/dwPMrun.sh deleted file mode 100644 index 873ed75a8..000000000 --- a/utils/scenarios/dwweek/testcase/dwPMrun.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash -# - /root/genii/utils/scenarios/dwweek/test/dwControlBulk.sh dwweek 7 24 15 & - diff --git a/utils/scenarios/dwweek/testcase/readme.txt b/utils/scenarios/dwweek/testcase/readme.txt deleted file mode 100644 index 3f40253bb..000000000 --- a/utils/scenarios/dwweek/testcase/readme.txt +++ /dev/null @@ -1,11 +0,0 @@ -The dwDMrun.sh script will start serveral back ground. They will start and stop queries jobs at -designated times. This script must be executed on the DM or UM. - -The dwPMrun.sh script will start cpimport jobs at designated time. It must be executed on -the active PM. 
- -To prevent the next job from starting (to cancel test), echo a 0 to the follow file -/home/qa/srv/autopilot/dwweek/data/continue.txt - -echo 0 > /home/qa/srv/autopilot/dwweek/data/continue.txt - diff --git a/utils/scenarios/perf/data/perfGrpQueryList.txt b/utils/scenarios/perf/data/perfGrpQueryList.txt deleted file mode 100644 index dc7de7178..000000000 --- a/utils/scenarios/perf/data/perfGrpQueryList.txt +++ /dev/null @@ -1,310 +0,0 @@ -# GroupID SequenceNum FileName -# -# Group query #1 test -# -#1# 1 q2.3.1.1.1.sql -#1# 2 q2.3.1.1.2.sql -# -# Group query #2 test -# -#2# 1 q4.2.sql -# -# Group query #3 test -# -#3# 1 q4.3.sql -#3# 2 q4.4.sql -#3# 3 q4.6.1.sql -#3# 4 q4.6.2.sql -#3# 5 q4.6.sql -# -# Group query #4 test -# -#4# 1 q2.3.4.1.sql -#4# 2 q2.3.4.2.sql -#4# 3 q2.3.4.3.sql -#4# 4 q2.3.4.4.sql -#4# 5 q4.8.1.sql -#4# 6 q4.8.2.sql -#4# 7 q4.8.3.sql -#4# 8 q4.8.4.sql -#4# 9 q4.8.5.sql -#4# 10 q4.8.6.sql -#4# 11 q4.8.7.sql -#4# 12 q4.8.8.sql -#4# 13 q4.8.9.sql -#4# 14 q5.3.1.sql -# -# Group query #5 test -# -#5# 1 q2.3.2.1.1.sql -#5# 2 q2.3.2.1.2.sql -#5# 3 q2.3.2.1.3.sql -#5# 4 q2.3.2.1.4.sql -#5# 5 q2.3.6.sql -#5# 6 q5.1.1.sql -#5# 7 q5.1.2.sql -#5# 8 q5.1.3.sql -#5# 9 q5.2.1.sql -#5# 10 q5.2.2.sql -#5# 11 q5.2.3.sql -#5# 12 q5.2.4.sql -#5# 13 q5.2.5.sql -#5# 14 q5.4.1.sql -#5# 15 q5.4.2.sql -#5# 16 q5.4.3.sql -# -# Temp test groups -# -#6# 1 lineitemCPTest.sql -# -# Temp test groups -# -#7# 1 nation.sql -#7# 1 q1.sql -#7# 2 q2.sql -#7# 3 q3.sql -#7# 4 q4.sql -# -# PM scan test -# -#101# 1 pmscan.1.sql -#101# 2 pmscan.2.sql -#101# 3 pmscan.3.sql -#101# 4 pmscan.4.sql -# -# PM Join -# -#102# 1 pmmj.2tbls.1.sql -#102# 2 pmmj.2tbls.2.sql -#102# 3 pmmj.3tbls.1.sql -#102# 4 pmmj.3tbls.2.sql -# -# Aggregation functions -# -#103# 1 aggr.1tbl.avg.sql -#103# 2 aggr.1tbl.count.num.col.sql -#103# 3 aggr.1tbl.count.str.col.sql -#103# 4 aggr.1tbl.count.table.sql -#103# 5 aggr.1tbl.max.num.sql -#103# 6 aggr.1tbl.max.str.sql -#103# 7 aggr.1tbl.min.num.sql -#103# 8 aggr.1tbl.min.str.sql -#103# 9 aggr.1tbl.sum.sql -#103# 10 aggr.2tbls.avg.sql -#103# 11 aggr.2tbls.count.num.col.1.sql -#103# 12 aggr.2tbls.count.num.col.2.sql -#103# 13 aggr.2tbls.count.str.col.sql -#103# 14 aggr.2tbls.count.table.sql -#103# 15 aggr.2tbls.max.num.sql -#103# 16 aggr.2tbls.max.str.sql -#103# 17 aggr.2tbls.min.num.sql -#103# 18 aggr.2tbls.min.str.sql -#103# 19 aggr.2tbls.sum.sql -# -# Aggregation join -# -#104# 1 aj.pmmj.1.sql -#104# 2 aj.pmmj.2.sql -#104# 3 aj.ummj.1.sql -#104# 4 aj.ummj.2.sql -# -# UM Join -# -#105# 1 ummj.2tbls.1.sql -#105# 2 ummj.2tbls.2.sql -#105# 3 ummj.3tbls.1.sql -#105# 4 ummj.3tbls.2.sql -# -# DM post process for group by and order by clauses -# -#106# 1 dmpp.1tbl.groupby.1.sql -#106# 2 dmpp.1tbl.groupby.2.sql -#106# 3 dmpp.1tbl.groupby.orderby.num.1.sql -#106# 4 dmpp.1tbl.groupby.orderby.num.2.sql -#106# 5 dmpp.1tbl.groupby.orderby.num.str.sql -#106# 6 dmpp.1tbl.groupby.orderby.str.sql -#106# 7 dmpp.1tbl.orderby.1.sql -#106# 8 dmpp.1tbl.orderby.2.sql -#106# 9 dmpp.2tbls.groupby.1.sql -#106# 10 dmpp.2tbls.groupby.2.sql -#106# 11 dmpp.2tbls.groupby.orderby.num.1.sql -#106# 12 dmpp.2tbls.groupby.orderby.num.2.sql -#106# 13 dmpp.2tbls.groupby.orderby.str.sql -#106# 14 dmpp.2tbls.orderby.1.sql -#106# 15 dmpp.2tbls.orderby.2.sql -# -# -# -# Bench_HJ_Orders_Line.sql -# -#107# 1 q107.1.sql -#107# 2 q107.2.sql -#107# 3 q107.3.sql -#107# 4 q107.4.sql -#107# 5 q107.5.sql -#107# 6 q107.6.sql -#107# 7 q107.7.sql -#107# 8 q107.8.sql -#107# 9 q107.9.sql -#107# 10 q107.10.sql -#107# 11 q107.11.sql -#107# 12 
q107.12.sql -#107# 13 q107.13.sql -#107# 14 q107.14.sql -#107# 15 q107.15.sql -#107# 16 q107.16.sql -# -# -# TPCH06_Modified.sql -# -#108# 2 q108.2.sql -#108# 3 q108.3.sql -# -# -# Bench_HJ_Part_PartSupp.sql -# -#109# 1 q109.1.sql -#109# 2 q109.2.sql -#109# 3 q109.3.sql -#109# 4 q109.4.sql -#109# 5 q109.5.sql -#109# 6 q109.6.sql -#109# 7 q109.7.sql -#109# 8 q109.8.sql -#109# 9 q109.9.sql -#109# 10 q109.10.sql -#109# 11 q109.11.sql -#109# 12 q109.12.sql -#109# 13 q109.13.sql -# -# -# Bench_HJ_Cust_Orders.sql -# -#110# 1 q110.1.sql -#110# 2 q110.2.sql -#110# 3 q110.3.sql -#110# 4 q110.4.sql -#110# 5 q110.5.sql -#110# 6 q110.6.sql -#110# 7 q110.7.sql -#110# 8 q110.8.sql -#110# 9 q110.9.sql -#110# 10 q110.10.sql -#110# 11 q110.11.sql -#110# 12 q110.12.sql -#110# 13 q110.13.sql -#110# 14 q110.14.sql -#110# 15 q110.15.sql -#110# 16 q110.16.sql -# -# -# Bench_Scan_Aggregate_7x.sql -# -# -#111# 1 q111.1.sql -#111# 2 q111.2.sql -#111# 3 q111.3.sql -#111# 4 q111.4.sql -#111# 5 q111.5.sql -#111# 6 q111.6.sql -#111# 7 q111.7.sql -#111# 8 q111.8.sql -#111# 9 q111.9.sql -#111# 10 q111.10.sql -#111# 11 q111.11.sql -#111# 12 q111.12.sql -#111# 13 q111.13.sql -#111# 14 q111.14.sql -#111# 15 q111.15.sql -#111# 16 q111.16.sql -#211# 17 q111.17.sql -#211# 18 q111.18.sql -#111# 19 q111.19.sql -#111# 20 q111.20.sql -#211# 21 q111.21.sql -#211# 22 q111.22.sql -# -# -# Bench_DBT3_Aggregation.sql -# -# -#112# 1 q112.1.sql -#112# 2 q112.2.sql -#112# 3 q112.3.sql -#112# 4 q112.4.sql -#112# 5 q112.5.sql -#112# 6 q112.6.sql -# -# -# dwweek - 4 concurrent users -#200# 1 week_scenario_jt_1.sql -#200# 2 week_scenario_jt_2.sql -#200# 3 week_scenario_jt_3.sql -#200# 4 week_scenario_jt_4.sql -#200# 5 week_scenario_jt_1.1.sql -#200# 6 week_scenario_jt_2.1.sql -#200# 7 week_scenario_jt_3.1.sql -#200# 8 week_scenario_jt_4.1.sql -# -# dwweek - 3 concurrent users with 15-second pause -#201# 1 week_scenario_jt_5_1.sql -#201# 2 week_scenario_jt_5_2.sql -#201# 3 week_scenario_jt_5_3.sql -#201# 4 week_scenario_jt_7.sql -#201# 5 week_scenario_jt_5_1.1.sql -#201# 6 week_scenario_jt_5_2.1.sql -#201# 7 week_scenario_jt_5_3.1.sql -#201# 8 week_scenario_jt_7.1.sql -# -# dwweek - 3 concurrent users with 30-second pause -#202# 1 q2.3.4.1.sql -#202# 2 q2.3.4.2.sql -#202# 3 q2.3.4.3.sql -#202# 4 q2.3.4.4.sql -#202# 5 q4.8.1.sql -#202# 6 q4.8.2.sql -#202# 7 q4.8.3.sql -#202# 8 q4.8.4.sql -#202# 9 q4.8.5.sql -#202# 10 q4.8.6.sql -#202# 11 q4.8.7.sql -#202# 12 q4.8.8.sql -#202# 13 q4.8.9.sql -#202# 14 q5.3.1.sql -# -# dwweek - delete and update -# -#203# 1 delete.mod.sql -#203# 2 update.sql -# -# dwweek - nightly stats -# -#204# 1 lineitemstats.sql -# -# dwweek - nightly lengthy reports -# -# -# dwweek - 4 concurrent users -#300# 1 week_scenario_jt_1.sql -# -# dwweek - 4 concurrent users -#301# 1 week_scenario_jt_1.sql -#301# 2 week_scenario_jt_2.sql -# -# dwweek - 4 concurrent users -#302# 1 week_scenario_jt_1.sql -#302# 2 week_scenario_jt_2.sql -#302# 3 week_scenario_jt_3.sql -# -# dwweek - 4 concurrent users -#303# 1 week_scenario_jt_1.sql -#303# 2 week_scenario_jt_2.sql -#303# 3 week_scenario_jt_3.sql -#303# 4 week_scenario_jt_4.sql - - - - - - - diff --git a/utils/scenarios/perf/setup/SQLTestSummary.sql b/utils/scenarios/perf/setup/SQLTestSummary.sql deleted file mode 100644 index fff8be82d..000000000 --- a/utils/scenarios/perf/setup/SQLTestSummary.sql +++ /dev/null @@ -1,65 +0,0 @@ -create table SQLTestSummary ( -TestID int, -TestRunID char(14), -TestRunDesc varchar(255), -ExecServer varchar(15), -StackName varchar(15), -numDM tinyint, 
-numUM tinyint, -numPM tinyint, -CalpontDB varchar(15), -ScriptFileName varchar(255), -NumIterations tinyint, -NumSessions tinyint, -NumSQLStmts tinyint, -DataVolume char(1), -IOType char(1), -NumStmts int, -NumStmtsProcessed int, -RunCompleted char(1) -)engine=infinidb; - - -create table SQLTestTime ( -TestRunID char(14), -IterNum tinyint, -SessNum tinyint, -SQLSeqNum tinyint, -StartTime datetime, -EndTime datetime -)engine=infinidb; - - - -create table BulkTestSummary ( -TestID int, -TestRunID char(14), -TestRunDesc varchar(255), -ExecServer varchar(15), -StackName varchar(15), -numDM tinyint, -numUM tinyint, -numPM tinyint, -CalpontDB varchar(15), -ScriptFileName varchar(255), -NumTables tinyint, -NumTablesLoaded tinyint, -RunCompleted char(1) -)engine=infinidb; - - -create table BulkTestStats ( -TestRunID char(14), -TableName varchar(25), -SourceFile varchar(25), -LoadTime int, -RowsProcessed bigint, -RowsInserted bigint -)engine=infinidb; - - - - - - - diff --git a/utils/scenarios/perf/sh/pfCopyResults.sh b/utils/scenarios/perf/sh/pfCopyResults.sh deleted file mode 100755 index e1e68453e..000000000 --- a/utils/scenarios/perf/sh/pfCopyResults.sh +++ /dev/null @@ -1,6 +0,0 @@ -#! /bin/sh -# -#$1 = testRunID -# -host=`hostname -s` -smbclient //calweb/perf -Wcalpont -Uoamuser%Calpont1 -D "/results/${host}" -c "lcd /root/genii/testResult;recurse;prompt OFF;mput $1" diff --git a/utils/scenarios/perf/sh/pfExeSQLscript.sh b/utils/scenarios/perf/sh/pfExeSQLscript.sh deleted file mode 100755 index 8b8064a51..000000000 --- a/utils/scenarios/perf/sh/pfExeSQLscript.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash -# -# This script does the following: -# 1) Executes supplied SQL script on test database and captures output to file -# -#$1 = Test database name -#$2 = SQL script to execute - if [ $# -lt 2 ] - then - echo***** Syntax: pfExeSQLScript.sh testDBName scriptFileName - exit 1 - fi -# - logFileName=`basename $2` -# -# Execute script on reference database -# -# mysql $3 -h$2 -u$4 -p$5 <$6 > $logFileName.test.log -# -# Execute script on test database -# - /usr/local/mariadb/columnstore/mysql/bin/mysql --defaults-extra-file=/usr/local/mariadb/columnstore/mysql/my.cnf -u root $1 <$2 > $logFileName.test.log - exit 0 diff --git a/utils/scenarios/perf/sh/pfExeShellScript.sh b/utils/scenarios/perf/sh/pfExeShellScript.sh deleted file mode 100755 index 80b2bdca1..000000000 --- a/utils/scenarios/perf/sh/pfExeShellScript.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash -# -# This script does the following: diff --git a/utils/scenarios/perf/sh/pfGetGroupQueries.sh b/utils/scenarios/perf/sh/pfGetGroupQueries.sh deleted file mode 100755 index 07ad51a70..000000000 --- a/utils/scenarios/perf/sh/pfGetGroupQueries.sh +++ /dev/null @@ -1,91 +0,0 @@ -#!/bin/bash -# -function getGroupQueries { - gid=$1 - tt=$2 - dt=$3 -# - groupNum=$gid - idx=`cat idx.txt` - cat /root/genii/utils/scenarios/perf/data/perfGrpQueryList.txt |grep "#$gid#" | - while read gid queryNum fileName; do - ((idx++)) - if [ $tt = "D" ] || [ $tt = "C" ]; then - echo select \'CalpontFlush \( $idx $queryNum \)\', calflushcache\(\)\; >> $scriptFileName - fi -# - if [ $dt = "M" ]; then - echo select \'CalpontStart \( $idx $queryNum \)\', now\(\)\; >> $scriptFileName - else - echo execute calpont.caltraceon\(9\)\; >> $scriptFileName - echo select \'CalpontStart \( $idx $queryNum \)\', sysdate from dual\; >> $scriptFileName - fi -# - cat /root/genii/utils/scenarios/perf/sql/$groupNum/$fileName >>$scriptFileName -# - if [ $dt = "M" ]; then - echo select 
\'CalpontEnd \( $idx $queryNum \)\', now\(\)\; >> $scriptFileName - echo select \'CalpontStats \( $idx $queryNum \)\', calgetstats\(\)\; >> $scriptFileName - else - echo select \'CalpontEnd \( $idx $queryNum \)\', sysdate from dual\; >> $scriptFileName - echo select \'CalpontStats \( $idx $queryNum \)\', calpont.getstats\(\) from dual\; >> $scriptFileName - fi -# - if [ $tt = "C" ]; then - ((idx++)) - if [ $dt = "M" ]; then - echo select \'CalpontStart \( $idx $queryNum \)\', now\(\)\; >> $scriptFileName - else - echo select \'CalpontStart \( $idx $queryNum \)\', sysdate from dual\; >> $scriptFileName - fi - cat /root/genii/utils/scenarios/perf/sql/$groupNum/$fileName >>$scriptFileName - if [ $dt = "M" ]; then - echo select \'CalpontEnd \( $idx $queryNum \)\', now\(\)\; >> $scriptFileName - echo select \'CalpontStats \( $idx $queryNum \)\', calgetstats\(\)\; >> $scriptFileName - else - echo select \'CalpontEnd \( $idx $queryNum \)\', sysdate from dual\; >> $scriptFileName - echo select \'CalpontStats \( $idx $queryNum \)\', calpont.getstats\(\) from dual\; >> $scriptFileName - fi - fi - if [ $tt = "M" ]; then - echo ^ >> $scriptFileName - fi - echo $idx > idx.txt - done -} - if [ $# -lt 3 ]; then - echo ***** pfGetGroupQueries.sh queryGroupNumber testType dbmsType - echo testType=S Stream run. No primproc disk cache flush - echo testType=D Disk run. Flush cache before executing each query - echo testType=C Cache run. Flush cache before 1st execution. No flush before 2nd execution. - echo testType=M Stream run. All queries from groups 1 to 5 - exit 1 - fi -# - grpID=$1 - testType=$2 - dbmsType=$3 -# - if [ $testType = M ]; then - scriptFileName=GroupQueryMixed.sql - else - scriptFileName=GroupQuery$grpID.sql - fi - rm -rf $scriptFileName -# - groupNum=$groupNum - echo $idx >idx.txt -# -# - case "$testType" in - S|D|C) - getGroupQueries $grpID $testType $dbmsType - ;; - M) - for (( g=1; g<=5; g++)); do - getGroupQueries $g $testType $dbmsType - done - ;; - esac - rm -f idx.txt - exit 0 diff --git a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.3.1.sql b/utils/scenarios/perf/source/iteration17queries/Deferred/q4.3.1.sql deleted file mode 100755 index 0d1a979dc..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.3.1.sql +++ /dev/null @@ -1 +0,0 @@ -Select o_orderdate, o_custkey from orders where o_orderkey in (select l_orderkey from lineitem where l_partkey < 100000); diff --git a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.3.2.sql b/utils/scenarios/perf/source/iteration17queries/Deferred/q4.3.2.sql deleted file mode 100755 index 089072a98..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.3.2.sql +++ /dev/null @@ -1 +0,0 @@ -Select o_orderdate, o_custkey from orders where exists (select * from lineitem where l_partkey < 100000 and l_orderkey = o_orderkey); diff --git a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.3.3.sql b/utils/scenarios/perf/source/iteration17queries/Deferred/q4.3.3.sql deleted file mode 100755 index 8c74c7d4f..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.3.3.sql +++ /dev/null @@ -1 +0,0 @@ -Select o_orderdate, o_custkey from orders where o_orderkey not in (select l_orderkey from lineitem where l_partkey < 100000); diff --git a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.3.4.sql b/utils/scenarios/perf/source/iteration17queries/Deferred/q4.3.4.sql deleted file mode 100755 index 97b61b944..000000000 --- 
a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.3.4.sql +++ /dev/null @@ -1 +0,0 @@ -select o_orderdate, o_custkey from orders where not exists (select * from lineitem where l_partkey < 100000 and l_orderkey = o_orderkey); diff --git a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.4.1.sql b/utils/scenarios/perf/source/iteration17queries/Deferred/q4.4.1.sql deleted file mode 100755 index 136ea3b11..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.4.1.sql +++ /dev/null @@ -1 +0,0 @@ -Select o_orderdate, o_custkey from orders where o_custkey < 1000 and o_orderkey in (select l_orderkey from lineitem where l_partkey < 100000); diff --git a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.4.2.sql b/utils/scenarios/perf/source/iteration17queries/Deferred/q4.4.2.sql deleted file mode 100755 index b00debd52..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.4.2.sql +++ /dev/null @@ -1 +0,0 @@ -Select o_orderdate, o_custkey from orders where o_custkey < 1000 and exists (select * from lineitem where l_partkey < 100000 and l_orderkey = o_orderkey); diff --git a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.4.3.sql b/utils/scenarios/perf/source/iteration17queries/Deferred/q4.4.3.sql deleted file mode 100755 index ddedf6a23..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.4.3.sql +++ /dev/null @@ -1 +0,0 @@ -Select o_orderdate, o_custkey from orders where o_custkey < 1000 and o_orderkey not in (select l_orderkey from lineitem where l_partkey < 100000); diff --git a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.4.4.sql b/utils/scenarios/perf/source/iteration17queries/Deferred/q4.4.4.sql deleted file mode 100755 index aaba3c7d8..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.4.4.sql +++ /dev/null @@ -1 +0,0 @@ -Select o_orderdate, o_custkey from orders where o_custkey < 1000 and not exists (select * from lineitem where l_partkey < 100000 and l_orderkey = o_orderkey); diff --git a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.5.sql b/utils/scenarios/perf/source/iteration17queries/Deferred/q4.5.sql deleted file mode 100755 index a04d8ee13..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.5.sql +++ /dev/null @@ -1 +0,0 @@ -Select orders.* from lineitem, orders where o_custkey < 100000 and l_partkey < 10000 and l_orderkey = o_orderkey; diff --git a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.7.1.sql b/utils/scenarios/perf/source/iteration17queries/Deferred/q4.7.1.sql deleted file mode 100755 index cd9e8461c..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.7.1.sql +++ /dev/null @@ -1 +0,0 @@ -Select l_receiptdate - l_shipdate from lineitem where l_orderkey < 1000000 and l_commitdate < l_receiptdate; diff --git a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.7.2.sql b/utils/scenarios/perf/source/iteration17queries/Deferred/q4.7.2.sql deleted file mode 100755 index f431f69a4..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Deferred/q4.7.2.sql +++ /dev/null @@ -1 +0,0 @@ -Select .x. 
from lineitem where l_orderkey < 1000000; diff --git a/utils/scenarios/perf/source/iteration17queries/Deferred/q5.1.1.original.sql b/utils/scenarios/perf/source/iteration17queries/Deferred/q5.1.1.original.sql deleted file mode 100755 index 754ad010b..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Deferred/q5.1.1.original.sql +++ /dev/null @@ -1,11 +0,0 @@ -select p_brand, sum(l_quantity) tot_qty, - avg(l_quantity) avg_qty, - sum(l_extendedprice * (1- l_discount )) tot_price, - avg(l_extendedprice * (1- l_discount )) avg_price, count(*) -from lineitem, part -where l_shipdate between '1996-04-01' and '1996-04-14' -and l_partkey = p_partkey -and p_size = 5 -group by rollup( p_brand) -order by 1; - diff --git a/utils/scenarios/perf/source/iteration17queries/Deferred/q5.3.1.original.sql b/utils/scenarios/perf/source/iteration17queries/Deferred/q5.3.1.original.sql deleted file mode 100755 index 4f811349d..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Deferred/q5.3.1.original.sql +++ /dev/null @@ -1,20 +0,0 @@ -select revenue_month, - decode(district,32768,' n/a',null,' ','District ' || district) district, - decode( trunc(max(latest_date),'MM') + interval '1' month - interval '1' day, - max(latest_date),null, 'Thru-' || to_char(max(latest_date),'mm-dd-yyyy')) latest_date, - sales_items, - total_revenue, - max(latest_date) -120 max_date, - decode(sign(decode(district,32768,5000000000,null,5000000000,1600000000)-total_revenue),1,'','Revenue Exceeds Threshold') Trend_Alert -from ( -select to_char(l_shipdate,'YYYY-MM') Revenue_Month, - l_district district, - max(l_shipdate) Latest_date, - sum(l_extendedprice) Total_Revenue, count(*) Sales_items -from v_load_lines -where l_shipdate >= trunc(to_date('&max_date'),'MM') -group by to_char(l_shipdate,'YYYY-MM'), l_district -ORDER BY 1,2) -group by revenue_month, district, total_revenue, sales_items -order by 1,2; - diff --git a/utils/scenarios/perf/source/iteration17queries/Deferred/q5.4.1.original.sql b/utils/scenarios/perf/source/iteration17queries/Deferred/q5.4.1.original.sql deleted file mode 100755 index 4ff8ab21e..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Deferred/q5.4.1.original.sql +++ /dev/null @@ -1,9 +0,0 @@ -select to_char(l_shipdate,'yyyy-mm'), sum(l_extendedprice), avg(p_retailprice) -from lineitem, part -where l_shipdate between '1993-01-01' and '1994-06-30' -and l_partkey = p_partkey -and p_retailprice >= 2095 -and p_size <= 5 -group by rollup( to_char(l_shipdate,'yyyy-mm')) -order by 1,2; - diff --git a/utils/scenarios/perf/source/iteration17queries/Group1/grp1_1_test.sql b/utils/scenarios/perf/source/iteration17queries/Group1/grp1_1_test.sql deleted file mode 100644 index 5ef6c492b..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group1/grp1_1_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -Select l_shipdate, l_orderkey, l_partkey from lineitem where l_orderkey < 1000000 order by 1,2,3; -select calgetstats(); -select now(); -Select l_shipdate, l_orderkey, l_partkey from lineitem where l_orderkey < 1000000 order by 1,2,3; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group1/grp1_2_test.sql b/utils/scenarios/perf/source/iteration17queries/Group1/grp1_2_test.sql deleted file mode 100644 index c7a9f4e55..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group1/grp1_2_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -Select * from lineitem where l_orderkey < 1000000 order by l_orderkey, l_linenumber; -select calgetstats(); -select 
now(); -Select * from lineitem where l_orderkey < 1000000 order by l_orderkey, l_linenumber; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group1/q2.3.1.1.1.sql b/utils/scenarios/perf/source/iteration17queries/Group1/q2.3.1.1.1.sql deleted file mode 100755 index 5ae6c28da..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group1/q2.3.1.1.1.sql +++ /dev/null @@ -1 +0,0 @@ -Select l_shipdate, l_orderkey, l_partkey from lineitem where l_orderkey < 1000000 order by 1, 2, 3; diff --git a/utils/scenarios/perf/source/iteration17queries/Group1/q2.3.1.1.2.sql b/utils/scenarios/perf/source/iteration17queries/Group1/q2.3.1.1.2.sql deleted file mode 100755 index 9d4e0343c..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group1/q2.3.1.1.2.sql +++ /dev/null @@ -1 +0,0 @@ -Select * from lineitem where l_orderkey < 1000000 order by l_orderkey, l_linenumber; diff --git a/utils/scenarios/perf/source/iteration17queries/Group2/grp2_1_test.sql b/utils/scenarios/perf/source/iteration17queries/Group2/grp2_1_test.sql deleted file mode 100644 index 77bd499b2..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group2/grp2_1_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -select l_shipdate, l_partkey from lineitem where l_orderkey < 1000000 order by 1, 2; -select calgetstats(); -select now(); -select l_shipdate, l_partkey from lineitem where l_orderkey < 1000000 order by 1, 2; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group2/q4.2.sql b/utils/scenarios/perf/source/iteration17queries/Group2/q4.2.sql deleted file mode 100755 index 935a0eb28..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group2/q4.2.sql +++ /dev/null @@ -1 +0,0 @@ -select l_shipdate, l_partkey from lineitem where l_orderkey < 1000000 order by 1, 2; diff --git a/utils/scenarios/perf/source/iteration17queries/Group3/grp3_1_test.sql b/utils/scenarios/perf/source/iteration17queries/Group3/grp3_1_test.sql deleted file mode 100644 index c9cb778e6..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group3/grp3_1_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -Select o_orderdate, o_custkey from lineitem, orders where l_partkey < 100000 and l_orderkey = o_orderkey order by 1, 2; -select calgetstats(); -select now(); -Select o_orderdate, o_custkey from lineitem, orders where l_partkey < 100000 and l_orderkey = o_orderkey order by 1, 2; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group3/grp3_2_test.sql b/utils/scenarios/perf/source/iteration17queries/Group3/grp3_2_test.sql deleted file mode 100644 index bfcc02043..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group3/grp3_2_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -Select o_orderdate, o_custkey from lineitem, orders where o_custkey < 1000 and l_orderkey = o_orderkey order by 1, 2; -select calgetstats(); -select now(); -Select o_orderdate, o_custkey from lineitem, orders where o_custkey < 1000 and l_orderkey = o_orderkey order by 1, 2; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group3/grp3_3_test.sql b/utils/scenarios/perf/source/iteration17queries/Group3/grp3_3_test.sql deleted file mode 100644 index 0ea812c78..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group3/grp3_3_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -select c_custkey, o_orderkey from customer left outer join orders on c_custkey = o_custkey where c_custkey < 10000 and 
c_nationkey = 4 order by 1, 2; -select calgetstats(); -select now(); -select c_custkey, o_orderkey from customer left outer join orders on c_custkey = o_custkey where c_custkey < 10000 and c_nationkey = 4 order by 1, 2; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group3/grp3_4_test.sql b/utils/scenarios/perf/source/iteration17queries/Group3/grp3_4_test.sql deleted file mode 100644 index adc7cca0a..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group3/grp3_4_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -select c_custkey, o_orderkey from orders right outer join customer on c_custkey = o_custkey where c_custkey < 10000 and c_nationkey = 4 order by 1, 2; -select calgetstats(); -select now(); -select c_custkey, o_orderkey from orders right outer join customer on c_custkey = o_custkey where c_custkey < 10000 and c_nationkey = 4 order by 1, 2; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group3/grp3_5_test.sql b/utils/scenarios/perf/source/iteration17queries/Group3/grp3_5_test.sql deleted file mode 100644 index ff50e9f6a..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group3/grp3_5_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -Select * from lineitem, orders where o_custkey < 100000 and l_partkey < 10000 and l_orderkey = o_orderkey order by l_orderkey, l_linenumber; -select calgetstats(); -select now(); -Select * from lineitem, orders where o_custkey < 100000 and l_partkey < 10000 and l_orderkey = o_orderkey order by l_orderkey, l_linenumber; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group3/q4.3.sql b/utils/scenarios/perf/source/iteration17queries/Group3/q4.3.sql deleted file mode 100755 index 4061a45ff..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group3/q4.3.sql +++ /dev/null @@ -1 +0,0 @@ -Select o_orderdate, o_custkey from lineitem, orders where l_partkey < 100000 and l_orderkey = o_orderkey order by 1, 2; diff --git a/utils/scenarios/perf/source/iteration17queries/Group3/q4.4.sql b/utils/scenarios/perf/source/iteration17queries/Group3/q4.4.sql deleted file mode 100755 index ff14283ad..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group3/q4.4.sql +++ /dev/null @@ -1 +0,0 @@ -Select o_orderdate, o_custkey from orders,lineitem where o_custkey < 1000 and o_orderkey = l_orderkey order by 1, 2; diff --git a/utils/scenarios/perf/source/iteration17queries/Group3/q4.6.1.sql b/utils/scenarios/perf/source/iteration17queries/Group3/q4.6.1.sql deleted file mode 100755 index 15bf67a70..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group3/q4.6.1.sql +++ /dev/null @@ -1 +0,0 @@ -select c_custkey, o_orderkey from customer left outer join orders on c_custkey = o_custkey where c_custkey < 10000 and c_nationkey = 4 order by 1, 2; diff --git a/utils/scenarios/perf/source/iteration17queries/Group3/q4.6.2.sql b/utils/scenarios/perf/source/iteration17queries/Group3/q4.6.2.sql deleted file mode 100755 index e24218777..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group3/q4.6.2.sql +++ /dev/null @@ -1 +0,0 @@ -select c_custkey, o_orderkey from orders right outer join customer on c_custkey = o_custkey where c_custkey < 10000 and c_nationkey = 4 order by 1, 2; diff --git a/utils/scenarios/perf/source/iteration17queries/Group3/q4.6.sql b/utils/scenarios/perf/source/iteration17queries/Group3/q4.6.sql deleted file mode 100755 index b6db75547..000000000 --- 
a/utils/scenarios/perf/source/iteration17queries/Group3/q4.6.sql +++ /dev/null @@ -1 +0,0 @@ -Select * from lineitem, orders where o_custkey < 100000 and l_partkey < 10000 and l_orderkey = o_orderkey order by l_orderkey, l_linenumber; diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_10_test.sql b/utils/scenarios/perf/source/iteration17queries/Group4/grp4_10_test.sql deleted file mode 100644 index dc562bd9e..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_10_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -Select l_shipmode, max(l_shipdate), min(l_shipdate) from lineitem where l_orderkey < 1000000 group by l_shipmode order by 1; -select calgetstats(); -select now(); -Select l_shipmode, max(l_shipdate), min(l_shipdate) from lineitem where l_orderkey < 1000000 group by l_shipmode order by 1; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_11_test.sql b/utils/scenarios/perf/source/iteration17queries/Group4/grp4_11_test.sql deleted file mode 100644 index 367e52416..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_11_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -Select o_orderpriority, min(o_orderstatus), max(o_orderstatus) from orders where o_orderkey < 1000000 group by o_orderpriority order by o_orderpriority; -select calgetstats(); -select now(); -Select o_orderpriority, min(o_orderstatus), max(o_orderstatus) from orders where o_orderkey < 1000000 group by o_orderpriority order by o_orderpriority; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_12_test.sql b/utils/scenarios/perf/source/iteration17queries/Group4/grp4_12_test.sql deleted file mode 100644 index 9263ed205..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_12_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -Select o_shippriority, sum(o_totalprice), avg(o_totalprice), count(*) from orders where o_orderkey < 1000000 group by o_shippriority; -select calgetstats(); -select now(); -Select o_shippriority, sum(o_totalprice), avg(o_totalprice), count(*) from orders where o_orderkey < 1000000 group by o_shippriority; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_13_test.sql b/utils/scenarios/perf/source/iteration17queries/Group4/grp4_13_test.sql deleted file mode 100644 index 361240048..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_13_test.sql +++ /dev/null @@ -1,45 +0,0 @@ -select - l_returnflag, - l_linestatus, - sum(l_quantity) as sum_qty, - sum(l_extendedprice) as sum_base_price, - avg(l_quantity) as avg_qty, - avg(l_extendedprice) as avg_price, - avg(l_discount) as avg_disc, - count(*) as count_order -from - lineitem -where - l_shipdate <= date '1998-09-26' - -group by l_returnflag, - l_linestatus -order by - l_returnflag, - l_linestatus; -select calgetstats(); -select now(); -select - l_returnflag, - l_linestatus, - sum(l_quantity) as sum_qty, - sum(l_extendedprice) as sum_base_price, - avg(l_quantity) as avg_qty, - avg(l_extendedprice) as avg_price, - avg(l_discount) as avg_disc, - count(*) as count_order -from - lineitem -where - l_shipdate <= date '1998-09-26' - -group by l_returnflag, - l_linestatus -order by - l_returnflag, - l_linestatus; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_14_test.sql b/utils/scenarios/perf/source/iteration17queries/Group4/grp4_14_test.sql deleted file mode 
100644 index ee32b56b5..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_14_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -Select l_shipdate Revenue_day, l_discount district, max(l_shipdate) Latest_date, sum(l_extendedprice) Total_Revenue, count(*) Sales_Items from lineitem group by l_shipdate, l_discount order by 1,2; -select calgetstats(); -select now(); -Select l_shipdate Revenue_day, l_discount district, max(l_shipdate) Latest_date, sum(l_extendedprice) Total_Revenue, count(*) Sales_Items from lineitem group by l_shipdate, l_discount order by 1,2; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_1_test.sql b/utils/scenarios/perf/source/iteration17queries/Group4/grp4_1_test.sql deleted file mode 100644 index 542c0dc42..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_1_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -Select min(l_suppkey) from lineitem; -select calgetstats(); -select now(); -Select min(l_suppkey) from lineitem; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_2_test.sql b/utils/scenarios/perf/source/iteration17queries/Group4/grp4_2_test.sql deleted file mode 100644 index a0480c48a..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_2_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -Select max(l_shipdate) from lineitem; -select calgetstats(); -select now(); -Select max(l_shipdate) from lineitem; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_3_test.sql b/utils/scenarios/perf/source/iteration17queries/Group4/grp4_3_test.sql deleted file mode 100644 index 0da953421..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_3_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -Select sum(l_orderkey) from lineitem where l_suppkey < 100000; -select calgetstats(); -select now(); -Select sum(l_orderkey) from lineitem where l_suppkey < 100000; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_4_test.sql b/utils/scenarios/perf/source/iteration17queries/Group4/grp4_4_test.sql deleted file mode 100644 index 3db0ef0a7..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_4_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -Select avg(l_extendedprice) from lineitem; -select calgetstats(); -select now(); -Select avg(l_extendedprice) from lineitem; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_5_test.sql b/utils/scenarios/perf/source/iteration17queries/Group4/grp4_5_test.sql deleted file mode 100644 index e21740a6d..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_5_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -Select count(*) from lineitem where l_orderkey < 1000000; -select calgetstats(); -select now(); -Select count(*) from lineitem where l_orderkey < 1000000; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_6_test.sql b/utils/scenarios/perf/source/iteration17queries/Group4/grp4_6_test.sql deleted file mode 100644 index 4ba59d402..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_6_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -Select max(l_shipdate) from lineitem where l_orderkey < 1000000; -select calgetstats(); -select now(); -Select max(l_shipdate) from lineitem where l_orderkey < 1000000; -select calgetstats(); -quit - - - diff 
--git a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_7_test.sql b/utils/scenarios/perf/source/iteration17queries/Group4/grp4_7_test.sql deleted file mode 100644 index 582b6ca52..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_7_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -Select min(o_orderstatus), max(o_orderstatus) from orders where o_orderkey < 1000000; -select calgetstats(); -select now(); -Select min(o_orderstatus), max(o_orderstatus) from orders where o_orderkey < 1000000; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_8_test.sql b/utils/scenarios/perf/source/iteration17queries/Group4/grp4_8_test.sql deleted file mode 100644 index 60278cc57..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_8_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -Select sum(o_totalprice), avg(o_totalprice), count(*) from orders where o_orderkey < 1000000; -select calgetstats(); -select now(); -Select sum(o_totalprice), avg(o_totalprice), count(*) from orders where o_orderkey < 1000000; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_9_test.sql b/utils/scenarios/perf/source/iteration17queries/Group4/grp4_9_test.sql deleted file mode 100644 index 2fcd209fb..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/grp4_9_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -Select l_quantity, count(*) from lineitem where l_orderkey < 1000000 group by l_quantity order by l_quantity; -select calgetstats(); -select now(); -Select l_quantity, count(*) from lineitem where l_orderkey < 1000000 group by l_quantity order by l_quantity; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/q2.3.4.1.sql b/utils/scenarios/perf/source/iteration17queries/Group4/q2.3.4.1.sql deleted file mode 100755 index 692770771..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/q2.3.4.1.sql +++ /dev/null @@ -1 +0,0 @@ -Select min(l_suppkey) from lineitem; diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/q2.3.4.2.sql b/utils/scenarios/perf/source/iteration17queries/Group4/q2.3.4.2.sql deleted file mode 100755 index 683dddea2..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/q2.3.4.2.sql +++ /dev/null @@ -1 +0,0 @@ -Select max(l_shipdate) from lineitem; diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/q2.3.4.3.sql b/utils/scenarios/perf/source/iteration17queries/Group4/q2.3.4.3.sql deleted file mode 100755 index cd920adae..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/q2.3.4.3.sql +++ /dev/null @@ -1 +0,0 @@ -Select sum(l_orderkey) from lineitem where l_suppkey < 100000; diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/q2.3.4.4.sql b/utils/scenarios/perf/source/iteration17queries/Group4/q2.3.4.4.sql deleted file mode 100755 index aae97d63f..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/q2.3.4.4.sql +++ /dev/null @@ -1 +0,0 @@ -Select avg(l_extendedprice) from lineitem; diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.1.sql b/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.1.sql deleted file mode 100755 index c8a040dab..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.1.sql +++ /dev/null @@ -1 +0,0 @@ -Select count(*) from lineitem where l_orderkey < 1000000; diff --git 
a/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.2.sql b/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.2.sql deleted file mode 100755 index 09cc7a021..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.2.sql +++ /dev/null @@ -1 +0,0 @@ -Select max(l_shipdate) from lineitem where l_orderkey < 1000000; diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.3.sql b/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.3.sql deleted file mode 100755 index c602223bf..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.3.sql +++ /dev/null @@ -1 +0,0 @@ -Select min(o_orderstatus), max(o_orderstatus) from orders where o_orderkey < 1000000; diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.4.sql b/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.4.sql deleted file mode 100755 index a236b16e5..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.4.sql +++ /dev/null @@ -1 +0,0 @@ -Select sum(o_totalprice), avg(o_totalprice), count(*) from orders where o_orderkey < 1000000; diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.5.sql b/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.5.sql deleted file mode 100755 index 503faa7d7..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.5.sql +++ /dev/null @@ -1 +0,0 @@ -Select l_quantity, count(*) from lineitem where l_orderkey < 1000000 group by l_quantity order by l_quantity; diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.6.sql b/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.6.sql deleted file mode 100755 index 95485cbf1..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.6.sql +++ /dev/null @@ -1 +0,0 @@ -Select l_shipmode, max(l_shipdate), min(l_shipdate) from lineitem where l_orderkey < 1000000 group by l_shipmode order by 1; diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.7.sql b/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.7.sql deleted file mode 100755 index 80a788e34..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.7.sql +++ /dev/null @@ -1 +0,0 @@ -Select o_orderpriority, min(o_orderstatus), max(o_orderstatus) from orders where o_orderkey < 1000000 group by o_orderpriority order by o_orderpriority; diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.8.sql b/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.8.sql deleted file mode 100755 index a283c0bbf..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.8.sql +++ /dev/null @@ -1 +0,0 @@ -Select o_shippriority, sum(o_totalprice), avg(o_totalprice), count(*) from orders where o_orderkey < 1000000 group by o_shippriority; diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.9.sql b/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.9.sql deleted file mode 100755 index 7f53f8a96..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.9.sql +++ /dev/null @@ -1,19 +0,0 @@ -select - l_returnflag, - l_linestatus, - sum(l_quantity) as sum_qty, - sum(l_extendedprice) as sum_base_price, - avg(l_quantity) as avg_qty, - avg(l_extendedprice) as avg_price, - avg(l_discount) as avg_disc, - count(*) as count_order -from - lineitem -where - l_shipdate <= date '1998-09-26' - -group by l_returnflag, - l_linestatus -order by - l_returnflag, - l_linestatus; diff --git 
a/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.9_formatted.sql b/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.9_formatted.sql deleted file mode 100755 index 674d8aa10..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/q4.8.9_formatted.sql +++ /dev/null @@ -1,18 +0,0 @@ -select - l_returnflag, - l_linestatus, - sum(l_quantity) as sum_qty, - sum(l_extendedprice) as sum_base_price, - avg(l_quantity) as avg_qty, - avg(l_extendedprice) as avg_price, - avg(l_discount) as avg_disc, - count(*) as count_order -from - lineitem -where - l_shipdate <= date '1998-09-26' -group by l_returnflag, - l_linestatus -order by - l_returnflag, - l_linestatus; diff --git a/utils/scenarios/perf/source/iteration17queries/Group4/q5.3.1.sql b/utils/scenarios/perf/source/iteration17queries/Group4/q5.3.1.sql deleted file mode 100755 index 3eb2914b4..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group4/q5.3.1.sql +++ /dev/null @@ -1,9 +0,0 @@ -select l_shipdate Revenue_day, - l_discount district, - max(l_shipdate) Latest_date, - sum(l_extendedprice) Total_Revenue, -count(*) Sales_items -from lineitem -group by l_shipdate, l_discount -order by 1,2; - diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_12_test.sql b/utils/scenarios/perf/source/iteration17queries/Group5/grp5_12_test.sql deleted file mode 100644 index c4e953499..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_12_test.sql +++ /dev/null @@ -1,11 +0,0 @@ -select l_shipdate, l_suppkey, l_quantity, l_extendedprice, l_comment -from lineitem where l_orderkey = 6000000000 order by 1, 2, 3, 4, 5; -select calgetstats(); -select now(); -select l_shipdate, l_suppkey, l_quantity, l_extendedprice, l_comment -from lineitem where l_orderkey = 6000000000 order by 1, 2, 3, 4, 5; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_13_test.sql b/utils/scenarios/perf/source/iteration17queries/Group5/grp5_13_test.sql deleted file mode 100644 index b9be6dbff..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_13_test.sql +++ /dev/null @@ -1,19 +0,0 @@ -select max(l_orderkey), max(l_partkey), max(l_suppkey), count(*) from lineitem -where l_partkey < 25000000 -and l_suppkey < 1250000 -and l_orderkey < 100000000 -and l_linenumber = 4 -and l_quantity <= 5; -select calgetstats(); -select now(); -select max(l_orderkey), max(l_partkey), max(l_suppkey), count(*) from lineitem -where l_partkey < 25000000 -and l_suppkey < 1250000 -and l_orderkey < 100000000 -and l_linenumber = 4 -and l_quantity <= 5; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_14_test.sql b/utils/scenarios/perf/source/iteration17queries/Group5/grp5_14_test.sql deleted file mode 100644 index 13e2a9521..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_14_test.sql +++ /dev/null @@ -1,20 +0,0 @@ -select l_shipdate, sum(l_extendedprice), avg(p_retailprice) -from part, lineitem -where l_shipdate between '1993-01-01' and '1994-06-30' -and p_retailprice >= 2095 -and p_size <= 5 -and p_partkey = l_partkey -group by l_shipdate -order by 1; -Select calgetstats(); -Select now(); -select l_shipdate, sum(l_extendedprice), avg(p_retailprice) -from part, lineitem -where l_shipdate between '1993-01-01' and '1994-06-30' -and p_retailprice >= 2095 -and p_size <= 5 -and p_partkey = l_partkey -group by l_shipdate -order by 1; -Select calgetstats(); -quit diff --git 
a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_15_test.sql b/utils/scenarios/perf/source/iteration17queries/Group5/grp5_15_test.sql deleted file mode 100644 index 16ce3750c..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_15_test.sql +++ /dev/null @@ -1,13 +0,0 @@ -select count(*) from partsupp where ps_suppkey in -(1,10,100,1000,10000,1000000,1000000, - 2,20,200,2000,20000,2000000,2000000); -select calgetstats(); -select now(); -select count(*) from partsupp where ps_suppkey in -(1,10,100,1000,10000,1000000,1000000, - 2,20,200,2000,20000,2000000,2000000); -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_16_test.sql b/utils/scenarios/perf/source/iteration17queries/Group5/grp5_16_test.sql deleted file mode 100644 index 0e4675f54..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_16_test.sql +++ /dev/null @@ -1,13 +0,0 @@ -select count(*) from orders -where o_custkey in (1,10,100,1000,10000,1000000,1000000, -2,20,200,2000,20000,2000000,2000000); -select calgetstats(); -select now(); -select count(*) from orders -where o_custkey in (1,10,100,1000,10000,1000000,1000000, -2,20,200,2000,20000,2000000,2000000); -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_1_test.sql b/utils/scenarios/perf/source/iteration17queries/Group5/grp5_1_test.sql deleted file mode 100644 index e030da7c6..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_1_test.sql +++ /dev/null @@ -1,12 +0,0 @@ -select count(*) from part, lineitem - where p_retailprice < 913.65 - and p_partkey = l_suppkey - and l_shipdate < '1992-04-09'; -Select calgetstats(); -Select now(); -select count(*) from part, lineitem - where p_retailprice < 913.65 - and p_partkey = l_suppkey - and l_shipdate < '1992-04-09'; -Select calgetstats(); -quit diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_2_test.sql b/utils/scenarios/perf/source/iteration17queries/Group5/grp5_2_test.sql deleted file mode 100644 index 39f2863bf..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_2_test.sql +++ /dev/null @@ -1,12 +0,0 @@ -select count(*) from part, lineitem - where p_retailprice < 944.23 - and p_partkey = l_suppkey - and l_shipdate < '1992-04-09'; -Select calgetstats(); -Select now(); -select count(*) from part, lineitem - where p_retailprice < 944.23 - and p_partkey = l_suppkey - and l_shipdate < '1992-04-09'; -Select calgetstats(); -quit diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_3_test.sql b/utils/scenarios/perf/source/iteration17queries/Group5/grp5_3_test.sql deleted file mode 100644 index 6eb921e2a..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_3_test.sql +++ /dev/null @@ -1,12 +0,0 @@ -select count(*) from part, lineitem - where p_retailprice < 904.01 - and p_partkey = l_suppkey - and l_shipdate < '1993-04-07'; -Select calgetstats(); -Select now(); -select count(*) from part, lineitem - where p_retailprice < 904.01 - and p_partkey = l_suppkey - and l_shipdate < '1993-04-07'; -Select calgetstats(); -quit diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_4_test.sql b/utils/scenarios/perf/source/iteration17queries/Group5/grp5_4_test.sql deleted file mode 100644 index e84852536..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_4_test.sql +++ /dev/null @@ -1,12 +0,0 @@ -select count(*) from part, lineitem - 
where p_retailprice < 913.65 - and p_partkey = l_suppkey - and l_shipdate < '1993-04-07'; -Select calgetstats(); -Select now(); -select count(*) from part, lineitem - where p_retailprice < 913.65 - and p_partkey = l_suppkey - and l_shipdate < '1993-04-07'; -Select calgetstats(); -quit diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_5_test.sql b/utils/scenarios/perf/source/iteration17queries/Group5/grp5_5_test.sql deleted file mode 100644 index 5f77c3647..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_5_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -Select p_brand, sum(l_quantity) tot_qty, avg(l_quantity) avg_qty, count(*) from lineitem, part where l_shipdate between '1996-04-01' and '1996-04-14' and l_partkey = p_partkey and p_size = 5 group by p_brand order by 1; -select calgetstats(); -select now(); -Select p_brand, sum(l_quantity) tot_qty, avg(l_quantity) avg_qty, count(*) from lineitem, part where l_shipdate between '1996-04-01' and '1996-04-14' and l_partkey = p_partkey and p_size = 5 group by p_brand order by 1; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_6_test.sql b/utils/scenarios/perf/source/iteration17queries/Group5/grp5_6_test.sql deleted file mode 100644 index 2a1a07d3d..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_6_test.sql +++ /dev/null @@ -1,20 +0,0 @@ -select p_brand, sum(l_quantity) tot_qty, - avg(l_quantity) avg_qty -from lineitem, part -where l_shipdate between '1996-04-01' and '1996-04-14' -and l_partkey = p_partkey -and p_size = 5 -group by p_brand -order by 1; -Select calgetstats(); -Select now(); -select p_brand, sum(l_quantity) tot_qty, - avg(l_quantity) avg_qty -from lineitem, part -where l_shipdate between '1996-04-01' and '1996-04-14' -and l_partkey = p_partkey -and p_size = 5 -group by p_brand -order by 1; -Select calgetstats(); -quit diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_8_test.sql b/utils/scenarios/perf/source/iteration17queries/Group5/grp5_8_test.sql deleted file mode 100644 index eebc96a60..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/grp5_8_test.sql +++ /dev/null @@ -1,9 +0,0 @@ -select * from lineitem where l_orderkey = 6000000000 order by l_orderkey, l_linenumber; -select calgetstats(); -select now(); -select * from lineitem where l_orderkey = 6000000000 order by l_orderkey, l_linenumber; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/q2.3.2.1.1.sql b/utils/scenarios/perf/source/iteration17queries/Group5/q2.3.2.1.1.sql deleted file mode 100755 index b646f8c63..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/q2.3.2.1.1.sql +++ /dev/null @@ -1,4 +0,0 @@ -select count(*) from part, lineitem - where p_retailprice < 913.65 - and p_partkey = l_suppkey - and l_shipdate < '1992-04-09'; diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/q2.3.2.1.2.sql b/utils/scenarios/perf/source/iteration17queries/Group5/q2.3.2.1.2.sql deleted file mode 100755 index 500f86798..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/q2.3.2.1.2.sql +++ /dev/null @@ -1,4 +0,0 @@ -select count(*) from part, lineitem - where p_retailprice < 944.23 - and p_partkey = l_suppkey - and l_shipdate < '1992-04-09'; diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/q2.3.2.1.3.sql b/utils/scenarios/perf/source/iteration17queries/Group5/q2.3.2.1.3.sql deleted file mode 100755 
index 32b709b1d..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/q2.3.2.1.3.sql +++ /dev/null @@ -1,4 +0,0 @@ -select count(*) from part, lineitem - where p_retailprice < 904.01 - and p_partkey = l_suppkey - and l_shipdate < '1993-04-07'; diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/q2.3.2.1.4.sql b/utils/scenarios/perf/source/iteration17queries/Group5/q2.3.2.1.4.sql deleted file mode 100755 index 72b5f571f..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/q2.3.2.1.4.sql +++ /dev/null @@ -1,4 +0,0 @@ -select count(*) from part, lineitem - where p_retailprice < 913.65 - and p_partkey = l_suppkey - and l_shipdate < '1993-04-07'; diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/q2.3.6.sql b/utils/scenarios/perf/source/iteration17queries/Group5/q2.3.6.sql deleted file mode 100755 index d3b7990e7..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/q2.3.6.sql +++ /dev/null @@ -1,9 +0,0 @@ -select p_brand, sum(l_quantity) tot_qty, - avg(l_quantity) avg_qty, count(*) -from part, lineitem -where l_shipdate between '1996-04-01' and '1996-04-14' -and p_size = 5 -and p_partkey = l_partkey -group by p_brand -order by 1; - diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/q5.1.1.sql b/utils/scenarios/perf/source/iteration17queries/Group5/q5.1.1.sql deleted file mode 100755 index abfcef2fc..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/q5.1.1.sql +++ /dev/null @@ -1,8 +0,0 @@ -select p_brand, sum(l_quantity) tot_qty, - avg(l_quantity) avg_qty -from part, lineitem -where l_shipdate between '1996-04-01' and '1996-04-14' -and p_size = 5 -and p_partkey = l_partkey -group by p_brand -order by 1; diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/q5.1.2.sql b/utils/scenarios/perf/source/iteration17queries/Group5/q5.1.2.sql deleted file mode 100755 index 20e15a8d1..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/q5.1.2.sql +++ /dev/null @@ -1,9 +0,0 @@ -select n_name, l_commitdate, sum(s_acctbal) sum_bal, -sum(l_extendedprice) sum_price, min(s_suppkey) minskey, count(*) -from nation, supplier, lineitem -where s_nationkey in (1,2) -and l_commitdate between '1998-01-01' and '1998-01-07' -and n_nationkey = s_nationkey -and s_suppkey = l_suppkey -group by n_name, l_commitdate -order by 1, 2; diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/q5.1.3.sql b/utils/scenarios/perf/source/iteration17queries/Group5/q5.1.3.sql deleted file mode 100755 index 3066bf538..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/q5.1.3.sql +++ /dev/null @@ -1 +0,0 @@ -select * from lineitem where l_orderkey = 600000 order by l_orderkey, l_linenumber; diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/q5.2.1.sql b/utils/scenarios/perf/source/iteration17queries/Group5/q5.2.1.sql deleted file mode 100755 index a092414b6..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/q5.2.1.sql +++ /dev/null @@ -1,13 +0,0 @@ -select n_name, sum(l_quantity), sum(l_extendedprice), - max(c26_nbr_10), - sum(c23_nbr_10), - avg(c28_nbr_10), - min(c38_nbr_14), - max(c61_nbr_20), - count(c76_nbr_4), - avg(c89_nbr_7) -from nation, demographics200 -where c23_nbr_10 between 1950 and 2000 - and n_regionkey = 1 - and n_nationkey = c76_nbr_4 -group by n_name order by n_name; diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/q5.2.2.sql b/utils/scenarios/perf/source/iteration17queries/Group5/q5.2.2.sql 
deleted file mode 100755 index b46a849e0..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/q5.2.2.sql +++ /dev/null @@ -1,12 +0,0 @@ -select n_name, sum(l_quantity), sum(l_extendedprice), - max(c26_nbr_10), - sum(c23_nbr_10), - avg(c28_nbr_10), - min(c38_nbr_14), - max(c61_nbr_20), - count(c76_nbr_4), - avg(c89_nbr_7) -from nation, demographics200 -where c76_nbr_4 = 5 and c38_nbr_14 < 170000 - and n_nationkey = c83_nbr_6 -group by n_name order by n_name; diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/q5.2.3.sql b/utils/scenarios/perf/source/iteration17queries/Group5/q5.2.3.sql deleted file mode 100755 index 10f95e264..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/q5.2.3.sql +++ /dev/null @@ -1,10 +0,0 @@ -select count(*),max(c26_nbr_10),max(c23_nbr_10),max(c28_nbr_10), -max(c38_nbr_14),max(c61_nbr_20),max(c76_nbr_4),max(c89_nbr_7) -from demographics200 -where l_orderkey < 7000009 - and c23_nbr_10 < 70000 - and c26_nbr_10 < 700000 - and c28_nbr_10 < 7000 - and c38_nbr_14 < 7050000 - and c61_nbr_20 < 70500000 - and c76_nbr_4 < 28 ; diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/q5.2.4.sql b/utils/scenarios/perf/source/iteration17queries/Group5/q5.2.4.sql deleted file mode 100755 index 20b161998..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/q5.2.4.sql +++ /dev/null @@ -1,3 +0,0 @@ -select l_shipdate, l_suppkey, l_quantity, l_extendedprice, l_comment -from lineitem where l_orderkey = 600000 order by 1, 2, 3, 4, 5; - diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/q5.2.5.sql b/utils/scenarios/perf/source/iteration17queries/Group5/q5.2.5.sql deleted file mode 100755 index 32cf13146..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/q5.2.5.sql +++ /dev/null @@ -1,7 +0,0 @@ -select max(l_orderkey), max(l_partkey), max(l_suppkey), count(*) from lineitem -where l_partkey < 25000000 -and l_suppkey < 1250000 -and l_orderkey < 100000000 -and l_linenumber = 4 -and l_quantity <= 5; - diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/q5.4.1.sql b/utils/scenarios/perf/source/iteration17queries/Group5/q5.4.1.sql deleted file mode 100755 index 32aa140fe..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/q5.4.1.sql +++ /dev/null @@ -1,8 +0,0 @@ -select l_shipdate, sum(l_extendedprice), avg(p_retailprice) -from part, lineitem -where l_shipdate between '1993-01-01' and '1994-06-30' -and p_retailprice >= 2095 -and p_size <= 5 -and p_partkey = l_partkey -group by l_shipdate -order by 1; diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/q5.4.2.sql b/utils/scenarios/perf/source/iteration17queries/Group5/q5.4.2.sql deleted file mode 100755 index aac7a5898..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/q5.4.2.sql +++ /dev/null @@ -1,4 +0,0 @@ -select count(*) from partsupp where ps_suppkey in - (1,10,100,1000,10000,1000000,1000000, - 2,20,200,2000,20000,2000000,2000000); - diff --git a/utils/scenarios/perf/source/iteration17queries/Group5/q5.4.3.sql b/utils/scenarios/perf/source/iteration17queries/Group5/q5.4.3.sql deleted file mode 100755 index fc00d4e26..000000000 --- a/utils/scenarios/perf/source/iteration17queries/Group5/q5.4.3.sql +++ /dev/null @@ -1,4 +0,0 @@ -select count(*) from orders -where o_custkey in (1,10,100,1000,10000,1000000,1000000, - 2,20,200,2000,20000,2000000,2000000); - diff --git a/utils/scenarios/perf/source/iteration17queries/special/um_join.sql 
b/utils/scenarios/perf/source/iteration17queries/special/um_join.sql deleted file mode 100644 index a00198bd6..000000000 --- a/utils/scenarios/perf/source/iteration17queries/special/um_join.sql +++ /dev/null @@ -1,15 +0,0 @@ -select p_partkey, s_suppkey from part, lineitem, supplier -where p_partkey = l_partkey and l_suppkey = s_suppkey and - p_partkey between 0 and 50020000 and p_size between 0 and 1 and - l_partkey between 49975000 and 200000000 and l_shipdate between '1992-01-01' and '1992-04-24'; -select calgetstats(); -select now(); -select p_partkey, s_suppkey from part, lineitem, supplier -where p_partkey = l_partkey and l_suppkey = s_suppkey and - p_partkey between 0 and 50020000 and p_size between 0 and 1 and - l_partkey between 49975000 and 200000000 and l_shipdate between '1992-01-01' and '1992-04-24'; -select calgetstats(); -quit - - - diff --git a/utils/scenarios/perf/source/pf/common/buildsartimes.sh b/utils/scenarios/perf/source/pf/common/buildsartimes.sh deleted file mode 100755 index 3e254bb0f..000000000 --- a/utils/scenarios/perf/source/pf/common/buildsartimes.sh +++ /dev/null @@ -1,58 +0,0 @@ -#! /bin/bash -# -# -# clean up any leftover work files -# -rm -rf stoptimes.txt -rm -rf starttimes.txt -rm -rf sarstep.sh -rm -rf sartest.sh -rm -rf testtimes.txt -rm -rf temptimes.txt -# -# pull start and stop times for each individual test and plug them into the sar statement -# -mkdir sar -k=1 -cat startstoptimes.txt | - while read a b c d e f g h; do - starttime="$a $b $c $d" - stoptime="$e $f $g $h" - echo "LC_ALL=C sar -A -s $d -e $h -f /var/log/sa/sa$c > sar/q$k.txt" >> sarstep.sh - ((k++)) - done -chmod 755 sarstep.sh -# -# Grab the start time from the first line and save it -# -head -1 startstoptimes.txt | - while read a b c d e f g h; do - teststart="$a $b $c $d" -echo $c $d >> testtimes.txt - done -# -# Grab the end time from the last line and save it -# -tail -1 startstoptimes.txt | - while read a b c d e f g h; do - teststop="$e $f $g $h" -echo $h >> testtimes.txt - done -# -# put start stop times on one line and then pull the times and plug them into the sar statement -# -cat testtimes.txt | sed '$!N;s/\n/ /' > temptimes.txt -t=1 -cat temptimes.txt | - while read i j k; do - test="$i $j $k" -echo "LC_ALL=C sar -A -s $j -e $k -f /var/log/sa/sa$i > sar/t$t.txt" >> sartest.sh - done -chmod 755 sartest.sh -# -# Clean up all temp files -# -rm -rf stoptimes.txt -rm -rf starttimes.txt -rm -rf testtimes.txt -rm -rf temptimes.txt diff --git a/utils/scenarios/perf/source/pf/common/copyfiles.sh b/utils/scenarios/perf/source/pf/common/copyfiles.sh deleted file mode 100755 index 9fd1c695a..000000000 --- a/utils/scenarios/perf/source/pf/common/copyfiles.sh +++ /dev/null @@ -1,34 +0,0 @@ -#! 
/bin/sh -# -#/******************************************************************************* -#* Script Name: copyfiles.sh -#* Date Created: 2008.11.24 -#* Author: Stephen Cargile -#* Purpose: copy the test files to \\calweb\perf for archiving and furthter processing -#* -#* Input Parameters: -#* numUMs - number of UMs used in the test -#* numPMs - number of PMs used in the test -#* setStg - 1 or 2: number of arrays used -#* relNum - Release number -#* dbSize - Size of database -#* testNo - the number that this config has been tested -#* streamNum - the type of stream (0 or 17) -#* dirNo - the number of the current directory -#* -#******************************************************************************/ -# -# Get user input (command line parameters passed from Step2) -# -numUMs=$1 -numPMs=$2 -setStg=$3 -relNum=$4 -dbSize=$5 -testNo=$6 -streamNum=$7 -dirNo=$8 - -smbclient //calweb/perf -Wcalpont -Uoamuser%Calpont1 -c "mkdir 1${numUMs}${numPMs}_${setStg}array;cd 1${numUMs}${numPMs}_${setStg}array;mkdir rel${relNum}_${dbSize};cd rel${relNum}_${dbSize};mkdir test${testNo};cd test${testNo};mkdir s${streamNum};cd s${streamNum};mkdir run${dirNo};cd run${dirNo};mkdir exemgr;mkdir ep;mkdir sar;cd ep;prompt OFF;mput *.png;cd ..;mput *.txt;mput *.log;mput *.xml;cd sar;lcd sar;mput *" -# -# End of script diff --git a/utils/scenarios/perf/source/pf/common/exeStreamTest.sh b/utils/scenarios/perf/source/pf/common/exeStreamTest.sh deleted file mode 100755 index 993ad3a4a..000000000 --- a/utils/scenarios/perf/source/pf/common/exeStreamTest.sh +++ /dev/null @@ -1,81 +0,0 @@ -#! /bin/sh -# -#/******************************************************************************* -#* Script Name: exeStreamTest.sh -#* Date Created: 2008.11.13 -#* Author: Daniel Lee -#* Purpose: Execute a TPCH stream test. 
-#* -#* Parameter: streamNum - Stream number (0, 1, etc) -#* dbSize - Database size (1, 10, 100, 1t etc) -#* iteration - Software release iteration (15, 16, 17 etc) -#* repeatNum - Number of times to repeat the test -#* restart - before each test (Y, N) -#* -#* Modified: 2008.12.09 -#* Author: Stephen Cargile -#* Purpose: point output to new 'results' directory -#* -#* Modified: 2009.01.21 -#* Author: Stephen Cargile -#* Purpose: add functionality to collect start and stop times -#* -#******************************************************************************/ -# -#----------------------------------------------------------------------------- -# command line parameters -#----------------------------------------------------------------------------- -streamNum=$1 -dbSize=$2 -iteration=$3 -repeatNum=$4 -restart=$5 -#----------------------------------------------------------------------------- -# set variables -#----------------------------------------------------------------------------- -testID=/home/pf/auto/results/tpch${dbSize}_s${streamNum}_i${iteration}_`date +%s` -exeCommand=/home/pf/auto/tpchtest/sqlplan/tpch$dbSize/s$streamNum/script/i$iteration/tpch${dbSize}_s${streamNum}.sh -#----------------------------------------------------------------------------- -# Make test directory and change to it -#----------------------------------------------------------------------------- -cd /home/pf/auto/results/ -mkdir $testID -cd $testID -logFileName=tpch${dbSize}_s${streamNum}.log -#----------------------------------------------------------------------------- -# Loop N times to repeat the test -#----------------------------------------------------------------------------- -k=1 -while [ $k -le $repeatNum ] -do - if [ $restart == Y ] || [ $restart == y ] - then - /usr/local/mariadb/columnstore/bin/mcsadmin restartsystem y - sleep 90 - fi - mkdir $k - cd $k - cp /usr/local/mariadb/columnstore/etc/Columnstore.xml . - /usr/local/mariadb/columnstore/bin/mcsadmin getCalpontSoftware >CalpontSoftware.txt - ls -al /mnt/pm*/usr/local/mariadb/columnstore/data* > dbRoots.txt - /usr/local/mariadb/columnstore/bin/mcsadmin getProcessStatus >stackConfigBefore.txt - $exeCommand > $logFileName 2>&1 -# - if [ $streamNum == 1_7 ] - then - completed=0 - while [ $completed -lt 7 ] - do - sleep 5 - completed=`cat *scc.log |grep completed. | wc -l` - done - fi -# - /usr/local/mariadb/columnstore/bin/mcsadmin getProcessStatus >stackConfigAfter.txt - /home/pf/auto/common/extractstartstoptimes.sh - cd .. - ((k++)) -done -cd .. 
-# -# End of script diff --git a/utils/scenarios/perf/source/pf/common/execution_plan_graph_100GB_s0.sh b/utils/scenarios/perf/source/pf/common/execution_plan_graph_100GB_s0.sh deleted file mode 100755 index 8827f54de..000000000 --- a/utils/scenarios/perf/source/pf/common/execution_plan_graph_100GB_s0.sh +++ /dev/null @@ -1,44 +0,0 @@ -# no default value for build -if [ $# -lt 1 ]; then - echo "usage: $0 build [stack]" - exit -fi -build="BUILD $1" - -# default stack is qperfd02 -stack=PERFSTACK -if [ $# -eq 2 ]; then - build="BUILD $1" - stack=`echo $2 | tr '[:lower:]' '[:upper:]'` -fi - -# loop through the 22 queries -for q in q01-tpch14 q02-tpch02 q03-tpch09 q04-tpch20 q05-tpch06 q06-tpch17 q07-tpch18 q08-tpch08 q09-tpch21 q10-tpch13 q11-tpch03 q12-tpch22 q13-tpch16 q14-tpch04 q15-tpch11 q16-tpch15 q17-tpch01 q18-tpch10 q19-tpch19 q20-tpch05 q21-tpch07 q22-tpch12 -do - # convert the query to upper case - query=`echo $q | tr '[:lower:]' '[:upper:]'` - - # tpch ID for matching the dot file - qid=${q##*tpch} - - # the jobstep pair - jsdot=tpch100_s0_${qid}.hex_js.dot - jspng=tpch100_s0_${q}.png - - if [ -f $jsdot ]; then - sed -e "1a\ -t [label=\"${query}\\\l${stack}\\\l${build}\" shape=plaintext]" $jsdot | dot -Tpng -o${jspng} - else - echo "$jsdot does not exist" - fi - - # the jobstep_result pair - jsrdot=tpch100_s0_${qid}.hex_jsr.dot - jsrpng=tpch100_s0_${q}_results.png - if [ -f $jsrdot ]; then - sed "1a\ -t [label=\"${query}\\\l${stack}\\\l${build}\" shape=plaintext]" $jsrdot | dot -Tpng -o${jsrpng} - else - echo "$jsrdot does not exist" - fi -done diff --git a/utils/scenarios/perf/source/pf/common/execution_plan_graph_100GB_s7.sh b/utils/scenarios/perf/source/pf/common/execution_plan_graph_100GB_s7.sh deleted file mode 100755 index 91cf781fd..000000000 --- a/utils/scenarios/perf/source/pf/common/execution_plan_graph_100GB_s7.sh +++ /dev/null @@ -1,46 +0,0 @@ -# no default value for build -if [ $# -lt 1 ]; then - echo "usage: $0 build [stack]" - exit -fi -build="BUILD $1" - -# default stack is qperfd02 -stack=QPERFD02 -if [ $# -eq 2 ]; then - build="BUILD $1" - stack=`echo $2 | tr '[:lower:]' '[:upper:]'` -fi - -# loop through the seven streams. -for stream in 1 2 3 4 5 6 7 -do - - # loop through the 22 queries. 
- for q in 01 02 03 04 05 06 07 08 09 10 11 12 13 14 15 16 17 18 19 20 21 22 - do - echo "Processing stream $stream query $q" - jsdot=tpch100_s${stream}_${q}.hex_js.dot - jsrdot=tpch100_s${stream}_${q}.hex_jsr.dot - - jspng=tpch100_s${stream}_${q}.js.png - jsrpng=tpch100_s${stream}_${q}.jsr.png - - query=tpch100_s${stream}_${q} - - if [ -f $jsdot ]; then - sed -e "1a\ - t [label=\"${query}\\\l${stack}\\\l${build}\" shape=plaintext]" $jsdot | dot -Tpng -o${jspng} - else - echo "$jsdot does not exist" - fi - - if [ -f $jsrdot ]; then - sed "1a\ - t [label=\"${query}\\\l${stack}\\\l${build}\" shape=plaintext]" $jsrdot | dot -Tpng -o${jsrpng} - else - echo "$jsrdot does not exist" - fi - done -done - diff --git a/utils/scenarios/perf/source/pf/common/execution_plan_graph_1TB_s0.sh b/utils/scenarios/perf/source/pf/common/execution_plan_graph_1TB_s0.sh deleted file mode 100755 index 8f13ad3a9..000000000 --- a/utils/scenarios/perf/source/pf/common/execution_plan_graph_1TB_s0.sh +++ /dev/null @@ -1,44 +0,0 @@ -# no default value for build -if [ $# -lt 1 ]; then - echo "usage: $0 build [stack]" - exit -fi -build="BUILD $1" - -# default stack is qperfd02 -stack=QPERFD02 -if [ $# -eq 2 ]; then - build="BUILD $1" - stack=`echo $2 | tr '[:lower:]' '[:upper:]'` -fi - -# loop through the 22 queries -for q in q01-tpch14 q02-tpch02 q03-tpch09 q04-tpch20 q05-tpch06 q06-tpch17 q07-tpch18 q08-tpch08 q09-tpch21 q10-tpch13 q11-tpch03 q12-tpch22 q13-tpch16 q14-tpch04 q15-tpch11 q16-tpch15 q17-tpch01 q18-tpch10 q19-tpch19 q20-tpch05 q21-tpch07 q22-tpch12 -do - # convert the query to upper case - query=`echo $q | tr '[:lower:]' '[:upper:]'` - - # tpch ID for matching the dot file - qid=${q##*tpch} - - # the jobstep pair - jsdot=tpch1t_s0_${qid}.hex_js.dot - jspng=tpch1t_s0_${q}.png - - if [ -f $jsdot ]; then - sed -e "1a\ -t [label=\"${query}\\\l${stack}\\\l${build}\" shape=plaintext]" $jsdot | dot -Tpng -o${jspng} - else - echo "$jsdot does not exist" - fi - - # the jobstep_result pair - jsrdot=tpch1t_s0_${qid}.hex_jsr.dot - jsrpng=tpch1t_s0_${q}_results.png - if [ -f $jsrdot ]; then - sed "1a\ -t [label=\"${query}\\\l${stack}\\\l${build}\" shape=plaintext]" $jsrdot | dot -Tpng -o${jsrpng} - else - echo "$jsrdot does not exist" - fi -done diff --git a/utils/scenarios/perf/source/pf/common/extractandcopylogdata.sh b/utils/scenarios/perf/source/pf/common/extractandcopylogdata.sh deleted file mode 100755 index d27389495..000000000 --- a/utils/scenarios/perf/source/pf/common/extractandcopylogdata.sh +++ /dev/null @@ -1,38 +0,0 @@ -#! 
/bin/sh -# -#/******************************************************************************* -#* Script Name: copyfiles.sh -#* Date Created: 2008.11.24 -#* Author: Stephen Cargile -#* Purpose: copy the test files to \\calweb\perf for archiving and further processing -#* -#* Input Parameters: -#* numUMs - number of UMs used in the test -#* numPMs - number of PMs used in the test -#* setStg - 1 or 2: number of arrays used -#* relNum - Release number -#* dbSize - Size of database -#* testNo - the number that this config has been tested -#* streamNum - the type of stream (0 or 17) -#* dirNo - the number of the current directory -#* -#******************************************************************************/ -# -# Get user input (command line parameters passed from Step2) -# -numUMs=$1 -numPMs=$2 -setStg=$3 -relNum=$4 -dbSize=$5 -testNo=$6 -streamNum=$7 -dirNo=$8 - -awk -F'[;-]' '/Query Stats/ {print $2,"\t", $4,"\t", $6,"\t" $8,"\t" $10,"\t" $12,"\t" $14,"\t" $16,"\t" $18,"\t" $20}' tpch${dbSize}_s${streamNum}.log | sed 's/MB//' >> afile.txt - -grep "time:" tpch${dbSize}_s${streamNum}.log | cut -d" " -f3 >> time.txt - -smbclient //calweb/perf -Wcalpont -Uoamuser%Calpont1 -c "mkdir 1${numUMs}${numPMs}_${setStg}array;cd 1${numUMs}${numPMs}_${setStg}array;mkdir rel${relNum}_${dbSize};cd rel${relNum}_${dbSize};mkdir test${testNo};cd test${testNo};mkdir s${streamNum};cd s${streamNum};mkdir run${dirNo};cd run${dirNo};mkdir exemgr;mkdir ep;cd ep;prompt OFF;mput *.png;cd ..;mput *.txt;mput *.log;mput *.xml" -# -# End of script diff --git a/utils/scenarios/perf/source/pf/common/extractandcopys17logdata.sh b/utils/scenarios/perf/source/pf/common/extractandcopys17logdata.sh deleted file mode 100755 index 28c4c3fa1..000000000 --- a/utils/scenarios/perf/source/pf/common/extractandcopys17logdata.sh +++ /dev/null @@ -1,46 +0,0 @@ -#! 
/bin/sh -# -#/******************************************************************************* -#* Script Name: copyfiles.sh -#* Date Created: 2008.11.24 -#* Author: Stephen Cargile -#* Purpose: copy the test files to \\calweb\perf for archiving and further processing -#* -#* Input Parameters: -#* numUMs - number of UMs used in the test -#* numPMs - number of PMs used in the test -#* setStg - 1 or 2: number of arrays used -#* relNum - Release number -#* dbSize - Size of database -#* testNo - the number that this config has been tested -#* streamNum - the type of stream (0 or 17) -#* dirNo - the number of the current directory -#* -#******************************************************************************/ -# -# Get user input (command line parameters passed from Step2) -# -numUMs=$1 -numPMs=$2 -setStg=$3 -relNum=$4 -dbSize=$5 -testNo=$6 -streamNum=$7 -dirNo=$8 - -k=1 -while [ $k -le 7 ]; do -awk -F'[;-]' '/Query Stats/ {print $2,"\t", $4,"\t", $6,"\t" $8,"\t" $10,"\t" $12,"\t" $14,"\t" $16,"\t" $18,"\t" $20}' tpch${dbSize}_s${k}_scc.log | sed 's/MB//' >> afile_s${k}.txt -((k++)) -done - -t=1 -while [ $t -le 7 ]; do -grep "time:" tpch${dbSize}_s${t}_scc.log | cut -d" " -f3 >> time_s${t}.txt -((t++)) -done - -smbclient //calweb/perf -Wcalpont -Uoamuser%Calpont1 -c "mkdir 1${numUMs}${numPMs}_${setStg}array;cd 1${numUMs}${numPMs}_${setStg}array;mkdir rel${relNum}_${dbSize};cd rel${relNum}_${dbSize};mkdir test${testNo};cd test${testNo};mkdir s${streamNum};cd s${streamNum};mkdir run${dirNo};cd run${dirNo};mkdir exemgr;mkdir ep;cd ep;prompt OFF;mput *.png;cd ..;mput *.txt;mput *.log;mput *.xml" -# -# End of script diff --git a/utils/scenarios/perf/source/pf/common/extractlogdata.sh b/utils/scenarios/perf/source/pf/common/extractlogdata.sh deleted file mode 100755 index 6477d2494..000000000 --- a/utils/scenarios/perf/source/pf/common/extractlogdata.sh +++ /dev/null @@ -1,24 +0,0 @@ -#! /bin/sh -# -#/******************************************************************************* -#* Script Name: extractlogdata.sh -#* Date Created: 2008.12.02 -#* Author: Stephen Cargile -#* Purpose: extract stats data plus time info -#* -#* Input Parameters: -#* dbSize - Size of database -#* streamNum - the type of stream (0 or 17) -#* -#******************************************************************************/ -# -# Get user input -# -dbSize=$1 -streamNum=$2 -# -awk -F'[;-]' '/Query Stats/ {print $2,"\t", $4,"\t", $6,"\t" $8,"\t" $10,"\t" $12,"\t" $14,"\t" $16,"\t" $18,"\t" $20}' tpch${dbSize}_s${streamNum}.log | sed 's/MB//' >> afile.txt - -grep "time:" tpch${dbSize}_s${streamNum}.log | cut -d" " -f3 >> time.txt - -# End of script diff --git a/utils/scenarios/perf/source/pf/common/extracts17logdata.sh b/utils/scenarios/perf/source/pf/common/extracts17logdata.sh deleted file mode 100755 index 4c4718cb7..000000000 --- a/utils/scenarios/perf/source/pf/common/extracts17logdata.sh +++ /dev/null @@ -1,30 +0,0 @@ -#! 
/bin/sh -# -#/******************************************************************************* -#* Script Name: extractlogdata.sh -#* Date Created: 2008.12.02 -#* Author: Stephen Cargile -#* Purpose: extract stats data plus time info -#* -#* Input Parameters: -#* dbSize - Size of database -#* -#******************************************************************************/ -# -# Get user input -# -dbSize=$1 -# -k=1 -while [ $k -le 7 ]; do -awk -F'[;-]' '/Query Stats/ {print $2,"\t", $4,"\t", $6,"\t" $8,"\t" $10,"\t" $12,"\t" $14,"\t" $16,"\t" $18,"\t" $20}' tpch${dbSize}_s${k}_scc.log | sed 's/MB//' >> afile_s${k}.txt -((k++)) -done - -t=1 -while [ $t -le 7 ]; do -grep "time:" tpch${dbSize}_s${t}_scc.log | cut -d" " -f3 >> time_s${t}.txt -((t++)) -done - -# End of script diff --git a/utils/scenarios/perf/source/pf/common/extractsardata.sh b/utils/scenarios/perf/source/pf/common/extractsardata.sh deleted file mode 100755 index a08484cbc..000000000 --- a/utils/scenarios/perf/source/pf/common/extractsardata.sh +++ /dev/null @@ -1,12 +0,0 @@ -#! /bin/bash -# -# -# extract sar data for each individual query or step -# -./sarstep.sh -# -# extract sar data for each individual query or step -# -./sartest.sh -# -# diff --git a/utils/scenarios/perf/source/pf/common/extractstartstoptimes.sh b/utils/scenarios/perf/source/pf/common/extractstartstoptimes.sh deleted file mode 100755 index fb826a4ce..000000000 --- a/utils/scenarios/perf/source/pf/common/extractstartstoptimes.sh +++ /dev/null @@ -1,20 +0,0 @@ -#! /bin/bash -# -# clean up previous run data -# -rm -rf stoptimes.txt -rm -rf starttimes.txt -rm -rf sarstep.txt -rm -rf sartest.txt -rm -rf testtimes.txt -rm -rf temptimes.txt -# -# Search for the line with date and time, pull it out, delete every third line (the dot file copy time) -# and then make the start and stop times to be side-by-side on one line and save the output -# -egrep -w '2008|2009|2010' tpch1*_s*.log | cut -d" " -f1,2,3,4 | sed 'n;n;d;' | sed '$!N;s/\n/ /' > startstoptimes.txt -# -# pull just the start and stop times out of each line and save them to individual files -# -#cat startstoptimes.txt | awk -F" " '{print $4}' > starttimes.txt -#cat startstoptimes.txt | awk -F" " '{print $8}' > stoptimes.txt diff --git a/utils/scenarios/perf/source/pf/common/sendPlanExec.sh b/utils/scenarios/perf/source/pf/common/sendPlanExec.sh deleted file mode 100755 index 329181b5f..000000000 --- a/utils/scenarios/perf/source/pf/common/sendPlanExec.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash -# $1 = session ID -# $2 = caltraceon() value. 9, 264 etc -# $3 = sendPlan hex file name -# -# jsfn = job step file name -# jsrfn = job step result file name -# -fn=`basename $3` -jsfn=$fn'_js.dot' -jsrfn=$fn'_jsr.dot' -# -echo ---------------------------------------------------------------------------------------- -echo sendPlan start: $3 -date -echo -/usr/local/mariadb/columnstore/bin/sendPlan -v -s$1 -t$2 $3 -echo -date -echo sendPlan end: $3 -echo -sleep 1 -echo Copying dot files...... -/bin/cp -f /mnt/tmp/jobstep.dot $jsfn -/bin/cp -f /mnt/tmp/jobstep_results.dot $jsrfn -echo Finish copying dot files. -date -echo ---------------------------------------------------------------------------------------- -echo diff --git a/utils/scenarios/perf/source/pf/common/setDBRoots.sh b/utils/scenarios/perf/source/pf/common/setDBRoots.sh deleted file mode 100755 index b30a21001..000000000 --- a/utils/scenarios/perf/source/pf/common/setDBRoots.sh +++ /dev/null @@ -1,85 +0,0 @@ -#! 
/bin/sh -# -#/******************************************************************************* -#* Script Name: setDBRoots.sh -#* Date Created: 2008.11.21 -#* Author: Stephen Cargile -#* Purpose: Set the proper device names in Columnstore.xml and then unmount & mount -#* the devices to the appropriate mount points -#* -#* Parameters: setNum - 1 or 2: dbroot set number -#* -#******************************************************************************/ -# -#----------------------------------------------------------------------------- -# Get dbroot set number from user input (command line parameter) -#----------------------------------------------------------------------------- -setNum=$1 -echo *****-----*****-----*****-----*****-----***** -echo Start - Set dbroots to RAID Configuration $setNum -echo *****-----*****-----*****-----*****-----***** -#----------------------------------------------------------------------------- -# unmount dbroots from all PMs -#----------------------------------------------------------------------------- -echo unmounting PM1 -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf3 qalpont! "umount -a" -echo unmounting PM2 -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf4 qalpont! "umount -a" -echo unmounting PM3 -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf5 qalpont! "umount -a" -echo unmounting PM4 -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf8 qalpont! "umount -a" -#----------------------------------------------------------------------------- -# save current fstab to fstab.auto then move 'set number' fstab to 'real' fstab -#----------------------------------------------------------------------------- -echo *-* -echo doing the hokey pokey with fstabs on PM1 -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf3 qalpont! "rm -f /etc/fstab.auto" -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf3 qalpont! "mv /etc/fstab /etc/fstab.auto" -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf3 qalpont! "cp /etc/fstab.[$setNum] /etc/fstab" -echo *-* -echo doing the funky chicken with fstabs on PM2 -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf4 qalpont! "rm -f /etc/fstab.auto" -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf4 qalpont! "mv /etc/fstab /etc/fstab.auto" -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf4 qalpont! "cp /etc/fstab.[$setNum] /etc/fstab" -echo *-* -echo doing the swim with fstabs on PM3 -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf5 qalpont! "rm -f /etc/fstab.auto" -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf5 qalpont! "mv /etc/fstab /etc/fstab.auto" -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf5 qalpont! "cp /etc/fstab.[$setNum] /etc/fstab" -echo *-* -echo doing the stroll with fstabs on PM4 -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf8 qalpont! "rm -f /etc/fstab.auto" -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf8 qalpont! "mv /etc/fstab /etc/fstab.auto" -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf8 qalpont! "cp /etc/fstab.[$setNum] /etc/fstab" -#----------------------------------------------------------------------------- -# re-mount dbroots on all PMs -#----------------------------------------------------------------------------- -echo *-* -echo mounting PM1 -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf3 qalpont! 
"mount -a" -echo mounting PM2 -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf4 qalpont! "mount -a" -echo mounting PM3 -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf5 qalpont! "mount -a" -echo mounting PM4 -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf8 qalpont! "mount -a" -#----------------------------------------------------------------------------- -echo -#----------------------------------------------------------------------------- -echo set disk scheduler to deadline for newly mounted LUNs -#----------------------------------------------------------------------------- -echo -echo setting disk scheduler to deadline on PM1 -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf3 qalpont! "/etc/rc.d/rc.local" -echo setting disk scheduler to deadline on PM2 -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf4 qalpont! "/etc/rc.d/rc.local" -echo setting disk scheduler to deadline on PM3 -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf5 qalpont! "/etc/rc.d/rc.local" -echo setting disk scheduler to deadline on PM4 -/usr/local/mariadb/columnstore/bin/remote_command.sh srvqaperf8 qalpont! "/etc/rc.d/rc.local" -# -echo -----*****-----*****-----*****-----*****-----** -echo End - set dbroots to RAID Configuration $setNum -echo -----*****-----*****-----*****-----*****-----** -# End of script diff --git a/utils/scenarios/perf/source/pf/common/setPMConfig.sh b/utils/scenarios/perf/source/pf/common/setPMConfig.sh deleted file mode 100755 index 91b0d8675..000000000 --- a/utils/scenarios/perf/source/pf/common/setPMConfig.sh +++ /dev/null @@ -1,48 +0,0 @@ -#! /bin/sh -# -#/******************************************************************************* -#* Script Name: setPMConfig.sh -#* Date Created: 2009.03.05 -#* Author: Stephen Cargile -#* Purpose: Set PM configuration (ie: 1-4 PMs) -#* -#* Parameter: numPMs - number of PMs to be enabled -#* -#******************************************************************************/ -# -echo Start - Set PM configuration -# -# Stopping the Calpont software -/usr/local/mariadb/columnstore/bin/mcsadmin stopsystem y ACK_YES -# -# set maximum number of PMs possible for the stack -maxPMs=4 -#----------------------------------------------------------------------------- -# Get number of PMs from user input (command line parameters) -#----------------------------------------------------------------------------- -numPMs=$1 -#----------------------------------------------------------------------------- -#Enable all PMs -#----------------------------------------------------------------------------- -k=1 -while [ $k -le $maxPMs ]; do - /usr/local/mariadb/columnstore/bin/mcsadmin enableModule pm$k ACK_YES - ((k++)) -done -#----------------------------------------------------------------------------- -#Disable non-used PMs -#----------------------------------------------------------------------------- -k=$maxPMs -while [ $k -gt $numPMs ]; do - /usr/local/mariadb/columnstore/bin/mcsadmin disableModule pm$k ACK_YES - ((k--)) -done -#----------------------------------------------------------------------------- -# Starting the Calpont software -/usr/local/mariadb/columnstore/bin/mcsadmin startsystem y ACK_YES -sleep 60 -echo *-*-*-*-*-*-*-*-*-*-*-*-*-*-* -echo End - Set PM configuration -echo *-*-*-*-*-*-*-*-*-*-*-*-*-*-* -# -# End of script diff --git a/utils/scenarios/perf/source/pf/common/setStackConfig.sh b/utils/scenarios/perf/source/pf/common/setStackConfig.sh deleted file mode 100755 index 
0ce2e14ae..000000000 --- a/utils/scenarios/perf/source/pf/common/setStackConfig.sh +++ /dev/null @@ -1,64 +0,0 @@ -#! /bin/sh -# -#/******************************************************************************* -#* Script Name: setStackConfig.sh -#* Date Created: 2008.11.12 -#* Author: Daniel Lee -#* Purpose: Set stack configuration (ie: 2-4 for 2 UMs and 4 PMs) -#* -#* Parameter: numUMs - number of UMs to be enabled -#* numPMs - number of PMs to be enabled -#* -#******************************************************************************/ -# -echo Start - Set stack configuration -# -# Stopping the Calpont software -/usr/local/mariadb/columnstore/bin/mcsadmin stopsystem y ACK_YES -# -# set maximum number of UMs and PMs possible for the stack -maxUMs=2 -maxPMs=4 -#----------------------------------------------------------------------------- -# Get number of UMs and PMs from user input (command line parameters) -#----------------------------------------------------------------------------- -numUMs=$1 -numPMs=$2 -#----------------------------------------------------------------------------- -#Enable all UMs -#----------------------------------------------------------------------------- -k=1 -while [ $k -le $maxUMs ]; do - /usr/local/mariadb/columnstore/bin/mcsadmin enableModule um$k ACK_YES - ((k++)) -done -#----------------------------------------------------------------------------- -#Disable non-used UMs -#----------------------------------------------------------------------------- -k=$maxUMs -while [ $k -gt $numUMs ]; do - /usr/local/mariadb/columnstore/bin/mcsadmin disableModule um$k ACK_YES - ((k--)) -done -#----------------------------------------------------------------------------- -#Enable all PMs -#----------------------------------------------------------------------------- -k=1 -while [ $k -le $maxPMs ]; do - /usr/local/mariadb/columnstore/bin/mcsadmin enableModule pm$k ACK_YES - ((k++)) -done -#----------------------------------------------------------------------------- -#Disable non-used PMs -#----------------------------------------------------------------------------- -k=$maxPMs -while [ $k -gt $numPMs ]; do - /usr/local/mariadb/columnstore/bin/mcsadmin disableModule pm$k ACK_YES - ((k--)) -done -#----------------------------------------------------------------------------- -echo *-*-*-*-*-*-*-*-*-*-*-*-*-*-* -echo End - Set stack configuration -echo *-*-*-*-*-*-*-*-*-*-*-*-*-*-* -# -# End of script diff --git a/utils/scenarios/perf/source/pf/common/setTestEnv.sh b/utils/scenarios/perf/source/pf/common/setTestEnv.sh deleted file mode 100755 index e4fbc334a..000000000 --- a/utils/scenarios/perf/source/pf/common/setTestEnv.sh +++ /dev/null @@ -1,27 +0,0 @@ -#! /bin/sh -# -#/******************************************************************************* -#* Script Name: setTestEnv.sh -#* Date Created: 2008.11.12 -#* Author: Daniel Lee -#* Purpose: Set test environment by setting stack configurations and dbroots. -#* The stack will be started. 
-#* -#* Parameter: numUMs - number of UMs to be enabled -#* numPMs - number of PMs to be enabled -#* setNum - 1 or 2, dbroot set number -#* -#******************************************************************************/ -# -# Get number of UMs, PMs, and dbroot set number from user input (command line parameters) -# -numUMs=$1 -numPMs=$2 -setNum=$3 -# -/home/pf/auto/common/setStackConfig.sh $numUMs $numPMs -/home/pf/auto/common/setDBRoots.sh $setNum -/usr/local/mariadb/columnstore/bin/mcsadmin startsystem -sleep 90 -# -# End of script diff --git a/utils/scenarios/perf/source/pf/common/step1.sh b/utils/scenarios/perf/source/pf/common/step1.sh deleted file mode 100755 index e4fbc334a..000000000 --- a/utils/scenarios/perf/source/pf/common/step1.sh +++ /dev/null @@ -1,27 +0,0 @@ -#! /bin/sh -# -#/******************************************************************************* -#* Script Name: setTestEnv.sh -#* Date Created: 2008.11.12 -#* Author: Daniel Lee -#* Purpose: Set test environment by setting stack configurations and dbroots. -#* The stack will be started. -#* -#* Parameter: numUMs - number of UMs to be enabled -#* numPMs - number of PMs to be enabled -#* setNum - 1 or 2, dbroot set number -#* -#******************************************************************************/ -# -# Get number of UMs, PMs, and dbroot set number from user input (command line parameters) -# -numUMs=$1 -numPMs=$2 -setNum=$3 -# -/home/pf/auto/common/setStackConfig.sh $numUMs $numPMs -/home/pf/auto/common/setDBRoots.sh $setNum -/usr/local/mariadb/columnstore/bin/mcsadmin startsystem -sleep 90 -# -# End of script diff --git a/utils/scenarios/perf/source/pf/common/step2.sh b/utils/scenarios/perf/source/pf/common/step2.sh deleted file mode 100755 index b489005eb..000000000 --- a/utils/scenarios/perf/source/pf/common/step2.sh +++ /dev/null @@ -1,76 +0,0 @@ -#! /bin/sh -# -#/******************************************************************************* -#* Script Name: exeStreamTest.sh -#* Date Created: 2008.11.13 -#* Author: Daniel Lee -#* Purpose: Execute a TPCH stream test. 
-#* -#* Parameter: streamNum - Stream number (0, 1, etc) -#* dbSize - Database size (1, 10, 100, 1t etc) -#* iteration - Software release iteration (15, 16, 17 etc) -#* repeatNum - Number of times to repeat the test -#* restart - before each test (Y, N) -#* -#* Modified: 2008.12.09 -#* Author: Stephen Cargile -#* Purpose: point output to new 'results' directory -#* -#******************************************************************************/ -# -#----------------------------------------------------------------------------- -# command line parameters -#----------------------------------------------------------------------------- -streamNum=$1 -dbSize=$2 -iteration=$3 -repeatNum=$4 -restart=$5 -#----------------------------------------------------------------------------- -# set variables -#----------------------------------------------------------------------------- -testID=/home/pf/auto/results/tpch${dbSize}_s${streamNum}_i${iteration}_`date +%s` -exeCommand=/home/qa/srv/tpchtest/sqlplan/tpch$dbSize/s$streamNum/script/i$iteration/tpch${dbSize}_s${streamNum}.sh -#----------------------------------------------------------------------------- -# Make test directory and change to it -#----------------------------------------------------------------------------- -cd /home/pf/auto/results/ -mkdir $testID -cd $testID -logFileName=tpch${dbSize}_s${streamNum}.log -#----------------------------------------------------------------------------- -# Loop N times to repeat the test -#----------------------------------------------------------------------------- -k=1 -while [ $k -le $repeatNum ] -do - if [ $restart == Y ] || [ $restart == y ] - then - /usr/local/mariadb/columnstore/bin/mcsadmin restartsystem y - sleep 90 - fi - mkdir $k - cd $k - cp /usr/local/mariadb/columnstore/etc/Columnstore.xml . - /usr/local/mariadb/columnstore/bin/mcsadmin getCalpontSoftware >CalpontSoftware.txt - ls -al /usr/local/mariadb/columnstore/data* > dbRoots.txt - /usr/local/mariadb/columnstore/bin/mcsadmin getProcessStatus >stackConfigBefore.txt - $exeCommand > $logFileName 2>&1 -# - if [ $streamNum == 1_7 ] - then - completed=0 - while [ $completed -lt 7 ] - do - sleep 5 - completed=`cat *scc.log |grep completed. | wc -l` - done - fi -# - /usr/local/mariadb/columnstore/bin/mcsadmin getProcessStatus >stackConfigAfter.txt - cd .. - ((k++)) -done -cd .. -# -# End of script diff --git a/utils/scenarios/perf/source/pf/fm/copyfiles.sh b/utils/scenarios/perf/source/pf/fm/copyfiles.sh deleted file mode 100755 index 44db9d380..000000000 --- a/utils/scenarios/perf/source/pf/fm/copyfiles.sh +++ /dev/null @@ -1,31 +0,0 @@ -#! 
/bin/sh -# -#/******************************************************************************* -#* Script Name: copyfiles.sh -#* Date Created: 2009.02.04 -#* Author: Stephen Cargile -#* Purpose: copy the data files to \\calweb\perf for archiving and further processing -#* -#* Input Parameters: -#* host - fqdn of host -#* date - day of month -#* starttime -#* endtime -#* -#******************************************************************************/ -# -# Get user input (command line parameters passed from Step2) -# -date=$1 -starttime=$2 -endtime=$3 -host=$(hostname) -# -cd /tmp/$host/sar -# -smbclient //calweb/perf -Wcalpont -Uoamuser%Calpont1 -c "mkdir ${host};cd ${host};mkdir sar;cd sar;prompt OFF;mput sar_data_*.txt" -# -cd /tmp/$host/ps -smbclient //calweb/perf -Wcalpont -Uoamuser%Calpont1 -c "mkdir ${host};cd ${host};mkdir ps;cd ps;prompt OFF;mput ps_*.txt" -# -# End of script diff --git a/utils/scenarios/perf/source/pf/fm/copyfiles_2_fileserver.sh b/utils/scenarios/perf/source/pf/fm/copyfiles_2_fileserver.sh deleted file mode 100755 index 44db9d380..000000000 --- a/utils/scenarios/perf/source/pf/fm/copyfiles_2_fileserver.sh +++ /dev/null @@ -1,31 +0,0 @@ -#! /bin/sh -# -#/******************************************************************************* -#* Script Name: copyfiles.sh -#* Date Created: 2009.02.04 -#* Author: Stephen Cargile -#* Purpose: copy the data files to \\calweb\perf for archiving and further processing -#* -#* Input Parameters: -#* host - fqdn of host -#* date - day of month -#* starttime -#* endtime -#* -#******************************************************************************/ -# -# Get user input (command line parameters passed from Step2) -# -date=$1 -starttime=$2 -endtime=$3 -host=$(hostname) -# -cd /tmp/$host/sar -# -smbclient //calweb/perf -Wcalpont -Uoamuser%Calpont1 -c "mkdir ${host};cd ${host};mkdir sar;cd sar;prompt OFF;mput sar_data_*.txt" -# -cd /tmp/$host/ps -smbclient //calweb/perf -Wcalpont -Uoamuser%Calpont1 -c "mkdir ${host};cd ${host};mkdir ps;cd ps;prompt OFF;mput ps_*.txt" -# -# End of script diff --git a/utils/scenarios/perf/source/pf/fm/def.txt b/utils/scenarios/perf/source/pf/fm/def.txt deleted file mode 100755 index fb2cf392c..000000000 --- a/utils/scenarios/perf/source/pf/fm/def.txt +++ /dev/null @@ -1 +0,0 @@ -if [ "090211 17:13:32" \> "090211 17:13:38" ]; then echo yes; fi diff --git a/utils/scenarios/perf/source/pf/fm/gatherdata.sh b/utils/scenarios/perf/source/pf/fm/gatherdata.sh deleted file mode 100755 index f26e5dbef..000000000 --- a/utils/scenarios/perf/source/pf/fm/gatherdata.sh +++ /dev/null @@ -1,52 +0,0 @@ -#! 
/bin/sh -# -#/******************************************************************************* -#* Script Name: gatherdata.sh -#* Date Created: 2009.02.05 -#* Author: Stephen Cargile -#* Purpose: gather up sar, sql calpont logs & ps files based on user input -#* -#* Parameter: date - day of month in question (dd) -#* starttime - start of period (hh:mm) -#* endtime - end of period (hh:mm) -#* -#******************************************************************************/ -# -#----------------------------------------------------------------------------- -# command line parameters -#----------------------------------------------------------------------------- -date=$1 -starttime=$2 -endtime=$3 -# -host=$(hostname -s) -# -# clean up previous data files -if [ -d /tmp/$host ] -then - rm -rf /tmp/$host -fi -# -mkdir /tmp/$host -# -# call sar script to get sar data -echo calling sar script to get sar data -/home/pf/auto/fm/getsar.sh $1 $2 $3 -# -# call logs script to get Calpont logs -echo calling logs script to get Calpont logs -/home/pf/auto/fm/getlogs.sh -# -# call ps script to pull ps files -echo calling ps script to get ps files -/home/pf/auto/fm/getps.sh $1 $2 $3 -# -# call sql script to pull sql data -echo calling mysql script to get mysql data -/home/pf/auto/fm/getsql.sh $1 $2 $3 -# -# call copyfile script to copy files to calling server (DM) -echo copying files to server -#/home/pf/auto/fm/copyfiles.sh -# -# End of script diff --git a/utils/scenarios/perf/source/pf/fm/getlogs.sh b/utils/scenarios/perf/source/pf/fm/getlogs.sh deleted file mode 100755 index a93adf9e1..000000000 --- a/utils/scenarios/perf/source/pf/fm/getlogs.sh +++ /dev/null @@ -1,33 +0,0 @@ -#! /bin/sh -# -#/******************************************************************************* -#* Script Name: getlogs.sh -#* Date Created: 2009.02.09 -#* Author: Stephen Cargile -#* Purpose: copy Calpont log files to temp -#* -#* Parameters: date - day of month in question (dd) -#* starttime - start of sar period (hh:mm) -#* endtime - end of sar period (hh:mm) -#* -#******************************************************************************/ -# -#----------------------------------------------------------------------------- -# command line parameters -#----------------------------------------------------------------------------- -#date=$1 -#starttime=$2 -#endtime=$3 -# -host=$(hostname -s) -# -# clean up previous data files -if [ -d /tmp/$host/logs ] -then - rm -rf /tmp/$host/logs -fi -# -mkdir /tmp/$host/logs -cp -r /var/log/mariadb/columnstore/* /tmp/$host/logs -# -# End of script diff --git a/utils/scenarios/perf/source/pf/fm/getps.sh b/utils/scenarios/perf/source/pf/fm/getps.sh deleted file mode 100755 index dbde00d41..000000000 --- a/utils/scenarios/perf/source/pf/fm/getps.sh +++ /dev/null @@ -1,55 +0,0 @@ -#! 
/bin/sh -# -#/******************************************************************************* -#* Script Name: getps.sh -#* Date Created: 2009.02.05 -#* Author: Stephen Cargile -#* Purpose: retrieve ps files between start and stop times -#* -#* Input Parameters: -#* date - day of month -#* starttime - beginning of time period (hh:mm) -#* endtime - end of time period (hh:mm) -#* -#******************************************************************************/ -# -#----------------------------------------------------------------------------- -# command line parameters -#----------------------------------------------------------------------------- -date=$1 -starttime=$2 -endtime=$3 -# -host=$(hostname -s) -# -# clean up previous data files -if [ -d /tmp/$host/ps ] -then - rm -rf /tmp/$host/ps -fi -# -mkdir /tmp/$host/ps -# -cd /var/log/prat/ps/`date +%m$1%y` -#----------------------------------------------------------------------------- -# Loop thru the file names and copy them to tmp -#----------------------------------------------------------------------------- -st=`echo $starttime | awk -F":" '{ printf "%.4d\n", $1$2 }'` -sm=`echo $starttime | awk -F":" '{ print $2 }'` -et=`echo $endtime | awk -F":" '{ printf "%.4d\n", $1$2 }'` -k=$st -file=`echo $k | awk '{ printf "%.4d\n", $0 }'` -while [ $k -ge $st ] && [ $k -le $et ]; do - if [ $sm -ge 60 ]; then - k=`expr $k + 39` - sm=`expr $sm - 61` - elif [ $k -ge $st ] && [ $k -le $et ]; then - cp ps_$file.txt /tmp/$host/ps - fi - k=`expr $k + 0` - k=$((k + 1)) - file=`echo $k | awk '{ printf "%.4d\n", $0 }'` - ((sm++)) -done -# -# End of script diff --git a/utils/scenarios/perf/source/pf/fm/getsar.sh b/utils/scenarios/perf/source/pf/fm/getsar.sh deleted file mode 100755 index f1f617422..000000000 --- a/utils/scenarios/perf/source/pf/fm/getsar.sh +++ /dev/null @@ -1,65 +0,0 @@ -#! 
/bin/sh -# -#/******************************************************************************* -#* Script Name: getsar.sh -#* Date Created: 2009.02.04 -#* Author: Stephen Cargile -#* Purpose: Build a sar command based on user input and create the data file -#* -#* Parameters: date - day of month in question (dd) -#* starttime - start of sar period (hh:mm) -#* endtime - end of sar period (hh:mm) -#* -#******************************************************************************/ -# -#----------------------------------------------------------------------------- -# command line parameters -#----------------------------------------------------------------------------- -date=$1 -starttime=$2 -endtime=$3 -# -host1=$(hostname -s) -host2=srvqaperf3 -host3=srvqaperf4 -host4=srvqaperf5 -host5=srvqaperf8 -# -# clean up previous data files -if [ -d /tmp/$host1/sar ] -then - rm -rf /tmp/$host1/sar -fi -# -mkdir /tmp/$host1/sar -# -#------------------------------------------------------------------------------ -# Create sar statements and extract data to text files -# -echo "LC_ALL=C sar -P ALL -s $2:00 -e $3:00 -f /var/log/sa/sa$1 > /tmp/$host1/sar/cpu_$1_$host1.txt" >> /tmp/$host1/sar/sarcpu.sh -chmod 755 /tmp/$host1/sar/sarcpu.sh -/tmp/$host1/sar/sarcpu.sh -# -echo "LC_ALL=C sar -r -s $2:00 -e $3:00 -f /var/log/sa/sa$1 > /tmp/$host1/sar/mem_$1_$host1.txt" >> /tmp/$host1/sar/sarmem.sh -chmod 755 /tmp/$host1/sar/sarmem.sh -/tmp/$host1/sar/sarmem.sh -# -echo "LC_ALL=C sar -n DEV -s $2:00 -e $3:00 -f /var/log/sa/sa$1 > /tmp/$host1/sar/net_$1_$host1.txt" >> /tmp/$host1/sar/sarnet.sh -chmod 755 /tmp/$host1/sar/sarnet.sh -/tmp/$host1/sar/sarnet.sh -# -#------------------------------------------------------------------------------ -# Copy files to file server -# -cd /tmp/$host1/sar/ -smbclient //calweb/perf -Wcalpont -Uoamuser%Calpont1 -c "cd ${host1};prompt OFF;mput *_$1_$host1.txt" -# -#------------------------------------------------------------------------------ -# Execute the script on the other servers in the stack -# -/usr/local/mariadb/columnstore/bin/remote_command.sh $host2 qalpont! "/home/pf/auto/fm/sar.sh $1 $2 $3" 1 -/usr/local/mariadb/columnstore/bin/remote_command.sh $host3 qalpont! "/home/pf/auto/fm/sar.sh $1 $2 $3" 1 -/usr/local/mariadb/columnstore/bin/remote_command.sh $host4 qalpont! "/home/pf/auto/fm/sar.sh $1 $2 $3" 1 -/usr/local/mariadb/columnstore/bin/remote_command.sh $host5 qalpont! 
"/home/pf/auto/fm/sar.sh $1 $2 $3" 1 -# -# End of Script diff --git a/utils/scenarios/perf/source/pf/fm/getsql.orig b/utils/scenarios/perf/source/pf/fm/getsql.orig deleted file mode 100755 index 5284101e0..000000000 --- a/utils/scenarios/perf/source/pf/fm/getsql.orig +++ /dev/null @@ -1,48 +0,0 @@ -#!/bin/bash -# -#/******************************************************************************* -#* Script Name: getsql.sh -#* Date Created: 2009.02.17 -#* Author: Joseph Wiiliams -#* Purpose: extract lines from log file within time block -#* -#* Parameter: date - A day of month in question (dd) -#* starttime - A start time in (HH:mm) -#* endtime - An end time in (HH:mm) -#* -#******************************************************************************/ -# -#----------------------------------------------------------------------------- -# command line parameters -#----------------------------------------------------------------------------- -date=$1 -starttime=$2 -endtime=$3 -# -host=$(hostname -s) -# -# change date format to match sql log date format -newdate=`date +%y%m$1` -# -# clean up previous data files -if [ -d /tmp/$host/sql ] -then - rm -rf /tmp/$host/sql -fi -mkdir -p /tmp/$host/sql -# -# create the beginning and ending time search variables -startdate="$newdate $2" -enddate="$newdate $3" -cat $host.log | grep $startdate -# -# create the awk command and write it to a temporary run file -cmd="/$startdate/,/$enddate/ {print \$0} " -echo $cmd > /tmp/$host/sql/cmd.$$ -# -# execute the command -awk -f /tmp/$host/sql/cmd.$$ /usr/local/mariadb/columnstore/mysql/db/$host.log > /tmp/$host/sql/temp.log -# -exit -# -# End of Script diff --git a/utils/scenarios/perf/source/pf/fm/getsql.sh b/utils/scenarios/perf/source/pf/fm/getsql.sh deleted file mode 100755 index 033fd82cb..000000000 --- a/utils/scenarios/perf/source/pf/fm/getsql.sh +++ /dev/null @@ -1,94 +0,0 @@ -#!/bin/bash -# -#/******************************************************************************* -#* Script Name: getsql.sh -#* Date Created: 2009.02.17 -#* Author: Joseph Williams -#* Purpose: extract lines from log file within time block -#* -#* Parameter: date - A day of month in question (dd) -#* starttime - A start time in (HH:mm) -#* endtime - An end time in (HH:mm) -#* -#******************************************************************************/ -# -#----------------------------------------------------------------------------- -# command line parameters -#----------------------------------------------------------------------------- -date=$1 -starttime=$2 -endtime=$3 -# -host=$(hostname -s) -# -# change date format to match sql log date format -newdate=`date +%y%m$1` -# -# clean up previous data files -if [ -d /tmp/$host/sql ] -then - rm -rf /tmp/$host/sql -fi -mkdir -p /tmp/$host/sql -# -# create the beginning and ending time search variables -st=`echo $starttime | awk -F":" '{ printf "%.4d\n", $1$2 }'` -sh=`echo $starttime | awk -F":" '{ print $1 }'` -sm=`echo $starttime | awk -F":" '{ print $2 }'` -et=`echo $endtime | awk -F":" '{ printf "%.4d\n", $1$2 }'` -eh=`echo $endtime | awk -F":" '{ print $1 }'` -em=`echo $endtime | awk -F":" '{ print $2 }'` -start="$newdate $sh:$sm" -end="$newdate $eh:$em" -foundstart="no" -foundend="no" -# -#----------------------------------------------------------------------------- -# Search through the file looking for start and end time matches -#----------------------------------------------------------------------------- -k=$st -while [ $k -ge $st ] && [ $k -le $et ] && [ $foundstart == 
"no" ]; do - if [ $sm -ge 60 ]; then - k=`expr $k + 39` - sm=`expr $sm - 61` - elif [ $k -ge $st ] && [ $k -le $et ]; then - grep -q "$newdate $sh:$sm" /usr/local/mariadb/columnstore/mysql/db/$host.log - if [ "$?" -eq "0" ] && [ $foundstart == "no" ]; then - start="$newdate $sh:$sm" - foundstart="yes" - fi - fi - if [ $foundstart == "no" ]; then - k=`expr $k + 0` - k=$((k + 1)) - ((sm++)) - fi -done -while [ $k -ge $st ] && [ $k -le $et ] && [ $foundend == "no" ]; do - if [ $em -ge 60 ]; then - k=`expr $k + 39` - em=`expr $em - 61` - elif [ $k -ge $st ] && [ $k -le $et ]; then - grep -q "$newdate $eh:$em" /usr/local/mariadb/columnstore/mysql/db/$host.log - if [ "$?" -eq "0" ] && [ $foundend == "no" ]; then - end="$newdate $eh:$em" - foundend="yes" - fi - fi - if [ $foundend == "no" ]; then - k=`expr $k + 0` - k=$((k + 1)) - ((em++)) - fi -done -# -# create the awk command and write it to a temporary run file -cmd="/$start/,/$end/ {print \$0} " -echo $cmd >> /tmp/$host/sql/cmd.$$ -# -# execute the command -awk -f /tmp/$host/sql/cmd.$$ /usr/local/mariadb/columnstore/mysql/db/$host.log > /tmp/$host/sql/temp.log -# -exit -# -# End of Script diff --git a/utils/scenarios/perf/source/pf/fm/getsql.stc b/utils/scenarios/perf/source/pf/fm/getsql.stc deleted file mode 100755 index 588c5b4cb..000000000 --- a/utils/scenarios/perf/source/pf/fm/getsql.stc +++ /dev/null @@ -1,76 +0,0 @@ -#!/bin/bash -x -# -#/******************************************************************************* -#* Script Name: getsql.sh -#* Date Created: 2009.02.17 -#* Author: Joseph Williams -#* Purpose: extract lines from log file within time block -#* -#* Parameter: date - A day of month in question (dd) -#* starttime - A start time in (HH:mm) -#* endtime - An end time in (HH:mm) -#* -#******************************************************************************/ -# -#----------------------------------------------------------------------------- -# command line parameters -#----------------------------------------------------------------------------- -date=$1 -starttime=$2 -endtime=$3 -# 21 -host=$(hostname -s) -# -# change date format to match sql log date format -newdate=`date +%y%m$1` -# -# clean up previous data files -if [ -d /tmp/$host/sql ] -then - rm -rf /tmp/$host/sql -fi -mkdir -p /tmp/$host/sql -# 33 -# create the beginning and ending time search variables -start="$newdate $starttime" -end="$newdate $endtime" -# -#----------------------------------------------------------------------------- -# Search through the file looking for start and end time matches -#----------------------------------------------------------------------------- -st=`echo $starttime | awk -F":" '{ printf "%02s",$1$2 }'` -sh=`echo $starttime | awk -F":" '{ printf "%02s",$1 }'` -sm=`echo $starttime | awk -F":" '{ printf "%02s",$2 }'` -et=`echo $endtime | awk -F":" '{ printf "%02s",$1$2 }'` -eh=`echo $endtime | awk -F":" '{ printf "%02s",$1 }'` -em=`echo $endtime | awk -F":" '{ printf "%02s",$2 }'` -k=$st -em=$((em + 1)) -minctr=$sm -while [ $k -ge $st ] && [ $k -le $et ] -do - if [ $minctr -ge 60 ]; then -# k=$((k + 39)) - k=`expr $k + 39` - minctr=`expr $minctr - 61` - elif [ $k -ge $st ] && [ $k -le $et ]; then - grep -q -m 1 "$newdate $sh:$minctr" /usr/local/Calpont/mysql/db/$host.log - grep -q "$newdate $eh:$em" /usr/local/Calpont/mysql/db/$host.log -# grep "$end" /usr/local/Calpont/mysql/db/$host.log - fi - k=$((k + 1)) -# ((k++)) - minctr=$((minctr + 1)) -# ((minctr++)) -done -# -# create the awk command and write it to a temporary run 
file -cmd="/$start/,/$end/ {print \$0} " -echo $cmd >> /tmp/$host/sql/cmd.$$ -# -# execute the command -awk -f /tmp/$host/sql/cmd.$$ /usr/local/Calpont/mysql/db/$host.log > /tmp/$host/sql/temp.log -# -exit -# -# End of Script diff --git a/utils/scenarios/perf/source/pf/fm/move2win.sh b/utils/scenarios/perf/source/pf/fm/move2win.sh deleted file mode 100755 index 3ba822c2b..000000000 --- a/utils/scenarios/perf/source/pf/fm/move2win.sh +++ /dev/null @@ -1,41 +0,0 @@ -#! /bin/sh -# -#/******************************************************************************* -#* Script Name: move2win.sh -#* Date Created: 2008.12.08 -#* Author: Stephen Cargile -#* Purpose: copy the sar files to \\ws_scargile_tx\perf for further processing -#* -#* Input Parameters: -#* numUMs - number of UMs used in the test -#* numPMs - number of PMs used in the test -#* setStg - 1 or 2: number of arrays used -#* relNum - Release number -#* dbSize - Size of database -#* testNo - the number that this config has been tested -#* streamNum - the type of stream (0 or 17) -#* dirNo - the number of the current directory -#* -#******************************************************************************/ -# -# Get user input (command line parameters passed from Step2) -# -numUMs=$1 -numPMs=$2 -setStg=$3 -relNum=$4 -dbSize=$5 -testNo=$6 -streamNum=$7 -dirNo=$8 - -smbclient //ws_scargile_tx/perf -Wcalpont -Uoamuser%Calpont1 -c "mkdir 1${numUMs}${numPMs}_${setStg}array" -smbclient //ws_scargile_tx/perf -Wcalpont -Uoamuser%Calpont1 -c "cd 1${numUMs}${numPMs}_${setStg}array" -smbclient //ws_scargile_tx/perf -Wcalpont -Uoamuser%Calpont1 -c "cd 1${numUMs}${numPMs}_${setStg}array;mkdir rel${relNum}_${dbSize}" -smbclient //ws_scargile_tx/perf -Wcalpont -Uoamuser%Calpont1 -c "cd 1${numUMs}${numPMs}_${setStg}array\rel${relNum}_${dbSize};mkdir test${testNo}" -smbclient //ws_scargile_tx/perf -Wcalpont -Uoamuser%Calpont1 -c "cd 1${numUMs}${numPMs}_${setStg}array\rel${relNum}_${dbSize}\test${testNo};mkdir s${streamNum}" -smbclient //ws_scargile_tx/perf -Wcalpont -Uoamuser%Calpont1 -c "cd 1${numUMs}${numPMs}_${setStg}array\rel${relNum}_${dbSize}\test${testNo}\s${streamNum};mkdir run${dirNo}" -smbclient //ws_scargile_tx/perf -Wcalpont -Uoamuser%Calpont1 -c "cd 1${numUMs}${numPMs}_${setStg}array\rel${relNum}_${dbSize}\test${testNo}\s${streamNum}\run${dirNo};mkdir sar" -smbclient //ws_scargile_tx/perf -Wcalpont -Uoamuser%Calpont1 -c "cd 1${numUMs}${numPMs}_${setStg}array\rel${relNum}_${dbSize}\test${testNo}\s${streamNum}\run${dirNo}\sar;prompt OFF;mput *.txt" -# -# End of script diff --git a/utils/scenarios/perf/source/pf/fm/ps b/utils/scenarios/perf/source/pf/fm/ps deleted file mode 100755 index a306fefcd..000000000 --- a/utils/scenarios/perf/source/pf/fm/ps +++ /dev/null @@ -1,2 +0,0 @@ -# run system activity report tool every 1 minute -*/1 * * * * root /etc/pscollect diff --git a/utils/scenarios/perf/source/pf/fm/ps.sh b/utils/scenarios/perf/source/pf/fm/ps.sh deleted file mode 100755 index 2c616e7ca..000000000 --- a/utils/scenarios/perf/source/pf/fm/ps.sh +++ /dev/null @@ -1,29 +0,0 @@ -#! /bin/sh -# -#/******************************************************************************* -#* Script Name: ps.sh -#* Date Created: 2009.01.27 -#* Author: Stephen Cargile -#* Purpose: capture system activity every x seconds -#* -#******************************************************************************/ -# -cd /usr/local/prat/kernel - if [ ! 
-d /usr/local/prat/kernel/`date +%m%d%y` ] - then - mkdir `date +%m%d%y` - else - cd `date +%m%d%y` - hostname > ps_`date +%R`.txt - date >> ps_`date +%R`.txt - /bin/ps -leaf >> ps_`date +%R`.txt - fi - - if [ -f /usr/bin/pstree ] - then - /usr/bin/pstree -G > pstree_`date +%R`.txt - else - echo "binary /usr/bin/pstree not installed." > pstree_`date +%R`.txt - fi - -# End of script diff --git a/utils/scenarios/perf/source/pf/fm/pscollect b/utils/scenarios/perf/source/pf/fm/pscollect deleted file mode 100755 index 179899965..000000000 --- a/utils/scenarios/perf/source/pf/fm/pscollect +++ /dev/null @@ -1,29 +0,0 @@ -#! /bin/sh -# -#/******************************************************************************* -#* Script Name: pscollect -#* Date Created: 2009.01.27 -#* Author: Stephen Cargile -#* Purpose: capture system activity every x seconds -#* -#******************************************************************************/ -# -cd /var/log/prat/ps - if [ ! -d /var/log/prat/ps/`date +%m%d%y` ] - then - mkdir `date +%m%d%y` - else - cd `date +%m%d%y` - hostname -s > ps_`date +%H%M`.txt - date >> ps_`date +%H%M`.txt - /bin/ps -leaf >> ps_`date +%H%M`.txt - fi - -# if [ -f /usr/bin/pstree ] -# then -# /usr/bin/pstree -G > pstree_`date +%R`.txt -# else -# echo "binary /usr/bin/pstree not installed." > pstree_`date +%R`.txt -# fi - -# End of script diff --git a/utils/scenarios/perf/source/pf/fm/sar.sh b/utils/scenarios/perf/source/pf/fm/sar.sh deleted file mode 100755 index 7af040078..000000000 --- a/utils/scenarios/perf/source/pf/fm/sar.sh +++ /dev/null @@ -1,53 +0,0 @@ -#! /bin/sh -# -#/******************************************************************************* -#* Script Name: getsar.sh -#* Date Created: 2009.02.04 -#* Author: Stephen Cargile -#* Purpose: Build a sar command based on user input and create the data file -#* -#* Parameters: date - day of month in question (dd) -#* starttime - start of sar period (hh:mm) -#* endtime - end of sar period (hh:mm) -#* -#******************************************************************************/ -# -#----------------------------------------------------------------------------- -# command line parameters -#----------------------------------------------------------------------------- -date=$1 -starttime=$2 -endtime=$3 -# -host=$(hostname -s) -# -# clean up previous data files -if [ -d /tmp/$host/sar ] -then - rm -rf /tmp/$host/sar -fi -# -mkdir /tmp/$host/sar -# -#------------------------------------------------------------------------------ -# Create sar statements and extract data to text files -# -echo "LC_ALL=C sar -P ALL -s $2:00 -e $3:00 -f /var/log/sa/sa$1 > /tmp/$host/sar/cpu_$1_$host.txt" >> /tmp/$host/sar/sarcpu.sh -chmod 755 /tmp/$host/sar/sarcpu.sh -/tmp/$host/sar/sarcpu.sh -# -echo "LC_ALL=C sar -r -s $2:00 -e $3:00 -f /var/log/sa/sa$1 > /tmp/$host/sar/mem_$1_$host.txt" >> /tmp/$host/sar/sarmem.sh -chmod 755 /tmp/$host/sar/sarmem.sh -/tmp/$host/sar/sarmem.sh -# -echo "LC_ALL=C sar -n DEV -s $2:00 -e $3:00 -f /var/log/sa/sa$1 > /tmp/$host/sar/net_$1_$host.txt" >> /tmp/$host/sar/sarnet.sh -chmod 755 /tmp/$host/sar/sarnet.sh -/tmp/$host/sar/sarnet.sh -# -#------------------------------------------------------------------------------ -# Copy files to file server -# -cd /tmp/$host/sar/ -smbclient //calweb/perf -Wcalpont -Uoamuser%Calpont1 -c "cd ${host};prompt OFF;mput *_$1_$host.txt" -# -# End of Script diff --git a/utils/scenarios/perf/source/pf/scripts/115_100_s0.sh b/utils/scenarios/perf/source/pf/scripts/115_100_s0.sh deleted 
file mode 100755 index 3fdc2fe72..000000000 --- a/utils/scenarios/perf/source/pf/scripts/115_100_s0.sh +++ /dev/null @@ -1,23 +0,0 @@ -#! /bin/sh -# -echo -echo ------------------------------------------------------------------ -echo BEGIN AUTOMATED TEST - S0 100GB 115 2 Array 1 Pass -echo ------------------------------------------------------------------ -echo -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 5PM 2Array stream0 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/step1.sh 1 5 2 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo END OF AUTOMATED TEST - S0 100GB 115 2 Array 1 Pass -echo ------------------------------------------------------------------ -echo -# End of script diff --git a/utils/scenarios/perf/source/pf/scripts/1server_all.sh b/utils/scenarios/perf/source/pf/scripts/1server_all.sh deleted file mode 100755 index 133eb0cd5..000000000 --- a/utils/scenarios/perf/source/pf/scripts/1server_all.sh +++ /dev/null @@ -1,115 +0,0 @@ -#! /bin/sh -# -echo -echo ------------------------------------------------------------------ -echo Single server -echo ------------------------------------------------------------------- -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 1Array stream0 1TB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 1 1 -/home/pf/auto/common/step2.sh 0 1t 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 1Array stream0 1TB i16 2x w/restart -echo ------------------------------------------------------------------- -echo -/home/pf/auto/common/step1.sh 1 2 1 -/home/pf/auto/common/step2.sh 0 1t 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 1Array stream0 1TB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 1 -/home/pf/auto/common/step2.sh 0 1t 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 2Array stream0 1TB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 1 2 -/home/pf/auto/common/step2.sh 0 1t 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 2Array stream0 1TB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 2 2 -/home/pf/auto/common/step2.sh 0 1t 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 2Array stream0 1TB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 2 -/home/pf/auto/common/step2.sh 0 1t 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo executing setLowMem100 script to prep for following stream1_7 tests -echo ------------------------------------------------------------------ -echo -/usr/local/mariadb/columnstore/bin/setLowMem100.sh -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 
1Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 1 1 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 1Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 2 1 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 1Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 1 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 2Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 1 2 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 2Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 2 2 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 2Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 2 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo END OF WEEKEND AUTOMATED TEST - 203.9 S0 1T & S17 100GB -echo ------------------------------------------------------------------ -echo -# End of script diff --git a/utils/scenarios/perf/source/pf/scripts/1um_1a_100_s0.sh b/utils/scenarios/perf/source/pf/scripts/1um_1a_100_s0.sh deleted file mode 100755 index 8bd9498a9..000000000 --- a/utils/scenarios/perf/source/pf/scripts/1um_1a_100_s0.sh +++ /dev/null @@ -1,36 +0,0 @@ -#! 
/bin/sh -# -echo -echo ------------------------------------------------------------------ -echo BEGIN AUTOMATED TEST - 203.7 S0 100GB 1UM 1 Array 1 Pass -echo ------------------------------------------------------------------ -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 1Array stream0 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 1 1 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 1Array stream0 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 2 1 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 1Array stream0 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 1 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo END OF AUTOMATED TEST - 203.11 S0 100GB 1UM 1 Array 1 Pass -echo ------------------------------------------------------------------ -echo -# End of script diff --git a/utils/scenarios/perf/source/pf/scripts/1um_1a_100_s17.sh b/utils/scenarios/perf/source/pf/scripts/1um_1a_100_s17.sh deleted file mode 100755 index f85109440..000000000 --- a/utils/scenarios/perf/source/pf/scripts/1um_1a_100_s17.sh +++ /dev/null @@ -1,42 +0,0 @@ -#! /bin/sh -# -echo -echo ------------------------------------------------------------------ -echo BEGIN 1203 AUTOMATED TEST - 203.9 S17 100GB 1UM 1 Array 1 Pass -echo ------------------------------------------------------------------ -echo executing setLowMem100 script to prep for following stream1_7 tests -echo ------------------------------------------------------------------ -echo -# -/usr/local/mariadb/columnstore/bin/setLowMem100.sh -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 1Array stream1_7 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 1 1 -/home/pf/auto/common/step2.sh 1_7 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 1Array stream1_7 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 2 1 -/home/pf/auto/common/step2.sh 1_7 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 1Array stream1_7 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 1 -/home/pf/auto/common/step2.sh 1_7 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo END OF 1203 AUTOMATED TEST - 203.9 S17 100GB 1UM 1 Array 1 Pass -echo ------------------------------------------------------------------ -echo -# End of script diff --git a/utils/scenarios/perf/source/pf/scripts/1um_1a_4pm_1t_s0.sh b/utils/scenarios/perf/source/pf/scripts/1um_1a_4pm_1t_s0.sh deleted file mode 100755 index 73808dc76..000000000 --- a/utils/scenarios/perf/source/pf/scripts/1um_1a_4pm_1t_s0.sh +++ /dev/null @@ -1,11 +0,0 @@ -#! 
/bin/sh -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 1Array stream0 1TB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 1 -/home/pf/auto/common/step2.sh 0 1t 16 1 Y -# -# End of script diff --git a/utils/scenarios/perf/source/pf/scripts/1um_1pm_1a_100_s0.sh b/utils/scenarios/perf/source/pf/scripts/1um_1pm_1a_100_s0.sh deleted file mode 100755 index ce6081b6e..000000000 --- a/utils/scenarios/perf/source/pf/scripts/1um_1pm_1a_100_s0.sh +++ /dev/null @@ -1,20 +0,0 @@ -#! /bin/sh -# -echo -echo ------------------------------------------------------------------ -echo BEGIN AUTOMATED TEST - 203.13 1UM 1PM 1Array 100GB S0 1 Pass -echo ------------------------------------------------------------------ -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 1Array stream0 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 1 1 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo END OF AUTOMATED TEST - 203.13 1UM 1PM 1Array 100GB S0 1 Pass -echo ------------------------------------------------------------------ -echo -# End of script diff --git a/utils/scenarios/perf/source/pf/scripts/1um_1pm_2a_1t_s0.sh b/utils/scenarios/perf/source/pf/scripts/1um_1pm_2a_1t_s0.sh deleted file mode 100755 index b4881881b..000000000 --- a/utils/scenarios/perf/source/pf/scripts/1um_1pm_2a_1t_s0.sh +++ /dev/null @@ -1,20 +0,0 @@ -#! /bin/sh -# -echo -echo ------------------------------------------------------------------ -echo BEGIN AUTOMATED TEST - 203.7 1UM 1PM 2Arrays 1TB S0 2 Pass -echo ------------------------------------------------------------------ -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 2Arrays stream0 1TB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 1 2 -/home/pf/auto/common/step2.sh 0 1t 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo END OF AUTOMATED TEST - 203.7 1UM 1PM 2Arrays 1TB S0 2 Pass -echo ------------------------------------------------------------------ -echo -# End of script diff --git a/utils/scenarios/perf/source/pf/scripts/1um_2a_100_1t_s0.sh b/utils/scenarios/perf/source/pf/scripts/1um_2a_100_1t_s0.sh deleted file mode 100755 index d2bbef597..000000000 --- a/utils/scenarios/perf/source/pf/scripts/1um_2a_100_1t_s0.sh +++ /dev/null @@ -1,78 +0,0 @@ -#! 
/bin/sh -# -echo -echo ------------------------------------------------------------------ -echo BEGIN AUTOMATED TEST - S0 100GB and 1TB 1UM 2 Array 3 Pass -echo ------------------------------------------------------------------ -echo -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 2Array stream0 100GB i16 3x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/step1.sh 1 1 2 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 2Array stream0 100GB i16 3x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/step1.sh 1 2 2 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 2Array stream0 100GB i16 3x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/step1.sh 1 4 2 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo END OF S0 100GB TESTS! -echo ------------------------------------------------------------------ -echo -echo ------------------------------------------------------------------ -echo BEGIN AUTOMATED TEST - S0 1TB 1UM 2 Array 3 Pass -echo ------------------------------------------------------------------ -echo -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 2Array stream0 1TB i16 3x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/step1.sh 1 1 2 -/home/pf/auto/common/step2.sh 0 1t 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 2Array stream0 1TB i16 3x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/step1.sh 1 2 2 -/home/pf/auto/common/step2.sh 0 1t 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 2Array stream0 1TB i16 3x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/step1.sh 1 4 2 -/home/pf/auto/common/step2.sh 0 1t 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo END OF AUTOMATED TEST - S0 100GB and 1TB 1UM 2 Array 3 Pass -echo ------------------------------------------------------------------ -echo -# End of script diff --git a/utils/scenarios/perf/source/pf/scripts/1um_2a_100_s0.sh b/utils/scenarios/perf/source/pf/scripts/1um_2a_100_s0.sh deleted file mode 100755 index cbaad3e00..000000000 --- a/utils/scenarios/perf/source/pf/scripts/1um_2a_100_s0.sh +++ /dev/null @@ -1,41 +0,0 @@ -#! 
/bin/sh -# -echo -echo ------------------------------------------------------------------ -echo BEGIN AUTOMATED TEST - 207 S0 100GB 1UM 2 Array 1 Pass -echo ------------------------------------------------------------------ -echo -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 2Array stream0 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/step1.sh 1 1 2 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 2Array stream0 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/step1.sh 1 2 2 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 2Array stream0 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/step1.sh 1 4 2 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo END OF AUTOMATED TEST - 207 S0 100GB 1UM 2 Array 1 Pass -echo ------------------------------------------------------------------ -echo -# End of script diff --git a/utils/scenarios/perf/source/pf/scripts/1um_2a_100_s17.sh b/utils/scenarios/perf/source/pf/scripts/1um_2a_100_s17.sh deleted file mode 100755 index ba05c1793..000000000 --- a/utils/scenarios/perf/source/pf/scripts/1um_2a_100_s17.sh +++ /dev/null @@ -1,42 +0,0 @@ -#! /bin/sh -# -echo -echo ------------------------------------------------------------------ -echo BEGIN AUTOMATED TEST - 203.9 S17 100GB 1UM 2 Array 1 Pass -echo ------------------------------------------------------------------ -echo executing setLowMem100 script to prep for following stream1_7 tests -echo ------------------------------------------------------------------ -echo -# -/usr/local/mariadb/columnstore/bin/setLowMem100.sh -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 2Array stream1_7 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 1 2 -/home/pf/auto/common/step2.sh 1_7 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 2Array stream1_7 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 2 2 -/home/pf/auto/common/step2.sh 1_7 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 2Array stream1_7 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 2 -/home/pf/auto/common/step2.sh 1_7 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo END OF AUTOMATED TEST - 203.9 S17 100GB 1UM 2 Array 1 Pass -echo ------------------------------------------------------------------ -echo -# End of script diff --git a/utils/scenarios/perf/source/pf/scripts/1um_allarrays_allPMs_s0_s17.sh b/utils/scenarios/perf/source/pf/scripts/1um_allarrays_allPMs_s0_s17.sh deleted file mode 100755 index 3dc4ccff2..000000000 --- a/utils/scenarios/perf/source/pf/scripts/1um_allarrays_allPMs_s0_s17.sh +++ 
/dev/null @@ -1,111 +0,0 @@ -#! /bin/sh -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 1Array stream0 1TB i16 2x w/restart -echo ------------------------------------------------------------------- -echo -/home/pf/auto/common/step1.sh 1 1 1 -/home/pf/auto/common/step2.sh 0 1t 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 1Array stream0 1TB i16 2x w/restart -echo ------------------------------------------------------------------- -echo -/home/pf/auto/common/step1.sh 1 2 1 -/home/pf/auto/common/step2.sh 0 1t 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 1Array stream0 1TB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 1 -/home/pf/auto/common/step2.sh 0 1t 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 2Array stream0 1TB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 1 2 -/home/pf/auto/common/step2.sh 0 1t 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 2Array stream0 1TB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 2 2 -/home/pf/auto/common/step2.sh 0 1t 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 2Array stream0 1TB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 2 -/home/pf/auto/common/step2.sh 0 1t 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo executing setLowMem100 script to prep for following stream1_7 tests -echo ------------------------------------------------------------------ -echo -/usr/local/mariadb/columnstore/bin/setLowMem100.sh -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 1Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 1 1 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 1Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 2 1 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 1Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 1 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 2Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 1 2 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 2Array stream1_7 100GB i16 2x w/restart -echo 
------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 2 2 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 2Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 2 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo END OF WEEKEND AUTOMATED TEST - 203.9 S0 1T & S17 100GB -echo ------------------------------------------------------------------ -echo -# End of script diff --git a/utils/scenarios/perf/source/pf/scripts/1um_allconfigs_s0.sh b/utils/scenarios/perf/source/pf/scripts/1um_allconfigs_s0.sh deleted file mode 100755 index abf3edc8f..000000000 --- a/utils/scenarios/perf/source/pf/scripts/1um_allconfigs_s0.sh +++ /dev/null @@ -1,110 +0,0 @@ -#! /bin/sh -# -echo -echo ------------------------------------------------------------------ -echo START OF 12-12 WEEKEND AUTOMATED TEST - 203.13 S0 1T & 100GB -echo ------------------------------------------------------------------ -echo -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 1Array stream0 100G i16 1x w/restart -echo ------------------------------------------------------------------- -echo -/home/pf/auto/common/step1.sh 1 1 1 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 1Array stream0 100G i16 1x w/restart -echo ------------------------------------------------------------------- -echo -/home/pf/auto/common/step1.sh 1 2 1 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 1Array stream0 100G i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 1 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 2Array stream0 100G i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 1 2 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 2Array stream0 100G i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 2 2 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 2Array stream0 100 i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 2 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 1Array stream0 1TB i16 1x w/restart -echo ------------------------------------------------------------------- -echo -/home/pf/auto/common/step1.sh 1 1 1 -/home/pf/auto/common/step2.sh 0 1t 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 1Array stream0 1TB i16 1x w/restart -echo 
------------------------------------------------------------------- -echo -/home/pf/auto/common/step1.sh 1 2 1 -/home/pf/auto/common/step2.sh 0 1t 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 1Array stream0 1TB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 1 -/home/pf/auto/common/step2.sh 0 1t 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 2Array stream0 1TB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 1 2 -/home/pf/auto/common/step2.sh 0 1t 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 2Array stream0 1TB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 2 2 -/home/pf/auto/common/step2.sh 0 1t 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 2Array stream0 1TB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 2 -/home/pf/auto/common/step2.sh 0 1t 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo END OF WEEKEND AUTOMATED TEST - 203.13 S0 1T & 100GB -echo ------------------------------------------------------------------ -echo -# End of script diff --git a/utils/scenarios/perf/source/pf/scripts/2a_100_s0.sh b/utils/scenarios/perf/source/pf/scripts/2a_100_s0.sh deleted file mode 100755 index a83788b28..000000000 --- a/utils/scenarios/perf/source/pf/scripts/2a_100_s0.sh +++ /dev/null @@ -1,23 +0,0 @@ -#! /bin/sh -# -echo -echo ------------------------------------------------------------------ -echo BEGIN AUTOMATED TEST - S0 100GB 1UM 1PM 2 Array 1 Pass -echo ------------------------------------------------------------------ -echo -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 2Array stream0 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/step1.sh 1 1 2 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo END OF AUTOMATED TEST - S0 100GB 1UM 1PM 2 Array 1 Pass -echo ------------------------------------------------------------------ -echo -# End of script diff --git a/utils/scenarios/perf/source/pf/scripts/2a_100g_1t_s0_s17.sh b/utils/scenarios/perf/source/pf/scripts/2a_100g_1t_s0_s17.sh deleted file mode 100755 index f988de10f..000000000 --- a/utils/scenarios/perf/source/pf/scripts/2a_100g_1t_s0_s17.sh +++ /dev/null @@ -1,127 +0,0 @@ -#! /bin/sh -# -echo -echo ----------------------------------------------------------------------------------------------- -echo BEGIN AUTOMATED TEST - 1 2 and 4 PMs S0 100GB and 1TB and S1_7 100GB with 2 Arrays 1 Pass -echo ----------------------------------------------------------------------------------------------- -echo -# -echo -echo ------------------------------------------------------------------ -echo BEGINNING OF 1 2 and 4 PM 2 Array S0 100GB TESTS! 
-echo ------------------------------------------------------------------ -echo -echo ------------------------------------------------------------------ -echo starting 1PM 2Array stream0 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/setTestEnv.sh 1 1 2 -/home/pf/auto/common/exeStreamTest.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 2PM 2Array stream0 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/setTestEnv.sh 1 2 2 -/home/pf/auto/common/exeStreamTest.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 4PM 2Array stream0 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/setTestEnv.sh 1 4 2 -/home/pf/auto/common/exeStreamTest.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo END OF 1 2 and 4 PM 2 Array S0 100GB TESTS! -echo ------------------------------------------------------------------ -echo -echo ------------------------------------------------------------------ -echo BEGINNING 1 2 and 4 PM 2 Array S0 1TB TESTS! -echo ------------------------------------------------------------------ -echo -echo ------------------------------------------------------------------ -echo starting 1PM 2Array stream0 1TB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/setTestEnv.sh 1 1 2 -/home/pf/auto/common/exeStreamTest.sh 0 1t 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 2PM 2Array stream0 1TB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/setTestEnv.sh 1 2 2 -/home/pf/auto/common/exeStreamTest.sh 0 1t 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 4PM 2Array stream0 1TB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/setTestEnv.sh 1 4 2 -/home/pf/auto/common/exeStreamTest.sh 0 1t 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo END OF 1 2 and 4 PM 2 Array S0 1TB TESTS! -echo ------------------------------------------------------------------ -echo -echo ------------------------------------------------------------------ -echo executing setLowMem100 script to prep for following stream1_7 tests -echo ------------------------------------------------------------------ -echo -# -/usr/local/mariadb/columnstore/bin/setLowMem100.sh -# -echo -echo ------------------------------------------------------------------ -echo BEGINNING OF 1 2 and 4 PM 2 Array S1_7 100GB TESTS! 
-echo ------------------------------------------------------------------ -echo -echo ------------------------------------------------------------------ -echo starting 1PM 2Array stream1_7 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/setTestEnv.sh 1 1 2 -/home/pf/auto/common/exeStreamTest.sh 1_7 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 2PM 2Array stream1_7 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/setTestEnv.sh 1 2 2 -/home/pf/auto/common/exeStreamTest.sh 1_7 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 4PM 2Array stream1_7 100GB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/setTestEnv.sh 1 4 2 -/home/pf/auto/common/exeStreamTest.sh 1_7 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo END OF 1 2 and 4 PM 2 Array S1_7 100GB TESTS! -echo ------------------------------------------------------------------ -echo -echo ------------------------------------------------------------------------------------------------ -echo END OF AUTOMATED TEST - 1 2 and 4 PMs S0 100GB and 1TB and S1_7 100GB with 2 Arrays 1 Pass -echo ------------------------------------------------------------------------------------------------ -echo -# End of script diff --git a/utils/scenarios/perf/source/pf/scripts/2a_1t_s0.sh b/utils/scenarios/perf/source/pf/scripts/2a_1t_s0.sh deleted file mode 100755 index e2482a821..000000000 --- a/utils/scenarios/perf/source/pf/scripts/2a_1t_s0.sh +++ /dev/null @@ -1,53 +0,0 @@ -#! /bin/sh -# -echo -echo ----------------------------------------------------------------------------------------------- -echo BEGIN AUTOMATED TEST - 1 2 4 PMs S0 1TB 2 Arrays 1 Pass -echo ----------------------------------------------------------------------------------------------- -echo -# -echo -echo ------------------------------------------------------------------ -echo BEGINNING 1 2 and 4 PM 2 Array S0 1TB TESTS! -echo ------------------------------------------------------------------ -echo -echo ------------------------------------------------------------------ -echo starting 1PM 2Array stream0 1TB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/setTestEnv.sh 1 1 2 -/home/pf/auto/common/exeStreamTest.sh 0 1t 17 1 Y -/home/pf/auto/common/extractlogdata.sh 1t 0 -# -echo -echo ------------------------------------------------------------------ -echo starting 2PM 2Array stream0 1TB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/setTestEnv.sh 1 2 2 -/home/pf/auto/common/exeStreamTest.sh 0 1t 17 1 Y -/home/pf/auto/common/extractlogdata.sh 1t 0 -# -echo -echo ------------------------------------------------------------------ -echo starting 4PM 2Array stream0 1TB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -# -/home/pf/auto/common/setTestEnv.sh 1 4 2 -/home/pf/auto/common/exeStreamTest.sh 0 1t 17 1 Y -/home/pf/auto/common/extractlogdata.sh 1t 0 -# -echo -echo ------------------------------------------------------------------ -echo END OF 1 2 and 4 PM 2 Array S0 1TB TESTS! 
-echo ------------------------------------------------------------------ -echo -echo -echo ------------------------------------------------------------------------------------------------ -echo END OF AUTOMATED TEST - 1 2 and 4 PMs S0 1TB 2 Arrays 1 Pass -echo ------------------------------------------------------------------------------------------------ -echo -# End of script diff --git a/utils/scenarios/perf/source/pf/scripts/2um_allconfigs_s0.sh b/utils/scenarios/perf/source/pf/scripts/2um_allconfigs_s0.sh deleted file mode 100755 index 72144cbf2..000000000 --- a/utils/scenarios/perf/source/pf/scripts/2um_allconfigs_s0.sh +++ /dev/null @@ -1,77 +0,0 @@ -#! /bin/sh -# -echo -echo ------------------------------------------------------------------ -echo START OF AUTOMATED TEST - 203.13 2UMs S0 100GB and 1TB -echo ------------------------------------------------------------------ -echo -# -echo -echo ------------------------------------------------------------------ -echo starting 2UM 1PM 1Array stream0 100G i16 2x w/restart -echo ------------------------------------------------------------------- -echo -/home/pf/auto/common/step1.sh 2 1 1 -/home/pf/auto/common/step2.sh 0 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 2UM 2PM 1Array stream0 100G i16 2x w/restart -echo ------------------------------------------------------------------- -echo -/home/pf/auto/common/step1.sh 2 2 1 -/home/pf/auto/common/step2.sh 0 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 2UM 1PM 2Array stream0 100G i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 2 1 2 -/home/pf/auto/common/step2.sh 0 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 2UM 2PM 2Array stream0 100G i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 2 2 2 -/home/pf/auto/common/step2.sh 0 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 2UM 1PM 1Array stream0 1TB i16 2x w/restart -echo ------------------------------------------------------------------- -echo -/home/pf/auto/common/step1.sh 2 1 1 -/home/pf/auto/common/step2.sh 0 1t 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 2UM 2PM 1Array stream0 1TB i16 2x w/restart -echo ------------------------------------------------------------------- -echo -/home/pf/auto/common/step1.sh 2 2 1 -/home/pf/auto/common/step2.sh 0 1t 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 2UM 1PM 2Array stream0 1TB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 2 1 2 -/home/pf/auto/common/step2.sh 0 1t 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 2UM 2PM 2Array stream0 1TB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 2 2 2 -/home/pf/auto/common/step2.sh 0 1t 16 2 Y -# -echo ------------------------------------------------------------------ -echo END OF AUTOMATED TEST - 203.13 2UMs S0 1T and 100GB -echo ------------------------------------------------------------------ -echo -# End of script diff --git 
a/utils/scenarios/perf/source/pf/scripts/2um_allconfigs_s17.sh b/utils/scenarios/perf/source/pf/scripts/2um_allconfigs_s17.sh deleted file mode 100755 index ebda7d39e..000000000 --- a/utils/scenarios/perf/source/pf/scripts/2um_allconfigs_s17.sh +++ /dev/null @@ -1,54 +0,0 @@ -#! /bin/sh -# -echo -echo ------------------------------------------------------------------ -echo -echo START OF TEST - 203.13 S17 100GB 2UMs -echo -echo ------------------------------------------------------------------ -echo -echo -echo ------------------------------------------------------------------ -echo executing setLowMem100 script to prep for following stream1_7 tests -echo ------------------------------------------------------------------ -echo -/usr/local/mariadb/columnstore/bin/setLowMem100.sh -# -echo -echo ------------------------------------------------------------------ -echo starting 2UM 1PM 1Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 2 1 1 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 2UM 2PM 1Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 2 2 1 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 2UM 1PM 2Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 2 1 2 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 2UM 2PM 2Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 2 2 2 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo END OF TEST - 203.13 S17 100GB 2UMs -echo ------------------------------------------------------------------ -echo -# End of script diff --git a/utils/scenarios/perf/source/pf/scripts/afile.txt b/utils/scenarios/perf/source/pf/scripts/afile.txt deleted file mode 100644 index e69de29bb..000000000 diff --git a/utils/scenarios/perf/source/pf/scripts/allconfigs_s0.sh b/utils/scenarios/perf/source/pf/scripts/allconfigs_s0.sh deleted file mode 100755 index abf3edc8f..000000000 --- a/utils/scenarios/perf/source/pf/scripts/allconfigs_s0.sh +++ /dev/null @@ -1,110 +0,0 @@ -#! 
/bin/sh -# -echo -echo ------------------------------------------------------------------ -echo START OF 12-12 WEEKEND AUTOMATED TEST - 203.13 S0 1T & 100GB -echo ------------------------------------------------------------------ -echo -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 1Array stream0 100G i16 1x w/restart -echo ------------------------------------------------------------------- -echo -/home/pf/auto/common/step1.sh 1 1 1 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 1Array stream0 100G i16 1x w/restart -echo ------------------------------------------------------------------- -echo -/home/pf/auto/common/step1.sh 1 2 1 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 1Array stream0 100G i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 1 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 2Array stream0 100G i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 1 2 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 2Array stream0 100G i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 2 2 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 2Array stream0 100 i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 2 -/home/pf/auto/common/step2.sh 0 100 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 1Array stream0 1TB i16 1x w/restart -echo ------------------------------------------------------------------- -echo -/home/pf/auto/common/step1.sh 1 1 1 -/home/pf/auto/common/step2.sh 0 1t 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 1Array stream0 1TB i16 1x w/restart -echo ------------------------------------------------------------------- -echo -/home/pf/auto/common/step1.sh 1 2 1 -/home/pf/auto/common/step2.sh 0 1t 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 1Array stream0 1TB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 1 -/home/pf/auto/common/step2.sh 0 1t 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 2Array stream0 1TB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 1 2 -/home/pf/auto/common/step2.sh 0 1t 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 2Array stream0 1TB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 2 2 -/home/pf/auto/common/step2.sh 0 1t 16 1 Y -# 
-echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 2Array stream0 1TB i16 1x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 2 -/home/pf/auto/common/step2.sh 0 1t 16 1 Y -# -echo -echo ------------------------------------------------------------------ -echo END OF WEEKEND AUTOMATED TEST - 203.13 S0 1T & 100GB -echo ------------------------------------------------------------------ -echo -# End of script diff --git a/utils/scenarios/perf/source/pf/scripts/allconfigs_s17.sh b/utils/scenarios/perf/source/pf/scripts/allconfigs_s17.sh deleted file mode 100755 index ccd16cf66..000000000 --- a/utils/scenarios/perf/source/pf/scripts/allconfigs_s17.sh +++ /dev/null @@ -1,70 +0,0 @@ -#! /bin/sh -# -echo -echo ------------------------------------------------------------------ -echo -echo START OF TEST - 203.13 S17 100GB -echo -echo ------------------------------------------------------------------ -echo -echo -echo ------------------------------------------------------------------ -echo executing setLowMem100 script to prep for following stream1_7 tests -echo ------------------------------------------------------------------ -echo -/usr/local/mariadb/columnstore/bin/setLowMem100.sh -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 1Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 1 1 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 1Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 2 1 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 1Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 1 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 1PM 2Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 1 2 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 2PM 2Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 2 2 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo starting 1UM 4PM 2Array stream1_7 100GB i16 2x w/restart -echo ------------------------------------------------------------------ -echo -/home/pf/auto/common/step1.sh 1 4 2 -/home/pf/auto/common/step2.sh 1_7 100 16 2 Y -# -echo -echo ------------------------------------------------------------------ -echo END OF TEST - 203.13 S17 100GB -echo ------------------------------------------------------------------ -echo -# End of script diff --git a/utils/scenarios/perf/source/pf/scripts/time.txt b/utils/scenarios/perf/source/pf/scripts/time.txt deleted file mode 100644 index e69de29bb..000000000 diff --git 
a/utils/scenarios/perf/source/pf/scripts/tpch100_s0.bkup b/utils/scenarios/perf/source/pf/scripts/tpch100_s0.bkup
deleted file mode 100755
index ed3f06466..000000000
--- a/utils/scenarios/perf/source/pf/scripts/tpch100_s0.bkup
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-echo "starting tpch100 stream0 test.........."
-echo
-sid=1
-for q in 14 2 9 20 6 17 18 8 21 13 3 22 16 4 11 15 1 10 19 5 7 12; do
- qq=`printf %02d $q`
- /home/qa/srv/common/script/sendPlanExec.sh $sid 9 /home/qa/srv/tpchtest/sqlplan/tpch100/s0/hex/i16/tpch100_s0_${qq}.hex
- ((sid++))
-done
-echo "tpch100 Stream0 test completed."
-
diff --git a/utils/scenarios/perf/source/pf/scripts/tpch100_s0.sh b/utils/scenarios/perf/source/pf/scripts/tpch100_s0.sh
deleted file mode 100755
index ed3f06466..000000000
--- a/utils/scenarios/perf/source/pf/scripts/tpch100_s0.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-echo "starting tpch100 stream0 test.........."
-echo
-sid=1
-for q in 14 2 9 20 6 17 18 8 21 13 3 22 16 4 11 15 1 10 19 5 7 12; do
- qq=`printf %02d $q`
- /home/qa/srv/common/script/sendPlanExec.sh $sid 9 /home/qa/srv/tpchtest/sqlplan/tpch100/s0/hex/i16/tpch100_s0_${qq}.hex
- ((sid++))
-done
-echo "tpch100 Stream0 test completed."
-
diff --git a/utils/scenarios/perf/sql/1/q2.3.1.1.1.sql b/utils/scenarios/perf/sql/1/q2.3.1.1.1.sql
deleted file mode 100644
index 5ae6c28da..000000000
--- a/utils/scenarios/perf/sql/1/q2.3.1.1.1.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select l_shipdate, l_orderkey, l_partkey from lineitem where l_orderkey < 1000000 order by 1, 2, 3;
diff --git a/utils/scenarios/perf/sql/1/q2.3.1.1.2.sql b/utils/scenarios/perf/sql/1/q2.3.1.1.2.sql
deleted file mode 100644
index 9d4e0343c..000000000
--- a/utils/scenarios/perf/sql/1/q2.3.1.1.2.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select * from lineitem where l_orderkey < 1000000 order by l_orderkey, l_linenumber;
diff --git a/utils/scenarios/perf/sql/101/pmscan.1.sql b/utils/scenarios/perf/sql/101/pmscan.1.sql
deleted file mode 100644
index 694a94c89..000000000
--- a/utils/scenarios/perf/sql/101/pmscan.1.sql
+++ /dev/null
@@ -1 +0,0 @@
-select l_partkey from lineitem where l_partkey between 1000000 and 1001000;
diff --git a/utils/scenarios/perf/sql/101/pmscan.2.sql b/utils/scenarios/perf/sql/101/pmscan.2.sql
deleted file mode 100644
index 694a13b14..000000000
--- a/utils/scenarios/perf/sql/101/pmscan.2.sql
+++ /dev/null
@@ -1 +0,0 @@
-select * from lineitem where l_partkey between 1000000 and 1001000;
diff --git a/utils/scenarios/perf/sql/101/pmscan.3.sql b/utils/scenarios/perf/sql/101/pmscan.3.sql
deleted file mode 100644
index 0f4443608..000000000
--- a/utils/scenarios/perf/sql/101/pmscan.3.sql
+++ /dev/null
@@ -1 +0,0 @@
-select max(l_partkey), count(*) from lineitem where l_partkey <= 20000000;
diff --git a/utils/scenarios/perf/sql/101/pmscan.4.sql b/utils/scenarios/perf/sql/101/pmscan.4.sql
deleted file mode 100644
index d40c3e603..000000000
--- a/utils/scenarios/perf/sql/101/pmscan.4.sql
+++ /dev/null
@@ -1 +0,0 @@
-select min(l_orderkey), max(l_partkey), min(l_suppkey), avg(l_linenumber), sum(l_extendedprice), avg(l_discount), count(l_tax), count(l_shipdate) from lineitem where l_shipdate <= '1992-08-31';
diff --git a/utils/scenarios/perf/sql/102/pmmj.2tbls.1.sql b/utils/scenarios/perf/sql/102/pmmj.2tbls.1.sql
deleted file mode 100644
index 656a71b14..000000000
--- a/utils/scenarios/perf/sql/102/pmmj.2tbls.1.sql
+++ /dev/null
@@ -1,4 +0,0 @@
-select p_partkey from part, lineitem
-where p_partkey = l_partkey and
-p_partkey between 0 and 48500 and p_size between 0 and 1
and -l_partkey between 0 and 200000000 and l_shipdate between '1992-01-01' and '1992-04-09'; diff --git a/utils/scenarios/perf/sql/102/pmmj.2tbls.2.sql b/utils/scenarios/perf/sql/102/pmmj.2tbls.2.sql deleted file mode 100644 index 7dc8d180a..000000000 --- a/utils/scenarios/perf/sql/102/pmmj.2tbls.2.sql +++ /dev/null @@ -1,4 +0,0 @@ -select p_partkey from part, lineitem -where p_partkey = l_partkey and -p_partkey between 0 and 502000 and p_size between 0 and 1 and -l_partkey between 450000 and 200000000 and l_shipdate between '1992-01-01' and '1992-04-09'; diff --git a/utils/scenarios/perf/sql/102/pmmj.3tbls.1.sql b/utils/scenarios/perf/sql/102/pmmj.3tbls.1.sql deleted file mode 100644 index 41685d522..000000000 --- a/utils/scenarios/perf/sql/102/pmmj.3tbls.1.sql +++ /dev/null @@ -1,4 +0,0 @@ -select p_partkey, s_suppkey from part, lineitem, supplier -where p_partkey = l_partkey and l_suppkey = s_suppkey and -p_partkey between 0 and 48500 and p_size between 0 and 1 and -l_partkey between 0 and 200000000 and l_shipdate between '1992-01-01' and '1992-04-09'; diff --git a/utils/scenarios/perf/sql/102/pmmj.3tbls.2.sql b/utils/scenarios/perf/sql/102/pmmj.3tbls.2.sql deleted file mode 100644 index 05b9db052..000000000 --- a/utils/scenarios/perf/sql/102/pmmj.3tbls.2.sql +++ /dev/null @@ -1,4 +0,0 @@ -select p_partkey, s_suppkey from part, lineitem, supplier -where p_partkey = l_partkey and l_suppkey = s_suppkey and -p_partkey between 0 and 502000 and p_size between 0 and 1 and -l_partkey between 450000 and 200000000 and l_shipdate between '1992-01-01' and '1992-04-09'; diff --git a/utils/scenarios/perf/sql/103/aggr.1tbl.avg.sql b/utils/scenarios/perf/sql/103/aggr.1tbl.avg.sql deleted file mode 100644 index 86e71027d..000000000 --- a/utils/scenarios/perf/sql/103/aggr.1tbl.avg.sql +++ /dev/null @@ -1 +0,0 @@ -Select avg(o_totalprice) from orders where o_orderkey < 1000000; diff --git a/utils/scenarios/perf/sql/103/aggr.1tbl.count.num.col.sql b/utils/scenarios/perf/sql/103/aggr.1tbl.count.num.col.sql deleted file mode 100644 index b092c4b9d..000000000 --- a/utils/scenarios/perf/sql/103/aggr.1tbl.count.num.col.sql +++ /dev/null @@ -1 +0,0 @@ -Select count(l_orderkey) from lineitem where l_orderkey < 1000000; diff --git a/utils/scenarios/perf/sql/103/aggr.1tbl.count.str.col.sql b/utils/scenarios/perf/sql/103/aggr.1tbl.count.str.col.sql deleted file mode 100644 index 4fe9429b1..000000000 --- a/utils/scenarios/perf/sql/103/aggr.1tbl.count.str.col.sql +++ /dev/null @@ -1 +0,0 @@ -Select count(l_linestatus) from lineitem where l_orderkey < 1000000; \ No newline at end of file diff --git a/utils/scenarios/perf/sql/103/aggr.1tbl.count.table.sql b/utils/scenarios/perf/sql/103/aggr.1tbl.count.table.sql deleted file mode 100644 index c8a040dab..000000000 --- a/utils/scenarios/perf/sql/103/aggr.1tbl.count.table.sql +++ /dev/null @@ -1 +0,0 @@ -Select count(*) from lineitem where l_orderkey < 1000000; diff --git a/utils/scenarios/perf/sql/103/aggr.1tbl.max.num.sql b/utils/scenarios/perf/sql/103/aggr.1tbl.max.num.sql deleted file mode 100644 index 09cc7a021..000000000 --- a/utils/scenarios/perf/sql/103/aggr.1tbl.max.num.sql +++ /dev/null @@ -1 +0,0 @@ -Select max(l_shipdate) from lineitem where l_orderkey < 1000000; diff --git a/utils/scenarios/perf/sql/103/aggr.1tbl.max.str.sql b/utils/scenarios/perf/sql/103/aggr.1tbl.max.str.sql deleted file mode 100644 index b72398671..000000000 --- a/utils/scenarios/perf/sql/103/aggr.1tbl.max.str.sql +++ /dev/null @@ -1 +0,0 @@ -Select max(o_orderstatus) from orders where 
o_orderkey < 1000000; diff --git a/utils/scenarios/perf/sql/103/aggr.1tbl.min.num.sql b/utils/scenarios/perf/sql/103/aggr.1tbl.min.num.sql deleted file mode 100644 index a4ab2c03d..000000000 --- a/utils/scenarios/perf/sql/103/aggr.1tbl.min.num.sql +++ /dev/null @@ -1 +0,0 @@ -Select min(l_shipdate) from lineitem where l_orderkey < 1000000; diff --git a/utils/scenarios/perf/sql/103/aggr.1tbl.min.str.sql b/utils/scenarios/perf/sql/103/aggr.1tbl.min.str.sql deleted file mode 100644 index 7c9872529..000000000 --- a/utils/scenarios/perf/sql/103/aggr.1tbl.min.str.sql +++ /dev/null @@ -1 +0,0 @@ -Select min(o_orderstatus) from orders where o_orderkey < 1000000; diff --git a/utils/scenarios/perf/sql/103/aggr.1tbl.sum.sql b/utils/scenarios/perf/sql/103/aggr.1tbl.sum.sql deleted file mode 100644 index 94d5ec471..000000000 --- a/utils/scenarios/perf/sql/103/aggr.1tbl.sum.sql +++ /dev/null @@ -1 +0,0 @@ -Select sum(o_totalprice) from orders where o_orderkey < 1000000; diff --git a/utils/scenarios/perf/sql/103/aggr.2tbls.avg.sql b/utils/scenarios/perf/sql/103/aggr.2tbls.avg.sql deleted file mode 100644 index c1ce957f3..000000000 --- a/utils/scenarios/perf/sql/103/aggr.2tbls.avg.sql +++ /dev/null @@ -1 +0,0 @@ -Select avg(o_totalprice), avg(l_extendedprice) from orders, lineitem where o_orderkey = l_orderkey and o_orderkey < 1000000 and l_partkey < 1000000; diff --git a/utils/scenarios/perf/sql/103/aggr.2tbls.count.num.col.1.sql b/utils/scenarios/perf/sql/103/aggr.2tbls.count.num.col.1.sql deleted file mode 100644 index ce2020dab..000000000 --- a/utils/scenarios/perf/sql/103/aggr.2tbls.count.num.col.1.sql +++ /dev/null @@ -1,2 +0,0 @@ -Select count(l_quantity), count(p_size) from lineitem, part where l_partkey = p_partkey and l_orderkey < 1000000 and p_partkey < 1000000; - diff --git a/utils/scenarios/perf/sql/103/aggr.2tbls.count.num.col.2.sql b/utils/scenarios/perf/sql/103/aggr.2tbls.count.num.col.2.sql deleted file mode 100644 index 4062b3073..000000000 --- a/utils/scenarios/perf/sql/103/aggr.2tbls.count.num.col.2.sql +++ /dev/null @@ -1 +0,0 @@ -Select count(l_extendedprice), count(o_totalprice) from lineitem, orders where o_orderkey = l_orderkey and l_orderkey < 1000000 and o_orderkey < 1000000; diff --git a/utils/scenarios/perf/sql/103/aggr.2tbls.count.str.col.sql b/utils/scenarios/perf/sql/103/aggr.2tbls.count.str.col.sql deleted file mode 100644 index cd0fe649b..000000000 --- a/utils/scenarios/perf/sql/103/aggr.2tbls.count.str.col.sql +++ /dev/null @@ -1 +0,0 @@ -Select count(l_linestatus), count(o_orderstatus) from lineitem, orders where o_orderkey = l_orderkey and l_orderkey < 1000000 and o_orderkey < 1000000; diff --git a/utils/scenarios/perf/sql/103/aggr.2tbls.count.table.sql b/utils/scenarios/perf/sql/103/aggr.2tbls.count.table.sql deleted file mode 100644 index c8a040dab..000000000 --- a/utils/scenarios/perf/sql/103/aggr.2tbls.count.table.sql +++ /dev/null @@ -1 +0,0 @@ -Select count(*) from lineitem where l_orderkey < 1000000; diff --git a/utils/scenarios/perf/sql/103/aggr.2tbls.max.num.sql b/utils/scenarios/perf/sql/103/aggr.2tbls.max.num.sql deleted file mode 100644 index 688e3c7b4..000000000 --- a/utils/scenarios/perf/sql/103/aggr.2tbls.max.num.sql +++ /dev/null @@ -1 +0,0 @@ -Select max(l_shipdate), max(o_totalprice) from lineitem, orders where o_orderkey = l_orderkey and o_orderkey < 1000000 and l_partkey < 1000000; diff --git a/utils/scenarios/perf/sql/103/aggr.2tbls.max.str.sql b/utils/scenarios/perf/sql/103/aggr.2tbls.max.str.sql deleted file mode 100644 index 82b24f2af..000000000 
--- a/utils/scenarios/perf/sql/103/aggr.2tbls.max.str.sql +++ /dev/null @@ -1 +0,0 @@ -Select max(o_orderstatus), max(l_shipmode) from orders, lineitem where o_orderkey = l_orderkey and o_orderkey < 1000000 and l_partkey < 1000000; diff --git a/utils/scenarios/perf/sql/103/aggr.2tbls.min.num.sql b/utils/scenarios/perf/sql/103/aggr.2tbls.min.num.sql deleted file mode 100644 index d56a2d48b..000000000 --- a/utils/scenarios/perf/sql/103/aggr.2tbls.min.num.sql +++ /dev/null @@ -1,2 +0,0 @@ -Select min(l_shipdate), min(o_totalprice) from lineitem, orders where o_orderkey = l_orderkey and o_orderkey < 1000000 and l_partkey < 1000000; - diff --git a/utils/scenarios/perf/sql/103/aggr.2tbls.min.str.sql b/utils/scenarios/perf/sql/103/aggr.2tbls.min.str.sql deleted file mode 100644 index ce63a4b26..000000000 --- a/utils/scenarios/perf/sql/103/aggr.2tbls.min.str.sql +++ /dev/null @@ -1,2 +0,0 @@ -Select min(o_orderstatus), min(l_shipmode) from orders, lineitem where o_orderkey = l_orderkey and o_orderkey < 1000000 and l_partkey < 1000000; - diff --git a/utils/scenarios/perf/sql/103/aggr.2tbls.sum.sql b/utils/scenarios/perf/sql/103/aggr.2tbls.sum.sql deleted file mode 100644 index 7281560a3..000000000 --- a/utils/scenarios/perf/sql/103/aggr.2tbls.sum.sql +++ /dev/null @@ -1 +0,0 @@ -Select sum(o_totalprice), sum(l_extendedprice) from orders, lineitem where o_orderkey < 1000000 and l_partkey < 1000000 and o_orderkey = l_orderkey; diff --git a/utils/scenarios/perf/sql/104/aj.pmmj.1.sql b/utils/scenarios/perf/sql/104/aj.pmmj.1.sql deleted file mode 100644 index 27d3358d4..000000000 --- a/utils/scenarios/perf/sql/104/aj.pmmj.1.sql +++ /dev/null @@ -1,4 +0,0 @@ -Select min(p_partkey), max(s_suppkey) from part, lineitem, supplier -where p_partkey = l_partkey and l_suppkey = s_suppkey and -p_partkey between 0 and 48500 and p_size between 0 and 1 and -l_partkey between 0 and 200000000 and l_shipdate between '1992-01-01' and '1992-04-09'; diff --git a/utils/scenarios/perf/sql/104/aj.pmmj.2.sql b/utils/scenarios/perf/sql/104/aj.pmmj.2.sql deleted file mode 100644 index 648c61d1d..000000000 --- a/utils/scenarios/perf/sql/104/aj.pmmj.2.sql +++ /dev/null @@ -1,4 +0,0 @@ -select max(p_partkey), min(s_suppkey) from part, lineitem, supplier -where p_partkey = l_partkey and l_suppkey = s_suppkey and -p_partkey between 0 and 502000 and p_size between 0 and 1 and -l_partkey between 450000 and 200000000 and l_shipdate between '1992-01-01' and '1992-04-09'; diff --git a/utils/scenarios/perf/sql/104/aj.ummj.1.sql b/utils/scenarios/perf/sql/104/aj.ummj.1.sql deleted file mode 100644 index 15ac9c157..000000000 --- a/utils/scenarios/perf/sql/104/aj.ummj.1.sql +++ /dev/null @@ -1,4 +0,0 @@ -select min(p_partkey), max(s_suppkey) from part, lineitem, supplier -where p_partkey = l_partkey and l_suppkey = s_suppkey and -p_partkey between 0 and 5020000 and p_size between 0 and 1 and -l_partkey between 4965000 and 200000000 and l_shipdate between '1992-01-01' and '1992-04-10'; diff --git a/utils/scenarios/perf/sql/104/aj.ummj.2.sql b/utils/scenarios/perf/sql/104/aj.ummj.2.sql deleted file mode 100644 index 10fc0bc08..000000000 --- a/utils/scenarios/perf/sql/104/aj.ummj.2.sql +++ /dev/null @@ -1,4 +0,0 @@ -select max(p_partkey), min(s_suppkey) from part, lineitem, supplier -where p_partkey = l_partkey and l_suppkey = s_suppkey and -p_partkey between 0 and 50020000 and p_size between 0 and 1 and -l_partkey between 49975000 and 200000000 and l_shipdate between '1992-01-01' and '1992-04-24'; diff --git 
a/utils/scenarios/perf/sql/105/ummj.2tbls.1.sql b/utils/scenarios/perf/sql/105/ummj.2tbls.1.sql
deleted file mode 100644
index 2832c9612..000000000
--- a/utils/scenarios/perf/sql/105/ummj.2tbls.1.sql
+++ /dev/null
@@ -1,4 +0,0 @@
-select p_partkey from part, lineitem
-where p_partkey = l_partkey and
-p_partkey between 0 and 5020000 and p_size between 0 and 1 and
-l_partkey between 4965000 and 200000000 and l_shipdate between '1992-01-01' and '1992-04-10';
diff --git a/utils/scenarios/perf/sql/105/ummj.2tbls.2.sql b/utils/scenarios/perf/sql/105/ummj.2tbls.2.sql
deleted file mode 100644
index 5771c2586..000000000
--- a/utils/scenarios/perf/sql/105/ummj.2tbls.2.sql
+++ /dev/null
@@ -1,4 +0,0 @@
-select p_partkey from part, lineitem
-where p_partkey = l_partkey and
-p_partkey between 0 and 50020000 and p_size between 0 and 1 and
-l_partkey between 49975000 and 200000000 and l_shipdate between '1992-01-01' and '1992-04-24';
diff --git a/utils/scenarios/perf/sql/105/ummj.3tbls.1.sql b/utils/scenarios/perf/sql/105/ummj.3tbls.1.sql
deleted file mode 100644
index 91541ab3d..000000000
--- a/utils/scenarios/perf/sql/105/ummj.3tbls.1.sql
+++ /dev/null
@@ -1,5 +0,0 @@
-select p_partkey, s_suppkey from part, lineitem, supplier
-where p_partkey = l_partkey and l_suppkey = s_suppkey and
-p_partkey between 0 and 5020000 and p_size between 0 and 1 and
-l_partkey between 4965000 and 200000000 and l_shipdate between '1992-01-01' and '1992-04-10';
-
diff --git a/utils/scenarios/perf/sql/105/ummj.3tbls.2.sql b/utils/scenarios/perf/sql/105/ummj.3tbls.2.sql
deleted file mode 100644
index 043d42973..000000000
--- a/utils/scenarios/perf/sql/105/ummj.3tbls.2.sql
+++ /dev/null
@@ -1,5 +0,0 @@
-select p_partkey, s_suppkey from part, lineitem, supplier
-where p_partkey = l_partkey and l_suppkey = s_suppkey and
-p_partkey between 0 and 50020000 and p_size between 0 and 1 and
-l_partkey between 49975000 and 200000000 and l_shipdate between '1992-01-01' and '1992-04-24';
-
diff --git a/utils/scenarios/perf/sql/106/dmpp.1tbl.groupby.1.sql b/utils/scenarios/perf/sql/106/dmpp.1tbl.groupby.1.sql
deleted file mode 100644
index 582fd23bf..000000000
--- a/utils/scenarios/perf/sql/106/dmpp.1tbl.groupby.1.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select l_quantity, count(*) from lineitem where l_orderkey < 1000000 group by l_quantity;
diff --git a/utils/scenarios/perf/sql/106/dmpp.1tbl.groupby.2.sql b/utils/scenarios/perf/sql/106/dmpp.1tbl.groupby.2.sql
deleted file mode 100644
index 321b66697..000000000
--- a/utils/scenarios/perf/sql/106/dmpp.1tbl.groupby.2.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select l_shipmode, max(l_shipdate), min(l_shipdate) from lineitem where l_orderkey < 1000000 group by l_shipmode;
diff --git a/utils/scenarios/perf/sql/106/dmpp.1tbl.groupby.orderby.num.1.sql b/utils/scenarios/perf/sql/106/dmpp.1tbl.groupby.orderby.num.1.sql
deleted file mode 100644
index 503faa7d7..000000000
--- a/utils/scenarios/perf/sql/106/dmpp.1tbl.groupby.orderby.num.1.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select l_quantity, count(*) from lineitem where l_orderkey < 1000000 group by l_quantity order by l_quantity;
diff --git a/utils/scenarios/perf/sql/106/dmpp.1tbl.groupby.orderby.num.2.sql b/utils/scenarios/perf/sql/106/dmpp.1tbl.groupby.orderby.num.2.sql
deleted file mode 100644
index 95485cbf1..000000000
--- a/utils/scenarios/perf/sql/106/dmpp.1tbl.groupby.orderby.num.2.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select l_shipmode, max(l_shipdate), min(l_shipdate) from lineitem where l_orderkey < 1000000 group by l_shipmode order by 1;
diff --git a/utils/scenarios/perf/sql/106/dmpp.1tbl.groupby.orderby.num.str.sql b/utils/scenarios/perf/sql/106/dmpp.1tbl.groupby.orderby.num.str.sql
deleted file mode 100644
index 5b547984e..000000000
--- a/utils/scenarios/perf/sql/106/dmpp.1tbl.groupby.orderby.num.str.sql
+++ /dev/null
@@ -1,17 +0,0 @@
-SELECT
-L_RETURNFLAG,
-L_LINESTATUS,
-SUM(L_QUANTITY) AS SUM_QTY,
-SUM(L_EXTENDEDPRICE) AS SUM_BASE_PRICE,
-AVG(L_QUANTITY) AS AVG_QTY,
-AVG(L_EXTENDEDPRICE) AS AVG_PRICE,
-AVG(L_DISCOUNT) AS AVG_DISC,
-COUNT(*) AS COUNT_ORDER
-FROM
-LINEITEM
-WHERE
-L_SHIPDATE <= '1998-09-26'
-GROUP BY
-L_RETURNFLAG,
-L_LINESTATUS
-ORDER BY L_RETURNFLAG, L_LINESTATUS;
diff --git a/utils/scenarios/perf/sql/106/dmpp.1tbl.groupby.orderby.str.sql b/utils/scenarios/perf/sql/106/dmpp.1tbl.groupby.orderby.str.sql
deleted file mode 100644
index 80a788e34..000000000
--- a/utils/scenarios/perf/sql/106/dmpp.1tbl.groupby.orderby.str.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select o_orderpriority, min(o_orderstatus), max(o_orderstatus) from orders where o_orderkey < 1000000 group by o_orderpriority order by o_orderpriority;
diff --git a/utils/scenarios/perf/sql/106/dmpp.1tbl.orderby.1.sql b/utils/scenarios/perf/sql/106/dmpp.1tbl.orderby.1.sql
deleted file mode 100644
index 5c3b00a46..000000000
--- a/utils/scenarios/perf/sql/106/dmpp.1tbl.orderby.1.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select l_quantity from lineitem where l_orderkey < 1000000 order by l_quantity;
diff --git a/utils/scenarios/perf/sql/106/dmpp.1tbl.orderby.2.sql b/utils/scenarios/perf/sql/106/dmpp.1tbl.orderby.2.sql
deleted file mode 100644
index f5422cc59..000000000
--- a/utils/scenarios/perf/sql/106/dmpp.1tbl.orderby.2.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select l_shipmode from lineitem where l_orderkey < 1000000 order by 1;
diff --git a/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.1.sql b/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.1.sql
deleted file mode 100644
index 0743ad637..000000000
--- a/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.1.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select count(l_quantity), count(p_size) from lineitem, part where l_partkey = p_partkey and l_orderkey < 1000000 group by l_quantity;
diff --git a/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.2.sql b/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.2.sql
deleted file mode 100644
index 5735ecfab..000000000
--- a/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.2.sql
+++ /dev/null
@@ -1,2 +0,0 @@
-Select l_shipmode, max(l_shipdate), min(l_shipdate), max(o_orderdate), min(o_orderdate) from lineitem, orders where o_orderkey = l_orderkey and l_orderkey < 1000000 group by l_shipmode;
-
diff --git a/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.orderby.num.1.sql b/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.orderby.num.1.sql
deleted file mode 100644
index 6605fdc78..000000000
--- a/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.orderby.num.1.sql
+++ /dev/null
@@ -1,2 +0,0 @@
-Select l_quantity, max(p_retailprice) from lineitem, part where l_orderkey < 1000000 and p_partkey = l_orderkey group by l_quantity order by l_quantity;
-
diff --git a/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.orderby.num.1.sql.orig b/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.orderby.num.1.sql.orig
deleted file mode 100644
index 53e4da7e9..000000000
--- a/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.orderby.num.1.sql.orig
+++ /dev/null
@@ -1,2 +0,0 @@
-Select l_quantity, max(p_retailprice) from lineitem, part where l_orderkey < 1000000 and l_orderkey = p_partkey group by l_quantity order by l_quantity;
-
diff
--git a/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.orderby.num.2.sql b/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.orderby.num.2.sql deleted file mode 100644 index e533e44d9..000000000 --- a/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.orderby.num.2.sql +++ /dev/null @@ -1 +0,0 @@ -Select l_shipmode, max(l_shipdate), min(l_shipdate), max(p_size) from lineitem, part where l_orderkey < 1000000 and p_partkey = l_orderkey group by l_shipmode order by 1; diff --git a/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.orderby.num.2.sql.orig b/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.orderby.num.2.sql.orig deleted file mode 100644 index b25613a24..000000000 --- a/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.orderby.num.2.sql.orig +++ /dev/null @@ -1 +0,0 @@ -Select l_shipmode, max(l_shipdate), min(l_shipdate), max(p_size) from lineitem, part where l_orderkey < 1000000 and l_orderkey = p_partkey group by l_shipmode order by 1; diff --git a/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.orderby.str.sql b/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.orderby.str.sql deleted file mode 100644 index 43eccff64..000000000 --- a/utils/scenarios/perf/sql/106/dmpp.2tbls.groupby.orderby.str.sql +++ /dev/null @@ -1,7 +0,0 @@ -select o_orderpriority, min(o_orderstatus), max(o_orderstatus), count(l_orderkey) -from orders, lineitem -where o_orderkey < 1000000 -and o_orderkey = l_orderkey -group by o_orderpriority -order by o_orderpriority; - diff --git a/utils/scenarios/perf/sql/106/dmpp.2tbls.orderby.1.sql b/utils/scenarios/perf/sql/106/dmpp.2tbls.orderby.1.sql deleted file mode 100644 index ec22063db..000000000 --- a/utils/scenarios/perf/sql/106/dmpp.2tbls.orderby.1.sql +++ /dev/null @@ -1,2 +0,0 @@ -Select l_quantity, p_size from lineitem, part where p_partkey = l_partkey and l_orderkey < 1000000 order by l_quantity; - diff --git a/utils/scenarios/perf/sql/106/dmpp.2tbls.orderby.2.sql b/utils/scenarios/perf/sql/106/dmpp.2tbls.orderby.2.sql deleted file mode 100644 index 70e019c86..000000000 --- a/utils/scenarios/perf/sql/106/dmpp.2tbls.orderby.2.sql +++ /dev/null @@ -1,2 +0,0 @@ -Select l_shipmode, o_shippriority from lineitem, orders where o_orderkey = l_orderkey and l_orderkey < 1000000 order by 1; - diff --git a/utils/scenarios/perf/sql/107/q107.1.sql b/utils/scenarios/perf/sql/107/q107.1.sql deleted file mode 100644 index 5890ada01..000000000 --- a/utils/scenarios/perf/sql/107/q107.1.sql +++ /dev/null @@ -1 +0,0 @@ -select count(*) from orders where o_orderdate > '1997-08-01' and o_totalprice < 1365 ; diff --git a/utils/scenarios/perf/sql/107/q107.10.sql b/utils/scenarios/perf/sql/107/q107.10.sql deleted file mode 100644 index 22a0b355a..000000000 --- a/utils/scenarios/perf/sql/107/q107.10.sql +++ /dev/null @@ -1,8 +0,0 @@ -select o_orderpriority, max(l_shipdate), avg(o_totalprice), count(*) -from orders, lineitem -where o_orderdate > '1997-08-01' and o_totalprice < 1365 - and o_orderkey = l_orderkey -and l_shipdate > '1997-08-01' and l_suppkey < 6000000 -group by o_orderpriority -order by o_orderpriority; - diff --git a/utils/scenarios/perf/sql/107/q107.11.sql b/utils/scenarios/perf/sql/107/q107.11.sql deleted file mode 100644 index 071673f43..000000000 --- a/utils/scenarios/perf/sql/107/q107.11.sql +++ /dev/null @@ -1 +0,0 @@ -select count(*) '~800 Million Items' from lineitem where l_shipdate > '1997-08-01' and l_suppkey < 8000000; diff --git a/utils/scenarios/perf/sql/107/q107.12.sql b/utils/scenarios/perf/sql/107/q107.12.sql deleted file mode 100644 index 0259fe526..000000000 
--- a/utils/scenarios/perf/sql/107/q107.12.sql +++ /dev/null @@ -1,8 +0,0 @@ -select o_orderpriority, max(l_shipdate), avg(o_totalprice), count(*) -from orders, lineitem -where o_orderdate > '1997-08-01' and o_totalprice < 1365 - and o_orderkey = l_orderkey -and l_shipdate > '1997-08-01' and l_suppkey < 8000000 -group by o_orderpriority -order by o_orderpriority; - diff --git a/utils/scenarios/perf/sql/107/q107.13.sql b/utils/scenarios/perf/sql/107/q107.13.sql deleted file mode 100644 index 0259fe526..000000000 --- a/utils/scenarios/perf/sql/107/q107.13.sql +++ /dev/null @@ -1,8 +0,0 @@ -select o_orderpriority, max(l_shipdate), avg(o_totalprice), count(*) -from orders, lineitem -where o_orderdate > '1997-08-01' and o_totalprice < 1365 - and o_orderkey = l_orderkey -and l_shipdate > '1997-08-01' and l_suppkey < 8000000 -group by o_orderpriority -order by o_orderpriority; - diff --git a/utils/scenarios/perf/sql/107/q107.14.sql b/utils/scenarios/perf/sql/107/q107.14.sql deleted file mode 100644 index 8b71b1d43..000000000 --- a/utils/scenarios/perf/sql/107/q107.14.sql +++ /dev/null @@ -1 +0,0 @@ - select count(*) '~1 Billion Items' from lineitem where l_shipdate > '1997-08-01' and l_suppkey < 10000000; diff --git a/utils/scenarios/perf/sql/107/q107.15.sql b/utils/scenarios/perf/sql/107/q107.15.sql deleted file mode 100644 index 33fa40c3b..000000000 --- a/utils/scenarios/perf/sql/107/q107.15.sql +++ /dev/null @@ -1,8 +0,0 @@ -select o_orderpriority, max(l_shipdate), avg(o_totalprice), count(*) -from orders, lineitem -where o_orderdate > '1997-08-01' and o_totalprice < 1365 - and o_orderkey = l_orderkey -and l_shipdate > '1997-08-01' and l_suppkey < 10000000 -group by o_orderpriority -order by o_orderpriority; - diff --git a/utils/scenarios/perf/sql/107/q107.16.sql b/utils/scenarios/perf/sql/107/q107.16.sql deleted file mode 100644 index 0bb4032de..000000000 --- a/utils/scenarios/perf/sql/107/q107.16.sql +++ /dev/null @@ -1,8 +0,0 @@ -select o_orderpriority, max(l_shipdate), avg(o_totalprice), count(*) -from orders, lineitem -where o_orderdate > '1997-08-01' and o_totalprice < 1365 - and o_orderkey = l_orderkey -and l_shipdate > '1997-08-01' and l_suppkey < 10000000 -group by o_orderpriority -order by o_orderpriority; -select calflushcache(); diff --git a/utils/scenarios/perf/sql/107/q107.2.sql b/utils/scenarios/perf/sql/107/q107.2.sql deleted file mode 100644 index 5b2cd7b56..000000000 --- a/utils/scenarios/perf/sql/107/q107.2.sql +++ /dev/null @@ -1 +0,0 @@ -select count(*) '~200 Million Items' from lineitem where l_shipdate > '1997-08-01' and l_suppkey < 2000000; diff --git a/utils/scenarios/perf/sql/107/q107.3.sql b/utils/scenarios/perf/sql/107/q107.3.sql deleted file mode 100644 index 709ae60f8..000000000 --- a/utils/scenarios/perf/sql/107/q107.3.sql +++ /dev/null @@ -1,8 +0,0 @@ -select o_orderpriority, max(l_shipdate), avg(o_totalprice), count(*) -from orders, lineitem -where o_orderdate > '1997-08-01' and o_totalprice < 1365 - and o_orderkey = l_orderkey -and l_shipdate > '1997-08-01' and l_suppkey < 2000000 -group by o_orderpriority -order by o_orderpriority; - diff --git a/utils/scenarios/perf/sql/107/q107.4.sql b/utils/scenarios/perf/sql/107/q107.4.sql deleted file mode 100644 index 709ae60f8..000000000 --- a/utils/scenarios/perf/sql/107/q107.4.sql +++ /dev/null @@ -1,8 +0,0 @@ -select o_orderpriority, max(l_shipdate), avg(o_totalprice), count(*) -from orders, lineitem -where o_orderdate > '1997-08-01' and o_totalprice < 1365 - and o_orderkey = l_orderkey -and l_shipdate > 
'1997-08-01' and l_suppkey < 2000000 -group by o_orderpriority -order by o_orderpriority; - diff --git a/utils/scenarios/perf/sql/107/q107.5.sql b/utils/scenarios/perf/sql/107/q107.5.sql deleted file mode 100644 index 6345d1e9b..000000000 --- a/utils/scenarios/perf/sql/107/q107.5.sql +++ /dev/null @@ -1 +0,0 @@ - select count(*) '~400 Million Items' from lineitem where l_shipdate > '1997-08-01' and l_suppkey < 4000000; diff --git a/utils/scenarios/perf/sql/107/q107.6.sql b/utils/scenarios/perf/sql/107/q107.6.sql deleted file mode 100644 index bb35fac93..000000000 --- a/utils/scenarios/perf/sql/107/q107.6.sql +++ /dev/null @@ -1,8 +0,0 @@ -select o_orderpriority, max(l_shipdate), avg(o_totalprice), count(*) -from orders, lineitem -where o_orderdate > '1997-08-01' and o_totalprice < 1365 - and o_orderkey = l_orderkey -and l_shipdate > '1997-08-01' and l_suppkey < 4000000 -group by o_orderpriority -order by o_orderpriority; - diff --git a/utils/scenarios/perf/sql/107/q107.7.sql b/utils/scenarios/perf/sql/107/q107.7.sql deleted file mode 100644 index bb35fac93..000000000 --- a/utils/scenarios/perf/sql/107/q107.7.sql +++ /dev/null @@ -1,8 +0,0 @@ -select o_orderpriority, max(l_shipdate), avg(o_totalprice), count(*) -from orders, lineitem -where o_orderdate > '1997-08-01' and o_totalprice < 1365 - and o_orderkey = l_orderkey -and l_shipdate > '1997-08-01' and l_suppkey < 4000000 -group by o_orderpriority -order by o_orderpriority; - diff --git a/utils/scenarios/perf/sql/107/q107.8.sql b/utils/scenarios/perf/sql/107/q107.8.sql deleted file mode 100644 index 8ddbf4947..000000000 --- a/utils/scenarios/perf/sql/107/q107.8.sql +++ /dev/null @@ -1,2 +0,0 @@ - select count(*) '~600 Million Items' from lineitem where l_shipdate > '1997-08-01' and l_suppkey < 6000000; - diff --git a/utils/scenarios/perf/sql/107/q107.9.sql b/utils/scenarios/perf/sql/107/q107.9.sql deleted file mode 100644 index 22a0b355a..000000000 --- a/utils/scenarios/perf/sql/107/q107.9.sql +++ /dev/null @@ -1,8 +0,0 @@ -select o_orderpriority, max(l_shipdate), avg(o_totalprice), count(*) -from orders, lineitem -where o_orderdate > '1997-08-01' and o_totalprice < 1365 - and o_orderkey = l_orderkey -and l_shipdate > '1997-08-01' and l_suppkey < 6000000 -group by o_orderpriority -order by o_orderpriority; - diff --git a/utils/scenarios/perf/sql/108/q108.2.sql b/utils/scenarios/perf/sql/108/q108.2.sql deleted file mode 100644 index 00a0560d4..000000000 --- a/utils/scenarios/perf/sql/108/q108.2.sql +++ /dev/null @@ -1,6 +0,0 @@ -SELECT avg(L_EXTENDEDPRICE), avg(L_DISCOUNT) -FROM lineitem -WHERE L_SHIPDATE between '1997-01-01' and '1997-12-31' and - L_DISCOUNT BETWEEN 0.02 AND 0.04 AND - L_QUANTITY < 25; - diff --git a/utils/scenarios/perf/sql/108/q108.3.sql b/utils/scenarios/perf/sql/108/q108.3.sql deleted file mode 100644 index 00a0560d4..000000000 --- a/utils/scenarios/perf/sql/108/q108.3.sql +++ /dev/null @@ -1,6 +0,0 @@ -SELECT avg(L_EXTENDEDPRICE), avg(L_DISCOUNT) -FROM lineitem -WHERE L_SHIPDATE between '1997-01-01' and '1997-12-31' and - L_DISCOUNT BETWEEN 0.02 AND 0.04 AND - L_QUANTITY < 25; - diff --git a/utils/scenarios/perf/sql/109/q109.1.sql b/utils/scenarios/perf/sql/109/q109.1.sql deleted file mode 100644 index e1eb4404e..000000000 --- a/utils/scenarios/perf/sql/109/q109.1.sql +++ /dev/null @@ -1,2 +0,0 @@ -select calflushcache(); - select count(*) from part where p_size = 50 and p_retailprice < 1250; diff --git a/utils/scenarios/perf/sql/109/q109.10.sql b/utils/scenarios/perf/sql/109/q109.10.sql deleted file mode 100644 
index f3e9600e2..000000000 --- a/utils/scenarios/perf/sql/109/q109.10.sql +++ /dev/null @@ -1,8 +0,0 @@ -select p_mfgr, count(*), avg(ps_availqty), avg(p_retailprice), avg(ps_supplycost) -from part, partsupp -where p_size = 50 and p_retailprice < 1250 - and ps_partkey = p_partkey -and ps_suppkey <= 7500000 -group by p_mfgr -order by p_mfgr; - diff --git a/utils/scenarios/perf/sql/109/q109.11.sql b/utils/scenarios/perf/sql/109/q109.11.sql deleted file mode 100644 index 0956b28de..000000000 --- a/utils/scenarios/perf/sql/109/q109.11.sql +++ /dev/null @@ -1,3 +0,0 @@ -select calflushcache(); - select count(*) '~800 Million Parts' from partsupp where ps_suppkey <= 10000000; - diff --git a/utils/scenarios/perf/sql/109/q109.12.sql b/utils/scenarios/perf/sql/109/q109.12.sql deleted file mode 100644 index 0fe882368..000000000 --- a/utils/scenarios/perf/sql/109/q109.12.sql +++ /dev/null @@ -1,8 +0,0 @@ -select p_mfgr, count(*), avg(ps_availqty), avg(p_retailprice), avg(ps_supplycost) -from part, partsupp -where p_size = 50 and p_retailprice < 1250 - and ps_partkey = p_partkey -and ps_suppkey <= 10000000 -group by p_mfgr -order by p_mfgr; - diff --git a/utils/scenarios/perf/sql/109/q109.13.sql b/utils/scenarios/perf/sql/109/q109.13.sql deleted file mode 100644 index 1f78b15ce..000000000 --- a/utils/scenarios/perf/sql/109/q109.13.sql +++ /dev/null @@ -1,8 +0,0 @@ - select p_mfgr, count(*), avg(ps_availqty), avg(p_retailprice), avg(ps_supplycost) -from part, partsupp -where p_size = 50 and p_retailprice < 1250 - and ps_partkey = p_partkey -and ps_suppkey <= 10000000 -group by p_mfgr -order by p_mfgr; - diff --git a/utils/scenarios/perf/sql/109/q109.2.sql b/utils/scenarios/perf/sql/109/q109.2.sql deleted file mode 100644 index 781284ac0..000000000 --- a/utils/scenarios/perf/sql/109/q109.2.sql +++ /dev/null @@ -1,3 +0,0 @@ -select calflushcache(); -select count(*) '~200 Million Parts' from partsupp where ps_suppkey <= 2500000; - diff --git a/utils/scenarios/perf/sql/109/q109.3.sql b/utils/scenarios/perf/sql/109/q109.3.sql deleted file mode 100644 index d61e110fb..000000000 --- a/utils/scenarios/perf/sql/109/q109.3.sql +++ /dev/null @@ -1,8 +0,0 @@ -select p_mfgr, count(*), avg(ps_availqty), avg(p_retailprice), avg(ps_supplycost) -from part, partsupp -where p_size = 50 and p_retailprice < 1250 - and ps_partkey = p_partkey -and ps_suppkey <= 2500000 -group by p_mfgr -order by p_mfgr; - diff --git a/utils/scenarios/perf/sql/109/q109.4.sql b/utils/scenarios/perf/sql/109/q109.4.sql deleted file mode 100644 index d61e110fb..000000000 --- a/utils/scenarios/perf/sql/109/q109.4.sql +++ /dev/null @@ -1,8 +0,0 @@ -select p_mfgr, count(*), avg(ps_availqty), avg(p_retailprice), avg(ps_supplycost) -from part, partsupp -where p_size = 50 and p_retailprice < 1250 - and ps_partkey = p_partkey -and ps_suppkey <= 2500000 -group by p_mfgr -order by p_mfgr; - diff --git a/utils/scenarios/perf/sql/109/q109.5.sql b/utils/scenarios/perf/sql/109/q109.5.sql deleted file mode 100644 index da456cc7e..000000000 --- a/utils/scenarios/perf/sql/109/q109.5.sql +++ /dev/null @@ -1,3 +0,0 @@ -select calflushcache(); - select count(*) '~400 Million Parts' from partsupp where ps_suppkey <= 5000000; - diff --git a/utils/scenarios/perf/sql/109/q109.6.sql b/utils/scenarios/perf/sql/109/q109.6.sql deleted file mode 100644 index f279448ca..000000000 --- a/utils/scenarios/perf/sql/109/q109.6.sql +++ /dev/null @@ -1,8 +0,0 @@ -select p_mfgr, count(*), avg(ps_availqty), avg(p_retailprice), avg(ps_supplycost) -from part, partsupp -where p_size = 50 
and p_retailprice < 1250 - and ps_partkey = p_partkey -and ps_suppkey <= 5000000 -group by p_mfgr -order by p_mfgr; - diff --git a/utils/scenarios/perf/sql/109/q109.7.sql b/utils/scenarios/perf/sql/109/q109.7.sql deleted file mode 100644 index f279448ca..000000000 --- a/utils/scenarios/perf/sql/109/q109.7.sql +++ /dev/null @@ -1,8 +0,0 @@ -select p_mfgr, count(*), avg(ps_availqty), avg(p_retailprice), avg(ps_supplycost) -from part, partsupp -where p_size = 50 and p_retailprice < 1250 - and ps_partkey = p_partkey -and ps_suppkey <= 5000000 -group by p_mfgr -order by p_mfgr; - diff --git a/utils/scenarios/perf/sql/109/q109.8.sql b/utils/scenarios/perf/sql/109/q109.8.sql deleted file mode 100644 index 5b5fb3872..000000000 --- a/utils/scenarios/perf/sql/109/q109.8.sql +++ /dev/null @@ -1,3 +0,0 @@ -select calflushcache(); - select count(*) '~600 Million Parts' from partsupp where ps_suppkey <= 7500000; - diff --git a/utils/scenarios/perf/sql/109/q109.9.sql b/utils/scenarios/perf/sql/109/q109.9.sql deleted file mode 100644 index f3e9600e2..000000000 --- a/utils/scenarios/perf/sql/109/q109.9.sql +++ /dev/null @@ -1,8 +0,0 @@ -select p_mfgr, count(*), avg(ps_availqty), avg(p_retailprice), avg(ps_supplycost) -from part, partsupp -where p_size = 50 and p_retailprice < 1250 - and ps_partkey = p_partkey -and ps_suppkey <= 7500000 -group by p_mfgr -order by p_mfgr; - diff --git a/utils/scenarios/perf/sql/110/q110.1.sql b/utils/scenarios/perf/sql/110/q110.1.sql deleted file mode 100644 index a034947bd..000000000 --- a/utils/scenarios/perf/sql/110/q110.1.sql +++ /dev/null @@ -1,2 +0,0 @@ -select calflushcache(); -select count(*) 'Small Side Join Count' from customer where c_acctbal > 9963 and c_nationkey < 5; diff --git a/utils/scenarios/perf/sql/110/q110.10.sql b/utils/scenarios/perf/sql/110/q110.10.sql deleted file mode 100644 index 41ee5338d..000000000 --- a/utils/scenarios/perf/sql/110/q110.10.sql +++ /dev/null @@ -1,9 +0,0 @@ - -select c_nationkey, count(*), sum(o_totalprice) Revenue, avg(c_acctbal) -from customer, orders -where c_acctbal > 9963 and c_nationkey < 5 -and o_custkey = c_custkey -and o_orderdate <= '1993-04-26' -group by c_nationkey -order by 1; - diff --git a/utils/scenarios/perf/sql/110/q110.11.sql b/utils/scenarios/perf/sql/110/q110.11.sql deleted file mode 100644 index 990272fe5..000000000 --- a/utils/scenarios/perf/sql/110/q110.11.sql +++ /dev/null @@ -1,3 +0,0 @@ -select calflushcache(); - select count(*) '400 Million Orders' from orders where o_orderdate <= '1993-10-03'; - diff --git a/utils/scenarios/perf/sql/110/q110.12.sql b/utils/scenarios/perf/sql/110/q110.12.sql deleted file mode 100644 index 635e1b99e..000000000 --- a/utils/scenarios/perf/sql/110/q110.12.sql +++ /dev/null @@ -1,8 +0,0 @@ -select c_nationkey, count(*), sum(o_totalprice) Revenue, avg(c_acctbal) -from customer, orders -where c_acctbal > 9963 and c_nationkey < 5 -and o_custkey = c_custkey -and o_orderdate <= '1993-10-03' -group by c_nationkey -order by 1; - diff --git a/utils/scenarios/perf/sql/110/q110.13.sql b/utils/scenarios/perf/sql/110/q110.13.sql deleted file mode 100644 index 635e1b99e..000000000 --- a/utils/scenarios/perf/sql/110/q110.13.sql +++ /dev/null @@ -1,8 +0,0 @@ -select c_nationkey, count(*), sum(o_totalprice) Revenue, avg(c_acctbal) -from customer, orders -where c_acctbal > 9963 and c_nationkey < 5 -and o_custkey = c_custkey -and o_orderdate <= '1993-10-03' -group by c_nationkey -order by 1; - diff --git a/utils/scenarios/perf/sql/110/q110.14.sql b/utils/scenarios/perf/sql/110/q110.14.sql 
deleted file mode 100644 index 25c03524f..000000000 --- a/utils/scenarios/perf/sql/110/q110.14.sql +++ /dev/null @@ -1,3 +0,0 @@ -select calflushcache(); -select count(*) '500 Million Orders' from orders where o_orderdate <= '1994-03-13'; - diff --git a/utils/scenarios/perf/sql/110/q110.15.sql b/utils/scenarios/perf/sql/110/q110.15.sql deleted file mode 100644 index cc225cb32..000000000 --- a/utils/scenarios/perf/sql/110/q110.15.sql +++ /dev/null @@ -1,8 +0,0 @@ -select c_nationkey, count(*), sum(o_totalprice) Revenue, avg(c_acctbal) -from customer, orders -where c_acctbal > 9963 and c_nationkey < 5 -and o_custkey = c_custkey -and o_orderdate <= '1994-03-13' -group by c_nationkey -order by 1; - diff --git a/utils/scenarios/perf/sql/110/q110.16.sql b/utils/scenarios/perf/sql/110/q110.16.sql deleted file mode 100644 index cc225cb32..000000000 --- a/utils/scenarios/perf/sql/110/q110.16.sql +++ /dev/null @@ -1,8 +0,0 @@ -select c_nationkey, count(*), sum(o_totalprice) Revenue, avg(c_acctbal) -from customer, orders -where c_acctbal > 9963 and c_nationkey < 5 -and o_custkey = c_custkey -and o_orderdate <= '1994-03-13' -group by c_nationkey -order by 1; - diff --git a/utils/scenarios/perf/sql/110/q110.2.sql b/utils/scenarios/perf/sql/110/q110.2.sql deleted file mode 100644 index 94f54b1dc..000000000 --- a/utils/scenarios/perf/sql/110/q110.2.sql +++ /dev/null @@ -1,3 +0,0 @@ -select calflushcache(); - select count(*) '100 Million Orders' from orders where o_orderdate <= '1992-06-09'; - diff --git a/utils/scenarios/perf/sql/110/q110.3.sql b/utils/scenarios/perf/sql/110/q110.3.sql deleted file mode 100644 index 8b9afc872..000000000 --- a/utils/scenarios/perf/sql/110/q110.3.sql +++ /dev/null @@ -1,8 +0,0 @@ -select c_nationkey, count(*), sum(o_totalprice) Revenue, avg(c_acctbal) -from customer, orders -where c_acctbal > 9963 and c_nationkey < 5 -and o_custkey = c_custkey -and o_orderdate <= '1992-06-09' -group by c_nationkey -order by 1; - diff --git a/utils/scenarios/perf/sql/110/q110.4.sql b/utils/scenarios/perf/sql/110/q110.4.sql deleted file mode 100644 index 8b9afc872..000000000 --- a/utils/scenarios/perf/sql/110/q110.4.sql +++ /dev/null @@ -1,8 +0,0 @@ -select c_nationkey, count(*), sum(o_totalprice) Revenue, avg(c_acctbal) -from customer, orders -where c_acctbal > 9963 and c_nationkey < 5 -and o_custkey = c_custkey -and o_orderdate <= '1992-06-09' -group by c_nationkey -order by 1; - diff --git a/utils/scenarios/perf/sql/110/q110.5.sql b/utils/scenarios/perf/sql/110/q110.5.sql deleted file mode 100644 index 2111da683..000000000 --- a/utils/scenarios/perf/sql/110/q110.5.sql +++ /dev/null @@ -1,3 +0,0 @@ -select calflushcache(); - select count(*) '200 Million Orders' from orders where o_orderdate <= '1992-11-16'; - diff --git a/utils/scenarios/perf/sql/110/q110.6.sql b/utils/scenarios/perf/sql/110/q110.6.sql deleted file mode 100644 index 0ba892fb6..000000000 --- a/utils/scenarios/perf/sql/110/q110.6.sql +++ /dev/null @@ -1,8 +0,0 @@ -select c_nationkey, count(*), sum(o_totalprice) Revenue, avg(c_acctbal) -from customer, orders -where c_acctbal > 9963 and c_nationkey < 5 -and o_custkey = c_custkey -and o_orderdate <= '1992-11-16' -group by c_nationkey -order by 1; - diff --git a/utils/scenarios/perf/sql/110/q110.7.sql b/utils/scenarios/perf/sql/110/q110.7.sql deleted file mode 100644 index 0ba892fb6..000000000 --- a/utils/scenarios/perf/sql/110/q110.7.sql +++ /dev/null @@ -1,8 +0,0 @@ -select c_nationkey, count(*), sum(o_totalprice) Revenue, avg(c_acctbal) -from customer, orders -where c_acctbal 
> 9963 and c_nationkey < 5 -and o_custkey = c_custkey -and o_orderdate <= '1992-11-16' -group by c_nationkey -order by 1; - diff --git a/utils/scenarios/perf/sql/110/q110.8.sql b/utils/scenarios/perf/sql/110/q110.8.sql deleted file mode 100644 index 5e1495a43..000000000 --- a/utils/scenarios/perf/sql/110/q110.8.sql +++ /dev/null @@ -1,3 +0,0 @@ -select calflushcache(); -select count(*) '300 Million Orders' from orders where o_orderdate <= '1993-04-26'; - diff --git a/utils/scenarios/perf/sql/110/q110.9.sql b/utils/scenarios/perf/sql/110/q110.9.sql deleted file mode 100644 index 981d8fd18..000000000 --- a/utils/scenarios/perf/sql/110/q110.9.sql +++ /dev/null @@ -1,8 +0,0 @@ - select c_nationkey, count(*), sum(o_totalprice) Revenue, avg(c_acctbal) -from customer, orders -where c_acctbal > 9963 and c_nationkey < 5 -and o_custkey = c_custkey -and o_orderdate <= '1993-04-26' -group by c_nationkey -order by 1; - diff --git a/utils/scenarios/perf/sql/111/q111.1.sql b/utils/scenarios/perf/sql/111/q111.1.sql deleted file mode 100644 index 5eff5f28a..000000000 --- a/utils/scenarios/perf/sql/111/q111.1.sql +++ /dev/null @@ -1,2 +0,0 @@ -select calflushcache(); -select count(ps_suppkey) 'Count 400 Million 4 byte Ints: From 800 Million Rows' from partsupp where ps_suppkey <= 5000000; diff --git a/utils/scenarios/perf/sql/111/q111.10.sql b/utils/scenarios/perf/sql/111/q111.10.sql deleted file mode 100644 index 3cd0d028c..000000000 --- a/utils/scenarios/perf/sql/111/q111.10.sql +++ /dev/null @@ -1 +0,0 @@ -select count(o_orderkey) 'Count 750M 8 byte BigInts: From 1.5 Billion Rows' from orders where o_orderkey > 3000000000; diff --git a/utils/scenarios/perf/sql/111/q111.11.sql b/utils/scenarios/perf/sql/111/q111.11.sql deleted file mode 100644 index bd00e3d34..000000000 --- a/utils/scenarios/perf/sql/111/q111.11.sql +++ /dev/null @@ -1 +0,0 @@ -select count(o_custkey) 'Count 750M 4 byte Ints: From 1.5 Billion Rows' from orders where o_custkey <= 75000000; diff --git a/utils/scenarios/perf/sql/111/q111.12.sql b/utils/scenarios/perf/sql/111/q111.12.sql deleted file mode 100644 index bd00e3d34..000000000 --- a/utils/scenarios/perf/sql/111/q111.12.sql +++ /dev/null @@ -1 +0,0 @@ -select count(o_custkey) 'Count 750M 4 byte Ints: From 1.5 Billion Rows' from orders where o_custkey <= 75000000; diff --git a/utils/scenarios/perf/sql/111/q111.13.sql b/utils/scenarios/perf/sql/111/q111.13.sql deleted file mode 100644 index 833406bb4..000000000 --- a/utils/scenarios/perf/sql/111/q111.13.sql +++ /dev/null @@ -1,3 +0,0 @@ -select calflushcache(); -select o_orderstatus, count(*), sum(o_totalprice), avg(o_totalprice) from orders where o_orderkey > 3000000000 group by o_orderstatus order by o_orderstatus; - diff --git a/utils/scenarios/perf/sql/111/q111.14.sql b/utils/scenarios/perf/sql/111/q111.14.sql deleted file mode 100644 index b34405e6e..000000000 --- a/utils/scenarios/perf/sql/111/q111.14.sql +++ /dev/null @@ -1 +0,0 @@ -select o_orderstatus, count(*), sum(o_totalprice), avg(o_totalprice) from orders where o_orderkey > 3000000000 group by o_orderstatus order by o_orderstatus; diff --git a/utils/scenarios/perf/sql/111/q111.15.sql b/utils/scenarios/perf/sql/111/q111.15.sql deleted file mode 100644 index 0f4999f1b..000000000 --- a/utils/scenarios/perf/sql/111/q111.15.sql +++ /dev/null @@ -1 +0,0 @@ -select o_orderstatus, count(*), sum(o_totalprice), avg(o_totalprice) from orders where o_custkey <= 75000000 group by o_orderstatus order by o_orderstatus; diff --git a/utils/scenarios/perf/sql/111/q111.16.sql 
b/utils/scenarios/perf/sql/111/q111.16.sql deleted file mode 100644 index 0f4999f1b..000000000 --- a/utils/scenarios/perf/sql/111/q111.16.sql +++ /dev/null @@ -1 +0,0 @@ -select o_orderstatus, count(*), sum(o_totalprice), avg(o_totalprice) from orders where o_custkey <= 75000000 group by o_orderstatus order by o_orderstatus; diff --git a/utils/scenarios/perf/sql/111/q111.17.sql b/utils/scenarios/perf/sql/111/q111.17.sql deleted file mode 100644 index 07f010f90..000000000 --- a/utils/scenarios/perf/sql/111/q111.17.sql +++ /dev/null @@ -1 +0,0 @@ -select count(l_linestatus) 'Count 21 Billion Char(1)s: From ~42 Billion Rows' from lineitem where l_linestatus <> 'O'; diff --git a/utils/scenarios/perf/sql/111/q111.18.sql b/utils/scenarios/perf/sql/111/q111.18.sql deleted file mode 100644 index 07f010f90..000000000 --- a/utils/scenarios/perf/sql/111/q111.18.sql +++ /dev/null @@ -1 +0,0 @@ -select count(l_linestatus) 'Count 21 Billion Char(1)s: From ~42 Billion Rows' from lineitem where l_linestatus <> 'O'; diff --git a/utils/scenarios/perf/sql/111/q111.19.sql b/utils/scenarios/perf/sql/111/q111.19.sql deleted file mode 100644 index e0b06baa6..000000000 --- a/utils/scenarios/perf/sql/111/q111.19.sql +++ /dev/null @@ -1 +0,0 @@ -select count(l_suppkey) 'Count 21 Billion 4 byte Ints: From ~42 Billion Rows' from lineitem where l_suppkey > 5000000; diff --git a/utils/scenarios/perf/sql/111/q111.2.sql b/utils/scenarios/perf/sql/111/q111.2.sql deleted file mode 100644 index b2ddf4a60..000000000 --- a/utils/scenarios/perf/sql/111/q111.2.sql +++ /dev/null @@ -1 +0,0 @@ -select count(ps_suppkey) 'Count 400 Million 4 byte Ints: From 800 Million Rows' from partsupp where ps_suppkey <= 5000000; diff --git a/utils/scenarios/perf/sql/111/q111.20.sql b/utils/scenarios/perf/sql/111/q111.20.sql deleted file mode 100644 index e0b06baa6..000000000 --- a/utils/scenarios/perf/sql/111/q111.20.sql +++ /dev/null @@ -1 +0,0 @@ -select count(l_suppkey) 'Count 21 Billion 4 byte Ints: From ~42 Billion Rows' from lineitem where l_suppkey > 5000000; diff --git a/utils/scenarios/perf/sql/111/q111.21.sql b/utils/scenarios/perf/sql/111/q111.21.sql deleted file mode 100644 index 60d7e37ac..000000000 --- a/utils/scenarios/perf/sql/111/q111.21.sql +++ /dev/null @@ -1 +0,0 @@ -select l_linestatus, l_returnflag, count(*) from lineitem where l_linestatus <> 'O' group by l_linestatus, l_returnflag; diff --git a/utils/scenarios/perf/sql/111/q111.22.sql b/utils/scenarios/perf/sql/111/q111.22.sql deleted file mode 100644 index 60d7e37ac..000000000 --- a/utils/scenarios/perf/sql/111/q111.22.sql +++ /dev/null @@ -1 +0,0 @@ -select l_linestatus, l_returnflag, count(*) from lineitem where l_linestatus <> 'O' group by l_linestatus, l_returnflag; diff --git a/utils/scenarios/perf/sql/111/q111.3.sql b/utils/scenarios/perf/sql/111/q111.3.sql deleted file mode 100644 index fa9f47643..000000000 --- a/utils/scenarios/perf/sql/111/q111.3.sql +++ /dev/null @@ -1 +0,0 @@ -select count(ps_supplycost) 'Count 400 Million 8 byte Decimals: From 800 Million Rows' from partsupp where ps_supplycost <= 501; diff --git a/utils/scenarios/perf/sql/111/q111.4.sql b/utils/scenarios/perf/sql/111/q111.4.sql deleted file mode 100644 index fa9f47643..000000000 --- a/utils/scenarios/perf/sql/111/q111.4.sql +++ /dev/null @@ -1 +0,0 @@ -select count(ps_supplycost) 'Count 400 Million 8 byte Decimals: From 800 Million Rows' from partsupp where ps_supplycost <= 501; diff --git a/utils/scenarios/perf/sql/111/q111.5.sql b/utils/scenarios/perf/sql/111/q111.5.sql deleted file mode 
100644 index 610170a65..000000000 --- a/utils/scenarios/perf/sql/111/q111.5.sql +++ /dev/null @@ -1 +0,0 @@ -select count(*), avg(ps_availqty), sum(ps_availqty), avg(ps_supplycost), sum(ps_supplycost) from partsupp where ps_suppkey <= 5000000; diff --git a/utils/scenarios/perf/sql/111/q111.6.sql b/utils/scenarios/perf/sql/111/q111.6.sql deleted file mode 100644 index 610170a65..000000000 --- a/utils/scenarios/perf/sql/111/q111.6.sql +++ /dev/null @@ -1 +0,0 @@ -select count(*), avg(ps_availqty), sum(ps_availqty), avg(ps_supplycost), sum(ps_supplycost) from partsupp where ps_suppkey <= 5000000; diff --git a/utils/scenarios/perf/sql/111/q111.7.sql b/utils/scenarios/perf/sql/111/q111.7.sql deleted file mode 100644 index e4eb86cc8..000000000 --- a/utils/scenarios/perf/sql/111/q111.7.sql +++ /dev/null @@ -1 +0,0 @@ -select count(*), avg(ps_availqty), sum(ps_availqty), avg(ps_supplycost), sum(ps_supplycost) from partsupp where ps_supplycost <= 501; diff --git a/utils/scenarios/perf/sql/111/q111.8.sql b/utils/scenarios/perf/sql/111/q111.8.sql deleted file mode 100644 index e4eb86cc8..000000000 --- a/utils/scenarios/perf/sql/111/q111.8.sql +++ /dev/null @@ -1 +0,0 @@ -select count(*), avg(ps_availqty), sum(ps_availqty), avg(ps_supplycost), sum(ps_supplycost) from partsupp where ps_supplycost <= 501; diff --git a/utils/scenarios/perf/sql/111/q111.9.sql b/utils/scenarios/perf/sql/111/q111.9.sql deleted file mode 100644 index 3cd0d028c..000000000 --- a/utils/scenarios/perf/sql/111/q111.9.sql +++ /dev/null @@ -1 +0,0 @@ -select count(o_orderkey) 'Count 750M 8 byte BigInts: From 1.5 Billion Rows' from orders where o_orderkey > 3000000000; diff --git a/utils/scenarios/perf/sql/112/q112.1.sql b/utils/scenarios/perf/sql/112/q112.1.sql deleted file mode 100644 index cfb17388c..000000000 --- a/utils/scenarios/perf/sql/112/q112.1.sql +++ /dev/null @@ -1,4 +0,0 @@ -select calflushcache(); -select l_discount, count(*) from lineitem where l_shipdate between '1994-12-01' and '1996-01-31' - group by 1 order by 1; - diff --git a/utils/scenarios/perf/sql/112/q112.2.sql b/utils/scenarios/perf/sql/112/q112.2.sql deleted file mode 100644 index df7992f5c..000000000 --- a/utils/scenarios/perf/sql/112/q112.2.sql +++ /dev/null @@ -1,3 +0,0 @@ -select l_discount, count(*) from lineitem where l_shipdate between '1994-12-01' and '1996-01-31' - group by 1 order by 1; - diff --git a/utils/scenarios/perf/sql/112/q112.3.sql b/utils/scenarios/perf/sql/112/q112.3.sql deleted file mode 100644 index 8924a39f7..000000000 --- a/utils/scenarios/perf/sql/112/q112.3.sql +++ /dev/null @@ -1,4 +0,0 @@ -select calflushcache(); -select l_returnflag,l_discount,l_linenumber, count(*) from lineitem where l_shipdate between '1994-12-01' and '1996-01-31' - group by 1,2,3 order by 1,2,3; - diff --git a/utils/scenarios/perf/sql/112/q112.4.sql b/utils/scenarios/perf/sql/112/q112.4.sql deleted file mode 100644 index 8eb0cd222..000000000 --- a/utils/scenarios/perf/sql/112/q112.4.sql +++ /dev/null @@ -1,3 +0,0 @@ -select l_returnflag,l_discount,l_linenumber, count(*) from lineitem where l_shipdate between '1994-12-01' and '1996-01-31' - group by 1,2,3 order by 1,2,3; - diff --git a/utils/scenarios/perf/sql/112/q112.5.sql b/utils/scenarios/perf/sql/112/q112.5.sql deleted file mode 100644 index 3540912d3..000000000 --- a/utils/scenarios/perf/sql/112/q112.5.sql +++ /dev/null @@ -1,4 +0,0 @@ -select calflushcache(); -select l_returnflag,l_discount,l_linenumber,l_linestatus,l_tax, count(*) from lineitem where l_shipdate between '1994-12-01' and '1996-01-31' 
- group by 1,2,3,4,5 order by 1,2,3,4,5; - diff --git a/utils/scenarios/perf/sql/112/q112.6.sql b/utils/scenarios/perf/sql/112/q112.6.sql deleted file mode 100644 index 0367dc17c..000000000 --- a/utils/scenarios/perf/sql/112/q112.6.sql +++ /dev/null @@ -1,3 +0,0 @@ -select l_returnflag,l_discount,l_linenumber,l_linestatus,l_tax, count(*) from lineitem where l_shipdate between '1994-12-01' and '1996-01-31' - group by 1,2,3,4,5 order by 1,2,3,4,5; - diff --git a/utils/scenarios/perf/sql/2/q4.2.sql b/utils/scenarios/perf/sql/2/q4.2.sql deleted file mode 100644 index 935a0eb28..000000000 --- a/utils/scenarios/perf/sql/2/q4.2.sql +++ /dev/null @@ -1 +0,0 @@ -select l_shipdate, l_partkey from lineitem where l_orderkey < 1000000 order by 1, 2; diff --git a/utils/scenarios/perf/sql/200/week_scenario_jt_1.1.sql b/utils/scenarios/perf/sql/200/week_scenario_jt_1.1.sql deleted file mode 100644 index 9f46d6525..000000000 --- a/utils/scenarios/perf/sql/200/week_scenario_jt_1.1.sql +++ /dev/null @@ -1,7 +0,0 @@ -select c_nationkey, count(*), sum(o_totalprice) Revenue, avg(c_acctbal) -from customer, orders -where c_acctbal > 9963 and c_nationkey < 5 -and o_custkey = c_custkey -and o_orderdate <= '1994-03-13' -group by c_nationkey -order by 1; diff --git a/utils/scenarios/perf/sql/200/week_scenario_jt_1.sql b/utils/scenarios/perf/sql/200/week_scenario_jt_1.sql deleted file mode 100644 index 9f46d6525..000000000 --- a/utils/scenarios/perf/sql/200/week_scenario_jt_1.sql +++ /dev/null @@ -1,7 +0,0 @@ -select c_nationkey, count(*), sum(o_totalprice) Revenue, avg(c_acctbal) -from customer, orders -where c_acctbal > 9963 and c_nationkey < 5 -and o_custkey = c_custkey -and o_orderdate <= '1994-03-13' -group by c_nationkey -order by 1; diff --git a/utils/scenarios/perf/sql/200/week_scenario_jt_2.1.sql b/utils/scenarios/perf/sql/200/week_scenario_jt_2.1.sql deleted file mode 100644 index fcfa78528..000000000 --- a/utils/scenarios/perf/sql/200/week_scenario_jt_2.1.sql +++ /dev/null @@ -1,9 +0,0 @@ -select '1992-01-01' + interval rand() * 365 day into @date_var; -select concat('Calpont: random date: ',@date_var); -select l_shipdate, l_returnflag Returnflag, l_linestatus Status, - avg(l_extendedprice) avgprice, count(*), - min(l_extendedprice) minprice, max(l_extendedprice) maxprice -from lineitem -where l_shipdate = @date_var -group by 1,2,3; - diff --git a/utils/scenarios/perf/sql/200/week_scenario_jt_2.sql b/utils/scenarios/perf/sql/200/week_scenario_jt_2.sql deleted file mode 100644 index a24c5417e..000000000 --- a/utils/scenarios/perf/sql/200/week_scenario_jt_2.sql +++ /dev/null @@ -1,8 +0,0 @@ -select '1992-01-01' + interval rand() * 150 day into @date_var; -select concat('Calpont: random date: ',@date_var); -select l_shipdate, l_returnflag Returnflag, l_linestatus Status, - avg(l_extendedprice) avgprice, count(*), - min(l_extendedprice) minprice, max(l_extendedprice) maxprice -from lineitem -where l_shipdate = @date_var -group by 1,2,3; diff --git a/utils/scenarios/perf/sql/200/week_scenario_jt_3.1.sql b/utils/scenarios/perf/sql/200/week_scenario_jt_3.1.sql deleted file mode 100644 index fed772125..000000000 --- a/utils/scenarios/perf/sql/200/week_scenario_jt_3.1.sql +++ /dev/null @@ -1,8 +0,0 @@ -select '1992-01-01' + interval rand() * 365 day into @date_var; -select concat('Calpont: random date: ',@date_var); -select l_shipdate, l_returnflag Returnflag, l_linestatus Status, - avg(l_extendedprice) avgprice, count(*), - min(l_extendedprice) minprice, max(l_extendedprice) maxprice -from lineitem -where 
l_receiptdate between @date_var and @date_var + interval 5 day -group by 1,2,3; diff --git a/utils/scenarios/perf/sql/200/week_scenario_jt_3.sql b/utils/scenarios/perf/sql/200/week_scenario_jt_3.sql deleted file mode 100644 index b1f5d62ba..000000000 --- a/utils/scenarios/perf/sql/200/week_scenario_jt_3.sql +++ /dev/null @@ -1,8 +0,0 @@ -select '1992-01-01' + interval rand() * 150 day into @date_var; -select concat('Calpont: random date: ',@date_var); -select l_shipdate, l_returnflag Returnflag, l_linestatus Status, - avg(l_extendedprice) avgprice, count(*), - min(l_extendedprice) minprice, max(l_extendedprice) maxprice -from lineitem -where l_receiptdate between @date_var and @date_var + interval 5 day -group by 1,2,3; diff --git a/utils/scenarios/perf/sql/200/week_scenario_jt_4.1.sql b/utils/scenarios/perf/sql/200/week_scenario_jt_4.1.sql deleted file mode 100644 index 35e31e38d..000000000 --- a/utils/scenarios/perf/sql/200/week_scenario_jt_4.1.sql +++ /dev/null @@ -1,8 +0,0 @@ -select '1992-01-01' + interval rand() * 365 day into @date_var; -select concat('Calpont: random date: ',@date_var); -select l_shipdate, l_returnflag Returnflag, l_linestatus Status, - avg(l_extendedprice) avgprice, count(*), - min(l_extendedprice) minprice, max(l_extendedprice) maxprice -from lineitem -where l_commitdate between @date_var and @date_var + interval 15 day -group by 1,2,3; diff --git a/utils/scenarios/perf/sql/200/week_scenario_jt_4.sql b/utils/scenarios/perf/sql/200/week_scenario_jt_4.sql deleted file mode 100644 index f2b0f76ac..000000000 --- a/utils/scenarios/perf/sql/200/week_scenario_jt_4.sql +++ /dev/null @@ -1,8 +0,0 @@ -select '1992-01-01' + interval rand() * 150 day into @date_var; -select concat('Calpont: random date: ',@date_var); -select l_shipdate, l_returnflag Returnflag, l_linestatus Status, - avg(l_extendedprice) avgprice, count(*), - min(l_extendedprice) minprice, max(l_extendedprice) maxprice -from lineitem -where l_commitdate between @date_var and @date_var + interval 15 day -group by 1,2,3; diff --git a/utils/scenarios/perf/sql/201/sleep15.sql b/utils/scenarios/perf/sql/201/sleep15.sql deleted file mode 100644 index aecc63a00..000000000 --- a/utils/scenarios/perf/sql/201/sleep15.sql +++ /dev/null @@ -1 +0,0 @@ -\! 
sleep 15 diff --git a/utils/scenarios/perf/sql/201/week_scenario_jt_5_1.1.sql b/utils/scenarios/perf/sql/201/week_scenario_jt_5_1.1.sql deleted file mode 100644 index 0e5325a03..000000000 --- a/utils/scenarios/perf/sql/201/week_scenario_jt_5_1.1.sql +++ /dev/null @@ -1,3 +0,0 @@ -select l_discount, count(*) from lineitem where l_shipdate between '1992-01-01' and '1996-06-30' - group by 1 order by 1; - diff --git a/utils/scenarios/perf/sql/201/week_scenario_jt_5_1.sql b/utils/scenarios/perf/sql/201/week_scenario_jt_5_1.sql deleted file mode 100644 index 0e5325a03..000000000 --- a/utils/scenarios/perf/sql/201/week_scenario_jt_5_1.sql +++ /dev/null @@ -1,3 +0,0 @@ -select l_discount, count(*) from lineitem where l_shipdate between '1992-01-01' and '1996-06-30' - group by 1 order by 1; - diff --git a/utils/scenarios/perf/sql/201/week_scenario_jt_5_2.1.sql b/utils/scenarios/perf/sql/201/week_scenario_jt_5_2.1.sql deleted file mode 100644 index f5ce48366..000000000 --- a/utils/scenarios/perf/sql/201/week_scenario_jt_5_2.1.sql +++ /dev/null @@ -1,2 +0,0 @@ -select l_returnflag,l_discount,l_linenumber, count(*) from lineitem where l_shipdate between '1992-01-01' and '1996-06-30' - group by 1,2,3 order by 1,2,3; diff --git a/utils/scenarios/perf/sql/201/week_scenario_jt_5_2.sql b/utils/scenarios/perf/sql/201/week_scenario_jt_5_2.sql deleted file mode 100644 index f5ce48366..000000000 --- a/utils/scenarios/perf/sql/201/week_scenario_jt_5_2.sql +++ /dev/null @@ -1,2 +0,0 @@ -select l_returnflag,l_discount,l_linenumber, count(*) from lineitem where l_shipdate between '1992-01-01' and '1996-06-30' - group by 1,2,3 order by 1,2,3; diff --git a/utils/scenarios/perf/sql/201/week_scenario_jt_5_3.1.sql b/utils/scenarios/perf/sql/201/week_scenario_jt_5_3.1.sql deleted file mode 100644 index 6f22f9744..000000000 --- a/utils/scenarios/perf/sql/201/week_scenario_jt_5_3.1.sql +++ /dev/null @@ -1,2 +0,0 @@ -select l_returnflag,l_discount,l_linenumber,l_linestatus,l_tax, count(*) from lineitem where l_shipdate between '1992-01-01' and '1996-06-30' - group by 1,2,3,4,5 order by 1,2,3,4,5; diff --git a/utils/scenarios/perf/sql/201/week_scenario_jt_5_3.sql b/utils/scenarios/perf/sql/201/week_scenario_jt_5_3.sql deleted file mode 100644 index 6f22f9744..000000000 --- a/utils/scenarios/perf/sql/201/week_scenario_jt_5_3.sql +++ /dev/null @@ -1,2 +0,0 @@ -select l_returnflag,l_discount,l_linenumber,l_linestatus,l_tax, count(*) from lineitem where l_shipdate between '1992-01-01' and '1996-06-30' - group by 1,2,3,4,5 order by 1,2,3,4,5; diff --git a/utils/scenarios/perf/sql/201/week_scenario_jt_6.1.sql b/utils/scenarios/perf/sql/201/week_scenario_jt_6.1.sql deleted file mode 100644 index 8fc273075..000000000 --- a/utils/scenarios/perf/sql/201/week_scenario_jt_6.1.sql +++ /dev/null @@ -1,17 +0,0 @@ -select r1.r_name Sales_Region, n1.n_name Sales_Nation, - r2.r_name Supplier_Region, n2.n_name Supplier_Nation, - count(*) -from region r1 - join nation n1 on (r1.r_regionkey = n1.n_regionkey) - join customer on (c_nationkey = n1.n_nationkey) - join orders on (c_custkey = o_custkey) - join lineitem on (l_orderkey = o_orderkey) - join supplier on l_suppkey = s_suppkey - join nation n2 on (s_nationkey = n2.n_nationkey) - join region r2 on (r2.r_regionkey = n2.n_regionkey) -where l_shipdate between '1992-01-02' and '1992-12-31' - and o_orderdate between '1992-01-02' and '1992-12-31' - and n1.n_nationkey = 4 - and n2.n_nationkey in (5,6,7,8) -group by 1,2,3,4 -order by 1,2,3,4; diff --git 
a/utils/scenarios/perf/sql/201/week_scenario_jt_6.1.sql.original b/utils/scenarios/perf/sql/201/week_scenario_jt_6.1.sql.original deleted file mode 100644 index 8fc273075..000000000 --- a/utils/scenarios/perf/sql/201/week_scenario_jt_6.1.sql.original +++ /dev/null @@ -1,17 +0,0 @@ -select r1.r_name Sales_Region, n1.n_name Sales_Nation, - r2.r_name Supplier_Region, n2.n_name Supplier_Nation, - count(*) -from region r1 - join nation n1 on (r1.r_regionkey = n1.n_regionkey) - join customer on (c_nationkey = n1.n_nationkey) - join orders on (c_custkey = o_custkey) - join lineitem on (l_orderkey = o_orderkey) - join supplier on l_suppkey = s_suppkey - join nation n2 on (s_nationkey = n2.n_nationkey) - join region r2 on (r2.r_regionkey = n2.n_regionkey) -where l_shipdate between '1992-01-02' and '1992-12-31' - and o_orderdate between '1992-01-02' and '1992-12-31' - and n1.n_nationkey = 4 - and n2.n_nationkey in (5,6,7,8) -group by 1,2,3,4 -order by 1,2,3,4; diff --git a/utils/scenarios/perf/sql/201/week_scenario_jt_6.sql b/utils/scenarios/perf/sql/201/week_scenario_jt_6.sql deleted file mode 100644 index 017171617..000000000 --- a/utils/scenarios/perf/sql/201/week_scenario_jt_6.sql +++ /dev/null @@ -1,17 +0,0 @@ -select r1.r_name Sales_Region, n1.n_name Sales_Nation, - r2.r_name Supplier_Region, n2.n_name Supplier_Nation, - count(*) -from region r1 - join nation n1 on (r1.r_regionkey = n1.n_regionkey) - join customer on (c_nationkey = n1.n_nationkey) - join orders on (c_custkey = o_custkey) - join lineitem on (l_orderkey = o_orderkey) - join supplier on l_suppkey = s_suppkey - join nation n2 on (s_nationkey = n2.n_nationkey) - join region r2 on (r2.r_regionkey = n2.n_regionkey) -where l_shipdate between '1992-01-02' and '1992-02-28' - and o_orderdate between '1992-01-02' and '1992-02-28' - and n1.n_nationkey = 4 - and n2.n_nationkey in (5,6,7,8) -group by 1,2,3,4 -order by 1,2,3,4; diff --git a/utils/scenarios/perf/sql/201/week_scenario_jt_6.sql.original b/utils/scenarios/perf/sql/201/week_scenario_jt_6.sql.original deleted file mode 100644 index 8fc273075..000000000 --- a/utils/scenarios/perf/sql/201/week_scenario_jt_6.sql.original +++ /dev/null @@ -1,17 +0,0 @@ -select r1.r_name Sales_Region, n1.n_name Sales_Nation, - r2.r_name Supplier_Region, n2.n_name Supplier_Nation, - count(*) -from region r1 - join nation n1 on (r1.r_regionkey = n1.n_regionkey) - join customer on (c_nationkey = n1.n_nationkey) - join orders on (c_custkey = o_custkey) - join lineitem on (l_orderkey = o_orderkey) - join supplier on l_suppkey = s_suppkey - join nation n2 on (s_nationkey = n2.n_nationkey) - join region r2 on (r2.r_regionkey = n2.n_regionkey) -where l_shipdate between '1992-01-02' and '1992-12-31' - and o_orderdate between '1992-01-02' and '1992-12-31' - and n1.n_nationkey = 4 - and n2.n_nationkey in (5,6,7,8) -group by 1,2,3,4 -order by 1,2,3,4; diff --git a/utils/scenarios/perf/sql/201/week_scenario_jt_7.1.sql b/utils/scenarios/perf/sql/201/week_scenario_jt_7.1.sql deleted file mode 100644 index 9e0ed7fd0..000000000 --- a/utils/scenarios/perf/sql/201/week_scenario_jt_7.1.sql +++ /dev/null @@ -1,8 +0,0 @@ -select max(l_orderkey), max(l_partkey), max(l_suppkey), count(l_orderkey) -from lineitem -where l_partkey < 45000000 -and l_suppkey < 2250000 -and l_orderkey < 200000000 -and l_shipdate between '1992-03-01' and '1992-03-31' -and l_linenumber <= 4 -and l_quantity <= 25; diff --git a/utils/scenarios/perf/sql/201/week_scenario_jt_7.sql b/utils/scenarios/perf/sql/201/week_scenario_jt_7.sql deleted file mode 
100644 index 9e0ed7fd0..000000000 --- a/utils/scenarios/perf/sql/201/week_scenario_jt_7.sql +++ /dev/null @@ -1,8 +0,0 @@ -select max(l_orderkey), max(l_partkey), max(l_suppkey), count(l_orderkey) -from lineitem -where l_partkey < 45000000 -and l_suppkey < 2250000 -and l_orderkey < 200000000 -and l_shipdate between '1992-03-01' and '1992-03-31' -and l_linenumber <= 4 -and l_quantity <= 25; diff --git a/utils/scenarios/perf/sql/202/q2.3.4.1.sql b/utils/scenarios/perf/sql/202/q2.3.4.1.sql deleted file mode 100644 index 692770771..000000000 --- a/utils/scenarios/perf/sql/202/q2.3.4.1.sql +++ /dev/null @@ -1 +0,0 @@ -Select min(l_suppkey) from lineitem; diff --git a/utils/scenarios/perf/sql/202/q2.3.4.2.sql b/utils/scenarios/perf/sql/202/q2.3.4.2.sql deleted file mode 100644 index 683dddea2..000000000 --- a/utils/scenarios/perf/sql/202/q2.3.4.2.sql +++ /dev/null @@ -1 +0,0 @@ -Select max(l_shipdate) from lineitem; diff --git a/utils/scenarios/perf/sql/202/q2.3.4.3.sql b/utils/scenarios/perf/sql/202/q2.3.4.3.sql deleted file mode 100644 index cd920adae..000000000 --- a/utils/scenarios/perf/sql/202/q2.3.4.3.sql +++ /dev/null @@ -1 +0,0 @@ -Select sum(l_orderkey) from lineitem where l_suppkey < 100000; diff --git a/utils/scenarios/perf/sql/202/q2.3.4.4.sql b/utils/scenarios/perf/sql/202/q2.3.4.4.sql deleted file mode 100644 index aae97d63f..000000000 --- a/utils/scenarios/perf/sql/202/q2.3.4.4.sql +++ /dev/null @@ -1 +0,0 @@ -Select avg(l_extendedprice) from lineitem; diff --git a/utils/scenarios/perf/sql/202/q4.8.1.sql b/utils/scenarios/perf/sql/202/q4.8.1.sql deleted file mode 100644 index c8a040dab..000000000 --- a/utils/scenarios/perf/sql/202/q4.8.1.sql +++ /dev/null @@ -1 +0,0 @@ -Select count(*) from lineitem where l_orderkey < 1000000; diff --git a/utils/scenarios/perf/sql/202/q4.8.2.sql b/utils/scenarios/perf/sql/202/q4.8.2.sql deleted file mode 100644 index 09cc7a021..000000000 --- a/utils/scenarios/perf/sql/202/q4.8.2.sql +++ /dev/null @@ -1 +0,0 @@ -Select max(l_shipdate) from lineitem where l_orderkey < 1000000; diff --git a/utils/scenarios/perf/sql/202/q4.8.3.sql b/utils/scenarios/perf/sql/202/q4.8.3.sql deleted file mode 100644 index c602223bf..000000000 --- a/utils/scenarios/perf/sql/202/q4.8.3.sql +++ /dev/null @@ -1 +0,0 @@ -Select min(o_orderstatus), max(o_orderstatus) from orders where o_orderkey < 1000000; diff --git a/utils/scenarios/perf/sql/202/q4.8.4.sql b/utils/scenarios/perf/sql/202/q4.8.4.sql deleted file mode 100644 index a236b16e5..000000000 --- a/utils/scenarios/perf/sql/202/q4.8.4.sql +++ /dev/null @@ -1 +0,0 @@ -Select sum(o_totalprice), avg(o_totalprice), count(*) from orders where o_orderkey < 1000000; diff --git a/utils/scenarios/perf/sql/202/q4.8.5.sql b/utils/scenarios/perf/sql/202/q4.8.5.sql deleted file mode 100644 index 503faa7d7..000000000 --- a/utils/scenarios/perf/sql/202/q4.8.5.sql +++ /dev/null @@ -1 +0,0 @@ -Select l_quantity, count(*) from lineitem where l_orderkey < 1000000 group by l_quantity order by l_quantity; diff --git a/utils/scenarios/perf/sql/202/q4.8.6.sql b/utils/scenarios/perf/sql/202/q4.8.6.sql deleted file mode 100644 index 95485cbf1..000000000 --- a/utils/scenarios/perf/sql/202/q4.8.6.sql +++ /dev/null @@ -1 +0,0 @@ -Select l_shipmode, max(l_shipdate), min(l_shipdate) from lineitem where l_orderkey < 1000000 group by l_shipmode order by 1; diff --git a/utils/scenarios/perf/sql/202/q4.8.7.sql b/utils/scenarios/perf/sql/202/q4.8.7.sql deleted file mode 100644 index 80a788e34..000000000 --- a/utils/scenarios/perf/sql/202/q4.8.7.sql +++ 
/dev/null @@ -1 +0,0 @@ -Select o_orderpriority, min(o_orderstatus), max(o_orderstatus) from orders where o_orderkey < 1000000 group by o_orderpriority order by o_orderpriority; diff --git a/utils/scenarios/perf/sql/202/q4.8.8.sql b/utils/scenarios/perf/sql/202/q4.8.8.sql deleted file mode 100644 index a283c0bbf..000000000 --- a/utils/scenarios/perf/sql/202/q4.8.8.sql +++ /dev/null @@ -1 +0,0 @@ -Select o_shippriority, sum(o_totalprice), avg(o_totalprice), count(*) from orders where o_orderkey < 1000000 group by o_shippriority; diff --git a/utils/scenarios/perf/sql/202/q4.8.9.sql b/utils/scenarios/perf/sql/202/q4.8.9.sql deleted file mode 100644 index 7f53f8a96..000000000 --- a/utils/scenarios/perf/sql/202/q4.8.9.sql +++ /dev/null @@ -1,19 +0,0 @@ -select - l_returnflag, - l_linestatus, - sum(l_quantity) as sum_qty, - sum(l_extendedprice) as sum_base_price, - avg(l_quantity) as avg_qty, - avg(l_extendedprice) as avg_price, - avg(l_discount) as avg_disc, - count(*) as count_order -from - lineitem -where - l_shipdate <= date '1998-09-26' - -group by l_returnflag, - l_linestatus -order by - l_returnflag, - l_linestatus; diff --git a/utils/scenarios/perf/sql/202/q5.3.1.sql b/utils/scenarios/perf/sql/202/q5.3.1.sql deleted file mode 100644 index 3eb2914b4..000000000 --- a/utils/scenarios/perf/sql/202/q5.3.1.sql +++ /dev/null @@ -1,9 +0,0 @@ -select l_shipdate Revenue_day, - l_discount district, - max(l_shipdate) Latest_date, - sum(l_extendedprice) Total_Revenue, -count(*) Sales_items -from lineitem -group by l_shipdate, l_discount -order by 1,2; - diff --git a/utils/scenarios/perf/sql/202/sleep30.sql b/utils/scenarios/perf/sql/202/sleep30.sql deleted file mode 100644 index 02111ab80..000000000 --- a/utils/scenarios/perf/sql/202/sleep30.sql +++ /dev/null @@ -1 +0,0 @@ -\! 
sleep 30 diff --git a/utils/scenarios/perf/sql/203/delete.mod.sql b/utils/scenarios/perf/sql/203/delete.mod.sql deleted file mode 100644 index ea1000389..000000000 --- a/utils/scenarios/perf/sql/203/delete.mod.sql +++ /dev/null @@ -1,12 +0,0 @@ -delete from lineitem where l_shipdate between '1992-02-01' and '1992-02-29' and l_orderkey <= 500000000 and l_linestatus = 'X'; -delete from lineitem where l_shipdate between '1992-02-01' and '1992-02-29' and l_orderkey <= 1000000000 and l_linestatus = 'X'; -delete from lineitem where l_shipdate between '1992-02-01' and '1992-02-29' and l_orderkey <= 1500000000 and l_linestatus = 'X'; -delete from lineitem where l_shipdate between '1992-02-01' and '1992-02-29' and l_orderkey <= 2000000000 and l_linestatus = 'X'; -delete from lineitem where l_shipdate between '1992-02-01' and '1992-02-29' and l_orderkey <= 2500000000 and l_linestatus = 'X'; -delete from lineitem where l_shipdate between '1992-02-01' and '1992-02-29' and l_orderkey <= 3000000000 and l_linestatus = 'X'; -delete from lineitem where l_shipdate between '1992-02-01' and '1992-02-29' and l_orderkey <= 3500000000 and l_linestatus = 'X'; -delete from lineitem where l_shipdate between '1992-02-01' and '1992-02-29' and l_orderkey <= 4000000000 and l_linestatus = 'X'; -delete from lineitem where l_shipdate between '1992-02-01' and '1992-02-29' and l_orderkey <= 4500000000 and l_linestatus = 'X'; -delete from lineitem where l_shipdate between '1992-02-01' and '1992-02-29' and l_orderkey <= 5000000000 and l_linestatus = 'X'; -delete from lineitem where l_shipdate between '1992-02-01' and '1992-02-29' and l_orderkey <= 5500000000 and l_linestatus = 'X'; -delete from lineitem where l_shipdate between '1992-02-01' and '1992-02-29' and l_orderkey <= 6000000000 and l_linestatus = 'X'; diff --git a/utils/scenarios/perf/sql/203/delete.sql b/utils/scenarios/perf/sql/203/delete.sql deleted file mode 100644 index 1a7cc31cb..000000000 --- a/utils/scenarios/perf/sql/203/delete.sql +++ /dev/null @@ -1,3 +0,0 @@ -delete from lineitem -where l_linestatus = 'X'; - diff --git a/utils/scenarios/perf/sql/203/update.sql b/utils/scenarios/perf/sql/203/update.sql deleted file mode 100644 index 1c65db5b5..000000000 --- a/utils/scenarios/perf/sql/203/update.sql +++ /dev/null @@ -1,4 +0,0 @@ -update lineitem -set l_linestatus = 'X' -where l_shipdate >= '1992-02-01' and l_shipdate <= '1992-02-29'; - diff --git a/utils/scenarios/perf/sql/204/lineitemstats.sql b/utils/scenarios/perf/sql/204/lineitemstats.sql deleted file mode 100644 index 004b7f2e8..000000000 --- a/utils/scenarios/perf/sql/204/lineitemstats.sql +++ /dev/null @@ -1 +0,0 @@ -select count(*) from lineitem; diff --git a/utils/scenarios/perf/sql/3/q4.3.sql b/utils/scenarios/perf/sql/3/q4.3.sql deleted file mode 100644 index 97756a25a..000000000 --- a/utils/scenarios/perf/sql/3/q4.3.sql +++ /dev/null @@ -1 +0,0 @@ -Select /*! 
INFINIDB_ORDERED */ o_orderdate, o_custkey from orders,lineitem where l_partkey < 100000 and l_orderkey = o_orderkey order by 1, 2; diff --git a/utils/scenarios/perf/sql/3/q4.4.sql b/utils/scenarios/perf/sql/3/q4.4.sql deleted file mode 100644 index ff14283ad..000000000 --- a/utils/scenarios/perf/sql/3/q4.4.sql +++ /dev/null @@ -1 +0,0 @@ -Select o_orderdate, o_custkey from orders,lineitem where o_custkey < 1000 and o_orderkey = l_orderkey order by 1, 2; diff --git a/utils/scenarios/perf/sql/3/q4.6.1.sql b/utils/scenarios/perf/sql/3/q4.6.1.sql deleted file mode 100644 index 15bf67a70..000000000 --- a/utils/scenarios/perf/sql/3/q4.6.1.sql +++ /dev/null @@ -1 +0,0 @@ -select c_custkey, o_orderkey from customer left outer join orders on c_custkey = o_custkey where c_custkey < 10000 and c_nationkey = 4 order by 1, 2; diff --git a/utils/scenarios/perf/sql/3/q4.6.2.sql b/utils/scenarios/perf/sql/3/q4.6.2.sql deleted file mode 100644 index 54191b4f0..000000000 --- a/utils/scenarios/perf/sql/3/q4.6.2.sql +++ /dev/null @@ -1 +0,0 @@ -select c_custkey, o_orderkey from customer right outer join orders on c_custkey = o_custkey where c_custkey < 10000 and c_nationkey = 4 order by 1, 2; diff --git a/utils/scenarios/perf/sql/3/q4.6.sql b/utils/scenarios/perf/sql/3/q4.6.sql deleted file mode 100644 index b93f2dfe2..000000000 --- a/utils/scenarios/perf/sql/3/q4.6.sql +++ /dev/null @@ -1 +0,0 @@ -Select * from orders, lineitem where o_custkey < 100000 and l_partkey < 10000 and l_orderkey = o_orderkey order by l_orderkey, l_linenumber; diff --git a/utils/scenarios/perf/sql/300/week_scenario_jt_1.1.sql b/utils/scenarios/perf/sql/300/week_scenario_jt_1.1.sql deleted file mode 100644 index 9f46d6525..000000000 --- a/utils/scenarios/perf/sql/300/week_scenario_jt_1.1.sql +++ /dev/null @@ -1,7 +0,0 @@ -select c_nationkey, count(*), sum(o_totalprice) Revenue, avg(c_acctbal) -from customer, orders -where c_acctbal > 9963 and c_nationkey < 5 -and o_custkey = c_custkey -and o_orderdate <= '1994-03-13' -group by c_nationkey -order by 1; diff --git a/utils/scenarios/perf/sql/300/week_scenario_jt_1.sql b/utils/scenarios/perf/sql/300/week_scenario_jt_1.sql deleted file mode 100644 index 9f46d6525..000000000 --- a/utils/scenarios/perf/sql/300/week_scenario_jt_1.sql +++ /dev/null @@ -1,7 +0,0 @@ -select c_nationkey, count(*), sum(o_totalprice) Revenue, avg(c_acctbal) -from customer, orders -where c_acctbal > 9963 and c_nationkey < 5 -and o_custkey = c_custkey -and o_orderdate <= '1994-03-13' -group by c_nationkey -order by 1; diff --git a/utils/scenarios/perf/sql/300/week_scenario_jt_2.1.sql b/utils/scenarios/perf/sql/300/week_scenario_jt_2.1.sql deleted file mode 100644 index 79067f7ce..000000000 --- a/utils/scenarios/perf/sql/300/week_scenario_jt_2.1.sql +++ /dev/null @@ -1,8 +0,0 @@ -select '1992-01-01' + interval rand() * 365 day into @date_var; -select l_shipdate, l_returnflag Returnflag, l_linestatus Status, - avg(l_extendedprice) avgprice, count(*), - min(l_extendedprice) minprice, max(l_extendedprice) maxprice -from lineitem -where l_shipdate = @date_var -group by 1,2,3; - diff --git a/utils/scenarios/perf/sql/300/week_scenario_jt_2.sql b/utils/scenarios/perf/sql/300/week_scenario_jt_2.sql deleted file mode 100644 index d9968d3d1..000000000 --- a/utils/scenarios/perf/sql/300/week_scenario_jt_2.sql +++ /dev/null @@ -1,9 +0,0 @@ -select '1992-01-01' + interval rand() * 150 day into @date_var; -select @date_var; -select l_shipdate, l_returnflag Returnflag, l_linestatus Status, - avg(l_extendedprice) avgprice, count(*), 
- min(l_extendedprice) minprice, max(l_extendedprice) maxprice -from lineitem -where l_shipdate = @date_var -group by 1,2,3; - diff --git a/utils/scenarios/perf/sql/300/week_scenario_jt_3.1.sql b/utils/scenarios/perf/sql/300/week_scenario_jt_3.1.sql deleted file mode 100644 index f459841b5..000000000 --- a/utils/scenarios/perf/sql/300/week_scenario_jt_3.1.sql +++ /dev/null @@ -1,7 +0,0 @@ -select '1992-01-01' + interval rand() * 365 day into @date_var; -select l_shipdate, l_returnflag Returnflag, l_linestatus Status, - avg(l_extendedprice) avgprice, count(*), - min(l_extendedprice) minprice, max(l_extendedprice) maxprice -from lineitem -where l_receiptdate between @date_var and @date_var + interval 5 day -group by 1,2,3; diff --git a/utils/scenarios/perf/sql/300/week_scenario_jt_3.sql b/utils/scenarios/perf/sql/300/week_scenario_jt_3.sql deleted file mode 100644 index 51bc20080..000000000 --- a/utils/scenarios/perf/sql/300/week_scenario_jt_3.sql +++ /dev/null @@ -1,8 +0,0 @@ -select '1992-01-01' + interval rand() * 150 day into @date_var; -select @date_var; -select l_shipdate, l_returnflag Returnflag, l_linestatus Status, - avg(l_extendedprice) avgprice, count(*), - min(l_extendedprice) minprice, max(l_extendedprice) maxprice -from lineitem -where l_receiptdate between @date_var and @date_var + interval 5 day -group by 1,2,3; diff --git a/utils/scenarios/perf/sql/300/week_scenario_jt_4.1.sql b/utils/scenarios/perf/sql/300/week_scenario_jt_4.1.sql deleted file mode 100644 index 767bb7257..000000000 --- a/utils/scenarios/perf/sql/300/week_scenario_jt_4.1.sql +++ /dev/null @@ -1,7 +0,0 @@ -select '1992-01-01' + interval rand() * 365 day into @date_var; -select l_shipdate, l_returnflag Returnflag, l_linestatus Status, - avg(l_extendedprice) avgprice, count(*), - min(l_extendedprice) minprice, max(l_extendedprice) maxprice -from lineitem -where l_commitdate between @date_var and @date_var + interval 15 day -group by 1,2,3; diff --git a/utils/scenarios/perf/sql/300/week_scenario_jt_4.sql b/utils/scenarios/perf/sql/300/week_scenario_jt_4.sql deleted file mode 100644 index 89831e758..000000000 --- a/utils/scenarios/perf/sql/300/week_scenario_jt_4.sql +++ /dev/null @@ -1,8 +0,0 @@ -select '1992-01-01' + interval rand() * 150 day into @date_var; -select @date_var; -select l_shipdate, l_returnflag Returnflag, l_linestatus Status, - avg(l_extendedprice) avgprice, count(*), - min(l_extendedprice) minprice, max(l_extendedprice) maxprice -from lineitem -where l_commitdate between @date_var and @date_var + interval 15 day -group by 1,2,3; diff --git a/utils/scenarios/perf/sql/301/week_scenario_jt_1.sql b/utils/scenarios/perf/sql/301/week_scenario_jt_1.sql deleted file mode 100644 index 9f46d6525..000000000 --- a/utils/scenarios/perf/sql/301/week_scenario_jt_1.sql +++ /dev/null @@ -1,7 +0,0 @@ -select c_nationkey, count(*), sum(o_totalprice) Revenue, avg(c_acctbal) -from customer, orders -where c_acctbal > 9963 and c_nationkey < 5 -and o_custkey = c_custkey -and o_orderdate <= '1994-03-13' -group by c_nationkey -order by 1; diff --git a/utils/scenarios/perf/sql/301/week_scenario_jt_2.sql b/utils/scenarios/perf/sql/301/week_scenario_jt_2.sql deleted file mode 100644 index 79067f7ce..000000000 --- a/utils/scenarios/perf/sql/301/week_scenario_jt_2.sql +++ /dev/null @@ -1,8 +0,0 @@ -select '1992-01-01' + interval rand() * 365 day into @date_var; -select l_shipdate, l_returnflag Returnflag, l_linestatus Status, - avg(l_extendedprice) avgprice, count(*), - min(l_extendedprice) minprice, max(l_extendedprice) 
-from lineitem
-where l_shipdate = @date_var
-group by 1,2,3;
-
diff --git a/utils/scenarios/perf/sql/302/week_scenario_jt_1.sql b/utils/scenarios/perf/sql/302/week_scenario_jt_1.sql
deleted file mode 100644
index 9f46d6525..000000000
--- a/utils/scenarios/perf/sql/302/week_scenario_jt_1.sql
+++ /dev/null
@@ -1,7 +0,0 @@
-select c_nationkey, count(*), sum(o_totalprice) Revenue, avg(c_acctbal)
-from customer, orders
-where c_acctbal > 9963 and c_nationkey < 5
-and o_custkey = c_custkey
-and o_orderdate <= '1994-03-13'
-group by c_nationkey
-order by 1;
diff --git a/utils/scenarios/perf/sql/302/week_scenario_jt_2.sql b/utils/scenarios/perf/sql/302/week_scenario_jt_2.sql
deleted file mode 100644
index 79067f7ce..000000000
--- a/utils/scenarios/perf/sql/302/week_scenario_jt_2.sql
+++ /dev/null
@@ -1,8 +0,0 @@
-select '1992-01-01' + interval rand() * 365 day into @date_var;
-select l_shipdate, l_returnflag Returnflag, l_linestatus Status,
- avg(l_extendedprice) avgprice, count(*),
- min(l_extendedprice) minprice, max(l_extendedprice) maxprice
-from lineitem
-where l_shipdate = @date_var
-group by 1,2,3;
-
diff --git a/utils/scenarios/perf/sql/302/week_scenario_jt_3.sql b/utils/scenarios/perf/sql/302/week_scenario_jt_3.sql
deleted file mode 100644
index f459841b5..000000000
--- a/utils/scenarios/perf/sql/302/week_scenario_jt_3.sql
+++ /dev/null
@@ -1,7 +0,0 @@
-select '1992-01-01' + interval rand() * 365 day into @date_var;
-select l_shipdate, l_returnflag Returnflag, l_linestatus Status,
- avg(l_extendedprice) avgprice, count(*),
- min(l_extendedprice) minprice, max(l_extendedprice) maxprice
-from lineitem
-where l_receiptdate between @date_var and @date_var + interval 5 day
-group by 1,2,3;
diff --git a/utils/scenarios/perf/sql/303/week_scenario_jt_1.sql b/utils/scenarios/perf/sql/303/week_scenario_jt_1.sql
deleted file mode 100644
index 9f46d6525..000000000
--- a/utils/scenarios/perf/sql/303/week_scenario_jt_1.sql
+++ /dev/null
@@ -1,7 +0,0 @@
-select c_nationkey, count(*), sum(o_totalprice) Revenue, avg(c_acctbal)
-from customer, orders
-where c_acctbal > 9963 and c_nationkey < 5
-and o_custkey = c_custkey
-and o_orderdate <= '1994-03-13'
-group by c_nationkey
-order by 1;
diff --git a/utils/scenarios/perf/sql/303/week_scenario_jt_2.sql b/utils/scenarios/perf/sql/303/week_scenario_jt_2.sql
deleted file mode 100644
index 79067f7ce..000000000
--- a/utils/scenarios/perf/sql/303/week_scenario_jt_2.sql
+++ /dev/null
@@ -1,8 +0,0 @@
-select '1992-01-01' + interval rand() * 365 day into @date_var;
-select l_shipdate, l_returnflag Returnflag, l_linestatus Status,
- avg(l_extendedprice) avgprice, count(*),
- min(l_extendedprice) minprice, max(l_extendedprice) maxprice
-from lineitem
-where l_shipdate = @date_var
-group by 1,2,3;
-
diff --git a/utils/scenarios/perf/sql/303/week_scenario_jt_3.sql b/utils/scenarios/perf/sql/303/week_scenario_jt_3.sql
deleted file mode 100644
index f459841b5..000000000
--- a/utils/scenarios/perf/sql/303/week_scenario_jt_3.sql
+++ /dev/null
@@ -1,7 +0,0 @@
-select '1992-01-01' + interval rand() * 365 day into @date_var;
-select l_shipdate, l_returnflag Returnflag, l_linestatus Status,
- avg(l_extendedprice) avgprice, count(*),
- min(l_extendedprice) minprice, max(l_extendedprice) maxprice
-from lineitem
-where l_receiptdate between @date_var and @date_var + interval 5 day
-group by 1,2,3;
diff --git a/utils/scenarios/perf/sql/303/week_scenario_jt_4.sql b/utils/scenarios/perf/sql/303/week_scenario_jt_4.sql
deleted file mode 100644
index 767bb7257..000000000
--- a/utils/scenarios/perf/sql/303/week_scenario_jt_4.sql
+++ /dev/null
@@ -1,7 +0,0 @@
-select '1992-01-01' + interval rand() * 365 day into @date_var;
-select l_shipdate, l_returnflag Returnflag, l_linestatus Status,
- avg(l_extendedprice) avgprice, count(*),
- min(l_extendedprice) minprice, max(l_extendedprice) maxprice
-from lineitem
-where l_commitdate between @date_var and @date_var + interval 15 day
-group by 1,2,3;
diff --git a/utils/scenarios/perf/sql/4/q2.3.4.1.sql b/utils/scenarios/perf/sql/4/q2.3.4.1.sql
deleted file mode 100644
index 692770771..000000000
--- a/utils/scenarios/perf/sql/4/q2.3.4.1.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select min(l_suppkey) from lineitem;
diff --git a/utils/scenarios/perf/sql/4/q2.3.4.2.sql b/utils/scenarios/perf/sql/4/q2.3.4.2.sql
deleted file mode 100644
index 683dddea2..000000000
--- a/utils/scenarios/perf/sql/4/q2.3.4.2.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select max(l_shipdate) from lineitem;
diff --git a/utils/scenarios/perf/sql/4/q2.3.4.3.sql b/utils/scenarios/perf/sql/4/q2.3.4.3.sql
deleted file mode 100644
index cd920adae..000000000
--- a/utils/scenarios/perf/sql/4/q2.3.4.3.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select sum(l_orderkey) from lineitem where l_suppkey < 100000;
diff --git a/utils/scenarios/perf/sql/4/q2.3.4.4.sql b/utils/scenarios/perf/sql/4/q2.3.4.4.sql
deleted file mode 100644
index aae97d63f..000000000
--- a/utils/scenarios/perf/sql/4/q2.3.4.4.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select avg(l_extendedprice) from lineitem;
diff --git a/utils/scenarios/perf/sql/4/q4.8.1.sql b/utils/scenarios/perf/sql/4/q4.8.1.sql
deleted file mode 100644
index c8a040dab..000000000
--- a/utils/scenarios/perf/sql/4/q4.8.1.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select count(*) from lineitem where l_orderkey < 1000000;
diff --git a/utils/scenarios/perf/sql/4/q4.8.2.sql b/utils/scenarios/perf/sql/4/q4.8.2.sql
deleted file mode 100644
index 09cc7a021..000000000
--- a/utils/scenarios/perf/sql/4/q4.8.2.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select max(l_shipdate) from lineitem where l_orderkey < 1000000;
diff --git a/utils/scenarios/perf/sql/4/q4.8.3.sql b/utils/scenarios/perf/sql/4/q4.8.3.sql
deleted file mode 100644
index c602223bf..000000000
--- a/utils/scenarios/perf/sql/4/q4.8.3.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select min(o_orderstatus), max(o_orderstatus) from orders where o_orderkey < 1000000;
diff --git a/utils/scenarios/perf/sql/4/q4.8.4.sql b/utils/scenarios/perf/sql/4/q4.8.4.sql
deleted file mode 100644
index a236b16e5..000000000
--- a/utils/scenarios/perf/sql/4/q4.8.4.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select sum(o_totalprice), avg(o_totalprice), count(*) from orders where o_orderkey < 1000000;
diff --git a/utils/scenarios/perf/sql/4/q4.8.5.sql b/utils/scenarios/perf/sql/4/q4.8.5.sql
deleted file mode 100644
index 503faa7d7..000000000
--- a/utils/scenarios/perf/sql/4/q4.8.5.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select l_quantity, count(*) from lineitem where l_orderkey < 1000000 group by l_quantity order by l_quantity;
diff --git a/utils/scenarios/perf/sql/4/q4.8.6.sql b/utils/scenarios/perf/sql/4/q4.8.6.sql
deleted file mode 100644
index 95485cbf1..000000000
--- a/utils/scenarios/perf/sql/4/q4.8.6.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select l_shipmode, max(l_shipdate), min(l_shipdate) from lineitem where l_orderkey < 1000000 group by l_shipmode order by 1;
diff --git a/utils/scenarios/perf/sql/4/q4.8.7.sql b/utils/scenarios/perf/sql/4/q4.8.7.sql
deleted file mode 100644
index 80a788e34..000000000
--- a/utils/scenarios/perf/sql/4/q4.8.7.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select o_orderpriority, min(o_orderstatus), max(o_orderstatus) from orders where o_orderkey < 1000000 group by o_orderpriority order by o_orderpriority;
diff --git a/utils/scenarios/perf/sql/4/q4.8.8.sql b/utils/scenarios/perf/sql/4/q4.8.8.sql
deleted file mode 100644
index a283c0bbf..000000000
--- a/utils/scenarios/perf/sql/4/q4.8.8.sql
+++ /dev/null
@@ -1 +0,0 @@
-Select o_shippriority, sum(o_totalprice), avg(o_totalprice), count(*) from orders where o_orderkey < 1000000 group by o_shippriority;
diff --git a/utils/scenarios/perf/sql/4/q4.8.9.sql b/utils/scenarios/perf/sql/4/q4.8.9.sql
deleted file mode 100644
index 7f53f8a96..000000000
--- a/utils/scenarios/perf/sql/4/q4.8.9.sql
+++ /dev/null
@@ -1,19 +0,0 @@
-select
- l_returnflag,
- l_linestatus,
- sum(l_quantity) as sum_qty,
- sum(l_extendedprice) as sum_base_price,
- avg(l_quantity) as avg_qty,
- avg(l_extendedprice) as avg_price,
- avg(l_discount) as avg_disc,
- count(*) as count_order
-from
- lineitem
-where
- l_shipdate <= date '1998-09-26'
-
-group by l_returnflag,
- l_linestatus
-order by
- l_returnflag,
- l_linestatus;
diff --git a/utils/scenarios/perf/sql/4/q5.3.1.sql b/utils/scenarios/perf/sql/4/q5.3.1.sql
deleted file mode 100644
index 3eb2914b4..000000000
--- a/utils/scenarios/perf/sql/4/q5.3.1.sql
+++ /dev/null
@@ -1,9 +0,0 @@
-select l_shipdate Revenue_day,
- l_discount district,
- max(l_shipdate) Latest_date,
- sum(l_extendedprice) Total_Revenue,
-count(*) Sales_items
-from lineitem
-group by l_shipdate, l_discount
-order by 1,2;
-
diff --git a/utils/scenarios/perf/sql/5/q2.3.2.1.1.sql b/utils/scenarios/perf/sql/5/q2.3.2.1.1.sql
deleted file mode 100644
index b646f8c63..000000000
--- a/utils/scenarios/perf/sql/5/q2.3.2.1.1.sql
+++ /dev/null
@@ -1,4 +0,0 @@
-select count(*) from part, lineitem
- where p_retailprice < 913.65
- and p_partkey = l_suppkey
- and l_shipdate < '1992-04-09';
diff --git a/utils/scenarios/perf/sql/5/q2.3.2.1.2.sql b/utils/scenarios/perf/sql/5/q2.3.2.1.2.sql
deleted file mode 100644
index 500f86798..000000000
--- a/utils/scenarios/perf/sql/5/q2.3.2.1.2.sql
+++ /dev/null
@@ -1,4 +0,0 @@
-select count(*) from part, lineitem
- where p_retailprice < 944.23
- and p_partkey = l_suppkey
- and l_shipdate < '1992-04-09';
diff --git a/utils/scenarios/perf/sql/5/q2.3.2.1.3.sql b/utils/scenarios/perf/sql/5/q2.3.2.1.3.sql
deleted file mode 100644
index 32b709b1d..000000000
--- a/utils/scenarios/perf/sql/5/q2.3.2.1.3.sql
+++ /dev/null
@@ -1,4 +0,0 @@
-select count(*) from part, lineitem
- where p_retailprice < 904.01
- and p_partkey = l_suppkey
- and l_shipdate < '1993-04-07';
diff --git a/utils/scenarios/perf/sql/5/q2.3.2.1.4.sql b/utils/scenarios/perf/sql/5/q2.3.2.1.4.sql
deleted file mode 100644
index 72b5f571f..000000000
--- a/utils/scenarios/perf/sql/5/q2.3.2.1.4.sql
+++ /dev/null
@@ -1,4 +0,0 @@
-select count(*) from part, lineitem
- where p_retailprice < 913.65
- and p_partkey = l_suppkey
- and l_shipdate < '1993-04-07';
diff --git a/utils/scenarios/perf/sql/5/q2.3.6.sql b/utils/scenarios/perf/sql/5/q2.3.6.sql
deleted file mode 100644
index d3b7990e7..000000000
--- a/utils/scenarios/perf/sql/5/q2.3.6.sql
+++ /dev/null
@@ -1,9 +0,0 @@
-select p_brand, sum(l_quantity) tot_qty,
- avg(l_quantity) avg_qty, count(*)
-from part, lineitem
-where l_shipdate between '1996-04-01' and '1996-04-14'
-and p_size = 5
-and p_partkey = l_partkey
-group by p_brand
-order by 1;
-
diff --git a/utils/scenarios/perf/sql/5/q5.1.1.sql b/utils/scenarios/perf/sql/5/q5.1.1.sql
deleted file mode 100644
index abfcef2fc..000000000
--- a/utils/scenarios/perf/sql/5/q5.1.1.sql
+++ /dev/null
@@ -1,8 +0,0 @@
-select p_brand, sum(l_quantity) tot_qty,
- avg(l_quantity) avg_qty
-from part, lineitem
-where l_shipdate between '1996-04-01' and '1996-04-14'
-and p_size = 5
-and p_partkey = l_partkey
-group by p_brand
-order by 1;
diff --git a/utils/scenarios/perf/sql/5/q5.1.2.sql b/utils/scenarios/perf/sql/5/q5.1.2.sql
deleted file mode 100644
index 20e15a8d1..000000000
--- a/utils/scenarios/perf/sql/5/q5.1.2.sql
+++ /dev/null
@@ -1,9 +0,0 @@
-select n_name, l_commitdate, sum(s_acctbal) sum_bal,
-sum(l_extendedprice) sum_price, min(s_suppkey) minskey, count(*)
-from nation, supplier, lineitem
-where s_nationkey in (1,2)
-and l_commitdate between '1998-01-01' and '1998-01-07'
-and n_nationkey = s_nationkey
-and s_suppkey = l_suppkey
-group by n_name, l_commitdate
-order by 1, 2;
diff --git a/utils/scenarios/perf/sql/5/q5.1.3.sql b/utils/scenarios/perf/sql/5/q5.1.3.sql
deleted file mode 100644
index 3066bf538..000000000
--- a/utils/scenarios/perf/sql/5/q5.1.3.sql
+++ /dev/null
@@ -1 +0,0 @@
-select * from lineitem where l_orderkey = 600000 order by l_orderkey, l_linenumber;
diff --git a/utils/scenarios/perf/sql/5/q5.2.1.sql b/utils/scenarios/perf/sql/5/q5.2.1.sql
deleted file mode 100644
index a092414b6..000000000
--- a/utils/scenarios/perf/sql/5/q5.2.1.sql
+++ /dev/null
@@ -1,13 +0,0 @@
-select n_name, sum(l_quantity), sum(l_extendedprice),
- max(c26_nbr_10),
- sum(c23_nbr_10),
- avg(c28_nbr_10),
- min(c38_nbr_14),
- max(c61_nbr_20),
- count(c76_nbr_4),
- avg(c89_nbr_7)
-from nation, demographics200
-where c23_nbr_10 between 1950 and 2000
- and n_regionkey = 1
- and n_nationkey = c76_nbr_4
-group by n_name order by n_name;
diff --git a/utils/scenarios/perf/sql/5/q5.2.2.sql b/utils/scenarios/perf/sql/5/q5.2.2.sql
deleted file mode 100644
index b46a849e0..000000000
--- a/utils/scenarios/perf/sql/5/q5.2.2.sql
+++ /dev/null
@@ -1,12 +0,0 @@
-select n_name, sum(l_quantity), sum(l_extendedprice),
- max(c26_nbr_10),
- sum(c23_nbr_10),
- avg(c28_nbr_10),
- min(c38_nbr_14),
- max(c61_nbr_20),
- count(c76_nbr_4),
- avg(c89_nbr_7)
-from nation, demographics200
-where c76_nbr_4 = 5 and c38_nbr_14 < 170000
- and n_nationkey = c83_nbr_6
-group by n_name order by n_name;
diff --git a/utils/scenarios/perf/sql/5/q5.2.3.sql b/utils/scenarios/perf/sql/5/q5.2.3.sql
deleted file mode 100644
index 10f95e264..000000000
--- a/utils/scenarios/perf/sql/5/q5.2.3.sql
+++ /dev/null
@@ -1,10 +0,0 @@
-select count(*),max(c26_nbr_10),max(c23_nbr_10),max(c28_nbr_10),
-max(c38_nbr_14),max(c61_nbr_20),max(c76_nbr_4),max(c89_nbr_7)
-from demographics200
-where l_orderkey < 7000009
- and c23_nbr_10 < 70000
- and c26_nbr_10 < 700000
- and c28_nbr_10 < 7000
- and c38_nbr_14 < 7050000
- and c61_nbr_20 < 70500000
- and c76_nbr_4 < 28 ;
diff --git a/utils/scenarios/perf/sql/5/q5.2.4.sql b/utils/scenarios/perf/sql/5/q5.2.4.sql
deleted file mode 100644
index 20b161998..000000000
--- a/utils/scenarios/perf/sql/5/q5.2.4.sql
+++ /dev/null
@@ -1,3 +0,0 @@
-select l_shipdate, l_suppkey, l_quantity, l_extendedprice, l_comment
-from lineitem where l_orderkey = 600000 order by 1, 2, 3, 4, 5;
-
diff --git a/utils/scenarios/perf/sql/5/q5.2.5.sql b/utils/scenarios/perf/sql/5/q5.2.5.sql
deleted file mode 100644
index 32cf13146..000000000
--- a/utils/scenarios/perf/sql/5/q5.2.5.sql
+++ /dev/null
@@ -1,7 +0,0 @@
-select max(l_orderkey), max(l_partkey), max(l_suppkey), count(*) from lineitem
-where l_partkey < 25000000
-and l_suppkey < 1250000
-and l_orderkey < 100000000
-and l_linenumber = 4
-and l_quantity <= 5;
-
diff --git a/utils/scenarios/perf/sql/5/q5.4.1.sql b/utils/scenarios/perf/sql/5/q5.4.1.sql
deleted file mode 100644
index 32aa140fe..000000000
--- a/utils/scenarios/perf/sql/5/q5.4.1.sql
+++ /dev/null
@@ -1,8 +0,0 @@
-select l_shipdate, sum(l_extendedprice), avg(p_retailprice)
-from part, lineitem
-where l_shipdate between '1993-01-01' and '1994-06-30'
-and p_retailprice >= 2095
-and p_size <= 5
-and p_partkey = l_partkey
-group by l_shipdate
-order by 1;
diff --git a/utils/scenarios/perf/sql/5/q5.4.2.sql b/utils/scenarios/perf/sql/5/q5.4.2.sql
deleted file mode 100644
index aac7a5898..000000000
--- a/utils/scenarios/perf/sql/5/q5.4.2.sql
+++ /dev/null
@@ -1,4 +0,0 @@
-select count(*) from partsupp where ps_suppkey in
- (1,10,100,1000,10000,1000000,1000000,
- 2,20,200,2000,20000,2000000,2000000);
-
diff --git a/utils/scenarios/perf/sql/5/q5.4.3.sql b/utils/scenarios/perf/sql/5/q5.4.3.sql
deleted file mode 100644
index fc00d4e26..000000000
--- a/utils/scenarios/perf/sql/5/q5.4.3.sql
+++ /dev/null
@@ -1,4 +0,0 @@
-select count(*) from orders
-where o_custkey in (1,10,100,1000,10000,1000000,1000000,
- 2,20,200,2000,20000,2000000,2000000);
-
diff --git a/utils/scenarios/perf/sql/6/lineitemCPTest.sql b/utils/scenarios/perf/sql/6/lineitemCPTest.sql
deleted file mode 100644
index 3280f4387..000000000
--- a/utils/scenarios/perf/sql/6/lineitemCPTest.sql
+++ /dev/null
@@ -1 +0,0 @@
-select * from lineitem where l_orderkey = 50000000 and l_partkey=107079816;
diff --git a/utils/scenarios/perf/test/pfSubmitBulkTest.sh b/utils/scenarios/perf/test/pfSubmitBulkTest.sh
deleted file mode 100755
index 1a7d73eaa..000000000
--- a/utils/scenarios/perf/test/pfSubmitBulkTest.sh
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/bin/bash
-#=========================================================================================
-#
- if [ $# -lt 2 ]; then
- echo Syntax pfSumbitBulkTest.sh dbName PM1ServerName
- echo Exiting.....
- exit 1
- fi
-#
- testID=2
-#
- testDB=$1
- PM1=$2
- curDir=`pwd`
-#---------------------------------------------------------------------------
-# Create a cpimport script, which will be executed by a PM remotely.
-#---------------------------------------------------------------------------
- cpimportScriptName="cpimportScript.sh"
-#
- echo \#\!/bin/bash > $cpimportScriptName
- echo \# >> $cpimportScriptName
- echo cd /usr/local/mariadb/columnstore/data/bulk/log >> $cpimportScriptName
- echo rm -f Jobxml_9999.log >> $cpimportScriptName
- echo rm -f Job_9999.log >> $cpimportScriptName
- echo rm -f fileStats.txt >> $cpimportScriptName
- echo rm -f finished.txt >> $cpimportScriptName
- echo "ls -alh /usr/local/mariadb/columnstore/data/bulk/data/import/*.tbl > fileStats.txt" >> $cpimportScriptName
-# echo "wc -l /usr/local/mariadb/columnstore/data/bulk/data/import/*.tbl >> fileStats.txt" >> $cpimportScriptName
-#
- echo /usr/local/mariadb/columnstore/bin/colxml $testDB -r 2 -j 9999 >> $cpimportScriptName
- echo sleep 5 >> $cpimportScriptName
- echo sync >> $cpimportScriptName
-#
- echo \# /usr/local/mariadb/columnstore/bin/cpimport -j 9999 -i >> $cpimportScriptName
- echo sleep 5 >> $cpimportScriptName
- echo touch finished.txt >> $cpimportScriptName
- echo sync >> $cpimportScriptName
- chmod 777 $cpimportScriptName
-#---------------------------------------------------------------------------
-# Create a bulktest script, which will be submitted to the execution engine.
-#---------------------------------------------------------------------------
- bulkScriptName="bulkScript.sh"
-#
- echo \#/bin/bash > $bulkScriptName
- echo \# >> $bulkScriptName
-#
- echo "/root/genii/utils/scenarios/common/sh/remote_command.sh $PM1 qalpont! \"/mnt/parentOAM$curDir/$cpimportScriptName\"" >> $bulkScriptName
-#
- echo sleep 5 >> $bulkScriptName
- echo "while [ ! -f /mnt/pm1/usr/local/mariadb/columnstore/data/bulk/log/finished.txt ]; do" >> $bulkScriptName
- echo sleep 5 >> $bulkScriptName
- echo echo waiting...... >> $bulkScriptName
- echo done >> $bulkScriptName
-#
- echo cp /mnt/pm1/usr/local/mariadb/columnstore/data/bulk/job/Job_9999.xml . >> $bulkScriptName
- echo cp /mnt/pm1/usr/local/mariadb/columnstore/data/bulk/log/Jobxml_9999.log . >> $bulkScriptName
- echo cp /mnt/pm1/usr/local/mariadb/columnstore/data/bulk/log/Job_9999.log . >> $bulkScriptName
- echo cp /mnt/pm1/usr/local/mariadb/columnstore/data/bulk/log/fileStats.txt . >> $bulkScriptName
-#
- chmod 777 $bulkScriptName
-#
-#append current directory path to to script file name
- scriptFileName=`pwd`\/$bulkScriptName
-#
- echo testID=$testID >testInfo.txt
- echo testDB=$testDB >>testInfo.txt
- echo scriptName=$scriptFileName >>testInfo.txt
- echo sessions=1 >>testInfo.txt
- echo iterations=1 >>testInfo.txt
-#
- autopilotExecDir=`pwd`
- export autopilotExecDir
-#
- /root/genii/utils/scenarios/common/sh/testExecEngine.sh > testExec.log
- testRunID=`cat testInfo.txt |grep testResultDir |awk -F"=" '{print $2}'`
- /root/genii/utils/scenarios/common/sh/collExecResult.sh $testRunID >collExecResult.log
-# /root/genii/utils/scenarios/common/sh/insertExecResult.sh $testRunID >bulkExecResult.log
-
-
diff --git a/utils/scenarios/perf/test/pfSubmitGroupTest.sh b/utils/scenarios/perf/test/pfSubmitGroupTest.sh
deleted file mode 100755
index 74280dde0..000000000
--- a/utils/scenarios/perf/test/pfSubmitGroupTest.sh
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/bin/bash
-#
- grpNum=$1
- testDB=$2
- numConCur=$3
- numRepeat=$4
- testType=$5
- timeoutVal=$6
- dbmsType=$7
-
-#
- testID=1
-#
- if [ $# -lt 6 ]; then
- echo Syntax: pfSubmitGroupTest.sh grpNum testDB numOfConcurrentUsers numOfRuns testType timeoutValue [dbmsType]
- echo Exit.....
- exit 1
- fi
-#
- testType=`echo $testType |tr "[:lower:]" "[:upper:]"`
- dbmsType=`echo $dbmsType |tr "[:lower:]" "[:upper:]"`
-#
- if [ $testType = "M" ]; then
- scriptFileName=GroupQueryMixed.sql
- else
- scriptFileName=GroupQuery$grpNum.sql
- fi
-#
- if [ $dbmsType != "O" ]; then
- dbmsType=M
- fi
- /root/genii/utils/scenarios/perf/sh/pfGetGroupQueries.sh $grpNum $testType $dbmsType
-
-#
-#append current directory path to to script file name
- scriptFileName=`pwd`\/$scriptFileName
-#
-# Create test info file for the execution engine
-#
- echo testID=$testID >testInfo.txt
- echo testDB=$testDB >>testInfo.txt
- echo scriptName=$scriptFileName >>testInfo.txt
- echo sessions=$numConCur >>testInfo.txt
- echo iterations=$numRepeat >>testInfo.txt
- echo timeoutVal=$timeoutVal >>testInfo.txt
- echo grpTestType=$testType >>testInfo.txt
- echo grpTestNum=$grpNum >>testInfo.txt
- echo dbmsType=$dbmsType >>testInfo.txt
-#
- autopilotExecDir=`pwd`
- export autopilotExecDir
-#
- /root/genii/utils/scenarios/common/sh/testExecEngine.sh > $scriptFileName.log
- testRunID=`cat testInfo.txt |grep testResultDir |awk -F"=" '{print $2}'`
- /root/genii/utils/scenarios/common/sh/collExecResult.sh $testRunID > collExecResult.log
- #/root/genii/utils/scenarios/common/sh/insertExecResult.sh $testRunID > insertExecResult.log
- #/root/genii/utils/scenarios/perf/sh/pfCopyResults.sh $testRunID >copyResults.log
-
-
-
-
diff --git a/utils/scenarios/perf/testcase/grptest.sh b/utils/scenarios/perf/testcase/grptest.sh
deleted file mode 100644
index 57bac4d81..000000000
--- a/utils/scenarios/perf/testcase/grptest.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/bash
-#
-# This test should be executed on the UM module where mySQL front end is installed.
-#
-# grpNum= group number
-# testDB= database to be used
-# numConcur number of concurrent user
-# numRepeat number of iterations to repeat the test
-# testType D = disk run. Flush disk catch before running a query
-# C = cache run. Run each query twice, one disk and one cache
-# S = stream run. Flush disk cache one, then run all queries in the group without flush
-# M = mixed run. Use query groups 1 to 5 as one group. all users will pick queries from the group.
-# Each query will be executed only once.
-# timeoutVal Timeout value to abort the test. Not yet implemented.
-# dbmsType DBMS type, M for mySQL.
-#
- grpNum=1
- testDB=tpch100
- numConcur=2
- numRepeat=1
- testType=C
- timeoutVal=0
- dbmsType=M
-#
- /root/genii/utils/scenarios/perf/test/pfSubmitGroupTest.sh $grpNum $testDB $numConcur $numRepeat $testType $timeoutVal $dbmsType
\ No newline at end of file