diff --git ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
index 33f227f..79d6a48 100644
--- ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
+++ ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
@@ -141,6 +141,10 @@ public boolean accept(File filePath) {
private String hadoopVersion;
+ private String initScript;
+
+ private String cleanupScript;
+
public void setHadoopVersion(String ver) {
this.hadoopVersion = ver;
}
@@ -197,6 +201,22 @@ public String getTemplate() {
return template;
}
+ public String getInitScript() {
+ return initScript;
+ }
+
+ public void setInitScript(String initScript) {
+ this.initScript = initScript;
+ }
+
+ public String getCleanupScript() {
+ return cleanupScript;
+ }
+
+ public void setCleanupScript(String cleanupScript) {
+ this.cleanupScript = cleanupScript;
+ }
+
public void setHiveRootDirectory(File hiveRootDirectory) {
try {
this.hiveRootDirectory = hiveRootDirectory.getCanonicalPath();
@@ -444,6 +464,8 @@ public void execute() throws BuildException {
ctx.put("clusterMode", clusterMode);
ctx.put("hiveConfDir", escapePath(hiveConfDir));
ctx.put("hadoopVersion", hadoopVersion);
+ ctx.put("initScript", initScript);
+ ctx.put("cleanupScript", cleanupScript);
File outFile = new File(outDir, className + ".java");
FileWriter writer = new FileWriter(outFile);
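
For orientation, a hedged sketch of how these new properties are driven: Ant instantiates the task and calls the bean setters reflectively for each attribute, so the initScript/cleanupScript attributes added to itests/qtest/pom.xml further down flow straight into the Velocity context consumed by the templates:

    // Illustrative only; Ant performs the equivalent of this via reflection.
    QTestGenTask task = new QTestGenTask();
    task.setInitScript("q_test_init.sql");       // <qtestgen initScript="q_test_init.sql" .../>
    task.setCleanupScript("q_test_cleanup.sql"); // <qtestgen cleanupScript="q_test_cleanup.sql" .../>
    task.execute();  // puts "initScript"/"cleanupScript" into the Velocity context (see ctx.put above)
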
diff --git data/conf/hive-site.xml data/conf/hive-site.xml
index fe8080a..42260a9 100644
--- data/conf/hive-site.xml
+++ data/conf/hive-site.xml
@@ -112,6 +112,12 @@
+<property>
+  <name>test.data.scripts</name>
+  <value>${hive.root}/data/scripts</value>
+  <description></description>
+</property>
+
<property>
  <name>hive.jar.path</name>
  <value>${maven.local.repository}/org/apache/hive/hive-exec/${hive.version}/hive-exec-${hive.version}.jar</value>
diff --git data/conf/tez/hive-site.xml data/conf/tez/hive-site.xml
index 0c99bb6..0b3877c 100644
--- data/conf/tez/hive-site.xml
+++ data/conf/tez/hive-site.xml
@@ -121,6 +121,12 @@
+<property>
+  <name>test.data.scripts</name>
+  <value>${hive.root}/data/scripts</value>
+  <description></description>
+</property>
+
<property>
  <name>hive.jar.path</name>
  <value>${maven.local.repository}/org/apache/hive/hive-exec/${hive.version}/hive-exec-${hive.version}.jar</value>
diff --git data/scripts/q_test_cleanup.sql data/scripts/q_test_cleanup.sql
index 31bd720..8ec0f9f 100644
--- data/scripts/q_test_cleanup.sql
+++ data/scripts/q_test_cleanup.sql
@@ -7,4 +7,12 @@ DROP TABLE IF EXISTS srcbucket;
DROP TABLE IF EXISTS srcbucket2;
DROP TABLE IF EXISTS srcpart;
DROP TABLE IF EXISTS primitives;
-
+DROP TABLE IF EXISTS dest1;
+DROP TABLE IF EXISTS dest2;
+DROP TABLE IF EXISTS dest3;
+DROP TABLE IF EXISTS dest4;
+DROP TABLE IF EXISTS dest4_sequencefile;
+DROP TABLE IF EXISTS dest_j1;
+DROP TABLE IF EXISTS dest_g1;
+DROP TABLE IF EXISTS dest_g2;
+DROP TABLE IF EXISTS fetchtask_ioexception;
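
The longer drop list mirrors the tables that QTestUtil.cleanUp() used to drop from Java; the cleanup path now simply replays this file through the CLI, as in the QTestUtil.java hunk below:

    String cleanupCommands = readEntireFileIntoString(new File(cleanupScript));
    LOG.info("Cleanup (" + cleanupScript + "):\n" + cleanupCommands);
    cliDriver.processLine(cleanupCommands);
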
diff --git data/scripts/q_test_init.sql data/scripts/q_test_init.sql
index 12afdf3..c4f3f4c 100644
--- data/scripts/q_test_init.sql
+++ data/scripts/q_test_init.sql
@@ -3,7 +3,7 @@
--
DROP TABLE IF EXISTS src;
-CREATE TABLE src (key STRING, value STRING) STORED AS TEXTFILE;
+CREATE TABLE src (key STRING COMMENT 'default', value STRING COMMENT 'default') STORED AS TEXTFILE;
LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt" INTO TABLE src;
@@ -12,7 +12,7 @@ LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt" INTO TABLE src;
--
DROP TABLE IF EXISTS src1;
-CREATE TABLE src1 (key STRING, value STRING) STORED AS TEXTFILE;
+CREATE TABLE src1 (key STRING COMMENT 'default', value STRING COMMENT 'default') STORED AS TEXTFILE;
LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv3.txt" INTO TABLE src1;
@@ -21,7 +21,7 @@ LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv3.txt" INTO TABLE src1;
--
DROP TABLE IF EXISTS src_json;
-CREATE TABLE src_json (json STRING) STORED AS TEXTFILE;
+CREATE TABLE src_json (json STRING COMMENT 'default') STORED AS TEXTFILE;
LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/json.txt" INTO TABLE src_json;
@@ -31,7 +31,7 @@ LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/json.txt" INTO TABLE src_json;
--
DROP TABLE IF EXISTS src_sequencefile;
-CREATE TABLE src_sequencefile (key STRING, value STRING) STORED AS SEQUENCEFILE;
+CREATE TABLE src_sequencefile (key STRING COMMENT 'default', value STRING COMMENT 'default') STORED AS SEQUENCEFILE;
LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.seq" INTO TABLE src_sequencefile;
@@ -56,7 +56,7 @@ LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/complex.seq" INTO TABLE src_th
--
DROP TABLE IF EXISTS srcbucket;
-CREATE TABLE srcbucket (key INT, value STRING)
+CREATE TABLE srcbucket (key INT COMMENT 'default', value STRING COMMENT 'default')
CLUSTERED BY (key) INTO 2 BUCKETS
STORED AS TEXTFILE;
@@ -69,7 +69,7 @@ LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/srcbucket1.txt" INTO TABLE src
--
DROP TABLE IF EXISTS srcbucket2;
-CREATE TABLE srcbucket2 (key INT, value STRING)
+CREATE TABLE srcbucket2 (key INT COMMENT 'default', value STRING COMMENT 'default')
CLUSTERED BY (key) INTO 4 BUCKETS
STORED AS TEXTFILE;
@@ -82,7 +82,7 @@ LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/srcbucket21.txt" INTO TABLE sr
--
DROP TABLE IF EXISTS srcpart;
-CREATE TABLE srcpart (key STRING, value STRING)
+CREATE TABLE srcpart (key STRING COMMENT 'default', value STRING COMMENT 'default')
PARTITIONED BY (ds STRING, hr STRING)
STORED AS TEXTFILE;
@@ -99,20 +99,46 @@ LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-09", hr="12");
+--
+-- Table alltypesorc
+--
+DROP TABLE IF EXISTS alltypesorc;
+CREATE TABLE alltypesorc(
+ ctinyint TINYINT COMMENT 'default',
+ csmallint SMALLINT COMMENT 'default',
+ cint INT COMMENT 'default',
+ cbigint BIGINT COMMENT 'default',
+ cfloat FLOAT COMMENT 'default',
+ cdouble DOUBLE COMMENT 'default',
+ cstring1 STRING COMMENT 'default',
+ cstring2 STRING COMMENT 'default',
+ ctimestamp1 TIMESTAMP COMMENT 'default',
+ ctimestamp2 TIMESTAMP COMMENT 'default',
+ cboolean1 BOOLEAN COMMENT 'default',
+ cboolean2 BOOLEAN COMMENT 'default')
+ STORED AS ORC;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/alltypesorc"
+OVERWRITE INTO TABLE alltypesorc;
+
+
+--
+-- Table primitives
+--
DROP TABLE IF EXISTS primitives;
CREATE TABLE primitives (
- id INT,
- bool_col BOOLEAN,
- tinyint_col TINYINT,
- smallint_col SMALLINT,
- int_col INT,
- bigint_col BIGINT,
- float_col FLOAT,
- double_col DOUBLE,
- date_string_col STRING,
- string_col STRING,
- timestamp_col TIMESTAMP)
-PARTITIONED BY (year INT, month INT)
+ id INT COMMENT 'default',
+ bool_col BOOLEAN COMMENT 'default',
+ tinyint_col TINYINT COMMENT 'default',
+ smallint_col SMALLINT COMMENT 'default',
+ int_col INT COMMENT 'default',
+ bigint_col BIGINT COMMENT 'default',
+ float_col FLOAT COMMENT 'default',
+ double_col DOUBLE COMMENT 'default',
+ date_string_col STRING COMMENT 'default',
+ string_col STRING COMMENT 'default',
+ timestamp_col TIMESTAMP COMMENT 'default')
+PARTITIONED BY (year INT COMMENT 'default', month INT COMMENT 'default')
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ','
ESCAPED BY '\\'
@@ -130,3 +156,61 @@ OVERWRITE INTO TABLE primitives PARTITION(year=2009, month=3);
LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/types/primitives/090401.txt"
OVERWRITE INTO TABLE primitives PARTITION(year=2009, month=4);
+--
+-- Function qtest_get_java_boolean
+--
+DROP FUNCTION IF EXISTS qtest_get_java_boolean;
+CREATE FUNCTION qtest_get_java_boolean AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestGetJavaBoolean';
+
+--
+-- Table dest1
+--
+DROP TABLE IF EXISTS dest1;
+
+CREATE TABLE dest1 (key STRING COMMENT 'default', value STRING COMMENT 'default')
+STORED AS
+INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
+OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat';
+
+--
+-- Table dest2
+--
+DROP TABLE IF EXISTS dest2;
+
+CREATE TABLE dest2 (key STRING COMMENT 'default', value STRING COMMENT 'default')
+STORED AS
+INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
+OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat';
+
+--
+-- Table dest3
+--
+DROP TABLE IF EXISTS dest3;
+
+CREATE TABLE dest3 (key STRING COMMENT 'default', value STRING COMMENT 'default')
+PARTITIONED BY (ds STRING, hr STRING)
+STORED AS
+INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
+OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat';
+ALTER TABLE dest3 ADD PARTITION (ds='2008-04-08', hr='12');
+
+--
+-- Table dest4
+--
+DROP TABLE IF EXISTS dest4;
+
+CREATE TABLE dest4 (key STRING COMMENT 'default', value STRING COMMENT 'default')
+STORED AS
+INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
+OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat';
+
+--
+-- Table dest4_sequencefile
+--
+DROP TABLE IF EXISTS dest4_sequencefile;
+
+CREATE TABLE dest4_sequencefile (key STRING COMMENT 'default', value STRING COMMENT 'default')
+STORED AS
+INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
+OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat';
\ No newline at end of file
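
The ${hiveconf:test.data.dir} references in this script resolve at run time: createSources() sets the variable before feeding the whole file to the CLI, again per the QTestUtil.java hunk below:

    cliDriver.processLine("set test.data.dir=" + testFiles + ";");
    cliDriver.processLine(initCommands);
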
diff --git hbase-handler/src/test/templates/TestHBaseCliDriver.vm hbase-handler/src/test/templates/TestHBaseCliDriver.vm
index 01d596a..4b3e136 100644
--- hbase-handler/src/test/templates/TestHBaseCliDriver.vm
+++ hbase-handler/src/test/templates/TestHBaseCliDriver.vm
@@ -45,9 +45,12 @@ public class $className extends TestCase {
protected void setUp() {
MiniClusterType miniMR = MiniClusterType.valueForString("$clusterMode");
+ String initScript = "$initScript";
+ String cleanupScript = "$cleanupScript";
try {
- qt = new HBaseQTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR, setup);
+ qt = new HBaseQTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR,
+ setup, initScript, cleanupScript);
} catch (Exception e) {
System.err.println("Exception: " + e.getMessage());
e.printStackTrace();
diff --git hbase-handler/src/test/templates/TestHBaseNegativeCliDriver.vm hbase-handler/src/test/templates/TestHBaseNegativeCliDriver.vm
index 45c7338..82f0584 100644
--- hbase-handler/src/test/templates/TestHBaseNegativeCliDriver.vm
+++ hbase-handler/src/test/templates/TestHBaseNegativeCliDriver.vm
@@ -45,9 +45,12 @@ public class $className extends TestCase {
protected void setUp() {
MiniClusterType miniMR = MiniClusterType.valueForString("$clusterMode");
+ String initScript = "$initScript";
+ String cleanupScript = "$cleanupScript";
try {
- qt = new HBaseQTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR, setup);
+ qt = new HBaseQTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR,
+ setup, initScript, cleanupScript);
} catch (Exception e) {
System.err.println("Exception: " + e.getMessage());
e.printStackTrace();
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java
index 9edd7f3..633ba92 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java
@@ -88,7 +88,7 @@ public CheckResults(String outDir, String logDir, MiniClusterType miniMr,
String hadoopVer, String locationSubdir)
throws Exception
{
- super(outDir, logDir, miniMr, hadoopVer);
+ super(outDir, logDir, miniMr, hadoopVer, "", "");
this.locationSubdir = locationSubdir;
}
}
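
Passing "" for both scripts here is deliberate: the new QTestUtil constructor treats an empty name as a request for the defaults (q_test_init.sql / q_test_cleanup.sql), per the fallback added in the QTestUtil.java hunk below:

    if (initScript.isEmpty()) {
      initScript = defaultInitScript;
    }
    if (cleanupScript.isEmpty()) {
      cleanupScript = defaultCleanupScript;
    }
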
diff --git itests/qtest/pom.xml itests/qtest/pom.xml
index 249956f..72215a9 100644
--- itests/qtest/pom.xml
+++ itests/qtest/pom.xml
@@ -420,7 +420,9 @@
resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/compiler/" className="TestParse"
logFile="${project.build.directory}/testparsegen.log"
hadoopVersion="${active.hadoop.version}"
- logDirectory="${project.build.directory}/qfile-results/positive/"/>
+ logDirectory="${project.build.directory}/qfile-results/positive/"
+ initScript="q_test_init.sql"
+ cleanupScript="q_test_cleanup.sql"/>
+ logDirectory="${project.build.directory}/qfile-results/negative/"
+ initScript="q_test_init.sql"
+ cleanupScript="q_test_cleanup.sql"/>
+ hadoopVersion="${active.hadoop.version}"
+ initScript="q_test_init.sql"
+ cleanupScript="q_test_cleanup.sql"/>
+ hadoopVersion="${active.hadoop.version}"
+ initScript="q_test_init.sql"
+ cleanupScript="q_test_cleanup.sql"/>
+ hadoopVersion="${active.hadoop.version}"
+ initScript="q_test_init.sql"
+ cleanupScript="q_test_cleanup.sql"/>
+ initScript="q_test_init.sql"
+ cleanupScript="q_test_cleanup.sql"/>
@@ -513,7 +524,8 @@
logFile="${project.build.directory}/testminitezclidrivergen.log"
logDirectory="${project.build.directory}/qfile-results/clientpositive/"
hadoopVersion="${active.hadoop.version}"
- />
+ initScript="q_test_init.sql"
+ cleanupScript="q_test_cleanup.sql"/>
@@ -533,7 +545,8 @@
logFile="${project.build.directory}/testnegativeminimrclidrivergen.log"
logDirectory="${project.build.directory}/qfile-results/clientnegative/"
hadoopVersion="${hadoopVersion}"
- />
+ initScript="q_test_init.sql"
+ cleanupScript="q_test_cleanup.sql"/>
+ logDirectory="${project.build.directory}/qfile-results/hbase-handler/positive/"
+ initScript="q_test_init.sql"
+ cleanupScript="q_test_cleanup.sql"/>
+ logDirectory="${project.build.directory}/qfile-results/hbase-handler/minimrpositive/"
+ initScript="q_test_init.sql"
+ cleanupScript="q_test_cleanup.sql"/>
+ logDirectory="${project.build.directory}/qfile-results/hbase-handler/negative"
+ initScript="q_test_init.sql"
+ cleanupScript="q_test_cleanup.sql"/>
@@ -588,7 +607,7 @@
resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/" className="TestBeeLineDriver"
logFile="${project.build.directory}/testbeelinedrivergen.log"
logDirectory="${project.build.directory}/qfile-results/beelinepositive/"
- hadoopVersion="${hadoopVersion}" />
+ hadoopVersion="${hadoopVersion}"/>
@@ -606,7 +625,8 @@
logFile="${project.build.directory}/testcontribclidrivergen.log"
logDirectory="${project.build.directory}/qfile-results/contribclientpositive"
hadoopVersion="${hadoopVersion}"
- />
+ initScript="q_test_init.sql"
+ cleanupScript="q_test_cleanup.sql"/>
+ logDirectory="${project.build.directory}/qfile-results/contribclientnegative"
+ initScript="q_test_init.sql"
+ cleanupScript="q_test_cleanup.sql"/>
diff --git itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
index 96a0de2..3a9e4fa 100644
--- itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
+++ itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
@@ -25,10 +25,11 @@
*/
public class HBaseQTestUtil extends QTestUtil {
public HBaseQTestUtil(
- String outDir, String logDir, MiniClusterType miniMr, HBaseTestSetup setup)
+ String outDir, String logDir, MiniClusterType miniMr, HBaseTestSetup setup,
+ String initScript, String cleanupScript)
throws Exception {
- super(outDir, logDir, miniMr, null);
+ super(outDir, logDir, miniMr, null, initScript, cleanupScript);
setup.preTest(conf);
super.init();
}
diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 2fefa06..b25b1f3 100644
--- itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -38,7 +38,6 @@
import java.io.PrintStream;
import java.io.Serializable;
import java.io.StringWriter;
-import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
@@ -115,6 +114,8 @@
public static final String UTF_8 = "UTF-8";
private static final Log LOG = LogFactory.getLog("QTestUtil");
+ private final String defaultInitScript = "q_test_init.sql";
+ private final String defaultCleanupScript = "q_test_cleanup.sql";
private String testWarehouse;
private final String testFiles;
@@ -142,6 +143,10 @@
private boolean miniMr = false;
private String hadoopVer = null;
private QTestSetup setup = null;
+ private boolean isSessionStateStarted = false;
+
+ private String initScript;
+ private String cleanupScript;
static {
for (String srcTable : System.getProperty("test.src.tables", "").trim().split(",")) {
@@ -225,8 +230,9 @@ public void normalizeNames(File path) throws Exception {
}
}
- public QTestUtil(String outDir, String logDir) throws Exception {
- this(outDir, logDir, MiniClusterType.none, null, "0.20");
+ public QTestUtil(String outDir, String logDir, String initScript, String cleanupScript) throws
+ Exception {
+ this(outDir, logDir, MiniClusterType.none, null, "0.20", initScript, cleanupScript);
}
public String getOutputDirectory() {
@@ -297,13 +303,14 @@ public static MiniClusterType valueForString(String type) {
}
}
- public QTestUtil(String outDir, String logDir, MiniClusterType clusterType, String hadoopVer)
+ public QTestUtil(String outDir, String logDir, MiniClusterType clusterType, String hadoopVer,
+ String initScript, String cleanupScript)
throws Exception {
- this(outDir, logDir, clusterType, null, hadoopVer);
+ this(outDir, logDir, clusterType, null, hadoopVer, initScript, cleanupScript);
}
public QTestUtil(String outDir, String logDir, MiniClusterType clusterType,
- String confDir, String hadoopVer)
+ String confDir, String hadoopVer, String initScript, String cleanupScript)
throws Exception {
this.outDir = outDir;
this.logDir = logDir;
@@ -354,6 +361,20 @@ public QTestUtil(String outDir, String logDir, MiniClusterType clusterType,
testFiles = dataDir;
+ // Use the current directory if it is not specified
+ String scriptsDir = conf.get("test.data.scripts");
+ if (scriptsDir == null) {
+ scriptsDir = new File(".").getAbsolutePath() + "/data/scripts";
+ }
+ if (initScript.isEmpty()) {
+ initScript = defaultInitScript;
+ }
+ if (cleanupScript.isEmpty()) {
+ cleanupScript = defaultCleanupScript;
+ }
+ this.initScript = scriptsDir + "/" + initScript;
+ this.cleanupScript = scriptsDir + "/" + cleanupScript;
+
overWrite = "true".equalsIgnoreCase(System.getProperty("test.output.overwrite"));
setup = new QTestSetup();
@@ -593,14 +614,15 @@ public void clearTestSideEffects() throws Exception {
}
public void cleanUp() throws Exception {
- // Drop any tables that remain due to unsuccessful runs
- for (String s : new String[] {"src", "src1", "src_json", "src_thrift",
- "src_sequencefile", "srcpart", "srcbucket", "srcbucket2", "dest1",
- "dest2", "dest3", "dest4", "dest4_sequencefile", "dest_j1", "dest_j2",
- "dest_g1", "dest_g2", "fetchtask_ioexception",
- AllVectorTypesRecord.TABLE_NAME}) {
- db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, s);
+ if(!isSessionStateStarted) {
+ startSessionState();
+ }
+ String cleanupCommands = readEntireFileIntoString(new File(cleanupScript));
+ LOG.info("Cleanup (" + cleanupScript + "):\n" + cleanupCommands);
+ if(cliDriver == null) {
+ cliDriver = new CliDriver();
}
+ cliDriver.processLine(cleanupCommands);
// delete any contents in the warehouse dir
Path p = new Path(testWarehouse);
@@ -653,119 +675,20 @@ private void runCmd(String cmd) throws Exception {
}
public void createSources() throws Exception {
-
- startSessionState();
+ if(!isSessionStateStarted) {
+ startSessionState();
+ }
conf.setBoolean("hive.test.init.phase", true);
- // Create a bunch of tables with columns key and value
- LinkedList cols = new LinkedList();
- cols.add("key");
- cols.add("value");
-
- LinkedList part_cols = new LinkedList();
- part_cols.add("ds");
- part_cols.add("hr");
- db.createTable("srcpart", cols, part_cols, TextInputFormat.class,
- IgnoreKeyTextOutputFormat.class);
-
- Path fpath;
- HashMap part_spec = new HashMap();
- for (String ds : new String[] {"2008-04-08", "2008-04-09"}) {
- for (String hr : new String[] {"11", "12"}) {
- part_spec.clear();
- part_spec.put("ds", ds);
- part_spec.put("hr", hr);
- // System.out.println("Loading partition with spec: " + part_spec);
- // db.createPartition(srcpart, part_spec);
- fpath = new Path(testFiles, "kv1.txt");
- // db.loadPartition(fpath, srcpart.getName(), part_spec, true);
- runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toUri().getPath()
- + "' OVERWRITE INTO TABLE srcpart PARTITION (ds='" + ds + "',hr='"
- + hr + "')");
- }
+ String initCommands = readEntireFileIntoString(new File(this.initScript));
+ LOG.info("Initial setup (" + initScript + "):\n" + initCommands);
+ if(cliDriver == null) {
+ cliDriver = new CliDriver();
}
- ArrayList bucketCols = new ArrayList();
- bucketCols.add("key");
- runCreateTableCmd("CREATE TABLE srcbucket(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE");
- // db.createTable("srcbucket", cols, null, TextInputFormat.class,
- // IgnoreKeyTextOutputFormat.class, 2, bucketCols);
- for (String fname : new String[] {"srcbucket0.txt", "srcbucket1.txt"}) {
- fpath = new Path(testFiles, fname);
- runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toUri().getPath()
- + "' INTO TABLE srcbucket");
- }
-
- runCreateTableCmd("CREATE TABLE srcbucket2(key int, value string) "
- + "CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE");
- // db.createTable("srcbucket", cols, null, TextInputFormat.class,
- // IgnoreKeyTextOutputFormat.class, 2, bucketCols);
- for (String fname : new String[] {"srcbucket20.txt", "srcbucket21.txt",
- "srcbucket22.txt", "srcbucket23.txt"}) {
- fpath = new Path(testFiles, fname);
- runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toUri().getPath()
- + "' INTO TABLE srcbucket2");
- }
-
- for (String tname : new String[] {"src", "src1"}) {
- db.createTable(tname, cols, null, TextInputFormat.class,
- IgnoreKeyTextOutputFormat.class);
- }
- db.createTable("src_sequencefile", cols, null,
- SequenceFileInputFormat.class, SequenceFileOutputFormat.class);
-
- Table srcThrift =
- new Table(SessionState.get().getCurrentDatabase(), "src_thrift");
- srcThrift.setInputFormatClass(SequenceFileInputFormat.class.getName());
- srcThrift.setOutputFormatClass(SequenceFileOutputFormat.class.getName());
- srcThrift.setSerializationLib(ThriftDeserializer.class.getName());
- srcThrift.setSerdeParam(serdeConstants.SERIALIZATION_CLASS, Complex.class
- .getName());
- srcThrift.setSerdeParam(serdeConstants.SERIALIZATION_FORMAT,
- TBinaryProtocol.class.getName());
- db.createTable(srcThrift);
-
- LinkedList json_cols = new LinkedList();
- json_cols.add("json");
- db.createTable("src_json", json_cols, null, TextInputFormat.class,
- IgnoreKeyTextOutputFormat.class);
-
- // load the input data into the src table
- fpath = new Path(testFiles, "kv1.txt");
- runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toUri().getPath() + "' INTO TABLE src");
-
- // load the input data into the src table
- fpath = new Path(testFiles, "kv3.txt");
- runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toUri().getPath() + "' INTO TABLE src1");
-
- // load the input data into the src_sequencefile table
- fpath = new Path(testFiles, "kv1.seq");
- runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toUri().getPath()
- + "' INTO TABLE src_sequencefile");
-
- // load the input data into the src_thrift table
- fpath = new Path(testFiles, "complex.seq");
- runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toUri().getPath()
- + "' INTO TABLE src_thrift");
-
- // load the json data into the src_json table
- fpath = new Path(testFiles, "json.txt");
- runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toUri().getPath()
- + "' INTO TABLE src_json");
-
- FileSystem localFs = FileSystem.getLocal(conf);
- // create and load data into orc table
- fpath = new Path(testFiles, AllVectorTypesRecord.TABLE_NAME);
-
- runCreateTableCmd(AllVectorTypesRecord.TABLE_CREATE_COMMAND);
- runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toUri().getPath()
- + "' INTO TABLE "+AllVectorTypesRecord.TABLE_NAME);
-
- runCmd("DROP FUNCTION IF EXISTS qtest_get_java_boolean ");
- runCmd("CREATE FUNCTION qtest_get_java_boolean "
- + " AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestGetJavaBoolean'");
+ cliDriver.processLine("set test.data.dir=" + testFiles + ";");
+ cliDriver.processLine(initCommands);
conf.setBoolean("hive.test.init.phase", false);
-
}
public void init() throws Exception {
@@ -786,33 +709,6 @@ public void init() throws Exception {
public void init(String tname) throws Exception {
cleanUp();
createSources();
-
- LinkedList cols = new LinkedList();
- cols.add("key");
- cols.add("value");
-
- LinkedList part_cols = new LinkedList();
- part_cols.add("ds");
- part_cols.add("hr");
-
- db.createTable("dest1", cols, null, TextInputFormat.class,
- IgnoreKeyTextOutputFormat.class);
- db.createTable("dest2", cols, null, TextInputFormat.class,
- IgnoreKeyTextOutputFormat.class);
-
- db.createTable("dest3", cols, part_cols, TextInputFormat.class,
- IgnoreKeyTextOutputFormat.class);
- Table dest3 = db.getTable("dest3");
-
- HashMap part_spec = new HashMap();
- part_spec.put("ds", "2008-04-08");
- part_spec.put("hr", "12");
- db.createPartition(dest3, part_spec);
-
- db.createTable("dest4", cols, null, TextInputFormat.class,
- IgnoreKeyTextOutputFormat.class);
- db.createTable("dest4_sequencefile", cols, null,
- SequenceFileInputFormat.class, SequenceFileOutputFormat.class);
}
public void cliInit(String tname) throws Exception {
@@ -865,24 +761,40 @@ public String cliInit(String tname, boolean recreate) throws Exception {
}
SessionState.start(ss);
- cliDriver = new CliDriver();
+ if(cliDriver == null) {
+ cliDriver = new CliDriver();
+ }
if (tname.equals("init_file.q")) {
ss.initFiles.add("../../data/scripts/test_init_file.sql");
}
cliDriver.processInitFiles(ss);
+
return outf.getAbsolutePath();
}
private CliSessionState startSessionState()
- throws FileNotFoundException, UnsupportedEncodingException {
+ throws IOException {
HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
"org.apache.hadoop.hive.ql.security.DummyAuthenticator");
CliSessionState ss = new CliSessionState(conf);
assert ss != null;
+ ss.in = System.in;
+ ss.out = System.out;
+ ss.err = System.out;
+ SessionState oldSs = SessionState.get();
+ if (oldSs != null && clusterType == MiniClusterType.tez) {
+ oldSs.close();
+ }
+ if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
+ oldSs.out.close();
+ }
SessionState.start(ss);
+
+ isSessionStateStarted = true;
+
return ss;
}
@@ -1571,7 +1483,7 @@ public void run() {
{
QTestUtil[] qt = new QTestUtil[qfiles.length];
for (int i = 0; i < qfiles.length; i++) {
- qt[i] = new QTestUtil(resDir, logDir, MiniClusterType.none, null, "0.20");
+ qt[i] = new QTestUtil(resDir, logDir, MiniClusterType.none, null, "0.20", "", "");
qt[i].addFile(qfiles[i]);
qt[i].clearTestSideEffects();
}
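
Taken together, the constructor resolves the two script paths roughly as in this free-standing sketch (resolveScript is a hypothetical helper written for illustration, not part of the patch):

    // Hypothetical helper mirroring the constructor logic added above.
    static String resolveScript(HiveConf conf, String name, String defaultName) {
      // test.data.scripts is set in data/conf/hive-site.xml; fall back to the CWD.
      String scriptsDir = conf.get("test.data.scripts");
      if (scriptsDir == null) {
        scriptsDir = new File(".").getAbsolutePath() + "/data/scripts";
      }
      // An empty name selects the default script shipped in data/scripts.
      return scriptsDir + "/" + (name.isEmpty() ? defaultName : name);
    }
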
diff --git ql/src/test/templates/TestCliDriver.vm ql/src/test/templates/TestCliDriver.vm
index 4776c75..f055444 100644
--- ql/src/test/templates/TestCliDriver.vm
+++ ql/src/test/templates/TestCliDriver.vm
@@ -37,9 +37,12 @@ public class $className extends TestCase {
MiniClusterType miniMR = MiniClusterType.valueForString("$clusterMode");
String hiveConfDir = "$hiveConfDir";
+ String initScript = "$initScript";
+ String cleanupScript = "$cleanupScript";
try {
String hadoopVer = "$hadoopVersion";
- qt = new QTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR, hiveConfDir, hadoopVer);
+ qt = new QTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR,
+ hiveConfDir, hadoopVer, initScript, cleanupScript);
// do a one time initialization
qt.cleanUp();
diff --git ql/src/test/templates/TestCompareCliDriver.vm ql/src/test/templates/TestCompareCliDriver.vm
index f6f43b8..fdc9565 100644
--- ql/src/test/templates/TestCompareCliDriver.vm
+++ ql/src/test/templates/TestCompareCliDriver.vm
@@ -38,9 +38,12 @@ public class $className extends TestCase {
MiniClusterType miniMR = MiniClusterType.valueForString("$clusterMode");
String hiveConfDir = "$hiveConfDir";
+ String initScript = "$initScript";
+ String cleanupScript = "$cleanupScript";
try {
String hadoopVer = "$hadoopVersion";
- qt = new QTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR, hiveConfDir, hadoopVer);
+ qt = new QTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR,
+ hiveConfDir, hadoopVer, initScript, cleanupScript);
// do a one time initialization
qt.cleanUp();
diff --git ql/src/test/templates/TestNegativeCliDriver.vm ql/src/test/templates/TestNegativeCliDriver.vm
index 991d5ac..742044a 100644
--- ql/src/test/templates/TestNegativeCliDriver.vm
+++ ql/src/test/templates/TestNegativeCliDriver.vm
@@ -35,10 +35,13 @@ public class $className extends TestCase {
static {
MiniClusterType miniMR = MiniClusterType.valueForString("$clusterMode");
+ String initScript = "$initScript";
+ String cleanupScript = "$cleanupScript";
try {
String hadoopVer = "$hadoopVersion";
- qt = new QTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR, hadoopVer);
+ qt = new QTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR, hadoopVer,
+ initScript, cleanupScript);
// do a one time initialization
qt.cleanUp();
qt.createSources();
diff --git ql/src/test/templates/TestParse.vm ql/src/test/templates/TestParse.vm
index c476536..8c7d747 100644
--- ql/src/test/templates/TestParse.vm
+++ ql/src/test/templates/TestParse.vm
@@ -35,10 +35,13 @@ public class $className extends TestCase {
static {
MiniClusterType miniMR = MiniClusterType.valueForString("$clusterMode");
+ String initScript = "$initScript";
+ String cleanupScript = "$cleanupScript";
try {
String hadoopVer = "$hadoopVersion";
- qt = new QTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR, hadoopVer);
+ qt = new QTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR, hadoopVer,
+ initScript, cleanupScript);
qt.init(null);
} catch (Exception e) {
System.err.println("Exception: " + e.getMessage());
diff --git ql/src/test/templates/TestParseNegative.vm ql/src/test/templates/TestParseNegative.vm
index f62f17e..33b238e 100755
--- ql/src/test/templates/TestParseNegative.vm
+++ ql/src/test/templates/TestParseNegative.vm
@@ -36,10 +36,13 @@ public class $className extends TestCase {
static {
MiniClusterType miniMR = MiniClusterType.valueForString("$clusterMode");
+ String initScript = "$initScript";
+ String cleanupScript = "$cleanupScript";
try {
String hadoopVer = "$hadoopVersion";
- qt = new QTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR, hadoopVer);
+ qt = new QTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR, hadoopVer,
+ initScript, cleanupScript);
} catch (Exception e) {
System.err.println("Exception: " + e.getMessage());
e.printStackTrace();