Index: src/test/e2e/hcatalog/tools/generate/generate_data.pl
===================================================================
--- src/test/e2e/hcatalog/tools/generate/generate_data.pl	(revision 1293654)
+++ src/test/e2e/hcatalog/tools/generate/generate_data.pl	(working copy)
@@ -314,18 +314,12 @@
     } elsif ($format eq "rc") {
         print $hivefp " stored as rcfile
-location '$location'
-TBLPROPERTIES (
-    'hcat.isd'='org.apache.hcatalog.rcfile.RCFileInputDriver',
-    'hcat.osd'='org.apache.hcatalog.rcfile.RCFileOutputDriver'
-);\n";
+location '$location';\n";
     } elsif ($format eq "json") {
-        print $hivefp " STORED AS
-INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat' OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'
-INPUTDRIVER 'org.apache.hcatalog.pig.drivers.LoadFuncBasedInputDriver' OUTPUTDRIVER 'org.apache.hcatalog.pig.drivers.StoreFuncBasedOutputDriver';
+        print $hivefp "
+row format serde 'org.apache.hcatalog.data.JsonSerDe'
+stored as textfile
 location '$location'
-TBLPROPERTIES ('hcat.pig.loader'='org.apache.pig.builtin.JsonLoader', 'hcat.pig.storer'='org.apache.pig.builtin.JsonStorage', 'hcat.pig.loader.args'=
-'s:chararray, i:int, d:double, m:map[chararray], bb:{t:(a:int, b:chararray)}, 'hcat.pig.args.delimiter'='\t')
 ;\n";
     } else {
         die "Unknown format $format\n";
@@ -528,9 +522,7 @@
                 row format delimited fields terminated by ':'
                 stored as textfile
-                location '$hdfsTargetDir/$tableName';\n
-                alter table $tableName set TBLPROPERTIES
-                ('hcat.pig.loader.args'=':', 'hcat.pig.storer.args'=':');\n";
+                location '$hdfsTargetDir/$tableName';\n";
 
     for (my $i = 0; $i < $numRows; $i++) {
         printf HDFS "%d:%d:%d:%ld:%.2f:%.2f:%s\n", (int(rand(2**8) - 2**7)),
@@ -669,11 +661,8 @@
             d double,
             m map<string, string>,
             bb array<struct<a: int, b: string>>)
-        STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat' OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'
-        INPUTDRIVER 'org.apache.hcatalog.pig.drivers.LoadFuncBasedInputDriver' OUTPUTDRIVER 'org.apache.hcatalog.pig.drivers.StoreFuncBasedOutputDriver'
-        location '$hdfsTargetDir/$tableName'
-        TBLPROPERTIES ('hcat.pig.loader'='org.apache.pig.builtin.JsonLoader', 'hcat.pig.storer'='org.apache.pig.builtin.JsonStorage', 'hcat.pig.loader.args'=
-'s:chararray, i:int, d:double, m:map[chararray], bb:{t:(a:int, b:chararray)}', 'hcat.pig.args.delimiter'='\t');\n";
+        STORED AS TEXTFILE
+        location '$hdfsTargetDir/$tableName';\n";
 
     open(PLAIN, ">$tableName.plain") or die("Cannot open file $tableName.hive.sql, $!\n");
     for (my $i = 0; $i < $numRows; $i++) {
Index: src/test/e2e/hcatalog/tests/pig.conf
===================================================================
--- src/test/e2e/hcatalog/tests/pig.conf	(revision 1293654)
+++ src/test/e2e/hcatalog/tests/pig.conf	(working copy)
@@ -164,10 +164,7 @@
     d double,
     m map<string, string>,
     bb array<struct<a: int, b: string>>)
-    STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat' OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'
-    INPUTDRIVER 'org.apache.hcatalog.pig.drivers.LoadFuncBasedInputDriver' OUTPUTDRIVER 'org.apache.hcatalog.pig.drivers.StoreFuncBasedOutputDriver'
-    TBLPROPERTIES ('hcat.pig.loader'='org.apache.pig.builtin.JsonLoader', 'hcat.pig.storer'='org.apache.pig.builtin.JsonStorage', 'hcat.pig.loader.args'=
-'s:chararray, i:int, d:double, m:map[chararray], bb:{t:(a:int, b:chararray)}', 'hcat.pig.args.delimiter'=' ');
+    STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat' OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat';
 \
 ,'pig' => q\a = load 'all100kjson' using org.apache.hcatalog.pig.HCatLoader();
 b = foreach a generate s, i, d;
@@ -184,11 +181,7 @@
     name string,
     age int,
     gpa double)
-stored as rcfile
-TBLPROPERTIES (
-    'hcat.isd'='org.apache.hcatalog.rcfile.RCFileInputDriver',
-    'hcat.osd'='org.apache.hcatalog.rcfile.RCFileOutputDriver'
-);
+stored as rcfile;
 \
 ,'pig' => q\a = load 'all100krc' using org.apache.hcatalog.pig.HCatLoader();
 b = foreach a generate name, age;
Index: src/test/e2e/hcatalog/tests/hadoop.conf
===================================================================
--- src/test/e2e/hcatalog/tests/hadoop.conf	(revision 1293654)
+++ src/test/e2e/hcatalog/tests/hadoop.conf	(working copy)
@@ -171,11 +171,7 @@
     name string,
     age int,
     gpa double)
-stored as rcfile
-TBLPROPERTIES (
-    'hcat.isd'='org.apache.hcatalog.rcfile.RCFileInputDriver',
-    'hcat.osd'='org.apache.hcatalog.rcfile.RCFileOutputDriver'
-);
+stored as rcfile;
 \,
 ,'hadoop' => q\
 jar :FUNCPATH:/testudf.jar org.apache.hcatalog.utils.WriteRC -libjars :HCAT_JAR: :THRIFTSERVER: all100krc hadoop_write_3
Index: src/test/e2e/hcatalog/conf/default.conf
===================================================================
--- src/test/e2e/hcatalog/conf/default.conf	(revision 1293654)
+++ src/test/e2e/hcatalog/conf/default.conf	(working copy)
@@ -62,7 +62,7 @@
     , 'hcat_data_dir' => '/user/hcat/tests/data'
     , 'hivehome' => $ENV{'PH_HIVE_HOME'}
     , 'hcathome' => $ENV{'HCAT_INSTALL_DIR'}
-    , 'hcatalog.jar' => "$ENV{HCAT_JAR},$ENV{HIVE_ROOT}/build/dist/lib/hive-serde-$hive_version.jar,$ENV{HIVE_ROOT}/build/dist/lib/hive-exec-$hive_version.jar,$ENV{PIG_HOME}/pig-0.9.2-withouthadoop.jar,$ENV{HIVE_ROOT}/build/dist/lib/hive-metastore-$hive_version.jar,$ENV{HIVE_ROOT}/build/dist/lib/libfb303-0.7.0.jar,$ENV{HIVE_ROOT}/build/dist/lib/jdo2-api-2.3-ec.jar,$ENV{HCAT_ROOT}/storage-drivers/hbase/build/ivy/lib/hbase-storage-driver/hbase-0.90.5.jar,$ENV{HCAT_ROOT}/storage-drivers/hbase/build/ivy/lib/hbase-storage-driver/zookeeper-3.4.0.jar,$ENV{HCAT_ROOT}/storage-drivers/hbase/build/hbase-storage-driver/hbase-storage-driver-0.1.0.jar,$ENV{HCAT_ROOT}/hive/external/build/dist/lib/hive-hbase-handler-$hive_version.jar"
+    , 'hcatalog.jar' => "$ENV{HCAT_JAR},$ENV{HIVE_ROOT}/build/dist/lib/hive-serde-$hive_version.jar,$ENV{HIVE_ROOT}/build/dist/lib/hive-exec-$hive_version.jar,$ENV{PIG_HOME}/pig-0.9.2-withouthadoop.jar,$ENV{HIVE_ROOT}/build/dist/lib/hive-metastore-$hive_version.jar,$ENV{HIVE_ROOT}/build/dist/lib/libfb303-0.7.0.jar,$ENV{HIVE_ROOT}/build/dist/lib/jdo2-api-2.3-ec.jar,$ENV{HCAT_ROOT}/storage-drivers/hbase/build/ivy/lib/hbase-storage-driver/hbase-0.92.0-SNAPSHOT.jar,$ENV{HCAT_ROOT}/storage-drivers/hbase/build/ivy/lib/hbase-storage-driver/zookeeper-3.4.3.jar,$ENV{HCAT_ROOT}/storage-drivers/hbase/build/hbase-storage-driver/hbase-storage-driver-0.1.0.jar,$ENV{HCAT_ROOT}/hive/external/build/dist/lib/hive-hbase-handler-$hive_version.jar"
 
     #PIG
     , 'testconfigpath' => "$ENV{PH_CLUSTER}"
@@ -72,7 +72,7 @@
     , 'pigpath' => "$ENV{PIG_HOME}"
     , 'pigjar' => "$ENV{PIG_JAR}" # Pig jar that doesn't have Antlr
     , 'oldpigpath' => "$ENV{PH_OLDPIG}"
-    , 'additionaljars' => "$ENV{HCAT_ROOT}/build/hcatalog/hcatalog-0.4.0.jar:$ENV{HCAT_ROOT}/hive/external/build/metastore/hive-metastore-$hive_version.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/libthrift.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/hive-exec-$hive_version.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/libfb303.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/jdo2-api-2.3-ec.jar::$ENV{HCAT_ROOT}/storage-drivers/hbase/build/ivy/lib/hbase-storage-driver/hbase-0.90.5.jar:$ENV{HCAT_ROOT}/storage-drivers/hbase/build/ivy/lib/hbase-storage-driver/zookeeper-3.4.0.jar:$ENV{HCAT_ROOT}/storage-drivers/hbase/build/hbase-storage-driver/hbase-storage-driver-0.1.0.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/hive-hbase-handler-$hive_version.jar:$ENV{'HCAT_INSTALL_DIR'}/etc/hcatalog"
+    , 'additionaljars' => "$ENV{HCAT_ROOT}/build/hcatalog/hcatalog-0.4.0.jar:$ENV{HCAT_ROOT}/hive/external/build/metastore/hive-metastore-$hive_version.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/libthrift.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/hive-exec-$hive_version.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/libfb303.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/jdo2-api-2.3-ec.jar::$ENV{HCAT_ROOT}/storage-drivers/hbase/build/ivy/lib/hbase-storage-driver/hbase-0.92.0-SNAPSHOT.jar:$ENV{HCAT_ROOT}/storage-drivers/hbase/build/ivy/lib/hbase-storage-driver/zookeeper-3.4.3.jar:$ENV{HCAT_ROOT}/storage-drivers/hbase/build/hbase-storage-driver/hbase-storage-driver-0.1.0.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/hive-hbase-handler-$hive_version.jar:$ENV{'HCAT_INSTALL_DIR'}/etc/hcatalog"
 
     #HADOOP
     , 'hadoopHome' => "$ENV{HCAT_ROOT}/lib"
Index: src/test/e2e/hcatalog/build.xml
===================================================================
--- src/test/e2e/hcatalog/build.xml	(revision 1293654)
+++ src/test/e2e/hcatalog/build.xml	(working copy)
@@ -56,6 +56,7 @@
+